[{"data":1,"prerenderedAt":563},["ShallowReactive",2],{"tag-count-blender-en":3,"tag-blender-en":4,"tags-sidebar-en":18,"posts-tag-blender-en-1":90,"tags-header-en":505,"tags-footer-en":534},15,{"id":5,"title":6,"body":7,"description":7,"extension":8,"meta":9,"name":6,"navigation":13,"path":14,"seo":15,"slug":16,"stem":16,"__hash__":17},"tag/blender.json","Blender",null,"json",{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":11,"url":12},"public",{"posts":3},"https://blog.cg-wire.com/tag/blender/",true,"/blender",{"description":7},"blender","NGhuNL5GEEpGrAt0Y1hoiAFOBRkB8zKBFq90XcJR47E",[19,23,35,46,57,69,80],{"id":5,"title":6,"body":7,"description":7,"extension":8,"meta":20,"name":6,"navigation":13,"path":14,"seo":22,"slug":16,"stem":16,"__hash__":17},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":21,"url":12},{"posts":3},{"description":7},{"id":24,"title":25,"body":7,"description":7,"extension":8,"meta":26,"name":30,"navigation":13,"path":31,"seo":32,"slug":33,"stem":33,"__hash__":34},"tag/company.json","Company",{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":27,"url":29},{"posts":28},35,"https://blog.cg-wire.com/tag/company/","Company 
News","/company",{"description":7},"company","CSg2BLNemwEASf_RYxGHsJOXTxg3xNUldTg2Upc7ZC0",{"id":36,"title":37,"body":7,"description":7,"extension":8,"meta":38,"name":37,"navigation":13,"path":42,"seo":43,"slug":44,"stem":44,"__hash__":45},"tag/customer-stories.json","Customer Stories",{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":39,"url":41},{"posts":40},3,"https://blog.cg-wire.com/tag/customer-stories/","/customer-stories",{"description":7},"customer-stories","vO2w4OuionBXR7-dsFeWvCucjpG7VuCqGV3NZOYyVw0",{"id":47,"title":48,"body":7,"description":7,"extension":8,"meta":49,"name":52,"navigation":13,"path":53,"seo":54,"slug":55,"stem":55,"__hash__":56},"tag/glossary.json","Glossary",{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":50,"url":51},{"posts":3},"https://blog.cg-wire.com/tag/glossary/","Animation Glossary","/glossary",{"description":7},"glossary","ahYw1ulGqHh4X1VqtWmRXHQzLH25NsXPHgKJ8kwOMwA",{"id":58,"title":59,"body":7,"description":7,"extension":8,"meta":60,"name":64,"navigation":13,"path":65,"seo":66,"slug":67,"stem":67,"__hash__":68},"tag/pipeline.json","Pipeline",{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":61,"url":63},{"posts":62},77,"https://blog.cg-wire.com/tag/pipeline/","Pipeline 
Automation","/pipeline",{"description":7},"pipeline","qa7lmThepbMYAJ--m7WHgcY7p9lpC51BDn7imjnLoHY",{"id":70,"title":71,"body":7,"description":7,"extension":8,"meta":72,"name":71,"navigation":13,"path":76,"seo":77,"slug":78,"stem":78,"__hash__":79},"tag/production-management.json","Production Management",{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":73,"url":75},{"posts":74},52,"https://blog.cg-wire.com/tag/production-management/","/production-management",{"description":7},"production-management","CK3g20iyLvLAN6TiR91N008bRCUY5R5T0A-dnAm-nfI",{"id":81,"title":82,"body":7,"description":7,"extension":8,"meta":83,"name":82,"navigation":13,"path":86,"seo":87,"slug":88,"stem":88,"__hash__":89},"tag/resources.json","Resources",{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":84,"url":85},{"posts":40},"https://blog.cg-wire.com/tag/resources/","/resources",{"description":7},"resources","uMVK_T3_oD87qJ7NOx5cVBCT5uXC9zFj44ZZatYH5RQ",[91,127,154,181,208,233,260,287,313,337,362,387,413,449,481],{"id":92,"title":93,"authors":94,"body":7,"description":7,"extension":8,"html":100,"meta":101,"navigation":13,"path":117,"published_at":118,"seo":119,"slug":120,"stem":121,"tags":122,"__hash__":126,"uuid":102,"comment_id":103,"feature_image":104,"featured":105,"visibility":10,"created_at":106,"updated_at":107,"custom_excerpt":108,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":111,"primary_tag":112,"url":114,"excerpt":108,"reading_time":115,"access":13,"comments":105,"og_image":7,"og_title":7,"og_descriptio
n":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":116},"ghost/posts:self-hosted-blender-render-farm.json","Self-Hosting a Blender Render Farm Using Flamenco In 2026",[95],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"630632b2ca5910003d4a70af","Basile Samel","basile","https://blog.cg-wire.com/author/basile/","\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🖥️\u003C/div>\u003Cdiv class=\"kg-callout-text\">Turn idle machines into a powerful Blender render farm without touching the cloud.\u003C/div>\u003C/div>\u003Cp>When was the last time you almost missed a deadline because of rendering?\u003C/p>\u003Cp>Every time you open Blender, your workstation sounds like a jet engine preparing for takeoff, and your entire film worth months of work is held hostage by a single progress bar.\u003C/p>\u003Cp>Meanwhile, your old college laptop sits in a box gathering dust. It's not a powerhouse, but it has a GPU. It has RAM. It's a perfectly functional computer doing absolutely nothing while you panic.\u003C/p>\u003Cp>The concept of a \"render farm\" can sound intimidating to one-person studios. 
You might imagine server racks in a chilled room, expensive licenses, and IT professionals shouting about IP addresses.\u003C/p>\u003Cp>But in the modern Blender ecosystem, that's no longer the reality.\u003C/p>\u003Cp>In this article, \u003Cstrong>I'm going to walk you through how to turn old devices into a unified rendering system using \u003Cem>Flamenco\u003C/em>.\u003C/strong> We will demystify the network setup and get you rendering on multiple machines in a few hours.\u003C/p>\u003Chr>\u003Ch2 id=\"why-self-host-a-render-farm\">Why Self-Host a Render Farm?\u003C/h2>\u003Cp>Before we start plugging in Ethernet cables, let's talk about why you should bother. You might think, \"Why not just send everything to a cloud farm?\" Cloud farms are amazing, but having a local, self-hosted render farm changes your workflow in three fundamental ways.\u003C/p>\u003Cp>When you pay for a cloud farm, you are paying for the final output. \u003Ca href=\"https://blog.cg-wire.com/blender-kitsu-low-res-preview/\">This psychologically discourages you from test rendering\u003C/a>. \u003Cstrong>You become afraid to hit \"Render\" until you are 100% sure everything is perfect.\u003C/strong>\u003C/p>\u003Cp>When you own the farm, the cost of a render is electricity. \u003Ca href=\"https://blog.cg-wire.com/getting-started-with-blender-rendering/\">You can render a rough animation\u003C/a> at 50% resolution just to check the timing or lighting. \u003Cstrong>This freedom allows you to iterate faster.\u003C/strong> You stop guessing and start testing.\u003C/p>\u003Cp>Sometimes, working on a commercial project for a tech client with an NDA is so strict you aren't allowed to even whisper the product name. 
\u003Cstrong>Uploading those assets to a third-party cloud server - even a secure one - can sometimes violate strict NDA contracts.\u003C/strong> Keeping your data on your local network (LAN) ensures that no pixels leave your studio until you say so.\u003C/p>\u003Cp>There is a specific kind of agony in uploading a 2GB project file to the cloud, waiting for it to render, downloading the frames, and realizing you left a physics cache unbaked. \u003Cstrong>With a local farm like Flamenco, if you spot a mistake, you just hit \"Cancel,\" fix it, and hit \"Render\" again. No upload times, no download times.\u003C/strong> It feels like an extension of your workstation.\u003C/p>\u003Chr>\u003Ch2 id=\"what-is-blender-flamenco\">What is Blender Flamenco?\u003C/h2>\u003Cp>Setting up a render farm from scratch \u003Ca href=\"https://blog.cg-wire.com/blender-programmatic-rendering/\">used to involve complex scripting\u003C/a> or expensive third-party software. Now, we have Blender Flamenco.\u003C/p>\u003Cp>\u003Cstrong>Flamenco is Blender's open-source render farm.\u003C/strong> It's extremely easy to setup: the manager is the brain holding the list of tasks (frames to render) and tells the other computers what to do. The workers are your extra laptops or desktops. They listen to the Manager, ask for a frame, render it, save it, and ask for another.\u003C/p>\u003Cp>Flamenco is designed to be zero-config. It practically discovers itself on your network. If you can install Blender, you can set up Flamenco.\u003C/p>\u003Chr>\u003Ch2 id=\"1-the-setup\">1. The Setup\u003C/h2>\u003Cp>For this tutorial, we start with the simplest configuration possible with our desktop computer acting both as manager and worker. 
We'll later see how to add our laptop.\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Install Blender\u003C/strong> - Ensure your computer has Blender installed.\u003C/li>\u003Cli>\u003Cstrong>Download Flamenco\u003C/strong> - Go to the Flamenco website and download the package for your OS. Extract it to a folder.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-cec7140f-c6aa-4e18-83fb-be86e5a39ac7.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1064\" height=\"721\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-cec7140f-c6aa-4e18-83fb-be86e5a39ac7.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-cec7140f-c6aa-4e18-83fb-be86e5a39ac7.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-cec7140f-c6aa-4e18-83fb-be86e5a39ac7.png 1064w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"2-run-flamenco-manager\">2. Run Flamenco Manager\u003C/h2>\u003Col>\u003Cli>Open the Flamenco folder you extracted.\u003C/li>\u003Cli>Double-click \u003Ccode>flamenco-manager\u003C/code>.\u003C/li>\u003Cli>A terminal window will pop up with some text logs.\u003C/li>\u003Cli>Go through the configuration wizard to set up the job folder where you'll upload your blend files to render.\u003C/li>\u003Cli>Shortly after, your web browser should open automatically to \u003Ccode>http://localhost:8080\u003C/code>. This is the Flamenco web interface.\u003C/li>\u003C/ol>\u003Cp>If you see a friendly, dark-themed dashboard, congratulations. You are half a server admin already. 
The Manager is alive.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-ac803a05-e189-4c17-9fe9-d5749f916aa0.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1319\" height=\"821\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-ac803a05-e189-4c17-9fe9-d5749f916aa0.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-ac803a05-e189-4c17-9fe9-d5749f916aa0.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-ac803a05-e189-4c17-9fe9-d5749f916aa0.png 1319w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>The manager will tell you to download the addon. Do it now as we'll need it for step 4.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-ccd6a3fb-4abd-469e-a566-5adfddf76196.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1064\" height=\"721\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-ccd6a3fb-4abd-469e-a566-5adfddf76196.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-ccd6a3fb-4abd-469e-a566-5adfddf76196.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-ccd6a3fb-4abd-469e-a566-5adfddf76196.png 1064w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"3-the-worker\">3. 
The Worker\u003C/h2>\u003Cp>Now, leave the manager running and double-click \u003Ccode>flamenco-worker\u003C/code>.\u003C/p>\u003Cp>That's it.\u003C/p>\u003Cp>The Worker will scan your local network, find the Manager running on the same computer, and introduce itself. If you look back at your Desktop's web browser (the Manager interface), you should see it appear in the \"Workers\" tab, listed as \"Idle\" and ready for duty.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-6bad58f1-615a-4a7b-8aff-38f07279ebe0.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1319\" height=\"821\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-6bad58f1-615a-4a7b-8aff-38f07279ebe0.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-6bad58f1-615a-4a7b-8aff-38f07279ebe0.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-6bad58f1-615a-4a7b-8aff-38f07279ebe0.png 1319w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>You should also run \u003Ccode>flamenco-worker\u003C/code> on your Desktop! Your main computer can render and manage at the same time.\u003C/p>\u003Chr>\u003Ch2 id=\"4-add-the-blend-file-and-render\">4. Add the Blend File and Render\u003C/h2>\u003Cp>The stage is set. Now, we can get to work!\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Open Blender\u003C/strong> on your Desktop.\u003C/li>\u003Cli>\u003Cstrong>Enable the Addon\u003C/strong> - Go to Edit &gt; Preferences &gt; Add-ons &gt; Install from Disk. 
Search for the flamenco zip file you downloaded during the manager setup.\u003C/li>\u003Cli>\u003Cstrong>Link the Manager\u003C/strong> - In the Flamenco add-on preferences, copy/paste the manager's URL address.\u003C/li>\u003Cli>\u003Cstrong>Save Your File\u003C/strong> - Save your \u003Ccode>.blend\u003C/code> file in the configured job folder.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-88504c81-44cf-4d32-a374-0b2dc6746b56.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"724\" height=\"732\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-88504c81-44cf-4d32-a374-0b2dc6746b56.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-88504c81-44cf-4d32-a374-0b2dc6746b56.png 724w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>In the Render Properties tab in Blender, scroll down to the \u003Cstrong>Flamenco\u003C/strong> panel.\u003C/p>\u003Col>\u003Cli>Click \u003Cstrong>\"Fetch Job Types\"\u003C/strong>.\u003C/li>\u003Cli>Select \u003Cstrong>\"Simple Render\"\u003C/strong>.\u003C/li>\u003Cli>Hit \u003Cstrong>\"Submit to Flamenco\"\u003C/strong>.\u003C/li>\u003C/ol>\u003Cp>Now, tab over to your web browser. You will see the job pop up. The status bars on your \"Workers\" list will turn green. 
Your Desktop will grab one frame to render at a time.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-6e7fa2fb-b997-4f6f-ba60-bcc3c70d5bb0.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1319\" height=\"918\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-6e7fa2fb-b997-4f6f-ba60-bcc3c70d5bb0.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-6e7fa2fb-b997-4f6f-ba60-bcc3c70d5bb0.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-6e7fa2fb-b997-4f6f-ba60-bcc3c70d5bb0.png 1319w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"5-bringing-in-the-laptop\">5. Bringing in the Laptop\u003C/h2>\u003Cp>Now, to add your dusty laptop to the farm.\u003C/p>\u003Cp>Here is the single most actionable piece of advice I can give you, and it is where 90% of beginners fail: \u003Cstrong>All computers must see the files in the exact same place.\u003C/strong>\u003C/p>\u003Cp>If your texture is located at \u003Ccode>C:\\Users\\Dave\\Texture.png\u003C/code> on your desktop, your laptop \u003Cem>cannot\u003C/em> access that path. The laptop doesn't have a user named Dave, and it doesn't have the file on its C drive.\u003C/p>\u003Cp>You need a shared network folder, typically through a NAS. Depending on your operating system, the steps are similar but will slightly differ:\u003C/p>\u003Col>\u003Cli>Connect your desktop and laptop via Ethernet cable\u003C/li>\u003Cli>Create a NAS folder on your Desktop called \u003Ccode>RenderFarm\u003C/code>.\u003C/li>\u003Cli>Right-click it &gt; \u003Cstrong>Properties\u003C/strong> &gt; \u003Cstrong>Sharing\u003C/strong> &gt; \u003Cstrong>Share\u003C/strong>. 
Give read/write permission to your user.\u003C/li>\u003Cli>\u003Cstrong>Map the Network Drive:\u003C/strong> On your Desktop, map this folder to a drive letter, say \u003Ccode>Z:\u003C/code>. On your Laptop, navigate to the Desktop's network share and map it to **the same letter \u003Ccode>Z:**\u003C/code>.\u003C/li>\u003C/ol>\u003Cp>Now, when you save your Blender file to \u003Ccode>Z:\\RenderFarm\\MyProject.blend\u003C/code>, both computers see it at \u003Ccode>Z:\\RenderFarm\\MyProject.blend\u003C/code>. The path is absolute and identical.\u003C/p>\u003Cp>Now, leave the Desktop running and move over to \u003Cstrong>Computer B (Laptop)\u003C/strong>.\u003C/p>\u003Col>\u003Cli>Make sure your \u003Ccode>Z:\u003C/code> drive (or whatever shared storage you set up) is accessible. Open a file inside it just to be sure.\u003C/li>\u003Cli>Install and open the Flamenco folder on the laptop.\u003C/li>\u003Cli>Make sure you have the same Blender version installed as the one on your desktop.\u003C/li>\u003Cli>Double-click \u003Ccode>flamenco-worker\u003C/code>.\u003C/li>\u003C/ol>\u003Cp>That's it.\u003C/p>\u003Cp>The Worker will scan your local network and find the Manager running on the Desktop.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-90501d50-29c3-4d8f-9b54-511e6c674739.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1504\" height=\"932\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-90501d50-29c3-4d8f-9b54-511e6c674739.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-90501d50-29c3-4d8f-9b54-511e6c674739.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-90501d50-29c3-4d8f-9b54-511e6c674739.png 
1504w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Flamenco will now automatically orchestrate jobs between your computers.\u003C/p>\u003Cp>If you do not have access to a NAS or do not wish to purchase one, you can have a look at installing a free Samba server on a Linux workstation. Using cloud storage isn't possible because Flamenco doesn't handle asynchronous services, unless you create your own custom job type. We'll see how to do that \u003Ca href=\"https://blog.cg-wire.com/\">in a future article\u003C/a>, using Kitsu as an asynchronous \u003Ca href=\"https://blog.cg-wire.com/animation-asset-storage/\">asset storage server\u003C/a>.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion-knowing-when-to-scale\">Conclusion: Knowing When to Scale\u003C/h2>\u003Cp>We have covered the hardware setup, the crucial shared storage logic, and the software installation. If you have followed along, \u003Cstrong>you have a functioning render farm in your house and your dusty laptop is now a productive member of your team.\u003C/strong>\u003C/p>\u003Cp>Flamenco makes the barrier to entry for self-hosted rendering incredibly low. It respects your privacy, costs nothing but electricity, and allows you to squeeze every ounce of performance out of the hardware you already own.\u003C/p>\u003Cp>But there is a limit on what you can achieve by yourself.\u003C/p>\u003Cp>Eventually, you will hit a deadline where even your Desktop + Laptop combo isn't enough. Maybe you need to render a 4K sequence with heavy volumetrics in 24 hours and your home farm estimates a completion time of 3 weeks. This is the ceiling of self-hosting.\u003C/p>\u003Cp>When you hit this wall, you don't need to buy five more computers. \u003Cstrong>That's when you transition to a service like Ranch Computing\u003C/strong> that allows you to access hundreds of CPU/GPU nodes instantly. 
Your home farm is a great daily driver that's perfect for tests, previews, and lighter projects, while a cloud render farm is invaluable for quickly rendering high-quality deliverables to your clients.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":102,"comment_id":103,"feature_image":104,"featured":105,"visibility":10,"created_at":106,"updated_at":107,"custom_excerpt":108,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":111,"primary_tag":112,"url":114,"excerpt":108,"reading_time":115,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":116},"80ad6c13-1312-46ac-a74b-94e022668680","695bb702c665470001df4dcd","https://images.unsplash.com/photo-1683322499436-f4383dd59f5a?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDd8fGRhdGElMjBjZW50ZXJ8ZW58MHx8fHwxNzY3NjE4NDAxfDA&ixlib=rb-4.1.0&q=80&w=2000",false,"2026-01-05T14:05:06.000+01:00","2026-02-20T06:04:52.000+01:00","Learn how to build a self-hosted Blender render farm using Flamenco. 
This guide walks through setup, shared storage, workers, and scaling strategies to help artists render faster using the hardware they already own.","\u003C!-- Prism.js theme (syntax colors) -->\n\u003Clink rel=\"stylesheet\" href=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/themes/prism.min.css\">\n\n\u003C!-- Toolbar plugin styles (for the Copy button) -->\n\u003Clink rel=\"stylesheet\" href=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/plugins/toolbar/prism-toolbar.min.css\">\n\n\u003C!-- (Optional) Line-numbers styles -->\n\u003C!-- \u003Clink rel=\"stylesheet\" href=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/plugins/line-numbers/prism-line-numbers.min.css\"> -->\n\n\u003Cstyle>\n/* Tweak code block appearance a bit (keeps theme styles intact) */\npre[class*=\"language-\"] {\n  border-radius: 8px;\n  overflow: auto;\n}\n\n/* ✅ Always wrap long lines (no horizontal scroll needed) */\npre[class*=\"language-\"],\npre[class*=\"language-\"] code {\n  white-space: pre-wrap;    /* preserve indentation but allow wrapping */\n  word-break: break-word;   /* break long tokens if needed */\n  overflow-wrap: anywhere;  /* last-resort wrapping */\n}\n\n/* Improve toolbar (Copy button) spacing/looks */\ndiv.code-toolbar > .toolbar {\n  opacity: 1;\n  right: 6px;\n  top: 6px;\n}\ndiv.code-toolbar > .toolbar .toolbar-item > button {\n  background: #1f2937;\n  color: #fff;\n  border-radius: 6px;\n  padding: 6px 10px;\n  font-size: 12px;\n}\ndiv.code-toolbar > .toolbar .toolbar-item > button:hover {\n  filter: brightness(1.1);\n}\n\n/* (Optional) Auto line numbers on all code blocks\n   If you want line numbers, uncomment both this and the CSS/JS includes above/below. 
*/\n/*\npre[class*=\"language-\"] {\n  padding-left: 3.25em;\n}\n*/\n\u003C/style>","\u003C!-- Prism core -->\n\u003Cscript defer src=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/components/prism-core.min.js\">\u003C/script>\n\u003Cscript defer src=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/plugins/autoloader/prism-autoloader.min.js\">\u003C/script>\n\n\u003C!-- Toolbar + Copy-to-Clipboard plugins -->\n\u003Cscript defer src=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/plugins/toolbar/prism-toolbar.min.js\">\u003C/script>\n\u003Cscript defer src=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/plugins/copy-to-clipboard/prism-copy-to-clipboard.min.js\">\u003C/script>\n\n\u003C!-- (Optional) Line-numbers plugin -->\n\u003C!-- \u003Cscript defer src=\"https://cdn.jsdelivr.net/npm/prismjs@1.29.0/plugins/line-numbers/prism-line-numbers.min.js\">\u003C/script> -->\n\n\u003Cscript>\n  // Configure autoloader to fetch language definitions (bash, python, etc.)\n  window.Prism = window.Prism || {};\n  Prism.plugins = Prism.plugins || {};\n  Prism.plugins.autoloader = Prism.plugins.autoloader || {};\n  Prism.plugins.autoloader.languages_path = 'https://cdn.jsdelivr.net/npm/prismjs@1.29.0/components/';\n\n  // OPTIONAL: If you want line numbers on every block automatically, uncomment:\n  /*\n  document.addEventListener('DOMContentLoaded', function () {\n    document.querySelectorAll('pre > code').forEach(function (code) {\n      const pre = code.parentElement;\n      pre.classList.add('line-numbers');\n    });\n  });\n  
*/\n\u003C/script>",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},"5fff0e54653a0c003924f7f2","https://blog.cg-wire.com/self-hosted-blender-render-farm/",7,"\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@scottrodgerson?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Scott Rodgerson\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/self-hosted-blender-render-farm","2026-01-19T10:00:41.000+01:00",{"title":93},"self-hosted-blender-render-farm","posts/self-hosted-blender-render-farm",[123,124],{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"69c20ddbcb09d8000107cfe5","DCPLn1PWShGHKlv5NXuil2qtBDL7tnabWDmi33KjLoc",{"id":128,"title":129,"authors":130,"body":7,"description":7,"extension":8,"html":132,"meta":133,"navigation":13,"path":145,"published_at":146,"seo":147,"slug":148,"stem":149,"tags":150,"__hash__":153,"uuid":134,"comment_id":135,"feature_image":136,"featured":105,"visibility":10,"created_at":137,"updated_at":138,"custom_excerpt":139,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":140,"primary_tag":141,"url":142,"excerpt":139,"reading_time":143,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":144},"ghost/posts:blender-shaders-explained.json","Working with Blender Shaders (2026): Nodes & 
Scripting",[131],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🎨\u003C/div>\u003Cdiv class=\"kg-callout-text\">Shaders are not magic, they’re visual recipes you can control and automate.\u003C/div>\u003C/div>\u003Cp>It's easy to panic the first time you hear the word \u003Cem>shader\u003C/em>. Someone mentions GLSL, GPUs start sweating, and suddenly you're imagining walls of unreadable code and your computer fan screaming for mercy.\u003C/p>\u003Cp>oHere's the part no one tells you early enough: you don't need to be a mathematician or a graphics programmer to work with shaders. You're not required to write low-level GPU code or understand every equation behind light physics. Blender doesn't expect that from you. Instead, it gives you nodes: visual building blocks that behave more like Lego than code. You plug things together, see the result instantly, and adjust until it feels right.\u003C/p>\u003Cp>Think of shaders less as code and more as recipes. You're mixing values, textures, and logic to describe how a surface should react to light. Sometimes you'll follow a known recipe, sometimes you'll improvise, and sometimes you'll break things just to see what happens. 
It's how you'll learn.\u003C/p>\u003Cp>\u003Cstrong>In this article, we're going to demystify what shading actually is, strip away the fear around it, and explore how to manipulate shaders procedurally using Blender's node system or a bit of scripting for an animation pipeline.\u003C/strong> By the end, shading won't feel like a forbidden room anymore.\u003C/p>\u003Chr>\u003Ch2 id=\"whats-a-shader\">\u003Cstrong>What's a Shader?\u003C/strong>\u003C/h2>\u003Cp>To understand shaders, we have to stop thinking about \"colors\" and start thinking about \"physics.\"\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/hard-surface-modeling/\">\u003Cu>If you paint a wooden chair red in the real world\u003C/u>\u003C/a>, you aren't just changing its color. You are adding a layer of material that interacts with light. That red paint has a specific roughness (how much it scatters light), a specific specularity (how shiny it is), and a specific refractive index.\u003C/p>\u003Cp>\u003Cstrong>A shader is a set of instructions that tells the computer how to simulate that light interaction.\u003C/strong>\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-296bf085-924e-40f9-92fc-346c5dc31de0.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"1067\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-296bf085-924e-40f9-92fc-346c5dc31de0.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-296bf085-924e-40f9-92fc-346c5dc31de0.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-296bf085-924e-40f9-92fc-346c5dc31de0.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003Cfigcaption>\u003Ci>\u003Cem 
class=\"italic\" style=\"white-space: pre-wrap;\">Source: TurboSquid\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>When a ray of light from your digital sun hits the surface of your object, the shader steps in and asks:\u003C/p>\u003Cul>\u003Cli>\"Are you bouncing off?\" (Reflection)\u003C/li>\u003Cli>\"Are you going through?\" (Transmission/Glass)\u003C/li>\u003Cli>\"Are you getting trapped inside?\" (Absorption)\u003C/li>\u003Cli>\"Are you scattering under the skin?\" (Subsurface Scattering)\u003C/li>\u003C/ul>\u003Cp>If you're modeling a wet cobblestone street, a simple image texture makes it look like a flat photo of a street. A shader tells the renderer that the water in the cracks is perfectly reflective and smooth, while the stone is rough and dull. It tells the light to bounce differently off the wet parts than the dry parts.\u003Ca href=\"https://blog.cg-wire.com/how-light-shapes-emotion-in-animation/\"> \u003Cu>Light shapes reality.\u003C/u>\u003C/a>\u003C/p>\u003Chr>\u003Ch2 id=\"why-you-must-master-shader-nodes\">\u003Cstrong>Why You Must Master Shader Nodes\u003C/strong>\u003C/h2>\u003Cp>You might ask, \"Why not just download textures?\"\u003C/p>\u003Cp>Photo-scanning is great, but procedural shading gives you three superpowers that static images cannot match.\u003C/p>\u003Cp>When you use an image texture (a JPG or PNG), you are limited by pixels. Zoom in too close to a wall, and it becomes blurry.\u003C/p>\u003Cp>Shaders use math. \u003Cstrong>Math has no resolution limit.\u003C/strong> You can zoom into a procedural scratch on metal until you see the microscopic grooves, and it will remain crisp. Even if you have a model you're proud of, with clean topology and nice proportions, it'll still look flat without shaders.\u003C/p>\u003Cp>Blender's shader nodes make it \u003Cstrong>easy to tweak your textures in a consistent way\u003C/strong>. Let's say you are texturing a spaceship: you paint rust onto the hull using a texture map. 
Your Art Director walks in and says, \"Great, but the ship looks too old. Reduce the rust by 50%.\" If you hand-painted that, you have to start over or spend hours erasing. With shader nodes, you simply locate the \"Rust Amount\" value you created and slide it from \u003Ccode>1.0\u003C/code> to \u003Ccode>0.5\u003C/code>. Done.\u003C/p>\u003Cp>Static textures look frozen, but \u003Cstrong>shaders can also be animated\u003C/strong>. You can build a shader setup where moss grows on a rock over time based on the frame number, or where a shield glows brighter as it gets hit. Shaders allow your materials to react to the environment.\u003C/p>\u003Cp>For all these reasons, learning to master shader nodes is an incredible unlock for professional artists working with tight deadlines.\u003C/p>\u003Chr>\u003Ch2 id=\"the-different-types-of-shader-nodes\">\u003Cstrong>The Different Types of Shader Nodes\u003C/strong>\u003C/h2>\u003Cp>Blender's node system works like a flow chart. You click \u003Ccode>Add\u003C/code> to add nodes and connect them together. Data flows from left to right. 
To understand how to leverage each feature, you need to understand the different node types available.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-2573386d-adc9-4979-a848-89d1cae3645e.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"900\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-2573386d-adc9-4979-a848-89d1cae3645e.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-2573386d-adc9-4979-a848-89d1cae3645e.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-2573386d-adc9-4979-a848-89d1cae3645e.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Ch3 id=\"1-input-nodes\">\u003Cstrong>1. Input Nodes\u003C/strong>\u003C/h3>\u003Cp>Input nodes provide data from the scene, object, geometry, or user-defined values into the shader network.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Texture Coordinate\u003C/strong> - Provides UV, object, generated, and camera coordinates + use UV output to correctly map an image texture onto a UV-unwrapped model\u003C/li>\u003Cli>\u003Cstrong>Geometry\u003C/strong> - Outputs geometric information such as normals and pointiness + use Pointiness to create dirt accumulation in crevices\u003C/li>\u003Cli>\u003Cstrong>Fresnel\u003C/strong> - Calculates view-angle-based reflectivity + use it to create stronger reflections on the edges of glass\u003C/li>\u003Cli>\u003Cstrong>Object Info\u003C/strong> - Supplies per-object data like random values or object color + use Random output to give each object a slightly different color\u003C/li>\u003Cli>\u003Cstrong>Value\u003C/strong> - Outputs a constant numerical value + use it to control roughness with a single 
slider\u003C/li>\u003Cli>\u003Cstrong>Color\u003C/strong> - Outputs a constant color value + use it as a base color for a stylized material\u003C/li>\u003C/ul>\u003Ch3 id=\"2-output-nodes\">\u003Cstrong>2. Output Nodes\u003C/strong>\u003C/h3>\u003Cp>Output nodes define the final result of a shader and connect the node network to Blender’s rendering system.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Material Output\u003C/strong> - Outputs the final surface, volume, and displacement data + connect a Principled BSDF to the Surface input\u003C/li>\u003C/ul>\u003Ch3 id=\"3-shader-nodes\">\u003Cstrong>3. Shader Nodes\u003C/strong>\u003C/h3>\u003Cp>Shader nodes define how light interacts with a surface, including reflection, refraction, and emission.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Principled BSDF\u003C/strong> - Physically based all-in-one surface shader + create realistic metal, plastic, or skin materials\u003C/li>\u003Cli>\u003Cstrong>Diffuse BSDF\u003C/strong> - Produces matte, non-reflective surfaces + use for chalk, clay, or unpolished stone\u003C/li>\u003Cli>\u003Cstrong>Glossy BSDF\u003C/strong> - Produces mirror-like reflections + use for polished metal or mirrors\u003C/li>\u003Cli>\u003Cstrong>Glass BSDF\u003C/strong> - Combines refraction and reflection + use for windows or glass bottles\u003C/li>\u003Cli>\u003Cstrong>Emission\u003C/strong> - Emits light from a surface + use for screens, LEDs, or neon signs\u003C/li>\u003Cli>\u003Cstrong>Mix Shader\u003C/strong> - Blends two shader outputs + mix diffuse and glossy shaders for worn metal\u003C/li>\u003C/ul>\u003Ch3 id=\"4-displacement-nodes\">\u003Cstrong>4. 
Displacement Nodes\u003C/strong>\u003C/h3>\u003Cp>Displacement nodes alter surface detail by modifying geometry or shading normals.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Displacement\u003C/strong> - Performs true geometric displacement + create real depth in a brick wall using a height map (Cycles)\u003C/li>\u003Cli>\u003Cstrong>Bump\u003C/strong> - Simulates surface detail using normal perturbation + add fine scratches without increasing geometry\u003C/li>\u003Cli>\u003Cstrong>Normal Map\u003C/strong> - Converts normal textures into usable normal data + apply a baked normal map from a game asset\u003C/li>\u003C/ul>\u003Ch3 id=\"5-color-nodes\">\u003Cstrong>5. Color Nodes\u003C/strong>\u003C/h3>\u003Cp>Color nodes adjust, blend, and transform color information within the shader network.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Mix Color\u003C/strong> - Blends two colors or textures + mix a dirt texture over a clean base color\u003C/li>\u003Cli>\u003Cstrong>RGB Curves\u003C/strong> - Adjusts contrast and color balance + increase texture contrast without re-editing the image\u003C/li>\u003Cli>\u003Cstrong>Hue/Saturation\u003C/strong> - Modifies hue, saturation, and value + tint a material blue without repainting textures\u003C/li>\u003Cli>\u003Cstrong>Invert\u003C/strong> - Reverses color values + invert a roughness map to create a glossiness map\u003C/li>\u003C/ul>\u003Ch3 id=\"6-texture-nodes\">\u003Cstrong>6. 
Texture Nodes\u003C/strong>\u003C/h3>\u003Cp>Texture nodes generate or load image and procedural textures for materials.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Image Texture\u003C/strong> - Loads external image files + use an albedo map for a PBR material\u003C/li>\u003Cli>\u003Cstrong>Noise Texture\u003C/strong> - Generates smooth procedural noise + add subtle roughness variation to plastic\u003C/li>\u003Cli>\u003Cstrong>Voronoi Texture\u003C/strong> - Produces cell-based patterns + create cracks, scales, or stone tiles\u003C/li>\u003Cli>\u003Cstrong>Gradient Texture\u003C/strong> - Outputs smooth gradients + use as a mask for blending materials\u003C/li>\u003C/ul>\u003Ch3 id=\"7-utility-nodes\">\u003Cstrong>7. Utility Nodes\u003C/strong>\u003C/h3>\u003Cp>Utility nodes perform mathematical operations and data conversions.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Mapping\u003C/strong> - Transforms texture coordinates + scale and rotate a texture pattern\u003C/li>\u003Cli>\u003Cstrong>Math\u003C/strong> - Performs numerical operations + clamp roughness values to prevent extremes\u003C/li>\u003Cli>\u003Cstrong>Vector Math\u003C/strong> - Performs vector-based calculations + modify normal or direction vectors\u003C/li>\u003Cli>\u003Cstrong>Clamp\u003C/strong> - Limits values to a specified range + prevent over-bright emission values\u003C/li>\u003C/ul>\u003Ch3 id=\"8-group-nodes\">\u003Cstrong>8. Group Nodes\u003C/strong>\u003C/h3>\u003Cp>Group nodes package multiple nodes into reusable, organized components.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Node Group\u003C/strong> - Encapsulates complex node setups + create a reusable “Rust Shader” used across multiple assets\u003C/li>\u003C/ul>\u003Ch3 id=\"9-layout-nodes\">\u003Cstrong>9. 
Layout Nodes\u003C/strong>\u003C/h3>\u003Cp>Layout nodes organize the node graph visually and do not affect rendering output.\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Frame\u003C/strong> - Visually groups related nodes + frame all texture-related nodes together\u003C/li>\u003Cli>\u003Cstrong>Reroute\u003C/strong> - Redirects node connections for clarity + clean up overlapping noodle connections\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"the-next-level-scripting-your-shaders\">\u003Cstrong>The Next Level: Scripting Your Shaders\u003C/strong>\u003C/h2>\u003Cp>When you get comfortable connecting nodes manually, you can make wood, plastic, gold, or any kind of material. But \u003Cstrong>what if you have a scene with 500 unique objects, and you need to generate a random variation\u003C/strong> of a worn metal material for each one with some tweaks?\u003C/p>\u003Cp>This is where Python scripting becomes key. You can use it to ensure every material in your project follows the same node structure. You can write a script that says, \"Make this material red, but vary the hue slightly by a random number for every object.\"\u003C/p>\u003Cp>Let's get our hands dirty. 
We are going to write a Python script that creates a new material, adds a Principled BSDF, generates a noise texture to control the color, and links it all up.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blog-tutorials/tree/main/blender-shaders?ref=blog.cg-wire.com\">https://github.com/cgwire/blog-tutorials/tree/main/blender-shaders\u003C/a>\u003C/div>\u003C/div>\u003Cp>Open the \u003Cem>Scripting\u003C/em> tab in Blender, create a new text block, and follow along.\u003C/p>\u003Cp>First, we need to import the library and tell Blender we want to create a new material.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import random\n\nimport bpy\n\ndef create_procedural_material(mat_name):\n&nbsp;&nbsp;&nbsp;&nbsp;mat = bpy.data.materials.new(name=mat_name)\n\n&nbsp;&nbsp;&nbsp;&nbsp;mat.use_nodes = True\n&nbsp;&nbsp;&nbsp;&nbsp;nodes = mat.node_tree.nodes\n&nbsp;&nbsp;&nbsp;&nbsp;links = mat.node_tree.links\n\n&nbsp;&nbsp;&nbsp;&nbsp;nodes.clear()\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Now, let's add the nodes. 
Think of this as pulling items out of the \"Add\" menu programmatically:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">    node_output = nodes.new(type='ShaderNodeOutputMaterial')\n&nbsp;&nbsp;&nbsp;&nbsp;node_output.location = (400, 0)\n\n&nbsp;&nbsp;&nbsp;&nbsp;node_principled = nodes.new(type='ShaderNodeBsdfPrincipled')\n&nbsp;&nbsp;&nbsp;&nbsp;node_principled.location = (0, 0)\n\n&nbsp;&nbsp;&nbsp;&nbsp;node_principled.inputs['Roughness'].default_value = 0.2\n&nbsp;&nbsp;&nbsp;&nbsp;node_principled.inputs['Metallic'].default_value = 1.0\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Now, let's make it interesting. We will add a Noise Texture and a ColorRamp to generate a random color pattern.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">node_noise = nodes.new(type='ShaderNodeTexNoise')\n&nbsp;&nbsp;&nbsp;&nbsp;node_noise.location = (-600, 0)\n&nbsp;&nbsp;&nbsp;&nbsp;node_noise.inputs['Scale'].default_value = 15.0\n&nbsp;&nbsp;&nbsp;&nbsp;node_noise.inputs['Detail'].default_value = 10.0\n\n&nbsp;&nbsp;&nbsp;&nbsp;node_ramp = nodes.new(type='ShaderNodeValToRGB')\n&nbsp;&nbsp;&nbsp;&nbsp;node_ramp.location = (-300, 0)\n\n&nbsp;&nbsp;&nbsp;&nbsp;node_ramp.color_ramp.elements[0].color = (0.1, 0.1, 0.1, 1)\n\n&nbsp;&nbsp;&nbsp;&nbsp;rand_r = random.random()\n&nbsp;&nbsp;&nbsp;&nbsp;rand_g = random.random()\n&nbsp;&nbsp;&nbsp;&nbsp;rand_b = random.random()\n&nbsp;&nbsp;&nbsp;&nbsp;node_ramp.color_ramp.elements[1].color = (rand_r, rand_g, rand_b, 1)\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>\u003C/p>\u003Cp>Finally, we have to wire them together and apply this new shader to the current context (the default cube):\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">    links.new(node_noise.outputs['Fac'], node_ramp.inputs['Fac'])\n\n&nbsp;&nbsp;&nbsp;&nbsp;links.new(node_ramp.outputs['Color'], node_principled.inputs['Base Color'])\n\n&nbsp;&nbsp;&nbsp;&nbsp;links.new(node_principled.outputs['BSDF'], 
node_output.inputs['Surface'])\n\n&nbsp;&nbsp;&nbsp;&nbsp;return mat\n\nmy_new_mat = create_procedural_material(\"SciFi_Metal_Random\")\n\nbpy.context.object.data.materials.append(my_new_mat)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Copy that code into your text editor and press \"Run Script\" (the Play button). Look at your active object. It is now a metallic surface with a noise pattern of a random color. Run it again (change the name in the function call), and you get a different color.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-99dc12fe-068b-40f7-9f10-ef0c5e000ba0.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1268\" height=\"827\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2026/01/data-src-image-99dc12fe-068b-40f7-9f10-ef0c5e000ba0.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2026/01/data-src-image-99dc12fe-068b-40f7-9f10-ef0c5e000ba0.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2026/01/data-src-image-99dc12fe-068b-40f7-9f10-ef0c5e000ba0.png 1268w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Congratulations, \u003Cstrong>you just created a procedural material generator!\u003C/strong>\u003C/p>\u003Cp>Have a look at\u003Ca href=\"https://github.com/cgwire/blog-tutorials/tree/main/blender-shaders?ref=blog.cg-wire.com\" rel=\"noreferrer\"> \u003Cu>our corresponding Github repository\u003C/u>\u003C/a> to play with the code!\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Shaders are more than just coloring within the lines. They are the skin of your digital world. 
\u003Cstrong>They tell the story of the object\u003C/strong>: how old it is, where it has been, and what it is made of.\u003C/p>\u003Cp>By understanding the logic of shader nodes, \u003Cstrong>you can create anything from photorealistic skin to stylized cartoon fire\u003C/strong>. And by taking that leap into Python scripting, you unlock the ability to \u003Cstrong>work faster and smarter\u003C/strong>, automating the tedious parts of the job so you can focus on the art.\u003C/p>\u003Cp>But this is just one piece of the puzzle. You can change the surface, but what about the shape? The next logical step in your journey is \u003Cem>Geometry Nodes\u003C/em>. Just as Shader Nodes control the color and light procedurally, Geometry Nodes control the mesh and structure programmatically.\u003Ca href=\"https://blog.cg-wire.com/blender-scripting-geometry-nodes-2/\"> \u003Cu>Have a look at our dedicated article\u003C/u>\u003C/a> to create entire scenes from code!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":134,"comment_id":135,"feature_image":136,"featured":105,"visibility":10,"created_at":137,"updated_at":138,"custom_excerpt":139,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":140,"primary_tag":141,"url":142,"excerpt":139,"reading_time":143,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":144},"67a0028f-66b2-4116-ac34-040c8a14d052","695b7d1dc665470001df4d80","https://images.unsplash.com/photo-1664526936810-ec0856d31b92?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDd8fHNoYWRlciUyMG5vZGVzfGVufDB8fHx8MTc2NzYwMzU4M3ww&ixlib=rb-4.1.0&q=80&w=2000","2026-01-05T09:58:05.000+01:00","2026-03-26T09:56:11.000+01:00","Learn how Blender shaders really work, from node-based materials to procedural shading and Python-driven automation. 
This guide breaks down shader concepts, node types, and scripting techniques to help artists build flexible, production-ready materials.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},"https://blog.cg-wire.com/blender-shaders-explained/",8,"\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@guerrillabuzz?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">GuerrillaBuzz\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-shaders-explained","2026-01-05T10:35:18.000+01:00",{"title":129},"blender-shaders-explained","posts/blender-shaders-explained",[151,152],{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"L9nHGKFoNkSSxbDZv_Z2mmZLxxHmhill232zPkpfpCE",{"id":155,"title":156,"authors":157,"body":7,"description":7,"extension":8,"html":159,"meta":160,"navigation":13,"path":172,"published_at":173,"seo":174,"slug":175,"stem":176,"tags":177,"__hash__":180,"uuid":161,"comment_id":162,"feature_image":163,"featured":105,"visibility":10,"created_at":164,"updated_at":165,"custom_excerpt":166,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":167,"primary_tag":168,"url":169,"excerpt":166,"reading_time":170,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":171},"ghost/posts:blender-programmatic-rendering.json","Programmatic Video Rendering in Blender Using Python 
(2026)",[158],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🧠\u003C/div>\u003Cdiv class=\"kg-callout-text\">Turn Blender into a programmable rendering engine with just a few lines of Python.\u003C/div>\u003C/div>\u003Cp>Learning Blender as a 3D artist usually means learning about its addon ecosystem. Tasks that would take hours like rigging a character can be turned into seconds with addons like Rigify. The same goes for most workflows, and we often end up asking ourselves the same recurring question: \"Can Blender do this automatically?\"\u003C/p>\u003Cp>The answer is yes. The key is the programming language Python.\u003C/p>\u003Cp>Blender includes a powerful built-in scripting engine, and with just a few lines of code, you can create objects, position cameras, and even trigger full renders.\u003C/p>\u003Cp>You won't need to pay for an addon if you know how to build one yourself. And at its core, an addon is just a script wrapped in a custom Blender user interface.\u003C/p>\u003Cp>If you've never scripted in Blender before, discovering the \u003Ccode>bpy\u003C/code> module feels like opening a secret door inside a tool you thought you already knew: suddenly, every part of the interface becomes programmable. You're not just clicking buttons anymore but giving instructions to build repeatable systems.\u003C/p>\u003Cp>One of the most important workflows you can automate is rendering. Not only to make your pipeline faster but also to help keep rendering settings consistent and predictable. In this tutorial, we'll implement a basic programmatic rendering system to automatically animate a 3D text and turn it into a full HD video. 
We'll start from zero, exploring how to run Python for Blender and how to use it to control the scene. By the end, you'll have a good overview of how to automate common animation tasks.\u003C/p>\u003Chr>\u003Ch2 id=\"use-cases\">\u003Cstrong>Use Cases\u003C/strong>\u003C/h2>\u003Cp>Programmatic rendering unlocks a wide range of powerful workflows that go far beyond traditional manual scene building:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Data-driven motion graphics\u003C/strong> — Animated charts, realtime API-driven broadcast graphics, or automatically generated social videos.\u003C/li>\u003Cli>\u003Cstrong>Generative art\u003C/strong> — Procedural patterns, noise fields, particle experiments, and algorithmic illustrations that evolve from code.\u003C/li>\u003Cli>\u003Cstrong>Batch-rendered variants\u003C/strong> — Personalized ads, product color variations, automated aspect-ratio crops, and bulk social asset generation.\u003C/li>\u003Cli>\u003Cstrong>Procedural 3D content\u003C/strong> — Terrain builders, parametric modeling, foliage/world population, and automated 3D asset variations.\u003C/li>\u003Cli>\u003Cstrong>Generative UI &amp; design systems\u003C/strong> — Dynamic SVGs, templated banners, and brand-consistent graphics rendered on demand.\u003C/li>\u003Cli>\u003Cstrong>VFX and animation scripting\u003C/strong> — Automated rig controls, crowd systems, particle population, and repeatable simulation setups.\u003C/li>\u003Cli>\u003Cstrong>Simulation visualizations\u003C/strong> — Fluid and smoke simulations, traffic and crowd dynamics, and scientific or physics-based renders.\u003C/li>\u003C/ul>\u003Cp>Many 3D modeling tasks are repetitive and time-consuming. 
By integrating them into an automated, script-driven pipeline, artists can focus more on creative worldbuilding while Python handles the tedious parts in the background.\u003C/p>\u003Cp>In any case, the development workflow is pretty much the same:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Setup\u003C/strong> - define needed input data and scene cleanup\u003C/li>\u003Cli>\u003Cstrong>Geometry generation\u003C/strong> - modeling the actual assets needed for the task\u003C/li>\u003Cli>\u003Cstrong>Animation\u003C/strong> - defining the transforms and their associated keyframes\u003C/li>\u003Cli>\u003Cstrong>Output\u003C/strong> - the desired assets (3D models, video, image sequence, etc.)\u003C/li>\u003C/ol>\u003Cp>This is exactly the path we're going to take for our 3D text video rendering example.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-programmatic-rendering?ref=blog.cg-wire.com\">https://github.com/cgwire/blender-programmatic-rendering\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-scene-setup\">\u003Cstrong>1. Scene Setup\u003C/strong>\u003C/h2>\u003Cp>Before we dive into generating scenes, we first need a clean starting point. When you open Blender, it loads a default scene usually containing a cube, a camera, and a light. For this tutorial, we'll only need the latter two.\u003C/p>\u003Cp>The first step in using Blender programmatically is importing the \u003Ccode>bpy\u003C/code> module. 
This gives you full access to Blender's data, tools, and rendering pipeline directly from Python:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\nbpy.data.objects.remove(bpy.data.objects.get(\"Cube\"), do_unlink=True)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Here, we remove the default \u003Cstrong>Cube\u003C/strong> object. The \u003Ccode>do_unlink=True\u003C/code> parameter makes sure Blender not only deletes the object but also unlinks it from any scene that might reference it.\u003C/p>\u003Chr>\u003Ch2 id=\"2-manipulating-3d-text\">\u003Cstrong>2. Manipulating 3D Text\u003C/strong>\u003C/h2>\u003Cp>Next, we add a 3D text object to the scene to serve as the core element we'll manipulate and eventually render programmatically.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.ops.object.text_add(location=(0, 0, 0))\ntext_obj = bpy.context.object\ntext_obj.name = \"CaptionText\"\ntext_obj.data.body = \"Hello world!\"\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>This code snippet creates a new text object at the world origin, assigns it a readable name, and sets its displayed text to \u003Ccode>\"Hello world!\"\u003C/code>.\u003C/p>\u003Cp>To give the text more presence in the scene, we can adjust its geometry. 
Increasing the size and adding extrusion make the text fully 3D, and centering it on both axes simplifies future transformations and animations:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">text_obj.data.size = 0.6\ntext_obj.data.extrude = 0.05\ntext_obj.data.align_x = \"CENTER\"\ntext_obj.data.align_y = \"CENTER\"\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>With these adjustments, the text is cleanly centered, properly scaled, and ready for further processing.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-8cb519b5-e128-4bdd-9348-9aa0dfe2c36c.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"901\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-8cb519b5-e128-4bdd-9348-9aa0dfe2c36c.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-8cb519b5-e128-4bdd-9348-9aa0dfe2c36c.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-8cb519b5-e128-4bdd-9348-9aa0dfe2c36c.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"3-adding-keyframes\">\u003Cstrong>3. 
Adding Keyframes\u003C/strong>\u003C/h2>\u003Cp>We\u003Ca href=\"https://blog.cg-wire.com/stepped-animation/\"> \u003Cu>create a simple animation by inserting keyframes\u003C/u>\u003C/a> for the text position over time.\u003C/p>\u003Cp>First, we move our timeline cursor to frame 1, position the text at the starting location, and record that position with a keyframe:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.context.scene.frame_set(1)\ntext_obj.location = (-4.0, 0.0, 1.0)\ntext_obj.keyframe_insert(data_path=\"location\", frame=1)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Next, we advance to frame 40, shift the text along the X axis, and insert another keyframe to mark its new position:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.context.scene.frame_set(40)\ntext_obj.location = (0.0, 0.0, 1.0)\ntext_obj.keyframe_insert(data_path=\"location\", frame=40)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>With these two keyframes in place, Blender automatically interpolates the movement between them, creating a smooth animation as the text glides into the center of the frame.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-c33d7b37-264c-4c9f-a1ea-e8f2e2a39ff2.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"901\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-c33d7b37-264c-4c9f-a1ea-e8f2e2a39ff2.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-c33d7b37-264c-4c9f-a1ea-e8f2e2a39ff2.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-c33d7b37-264c-4c9f-a1ea-e8f2e2a39ff2.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 
id=\"4-video-rendering\">\u003Cstrong>4. Video Rendering\u003C/strong>\u003C/h2>\u003Cp>All we have left to do is\u003Ca href=\"https://blog.cg-wire.com/getting-started-with-blender-rendering/\"> \u003Cu>configure Blender's rendering settings\u003C/u>\u003C/a> and output the final video.\u003C/p>\u003Cp>The first choice is which rendering engine to use: \u003Cstrong>Eevee\u003C/strong> or \u003Cstrong>Cycles\u003C/strong>.\u003C/p>\u003Cp>Eevee is a real-time rasterization engine, making it extremely fast and ideal for previews or stylized animation. Cycles, on the other hand, is a physically based path tracer that produces more realistic lighting but requires much longer render times. For quick iteration and most automated workflows, Eevee is generally the better option:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.context.scene.render.engine = \"BLENDER_EEVEE\"\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Next, we specify the output resolution:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.context.scene.render.resolution_x = 1920\nbpy.context.scene.render.resolution_y = 1080\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Then we set the frame rate and define the animation range. Here, a 60-frame shot at 24 fps:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.context.scene.render.fps = 24\nbpy.context.scene.frame_start = 1\nbpy.context.scene.frame_end = 60\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Blender also needs to know how to encode the final video. 
We'll export it as an MP4 using H.264 video encoding for rendering speed:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.context.scene.render.image_settings.file_format = \"FFMPEG\"\nbpy.context.scene.render.ffmpeg.format = \"MPEG4\"\nbpy.context.scene.render.ffmpeg.codec = \"H264\"\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Finally, we choose where the output file will be written using the current folder for convenience:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.context.scene.render.filepath = \"//render.mp4\"\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>With everything configured, we can start the render process with a single command:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.ops.render.render(animation=True)\u003C/code>\u003C/pre>\u003Chr>\u003Ch2 id=\"5-putting-it-all-together\">\u003Cstrong>5. Putting it all together\u003C/strong>\u003C/h2>\u003Cp>Our code is complete and we just need to put it into a Python file \u003Ccode>render.py\u003C/code>:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\nbpy.data.objects.remove(bpy.data.objects.get(\"Cube\"), do_unlink=True)\n\nbpy.ops.object.text_add(location=(0, 0, 0))\ntext_obj = bpy.context.object\ntext_obj.name = \"CaptionText\"\ntext_obj.data.body = \"Hello world!\"\n\ntext_obj.data.size = 0.6\ntext_obj.data.extrude = 0.05\ntext_obj.data.align_x = \"CENTER\"\ntext_obj.data.align_y = \"CENTER\"\n\nbpy.context.scene.frame_set(1)\ntext_obj.location = (-4.0, 0.0, 1.0)\ntext_obj.keyframe_insert(data_path=\"location\", frame=1)\n\nbpy.context.scene.frame_set(40)\ntext_obj.location = (0.0, 0.0, 1.0)\ntext_obj.keyframe_insert(data_path=\"location\", frame=40)\n\nbpy.context.scene.render.engine = \"BLENDER_EEVEE\"\nbpy.context.scene.render.resolution_x = 1920\nbpy.context.scene.render.resolution_y = 1080\nbpy.context.scene.render.resolution_percentage = 100\nbpy.context.scene.render.fps = 24\nbpy.context.scene.frame_start = 
1\nbpy.context.scene.frame_end = 60\n\nbpy.context.scene.render.image_settings.file_format = \"FFMPEG\"\nbpy.context.scene.render.ffmpeg.format = \"MPEG4\"  # container\nbpy.context.scene.render.ffmpeg.codec = \"H264\"\nbpy.context.scene.render.ffmpeg.constant_rate_factor = \"HIGH\"\nbpy.context.scene.render.ffmpeg.gopsize = 12\nbpy.context.scene.render.ffmpeg.audio_codec = \"AAC\"\nbpy.context.scene.render.filepath = \"//render.mp4\"\n\nbpy.ops.render.render(animation=True)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Now, run the script to start rendering:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">python3 render.py\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Once the render finishes, check your working directory and your fully programmatically generated animation should now be ready to view.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-2b287259-a96b-456b-b95e-375bf116e3a1.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1088\" height=\"722\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-2b287259-a96b-456b-b95e-375bf116e3a1.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-2b287259-a96b-456b-b95e-375bf116e3a1.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-2b287259-a96b-456b-b95e-375bf116e3a1.png 1088w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">🔗\u003C/div>\u003Cdiv class=\"kg-callout-text\">You can find our code in a Github repository for easy reproducibility:\u003Ca href=\"https://github.com/cgwire/blender-programmatic-rendering?ref=blog.cg-wire.com\"> 
\u003Cu>github.com/cgwire/blender-programmatic-rendering\u003C/u>\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>In this walkthrough, you built a complete automated pipeline inside Blender: setting up a clean scene, creating and modifying 3D text, animating it with keyframes, and rendering the sequence with smooth interpolation. All of it handled through Python with no manual adjustments needed!\u003C/p>\u003Cp>Now that you've seen how much control the Blender API provides, you can take these ideas much further: automate your workflows, generate graphics from data, build internal tools that assemble scenes, render variations, or create entire animations with a single command... the list to help your animation studio become more productive never ends.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":161,"comment_id":162,"feature_image":163,"featured":105,"visibility":10,"created_at":164,"updated_at":165,"custom_excerpt":166,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":167,"primary_tag":168,"url":169,"excerpt":166,"reading_time":170,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":171},"4457d779-ae8e-4ed7-9398-91772c0996c0","6948dba20bfbc7000190a8bf","https://images.unsplash.com/photo-1622547748225-3fc4abd2cca0?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDJ8fHJlbmRlcnN8ZW58MHx8fHwxNzY2MzgyNjA1fDA&ixlib=rb-4.1.0&q=80&w=2000","2025-12-22T06:48:18.000+01:00","2026-02-20T06:04:02.000+01:00","Learn how to automate animation and video rendering in Blender using Python. 
This tutorial covers scene setup, 3D text generation, keyframe animation, and programmatic rendering to build repeatable, script-driven workflows.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},"https://blog.cg-wire.com/blender-programmatic-rendering/",6,"\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@sebastiansvenson?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Sebastian Svenson\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-programmatic-rendering","2025-12-29T10:00:10.000+01:00",{"title":156},"blender-programmatic-rendering","posts/blender-programmatic-rendering",[178,179],{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"vOpwec7s0eruEbUu1OcdDfl9ESqnn1LglPRNKNn4kgw",{"id":182,"title":183,"authors":184,"body":7,"description":7,"extension":8,"html":186,"meta":187,"navigation":13,"path":199,"published_at":200,"seo":201,"slug":202,"stem":203,"tags":204,"__hash__":207,"uuid":188,"comment_id":189,"feature_image":190,"featured":105,"visibility":10,"created_at":191,"updated_at":192,"custom_excerpt":193,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":194,"primary_tag":195,"url":196,"excerpt":193,"reading_time":197,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":198},"ghost/posts:blender-kitsu-versioning-addon.json","Managing Blender File Revisions with a Kitsu Versioning Addon 
(2026)",[185],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🧱\u003C/div>\u003Cdiv class=\"kg-callout-text\">Replace chaotic file naming with a single source of truth for Blender revisions.\u003C/div>\u003C/div>\u003Cp>Every project begins with good intentions. You start with a clean \u003Ccode>model.blend\u003C/code>, organized folders, and the promise that this time you’ll keep things tidy.\u003C/p>\u003Cp>But as deadlines tighten, the quiet entropy of production sets in. Before long, your project directory starts to resemble an archaeological dig site of panicked last-minute edits:\u003C/p>\u003Cpre>\u003Ccode>model.blend\nmodel_v2.blend\nmodel_v2b.blend\nmodel_final.blend\nmodel_final_really_final.blend\nmodel_FINAL_v3.blend\u003C/code>\u003C/pre>\u003Cp>You know how it happens: someone needs a quick change, another artist branches off a version \"just in case,\" and soon no one is entirely certain which file is \"the real one.\" Comments in chat threads contradict filenames, shots render from outdated versions, and the supervisor sighs deeply.\u003C/p>\u003Cp>In an animation studio, these micro-chaos moments add up. That’s where a proper source of truth needs to enter the story.\u003C/p>\u003Cp>For many teams, that source is Kitsu. 
And for Blender artists, the missing piece is an automated bridge that keeps files versioned, traceable, and aligned with the project’s production data.\u003C/p>\u003Cp>So you decide to take control: you’re going to make Blender talk to Kitsu and build a versioning system that makes your pipeline feel like it finally has your back.\u003C/p>\u003Cp>In this tutorial, we’ll create an addon that manages file revisions directly from Blender. You’ll be able to connect Blender to a Kitsu project, create and upload revisions of your 3D models, view all existing revisions, and pull older revisions back into Blender.\u003C/p>\u003Chr>\u003Ch2 id=\"workflow-overview\">\u003Cstrong>Workflow Overview\u003C/strong>\u003C/h2>\u003Cp>In a typical Kitsu-driven workflow, an artist opens a Blender scene, does their work, hits a milestone, and uploads a revision. Artists review, iterate, revise, and upload again. Kitsu keeps every step neatly.\u003C/p>\u003Cp>But it wouldn't hurt if you could just upload or pull revisions with a click, right?\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Start in Blender\u003C/strong> - We open our working scene—modeling, shading, rigging, whatever the task at hand demands.\u003C/li>\u003Cli>\u003Cstrong>Checkpoint the work\u003C/strong> - When we hit a milestone (\"blocking complete,\" \"ready for review\"), we create a new revision in Kitsu.\u003C/li>\u003Cli>\u003Cstrong>Review the history\u003C/strong> - Kitsu stores all revisions, giving supervisors a clear timeline and letting you compare versions without digging through files.\u003C/li>\u003Cli>\u003Cstrong>Pull new changes\u003C/strong> - When we need a different version, we can just click to pull in an asset in our current workspace.\u003C/li>\u003C/ol>\u003Cp>This is a very basic workflow, so we are bound to run into problems like how to handle conflict resolution (what if two artists work on the same shot and create a new revision each, how do we handle this?), but it's good enough to give us a 
functional addon we can improve later on to fit our animation pipeline needs.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-kitsu-versioning-addon?ref=blog.cg-wire.com\">https://github.com/cgwire/blender-kitsu-versioning-addon\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-populating-the-kitsu-dashboard\">\u003Cstrong>1. Populating The Kitsu Dashboard\u003C/strong>\u003C/h2>\u003Cp>Kitsu’s web interface is designed so producers, coordinators, or leads can quickly set up the structure of a project. Before Blender artists can publish revisions, we need to populate our production with work-in-progress assets. 
In\u003Ca href=\"https://blog.cg-wire.com/dcc-integration-blender-kitsu/\"> \u003Cu>the Kitsu Docker instance for local development\u003C/u>\u003C/a>:\u003C/p>\u003Col>\u003Cli>Log into the \u003Cstrong>Kitsu dashboard\u003C/strong>.\u003C/li>\u003Cli>In the main navigation bar, go to \u003Cstrong>Productions\u003C/strong>.\u003C/li>\u003Cli>Click \u003Cstrong>\"Create production\"\u003C/strong> (usually top-right corner).\u003C/li>\u003Cli>Fill in the production details\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-58cb0571-2b74-4110-9b07-9e15030bbd05.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"985\" height=\"694\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-58cb0571-2b74-4110-9b07-9e15030bbd05.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-58cb0571-2b74-4110-9b07-9e15030bbd05.png 985w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>The new production will appear in the list, and you can open it to begin adding assets.\u003C/p>\u003Cp>Assets are the building blocks of your project: characters, props, environments, vehicles... 
anything that needs production tracking.\u003C/p>\u003Col>\u003Cli>Go to \u003Cstrong>Productions → Your Production Name\u003C/strong>.\u003C/li>\u003Cli>Switch to the \u003Cstrong>Assets\u003C/strong> tab within the production.\u003C/li>\u003Cli>Click \u003Cstrong>\"Create Asset\"\u003C/strong>.\u003C/li>\u003Cli>Enter an \u003Cstrong>Asset Name\u003C/strong> (e.g., \"RobotHead\") and \u003Cstrong>Asset Type\u003C/strong> (Character, Prop, Set, etc.)\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-f4336c33-57ef-4baa-9715-e0c749f7d9b4.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1270\" height=\"870\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-f4336c33-57ef-4baa-9715-e0c749f7d9b4.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-f4336c33-57ef-4baa-9715-e0c749f7d9b4.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-f4336c33-57ef-4baa-9715-e0c749f7d9b4.png 1270w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Your asset now exists and has 3 tasks assigned to it.&nbsp;\u003C/p>\u003Cp>Tasks define the workflow steps (Modeling, Shading, Rigging, etc.) that artists will perform on each asset.\u003C/p>\u003Cp>We now have everything we need to test our addon.\u003C/p>\u003Chr>\u003Ch2 id=\"2-linking-the-current-blender-project-to-a-kitsu-task\">\u003Cstrong>2. 
Linking the Current Blender Project to a Kitsu Task\u003C/strong>\u003C/h2>\u003Cp>We start with a minimal addon declaration that defines the UI location, loads \u003Ccode>gazu\u003C/code>, and prepares the data we’ll expose in dropdown menus:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bl_info = {\n&nbsp;&nbsp;&nbsp;&nbsp;\"name\": \"Model Versioning (Production/Task/Asset/Revisions)\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"author\": \"cgwire\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"version\": (1, 0, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"blender\": (2, 80, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"location\": \"View3D &gt; Sidebar &gt; ModelVersioning\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"description\": \"Browse productions, tasks, assets, and manage revisions (list/create/load)\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"category\": \"3D View\",\n}\n\nimport sys\n\nsys.path.append(\"~/.local/lib/python3.11/site-packages\")\n\nimport os\nimport tempfile\n\nimport bpy\nimport gazu\nfrom bpy.props import EnumProperty, PointerProperty\nfrom bpy.types import Operator, Panel, PropertyGroup\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Note that \u003Ccode>sys.path.append(\"~/.local/lib/python3.11/site-packages\")\u003C/code> allows us to use our local Python installation to access external packages like \u003Ccode>gazu\u003C/code>. By default, Blender runs its own Python environment, so installing packages can be cumbersome. To solve this, we just tell Blender to have a look at our local modules. Update this path accordingly to match your system configuration.\u003C/p>\u003Cp>Before we can automate versioning, Blender needs to know \u003Cem>where\u003C/em> in Kitsu the current model belongs. 
That means identifying the project, the asset, the task, and eventually the revisions associated with it.\u003C/p>\u003Cp>The first step is simple: authenticate with Kitsu, retrieve available productions, and let the artist pick the context directly from the Sidebar UI.\u003C/p>\u003Cp>Once the addon loads, we authenticate and point the addon at the Kitsu API host:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">gazu.set_host(\"http://localhost/api\")\nuser = gazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\ntemp_dir_path = tempfile.gettempdir()\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>This establishes the session we’ll use to browse productions, find tasks, and eventually create revisions.\u003C/p>\u003Cp>From here, we can begin exposing the production structure. With helper functions for project, asset, task, and revision lookup, we populate each dropdown dynamically:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">def find_project(name):\n&nbsp;&nbsp;&nbsp;&nbsp;return gazu.project.get_project_by_name(name)\n\ndef find_asset(project, name):\n&nbsp;&nbsp;&nbsp;&nbsp;return gazu.asset.get_asset_by_name(project, name)\n\ndef find_task(asset, type_id):\n&nbsp;&nbsp;&nbsp;&nbsp;return gazu.task.get_task_by_name(asset, type_id, \"main\")\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Each \u003Ccode>EnumProperty\u003C/code> callback pulls fresh data from Kitsu:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">def enum_projects(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;items = []\n&nbsp;&nbsp;&nbsp;&nbsp;projects = gazu.project.all_projects()\n&nbsp;&nbsp;&nbsp;&nbsp;for p in projects:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((p[\"name\"], p[\"name\"], \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;if not items:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((\"NONE\", \"--- no productions ---\", \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;return items\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Assets, tasks, and 
revisions follow the same pattern:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">def enum_assets(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;project = find_project(context.scene.mv_state.project)\n&nbsp;&nbsp;&nbsp;&nbsp;items = []\n&nbsp;&nbsp;&nbsp;&nbsp;if project:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;assets = gazu.asset.all_assets_for_project(project)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;for t in assets:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((t[\"name\"], t[\"name\"], \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;if not items:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((\"NONE\", \"--- no tasks ---\", \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;return items\n\ndef enum_tasks(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;project = find_project(context.scene.mv_state.project)\n&nbsp;&nbsp;&nbsp;&nbsp;asset = find_asset(project, context.scene.mv_state.asset)\n&nbsp;&nbsp;&nbsp;&nbsp;items = []\n&nbsp;&nbsp;&nbsp;&nbsp;if asset:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;tasks = gazu.task.all_tasks_for_asset(asset)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;for t in tasks:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((t[\"task_type_id\"], t[\"task_type_name\"], \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;if not items:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((\"NONE\", \"--- no tasks ---\", \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;return items\n\ndef enum_revisions(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;project = find_project(context.scene.mv_state.project)\n&nbsp;&nbsp;&nbsp;&nbsp;asset = find_asset(project, context.scene.mv_state.asset)\n&nbsp;&nbsp;&nbsp;&nbsp;task = find_task(asset, context.scene.mv_state.task)\n&nbsp;&nbsp;&nbsp;&nbsp;items = []\n&nbsp;&nbsp;&nbsp;&nbsp;if task:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;revisions = gazu.files.get_all_preview_files_for_task(task)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;for r 
in revisions:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((str(r[\"revision\"]), str(r[\"revision\"]), \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;if not items:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;items.append((\"NONE\", \"--- no revisions ---\", \"\"))\n&nbsp;&nbsp;&nbsp;&nbsp;return items\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Finally, we store all UI selections in a single state object:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">class MV_State(PropertyGroup):\n&nbsp;&nbsp;&nbsp;&nbsp;project: EnumProperty(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;name=\"Project\", description=\"Select project\", items=enum_projects\n&nbsp;&nbsp;&nbsp;&nbsp;)\n&nbsp;&nbsp;&nbsp;&nbsp;asset: EnumProperty(name=\"Asset\", description=\"Select asset\", items=enum_assets)\n&nbsp;&nbsp;&nbsp;&nbsp;task: EnumProperty(name=\"Task\", description=\"Select task\", items=enum_tasks)\n&nbsp;&nbsp;&nbsp;&nbsp;revision: EnumProperty(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;name=\"Revision\", description=\"Select revision\", items=enum_revisions\n&nbsp;&nbsp;&nbsp;&nbsp;)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>This is the foundation of our pipeline integration: Blender now knows how to browse Kitsu and bind itself to the exact task the artist is working on. From here, we can start working on the revision lifecycle.\u003C/p>\u003Chr>\u003Ch2 id=\"3-creating-a-new-revision-button\">\u003Cstrong>3. Creating a \"New Revision\" Button\u003C/strong>\u003C/h2>\u003Cp>We can start automating the part artists interact with most: creating new revisions. In a typical manual workflow, you’d export your file and upload it in Kitsu to the correct task. Our addon will streamline this into a single button press inside Blender.\u003C/p>\u003Cp>Kitsu handles new revisions through \u003Ccode>publish_preview()\u003C/code>. 
This call sends both the file and metadata:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">temp_file_path = os.path.join(temp_dir_path, \"new_version.glb\")\n\nbpy.ops.export_scene.gltf(filepath=temp_file_path, export_format=\"GLB\")\n\n(comment, preview_file) = gazu.task.publish_preview(\n&nbsp;&nbsp;&nbsp;&nbsp;task,\n&nbsp;&nbsp;&nbsp;&nbsp;task_status,\n&nbsp;&nbsp;&nbsp;&nbsp;revision=new_revision,\n&nbsp;&nbsp;&nbsp;&nbsp;comment=\"increment revision\",\n&nbsp;&nbsp;&nbsp;&nbsp;preview_file_path=temp_file_path,\n)\n\nos.remove(temp_file_path)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>In our addon, we’ll trigger this from a button in the Sidebar.\u003C/p>\u003Cp>The operator performs three main steps: grab the user’s selections from the addon's state, compute the next revision number, and upload the exported file as the new revision:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">class MV_OT_create_revision(Operator):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"mv.create_revision\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Create Revision\"\n\n&nbsp;&nbsp;&nbsp;&nbsp;def invoke(self, context, event):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;wm = context.window_manager\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return wm.invoke_props_dialog(self, width=400)\n\n&nbsp;&nbsp;&nbsp;&nbsp;def execute(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;project = find_project(context.scene.mv_state.project)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;asset = find_asset(project, context.scene.mv_state.asset)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;task = find_task(asset, context.scene.mv_state.task)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;revision = context.scene.mv_state.revision\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;new_revision = int(revision) + 1\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;task_status = 
gazu.task.get_task_status_by_name(\"todo\")\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;temp_file_path = os.path.join(temp_dir_path, \"new_version.glb\")\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.ops.export_scene.gltf(filepath=temp_file_path, export_format=\"GLB\")\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;(comment, preview_file) = gazu.task.publish_preview(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;task,\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;task_status,\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;revision=new_revision,\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;comment=\"increment revision\",\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;preview_file_path=temp_file_path,\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;os.remove(temp_file_path)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;self.report({\"INFO\"}, \"Revision created\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return {\"FINISHED\"}\u003C/code>\u003C/pre>\u003Chr>\u003Ch2 id=\"4-pulling-a-revision-into-blender\">\u003Cstrong>4. Pulling a Revision into Blender\u003C/strong>\u003C/h2>\u003Cp>Versioning isn’t just about publishing your work, it's also about being able to \u003Cem>go back\u003C/em>. 
Whether you’re reviewing earlier stages, comparing topology, or recovering a detail from a previous iteration, you need a quick, reliable way to load new and older revisions into Blender.\u003C/p>\u003Cp>Once a task is selected, pulling a revision from Kitsu becomes a simple two-step operation: download the preview file associated with the selected revision, and import it into Blender.\u003C/p>\u003Cp>After fetching all preview files for the current task, we can target the revision by index and bring the asset directly into Blender:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">temp_file_path = os.path.join(temp_dir_path, \"new_version.glb\")\n\npreview_file = preview_files[int(revision) - 1]\ngazu.files.download_preview_file(preview_file, temp_file_path)\nbpy.ops.import_scene.gltf(filepath=temp_file_path)\n\nos.remove(temp_file_path)\u003C/code>\u003C/pre>\u003Cp>This gives us a consistent way to retrieve assets exactly as they were at that point in production.\u003C/p>\u003Cp>We encapsulate this workflow inside an operator that mirrors the structure of the Create Revision button:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">class MV_OT_load_revision(Operator):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"mv.load_revision\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Load Revision\"\n\n&nbsp;&nbsp;&nbsp;&nbsp;def execute(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;project = find_project(context.scene.mv_state.project)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;asset = find_asset(project, context.scene.mv_state.asset)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;task = find_task(asset, context.scene.mv_state.task)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;revision = context.scene.mv_state.revision\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;preview_files = gazu.files.get_all_preview_files_for_task(task)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;temp_file_path = os.path.join(temp_dir_path, 
\"new_version.glb\")\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;preview_file = preview_files[int(revision) - 1]\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;gazu.files.download_preview_file(preview_file, temp_file_path)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.ops.import_scene.gltf(filepath=temp_file_path)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;os.remove(temp_file_path)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;self.report({\"INFO\"}, \"Opened Revision\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return {\"FINISHED\"}\u003C/code>\u003C/pre>\u003Cp>This operator makes it trivial for artists to browse and load any version stored in Kitsu without leaving Blender.\u003C/p>\u003Chr>\u003Ch2 id=\"5-registering-the-addon\">\u003Cstrong>5. Registering The Addon\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/blender-addon-ui-scripting-guide/\">\u003Cu>The panel now ties the whole revision workflow together\u003C/u>\u003C/a>:\u003C/p>\u003Cul>\u003Cli>Select the project\u003C/li>\u003Cli>Choose the asset\u003C/li>\u003Cli>Pick the task\u003C/li>\u003Cli>Browse revisions\u003C/li>\u003Cli>Create or load versions with a single click\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">class MV_PT_panel(Panel):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Model Versioning\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"MV_PT_panel\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_space_type = \"VIEW_3D\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_region_type = \"UI\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_category = \"ModelVersion\"\n\n&nbsp;&nbsp;&nbsp;&nbsp;def draw(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout = self.layout\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;scene = context.scene\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;mv = scene.mv_state\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.label(text=\"Project\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.prop(mv, \"project\", 
text=\"\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.separator()\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.label(text=\"Asset\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.prop(mv, \"asset\", text=\"\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.separator()\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.label(text=\"Task\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.prop(mv, \"task\", text=\"\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.separator()\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.label(text=\"Revision\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.prop(mv, \"revision\", text=\"\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.separator()\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;row = layout.row(align=True)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;row.operator(\"mv.create_revision\", text=\"Create Revision\", icon=\"ADD\")\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.operator(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;\"mv.load_revision\", text=\"Load Selected Revision\", icon=\"IMPORT\"\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;)\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>Finally, we register the operators, panel, and state so Blender knows how to construct the UI:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">classes = (\n&nbsp;&nbsp;&nbsp;&nbsp;MV_State,\n&nbsp;&nbsp;&nbsp;&nbsp;MV_OT_create_revision,\n&nbsp;&nbsp;&nbsp;&nbsp;MV_OT_load_revision,\n&nbsp;&nbsp;&nbsp;&nbsp;MV_PT_panel,\n)\n\ndef register():\n&nbsp;&nbsp;&nbsp;&nbsp;for c in classes:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.register_class(c)\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.types.Scene.mv_state = PointerProperty(type=MV_State)\n\ndef unregister():\n&nbsp;&nbsp;&nbsp;&nbsp;for c in 
reversed(classes):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.unregister_class(c)\n&nbsp;&nbsp;&nbsp;&nbsp;if hasattr(bpy.types.Scene, \"mv_state\"):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;del bpy.types.Scene.mv_state\n\nif __name__ == \"__main__\":\n&nbsp;&nbsp;&nbsp;&nbsp;register()\u003C/code>\u003C/pre>\u003Cp>At this point, the model versioning workflow is fully bidirectional: you can publish new revisions from Blender and retrieve earlier ones instantly.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-00e861e7-3b2e-4bdc-80b8-1af740cab480.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"759\" height=\"488\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-00e861e7-3b2e-4bdc-80b8-1af740cab480.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-00e861e7-3b2e-4bdc-80b8-1af740cab480.png 759w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>With just a handful of Blender API operators and the convenience of the Gazu SDK, we’ve built a practical (yet basic) versioning workflow that lives directly inside Blender and stays in sync with Kitsu. Artists can link their Blender scene to a Kitsu project, asset, and task, create new revisions with a single button press, browse the full revision history for any task, and pull older versions straight into Blender whenever they need to compare or recover work.\u003C/p>\u003Cp>This workflow is only the beginning. 
From here, you could expand the addon with automated exports, thumbnail or turntable renders, support for multiple output formats, supervisor review tools, or even hooks into a render farm.\u003C/p>\u003Cp>To get you started, make sure to clone\u003Ca href=\"https://github.com/cgwire/blender-kitsu-versioning-addon?ref=blog.cg-wire.com\"> \u003Cu>our Github repository\u003C/u>\u003C/a> for this versioning addon and try it out yourself!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord 
Community\u003C/a>\u003C/div>",{"uuid":188,"comment_id":189,"feature_image":190,"featured":105,"visibility":10,"created_at":191,"updated_at":192,"custom_excerpt":193,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":194,"primary_tag":195,"url":196,"excerpt":193,"reading_time":197,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":198},"4ee5e3ab-dd50-4121-99cb-c59d96c2eb7d","6948ca070bfbc7000190a884","https://images.unsplash.com/photo-1617746533234-288e5cf484e2?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDMwfHxhbmltYXRpb24lMjBwaXBlbGluZXxlbnwwfHx8fDE3NjYzODE5ODZ8MA&ixlib=rb-4.1.0&q=80&w=2000","2025-12-22T05:33:11.000+01:00","2026-02-20T06:04:01.000+01:00","Learn how to build a Blender addon that connects to Kitsu to manage asset revisions. 
This tutorial covers creating, browsing, and loading file versions directly from Blender, keeping production files traceable and in sync with studio workflows.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},"https://blog.cg-wire.com/blender-kitsu-versioning-addon/",12,"\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@jaspergarrattphotography?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Jasper Garratt\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-kitsu-versioning-addon","2025-12-22T10:00:20.000+01:00",{"title":183},"blender-kitsu-versioning-addon","posts/blender-kitsu-versioning-addon",[205,206],{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"RvLHyMPCRMLBkkSF0lzBXOx7AHvfvlghiFKTD38-uwg",{"id":209,"title":210,"authors":211,"body":7,"description":7,"extension":8,"html":213,"meta":214,"navigation":13,"path":224,"published_at":225,"seo":226,"slug":227,"stem":228,"tags":229,"__hash__":232,"uuid":215,"comment_id":216,"feature_image":217,"featured":105,"visibility":10,"created_at":218,"updated_at":192,"custom_excerpt":219,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":220,"primary_tag":221,"url":222,"excerpt":219,"reading_time":115,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":223},"ghost/posts:blender-kitsu-low-res-preview.json","Automating Low-Res Animation Previews in Blender with Kitsu 
(2026)",[212],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">⚡\u003C/div>\u003Cdiv class=\"kg-callout-text\">Speed up animation reviews with lightweight previews that render in seconds, not hours.\u003C/div>\u003C/div>\u003Cp>Waiting for full-resolution renders just to review a shot slows down the entire production. Artists spend time waiting and supervisors get delayed feedback. The iteration loop is inefficient.\u003C/p>\u003Cp>To address this, we can create low-resolution animation previews directly in Blender and auto-upload them to Kitsu using Python as a part of our animation pipeline. These previews are fast to render, easy to review, and can be quickly used in Kitsu for approval.\u003C/p>\u003Cp>This is a big deal because full-resolution renders can take hours, and the cloud storage and network bandwidth costs are no joke when you're dealing with thousands of shots. 
Going from 1080p to 480p can divide the size by up to 5x!\u003C/p>\u003Cp>In this tutorial, we’ll cover how to:\u003C/p>\u003Cul>\u003Cli>Adjust Blender render settings for low-resolution previews\u003C/li>\u003Cli>Automate the render process using Python\u003C/li>\u003Cli>Use \u003Ccode>ffmpeg\u003C/code> to watermark and timestamp the video for fast contextualization\u003C/li>\u003Cli>Export videos and upload them to Kitsu\u003C/li>\u003C/ul>\u003Cp>By the end, you’ll have a script that saves time on shot reviews without sacrificing feedback quality.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-kitsu-low-res-preview?ref=blog.cg-wire.com\">https://github.com/cgwire/blender-kitsu-low-res-preview\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-simple-blender-scene-setup\">\u003Cstrong>1. Simple Blender Scene Setup\u003C/strong>\u003C/h2>\u003Cp>Before we can create an animated preview, we need a starting object in the scene. 
For this tutorial, we’ll use Blender’s default cube.\u003C/p>\u003Cp>First, we create a reference of the scene and the cube:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\ncube = bpy.data.objects[\"Cube\"]\nscene = bpy.context.scene\u003C/code>\u003C/pre>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-bf950a7a-c387-4b8d-9318-49e5bd3251bd.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"901\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-bf950a7a-c387-4b8d-9318-49e5bd3251bd.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-bf950a7a-c387-4b8d-9318-49e5bd3251bd.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-bf950a7a-c387-4b8d-9318-49e5bd3251bd.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"2-adding-keyframes-for-animation\">\u003Cstrong>2. Adding Keyframes for Animation\u003C/strong>\u003C/h2>\u003Cp>The next step is animating our cube. For quick modeling previews, short sequences are ideal. Here, we’ll create a \u003Cstrong>360° rotation\u003C/strong> over 48 frames (2 seconds at 24 FPS):\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">for frame, angle in [(1, 0), (12, 1.57), (24, 3.14), (36, 4.71), (48, 6.28)]:\n&nbsp;&nbsp;&nbsp;&nbsp;scene.frame_set(frame)\n&nbsp;&nbsp;&nbsp;&nbsp;cube.rotation_euler[2] = angle\n&nbsp;&nbsp;&nbsp;&nbsp;cube.keyframe_insert(data_path=\"rotation_euler\", index=2)\u003C/code>\u003C/pre>\u003Cp>This loop sets keyframes at regular intervals, rotating the cube smoothly around its Z-axis by increments of pi/2. 
Using a small number of frames keeps rendering fast and makes it perfect for preview purposes.\u003C/p>\u003Cp>At this point, you could scrub the timeline in Blender to verify the cube rotates as expected.\u003C/p>\u003Chr>\u003Ch2 id=\"3-low-resolution-rendering\">\u003Cstrong>3. Low-Resolution Rendering\u003C/strong>\u003C/h2>\u003Cp>With animation in place, we can configure Blender to render a \u003Cstrong>fast, low-resolution preview\u003C/strong>. The goal is speed over quality: we want something clear enough for review but quick to produce.\u003C/p>\u003Cp>Here, we use\u003Ca href=\"https://blog.cg-wire.com/getting-started-with-blender-rendering/\"> \u003Cu>the Eevee rendering engine for speed and to reduce unnecessary rendering overhead\u003C/u>\u003C/a>. It's much faster than Cycles because it's a simple rasterisation engine, and we don't need a hyper-realistic output in 90% of cases.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">scene.render.engine = \"BLENDER_EEVEE\"\n\nscene.render.resolution_x = 1920\nscene.render.resolution_y = 1080\nscene.render.resolution_percentage = 50\n\nscene.render.fps = 24\nscene.frame_start = 1\nscene.frame_end = 48&nbsp; # match your animation length\n\nscene.render.image_settings.file_format = \"FFMPEG\"\nscene.render.ffmpeg.format = \"MPEG4\"\nscene.render.ffmpeg.codec = \"H264\"\n\nscene.render.filepath = \"//preview.mp4\"\u003C/code>\u003C/pre>\u003Cp>Although we go for a classic landscape resolution, reducing \u003Ccode>resolution_percentage\u003C/code> or turning off high-quality sampling in Eevee can drastically reduce render times for previews.\u003C/p>\u003Cp>The rest of the settings are pretty standard: 24 frames per second, 48 frames total, and a mp4 output video with H264 encoding (for faster compression) written in the script's current folder.\u003C/p>\u003Cp>Depending on your use case, you can reduce the resolution, decrease the frame rate, and lower the bitrate to lower the size of your previews. 
You still need enough quality for the review process, though, so tweak the settings for an optimal balance with performance.\u003C/p>\u003Cp>Finally, we can trigger the render in one line:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.ops.render.render(animation=True)\u003C/code>\u003C/pre>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-27b0c802-b589-4306-b52b-5f910b58320b.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1088\" height=\"722\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-27b0c802-b589-4306-b52b-5f910b58320b.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-27b0c802-b589-4306-b52b-5f910b58320b.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-27b0c802-b589-4306-b52b-5f910b58320b.png 1088w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>The preview video can be immediately used for review or further processed with tools like FFmpeg for timestamps, watermarks, or custom naming conventions before uploading to Kitsu.\u003C/p>\u003Chr>\u003Ch2 id=\"4-ffmpeg-processing-timestamp-naming-watermark\">\u003Cstrong>4. FFmpeg Processing: Timestamp, Naming, Watermark\u003C/strong>\u003C/h2>\u003Cp>Once Blender has rendered your animation to a video file, you can further process it using \u003Cstrong>FFmpeg\u003C/strong>. 
This is\u003Ca href=\"https://blog.cg-wire.com/ffmpeg-commands-for-animators/\"> \u003Cu>a common step in production pipelines\u003C/u>\u003C/a> to add timestamps, watermarks, or custom naming-making the previews ready for review.\u003C/p>\u003Cp>Run the following command in a terminal after rendering your preview:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">ffmpeg -framerate 24 \\\\\\\\\n&nbsp;&nbsp;-i preview.mp4 \\\\\\\\\n&nbsp;&nbsp;-i watermark.png \\\\\\\\\n&nbsp;&nbsp;-filter_complex \"\\\\\\\\\n&nbsp;&nbsp;&nbsp;&nbsp;[0:v]drawtext=text='%{pts\\\\\\\\:hms}':x=10:y=10:fontsize=24:fontcolor=white:bordercolor=black:borderw=2[v1]; \\\\\\\\\n&nbsp;&nbsp;&nbsp;&nbsp;[v1][1:v]overlay=W-w-20:H-h-20\" \\\\\\\\\n&nbsp;&nbsp;-c:v libx264 -crf 22 -pix_fmt yuv420p \\\\\\\\\n&nbsp;&nbsp;preview_with_stamp.mp4\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Cstrong>\u003Ccode>drawtext\u003C/code>\u003C/strong> overlays a running timestamp in the top-left corner.\u003C/li>\u003Cli>\u003Ccode>\u003Cstrong>overlay\u003C/strong>\u003C/code> places a watermark image (\u003Ccode>watermark.png\u003C/code>) in the bottom-right corner.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>c:v libx264 -crf 22 -pix_fmt yuv420p\u003C/code>\u003C/strong> ensures good quality and broad compatibility for video playback.\u003C/li>\u003Cli>The output file, \u003Ccode>preview_with_stamp.mp4\u003C/code>, is your finalised preview ready for review.\u003C/li>\u003C/ul>\u003Cp>Of course, you can adjust the font size, position, or watermark placement as needed to standardise previews for your team or client reviews.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-aaed9f6c-1b29-4592-b629-1830a6f2aa79.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1088\" height=\"722\" 
srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-aaed9f6c-1b29-4592-b629-1830a6f2aa79.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-aaed9f6c-1b29-4592-b629-1830a6f2aa79.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-aaed9f6c-1b29-4592-b629-1830a6f2aa79.png 1088w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>This step completes the preparation of a production-ready, low-resolution animation preview. The file is now ready to be uploaded to \u003Cstrong>Kitsu\u003C/strong> for quick feedback.\u003C/p>\u003Chr>\u003Ch2 id=\"5-uploading-to-kitsu-via-gazu\">\u003Cstrong>5. Uploading to Kitsu via Gazu\u003C/strong>\u003C/h2>\u003Cp>Once your low-resolution preview is ready, you can upload it directly to \u003Cstrong>Kitsu\u003C/strong> via the dashboard or use the \u003Ccode>gazu\u003C/code> Python SDK. Kitsu is a collaborative pipeline tracker allowing artists and supervisors to access the preview immediately for review.\u003C/p>\u003Cp>The following Python script provides a simple interactive CLI that lets you choose the project and task to upload your preview to:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import gazu\n\ndef pickProject(label, list_of_items):\n&nbsp;&nbsp;&nbsp;&nbsp;\"\"\"Helper UI to pick one item from a list.\"\"\"\n&nbsp;&nbsp;&nbsp;&nbsp;for i, item in enumerate(list_of_items):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;print(f\"{i + 1}. 
{item['name']}\")\n&nbsp;&nbsp;&nbsp;&nbsp;idx = int(input(f\"Choose {label} number: \")) - 1\n&nbsp;&nbsp;&nbsp;&nbsp;return list_of_items[idx]\n\ndef pickTask(label, list_of_items):\n&nbsp;&nbsp;&nbsp;&nbsp;\"\"\"Helper UI to pick one item from a list.\"\"\"\n&nbsp;&nbsp;&nbsp;&nbsp;for i, item in enumerate(list_of_items):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;asset = gazu.entity.get_entity(item[\"entity_id\"])\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;status = gazu.task.get_task_status(item[\"task_status_id\"])\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;type = gazu.task.get_task_type(item[\"task_type_id\"])\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;print(f\"{i + 1}. {asset['name']} {type['name']} {status['name']}\")\n&nbsp;&nbsp;&nbsp;&nbsp;idx = int(input(f\"Choose {label} number: \")) - 1\n&nbsp;&nbsp;&nbsp;&nbsp;return list_of_items[idx]\n\ngazu.set_host(\"&lt;http://localhost/api&gt;\")\nuser = gazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\nprojects = gazu.project.all_projects()\nproject = pickProject(\"project\", projects)\n\ntasks = gazu.task.all_tasks_for_project(project)\ntask = pickTask(\"task\", tasks)\n\nprint(\"Uploading preview...\")\ntask_status = gazu.task.get_task_status_by_name(\"todo\")\nresult = gazu.task.publish_preview(\n&nbsp;&nbsp;&nbsp;&nbsp;task,\n&nbsp;&nbsp;&nbsp;&nbsp;task_status,\n&nbsp;&nbsp;&nbsp;&nbsp;comment=\"Auto-generated preview\",\n&nbsp;&nbsp;&nbsp;&nbsp;preview_file_path=\"./preview.mp4\",\n)\n\nprint(\"Done:\", result)\u003C/code>\u003C/pre>\u003Cp>First, we log in to Kitsu via \u003Ccode>gazu\u003C/code> with your credentials. We use the\u003Ca href=\"https://blog.cg-wire.com/dcc-integration-blender-kitsu/\"> \u003Cu>local development environment installation via Kitsu Docker\u003C/u>\u003C/a>. 
The program lets you select the \u003Cstrong>project\u003C/strong> and \u003Cstrong>task\u003C/strong> from available options using different Kitsu API endpoints to get all your production data:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-21091709-64dd-41c6-875e-2cdce8b5b178.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1343\" height=\"816\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-21091709-64dd-41c6-875e-2cdce8b5b178.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-21091709-64dd-41c6-875e-2cdce8b5b178.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-21091709-64dd-41c6-875e-2cdce8b5b178.png 1343w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>We then upload the generated preview video from the previous steps to the selected task.\u003C/p>\u003Cp>Once complete, the preview is available in Kitsu’s review interface, making it easy for team members and supervisors to give feedback without waiting for high-resolution renders.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-78d2cd48-21e9-4599-9b2b-a5e5bef63f76.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"985\" height=\"948\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-78d2cd48-21e9-4599-9b2b-a5e5bef63f76.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-78d2cd48-21e9-4599-9b2b-a5e5bef63f76.png 985w\" sizes=\"(min-width: 720px) 
720px\">\u003C/figure>\u003Cp>The review engine is perfect to quickly annotate frames and add comments on precise shots:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-6ae9b3dd-18e9-4d85-9fa6-e5106babc87e.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1438\" height=\"809\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-6ae9b3dd-18e9-4d85-9fa6-e5106babc87e.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-6ae9b3dd-18e9-4d85-9fa6-e5106babc87e.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-6ae9b3dd-18e9-4d85-9fa6-e5106babc87e.png 1438w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"6-putting-it-all-together\">\u003Cstrong>6. Putting it all together\u003C/strong>\u003C/h2>\u003Cp>To automate the task end-to-end, let's write a quick bash command:\u003C/p>\u003Cp>\u003Cstrong>\u003Cu>preview.sh\u003C/u>\u003C/strong>\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">python3 render.py &amp;&amp; ./watermark.sh &amp;&amp; python3 upload.py\u003C/code>\u003C/pre>\u003Cp>We can then run the script every time we need to share a preview:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">./preview.sh\u003C/code>\u003C/pre>\u003Cp>Check out our\u003Ca href=\"https://github.com/cgwire/blender-kitsu-low-res-preview?ref=blog.cg-wire.com\"> \u003Cu>Github repository blender-kitsu-low-res-preview\u003C/u>\u003C/a> to try out the final result yourself.\u003C/p>\u003Chr>\u003Ch2 id=\"7-artist-friendly-addon-overview\">\u003Cstrong>7. 
Artist-Friendly Addon Overview\u003C/strong>\u003C/h2>\u003Cp>Though this is out of the scope of this article, it could be easy to wrap up our code in a Blender addon for artists to easily use.\u003C/p>\u003Cp>You would need a main panel to hold dropdown menus to pick a production, asset, and task to upload to. And a button to click to upload. The uploading logic would take care of rendering, calling ffmpeg as a subprocess for watermarking, and actually sending the temporary files to Kitsu.\u003C/p>\u003Cp>Have a look at our article on\u003Ca href=\"https://blog.cg-wire.com/blender-addon-ui-scripting-guide/\"> \u003Cu>Blender Add-on UI Development\u003C/u>\u003C/a> for more information.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>By now, you’ve set up a full pipeline: creating a simple 3D object in Blender, animating it, generating a low-resolution preview, adding timestamps and watermarks, and uploading it to Kitsu. The benefits are immediately clear:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Faster reviews\u003C/strong> - Supervisors and team members can watch previews immediately without waiting for full-resolution renders.\u003C/li>\u003Cli>\u003Cstrong>Quicker iterations\u003C/strong> - Artists get feedback faster, which shortens the iteration loop and reduces bottlenecks.\u003C/li>\u003Cli>\u003Cstrong>Fewer blockers\u003C/strong> - Automated previews and uploads eliminate repetitive manual steps in the pipeline to keep deliverables consistent.\u003C/li>\u003C/ul>\u003Cp>What used to take an hour of manual work can now be handled with a few scripts, giving the team more time to focus on the creative side of production instead of repetitive tasks.\u003C/p>\u003Cp>You can take this workflow even further depending on your animation studio's needs: add buttons or panels in Blender to run the entire pipeline with one click, automatically batch-generate previews for multiple shots or scenes in a single script, 
etc.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":215,"comment_id":216,"feature_image":217,"featured":105,"visibility":10,"created_at":218,"updated_at":192,"custom_excerpt":219,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":220,"primary_tag":221,"url":222,"excerpt":219,"reading_time":115,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":223},"d4c6e01e-3b37-4c90-b42c-cbfeecc518c2","693549d4ee42880001e4b1dc","https://images.unsplash.com/photo-1653200256306-6dc84510dfb6?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDN8fGFuaW1hdGlvbiUyMHBpcGVsaW5lfGVufDB8fHx8MTc2NTA5ODQ2Mnww&ixlib=rb-4.1.0&q=80&w=2000","2025-12-07T10:33:08.000+01:00","Learn how to generate low-resolution animation previews in Blender and automatically upload them to Kitsu. 
This tutorial covers Blender render settings, Python automation, FFmpeg processing, and preview publishing to streamline animation reviews.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},"https://blog.cg-wire.com/blender-kitsu-low-res-preview/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@allisonsaeng?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Allison Saeng\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-kitsu-low-res-preview","2025-12-15T10:00:23.000+01:00",{"title":210},"blender-kitsu-low-res-preview","posts/blender-kitsu-low-res-preview",[230,231],{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"slGFk0J3LjB1nVzZocG4Vk6gTdZMox1-G7CWEnotp_I",{"id":234,"title":235,"authors":236,"body":7,"description":7,"extension":8,"html":238,"meta":239,"navigation":13,"path":251,"published_at":252,"seo":253,"slug":254,"stem":255,"tags":256,"__hash__":259,"uuid":240,"comment_id":241,"feature_image":242,"featured":105,"visibility":10,"created_at":243,"updated_at":244,"custom_excerpt":245,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":246,"primary_tag":247,"url":248,"excerpt":245,"reading_time":249,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":250},"ghost/posts:blender-kitsu-breakdown-automation.json","How to Build Blender Shots Automatically Using Python and Kitsu 
(2026)",[237],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🧩\u003C/div>\u003Cdiv class=\"kg-callout-text\">Automate your shot setup and eliminate hours of manual asset placement.\u003C/div>\u003C/div>\u003Cp>Animation studios rely on \u003Cstrong>breakdown lists\u003C/strong> to track which assets must appear in each shot.\u003C/p>\u003Cp>Picture this. You’re a VFX artist staring at a blank Blender viewport for your latest production. Your manager hands you the detailed list of assets, shots, and timing cues and says, \u003Cem>\"Turn this into a Blender scene.\"\u003C/em>\u003C/p>\u003Cp>Your first thought could be to log in to your asset manager and place every object manually. But what about complex scenes with hundreds of assets?\u003C/p>\u003Cp>This is the moment where a simple automation can save the day. With Python Blender scripting, you can read Kitsu breakdown data and generate an initial scene automatically in a few minutes.\u003C/p>\u003Cp>In this article, we walk through a full example: fetching breakdowns via the \u003Cstrong>Gazu\u003C/strong> Python API, creating a fresh Blender scene, downloading the assets, and importing them into Blender. 
By the end, you’ll have a minimal pipeline that builds scenes automatically, ready for layout or animation.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-kitsu-automated-scene-composition?ref=blog.cg-wire.com\">https://github.com/cgwire/blender-kitsu-automated-scene-composition\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-getting-the-breakdown\">\u003Cstrong>1. Getting the Breakdown\u003C/strong>\u003C/h2>\u003Cp>Every 3D shot begins as a blank canvas, but the instructions for filling that canvas already exist in Kitsu:\u003Ca href=\"https://blog.cg-wire.com/3d-animation-process/\"> \u003Cu>the \u003Cstrong>breakdown\u003C/strong> dictates exactly what needs to be on stage\u003C/u>\u003C/a> before the animator begins working.\u003C/p>\u003Cp>A typical breakdown provides the essential narrative context your script needs to assemble the scene: the stage (start and end frames, duration, and other annotations stored in the sequence information), and the cast (the actual breakdown of character models, props, and environment assets).\u003C/p>\u003Cp>Before writing code, you need to define the breakdown in the Kitsu dashboard. This is where you manually link your library of 3D assets to the specific shots where they are required. 
You aren't creating new models here, just casting existing \"actors\" (assets) to a specific shot:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Enter your production\u003C/strong> - Navigate to your project in Kitsu and open the \u003Cstrong>Shots\u003C/strong> tab.\u003C/li>\u003Cli>\u003Cstrong>Locate the casting sheet\u003C/strong> - Look for the \u003Cstrong>Breakdown\u003C/strong> tab (usually found on the right-hand panel or a dedicated tab depending on your version).\u003C/li>\u003Cli>\u003Cstrong>Select the shot\u003C/strong> - Click on the specific shot you want to populate (e.g., \u003Ccode>SH01\u003C/code>) to open the detailed casting view.\u003C/li>\u003Cli>\u003Cstrong>Assign the assets\u003C/strong> - In the right side panel, click the \u003Cstrong>+ (Plus)\u003C/strong> button or \"Add Asset.\" You can also specify the quantity of each asset you need here.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-ef6fba58-9c73-4a38-b466-0b9d92e4efc0.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1466\" height=\"804\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-ef6fba58-9c73-4a38-b466-0b9d92e4efc0.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-ef6fba58-9c73-4a38-b466-0b9d92e4efc0.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-ef6fba58-9c73-4a38-b466-0b9d92e4efc0.png 1466w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Make sure your \u003Cstrong>Assets\u003C/strong> page is already populated with the models (Characters, Props, etc.) you intend to use.\u003C/p>\u003Cp>Once you hit save, the link is established. 
Now, when your Python script asks Gazu, \"Who is in this shot?\", Kitsu will reply with the list of assets you just assigned. Your Python script acts as the bridge, parsing this casting to automatically populate the Blender viewport.\u003C/p>\u003Cp>If you need a local development environment, have a look at\u003Ca href=\"https://blog.cg-wire.com/dcc-integration-blender-kitsu/\"> \u003Cu>how to install Kitsu from Docker in our Custom DCC Bridge guide\u003C/u>\u003C/a>.\u003C/p>\u003Cp>While Kitsu holds the data, we need a way to fetch it. Enter \u003Cstrong>Gazu\u003C/strong>, the Python SDK for Kitsu’s REST API:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import gazu\n\ngazu.set_host(\"http://localhost/api\")\nuser = gazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\nprojects = gazu.project.all_projects()\nproject = projects[0]\n\nsequence = gazu.shot.get_sequence_by_name(project, \"SQ01\")\nshot = gazu.shot.get_shot_by_name(sequence, \"SH01\")\n\nassets = gazu.casting.get_shot_casting(shot)\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>We connect to our local Kitsu instance, and then we pick our first production (you can also retrieve a production by name) and the shot we need the casting for.\u003C/p>\u003Cp>We can use this shot ID to retrieve the corresponding casting of assets, the breakdown list.\u003C/p>\u003Chr>\u003Ch2 id=\"2-getting-assets-from-a-breakdown\">\u003Cstrong>2. Getting Assets From a Breakdown\u003C/strong>\u003C/h2>\u003Cp>Now that we know \u003Cem>who\u003C/em> is in the shot, we need to find out \u003Cem>what\u003C/em> they look like.\u003C/p>\u003Cp>In Kitsu, an asset can have many preview files we can use depending on revisions. 
Our script needs to be able to navigate this data to get the last revision of each asset:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">local_paths = []\nfor asset in assets:\n&nbsp;&nbsp;&nbsp;&nbsp;tasks = gazu.task.all_tasks_for_asset(asset[\"asset_id\"])\n&nbsp;&nbsp;&nbsp;&nbsp;last_task = max(tasks, key=lambda x: x[\"updated_at\"])\n\n&nbsp;&nbsp;&nbsp;&nbsp;preview_files = gazu.files.get_all_preview_files_for_task(last_task)\n&nbsp;&nbsp;&nbsp;&nbsp;last_preview_file = max(preview_files, key=lambda x: x[\"updated_at\"])\n\n&nbsp;&nbsp;&nbsp;&nbsp;download_dir = \"./previews\"\n&nbsp;&nbsp;&nbsp;&nbsp;os.makedirs(download_dir, exist_ok=True)\n\n&nbsp;&nbsp;&nbsp;&nbsp;save_path = os.path.join(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;download_dir,\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;last_preview_file[\"original_name\"] + \".\" + last_preview_file[\"extension\"],\n&nbsp;&nbsp;&nbsp;&nbsp;)\n&nbsp;&nbsp;&nbsp;&nbsp;gazu.files.download_preview_file(last_preview_file, save_path)\n\n&nbsp;&nbsp;&nbsp;&nbsp;local_paths.append(save_path)\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>For each asset, we retrieve a list of all corresponding tasks of any type ('Modeling', 'Animation', etc.) or status ('done', 'todo'...). We filter this list to retrieve the last updated task.\u003C/p>\u003Cp>We can use this task ID to get the last corresponding preview file revision and download it to a local folder \u003Ccode>previews\u003C/code>. We keep these download paths in memory for the importing step.\u003C/p>\u003Cp>At the end of this loop, you have successfully turned database entries into tangible model files on your hard drive, ready for Blender to ingest.\u003C/p>\u003Chr>\u003Ch2 id=\"3-creating-a-new-blender-scene\">\u003Cstrong>3. 
Creating a New Blender Scene\u003C/strong>\u003C/h2>\u003Cp>With the asset files safely downloaded, the next task is preparing the Blender environment to receive its new cast member.\u003C/p>\u003Cp>The \u003Ccode>bpy\u003C/code> module, Blender's native Python API, acts as your command console allowing you to manipulate every element of the application.\u003C/p>\u003Cp>Before we import our Kitsu assets, we must eliminate any default objects that come with a new Blender scene. For this simple tutorial, we're targeting the default \u003Cstrong>Cube\u003C/strong>, which is often the only object present besides the default Camera and Light:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.data.objects.remove(bpy.data.objects.get(\"Cube\"), do_unlink=True)\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>The \u003Ccode>do_unlink=True\u003C/code> flag tells Blender to fully delete the object's data block (like its mesh data) if it’s no longer used by any other object to leave no clutter behind.\u003C/p>\u003Cp>We are now ready for the imported assets to take their places.\u003C/p>\u003Chr>\u003Ch2 id=\"4-importing-asset-files\">\u003Cstrong>4. Importing Asset Files\u003C/strong>\u003C/h2>\u003Cp>Now for the payoff! Since the file we downloaded from Kitsu is a standardised interchange \u003Ccode>.glb\u003C/code> format, which handles both geometry and basic materials, we use Blender’s dedicated \u003Ccode>gltf\u003C/code> import operator.\u003C/p>\u003Cp>The crucial part is providing the correct \u003Cstrong>absolute file path\u003C/strong> (\u003Ccode>glb_path\u003C/code>) to the downloaded asset. 
Fortunately, we stored those in the previous code snippet:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">for path in local_paths:\n&nbsp;&nbsp;&nbsp;&nbsp;if path.lower().endswith((\".glb\")):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;print(f\"Importing: {path}\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.ops.import_scene.gltf(filepath=path)\n\nprint(\"All preview GLB files imported successfully!\")\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>Once \u003Ccode>bpy.ops.import_scene.gltf()\u003C/code> executes, Blender reads the file and automatically creates the corresponding \u003Cstrong>objects\u003C/strong>, \u003Cstrong>meshes\u003C/strong>, and \u003Cstrong>materials\u003C/strong> in the current scene.\u003C/p>\u003Cp>The imported asset is now a full-fledged Blender object, placed at the world origin (0, 0, 0), ready for subsequent pipeline steps.\u003C/p>\u003Chr>\u003Ch2 id=\"5-saving-the-scene\">\u003Cstrong>5. Saving the Scene\u003C/strong>\u003C/h2>\u003Cp>The final step in this pipeline segment is to save the assembled layout into a permanent, versionable file. If you close Blender without this step, all the automated work is lost, so we use the \u003Ccode>bpy.ops.wm.save_as_mainfile\u003C/code> operator. 
This is the programmatic equivalent of clicking \u003Cstrong>File \\&gt; Save As\u003C/strong> in the Blender interface:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">scene_save_dir = \"./\"\nos.makedirs(scene_save_dir, exist_ok=True)\n\nblend_filename = \"SH01.blend\"\nblend_path = os.path.join(scene_save_dir, blend_filename)\n\nbpy.ops.wm.save_as_mainfile(filepath=blend_path)\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>The result is a new Blender file, \u003Ccode>SH01.blend\u003C/code>, that perfectly reflects the \u003Cstrong>breakdown requirements\u003C/strong> from Kitsu, ready for the next department to pick up.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-91e5cf8e-acb1-4ac0-b5ec-d2c37a6a1ed6.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1460\" height=\"828\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/12/data-src-image-91e5cf8e-acb1-4ac0-b5ec-d2c37a6a1ed6.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/12/data-src-image-91e5cf8e-acb1-4ac0-b5ec-d2c37a6a1ed6.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-91e5cf8e-acb1-4ac0-b5ec-d2c37a6a1ed6.png 1460w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"6-user-friendly-addon\">\u003Cstrong>6. User-Friendly Addon\u003C/strong>\u003C/h2>\u003Cp>The script works as expected, but what about artists? 
Not everyone knows how to run a script.\u003C/p>\u003Cp>Let's slightly modify our code to\u003Ca href=\"https://blog.cg-wire.com/blender-addon-ui-scripting-guide/\"> \u003Cu>turn it into a Blender addon\u003C/u>\u003C/a>:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bl_info = {\n&nbsp;&nbsp;&nbsp;&nbsp;\"name\": \"Kitsu Shot Auto-Importer\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"description\": \"Pick a project and shot and auto-import the latest preview assets\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"author\": \"cgwire\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"version\": (1, 0, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"blender\": (3, 0, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"location\": \"Viewport &gt; N-Panel &gt; Kitsu\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"category\": \"Import-Export\",\n}\n\nimport os\nimport sys\n\nsys.path.append(\"~/.local/lib/python3.11/site-packages\")\n\nimport bpy\nimport gazu\nfrom bpy.props import EnumProperty, StringProperty\n\ndef get_projects():\n&nbsp;&nbsp;&nbsp;&nbsp;try:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;projects = gazu.project.all_projects()\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return [(p[\"id\"], p[\"name\"], \"\") for p in projects]\n&nbsp;&nbsp;&nbsp;&nbsp;except:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return []\n\ndef get_sequences(project_id):\n&nbsp;&nbsp;&nbsp;&nbsp;if not project_id:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return []\n&nbsp;&nbsp;&nbsp;&nbsp;try:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;seqs = gazu.shot.all_sequences_for_project(project_id)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return [(s[\"id\"], s[\"name\"], \"\") for s in seqs]\n&nbsp;&nbsp;&nbsp;&nbsp;except:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return []\n\ndef get_shots(sequence_id):\n&nbsp;&nbsp;&nbsp;&nbsp;if not sequence_id:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return []\n&nbsp;&nbsp;&nbsp;&nbsp;try:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;shots = 
gazu.shot.all_shots_for_sequence(sequence_id)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return [(s[\"id\"], s[\"name\"], \"\") for s in shots]\n&nbsp;&nbsp;&nbsp;&nbsp;except:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return []\n\nclass KITSU_Props(bpy.types.PropertyGroup):\n&nbsp;&nbsp;&nbsp;&nbsp;project: EnumProperty(name=\"Project\", items=lambda self, context: get_projects())\n\n&nbsp;&nbsp;&nbsp;&nbsp;sequence: EnumProperty(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;name=\"Sequence\", items=lambda self, context: get_sequences(self.project)\n&nbsp;&nbsp;&nbsp;&nbsp;)\n\n&nbsp;&nbsp;&nbsp;&nbsp;shot: EnumProperty(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;name=\"Shot\", items=lambda self, context: get_shots(self.sequence)\n&nbsp;&nbsp;&nbsp;&nbsp;)\n\nclass KITSU_OT_import_shot(bpy.types.Operator):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"kitsu.import_shot_assets\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Import Shot Assets\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_description = (\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;\"Download and import latest preview GLB/GLTF files for selected shot\"\n&nbsp;&nbsp;&nbsp;&nbsp;)\n\n&nbsp;&nbsp;&nbsp;&nbsp;def execute(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;props = context.scene.kitsu_props\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;# Fetch shot data\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;shot = gazu.shot.get_shot(props.shot)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;assets = gazu.casting.get_shot_casting(shot)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;download_dir = os.path.join(bpy.app.tempdir, \"kitsu_previews\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;os.makedirs(download_dir, exist_ok=True)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;local_paths = []\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;for asset in assets:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;tasks = 
gazu.task.all_tasks_for_asset(asset[\"asset_id\"])\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;if not tasks:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;continue\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;last_task = max(tasks, key=lambda x: x[\"updated_at\"])\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;preview_files = gazu.files.get_all_preview_files_for_task(last_task)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;if not preview_files:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;continue\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;last_preview = max(preview_files, key=lambda x: x[\"updated_at\"])\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;save_path = os.path.join(\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;download_dir,\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;last_preview[\"original_name\"] + \".\" + last_preview[\"extension\"],\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;gazu.files.download_preview_file(last_preview, save_path)\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;local_paths.append(save_path)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;# Clean default cube\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;obj = bpy.data.objects.get(\"Cube\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;if obj:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.data.objects.remove(obj, do_unlink=True)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;# Import GLB/GLTF 
assets\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;for path in local_paths:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;if path.lower().endswith((\".glb\", \".gltf\")):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.ops.import_scene.gltf(filepath=path)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;# Auto-save blend file\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;save_dir = os.path.join(os.path.expanduser(\"~\"), \"kitsu_scenes\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;os.makedirs(save_dir, exist_ok=True)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;blend_path = os.path.join(save_dir, f\"{shot['name']}.blend\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.ops.wm.save_as_mainfile(filepath=blend_path)\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;self.report({\"INFO\"}, f\"Imported assets and saved: {blend_path}\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return {\"FINISHED\"}\n\nclass KITSU_PT_panel(bpy.types.Panel):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Kitsu Auto-Importer\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"KITSU_PT_auto_importer\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_space_type = \"VIEW_3D\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_region_type = \"UI\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_category = \"Kitsu\"\n\n&nbsp;&nbsp;&nbsp;&nbsp;def draw(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;props = context.scene.kitsu_props\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout = self.layout\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.separator()\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.prop(props, \"project\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.prop(props, \"sequence\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.prop(props, 
\"shot\")\n\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.separator()\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.operator(\"kitsu.import_shot_assets\", icon=\"IMPORT\")\n\nclasses = (\n&nbsp;&nbsp;&nbsp;&nbsp;KITSU_Props,\n&nbsp;&nbsp;&nbsp;&nbsp;KITSU_OT_import_shot,\n&nbsp;&nbsp;&nbsp;&nbsp;KITSU_PT_panel,\n)\n\ndef register():\n&nbsp;&nbsp;&nbsp;&nbsp;for c in classes:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.register_class(c)\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.types.Scene.kitsu_props = bpy.props.PointerProperty(type=KITSU_Props)\n\ndef unregister():\n&nbsp;&nbsp;&nbsp;&nbsp;for c in classes:\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.unregister_class(c)\n&nbsp;&nbsp;&nbsp;&nbsp;del bpy.types.Scene.kitsu_props\n\nif __name__ == \"__main__\":\n&nbsp;&nbsp;&nbsp;&nbsp;register()\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>We can now manually pick a production, sequence, and shot to get breakdown data from, and import the corresponding casting in the current Blender viewport:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/12/data-src-image-bf3ea18d-fd62-4db5-9977-6374b3ee1aef.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"480\" height=\"270\">\u003C/figure>\u003Cp>The logic is simple: we use the same \u003Ccode>gazu\u003C/code> code to populate dropdown menus, and we encapsulate them all in a panel in the viewport. An \u003Ccode>import\u003C/code> button downloads all the corresponding breakdown assets and imports them into the current workspace.\u003C/p>\u003Cp>Keep in mind that adding \u003Ccode>sys.path.append(\"~/.local/lib/python3.11/site-packages\")\u003C/code> lets Blender use your system’s Python installation to load external libraries like \u003Ccode>gazu\u003C/code>. Since Blender ships with its own isolated Python environment, managing package installations can be inconvenient. 
By extending the path, you simply instruct Blender to check your local modules as well. Make sure to adjust this path to match your own setup.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>By pulling breakdown lists directly from Kitsu and scripting Blender to assemble scenes, you eliminate repetitive manual steps and ensure asset consistency across all shots. This approach doesn't just save time but also reduces human error and ensures every artist starts with the correct asset version and scene setup required by the producer. This way, you can easily handle ten shots or ten thousand with equal reliability.\u003C/p>\u003Cp>But don't take our word for it,\u003Ca href=\"https://github.com/cgwire/blender-kitsu-automated-scene-composition?ref=blog.cg-wire.com\"> \u003Cu>clone the Github repository\u003C/u>\u003C/a> to try out the result!\u003C/p>\u003Cp>You can extend this workflow by generating automated previews, reports, or even updating asset information from the new revisions created during the shot animation.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":240,"comment_id":241,"feature_image":242,"featured":105,"visibility":10,"created_at":243,"updated_at":244,"custom_excerpt":245,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":246,"primary_tag":247,"url":248,"excerpt":245,"reading_time":249,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":250},"d090d72e-fa3b-4af9-806a-a44f7732a7c4","6909b6d2df0ae600014fbb54","https://images.unsplash.com/photo-1725888358557-9f70661012c4?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDJ8fGFuaW1hdGlvbiUyMHBpcGVsaW5lfGVufDB8fHx8MTc2NTA5ODQ2Mnww&ixlib=rb-4.1.0&q=80&w=2000","2025-11-04T09:18:26.000+01:00","2026-02-20T06:04:00.000+01:00","Learn how to automate Blender scene creation using Kitsu breakdown data and Python scripting. 
This guide walks through retrieving breakdowns via Gazu, downloading assets, importing GLB files, and generating a complete Blender scene ready for layout or animation.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},"https://blog.cg-wire.com/blender-kitsu-breakdown-automation/",11,"\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@steve_j?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Steve Johnson\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-kitsu-breakdown-automation","2025-12-07T18:11:31.000+01:00",{"title":235},"blender-kitsu-breakdown-automation","posts/blender-kitsu-breakdown-automation",[257,258],{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"i5Pduvllq_hTDBHuCFEVMzMxTyU5evzIUkMxND7t3YY",{"id":261,"title":262,"authors":263,"body":7,"description":7,"extension":8,"html":265,"meta":266,"navigation":13,"path":278,"published_at":279,"seo":280,"slug":281,"stem":282,"tags":283,"__hash__":286,"uuid":267,"comment_id":268,"feature_image":269,"featured":105,"visibility":10,"created_at":270,"updated_at":271,"custom_excerpt":272,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":273,"primary_tag":274,"url":275,"excerpt":272,"reading_time":276,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":277},"ghost/posts:blender-addon-ui-scripting-guide.json","A 2026 Guide to Blender Add-on UI 
Development",[264],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📄\u003C/div>\u003Cdiv class=\"kg-callout-text\">Turn your Blender scripts into real tools artists love using—here’s how to build clean, intuitive UI panels for your add-ons.\u003C/div>\u003C/div>\u003Cp>If you’ve ever \u003Ca href=\"https://blog.cg-wire.com/blender-scripting-animation/\">written a Blender script\u003C/a>, you’ve probably realized that getting the feature right is only half the battle: the other half is getting someone else to use it! A clean user interface is a must to share and sell Blender add-ons.\u003C/p>\u003Cp>In this guide, you’ll learn how to build user interfaces for your Blender add-ons using the built-in layout system. We’ll cover the most common types of UI components, where panels can appear, and walk through a minimal working example. By the end, you’ll know how to give your add-on a Blender-native graphical interface.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-ui-addon-script?ref=blog.cg-wire.com\">https://github.com/cgwire/blender-ui-addon-script\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-common-ui-components\">\u003Cstrong>1. 
Common UI Components\u003C/strong>\u003C/h2>\u003Cp>In Blender, every element of the user interface has its equivalent in the Python library. You build UI by creating classes that inherit from one of the following types:\u003C/p>\u003Cul>\u003Cli>\u003Ccode>bpy.types.Panel\u003C/code> - for custom panels (the most common)\u003C/li>\u003Cli>\u003Ccode>bpy.types.Menu\u003C/code> - for menus and submenus\u003C/li>\u003Cli>\u003Ccode>bpy.types.Operator\u003C/code> - for actions or tools that can be run from buttons\u003C/li>\u003C/ul>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-daa22afa-ac20-4e3e-8543-c694588146bf.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"334\" height=\"542\">\u003C/figure>\u003Cp>Each of these classes can implement a \u003Ccode>draw(self, context)\u003C/code> method where you describe what the interface should look like using layout commands. Blender’s layout system handles the spacing, alignment, and positioning automatically: it's a declarative UI system where you just describe what should appear and in what order.\u003C/p>\u003Cp>Here are the most common layout elements you’ll use:\u003C/p>\u003Ch3 id=\"basic-display-elements\">\u003Cstrong>Basic Display Elements\u003C/strong>\u003C/h3>\u003Cul>\u003Cli>\u003Cstrong>Label\u003C/strong> - Displays plain, non-interactive text. Format: \u003Ccode>layout.label(text=\"Hello!\")\u003C/code>\u003C/li>\u003Cli>\u003Cstrong>Separator\u003C/strong> - Adds vertical space between items for readability. Format: \u003Ccode>layout.separator()\u003C/code>\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Ch3 id=\"buttons-inputs-props-and-operators\">\u003Cstrong>Buttons, Inputs, Props, and Operators\u003C/strong>\u003C/h3>\u003Cul>\u003Cli>\u003Cstrong>Operator Button\u003C/strong> - Creates a clickable button that triggers an operator (a function registered as a Blender command). 
You can use this for actions like exporting, duplicating, or running a custom script. Syntax: \u003Ccode>layout.operator(\"myaddon.some_action\", text=\"Run Action\")\u003C/code>\u003C/li>\u003C/ul>\u003Cp>The \u003Ccode>layout.prop()\u003C/code> method is used to display editable Blender properties which are either built-in data (like \u003Ccode>context.object\u003C/code>) or your own custom properties. For example, \u003Ccode>layout.prop(context.object, \"name\")\u003C/code> shows an editable text field for the object’s name. Blender automatically chooses the right widget (text box, slider, checkbox, etc.) based on the property’s type:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Checkbox (Boolean property)\u003C/strong> - Displays a toggle checkbox. Example: \u003Ccode>layout.prop(context.object, \"hide_viewport\")\u003C/code>\u003C/li>\u003Cli>\u003Cstrong>Number Field / Slider (Float or Int)\u003C/strong> - Displays a numeric input, often with a slider. Example: \u003Ccode>layout.prop(context.object, \"location\", index=0, text=\"X Location\")\u003C/code>\u003C/li>\u003Cli>\u003Cstrong>Dropdown Menu (Enum property)\u003C/strong> - Displays a dropdown list when the property is an EnumProperty. Example: \u003Ccode>layout.prop(context.object, \"type\")\u003C/code>\u003C/li>\u003Cli>\u003Cstrong>Text Input \u003C/strong>- Displays a text box for string properties. Example: \u003Ccode>layout.prop(my_settings, \"username\")\u003C/code>\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Ch3 id=\"organizing-the-layout\">\u003Cstrong>Organizing the Layout\u003C/strong>\u003C/h3>\u003Cp>To keep your UI structured and easy to understand, Blender provides layout containers like rows, columns, and boxes.\u003C/p>\u003Cp>A panel contains rows and columns. Rows and columns contain properties, operators, and labels. 
Blender automatically handles padding, alignment, and scaling to match the theme and layout rules.\u003C/p>\u003Cul>\u003Cli>A row (horizontal grouping) puts elements next to each other horizontally:\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">row = layout.row()\nrow.prop(obj, \"location\")\nrow.prop(obj, \"rotation_euler\")\u003C/code>\u003C/pre>\u003Cul>\u003Cli>A column (vertical grouping) stacks elements vertically:\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">col = layout.column()\ncol.prop(obj, \"scale\")\ncol.prop(obj, \"dimensions\")\u003C/code>\u003C/pre>\u003Cul>\u003Cli>box (Visual grouping) draws a bordered box that visually groups related controls, like sections:\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">box = layout.box()\nbox.label(text=\"Transform Settings\")\nbox.prop(obj, \"location\")\nbox.prop(obj, \"rotation_euler\")\u003C/code>\u003C/pre>\u003Cp>For the full list of UI components, have a look at \u003Ca href=\"https://docs.blender.org/manual/en/latest/interface/index.html?ref=blog.cg-wire.com\">the User Interface page of the official Blender documentation\u003C/a>.\u003C/p>\u003Chr>\u003Ch2 id=\"2-where-you-can-put-ui-panels\">\u003Cstrong>2. 
Where You Can Put UI Panels\u003C/strong>\u003C/h2>\u003Cp>When you create a custom panel in Blender, you can decide where in the interface it appears and what region it occupies with two key class attributes:\u003C/p>\u003Cul>\u003Cli>\u003Ccode>bl_space_type\u003C/code> - which editor or workspace your panel belongs to (for example, the 3D View, the Properties Editor, or the Node Editor).\u003C/li>\u003Cli>\u003Ccode>bl_region_type\u003C/code> - which part of that editor the panel appears in (for example, the sidebar, toolbar, or main window).\u003C/li>\u003C/ul>\u003Cp>Here is a list of the most typical areas where you might place a custom panel:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-070d3dfe-eb98-42a2-90a2-d2eabc4fc2d4.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1125\" height=\"650\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-070d3dfe-eb98-42a2-90a2-d2eabc4fc2d4.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-070d3dfe-eb98-42a2-90a2-d2eabc4fc2d4.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-070d3dfe-eb98-42a2-90a2-d2eabc4fc2d4.png 1125w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cul>\u003Cli>The 3D view sidebar appears in the right-hand N-panel sidebar of the 3D Viewport. This is the most common location for modeling, rigging, or scene tools:\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">bl_space_type = 'VIEW_3D'\nbl_region_type = 'UI'\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cul>\u003Cli>You can add panels inside the Properties Editor, among the Object, Material, or Scene tabs. 
Use this when your add-on deals with materials, objects, render settings, or scene properties:\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">bl_space_type = 'PROPERTIES'\nbl_region_type = 'WINDOW'\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cul>\u003Cli>In the UV/Image Editor sidebar (useful for texture tools or image utilities):\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">bl_space_type = 'IMAGE_EDITOR'\nbl_region_type = 'UI'\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cul>\u003Cli>In the sidebar of the Shader, Geometry Node, or Compositor editors for tools that work with nodes, shaders, or procedural systems:\u003C/li>\u003C/ul>\u003Cpre>\u003Ccode class=\"language-python\">bl_space_type = 'NODE_EDITOR'\nbl_region_type = 'UI'\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>The best panel location depends on your tool’s purpose:\u003C/p>\u003Cul>\u003Cli>Modeling / Object tools → 3D View sidebar (\u003Ccode>VIEW_3D\u003C/code> + \u003Ccode>UI\u003C/code>)\u003C/li>\u003Cli>Material or render settings → Properties editor (\u003Ccode>PROPERTIES\u003C/code> + \u003Ccode>WINDOW\u003C/code>)\u003C/li>\u003Cli>Texture utilities → Image editor sidebar (\u003Ccode>IMAGE_EDITOR\u003C/code> + \u003Ccode>UI\u003C/code>)\u003C/li>\u003Cli>Shader / Geometry tools → Node editor sidebar (\u003Ccode>NODE_EDITOR\u003C/code> + \u003Ccode>UI\u003C/code>)\u003C/li>\u003C/ul>\u003Cp>Picking the right space helps users find your add-on where they naturally expect to, keeping your UI consistent with Blender’s.\u003C/p>\u003Chr>\u003Ch2 id=\"3-minimal-example-custom-panel-in-the-3d-view-sidebar\">\u003Cstrong>3. 
Minimal Example: Custom Panel in the 3D View Sidebar\u003C/strong>\u003C/h2>\u003Cp>Let's experiment with a simple plugin: a custom panel in the 3D view sidebar that displays a \"hello world\" text alert when clicking on a button.\u003C/p>\u003Ch3 id=\"1-blinfoaddon-metadata\">\u003Cstrong>1) \u003Ccode>bl_info\u003C/code> - addon metadata\u003C/strong>\u003C/h3>\u003Cp>We start by specifying the add-on metadata to tell Blender how to present our add-on to a potential user:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bl_info = {\n&nbsp;&nbsp;&nbsp;&nbsp;\"name\": \"Simple Addon Example\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"author\": \"Your Name\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"version\": (1, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"blender\": (4, 0, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"location\": \"View3D &gt; Sidebar &gt; Simple Tab\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"description\": \"A simple example addon that prints a message\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"category\": \"3D View\",\n}\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cul>\u003Cli>\u003Ccode>bl_info\u003C/code> is a module-level dictionary Blender uses to show addon info in Preferences → Add-ons\u003Cul>\u003Cli>\u003Ccode>name:\u003C/code> human-readable name shown in the list\u003C/li>\u003Cli>\u003Ccode>author:\u003C/code> author string\u003C/li>\u003Cli>\u003Ccode>version:\u003C/code> tuple representing addon version\u003C/li>\u003Cli>\u003Ccode>blender:\u003C/code> minimum Blender version this addon targets (tuple)\u003C/li>\u003Cli>\u003Ccode>location:\u003C/code> where the addon UI appears (helpful for users)\u003C/li>\u003Cli>\u003Ccode>description:\u003C/code> short description used in the UI\u003C/li>\u003Cli>\u003Ccode>category:\u003C/code> category grouping in the Add-ons list\u003C/li>\u003C/ul>\u003C/li>\u003C/ul>\u003Cp>It's essential to keep your \u003Ccode>bl_info\u003C/code> accurate, as Blender reads it when scanning installed add-ons.\u003C/p>\u003Cp>\u003C/p>\u003Ch3 
id=\"2-define-an-operator-class\">\u003Cstrong>2) Define an operator class\u003C/strong>\u003C/h3>\u003Cp>We then define an Operator subclass. Operators are the official way to perform actions in Blender: they can be invoked from UI, shortcuts, search menu, etc.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">class SIMPLEADDON_OT_hello(bpy.types.Operator):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"simple_addon.say_hello\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Say Hello\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_description = \"Prints a message to the console\"\n\n&nbsp;&nbsp;&nbsp;&nbsp;def execute(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;self.report({'INFO'}, \"Hello from Blender Addon!\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;print(\"Hello from Blender Addon!\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return {'FINISHED'}\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>bl_idname\u003C/code> - A unique identifier string in the form \u003Ccode>\"module_name.operator_name\"\u003C/code>, all lowercase and with a dot. This is how you call the operator from code or UI (\u003Ccode>bpy.ops.simple_addon.say_hello()\u003C/code>).\u003C/li>\u003Cli>\u003Ccode>bl_label\u003C/code> - User-facing label that appears on buttons/menus.\u003C/li>\u003Cli>\u003Ccode>bl_description\u003C/code> - Tooltip/description shown in the UI.\u003C/li>\u003Cli>\u003Ccode>execute(self, context)\u003C/code> - Core method called when the operator runs (synchronous execution). \u003Ccode>context\u003C/code> gives access to Blender's current state (active object, scene, area, etc.). \u003Ccode>self.report({'INFO'}, \"…\")\u003C/code> shows a small message in Blender's info bar / status (good for user feedback). \u003Ccode>print(\"…\")\u003C/code> prints to the system/Blender console (useful for debugging). Returns a set like \u003Ccode>{'FINISHED'}\u003C/code> or \u003Ccode>{'CANCELLED'}\u003C/code>. 
Blender uses this result to know whether the operator completed successfully.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Ch3 id=\"3-panel-classui-placement\">\u003Cstrong>3) Panel class - UI placement\u003C/strong>\u003C/h3>\u003Cp>We can then get to the Panel subclass to add UI in Blender:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">class SIMPLEADDON_PT_panel(bpy.types.Panel):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Simple Addon Panel\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"SIMPLEADDON_PT_panel\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_space_type = 'VIEW_3D'\n&nbsp;&nbsp;&nbsp;&nbsp;bl_region_type = 'UI'\n&nbsp;&nbsp;&nbsp;&nbsp;bl_category = 'Simple'\n\n&nbsp;&nbsp;&nbsp;&nbsp;def draw(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout = self.layout\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.operator(\"simple_addon.say_hello\")\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>bl_label\u003C/code> - panel title shown in the UI.\u003C/li>\u003Cli>\u003Ccode>bl_idname\u003C/code> - unique panel identifier.\u003C/li>\u003Cli>\u003Ccode>bl_space_type = 'VIEW_3D'\u003C/code> tells Blender this panel belongs in the 3D Viewport area.\u003C/li>\u003Cli>\u003Ccode>bl_region_type = 'UI'\u003C/code> places it in the right-side region (the N-panel). Other regions exist (e.g., \u003Ccode>'TOOLS', 'WINDOW'\u003C/code>).\u003C/li>\u003Cli>\u003Ccode>bl_category = 'Simple'\u003C/code> - The tab name in the sidebar. The panel will appear under a tab labeled “Simple”.\u003C/li>\u003Cli>\u003Ccode>draw(self, context)\u003C/code> is called to draw UI layout.\u003C/li>\u003Cli>\u003Ccode>self.layout\u003C/code> is a \u003Ccode>UILayout\u003C/code> object used to place buttons, labels, properties, etc.\u003C/li>\u003Cli>\u003Ccode>layout.operator(\"simple_addon.say_hello\")\u003C/code> creates a button that, when clicked, calls the operator with bl_idname \u003Ccode>\"simple_addon.say_hello\"\u003C/code>. 
The button text is taken from the operator's \u003Ccode>bl_label\u003C/code>.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Ch3 id=\"4-register-unregister-functions\">\u003Cstrong>4) Register / unregister functions\u003C/strong>\u003C/h3>\u003Cp>Blender requires classes that define UI, operators, panels, properties, etc., to be registered so Blender knows about them:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">def register():\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.register_class(SIMPLEADDON_OT_hello)\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.register_class(SIMPLEADDON_PT_panel)\n\ndef unregister():\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.unregister_class(SIMPLEADDON_PT_panel)\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.unregister_class(SIMPLEADDON_OT_hello)\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>bpy.utils.register_class(Class)\u003C/code> registers a class; \u003Ccode>unregister_class\u003C/code> removes it.\u003C/li>\u003Cli>It's important to unregister classes in the reverse order of registration, especially when classes reference each other. This is why the panel is unregistered before the operator.\u003C/li>\u003Cli>When the addon is enabled in Preferences, Blender calls \u003Ccode>register()\u003C/code>. 
When disabled, it calls \u003Ccode>unregister()\u003C/code>.\u003C/li>\u003C/ul>\u003Cp>We put the full code in a Python file \u003Ccode>addon.py\u003C/code>:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bl_info = {\n&nbsp;&nbsp;&nbsp;&nbsp;\"name\": \"Simple Addon Example\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"author\": \"Your Name\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"version\": (1, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"blender\": (4, 0, 0),\n&nbsp;&nbsp;&nbsp;&nbsp;\"location\": \"View3D &gt; Sidebar &gt; Simple Tab\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"description\": \"A simple example addon that prints a message\",\n&nbsp;&nbsp;&nbsp;&nbsp;\"category\": \"3D View\",\n}\n\nimport bpy\n\nclass SIMPLEADDON_OT_hello(bpy.types.Operator):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"simple_addon.say_hello\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Say Hello\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_description = \"Prints a message to the console\"\n\n&nbsp;&nbsp;&nbsp;&nbsp;def execute(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;self.report({'INFO'}, \"Hello from Blender Addon!\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;print(\"Hello from Blender Addon!\")\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;return {'FINISHED'}\n\nclass SIMPLEADDON_PT_panel(bpy.types.Panel):\n&nbsp;&nbsp;&nbsp;&nbsp;bl_label = \"Simple Addon Panel\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_idname = \"SIMPLEADDON_PT_panel\"\n&nbsp;&nbsp;&nbsp;&nbsp;bl_space_type = 'VIEW_3D'\n&nbsp;&nbsp;&nbsp;&nbsp;bl_region_type = 'UI'\n&nbsp;&nbsp;&nbsp;&nbsp;bl_category = 'Simple'\n\n&nbsp;&nbsp;&nbsp;&nbsp;def draw(self, context):\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout = self.layout\n&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;layout.operator(\"simple_addon.say_hello\")\n\ndef register():\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.register_class(SIMPLEADDON_OT_hello)\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.register_class(SIMPLEADDON_PT_panel)\n\ndef 
unregister():\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.unregister_class(SIMPLEADDON_PT_panel)\n&nbsp;&nbsp;&nbsp;&nbsp;bpy.utils.unregister_class(SIMPLEADDON_OT_hello)\u003C/code>\u003C/pre>\u003Chr>\u003Ch2 id=\"4-running-and-packaging-your-add-on\">\u003Cstrong>4. Running and Packaging Your Add-on\u003C/strong>\u003C/h2>\u003Cp>Once you’ve written your add-on script, you can load it into Blender and test it right away. No tools required.\u003C/p>\u003Col>\u003Cli>Save your script - Save your Python file with a clear name like \u003Ccode>my_addon.py\u003C/code>.\u003C/li>\u003Cli>Open Blender’s Add-ons Preferences - Go to Edit → Preferences → Add-ons. This is where Blender manages all installed extensions.\u003C/li>\u003Cli>Install the add-on - Click the Install… button at the top of the preferences window. \u003Ccode>Select your my_addon.py\u003C/code> file and click Install Add-on.\u003C/li>\u003Cli>Enable it - After installing, your add-on should appear in the list. Find it (you can search for “My Add-on”) and check the box to enable it if it's not already.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-27ff3592-fed1-4347-8930-9dd62b2d950b.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1227\" height=\"800\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-27ff3592-fed1-4347-8930-9dd62b2d950b.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-27ff3592-fed1-4347-8930-9dd62b2d950b.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-27ff3592-fed1-4347-8930-9dd62b2d950b.png 1227w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Col start=\"5\">\u003Cli>Check it in the interface - Open the 3D 
Viewport, open the sidebar, and look for the tab named Simple. Your custom panel should be there, ready to use!\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-2a90e13f-b338-4235-a830-f9c8d8060562.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1227\" height=\"741\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-2a90e13f-b338-4235-a830-f9c8d8060562.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-2a90e13f-b338-4235-a830-f9c8d8060562.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-2a90e13f-b338-4235-a830-f9c8d8060562.png 1227w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>When you want to share your add-on with others, you can upload it to GitHub, Blender Artists, or Gumroad for distribution. Add a short README.md explaining what the add-on does and how to install it.\u003C/p>\u003Cp>For add-ons with multiple files (e.g. separate modules, icons, or resources), create a folder then zip the entire folder (\u003Ccode>my_addon.zip\u003C/code>) and share that. Blender can install \u003Ccode>.zip\u003C/code> archives directly via the same Install… button so no need to extract it beforehand. The main entry point must be named \u003Ccode>__init__.py\u003C/code>, since Blender treats it as a Python package.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Creating UI for Blender add-ons is intimidating at first, but it’s one of the easiest ways to share a tool you created. 
Once you understand how panels and layouts work, you can quickly add buttons, properties, and organized sections that users will find intuitive.\u003C/p>\u003Cp>\u003Ca href=\"https://github.com/cgwire/blender-ui-addon-script?ref=blog.cg-wire.com\">Have a look at the code repository on Github\u003C/a> to try the example yourself.\u003C/p>\u003Cp>Start small by adding a simple panel, a label, and a button to create an action, and build from there!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord 
Community\u003C/a>\u003C/div>",{"uuid":267,"comment_id":268,"feature_image":269,"featured":105,"visibility":10,"created_at":270,"updated_at":271,"custom_excerpt":272,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":273,"primary_tag":274,"url":275,"excerpt":272,"reading_time":276,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":277},"e18120b7-5615-497e-8db8-9f03ceee9526","6922df21009fc3000190e38e","https://images.unsplash.com/photo-1760548425425-e42e77fa38f1?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDF8fGNvZGluZyUyMGludGVyZmFjZSUyMHRvb2xzfGVufDB8fHx8MTc2Mzg5MzE4MXww&ixlib=rb-4.1.0&q=80&w=2000","2025-11-23T11:17:05.000+01:00","2026-02-20T06:03:59.000+01:00","Turn your Blender scripts into real tools artists love using—here’s how to build clean, intuitive UI panels for your add-ons.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},"https://blog.cg-wire.com/blender-addon-ui-scripting-guide/",9,"\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@jakubzerdzicki?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Jakub Żerdzicki\u003C/span>\u003C/a>\u003Cspan 
style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-addon-ui-scripting-guide","2025-11-24T10:00:34.000+01:00",{"title":262},"blender-addon-ui-scripting-guide","posts/blender-addon-ui-scripting-guide",[284,285],{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"3-HhmFMhJkG_7Y2WuAQl2Cmyemg5YE38Mtwl_osaN7w",{"id":288,"title":289,"authors":290,"body":7,"description":7,"extension":8,"html":292,"meta":293,"navigation":13,"path":304,"published_at":305,"seo":306,"slug":307,"stem":308,"tags":309,"__hash__":312,"uuid":294,"comment_id":295,"feature_image":296,"featured":105,"visibility":10,"created_at":297,"updated_at":298,"custom_excerpt":299,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":300,"primary_tag":301,"url":302,"excerpt":299,"reading_time":115,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":303},"ghost/posts:blender-scripting-geometry-nodes-2.json","How to Script Geometry Nodes in Blender with Python 
(2026)",[291],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🐍\u003C/div>\u003Cdiv class=\"kg-callout-text\">Procedural modeling becomes far more powerful when you generate nodes with code instead of wiring them by hand.\u003C/div>\u003C/div>\u003Cp>Geometry nodes are an incredible Blender feature, but did you know Blender's Python API also lets you script geometry nodes just like any other data block?\u003C/p>\u003Cp>You can create nodes, set their parameters, and connect them programmatically, opening the door to automated scene generation, custom tools, and rapid model prototyping with just a few lines of code instead of manually wiring dozens of nodes.\u003C/p>\u003Cp>In this tutorial, you'll learn how to create geometry node setups entirely from a Python script. We'll cover the full process from building a new node tree to assigning it to an object with clear examples you can paste directly into Blender's scripting editor.\u003C/p>\u003Cp>In case you missed it, have a look at \u003Ca href=\"https://blog.cg-wire.com/blender-scripting-animation/\">our introduction to Blender scripting\u003C/a> first.\u003C/p>\u003Chr>\u003Ch2 id=\"why-script-geometry-nodes\">\u003Cstrong>Why Script Geometry Nodes?\u003C/strong>\u003C/h2>\u003Cp>Blender's Geometry Nodes editor is an excellent visual system for building procedural tools: it's intuitive, flexible, and great for experimentation once you get the hang of it. 
But as projects grow in complexity, manually managing large node networks can become tedious and difficult to maintain, especially if you need to reuse them throughout many 3D modeling pipelines.\u003C/p>\u003Cp>Scripting allows you to generate, modify, and connect nodes automatically. Instead of manually recreating the same setups across multiple projects, you can write a script once and reuse it whenever you need it to save time or make your animations more consistent.\u003C/p>\u003Cp>A scripted node setup isn't tied to a single .blend file: it can be stored, versioned, and shared just like any other piece of code. This makes it easy to build a library of procedural tools that can be reused across different projects or shared with other artists and developers.\u003C/p>\u003Cp>Let's see how scripting works in practice with a few code snippets.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-blue\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-scripting-geometry-nodes?ref=blog.cg-wire.com\">https://github.com/cgwire/blender-scripting-geometry-nodes\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-creating-a-new-node-tree\">\u003Cstrong>1. Creating a New Node Tree\u003C/strong>\u003C/h2>\u003Cp>Every Geometry Nodes setup starts as a node tree, which stores nodes and their connections. 
You can create one from Python using Blender's data API:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\nnode_tree = bpy.data.node_groups.new(\"MyGeoNodesTree\", 'GeometryNodeTree')\u003C/code>\u003C/pre>\u003Cp>You can think of this \u003Ccode>node_tree\u003C/code> as the digital canvas that will hold all your procedural logic. Once created, you can add nodes, connect them, and set their properties like in Blender's graphical user interface.\u003C/p>\u003Chr>\u003Ch2 id=\"2-add-nodes-and-connect-them\">\u003Cstrong>2. Add Nodes and Connect Them\u003C/strong>\u003C/h2>\u003Cp>Next, let's add a few basic nodes. We'll create an Input Geometry node, a Subdivision Surface node, and a Group Output node, then connect them and apply the result to our cube.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\"># ADD NODES\ngeo_input = node_tree.interface.new_socket(\n&nbsp;&nbsp;&nbsp;&nbsp;name=\"Geometry\",\n&nbsp;&nbsp;&nbsp;&nbsp;in_out='INPUT',\n&nbsp;&nbsp;&nbsp;&nbsp;socket_type='NodeSocketGeometry'\n)\ngeo_output = node_tree.interface.new_socket(\n&nbsp;&nbsp;&nbsp;&nbsp;name=\"Geometry\",\n&nbsp;&nbsp;&nbsp;&nbsp;in_out='OUTPUT',\n&nbsp;&nbsp;&nbsp;&nbsp;socket_type='NodeSocketGeometry'\n)\n\ninput_node = node_tree.nodes.new(\"NodeGroupInput\")\nsubdivide_node = node_tree.nodes.new(\"GeometryNodeSubdivideMesh\")\noutput_node = node_tree.nodes.new(\"NodeGroupOutput\")\n\ninput_node.location = (-300, 0)\nsubdivide_node.location = (0, 0)\noutput_node.location = (300, 0)\n\n# LINK NODES\nnode_tree.links.new(input_node.outputs['Geometry'], subdivide_node.inputs['Mesh'])\nnode_tree.links.new(subdivide_node.outputs['Mesh'], output_node.inputs['Geometry'])\n\n# APPLY TO CURRENT OBJECT\nobj = bpy.context.object\nmod = obj.modifiers.new(\"MyGeoNodesModifier\", \"NODES\")\nmod.node_group = node_tree\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>When you run this script, you'll have a functional (though simple) geometry node setup that subdivides any geometry 
it's applied to:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-de23dbc9-781f-4730-9a46-a6fec93c97a7.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1314\" height=\"889\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-de23dbc9-781f-4730-9a46-a6fec93c97a7.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-de23dbc9-781f-4730-9a46-a6fec93c97a7.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-de23dbc9-781f-4730-9a46-a6fec93c97a7.png 1314w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"3-set-parameters-and-link-geometry-to-objects\">\u003Cstrong>3. Set Parameters and Link Geometry to Objects\u003C/strong>\u003C/h2>\u003Cp>You can modify parameters directly via the node's properties. 
For example, let's increase the subdivision level and apply this node group to an object:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">subdivide_node.inputs['Level'].default_value = 3\u003C/code>\u003C/pre>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-18e48250-6a76-4eda-b14c-ce8065b78f9e.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1314\" height=\"889\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-18e48250-6a76-4eda-b14c-ce8065b78f9e.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-18e48250-6a76-4eda-b14c-ce8065b78f9e.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-18e48250-6a76-4eda-b14c-ce8065b78f9e.png 1314w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Adjusting \u003Ccode>default_value\u003C/code> for inputs is an easy way to parameterize your setup.\u003C/p>\u003Cp>For a full breakdown of the available parameters and types, refer to \u003Ca href=\"https://docs.blender.org/api/current/bpy.types.Node.html?ref=blog.cg-wire.com\">the official Blender Python API documentation\u003C/a>.\u003C/p>\u003Chr>\u003Ch2 id=\"4-create-a-custom-%E2%80%9Ccube-crowd-generator%E2%80%9D-node-group-programmatically\">\u003Cstrong>4. Create a Custom “Cube Crowd Generator” Node Group Programmatically\u003C/strong>\u003C/h2>\u003Cp>We now know how to define geometry nodes programmatically, but what about creating reusable custom nodes?\u003C/p>\u003Cp>Let's work on a new example that builds a tiny procedural system that scatters many cubes on a surface. 
The script creates a Geometry Nodes group that takes a surface, scatters points across it, randomly offsets those points, places a cube on each point (instances), converts the instances to real geometry, and outputs the final mesh as \"Cubes\".\u003C/p>\u003Cp>\u003C/p>\u003Ch3 id=\"1-create-a-new-node-group\">\u003Cstrong>1) Create a new node group\u003C/strong>\u003C/h3>\u003Cp>First, we create a new Geometry Node group in Blender named \u003Ccode>\"CubeCrowdGenerator\"\u003C/code>.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">crowd_group = bpy.data.node_groups.new(\"CubeCrowdGenerator\", \"GeometryNodeTree\")\u003C/code>\u003C/pre>\u003Cp>Like a function, we want to be able to attach this node to any object with a Geometry Nodes modifier later on.\u003C/p>\u003Cp>\u003C/p>\u003Ch3 id=\"2-add-group-input-and-output-nodes-uientry-points\">\u003Cstrong>2) Add group input and output nodes (UI/entry points)\u003C/strong>\u003C/h3>\u003Cp>We place standard input and output groups on the canvas as usual:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">group_in = crowd_group.nodes.new(\"NodeGroupInput\")\ngroup_out = crowd_group.nodes.new(\"NodeGroupOutput\")\n\ngroup_in.location = (-600, 0)\ngroup_out.location = (600, 0)\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>group_in\u003C/code> and \u003Ccode>group_out\u003C/code> are the visible sockets of the node group in the Geometry Nodes editor.\u003C/li>\u003Cli>The script also positions them so the graph is readable.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Ch3 id=\"3-define-the-group-interface-what-the-group-acceptsreturns\">\u003Cstrong>3) Define the group interface (what the group accepts/returns)\u003C/strong>\u003C/h3>\u003Cp>We need to expose an \u003Cstrong>input socket named \u003Ccode>Surface\u003C/code>\u003C/strong> where we'll plug the mesh you want to populate (e.g., a plane) and an \u003Cstrong>output socket named \u003Ccode>Cubes\u003C/code>\u003C/strong>, the resulting 
geometry.\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">interface = crowd_group.interface\ninterface.new_socket(name=\"Surface\", in_out=\"INPUT\", socket_type=\"NodeSocketGeometry\")\ninterface.new_socket(name=\"Cubes\", in_out=\"OUTPUT\", socket_type=\"NodeSocketGeometry\")\u003C/code>\u003C/pre>\u003Cp>In practice, when you add this node group to an object, you will plug its surface (an object's original geometry) into \u003Ccode>Surface\u003C/code>.\u003C/p>\u003Cp>\u003C/p>\u003Ch3 id=\"4-create-the-internal-nodes-the-building-blocks\">\u003Cstrong>4) Create the internal nodes (the building blocks)\u003C/strong>\u003C/h3>\u003Cp>We can then work on the actual internal logic:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">distribute = crowd_group.nodes.new(\"GeometryNodeDistributePointsOnFaces\")\nrand_vec = crowd_group.nodes.new(\"FunctionNodeRandomValue\")\nset_pos = crowd_group.nodes.new(\"GeometryNodeSetPosition\")\ncube = crowd_group.nodes.new(\"GeometryNodeMeshCube\")\ninstance = crowd_group.nodes.new(\"GeometryNodeInstanceOnPoints\")\nrealize = crowd_group.nodes.new(\"GeometryNodeRealizeInstances\")\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Cstrong>GeometryNodeDistributePointsOnFaces\u003C/strong>: creates points across the input surface (controls how many points, distribution).\u003C/li>\u003Cli>\u003Cstrong>FunctionNodeRandomValue (Float Vector)\u003C/strong>: produces a random 3D vector per point used as an offset.\u003C/li>\u003Cli>\u003Cstrong>GeometryNodeSetPosition\u003C/strong>: moves each point by a vector (the random offset).\u003C/li>\u003Cli>\u003Cstrong>GeometryNodeMeshCube\u003C/strong>: generates a cube mesh that will be used as the instance object.\u003C/li>\u003Cli>\u003Cstrong>GeometryNodeInstanceOnPoints\u003C/strong>: places the cube on each point. 
It doesn't create real geometry, it's just a cheap instance of the original cube.\u003C/li>\u003Cli>\u003Cstrong>GeometryNodeRealizeInstances\u003C/strong>: converts instances into actual mesh geometry so they can be output as a single mesh.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Ch3 id=\"5-configure-the-random-vector-node\">\u003Cstrong>5) Configure the random vector node\u003C/strong>\u003C/h3>\u003Cp>We set the \u003Ccode>Random Value\u003C/code> node to return a \u003Cstrong>3-component vector \u003C/strong>we can use to offset the generated cubes in the 3D space:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">rand_vec.data_type = \"FLOAT_VECTOR\"\nrand_vec.inputs[\"Min\"].default_value = (-0.5, -0.5, 0.0)\nrand_vec.inputs[\"Max\"].default_value = (0.5, 0.5, 0.5)\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>Min\u003C/code> and \u003Ccode>Max\u003C/code> define the range for each component. For example, X will be between \u003Ccode>-0.5\u003C/code> and \u003Ccode>0.5\u003C/code>.\u003C/li>\u003Cli>Result: each point gets a slightly different offset so cubes don't sit exactly on top of one another.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Ch3 id=\"6-node-layout-ui-only\">\u003Cstrong>6) Node layout (UI only)\u003C/strong>\u003C/h3>\u003Cp>We then position the internal nodes to make them easy to understand if we want to check our workflow in Blender:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">distribute.location = (-400, 0)\nrand_vec.location = (-200, -200)\nset_pos.location = (-100, 0)\ninstance.location = (100, 0)\ncube.location = (-400, -200)\nrealize.location = (300, 0)\u003C/code>\u003C/pre>\u003Cp>These \u003Ccode>location\u003C/code> assignments only affect how the nodes are visually arranged in the node editor. 
They don't affect what the graph does.\u003C/p>\u003Cp>\u003C/p>\u003Ch3 id=\"7-wire-the-nodes-together\">\u003Cstrong>7) Wire the nodes together\u003C/strong>\u003C/h3>\u003Cp>Finally, we define how the data flows:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">links.new(group_in.outputs[\"Surface\"], distribute.inputs[\"Mesh\"])\nlinks.new(distribute.outputs[\"Points\"], set_pos.inputs[\"Geometry\"])\nlinks.new(rand_vec.outputs[\"Value\"], set_pos.inputs[\"Offset\"])\nlinks.new(set_pos.outputs[\"Geometry\"], instance.inputs[\"Points\"])\nlinks.new(cube.outputs[\"Mesh\"], instance.inputs[\"Instance\"])\nlinks.new(instance.outputs[\"Instances\"], realize.inputs[\"Geometry\"])\nlinks.new(realize.outputs[\"Geometry\"], group_out.inputs[\"Cubes\"])\u003C/code>\u003C/pre>\u003Col>\u003Cli>\u003Cstrong>Surface → DistributePointsOnFaces\u003C/strong>: the input surface (plane) is used to create scattered points.\u003C/li>\u003Cli>\u003Cstrong>Points → SetPosition (Geometry)\u003C/strong>: set position receives the points as geometry to be moved.\u003C/li>\u003Cli>\u003Cstrong>RandomValue → SetPosition (Offset)\u003C/strong>: each point gets a random vector offset.\u003C/li>\u003Cli>\u003Cstrong>SetPosition → InstanceOnPoints (Points)\u003C/strong>: the moved points become the anchor positions for instances.\u003C/li>\u003Cli>\u003Cstrong>Cube Mesh → InstanceOnPoints (Instance)\u003C/strong>: each point receives a cube instance.\u003C/li>\u003Cli>\u003Cstrong>InstanceOnPoints → RealizeInstances\u003C/strong>: instances are converted to mesh geometry.\u003C/li>\u003Cli>\u003Cstrong>RealizeInstances → Group Output (\"Cubes\")\u003C/strong>: final result is made available as the group's output.\u003C/li>\u003C/ol>\u003Cp>This is the full code we obtained:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\n# Create a new Geometry Node group\ncrowd_group = bpy.data.node_groups.new(\"CubeCrowdGenerator\", \"GeometryNodeTree\")\n\n# Create input/output 
nodes\ngroup_in = crowd_group.nodes.new(\"NodeGroupInput\")\ngroup_out = crowd_group.nodes.new(\"NodeGroupOutput\")\n\ngroup_in.location = (-600, 0)\ngroup_out.location = (600, 0)\n\n# Define group interface sockets\ninterface = crowd_group.interface\ninterface.new_socket(name=\"Surface\", in_out=\"INPUT\", socket_type=\"NodeSocketGeometry\")\ninterface.new_socket(name=\"Cubes\", in_out=\"OUTPUT\", socket_type=\"NodeSocketGeometry\")\n\n# Create internal nodes\ndistribute = crowd_group.nodes.new(\"GeometryNodeDistributePointsOnFaces\")\ninstance = crowd_group.nodes.new(\"GeometryNodeInstanceOnPoints\")\ncube = crowd_group.nodes.new(\"GeometryNodeMeshCube\")\nrealize = crowd_group.nodes.new(\"GeometryNodeRealizeInstances\")\nset_pos = crowd_group.nodes.new(\"GeometryNodeSetPosition\")\nrand_vec = crowd_group.nodes.new(\"FunctionNodeRandomValue\")\n\n# Configure random vector node\nrand_vec.data_type = \"FLOAT_VECTOR\"\nrand_vec.inputs[\"Min\"].default_value = (-0.5, -0.5, 0.0)&nbsp; # minimum offset\nrand_vec.inputs[\"Max\"].default_value = (0.5, 0.5, 0.5)&nbsp; # maximum offset\n\n# Layout nodes\ndistribute.location = (-400, 0)\nrand_vec.location = (-200, -200)\nset_pos.location = (-100, 0)\ninstance.location = (100, 0)\ncube.location = (-400, -200)\nrealize.location = (300, 0)\n\n# Create links\nlinks = crowd_group.links\nlinks.new(group_in.outputs[\"Surface\"], distribute.inputs[\"Mesh\"])\nlinks.new(distribute.outputs[\"Points\"], set_pos.inputs[\"Geometry\"])\nlinks.new(rand_vec.outputs[\"Value\"], set_pos.inputs[\"Offset\"])\nlinks.new(set_pos.outputs[\"Geometry\"], instance.inputs[\"Points\"])\nlinks.new(cube.outputs[\"Mesh\"], instance.inputs[\"Instance\"])\nlinks.new(instance.outputs[\"Instances\"], realize.inputs[\"Geometry\"])\nlinks.new(realize.outputs[\"Geometry\"], group_out.inputs[\"Cubes\"])\u003C/code>\u003C/pre>\u003Cp>Now we just copy/paste this script into the scripting workspace, run it, and we can now add our custom node from the geometry node 
workspace:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-d4ff8437-efb6-43b0-b45d-a54fce0b74b6.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1430\" height=\"920\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-d4ff8437-efb6-43b0-b45d-a54fce0b74b6.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-d4ff8437-efb6-43b0-b45d-a54fce0b74b6.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-d4ff8437-efb6-43b0-b45d-a54fce0b74b6.png 1430w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>We can open the node group to see what's inside by double-clicking on it:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-679df6c4-2877-4419-8b79-4758df98290a.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1430\" height=\"920\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-679df6c4-2877-4419-8b79-4758df98290a.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-679df6c4-2877-4419-8b79-4758df98290a.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-679df6c4-2877-4419-8b79-4758df98290a.png 1430w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>With just a few dozen lines of code, you can script Geometry Nodes setups that would take much longer to assemble manually. 
You've learned in this article how to create Geometry Node trees, add and connect nodes programmatically, control parameters and assign node trees to objects, and build a full procedural system.\u003C/p>\u003Cp>Have a look at \u003Ca href=\"https://github.com/cgwire/blender-scripting-geometry-nodes?ref=blog.cg-wire.com\">the code repository on Github\u003C/a> to try the example yourself!\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-db488d5a-7ab5-4471-a904-0926b1fa7d11.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1314\" height=\"889\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-db488d5a-7ab5-4471-a904-0926b1fa7d11.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-db488d5a-7ab5-4471-a904-0926b1fa7d11.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-db488d5a-7ab5-4471-a904-0926b1fa7d11.png 1314w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>This approach unlocks endless automation potential, from tool development to generative art. \u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":294,"comment_id":295,"feature_image":296,"featured":105,"visibility":10,"created_at":297,"updated_at":298,"custom_excerpt":299,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":300,"primary_tag":301,"url":302,"excerpt":299,"reading_time":115,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":303},"93358eb1-5534-43ed-89a8-0b0de2f00072","691ae1dba0beff00013f02eb","https://images.unsplash.com/photo-1675044794037-9262cedb6d5d?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDV8fGJsZW5kZXIlMjBnZW9tZXRyeSUyMG5vZGVzfGVufDB8fHx8MTc2MzM2OTc0N3ww&ixlib=rb-4.1.0&q=80&w=2000","2025-11-17T09:50:35.000+01:00","2026-02-20T06:04:04.000+01:00","Learn how to script Blender Geometry Nodes using Python to automate procedural setups, generate node trees programmatically, and build reusable tools for your animation pipeline.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},"https://blog.cg-wire.com/blender-scripting-geometry-nodes-2/","\u003Cspan style=\"white-space: 
pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@mirzaie?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Mehdi Mirzaie\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-scripting-geometry-nodes-2","2025-11-17T10:13:21.000+01:00",{"title":289},"blender-scripting-geometry-nodes-2","posts/blender-scripting-geometry-nodes-2",[310,311],{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"ABwCKyHYQd2e24_gRrEcz2gAc349u2DzqkOMZrfJtyU",{"id":314,"title":315,"authors":316,"body":7,"description":7,"extension":8,"html":318,"meta":319,"navigation":13,"path":329,"published_at":330,"seo":331,"slug":332,"stem":333,"tags":334,"__hash__":336,"uuid":320,"comment_id":321,"feature_image":322,"featured":105,"visibility":10,"created_at":323,"updated_at":298,"custom_excerpt":324,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":325,"primary_tag":326,"url":327,"excerpt":324,"reading_time":115,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_su
bject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":328},"ghost/posts:blender-scripting-geometry-nodes.json","The Beginner’s Guide to Geometry Nodes in Blender 2026",[317],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🧩\u003C/div>\u003Cdiv class=\"kg-callout-text\">Rebuilding scenes by hand is so 2010. Geometry Nodes let you automate, randomize, and control Blender projects with precision — turning hours of manual modeling into minutes of procedural magic.\u003C/div>\u003C/div>\u003Cp>Spending hours manually duplicating geometry, reshaping, or animating repetitive movements in Blender isn't fun. Some workflows are like that: you need to do repetitive tasks over and over again, with only slight variations.\u003C/p>\u003Cp>But there is a smarter, faster way to create procedural effects called geometry nodes. They can seem intimidating and take time to master, but by the end of this article, you’ll know what geometry nodes are, why they matter, and how to start using them in your own Blender projects.\u003C/p>\u003Chr>\u003Ch2 id=\"what-are-geometry-nodes\">\u003Cstrong>What Are Geometry Nodes?\u003C/strong>\u003C/h2>\u003Cp>Geometry Nodes are Blender’s way of letting you create and manipulate models procedurally. 
Instead of editing mesh objects directly, you connect visual nodes that define operations like instancing, transforming, or scattering objects in a non-destructive, modular way.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-b4252feb-7713-4df1-98ca-cc453b53d4ee.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"830\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-b4252feb-7713-4df1-98ca-cc453b53d4ee.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-b4252feb-7713-4df1-98ca-cc453b53d4ee.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-b4252feb-7713-4df1-98ca-cc453b53d4ee.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>Each node performs a small task, but when connected, they can create incredibly detailed results: from forests made of thousands of randomized trees to animated particle trails or architectural patterns. Geometry Nodes allow you to build once and control everything with adjustable parameters.\u003C/p>\u003Chr>\u003Ch2 id=\"why-geometry-nodes-are-important\">\u003Cstrong>Why Geometry Nodes Are Important\u003C/strong>\u003C/h2>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/3d-modeling-animation/\">Traditional modeling and animation workflows\u003C/a> often depend on time-consuming manual adjustments where every change or variation requires direct edits to the model. Geometry Nodes revolutionize this process by introducing procedural control, a system that allows you to generate and modify models dynamically through input values, randomness, or mathematical relationships.\u003C/p>\u003Cp>This approach offers several major advantages. 
It makes you more productive by letting you update or randomize complex scenes instantly without the need to rebuild them from scratch. It also brings flexibility to your pipeline because parameters can be adjusted at any stage of production. Geometry Nodes open the door to experimentation for producing intricate shapes, patterns, and effects that would be difficult or impossible to achieve by hand, like generating a large patch of grass. This feature is ideal for large-scale modeling like crowd simulations or realistic natural environments.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-green\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example Blender–Kitsu integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-scripting-geometry-nodes?ref=blog.cg-wire.com\">https://github.com/cgwire/blender-scripting-geometry-nodes\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"adding-a-geometry-node\">\u003Cstrong>Adding a Geometry Node\u003C/strong>\u003C/h2>\u003Cp>I know the concept looks intimidating, but give me 5 minutes and we'll create your first geometry node setup.\u003C/p>\u003Col>\u003Cli>Open a new Blender project with a default cube.\u003C/li>\u003Cli>In the Geometry Nodes tab, click New to create a new geometry node group.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-6cd7bfd9-e7fd-4220-834e-c6260fa00949.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1314\" height=\"889\" 
srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-6cd7bfd9-e7fd-4220-834e-c6260fa00949.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-6cd7bfd9-e7fd-4220-834e-c6260fa00949.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-6cd7bfd9-e7fd-4220-834e-c6260fa00949.png 1314w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>You’ll now see a blank node tree in the Geometry Node Editor workspace, with two default nodes: Group Input and Group Output. These represent the start and end of your data flow: geometry comes in, gets modified, and goes out.\u003C/p>\u003Cp>To see your setup in action, just add your first node:\u003C/p>\u003Col>\u003Cli>Click \u003Cstrong>Add\u003C/strong> → \u003Cstrong>Geometry\u003C/strong> → \u003Cstrong>Operations\u003C/strong> → \u003Cstrong>Transform Geometry\u003C/strong>.\u003C/li>\u003Cli>Connect the \u003Cstrong>Group Input\u003C/strong> → \u003Cstrong>Transform Geometry\u003C/strong> → \u003Cstrong>Group Output\u003C/strong>.\u003C/li>\u003Cli>Adjust the translation or scale values in the Transform node.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-ea4b9476-8d89-4d56-bd6a-62dd751ba84d.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1314\" height=\"889\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-ea4b9476-8d89-4d56-bd6a-62dd751ba84d.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-ea4b9476-8d89-4d56-bd6a-62dd751ba84d.png 1000w, 
https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-ea4b9476-8d89-4d56-bd6a-62dd751ba84d.png 1314w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>You’ll immediately see your object move or resize in the viewport. Congratulations, you’ve just built your first procedural modifier!\u003C/p>\u003Cp>Geometry Nodes fall into several broad categories, each handling different aspects of your scene. Think of these categories as toolboxes: each one focuses on a different kind of task, from generating shapes to controlling data or math behind the scenes.\u003C/p>\u003Cp>Here is a quick overview of the different node types to find the ones you need for a new workflow:\u003C/p>\u003Chr>\u003Ch2 id=\"1-input-nodes\">\u003Cstrong>1. Input Nodes\u003C/strong>\u003C/h2>\u003Cp>Input nodes provide the starting information for your node tree. They bring in existing data from your object or scene, like position, normal, index, or object info that other nodes can use to calculate or transform geometry.\u003C/p>\u003Cp>For example, an Input → Scene → Object info node gives you all the information you need about an object instance to perform calculations.\u003C/p>\u003Cp>When creating a new node tree, Blender will always add a new Input Group node representing the group of models in the current scene.\u003C/p>\u003Cp>\u003C/p>\u003Ch2 id=\"2-output-nodes\">\u003Cstrong>2. Output Nodes\u003C/strong>\u003C/h2>\u003Cp>Output nodes define what leaves your node system: the final geometry that Blender renders or displays. The Group Output node is the most common one, connecting the result of your entire node network back to your object in the viewport.\u003C/p>\u003Cp>Other specialized outputs (like Material Output in shader setups) pass data to different parts of Blender’s system. 
In Geometry Nodes, the Output stage determines what geometry, instances, or attributes are visible in the result.\u003C/p>\u003Cp>\u003C/p>\u003Ch2 id=\"3-geometry-nodes\">\u003Cstrong>3. Geometry Nodes\u003C/strong>\u003C/h2>\u003Cp>Geometry nodes directly modify, combine, or generate geometry, the actual shapes in your scene.\u003C/p>\u003Cp>They’re the core of procedural modeling. Instead of sculpting by hand, you can create systems that generate geometry automatically, and you can tweak them later without destroying your base mesh.\u003C/p>\u003Cp>\u003C/p>\u003Ch2 id=\"4-mesh-nodes\">\u003Cstrong>4. Mesh Nodes\u003C/strong>\u003C/h2>\u003Cp>Mesh nodes focus on fine control over mesh structures: the vertices, edges, and faces that make up your geometry. They let you access and modify specific mesh components or convert geometry types.\u003C/p>\u003Cp>When you need precise topology control, go for mesh nodes. They’re perfect for procedural modeling tasks like creating grids, manipulating edge loops, or generating new topology from existing meshes.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-da4fe358-ac3b-482a-8641-6b0540c9d792.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"947\" height=\"897\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-da4fe358-ac3b-482a-8641-6b0540c9d792.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-da4fe358-ac3b-482a-8641-6b0540c9d792.png 947w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>\u003C/p>\u003Ch2 id=\"5-instance-nodes\">\u003Cstrong>5. Instance Nodes\u003C/strong>\u003C/h2>\u003Cp>Instance nodes create copies (instances) of objects, scattered across surfaces or points. 
Nodes like Instance on Points or Realize Instances handle this.\u003C/p>\u003Cp>Instancing is one of the most powerful features in Geometry Nodes because it lets you duplicate thousands of objects (like trees, rocks, or particles) without slowing down your scene by only rendering one real copy and referencing it multiple times.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-1049c5fc-a3de-480a-b494-ed49a488af0c.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"1085\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-1049c5fc-a3de-480a-b494-ed49a488af0c.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-1049c5fc-a3de-480a-b494-ed49a488af0c.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-1049c5fc-a3de-480a-b494-ed49a488af0c.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>\u003C/p>\u003Ch2 id=\"6-attribute-nodes\">\u003Cstrong>6. Attribute Nodes\u003C/strong>\u003C/h2>\u003Cp>Attribute nodes control or pass around custom properties attached to geometry, like color, scale, or random values per point. These attributes can be used to drive transformations, materials, or effects.\u003C/p>\u003Cp>Attributes let you add variation and control to your procedural systems. You can randomize the size of scattered objects, color particles differently, or link material effects to geometry data.\u003C/p>\u003Cp>\u003C/p>\u003Ch2 id=\"7-utilities-and-fields\">\u003Cstrong>7. Utilities and Fields\u003C/strong>\u003C/h2>\u003Cp>Utility nodes handle the logic and math behind your geometry network. 
They include operations like Math, Vector Math, Compare, or Map Range, and they’re often used to process or control other nodes’ inputs, like in a programming language.\u003C/p>\u003Cp>They’re the brains of your setup, allowing you to build relationships, create gradients, randomize values, etc.\u003C/p>\u003Cp>\u003C/p>\u003Ch2 id=\"8-curve-nodes\">\u003Cstrong>8. Curve Nodes\u003C/strong>\u003C/h2>\u003Cp>Curve nodes work with curve-based geometry like lines, splines, or paths. They’re useful for generating cables, vines, roads, or abstract motion trails. Nodes like Resample Curve, Curve to Mesh, and Set Curve Radius let you adjust the shape, resolution, or thickness of curves procedurally.\u003C/p>\u003Cp>Curves can also drive instancing, letting you place objects along a path or animate their movement over time.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-6409cf07-26e7-4eb6-8e76-85e9a33a1581.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1142\" height=\"936\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-6409cf07-26e7-4eb6-8e76-85e9a33a1581.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-6409cf07-26e7-4eb6-8e76-85e9a33a1581.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-6409cf07-26e7-4eb6-8e76-85e9a33a1581.png 1142w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>\u003C/p>\u003Ch2 id=\"9-grease-pencil-nodes\">\u003Cstrong>9. Grease Pencil Nodes\u003C/strong>\u003C/h2>\u003Cp>Grease Pencil nodes integrate Blender’s 2D drawing system into the Geometry Nodes workflow. 
You can procedurally modify strokes, convert drawings into geometry, or apply effects like noise, extrusion, or deformation to 2D lines.\u003C/p>\u003Cp>These nodes bridge the gap between 2D animation and procedural design, giving artists new ways to stylize motion graphics or hybrid 2D/3D scenes.\u003C/p>\u003Cp>\u003C/p>\u003Ch2 id=\"10-point-nodes\">\u003Cstrong>10. Point Nodes\u003C/strong>\u003C/h2>\u003Cp>Point nodes manipulate individual points in your geometry: the fundamental building blocks used for scattering, positioning, or transforming instances. You can add, move, or rotate points, or assign attributes like color or scale to each.\u003C/p>\u003Cp>For instance, Distribute Points on Faces generates evenly or randomly placed points across a surface, which can then serve as placement positions for instances like grass or particles.\u003C/p>\u003Cp>\u003C/p>\u003Ch2 id=\"11-volume-nodes\">\u003Cstrong>11. Volume Nodes\u003C/strong>\u003C/h2>\u003Cp>Volume nodes let you create and manipulate volumetric data like fog, smoke, or procedural density fields. You can use them to generate 3D textures, shape clouds, or fill geometry with density-based effects and open the door to atmospheric or organic effects that go far beyond surface modeling.\u003C/p>\u003Cp>\u003C/p>\u003Ch2 id=\"12-material-nodes\">\u003Cstrong>12. Material Nodes\u003C/strong>\u003C/h2>\u003Cp>Material nodes assign or modify materials and shading data. The Set Material or Material Index nodes let you dynamically apply different materials based on attributes, random seeds, or regions of your model.\u003C/p>\u003Cp>This makes it easy to, for example, color-code parts of a structure or assign materials procedurally to scattered objects.\u003C/p>\u003Cp>\u003C/p>\u003Ch2 id=\"13-texture-nodes\">\u003Cstrong>13. Texture Nodes\u003C/strong>\u003C/h2>\u003Cp>Texture nodes sample or generate procedural textures that can drive geometry transformations or visual variation. 
They can provide grayscale masks, noise patterns, or gradients that influence scale, displacement, or color.\u003C/p>\u003Cp>By combining texture data with math or attribute nodes, you can create natural randomness for uneven terrain, wavy surfaces, or patterned distribution.\u003C/p>\u003Cp>\u003C/p>\u003Ch2 id=\"14-group-nodes\">\u003Cstrong>14. Group Nodes\u003C/strong>\u003C/h2>\u003Cp>Group nodes bundle multiple nodes into a reusable unit. They’re crucial for organizing complex setups and keeping your node trees clean. You can expose parameters on the group’s input/output to make them adjustable, effectively turning your custom setup into a new super node.\u003C/p>\u003Cp>Once you start building your own groups, you’re not just using Geometry Nodes: you’re creating your own procedural tools.\u003C/p>\u003Cp>\u003C/p>\u003Ch2 id=\"15-hair-nodes\">\u003Cstrong>15. Hair Nodes\u003C/strong>\u003C/h2>\u003Cp>Hair nodes are designed to generate, style, and control procedural hair or fur systems. 
They provide access to strand length, density, and grooming attributes, allowing you to simulate everything from grass fields to character hair.\u003C/p>\u003Cp>These nodes replace older particle-based workflows with a modern, procedural approach that integrates seamlessly with Blender’s new hair system.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-64d8cc68-468d-4294-8abb-2fafc2ac9d87.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"916\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/11/data-src-image-64d8cc68-468d-4294-8abb-2fafc2ac9d87.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/11/data-src-image-64d8cc68-468d-4294-8abb-2fafc2ac9d87.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/11/data-src-image-64d8cc68-468d-4294-8abb-2fafc2ac9d87.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: Blender Stack Exchange\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Geometry Nodes can seem abstract or intimidating at first, but they are some of the most exciting features in Blender. Once you understand how to combine nodes, you can generate entire animations, environments, or visual effects driven by procedural logic rather than manual edits.\u003C/p>\u003Cp>Don’t feel like you need to memorize them all, however. 
Most Geometry Nodes setups rely on a handful of key nodes that you’ll naturally get comfortable with as you experiment.\u003C/p>\u003Cp>\u003Ca href=\"https://blog.cg-wire.com/\">In our next article\u003C/a>, we’ll go a step further: you’ll learn how to \u003Ca href=\"https://blog.cg-wire.com/blender-scripting-animation/\">create your own custom node groups using scripts\u003C/a> to automate effects while reducing the complexity of your workflows for unique animation pipelines.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord 
Community\u003C/a>\u003C/div>",{"uuid":320,"comment_id":321,"feature_image":322,"featured":105,"visibility":10,"created_at":323,"updated_at":298,"custom_excerpt":324,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":325,"primary_tag":326,"url":327,"excerpt":324,"reading_time":115,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":328},"e65c213b-a70f-4f97-a5f0-3c56eb08a3d3","69118ad9e054fc00019520ad","https://images.unsplash.com/photo-1639322537504-6427a16b0a28?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDd8fGJsZW5kZXIlMjBnZW9tZXRyeSUyMG5vZGVzfGVufDB8fHx8MTc2Mjc1NzU1NXww&ixlib=rb-4.1.0&q=80&w=2000","2025-11-10T07:48:57.000+01:00","Blender’s Geometry Nodes let you build 3D models procedurally. Learn how they work, why they’re essential for modern animation pipelines, and how to start using them to create smarter, faster, non-destructive workflows.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/blender-scripting-geometry-nodes/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@theshubhamdhage?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Shubham Dhage\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/blender-scripting-geometry-nodes","2025-11-10T10:00:00.000+01:00",{"title":315},"blender-scripting-geometry-nodes","posts/blender-scripting-geometry-nodes",[335],{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"bz_ljpXg75yGUgqM5toWV-cRyrzGxhRi3KTeg1MFwrA",{"id":338,"title":339,"authors":340,"body":7,"description":7,"extension":8,"html":342,"meta":343,"navigation":13,"path":354,"published_at":355,"seo":356,"slug":357,"stem":358,"tags":359,"__hash__":361,"uuid":344,"comment_id":345,"feature_image":346,"featured":105,"visibility":10,"created_at":347,"updated_at":348,"custom_excerpt":349,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":350,"primary_tag":351,"url":352,"excerpt":349,"reading_time":115,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":353},"ghost/posts:forward-vs-inverse-kinematics-blender.json","How To Use Forward and Inverse Kinematics In Blender (2026)",[341],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card 
kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">🤖\u003C/div>\u003Cdiv class=\"kg-callout-text\">A 3D model is just a lifeless mannequin until you\u003Ca href=\"https://blog.cg-wire.com/rigging-in-animation/\"> \u003Cu>start rigging it\u003C/u>\u003C/a>. The real magic happens when animators make it move, and that’s where kinematics comes in.\u003C/div>\u003C/div>\u003Cp>The problem is, it’s not as simple as dragging a character’s arm or leg around. Push the limits too far, and suddenly your character’s elbow bends backwards, or their run looks like a broken wind-up toy. Play it too safe, and the movement feels stiff and robotic. Finding the balance between believable physics and expressiveness is hard.\u003C/p>\u003Cp>In this article, we explore what kinematics are and how they work in Blender. By the end, you'll have created your first rig for animation.\u003C/p>\u003Chr>\u003Ch2 id=\"what-are-kinematics\">\u003Cstrong>What Are Kinematics\u003C/strong>\u003C/h2>\u003Cp>Kinematics is \u003Cstrong>the study of how things move in space\u003C/strong> without worrying about the forces that cause the motion. 
In animation, it means focusing on how a character or object’s joints, limbs, and body parts transform from one pose to the next, rather than worrying about muscles or gravity pulling them.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card kg-card-hascaption\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-1727ea30-70cc-42b6-a5e1-085ffa16eef4.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"841\" height=\"431\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-1727ea30-70cc-42b6-a5e1-085ffa16eef4.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-1727ea30-70cc-42b6-a5e1-085ffa16eef4.png 841w\" sizes=\"(min-width: 720px) 720px\">\u003Cfigcaption>\u003Ci>\u003Cem class=\"italic\" style=\"white-space: pre-wrap;\">Source: MathWorks\u003C/em>\u003C/i>\u003C/figcaption>\u003C/figure>\u003Cp>Kinematics gives animators the rules and tools to make 3D models move in a way that looks consistent and believable. It's important to make the distinction between forward and inverse kinematics:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Forward Kinematics\u003C/strong>: In FK, motion starts at the top of the hierarchy. If you want to move a hand, you rotate the shoulder, then the elbow, then the wrist. It’s intuitive for arcs and natural swinging motions (like waving or swinging a sword) because you control the chain link by link. But it can be tedious: if you animate a finger touching a point in space, you have to manually adjust every joint to line it up.\u003C/li>\u003Cli>\u003Cstrong>Inverse Kinematics\u003C/strong>: IK flips the problem. Instead of rotating each joint, you place the end of the chain where you want it (say, a character’s hand on a table), and the computer calculates how the shoulder and elbow must bend to reach that spot. 
IK is perfect for locked motions, like keeping feet planted on the floor while the body moves. The downside is that it can sometimes create unnatural bends if not carefully controlled, so you'll need to define complex constraints.\u003C/li>\u003C/ul>\u003Cp>Animators don’t choose one or the other exclusively. They switch between FK and IK depending on the type of motion they need: FK for fluid arcs, IK for precise placement, and often blend the two to achieve the most natural end-to-end movement.\u003C/p>\u003Chr>\u003Ch2 id=\"why-kinematics-are-important\">\u003Cstrong>Why Kinematics Are Important\u003C/strong>\u003C/h2>\u003Cp>\u003Cstrong>Kinematics make sure a character’s movement respects anatomical logic\u003C/strong>: joints bend the right way, limbs maintain proper relationships, and actions flow naturally. Without it, even the best 3D model will look broken during animation. When a character reaches for a cup on a table, the elbow should bend correctly and the wrist rotate naturally. Without kinematics, the arm would hyperextend, or the hand could twist in an impossible way.\u003C/p>\u003Cp>By using forward and inverse kinematics, \u003Cstrong>animators can control complex body parts with far fewer steps\u003C/strong>. Instead of tweaking every single joint frame by frame, they can pose entire chains at once while reducing posing errors. 
Instead of manually adjusting the ankle, knee, and hip on every frame, the animator just locks the foot in place with inverse kinematics, and the software handles the rest.\u003C/p>\u003Cp>Let's try rigging a simple model in Blender to get a better feel of how it works.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-green\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-ik-fk?ref=blog.cg-wire.com\">https://github.com/cgwire/blender-ik-fk\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"forward-kinematics-fk-in-blender\">\u003Cstrong>Forward Kinematics (FK) in Blender\u003C/strong>\u003C/h2>\u003Cp>FK is like moving a marionette puppet: You control each string one by one, starting at the shoulder and working your way down to the fingertips. Every rotation builds on the previous one.\u003C/p>\u003Col>\u003Cli>Add a cube (\u003Ccode>Add → Mesh → Cube\u003C/code>) and scale it into a rectangular prism. 
Normalize the scale to 1 for beveling (\u003Ccode>Object → Apply → Scale\u003C/code>).\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-bbbf0c86-c1bb-4b7b-afd1-18cf0971aeab.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"646\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-bbbf0c86-c1bb-4b7b-afd1-18cf0971aeab.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-bbbf0c86-c1bb-4b7b-afd1-18cf0971aeab.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-bbbf0c86-c1bb-4b7b-afd1-18cf0971aeab.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Col start=\"2\">\u003Cli>In Edit mode, bevel the edges to round each side. Make sure to use \u003Ccode>Edge\u003C/code> mode and select the four edges we need. 
In the Bevel window that appears, increase the number of segments to create round edges.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-0c49b11d-3896-4035-a842-4eb98d661b33.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"816\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-0c49b11d-3896-4035-a842-4eb98d661b33.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-0c49b11d-3896-4035-a842-4eb98d661b33.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-0c49b11d-3896-4035-a842-4eb98d661b33.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Col start=\"3\">\u003Cli>Make two more segments to create a mechanical arm. In \u003Ccode>Object\u003C/code> Mode, select the prism and duplicate. Repeat once more so you have three segments.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-318d3ca9-6f1f-4afe-b602-ccff95474782.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"315\" height=\"171\">\u003C/figure>\u003Col start=\"4\">\u003Cli>Place the segments along the X axis to create the chain. 
Try to position them so they can sit end-to-end with a clear joint position.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-82b2e2e0-c34c-46a2-9418-f313e5c0f788.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"847\" height=\"467\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-82b2e2e0-c34c-46a2-9418-f313e5c0f788.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-82b2e2e0-c34c-46a2-9418-f313e5c0f788.png 847w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Col start=\"5\">\u003Cli>Set up the parent hierarchy (FK chain). Build the chain from base to tip. Select the child object first, then put it in the intended parent (the one closer to the base). Repeat so each segment is parented to the previous segment.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-408d19e7-ccac-4706-90fa-1aa9ace327b7.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"319\" height=\"218\">\u003C/figure>\u003Col start=\"6\">\u003Cli>Put each object's origin at its joint. For correct rotation, the origin must be at the joint end of each segment. Use the cursor tool to position the origin. Then, in \u003Ccode>Object\u003C/code> Mode, \u003Ccode>Object → Set Origin → Origin to 3D Cursor\u003C/code>. 
Do this for every segment.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-7a11ca12-b043-4f94-b663-b87271e51597.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"817\" height=\"416\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-7a11ca12-b043-4f94-b663-b87271e51597.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-7a11ca12-b043-4f94-b663-b87271e51597.png 817w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Col start=\"7\">\u003Cli>Give each segment a small default rotation to observe how forward kinematics behave. When you rotate the base (parent) object, the children follow thanks to the parenting chain.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-50da86d0-88ec-4673-a55b-227f9df93c0b.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"817\" height=\"416\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-50da86d0-88ec-4673-a55b-227f9df93c0b.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-50da86d0-88ec-4673-a55b-227f9df93c0b.png 817w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>You can then just rotate the arm as you want, keyframe the position, and\u003Ca href=\"https://blog.cg-wire.com/getting-started-with-blender-rendering/\"> \u003Cu>render the final result to get an animation\u003C/u>\u003C/a>!\u003C/p>\u003Cp>As you can notice, FK is great for smooth, arcing motions like waving, swinging a bat, or dancing.\u003C/p>\u003Cp>For more advanced rigs (IK, 
controls, constraints), Blender animators use an Armature instead of object parenting.\u003C/p>\u003Chr>\u003Ch2 id=\"inverse-kinematics-ik-in-blender\">\u003Cstrong>Inverse Kinematics (IK) in Blender\u003C/strong>\u003C/h2>\u003Cp>IK is more like controlling a puppet's hand, and the arm figures out how the elbow and shoulder should bend to follow along.\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Duplicate the FK arm mesh.\u003C/strong> Select your three-segment FK arm, duplicate it with and move it aside so you keep the FK version for comparison.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-59cd3241-e8e1-4b3b-b60a-8457c017a553.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1349\" height=\"526\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-59cd3241-e8e1-4b3b-b60a-8457c017a553.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-59cd3241-e8e1-4b3b-b60a-8457c017a553.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-59cd3241-e8e1-4b3b-b60a-8457c017a553.png 1349w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Col start=\"2\">\u003Cli>\u003Cstrong>Merge the segments into one object.\u003C/strong> Select the new arm copy and join each segment into a single mesh (\u003Ccode>Select all → Object → Join\u003C/code>). 
Now you have one continuous object representing the whole arm.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-4f6843c9-ce25-4aa4-b391-3637d7543c4b.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1349\" height=\"526\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-4f6843c9-ce25-4aa4-b391-3637d7543c4b.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-4f6843c9-ce25-4aa4-b391-3637d7543c4b.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-4f6843c9-ce25-4aa4-b391-3637d7543c4b.png 1349w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Col start=\"3\">\u003Cli>\u003Cstrong>Create an armature chain.\u003C/strong> Add an Armature in \u003Ccode>Add → Armature\u003C/code>. In the Armature’s \u003Cstrong>Edit Mode\u003C/strong>, extrude bones to match the segments. Select the tip of the first bone to extrude and place it at the elbow. 
Extrude again for the \"hand\".\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-f18a080f-af0d-44c9-a948-01b17e8d4ad7.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1349\" height=\"526\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-f18a080f-af0d-44c9-a948-01b17e8d4ad7.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-f18a080f-af0d-44c9-a948-01b17e8d4ad7.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-f18a080f-af0d-44c9-a948-01b17e8d4ad7.png 1349w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Col start=\"4\">\u003Cli>\u003Cstrong>Add the IK controller.\u003C/strong> Switch to \u003Cstrong>Pose Mode\u003C/strong> andI select the \u003Cem>hand\u003C/em> bone. Press \u003Ccode>Shift+I → \u003Cem>Add Inverse Kinematics\u003C/em> → Without Targets\u003C/code>. The IK chain will now drive the arm. 
In the Bone Constraints tab, set \u003Cem>Chain Length\u003C/em> = 3.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-9ffe89a0-6326-410d-9574-91798ce5dbc5.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1008\" height=\"423\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-9ffe89a0-6326-410d-9574-91798ce5dbc5.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-9ffe89a0-6326-410d-9574-91798ce5dbc5.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-9ffe89a0-6326-410d-9574-91798ce5dbc5.png 1008w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-069281f6-1b06-4992-8a73-63b76c27f9eb.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1008\" height=\"423\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-069281f6-1b06-4992-8a73-63b76c27f9eb.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-069281f6-1b06-4992-8a73-63b76c27f9eb.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-069281f6-1b06-4992-8a73-63b76c27f9eb.png 1008w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Col start=\"5\">\u003Cli>\u003Cstrong>Bind the mesh to the armature (skinning).\u003C/strong> In \u003Ccode>Object\u003C/code> mode, select the mesh first, then Ctrl-select the armature. 
Right-click on the objects and select \u003Ccode>Parent → Armature Deform → With Automatic Weights\u003C/code>. Blender assigns vertex groups for each bone so the arm follows the rig.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-0c7343c3-1d1b-4ad3-b68a-804bdf1e1ba4.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"992\" height=\"531\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-0c7343c3-1d1b-4ad3-b68a-804bdf1e1ba4.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-0c7343c3-1d1b-4ad3-b68a-804bdf1e1ba4.png 992w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Col start=\"6\">\u003Cli>\u003Cstrong>Animate with IK.\u003C/strong> Go to \u003Cstrong>Pose Mode\u003C/strong>, grab the IK controller bone and move it: the whole arm follows naturally!\u003C/li>\u003C/ol>\u003Cp>You can still use FK by moving the bones in the parent chain.\u003C/p>\u003Cp>Note that the mesh deforms this way by default. You’ll need to add Bone Constraints to match the desired movement like only allowing the arm to move along a single axis to match the behavior of a mechanical arm.\u003C/p>\u003Chr>\u003Ch2 id=\"fkik-switch\">\u003Cstrong>FK/IK Switch\u003C/strong>\u003C/h2>\u003Cp>Most rigs in Blender use a \u003Cstrong>hybrid system\u003C/strong>: FK for flowing arcs and IK for fixed contact. 
Typically, an animator\u003Ca href=\"https://blog.cg-wire.com/staging-animation-principle/\"> \u003Cu>starts with FK for broad, gestural posing, then switches to IK for moments of contact or precise positioning\u003C/u>\u003C/a>.\u003C/p>\u003Cp>In more advanced rigs, Blender animators create a custom property (usually a slider or toggle in the N-panel or on a controller bone) to switch between FK and IK.\u003C/p>\u003Cp>This is out of the scope of this article, but it is important to keep in mind.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Kinematics is the basis of rigging and skinning, and what separates a stiff 3D mannequin from a character that feels alive.\u003C/p>\u003Cp>Forward kinematics gives you smooth arcs and natural flow, while inverse kinematics locks your character to the world with believable contact.\u003C/p>\u003Cp>But don’t just read about it, open Blender, grab a model, and start playing! A well-built rig doesn’t just connect bones: it defines how a character moves, poses, and interacts with the 3D world.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>\u003Cp>\u003C/p>",{"uuid":344,"comment_id":345,"feature_image":346,"featured":105,"visibility":10,"created_at":347,"updated_at":348,"custom_excerpt":349,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":350,"primary_tag":351,"url":352,"excerpt":349,"reading_time":115,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":353},"d40e9baf-0811-422c-ac9b-e61be18477d6","68ec43d6ded61600017fff81","https://images.unsplash.com/photo-1590285381943-9fbf39f4f75d?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDV8fDNEJTIwY2hhcmFjdGVyJTIwcmlnfGVufDB8fHx8MTc2MDkyMDExNnww&ixlib=rb-4.1.0&q=80&w=2000","2025-10-13T02:12:06.000+02:00","2026-02-20T06:04:27.000+01:00","Discover the difference between Forward Kinematics (FK) and Inverse Kinematics (IK) in Blender. Learn how animators use these systems to bring 3D rigs to life with realistic motion, balance, and control. 
Includes hands-on rigging examples for beginners.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/forward-vs-inverse-kinematics-blender/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@jhc?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">James Coleman\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/forward-vs-inverse-kinematics-blender","2025-10-28T10:00:04.000+01:00",{"title":339},"forward-vs-inverse-kinematics-blender","posts/forward-vs-inverse-kinematics-blender",[360],{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"-q57RfoOKdxB3NsG4Ft4R7s9R9Xeglq2q1yRmD78FkQ",{"id":363,"title":364,"authors":365,"body":7,"description":7,"extension":8,"html":367,"meta":368,"navigation":13,"path":378,"published_at":379,"seo":380,"slug":381,"stem":382,"tags":383,"__hash__":386,"uuid":369,"comment_id":370,"feature_image":371,"featured":105,"visibility":10,"created_at":372,"updated_at":373,"custom_excerpt":374,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":375,"primary_tag":376,"url":377,"excerpt":374,"reading_time":143,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":277},"ghost/posts:blender-scripting-animation.json","Blender Scripting for Animation Pipelines: 2026 Introduction",[366],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">⚙️\u003C/div>\u003Cdiv class=\"kg-callout-text\">You can bend Blender to your will with just a few lines of code. Repetitive clicks? Gone. Complex scenes? Built in seconds. 
Custom tools? Yours to design. That’s the magic of scripting.\u003C/div>\u003C/div>\u003Cp>Blender’s graphical user interface is no doubt amazing, but there are always some tasks that feel like a grind: sharing previews with the team, tweaking endless settings in a new project, or doing the same steps over and over. Sometimes, you just wish there was a button that just did the thing, and scripting is how you unlock it!\u003C/p>\u003Cp>In this article, we’ll crack open Blender’s scripting feature using the Python programming language. You’ll learn how to write your first script, how to run it, and how Blender’s scripting modules are organized. By the end, you’ll have a good understanding of how to start optimizing your production pipeline.\u003C/p>\u003Chr>\u003Ch2 id=\"what-can-i-do-with-scripting\">\u003Cstrong>What Can I Do With Scripting?\u003C/strong>\u003C/h2>\u003Cp>Blender scripting isn’t just a neat trick for hobbyists: it’s a necessity for studios of every size.\u003C/p>\u003Cp>In production, speed and consistency are everything. Studios constantly face tight deadlines, large asset libraries, and the need to keep dozens of shots and scenes perfectly in sync across workstations. Doing that by hand is slow, error-prone, and expensive: that’s why automation is such a big deal!\u003C/p>\u003Cp>Scripting isn’t about writing code, it’s about giving yourself creative shortcuts and superpowers. With Python, you can automate the boring, repetitive tasks that eat up your time, or generate procedural geometry, materials, and even entire environments in just a few lines. You can \u003Cstrong>design your own tools and menus\u003C/strong> tailored to your workflow, and \u003Cstrong>take full control over scenes\u003C/strong>,\u003Ca href=\"https://blog.cg-wire.com/getting-started-with-blender-rendering/\"> \u003Cu>render settings\u003C/u>\u003C/a>, cameras, and lights. 
Scripting even lets you \u003Cstrong>connect Blender with external tools or APIs\u003C/strong>, making it a powerful part of larger pipelines.\u003C/p>\u003Chr>\u003Ch2 id=\"prerequisites\">\u003Cstrong>Prerequisites\u003C/strong>\u003C/h2>\u003Cp>Before diving in, make sure you have the following:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Blender\u003C/strong> - Download and install the latest version from\u003Ca href=\"https://www.blender.org/download/?ref=blog.cg-wire.com\"> \u003Cu>blender.org\u003C/u>\u003C/a>.\u003C/li>\u003Cli>\u003Cstrong>Python\u003C/strong> - You'll need the Python programming language to use Blender's native scripting modules and run programs from your operating system's terminal.\u003C/li>\u003C/ul>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-green\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/intro-blender-scripting?ref=blog.cg-wire.com\">https://github.com/cgwire/intro-blender-scripting\u003C/a>\u003C/div>\u003C/div>\u003Chr>\u003Ch2 id=\"1-create-a-new-script\">\u003Cstrong>1. Create a New Script\u003C/strong>\u003C/h2>\u003Cp>Inside Blender, open the \u003Cstrong>Scripting workspace\u003C/strong>. You’ll see a text editor panel where you can create a new script by clicking \u003Cstrong>New\u003C/strong>. 
This is where you can write your Python code, and it's particularly useful to see results in real-time:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-05bcd44b-e1a3-4f6a-a5c7-edb11e40b1fb.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"731\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-05bcd44b-e1a3-4f6a-a5c7-edb11e40b1fb.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-05bcd44b-e1a3-4f6a-a5c7-edb11e40b1fb.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-05bcd44b-e1a3-4f6a-a5c7-edb11e40b1fb.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>For a production pipeline, it's usually more useful to run a script from the command line interface. Fortunately, Python now ships Blender modules. In this tutorial, we'll run a Python program directly from the OS terminal to avoid the extra steps of navigating the graphical user interface, so the first step is to install the required Blender module:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">pip install bpy==3.6.0 --extra-index-url &lt;https://download.blender.org/pypi/&gt;\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>As a test, let's create a new empty Blender file using Python:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\nbpy.ops.wm.save_as_mainfile(filepath=\"./new_empty_file.blend\")\u003C/code>\u003C/pre>\u003Cp>First, we import Blender’s \u003Cstrong>Python API module\u003C/strong> \u003Ccode>bpy\u003C/code>, which lets us control almost everything in Blender (objects, materials, rendering, etc.). 
Then, we save the current workspace in a new file.\u003C/p>\u003Cp>\u003C/p>\u003Cp>We can run the program in the terminal like so:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">python3 script.py\u003C/code>\u003C/pre>\u003Cp>\u003C/p>\u003Cp>We can also open the newly created file with the Blender CLI:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">blender new_empty_file.blend\u003C/code>\u003C/pre>\u003Cp>Congrats! You completed your first script. Now, let's get to a more useful example: generating 3D text.\u003C/p>\u003Chr>\u003Ch2 id=\"2-hello-world-text-example\">\u003Cstrong>2. Hello World Text Example\u003C/strong>\u003C/h2>\u003Cp>Imagine you want to create a Star Wars intro animation. You know, the one with text slowly scrolling up at an angle:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-02ff3b4e-8e6f-4f1a-b6d0-e4fb9e0622eb.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"681\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-02ff3b4e-8e6f-4f1a-b6d0-e4fb9e0622eb.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-02ff3b4e-8e6f-4f1a-b6d0-e4fb9e0622eb.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-02ff3b4e-8e6f-4f1a-b6d0-e4fb9e0622eb.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>How would you do this efficiently to make it easy to edit? By using a script, of course! 
So let's try a simple example and generate some 3D text.\u003C/p>\u003Cp>We create a new file and delete all objects in the scene to start clean:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\nbpy.ops.object.select_all(action='SELECT')\nbpy.ops.object.delete(use_global=False)\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>bpy.ops.object.select_all(action='SELECT')\u003C/code>: Selects all objects currently in the scene.\u003C/li>\u003Cli>\u003Ccode>bpy.ops.object.delete(use_global=False)\u003C/code>: Deletes all selected objects.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Cp>Just two instructions are needed to add a new text object to the scene:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.ops.object.text_add(enter_editmode=False, location=(0, 0, 0))\ntext_obj = bpy.context.object\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>bpy.ops.object.text_add(...)\u003C/code>: Adds a new \u003Cstrong>Text object\u003C/strong> at the location \u003Ccode>(0, 0, 0)\u003C/code> in the 3D world (XYZ coordinates).\u003C/li>\u003Cli>\u003Ccode>text_obj = bpy.context.object\u003C/code>: Stores a reference to the newly created text object in the variable \u003Ccode>text_obj\u003C/code>. 
Whenever you add something new, Blender makes it the active object, which you can access via \u003Ccode>bpy.context.object\u003C/code>.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Cp>Let's change the text string to \"Hello World\":\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">text_obj.data.body = \"Hello World\"\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>text_obj.data\u003C/code> refers to the \u003Cstrong>Text DataBlock\u003C/strong>, the actual content or settings of the text object.\u003C/li>\u003Cli>\u003Ccode>.body = \"Hello World\"\u003C/code> sets the displayed string to “Hello World”.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Cp>We can then adjust some text settings to give the text a little thickness and center it on the x and y axes:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">text_obj.data.extrude = 0.05\ntext_obj.data.align_x = 'CENTER'\ntext_obj.data.align_y = 'CENTER'\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>extrude = 0.05\u003C/code>: Gives the text depth, turning it from flat 2D text into slightly extruded 3D text.\u003C/li>\u003Cli>\u003Ccode>align_x = 'CENTER'\u003C/code>: Horizontally centers the text.\u003C/li>\u003Cli>\u003Ccode>align_y = 'CENTER'\u003C/code>: Vertically centers the text.\u003C/li>\u003C/ul>\u003Cp>You can find more options by reading\u003Ca href=\"https://docs.blender.org/manual/en/latest/modeling/texts/properties.html?ref=blog.cg-wire.com\"> \u003Cu>the documentation on Blender’s text object properties\u003C/u>\u003C/a>.\u003C/p>\u003Cp>\u003C/p>\u003Cp>Last but not least, we can rotate the text so it faces the camera instead of lying flat on the ground, since Blender text defaults to lying flat on the XY plane:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">text_obj.rotation_euler[0] = 1.5708 &nbsp; # 90 degrees in radians\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>rotation_euler[0]\u003C/code>: Refers to the \u003Cstrong>rotation around the 
X-axis\u003C/strong>.\u003C/li>\u003Cli>\u003Ccode>1.5708\u003C/code> radians ≈ \u003Cstrong>90 degrees\u003C/strong>.\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Cp>We can save the result using the previously mentioned instruction:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.ops.wm.save_as_mainfile(filepath=\"./text.blend\")\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>\u003C/p>\u003Cp>To sum up, this is what our final code looks like:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\nbpy.ops.object.select_all(action='SELECT')\nbpy.ops.object.delete(use_global=False)\n\nbpy.ops.object.text_add(enter_editmode=False, location=(0, 0, 0))\ntext_obj = bpy.context.object\n\ntext_obj.data.body = \"Hello World\"\n\ntext_obj.data.extrude = 0.05\ntext_obj.data.align_x = 'CENTER'\ntext_obj.data.align_y = 'CENTER'\n\ntext_obj.rotation_euler[0] = 1.5708\n\nbpy.ops.wm.save_as_mainfile(filepath=\"./text.blend\")\u003C/code>\u003C/pre>\u003Chr>\u003Ch2 id=\"3-how-to-run-a-script-script-loading\">\u003Cstrong>3. How to Run a Script (Script Loading)\u003C/strong>\u003C/h2>\u003Cp>As previously mentioned, the syntax to run a script in headless mode is simply like any Python program:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">python3 text.py\u003C/code>\u003C/pre>\u003Cp>And that's it! You’ve just run your first \u003Cem>useful\u003C/em> Blender script. 
It's super useful for automation, pipelines, or batch processing.\u003C/p>\u003Cp>Just open the \u003Ccode>text.blend\u003C/code> file and see the result:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-eab235c1-3513-4b9d-9f89-8a4d7c1cd122.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"1600\" height=\"731\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-eab235c1-3513-4b9d-9f89-8a4d7c1cd122.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w1000/2025/10/data-src-image-eab235c1-3513-4b9d-9f89-8a4d7c1cd122.png 1000w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-eab235c1-3513-4b9d-9f89-8a4d7c1cd122.png 1600w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>You can also open a specific \u003Ccode>.blend\u003C/code> file and run the script inside that context:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">bpy.ops.wm.open_mainfile(filepath='my_scene.blend')\u003C/code>\u003C/pre>\u003Cp>This loads \u003Ccode>my_scene.blend\u003C/code> first, then runs the rest of the script on it.\u003C/p>\u003Cp>\u003C/p>\u003Cp>Sometimes, you want to send custom arguments:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">python3 args.py – --text \"CLI Hello\"\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>\u003C/p>\u003Cp>Inside \u003Ccode>args.py\u003C/code>, you can access these arguments like this:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import sys\n\nargv = sys.argv\nargv = argv[argv.index(\"--\") + 1:]&nbsp; # get args after --\n\nprint(\"Custom args:\", argv)\u003C/code>\u003C/pre>\u003Cp>That's it for the basics, but you still have a lot to discover.\u003C/p>\u003Chr>\u003Ch2 
id=\"4-scripting-modules-explained\">\u003Cstrong>4. Scripting Modules Explained\u003C/strong>\u003C/h2>\u003Cp>Blender exposes its scripting features through different modules. Understanding what each module does helps you define what you can script and how to search the documentation to code it.\u003C/p>\u003Cp>First, you have the core \u003Ccode>bpy\u003C/code> modules:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>\u003Ccode>bpy.context\u003C/code> (Context Access)\u003C/strong> - Provides information about Blender’s current state (active object, scene, mode, selected objects, etc.), e.g., \u003Ccode>bpy.context.object\u003C/code> gets the active object.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.data\u003C/code> (Data Access)\u003C/strong> - Gives direct access to Blender’s datablocks such as meshes, objects, materials, and cameras. Example: \u003Ccode>bpy.data.objects[\"Cube\"]\u003C/code> gets the Cube object.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.msgbus\u003C/code> (Message Bus)\u003C/strong> - A pub/sub system for listening to changes in Blender’s data and triggering callbacks like subscribing to frame-change events.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.ops\u003C/code> (Operators)\u003C/strong> - Exposes functions that mimic UI actions like adding objects, deleting, or rendering. 
Example: \u003Ccode>bpy.ops.mesh.primitive_cube_add()\u003C/code> adds a cube.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.types\u003C/code> (Types)\u003C/strong> - Defines the core classes of Blender’s data (e.g., \u003Ccode>Object\u003C/code>, \u003Ccode>Mesh\u003C/code>, \u003Ccode>Material\u003C/code>) for extension and customization, to create custom panels or operators.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.utils\u003C/code> (Utilities)\u003C/strong> - Provides helper functions for class registration, add-on handling, and system path access, e.g., \u003Ccode>bpy.utils.register_class(MyOperator)\u003C/code>.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.path\u003C/code> (Path Utilities)\u003C/strong> - Tools for handling file paths, including resolving relative paths and creating absolute paths, e.g., \u003Ccode>bpy.path.abspath(\"//textures/wood.png\")\u003C/code>.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.app\u003C/code> (Application Data)\u003C/strong> - Provides information about Blender itself like version, build details, and runtime mode. Example: \u003Ccode>bpy.app.version\u003C/code> returns \u003Ccode>(3, 6, 2)\u003C/code>.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy.props\u003C/code> (Property Definitions)\u003C/strong> - Used to define custom properties like numbers, strings, and enums for operators, panels, or addons, e.g., \u003Ccode>my_prop: bpy.props.IntProperty(name=\"My Number\")\u003C/code>.\u003C/li>\u003C/ul>\u003Cp>Then, you can find more specialized libraries:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>\u003Ccode>aud\u003C/code> (Audio System)\u003C/strong> - Blender’s audio library for playing sounds, loading files, and mixing audio. Example: play a .wav file directly in Blender with Python.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bgl\u003C/code> (OpenGL Wrapper)\u003C/strong> - Low-level OpenGL wrapper for custom 3D viewport drawing (being replaced by \u003Ccode>gpu\u003C/code>). 
To draw custom overlays, for example.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bl_math\u003C/code> (Additional Math Functions)\u003C/strong> - Extra math helpers for interpolation, distance calculations, and geometry operations, like computing distances between points.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>blf\u003C/code> (Font Drawing)\u003C/strong> - Blender’s font drawing module for rendering text in viewport overlays or panels, e.g., \u003Ccode>blf.draw(font_id, \"Hello World\")\u003C/code>.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bmesh\u003C/code> (BMesh Module)\u003C/strong> - Provides direct low-level access to Blender’s mesh editing system for procedural modeling and topology operations. Example: creating or modifying vertices and faces in edit mode.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>bpy_extras\u003C/code> (Extra Utilities)\u003C/strong> - Contains helper functions like import/export support, math conversions, and view3d utilities, e.g., simplifying coordinate conversions.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>freestyle\u003C/code> (Freestyle Module)\u003C/strong> - Controls Blender’s Freestyle line rendering for non-photorealistic edge rendering. Example: adjusting line styles or visibility rules.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>gpu\u003C/code> (GPU Module)\u003C/strong> - Modern GPU drawing API that allows custom shaders and viewport overlays (successor to \u003Ccode>bgl\u003C/code>). Example: rendering with custom GLSL shaders.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>gpu_extras\u003C/code> (GPU Utilities)\u003C/strong> - Helper functions for GPU drawing, simplifying shape rendering without full GLSL code, e.g., drawing a simple rectangle.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>idprop.types\u003C/code> (ID Property Access)\u003C/strong> - Provides structured access to Blender’s custom ID properties in dictionary/array form. 
For example, to manipulate custom metadata on objects.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>imbuf\u003C/code> (Image Buffer)\u003C/strong> - Handles image buffers, enabling loading, saving, and pixel-level manipulation, e.g., procedural image generation.\u003C/li>\u003Cli>\u003Cstrong>\u003Ccode>mathutils\u003C/code> (Math Types &amp; Utilities)\u003C/strong> - Blender’s math library offering \u003Ccode>Vector\u003C/code>, \u003Ccode>Matrix\u003C/code>, \u003Ccode>Quaternion\u003C/code>, and geometric utilities, e.g., \u003Ccode>Vector((1,0,0)).cross(Vector((0,1,0))) → (0,0,1)\u003C/code>.\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Blender scripting with Python is one of the most powerful ways to extend and personalize your workflow.\u003C/p>\u003Cp>In this article, we explored how to create and run scripts, print your very first \"Hello World\" in the 3D world, and use the bpy module to make Blender do exactly what you want.\u003C/p>\u003Cp>At first glance, scripting might feel intimidating, but as you’ve seen, even a handful of lines can open doors to entirely new possibilities!\u003C/p>\u003Cp>Now, it’s your turn. Automate the boring stuff or craft tools from scratch for your studio pipeline. You can do it!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>\u003Cp>\u003C/p>",{"uuid":369,"comment_id":370,"feature_image":371,"featured":105,"visibility":10,"created_at":372,"updated_at":373,"custom_excerpt":374,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":375,"primary_tag":376,"url":377,"excerpt":374,"reading_time":143,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":277},"a68ec682-3536-4c62-ab40-f59e63eae8b1","68ec43d4ded61600017fff7b","https://images.unsplash.com/photo-1760548425425-e42e77fa38f1?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDd8fCUyMHNjcmlwdGluZ3xlbnwwfHx8fDE3NjA2MTMxODl8MA&ixlib=rb-4.1.0&q=80&w=2000","2025-10-13T02:12:04.000+02:00","2026-02-20T06:04:03.000+01:00","Learn how to automate Blender with Python! 
Discover how scripting can speed up production, eliminate repetitive work, and let you build custom tools tailored to your animation pipeline.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},"https://blog.cg-wire.com/blender-scripting-animation/","/posts/blender-scripting-animation","2025-10-21T10:00:42.000+02:00",{"title":364},"blender-scripting-animation","posts/blender-scripting-animation",[384,385],{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"bdKf7MIhtakMGVSZgWhSLqHvKXdi7Me_aKU6pQKUlbI",{"id":388,"title":389,"authors":390,"body":7,"description":7,"extension":8,"html":392,"meta":393,"navigation":13,"path":404,"published_at":405,"seo":406,"slug":407,"stem":408,"tags":409,"__hash__":412,"uuid":394,"comment_id":395,"feature_image":396,"featured":105,"visibility":10,"created_at":397,"updated_at":398,"custom_excerpt":399,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"
canonical_url":7,"primary_author":400,"primary_tag":401,"url":402,"excerpt":399,"reading_time":170,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":403},"ghost/posts:dcc-integration-blender-kitsu.json","From Blender to Kitsu: How to Create a Custom DCC Bridge (2026)",[391],{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},"\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">⚙️\u003C/div>\u003Cdiv class=\"kg-callout-text\">Ever wished your creative tools could talk to your production tracker? With a custom DCC integration, they finally can — no more manual uploads, mismatched versions, or lost time between Blender and Kitsu.\u003C/div>\u003C/div>\u003Cp>Artists rely on Digital Content Creation (DCC) tools like \u003Cstrong>Blender\u003C/strong>, \u003Cstrong>Maya\u003C/strong>, or \u003Cstrong>Houdini\u003C/strong> to bring stories to life.\u003C/p>\u003Cp>But while the creative work happens inside these tools, production tracking happens elsewhere. This disconnect can lead to version mismatches, time lost in repetitive manual uploads, and eventually less time spent creating. 
Without a smooth connection between the DCC software and your production tracker, your pipeline suffers.\u003C/p>\u003Cp>That’s where custom integrations come in.\u003C/p>\u003Cp>In this article, we walk through the basics of creating a Blender integration in Kitsu similar to Kitsu Publisher to publish 3D model previews from Blender to Kitsu.\u003C/p>\u003Chr>\u003Ch2 id=\"what%E2%80%99s-a-dcc-integration\">\u003Cstrong>What’s a DCC Integration?\u003C/strong>\u003C/h2>\u003Cp>A DCC integration is \u003Cstrong>a bridge between a creative software and another software tool\u003C/strong>, like a production tracker.\u003C/p>\u003Cp>For example, instead of exporting files, navigating to a web browser, and manually uploading versions, an integration could\u003Ca href=\"https://blog.cg-wire.com/working-with-multiple-digital-content-creation-tools/\"> \u003Cu>allow artists to publish directly from their tool of choice\u003C/u>\u003C/a>.\u003C/p>\u003Cp>Integrations can handle tasks like\u003Ca href=\"https://blog.cg-wire.com/rendering-explained/\"> \u003Cu>managing complex rendering pipelines\u003C/u>\u003C/a>,\u003Ca href=\"https://blog.cg-wire.com/animation-asset-storage/\"> \u003Cu>managing asset storage and versioning\u003C/u>\u003C/a>, or generating preview images: they automate the boring parts of production so artists can focus on telling stories.\u003C/p>\u003Chr>\u003Ch2 id=\"why-dcc-integration\">\u003Cstrong>Why DCC Integration?\u003C/strong>\u003C/h2>\u003Cp>Every studio eventually hits the same bottleneck: as projects grow, manual processes break down.\u003C/p>\u003Cp>\u003Cstrong>Integrations save time\u003C/strong> because they remove context switching between software.\u003C/p>\u003Cp>They also \u003Cstrong>reduce errors by standardizing repetitive tasks\u003C/strong> like delivering outputs by enforcing naming conventions, formats, and metadata consistency.\u003C/p>\u003Cp>Last but not least, they \u003Cstrong>improve project management and 
communication\u003C/strong> by giving supervisors and producers real-time updates.\u003C/p>\u003Cp>All professional animation studios rely on a pipeline, and DCC integrations are essential.\u003C/p>\u003Cp>To give you a concrete example, let's try building a script integration that uploads a preview from Blender to Kitsu to easily review work with your team.\u003C/p>\u003Chr>\u003Ch2 id=\"1-getting-started\">\u003Cstrong>1. Getting Started\u003C/strong>\u003C/h2>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-green\">\u003Cdiv class=\"kg-callout-emoji\">💡\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Looking for working examples?\u003C/strong>\u003C/b>\u003Cbr>\u003Cbr>You can find the complete source code for the example Blender–Kitsu integration showcased in this guide on our GitHub:\u003Cbr>\u003Cbr>🔗 \u003Ca href=\"https://github.com/cgwire/blender-kitsu-dcc-integration-example?ref=blog.cg-wire.com\">github.com/cgwire/blender-kitsu-dcc-integration-example\u003C/a>\u003C/div>\u003C/div>\u003Cp>Before we dive into scripting, let’s set up a local Kitsu instance where we can safely test our integration.\u003C/p>\u003Cp>The easiest way to run Kitsu locally is by using the kitsu-docker repository. Clone the repository to your machine and follow the instructions:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">git clone &lt;https://github.com/cgwire/kitsu-docker.git&gt;\ncd kitsu-docker\ndocker build -t cgwire/cgwire .\ndocker run --init -ti --rm -p 80:80 -p 1080:1080 --name cgwire cgwire/cgwire\u003C/code>\u003C/pre>\u003Cp>This will start all necessary services: Kitsu, the postgres database, and supporting components.\u003C/p>\u003Cp>Once the containers are running, open \u003Ccode>http://localhost:80\u003C/code> in your browser. 
Use the default credentials:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Email\u003C/strong>: admin@example.com\u003C/li>\u003Cli>\u003Cstrong>Password:\u003C/strong> mysecretpassword\u003C/li>\u003C/ul>\u003Cp>You’ll be taken to the Kitsu dashboard.\u003C/p>\u003Cp>Before we can upload previews, we need something to upload them to. In Kitsu:\u003C/p>\u003Col>\u003Cli>Create a new production (e.g., Blender Test Project) by going to the \"\u003Cstrong>Productions\u003C/strong>\" page from the sidebar.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/CleanShot-2025-10-13-at-9---.26.46-1.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"206\" height=\"479\">\u003C/figure>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-0e43401b-afb6-4345-b773-db3d9b03bed3.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"946\" height=\"914\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-0e43401b-afb6-4345-b773-db3d9b03bed3.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-0e43401b-afb6-4345-b773-db3d9b03bed3.png 946w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Col start=\"2\">\u003Cli>Inside the production, create an asset.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-83cce3b0-70a0-486d-87e7-4914a5304262.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"946\" height=\"914\" 
srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-83cce3b0-70a0-486d-87e7-4914a5304262.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-83cce3b0-70a0-486d-87e7-4914a5304262.png 946w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Col start=\"3\">\u003Cli>Creating an asset automatically adds new tasks for all the selected task categories during the production creation. We can use those to upload previews.\u003C/li>\u003C/ol>\u003Cp>To interact with Kitsu programmatically,\u003Ca href=\"https://github.com/cgwire/gazu?ref=blog.cg-wire.com\"> \u003Cu>we use gazu, the official Python client for the Kitsu API\u003C/u>\u003C/a>. It allows us to authenticate, create entities, and upload previews directly from scripts.\u003C/p>\u003Cp>Install it with:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">pip install gazu\u003C/code>\u003C/pre>\u003Cp>Next, authenticate with your Kitsu instance using your username and password:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import gazu\n\ngazu.set_host(\"&lt;http://localhost/api&gt;\")\n\nuser = gazu.log_in(\"admin@example.com\", \"mysecretpassword\")\n\nprint(\"Logged in as:\", user['user']['full_name'])\u003C/code>\u003C/pre>\u003Cp>\u003Cbr>Once logged in, we can\u003Ca href=\"https://gazu.cg-wire.com/?ref=blog.cg-wire.com\"> \u003Cu>use gazu to fetch productions, assets, and tasks, then attach media files to them\u003C/u>\u003C/a>.\u003C/p>\u003Chr>\u003Ch2 id=\"2-creating-a-preview-from-blender\">\u003Cstrong>2. Creating a preview from Blender\u003C/strong>\u003C/h2>\u003Cp>Producing a preview render is a common use case for animators. 
You need to get regular feedback throughout the production phase, and a preview is easier to reason with than importing an entire project.\u003C/p>\u003Cp>You can automate this with Blender’s Python API by setting up a viewport capture to render a single frame, saving the output to a temporary folder, and applying studio-wide render settings (resolution, format, watermarking):\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">import bpy\n\nbpy.ops.wm.open_mainfile(filepath=\"./project.blend\")\n\nbpy.context.scene.render.resolution_x = 256\nbpy.context.scene.render.resolution_y = 256\nbpy.context.scene.render.resolution_percentage = 100\n\nbpy.context.scene.render.image_settings.file_format = 'PNG'\nbpy.context.scene.render.filepath = \"./preview.png\"\n\nbpy.ops.render.render(write_still=True)\u003C/code>\u003C/pre>\u003Cul>\u003Cli>\u003Ccode>import bpy\u003C/code>: Import Blender’s Python API\u003C/li>\u003Cli>b\u003Ccode>py.ops.wm.open_mainfile(filepath=\"./project.blend\")\u003C/code>: Opens an existing Blender project file called \u003Ccode>project.blend\u003C/code>\u003C/li>\u003Cli>\u003Ccode>bpy.context.scene.render.resolution_x = 256 [...]\u003C/code>We configure the render resolution to 256 pixels by 256 pixels with no downscale.\u003C/li>\u003Cli>\u003Ccode>bpy.context.scene.render.image_settings.file_format = 'PNG'\u003C/code>: Set the output format to PNG and define the output path to  \u003Ccode>preview.png\u003C/code> before executing a still render of the scene.\u003C/li>\u003C/ul>\u003Cp>This script gives you a lightweight preview file that’s easy to store in Kitsu and quick for supervisors to review.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-e936efc9-2c3b-43ea-86f7-8845bdc6c50f.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"946\" height=\"914\" 
srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-e936efc9-2c3b-43ea-86f7-8845bdc6c50f.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-e936efc9-2c3b-43ea-86f7-8845bdc6c50f.png 946w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Cp>To run it, just install the bpy package and launch the program like you would for any other python script:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">python3 preview.py\u003C/code>\u003C/pre>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-8fc4a1a4-01c7-4fcb-a8a6-b5d50588d6b8.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"687\" height=\"768\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-8fc4a1a4-01c7-4fcb-a8a6-b5d50588d6b8.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-8fc4a1a4-01c7-4fcb-a8a6-b5d50588d6b8.png 687w\">\u003C/figure>\u003Chr>\u003Ch2 id=\"3-uploading-a-preview-to-kitsu\">\u003Cstrong>3. 
Uploading a preview to Kitsu\u003C/strong>\u003C/h2>\u003Cp>With the preview file ready, the final step is pushing the data into Kitsu with gazu.\u003C/p>\u003Cp>First, we retrieve the task we previously created:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">projects = gazu.project.all_projects()\n\nassets = gazu.asset.all_assets_for_project(projects[0])\n\ntasks = gazu.task.all_tasks_for_asset(assets[0])\ntask_status = gazu.task.get_task_status_by_short_name(\"todo\")\u003C/code>\u003C/pre>\u003Cp>To do so, we get a list of all available projects, then the assets of our newly created project, and finally the tasks assigned to this asset.\u003C/p>\u003Cp>We publish a comment for the task while linking the preview file to it:\u003C/p>\u003Cpre>\u003Ccode class=\"language-python\">(comment, preview_file) = gazu.task.publish_preview(\n&nbsp;tasks[0],\n&nbsp;task_status,\n&nbsp;&nbsp;&nbsp;&nbsp;comment=\"upload preview\",\n&nbsp;&nbsp;&nbsp;&nbsp;preview_file_path=\"./preview.png\"\n)\u003C/code>\u003C/pre>\u003Cp>And run the script:\u003C/p>\u003Cpre>\u003Ccode class=\"language-bash\">python3 upload.py\u003C/code>\u003C/pre>\u003Cp>Once uploaded, the file becomes instantly available in Kitsu’s web interface. 
Supervisors can review it, leave feedback, and mark statuses—all without any manual file juggling from the artist.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-e9710dd1-d727-4e9f-85f8-9db075a159f4.png\" class=\"kg-image\" alt=\"\" loading=\"lazy\" width=\"955\" height=\"931\" srcset=\"https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/size/w600/2025/10/data-src-image-e9710dd1-d727-4e9f-85f8-9db075a159f4.png 600w, https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/10/data-src-image-e9710dd1-d727-4e9f-85f8-9db075a159f4.png 955w\" sizes=\"(min-width: 720px) 720px\">\u003C/figure>\u003Chr>\u003Ch2 id=\"4-distribution\">\u003Cstrong>4. Distribution\u003C/strong>\u003C/h2>\u003Cp>Once your script is working, you have a few options for how to use or share it:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Run it directly in Blender\u003C/strong> - Open the \u003Cem>Scripting\u003C/em> workspace and execute the script from there.\u003C/li>\u003Cli>\u003Cstrong>Run it from the command line\u003C/strong> - Just like we did earlier, you can run your script from the terminal like you would for any Python program.\u003C/li>\u003Cli>\u003Cstrong>Package it as an add-on\u003C/strong> - This allows you to enable it from Blender’s preferences and even design a custom user interface for easier access.\u003C/li>\u003C/ul>\u003Cp>Creating a full add-on with its own UI is a must for sharing integrations with artists, but it's a much bigger topic we won’t cover here. If you’d like to dive deeper, check out the\u003Ca href=\"https://docs.blender.org/manual/en/latest/advanced/scripting/addon_tutorial.html?ref=blog.cg-wire.com\"> \u003Cu>official Blender add-on tutorial\u003C/u>\u003C/a>. 
And stay tuned, we’ll be covering this in more detail in a future post!\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>DCC pipeline integrations are foundational for efficient animation studios: by connecting tools like Blender directly with Kitsu, you reduce friction, improve communication, and make life easier for both artists and production managers.\u003C/p>\u003Cp>You don’t need a massive pipeline team to see the benefits of integrations. Even a small studio can start simple, automate a few pain points, and scale up over time as needed.\u003C/p>\u003Cp>\u003Ca href=\"https://github.com/cgwire/kitsu-publisher-next?ref=blog.cg-wire.com#readme\">\u003Cu>Check out the Kitsu Publisher documentation\u003C/u>\u003C/a> for a production-ready DCC integration solution for Blender, Toon Boom Harmony, and Unreal Engine!\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 
😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":394,"comment_id":395,"feature_image":396,"featured":105,"visibility":10,"created_at":397,"updated_at":398,"custom_excerpt":399,"codeinjection_head":109,"codeinjection_foot":110,"custom_template":7,"canonical_url":7,"primary_author":400,"primary_tag":401,"url":402,"excerpt":399,"reading_time":170,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":403},"1618a7a1-ff36-4259-910d-2902ca5adbbf","68ec43d0ded61600017fff75","https://images.unsplash.com/photo-1580894894513-541e068a3e2b?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=M3wxMTc3M3wwfDF8c2VhcmNofDV8fFNvZnR3YXJlJTIwaW50ZWdyYXRpb258ZW58MHx8fHwxNzYwMzE0NjM1fDA&ixlib=rb-4.1.0&q=80&w=2000","2025-10-13T02:12:00.000+02:00","2026-02-20T06:04:22.000+01:00","Learn how to build a custom Blender integration for Kitsu using Python. 
This guide walks you through setting up a local environment, generating previews in Blender, and uploading them to Kitsu automatically—streamlining your DCC pipeline for faster, more reliable production.",{"id":96,"name":97,"slug":98,"profile_image":7,"cover_image":7,"bio":7,"website":7,"location":7,"facebook":7,"twitter":7,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":99},{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/dcc-integration-blender-kitsu/","\u003Cspan style=\"white-space: pre-wrap;\">Photo by \u003C/span>\u003Ca href=\"https://unsplash.com/@thisisengineering?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: pre-wrap;\">ThisisEngineering\u003C/span>\u003C/a>\u003Cspan style=\"white-space: pre-wrap;\"> / \u003C/span>\u003Ca href=\"https://unsplash.com/?utm_source=ghost&amp;utm_medium=referral&amp;utm_campaign=api-credit\">\u003Cspan style=\"white-space: 
pre-wrap;\">Unsplash\u003C/span>\u003C/a>","/posts/dcc-integration-blender-kitsu","2025-10-14T11:23:34.000+02:00",{"title":389},"dcc-integration-blender-kitsu","posts/dcc-integration-blender-kitsu",[410,411],{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},{"id":113,"name":64,"slug":67,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":63},"Y6D4qXodYv1lXvjekjP26GNUDi8I9hI336Agp1r2n8s",{"id":414,"title":415,"authors":416,"body":7,"description":7,"extension":8,"html":426,"meta":427,"navigation":13,"path":439,"published_at":440,"seo":441,"slug":442,"stem":443,"tags":444,"__hash__":448,"uuid":428,"comment_id":429,"feature_image":430,"featured":105,"visibility":10,"created_at":431,"updated_at":432,"custom_excerpt":433,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":434,"primary_tag":435,"url":436,"excerpt":433,"reading_time":437,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":438},"ghost/posts:blender-vs-maya.json","Blender vs Maya In 2026: Which 3D Tool is Right for 
You?",[417],{"id":418,"name":419,"slug":420,"profile_image":421,"cover_image":7,"bio":422,"website":7,"location":423,"facebook":7,"twitter":424,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":425},"5fe9b27094f20f00398a1673","Gwénaëlle Dupré","gwen","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2020/12/profile_pics.png","Product Manager at CGWire","Paris, France","@gelnior","https://blog.cg-wire.com/author/gwen/","\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">💻\u003C/div>\u003Cdiv class=\"kg-callout-text\">\u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Blender\u003C/strong>\u003C/b> or \u003Cb>\u003Cstrong style=\"white-space: pre-wrap;\">Maya\u003C/strong>\u003C/b>? The debate continues in 2025—let’s break down the differences!\u003C/div>\u003C/div>\u003Cp>There are plenty of digital content creation tools out there, some of which can spark ongoing debates among 3D artists.\u003C/p>\u003Cp>Maybe you just started studying animation and wonder which tool to pick. 
Or you have a new project to manage and try to decide between hiring Maya or Blender animators.\u003C/p>\u003Cp>In this article, we’ll take a friendly stroll through the features of both \u003Cstrong>Blender\u003C/strong> and \u003Cstrong>Maya\u003C/strong> to explore what sets them apart, their strengths and weaknesses, and which projects they suit best.\u003C/p>\u003Chr>\u003Ch2 id=\"comparison-criteria\">\u003Cstrong>Comparison Criteria\u003C/strong>\u003C/h2>\u003Cp>When comparing Maya and Blender, we will focus on a few key criteria to make it as objective as possible:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Target audience\u003C/strong> - who is the tool built for?\u003C/li>\u003Cli>\u003Cstrong>Pricing\u003C/strong> - what's the business model like?\u003C/li>\u003Cli>\u003Cstrong>Pros &amp; cons\u003C/strong> - what are the key advantages and inconveniences of each tool?\u003C/li>\u003Cli>\u003Cstrong>Learning curve\u003C/strong> - how easy can you get started?\u003C/li>\u003Cli>\u003Cstrong>Community support\u003C/strong> - how active are the users?\u003C/li>\u003Cli>\u003Cstrong>Modeling\u003C/strong> - does it fare well with common 3D modeling tasks?\u003C/li>\u003Cli>\u003Cstrong>Animation &amp; rigging\u003C/strong> - what about the animation process of 3D models?\u003C/li>\u003Cli>\u003Cstrong>Rendering\u003C/strong> - how can I turn my animations into videos?\u003C/li>\u003C/ul>\u003Cp>Before we dive in, I want to emphasize the fact that neither is better―it all depends on your requirements and use cases! 
With that out of the way, let's start with Maya.\u003C/p>\u003Chr>\u003Ch2 id=\"maya\">\u003Cstrong>Maya\u003C/strong>\u003C/h2>\u003Cp>Autodesk Maya is considered the industry standard for 3D animation, and it is used by major studios around the globe, such as Walt Disney Animation Studios.\u003C/p>\u003Ch3 id=\"target-audience\">\u003Cstrong>Target audience\u003C/strong>\u003C/h3>\u003Cp>Maya's toolset is tailored for professionals working within film, television, and gaming industries, especially those involved in complex production pipelines.\u003C/p>\u003Ch3 id=\"pricing\">\u003Cstrong>Pricing\u003C/strong>\u003C/h3>\u003Cp>Maya is quite expensive at $1,945 per year ($245 monthly), with subscription costs potentially a barrier for independent artists and small studios. But Autodesk does offer free student licenses and discounted indie licenses for only $320 per year if you are eligible.\u003C/p>\u003Ch3 id=\"pros\">\u003Cstrong>Pros\u003C/strong>\u003C/h3>\u003Cul>\u003Cli>Used by many big studios in the industry\u003C/li>\u003Cli>Great animation and rigging UI\u003C/li>\u003Cli>Works out-of-the-box but fully customizable\u003C/li>\u003Cli>Industry standards closed-source plugins\u003C/li>\u003C/ul>\u003Ch3 id=\"cons\">\u003Cstrong>Cons\u003C/strong>\u003C/h3>\u003Cul>\u003Cli>Cost\u003C/li>\u003Cli>Few community resources\u003C/li>\u003C/ul>\u003Cp>\u003C/p>\u003Chr>\u003Ch2 id=\"blender\">\u003Cstrong>Blender\u003C/strong>\u003C/h2>\u003Cp>Blender is a free and open-source 3D content creation tool that is steadily gaining traction across various creative industries. 
It supports the entirety of the 3D pipeline, including modeling, rigging, animation, and rendering.\u003C/p>\u003Cp>Blender has been used in several acclaimed productions, like the 2023 film \"Spider-Man: Across the Spider-Verse,\" and was pivotal in the Grease Pencil feature in short films and animations.\u003C/p>\u003Ch3 id=\"target-audience-1\">\u003Cstrong>Target audience\u003C/strong>\u003C/h3>\u003Cp>Blender is designed to be accessible to everyone, from hobbyists to professionals. Smaller studios appreciate its zero-cost entry point and evolving feature set. It's particularly appealing to freelancers and startups looking for powerful but hackable tools without the financial strain.\u003C/p>\u003Ch3 id=\"pricing-1\">\u003Cstrong>Pricing\u003C/strong>\u003C/h3>\u003Cp>The biggest advantage of Blender is its price—free—and its highly active community that fosters innovation and support. However, it may not integrate as seamlessly into larger production pipelines compared to Maya, which can be a drawback for some studios.\u003C/p>\u003Ch3 id=\"pros-1\">\u003Cstrong>Pros\u003C/strong>\u003C/h3>\u003Cul>\u003Cli>Open-source\u003C/li>\u003Cli>Big community\u003C/li>\u003Cli>Great modeling UI\u003C/li>\u003C/ul>\u003Ch3 id=\"cons-1\">\u003Cstrong>Cons\u003C/strong>\u003C/h3>\u003Cul>\u003Cli>Less popular in large-scale productions\u003C/li>\u003Cli>Need plugins to improve your animation workflow\u003C/li>\u003C/ul>\u003Chr>\u003Ch2 id=\"learning-curve\">\u003Cstrong>Learning Curve\u003C/strong>\u003C/h2>\u003Cp>Both software packages have steep learning curves, but Maya is often considered more challenging to master due to its complexity.\u003C/p>\u003Cp>Blender, as an open-source software, has a reputation for being less intimidating to start with, especially for newcomers.\u003C/p>\u003Cp>Numerous tutorials and courses are available for both, but Blender has the edge here thanks to its open-source nature and large community.\u003C/p>\u003Chr>\u003Ch2 
id=\"community\">\u003Cstrong>Community\u003C/strong>\u003C/h2>\u003Cp>The community support for Blender is one of its strongest assets. With its open-source model, developers, and artists frequently contribute to its growth with tutorials, plugins, and resources. This open-source vibe encourages experimentation, and community-driven platforms like Blender Artists and Blender Nation are buzzing with open project discussions.\u003C/p>\u003Cp>Maya also benefits from a large, though more commercially focused, community providing professional resources and support. Maya is used primarily by professionals engaged in large-scale production settings, so the Maya community is often composed of industry veterans and experts with deep knowledge of complex production pipelines. The community tends to communicate through forums like Autodesk's own support and community networks. Because of this setting, free assets and plugins aren't as common since animators are busy working on studio projects.\u003C/p>\u003Chr>\u003Ch2 id=\"modeling\">\u003Cstrong>Modeling\u003C/strong>\u003C/h2>\u003Cp>Blender is praised for its fast and flexible modeling tools. The software offers a versatile range of features, from polygonal and sculpting workflows to advanced procedural modeling with modifiers. Blender’s non-destructive modifier stack allows you to modify geometry in a flexible and non-linear way, which is particularly beneficial for iterative design processes. Its customizable interface and hotkey-centric workflow enable modelers to streamline their processes and work more efficiently once they master the toolset. Lastly, Blender has better built-in sculpting support than Maya.\u003C/p>\u003Cp>Maya excels in polygonal modeling features, with a range of advanced tools that support precise and detailed work. 
Its integration with other Autodesk products and advanced support for NURBS modeling give it an edge in creating complex, high-precision models that are often required in professional projects. Maya’s user interface can appear more daunting to beginners.\u003C/p>\u003Cp>All in all, both are pretty\u003Ca href=\"https://blog.cg-wire.com/3d-modeling-animation/\"> \u003Cu>similar at 3D modeling\u003C/u>\u003C/a>, and your choice will boil down to your preferences regarding UI and controls.\u003C/p>\u003Chr>\u003Ch2 id=\"animation-rigging\">\u003Cstrong>Animation &amp; Rigging\u003C/strong>\u003C/h2>\u003Cp>Maya offers advanced animation and\u003Ca href=\"https://blog.cg-wire.com/rigging-in-animation/\"> \u003Cu>rigging tools\u003C/u>\u003C/a> out-of-the-box for professional animators who need precision and fluidity. Its rigging system is incredibly powerful and has a great depth of control. The software supports complex characters with intricate riggings thanks to tools like the Node Editor, which simplifies customized control rigs and advanced deformations. Maya’s robust scripting capabilities with MEL and Python allow technical directors to create custom tools and automate rigging processes.\u003C/p>\u003Cp>Blender has made significant strides in animation and rigging with its constant updates, and the Grease Pencil feature is opening new opportunities for 2D and 3D animation. Blender's animation and rigging systems aren't as advanced as Maya's, but they are still there. Blender’s Rigify plugin is a fantastic starting point for creating complex rigs without diving deep into manual rigging.\u003C/p>\u003Chr>\u003Ch2 id=\"rendering\">\u003Cstrong>Rendering\u003C/strong>\u003C/h2>\u003Cp>Maya uses the Interactive Photorealistic Rendering engine for visualization during development and the Autodesk Arnold engine for the final render.\u003C/p>\u003Cp>Arnold is a high-quality Monte Carlo ray-tracing renderer celebrated for its ability to handle complex scenes. 
Arnold is CPU-driven, although recent updates have included GPU acceleration support.\u003C/p>\u003Cp>Blender has two rendering engines:\u003Ca href=\"https://blog.cg-wire.com/getting-started-with-blender-rendering/\"> \u003Cu>Cycles and Eevee\u003C/u>\u003C/a>.\u003C/p>\u003Cp>Cycles is a physically-based path tracer providing realistic lighting, reflections, and shadows. It relies heavily on GPU acceleration, which can significantly speed up the rendering process if you have a compatible graphics card.\u003C/p>\u003Cp>On the other hand, Eevee is a real-time rendering engine that excels in speed. While it doesn’t provide the same level of detail as Cycles, Eevee is perfect for pre-visualization during development or projects that require quick iterations and less realistic but high-quality visual feedback.\u003C/p>\u003Chr>\u003Ch2 id=\"conclusion\">\u003Cstrong>Conclusion\u003C/strong>\u003C/h2>\u003Cp>Maya and Blender are both powerful tools for creating 3D animations, each with unique strengths and weaknesses.\u003C/p>\u003Cp>With its considerable clout in large studios and major productions, Maya continues to be the standard. But Blender is a formidable alternative, increasingly closing the gap with flexibility and a budget-friendly approach.\u003C/p>\u003Cp>Starting with Blender offers a comprehensive understanding of the 3D world, especially for those working independently. As your skills and studio requirements evolve, transitioning to Maya can align more closely with industry standards.\u003C/p>\u003Cp>If you're still in school, make sure to leverage the free student license to get a feel of both software.\u003C/p>\u003Cp>In any case, the skills you learn in each software are transferable, so don't worry too much about picking one or the other. 
If you have a good understanding of Blender, you can easily transfer your skills to Maya by watching a few how-to videos, reading the documentation, and practising for a few weeks.\u003C/p>\u003Cdiv class=\"kg-card kg-callout-card kg-callout-card-yellow\">\u003Cdiv class=\"kg-callout-emoji\">📽️\u003C/div>\u003Cdiv class=\"kg-callout-text\">To learn more about the animation process \u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" rel=\"noreferrer\">consider joining our Discord community\u003C/a>! We connect with over a thousand experts who share best practices and occasionally organize in-person events. We’d be happy to welcome you! 😊\u003C/div>\u003C/div>\u003Cdiv class=\"kg-card kg-button-card kg-align-center\">\u003Ca href=\"https://www.cg-wire.com/community?ref=blog.cg-wire.com\" class=\"kg-btn kg-btn-accent\">Join Our Discord Community\u003C/a>\u003C/div>",{"uuid":428,"comment_id":429,"feature_image":430,"featured":105,"visibility":10,"created_at":431,"updated_at":432,"custom_excerpt":433,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":434,"primary_tag":435,"url":436,"excerpt":433,"reading_time":437,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":438},"db254e69-5771-4a7d-a891-fc49a870a06b","67aad34ae95d410001686252","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2025/02/blender_vs_maya_cover.1102x655.jpg","2025-02-11T05:34:18.000+01:00","2026-03-26T10:27:19.000+01:00","Blender and Maya are two of the most popular 3D animation tools, but which one is right for you? 
This guide compares their features, pricing, strengths, and best use cases to help you decide.",{"id":418,"name":419,"slug":420,"profile_image":421,"cover_image":7,"bio":422,"website":7,"location":423,"facebook":7,"twitter":424,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":425},{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/blender-vs-maya/",5,"\u003Ci>\u003Cb>\u003Cstrong class=\"italic\" style=\"white-space: pre-wrap;\">Source: Pinglestudio\u003C/strong>\u003C/b>\u003C/i>","/posts/blender-vs-maya","2025-02-24T10:00:50.000+01:00",{"title":415},"blender-vs-maya","posts/blender-vs-maya",[445,446],{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},{"id":447,"name":82,"slug":88,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":85},"69c20dfccb09d8000107cfe9","pWff7SjVzDRT2VjO4NBblvCVh6Z8XO6i15KiZoibeSg",{"id":450,"title":451,"authors":452,"body":7,"description":7,"extension":8,"html":462,"meta":463,"navigation":13,"path":473,"published_at":474,"seo":475,"slug":476,"stem":477,"tags":478,"__hash__":480,"uuid":464,"comment_id":465,"feature_image":466,"featured":105,"visibility":10,"created_at":467,"updated_at":468,"custom_excerp
t":469,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":470,"primary_tag":471,"url":472,"excerpt":469,"reading_time":115,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:getting-started-with-blender-rendering.json","Getting Started with Blender Rendering In 2026",[453],{"id":454,"name":455,"slug":456,"profile_image":457,"cover_image":458,"bio":459,"website":460,"location":423,"facebook":7,"twitter":424,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":461},"68d2f1e036b5be000835a0db","Frank Rousseau","frankrousseau","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2021/01/photo_identite.png","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2024/05/Annecy-Booth_Linework_004-2-1.jpg","CEO &Founder of CGWire","https://addictedtointer.net","https://blog.cg-wire.com/author/frankrousseau/","\u003Cp>Blender rendering is the process of generating image or video output files from 3D scenes created in Blender. Rendering is a highly resource-intensive task because you need to simulate complex physics like light, materials, and other visual elements to produce a realistic result. And because a rendering mistake has a direct impact on the project, you need to understand the process well enough to match your technical requirements. 
At scale, rendering becomes even more challenging, to the point where it’s best outsourced to third-party providers who can offer better hardware.\u003C/p>\u003Cp>For all these reasons, this article is a must-read if you want to optimize your rendering process: learn how to do basic rendering in Blender, choose a rendering engine, and scale as your animation studio grows! \u003C/p>\u003Cp>At CGWire, we’ve been working within the Blender ecosystem since before our creation in 2018, so we’re familiar with the challenges brought by the rendering stage. Make sure to share this article with your teammates to help optimize your studio’s workflow!\u003C/p>\u003Ch2 id=\"how-to-render-animation-in-blender\">How To Render Animation In Blender\u003C/h2>\u003Cp>Rendering in Blender, no matter the input follows the same steps:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Set up your scene\u003C/strong> - Design your 3D models, set up lighting, camera angles, and any other elements you want to include in your animation.\u003C/li>\u003Cli>\u003Cstrong>Set your render settings\u003C/strong> - Navigate to the Rendering tab. Adjust the output resolution, frame range, frame rate, and other settings according to your requirements.\u003C/li>\u003Cli>\u003Cstrong>Configure the output format\u003C/strong> - In the Output Properties section of the Rendering tab, choose the output format for your animation. You can select image sequences (e.g., PNG, JPEG) or video formats (e.g., MP4, AVI). Specify the output path where Blender will save the rendered frames or video.\u003C/li>\u003Cli>\u003Cstrong>Choose the rendering engine\u003C/strong> - Blender offers two rendering engines: Cycles and Eevee. Cycles provides realistic, physically-based rendering, while Eevee is a real-time engine suitable for quicker previews. 
Select the desired engine from the Render Engine drop-down menu in the Render Properties section.\u003C/li>\u003Cli>\u003Cstrong>Check the animation playback\u003C/strong> - Use the Play button in the Timeline or Dope Sheet editor to preview your animation. Ensure that everything is moving as intended and make any necessary adjustments.\u003C/li>\u003Cli>\u003Cstrong>Start rendering\u003C/strong> - Once you are satisfied with your scene and animation, click the Render Animation button in the Render tab. Blender will start rendering each frame based on your settings. The rendering time will depend on the complexity of your scene, the chosen rendering engine, and your computer's performance.\u003C/li>\u003Cli>\u003Cstrong>Monitor the progress\u003C/strong> - You can monitor the progress of the rendering in the status bar at the top of the Blender interface. It will display the current frame being rendered and the estimated time remaining.\u003C/li>\u003Cli>\u003Cstrong>Save and review the output\u003C/strong> - Once the rendering is complete, Blender will save the rendered frames or video to the specified output path. You can then review the animation to ensure it meets your expectations.\u003C/li>\u003C/ol>\u003Cp>The rendering tab looks like this:\u003C/p>\u003Cp>Depending on your project’s requirements or what you want to do with the renders, you’ll need to dive into more advanced features.\u003C/p>\u003Ch2 id=\"blender-rendering-engines-eevee-vs-cycles\">Blender Rendering Engines: Eevee vs Cycles\u003C/h2>\u003Cp>Eevee and Cycles are two rendering engines available in Blender with several differences in terms of features and use cases.\u003C/p>\u003Cp>As always, the choice between Eevee and Cycles depends on your specific project requirements: if you need speed, interactivity, and real-time rendering, Eevee is an excellent choice for previews for example. 
But if you aim for high-quality, physically accurate renders to use in production, Cycles is the recommended option.\u003C/p>\u003Cp>Of course, you aren’t forced to use one or the other throughout the whole project’s duration. You can combine each engine’s pros and cons to make the most of them:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Rendering Speed\u003C/strong> - Eevee is a real-time rendering engine designed for fast rendering and interactivity. It utilizes your computer's GPU (Graphics Processing Unit) to deliver quick previews and animations. On the other hand, Cycles is a path-tracing engine that focuses on producing realistic and physically accurate renders that generally take longer to render.\u003C/li>\u003Cli>\u003Cstrong>Output quality\u003C/strong> - Cycles is known for its ability to generate highly realistic images with accurate lighting, shadows, and reflections. It employs ray tracing techniques, which simulate the path of light rays as they interact with objects in the scene. Eevee, although not physically accurate like Cycles, can still produce impressive results with real-time shadows, reflections, and ambient occlusion.\u003C/li>\u003Cli>\u003Cstrong>GPU vs CPU Rendering\u003C/strong> - While both engines can use GPU rendering, Eevee is designed explicitly for GPU acceleration and performs exceptionally well with compatible graphics cards. Cycles can use both CPU (Central Processing Unit) and GPU rendering but generally performs better with CPU rendering and complex scenes.\u003C/li>\u003Cli>\u003Cstrong>Workflow and Interactivity\u003C/strong> - Eevee provides real-time feedback, allowing you to quickly iterate and make adjustments to your scene without waiting for lengthy renders. It's ideal for collaborative workflows with fast iteration loops like the one you can find in the animation industry. 
Cycles, although slower, is well-suited for final renders and achieving photorealistic results.\u003C/li>\u003C/ul>\u003Ch2 id=\"use-viewport-render-workbench-engine-for-storyboarding\">Use Viewport Render (Workbench engine) For Storyboarding\u003C/h2>\u003Cp>In the situation where you just want to quickly share snapshots of a scene for feedback, rendering is overkill. The Viewport Render mode provides a quick and interactive way to preview your scene without the need for a separate render. The viewport render isn’t as accurate as a final render, but it saves precious resources and only takes 5 simple steps:\u003C/p>\u003Col>\u003Cli>Open your 3D scene in Blender\u003C/li>\u003Cli>Navigate to the area where you want to display the rendered view. By default, this is the 3D Viewport.\u003C/li>\u003Cli>In the top-right corner of the 3D Viewport, click on View then Viewport Render Animation\u003C/li>\u003Cli>Blender will start rendering the view in real time, using either the Workbench engine or the Eevee rendering engine. You will see the scene with materials, lighting, shadows, and other effects. The rendering will update in real time as you make changes to the scene, providing immediate feedback.\u003C/li>\u003C/ol>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh3.googleusercontent.com/f0YU30WPImeGqw6_a1GCNtG4ShDENq-7R4XeUOkQnaZNr_74yIqGV2Q0z6j4TUCXVyI-wNc-poYMQ1rEwBOkq2p37aXWwHcfZF_7BbZmQsAGQikmYo69qAzY52SUuryBfMM4oD0pu6wRyUt1HZzXkog\" class=\"kg-image\" alt loading=\"lazy\" width=\"297\" height=\"77\">\u003C/figure>\u003Cp>While in the Viewport Render mode, you can navigate around the scene, manipulate objects, adjust lighting, and make other changes. 
The viewport will update the render accordingly, allowing you to preview the changes instantly.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh3.googleusercontent.com/5aCphsaEBQKqbe5xdiyjtsnhKzMtwvYrL3R9rKQCrDxoTQ9tVvj2aT9O8vA-y59Xq6BurCMQPeNAGduSAsncKvvAwdtwLfUckPv2VrAUzjjjYzwLgBieSiwV04jVKqCEpvsTdZqgJ5HcR-VcNNpQiY4\" class=\"kg-image\" alt loading=\"lazy\" width=\"624\" height=\"668\">\u003C/figure>\u003Cp>To exit the Viewport Render mode and return to the regular 3D Viewport, click on the dropdown menu in the top-right corner and select another shading mode, such as Solid, Wireframe, or Material Preview.\u003C/p>\u003Ch2 id=\"using-a-blender-rendering-farm\">Using A Blender Rendering Farm\u003C/h2>\u003Cp>Using a rendering farm for Blender rendering brings several benefits. First, \u003Cstrong>rendering your 3D models on your own machine is time-consuming and limited\u003C/strong> by your processing power. A render farm provides a solution by lending additional power, allowing your projects to be rendered in just minutes instead of weeks.\u003C/p>\u003Cp>\u003Cstrong>Building your own render farm is not always a viable option\u003C/strong> because of how expensive it is: you’ll need costly hardware, hours spent configuring and maintaining it, and your electricity bills will grow. If you have a Render Wrangler on your team, it may be efficient because he will take full advantage of it. But in most cases, with a render farm service, it will be much easier to manage. You can scale up or down as needed without managing servers. 
Simply log in, upload your assets, and enjoy the simplicity.\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh5.googleusercontent.com/XRTIxKyD6zAcREE-ul4QDr2DvTD-VNvhnpoWwxO5w401g7McoK7qs-pzwaJ9vKzaHFNOdFi6k5FUh5vED6CV8n7PAdCNAXCcLAukK9BdNYDSULOf5eVBWfLu1rXiuBpv9IPO5xevDkqy0AZn1JHbOzo\" class=\"kg-image\" alt loading=\"lazy\" width=\"624\" height=\"521\">\u003C/figure>\u003Cp>You can then download the rendered frames as soon as they are ready:\u003C/p>\u003Cfigure class=\"kg-card kg-image-card\">\u003Cimg src=\"https://lh4.googleusercontent.com/NbH3JSZZytC2DfO3zNTyJVuYdcAmlLRSlgHKM4JBHrujabgvwD1IKPZ7zTO20K9_Zz_tCSeaZww7YQzk7BJ7j9ULA8d43e-hVSQk_RupkPdsZvIIvfux3r3gDYBTdISEe9YR9AH2pHkI_nn3b7M8Ftg\" class=\"kg-image\" alt loading=\"lazy\" width=\"624\" height=\"384\">\u003C/figure>\u003Cp>Not only does a render farm accelerate processing time, but \u003Cstrong>it also increases rendering quality\u003C/strong>. You can just pick resolution settings to meet project requirements and get done with it. As the demand for more realistic 3D models increases, a render farm often becomes essential to keep rendering time low and stay competitive.\u003C/p>\u003Cp>Check out \u003Ca href=\"https://blog.cg-wire.com/partnership-with-ranch-computing/\">our dedicated article to learn more about our rendering farm partner Ranch Computing\u003C/a>.\u003C/p>\u003Ch2 id=\"using-kitsu-to-keep-track-of-your-renders\">Using Kitsu To Keep Track Of Your Renders\u003C/h2>\u003Cp>In an animation production with hundreds of assets to manage, rendering is no easy task. Instead of rendering each asset / animation one by one, you can use Kitsu Publisher to automatically share a preview for collaborative work without leaving your favorite digital content creation (DCC) tool―be it Blender, Unreal Engine 5, or Harmony.\u003C/p>\u003Cp>Kitsu is a collaboration platform for animation studios to share the progress of their productions and validate deliveries. 
The Kitsu Publisher is a desktop application that connects DCC tools to Kitsu to automatically send render previews to your Kitsu workspace. All you need to do is install Kitsu Publisher and add it as a Blender plugin in a few minutes. You can \u003Ca href=\"https://github.com/cgwire/kitsu-publisher-next?ref=blog.cg-wire.com#readme\">read the official documentation to get detailed steps\u003C/a> depending on which operating system you use.\u003C/p>\u003Cp>Not only does it allow you to skip the rendering task altogether if you just want to share previews to collect feedback, but it’s also a precious communication tool to keep track of all the assets you need to (re-)render for production and how to prioritize rendering tasks according to your teammates’ needs: no more back-and-forth and endless meetings!\u003C/p>\u003Cp>Blender also has \u003Ca href=\"https://studio.blender.org/pipeline/addons/blender_kitsu?ref=blog.cg-wire.com\">an official Kitsu plugin\u003C/a> to interact with Kitsu from within Blender, including features like rendering snapshots and thumbnails.\u003C/p>\u003Ch2 id=\"conclusion\">Conclusion\u003C/h2>\u003Cp>In conclusion, rendering with Blender offers a powerful way to bring your 3D scenes to life. While the multitude of options may initially seem overwhelming, it's important to remember that simplicity is key when starting out. Pick the most straightforward option and improve as you go!\u003C/p>\u003Cp>\u003Cem>As the demand for collaboration and efficient workflow grows, it's crucial to explore alternative rendering methods. Moreover, it's a complex task that requires advanced skills at some point. 
That's why we encourage you to join our \u003Ca href=\"https://discord.com/invite/VbCxtKN?ref=blog.cg-wire.com\">Discord\u003C/a> community made of pipeline and production experts!\u003C/em>\u003C/p>",{"uuid":464,"comment_id":465,"feature_image":466,"featured":105,"visibility":10,"created_at":467,"updated_at":468,"custom_excerpt":469,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":470,"primary_tag":471,"url":472,"excerpt":469,"reading_time":115,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"dff3acdd-158e-462b-89c0-255b82422eaf","64b5621940bf3f0001b4a137","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2023/07/pasted-image-0.png","2023-07-17T17:45:29.000+02:00","2026-03-24T05:12:02.000+01:00","Blender rendering is the process of generating image or video output files from 3D scenes created in Blender. Rendering is a highly resource-intensive task because you need to simulate complex physics like light, materials, and other visual elements to produce a realistic result. 
",{"id":454,"name":455,"slug":456,"profile_image":457,"cover_image":458,"bio":459,"website":460,"location":423,"facebook":7,"twitter":424,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":461},{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/getting-started-with-blender-rendering/","/posts/getting-started-with-blender-rendering","2023-08-29T17:08:09.000+02:00",{"title":451},"getting-started-with-blender-rendering","posts/getting-started-with-blender-rendering",[479],{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"jflNqt8fLd49No_hVzvk0QD5polUy05Y4SXDa6Lug3g",{"id":482,"title":483,"authors":484,"body":7,"description":7,"extension":8,"html":486,"meta":487,"navigation":13,"path":497,"published_at":498,"seo":499,"slug":500,"stem":501,"tags":502,"__hash__":504,"uuid":488,"comment_id":489,"feature_image":490,"featured":105,"visibility":10,"created_at":491,"updated_at":492,"custom_excerpt":7,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":493,"primary_tag":494,"url":495,"excerpt":496,"reading_time":143,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"ghost/posts:getting-started-with-export-in-blender.jso
n","Getting Started With Export In Blender In 2026",[485],{"id":454,"name":455,"slug":456,"profile_image":457,"cover_image":458,"bio":459,"website":460,"location":423,"facebook":7,"twitter":424,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":461},"\u003Cp>Blender is a popular 3D computer graphics software used in animation. At some point in your project, you’ll need to export your files―to store them, share them with teammates, or use them in other specialized software like uploading to a rendering farm. \u003C/p>\u003Cp>Fortunately, Blender provides several export options: in this article, we explain each one and when they are best used in your animation pipeline to make the most of it. Lastly, we’ll talk about how to use Kitsu, our open-source production management software, to make export tasks more efficient.\u003C/p>\u003Ch2 id=\"how-to-export-in-blender\">How To Export In Blender\u003C/h2>\u003Cp>Exporting files from Blender always follows these general steps:\u003C/p>\u003Col>\u003Cli>\u003Cstrong>Select the objects or animation you want to export\u003C/strong> - In Blender, select the objects or animation you want to export. You can select multiple objects by holding down the Shift key while clicking on them.\u003C/li>\u003Cli>\u003Cstrong>Open the Export menu\u003C/strong> - Go to the File menu at the top-left corner of the Blender interface and select \"Export\" to access the export options.\u003C/li>\u003Cli>\u003Cstrong>Choose the file format\u003C/strong> - In the export options, you will see a list of available file formats to choose from. Select the format that is appropriate for your needs and is compatible with the software or platform you intend to use the exported files. 
We’ll explain each file format in the next section.\u003C/li>\u003Cli>\u003Cstrong>Configure export settings\u003C/strong> - Depending on the chosen file format, you will have different settings and options to configure. These settings can include options for animation, object hierarchy, materials, textures, scale, and more. Adjust these settings according to your requirements or the specifications of your target software. \u003Ca href=\"https://docs.blender.org/manual/en/latest/files/import_export?ref=blog.cg-wire.com\" rel=\"noreferrer\">Check the official documentation\u003C/a> to learn more about the settings.\u003C/li>\u003Cli>\u003Cstrong>Set the file path and name\u003C/strong> - Specify the file path where you want to save the exported file. Choose a location on your computer or network that is easily accessible and organized. Give the file a suitable name that reflects its content.\u003C/li>\u003Cli>\u003Cstrong>Export the file\u003C/strong> - Once you have configured all the necessary settings, click on the \"Export\" or \"Save\" button to initiate the export process. Blender will process the selected objects or animation and generate the exported file in the specified location.\u003C/li>\u003Cli>\u003Cstrong>Check the exported file\u003C/strong> - After the export is complete, navigate to the chosen file path and ensure that the exported file is present. 
Open the file in the target software or platform to confirm that the animation or objects are correctly transferred and functioning as expected.\u003C/li>\u003C/ol>\u003Cp>The Export menu looks like this:\u003C/p>\u003Ch2 id=\"export-file-formats-pros-cons\">Export File Formats: Pros &amp; Cons\u003C/h2>\u003Cp>Blender supports various file formats with their own features, advantages, and limitations.\u003C/p>\u003Ch3 id=\"fbx-filmbox\">FBX (Filmbox)\u003C/h3>\u003Cp>FBX is a proprietary file format developed by Autodesk, primarily used in the animation industry to simplify the exchange of 3D content between different software applications like Blender, Maya, and 3DS Max. FBX files can store various types of data related to 3D models, including geometry, textures, materials, animations, cameras, and lights.\u003C/p>\u003Cp>\u003Cstrong>Pros\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Interchangeability\u003C/strong> - The FBX format is primarily used for interchanging character animations between different applications. 
It’s supported by popular applications like Cinema4D, Maya, Autodesk 3ds Max, Wings3D, and Unreal Engine 5.\u003C/li>\u003Cli>\u003Cstrong>Baked mesh modifiers and animation\u003C/strong> -\u003Cstrong> \u003C/strong>The exporter can bake mesh modifiers and animation into the FBX file, ensuring that the final result appears the same as in Blender.\u003C/li>\u003C/ul>\u003Cp>\u003Cstrong>Cons\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Lack of support for armature instances\u003C/strong> -\u003Cstrong> \u003C/strong>Armature instances are not supported in the FBX format, which may limit certain capabilities or workflows involving armature-based animations.\u003C/li>\u003Cli>\u003Cstrong>Complex bone orientation importing\u003C/strong> -\u003Cstrong> \u003C/strong>Importing bones' orientation can be complex and may require adjusting related settings until the desired results are achieved.\u003C/li>\u003Cli>\u003Cstrong>Limited animation support\u003C/strong> -\u003Cstrong> \u003C/strong>The current level of animation support in FBX is minimal. When saving just animations in FBX, it's necessary to manually keep track of which animation belongs to which model. 
Take selection and organization require manual effort to optimize file size and facilitate faster export and import processes.\u003C/li>\u003C/ul>\u003Ch3 id=\"alembic\">Alembic\u003C/h3>\u003Cp>The Alembic file format is an open computer graphics interchange format designed for efficient storage and exchange of animated and simulated 3D geometry.\u003C/p>\u003Cp>\u003Cstrong>Pros\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Efficient storage\u003C/strong> - Alembic is designed to efficiently store computed results of complex procedural geometric constructions like animated vertex positions and animated transforms.\u003C/li>\u003Cli>\u003Cstrong>Fast read and write operations\u003C/strong> - It allows for quick and efficient writing of animated meshes to a drive and reading them back.\u003C/li>\u003Cli>\u003Cstrong>Reduction of CPU usage\u003C/strong> - By \"baking\" the animated mesh to an Alembic file, it reduces the CPU-intensive rig processing, resulting in moderate CPU usage during shading and lighting.\u003C/li>\u003C/ul>\u003Cp>\u003Cstrong>Cons\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Limited scope\u003C/strong> - Alembic is specifically focused on storing computed results and is not concerned with storing the complex dependency graph of procedural tools used in the creation process. 
It does not store the network of computations (rigs) required to produce the final animated positions and transforms.\u003C/li>\u003Cli>\u003Cstrong>Lack of representation of computations\u003C/strong> - Since Alembic does not store the network of computations, it may not be suitable for scenarios where it's necessary to preserve the complete history or dependencies of the procedural tools used in the animation and simulation process.\u003C/li>\u003C/ul>\u003Ch3 id=\"obj-wavefront\">OBJ (Wavefront)\u003C/h3>\u003Cp>The OBJ (Wavefront OBJ) file format is a widely used plain text format for exchanging 3D model data, originally developed by Wavefront Technologies for their Advanced Visualizer software..\u003C/p>\u003Cp>\u003Cstrong>Pros\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Universally supported\u003C/strong> by most 3D software and platforms.\u003C/li>\u003Cli>Simple ASCII-based format that is \u003Cstrong>easy to read and modify\u003C/strong>.\u003C/li>\u003Cli>\u003Cstrong>Supports basic\u003C/strong> geometry, UV mapping, and material assignments.\u003C/li>\u003C/ul>\u003Cp>\u003Cstrong>Cons\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>No animation support\u003C/strong> - Does not support armatures, lights, cameras, empty objects, parenting, or transformations.\u003C/li>\u003Cli>\u003Cstrong>Only for simple scene\u003C/strong> - Limited ability to handle large scenes or high-resolution meshes.\u003C/li>\u003C/ul>\u003Ch3 id=\"collada-dae\">Collada (DAE)\u003C/h3>\u003Cp>The Collada file format, also known by its file extension .dae (Digital Asset Exchange), is an open standard XML-based file format specifically designed for the interoperability of 3D digital assets and animation data.\u003C/p>\u003Cp>\u003Cstrong>Pros\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Widely supported\u003C/strong> and can retain geometry, materials, textures, animations, and more.\u003C/li>\u003Cli>Supports \u003Cstrong>complex scene hierarchies and 
multiple animation layers\u003C/strong>.\u003C/li>\u003Cli>\u003Cstrong>Open and XML-based format\u003C/strong> that is human-readable and can be modified.\u003C/li>\u003C/ul>\u003Cp>\u003Cstrong>Cons\u003C/strong>\u003C/p>\u003Cul>\u003Cli>The Blender plugin is \u003Cstrong>still a work in progress\u003C/strong>\u003C/li>\u003Cli>More of a \u003Cstrong>legacy file extension\u003C/strong> (no updates since 2014)\u003C/li>\u003C/ul>\u003Ch3 id=\"gltf-gl-transmission-format\">glTF (GL Transmission Format)\u003C/h3>\u003Cp>The glTF (GL Transmission Format) is an open standard file format designed for efficient transmission and loading of 3D scenes and models with a focus on real-time rendering. It uses a JSON (JavaScript Object Notation) structure or a binary format to store 3D model data, including geometry, textures, materials, animations, and more. The binary format, called \"glTF Binary,\" further enhances efficiency by reducing file size and improving loading times.\u003C/p>\u003Cp>\u003Cstrong>Pros\u003C/strong>\u003C/p>\u003Cul>\u003Cli>Designed for \u003Cstrong>real-time rendering\u003C/strong> (web, gaming).\u003C/li>\u003Cli>Supports geometry, materials, textures, animations, and more.\u003C/li>\u003Cli>\u003Cstrong>Efficient compression\u003C/strong> and small file sizes.\u003C/li>\u003C/ul>\u003Cp>\u003Cstrong>Cons\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Limited support for advanced features\u003C/strong> like hair, particles, or complex shaders.\u003C/li>\u003Cli>\u003Cstrong>Compatibility\u003C/strong> varies across DCC tools.\u003C/li>\u003C/ul>\u003Ch3 id=\"universal-scene-description-usd\">Universal Scene Description (USD)\u003C/h3>\u003Cp>An open and scalable interchange format developed by Pixar Animation Studios to address the complexities and demands of modern animation and visual effects pipelines. USD is tailored to manage complex scenes involving large amounts of data, assets, and intricate interdependencies. 
It provides a hierarchical and layer-based approach to organizing and representing 3D scenes, allowing for efficient editing, versioning, and collaboration among artists and technical directors.\u003C/p>\u003Cp>\u003Cstrong>Pros\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Scalability\u003C/strong> - USD can handle large-scale scenes with complex interdependencies. It provides efficient mechanisms for organizing, referencing, and reusing assets.\u003C/li>\u003Cli>\u003Cstrong>Layered editing\u003C/strong> - USD is based on a layered editing approach, allowing artists and technical directors to work on different aspects of a scene independently for non-destructive editing, versioning, and iterative workflows, enhancing productivity and flexibility.\u003C/li>\u003Cli>\u003Cstrong>Efficient animation handling\u003C/strong> - USD efficiently manages animation data from skeletal and vertex animations to keyframe animation, blend shapes (morph targets), and rigging information, making it suitable for complex character animation and rigging workflows.\u003C/li>\u003C/ul>\u003Cp>\u003Cstrong>Cons\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Learning curve\u003C/strong> - Working with USD may have a steeper learning curve compared to other file formats because of its advanced features and layered approach\u003C/li>\u003Cli>\u003Cstrong>Tool support\u003C/strong> - While USD has gained widespread adoption, not all software applications may have native support for it. Blender doesn’t support invisible objects, USD layers, variants, and skeletal animation.\u003C/li>\u003Cli>\u003Cstrong>File size\u003C/strong> - Depending on the complexity of the scene and the amount of data stored, USD files can become large in size. 
This may impact file transfer and storage requirements, particularly when dealing with large-scale projects.\u003C/li>\u003C/ul>\u003Ch3 id=\"stanford-ply\">Stanford PLY\u003C/h3>\u003Cp>The Stanford PLY file format, developed at Stanford University, is a flexible and widely supported format for representing 3D geometry. It can store information about vertices, faces, edges, normals, colors, texture coordinates, and other attributes of a 3D model. PLY files can be used to represent both polygonal meshes and point clouds.\u003C/p>\u003Cp>\u003Cstrong>Pros\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Flexibility\u003C/strong> - The Stanford PLY format is flexible and supports a wide range of geometric data. It can store vertex coordinates, polygonal faces, normals, colors, and other attributes, allowing for versatile representation of 3D geometry.\u003C/li>\u003Cli>\u003Cstrong>Wide support\u003C/strong> - PLY files have gained wide support across various software applications and libraries in the computer graphics community.\u003C/li>\u003Cli>\u003Cstrong>Simple file structure\u003C/strong> - The PLY file format has a relatively simple and straightforward structure, making it easy to read, write, and parse programmatically and at scale.\u003C/li>\u003C/ul>\u003Cp>\u003Cstrong>Cons\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Animation limitations\u003C/strong> - The Stanford PLY format is primarily designed for static geometric data and doesn’t support animation-specific features like skeletal animation, rigging, or keyframe animation.\u003C/li>\u003Cli>\u003Cstrong>Lack of standardization\u003C/strong> - While the PLY format itself is well-defined, there is no universal standard for additional attributes beyond basic geometry. 
This lack of standardization can result in compatibility issues.\u003C/li>\u003Cli>\u003Cstrong>Large file sizes \u003C/strong>- Depending on the complexity and level of detail of the geometry, PLY files can become large in size.\u003C/li>\u003C/ul>\u003Ch3 id=\"x3d-extensible-3d\">X3D Extensible 3D\u003C/h3>\u003Cp>The X3D (Extensible 3D) file format is an open standard for representing and exchanging 3D computer graphics and animations. It supports a wide range of applications, including animation, visualization, virtual reality, and augmented reality, building upon the capabilities of the VRML format.\u003C/p>\u003Cp>\u003Cstrong>Pros\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Rich animation capabilities\u003C/strong> - X3D provides extensive support for various animation techniques, allowing for the creation of complex and dynamic animations with different types of movement and transformations.  It includes features for simulating dynamic systems like real-world physics.\u003C/li>\u003C/ul>\u003Cp>\u003Cstrong>Cons\u003C/strong>\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>Complexity \u003C/strong>- X3D files are more complex compared to other formats, requiring specific software for viewing and editing due to the extensive feature set and flexibility as well as time and effort to master.\u003C/li>\u003Cli>\u003Cstrong>Limited adoption \u003C/strong>- While an open standard, X3D has less widespread adoption compared to other formats.\u003C/li>\u003C/ul>\u003Ch3 id=\"movies-and-pictures-mp4-png-jpg\">Movies and pictures (.mp4, .png, .jpg)\u003C/h3>\u003Cp>You may need to generate preview files from your scenes. The previews are essential to an efficient collaboration. It allows Supervisors and Directors to give their feedback. 
Thanks to them, iterations can be done from anywhere in a smoother way.\u003C/p>\u003Cp>We'll talk about how to render pictures and animations in Blender in an upcoming article.\u003C/p>\u003Ch3 id=\"other-file-formats\">Other file formats\u003C/h3>\u003Cp>Blender proposes two other file formats:\u003C/p>\u003Cul>\u003Cli>\u003Cstrong>STL\u003C/strong> (STereoLithography) for 3D printing\u003C/li>\u003Cli>The \u003Cstrong>Grease Pencil\u003C/strong> file format creates 2D animations from SVG or PDF files that can be combined with 3D elements.\u003C/li>\u003C/ul>\u003Ch2 id=\"using-kitsu-to-streamline-preview-exports\">Using Kitsu To Streamline Preview Exports\u003C/h2>\u003Cp>In an animation production with hundreds of assets to manage, exporting previews is not an easy task. Instead of exporting each asset / animation one by one, you can use Kitsu Publisher to automatically share a preview for collaborative work without leaving your favorite digital content creation (DCC) tool―be it Blender, Unreal Engine 5, or Harmony.\u003C/p>\u003Cp>Kitsu is a collaboration platform for animation studios to share the progress of their productions and validate deliveries. The Kitsu Publisher is a desktop application that connects DCC tools to Kitsu to automatically send export previews to your Kitsu workspace. All you need to do is install Kitsu Publisher and add it as a Blender plugin in a few minutes. 
You can \u003Ca href=\"https://github.com/cgwire/kitsu-publisher-next?ref=blog.cg-wire.com#readme\">read the official documentation to get detailed steps\u003C/a> depending on which operating system you use.\u003C/p>\u003Cp>Not only does it allow you to skip the export task altogether if you just want to share previews to collect feedback, but it’s also a precious communication tool to keep track of all the assets you need to export for production and how to prioritize export tasks according to your teammates’ needs: no more back-and-forth and endless meetings!\u003C/p>\u003Cp>Even better, through its API, Kitsu provides you with helpers to build file paths for your export files of all kinds.\u003C/p>\u003Cp>And, last but not least, Blender also has \u003Ca href=\"https://studio.blender.org/pipeline/addons/blender_kitsu?ref=blog.cg-wire.com\">an official Kitsu plugin\u003C/a> to interact with Kitsu from within Blender, including features like exporting snapshots and thumbnails.\u003C/p>\u003Ch2 id=\"conclusion\">Conclusion\u003C/h2>\u003Cp>In conclusion, exporting with Blender provides a straightforward process for sharing 3D assets, but the array of options and file formats available can be overwhelming for beginners. To simplify the export workflow, it's advisable to start with the simplest option that meets your requirements. While Blender supports numerous file formats catering to various needs, it's essential to consider the intended use, compatibility with other software, and specific project requirements when choosing an export format.\u003C/p>\u003Cp>\u003Cem>For teams working collaboratively, manual exports to share assets can be time-consuming and cumbersome. Kitsu offers an automated solution for storing and sharing assets directly from Blender with team members. 
\u003C/em>\u003Ca href=\"https://account.cg-wire.com/signup/?ref=blog.cg-wire.com\" rel=\"noreferrer\">\u003Cem>Try it for free today\u003C/em>\u003C/a>\u003Cem>, it only takes a few minutes to get started!\u003C/em>\u003C/p>",{"uuid":488,"comment_id":489,"feature_image":490,"featured":105,"visibility":10,"created_at":491,"updated_at":492,"custom_excerpt":7,"codeinjection_head":7,"codeinjection_foot":7,"custom_template":7,"canonical_url":7,"primary_author":493,"primary_tag":494,"url":495,"excerpt":496,"reading_time":143,"access":13,"comments":105,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"email_subject":7,"frontmatter":7,"feature_image_alt":7,"feature_image_caption":7},"873d2a42-c2dc-417a-9856-2841612381d7","64b565f140bf3f0001b4a14d","https://storage.ghost.io/c/be/86/be86007c-1b95-476e-8b3b-895720c0d138/content/images/2023/07/pasted-image-0-1.png","2023-07-17T18:01:53.000+02:00","2026-03-26T09:44:02.000+01:00",{"id":454,"name":455,"slug":456,"profile_image":457,"cover_image":458,"bio":459,"website":460,"location":423,"facebook":7,"twitter":424,"meta_title":7,"meta_description":7,"threads":7,"bluesky":7,"mastodon":7,"tiktok":7,"youtube":7,"instagram":7,"linkedin":7,"url":461},{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"https://blog.cg-wire.com/getting-started-with-export-in-blender/","Blender is a popular 3D computer graphics software used in animation. 
At some point in your project, you’ll need to export your files―to store them, share them with teammates, or use them in other specialized software like uploading to a rendering farm.\n\nFortunately, Blender provides several export options: in this article, we explain each one and when they are best used in your animation pipeline to make the most of it. Lastly, we’ll talk about how to use Kitsu, our open-source production manag","/posts/getting-started-with-export-in-blender","2023-07-31T11:40:41.000+02:00",{"title":483},"getting-started-with-export-in-blender","posts/getting-started-with-export-in-blender",[503],{"id":125,"name":6,"slug":16,"description":7,"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"url":12},"DrrbBLyJ7atUOhBa4JBWJKz9Sp1XDIFq9h1GXIagHp4",[506,510,514,518,522,526,530],{"id":5,"title":6,"body":7,"description":7,"extension":8,"meta":507,"name":6,"navigation":13,"path":14,"seo":509,"slug":16,"stem":16,"__hash__":17},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":508,"url":12},{"posts":3},{"description":7},{"id":24,"title":25,"body":7,"description":7,"extension":8,"meta":511,"name":30,"navigation":13,"path":31,"seo":513,"slug":33,"stem":33,"__hash__":34},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":512,"url":29},{"posts":28},{"description":7},{"id":36,"title":37,"body":7,"description":7,"extension":8,"meta":515,"na
me":37,"navigation":13,"path":42,"seo":517,"slug":44,"stem":44,"__hash__":45},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":516,"url":41},{"posts":40},{"description":7},{"id":47,"title":48,"body":7,"description":7,"extension":8,"meta":519,"name":52,"navigation":13,"path":53,"seo":521,"slug":55,"stem":55,"__hash__":56},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":520,"url":51},{"posts":3},{"description":7},{"id":58,"title":59,"body":7,"description":7,"extension":8,"meta":523,"name":64,"navigation":13,"path":65,"seo":525,"slug":67,"stem":67,"__hash__":68},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":524,"url":63},{"posts":62},{"description":7},{"id":70,"title":71,"body":7,"description":7,"extension":8,"meta":527,"name":71,"navigation":13,"path":76,"seo":529,"slug":78,"stem":78,"__hash__":79},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":528,"url":75},{"posts":74},{"description":7},{"id":81,"title":82,"body":7,"description":7,"extension":8,"meta":531,"name":82,"navigation":13,"path":86,"seo":533,"slug":88,"stem":88,"__hash__":89},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7
,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":532,"url":85},{"posts":40},{"description":7},[535,539,543,547,551,555,559],{"id":5,"title":6,"body":7,"description":7,"extension":8,"meta":536,"name":6,"navigation":13,"path":14,"seo":538,"slug":16,"stem":16,"__hash__":17},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":537,"url":12},{"posts":3},{"description":7},{"id":24,"title":25,"body":7,"description":7,"extension":8,"meta":540,"name":30,"navigation":13,"path":31,"seo":542,"slug":33,"stem":33,"__hash__":34},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":541,"url":29},{"posts":28},{"description":7},{"id":36,"title":37,"body":7,"description":7,"extension":8,"meta":544,"name":37,"navigation":13,"path":42,"seo":546,"slug":44,"stem":44,"__hash__":45},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":545,"url":41},{"posts":40},{"description":7},{"id":47,"title":48,"body":7,"description":7,"extension":8,"meta":548,"name":52,"navigation":13,"path":53,"seo":550,"slug":55,"stem":55,"__hash__":56},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjec
tion_foot":7,"canonical_url":7,"accent_color":7,"count":549,"url":51},{"posts":3},{"description":7},{"id":58,"title":59,"body":7,"description":7,"extension":8,"meta":552,"name":64,"navigation":13,"path":65,"seo":554,"slug":67,"stem":67,"__hash__":68},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":553,"url":63},{"posts":62},{"description":7},{"id":70,"title":71,"body":7,"description":7,"extension":8,"meta":556,"name":71,"navigation":13,"path":76,"seo":558,"slug":78,"stem":78,"__hash__":79},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":557,"url":75},{"posts":74},{"description":7},{"id":81,"title":82,"body":7,"description":7,"extension":8,"meta":560,"name":82,"navigation":13,"path":86,"seo":562,"slug":88,"stem":88,"__hash__":89},{"feature_image":7,"visibility":10,"og_image":7,"og_title":7,"og_description":7,"twitter_image":7,"twitter_title":7,"twitter_description":7,"meta_title":7,"meta_description":7,"codeinjection_head":7,"codeinjection_foot":7,"canonical_url":7,"accent_color":7,"count":561,"url":85},{"posts":40},{"description":7},1776340304245]