{"id":28439,"library":"vellum-workflow-server","title":"Vellum Workflow Server","description":"The Vellum Workflow Server is a Python library for building and hosting AI workflows. It provides a framework to define, execute, and serve complex LLM-based pipelines with features like node-based DAGs, retries, observability, and API endpoints. Current version: 1.14.7. Release cadence: frequent updates (weekly/biweekly). Requires Python >=3.9, <4.","status":"active","version":"1.14.7","language":"python","source_language":"en","source_url":"https://github.com/vellum-ai/vellum-workflow-server","tags":["workflow","ai","llm","pipeline","vellum"],"install":[{"cmd":"pip install vellum-workflow-server","lang":"bash","label":"Install from PyPI"}],"dependencies":[{"reason":"Core SDK for Vellum API interactions","package":"vellum","optional":false}],"imports":[{"note":"Old import path before library split; vellum package does not contain Workflow as of v1.0+.","wrong":"from vellum import Workflow","symbol":"Workflow","correct":"from vellum_workflow_server import Workflow"},{"note":null,"wrong":null,"symbol":"WorkflowNode","correct":"from vellum_workflow_server.nodes import WorkflowNode"}],"quickstart":{"code":"from vellum_workflow_server import Workflow\nfrom vellum_workflow_server.nodes import PromptNode, InputNode, OutputNode\nimport os\n\nworkflow = Workflow()\ninput_node = InputNode(name=\"input\")\nprompt_node = PromptNode(\n    name=\"prompt\",\n    prompt_template=\"Hello {{ input.text }}\",\n    model=\"gpt-4\",\n    api_key=os.environ.get(\"OPENAI_API_KEY\", \"\")\n)\noutput_node = OutputNode(name=\"output\")\n\nworkflow.add_node(input_node)\nworkflow.add_node(prompt_node, depends_on=[input_node])\nworkflow.add_node(output_node, depends_on=[prompt_node])\n\nresult = workflow.run(input={\"text\": \"world\"})\nprint(result)","lang":"python","description":"Defines a simple workflow with input, prompt (GPT-4), and output nodes, then executes it."},"warnings":[{"fix":"Replace 'from vellum import ...' with 'from vellum_workflow_server import ...'. Also update requirements.txt.","message":"Library was renamed from 'vellum' to 'vellum-workflow-server' in v2.0. Old 'pip install vellum' installs a different package. Must update imports and dependencies.","severity":"breaking","affected_versions":"<2.0.0"},{"fix":"Use os.environ.get('VARIABLE_NAME') or set keys in code (not recommended for production).","message":"API keys must be set via environment variables or passed explicitly. The library does not auto-read .env files.","severity":"gotcha","affected_versions":">=1.0.0"},{"fix":"Use provided node classes or extend their subclasses.","message":"Node base class 'Node' is deprecated in favor of specific node types (e.g., PromptNode, CodeNode). Direct subclassing of Node may break in future releases.","severity":"deprecated","affected_versions":">=1.10.0"},{"fix":"For async, ensure event loop is running and use await workflow.arun().","message":"Workflow.run() is synchronous and blocking. For async execution, use Workflow.arun() but requires async environment.","severity":"gotcha","affected_versions":">=1.0.0"}],"env_vars":null,"last_verified":"2026-05-09T00:00:00.000Z","next_check":"2026-08-07T00:00:00.000Z","problems":[{"fix":"Use 'from vellum_workflow_server import ...' instead of 'from vellum import ...'.","cause":"Installed 'vellum-workflow-server' but trying to import 'vellum' (old package name).","error":"ModuleNotFoundError: No module named 'vellum'"},{"fix":"Set environment variable (e.g., OPENAI_API_KEY) before running workflow.","cause":"API key not provided, causing prompt node to fail internally.","error":"TypeError: 'NoneType' object is not subscriptable"},{"fix":"Specify 'depends_on' and map input names correctly, e.g., node.inputs['text'] = previous_node.outputs['result'].","cause":"Output of one node not properly connected to input of another.","error":"ValueError: Node 'prompt' has unconnected input 'text'"}],"ecosystem":"pypi","meta_description":null,"install_score":null,"install_tag":null,"quickstart_score":null,"quickstart_tag":null}