{"id":27929,"library":"llama-index-llms-ibm","title":"LlamaIndex IBM Watsonx LLM","description":"LlamaIndex integration for IBM watsonx.ai foundation models. Current version 0.7.0.post1 requires Python 3.11+ and <3.14. Lightly maintained, with occasional releases following LlamaIndex updates.","status":"active","version":"0.7.0.post1","language":"python","source_language":"en","source_url":"https://github.com/run-llama/llama_index/tree/main/llama-index-integrations/llms/llama-index-llms-ibm","tags":["llama-index","ibm","watsonx","llm","foundation-models"],"install":[{"cmd":"pip install llama-index-llms-ibm","lang":"bash","label":"PyPI install"}],"dependencies":[{"reason":"core framework dependency","package":"llama-index-core","optional":false},{"reason":"IBM watsonx.ai SDK","package":"ibm-watsonx-ai","optional":false}],"imports":[{"note":"requires subpackage import in v0.7+","wrong":"from llama_index.llms import IBM","symbol":"IBM","correct":"from llama_index.llms.ibm import IBM"},{"note":"class renamed to WatsonxLLM in v0.7+","wrong":"from llama_index.llms.ibm import Watsonx","symbol":"IBM Watsonx","correct":"from llama_index.llms.ibm import WatsonxLLM"}],"quickstart":{"code":"import os\nfrom llama_index.llms.ibm import WatsonxLLM\n\napi_key = os.environ.get('WATSONX_APIKEY', '')\nproject_id = os.environ.get('WATSONX_PROJECT_ID', '')\n\nllm = WatsonxLLM(\n    model_id='meta-llama/llama-3-70b-instruct',\n    api_key=api_key,\n    project_id=project_id,\n)\nresponse = llm.complete('What is the capital of France?')\nprint(response.text)","lang":"python","description":"Minimal example using WatsonxLLM. Set WATSONX_APIKEY and WATSONX_PROJECT_ID environment variables."},"warnings":[{"fix":"Update imports to use WatsonxLLM class and correct path.","message":"In v0.7.0, the import path changed from `llama_index.llms.ibm` to requiring specific submodule imports. Old `from llama_index.llms.ibm import IBM` no longer works; use `from llama_index.llms.ibm import WatsonxLLM`.","severity":"breaking","affected_versions":">=0.7.0"},{"fix":"Always specify `model_id` when creating WatsonxLLM instance.","message":"The `model_id` parameter is now required and defaults removed. Previously optional models may fail if not explicitly provided.","severity":"breaking","affected_versions":">=0.7.0"},{"fix":"Ensure both WATSONX_APIKEY and WATSONX_PROJECT_ID are set.","message":"IBM watsonx.ai credentials must be passed via environment variables (WATSONX_APIKEY, WATSONX_PROJECT_ID) or constructor arguments. Passing only API key without project ID raises a missing credentials error.","severity":"gotcha","affected_versions":"all"},{"fix":"Replace `IBM` with `WatsonxLLM`.","message":"The `IBM` class alias is deprecated in v0.7.0 and will be removed in a future release. Use `WatsonxLLM` instead.","severity":"deprecated","affected_versions":">=0.7.0"}],"env_vars":null,"last_verified":"2026-05-09T00:00:00.000Z","next_check":"2026-08-07T00:00:00.000Z","problems":[{"fix":"Install the package: `pip install llama-index-llms-ibm` and import using `from llama_index.llms.ibm import WatsonxLLM`.","cause":"Outdated package or wrong import path for v0.7+. The correct subpackage is not installed.","error":"ModuleNotFoundError: No module named 'llama_index.llms.ibm'"},{"fix":"Set the WATSONX_APIKEY environment variable or pass `api_key` parameter.","cause":"Missing or empty API key.","error":"ValueError: The api_key client must be specified, or set the WATSONX_APIKEY environment variable."},{"fix":"Set WATSONX_PROJECT_ID environment variable or pass `project_id` parameter.","cause":"Missing project ID required for watsonx.ai.","error":"ClientError: Error: ibm_watsonx_ai...Project ID is required for this operation."}],"ecosystem":"pypi","meta_description":null,"install_score":null,"install_tag":null,"quickstart_score":null,"quickstart_tag":null}