{"id":23807,"library":"gpt4all","title":"GPT4All","description":"Python bindings for GPT4All, a locally running open-source LLM ecosystem. Version 2.8.2 supports local inference with models like Mistral, Llama, and GPT4All. Released regularly with updates to model support and API improvements.","status":"active","version":"2.8.2","language":"python","source_language":"en","source_url":"https://github.com/nomic-ai/gpt4all","tags":["llm","local-ai","gpt4all","nomic-ai","inference"],"install":[{"cmd":"pip install gpt4all","lang":"bash","label":"Install from PyPI"}],"dependencies":[{"reason":"Used for downloading models if not present locally.","package":"requests","optional":false},{"reason":"Progress bars during model download.","package":"tqdm","optional":true}],"imports":[{"note":"Older versions exposed the class under a different submodule path.","wrong":"from gpt4all.gpt4all import GPT4All","symbol":"GPT4All","correct":"from gpt4all import GPT4All"}],"quickstart":{"code":"from gpt4all import GPT4All\nmodel = GPT4All(\"Meta-Llama-3-8B-Instruct-4bit\")\noutput = model.generate(\"What is the capital of France?\", max_tokens=50)\nprint(output)","lang":"python","description":"Initialize model with a known model name (or path to local file), then generate text."},"warnings":[{"fix":"Use `GPT4All('model-name')` or `GPT4All(model_path='/path/to/model.bin')`.","message":"In version 2.5.0+, the GPT4All class constructor changed to require a model name string instead of a model object. Old code using `GPT4All(llmodel=...)` will break.","severity":"breaking","affected_versions":"<2.5.0"},{"fix":"Check available models with `GPT4All.list_models()` and use the exact `name` field.","message":"Model names in GPT4All v2.x must match the exact name in the official model list (e.g., 'Meta-Llama-3-8B-Instruct-4bit'). Using a wrong name downloads a new model or raises an exception.","severity":"gotcha","affected_versions":">=2.0.0"},{"fix":"Use `max_tokens` instead of `n_predict`.","message":"The `generate()` method's `n_predict` parameter was renamed to `max_tokens` in v2.7.0. Old `n_predict` still works but is deprecated and will be removed.","severity":"deprecated","affected_versions":">=2.7.0"},{"fix":"Use `GPT4All(model_path='./model.bin')` to load from current directory.","message":"By default, models are downloaded to the user's cache directory (~/.cache/gpt4all/). If you expect a model to be in the current directory, specify `model_path` explicitly.","severity":"gotcha","affected_versions":">=2.0.0"}],"env_vars":null,"last_verified":"2026-05-01T00:00:00.000Z","next_check":"2026-07-30T00:00:00.000Z","problems":[{"fix":"Call `GPT4All.list_models()` to see available model names, or provide a valid file path using `model_path`.","cause":"Trying to instantiate a model with an incorrect or unsupported name.","error":"ValueError: Unknown model name 'my-model'"},{"fix":"Run `pip install gpt4all` in your active Python environment.","cause":"The package is not installed or installed in a different environment.","error":"ModuleNotFoundError: No module named 'gpt4all'"},{"fix":"Update to the latest version with `pip install --upgrade gpt4all`. Ensure the model is a text generation model (not an embedding model).","cause":"Using an older version of the library or a model that does not support text generation.","error":"AttributeError: 'GPT4All' object has no attribute 'generate'"}],"ecosystem":"pypi","meta_description":null,"install_score":null,"install_tag":null,"quickstart_score":null,"quickstart_tag":null}