{"id":21318,"library":"fair-esm","title":"FAIR ESM (Evolutionary Scale Modeling)","description":"FAIR ESM provides pretrained transformer language models for proteins, including ESM-2 and ESM-1b. Version 2.0.0 adds new models and enhancements. The library is actively maintained by Meta AI.","status":"active","version":"2.0.0","language":"python","source_language":"en","source_url":"https://github.com/facebookresearch/esm","tags":["proteins","language-model","biology","deep-learning","esm","fair"],"install":[{"cmd":"pip install fair-esm","lang":"bash","label":"Install from PyPI"}],"dependencies":[],"imports":[{"note":"Direct import of ESM2 class is not typical; use pretrained model aliases.","wrong":"","symbol":"ESM2","correct":"from esm.pretrained import esm2_t48_15B_UR50D"},{"note":"The package is imported as 'esm', not 'fair_esm'.","wrong":"from fair_esm import Alphabet","symbol":"Alphabet","correct":"from esm import Alphabet"}],"quickstart":{"code":"import torch\nfrom esm.pretrained import esm2_t33_650M_UR50D\nfrom esm import Alphabet\n\nmodel, alphabet = esm2_t33_650M_UR50D()\nbatch_converter = alphabet.get_batch_converter()\nmodel.eval()\n\ndata = [\n    (\"protein1\", \"MKTVRQERLKSIVRILERSKEPVSGAQLAEELSVSRQVIVQDIAYLRSLGYNIVATPRGYVLAGG\"),\n]\nbatch_labels, batch_strs, batch_tokens = batch_converter(data)\n\nwith torch.no_grad():\n    results = model(batch_tokens, repr_layers=[33], return_contacts=True)\ntoken_representations = results[\"representations\"][33]\nprint(token_representations.shape)","lang":"python","description":"Load an ESM-2 model, tokenize sequences, and extract representations."},"warnings":[{"fix":"Use from esm.pretrained import ... instead of torch.hub.load('facebookresearch/esm', ...)","message":"In v2.0.0, model loading through torch.hub is deprecated; use esm.pretrained instead.","severity":"breaking","affected_versions":">=2.0.0"},{"fix":"Refer to the model zoo documentation for updated model names.","message":"The 'esm.pretrained' module no longer provides the old ESM-1b model alias 'esm1b_t33_650M_UR50S' directly; use 'esm1b_t33_650M_UR50S()' remains but check model list.","severity":"breaking","affected_versions":">=2.0.0"},{"fix":"Use esm2_t33_650M_UR50D (650M params) for typical usage.","message":"GPU memory is very high (e.g., ESM-2 15B requires ~300GB); smaller models like 650M are recommended for most users.","severity":"gotcha","affected_versions":"all"},{"fix":"Set return_contacts=False (default) unless you need attention map.","message":"The 'return_contacts' flag in forward() can cause OOM; only use if needed.","severity":"gotcha","affected_versions":"all"},{"fix":"Use esm.pretrained for model loading and esm for Alphabet.","message":"The 'esm.model' submodule is being reorganized; direct class imports may break in future versions.","severity":"deprecated","affected_versions":"<2.0.0"}],"env_vars":null,"last_verified":"2026-04-27T00:00:00.000Z","next_check":"2026-07-26T00:00:00.000Z","problems":[{"fix":"Use torch.float32 or run on GPU with torch.cuda.amp if supported.","cause":"Half precision not supported on CPU.","error":"RuntimeError: \"LayerNormKernelImpl\" not implemented for 'Half'"},{"fix":"Set repr_layers to a valid layer number, e.g., [33] for ESM-2 650M which has 33 layers.","cause":"Incorrect repr_layers argument; model does not have requested layer.","error":"KeyError: 'representations'"},{"fix":"Run: pip install fair-esm, then import as esm (not fair_esm).","cause":"Package not installed or wrong import 
name.","error":"ModuleNotFoundError: No module named 'esm'"},{"fix":"Upgrade to latest: pip install --upgrade fair-esm, or use torch.hub.load for older versions.","cause":"Old version of fair-esm (<2.0.0?) or import error.","error":"AttributeError: module 'esm' has no attribute 'pretrained'"}],"ecosystem":"pypi","meta_description":null,"install_score":null,"install_tag":null,"quickstart_score":null,"quickstart_tag":null}