{"id":24706,"library":"tflite-runtime","title":"TFLite Runtime","description":"TensorFlow Lite Runtime is a lightweight library for on-device machine learning inference, optimized for mobile and embedded devices. Version 2.14.0 supports model conversion and execution with hardware acceleration. Releases are tied to TensorFlow Lite versions.","status":"active","version":"2.14.0","language":"python","source_language":"en","source_url":"https://github.com/tensorflow/tflite-micro/tree/main/tflite-runtime","tags":["tensorflow-lite","on-device-ml","inference","embedded"],"install":[{"cmd":"pip install tflite-runtime","lang":"bash","label":"pip install"}],"dependencies":[],"imports":[{"note":"Interpreter is in tflite_runtime.interpreter module, not top-level.","wrong":"from tflite_runtime import Interpreter","symbol":"Interpreter","correct":"from tflite_runtime.interpreter import Interpreter"},{"note":"Use for GPU or Edge TPU delegates.","wrong":"","symbol":"load_delegate","correct":"from tflite_runtime.interpreter import load_delegate"}],"quickstart":{"code":"import numpy as np\nfrom tflite_runtime.interpreter import Interpreter\n\ninterpreter = Interpreter(model_path='model.tflite')\ninterpreter.allocate_tensors()\n\ninput_details = interpreter.get_input_details()\noutput_details = interpreter.get_output_details()\n\ninput_data = np.array([[1.0, 2.0, 3.0]], dtype=np.float32)\ninterpreter.set_tensor(input_details[0]['index'], input_data)\ninterpreter.invoke()\noutput_data = interpreter.get_tensor(output_details[0]['index'])\nprint(output_data)","lang":"python","description":"Basic inference with TFLite Runtime."},"warnings":[{"fix":"Use full TensorFlow for training; export to TFLite and ensure ops are supported.","message":"tflite_runtime does not include training ops. If your model uses custom ops or training-only ops, inference may fail.","severity":"breaking","affected_versions":"all"},{"fix":"Use `from tflite_runtime.interpreter import Interpreter`.","message":"The Interpreter class must be imported from tflite_runtime.interpreter, not from top-level tflite_runtime. Common mistake: `from tflite_runtime import Interpreter` -> AttributeError.","severity":"gotcha","affected_versions":"all"},{"fix":"Upgrade to Python 3.7+ or use an older tflite-runtime version if absolutely necessary.","message":"Support for Python 3.6 ended in tflite-runtime 2.7. Check your Python version if you encounter installation errors.","severity":"deprecated","affected_versions":">=2.7"}],"env_vars":null,"last_verified":"2026-05-01T00:00:00.000Z","next_check":"2026-07-30T00:00:00.000Z","problems":[{"fix":"Replace with 'from tflite_runtime.interpreter import Interpreter'.","cause":"Using wrong import path.","error":"ImportError: cannot import name 'Interpreter' from 'tflite_runtime'"},{"fix":"Reshape input data to include batch dimension, e.g., np.array([[1,2,3]]) instead of np.array([1,2,3]).","cause":"Input shape mismatch. TFLite models often expect a batch dimension.","error":"ValueError: Cannot set tensor: Dimension mismatch. Got 3 but expected 4 for input 0."}],"ecosystem":"pypi","meta_description":null,"install_score":null,"install_tag":null,"quickstart_score":null,"quickstart_tag":null}