{"id":23166,"library":"sklearn-evaluation","title":"sklearn-evaluation","description":"A Python library for evaluating scikit-learn models, providing a rich set of plots, tables, and markdown reports. Current version 0.12.2. Released irregularly; latest releases are minor patches.","status":"active","version":"0.12.2","language":"python","source_language":"en","source_url":"https://github.com/ploomber/sklearn-evaluation","tags":["scikit-learn","model-evaluation","plots","reports","machine-learning"],"install":[{"cmd":"pip install sklearn-evaluation","lang":"bash","label":"Install via pip"}],"dependencies":[{"reason":"Core dependency for model evaluation","package":"scikit-learn","optional":false},{"reason":"Required for plotting","package":"matplotlib","optional":false},{"reason":"Used for data manipulation and tables","package":"pandas","optional":false}],"imports":[{"note":"The correct import uses underscores, not hyphens.","symbol":"ClassificationReport","correct":"from sklearn_evaluation import ClassificationReport"}],"quickstart":{"code":"from sklearn.datasets import make_classification\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.ensemble import RandomForestClassifier\nfrom sklearn_evaluation import ClassificationReport\n\nX, y = make_classification(random_state=0)\nX_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)\nclf = RandomForestClassifier(random_state=0)\nclf.fit(X_train, y_train)\ny_pred = clf.predict(X_test)\n\nreport = ClassificationReport(y_test, y_pred)\nprint(report)\nreport.plot()","lang":"python","description":"Train a Random Forest classifier and generate a classification report."},"warnings":[{"fix":"Use 'from sklearn_evaluation import ...'","message":"Import from 'sklearn_evaluation' with underscores, not 'sklearn-evaluation' (hyphen). The package name on PyPI uses a hyphen, but the import uses an underscore.","severity":"gotcha","affected_versions":"all"},{"fix":"Use classes like ClassificationReport, ConfusionMatrix, etc.","message":"Version 0.5 introduced a new API for reports, deprecating the old 'sklearn_evaluation.plot.*' functions. Users should use the new object-oriented API (e.g., ClassificationReport, ConfusionMatrix) instead of the old functional interface.","severity":"deprecated","affected_versions":">=0.5"},{"fix":"Migrate to the new API: from sklearn_evaluation import ClassificationReport","message":"In version 0.5, the report generation API changed. Old code using 'sklearn_evaluation.report' may break.","severity":"breaking","affected_versions":">=0.5"}],"env_vars":null,"last_verified":"2026-05-01T00:00:00.000Z","next_check":"2026-07-30T00:00:00.000Z","problems":[{"fix":"Use 'import sklearn_evaluation' or 'from sklearn_evaluation import ...'","cause":"Trying to import the package with a hyphen instead of an underscore.","error":"ModuleNotFoundError: No module named 'sklearn-evaluation'"},{"fix":"Use the new class-based API: from sklearn_evaluation import ConfusionMatrix; ConfusionMatrix(...).plot()","cause":"Trying to use old functional API (sklearn_evaluation.plot.confusion_matrix) after version 0.5.","error":"AttributeError: module 'sklearn_evaluation' has no attribute 'plot'"}],"ecosystem":"pypi","meta_description":null,"install_score":null,"install_tag":null,"quickstart_score":null,"quickstart_tag":null}