# -*- coding: utf-8 -*-
"""tabular_gradio.ipynb

Automatically generated by Colab.

Original file is located at
    https://colab.research.google.com/drive/1AOluTRpRSCrd9WaBE5wvo6sojvJ3My6F
"""
# app.py
import os
import shutil
import zipfile
import pathlib

import pandas as pd
import gradio as gr
from huggingface_hub import hf_hub_download
from autogluon.tabular import TabularPredictor
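
# Hub location of the zipped AutoGluon predictor, the local cache layout used
# after download/extraction, and the column/label vocabulary of the model.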
MODEL_REPO_ID = "its-zion-18/flowers-tabular-autolguon-predictor"
ZIP_FILENAME = "autogluon_predictor_dir.zip"
CACHE_DIR = pathlib.Path("hf_assets")
EXTRACT_DIR = CACHE_DIR / "predictor_native"

FEATURE_COLS = [
    "flower_diameter_cm",
    "petal_length_cm",
    "petal_width_cm",
    "petal_count",
    "stem_height_cm",
]
COLOR_LABELS = ["Red", "Orange", "Pink", "White", "Yellow", "Purple"]
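
# Download the zipped predictor from the Hub, unzip it into EXTRACT_DIR, and
# return the directory that TabularPredictor.load() expects. If the archive
# contains a single top-level folder, that folder is treated as the predictor root.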
def _prepare_predictor_dir() -> str:
    CACHE_DIR.mkdir(parents=True, exist_ok=True)
    local_zip = hf_hub_download(
        repo_id=MODEL_REPO_ID,
        filename=ZIP_FILENAME,
        repo_type="model",
        local_dir=str(CACHE_DIR),
        local_dir_use_symlinks=False,
    )
    if EXTRACT_DIR.exists():
        shutil.rmtree(EXTRACT_DIR)
    EXTRACT_DIR.mkdir(parents=True, exist_ok=True)
    with zipfile.ZipFile(local_zip, "r") as zf:
        zf.extractall(str(EXTRACT_DIR))
    contents = list(EXTRACT_DIR.iterdir())
    predictor_root = contents[0] if (len(contents) == 1 and contents[0].is_dir()) else EXTRACT_DIR
    return str(predictor_root)
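
# Download and extract once at startup, then load the predictor.
# require_py_version_match=False lets the Space load a predictor that was saved
# under a slightly different Python version.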
PREDICTOR_DIR = _prepare_predictor_dir()
PREDICTOR = TabularPredictor.load(PREDICTOR_DIR, require_py_version_match=False)
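
# Build a single-row DataFrame in the training column order, predict the color,
# and (when the underlying model exposes probabilities) return a dict of class
# probabilities sorted from most to least likely for the gr.Label component.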
def do_predict(flower_diameter_cm, petal_length_cm, petal_width_cm, petal_count, stem_height_cm):
    X = pd.DataFrame([{
        FEATURE_COLS[0]: float(flower_diameter_cm),
        FEATURE_COLS[1]: float(petal_length_cm),
        FEATURE_COLS[2]: float(petal_width_cm),
        FEATURE_COLS[3]: int(petal_count),
        FEATURE_COLS[4]: float(stem_height_cm),
    }], columns=FEATURE_COLS)
    try:
        pred = PREDICTOR.predict(X).iloc[0]
    except Exception as e:
        return f"**Prediction failed:** {e}", {}
    proba_dict = {}
    try:
        proba = PREDICTOR.predict_proba(X)
        if isinstance(proba, pd.Series):
            proba = proba.to_frame().T
        row0 = proba.iloc[0]
        proba_dict = {str(k): float(v) for k, v in row0.items()}
        proba_dict = dict(sorted(proba_dict.items(), key=lambda kv: kv[1], reverse=True))
    except Exception:
        pass
    md = f"**Predicted color:** {pred}"
    # Keys were stringified above, so look the prediction up by its string form.
    if str(pred) in proba_dict:
        md += f"\n\n**Confidence:** {proba_dict[str(pred)]*100:.2f}%"
    return md, proba_dict
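
# Representative slider settings surfaced in the Gradio Examples widget, in
# FEATURE_COLS order: diameter, petal length, petal width, petal count, stem height.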
EXAMPLES = [
    [3.4, 1.3, 1.0, 7, 68.7],
    [5.7, 3.2, 0.9, 5, 21.2],
    [9.7, 4.6, 0.6, 21, 72.6],
    [7.8, 2.8, 0.7, 30, 25.5],
    [9.0, 4.5, 1.8, 10, 13.1],
    [3.5, 4.6, 1.6, 17, 13.4],
]
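
# Gradio Blocks UI: one slider per feature; any slider change re-runs the
# prediction and updates both the Markdown summary and the probability label.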
with gr.Blocks() as demo:
    gr.Markdown("# Flower Color Classifier (Tabular)")
    gr.Markdown("Adjust the sliders to set flower measurements. The model predicts the flower's color and shows class probabilities.")
    with gr.Row():
        flower_diameter_cm = gr.Slider(3.0, 10.0, step=0.1, value=6.0, label=FEATURE_COLS[0])
        petal_length_cm = gr.Slider(1.0, 5.0, step=0.1, value=3.0, label=FEATURE_COLS[1])
        petal_width_cm = gr.Slider(0.5, 2.0, step=0.1, value=1.0, label=FEATURE_COLS[2])
    with gr.Row():
        petal_count = gr.Slider(1, 30, step=1, value=12, label=FEATURE_COLS[3])
        stem_height_cm = gr.Slider(10.0, 80.0, step=0.1, value=40.0, label=FEATURE_COLS[4])
    pred_md = gr.Markdown()
    proba_pretty = gr.Label(num_top_classes=6, label="Class probabilities")
    inputs = [flower_diameter_cm, petal_length_cm, petal_width_cm, petal_count, stem_height_cm]
    for comp in inputs:
        comp.change(fn=do_predict, inputs=inputs, outputs=[pred_md, proba_pretty])
    gr.Examples(examples=EXAMPLES, inputs=inputs, label="Representative examples", examples_per_page=5, cache_examples=False)
if __name__ == "__main__":
    demo.launch()
| """UPLOADING TO HUGGINGFACE""" |