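"""Recycling Helper AI.

Streamlit app that classifies photos of waste with a SigLIP image-classification
model from Hugging Face and shows a disposal tip for the predicted material.
"""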
import os
# ✅ Use temp dir for safe model caching in Spaces/Docker
os.environ["TRANSFORMERS_CACHE"] = "/tmp/hf_cache"
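# Recent transformers releases deprecate TRANSFORMERS_CACHE in favor of HF_HOME;
# setting both should keep all Hugging Face caching under /tmp either way.
os.environ.setdefault("HF_HOME", "/tmp/hf_cache")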
import streamlit as st
from PIL import Image
import random
import torch
from transformers import AutoImageProcessor, SiglipForImageClassification, logging
# Keep transformers quiet: log errors only, suppress info/warning noise
logging.set_verbosity_error()
# Constants
MODEL_NAME = "prithivMLmods/Recycling-Net-11"
# Daily sustainability tips
TIPS = [
    "Rinse containers before recycling to avoid contamination.",
    "Avoid using plastic bags for recyclables – use bins or boxes.",
    "Compost your kitchen scraps instead of tossing them.",
    "Recycle electronics only at designated e-waste centers.",
    "Buy products made from recycled materials to close the loop.",
    "Don’t recycle greasy pizza boxes – compost or trash them.",
    "Learn your local recycling rules – they vary by region.",
    "Use reusable bags, bottles, and containers to reduce waste.",
    "Donate old clothes and furniture instead of throwing them away.",
    "Avoid single-use plastics whenever possible.",
]
# Government recycling links
GOVERNMENT_LINKS = {
    "Pakistan": "https://environment.gov.pk/",
    "India": "https://www.cpcb.nic.in/",
    "China": "http://english.mee.gov.cn/",
    "Japan": "https://www.env.go.jp/en/",
    "USA": "https://www.epa.gov/recycle",
    "UK": "https://www.gov.uk/recycling-collections",
    "Canada": "https://www.canada.ca/en/services/environment/conservation/recycling.html",
    "Germany": "https://www.bmu.de/en/topics/water-waste-soil/waste-management",
}
# Load model and processor
@st.cache_resource(show_spinner="🔄 Loading AI model...")
def load_model():
    try:
        processor = AutoImageProcessor.from_pretrained(MODEL_NAME, revision="main")
        model = SiglipForImageClassification.from_pretrained(MODEL_NAME, revision="main")
        model.eval()
        return processor, model
    except Exception as e:
        st.error("❌ Failed to load the model. Please check the model name or your connection.")
        st.exception(e)
        raise
# Prediction function
def predict(image: Image.Image, processor, model):
    inputs = processor(images=image, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
    logits = outputs.logits
    probs = torch.nn.functional.softmax(logits, dim=-1)  # logits -> class probabilities
    conf, idx = torch.max(probs, dim=-1)                 # top-1 class and its probability
    class_name = model.config.id2label[idx.item()]
    confidence = conf.item()
    return class_name, confidence
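# Illustrative only: predict(img, processor, model) returns a (label, confidence)
# pair such as ("cardboard", 0.93), where the label comes from model.config.id2label.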
# Recycling tip per label
def get_suggestion(label: str) -> str:
    suggestions = {
        "aluminium": "Rinse and recycle aluminum cans. They are infinitely recyclable.",
        "batteries": "Do not throw in the trash. Use proper e-waste collection centers.",
        "cardboard": "Flatten and keep dry. Avoid greasy pizza boxes.",
        "glass": "Rinse and remove lids. Separate by color if required.",
        "hard plastic": "Check recycling codes. Clean before recycling.",
        "paper": "Do not recycle shredded paper in curbside bins. Reuse or compost instead.",
        "paper towel": "Compost if clean. Trash if soiled.",
        "polystyrene": "Rarely accepted in curbside. Reuse or bring to special centers.",
        "soft plastics": "Often require store drop-off. Don’t mix with other recyclables.",
        "takeaway cups": "Check local rules. Many are lined and not recyclable curbside.",
    }
    return suggestions.get(label, "Please check your local rules for proper disposal of this item.")
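# The keys above are assumed to match the label strings in model.config.id2label
# exactly (lowercase); any unmatched label falls back to the generic message.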
# Main app
def main():
    st.set_page_config(page_title="♻️ Recycling Helper AI", layout="centered")
    st.title("♻️ Recycling Helper AI")
    st.subheader("An AI-powered app to identify recyclable materials and promote sustainability.")
    st.markdown("---")

    # Sidebar
    with st.sidebar:
        st.header("📘 About This App")
        st.markdown(
            "This open-source app helps you identify recyclable materials from waste images "
            "using a machine learning model. It promotes proper disposal and reduces contamination "
            "in the recycling stream. Built for hackathons using Hugging Face + Streamlit."
        )
        st.markdown("---")
        st.header("🌐 Recycling Resources")
        st.markdown("For proper recycling and disposal of waste, refer to the following resources:")
        for country, url in GOVERNMENT_LINKS.items():
            st.markdown(f"- [{country}]({url})")
        st.markdown("---")
        st.header("🌱 Daily Sustainability Tip")
        tip = random.choice(TIPS)
        st.success(tip)

    # Load model
    processor, model = load_model()

    # Upload image
    st.markdown("### 📤 Upload Waste Image")
    uploaded_file = st.file_uploader("Upload an image of a recyclable item", type=["png", "jpg", "jpeg"])

    if uploaded_file is not None:
        try:
            image = Image.open(uploaded_file).convert("RGB")
            st.image(image, caption="Uploaded Image", use_column_width=True)
            with st.spinner("🔍 Classifying image..."):
                label, confidence = predict(image, processor, model)
            st.success(f"**Predicted Material:** `{label}`\n\n**Confidence:** `{confidence:.2%}`")
            st.info(f"**Disposal Tip:** {get_suggestion(label)}")
        except Exception as e:
            st.error("An error occurred during prediction.")
            st.exception(e)

    with st.expander("🔍 Show All Recognizable Materials"):
        st.write(model.config.id2label)

    st.markdown("---")
    st.caption("Made with 💚 for a sustainable future | Hackathon 2025")
# Run
if __name__ == "__main__":
    main()