63 changes: 63 additions & 0 deletions src/apps/pages/models/ChatBot/spamDetectorModel.py
@@ -0,0 +1,63 @@
import os
import streamlit as st
import tensorflow as tf
import pickle
from tensorflow.keras.preprocessing.sequence import pad_sequences

# ---------------------------
# Download & load the model and tokenizer from Kaggle (cached)
# ---------------------------
@st.cache_resource
def load_model_and_tokenizer(username="smritipandey02", notebook="spam-detection", out="kaggle_models"):
Review comment (Member): Please import and use our built-in function from https://github.com/Code-A2Z/jarvis/blob/main/src/helpers/kaggle.py here.

    os.makedirs(out, exist_ok=True)
    # Download the kernel output from Kaggle (--force re-downloads even if a local copy exists)
    os.system(f"kaggle kernels output {username}/{notebook} -p {out} --force")
    # Load the trained model and its fitted tokenizer
    model = tf.keras.models.load_model(os.path.join(out, "spam_classifier.h5"))
    with open(os.path.join(out, "tokenizer.pkl"), "rb") as f:
        tokenizer = pickle.load(f)
    return model, tokenizer
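# ---------------------------
# Sketch: reusing the repo's Kaggle helper (per the review comment above)
# ---------------------------
# A minimal sketch only: src/helpers/kaggle.py is not shown in this diff,
# so the helper name and signature below are assumptions, not the actual API.
#
#   from src.helpers.kaggle import download_kernel_output  # hypothetical name
#   download_kernel_output(f"{username}/{notebook}", out)   # would replace the os.system call above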

# ---------------------------
# Prediction function
# ---------------------------
def predict_message(message, model, tokenizer, max_len=100):
    seq = tokenizer.texts_to_sequences([message])
    padded = pad_sequences(seq, maxlen=max_len, padding="post", truncating="post")
    prob = float(model.predict(padded, verbose=0)[0][0])
    return prob
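# Example usage (a minimal sketch; the sample message below is illustrative only):
#   model, tokenizer = load_model_and_tokenizer()
#   print(predict_message("You won a free prize, click here!", model, tokenizer))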

# ---------------------------
# Streamlit UI
# ---------------------------
def spam_app():
Review comment (Member): Follow all the guidelines from the README.md file.

    st.title("Spam Message Detector")

    # Load Model button
    if st.button("Load Model", key="load_btn"):
        with st.spinner("Downloading and loading model..."):
            st.session_state["model"], st.session_state["tokenizer"] = load_model_and_tokenizer()
        st.success("Model & Tokenizer Loaded Successfully!")

    # Input box
    message = st.text_area("Enter a message:", key="msg_input")

    # Predict button
    if st.button("Predict", key="predict_btn"):
        if not message.strip():
            st.warning("Please enter a message before predicting.")
        elif "model" not in st.session_state or "tokenizer" not in st.session_state:
            st.warning("Please load the model first.")
        else:
            prob = predict_message(message,
                                   st.session_state["model"],
                                   st.session_state["tokenizer"])
            if prob > 0.5:
                st.error(f"Spam detected! (Confidence: {prob:.2f})")
            else:
                st.success(f"Ham (Not Spam) (Confidence: {1-prob:.2f})")

# ---------------------------
# Run app
# ---------------------------
spam_app()
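# To try this page on its own (assuming Streamlit and the Kaggle CLI are installed
# and configured with credentials), it can be run directly:
#   streamlit run src/apps/pages/models/ChatBot/spamDetectorModel.py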