import os
from pathlib import Path

import streamlit as st
import yaml

from shared.hf_helpers import build_pipeline


def main():
    st.title("⚖️ LegalDoc Summarizer – AxionX Digital")

    CONFIG_PATH = Path(__file__).resolve().parent / "config.yaml"
    with open(CONFIG_PATH) as f:
        cfg = yaml.safe_load(f)

    base_model = cfg["base_model"]
    finetuned_model = cfg.get("finetuned_model") or os.getenv("LEGALDOC_MODEL_ID")
    local_model_dir = Path(cfg.get("finetuned_local_dir", "models/legaldoc_summarizer"))

    model_options = [base_model]
    if finetuned_model:
        model_options.append(finetuned_model)
    elif local_model_dir.exists():
        model_options.append(str(local_model_dir))
    else:
        st.info(
            "Using the base model until a fine-tuned checkpoint is available. "
            "Train a model to populate `models/legaldoc_summarizer` or set `LEGALDOC_MODEL_ID` / `finetuned_model`."
        )

    model_name = st.selectbox("Model:", model_options)
    hf_token = os.getenv("HUGGINGFACEHUB_API_TOKEN") or os.getenv("HF_TOKEN")
    @st.cache_resource
    def get_pipeline(model_name, token):
        # Cache the loaded pipeline so Streamlit does not reload the model on every rerun.
        return build_pipeline(model_name, token=token)

    pipe = get_pipeline(model_name, hf_token)
    st.write("Paste a contract clause or judgment text below:")
    text = st.text_area("Clause or Legal Text", height=250)

    if st.button("Summarize"):
        if text.strip():
            result = pipe(text, max_new_tokens=cfg["demo"]["max_new_tokens"])
            st.markdown("### 🧾 Summary")
            st.write(result[0]["generated_text"])
# Entry point so the app runs the same way locally and when hosted as a Space.
if __name__ == "__main__":
    main()
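
The code above expects a `config.yaml` next to `app.py`. A minimal sketch matching the keys it reads (`base_model`, `finetuned_model`, `finetuned_local_dir`, and `demo.max_new_tokens`); the model id and numbers are illustrative placeholders, not the project's actual settings:

    base_model: google/flan-t5-base           # placeholder; any Hub model id
    finetuned_model: null                     # optional Hub repo id of the fine-tuned checkpoint
    finetuned_local_dir: models/legaldoc_summarizer
    demo:
      max_new_tokens: 256                     # illustrative limit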
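`shared/hf_helpers.build_pipeline` is not shown on this page. A minimal sketch consistent with how it is called above (a model name or local path plus an optional token) and with the `generated_text` key read from its output; the task name and everything else here are assumptions, not the actual helper:

    from transformers import pipeline

    def build_pipeline(model_name, token=None):
        # Hypothetical sketch of shared/hf_helpers.build_pipeline (the real helper is not shown).
        # "text2text-generation" is assumed because app.py reads result[0]["generated_text"].
        return pipeline(
            "text2text-generation",
            model=model_name,
            token=token,
        )

If the checkpoint is a decoder-only model, the task would be "text-generation" instead; both tasks return their output under the `generated_text` key, so the app code above works either way.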