streamlit_llama2.py

# Copyright (c) Meta Platforms, Inc. and affiliates.
# This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.
# TODO REFACTOR: Convert this to an ipynb notebook
import streamlit as st
from langchain.llms import Replicate
import os

st.title("Llama2-powered Streamlit App")

# Replace the placeholder below with your own Replicate API token.
with st.sidebar:
    os.environ["REPLICATE_API_TOKEN"] = "<your replicate api token>"

def generate_response(input_text):
    # Llama 2 13B chat model hosted on Replicate, pinned to a specific version.
    llama2_13b_chat = "meta/llama-2-13b-chat:f4e2de70d66816a838a89eeeb621910adffb0dd0baba3976c96980970978018d"
    llm = Replicate(
        model=llama2_13b_chat,
        model_kwargs={"temperature": 0.01, "top_p": 1, "max_new_tokens": 500},
    )
    # Run the prompt through the model and show the completion in the app.
    st.info(llm(input_text))

with st.form("my_form"):
    text = st.text_area("Enter text:", "What is Generative AI?")
    submitted = st.form_submit_button("Submit")
    # Only query the model once the user has clicked Submit.
    if submitted:
        generate_response(text)
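# To try the app locally (assuming the streamlit, langchain, and replicate
# packages are installed), run:
#
#     streamlit run streamlit_llama2.py
#
# Streamlit serves the app at http://localhost:8501 by default.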