streamlit_llama2.py

# Copyright (c) Meta Platforms, Inc. and affiliates.
# This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.

import os

import streamlit as st
from langchain.llms import Replicate

st.title("Llama2-powered Streamlit App")

# Set the Replicate API token (replace the placeholder with your own token).
with st.sidebar:
    os.environ["REPLICATE_API_TOKEN"] = "<your replicate api token>"

def generate_response(input_text):
    # Llama 2 13B chat model hosted on Replicate.
    llama2_13b_chat = "meta/llama-2-13b-chat:f4e2de70d66816a838a89eeeb621910adffb0dd0baba3976c96980970978018d"
    llm = Replicate(
        model=llama2_13b_chat,
        model_kwargs={"temperature": 0.01, "top_p": 1, "max_new_tokens": 500},
    )
    # Display the model's completion in an info box.
    st.info(llm(input_text))

# A simple form with a text area and a submit button; generation runs on submit.
with st.form("my_form"):
    text = st.text_area("Enter text:", "What is Generative AI?")
    submitted = st.form_submit_button("Submit")
    if submitted:
        generate_response(text)
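
# Usage note (editor's assumption, not part of the original script): with the
# streamlit, langchain, and replicate packages installed and a valid token in
# place of the placeholder above, the app can typically be launched with:
#   streamlit run streamlit_llama2.py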