llama_messenger.py 1.4 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748
  1. # Copyright (c) Meta Platforms, Inc. and affiliates.
  2. # This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.
  3. import langchain
  4. from langchain.llms import Replicate
  5. from flask import Flask
  6. from flask import request
  7. import os
  8. import requests
  9. import json
# --- Module-level configuration (runs once at import time) ---

# NOTE(review): hard-coding the API token in source is a demo convenience only;
# replace the placeholder before running, and prefer injecting it via the
# environment in real deployments. This assignment overwrites any value the
# environment already had.
os.environ["REPLICATE_API_TOKEN"] = "<your replicate api token>"

# Pinned Replicate model reference: Llama 2 13B chat at a specific version hash,
# so behavior does not drift when the model is updated upstream.
llama2_13b_chat = "meta/llama-2-13b-chat:f4e2de70d66816a838a89eeeb621910adffb0dd0baba3976c96980970978018d"

# LangChain wrapper around the Replicate-hosted model. temperature=0.01 makes
# generation near-deterministic; responses are capped at 500 new tokens.
llm = Replicate(
    model=llama2_13b_chat,
    model_kwargs={"temperature": 0.01, "top_p": 1, "max_new_tokens":500}
)

# Flask application object; routes below attach to it.
app = Flask(__name__)
  17. @app.route('/msgrcvd_pager', methods=['POST', 'GET'])
  18. def msgrcvd_pager():
  19. message = request.args.get('message')
  20. sender = request.args.get('sender')
  21. recipient = request.args.get('recipient')
  22. answer = llm(message)
  23. print(message)
  24. print(answer)
  25. url = f"https://graph.facebook.com/v18.0/{recipient}/messages"
  26. params = {
  27. 'recipient': '{"id": ' + sender + '}',
  28. 'message': json.dumps({'text': answer}),
  29. 'messaging_type': 'RESPONSE',
  30. 'access_token': "<your page access token>"
  31. }
  32. headers = {
  33. 'Content-Type': 'application/json'
  34. }
  35. response = requests.post(url, params=params, headers=headers)
  36. print(response.status_code)
  37. print(response.text)
  38. return message + "<p/>" + answer