# llama_messenger.py
  1. import langchain
  2. from langchain.llms import Replicate
  3. from flask import Flask
  4. from flask import request
  5. import os
  6. import requests
  7. import json
  8. os.environ["REPLICATE_API_TOKEN"] = "r8_dR6bALmiSCZCZRs3JKuxkMYxkEW8b2Z0oDwCm"
  9. llama2_13b_chat = "meta/llama-2-13b-chat:f4e2de70d66816a838a89eeeb621910adffb0dd0baba3976c96980970978018d"
  10. llm = Replicate(
  11. model=llama2_13b_chat,
  12. model_kwargs={"temperature": 0.01, "top_p": 1, "max_new_tokens":500}
  13. )
  14. app = Flask(__name__)
  15. @app.route('/msgrcvd_pager', methods=['POST', 'GET'])
  16. def msgrcvd_pager():
  17. message = request.args.get('message')
  18. sender = request.args.get('sender')
  19. recipient = request.args.get('recipient')
  20. answer = llm(message)
  21. print(message)
  22. print(answer)
  23. url = f"https://graph.facebook.com/v18.0/{recipient}/messages"
  24. params = {
  25. 'recipient': '{"id": ' + sender + '}',
  26. 'message': json.dumps({'text': answer}),
  27. 'messaging_type': 'RESPONSE',
  28. 'access_token': 'EAAEox5Brim0BOzT7xduQmLPmV5JEYC0wyfZBPE308kOPOUr02GITwIeABUT0ffvoHm2ktusKfXgwoZAQiaI6ZAobAhtGQjsYsm7VzCbVBLQjzKSMyKlmI2ZCFtZAZAEuYZCIZC2YMlCpBhjTbr1Tr7HC7Eom7EPchFpOWAGWktN1PCik17Q1KWCD1ZAdSLBQS6T1Jk4wmZA54eO3MCgQZDZ'
  29. }
  30. headers = {
  31. 'Content-Type': 'application/json'
  32. }
  33. response = requests.post(url, params=params, headers=headers)
  34. print(response.status_code)
  35. print(response.text)
  36. return message + "<p/>" + answer