@@ -1,4 +0,0 @@
-from llama_recipes.inference.llm import LLM
-
-together_example = LLM("TOGETHER::togethercomputer/llama-2-7b-chat::access-token")
-together_result = together_example.query(prompt="Why is the sky blue?")
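For reference, a minimal sketch of the equivalent call after this change, assuming the langchain_together partner package is installed and a Together API key is available; the model name and prompt are taken from the removed example above, and the key value is a placeholder:

from langchain_together import Together

# Completion-style LLM wrapper from the langchain_together package
# (replaces the deprecated langchain.llms.Together import below).
llm = Together(
    model="togethercomputer/llama-2-7b-chat",  # model name from the removed example
    together_api_key="YOUR_TOGETHER_API_KEY",  # placeholder; supply a real access token
)

# invoke() sends a single prompt and returns the generated text.
print(llm.invoke("Why is the sky blue?"))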
@@ -14,7 +14,7 @@ from abc import ABC, abstractmethod
from typing import Callable
import openai
-from langchain.llms import Together
+from langchain_together import Together
from typing_extensions import override