Browse Source

Correct model URLs for Llama 3 in tests

Matthias Reso 7 months ago
parent
commit
8f5db330de

+ 1 - 1
tests/conftest.py

@@ -6,7 +6,7 @@ import pytest
 from transformers import AutoTokenizer
 
 ACCESS_ERROR_MSG = "Could not access tokenizer at 'meta-llama/Llama-2-7b-hf'. Did you log into huggingface hub and provided the correct token?"
-LLAMA_VERSIONS = ["meta-llama/Llama-2-7b-hf", "meta-llama/Llama-3-8b-hf"]
+LLAMA_VERSIONS = ["meta-llama/Llama-2-7b-hf", "meta-llama/Meta-Llama-3-8B"]
 
 @pytest.fixture(params=LLAMA_VERSIONS)
 def llama_version(request):

+ 1 - 1
tests/datasets/test_custom_dataset.py

@@ -11,7 +11,7 @@ EXPECTED_RESULTS={
         "example_1": "[INST] Who made Berlin [/INST] dunno",
         "example_2": "[INST] Quiero preparar una pizza de pepperoni, puedes darme los pasos para hacerla? [/INST] Claro!",
     },
-    "meta-llama/Llama-3-8b-hf":{
+    "meta-llama/Meta-Llama-3-8B":{
         "example_1": "<|begin_of_text|><|start_header_id|>user<|end_header_id|>\n\nWho made Berlin<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\ndunno<|eot_id|><|end_of_text|>",
         "example_2": "<|begin_of_text|><|start_header_id|>user<|end_header_id|>\n\nHow to start learning guitar and become a master at it?",
     },

+ 1 - 1
tests/datasets/test_grammar_datasets.py

@@ -10,7 +10,7 @@ EXPECTED_RESULTS = {
         "label": 1152,
         "pos": 31,
     },
-    "meta-llama/Llama-3-8b-hf":{
+    "meta-llama/Meta-Llama-3-8B":{
         "label": 40,
         "pos": 26,
     },

+ 1 - 1
tests/datasets/test_samsum_datasets.py

@@ -10,7 +10,7 @@ EXPECTED_RESULTS = {
         "label": 8432,
         "pos": 242,
     },
-    "meta-llama/Llama-3-8b-hf":{
+    "meta-llama/Meta-Llama-3-8B":{
         "label": 2250,
         "pos": 211,
     },

+ 1 - 1
tests/test_batching.py

@@ -9,7 +9,7 @@ EXPECTED_SAMPLE_NUMBER ={
         "train": 96,
         "eval": 42,
     },
-    "meta-llama/Llama-3-8b-hf": {
+    "meta-llama/Meta-Llama-3-8B": {
         "train": 79,
         "eval": 34,
     }