From af353007609d3a5fddfa98fee9cdab5aad1e4eb1 Mon Sep 17 00:00:00 2001
From: Akshay Shah <33681622+akshaynshah@users.noreply.github.com>
Date: Thu, 3 Oct 2024 20:52:24 +0530
Subject: [PATCH] Fix meta-llama-3.1-70b-sampling install commands

---
 .../inference/meta-llama-3.1-70b-sampling.ipynb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/torch-neuronx/transformers-neuronx/inference/meta-llama-3.1-70b-sampling.ipynb b/torch-neuronx/transformers-neuronx/inference/meta-llama-3.1-70b-sampling.ipynb
index 7ef0638..4842808 100644
--- a/torch-neuronx/transformers-neuronx/inference/meta-llama-3.1-70b-sampling.ipynb
+++ b/torch-neuronx/transformers-neuronx/inference/meta-llama-3.1-70b-sampling.ipynb
@@ -75,7 +75,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "!pip install transformers==4.43.2 tokenizers==0.19.1 sentenpiece"
+    "!pip install transformers==4.43.2 tokenizers==0.19.1 sentencepiece"
    ]
   },
   {