Make style
regisss committed Aug 15, 2024
1 parent 768d6e5 commit 329b06f
Showing 2 changed files with 7 additions and 6 deletions.
examples/summarization/run_summarization.py: 3 changes (2 additions, 1 deletion)
@@ -28,7 +28,6 @@
import datasets
import evaluate
import nltk # Here to have a nice missing dependency error message early on
nltk.download('punkt_tab') # Needed for ver 3.8.2
import numpy as np
import torch
import transformers
@@ -81,6 +80,8 @@ def check_optimum_habana_min_version(*a, **b):
with FileLock(".lock") as lock:
nltk.download("punkt", quiet=True)

nltk.download("punkt_tab") # Needed for version 3.8.2

# A list of all multilingual tokenizer which require lang attribute.
MULTILINGUAL_TOKENIZERS = [MBartTokenizer, MBartTokenizerFast, MBart50Tokenizer, MBart50TokenizerFast]

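For reference, a minimal standalone sketch of the download logic this file ends up with (assumptions: nltk and filelock are installed and network access is available; the try/except guard mirrors the surrounding script but is reproduced here from memory, not from this diff):

import nltk
from filelock import FileLock

# Fetch the "punkt" sentence tokenizer once, guarded by a lock so concurrent
# processes do not race on the download.
try:
    nltk.data.find("tokenizers/punkt")
except (LookupError, OSError):
    with FileLock(".lock"):
        nltk.download("punkt", quiet=True)

# Newer NLTK releases (3.8.2 and later) look up the "punkt_tab" resource as well.
nltk.download("punkt_tab")

print(nltk.sent_tokenize("First sentence. Second sentence."))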
notebooks/AI_HW_Summit_2022.ipynb: 10 changes (5 additions, 5 deletions)
@@ -214,6 +214,7 @@
"source": [
"from optimum.habana.distributed import DistributedRunner\n",
"\n",
"\n",
"# Build the command to execute\n",
"training_args_command_line = \" \".join(f\"--{key} {value}\" for key, value in training_args.items())\n",
"command = f\"../examples/language-modeling/run_clm.py {training_args_command_line}\"\n",
@@ -327,15 +328,13 @@
"metadata": {},
"outputs": [],
"source": [
"# The sequence to complete\n",
"prompt_text = \"This live AI webinar is organized by Habana Labs and Hugging Face and\"\n",
"\n",
"import torch\n",
"import habana_frameworks.torch.hpu\n",
"\n",
"from transformers import GPT2LMHeadModel, GPT2Tokenizer\n",
"\n",
"\n",
"# The sequence to complete\n",
"prompt_text = \"This live AI webinar is organized by Habana Labs and Hugging Face and\"\n",
"\n",
"path_to_model = training_args[\"output_dir\"] # the folder where everything related to our run was saved\n",
"\n",
"device = torch.device(\"hpu\")\n",
@@ -397,6 +396,7 @@
"source": [
"import numpy as np\n",
"\n",
"\n",
"gaudi_price_per_hour = 13.10904\n",
"v100_price_per_hour = 12.24\n",
"\n",
