
Commit b24ead8

fix some typos in docs, comments, logging/errors (#11432)
1 parent e3e70f9 commit b24ead8

77 files changed, +142 -142 lines changed

src/transformers/commands/add_new_model.py

Lines changed: 2 additions & 2 deletions
@@ -57,14 +57,14 @@ def run(self):
         if not _has_cookiecutter:
             raise ImportError(
                 "Model creation dependencies are required to use the `add_new_model` command. Install them by running "
-                "the folowing at the root of your `transformers` clone:\n\n\t$ pip install -e .[modelcreation]\n"
+                "the following at the root of your `transformers` clone:\n\n\t$ pip install -e .[modelcreation]\n"
             )
         # Ensure that there is no other `cookiecutter-template-xxx` directory in the current working directory
         directories = [directory for directory in os.listdir() if "cookiecutter-template-" == directory[:22]]
         if len(directories) > 0:
             raise ValueError(
                 "Several directories starting with `cookiecutter-template-` in current working directory. "
-                "Please clean your directory by removing all folders startign with `cookiecutter-template-` or "
+                "Please clean your directory by removing all folders starting with `cookiecutter-template-` or "
                 "change your working directory."
             )

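Note: the hunk above touches the pre-flight checks of the `add_new_model` command, which verifies that cookiecutter is installed and that no leftover `cookiecutter-template-*` directories remain in the working directory. A minimal sketch of that check pattern follows; the helper name is hypothetical and not part of transformers.

import os


def check_workspace_is_clean(prefix: str = "cookiecutter-template-") -> None:
    # Fail fast if leftover template directories exist, mirroring the prefix
    # check shown in the diff above.
    leftovers = [d for d in os.listdir() if d.startswith(prefix) and os.path.isdir(d)]
    if leftovers:
        raise ValueError(
            "Several directories starting with `cookiecutter-template-` in current working directory. "
            "Please clean your directory by removing all folders starting with `cookiecutter-template-` or "
            "change your working directory."
        )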
src/transformers/data/processors/squad.py

Lines changed: 1 addition & 1 deletion
@@ -244,7 +244,7 @@ def squad_convert_example_to_features(
         cls_index = span["input_ids"].index(tokenizer.cls_token_id)

         # p_mask: mask with 1 for token than cannot be in the answer (0 for token which can be in an answer)
-        # Original TF implem also keep the classification token (set to 0)
+        # Original TF implementation also keep the classification token (set to 0)
         p_mask = np.ones_like(span["token_type_ids"])
         if tokenizer.padding_side == "right":
             p_mask[len(truncated_query) + sequence_added_tokens :] = 0

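Note: the comment fixed above describes `p_mask`, where 1 marks tokens that cannot be part of the answer and 0 marks candidate tokens, with the classification token also set to 0. A rough numpy sketch of that convention, using made-up lengths instead of a real tokenizer:

import numpy as np

# Hypothetical span: 4 query tokens + 2 special tokens, then 6 context tokens.
token_type_ids = np.array([0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1])
truncated_query_len = 4
sequence_added_tokens = 2
cls_index = 0  # position of the CLS token in this toy span

# 1 = token cannot be in the answer, 0 = token may be in the answer.
p_mask = np.ones_like(token_type_ids)

# With right padding, everything after the query + special tokens is context
# and therefore a possible answer token.
p_mask[truncated_query_len + sequence_added_tokens:] = 0

# Like the original TF implementation, keep the classification token available.
p_mask[cls_index] = 0

print(p_mask)  # [0 1 1 1 1 1 0 0 0 0 0 0]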
src/transformers/feature_extraction_sequence_utils.py

Lines changed: 1 addition & 1 deletion
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-Sequence feature extraction class for common feature extrcactors to preprocess sequences.
+Sequence feature extraction class for common feature extractors to preprocess sequences.
 """

 from typing import Dict, List, Optional, Union

src/transformers/file_utils.py

Lines changed: 1 addition & 1 deletion
@@ -551,7 +551,7 @@ def wrapper(*args, **kwargs):
         ("sklearn", (is_sklearn_available, SKLEARN_IMPORT_ERROR)),
         ("speech", (is_speech_available, SPEECH_IMPORT_ERROR)),
         ("tf", (is_tf_available, TENSORFLOW_IMPORT_ERROR)),
-        ("tokenziers", (is_tokenizers_available, TOKENIZERS_IMPORT_ERROR)),
+        ("tokenizers", (is_tokenizers_available, TOKENIZERS_IMPORT_ERROR)),
         ("torch", (is_torch_available, PYTORCH_IMPORT_ERROR)),
         ("vision", (is_vision_available, VISION_IMPORT_ERROR)),
     ]

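Note: the corrected entry belongs to a name -> (availability check, error message) mapping used to raise informative ImportErrors when an optional backend is missing. A stripped-down sketch of that pattern; the names below are illustrative, not the actual transformers internals.

import importlib.util

# Illustrative error template, in the spirit of TOKENIZERS_IMPORT_ERROR.
TOKENIZERS_IMPORT_ERROR = (
    "This feature requires the tokenizers library: `pip install tokenizers`."
)


def is_tokenizers_available() -> bool:
    # Check for the package without importing it.
    return importlib.util.find_spec("tokenizers") is not None


# Backend name -> (availability check, error message to raise when missing).
BACKENDS = {
    "tokenizers": (is_tokenizers_available, TOKENIZERS_IMPORT_ERROR),
}


def require_backend(name: str) -> None:
    # Hypothetical helper: raise ImportError if the named backend is missing.
    is_available, error = BACKENDS[name]
    if not is_available():
        raise ImportError(error)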
src/transformers/generation_logits_process.py

Lines changed: 1 addition & 1 deletion
@@ -446,7 +446,7 @@ def _set_scores_to_inf_for_banned_tokens(self, scores: torch.Tensor, banned_toke

 class PrefixConstrainedLogitsProcessor(LogitsProcessor):
     r"""
-    :class:`transformers.LogitsProcessor` that enforces contrained generation and is useful for prefix-conditioned
+    :class:`transformers.LogitsProcessor` that enforces constrained generation and is useful for prefix-conditioned
     constrained generation. See `Autoregressive Entity Retrieval <https://arxiv.org/abs/2010.00904>`__ for more
     information.

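Note: the docstring above describes a logits processor that restricts generation to tokens allowed for the current prefix. A simplified sketch of that idea, not the library's implementation: scores of disallowed tokens are set to -inf so they can never be sampled or selected.

import torch


def constrain_to_allowed_tokens(scores: torch.Tensor, allowed_per_example: list) -> torch.Tensor:
    # Toy prefix-constrained decoding: `scores` has shape (batch_size, vocab_size),
    # and allowed_per_example[i] lists the token ids that may follow example i's
    # current prefix. Everything else is masked to -inf.
    mask = torch.full_like(scores, float("-inf"))
    for i, allowed in enumerate(allowed_per_example):
        mask[i, allowed] = 0.0
    return scores + mask


# Example: a 1-example batch with a 5-token vocabulary where only ids 2 and 4 are allowed.
scores = torch.zeros(1, 5)
print(constrain_to_allowed_tokens(scores, [[2, 4]]))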
src/transformers/generation_stopping_criteria.py

Lines changed: 1 addition & 1 deletion
@@ -23,7 +23,7 @@
             Prediction scores of a language modeling head. These can be scores for each vocabulary token before SoftMax
             or scores for each vocabulary token after SoftMax.
         kwargs:
-            Additional stopping critera specific kwargs.
+            Additional stopping criteria specific kwargs.

     Return:
         :obj:`bool`. :obj:`False` indicates we should continue, :obj:`True` indicates we should stop.

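Note: the docstring fixed above documents the stopping-criteria call convention: given `input_ids` and `scores` (plus criteria-specific kwargs), return False to keep generating and True to stop. A minimal sketch following that convention; this toy max-length check is not the library's class.

import torch


class ToyMaxLengthCriterion:
    # Stop once the generated sequence reaches `max_length` tokens.

    def __init__(self, max_length: int):
        self.max_length = max_length

    def __call__(self, input_ids: torch.LongTensor, scores, **kwargs) -> bool:
        # False -> continue generating, True -> stop (matches the docstring above).
        return input_ids.shape[-1] >= self.max_length


criterion = ToyMaxLengthCriterion(max_length=20)
print(criterion(torch.zeros(1, 8, dtype=torch.long), scores=None))  # False: only 8 tokens so far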
src/transformers/generation_tf_utils.py

Lines changed: 2 additions & 2 deletions
@@ -442,8 +442,8 @@ def _generate_no_beam_search(
         **kwargs
     ):
         """
-        Generate sequences for each example without beam search (num_beams == 1). All returned sequence are generated
-        independantly.
+        Generate sequences for each example without beam search (num_beams == 1). All returned sequences are generated
+        independently.
         """

         # length of generated sentences / unfinished sentences

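Note: the docstring above says that without beam search (num_beams == 1) each returned sequence is generated independently. A toy greedy loop illustrating that independence, with a stand-in logits function instead of a real model:

import torch


def toy_next_token_logits(input_ids: torch.LongTensor, vocab_size: int = 10) -> torch.Tensor:
    # Stand-in for a model forward pass: random fake logits per example.
    return torch.randn(input_ids.shape[0], vocab_size)


def greedy_generate(input_ids: torch.LongTensor, max_new_tokens: int = 5) -> torch.LongTensor:
    # No-beam-search sketch: each row of the batch is extended greedily and
    # independently of the other rows (num_beams == 1).
    for _ in range(max_new_tokens):
        logits = toy_next_token_logits(input_ids)
        next_tokens = logits.argmax(dim=-1, keepdim=True)  # best token per example
        input_ids = torch.cat([input_ids, next_tokens], dim=-1)
    return input_ids


print(greedy_generate(torch.zeros(2, 1, dtype=torch.long)))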
src/transformers/generation_utils.py

Lines changed: 1 addition & 1 deletion
@@ -821,7 +821,7 @@ def generate(
             ...     "at least two people were killed in a suspected bomb attack on a passenger bus "
             ...     "in the strife-torn southern philippines on monday , the military said."
             ... )
-            >>> # encode input contex
+            >>> # encode input context
             >>> input_ids = tokenizer(document, return_tensors="pt").input_ids
             >>> # generate 3 independent sequences using beam search decoding (5 beams)
             >>> # with T5 encoder-decoder model conditioned on short news article.

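Note: the docstring example above is cut off by the hunk boundary right before the generation call. A plausible standalone version of the same scenario, assuming the standard `generate` API; the "t5-base" checkpoint is a stand-in choice, since the hunk only says "T5 encoder-decoder model".

from transformers import T5ForConditionalGeneration, T5Tokenizer

tokenizer = T5Tokenizer.from_pretrained("t5-base")
model = T5ForConditionalGeneration.from_pretrained("t5-base")

document = (
    "at least two people were killed in a suspected bomb attack on a passenger bus "
    "in the strife-torn southern philippines on monday , the military said."
)
# encode input context
input_ids = tokenizer(document, return_tensors="pt").input_ids
# generate 3 independent sequences using beam search decoding (5 beams)
outputs = model.generate(input_ids, num_beams=5, num_return_sequences=3)
print(tokenizer.batch_decode(outputs, skip_special_tokens=True))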
src/transformers/modeling_flax_utils.py

Lines changed: 1 addition & 1 deletion
@@ -94,7 +94,7 @@ def __init__(
         self.key = PRNGKey(seed)
         self.dtype = dtype

-        # randomely initialized parameters
+        # randomly initialized parameters
         random_params = self.init_weights(self.key, input_shape)

         # save required_params as set

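Note: the comment fixed above sits next to PRNG-keyed weight initialization (`PRNGKey(seed)` feeding `init_weights(key, input_shape)`). A small JAX sketch of the same pattern, with a made-up `init_weights` standing in for a model's real method:

import jax
import jax.numpy as jnp


def init_weights(key, input_shape):
    # Hypothetical stand-in for a model's init: split the key and draw
    # randomly initialized parameters for two layers.
    k1, k2 = jax.random.split(key)
    return {
        "dense_1": jax.random.normal(k1, (input_shape[-1], 16)),
        "dense_2": jax.random.normal(k2, (16, 4)),
    }


key = jax.random.PRNGKey(0)  # same seed -> same "random" parameters
random_params = init_weights(key, input_shape=(1, 8))
print(jax.tree_util.tree_map(jnp.shape, random_params))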
src/transformers/modeling_outputs.py

Lines changed: 2 additions & 2 deletions
@@ -343,7 +343,7 @@ class CausalLMOutputWithPast(ModelOutput):
             Language modeling loss (for next-token prediction).
         logits (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, config.vocab_size)`):
             Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
-        past_key_values (:obj:`tuple(tupel(torch.FloatTensor))`, `optional`, returned when ``use_cache=True`` is passed or when ``config.use_cache=True``):
+        past_key_values (:obj:`tuple(tuple(torch.FloatTensor))`, `optional`, returned when ``use_cache=True`` is passed or when ``config.use_cache=True``):
             Tuple of :obj:`tuple(torch.FloatTensor)` of length :obj:`config.n_layers`, with each tuple having 2 tensors
             of shape :obj:`(batch_size, num_heads, sequence_length, embed_size_per_head)`)

@@ -423,7 +423,7 @@ class SequenceClassifierOutputWithPast(ModelOutput):
             Classification (or regression if config.num_labels==1) loss.
         logits (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, config.num_labels)`):
             Classification (or regression if config.num_labels==1) scores (before SoftMax).
-        past_key_values (:obj:`tuple(tupel(torch.FloatTensor))`, `optional`, returned when ``use_cache=True`` is passed or when ``config.use_cache=True``):
+        past_key_values (:obj:`tuple(tuple(torch.FloatTensor))`, `optional`, returned when ``use_cache=True`` is passed or when ``config.use_cache=True``):
             Tuple of :obj:`tuple(torch.FloatTensor)` of length :obj:`config.n_layers`, with each tuple having 2 tensors
             of shape :obj:`(batch_size, num_heads, sequence_length, embed_size_per_head)`)

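Note: the fixed docstrings describe `past_key_values` as a tuple of `config.n_layers` tuples, each holding 2 tensors of shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`. A tiny sketch that builds and checks such a cache structure with toy sizes:

import torch

# Toy sizes matching the documented layout.
n_layers, batch_size, num_heads, sequence_length, embed_size_per_head = 2, 1, 4, 7, 8

past_key_values = tuple(
    (
        torch.zeros(batch_size, num_heads, sequence_length, embed_size_per_head),  # cached keys
        torch.zeros(batch_size, num_heads, sequence_length, embed_size_per_head),  # cached values
    )
    for _ in range(n_layers)
)

assert len(past_key_values) == n_layers
assert all(len(layer) == 2 for layer in past_key_values)
assert past_key_values[0][0].shape == (batch_size, num_heads, sequence_length, embed_size_per_head)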