Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions Lib/test/test_fstring.py
Original file line number Diff line number Diff line change
Expand Up @@ -1601,6 +1601,10 @@ def f(a):
self.assertEqual(f'{f(a=4)}', '3=')
self.assertEqual(x, 4)

# Check debug expressions in format spec
y = 20
self.assertEqual(f"{2:{y=}}", "yyyyyyyyyyyyyyyyyyy2")

# Make sure __format__ is being called.
class C:
def __format__(self, s):
Expand All @@ -1614,9 +1618,11 @@ def __repr__(self):
self.assertEqual(f'{C()=: }', 'C()=FORMAT- ')
self.assertEqual(f'{C()=:x}', 'C()=FORMAT-x')
self.assertEqual(f'{C()=!r:*^20}', 'C()=********REPR********')
self.assertEqual(f"{C():{20=}}", 'FORMAT-20=20')

self.assertRaises(SyntaxError, eval, "f'{C=]'")


# Make sure leading and following text works.
x = 'foo'
self.assertEqual(f'X{x=}Y', 'Xx='+repr(x)+'Y')
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
Fix f-strings with debug expressions in format specifiers. Patch by Pablo
Galindo.
10 changes: 9 additions & 1 deletion Parser/action_helpers.c
Original file line number Diff line number Diff line change
Expand Up @@ -1444,8 +1444,16 @@ expr_ty _PyPegen_formatted_value(Parser *p, expr_ty expression, Token *debug, Re
conversion_val = (int)'r';
}

expr_ty format_expr = format ? (expr_ty) format->result : NULL;
if (format_expr && format_expr->kind == JoinedStr_kind && asdl_seq_LEN(format_expr->v.JoinedStr.values) == 1) {
expr_ty format_value = asdl_seq_GET(format_expr->v.JoinedStr.values, 0);
if (format_value->kind == JoinedStr_kind) {
format_expr = format_value;
}
}

expr_ty formatted_value = _PyAST_FormattedValue(
expression, conversion_val, format ? (expr_ty) format->result : NULL,
expression, conversion_val, format_expr,
lineno, col_offset, end_lineno,
end_col_offset, arena
);
Expand Down
14 changes: 12 additions & 2 deletions Parser/lexer/lexer.c
Original file line number Diff line number Diff line change
Expand Up @@ -989,6 +989,7 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
the_current_tok->last_expr_buffer = NULL;
the_current_tok->last_expr_size = 0;
the_current_tok->last_expr_end = -1;
the_current_tok->in_format_spec = 0;
the_current_tok->f_string_debug = 0;

switch (*tok->start) {
Expand Down Expand Up @@ -1137,15 +1138,20 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
* by the `{` case, so for ensuring that we are on the 0th level, we need
* to adjust it manually */
int cursor = current_tok->curly_bracket_depth - (c != '{');
if (cursor == 0 && !_PyLexer_update_fstring_expr(tok, c)) {
int in_format_spec = current_tok->in_format_spec;
int cursor_in_format_with_debug =
cursor == 1 && (current_tok->f_string_debug || in_format_spec);
int cursor_valid = cursor == 0 || cursor_in_format_with_debug;
if (cursor_valid && !_PyLexer_update_fstring_expr(tok, c)) {
return MAKE_TOKEN(ENDMARKER);
}
if (cursor == 0 && c != '{' && set_fstring_expr(tok, token, c)) {
if (cursor_valid && c != '{' && set_fstring_expr(tok, token, c)) {
return MAKE_TOKEN(ERRORTOKEN);
}

if (c == ':' && cursor == current_tok->curly_bracket_expr_start_depth) {
current_tok->kind = TOK_FSTRING_MODE;
current_tok->in_format_spec = 1;
p_start = tok->start;
p_end = tok->cur;
return MAKE_TOKEN(_PyToken_OneChar(c));
Expand Down Expand Up @@ -1235,6 +1241,7 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
if (c == '}' && current_tok->curly_bracket_depth == current_tok->curly_bracket_expr_start_depth) {
current_tok->curly_bracket_expr_start_depth--;
current_tok->kind = TOK_FSTRING_MODE;
current_tok->in_format_spec = 0;
current_tok->f_string_debug = 0;
}
}
Expand Down Expand Up @@ -1337,6 +1344,7 @@ tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct
if (in_format_spec && c == '\n') {
tok_backup(tok, c);
TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE;
current_tok->in_format_spec = 0;
p_start = tok->start;
p_end = tok->cur;
return MAKE_TOKEN(FSTRING_MIDDLE);
Expand Down Expand Up @@ -1387,6 +1395,7 @@ tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct
return MAKE_TOKEN(_PyTokenizer_syntaxerror(tok, "f-string: expressions nested too deeply"));
}
TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE;
current_tok->in_format_spec = 0;
p_start = tok->start;
p_end = tok->cur;
} else {
Expand All @@ -1413,6 +1422,7 @@ tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct
tok_backup(tok, peek);
tok_backup(tok, c);
TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE;
current_tok->in_format_spec = 0;
p_start = tok->start;
p_end = tok->cur;
}
Expand Down
1 change: 1 addition & 0 deletions Parser/lexer/state.c
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ free_fstring_expressions(struct tok_state *tok)
mode->last_expr_buffer = NULL;
mode->last_expr_size = 0;
mode->last_expr_end = -1;
mode->in_format_spec = 0;
}
}
}
Expand Down
1 change: 1 addition & 0 deletions Parser/lexer/state.h
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ typedef struct _tokenizer_mode {
Py_ssize_t last_expr_end;
char* last_expr_buffer;
int f_string_debug;
int in_format_spec;
} tokenizer_mode;

/* Tokenizer state */
Expand Down
2 changes: 1 addition & 1 deletion Tools/requirements-dev.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Requirements file for external linters and checks we run on
# Tools/clinic, Tools/cases_generator/, and Tools/peg_generator/ in CI
mypy==1.10.0
mypy==1.10.1

# needed for peg_generator:
types-psutil==5.9.5.20240516
Expand Down