Fix error handling tests on julia nightly
Drvi committed Nov 6, 2023
1 parent 03a8baf commit 6668abc
Showing 1 changed file with 19 additions and 25 deletions.
44 changes: 19 additions & 25 deletions test/e2e_tests.jl
@@ -318,14 +318,11 @@ end
@testset "consume!" begin
@testset "serial" begin
throw_ctx = TestThrowingContext(2)
lexer = Lexer(IOBuffer("[1,2]\n[3,4]"), nothing, '\n')
chunking_ctx = ChunkingContext(6, 1, 0, nothing)
ChunkedBase.read_and_lex!(lexer, chunking_ctx)
@test_throws ErrorException("These contexts are for throwing, and that's all what they do") begin
parse_file_serial(
Lexer(IOBuffer("[1,2]\n[3,4]"), nothing, '\n'),
TestParsingContext(),
throw_ctx,
ChunkingContext(6, 1, 0, nothing),
make_buf(1),
)
parse_file_serial(lexer, TestParsingContext(), throw_ctx, chunking_ctx, make_buf(1))
end
@assert !isempty(throw_ctx.tasks)
@test throw_ctx.tasks[1] === current_task()
Expand All @@ -335,14 +332,13 @@ end
@testset "parallel" begin
# 1500 rows should be enough to get each of the 3 task at least one consume!
throw_ctx = TestThrowingContext(1500)
lexer = Lexer(IOBuffer(("[1,2]\n[3,4]\n" ^ 800)), nothing, '\n') # 1600 rows total
nworkers = min(3, Threads.nthreads())
chunking_ctx = ChunkingContext(12, nworkers, 0, nothing)
ChunkedBase.read_and_lex!(lexer, chunking_ctx)
@test_throws TaskFailedException begin
parse_file_parallel(
Lexer(IOBuffer(("[1,2]\n[3,4]\n" ^ 800)), nothing, '\n'), # 1600 rows total
TestParsingContext(),
throw_ctx,
ChunkingContext(12, nworkers, 0, nothing),
[make_buf(1) for _ in 1:(2*nworkers)],
lexer, TestParsingContext(), throw_ctx, chunking_ctx, [make_buf(1) for _ in 1:(2*nworkers)]
)
end
sleep(0.2)
@@ -356,30 +352,28 @@ end
@testset "io" begin
@testset "serial" begin
throw_ctx = TestThrowingContext(typemax(Int)) # Only capture tasks, let IO do the throwing
lexer = Lexer(ThrowingIO(("[1,2]\n[3,4]\n" ^ 10)), nothing, '\n') # 20 rows total
chunking_ctx = ChunkingContext(6, 1, 0, nothing)

ChunkedBase.read_and_lex!(lexer, chunking_ctx)
@test_throws ErrorException("That should be enough data for everyone") begin
parse_file_serial(
Lexer(ThrowingIO(("[1,2]\n[3,4]\n" ^ 10)), nothing, '\n'), # 20 rows total
TestParsingContext(),
throw_ctx,
ChunkingContext(6, 1, 0, nothing),
make_buf(1),
)
parse_file_serial(lexer, TestParsingContext(), throw_ctx, chunking_ctx, make_buf(1))
end
@assert !isempty(throw_ctx.tasks)
@test throw_ctx.tasks[1] === current_task()
@test throw_ctx.conds[1].exception isa ErrorException
end

@testset "parallel" begin
throw_ctx = TestThrowingContext(typemax(Int)) # Only capture tasks, let IO do the throwing
nworkers = min(3, Threads.nthreads())
throw_ctx = TestThrowingContext(typemax(Int)) # Only capture tasks, let IO do the throwing
lexer = Lexer(ThrowingIO(("[1,2]\n[3,4]\n" ^ 800)), nothing, '\n') # 1600 rows total
chunking_ctx = ChunkingContext(12, nworkers, 0, nothing)

ChunkedBase.read_and_lex!(lexer, chunking_ctx)
@test_throws TaskFailedException begin
parse_file_parallel(
Lexer(ThrowingIO(("[1,2]\n[3,4]\n" ^ 800)), nothing, '\n'), # 1600 rows total
TestParsingContext(),
throw_ctx,
ChunkingContext(12, nworkers, 0, nothing),
[make_buf(1) for _ in 1:(2*nworkers)],
lexer, TestParsingContext(), throw_ctx, chunking_ctx, [make_buf(1) for _ in 1:(2*nworkers)]
)
end
sleep(0.2)
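
The change is the same in all four testsets: the Lexer and the ChunkingContext are now constructed up front, the first chunk is read and lexed via ChunkedBase.read_and_lex! before the @test_throws block, and the prepared objects are passed to parse_file_serial / parse_file_parallel. A minimal sketch of the resulting test shape, assuming the helpers defined earlier in test/e2e_tests.jl (TestThrowingContext, TestParsingContext, make_buf) are in scope and that Lexer comes from NewlineLexers, as in the rest of the suite:

    using Test
    using ChunkedBase
    using NewlineLexers: Lexer

    # Setup is hoisted out of the assertion: build the throwing consume!
    # context, the lexer, and the chunking context, then read and lex the
    # first chunk eagerly.
    throw_ctx = TestThrowingContext(2)  # helper that throws from consume!
    lexer = Lexer(IOBuffer("[1,2]\n[3,4]"), nothing, '\n')
    chunking_ctx = ChunkingContext(6, 1, 0, nothing)
    ChunkedBase.read_and_lex!(lexer, chunking_ctx)

    # Only the parse/consume step remains inside @test_throws, so the
    # assertion covers just the parsing and consuming of the chunk.
    @test_throws ErrorException("These contexts are for throwing, and that's all what they do") begin
        parse_file_serial(lexer, TestParsingContext(), throw_ctx, chunking_ctx, make_buf(1))
    end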