Skip to content

Commit

Permalink
Tweak Tracing
Browse files Browse the repository at this point in the history
  • Loading branch information
Drvi committed Jul 18, 2023
1 parent 3efb32c commit 417de1c
Show file tree
Hide file tree
Showing 4 changed files with 4 additions and 2 deletions.
3 changes: 2 additions & 1 deletion src/ChunkedBase.jl
Original file line number Diff line number Diff line change
Expand Up @@ -146,13 +146,14 @@ export parse_file_serial, parse_file_parallel, populate_result_buffer!
# TRACING # return nothing
# TRACING # end
# TRACING # function load_traces!(path)
# TRACING # _resize!(vv, n) = length(vv) >= n ? resize!(vv, n) : append!(vv, [UInt[] for _ in 1:n-length(vv)])
# TRACING # open(path, "r") do io
# TRACING # read!(io, resize!(IO_TASK_TIMES, read(io, UInt32)))
# TRACING # read!(io, resize!(LEXER_TASK_TIMES, read(io, UInt32)))
# TRACING # read!(io, resize!(T1, read(io, UInt32)))
# TRACING # read!(io, resize!(T2, read(io, UInt32)))
# TRACING #
# TRACING # resize!(PARSER_TASKS_TIMES, read(io, UInt32))
# TRACING # _resize!(PARSER_TASKS_TIMES, read(io, UInt32))
# TRACING # for x in PARSER_TASKS_TIMES
# TRACING # read!(io, resize!(x, read(io, UInt32)))
# TRACING # end
Expand Down
1 change: 0 additions & 1 deletion src/ConsumeContexts.jl
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,6 @@ there to be `ntasks * (1 + length(parsing_ctx.schema))` units of work per chunk
See also [`consume!`](@ref), [`setup_tasks!`](@ref), [`task_done!`](@ref), [`cleanup`](@ref)
"""
"""
    setup_tasks!(::AbstractConsumeContext, chunking_ctx::ChunkingContext, ntasks::Int)

Record the expected number of parsing work units (`ntasks`) for the current
chunk by storing it in `chunking_ctx.counter`. Called before parsing tasks are
dispatched for a chunk; per the surrounding docs, consume contexts expecting a
different accounting (e.g. `ntasks * (1 + length(parsing_ctx.schema))` units)
provide their own method. Returns `nothing`.

See also [`consume!`](@ref), [`task_done!`](@ref), [`cleanup`](@ref).
"""
function setup_tasks!(::AbstractConsumeContext, chunking_ctx::ChunkingContext, ntasks::Int)
# TRACING # chunking_ctx.id == 1 ? push!(ChunkedBase.T1, time_ns()) : push!(ChunkedBase.T2, time_ns())
set!(chunking_ctx.counter, ntasks)
return nothing
end
Expand Down
1 change: 1 addition & 0 deletions src/parser_parallel.jl
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ function submit_lexed_rows!(parsing_queue, consume_ctx, chunking_ctx, row_num)
task_size = estimate_task_size(chunking_ctx)
ntasks = cld(length(chunking_ctx.newline_positions), task_size)
# Set the expected number of parsing tasks
# TRACING # chunking_ctx.id == 1 ? push!(ChunkedBase.T1, time_ns()) : push!(ChunkedBase.T2, time_ns())
setup_tasks!(consume_ctx, chunking_ctx, ntasks)
# Send task definitions (segment of `eols` to process) to the queue
task_start = Int32(1)
Expand Down
1 change: 1 addition & 0 deletions src/parser_serial.jl
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ function parse_file_serial(
task_size = estimate_task_size(chunking_ctx)
task_start = Int32(1)
for task in Iterators.partition(eachindex(chunking_ctx.newline_positions), task_size)
# TRACING # chunking_ctx.id == 1 ? push!(ChunkedBase.T1, time_ns()) : push!(ChunkedBase.T2, time_ns())
setup_tasks!(consume_ctx, chunking_ctx, 1)
task_end = Int32(last(task))
newline_segment = @view(chunking_ctx.newline_positions.elements[task_start:task_end])
Expand Down

0 comments on commit 417de1c

Please sign in to comment.