Skip to content

Commit

Permalink
filestreams: fix event filter. cli: prettify, interface: reduce verbo…
Browse files Browse the repository at this point in the history
…sity.
  • Loading branch information
cubicibo committed Jun 28, 2024
1 parent f9dcad4 commit 798c3ad
Show file tree
Hide file tree
Showing 5 changed files with 22 additions and 24 deletions.
9 changes: 7 additions & 2 deletions SUPer/filestreams.py
Original file line number Diff line number Diff line change
Expand Up @@ -331,16 +331,21 @@ def load(self) -> None:
def remove_dupes(events: list[BDNXMLEvent]) -> list[BDNXMLEvent]:
    """Collapse runs of identical, temporally contiguous events into one.

    Two consecutive events are considered duplicates when they have the same
    position, the same shape, and pixel-identical image content. A duplicate
    that starts exactly when the previous retained event ends is merged into
    it by extending that event's tc_out; otherwise it is kept (a timecode gap
    means the object disappears in between, so the events must stay distinct).

    :param events: non-empty, chronologically ordered list of BDNXML events.
    :return: filtered list covering the same [tc_in, tc_out] span as the input.
    """
    output_events = [events[0]]
    for i in range(0, len(events)-1):
        # Compare cheap geometry first; only diff the pixel data when the
        # events share the same size and position (np.array_equal also
        # returns False on shape mismatch, unlike an elementwise subtraction
        # which would raise).
        is_diff = events[i+1].pos != events[i].pos
        is_diff = is_diff or events[i+1].shape != events[i].shape
        is_diff = is_diff or (not np.array_equal(np.asarray(events[i+1].img), np.asarray(events[i].img)))

        # Current event is fully processed; release its image data.
        events[i].unload()
        if is_diff or output_events[-1].tc_out != events[i+1].tc_in:
            output_events.append(events[i+1])
        else:
            # Duplicate and contiguous: absorb it into the previous event.
            output_events[-1].set_tc_out(events[i+1].tc_out)
    events[-1].unload()
    # The filtered list must span exactly the same time range as the input.
    assert output_events[0].tc_in == events[0].tc_in and output_events[-1].tc_out == events[-1].tc_out
    logger.debug(f"Removed {len(events) - len(output_events)} duplicate event(s).")
    return output_events
####

class SeqIO(ABC):
"""
Expand Down
10 changes: 5 additions & 5 deletions SUPer/interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -347,7 +347,7 @@ def add_data(ep_timeline: list[bytes], final_ds_l: list[bytes], epoch_data: tupl
####while

# Orchestrator is done distributing epochs, wait for everyone to finish
logger.info("Done distributing events, waiting for jobs to finish.")
logger.info("Done distributing epochs, waiting for renderers to finish.")
time.sleep(0.2)

while any(busy_flags.values()):
Expand All @@ -367,7 +367,7 @@ def add_data(ep_timeline: list[bytes], final_ds_l: list[bytes], epoch_data: tupl
free_renderer.close()
time.sleep(0.2)

logger.info("All jobs finished, cleaning-up processes.")
logger.info("All jobs finished, cleaning-up.")
time.sleep(0.01)
for renderer in renderers:
try: renderer.terminate()
Expand Down Expand Up @@ -557,9 +557,9 @@ def setup_env(self) -> None:
####

def convert2(self, ectx: EpochContext, pcs_id: int = 0) -> tuple[Epoch, DisplaySet, int]:
subgroup = ectx.events
subgroup = remove_dupes(ectx.events)
prefix = f"W{self.iid}: " if __class__.__threaded else ""
logger.info(prefix + f"EPOCH {subgroup[0].tc_in}->{subgroup[-1].tc_out}, {len(subgroup)}->{len(subgroup := remove_dupes(subgroup))} event(s), {len(ectx.windows)} window(s).")
logger.info(prefix + f"Encoding epoch {subgroup[0].tc_in}->{subgroup[-1].tc_out} with {len(subgroup)} event(s), {len(ectx.windows)} window(s).")

if logger.level <= 10:
for w_id, wd in enumerate(ectx.windows):
Expand All @@ -568,7 +568,7 @@ def convert2(self, ectx: EpochContext, pcs_id: int = 0) -> tuple[Epoch, DisplayS
wds_analyzer = WindowsAnalyzer(ectx.windows, subgroup, ectx.box, self.bdn, pcs_id=pcs_id, **self.kwargs)
new_epoch, final_ds, pcs_id = wds_analyzer.analyze()

logger.info(prefix + f" => optimised as {len(new_epoch)} display sets.")
logger.debug(prefix + f" => optimised as {len(new_epoch)} display sets.")
return new_epoch, final_ds, pcs_id

def is_available(self) -> bool:
Expand Down
10 changes: 7 additions & 3 deletions SUPer/pgstream.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ def test_rx_bitrate(epochs: list[Epoch], bitrate: int, fps: float) -> bool:
logger.iinfo(f"Bitrate: AVG={avg_bitrate/(128*1024):.04f} Mbps, PEAK_1s={stats[2]:.03f} Mbps @ {leaky.stats.tsavg}.")

f_log_fun = logger.iinfo if is_ok else logger.error
f_log_fun(f"Underflow margin (higher is better): AVG={stats[1]:.02f}%, MIN={stats[0]:.02f}% @ {leaky.stats.tsmin}")
f_log_fun(f"Target bitrate underflow margin (higher is better): AVG={stats[1]:.02f}%, MIN={stats[0]:.02f}% @ {leaky.stats.tsmin}")
return is_ok
####
#%%
Expand Down Expand Up @@ -276,8 +276,12 @@ def is_compliant(epochs: list[Epoch], fps: float) -> bool:

if seg.flags & ODS.ODSFlags.SEQUENCE_LAST:
data_hash = hash(bytes(ods_data))
if cumulated_ods_size >= PGDecoder.CODED_BUF_SIZE:
logger.warning(f"Object size >1 MiB at {to_tc(current_pts)} is unsupported by oldest decoders. UHD BD will be OK.")
# +6 (PES header) +13 (Optional PES header), +1 (type) +2 (length) +2 (object_id) +1 (object_vn) +1 (flags) = 26
# +13 (header(2) + PTS(4) + DTS(4) + type(1) + length(2)) +2 (object_id) +1 (object_vn) +1 (flags) = 17
# The Coded Object Buffer can hold up to 1 MiB of raw PES data
# This is roughly: "16 full PES packets" or "16 full b'PG' segments + 16*9 bytes"
if cumulated_ods_size >= PGDecoder.CODED_BUF_SIZE-(16*9):
logger.warning(f"Coded object size >1 MiB at {to_tc(current_pts)} is unsupported by old decoders. UHD BD will be OK.")
warnings += 1
cumulated_ods_size = 0

Expand Down
2 changes: 1 addition & 1 deletion SUPer/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -483,7 +483,7 @@ def _init_logger(cls, name: str) -> None:

if not logger.hasHandlers():
handler = logging.StreamHandler()
formatter = logging.Formatter(' %(name)s: %(levelname).4s : %(message)s'.format(name))
formatter = logging.Formatter(' %(name)s %(levelname).4s : %(message)s'.format(name))
handler.setFormatter(formatter)
logger.addHandler(handler)

Expand Down
15 changes: 2 additions & 13 deletions supercli.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@

#%% Main code
if __name__ == '__main__':
print()
logger = LogFacility.get_logger('SUPer')

def exit_msg(msg: str, is_error: bool = True) -> NoReturn:
Expand Down Expand Up @@ -124,18 +123,8 @@ def check_ext(fp: Union[Path, str]) -> None:
logger.warning("PES output requested, adding --palette to command.")
args.palette = True

print("\n @@@@@@@ &@@@ @@@@ @@@@@@@\n"\
"@@@B &@@@ @@@@ @@@@ @@@@ @@@\n"\
"@@@@ @@@@ @@@@ @@@@ @@@ Special Thanks to:\n"\
"J&@@@@&G @@@@ @@@@ @@@@&@@@ Masstock\n"\
" &@@@@ @@@@ @@@@ @@@@ NLScavenger\n"\
"@@@P B@@@ @@@@ @@@& &@@@ Prince 7\n"\
"@@@&!&@@@ B@@@G#&YY5 YJ5# Emulgator\n"\
" G&&@&&B 5#&@B @@PB@& @@&@ Alllen\n"\
" @@@ ,@@@ @@@&G5\n"\
" @@@BP€ @@@\n"\
" (c) cubicibo @@@ @@@\n"\
" @@YY@@ @@@\n")
print(f"SUPer version {LIB_VERSION} - (c) 2024 cubicibo")
print("HDMV PGS encoder, with support from Masstock, Alllen and Emulgator.")
parameters = {}

if (config_file := Path('config.ini')).exists():
Expand Down

0 comments on commit 798c3ad

Please sign in to comment.