Commit 25bb54b
minor performance improvements
matt200-ok committed Jan 17, 2025
1 parent b6ea59b · commit 25bb54b
Showing 2 changed files with 11 additions and 8 deletions.
recipes/llm-voice-assistant/python/cli/main.py: 6 additions, 4 deletions

@@ -203,7 +203,7 @@ def flush(self):
     def interrupt(self):
         self.orca_connection.send({'command': Commands.INTERRUPT})
         while self.orca_connection.poll() and self.orca_connection.recv()['command'] != Commands.INTERRUPT:
-            time.sleep(0.01)
+            time.sleep(0.1)
         self.speaker.interrupt()

     def tick(self):
@@ -248,6 +248,7 @@ def handler(_, __) -> None:
     text_queue = Queue()
     while not close:
         while connection.poll():
+            time.sleep(0.1)
             message = connection.recv()
             if message['command'] == Commands.CLOSE:
                 close = True
@@ -269,7 +270,7 @@ def handler(_, __) -> None:
                 orca_profiler.reset()
                 utterance_end_sec = 0
                 delay_sec = -1
-        if not text_queue.empty():
+        while not text_queue.empty():
             text = text_queue.get()
             orca_profiler.tick()
             pcm = orca_stream.synthesize(text)
@@ -321,7 +322,7 @@ def process(self, text: str, utterance_end_sec):
     def interrupt(self):
         self.pllm_connection.send({'command': Commands.INTERRUPT})
         while self.pllm_connection.poll() and self.pllm_connection.recv()['command'] != Commands.INTERRUPT:
-            time.sleep(0.01)
+            time.sleep(0.1)
         print('', flush=True)
         self.synthesizer.interrupt()

@@ -406,6 +407,7 @@ def llm_task(text):
         llm_future = None
         interrupting = False
         while not close:
+            time.sleep(0.1)
             while connection.poll():
                 message = connection.recv()
                 if message['command'] == Commands.CLOSE:
@@ -434,7 +436,7 @@ def llm_task(text):
                     connection.send({'command': Commands.INTERRUPT})
     finally:
         while llm_future and llm_future.done():
-            time.sleep(0.01)
+            time.sleep(0.1)
         del executor
         pllm.release()
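
For context, the loops this commit touches share one pattern: a worker process polls a multiprocessing Connection for commands, buffers incoming text in a queue, and sleeps briefly instead of busy-waiting. The commit lengthens those sleeps from 0.01 s to 0.1 s, adds them to loops that had none, and drains the text queue with a while rather than handling one item per pass. The sketch below is a minimal standalone illustration of that pattern only; the names (worker, Commands.PROCESS, the print call) are placeholders, not the repository's API.

# Minimal sketch of the poll-sleep-drain worker pattern; illustrative names only.
import time
from enum import Enum
from multiprocessing import Pipe, Process
from queue import Queue


class Commands(Enum):
    CLOSE = 0
    PROCESS = 1


def worker(connection) -> None:
    close = False
    text_queue = Queue()
    while not close:
        # Sleep briefly each pass so an idle loop does not spin at full CPU;
        # the commit standardizes on 0.1 s for these waits.
        time.sleep(0.1)
        while connection.poll():
            message = connection.recv()
            if message['command'] == Commands.CLOSE:
                close = True
            elif message['command'] == Commands.PROCESS:
                text_queue.put(message['text'])
        # Drain everything queued so far ("while", not "if").
        while not text_queue.empty():
            print('processing:', text_queue.get())


if __name__ == '__main__':
    main_connection, worker_connection = Pipe()
    process = Process(target=worker, args=(worker_connection,))
    process.start()
    main_connection.send({'command': Commands.PROCESS, 'text': 'hello'})
    main_connection.send({'command': Commands.CLOSE})
    process.join()
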
recipes/llm-voice-assistant/python/windows_gui/main.py: 5 additions, 4 deletions

@@ -206,6 +206,7 @@ def handler(_, __) -> None:
     flushing = False
     text_queue = Queue()
     while not close:
+        time.sleep(0.1)
         while connection.poll():
             message = connection.recv()
             if message['command'] == Commands.CLOSE:
@@ -350,6 +351,7 @@ def llm_task(text):
         llm_future = None
         interrupting = False
         while not close:
+            time.sleep(0.1)
             while connection.poll():
                 message = connection.recv()
                 if message['command'] == Commands.CLOSE:
@@ -745,6 +747,9 @@ def handler(_, __) -> None:
         pass
     signal.signal(signal.SIGINT, handler)

+    if not sys.platform.lower().startswith('win'):
+        return
+
     try:
         gpu_usage_counters_format = r'"\GPU Engine(pid_{}_*)\Utilization Percentage"'
         gpu_usage_counters = ', '.join([gpu_usage_counters_format.format(pid) for pid in pids])
@@ -856,10 +861,6 @@ def handler(_, __) -> None:


 if __name__ == '__main__':
-    if not sys.platform.lower().startswith('win'):
-        print('Error: Only runs on Windows platforms')
-        exit(1)
-
     parser = ArgumentParser()
     parser.add_argument(
         '--config',
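
The last two hunks of windows_gui/main.py move the platform check out of __main__ and into the routine that samples the GPU usage counters, returning early on non-Windows platforms instead of exiting the whole program at startup. A minimal sketch of that guard follows; the function name is hypothetical and the print stands in for whatever consumes the counter string, since the diff does not show that part.

# Sketch of the relocated platform guard; names other than the counter path
# are placeholders, not code from the repository.
import os
import sys


def gpu_usage_counter_paths(pids) -> str:
    # The "GPU Engine" performance counters only exist on Windows, so return
    # early elsewhere instead of aborting the whole program.
    if not sys.platform.lower().startswith('win'):
        return ''

    gpu_usage_counters_format = r'"\GPU Engine(pid_{}_*)\Utilization Percentage"'
    return ', '.join([gpu_usage_counters_format.format(pid) for pid in pids])


if __name__ == '__main__':
    # On Windows this prints one counter path per pid; elsewhere it prints nothing.
    print(gpu_usage_counter_paths([os.getpid()]))
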
