Changed instantiation logic for whisper handler:

- Separated GPU and CPU config keywords
- GPUs are now chosen based on GPU index
 - Configurable number of threads per GPU
Erik Thuning 2024-10-14 16:26:31 +02:00
parent f1de6a19c4
commit d6206f1e6b
2 changed files with 21 additions and 2 deletions

@@ -85,6 +85,20 @@ modeldir = /some/path
 # pool getting completely filled with jobs of this type.
 jobsize = 5
+# What device type to use. Accepts 'cpu' or 'gpu'.
+device = gpu
+# CPU only. Sets the number of workers to use.
+count = 2
+# GPU only. Specifies which GPUs to use, as a comma-separated list.
+# GPUs are indexed from 0. nvidia-smi reports index and bus ID of
+# each available GPU.
+gpu_ids = 0, 1
+# GPU only. Sets the number of workers to start on each used gpu.
+threads_per_gpu = 3
 [ThumbnailHandler]
 # The base image to use when creating presentation thumbnails
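
Read as a standard INI file, the new keys can be consumed with Python's configparser. The sketch below only illustrates the settings shown above; the section name 'SubtitlesWhisperHandler' and the file name 'config.ini' are assumptions for illustration, not taken from this commit.

import configparser

# Hypothetical reader for the options documented above; section name and path are assumed.
cfg = configparser.ConfigParser()
cfg.read('config.ini')
section = cfg['SubtitlesWhisperHandler']

device = section.get('device', fallback='cpu')
if device == 'cpu':
    # CPU mode: 'count' workers, all running on the CPU.
    worker_count = section.getint('count', fallback=1)
else:
    # GPU mode: 'gpu_ids' lists the GPUs to use, 'threads_per_gpu' the workers per GPU.
    gpu_ids = [s.strip() for s in section.get('gpu_ids', fallback='0').split(',')]
    threads_per_gpu = section.getint('threads_per_gpu', fallback=1)
    worker_count = len(gpu_ids) * threads_per_gpu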

@@ -146,8 +146,13 @@ class SubtitlesWhisperHandler(Handler):
         if device == 'cpu':
             devices = [device for i in range(int(config.get('count', 1)))]
         else:
-            count = int(config.get('count', 1))
-            devices = [f'{i}' for i in range(count)]
+            id_config_string = config.get('gpu_ids', '0')
+            ids = [s.strip() for s in id_config_string.split(',')]
+            per_gpu = int(config.get('threads_per_gpu', 1))
+            # 'for j in range(per_gpu)' repeats each gpu id
+            # the appropriate number of times
+            devices = [i for i in ids for j in range(per_gpu)]
         return [cls(handlerqueue,
                     collector,
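
As a standalone illustration (not part of the commit), with the example values from the config above the new comprehension yields three workers pinned to each listed GPU:

ids = ['0', '1']        # parsed from gpu_ids = 0, 1
per_gpu = 3             # threads_per_gpu = 3
devices = [i for i in ids for j in range(per_gpu)]
print(devices)          # ['0', '0', '0', '1', '1', '1']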