play-daemon-threaded/pipeline/handlers/__init__.py
Erik Thuning cbf29c4962 Implementing a unified job pool for transcodes and subtitles never panned out,
so the code was just adding unnecessary complexity. The pipeline now uses
mp.pool to manage ffmpeg jobs as before.

This reverts commit f91109fb3e and deletes the
WorkThread class and its associated tests.
2024-10-17 11:34:00 +02:00

43 lines
1.5 KiB
Python

import multiprocessing as mp
from .audio import AudioHandler
from .metadata import MetadataHandler
from .poster import PosterHandler
from .slides import SlidesHandler
from .subtitles_whisper import SubtitlesWhisperHandler
from .subtitles_import import SubtitlesImportHandler
from .thumbnail import ThumbnailHandler
from .transcode import TranscodeHandler
from .visibility import VisibilityHandler
from ..ldap import Ldap
from ..utils import get_section
# Every handler class the pipeline knows about. init_handlers() walks this
# list to create one job queue and one set of instances per class.
allHandlers = [
    AudioHandler,
    MetadataHandler,
    PosterHandler,
    SlidesHandler,
    SubtitlesImportHandler,
    SubtitlesWhisperHandler,
    ThumbnailHandler,
    TranscodeHandler,
    VisibilityHandler,
]
def init_handlers(collector, pool, config):
    """Set up a job queue and handler instances for each handler class.

    A shared Ldap connection is created from the 'Ldap' config section and
    passed to every handler. Each class in allHandlers gets its own
    multiprocessing queue and is instantiated with its matching config
    section (looked up by class name).

    Returns a tuple of:
      - dict mapping each handler class to its queue
      - flat list of all instantiated handlers
    """
    ldap = Ldap(config['Ldap'])
    # dicts preserve insertion order, so queues are created in
    # allHandlers order, same as a plain loop would.
    queues = {cls: mp.Queue() for cls in allHandlers}
    instances = []
    for cls, queue in queues.items():
        section = get_section(config, cls.__name__)
        # instantiate() may return several handlers per class
        instances.extend(cls.instantiate(queue,
                                         collector,
                                         pool,
                                         ldap,
                                         config['Pipeline']['tempdir'],
                                         section))
    return (queues, instances)