Fixes issues #2 and #4
@@ -3,9 +3,10 @@ import json
 import os
 import re
 import shutil
 import logging
 
 import requests
 from requests.auth import HTTPBasicAuth
+from requests.sessions import Session
 
 from daisy import DaisyHandler
@@ -22,6 +23,7 @@ class Mediasite:
                                   config['mediasite']['password'])
+        self.chunk_size = 10485760  # 10MiB; seems optimal for speed.
+        # Tested 8k, 10MiB and 20MiB.
         self.logger = logging.getLogger('play-daemon')
 
     def pack(self, pres_id, queue_item):
         data = queue_item['data']
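The 10 MiB figure above comes from ad-hoc testing. A minimal sketch of how such a comparison could be reproduced; `TEST_URL` is a placeholder, not part of this repo:

```python
import time

import requests

# Hypothetical URL of a large file; replace with a real Mediasite resource.
TEST_URL = 'https://example.com/big-video.mp4'

for chunk_size in (8192, 10 * 1024 * 1024, 20 * 1024 * 1024):
    start = time.time()
    with requests.get(TEST_URL, stream=True) as r:
        r.raise_for_status()
        for chunk in r.iter_content(chunk_size=chunk_size):
            pass  # discard the data; only the transfer time matters here
    print('{:>10} bytes/chunk: {:.1f}s'.format(chunk_size, time.time() - start))
```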
@@ -46,23 +48,69 @@ class Mediasite:
         if 'id' in data:
             mypackage['notification_id'] = data['id']
 
-        mypackage['thumb'] = self._download(base, data['thumb'], self.auth)
-        for source in data['sources']:
-            mysource = {'video': self._download(base, source['video']),
-                        'poster': self._download(base, source['poster'],
-                                                 self.auth),
-                        'playAudio': source['playAudio']}
-            mypackage['sources'].append(mysource)
-        return mypackage
+        # Create one session for all the downloads
+        with Session() as session:
+            session.auth = self.auth
+            session.stream = True
+            mypackage['thumb'] = self._download(base, data['thumb'], session)
 
-    def _download(self, base, remotefile, auth=None):
+            # Download video sources, and store the information with a local
+            # file path in mypackage for the transcoder
+            for source in data['sources']:
+                mysource = {'video': self._download(base, source['video'], session),
+                            'poster': self._download(base, source['poster'], session),
+                            'playAudio': source['playAudio']}
+                mypackage['sources'].append(mysource)
+
+            # Slides exist, create a package for creating a video from the slides
+            if 'slides' in data:
+                slides_path = os.path.join(base, 'slides')
+                os.mkdir(slides_path)
+                slides = []
+                demux_file = os.path.join(slides_path, 'demux.txt')
+
+                with open(demux_file, 'w') as f:
+                    f.write('ffconcat version 1.0\n')
+                    num_slides = len(data['slides'])
+
+                    # Loop over all slides and download them, calculate the
+                    # durations and build a text file holding all the info
+                    # for the ffmpeg demuxer
+                    for i in range(num_slides):
+
+                        # Download the source file and store the url as a local file path
+                        myslide = {'url': os.path.join(slides_path, self._download(slides_path, data['slides'][i]['url'], session))}
+
+                        # Handle the different edge cases for individual slide duration
+                        if i == num_slides - 1:  # last slide
+                            myslide['duration'] = '{}ms'.format(data['duration'] - int(data['slides'][i]['duration']))
+                        elif i == 0:  # first slide
+                            myslide['duration'] = '{}ms'.format(int(data['slides'][i+1]['duration']))
+                        else:  # all other slides
+                            myslide['duration'] = '{}ms'.format(int(data['slides'][i+1]['duration']) - int(data['slides'][i]['duration']))
+
+                        # Commit to the demux file. Duration is assumed to be
+                        # in seconds unless stated otherwise:
+                        # https://ffmpeg.org/ffmpeg-utils.html#Time-duration
+                        # https://trac.ffmpeg.org/wiki/Slideshow
+                        f.write('file \'{}\'\n'.format(myslide['url']))
+                        f.write('duration {}\n'.format(myslide['duration']))
+                        slides.append(myslide)
+
+                    # Accommodate an ffmpeg quirk that needs the last slide twice
+                    f.write('file \'{}\'\n'.format(slides[-1]['url']))
+
+                # Put all the slides info in mypackage for the transcoder to
+                # rework into an mp4 video
+                mypackage['sources'].append({'demux_file': demux_file,
+                                             'poster': slides[0]['url'],
+                                             'playAudio': False})
+        return mypackage
+
+    # Download the material from Mediasite
+    def _download(self, base, remotefile, session):
         localname = remotefile.split('/')[-1]
         localpath = os.path.join(base, localname)
-        with requests.get(remotefile, stream=True, auth=auth) as r:
-            r.raise_for_status()
-            with open(localpath, 'xb') as f:
-                for chunk in r.iter_content(chunk_size=self.chunk_size):
-                    f.write(chunk)
+        r = session.get(remotefile)
+        r.raise_for_status()
+        with open(localpath, 'xb') as f:
+            for chunk in r.iter_content(chunk_size=self.chunk_size):
+                f.write(chunk)
         return localname
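For reference, the generated demux.txt follows ffmpeg's ffconcat format and would look roughly like this (slide names and durations are illustrative; the real file uses full local paths). Note the last slide repeated at the end, per the quirk mentioned above; the `ffconcat version 1.0` header is what lets the transcoder feed the file straight to ffmpeg's concat demuxer with `-safe 0`:

```
ffconcat version 1.0
file 'slide_1.png'
duration 2000ms
file 'slide_2.png'
duration 3500ms
file 'slide_2.png'
```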
@@ -126,6 +126,8 @@ class Pipeline:
         note = dict(package)
         if 'update_id' in note:
             note['id'] = note.pop('update_id')
+        if 'slides' in note:
+            note.pop('slides')
         del(note['base'])
         del(note['workbase'])
         result = requests.post(self.notify_url,
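A sketch of what this cleanup does to a hypothetical package dict before the POST (keys taken from the hunk above, values made up); the rename presumably matches what the notification endpoint expects:

```python
package = {'update_id': 'abc123', 'thumb': 'thumb.jpg',
           'slides': ['slide_1.png'], 'base': '/data/pres',
           'workbase': '/data/work'}

note = dict(package)                    # shallow copy; package stays intact
if 'update_id' in note:
    note['id'] = note.pop('update_id')  # endpoint appears to expect 'id'
if 'slides' in note:
    note.pop('slides')                  # slides are internal to the pipeline
del(note['base'])                       # local paths should not leak out
del(note['workbase'])

print(note)  # {'thumb': 'thumb.jpg', 'id': 'abc123'}
```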
@@ -33,39 +33,68 @@ class Transcoder:
         self.logger.debug("%s - Pool created", package_id)
         for stream in package['sources']:
             transcodes = {}
-            streampath_rel = stream['video']
-            streampath_abs = os.path.join(base, streampath_rel)
-            self.logger.debug("%s - Processing stream %s",
-                              package_id,
-                              streampath_rel)
-            for maxheight in self.variants:
-                crf, preset = self.variants[maxheight]
-                videojob = pool.apply_async(_Worker.transcode,
-                                            (self.worker,
-                                             package_id,
-                                             streampath_abs,
-                                             workbase,
-                                             maxheight, preset, crf))
-                transcodes[maxheight] = videojob
+
+            # If the stream is a collection of slide images that has to be
+            # converted to a video
+            if 'demux_file' in stream:
+                self.logger.debug("%s - Processing stream %s", package_id, 'slides job in demux.txt')
 
-            thumbpath_rel = stream['poster']
-            if thumbpath_rel:
-                _, ext = os.path.splitext(thumbpath_rel)
-                thumbbase, _ = os.path.splitext(
-                    os.path.basename(streampath_abs))
-                thumbname = '{}{}'.format(thumbbase, ext)
-                shutil.copy2(os.path.join(base, thumbpath_rel),
-                             os.path.join(workbase, thumbname))
-                stream['poster'] = thumbname
-            else:
-                thumbjob = pool.apply_async(_Worker.make_thumb,
-                                            (self.worker,
-                                             streampath_abs,
-                                             workbase))
-                transcodes['poster'] = thumbjob
+                # Create the different variants
+                for maxheight in self.variants:
+                    crf, preset = self.variants[maxheight]
+
+                    # Call the _Worker function asynchronously
+                    transcodes[maxheight] = pool.apply_async(_Worker.make_slides_video,
+                                                             (self.worker,
+                                                              package_id,
+                                                              stream['demux_file'],
+                                                              workbase,
+                                                              maxheight, preset, crf))
+                _, ext = os.path.splitext(stream['poster'])
+                slides_thumb = 'slides_thumb{}'.format(ext)
+                shutil.copy2(stream['poster'], os.path.join(workbase, slides_thumb))
+                stream['poster'] = slides_thumb
+
+                # Remove the reference to the demux file since it is no longer needed
+                stream.pop('demux_file')
+
+            # If the stream is a regular video it needs to be transcoded at
+            # the new resolutions
+            elif 'video' in stream:
+                streampath_rel = stream['video']
+                streampath_abs = os.path.join(base, streampath_rel)
+                self.logger.debug("%s - Processing stream %s", package_id, streampath_rel)
+
+                # Create the different variants
+                for maxheight in self.variants:
+                    crf, preset = self.variants[maxheight]
+
+                    # Call the _Worker function asynchronously
+                    transcodes[maxheight] = pool.apply_async(_Worker.transcode,
+                                                             (self.worker,
+                                                              package_id,
+                                                              streampath_abs,
+                                                              workbase,
+                                                              maxheight, preset, crf))
+
+                thumbpath_rel = stream['poster']
+                if thumbpath_rel:
+                    _, ext = os.path.splitext(thumbpath_rel)
+                    thumbbase, _ = os.path.splitext(os.path.basename(streampath_abs))
+                    thumbname = '{}{}'.format(thumbbase, ext)
+                    shutil.copy2(os.path.join(base, thumbpath_rel),
+                                 os.path.join(workbase, thumbname))
+                    stream['poster'] = thumbname
+                else:
+                    thumbjob = pool.apply_async(_Worker.make_thumb,
+                                                (self.worker,
+                                                 streampath_abs,
+                                                 workbase))
+                    transcodes['poster'] = thumbjob
+
+            # Store the jobs and streams in the pending array
             pending.append({'jobs': transcodes,
                             'data': stream})
+
+        # Close the pool
         pool.close()
         pool.join()
         self.logger.info("%s - Finished transcoding", package_id)
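The dispatch-then-collect structure above is the standard multiprocessing.Pool pattern: queue every variant with apply_async, close and join the pool, then harvest results. A minimal self-contained sketch of that pattern (with a sleep standing in for an ffmpeg run), not the daemon's actual worker code:

```python
from multiprocessing import Pool
import time

def transcode(maxheight):
    time.sleep(0.1)  # stand-in for an ffmpeg invocation
    return 'video-{}.mp4'.format(maxheight)

if __name__ == '__main__':
    pool = Pool(processes=4)
    # Dispatch all variants without blocking, keyed by target height
    jobs = {h: pool.apply_async(transcode, (h,)) for h in (240, 480, 720)}
    pool.close()  # no more jobs will be submitted
    pool.join()   # wait for all workers to finish
    # .get() returns each result, re-raising any exception from the worker
    print({h: job.get() for h, job in jobs.items()})
```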
@@ -76,6 +105,7 @@ class Transcoder:
             jobs = item['jobs']
             streams = {}
             for maxheight in self.variants:
+                # Get the jobs. If not done, wait until they are ready.
                 streams[maxheight] = jobs[maxheight].get()
             stream['video'] = streams
             if 'poster' in jobs:
@@ -84,6 +114,7 @@ class Transcoder:
                 package['thumb'] = stream['poster']
             package['sources'].append(stream)
 
+        # Return the finished streams
         return package
 
 class _Worker:
@@ -148,3 +179,30 @@ class _Worker:
         self.logger.info("%s - Transcoded %s in %s seconds",
                          package_id, outstream, runtime)
         return outstream
+
+    # Make a video from the images used for the slideshow
+    def make_slides_video(self, package_id, demux_file, outdir, maxheight, preset, crf):
+        self.logger.debug("%s - Preparing slides for processing", package_id)
+        outstream = '{}-{}-{}-{}.mp4'.format('slides', maxheight, preset, crf)
+        outpath = os.path.join(outdir, outstream)
+        self.logger.debug("%s - Building slide video from demux.txt", package_id)
+
+        quiet = True
+        if self.logger.getEffectiveLevel() < logging.INFO:
+            quiet = False
+
+        self.logger.debug("%s - Calling ffmpeg", package_id)
+        start = time.time()
+        # https://ffmpeg.org/ffmpeg-formats.html#toc-concat-1
+        # https://github.com/kkroening/ffmpeg-python/tree/master/examples
+        (ffmpeg
+         .input(demux_file, safe=0)
+         .filter('scale', height='min(in_h, {})'.format(maxheight), width=-2)
+         .output(outpath, crf=crf, preset=preset, pix_fmt='yuv420p')
+         .overwrite_output()
+         .run(quiet=quiet))
+
+        runtime = time.time() - start
+        self.logger.info("%s - Transcoded %s in %s seconds", package_id, outstream, runtime)
+
+        return outstream
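To see what the ffmpeg-python chain in make_slides_video actually runs, the library can emit the generated command line without executing it. A sketch assuming a local demux.txt and made-up variant parameters (720p, preset 'medium', crf 23):

```python
import ffmpeg

# Same chain as in make_slides_video, with example values filled in
stream = (ffmpeg
          .input('demux.txt', safe=0)
          .filter('scale', height='min(in_h, 720)', width=-2)
          .output('slides-720-medium-23.mp4', crf=23, preset='medium',
                  pix_fmt='yuv420p')
          .overwrite_output())

# .compile() returns the full argument list instead of running ffmpeg
print(' '.join(stream.compile()))
```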