vid_convert/vid_convert.py


#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import subprocess
import logging


def get_infos(file):
    '''
    This function extracts the film's information with ffprobe and stores it
    in a dictionary for later use.
    -> http://ffmpeg.org/ffprobe.html
    '''
    v_infos = {
        'height': None,
        'width': None,
        'color_primaries': None,
        'color_space': None,
        'color_transfer': None,
        'display_aspect_ratio': None,
        'pix_fmt': None
    }
    a_infos = {}
    v_infos_cmd = f"ffprobe -v quiet -print_format json -show_format -show_streams -select_streams v {file}"
    v_infos_raw = subprocess.getoutput(v_infos_cmd)
    a_infos_cmd = f"ffprobe -v quiet -print_format json -show_format -show_streams -select_streams a {file}"
    a_infos_raw = subprocess.getoutput(a_infos_cmd)
    full_v_infos = json.loads(v_infos_raw)
    full_a_infos = json.loads(a_infos_raw)
    v_stream = full_v_infos['streams'][0]
    for prop in v_infos.keys():
        try:
            v_infos.update({prop: v_stream[prop]})
        except KeyError:
            pass
    a_infos = []
    for a_stream in full_a_infos['streams']:
        a_stream_infos = {
            'index': a_stream['index'],
            'channels': a_stream['channels'],
            'channel_layout': a_stream['channel_layout'],
            'language': a_stream['tags']['language'],
            'title': a_stream['tags']['title']}
        a_infos.append(a_stream_infos)
    duration = subprocess.getoutput(f"ffprobe -v quiet -print_format json -show_format {file}")
    duration = json.loads(duration)
    duration = float(duration['format']['duration'])
    hdr10_v_cmd = f'ffmpeg -loglevel panic -i {file} -c:v copy -vbsf hevc_mp4toannexb -f hevc - | ./hdr10plus_parser -o metadata.json --verify -'
    hdr10_v_raw = subprocess.getoutput(hdr10_v_cmd)
    logging.debug(hdr10_v_raw)
    if 'metadata detected' in hdr10_v_raw:
        hdr10_cmd = f'ffmpeg -loglevel panic -i {file} -c:v copy -vbsf hevc_mp4toannexb -f hevc - | ./hdr10plus_parser -o /tmp/{file}_hdr10_metadata.json -'
        # run the extraction so the JSON file referenced below actually exists
        subprocess.getoutput(hdr10_cmd)
        v_infos.update({'hdr10': True, 'hdr10_metadata': f'/tmp/{file}_hdr10_metadata.json'})
    infos = {'duration': duration, 'video': v_infos, 'audio': a_infos}
    logging.debug("Film information:\n" + json.dumps(infos, indent=True))
    return infos
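
# For reference, the dictionary returned by get_infos() has roughly the shape
# below (values are purely illustrative, not taken from a real file):
# {'duration': 5400.0,
#  'video': {'height': 1080, 'width': 1920, 'color_primaries': 'bt2020',
#            'color_space': 'bt2020nc', 'color_transfer': 'smpte2084',
#            'display_aspect_ratio': '16:9', 'pix_fmt': 'yuv420p10le'},
#  'audio': [{'index': 1, 'channels': 6, 'channel_layout': '5.1',
#             'language': 'fre', 'title': 'VF'}]}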


def is_interlaced(file, infos):
    '''
    This function detects whether the video is interlaced.
    -> https://fr.wikipedia.org/wiki/Entrelacement_(vid%C3%A9o)
    '''
    duration_tier = int(infos['duration'] / 3)
    command = f"ffmpeg -loglevel info -ss {duration_tier} -t {duration_tier} -i {file} -an -filter:v idet -f null -y /dev/null"
    result = subprocess.getoutput(command)
    for line in result.splitlines():
        if "Multi" in line:
            TFF = int(line.split('TFF:')[1].split()[0])
            BFF = int(line.split('BFF:')[1].split()[0])
            Progressive = int(line.split('Progressive:')[1].split()[0])
            try:
                pct = ((TFF + BFF) / (TFF + BFF + Progressive)) * 100
                pct = round(pct)
            except ZeroDivisionError:
                pct = 100
            if pct > 10:
                logging.debug(f"Video is {pct}% interlaced")
                return True
            else:
                logging.debug("Video is not interlaced")
                return False
    # no idet summary line found in the ffmpeg output: assume progressive
    return False
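
# Assumption about the intended follow-up: when is_interlaced() returns True, the
# eventual encode command would normally add a deinterlacing filter such as
# ffmpeg's yadif (e.g. "-vf yadif"); that step is not part of this function.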


def cropping(file, infos):
    '''
    This function detects the useless black bars around the picture.
    '''
    duration_tier = int(infos['duration'] / 3)
    command = f"ffmpeg -loglevel info -i {file} -ss {duration_tier} -t {duration_tier} -an -f null -vf cropdetect -y /dev/null"
    cropsize = subprocess.getoutput(command).splitlines()[-3].split()[-1]
    logging.debug(f"Crop parameter: {cropsize}")
    return cropsize
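
# The value returned above is one of cropdetect's last suggestions: a ready-to-use
# crop filter string of the form "crop=width:height:x:y" (for example
# "crop=1920:800:0:140"), which can be appended to "-vf" in a later encode.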


def volume_audio(file, infos):
    '''
    This function computes the gain needed to bring each audio track back to 0dB.
    '''
    volumes = {}
    for piste_audio in infos['audio']:
        piste = piste_audio['index']
        command = f"ffmpeg -loglevel info -i {file} -map 0:{piste} -af volumedetect -f null -y /dev/null"
        volumedetect = subprocess.getoutput(command)
        for line in volumedetect.splitlines():
            if "max_volume" in line:
                # volumedetect reports e.g. "max_volume: -5.0 dB"; invert the sign
                # to get the gain that brings the peak back to 0dB
                volume = line.split()[-2]
                volume = f"{str(-float(volume))}dB"
                logging.debug(f"Volume adjustment for track {piste}: {volume}")
                volumes.update({piste: volume})
    return volumes
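
# The returned mapping is keyed by audio stream index and holds dB-suffixed gain
# strings ready for ffmpeg's volume filter, e.g. {1: '5.0dB', 2: '3.1dB'}
# (illustrative values).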


def stabilization(file):
    '''
    This function stabilizes the picture,
    for example when the video was shot with a smartphone.
    '''
    cmd_stab = f'ffmpeg -i {file} -vf vidstabdetect=shakiness=10:accuracy=10:result="/tmp/vidstab.trf" -f null - '
    subprocess.getoutput(cmd_stab)
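
# Note: vidstabdetect is only the analysis pass; it just writes /tmp/vidstab.trf.
# The correction itself would be applied during a later encode with ffmpeg's
# vidstabtransform filter, typically something like
# "-vf vidstabtransform=input=/tmp/vidstab.trf:smoothing=30" (assumed here, since
# that pass is not part of this function).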


def convert_audio(file, track, volume_adj, channels, channel_layout, language, title):
    bitrate = 64 * channels  # 64 kbps of Opus per channel
    codec = 'libopus'
    metadatas = f'-metadata language="{language}" -metadata title="{title}"'
    # volume_adj already carries its "dB" suffix (see volume_audio)
    command = (f'ffmpeg -i {file} -map 0:{track} -vn -sn -c:a {codec} -b:a {bitrate}k '
               f'-mapping_family 1 -filter:a volume={volume_adj},aformat=channel_layouts={channel_layout} '
               f'{metadatas} -y {file}_audio_{track}.mka')
    logging.debug(command)
    subprocess.getoutput(command)
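
# Example call with hypothetical values: convert_audio('film.mkv', 1, '3.0dB', 6,
# '5.1', 'fre', 'VF') would write film.mkv_audio_1.mka, a 5.1 Opus track at
# 384 kbps (64 kbps per channel); "-mapping_family 1" enables Opus multichannel.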


if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("f_input")
    parser.add_argument("-d", "--debug", dest="debug", action="store_true")
    parser.add_argument("-s", "--stabilise", dest="stab", action="store_true")
    args = parser.parse_args()
    if args.debug:
        logging.basicConfig(format='[%(asctime)s]\n%(message)s', level=logging.DEBUG, datefmt='%d/%m/%Y %H:%M:%S')
    else:
        logging.basicConfig(format='[%(asctime)s]\n%(message)s', level=logging.INFO, datefmt='%d/%m/%Y %H:%M:%S')
    infos = get_infos(args.f_input)
    interlaced = is_interlaced(args.f_input, infos)
    cropsize = cropping(args.f_input, infos)
    volumes = volume_audio(args.f_input, infos)
    if args.stab:
        stabilization(args.f_input)
    for piste_audio in infos['audio']:
        convert_audio(args.f_input, piste_audio['index'], volumes[piste_audio['index']],
                      piste_audio['channels'], piste_audio['channel_layout'],
                      piste_audio['language'], piste_audio['title'])