Option to resize during concat (#5)
* Resize Videos (Issue #4)

* Resize Videos (Issue #4)

* Option to set resize bitrate

* Fix bitrate option (N.B. test before pushing)
wlerin authored Jan 2, 2017
1 parent 52104f4 commit 9f233b4
Showing 1 changed file with 80 additions and 18 deletions.
98 changes: 80 additions & 18 deletions concat.py
@@ -42,6 +42,8 @@
# 1280x720 (a single kimi dare episode)
GOOD_HEIGHTS = (198, 360, 396, 720)

DEF_BITRATE = '300k'

# old version
"""
def create_concat_files(target_dir, target_ext):
@@ -115,6 +117,65 @@ def create_concat_files(target_dir, target_ext):
"""

def probe_file(filename, streams):
    try:
        results = check_output([
            "ffprobe",
            '-loglevel', '16',
            '-show_entries', 'stream={}'.format(','.join(streams)),
            '-select_streams', 'v',
            '-i', filename,
            '-of', 'json'
            ],
            universal_newlines=True
        )
    except CalledProcessError:
        return None
    else:
        return results
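probe_file hands back ffprobe's JSON output as a string, or None if ffprobe exits with an error, so callers decode it with json.loads and read the first selected video stream. A minimal sketch of that pattern, with an illustrative filename and values that are not from the repository:

results = probe_file('example.mp4', streams=('duration', 'height'))
if results:
    stream = json.loads(results)['streams'][0]  # first video stream reported by ffprobe
    duration = float(stream['duration'])        # duration arrives as a string, e.g. '1800.000000'
    height = int(stream['height'])              # pixels, e.g. 198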

def resize_videos(target_dir, target_ext, copytb=1, target_bitrate='300k'):

    oldcwd = os.getcwd()
    os.chdir(target_dir)
    files = sorted(glob.glob('*.{}'.format(target_ext)))


    to_resize = []
    for file in files:
        results = probe_file(file, streams=('duration', 'height'))
        if results:
            try:
                stream = json.loads(results)['streams'][0]
            except IndexError:
                continue
            if float(stream['duration']) >= 0.001 and int(stream['height']) == 198:
                to_resize.append(file)

    if len(to_resize) > 0:
        os.makedirs('resized', exist_ok=True)
    else:
        return
    codecs = {'mp4': 'libx264', 'webm': 'libvpx'}

    video_codec = codecs[target_ext]

    for file in to_resize:
        low_res_file = 'resized/' + file.replace('.' + target_ext, '_198p.' + target_ext)
        os.replace(file, low_res_file)
        run(['ffmpeg',
             '-copytb', str(copytb),
             '-i', low_res_file,
             '-c:v', video_codec,
             # '-maxrate', str(target_bitrate),
             # '-bufsize', BUFSIZE,
             # '-crf', '18',
             '-b:v', target_bitrate,
             '-vf', 'scale=-1:360',
             '-c:a', 'copy', file])

    os.chdir(oldcwd)
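Each file selected for resizing is first moved into the resized/ subdirectory with a _198p suffix, then re-encoded back under its original name; the scale=-1:360 filter asks ffmpeg for a height of 360 with the width chosen automatically to keep the aspect ratio. A direct call might look like this sketch (the directory and bitrate are illustrative):

resize_videos('/path/to/recordings', 'mp4', copytb=1, target_bitrate='500k')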


def generate_concat_files(target_dir, target_ext, max_gap):
    oldcwd = os.getcwd()
@@ -137,20 +198,10 @@ def get_start_hhmm(seconds):

    member_dict = {}
    for file in files:
-        try:
-            results = check_output([
-                "ffprobe",
-                '-loglevel', '16',
-                '-show_entries', 'stream=duration,height',
-                '-select_streams', 'v',
-                '-i', file,
-                '-of', 'json'
-                ],
-                universal_newlines=True
-            )
-        except CalledProcessError:
-            continue
        results = probe_file(file, streams=('duration', 'height'))
        if not results:
            continue

        member_name = file.rsplit(' ', 1)[0]
        if member_name not in member_dict:
            member_dict[member_name] = []
@@ -277,25 +328,36 @@ def merge_videos(target_dir, output_dir, copytb=1):
incompatible resolutions, ignores very broken videos.",
epilog="When merging, watch the output for \"Non-monotonous DTS in output stream\" -- A few of these are \
harmless but a wall of them means that video is probably corrupted.")
parser.add_argument("--resize", action='store_true', help='resizes 198p videos in TARGET_DIR to 360p, '
'saves the old videos in a new "resized" subdirectory. Only supports h264 (MP4) and vpx (WEBM)')
parser.add_argument("--generate", action='store_true', help='generates concat files in TARGET_DIR, runs by default')
parser.add_argument("--merge", action='store_true',
help='merges videos in TARGET_DIR according to existing concat files')
parser.add_argument("--both", action='store_true', help='both generates concat files and merges videos')
parser.add_argument("--aggressive", action='store_true', help='resizes, generates, and merges')
parser.add_argument("target_dir", nargs='?', default='.',
help='Optional. defaults to the current working directory.', metavar='TARGET_DIR')
parser.add_argument("--max-gap", type=float, default=300.0,
help='maximum gap between merged videos, in seconds. anything larger is treated as a separate \
broadcast. default = 300.0')
parser.add_argument("-e", dest='ext', default='mp4', help='extension to merge, defaults to mp4')
parser.add_argument("--copytb", type=int, choices=[-1, 0, 1], default=1,
help='it may be useful to try setting this to 0 or -1 if a video has timing issues')
help='it may be useful to try setting this to 0 or -1 if a video has timing issues.'
'Defaults to %(default)s')
parser.add_argument("--output-dir", "-o", dest='output_dir', type=str, default='.',
help='Optional, defaults to target directory. Note that relative paths will be relative to \
the target directory, not the current working directory', metavar='OUTPUT_DIR')
parser.add_argument("--bitrate", "-b", type=str, default=DEF_BITRATE,
help='Bitrate for resizing. Defaults to %(default)s')
args = parser.parse_args()

-if not args.merge:
-    generate_concat_files(args.target_dir, args.ext, args.max_gap)
-if args.merge or args.both:
-    merge_videos(args.target_dir, args.output_dir, args.copytb)
if args.resize or args.aggressive:
    resize_videos(target_dir=args.target_dir, target_ext=args.ext,
                  copytb=args.copytb, target_bitrate=args.bitrate)
if args.generate or args.both or args.aggressive:
    generate_concat_files(target_dir=args.target_dir, target_ext=args.ext,
                          max_gap=args.max_gap)
if args.merge or args.both or args.aggressive:
    merge_videos(target_dir=args.target_dir, output_dir=args.output_dir,
                 copytb=args.copytb)
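With the new flags wired into this dispatch, a run that resizes, regenerates concat files, and merges in one pass would be invoked as something like: python concat.py --aggressive --bitrate 500k /path/to/videos (the directory and bitrate are illustrative). Passing --resize alone performs only the re-encoding step.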
