diff --git a/examples/run-interpolate-rife.sh b/examples/run-interpolate-rife.sh
new file mode 100755
index 0000000..e662a24
--- /dev/null
+++ b/examples/run-interpolate-rife.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+set -euxo pipefail
+
+sudo podman run \
+    -it --rm --gpus all -v /dev/dri:/dev/dri \
+    -v "$PWD"/data:/host \
+    ghcr.io/k4yt3x/video2x:5.0.0-beta2-cuda \
+    -i input.mp4 -o output.mp4 \
+    interpolate
diff --git a/examples/run-upscale-waifu2x.sh b/examples/run-upscale-waifu2x.sh
new file mode 100755
index 0000000..abf6ded
--- /dev/null
+++ b/examples/run-upscale-waifu2x.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+set -euxo pipefail
+
+sudo podman run \
+    -it --rm --gpus all -v /dev/dri:/dev/dri \
+    -v "$PWD"/data:/host \
+    ghcr.io/k4yt3x/video2x:5.0.0-beta2-cuda \
+    -i input.mp4 -o output.mp4 \
+    -p5 upscale \
+    -h 720 -d waifu2x -n3
diff --git a/examples/run_interpolate_rife.py b/examples/run_interpolate_rife.py
new file mode 100755
index 0000000..a760f35
--- /dev/null
+++ b/examples/run_interpolate_rife.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+# built-in imports
+import pathlib
+
+# import video2x
+from video2x import Video2X
+
+
+# create video2x object
+video2x = Video2X()
+
+# run frame interpolation
+video2x.interpolate(
+    pathlib.Path("input.mp4"),  # input video path
+    pathlib.Path("output.mp4"),  # output video path
+    3,  # processes: number of parallel processors
+    10,  # threshold: adjacent frames with > n% diff won't be processed (100 == process all)
+    "rife",  # algorithm: the algorithm to use to process the video
+)
diff --git a/examples/run_upscale_waifu2x.py b/examples/run_upscale_waifu2x.py
new file mode 100755
index 0000000..07b9c98
--- /dev/null
+++ b/examples/run_upscale_waifu2x.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+# built-in imports
+import pathlib
+
+# import video2x
+from video2x import Video2X
+
+
+# create video2x object
+video2x = Video2X()
+
+# run upscale
+video2x.upscale(
+    pathlib.Path("input.mp4"),  # input video path
+    pathlib.Path("output.mp4"),  # output video path
+    None,  # width: width of output, None == auto
+    720,  # height: height of output, None == auto
+    3,  # noise: noise level, algorithm-dependent
+    5,  # processes: number of parallel processors
+    0,  # threshold: adjacent frames with < n% diff won't be processed (0 == process all)
+    "waifu2x",  # algorithm: the algorithm to use to process the video
+)