Mirror of https://github.com/k4yt3x/video2x.git (synced 2024-12-26 22:09:09 +00:00)
feat(logger): add logger manager to provide unified logging (#1267)

Signed-off-by: k4yt3x <i@k4yt3x.com>

Commit: 6676cd2439 (parent: b8eb6de59b)
@@ -12,19 +12,9 @@ extern "C" {
 #include "avutils.h"
 #include "decoder.h"
 #include "encoder.h"
-#include "logutils.h"
+#include "libvideo2x_export.h"
 #include "processor.h"
 
-#ifdef _WIN32
-#ifdef LIBVIDEO2X_EXPORTS
-#define LIBVIDEO2X_API __declspec(dllexport)
-#else
-#define LIBVIDEO2X_API __declspec(dllimport)
-#endif
-#else
-#define LIBVIDEO2X_API
-#endif
-
 namespace video2x {
 
 enum class VideoProcessorState {
@@ -43,7 +33,6 @@ class LIBVIDEO2X_API VideoProcessor {
     const encoder::EncoderConfig enc_cfg,
     const uint32_t vk_device_idx = 0,
     const AVHWDeviceType hw_device_type = AV_HWDEVICE_TYPE_NONE,
-    const logutils::Video2xLogLevel = logutils::Video2xLogLevel::Info,
     const bool benchmark = false
 );
 
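The takeaway from this hunk is that VideoProcessor no longer accepts a log level; callers configure logging through the new LoggerManager instead. A hypothetical before/after call site (the config objects and argument values are illustrative, not part of the diff):

    // Before: log level passed into the processor constructor
    video2x::VideoProcessor proc(proc_cfg, enc_cfg, 0, AV_HWDEVICE_TYPE_NONE,
                                 video2x::logutils::Video2xLogLevel::Info);

    // After: logging configured once, globally, via the logger manager
    video2x::logger_manager::LoggerManager::instance().set_log_level("info");
    video2x::VideoProcessor proc(proc_cfg, enc_cfg, 0, AV_HWDEVICE_TYPE_NONE);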
include/libvideo2x/libvideo2x_export.h (new file, 11 lines)

@@ -0,0 +1,11 @@
#pragma once

#ifdef _WIN32
#ifdef LIBVIDEO2X_EXPORTS
#define LIBVIDEO2X_API __declspec(dllexport)
#else
#define LIBVIDEO2X_API __declspec(dllimport)
#endif
#else
#define LIBVIDEO2X_API
#endif
include/libvideo2x/logger_manager.h (new file, 48 lines)

@@ -0,0 +1,48 @@
#pragma once

#include <memory>
#include <string>
#include <vector>

#include <spdlog/logger.h>
#include <spdlog/sinks/sink.h>

#include "libvideo2x_export.h"

namespace video2x {
namespace logger_manager {

class LIBVIDEO2X_API LoggerManager {
   public:
    LoggerManager(const LoggerManager &) = delete;
    LoggerManager &operator=(const LoggerManager &) = delete;

    static LoggerManager &instance();

    std::shared_ptr<spdlog::logger> logger();

    void reconfigure_logger(
        const std::string &logger_name,
        const std::vector<spdlog::sink_ptr> &sinks,
        const std::string &pattern = "%+"
    );

    bool set_log_level(const std::string &level_str);

    void hook_ffmpeg_logging();
    void unhook_ffmpeg_logging();

   private:
    LoggerManager();

    std::shared_ptr<spdlog::logger> logger_;
};

} // namespace logger_manager

// Convenience function to get the logger instance
inline std::shared_ptr<spdlog::logger> logger() {
    return logger_manager::LoggerManager::instance().logger();
}

} // namespace video2x
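To make the intent of this interface concrete, here is a minimal sketch of how a consumer of libvideo2x might drive it; the level string and log message are illustrative, only the LoggerManager and logger() calls come from the header above.

    #include <libvideo2x/logger_manager.h>

    int main() {
        auto &mgr = video2x::logger_manager::LoggerManager::instance();

        // One shared spdlog logger for libvideo2x and, once hooked, for FFmpeg as well
        mgr.set_log_level("debug");
        mgr.hook_ffmpeg_logging();

        // The convenience accessor returns the singleton's logger
        video2x::logger()->info("logger manager configured");

        mgr.unhook_ffmpeg_logging();
        return 0;
    }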
@@ -1,28 +0,0 @@
-#pragma once
-
-#include <optional>
-
-#include "fsutils.h"
-
-namespace video2x {
-namespace logutils {
-
-enum class Video2xLogLevel {
-    Unknown,
-    Trace,
-    Debug,
-    Info,
-    Warning,
-    Error,
-    Critical,
-    Off
-};
-
-void set_log_level(Video2xLogLevel log_level);
-
-std::optional<Video2xLogLevel> find_log_level_by_name(
-    const fsutils::StringType &log_level_name
-);
-
-} // namespace logutils
-} // namespace video2x
@ -10,6 +10,7 @@ extern "C" {
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include "conversions.h"
|
||||
#include "logger_manager.h"
|
||||
|
||||
namespace video2x {
|
||||
namespace avutils {
|
||||
@ -78,7 +79,7 @@ AVPixelFormat get_encoder_default_pix_fmt(const AVCodec *encoder, AVPixelFormat
|
||||
);
|
||||
if (ret < 0) {
|
||||
av_strerror(ret, errbuf, sizeof(errbuf));
|
||||
spdlog::error("Failed to get supported pixel formats: {}", errbuf);
|
||||
logger()->error("Failed to get supported pixel formats: {}", errbuf);
|
||||
return AV_PIX_FMT_NONE;
|
||||
}
|
||||
|
||||
@ -119,7 +120,7 @@ AVPixelFormat get_encoder_default_pix_fmt(const AVCodec *encoder, AVPixelFormat
|
||||
}
|
||||
}
|
||||
if (best_pix_fmt == AV_PIX_FMT_NONE) {
|
||||
spdlog::error("No suitable pixel format found for encoder");
|
||||
logger()->error("No suitable pixel format found for encoder");
|
||||
}
|
||||
|
||||
if (target_pix_fmt != AV_PIX_FMT_NONE && best_pix_fmt != target_pix_fmt) {
|
||||
@ -136,12 +137,12 @@ AVPixelFormat get_encoder_default_pix_fmt(const AVCodec *encoder, AVPixelFormat
|
||||
|
||||
float get_frame_diff(AVFrame *frame1, AVFrame *frame2) {
|
||||
if (!frame1 || !frame2) {
|
||||
spdlog::error("Invalid frame(s) provided for comparison");
|
||||
logger()->error("Invalid frame(s) provided for comparison");
|
||||
return -1.0f;
|
||||
}
|
||||
|
||||
if (frame1->width != frame2->width || frame1->height != frame2->height) {
|
||||
spdlog::error("Frame dimensions do not match");
|
||||
logger()->error("Frame dimensions do not match");
|
||||
return -1.0f;
|
||||
}
|
||||
|
||||
@ -154,7 +155,7 @@ float get_frame_diff(AVFrame *frame1, AVFrame *frame2) {
|
||||
AVFrame *rgb_frame2 = conversions::convert_avframe_pix_fmt(frame2, target_pix_fmt);
|
||||
|
||||
if (!rgb_frame1 || !rgb_frame2) {
|
||||
spdlog::error("Failed to convert frames to target pixel format");
|
||||
logger()->error("Failed to convert frames to target pixel format");
|
||||
if (rgb_frame1) {
|
||||
av_frame_free(&rgb_frame1);
|
||||
}
|
||||
|
@ -5,6 +5,8 @@
|
||||
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include "logger_manager.h"
|
||||
|
||||
namespace video2x {
|
||||
namespace conversions {
|
||||
|
||||
@ -12,7 +14,7 @@ namespace conversions {
|
||||
AVFrame *convert_avframe_pix_fmt(AVFrame *src_frame, AVPixelFormat pix_fmt) {
|
||||
AVFrame *dst_frame = av_frame_alloc();
|
||||
if (dst_frame == nullptr) {
|
||||
spdlog::error("Failed to allocate destination AVFrame.");
|
||||
logger()->error("Failed to allocate destination AVFrame.");
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
@ -22,7 +24,7 @@ AVFrame *convert_avframe_pix_fmt(AVFrame *src_frame, AVPixelFormat pix_fmt) {
|
||||
|
||||
// Allocate memory for the converted frame
|
||||
if (av_frame_get_buffer(dst_frame, 32) < 0) {
|
||||
spdlog::error("Failed to allocate memory for AVFrame.");
|
||||
logger()->error("Failed to allocate memory for AVFrame.");
|
||||
av_frame_free(&dst_frame);
|
||||
return nullptr;
|
||||
}
|
||||
@ -42,7 +44,7 @@ AVFrame *convert_avframe_pix_fmt(AVFrame *src_frame, AVPixelFormat pix_fmt) {
|
||||
);
|
||||
|
||||
if (sws_ctx == nullptr) {
|
||||
spdlog::error("Failed to initialize swscale context.");
|
||||
logger()->error("Failed to initialize swscale context.");
|
||||
av_frame_free(&dst_frame);
|
||||
return nullptr;
|
||||
}
|
||||
@ -72,7 +74,7 @@ ncnn::Mat avframe_to_ncnn_mat(AVFrame *frame) {
|
||||
if (frame->format != AV_PIX_FMT_BGR24) {
|
||||
converted_frame = convert_avframe_pix_fmt(frame, AV_PIX_FMT_BGR24);
|
||||
if (!converted_frame) {
|
||||
spdlog::error("Failed to convert AVFrame to BGR24.");
|
||||
logger()->error("Failed to convert AVFrame to BGR24.");
|
||||
return ncnn::Mat();
|
||||
}
|
||||
} else {
|
||||
@ -110,7 +112,7 @@ AVFrame *ncnn_mat_to_avframe(const ncnn::Mat &mat, AVPixelFormat pix_fmt) {
|
||||
// Step 1: Allocate a destination AVFrame for the specified pixel format
|
||||
AVFrame *dst_frame = av_frame_alloc();
|
||||
if (!dst_frame) {
|
||||
spdlog::error("Failed to allocate destination AVFrame.");
|
||||
logger()->error("Failed to allocate destination AVFrame.");
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
@ -120,7 +122,7 @@ AVFrame *ncnn_mat_to_avframe(const ncnn::Mat &mat, AVPixelFormat pix_fmt) {
|
||||
|
||||
// Allocate memory for the frame buffer
|
||||
if (av_frame_get_buffer(dst_frame, 32) < 0) {
|
||||
spdlog::error("Failed to allocate memory for destination AVFrame.");
|
||||
logger()->error("Failed to allocate memory for destination AVFrame.");
|
||||
av_frame_free(&dst_frame);
|
||||
return nullptr;
|
||||
}
|
||||
@ -128,7 +130,7 @@ AVFrame *ncnn_mat_to_avframe(const ncnn::Mat &mat, AVPixelFormat pix_fmt) {
|
||||
// Step 2: Convert ncnn::Mat to BGR AVFrame
|
||||
AVFrame *bgr_frame = av_frame_alloc();
|
||||
if (!bgr_frame) {
|
||||
spdlog::error("Failed to allocate intermediate BGR AVFrame.");
|
||||
logger()->error("Failed to allocate intermediate BGR AVFrame.");
|
||||
av_frame_free(&dst_frame);
|
||||
return nullptr;
|
||||
}
|
||||
@ -139,7 +141,7 @@ AVFrame *ncnn_mat_to_avframe(const ncnn::Mat &mat, AVPixelFormat pix_fmt) {
|
||||
|
||||
// Allocate memory for the intermediate BGR frame
|
||||
if (av_frame_get_buffer(bgr_frame, 32) < 0) {
|
||||
spdlog::error("Failed to allocate memory for BGR AVFrame.");
|
||||
logger()->error("Failed to allocate memory for BGR AVFrame.");
|
||||
av_frame_free(&dst_frame);
|
||||
av_frame_free(&bgr_frame);
|
||||
return nullptr;
|
||||
@ -169,7 +171,7 @@ AVFrame *ncnn_mat_to_avframe(const ncnn::Mat &mat, AVPixelFormat pix_fmt) {
|
||||
);
|
||||
|
||||
if (sws_ctx == nullptr) {
|
||||
spdlog::error("Failed to initialize swscale context.");
|
||||
logger()->error("Failed to initialize swscale context.");
|
||||
av_frame_free(&bgr_frame);
|
||||
av_frame_free(&dst_frame);
|
||||
return nullptr;
|
||||
@ -191,7 +193,7 @@ AVFrame *ncnn_mat_to_avframe(const ncnn::Mat &mat, AVPixelFormat pix_fmt) {
|
||||
av_frame_free(&bgr_frame);
|
||||
|
||||
if (ret != dst_frame->height) {
|
||||
spdlog::error("Failed to convert BGR AVFrame to destination pixel format.");
|
||||
logger()->error("Failed to convert BGR AVFrame to destination pixel format.");
|
||||
av_frame_free(&dst_frame);
|
||||
return nullptr;
|
||||
}
|
||||
|
@ -2,6 +2,8 @@
|
||||
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include "logger_manager.h"
|
||||
|
||||
namespace video2x {
|
||||
namespace decoder {
|
||||
|
||||
@ -26,7 +28,7 @@ AVPixelFormat Decoder::get_hw_format(AVCodecContext *_, const AVPixelFormat *pix
|
||||
return *p;
|
||||
}
|
||||
}
|
||||
spdlog::error("Failed to get HW surface format.");
|
||||
logger()->error("Failed to get HW surface format.");
|
||||
return AV_PIX_FMT_NONE;
|
||||
}
|
||||
|
||||
@ -39,20 +41,20 @@ int Decoder::init(
|
||||
|
||||
// Open the input file
|
||||
if ((ret = avformat_open_input(&fmt_ctx_, in_fpath.u8string().c_str(), nullptr, nullptr)) < 0) {
|
||||
spdlog::error("Could not open input file '{}'", in_fpath.u8string());
|
||||
logger()->error("Could not open input file '{}'", in_fpath.u8string());
|
||||
return ret;
|
||||
}
|
||||
|
||||
// Retrieve stream information
|
||||
if ((ret = avformat_find_stream_info(fmt_ctx_, nullptr)) < 0) {
|
||||
spdlog::error("Failed to retrieve input stream information");
|
||||
logger()->error("Failed to retrieve input stream information");
|
||||
return ret;
|
||||
}
|
||||
|
||||
// Find the first video stream
|
||||
ret = av_find_best_stream(fmt_ctx_, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Could not find video stream in the input file");
|
||||
logger()->error("Could not find video stream in the input file");
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -62,7 +64,7 @@ int Decoder::init(
|
||||
// Find the decoder for the video stream
|
||||
const AVCodec *decoder = avcodec_find_decoder(video_stream->codecpar->codec_id);
|
||||
if (!decoder) {
|
||||
spdlog::error(
|
||||
logger()->error(
|
||||
"Failed to find decoder for codec ID {}",
|
||||
static_cast<int>(video_stream->codecpar->codec_id)
|
||||
);
|
||||
@ -72,13 +74,13 @@ int Decoder::init(
|
||||
// Allocate the decoder context
|
||||
dec_ctx_ = avcodec_alloc_context3(decoder);
|
||||
if (!dec_ctx_) {
|
||||
spdlog::error("Failed to allocate the decoder context");
|
||||
logger()->error("Failed to allocate the decoder context");
|
||||
return AVERROR(ENOMEM);
|
||||
}
|
||||
|
||||
// Copy codec parameters from input stream to decoder context
|
||||
if ((ret = avcodec_parameters_to_context(dec_ctx_, video_stream->codecpar)) < 0) {
|
||||
spdlog::error("Failed to copy decoder parameters to input decoder context");
|
||||
logger()->error("Failed to copy decoder parameters to input decoder context");
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -96,7 +98,7 @@ int Decoder::init(
|
||||
for (int i = 0;; i++) {
|
||||
const AVCodecHWConfig *config = avcodec_get_hw_config(decoder, i);
|
||||
if (config == nullptr) {
|
||||
spdlog::error(
|
||||
logger()->error(
|
||||
"Decoder {} does not support device type {}.",
|
||||
decoder->name,
|
||||
av_hwdevice_get_type_name(hw_type)
|
||||
@ -113,7 +115,7 @@ int Decoder::init(
|
||||
|
||||
// Open the decoder
|
||||
if ((ret = avcodec_open2(dec_ctx_, decoder, nullptr)) < 0) {
|
||||
spdlog::error("Failed to open decoder for stream #{}", stream_index);
|
||||
logger()->error("Failed to open decoder for stream #{}", stream_index);
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
@ -6,6 +6,8 @@ extern "C" {
|
||||
#include <libavutil/opt.h>
|
||||
}
|
||||
|
||||
#include "logger_manager.h"
|
||||
|
||||
#include "avutils.h"
|
||||
#include "conversions.h"
|
||||
|
||||
@ -46,14 +48,14 @@ int Encoder::init(
|
||||
// Allocate the output format context
|
||||
avformat_alloc_output_context2(&ofmt_ctx_, nullptr, nullptr, out_fpath.u8string().c_str());
|
||||
if (!ofmt_ctx_) {
|
||||
spdlog::error("Could not create output context");
|
||||
logger()->error("Could not create output context");
|
||||
return AVERROR_UNKNOWN;
|
||||
}
|
||||
|
||||
// Find the encoder
|
||||
const AVCodec *encoder = avcodec_find_encoder(enc_cfg.codec);
|
||||
if (!encoder) {
|
||||
spdlog::error(
|
||||
logger()->error(
|
||||
"Required video encoder not found for codec {}", avcodec_get_name(enc_cfg.codec)
|
||||
);
|
||||
return AVERROR_ENCODER_NOT_FOUND;
|
||||
@ -62,7 +64,7 @@ int Encoder::init(
|
||||
// Create a new video stream in the output file
|
||||
AVStream *out_vstream = avformat_new_stream(ofmt_ctx_, nullptr);
|
||||
if (!out_vstream) {
|
||||
spdlog::error("Failed to allocate the output video stream");
|
||||
logger()->error("Failed to allocate the output video stream");
|
||||
return AVERROR_UNKNOWN;
|
||||
}
|
||||
out_vstream_idx_ = out_vstream->index;
|
||||
@ -70,7 +72,7 @@ int Encoder::init(
|
||||
// Allocate the encoder context
|
||||
enc_ctx_ = avcodec_alloc_context3(encoder);
|
||||
if (!enc_ctx_) {
|
||||
spdlog::error("Failed to allocate the encoder context");
|
||||
logger()->error("Failed to allocate the encoder context");
|
||||
return AVERROR(ENOMEM);
|
||||
}
|
||||
|
||||
@ -121,7 +123,7 @@ int Encoder::init(
|
||||
// Automatically select the pixel format
|
||||
enc_ctx_->pix_fmt = avutils::get_encoder_default_pix_fmt(encoder, dec_ctx->pix_fmt);
|
||||
if (enc_ctx_->pix_fmt == AV_PIX_FMT_NONE) {
|
||||
spdlog::error("Could not get the default pixel format for the encoder");
|
||||
logger()->error("Could not get the default pixel format for the encoder");
|
||||
return AVERROR(EINVAL);
|
||||
}
|
||||
spdlog::debug("Auto-selected pixel format: {}", av_get_pix_fmt_name(enc_ctx_->pix_fmt));
|
||||
@ -165,14 +167,14 @@ int Encoder::init(
|
||||
|
||||
// Open the encoder
|
||||
if ((ret = avcodec_open2(enc_ctx_, encoder, nullptr)) < 0) {
|
||||
spdlog::error("Cannot open video encoder");
|
||||
logger()->error("Cannot open video encoder");
|
||||
return ret;
|
||||
}
|
||||
|
||||
// Copy encoder parameters to output video stream
|
||||
ret = avcodec_parameters_from_context(out_vstream->codecpar, enc_ctx_);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Failed to copy encoder parameters to output video stream");
|
||||
logger()->error("Failed to copy encoder parameters to output video stream");
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -186,7 +188,7 @@ int Encoder::init(
|
||||
stream_map_ =
|
||||
reinterpret_cast<int *>(av_malloc_array(ifmt_ctx->nb_streams, sizeof(*stream_map_)));
|
||||
if (!stream_map_) {
|
||||
spdlog::error("Could not allocate stream mapping");
|
||||
logger()->error("Could not allocate stream mapping");
|
||||
return AVERROR(ENOMEM);
|
||||
}
|
||||
|
||||
@ -212,14 +214,14 @@ int Encoder::init(
|
||||
// Create corresponding output stream for audio and subtitle streams
|
||||
AVStream *out_stream = avformat_new_stream(ofmt_ctx_, nullptr);
|
||||
if (!out_stream) {
|
||||
spdlog::error("Failed allocating output stream");
|
||||
logger()->error("Failed allocating output stream");
|
||||
return AVERROR_UNKNOWN;
|
||||
}
|
||||
|
||||
// Copy codec parameters from input to output
|
||||
ret = avcodec_parameters_copy(out_stream->codecpar, in_codecpar);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Failed to copy codec parameters");
|
||||
logger()->error("Failed to copy codec parameters");
|
||||
return ret;
|
||||
}
|
||||
out_stream->codecpar->codec_tag = 0;
|
||||
@ -237,7 +239,7 @@ int Encoder::init(
|
||||
if (!(ofmt_ctx_->oformat->flags & AVFMT_NOFILE)) {
|
||||
ret = avio_open(&ofmt_ctx_->pb, out_fpath.u8string().c_str(), AVIO_FLAG_WRITE);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Could not open output file '{}'", out_fpath.u8string());
|
||||
logger()->error("Could not open output file '{}'", out_fpath.u8string());
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
@ -245,7 +247,7 @@ int Encoder::init(
|
||||
// Write the output file header
|
||||
ret = avformat_write_header(ofmt_ctx_, nullptr);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Error writing output file header");
|
||||
logger()->error("Error writing output file header");
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -267,7 +269,7 @@ int Encoder::write_frame(AVFrame *frame, int64_t frame_idx) {
|
||||
if (frame->format != enc_ctx_->pix_fmt) {
|
||||
converted_frame = conversions::convert_avframe_pix_fmt(frame, enc_ctx_->pix_fmt);
|
||||
if (!converted_frame) {
|
||||
spdlog::error("Error converting frame to encoder's pixel format");
|
||||
logger()->error("Error converting frame to encoder's pixel format");
|
||||
return AVERROR_EXTERNAL;
|
||||
}
|
||||
converted_frame->pts = frame->pts;
|
||||
@ -275,7 +277,7 @@ int Encoder::write_frame(AVFrame *frame, int64_t frame_idx) {
|
||||
|
||||
AVPacket *enc_pkt = av_packet_alloc();
|
||||
if (!enc_pkt) {
|
||||
spdlog::error("Could not allocate AVPacket");
|
||||
logger()->error("Could not allocate AVPacket");
|
||||
return AVERROR(ENOMEM);
|
||||
}
|
||||
|
||||
@ -287,7 +289,7 @@ int Encoder::write_frame(AVFrame *frame, int64_t frame_idx) {
|
||||
ret = avcodec_send_frame(enc_ctx_, frame);
|
||||
}
|
||||
if (ret < 0) {
|
||||
spdlog::error("Error sending frame to encoder");
|
||||
logger()->error("Error sending frame to encoder");
|
||||
av_packet_free(&enc_pkt);
|
||||
return ret;
|
||||
}
|
||||
@ -299,7 +301,7 @@ int Encoder::write_frame(AVFrame *frame, int64_t frame_idx) {
|
||||
av_packet_unref(enc_pkt);
|
||||
break;
|
||||
} else if (ret < 0) {
|
||||
spdlog::error("Error encoding frame");
|
||||
logger()->error("Error encoding frame");
|
||||
av_packet_free(&enc_pkt);
|
||||
return ret;
|
||||
}
|
||||
@ -314,7 +316,7 @@ int Encoder::write_frame(AVFrame *frame, int64_t frame_idx) {
|
||||
ret = av_interleaved_write_frame(ofmt_ctx_, enc_pkt);
|
||||
av_packet_unref(enc_pkt);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Error muxing packet");
|
||||
logger()->error("Error muxing packet");
|
||||
av_packet_free(&enc_pkt);
|
||||
return ret;
|
||||
}
|
||||
@ -328,14 +330,14 @@ int Encoder::flush() {
|
||||
int ret;
|
||||
AVPacket *enc_pkt = av_packet_alloc();
|
||||
if (!enc_pkt) {
|
||||
spdlog::error("Could not allocate AVPacket");
|
||||
logger()->error("Could not allocate AVPacket");
|
||||
return AVERROR(ENOMEM);
|
||||
}
|
||||
|
||||
// Send a NULL frame to signal the encoder to flush
|
||||
ret = avcodec_send_frame(enc_ctx_, nullptr);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Error sending NULL frame to encoder during flush");
|
||||
logger()->error("Error sending NULL frame to encoder during flush");
|
||||
av_packet_free(&enc_pkt);
|
||||
return ret;
|
||||
}
|
||||
@ -347,7 +349,7 @@ int Encoder::flush() {
|
||||
av_packet_unref(enc_pkt);
|
||||
break;
|
||||
} else if (ret < 0) {
|
||||
spdlog::error("Error encoding packet during flush");
|
||||
logger()->error("Error encoding packet during flush");
|
||||
av_packet_free(&enc_pkt);
|
||||
return ret;
|
||||
}
|
||||
@ -362,7 +364,7 @@ int Encoder::flush() {
|
||||
ret = av_interleaved_write_frame(ofmt_ctx_, enc_pkt);
|
||||
av_packet_unref(enc_pkt);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Error muxing packet during flush");
|
||||
logger()->error("Error muxing packet during flush");
|
||||
av_packet_free(&enc_pkt);
|
||||
return ret;
|
||||
}
|
||||
|
@ -6,6 +6,7 @@
|
||||
|
||||
#include "fsutils.h"
|
||||
#include "libplacebo.h"
|
||||
#include "logger_manager.h"
|
||||
|
||||
namespace video2x {
|
||||
namespace processors {
|
||||
@ -55,7 +56,7 @@ int FilterLibplacebo::init(AVCodecContext *dec_ctx, AVCodecContext *enc_ctx, AVB
|
||||
|
||||
// Check if the shader file exists
|
||||
if (!std::filesystem::exists(shader_full_path)) {
|
||||
spdlog::error("libplacebo shader file not found: '{}'", shader_path_.u8string());
|
||||
logger()->error("libplacebo shader file not found: '{}'", shader_path_.u8string());
|
||||
return -1;
|
||||
}
|
||||
|
||||
@ -90,14 +91,14 @@ int FilterLibplacebo::filter(AVFrame *in_frame, AVFrame **out_frame) {
|
||||
// Get the filtered frame
|
||||
*out_frame = av_frame_alloc();
|
||||
if (*out_frame == nullptr) {
|
||||
spdlog::error("Failed to allocate output frame");
|
||||
logger()->error("Failed to allocate output frame");
|
||||
return -1;
|
||||
}
|
||||
|
||||
// Feed the frame to the filter graph
|
||||
ret = av_buffersrc_add_frame(buffersrc_ctx_, in_frame);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Error while feeding the filter graph");
|
||||
logger()->error("Error while feeding the filter graph");
|
||||
av_frame_free(out_frame);
|
||||
return ret;
|
||||
}
|
||||
@ -118,7 +119,7 @@ int FilterLibplacebo::filter(AVFrame *in_frame, AVFrame **out_frame) {
|
||||
int FilterLibplacebo::flush(std::vector<AVFrame *> &flushed_frames) {
|
||||
int ret = av_buffersrc_add_frame(buffersrc_ctx_, nullptr);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Error while flushing filter graph");
|
||||
logger()->error("Error while flushing filter graph");
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
@ -8,6 +8,7 @@
|
||||
|
||||
#include "conversions.h"
|
||||
#include "fsutils.h"
|
||||
#include "logger_manager.h"
|
||||
|
||||
namespace video2x {
|
||||
namespace processors {
|
||||
@ -46,18 +47,16 @@ int FilterRealesrgan::init(AVCodecContext *dec_ctx, AVCodecContext *enc_ctx, AVB
|
||||
model_bin_path = std::filesystem::path(STR("models")) / STR("realesrgan") / bin_file_name;
|
||||
|
||||
// Get the full paths using a function that possibly modifies or validates the path
|
||||
std::filesystem::path model_param_full_path =
|
||||
fsutils::find_resource_file(model_param_path);
|
||||
std::filesystem::path model_bin_full_path =
|
||||
fsutils::find_resource_file(model_bin_path);
|
||||
std::filesystem::path model_param_full_path = fsutils::find_resource_file(model_param_path);
|
||||
std::filesystem::path model_bin_full_path = fsutils::find_resource_file(model_bin_path);
|
||||
|
||||
// Check if the model files exist
|
||||
if (!std::filesystem::exists(model_param_full_path)) {
|
||||
spdlog::error("RealESRGAN model param file not found: {}", model_param_path.u8string());
|
||||
logger()->error("RealESRGAN model param file not found: {}", model_param_path.u8string());
|
||||
return -1;
|
||||
}
|
||||
if (!std::filesystem::exists(model_bin_full_path)) {
|
||||
spdlog::error("RealESRGAN model bin file not found: {}", model_bin_path.u8string());
|
||||
logger()->error("RealESRGAN model bin file not found: {}", model_bin_path.u8string());
|
||||
return -1;
|
||||
}
|
||||
|
||||
@ -71,7 +70,7 @@ int FilterRealesrgan::init(AVCodecContext *dec_ctx, AVCodecContext *enc_ctx, AVB
|
||||
|
||||
// Load the model
|
||||
if (realesrgan_->load(model_param_full_path, model_bin_full_path) != 0) {
|
||||
spdlog::error("Failed to load RealESRGAN model");
|
||||
logger()->error("Failed to load RealESRGAN model");
|
||||
return -1;
|
||||
}
|
||||
|
||||
@ -100,7 +99,7 @@ int FilterRealesrgan::filter(AVFrame *in_frame, AVFrame **out_frame) {
|
||||
// Convert the input frame to RGB24
|
||||
ncnn::Mat in_mat = conversions::avframe_to_ncnn_mat(in_frame);
|
||||
if (in_mat.empty()) {
|
||||
spdlog::error("Failed to convert AVFrame to ncnn::Mat");
|
||||
logger()->error("Failed to convert AVFrame to ncnn::Mat");
|
||||
return -1;
|
||||
}
|
||||
|
||||
@ -111,7 +110,7 @@ int FilterRealesrgan::filter(AVFrame *in_frame, AVFrame **out_frame) {
|
||||
|
||||
ret = realesrgan_->process(in_mat, out_mat);
|
||||
if (ret != 0) {
|
||||
spdlog::error("RealESRGAN processing failed");
|
||||
logger()->error("RealESRGAN processing failed");
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
@ -10,6 +10,8 @@
|
||||
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include "logger_manager.h"
|
||||
|
||||
namespace video2x {
|
||||
namespace fsutils {
|
||||
|
||||
@ -20,7 +22,7 @@ static std::filesystem::path get_executable_directory() {
|
||||
// Get the executable path, expanding the buffer if necessary
|
||||
DWORD size = GetModuleFileNameW(NULL, filepath.data(), static_cast<DWORD>(filepath.size()));
|
||||
if (size == 0) {
|
||||
spdlog::error("Error getting executable path: {}", GetLastError());
|
||||
logger()->error("Error getting executable path: {}", GetLastError());
|
||||
return std::filesystem::path();
|
||||
}
|
||||
|
||||
@ -29,7 +31,7 @@ static std::filesystem::path get_executable_directory() {
|
||||
filepath.resize(filepath.size() * 2);
|
||||
size = GetModuleFileNameW(NULL, filepath.data(), static_cast<DWORD>(filepath.size()));
|
||||
if (size == 0) {
|
||||
spdlog::error("Error getting executable path: {}", GetLastError());
|
||||
logger()->error("Error getting executable path: {}", GetLastError());
|
||||
return std::filesystem::path();
|
||||
}
|
||||
}
|
||||
@ -44,7 +46,7 @@ static std::filesystem::path get_executable_directory() {
|
||||
std::filesystem::path filepath = std::filesystem::read_symlink("/proc/self/exe", ec);
|
||||
|
||||
if (ec) {
|
||||
spdlog::error("Error reading /proc/self/exe: {}", ec.message());
|
||||
logger()->error("Error reading /proc/self/exe: {}", ec.message());
|
||||
return std::filesystem::path();
|
||||
}
|
||||
|
||||
|
@ -7,6 +7,7 @@
|
||||
|
||||
#include "conversions.h"
|
||||
#include "fsutils.h"
|
||||
#include "logger_manager.h"
|
||||
|
||||
namespace video2x {
|
||||
namespace processors {
|
||||
@ -42,12 +43,11 @@ int InterpolatorRIFE::init(AVCodecContext *dec_ctx, AVCodecContext *enc_ctx, AVB
|
||||
model_param_dir = std::filesystem::path(STR("models")) / STR("rife") / model_name_;
|
||||
|
||||
// Get the full paths using a function that possibly modifies or validates the path
|
||||
std::filesystem::path model_param_full_path =
|
||||
fsutils::find_resource_file(model_param_dir);
|
||||
std::filesystem::path model_param_full_path = fsutils::find_resource_file(model_param_dir);
|
||||
|
||||
// Check if the model files exist
|
||||
if (!std::filesystem::exists(model_param_full_path)) {
|
||||
spdlog::error("RIFE model param directory not found: {}", model_param_dir.u8string());
|
||||
logger()->error("RIFE model param directory not found: {}", model_param_dir.u8string());
|
||||
return -1;
|
||||
}
|
||||
|
||||
@ -61,7 +61,7 @@ int InterpolatorRIFE::init(AVCodecContext *dec_ctx, AVCodecContext *enc_ctx, AVB
|
||||
} else if (model_name_.find(STR("rife-v4")) != fsutils::StringType::npos) {
|
||||
rife_v4 = true;
|
||||
} else if (model_name_.find(STR("rife")) == fsutils::StringType::npos) {
|
||||
spdlog::critical("Failed to infer RIFE model generation from model name");
|
||||
logger()->critical("Failed to infer RIFE model generation from model name");
|
||||
return -1;
|
||||
}
|
||||
|
||||
@ -76,7 +76,7 @@ int InterpolatorRIFE::init(AVCodecContext *dec_ctx, AVCodecContext *enc_ctx, AVB
|
||||
|
||||
// Load the model
|
||||
if (rife_->load(model_param_full_path) != 0) {
|
||||
spdlog::error("Failed to load RIFE model");
|
||||
logger()->error("Failed to load RIFE model");
|
||||
return -1;
|
||||
}
|
||||
|
||||
@ -93,13 +93,13 @@ int InterpolatorRIFE::interpolate(
|
||||
|
||||
ncnn::Mat in_mat1 = conversions::avframe_to_ncnn_mat(prev_frame);
|
||||
if (in_mat1.empty()) {
|
||||
spdlog::error("Failed to convert AVFrame to ncnn::Mat");
|
||||
logger()->error("Failed to convert AVFrame to ncnn::Mat");
|
||||
return -1;
|
||||
}
|
||||
|
||||
ncnn::Mat in_mat2 = conversions::avframe_to_ncnn_mat(in_frame);
|
||||
if (in_mat2.empty()) {
|
||||
spdlog::error("Failed to convert AVFrame to ncnn::Mat");
|
||||
logger()->error("Failed to convert AVFrame to ncnn::Mat");
|
||||
return -1;
|
||||
}
|
||||
|
||||
@ -108,7 +108,7 @@ int InterpolatorRIFE::interpolate(
|
||||
|
||||
ret = rife_->process(in_mat1, in_mat2, time_step, out_mat);
|
||||
if (ret != 0) {
|
||||
spdlog::error("RIFE processing failed");
|
||||
logger()->error("RIFE processing failed");
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
@ -11,6 +11,8 @@ extern "C" {
|
||||
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include "logger_manager.h"
|
||||
|
||||
namespace video2x {
|
||||
namespace processors {
|
||||
|
||||
@ -32,20 +34,20 @@ int init_libplacebo(
|
||||
&vk_hw_device_ctx, AV_HWDEVICE_TYPE_VULKAN, std::to_string(vk_device_index).c_str(), NULL, 0
|
||||
);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Failed to create Vulkan hardware device context for libplacebo.");
|
||||
logger()->error("Failed to create Vulkan hardware device context for libplacebo.");
|
||||
vk_hw_device_ctx = nullptr;
|
||||
}
|
||||
|
||||
AVFilterGraph *graph = avfilter_graph_alloc();
|
||||
if (!graph) {
|
||||
spdlog::error("Unable to create filter graph.");
|
||||
logger()->error("Unable to create filter graph.");
|
||||
return AVERROR(ENOMEM);
|
||||
}
|
||||
|
||||
// Create buffer source
|
||||
const AVFilter *buffersrc = avfilter_get_by_name("buffer");
|
||||
if (!buffersrc) {
|
||||
spdlog::error("Filter 'buffer' not found.");
|
||||
logger()->error("Filter 'buffer' not found.");
|
||||
avfilter_graph_free(&graph);
|
||||
return AVERROR_FILTER_NOT_FOUND;
|
||||
}
|
||||
@ -82,7 +84,7 @@ int init_libplacebo(
|
||||
spdlog::debug("Buffer source args: {}", args);
|
||||
ret = avfilter_graph_create_filter(buffersrc_ctx, buffersrc, "in", args.c_str(), NULL, graph);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Cannot create buffer source.");
|
||||
logger()->error("Cannot create buffer source.");
|
||||
avfilter_graph_free(&graph);
|
||||
return ret;
|
||||
}
|
||||
@ -92,7 +94,7 @@ int init_libplacebo(
|
||||
// Create the libplacebo filter
|
||||
const AVFilter *libplacebo_filter = avfilter_get_by_name("libplacebo");
|
||||
if (!libplacebo_filter) {
|
||||
spdlog::error("Filter 'libplacebo' not found.");
|
||||
logger()->error("Filter 'libplacebo' not found.");
|
||||
avfilter_graph_free(&graph);
|
||||
return AVERROR_FILTER_NOT_FOUND;
|
||||
}
|
||||
@ -115,7 +117,7 @@ int init_libplacebo(
|
||||
&libplacebo_ctx, libplacebo_filter, "libplacebo", filter_args.c_str(), NULL, graph
|
||||
);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Cannot create libplacebo filter.");
|
||||
logger()->error("Cannot create libplacebo filter.");
|
||||
avfilter_graph_free(&graph);
|
||||
return ret;
|
||||
}
|
||||
@ -129,7 +131,7 @@ int init_libplacebo(
|
||||
// Link buffersrc to libplacebo
|
||||
ret = avfilter_link(last_filter, 0, libplacebo_ctx, 0);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Error connecting buffersrc to libplacebo filter.");
|
||||
logger()->error("Error connecting buffersrc to libplacebo filter.");
|
||||
avfilter_graph_free(&graph);
|
||||
return ret;
|
||||
}
|
||||
@ -140,7 +142,7 @@ int init_libplacebo(
|
||||
const AVFilter *buffersink = avfilter_get_by_name("buffersink");
|
||||
ret = avfilter_graph_create_filter(buffersink_ctx, buffersink, "out", NULL, NULL, graph);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Cannot create buffer sink.");
|
||||
logger()->error("Cannot create buffer sink.");
|
||||
avfilter_graph_free(&graph);
|
||||
return ret;
|
||||
}
|
||||
@ -148,7 +150,7 @@ int init_libplacebo(
|
||||
// Link libplacebo to buffersink
|
||||
ret = avfilter_link(last_filter, 0, *buffersink_ctx, 0);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Error connecting libplacebo filter to buffersink.");
|
||||
logger()->error("Error connecting libplacebo filter to buffersink.");
|
||||
avfilter_graph_free(&graph);
|
||||
return ret;
|
||||
}
|
||||
@ -156,7 +158,7 @@ int init_libplacebo(
|
||||
// Configure the filter graph
|
||||
ret = avfilter_graph_config(graph, NULL);
|
||||
if (ret < 0) {
|
||||
spdlog::error("Error configuring the filter graph.");
|
||||
logger()->error("Error configuring the filter graph.");
|
||||
avfilter_graph_free(&graph);
|
||||
return ret;
|
||||
}
|
||||
|
@ -9,7 +9,7 @@ extern "C" {
|
||||
#include "avutils.h"
|
||||
#include "decoder.h"
|
||||
#include "encoder.h"
|
||||
#include "logutils.h"
|
||||
#include "logger_manager.h"
|
||||
#include "processor.h"
|
||||
#include "processor_factory.h"
|
||||
|
||||
@ -20,16 +20,13 @@ VideoProcessor::VideoProcessor(
|
||||
const encoder::EncoderConfig enc_cfg,
|
||||
const uint32_t vk_device_idx,
|
||||
const AVHWDeviceType hw_device_type,
|
||||
const logutils::Video2xLogLevel log_level,
|
||||
const bool benchmark
|
||||
)
|
||||
: proc_cfg_(proc_cfg),
|
||||
enc_cfg_(enc_cfg),
|
||||
vk_device_idx_(vk_device_idx),
|
||||
hw_device_type_(hw_device_type),
|
||||
benchmark_(benchmark) {
|
||||
set_log_level(log_level);
|
||||
}
|
||||
benchmark_(benchmark) {}
|
||||
|
||||
int VideoProcessor::process(
|
||||
const std::filesystem::path in_fname,
|
||||
@ -42,7 +39,7 @@ int VideoProcessor::process(
|
||||
// Format and log the error message
|
||||
char errbuf[AV_ERROR_MAX_STRING_SIZE];
|
||||
av_strerror(error_code, errbuf, sizeof(errbuf));
|
||||
spdlog::critical("{}: {}", msg, errbuf);
|
||||
logger()->critical("{}: {}", msg, errbuf);
|
||||
|
||||
// Set the video processor state to failed and return the error code
|
||||
state_.store(VideoProcessorState::Failed);
|
||||
@ -167,7 +164,7 @@ int VideoProcessor::process_frames(
|
||||
av_frame_alloc(), &avutils::av_frame_deleter
|
||||
);
|
||||
if (frame == nullptr) {
|
||||
spdlog::critical("Error allocating frame");
|
||||
logger()->critical("Error allocating frame");
|
||||
return AVERROR(ENOMEM);
|
||||
}
|
||||
|
||||
@ -176,7 +173,7 @@ int VideoProcessor::process_frames(
|
||||
av_packet_alloc(), &avutils::av_packet_deleter
|
||||
);
|
||||
if (packet == nullptr) {
|
||||
spdlog::critical("Error allocating packet");
|
||||
logger()->critical("Error allocating packet");
|
||||
return AVERROR(ENOMEM);
|
||||
}
|
||||
|
||||
@ -205,7 +202,7 @@ int VideoProcessor::process_frames(
|
||||
break;
|
||||
}
|
||||
av_strerror(ret, errbuf, sizeof(errbuf));
|
||||
spdlog::critical("Error reading packet: {}", errbuf);
|
||||
logger()->critical("Error reading packet: {}", errbuf);
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -214,7 +211,7 @@ int VideoProcessor::process_frames(
|
||||
ret = avcodec_send_packet(dec_ctx, packet.get());
|
||||
if (ret < 0) {
|
||||
av_strerror(ret, errbuf, sizeof(errbuf));
|
||||
spdlog::critical("Error sending packet to decoder: {}", errbuf);
|
||||
logger()->critical("Error sending packet to decoder: {}", errbuf);
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -233,7 +230,7 @@ int VideoProcessor::process_frames(
|
||||
break;
|
||||
} else if (ret < 0) {
|
||||
av_strerror(ret, errbuf, sizeof(errbuf));
|
||||
spdlog::critical("Error decoding video frame: {}", errbuf);
|
||||
logger()->critical("Error decoding video frame: {}", errbuf);
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -251,7 +248,7 @@ int VideoProcessor::process_frames(
|
||||
break;
|
||||
}
|
||||
default:
|
||||
spdlog::critical("Unknown processing mode");
|
||||
logger()->critical("Unknown processing mode");
|
||||
return -1;
|
||||
}
|
||||
if (ret < 0 && ret != AVERROR(EAGAIN)) {
|
||||
@ -275,7 +272,7 @@ int VideoProcessor::process_frames(
|
||||
ret = processor->flush(raw_flushed_frames);
|
||||
if (ret < 0) {
|
||||
av_strerror(ret, errbuf, sizeof(errbuf));
|
||||
spdlog::critical("Error flushing filter: {}", errbuf);
|
||||
logger()->critical("Error flushing filter: {}", errbuf);
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -298,7 +295,7 @@ int VideoProcessor::process_frames(
|
||||
ret = encoder.flush();
|
||||
if (ret < 0) {
|
||||
av_strerror(ret, errbuf, sizeof(errbuf));
|
||||
spdlog::critical("Error flushing encoder: {}", errbuf);
|
||||
logger()->critical("Error flushing encoder: {}", errbuf);
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -313,7 +310,7 @@ int VideoProcessor::write_frame(AVFrame *frame, encoder::Encoder &encoder) {
|
||||
ret = encoder.write_frame(frame, frame_idx_);
|
||||
if (ret < 0) {
|
||||
av_strerror(ret, errbuf, sizeof(errbuf));
|
||||
spdlog::critical("Error encoding/writing frame: {}", errbuf);
|
||||
logger()->critical("Error encoding/writing frame: {}", errbuf);
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
@ -338,7 +335,7 @@ int VideoProcessor::write_raw_packet(
|
||||
ret = av_interleaved_write_frame(ofmt_ctx, packet);
|
||||
if (ret < 0) {
|
||||
av_strerror(ret, errbuf, sizeof(errbuf));
|
||||
spdlog::critical("Error muxing audio/subtitle packet: {}", errbuf);
|
||||
logger()->critical("Error muxing audio/subtitle packet: {}", errbuf);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
@ -361,7 +358,7 @@ int VideoProcessor::process_filtering(
|
||||
// Write the processed frame
|
||||
if (ret < 0 && ret != AVERROR(EAGAIN)) {
|
||||
av_strerror(ret, errbuf, sizeof(errbuf));
|
||||
spdlog::critical("Error filtering frame: {}", errbuf);
|
||||
logger()->critical("Error filtering frame: {}", errbuf);
|
||||
} else if (ret == 0 && proc_frame != nullptr) {
|
||||
auto processed_frame = std::unique_ptr<AVFrame, decltype(&avutils::av_frame_deleter)>(
|
||||
proc_frame, &avutils::av_frame_deleter
|
||||
@ -420,7 +417,7 @@ int VideoProcessor::process_interpolation(
|
||||
// Write the interpolated frame
|
||||
if (ret < 0 && ret != AVERROR(EAGAIN)) {
|
||||
av_strerror(ret, errbuf, sizeof(errbuf));
|
||||
spdlog::critical("Error interpolating frame: {}", errbuf);
|
||||
logger()->critical("Error interpolating frame: {}", errbuf);
|
||||
return ret;
|
||||
} else if (ret == 0 && proc_frame != nullptr) {
|
||||
auto processed_frame = std::unique_ptr<AVFrame, decltype(&avutils::av_frame_deleter)>(
|
||||
|
src/logger_manager.cpp (new file, 110 lines)

@@ -0,0 +1,110 @@
#include "logger_manager.h"

extern "C" {
#include <libavutil/log.h>
}

#include <spdlog/sinks/stdout_color_sinks.h>
#include <spdlog/spdlog.h>

static spdlog::level::level_enum ffmpeg_level_to_spdlog(int av_level) {
    if (av_level <= AV_LOG_PANIC) {
        return spdlog::level::critical;
    } else if (av_level <= AV_LOG_ERROR) {
        return spdlog::level::err;
    } else if (av_level <= AV_LOG_WARNING) {
        return spdlog::level::warn;
    } else if (av_level <= AV_LOG_INFO) {
        return spdlog::level::info;
    } else if (av_level <= AV_LOG_VERBOSE) {
        return spdlog::level::debug;
    } else if (av_level == AV_LOG_DEBUG) {
        return spdlog::level::debug;
    } else {
        // AV_LOG_TRACE or beyond (if supported by FFmpeg)
        return spdlog::level::trace;
    }
}

static void ffmpeg_log_callback(void *, int av_level, const char *fmt, va_list vargs) {
    // Format the message into a buffer
    char buffer[1024];
    vsnprintf(buffer, sizeof(buffer), fmt, vargs);

    // Trim trailing newlines
    std::string message = buffer;
    while (!message.empty() && (message.back() == '\n' || message.back() == '\r')) {
        message.pop_back();
    }

    // Forward FFmpeg log message to the logger instance
    video2x::logger()->log(ffmpeg_level_to_spdlog(av_level), message);
}

namespace video2x {
namespace logger_manager {

LoggerManager::LoggerManager() {
    auto console_sink = std::make_shared<spdlog::sinks::stdout_color_sink_mt>();
    console_sink->set_pattern("%+");
    logger_ = std::make_shared<spdlog::logger>("video2x", console_sink);
    spdlog::register_logger(logger_);
    logger_->set_level(spdlog::level::info);
}

LoggerManager &LoggerManager::instance() {
    static LoggerManager instance;
    return instance;
}

std::shared_ptr<spdlog::logger> LoggerManager::logger() {
    return logger_;
}

void LoggerManager::reconfigure_logger(
    const std::string &logger_name,
    const std::vector<spdlog::sink_ptr> &sinks,
    const std::string &pattern
) {
    if (!sinks.empty()) {
        // If a logger with the same name exists, remove it first
        auto old_logger = spdlog::get(logger_name);
        if (old_logger) {
            spdlog::drop(logger_name);
        }

        // Create a new logger with the given name, sinks, and pattern
        auto new_logger = std::make_shared<spdlog::logger>(logger_name, sinks.begin(), sinks.end());
        new_logger->set_pattern(pattern);

        // Maintain the log level from the previous logger
        if (logger_) {
            new_logger->set_level(logger_->level());
        }

        // Replace the internal logger_ member and register the new one
        logger_ = new_logger;
        spdlog::register_logger(logger_);
    }
}

bool LoggerManager::set_log_level(const std::string &level_str) {
    spdlog::level::level_enum log_level = spdlog::level::from_str(level_str);
    if (log_level == spdlog::level::off && level_str != "off") {
        // Invalid level_str
        return false;
    }
    logger_->set_level(log_level);
    return true;
}

void LoggerManager::hook_ffmpeg_logging() {
    av_log_set_callback(ffmpeg_log_callback);
}

void LoggerManager::unhook_ffmpeg_logging() {
    av_log_set_callback(nullptr);
}

} // namespace logger_manager
} // namespace video2x
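As a rough usage sketch of reconfigure_logger() (the file name and sink choices below are illustrative and not part of this commit; only the LoggerManager call itself is):

    #include <libvideo2x/logger_manager.h>

    #include <spdlog/sinks/basic_file_sink.h>
    #include <spdlog/sinks/stdout_color_sinks.h>

    void log_to_console_and_file() {
        // Colored console output plus a plain log file
        std::vector<spdlog::sink_ptr> sinks = {
            std::make_shared<spdlog::sinks::stdout_color_sink_mt>(),
            std::make_shared<spdlog::sinks::basic_file_sink_mt>("video2x.log"),
        };

        // Swaps the sinks behind video2x::logger(); the previous log level is kept
        video2x::logger_manager::LoggerManager::instance().reconfigure_logger("video2x", sinks);
    }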
@@ -1,50 +0,0 @@
-#include "logutils.h"
-
-extern "C" {
-#include <libavutil/avutil.h>
-}
-
-#include <spdlog/spdlog.h>
-
-namespace video2x {
-namespace logutils {
-
-void set_log_level(Video2xLogLevel log_level) {
-    switch (log_level) {
-        case Video2xLogLevel::Trace:
-            av_log_set_level(AV_LOG_TRACE);
-            spdlog::set_level(spdlog::level::trace);
-            break;
-        case Video2xLogLevel::Debug:
-            av_log_set_level(AV_LOG_DEBUG);
-            spdlog::set_level(spdlog::level::debug);
-            break;
-        case Video2xLogLevel::Info:
-            av_log_set_level(AV_LOG_INFO);
-            spdlog::set_level(spdlog::level::info);
-            break;
-        case Video2xLogLevel::Warning:
-            av_log_set_level(AV_LOG_WARNING);
-            spdlog::set_level(spdlog::level::warn);
-            break;
-        case Video2xLogLevel::Error:
-            av_log_set_level(AV_LOG_ERROR);
-            spdlog::set_level(spdlog::level::err);
-            break;
-        case Video2xLogLevel::Critical:
-            av_log_set_level(AV_LOG_FATAL);
-            spdlog::set_level(spdlog::level::critical);
-            break;
-        case Video2xLogLevel::Off:
-            av_log_set_level(AV_LOG_QUIET);
-            spdlog::set_level(spdlog::level::off);
-            break;
-        default:
-            av_log_set_level(AV_LOG_INFO);
-            spdlog::set_level(spdlog::level::info);
-            break;
-    }
-}
-
-} // namespace logutils
-} // namespace video2x
@ -6,6 +6,7 @@
|
||||
#include "filter_libplacebo.h"
|
||||
#include "filter_realesrgan.h"
|
||||
#include "interpolator_rife.h"
|
||||
#include "logger_manager.h"
|
||||
|
||||
namespace video2x {
|
||||
namespace processors {
|
||||
@ -36,7 +37,7 @@ std::unique_ptr<Processor> ProcessorFactory::create_processor(
|
||||
) const {
|
||||
auto it = creators.find(proc_cfg.processor_type);
|
||||
if (it == creators.end()) {
|
||||
spdlog::critical(
|
||||
logger()->critical(
|
||||
"Processor type not registered: {}", static_cast<int>(proc_cfg.processor_type)
|
||||
);
|
||||
return nullptr;
|
||||
@ -54,11 +55,11 @@ void ProcessorFactory::init_default_processors(ProcessorFactory &factory) {
|
||||
uint32_t vk_device_index) -> std::unique_ptr<Processor> {
|
||||
const auto &config = std::get<LibplaceboConfig>(proc_cfg.config);
|
||||
if (config.shader_path.empty()) {
|
||||
spdlog::critical("Shader path must be provided for the libplacebo filter");
|
||||
logger()->critical("Shader path must be provided for the libplacebo filter");
|
||||
return nullptr;
|
||||
}
|
||||
if (proc_cfg.width <= 0 || proc_cfg.height <= 0) {
|
||||
spdlog::critical(
|
||||
logger()->critical(
|
||||
"Output width and height must be provided for the libplacebo filter"
|
||||
);
|
||||
return nullptr;
|
||||
@ -78,11 +79,11 @@ void ProcessorFactory::init_default_processors(ProcessorFactory &factory) {
|
||||
uint32_t vk_device_index) -> std::unique_ptr<Processor> {
|
||||
const auto &config = std::get<RealESRGANConfig>(proc_cfg.config);
|
||||
if (proc_cfg.scaling_factor <= 0) {
|
||||
spdlog::critical("Scaling factor must be provided for the RealESRGAN filter");
|
||||
logger()->critical("Scaling factor must be provided for the RealESRGAN filter");
|
||||
return nullptr;
|
||||
}
|
||||
if (config.model_name.empty()) {
|
||||
spdlog::critical("Model name must be provided for the RealESRGAN filter");
|
||||
logger()->critical("Model name must be provided for the RealESRGAN filter");
|
||||
return nullptr;
|
||||
}
|
||||
return std::make_unique<FilterRealesrgan>(
|
||||
@ -100,7 +101,7 @@ void ProcessorFactory::init_default_processors(ProcessorFactory &factory) {
|
||||
uint32_t vk_device_index) -> std::unique_ptr<Processor> {
|
||||
const auto &cfg = std::get<RIFEConfig>(proc_cfg.config);
|
||||
if (cfg.model_name.empty()) {
|
||||
spdlog::critical("Model name must be provided for the RIFE filter");
|
||||
logger()->critical("Model name must be provided for the RIFE filter");
|
||||
return nullptr;
|
||||
}
|
||||
return std::make_unique<InterpolatorRIFE>(
|
||||
|
@ -5,10 +5,7 @@
|
||||
|
||||
// Structure to hold parsed arguments
|
||||
struct Arguments {
|
||||
video2x::logutils::Video2xLogLevel log_level = video2x::logutils::Video2xLogLevel::Info;
|
||||
bool no_progress = false;
|
||||
|
||||
// General options
|
||||
std::filesystem::path in_fname;
|
||||
std::filesystem::path out_fname;
|
||||
uint32_t vk_device_index = 0;
|
||||
|
@@ -1,17 +0,0 @@
-#pragma once
-
-#include <atomic>
-#include <optional>
-
-#include <libvideo2x/libvideo2x.h>
-#include <spdlog/spdlog.h>
-
-extern std::atomic<bool> newline_required;
-
-void set_spdlog_level(video2x::logutils::Video2xLogLevel log_level);
-
-std::optional<video2x::logutils::Video2xLogLevel> find_log_level_by_name(
-    const video2x::fsutils::StringType &log_level_name
-);
-
-void newline_safe_ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl);
tools/video2x/include/newline_safe_sink.h (new file, 22 lines)

@@ -0,0 +1,22 @@
#pragma once

#include <atomic>

#include <spdlog/sinks/ansicolor_sink.h>

class newline_safe_sink : public spdlog::sinks::ansicolor_stdout_sink_mt {
   public:
    newline_safe_sink() = default;
    ~newline_safe_sink() = default;

    newline_safe_sink(const newline_safe_sink &) = delete;
    newline_safe_sink &operator=(const newline_safe_sink &) = delete;

    void log(const spdlog::details::log_msg &msg);

    void set_needs_newline(bool needs_newline) { needs_newline_.store(needs_newline); };
    bool get_needs_newline() { return needs_newline_.load(); };

   private:
    std::atomic<bool> needs_newline_ = false;
};
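One way this sink can be combined with the LoggerManager introduced above (a sketch; the exact wiring used by the CLI is not fully shown in this excerpt, so treat the function and logger names as illustrative):

    #include <libvideo2x/logger_manager.h>

    #include "newline_safe_sink.h"

    void install_newline_safe_logging() {
        // Make the progress-bar-aware sink the sole sink of the shared logger
        auto sink = std::make_shared<newline_safe_sink>();
        video2x::logger_manager::LoggerManager::instance().reconfigure_logger(
            "video2x", {sink}
        );

        // A progress printer would then flag the sink after drawing the bar, so the
        // next log message starts on its own line
        sink->set_needs_newline(true);
    }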
@ -7,13 +7,13 @@
|
||||
#include <cwchar>
|
||||
#endif
|
||||
|
||||
#include <libvideo2x/logger_manager.h>
|
||||
#include <libvideo2x/version.h>
|
||||
#include <spdlog/spdlog.h>
|
||||
#include <vulkan_utils.h>
|
||||
#include <boost/program_options.hpp>
|
||||
|
||||
#include "logging.h"
|
||||
#include "validators.h"
|
||||
#include "vulkan_utils.h"
|
||||
|
||||
#ifdef _WIN32
|
||||
#define BOOST_PROGRAM_OPTIONS_WCHAR_T
|
||||
@ -221,28 +221,27 @@ int parse_args(
|
||||
}
|
||||
|
||||
if (vm.count("log-level")) {
|
||||
std::optional<video2x::logutils::Video2xLogLevel> log_level =
|
||||
find_log_level_by_name(vm["log-level"].as<video2x::fsutils::StringType>());
|
||||
if (!log_level.has_value()) {
|
||||
spdlog::critical("Invalid log level specified.");
|
||||
if (!video2x::logger_manager::LoggerManager::instance().set_log_level(
|
||||
wstring_to_u8string(vm["log-level"].as<video2x::fsutils::StringType>())
|
||||
)) {
|
||||
video2x::logger()->critical("Invalid log level specified.");
|
||||
return -1;
|
||||
}
|
||||
arguments.log_level = log_level.value();
|
||||
}
|
||||
set_spdlog_level(arguments.log_level);
|
||||
video2x::logger_manager::LoggerManager::instance().hook_ffmpeg_logging();
|
||||
|
||||
// Print program banner
|
||||
spdlog::info("Video2X version {}", LIBVIDEO2X_VERSION_STRING);
|
||||
// spdlog::info("Copyright (C) 2018-2024 K4YT3X and contributors.");
|
||||
// spdlog::info("Licensed under GNU AGPL version 3.");
|
||||
video2x::logger()->info("Video2X version {}", LIBVIDEO2X_VERSION_STRING);
|
||||
// video2x::logger()->info("Copyright (C) 2018-2024 K4YT3X and contributors.");
|
||||
// video2x::logger()->info("Licensed under GNU AGPL version 3.");
|
||||
|
||||
// Assign positional arguments
|
||||
if (vm.count("input")) {
|
||||
arguments.in_fname =
|
||||
std::filesystem::path(vm["input"].as<video2x::fsutils::StringType>());
|
||||
spdlog::info("Processing file: {}", arguments.in_fname.u8string());
|
||||
video2x::logger()->info("Processing file: {}", arguments.in_fname.u8string());
|
||||
} else {
|
||||
spdlog::critical("Input file path is required.");
|
||||
video2x::logger()->critical("Input file path is required.");
|
||||
return -1;
|
||||
}
|
||||
|
||||
@ -250,7 +249,7 @@ int parse_args(
|
||||
arguments.out_fname =
|
||||
std::filesystem::path(vm["output"].as<video2x::fsutils::StringType>());
|
||||
} else if (!arguments.benchmark) {
|
||||
spdlog::critical("Output file path is required.");
|
||||
video2x::logger()->critical("Output file path is required.");
|
||||
return -1;
|
||||
}
|
||||
|
||||
@ -265,13 +264,13 @@ int parse_args(
|
||||
} else if (processor_type_str == STR("rife")) {
|
||||
proc_cfg.processor_type = video2x::processors::ProcessorType::RIFE;
|
||||
} else {
|
||||
spdlog::critical(
|
||||
video2x::logger()->critical(
|
||||
"Invalid processor specified. Must be 'libplacebo', 'realesrgan', or 'rife'."
|
||||
);
|
||||
return -1;
|
||||
}
|
||||
} else {
|
||||
spdlog::critical("Processor type is required.");
|
||||
video2x::logger()->critical("Processor type is required.");
|
||||
return -1;
|
||||
}
|
||||
|
||||
@ -284,7 +283,7 @@ int parse_args(
|
||||
arguments.hw_device_type =
|
||||
av_hwdevice_find_type_by_name(wstring_to_u8string(hwaccel_str).c_str());
|
||||
if (arguments.hw_device_type == AV_HWDEVICE_TYPE_NONE) {
|
||||
spdlog::critical(
|
||||
video2x::logger()->critical(
|
||||
"Invalid hardware device type '{}'.", wstring_to_u8string(hwaccel_str)
|
||||
);
|
||||
return -1;
|
||||
@ -299,7 +298,9 @@ int parse_args(
|
||||
const AVCodec *codec =
|
||||
avcodec_find_encoder_by_name(wstring_to_u8string(codec_str).c_str());
|
||||
if (codec == nullptr) {
|
||||
spdlog::critical("Codec '{}' not found.", wstring_to_u8string(codec_str));
|
||||
video2x::logger()->critical(
|
||||
"Codec '{}' not found.", wstring_to_u8string(codec_str)
|
||||
);
|
||||
return -1;
|
||||
}
|
||||
enc_cfg.codec = codec->id;
|
||||
@ -316,7 +317,7 @@ int parse_args(
|
||||
if (!pix_fmt_str.empty()) {
|
||||
enc_cfg.pix_fmt = av_get_pix_fmt(wstring_to_u8string(pix_fmt_str).c_str());
|
||||
if (enc_cfg.pix_fmt == AV_PIX_FMT_NONE) {
|
||||
spdlog::critical(
|
||||
video2x::logger()->critical(
|
||||
"Invalid pixel format '{}'.", wstring_to_u8string(pix_fmt_str)
|
||||
);
|
||||
return -1;
|
||||
@ -334,7 +335,9 @@ int parse_args(
|
||||
video2x::fsutils::StringType value = opt.substr(eq_pos + 1);
|
||||
enc_cfg.extra_opts.push_back(std::make_pair(key, value));
|
||||
} else {
|
||||
spdlog::critical("Invalid extra AVOption format: {}", wstring_to_u8string(opt));
|
||||
video2x::logger()->critical(
|
||||
"Invalid extra AVOption format: {}", wstring_to_u8string(opt)
|
||||
);
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
@ -344,11 +347,13 @@ int parse_args(
|
||||
switch (proc_cfg.processor_type) {
|
||||
case video2x::processors::ProcessorType::Libplacebo: {
|
||||
if (!vm.count("libplacebo-shader")) {
spdlog::critical("Shader name/path must be set for libplacebo.");
video2x::logger()->critical("Shader name/path must be set for libplacebo.");
return -1;
}
if (proc_cfg.width <= 0 || proc_cfg.height <= 0) {
spdlog::critical("Output width and height must be set for libplacebo.");
video2x::logger()->critical(
"Output width and height must be set for libplacebo."
);
return -1;
}

@ -361,12 +366,15 @@ int parse_args(
}
case video2x::processors::ProcessorType::RealESRGAN: {
if (!vm.count("realesrgan-model")) {
spdlog::critical("RealESRGAN model name must be set for RealESRGAN.");
video2x::logger()->critical("RealESRGAN model name must be set for RealESRGAN."
);
return -1;
}
if (proc_cfg.scaling_factor != 2 && proc_cfg.scaling_factor != 3 &&
proc_cfg.scaling_factor != 4) {
spdlog::critical("Scaling factor must be set to 2, 3, or 4 for RealESRGAN.");
video2x::logger()->critical(
"Scaling factor must be set to 2, 3, or 4 for RealESRGAN."
);
return -1;
}

@ -380,11 +388,13 @@ int parse_args(
}
case video2x::processors::ProcessorType::RIFE: {
if (!vm.count("rife-model")) {
spdlog::critical("RIFE model name must be set for RIFE.");
video2x::logger()->critical("RIFE model name must be set for RIFE.");
return -1;
}
if (proc_cfg.frm_rate_mul < 2) {
spdlog::critical("Frame rate multiplier must be set to at least 2 for RIFE.");
video2x::logger()->critical(
"Frame rate multiplier must be set to at least 2 for RIFE."
);
return -1;
}

@ -399,14 +409,16 @@ int parse_args(
break;
}
default:
spdlog::critical("Invalid processor type.");
video2x::logger()->critical("Invalid processor type.");
return -1;
}
} catch (const po::error &e) {
spdlog::critical("Error parsing arguments: {}", e.what());
video2x::logger()->critical("Error parsing arguments: {}", e.what());
return -1;
} catch (const std::exception &e) {
spdlog::critical("Unexpected exception caught while parsing options: {}", e.what());
video2x::logger()->critical(
"Unexpected exception caught while parsing options: {}", e.what()
);
return -1;
}

@ -415,17 +427,19 @@ int parse_args(
int get_vulkan_dev_ret = get_vulkan_device_prop(arguments.vk_device_index, &dev_props);
if (get_vulkan_dev_ret != 0) {
if (get_vulkan_dev_ret == -2) {
spdlog::critical("Invalid Vulkan device ID specified.");
video2x::logger()->critical("Invalid Vulkan device ID specified.");
return -1;
} else {
spdlog::warn("Unable to validate Vulkan device ID.");
video2x::logger()->warn("Unable to validate Vulkan device ID.");
return -1;
}
} else {
// Warn if the selected device is a CPU
spdlog::info("Using Vulkan device: {} ({:#x})", dev_props.deviceName, dev_props.deviceID);
video2x::logger()->info(
"Using Vulkan device: {} ({:#x})", dev_props.deviceName, dev_props.deviceID
);
if (dev_props.deviceType == VK_PHYSICAL_DEVICE_TYPE_CPU) {
spdlog::warn("The selected Vulkan device is a CPU device.");
video2x::logger()->warn("The selected Vulkan device is a CPU device.");
}
}
return 0;

@ -1,81 +0,0 @@
#include "logging.h"

#include <algorithm>
#include <unordered_map>

extern "C" {
#include <libavutil/log.h>
}

std::atomic<bool> newline_required = false;

void set_spdlog_level(video2x::logutils::Video2xLogLevel log_level) {
switch (log_level) {
case video2x::logutils::Video2xLogLevel::Trace:
spdlog::set_level(spdlog::level::trace);
break;
case video2x::logutils::Video2xLogLevel::Debug:
spdlog::set_level(spdlog::level::debug);
break;
case video2x::logutils::Video2xLogLevel::Info:
spdlog::set_level(spdlog::level::info);
break;
case video2x::logutils::Video2xLogLevel::Warning:
spdlog::set_level(spdlog::level::warn);
break;
case video2x::logutils::Video2xLogLevel::Error:
spdlog::set_level(spdlog::level::err);
break;
case video2x::logutils::Video2xLogLevel::Critical:
spdlog::set_level(spdlog::level::critical);
break;
case video2x::logutils::Video2xLogLevel::Off:
spdlog::set_level(spdlog::level::off);
break;
default:
spdlog::set_level(spdlog::level::info);
break;
}
}

std::optional<video2x::logutils::Video2xLogLevel> find_log_level_by_name(
const video2x::fsutils::StringType &log_level_name
) {
// Static map to store the mapping
static const std::
unordered_map<video2x::fsutils::StringType, video2x::logutils::Video2xLogLevel>
log_level_map = {
{STR("trace"), video2x::logutils::Video2xLogLevel::Trace},
{STR("debug"), video2x::logutils::Video2xLogLevel::Debug},
{STR("info"), video2x::logutils::Video2xLogLevel::Info},
{STR("warning"), video2x::logutils::Video2xLogLevel::Warning},
{STR("warn"), video2x::logutils::Video2xLogLevel::Warning},
{STR("error"), video2x::logutils::Video2xLogLevel::Error},
{STR("critical"), video2x::logutils::Video2xLogLevel::Critical},
{STR("off"), video2x::logutils::Video2xLogLevel::Off},
{STR("none"), video2x::logutils::Video2xLogLevel::Off}
};

// Normalize the input to lowercase
video2x::fsutils::StringType normalized_name = log_level_name;
std::transform(
normalized_name.begin(), normalized_name.end(), normalized_name.begin(), ::tolower
);

// Lookup the log level in the map
auto it = log_level_map.find(normalized_name);
if (it != log_level_map.end()) {
return it->second;
}

return std::nullopt;
}

// Newline-safe log callback for FFmpeg
void newline_safe_ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl) {
if (level <= av_log_get_level() && newline_required.load()) {
putchar('\n');
newline_required.store(false);
}
av_log_default_callback(ptr, level, fmt, vl);
}
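The helpers removed above (set_spdlog_level() and find_log_level_by_name()) are not re-added anywhere in this part of the diff; presumably their role moves into the new LoggerManager. As a point of reference only, spdlog can do the same name-to-level mapping on its own; a minimal hedged sketch (the helper name parse_log_level is illustrative and not taken from the commit):

#include <optional>
#include <string>

#include <spdlog/common.h>

// Hypothetical replacement for find_log_level_by_name(): spdlog ships its own
// parser, spdlog::level::from_str(). It returns spdlog::level::off for names it
// does not recognize, so "off" is special-cased to tell it apart from bad input.
// Unlike the removed map, this does not accept the alias "none".
std::optional<spdlog::level::level_enum> parse_log_level(const std::string &name) {
    spdlog::level::level_enum level = spdlog::level::from_str(name);
    if (level == spdlog::level::off && name != "off") {
        return std::nullopt;
    }
    return level;
}
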
9
tools/video2x/src/newline_safe_sink.cpp
Normal file
@ -0,0 +1,9 @@
#include "newline_safe_sink.h"

void newline_safe_sink::log(const spdlog::details::log_msg &msg) {
if (needs_newline_.exchange(false)) {
std::fputs("\n", stdout);
}

spdlog::sinks::ansicolor_stdout_sink_mt::log(msg);
}
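The matching newline_safe_sink.h header is not shown in this portion of the diff. Based on the definition above and the calls made from main.cpp further down (set_needs_newline() and get_needs_newline()), a plausible sketch of what it declares might look like the following; the actual header in the repository may differ:

#pragma once

#include <atomic>

#include <spdlog/details/log_msg.h>
#include <spdlog/sinks/ansicolor_sink.h>

// Hypothetical declaration: a stdout color sink that remembers whether the
// progress bar left the cursor mid-line, and if so prints a newline before
// the next log message (see the log() override above).
class newline_safe_sink : public spdlog::sinks::ansicolor_stdout_sink_mt {
   public:
    void log(const spdlog::details::log_msg &msg) override;

    void set_needs_newline(bool needs_newline) { needs_newline_.store(needs_newline); }
    bool get_needs_newline() const { return needs_newline_.load(); }

   private:
    std::atomic<bool> needs_newline_ = false;
};
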

@ -9,10 +9,10 @@
#include <unistd.h>
#endif

#include <spdlog/spdlog.h>
#include <libvideo2x/logger_manager.h>

#include "argparse.h"
#include "logging.h"
#include "newline_safe_sink.h"
#include "timer.h"

// Set UNIX terminal input to non-blocking mode
@ -73,16 +73,13 @@ int main(int argc, char **argv) {

// Create video processor object
video2x::VideoProcessor video_processor = video2x::VideoProcessor(
proc_cfg,
enc_cfg,
arguments.vk_device_index,
arguments.hw_device_type,
arguments.log_level,
arguments.benchmark
proc_cfg, enc_cfg, arguments.vk_device_index, arguments.hw_device_type, arguments.benchmark
);

// Register a newline-safe log callback for FFmpeg
av_log_set_callback(newline_safe_ffmpeg_log_callback);
// Register a newline-safe log sink
std::shared_ptr<newline_safe_sink> logger_sink = std::make_shared<newline_safe_sink>();
std::vector<spdlog::sink_ptr> sinks = {logger_sink};
video2x::logger_manager::LoggerManager::instance().reconfigure_logger("video2x", sinks);

// Create a thread for video processing
int proc_ret = 0;
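The hunk above is where the CLI wires up the new logging path: it builds a sink list around the newline-safe sink and hands it to LoggerManager, after which every component logs through video2x::logger(). A hedged sketch of the same mechanism for a hypothetical front end that wants console plus file output (the function name, file name, and file sink are made up for this example; only reconfigure_logger() and video2x::logger() appear in the diff):

#include <memory>
#include <vector>

#include <libvideo2x/logger_manager.h>
#include <spdlog/sinks/basic_file_sink.h>
#include <spdlog/sinks/stdout_color_sinks.h>

// Hypothetical setup: route libvideo2x's log output to the console and to a
// log file at the same time by registering two stock spdlog sinks.
void configure_example_logging() {
    std::vector<spdlog::sink_ptr> sinks = {
        std::make_shared<spdlog::sinks::stdout_color_sink_mt>(),
        std::make_shared<spdlog::sinks::basic_file_sink_mt>("video2x.log", true),
    };
    video2x::logger_manager::LoggerManager::instance().reconfigure_logger("video2x", sinks);
    video2x::logger()->info("Logger reconfigured with console and file sinks.");
}
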
@ -91,7 +88,7 @@ int main(int argc, char **argv) {
proc_ret = video_processor.process(arguments.in_fname, arguments.out_fname);
completed.store(true, std::memory_order_relaxed);
});
spdlog::info("Press [space] to pause/resume, [q] to abort.");
video2x::logger()->info("Press [space] to pause/resume, [q] to abort.");

// Setup timer
Timer timer;
@ -140,15 +137,15 @@ int main(int argc, char **argv) {
std::cout.flush();
timer.resume();
}
newline_required.store(true);
logger_sink->set_needs_newline(true);
}
} else if (ch == 'q' || ch == 'Q') {
// Abort processing
if (newline_required.load()) {
if (logger_sink->get_needs_newline()) {
putchar('\n');
}
spdlog::warn("Aborting gracefully; press Ctrl+C to terminate forcefully.");
newline_required.store(false);
video2x::logger()->warn("Aborting gracefully; press Ctrl+C to terminate forcefully.");
logger_sink->set_needs_newline(false);
video_processor.abort();
break;
}
@ -192,7 +189,7 @@ int main(int argc, char **argv) {
<< ":" << std::setw(2) << std::setfill('0') << minutes_remaining << ":"
<< std::setw(2) << std::setfill('0') << seconds_remaining;
std::cout.flush();
newline_required.store(true);
logger_sink->set_needs_newline(true);
}
}

@ -209,20 +206,20 @@ int main(int argc, char **argv) {
processing_thread.join();

// Print a newline if progress bar was displayed
if (newline_required.load()) {
if (logger_sink->get_needs_newline()) {
std::cout << '\n';
}

// Print final message based on processing result
if (video_processor.get_state() == video2x::VideoProcessorState::Aborted) {
spdlog::warn("Video processing aborted");
video2x::logger()->warn("Video processing aborted");
return 2;
} else if (proc_ret != 0 ||
video_processor.get_state() == video2x::VideoProcessorState::Failed) {
spdlog::critical("Video processing failed with error code {}", proc_ret);
video2x::logger()->critical("Video processing failed with error code {}", proc_ret);
return 1;
} else {
spdlog::info("Video processed successfully");
video2x::logger()->info("Video processed successfully");
}

// Calculate statistics

@ -3,7 +3,7 @@
#include <iostream>
#include <vector>

#include <spdlog/spdlog.h>
#include <libvideo2x/logger_manager.h>

static int enumerate_vulkan_devices(VkInstance *instance, std::vector<VkPhysicalDevice> &devices) {
// Create a Vulkan instance
@ -12,7 +12,7 @@ static int enumerate_vulkan_devices(VkInstance *instance, std::vector<VkPhysical

VkResult result = vkCreateInstance(&create_info, nullptr, instance);
if (result != VK_SUCCESS) {
spdlog::error("Failed to create Vulkan instance.");
video2x::logger()->error("Failed to create Vulkan instance.");
return -1;
}

@ -20,7 +20,9 @@ static int enumerate_vulkan_devices(VkInstance *instance, std::vector<VkPhysical
uint32_t device_count = 0;
result = vkEnumeratePhysicalDevices(*instance, &device_count, nullptr);
if (result != VK_SUCCESS || device_count == 0) {
spdlog::error("Failed to enumerate Vulkan physical devices or no devices available.");
video2x::logger()->error(
"Failed to enumerate Vulkan physical devices or no devices available."
);
vkDestroyInstance(*instance, nullptr);
return -1;
}
@ -28,7 +30,7 @@ static int enumerate_vulkan_devices(VkInstance *instance, std::vector<VkPhysical
devices.resize(device_count);
result = vkEnumeratePhysicalDevices(*instance, &device_count, devices.data());
if (result != VK_SUCCESS) {
spdlog::error("Failed to retrieve Vulkan physical devices.");
video2x::logger()->error("Failed to retrieve Vulkan physical devices.");
vkDestroyInstance(*instance, nullptr);
return -1;
}
@ -96,7 +98,7 @@ int list_vulkan_devices() {

int get_vulkan_device_prop(uint32_t vk_device_index, VkPhysicalDeviceProperties *dev_props) {
if (dev_props == nullptr) {
spdlog::error("Invalid device properties pointer.");
video2x::logger()->error("Invalid device properties pointer.");
return -1;
}