diff --git a/README.md b/README.md
index 4751701..ad538c8 100644
--- a/README.md
+++ b/README.md
@@ -7,25 +7,49 @@
-### A powerful OpenGL and Vulkan YUV rendering module is needed!
+### A powerful OpenGL and Vulkan YUV rendering module is needed
1. The shaders used by the OpenGL renderer contain too many if/else branches, which leave the GPU doing wasted work and slow down GPU decoding and av_hwframe_transfer_data; this is especially noticeable with 4K video. (One way to remove the branching is sketched right after this list.)
2. In WidgetRender, use the QImage::Format_RGB32 and QImage::Format_ARGB32_Premultiplied image formats whenever possible, for the following reason:
   1. Avoid rendering directly to most of these formats using QPainter. Rendering is best optimized to the Format_RGB32 and Format_ARGB32_Premultiplied formats, and secondarily for rendering to the Format_RGB16, Format_RGBX8888, Format_RGBA8888_Premultiplied, Format_RGBX64 and Format_RGBA64_Premultiplied formats.
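+
+One way to remove the per-fragment branching is to compile a dedicated fragment shader per pixel format and relink only when the format changes (which is what a helper like ``resetShader(int format)`` can be used for). A minimal sketch, assuming hypothetical shader resource paths:
+
+```cpp
+// Sketch: choose a branch-free fragment shader per AVPixelFormat instead of
+// branching on the format inside one big shader. The resource paths are placeholders.
+#include <QOpenGLShaderProgram>
+#include <QString>
+extern "C" {
+#include <libavutil/pixfmt.h>
+}
+
+static QString fragmentShaderFor(AVPixelFormat format)
+{
+    switch (format) {
+    case AV_PIX_FMT_YUV420P: return QStringLiteral(":/shader/video_yuv420p.frag");
+    case AV_PIX_FMT_NV12: return QStringLiteral(":/shader/video_nv12.frag");
+    case AV_PIX_FMT_RGBA: return QStringLiteral(":/shader/video_rgba.frag");
+    default: return QStringLiteral(":/shader/video_yuv420p.frag");
+    }
+}
+
+// Relink only when the format changes, so every frame runs a shader without format branches.
+static void rebuildProgram(QOpenGLShaderProgram &program, AVPixelFormat format)
+{
+    program.removeAllShaders();
+    program.addShaderFromSourceFile(QOpenGLShader::Vertex, QStringLiteral(":/shader/video.vert"));
+    program.addShaderFromSourceFile(QOpenGLShader::Fragment, fragmentShaderFor(format));
+    program.link();
+}
+```
+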
-### Ffmpeg(5.0) decodes subtitles differently from 4.4.3
+### How should the image be adjusted according to AVColorTransferCharacteristic?
+
+#### 1. When rendering with OpenGL, how should the shader be modified?
+
+#### 2. When not rendering with OpenGL, how should a filter be added to compensate the image?
-### Decoding subtitles (ffmpeg-n5.0):
+1. Contrast, saturation and brightness are currently adjusted for AVCOL_TRC_SMPTE2084, but the result is not very good.
+2. Without the adjustment the output matches ffplay and the whole image looks too dark; in Netflix videos, the "N" logo at the start is rendered a dark yellow. The values currently used are:
+```bash
+contrast = 1.4;
+saturation = 0.9;
+brightness = 0;
```
+
+3. The approach from [MPV video_shaders](https://github.com/mpv-player/mpv/blob/master/video/out/gpu/video_shaders.c#L341) was also tried, and the result is still not great; something must be missing. A sketch of the linearize / tone-map / delinearize idea follows the declarations below.
+
+```cpp
+void pass_linearize(struct gl_shader_cache *sc, enum mp_csp_trc trc);
+void pass_delinearize(struct gl_shader_cache *sc, enum mp_csp_trc trc);
+```
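+
+What ``pass_linearize``/``pass_delinearize`` amount to for PQ content is: apply the SMPTE ST 2084 EOTF to get linear light, tone map the result into SDR range, then re-apply an output transfer. A minimal C++ sketch of that math (the ST 2084 constants are the published ones; the Reinhard curve, the 203-nit reference white and the 2.2 output gamma are simplifying assumptions):
+
+```cpp
+// Sketch of PQ (SMPTE ST 2084) linearization plus a very simple tone map.
+// The same math could be ported into the fragment shader.
+#include <algorithm>
+#include <cmath>
+
+// ST 2084 EOTF: non-linear signal in [0,1] -> linear luminance in [0,1] (1.0 == 10000 nits).
+static double pqEotf(double e)
+{
+    constexpr double m1 = 2610.0 / 16384.0;
+    constexpr double m2 = 2523.0 / 4096.0 * 128.0;
+    constexpr double c1 = 3424.0 / 4096.0;
+    constexpr double c2 = 2413.0 / 4096.0 * 32.0;
+    constexpr double c3 = 2392.0 / 4096.0 * 32.0;
+    const double p = std::pow(std::clamp(e, 0.0, 1.0), 1.0 / m2);
+    return std::pow(std::max(p - c1, 0.0) / (c2 - c3 * p), 1.0 / m1);
+}
+
+// Map HDR linear light into SDR range with a Reinhard curve, assuming 203 nits as SDR reference white.
+static double toneMap(double linear)
+{
+    const double relative = linear * 10000.0 / 203.0;
+    return relative / (1.0 + relative);
+}
+
+// Re-apply a display transfer after tone mapping (plain 2.2 gamma as an approximation).
+static double delinearize(double linear)
+{
+    return std::pow(std::clamp(linear, 0.0, 1.0), 1.0 / 2.2);
+}
+```
+
+For the non-OpenGL path, the equivalent compensation can be done with a libavfilter chain along the lines of ``zscale=t=linear,tonemap=tonemap=hable,zscale=t=bt709:m=bt709`` (requires an FFmpeg build with libzimg; the exact options need checking against the build in use).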
+
+### How can image-quality enhancement be implemented when rendering with OpenGL?
+
+### Ffmpeg(5.0) decodes subtitles differently from 4.4.3
+
+#### Decoding subtitles (ffmpeg-n5.0)
+
+```bash
0,,en,,0000,0000,0000,,Peek-a-boo!
```
You must use ``ass_process_chunk`` and set the pts and duration yourself, as is done in libavfilter/vf_subtitles.c; a sketch follows.
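+
+A sketch of how libavfilter/vf_subtitles.c feeds decoded events to libass (``track`` is the ASS_Track, ``sub`` the decoded AVSubtitle; error handling omitted):
+
+```cpp
+// Pass each decoded ASS event to libass with an explicit start time and
+// duration in milliseconds, roughly as the "subtitles" filter does.
+extern "C" {
+#include <ass/ass.h>
+#include <libavcodec/avcodec.h>
+#include <libavutil/avutil.h>
+}
+#include <cstring>
+
+static void processSubtitle(ASS_Track *track, const AVSubtitle &sub)
+{
+    // AVSubtitle.pts is in AV_TIME_BASE units; libass expects milliseconds.
+    const long long startMs = av_rescale_q(sub.pts, av_make_q(1, AV_TIME_BASE), av_make_q(1, 1000));
+    const long long durationMs = sub.end_display_time; // already in milliseconds
+    for (unsigned i = 0; i < sub.num_rects; ++i) {
+        char *assLine = sub.rects[i]->ass; // e.g. "0,,en,,0000,0000,0000,,Peek-a-boo!"
+        if (assLine == nullptr) {
+            continue;
+        }
+        ass_process_chunk(track, assLine, static_cast<int>(std::strlen(assLine)), startMs, durationMs);
+    }
+}
+```
+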
-The standard ASS format should be (ffmpeg-n4.4.3):
+#### The standard ASS format should be (ffmpeg-n4.4.3)
-```
+```bash
Dialogue: 0,0:01:06.77,0:01:08.00,en,,0000,0000,0000,,Peek-a-boo!\r\n
```
@@ -33,7 +57,7 @@ Dialogue: 0,0:01:06.77,0:01:08.00,en,,0000,0000,0000,,Peek-a-boo!\r\n
### When using the subtitles filter, the subtitle display times are wrong
-```
+```bash
subtitles=filename='%1':original_size=%2x%3
```
@@ -48,6 +72,7 @@ subtitles=filename='%1':original_size=%2x%3
d_ptr->codecCtx->flags |= AV_CODEC_FLAG_QSCALE;
d_ptr->codecCtx->global_quality = FF_QP2LAMBDA * quailty;
```
+
3. Setting ``crf`` has no effect. The code is as follows:
```C++
@@ -65,23 +90,13 @@ transcodeCtx->audioPts += frame->nb_samples;
### [New Bing's video transcoding suggestions](./doc/bing_transcode.md)
-## SwsContext is much better than QImage for pixel format conversion and scaling.
+## SwsContext is much better than QImage for pixel format conversion and scaling
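+
+A minimal sketch of the libswscale path (hypothetical helper; mapping ``AV_PIX_FMT_BGRA`` to QImage::Format_RGB32 assumes a little-endian host):
+
+```cpp
+// Convert a decoded AVFrame straight to QImage::Format_RGB32 with sws_scale,
+// instead of building a QImage in the source format and converting/scaling it.
+extern "C" {
+#include <libavutil/frame.h>
+#include <libswscale/swscale.h>
+}
+#include <QImage>
+
+static QImage toRgb32(const AVFrame *frame)
+{
+    QImage image(frame->width, frame->height, QImage::Format_RGB32);
+    SwsContext *ctx = sws_getContext(frame->width, frame->height,
+                                     static_cast<AVPixelFormat>(frame->format),
+                                     frame->width, frame->height,
+                                     AV_PIX_FMT_BGRA,
+                                     SWS_BILINEAR, nullptr, nullptr, nullptr);
+    uint8_t *dstData[4] = {image.bits(), nullptr, nullptr, nullptr};
+    int dstLinesize[4] = {static_cast<int>(image.bytesPerLine()), 0, 0, 0};
+    sws_scale(ctx, frame->data, frame->linesize, 0, frame->height, dstData, dstLinesize);
+    sws_freeContext(ctx);
+    return image;
+}
+```
+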
## QT-BUG
-### Failed to set up resampler
-
-[it's a bug in Qt 6.4.1 on Windows](https://forum.qt.io/topic/140523/qt-6-x-error-message-qt-multimedia-audiooutput-failed-to-setup-resampler)
-https://bugreports.qt.io/browse/QTBUG-108383 (johnco3's bug report)
-https://bugreports.qt.io/browse/QTBUG-108669 (a duplicate bug report; I filed it before I found any of this)
-
-#### solution:
-
-https://stackoverflow.com/questions/74500509/failed-to-setup-resampler-when-starting-qaudiosink
-
#### After dynamically switching the video render from opengl to widget, there is still GPU 0 - 3D usage, and it is twice as high as with opengl!!! QT-BUG?
-### QOpenGLWidget memory leak when the window is moved, enlarged and shrunk; code as follows:
+### QOpenGLWidget memory leak when the window is moved, enlarged and shrunk; code as follows
```C++
int main(int argc, char *argv[])
diff --git a/doc/bing_transcode.md b/doc/bing_transcode.md
index 439a2fc..c2e268f 100644
--- a/doc/bing_transcode.md
+++ b/doc/bing_transcode.md
@@ -139,4 +139,4 @@ ffmpeg -i input.mp4 -c:v libx264 -crf 22 -preset slow -tune zerolatency -profile
```bash
# Encode input.mp4 to output.mkv with resolution=1280x720, bitrate=2000000, framerate=30, codec=libx265
ffmpeg -i input.mp4 -c:v libx265 -s 1280x720 -b:v 2000000 -r 30 output.mkv
-```
\ No newline at end of file
+```
diff --git a/ffmpeg/colorspace.hpp b/ffmpeg/colorspace.hpp
index 96d7445..546bd3c 100644
--- a/ffmpeg/colorspace.hpp
+++ b/ffmpeg/colorspace.hpp
@@ -25,7 +25,7 @@ static constexpr std::array kBT709Matrix
static constexpr QVector3D kBT2020ffset = {-0.0627451017, -0.501960814, -0.501960814};
static constexpr std::array kBT2020Matrix
- = {1.1644, 1.1644, 1.1644, 0.000, -0.187326f, 2.141772f, 1.678674f, -0.650424f, 0.000};
+ = {1.1678, 1.1678, 1.1678, 0.0000, -0.1879, 2.1481, 1.6836, -0.6523, 0.0000};
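+// The values above appear to be the limited-range BT.2020 YCbCr -> RGB matrix with
+// 10-bit scaling (luma 1023/876, chroma 1023/896) instead of the 8-bit 255/219 and 255/224.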
} // namespace Ffmpeg::ColorSpace
diff --git a/ffmpeg/videoframeconverter.cc b/ffmpeg/videoframeconverter.cc
index f12d1d8..dcd62f2 100644
--- a/ffmpeg/videoframeconverter.cc
+++ b/ffmpeg/videoframeconverter.cc
@@ -67,9 +67,9 @@ VideoFrameConverter::VideoFrameConverter(CodecContext *codecCtx,
d_ptr->dst_pix_fmt,
d_ptr->dstSize.width() > ctx->width ? SWS_BICUBIC
: SWS_BILINEAR,
- NULL,
- NULL,
- NULL);
+ nullptr,
+ nullptr,
+ nullptr);
Q_ASSERT(d_ptr->swsContext != nullptr);
}
@@ -106,9 +106,9 @@ void VideoFrameConverter::flush(Frame *frame, const QSize &dstSize, AVPixelForma
d_ptr->dst_pix_fmt,
d_ptr->dstSize.width() > avFrame->width ? SWS_BICUBIC
: SWS_BILINEAR,
- NULL,
- NULL,
- NULL);
+ nullptr,
+ nullptr,
+ nullptr);
Q_ASSERT(d_ptr->swsContext != nullptr);
}
diff --git a/ffmpeg/videoframeconverter.hpp b/ffmpeg/videoframeconverter.hpp
index 4e743fd..1b76526 100644
--- a/ffmpeg/videoframeconverter.hpp
+++ b/ffmpeg/videoframeconverter.hpp
@@ -20,9 +20,9 @@ class VideoFrameConverter : public QObject
AVPixelFormat pix_fmt = AV_PIX_FMT_RGBA,
QObject *parent = nullptr);
explicit VideoFrameConverter(Frame *frame,
- const QSize &size = QSize(-1, -1),
- AVPixelFormat pix_fmt = AV_PIX_FMT_RGBA,
- QObject *parent = nullptr);
+ const QSize &size = QSize(-1, -1),
+ AVPixelFormat pix_fmt = AV_PIX_FMT_RGBA,
+ QObject *parent = nullptr);
~VideoFrameConverter() override;
void flush(Frame *frame,
diff --git a/ffmpeg/videorender/openglrender.cc b/ffmpeg/videorender/openglrender.cc
index 09c0ba2..b170e9f 100644
--- a/ffmpeg/videorender/openglrender.cc
+++ b/ffmpeg/videorender/openglrender.cc
@@ -196,6 +196,7 @@ void OpenglRender::setColorSpace()
d_ptr->programPtr->setUniformValue("offset", ColorSpace::kBT2020ffset);
d_ptr->programPtr->setUniformValue("colorConversion",
QMatrix3x3(ColorSpace::kBT2020Matrix.data()));
+
break;
//case AVCOL_SPC_BT709:
default:
@@ -206,6 +207,25 @@ void OpenglRender::setColorSpace()
}
}
+void OpenglRender::setColorTrc()
+{
+ GLfloat contrast = 1.0;
+ GLfloat saturation = 0;
+ GLfloat brightness = 0;
+ auto *avFrame = d_ptr->framePtr->avFrame();
+ switch (avFrame->color_trc) {
+ case AVCOL_TRC_SMPTE2084: // fake hdr
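+        // Rough compensation only: PQ (SMPTE 2084) content would really need
+        // linearization and tone mapping (see the README notes) rather than a contrast/saturation boost.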
+ contrast = 1.4;
+ saturation = 0.9;
+ brightness = 0;
+ break;
+ default: break;
+ }
+ d_ptr->programPtr->setUniformValue("contrast", contrast);
+ d_ptr->programPtr->setUniformValue("saturation", saturation);
+ d_ptr->programPtr->setUniformValue("brightness", brightness);
+}
+
auto OpenglRender::fitToScreen(const QSize &size) -> QMatrix4x4
{
auto factor_w = static_cast<float>(width()) / size.width();
@@ -357,6 +377,7 @@ void OpenglRender::paintVideoFrame()
d_ptr->programPtr->bind(); // bind the shader program
d_ptr->programPtr->setUniformValue("transform", fitToScreen({avFrame->width, avFrame->height}));
setColorSpace();
+ setColorTrc();
draw();
d_ptr->programPtr->release();
d_ptr->frameChanged = false;
diff --git a/ffmpeg/videorender/openglrender.hpp b/ffmpeg/videorender/openglrender.hpp
index 59676cf..af25333 100644
--- a/ffmpeg/videorender/openglrender.hpp
+++ b/ffmpeg/videorender/openglrender.hpp
@@ -39,7 +39,6 @@ class FFMPEG_EXPORT OpenglRender : public VideoRender,
void draw();
void initTexture();
void initSubTexture();
- void setColorSpace();
auto fitToScreen(const QSize &size) -> QMatrix4x4;
void cleanup();
void resetShader(int format);
@@ -49,6 +48,8 @@ class FFMPEG_EXPORT OpenglRender : public VideoRender,
void paintVideoFrame();
void paintSubTitleFrame();
+ void setColorSpace();
+ void setColorTrc();
void updateYUV420P();
void updateYUYV422();
diff --git a/ffmpeg/videorender/shader/video_nv12.frag b/ffmpeg/videorender/shader/video_nv12.frag
index d60d3c8..def5a1b 100644
--- a/ffmpeg/videorender/shader/video_nv12.frag
+++ b/ffmpeg/videorender/shader/video_nv12.frag
@@ -1,7 +1,7 @@
#version 330 core
-in vec2 TexCord; // texture coordinates
-out vec4 FragColor; // output color
+in vec2 TexCord;    // texture coordinates
+out vec4 FragColor; // output color
uniform sampler2D tex_y;
uniform sampler2D tex_u;
@@ -9,6 +9,25 @@ uniform sampler2D tex_u;
uniform vec3 offset;
uniform mat3 colorConversion;
+uniform float contrast;
+uniform float saturation;
+uniform float brightness;
+
+vec3 adjustContrast(vec3 rgb, float contrast) // adjust contrast
+{
+ return clamp((rgb - vec3(0.5)) * contrast + vec3(0.5), vec3(0.0), vec3(1.0));
+}
+
+vec3 adjustSaturation(vec3 rgb, float saturation) // adjust saturation (0.0 leaves the image unchanged)
+{
+ return mix(vec3(dot(rgb, vec3(0.2126, 0.7152, 0.0722))), rgb, 1.0 + saturation);
+}
+
+vec3 adjustBrightness(vec3 rgb, float brightness) // adjust brightness
+{
+ return clamp(rgb + vec3(brightness), vec3(0.0), vec3(1.0));
+}
+
void main()
{
vec3 yuv;
@@ -20,5 +39,9 @@ void main()
yuv += offset;
rgb = yuv * colorConversion;
+ rgb = adjustContrast(rgb, contrast);
+ rgb = adjustSaturation(rgb, saturation);
+ rgb = adjustBrightness(rgb, brightness);
+
FragColor = vec4(rgb, 1.0);
}
diff --git a/ffmpeg/videorender/widgetrender.cc b/ffmpeg/videorender/widgetrender.cc
index 615543f..2315c92 100644
--- a/ffmpeg/videorender/widgetrender.cc
+++ b/ffmpeg/videorender/widgetrender.cc
@@ -16,10 +16,10 @@ namespace Ffmpeg {
class WidgetRender::WidgetRenderPrivate
{
public:
- WidgetRenderPrivate(QWidget *parent)
+ explicit WidgetRenderPrivate(QWidget *parent)
: owner(parent)
{}
- ~WidgetRenderPrivate() {}
+ ~WidgetRenderPrivate() = default;
QWidget *owner;
@@ -42,7 +42,7 @@ WidgetRender::WidgetRender(QWidget *parent)
, d_ptr(new WidgetRenderPrivate(this))
{}
-WidgetRender::~WidgetRender() {}
+WidgetRender::~WidgetRender() = default;
bool WidgetRender::isSupportedOutput_pix_fmt(AVPixelFormat pix_fmt)
{