Pārlūkot izejas kodu

Merge pull request #6158 from jpark37/hdr-format-trc

Add I010/P010 support, video_trc enum
Jim 3 gadi atpakaļ
vecāks
revīzija
7f990677c0
35 mainīti faili ar 1091 papildinājumiem un 107 dzēšanām
  1. 5 3
      deps/media-playback/media-playback/closest-format.h
  2. 14 4
      deps/media-playback/media-playback/media.c
  3. 26 0
      docs/sphinx/reference-libobs-media-io.rst
  4. 6 2
      docs/sphinx/reference-outputs.rst
  5. 26 2
      docs/sphinx/reference-sources.rst
  6. 55 0
      libobs/data/color.effect
  7. 51 1
      libobs/data/deinterlace_base.effect
  8. 1 1
      libobs/data/deinterlace_blend.effect
  9. 1 1
      libobs/data/deinterlace_blend_2x.effect
  10. 1 1
      libobs/data/deinterlace_discard.effect
  11. 1 1
      libobs/data/deinterlace_discard_2x.effect
  12. 1 1
      libobs/data/deinterlace_linear.effect
  13. 1 1
      libobs/data/deinterlace_linear_2x.effect
  14. 1 1
      libobs/data/deinterlace_yadif.effect
  15. 1 1
      libobs/data/deinterlace_yadif_2x.effect
  16. 338 0
      libobs/data/format_conversion.effect
  17. 37 0
      libobs/media-io/video-frame.c
  18. 20 2
      libobs/media-io/video-io.h
  19. 21 2
      libobs/media-io/video-scaler-ffmpeg.c
  20. 2 1
      libobs/obs-encoder.c
  21. 30 0
      libobs/obs-internal.h
  22. 46 8
      libobs/obs-source-deinterlace.c
  23. 129 17
      libobs/obs-source.c
  24. 11 3
      libobs/obs-video-gpu-encode.c
  25. 62 0
      libobs/obs-video.c
  26. 109 0
      libobs/obs.c
  27. 3 0
      libobs/obs.h
  28. 21 4
      plugins/obs-ffmpeg/obs-ffmpeg-av1.c
  29. 4 36
      plugins/obs-ffmpeg/obs-ffmpeg-formats.h
  30. 9 0
      plugins/obs-ffmpeg/obs-ffmpeg-mux.c
  31. 12 3
      plugins/obs-ffmpeg/obs-ffmpeg-nvenc.c
  32. 15 8
      plugins/obs-ffmpeg/obs-ffmpeg-output.c
  33. 3 0
      plugins/win-dshow/data/locale/en-US.ini
  34. 7 0
      plugins/win-dshow/ffmpeg-decode.c
  35. 21 3
      plugins/win-dshow/win-dshow.cpp

+ 5 - 3
deps/media-playback/media-playback/closest-format.h

@@ -58,10 +58,12 @@ static enum AVPixelFormat closest_format(enum AVPixelFormat fmt)
 		return AV_PIX_FMT_NV12;
 
 	case AV_PIX_FMT_YUV420P:
-	case AV_PIX_FMT_YUVJ420P:
+	case AV_PIX_FMT_YUV410P:
 	case AV_PIX_FMT_YUV411P:
+	case AV_PIX_FMT_YUVJ420P:
 	case AV_PIX_FMT_UYYVYY411:
-	case AV_PIX_FMT_YUV410P:
+		return AV_PIX_FMT_YUV420P;
+
 	case AV_PIX_FMT_YUV420P16LE:
 	case AV_PIX_FMT_YUV420P16BE:
 	case AV_PIX_FMT_YUV420P9BE:
@@ -72,7 +74,7 @@ static enum AVPixelFormat closest_format(enum AVPixelFormat fmt)
 	case AV_PIX_FMT_YUV420P12LE:
 	case AV_PIX_FMT_YUV420P14BE:
 	case AV_PIX_FMT_YUV420P14LE:
-		return AV_PIX_FMT_YUV420P;
+		return AV_PIX_FMT_YUV420P10LE;
 
 	case AV_PIX_FMT_YUVA420P:
 		return AV_PIX_FMT_YUVA420P;

+ 14 - 4
deps/media-playback/media-playback/media.c

@@ -34,8 +34,6 @@ static inline enum video_format convert_pixel_format(int f)
 		return VIDEO_FORMAT_NONE;
 	case AV_PIX_FMT_YUV420P:
 		return VIDEO_FORMAT_I420;
-	case AV_PIX_FMT_NV12:
-		return VIDEO_FORMAT_NV12;
 	case AV_PIX_FMT_YUYV422:
 		return VIDEO_FORMAT_YUY2;
 	case AV_PIX_FMT_YUV422P:
@@ -44,18 +42,24 @@ static inline enum video_format convert_pixel_format(int f)
 		return VIDEO_FORMAT_I444;
 	case AV_PIX_FMT_UYVY422:
 		return VIDEO_FORMAT_UYVY;
+	case AV_PIX_FMT_NV12:
+		return VIDEO_FORMAT_NV12;
 	case AV_PIX_FMT_RGBA:
 		return VIDEO_FORMAT_RGBA;
 	case AV_PIX_FMT_BGRA:
 		return VIDEO_FORMAT_BGRA;
-	case AV_PIX_FMT_BGR0:
-		return VIDEO_FORMAT_BGRX;
 	case AV_PIX_FMT_YUVA420P:
 		return VIDEO_FORMAT_I40A;
+	case AV_PIX_FMT_YUV420P10LE:
+		return VIDEO_FORMAT_I010;
 	case AV_PIX_FMT_YUVA422P:
 		return VIDEO_FORMAT_I42A;
 	case AV_PIX_FMT_YUVA444P:
 		return VIDEO_FORMAT_YUVA;
+	case AV_PIX_FMT_BGR0:
+		return VIDEO_FORMAT_BGRX;
+	case AV_PIX_FMT_P010LE:
+		return VIDEO_FORMAT_P010;
 	default:;
 	}
 
@@ -123,6 +127,9 @@ convert_color_space(enum AVColorSpace s, enum AVColorTransferCharacteristic trc)
 	case AVCOL_SPC_SMPTE170M:
 	case AVCOL_SPC_SMPTE240M:
 		return VIDEO_CS_601;
+	case AVCOL_SPC_BT2020_NCL:
+		return (trc == AVCOL_TRC_ARIB_STD_B67) ? VIDEO_CS_2020_HLG
+						       : VIDEO_CS_2020_PQ;
 	default:
 		return VIDEO_CS_DEFAULT;
 	}
@@ -437,6 +444,9 @@ static void mp_media_next_video(mp_media_t *m, bool preload)
 	frame->height = f->height;
 	frame->flip = flip;
 	frame->flags |= m->is_linear_alpha ? OBS_SOURCE_FRAME_LINEAR_ALPHA : 0;
+	frame->trc = (f->color_trc == AVCOL_TRC_ARIB_STD_B67)
+			     ? VIDEO_TRC_HLG
+			     : VIDEO_TRC_DEFAULT;
 
 	if (!m->is_local_file && !d->got_first_keyframe) {
 		if (!f->key_frame)

+ 26 - 0
docs/sphinx/reference-libobs-media-io.rst

@@ -33,6 +33,32 @@ Video Handler
 
    - VIDEO_FORMAT_I444
 
+   - VIDEO_FORMAT_BGR3
+
+   - VIDEO_FORMAT_I422
+
+   - VIDEO_FORMAT_I40A
+
+   - VIDEO_FORMAT_I42A
+
+   - VIDEO_FORMAT_YUVA
+
+   - VIDEO_FORMAT_AYUV
+
+   - VIDEO_FORMAT_I010
+   - VIDEO_FORMAT_P010
+
+---------------------
+
+.. type:: enum video_trc
+
+   Transfer characteristics.  Can be one of the following values:
+
+   - VIDEO_TRC_DEFAULT - sRGB TRC for SDR, PQ TRC for HDR
+   - VIDEO_TRC_SRGB    - sRGB TRC
+   - VIDEO_TRC_PQ      - PQ
+   - VIDEO_TRC_HLG     - HLG
+
 ---------------------
 
 .. type:: enum video_colorspace

+ 6 - 2
docs/sphinx/reference-outputs.rst

@@ -678,11 +678,11 @@ Functions used by outputs
    enum video_format {
            VIDEO_FORMAT_NONE,
    
-           /* planar 420 format */
+           /* planar 4:2:0 formats */
            VIDEO_FORMAT_I420, /* three-plane */
            VIDEO_FORMAT_NV12, /* two-plane, luma and packed chroma */
    
-           /* packed 422 formats */
+           /* packed 4:2:2 formats */
            VIDEO_FORMAT_YVYU,
            VIDEO_FORMAT_YUY2, /* YUYV */
            VIDEO_FORMAT_UYVY,
@@ -713,6 +713,10 @@ Functions used by outputs
    
            /* packed 4:4:4 with alpha */
            VIDEO_FORMAT_AYUV,
+   
+           /* planar 4:2:0 format, 10 bpp */
+           VIDEO_FORMAT_I010, /* three-plane */
+           VIDEO_FORMAT_P010, /* two-plane, luma and packed chroma */
    };
    
    enum video_colorspace {

+ 26 - 2
docs/sphinx/reference-sources.rst

@@ -1266,11 +1266,11 @@ Functions used by sources
    enum video_format {
            VIDEO_FORMAT_NONE,
 
-           /* planar 420 format */
+           /* planar 4:2:0 formats */
            VIDEO_FORMAT_I420, /* three-plane */
            VIDEO_FORMAT_NV12, /* two-plane, luma and packed chroma */
 
-           /* packed 422 formats */
+           /* packed 4:2:2 formats */
            VIDEO_FORMAT_YVYU,
            VIDEO_FORMAT_YUY2, /* YUYV */
            VIDEO_FORMAT_UYVY,
@@ -1283,6 +1283,28 @@ Functions used by sources
 
            /* planar 4:4:4 */
            VIDEO_FORMAT_I444,
+
+           /* more packed uncompressed formats */
+           VIDEO_FORMAT_BGR3,
+
+           /* planar 4:2:2 */
+           VIDEO_FORMAT_I422,
+
+           /* planar 4:2:0 with alpha */
+           VIDEO_FORMAT_I40A,
+
+           /* planar 4:2:2 with alpha */
+           VIDEO_FORMAT_I42A,
+
+           /* planar 4:4:4 with alpha */
+           VIDEO_FORMAT_YUVA,
+
+           /* packed 4:4:4 with alpha */
+           VIDEO_FORMAT_AYUV,
+
+           /* planar 4:2:0 format, 10 bpp */
+           VIDEO_FORMAT_I010, /* three-plane */
+           VIDEO_FORMAT_P010, /* two-plane, luma and packed chroma */
    };
 
    struct obs_source_frame {
@@ -1298,6 +1320,8 @@ Functions used by sources
            float               color_range_min[3];
            float               color_range_max[3];
            bool                flip;
+           uint8_t             flags;
+           uint8_t             trc; /* enum video_trc */
    };
 
 ---------------------

+ 55 - 0
libobs/data/color.effect

@@ -43,3 +43,58 @@ float3 reinhard(float3 rgb)
 {
 	return float3(reinhard_channel(rgb.r), reinhard_channel(rgb.g), reinhard_channel(rgb.b));
 }
+
+float linear_to_st2084_channel(float x)
+{
+	return pow((0.8359375 + 18.8515625 * pow(abs(x), 0.1593017578)) / (1.0 + 18.6875 * pow(abs(x), 0.1593017578)), 78.84375);
+}
+
+float3 linear_to_st2084(float3 rgb)
+{
+	return float3(linear_to_st2084_channel(rgb.r), linear_to_st2084_channel(rgb.g), linear_to_st2084_channel(rgb.b));
+}
+
+float st2084_to_linear_channel(float u)
+{
+	return pow(abs(max(pow(abs(u), 1.0 / 78.84375) - 0.8359375, 0.0) / (18.8515625 - 18.6875 * pow(abs(u), 1.0 / 78.84375))), 1.0 / 0.1593017578);
+}
+
+float3 st2084_to_linear(float3 v)
+{
+	return float3(st2084_to_linear_channel(v.r), st2084_to_linear_channel(v.g), st2084_to_linear_channel(v.b));
+}
+
+float linear_to_hlg_channel(float u)
+{
+	float ln2_i = 1.0 / log(2.0);
+	float m = 0.17883277 / ln2_i;
+    return (u <= (1.0 /12.0)) ? sqrt(3.0 * u) : ((log2((12.0 * u) - 0.28466892) * m) + 0.55991073);
+}
+
+float3 linear_to_hlg(float3 rgb)
+{
+	rgb = saturate(rgb);
+	float yd = (0.2627 * rgb.r) + (0.678 * rgb.g) + (0.0593 * rgb.b);
+
+	// pow(0, exponent) can lead to NaN, use smallest positive normal number
+	yd = max(6.10352e-5, yd);
+
+	rgb *= pow(yd, -1.0 / 6.0);
+	return float3(linear_to_hlg_channel(rgb.r), linear_to_hlg_channel(rgb.g), linear_to_hlg_channel(rgb.b));
+}
+
+float hlg_to_linear_channel(float u)
+{
+	float ln2_i = 1.0 / log(2.0);
+	float m = ln2_i / 0.17883277;
+	float a = -ln2_i * 0.55991073 / 0.17883277;
+	return (u <= 0.5) ? ((u * u) / 3.0) : ((exp2(u * m + a) + 0.28466892) / 12.0);
+}
+
+float3 hlg_to_linear(float3 v)
+{
+	float3 rgb = float3(hlg_to_linear_channel(v.r), hlg_to_linear_channel(v.g), hlg_to_linear_channel(v.b));
+	float ys = dot(rgb, float3(0.2627, 0.678, 0.0593));
+	rgb *= pow(ys, 0.2);
+	return rgb;
+}

+ 51 - 1
libobs/data/deinterlace_base.effect

@@ -16,8 +16,11 @@
  * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  */
 
+#include "color.effect"
+
 uniform float4x4 ViewProj;
 uniform texture2d image;
+uniform float multiplier;
 
 uniform texture2d previous_image;
 uniform float2 dimensions;
@@ -264,7 +267,30 @@ VertData VSDefault(VertData v_in)
 	return vert_out;
 }
 
-#define TECHNIQUE(rgba_ps) \
+#define TECHNIQUE(rgba_ps, rgba_ps_multiply, rgba_ps_tonemap, rgba_ps_multiply_tonemap) \
+float4 rgba_ps_multiply(VertData v_in) : TARGET \
+{ \
+	float4 rgba = rgba_ps(v_in); \
+	rgba.rgb *= multiplier; \
+	return rgba; \
+} \
+float4 rgba_ps_tonemap(VertData v_in) : TARGET \
+{ \
+	float4 rgba = rgba_ps(v_in); \
+	rgba.rgb = rec709_to_rec2020(rgba.rgb); \
+	rgba.rgb = reinhard(rgba.rgb); \
+	rgba.rgb = rec2020_to_rec709(rgba.rgb); \
+	return rgba; \
+} \
+float4 rgba_ps_multiply_tonemap(VertData v_in) : TARGET \
+{ \
+	float4 rgba = rgba_ps(v_in); \
+	rgba.rgb *= multiplier; \
+	rgba.rgb = rec709_to_rec2020(rgba.rgb); \
+	rgba.rgb = reinhard(rgba.rgb); \
+	rgba.rgb = rec2020_to_rec709(rgba.rgb); \
+	return rgba; \
+} \
 technique Draw \
 { \
 	pass \
@@ -272,4 +298,28 @@ technique Draw \
 		vertex_shader = VSDefault(v_in); \
 		pixel_shader  = rgba_ps(v_in); \
 	} \
+} \
+technique DrawMultiply \
+{ \
+	pass \
+	{ \
+		vertex_shader = VSDefault(v_in); \
+		pixel_shader  = rgba_ps_multiply(v_in); \
+	} \
+} \
+technique DrawTonemap \
+{ \
+	pass \
+	{ \
+		vertex_shader = VSDefault(v_in); \
+		pixel_shader  = rgba_ps_tonemap(v_in); \
+	} \
+} \
+technique DrawMultiplyTonemap \
+{ \
+	pass \
+	{ \
+		vertex_shader = VSDefault(v_in); \
+		pixel_shader  = rgba_ps_multiply_tonemap(v_in); \
+	} \
 }

+ 1 - 1
libobs/data/deinterlace_blend.effect

@@ -18,4 +18,4 @@
 
 #include "deinterlace_base.effect"
 
-TECHNIQUE(PSBlendRGBA);
+TECHNIQUE(PSBlendRGBA, PSBlendRGBA_multiply, PSBlendRGBA_tonemap, PSBlendRGBA_multiply_tonemap);

+ 1 - 1
libobs/data/deinterlace_blend_2x.effect

@@ -18,4 +18,4 @@
 
 #include "deinterlace_base.effect"
 
-TECHNIQUE(PSBlendRGBA_2x);
+TECHNIQUE(PSBlendRGBA_2x, PSBlendRGBA_2x_multiply, PSBlendRGBA_2x_tonemap, PSBlendRGBA_2x_multiply_tonemap);

+ 1 - 1
libobs/data/deinterlace_discard.effect

@@ -18,4 +18,4 @@
 
 #include "deinterlace_base.effect"
 
-TECHNIQUE(PSDiscardRGBA);
+TECHNIQUE(PSDiscardRGBA, PSDiscardRGBA_multiply, PSDiscardRGBA_tonemap, PSDiscardRGBA_multiply_tonemap);

+ 1 - 1
libobs/data/deinterlace_discard_2x.effect

@@ -18,4 +18,4 @@
 
 #include "deinterlace_base.effect"
 
-TECHNIQUE(PSDiscardRGBA_2x);
+TECHNIQUE(PSDiscardRGBA_2x, PSDiscardRGBA_2x_multiply, PSDiscardRGBA_2x_tonemap, PSDiscardRGBA_2x_multiply_tonemap);

+ 1 - 1
libobs/data/deinterlace_linear.effect

@@ -18,4 +18,4 @@
 
 #include "deinterlace_base.effect"
 
-TECHNIQUE(PSLinearRGBA);
+TECHNIQUE(PSLinearRGBA, PSLinearRGBA_multiply, PSLinearRGBA_tonemap, PSLinearRGBA_multiply_tonemap);

+ 1 - 1
libobs/data/deinterlace_linear_2x.effect

@@ -18,4 +18,4 @@
 
 #include "deinterlace_base.effect"
 
-TECHNIQUE(PSLinearRGBA_2x);
+TECHNIQUE(PSLinearRGBA_2x, PSLinearRGBA_2x_multiply, PSLinearRGBA_2x_tonemap, PSLinearRGBA_2x_multiply_tonemap);

+ 1 - 1
libobs/data/deinterlace_yadif.effect

@@ -18,4 +18,4 @@
 
 #include "deinterlace_base.effect"
 
-TECHNIQUE(PSYadifMode0RGBA);
+TECHNIQUE(PSYadifMode0RGBA, PSYadifMode0RGBA_multiply, PSYadifMode0RGBA_tonemap, PSYadifMode0RGBA_multiply_tonemap);

+ 1 - 1
libobs/data/deinterlace_yadif_2x.effect

@@ -18,4 +18,4 @@
 
 #include "deinterlace_base.effect"
 
-TECHNIQUE(PSYadifMode0RGBA_2x);
+TECHNIQUE(PSYadifMode0RGBA_2x, PSYadifMode0RGBA_2x_multiply, PSYadifMode0RGBA_2x_tonemap, PSYadifMode0RGBA_2x_multiply_tonemap);

+ 338 - 0
libobs/data/format_conversion.effect

@@ -15,12 +15,17 @@
     along with this program.  If not, see <http://www.gnu.org/licenses/>.
 ******************************************************************************/
 
+#include "color.effect"
+
 uniform float     width;
 uniform float     height;
 uniform float     width_i;
+uniform float     height_i;
 uniform float     width_d2;
 uniform float     height_d2;
 uniform float     width_x2_i;
+uniform float     maximum_over_sdr_white_nits;
+uniform float     sdr_white_nits_over_maximum;
 
 uniform float4    color_vec0;
 uniform float4    color_vec1;
@@ -58,6 +63,11 @@ struct VertTexPosWide {
 	float4 pos : POSITION;
 };
 
+struct VertTexPosWideWide {
+	float4 uuvv : TEXCOORD0;
+	float4 pos : POSITION;
+};
+
 struct FragTex {
 	float2 uv : TEXCOORD0;
 };
@@ -70,6 +80,10 @@ struct FragTexWide {
 	float3 uuv : TEXCOORD0;
 };
 
+struct FragTexWideWide {
+	float4 uuvv : TEXCOORD0;
+};
+
 FragPos VSPos(uint id : VERTEXID)
 {
 	float idHigh = float(id >> 1);
@@ -101,6 +115,32 @@ VertTexPosWide VSTexPos_Left(uint id : VERTEXID)
 	return vert_out;
 }
 
+VertTexPosWideWide VSTexPos_TopLeft(uint id : VERTEXID)
+{
+	float idHigh = float(id >> 1);
+	float idLow = float(id & uint(1));
+
+	float x = idHigh * 4.0 - 1.0;
+	float y = idLow * 4.0 - 1.0;
+
+	float u_right = idHigh * 2.0;
+	float u_left = u_right - width_i;
+	float v_bottom;
+	float v_top;
+	if (obs_glsl_compile) {
+		v_bottom = idLow * 2.0;
+		v_top = v_bottom + height_i;
+	} else {
+		v_bottom = 1.0 - idLow * 2.0;
+		v_top = v_bottom - height_i;
+	}
+
+	VertTexPosWideWide vert_out;
+	vert_out.uuvv = float4(u_left, u_right, v_top, v_bottom);
+	vert_out.pos = float4(x, y, 0.0, 1.0);
+	return vert_out;
+}
+
 VertTexPos VSTexPosHalf_Reverse(uint id : VERTEXID)
 {
 	float idHigh = float(id >> 1);
@@ -159,6 +199,44 @@ float PS_Y(FragPos frag_in) : TARGET
 	return y;
 }
 
+float PS_PQ_Y_709_2020(FragPos frag_in) : TARGET
+{
+	float3 rgb = image.Load(int3(frag_in.pos.xy, 0)).rgb * sdr_white_nits_over_maximum;
+	rgb = rec709_to_rec2020(rgb);
+	rgb = linear_to_st2084(rgb);
+	float y = dot(color_vec0.xyz, rgb) + color_vec0.w;
+	y = (65472. / 65535.) * y + 0.00048828125; // set up truncation to 10 bits
+	return y;
+}
+
+float PS_HLG_Y_709_2020(FragPos frag_in) : TARGET
+{
+	float3 rgb = image.Load(int3(frag_in.pos.xy, 0)).rgb * sdr_white_nits_over_maximum;
+	rgb = rec709_to_rec2020(rgb);
+	rgb = linear_to_hlg(rgb);
+	float y = dot(color_vec0.xyz, rgb) + color_vec0.w;
+	y = (65472. / 65535.) * y + 0.00048828125; // set up truncation to 10 bits
+	return y;
+}
+
+float PS_I010_PQ_Y_709_2020(FragPos frag_in) : TARGET
+{
+	float3 rgb = image.Load(int3(frag_in.pos.xy, 0)).rgb * sdr_white_nits_over_maximum;
+	rgb = rec709_to_rec2020(rgb);
+	rgb = linear_to_st2084(rgb);
+	float y = dot(color_vec0.xyz, rgb) + color_vec0.w;
+	return y * (1023. / 65535.);
+}
+
+float PS_I010_HLG_Y_709_2020(FragPos frag_in) : TARGET
+{
+	float3 rgb = image.Load(int3(frag_in.pos.xy, 0)).rgb * sdr_white_nits_over_maximum;
+	rgb = rec709_to_rec2020(rgb);
+	rgb = linear_to_hlg(rgb);
+	float y = dot(color_vec0.xyz, rgb) + color_vec0.w;
+	return y * (1023. / 65535.);
+}
+
 float2 PS_UV_Wide(FragTexWide frag_in) : TARGET
 {
 	float3 rgb_left = image.Sample(def_sampler, frag_in.uuv.xz).rgb;
@@ -169,6 +247,38 @@ float2 PS_UV_Wide(FragTexWide frag_in) : TARGET
 	return float2(u, v);
 }
 
+float2 PS_PQ_UV_709_2020_WideWide(FragTexWideWide frag_in) : TARGET
+{
+	float3 rgb_topleft = image.Sample(def_sampler, frag_in.uuvv.xz).rgb;
+	float3 rgb_topright = image.Sample(def_sampler, frag_in.uuvv.yz).rgb;
+	float3 rgb_bottomleft = image.Sample(def_sampler, frag_in.uuvv.xw).rgb;
+	float3 rgb_bottomright = image.Sample(def_sampler, frag_in.uuvv.yw).rgb;
+	float3 rgb = (rgb_topleft + rgb_topright + rgb_bottomleft + rgb_bottomright) * (0.25 * sdr_white_nits_over_maximum);
+	rgb = rec709_to_rec2020(rgb);
+	rgb = linear_to_st2084(rgb);
+	float u = dot(color_vec1.xyz, rgb) + color_vec1.w;
+	float v = dot(color_vec2.xyz, rgb) + color_vec2.w;
+	float2 uv = float2(u, v);
+	uv = (65472. / 65535.) * uv + 0.00048828125; // set up truncation to 10 bits
+	return uv;
+}
+
+float2 PS_HLG_UV_709_2020_WideWide(FragTexWideWide frag_in) : TARGET
+{
+	float3 rgb_topleft = image.Sample(def_sampler, frag_in.uuvv.xz).rgb;
+	float3 rgb_topright = image.Sample(def_sampler, frag_in.uuvv.yz).rgb;
+	float3 rgb_bottomleft = image.Sample(def_sampler, frag_in.uuvv.xw).rgb;
+	float3 rgb_bottomright = image.Sample(def_sampler, frag_in.uuvv.yw).rgb;
+	float3 rgb = (rgb_topleft + rgb_topright + rgb_bottomleft + rgb_bottomright) * (0.25 * sdr_white_nits_over_maximum);
+	rgb = rec709_to_rec2020(rgb);
+	rgb = linear_to_hlg(rgb);
+	float u = dot(color_vec1.xyz, rgb) + color_vec1.w;
+	float v = dot(color_vec2.xyz, rgb) + color_vec2.w;
+	float2 uv = float2(u, v);
+	uv = (65472. / 65535.) * uv + 0.00048828125; // set up truncation to 10 bits
+	return uv;
+}
+
 float PS_U(FragPos frag_in) : TARGET
 {
 	float3 rgb = image.Load(int3(frag_in.pos.xy, 0)).rgb;
@@ -201,6 +311,58 @@ float PS_V_Wide(FragTexWide frag_in) : TARGET
 	return v;
 }
 
+float PS_I010_PQ_U_709_2020_WideWide(FragTexWideWide frag_in) : TARGET
+{
+	float3 rgb_topleft = image.Sample(def_sampler, frag_in.uuvv.xz).rgb;
+	float3 rgb_topright = image.Sample(def_sampler, frag_in.uuvv.yz).rgb;
+	float3 rgb_bottomleft = image.Sample(def_sampler, frag_in.uuvv.xw).rgb;
+	float3 rgb_bottomright = image.Sample(def_sampler, frag_in.uuvv.yw).rgb;
+	float3 rgb = (rgb_topleft + rgb_topright + rgb_bottomleft + rgb_bottomright) * (0.25 * sdr_white_nits_over_maximum);
+	rgb = rec709_to_rec2020(rgb);
+	rgb = linear_to_st2084(rgb);
+	float u = dot(color_vec1.xyz, rgb) + color_vec1.w;
+	return u * (1023. / 65535.);
+}
+
+float PS_I010_HLG_U_709_2020_WideWide(FragTexWideWide frag_in) : TARGET
+{
+	float3 rgb_topleft = image.Sample(def_sampler, frag_in.uuvv.xz).rgb;
+	float3 rgb_topright = image.Sample(def_sampler, frag_in.uuvv.yz).rgb;
+	float3 rgb_bottomleft = image.Sample(def_sampler, frag_in.uuvv.xw).rgb;
+	float3 rgb_bottomright = image.Sample(def_sampler, frag_in.uuvv.yw).rgb;
+	float3 rgb = (rgb_topleft + rgb_topright + rgb_bottomleft + rgb_bottomright) * (0.25 * sdr_white_nits_over_maximum);
+	rgb = rec709_to_rec2020(rgb);
+	rgb = linear_to_hlg(rgb);
+	float u = dot(color_vec1.xyz, rgb) + color_vec1.w;
+	return u * (1023. / 65535.);
+}
+
+float PS_I010_PQ_V_709_2020_WideWide(FragTexWideWide frag_in) : TARGET
+{
+	float3 rgb_topleft = image.Sample(def_sampler, frag_in.uuvv.xz).rgb;
+	float3 rgb_topright = image.Sample(def_sampler, frag_in.uuvv.yz).rgb;
+	float3 rgb_bottomleft = image.Sample(def_sampler, frag_in.uuvv.xw).rgb;
+	float3 rgb_bottomright = image.Sample(def_sampler, frag_in.uuvv.yw).rgb;
+	float3 rgb = (rgb_topleft + rgb_topright + rgb_bottomleft + rgb_bottomright) * (0.25 * sdr_white_nits_over_maximum);
+	rgb = rec709_to_rec2020(rgb);
+	rgb = linear_to_st2084(rgb);
+	float v = dot(color_vec2.xyz, rgb) + color_vec2.w;
+	return v * (1023. / 65535.);
+}
+
+float PS_I010_HLG_V_709_2020_WideWide(FragTexWideWide frag_in) : TARGET
+{
+	float3 rgb_topleft = image.Sample(def_sampler, frag_in.uuvv.xz).rgb;
+	float3 rgb_topright = image.Sample(def_sampler, frag_in.uuvv.yz).rgb;
+	float3 rgb_bottomleft = image.Sample(def_sampler, frag_in.uuvv.xw).rgb;
+	float3 rgb_bottomright = image.Sample(def_sampler, frag_in.uuvv.yw).rgb;
+	float3 rgb = (rgb_topleft + rgb_topright + rgb_bottomleft + rgb_bottomright) * (0.25 * sdr_white_nits_over_maximum);
+	rgb = rec709_to_rec2020(rgb);
+	rgb = linear_to_hlg(rgb);
+	float v = dot(color_vec2.xyz, rgb) + color_vec2.w;
+	return v * (1023. / 65535.);
+}
+
 float3 YUV_to_RGB(float3 yuv)
 {
 	yuv = clamp(yuv, color_range_min, color_range_max);
@@ -333,6 +495,56 @@ float3 PSNV12_Reverse(VertTexPos frag_in) : TARGET
 	return rgb;
 }
 
+float4 PSI010_PQ_2020_709_Reverse(VertTexPos frag_in) : TARGET
+{
+	float ratio = 65535. / 1023.;
+	float y = image.Load(int3(frag_in.pos.xy, 0)).x * ratio;
+	int3 xy0_chroma = int3(frag_in.uv, 0);
+	float cb = image1.Load(xy0_chroma).x * ratio;
+	float cr = image2.Load(xy0_chroma).x * ratio;
+	float3 yuv = float3(y, cb, cr);
+	float3 pq = YUV_to_RGB(yuv);
+	float3 hdr2020 = st2084_to_linear(pq) * maximum_over_sdr_white_nits;
+	float3 rgb = rec2020_to_rec709(hdr2020);
+	return float4(rgb, 1.0);
+}
+
+float4 PSI010_HLG_2020_709_Reverse(VertTexPos frag_in) : TARGET
+{
+	float ratio = 65535. / 1023.;
+	float y = image.Load(int3(frag_in.pos.xy, 0)).x * ratio;
+	int3 xy0_chroma = int3(frag_in.uv, 0);
+	float cb = image1.Load(xy0_chroma).x * ratio;
+	float cr = image2.Load(xy0_chroma).x * ratio;
+	float3 yuv = float3(y, cb, cr);
+	float3 hlg = YUV_to_RGB(yuv);
+	float3 hdr2020 = hlg_to_linear(hlg) * maximum_over_sdr_white_nits;
+	float3 rgb = rec2020_to_rec709(hdr2020);
+	return float4(rgb, 1.0);
+}
+
+float4 PSP010_PQ_2020_709_Reverse(VertTexPos frag_in) : TARGET
+{
+	float y = image.Load(int3(frag_in.pos.xy, 0)).x;
+	float2 cbcr = image1.Load(int3(frag_in.uv, 0)).xy;
+	float3 yuv = float3(y, cbcr);
+	float3 pq = YUV_to_RGB(yuv);
+	float3 hdr2020 = st2084_to_linear(pq) * maximum_over_sdr_white_nits;
+	float3 rgb = rec2020_to_rec709(hdr2020);
+	return float4(rgb, 1.0);
+}
+
+float4 PSP010_HLG_2020_709_Reverse(VertTexPos frag_in) : TARGET
+{
+	float y = image.Load(int3(frag_in.pos.xy, 0)).x;
+	float2 cbcr = image1.Load(int3(frag_in.uv, 0)).xy;
+	float3 yuv = float3(y, cbcr);
+	float3 hlg = YUV_to_RGB(yuv);
+	float3 hdr2020 = hlg_to_linear(hlg) * maximum_over_sdr_white_nits;
+	float3 rgb = rec2020_to_rec709(hdr2020);
+	return float4(rgb, 1.0);
+}
+
 float3 PSY800_Limited(FragPos frag_in) : TARGET
 {
 	float limited = image.Load(int3(frag_in.pos.xy, 0)).x;
@@ -439,6 +651,96 @@ technique NV12_UV
 	}
 }
 
+technique I010_PQ_Y
+{
+	pass
+	{
+		vertex_shader = VSPos(id);
+		pixel_shader  = PS_I010_PQ_Y_709_2020(frag_in);
+	}
+}
+
+technique I010_HLG_Y
+{
+	pass
+	{
+		vertex_shader = VSPos(id);
+		pixel_shader  = PS_I010_HLG_Y_709_2020(frag_in);
+	}
+}
+
+technique I010_PQ_U
+{
+	pass
+	{
+		vertex_shader = VSTexPos_TopLeft(id);
+		pixel_shader  = PS_I010_PQ_U_709_2020_WideWide(frag_in);
+	}
+}
+
+technique I010_HLG_U
+{
+	pass
+	{
+		vertex_shader = VSTexPos_TopLeft(id);
+		pixel_shader  = PS_I010_HLG_U_709_2020_WideWide(frag_in);
+	}
+}
+
+technique I010_PQ_V
+{
+	pass
+	{
+		vertex_shader = VSTexPos_TopLeft(id);
+		pixel_shader  = PS_I010_PQ_V_709_2020_WideWide(frag_in);
+	}
+}
+
+technique I010_HLG_V
+{
+	pass
+	{
+		vertex_shader = VSTexPos_TopLeft(id);
+		pixel_shader  = PS_I010_HLG_V_709_2020_WideWide(frag_in);
+	}
+}
+
+technique P010_PQ_Y
+{
+	pass
+	{
+		vertex_shader = VSPos(id);
+		pixel_shader  = PS_PQ_Y_709_2020(frag_in);
+	}
+}
+
+technique P010_HLG_Y
+{
+	pass
+	{
+		vertex_shader = VSPos(id);
+		pixel_shader  = PS_HLG_Y_709_2020(frag_in);
+	}
+}
+
+technique P010_PQ_UV
+{
+	pass
+	{
+		vertex_shader = VSTexPos_TopLeft(id);
+		pixel_shader  = PS_PQ_UV_709_2020_WideWide(frag_in);
+	}
+}
+
+technique P010_HLG_UV
+{
+	pass
+	{
+		vertex_shader = VSTexPos_TopLeft(id);
+		pixel_shader  = PS_HLG_UV_709_2020_WideWide(frag_in);
+	}
+}
+
 technique UYVY_Reverse
 {
 	pass
@@ -538,6 +840,42 @@ technique NV12_Reverse
 	}
 }
 
+technique I010_PQ_2020_709_Reverse
+{
+	pass
+	{
+		vertex_shader = VSTexPosHalfHalf_Reverse(id);
+		pixel_shader  = PSI010_PQ_2020_709_Reverse(frag_in);
+	}
+}
+
+technique I010_HLG_2020_709_Reverse
+{
+	pass
+	{
+		vertex_shader = VSTexPosHalfHalf_Reverse(id);
+		pixel_shader  = PSI010_HLG_2020_709_Reverse(frag_in);
+	}
+}
+
+technique P010_PQ_2020_709_Reverse
+{
+	pass
+	{
+		vertex_shader = VSTexPosHalfHalf_Reverse(id);
+		pixel_shader  = PSP010_PQ_2020_709_Reverse(frag_in);
+	}
+}
+
+technique P010_HLG_2020_709_Reverse
+{
+	pass
+	{
+		vertex_shader = VSTexPosHalfHalf_Reverse(id);
+		pixel_shader  = PSP010_HLG_2020_709_Reverse(frag_in);
+	}
+}
+
 technique Y800_Limited
 {
 	pass

+ 37 - 0
libobs/media-io/video-frame.c

@@ -211,6 +211,41 @@ void video_frame_init(struct video_frame *frame, enum video_format format,
 		frame->linesize[2] = width;
 		frame->linesize[3] = width;
 		break;
+
+	case VIDEO_FORMAT_I010: {
+		size = width * height * 2;
+		ALIGN_SIZE(size, alignment);
+		offsets[0] = size;
+		const uint32_t half_width = (width + 1) / 2;
+		const uint32_t half_height = (height + 1) / 2;
+		const uint32_t quarter_area = half_width * half_height;
+		size += quarter_area * 2;
+		ALIGN_SIZE(size, alignment);
+		offsets[1] = size;
+		size += quarter_area * 2;
+		ALIGN_SIZE(size, alignment);
+		frame->data[0] = bmalloc(size);
+		frame->data[1] = (uint8_t *)frame->data[0] + offsets[0];
+		frame->data[2] = (uint8_t *)frame->data[0] + offsets[1];
+		frame->linesize[0] = width * 2;
+		frame->linesize[1] = half_width * 2;
+		frame->linesize[2] = half_width * 2;
+		break;
+	}
+
+	case VIDEO_FORMAT_P010: {
+		size = width * height * 2;
+		ALIGN_SIZE(size, alignment);
+		offsets[0] = size;
+		const uint32_t cbcr_width = (width + 1) & (UINT32_MAX - 1);
+		size += cbcr_width * ((height + 1) / 2) * 2;
+		ALIGN_SIZE(size, alignment);
+		frame->data[0] = bmalloc(size);
+		frame->data[1] = (uint8_t *)frame->data[0] + offsets[0];
+		frame->linesize[0] = width * 2;
+		frame->linesize[1] = cbcr_width * 2;
+		break;
+	}
 	}
 }
 
@@ -222,12 +257,14 @@ void video_frame_copy(struct video_frame *dst, const struct video_frame *src,
 		return;
 
 	case VIDEO_FORMAT_I420:
+	case VIDEO_FORMAT_I010:
 		memcpy(dst->data[0], src->data[0], src->linesize[0] * cy);
 		memcpy(dst->data[1], src->data[1], src->linesize[1] * cy / 2);
 		memcpy(dst->data[2], src->data[2], src->linesize[2] * cy / 2);
 		break;
 
 	case VIDEO_FORMAT_NV12:
+	case VIDEO_FORMAT_P010:
 		memcpy(dst->data[0], src->data[0], src->linesize[0] * cy);
 		memcpy(dst->data[1], src->data[1], src->linesize[1] * cy / 2);
 		break;

+ 20 - 2
libobs/media-io/video-io.h

@@ -18,6 +18,7 @@
 #pragma once
 
 #include "media-io-defs.h"
+#include "../util/c99defs.h"
 
 #ifdef __cplusplus
 extern "C" {
@@ -33,11 +34,11 @@ typedef struct video_output video_t;
 enum video_format {
 	VIDEO_FORMAT_NONE,
 
-	/* planar 420 format */
+	/* planar 4:2:0 formats */
 	VIDEO_FORMAT_I420, /* three-plane */
 	VIDEO_FORMAT_NV12, /* two-plane, luma and packed chroma */
 
-	/* packed 422 formats */
+	/* packed 4:2:2 formats */
 	VIDEO_FORMAT_YVYU,
 	VIDEO_FORMAT_YUY2, /* YUYV */
 	VIDEO_FORMAT_UYVY,
@@ -68,6 +69,17 @@ enum video_format {
 
 	/* packed 4:4:4 with alpha */
 	VIDEO_FORMAT_AYUV,
+
+	/* planar 4:2:0 format, 10 bpp */
+	VIDEO_FORMAT_I010, /* three-plane */
+	VIDEO_FORMAT_P010, /* two-plane, luma and packed chroma */
+};
+
+enum video_trc {
+	VIDEO_TRC_DEFAULT,
+	VIDEO_TRC_SRGB,
+	VIDEO_TRC_PQ,
+	VIDEO_TRC_HLG,
 };
 
 enum video_colorspace {
@@ -119,6 +131,8 @@ static inline bool format_is_yuv(enum video_format format)
 	case VIDEO_FORMAT_I42A:
 	case VIDEO_FORMAT_YUVA:
 	case VIDEO_FORMAT_AYUV:
+	case VIDEO_FORMAT_I010:
+	case VIDEO_FORMAT_P010:
 		return true;
 	case VIDEO_FORMAT_NONE:
 	case VIDEO_FORMAT_RGBA:
@@ -167,6 +181,10 @@ static inline const char *get_video_format_name(enum video_format format)
 		return "YUVA";
 	case VIDEO_FORMAT_AYUV:
 		return "AYUV";
+	case VIDEO_FORMAT_I010:
+		return "I010";
+	case VIDEO_FORMAT_P010:
+		return "P010";
 	case VIDEO_FORMAT_NONE:;
 	}
 

+ 21 - 2
libobs/media-io/video-scaler-ffmpeg.c

@@ -61,6 +61,10 @@ get_ffmpeg_video_format(enum video_format format)
 		return AV_PIX_FMT_YUVA422P;
 	case VIDEO_FORMAT_YUVA:
 		return AV_PIX_FMT_YUVA444P;
+	case VIDEO_FORMAT_I010:
+		return AV_PIX_FMT_YUV420P10LE;
+	case VIDEO_FORMAT_P010:
+		return AV_PIX_FMT_P010LE;
 	case VIDEO_FORMAT_NONE:
 	case VIDEO_FORMAT_YVYU:
 	case VIDEO_FORMAT_AYUV:
@@ -91,8 +95,23 @@ static inline int get_ffmpeg_scale_type(enum video_scale_type type)
 
 static inline const int *get_ffmpeg_coeffs(enum video_colorspace cs)
 {
-	const int colorspace = (cs == VIDEO_CS_601) ? SWS_CS_ITU601
-						    : SWS_CS_ITU709;
+	int colorspace = SWS_CS_ITU709;
+
+	switch (cs) {
+	case VIDEO_CS_DEFAULT:
+	case VIDEO_CS_709:
+	case VIDEO_CS_SRGB:
+	default:
+		colorspace = SWS_CS_ITU709;
+		break;
+	case VIDEO_CS_601:
+		colorspace = SWS_CS_ITU601;
+		break;
+	case VIDEO_CS_2020_PQ:
+	case VIDEO_CS_2020_HLG:
+		colorspace = SWS_CS_BT2020;
+	}
+
 	return sws_getCoefficients(colorspace);
 }
 

+ 2 - 1
libobs/obs-encoder.c

@@ -187,8 +187,9 @@ static inline bool has_scaling(const struct obs_encoder *encoder)
 
 static inline bool gpu_encode_available(const struct obs_encoder *encoder)
 {
+	struct obs_core_video *const video = &obs->video;
 	return (encoder->info.caps & OBS_ENCODER_CAP_PASS_TEXTURE) != 0 &&
-	       obs->video.using_nv12_tex;
+	       (video->using_p010_tex || video->using_nv12_tex);
 }
 
 static void add_connection(struct obs_encoder *encoder)

+ 30 - 0
libobs/obs-internal.h

@@ -261,6 +261,7 @@ struct obs_core_video {
 	bool textures_copied[NUM_TEXTURES];
 	bool texture_converted;
 	bool using_nv12_tex;
+	bool using_p010_tex;
 	struct circlebuf vframe_info_buffer;
 	struct circlebuf vframe_info_buffer_gpu;
 	gs_effect_t *default_effect;
@@ -303,6 +304,8 @@ struct obs_core_video {
 	const char *conversion_techs[NUM_CHANNELS];
 	bool conversion_needed;
 	float conversion_width_i;
+	float conversion_height_i;
+	float maximum_nits;
 
 	uint32_t output_width;
 	uint32_t output_height;
@@ -717,8 +720,11 @@ struct obs_source {
 	bool async_gpu_conversion;
 	enum video_format async_format;
 	bool async_full_range;
+	uint8_t async_trc;
+	enum gs_color_format async_color_format;
 	enum video_format async_cache_format;
 	bool async_cache_full_range;
+	uint8_t async_cache_trc;
 	enum gs_color_format async_texture_formats[MAX_AV_PLANES];
 	int async_channel_count;
 	long async_rotation;
@@ -883,11 +889,35 @@ convert_video_format(enum video_format format)
 	case VIDEO_FORMAT_YUVA:
 	case VIDEO_FORMAT_AYUV:
 		return GS_BGRA;
+	case VIDEO_FORMAT_I010:
+	case VIDEO_FORMAT_P010:
+		return GS_RGBA16F;
 	default:
 		return GS_BGRX;
 	}
 }
 
+static inline enum gs_color_space
+convert_video_space(enum video_format format, size_t count,
+		    const enum gs_color_space *preferred_spaces)
+{
+	enum gs_color_space video_space = GS_CS_SRGB;
+	switch (format) {
+	case VIDEO_FORMAT_I010:
+	case VIDEO_FORMAT_P010:
+		video_space = GS_CS_709_EXTENDED;
+	}
+
+	enum gs_color_space space = video_space;
+	for (size_t i = 0; i < count; ++i) {
+		space = preferred_spaces[i];
+		if (space == video_space)
+			break;
+	}
+
+	return space;
+}
+
 extern void obs_source_set_texcoords_centered(obs_source_t *source,
 					      bool centered);
 extern void obs_source_activate(obs_source_t *source, enum view_type type);

+ 46 - 8
libobs/obs-source-deinterlace.c

@@ -234,8 +234,8 @@ void deinterlace_process_last_frame(obs_source_t *s, uint64_t sys_time)
 void set_deinterlace_texture_size(obs_source_t *source)
 {
 	if (source->async_gpu_conversion) {
-		source->async_prev_texrender =
-			gs_texrender_create(GS_BGRX, GS_ZS_NONE);
+		source->async_prev_texrender = gs_texrender_create(
+			source->async_color_format, GS_ZS_NONE);
 
 		for (int c = 0; c < source->async_channel_count; c++)
 			source->async_prev_textures[c] = gs_texture_create(
@@ -370,10 +370,11 @@ void deinterlace_render(obs_source_t *s)
 {
 	gs_effect_t *effect = s->deinterlace_effect;
 
-	uint64_t frame2_ts;
 	gs_eparam_t *image = gs_effect_get_param_by_name(effect, "image");
 	gs_eparam_t *prev =
 		gs_effect_get_param_by_name(effect, "previous_image");
+	gs_eparam_t *multiplier_param =
+		gs_effect_get_param_by_name(effect, "multiplier");
 	gs_eparam_t *field = gs_effect_get_param_by_name(effect, "field_order");
 	gs_eparam_t *frame2 = gs_effect_get_param_by_name(effect, "frame2");
 	gs_eparam_t *dimensions =
@@ -392,10 +393,46 @@ void deinterlace_render(obs_source_t *s)
 	if (!cur_tex || !prev_tex || !s->async_width || !s->async_height)
 		return;
 
+	const enum gs_color_space source_space =
+		(s->async_color_format == GS_RGBA16F) ? GS_CS_709_EXTENDED
+						      : GS_CS_SRGB;
 	const bool linear_srgb =
-		gs_get_linear_srgb() ||
+		(source_space != GS_CS_SRGB) || gs_get_linear_srgb() ||
 		deinterlace_linear_required(s->deinterlace_mode);
 
+	const enum gs_color_space current_space = gs_get_color_space();
+	const char *tech_name = "Draw";
+	float multiplier = 1.0;
+	switch (source_space) {
+	case GS_CS_SRGB:
+		switch (current_space) {
+		case GS_CS_709_SCRGB:
+			tech_name = "DrawMultiply";
+			multiplier = obs_get_video_sdr_white_level() / 80.0f;
+		}
+		break;
+	case GS_CS_709_EXTENDED:
+		switch (current_space) {
+		case GS_CS_SRGB:
+			tech_name = "DrawTonemap";
+			break;
+		case GS_CS_709_SCRGB:
+			tech_name = "DrawMultiply";
+			multiplier = obs_get_video_sdr_white_level() / 80.0f;
+		}
+		break;
+	case GS_CS_709_SCRGB:
+		switch (current_space) {
+		case GS_CS_SRGB:
+			tech_name = "DrawMultiplyTonemap";
+			multiplier = 80.0f / obs_get_video_sdr_white_level();
+			break;
+		case GS_CS_709_EXTENDED:
+			tech_name = "DrawMultiply";
+			multiplier = 80.0f / obs_get_video_sdr_white_level();
+		}
+	}
+
 	const bool previous = gs_framebuffer_srgb_enabled();
 	gs_enable_framebuffer_srgb(linear_srgb);
 
@@ -407,15 +444,16 @@ void deinterlace_render(obs_source_t *s)
 		gs_effect_set_texture(prev, prev_tex);
 	}
 
+	gs_effect_set_float(multiplier_param, multiplier);
 	gs_effect_set_int(field, s->deinterlace_top_first);
 	gs_effect_set_vec2(dimensions, &size);
 
-	frame2_ts = s->deinterlace_frame_ts + s->deinterlace_offset +
-		    s->deinterlace_half_duration - TWOX_TOLERANCE;
-
+	const uint64_t frame2_ts =
+		s->deinterlace_frame_ts + s->deinterlace_offset +
+		s->deinterlace_half_duration - TWOX_TOLERANCE;
 	gs_effect_set_bool(frame2, obs->video.video_time >= frame2_ts);
 
-	while (gs_effect_loop(effect, "Draw"))
+	while (gs_effect_loop(effect, tech_name))
 		gs_draw_sprite(NULL, s->async_flip ? GS_FLIP_V : 0,
 			       s->async_width, s->async_height);
 

+ 129 - 17
libobs/obs-source.c

@@ -1550,10 +1550,14 @@ enum convert_type {
 	CONVERT_800,
 	CONVERT_RGB_LIMITED,
 	CONVERT_BGR3,
+	CONVERT_I010_PQ_2020_709,
+	CONVERT_I010_HLG_2020_709,
+	CONVERT_P010_PQ_2020_709,
+	CONVERT_P010_HLG_2020_709,
 };
 
 static inline enum convert_type get_convert_type(enum video_format format,
-						 bool full_range)
+						 bool full_range, uint8_t trc)
 {
 	switch (format) {
 	case VIDEO_FORMAT_I420:
@@ -1593,6 +1597,18 @@ static inline enum convert_type get_convert_type(enum video_format format,
 
 	case VIDEO_FORMAT_AYUV:
 		return CONVERT_444_A_PACK;
+
+	case VIDEO_FORMAT_I010: {
+		const bool hlg = trc == VIDEO_TRC_HLG;
+		return hlg ? CONVERT_I010_HLG_2020_709
+			   : CONVERT_I010_PQ_2020_709;
+	}
+
+	case VIDEO_FORMAT_P010: {
+		const bool hlg = trc == VIDEO_TRC_HLG;
+		return hlg ? CONVERT_P010_HLG_2020_709
+			   : CONVERT_P010_PQ_2020_709;
+	}
 	}
 
 	return CONVERT_NONE;
@@ -1791,10 +1807,48 @@ static inline bool set_bgr3_sizes(struct obs_source *source,
 	return true;
 }
 
+static inline bool set_i010_sizes(struct obs_source *source,
+				  const struct obs_source_frame *frame)
+{
+	const uint32_t width = frame->width;
+	const uint32_t height = frame->height;
+	const uint32_t half_width = (width + 1) / 2;
+	const uint32_t half_height = (height + 1) / 2;
+	source->async_convert_width[0] = width;
+	source->async_convert_width[1] = half_width;
+	source->async_convert_width[2] = half_width;
+	source->async_convert_height[0] = height;
+	source->async_convert_height[1] = half_height;
+	source->async_convert_height[2] = half_height;
+	source->async_texture_formats[0] = GS_R16;
+	source->async_texture_formats[1] = GS_R16;
+	source->async_texture_formats[2] = GS_R16;
+	source->async_channel_count = 3;
+	return true;
+}
+
+static inline bool set_p010_sizes(struct obs_source *source,
+				  const struct obs_source_frame *frame)
+{
+	const uint32_t width = frame->width;
+	const uint32_t height = frame->height;
+	const uint32_t half_width = (width + 1) / 2;
+	const uint32_t half_height = (height + 1) / 2;
+	source->async_convert_width[0] = width;
+	source->async_convert_width[1] = half_width;
+	source->async_convert_height[0] = height;
+	source->async_convert_height[1] = half_height;
+	source->async_texture_formats[0] = GS_R16;
+	source->async_texture_formats[1] = GS_RG16;
+	source->async_channel_count = 2;
+	return true;
+}
+
 static inline bool init_gpu_conversion(struct obs_source *source,
 				       const struct obs_source_frame *frame)
 {
-	switch (get_convert_type(frame->format, frame->full_range)) {
+	switch (get_convert_type(frame->format, frame->full_range,
+				 frame->trc)) {
 	case CONVERT_422_PACK:
 		return set_packed422_sizes(source, frame);
 
@@ -1831,6 +1885,14 @@ static inline bool init_gpu_conversion(struct obs_source *source,
 	case CONVERT_444_A_PACK:
 		return set_packed444_alpha_sizes(source, frame);
 
+	case CONVERT_I010_PQ_2020_709:
+	case CONVERT_I010_HLG_2020_709:
+		return set_i010_sizes(source, frame);
+
+	case CONVERT_P010_PQ_2020_709:
+	case CONVERT_P010_HLG_2020_709:
+		return set_p010_sizes(source, frame);
+
 	case CONVERT_NONE:
 		assert(false && "No conversion requested");
 		break;
@@ -1842,18 +1904,22 @@ bool set_async_texture_size(struct obs_source *source,
 			    const struct obs_source_frame *frame)
 {
 	enum convert_type cur =
-		get_convert_type(frame->format, frame->full_range);
+		get_convert_type(frame->format, frame->full_range, frame->trc);
 
+	const enum gs_color_format format = convert_video_format(frame->format);
 	if (source->async_width == frame->width &&
 	    source->async_height == frame->height &&
 	    source->async_format == frame->format &&
-	    source->async_full_range == frame->full_range)
+	    source->async_full_range == frame->full_range &&
+	    source->async_trc == frame->trc &&
+	    source->async_color_format == format)
 		return true;
 
 	source->async_width = frame->width;
 	source->async_height = frame->height;
 	source->async_format = frame->format;
 	source->async_full_range = frame->full_range;
+	source->async_trc = frame->trc;
 
 	gs_enter_context(obs->video.graphics);
 
@@ -1869,7 +1935,7 @@ bool set_async_texture_size(struct obs_source *source,
 	source->async_texrender = NULL;
 	source->async_prev_texrender = NULL;
 
-	const enum gs_color_format format = convert_video_format(frame->format);
+	source->async_color_format = format;
 	const bool async_gpu_conversion = (cur != CONVERT_NONE) &&
 					  init_gpu_conversion(source, frame);
 	source->async_gpu_conversion = async_gpu_conversion;
@@ -1900,7 +1966,8 @@ bool set_async_texture_size(struct obs_source *source,
 static void upload_raw_frame(gs_texture_t *tex[MAX_AV_PLANES],
 			     const struct obs_source_frame *frame)
 {
-	switch (get_convert_type(frame->format, frame->full_range)) {
+	switch (get_convert_type(frame->format, frame->full_range,
+				 frame->trc)) {
 	case CONVERT_422_PACK:
 	case CONVERT_800:
 	case CONVERT_RGB_LIMITED:
@@ -1913,6 +1980,10 @@ static void upload_raw_frame(gs_texture_t *tex[MAX_AV_PLANES],
 	case CONVERT_422_A:
 	case CONVERT_444_A:
 	case CONVERT_444_A_PACK:
+	case CONVERT_I010_PQ_2020_709:
+	case CONVERT_I010_HLG_2020_709:
+	case CONVERT_P010_PQ_2020_709:
+	case CONVERT_P010_HLG_2020_709:
 		for (size_t c = 0; c < MAX_AV_PLANES; c++) {
 			if (tex[c])
 				gs_texture_set_image(tex[c], frame->data[c],
@@ -1927,7 +1998,7 @@ static void upload_raw_frame(gs_texture_t *tex[MAX_AV_PLANES],
 }
 
 static const char *select_conversion_technique(enum video_format format,
-					       bool full_range)
+					       bool full_range, uint8_t trc)
 {
 	switch (format) {
 	case VIDEO_FORMAT_UYVY:
@@ -1969,6 +2040,18 @@ static const char *select_conversion_technique(enum video_format format,
 	case VIDEO_FORMAT_AYUV:
 		return "AYUV_Reverse";
 
+	case VIDEO_FORMAT_I010: {
+		const bool hlg = trc == VIDEO_TRC_HLG;
+		return hlg ? "I010_HLG_2020_709_Reverse"
+			   : "I010_PQ_2020_709_Reverse";
+	}
+
+	case VIDEO_FORMAT_P010: {
+		const bool hlg = trc == VIDEO_TRC_HLG;
+		return hlg ? "P010_HLG_2020_709_Reverse"
+			   : "P010_PQ_2020_709_Reverse";
+	}
+
 	case VIDEO_FORMAT_BGRA:
 	case VIDEO_FORMAT_BGRX:
 	case VIDEO_FORMAT_RGBA:
@@ -1982,6 +2065,11 @@ static const char *select_conversion_technique(enum video_format format,
 	return NULL;
 }
 
+static bool need_linear_output(enum video_format format)
+{
+	return (format == VIDEO_FORMAT_I010) || (format == VIDEO_FORMAT_P010);
+}
+
 static inline void set_eparam(gs_effect_t *effect, const char *name, float val)
 {
 	gs_eparam_t *param = gs_effect_get_param_by_name(effect, name);
@@ -2008,14 +2096,18 @@ static bool update_async_texrender(struct obs_source *source,
 	uint32_t cx = source->async_width;
 	uint32_t cy = source->async_height;
 
+	const char *tech_name = select_conversion_technique(
+		frame->format, frame->full_range, frame->trc);
 	gs_effect_t *conv = obs->video.conversion_effect;
-	const char *tech_name =
-		select_conversion_technique(frame->format, frame->full_range);
 	gs_technique_t *tech = gs_effect_get_technique(conv, tech_name);
+	const bool linear = need_linear_output(frame->format);
 
 	const bool success = gs_texrender_begin(texrender, cx, cy);
 
 	if (success) {
+		const bool previous = gs_framebuffer_srgb_enabled();
+		gs_enable_framebuffer_srgb(linear);
+
 		gs_enable_blending(false);
 
 		gs_technique_begin(tech);
@@ -2043,6 +2135,11 @@ static bool update_async_texrender(struct obs_source *source,
 		set_eparam(conv, "height_d2", (float)cy * 0.5f);
 		set_eparam(conv, "width_x2_i", 0.5f / (float)cx);
 
+		const float maximum_nits =
+			(frame->trc == VIDEO_TRC_HLG) ? 1000.f : 10000.f;
+		set_eparam(conv, "maximum_over_sdr_white_nits",
+			   maximum_nits / obs_get_video_sdr_white_level());
+
 		struct vec4 vec0, vec1, vec2;
 		vec4_set(&vec0, frame->color_matrix[0], frame->color_matrix[1],
 			 frame->color_matrix[2], frame->color_matrix[3]);
@@ -2074,6 +2171,8 @@ static bool update_async_texrender(struct obs_source *source,
 
 		gs_enable_blending(true);
 
+		gs_enable_framebuffer_srgb(previous);
+
 		gs_texrender_end(texrender);
 	}
 
@@ -2104,7 +2203,7 @@ bool update_async_textures(struct obs_source *source,
 	if (source->async_gpu_conversion && texrender)
 		return update_async_texrender(source, frame, tex, texrender);
 
-	type = get_convert_type(frame->format, frame->full_range);
+	type = get_convert_type(frame->format, frame->full_range, frame->trc);
 	if (type == CONVERT_NONE) {
 		gs_texture_set_image(tex[0], frame->data[0], frame->linesize[0],
 				     false);
@@ -2259,7 +2358,10 @@ static inline void obs_source_render_async_video(obs_source_t *source)
 			obs_get_base_effect(OBS_EFFECT_DEFAULT);
 		const char *tech_name = "Draw";
 		float multiplier = 1.0;
-		const enum gs_color_space source_space = GS_CS_SRGB;
+		const enum gs_color_space source_space =
+			(source->async_color_format == GS_RGBA16F)
+				? GS_CS_709_EXTENDED
+				: GS_CS_SRGB;
 		const enum gs_color_space current_space = gs_get_color_space();
 		bool linear_srgb = gs_get_linear_srgb();
 		switch (source_space) {
@@ -2690,8 +2792,10 @@ obs_source_get_color_space(obs_source_t *source, size_t count,
 				source->filter_parent, count, preferred_spaces);
 	}
 
-	if (source->info.output_flags & OBS_SOURCE_ASYNC)
-		return GS_CS_SRGB;
+	if (source->info.output_flags & OBS_SOURCE_ASYNC) {
+		return convert_video_space(source->async_format, count,
+					   preferred_spaces);
+	}
 
 	assert(source->context.data);
 	return source->info.video_get_color_space
@@ -3011,6 +3115,7 @@ static void copy_frame_data(struct obs_source_frame *dst,
 {
 	dst->flip = src->flip;
 	dst->flags = src->flags;
+	dst->trc = src->trc;
 	dst->full_range = src->full_range;
 	dst->timestamp = src->timestamp;
 	memcpy(dst->color_matrix, src->color_matrix, sizeof(float) * 16);
@@ -3021,7 +3126,8 @@ static void copy_frame_data(struct obs_source_frame *dst,
 	}
 
 	switch (src->format) {
-	case VIDEO_FORMAT_I420: {
+	case VIDEO_FORMAT_I420:
+	case VIDEO_FORMAT_I010: {
 		const uint32_t height = dst->height;
 		const uint32_t half_height = (height + 1) / 2;
 		copy_frame_data_plane(dst, src, 0, height);
@@ -3030,7 +3136,8 @@ static void copy_frame_data(struct obs_source_frame *dst,
 		break;
 	}
 
-	case VIDEO_FORMAT_NV12: {
+	case VIDEO_FORMAT_NV12:
+	case VIDEO_FORMAT_P010: {
 		const uint32_t height = dst->height;
 		const uint32_t half_height = (height + 1) / 2;
 		copy_frame_data_plane(dst, src, 0, height);
@@ -3089,8 +3196,9 @@ static inline bool async_texture_changed(struct obs_source *source,
 {
 	enum convert_type prev, cur;
 	prev = get_convert_type(source->async_cache_format,
-				source->async_cache_full_range);
-	cur = get_convert_type(frame->format, frame->full_range);
+				source->async_cache_full_range,
+				source->async_cache_trc);
+	cur = get_convert_type(frame->format, frame->full_range, frame->trc);
 
 	return source->async_cache_width != frame->width ||
 	       source->async_cache_height != frame->height || prev != cur;
@@ -3149,6 +3257,7 @@ cache_video(struct obs_source *source, const struct obs_source_frame *frame)
 	const enum video_format format = frame->format;
 	source->async_cache_format = format;
 	source->async_cache_full_range = frame->full_range;
+	source->async_cache_trc = frame->trc;
 
 	for (size_t i = 0; i < source->async_cache.num; i++) {
 		struct async_frame *af = &source->async_cache.array[i];
@@ -3260,6 +3369,7 @@ void obs_source_output_video2(obs_source_t *source,
 	new_frame.full_range = range == VIDEO_RANGE_FULL;
 	new_frame.flip = frame->flip;
 	new_frame.flags = frame->flags;
+	new_frame.trc = frame->trc;
 
 	memcpy(&new_frame.color_matrix, &frame->color_matrix,
 	       sizeof(frame->color_matrix));
@@ -3400,6 +3510,7 @@ void obs_source_preload_video2(obs_source_t *source,
 	new_frame.full_range = range == VIDEO_RANGE_FULL;
 	new_frame.flip = frame->flip;
 	new_frame.flags = frame->flags;
+	new_frame.trc = frame->trc;
 
 	memcpy(&new_frame.color_matrix, &frame->color_matrix,
 	       sizeof(frame->color_matrix));
@@ -3510,6 +3621,7 @@ void obs_source_set_video_frame2(obs_source_t *source,
 	new_frame.full_range = range == VIDEO_RANGE_FULL;
 	new_frame.flip = frame->flip;
 	new_frame.flags = frame->flags;
+	new_frame.trc = frame->trc;
 
 	memcpy(&new_frame.color_matrix, &frame->color_matrix,
 	       sizeof(frame->color_matrix));

+ 11 - 3
libobs/obs-video-gpu-encode.c

@@ -161,9 +161,17 @@ bool init_gpu_encoding(struct obs_core_video *video)
 		gs_texture_t *tex;
 		gs_texture_t *tex_uv;
 
-		gs_texture_create_nv12(&tex, &tex_uv, ovi->output_width,
-				       ovi->output_height,
-				       GS_RENDER_TARGET | GS_SHARED_KM_TEX);
+		if (ovi->output_format == VIDEO_FORMAT_P010) {
+			gs_texture_create_p010(&tex, &tex_uv, ovi->output_width,
+					       ovi->output_height,
+					       GS_RENDER_TARGET |
+						       GS_SHARED_KM_TEX);
+		} else {
+			gs_texture_create_nv12(&tex, &tex_uv, ovi->output_width,
+					       ovi->output_height,
+					       GS_RENDER_TARGET |
+						       GS_SHARED_KM_TEX);
+		}
 		if (!tex) {
 			return false;
 		}

+ 62 - 0
libobs/obs-video.c

@@ -313,6 +313,9 @@ static void render_convert_texture(struct obs_core_video *video,
 		gs_effect_get_param_by_name(effect, "color_vec2");
 	gs_eparam_t *image = gs_effect_get_param_by_name(effect, "image");
 	gs_eparam_t *width_i = gs_effect_get_param_by_name(effect, "width_i");
+	gs_eparam_t *height_i = gs_effect_get_param_by_name(effect, "height_i");
+	gs_eparam_t *sdr_white_nits_over_maximum = gs_effect_get_param_by_name(
+		effect, "sdr_white_nits_over_maximum");
 
 	struct vec4 vec0, vec1, vec2;
 	vec4_set(&vec0, video->color_matrix[4], video->color_matrix[5],
@@ -325,8 +328,11 @@ static void render_convert_texture(struct obs_core_video *video,
 	gs_enable_blending(false);
 
 	if (convert_textures[0]) {
+		const float multiplier =
+			obs_get_video_sdr_white_level() / video->maximum_nits;
 		gs_effect_set_texture(image, texture);
 		gs_effect_set_vec4(color_vec0, &vec0);
+		gs_effect_set_float(sdr_white_nits_over_maximum, multiplier);
 		render_convert_plane(effect, convert_textures[0],
 				     video->conversion_techs[0]);
 
@@ -336,6 +342,10 @@ static void render_convert_texture(struct obs_core_video *video,
 			if (!convert_textures[2])
 				gs_effect_set_vec4(color_vec2, &vec2);
 			gs_effect_set_float(width_i, video->conversion_width_i);
+			gs_effect_set_float(height_i,
+					    video->conversion_height_i);
+			gs_effect_set_float(sdr_white_nits_over_maximum,
+					    multiplier);
 			render_convert_plane(effect, convert_textures[1],
 					     video->conversion_techs[1]);
 
@@ -344,6 +354,10 @@ static void render_convert_texture(struct obs_core_video *video,
 				gs_effect_set_vec4(color_vec2, &vec2);
 				gs_effect_set_float(width_i,
 						    video->conversion_width_i);
+				gs_effect_set_float(height_i,
+						    video->conversion_height_i);
+				gs_effect_set_float(sdr_white_nits_over_maximum,
+						    multiplier);
 				render_convert_plane(
 					effect, convert_textures[2],
 					video->conversion_techs[2]);
@@ -648,6 +662,54 @@ static void set_gpu_converted_data(struct obs_core_video *video,
 
 		break;
 	}
+	case VIDEO_FORMAT_I010: {
+		const uint32_t width = info->width;
+		const uint32_t height = info->height;
+
+		set_gpu_converted_plane(width * 2, height, input->linesize[0],
+					output->linesize[0], input->data[0],
+					output->data[0]);
+
+		const uint32_t height_d2 = height / 2;
+
+		set_gpu_converted_plane(width, height_d2, input->linesize[1],
+					output->linesize[1], input->data[1],
+					output->data[1]);
+
+		set_gpu_converted_plane(width, height_d2, input->linesize[2],
+					output->linesize[2], input->data[2],
+					output->data[2]);
+
+		break;
+	}
+	case VIDEO_FORMAT_P010: {
+		const uint32_t width_x2 = info->width * 2;
+		const uint32_t height = info->height;
+		const uint32_t height_d2 = height / 2;
+		if (input->linesize[1]) {
+			set_gpu_converted_plane(width_x2, height,
+						input->linesize[0],
+						output->linesize[0],
+						input->data[0],
+						output->data[0]);
+			set_gpu_converted_plane(width_x2, height_d2,
+						input->linesize[1],
+						output->linesize[1],
+						input->data[1],
+						output->data[1]);
+		} else {
+			const uint8_t *const in_uv = set_gpu_converted_plane(
+				width_x2, height, input->linesize[0],
+				output->linesize[0], input->data[0],
+				output->data[0]);
+			set_gpu_converted_plane(width_x2, height_d2,
+						input->linesize[0],
+						output->linesize[1], in_uv,
+						output->data[1]);
+		}
+
+		break;
+	}
 
 	case VIDEO_FORMAT_NONE:
 	case VIDEO_FORMAT_YVYU:

+ 109 - 0
libobs/obs.c

@@ -53,6 +53,8 @@ static inline void calc_gpu_conversion_sizes(const struct obs_video_info *ovi)
 	video->conversion_techs[1] = NULL;
 	video->conversion_techs[2] = NULL;
 	video->conversion_width_i = 0.f;
+	video->conversion_height_i = 0.f;
+	video->maximum_nits = 10000.f;
 
 	switch ((uint32_t)ovi->output_format) {
 	case VIDEO_FORMAT_I420:
@@ -74,6 +76,33 @@ static inline void calc_gpu_conversion_sizes(const struct obs_video_info *ovi)
 		video->conversion_techs[1] = "Planar_U";
 		video->conversion_techs[2] = "Planar_V";
 		break;
+	case VIDEO_FORMAT_I010:
+		video->conversion_needed = true;
+		video->conversion_width_i = 1.f / (float)ovi->output_width;
+		video->conversion_height_i = 1.f / (float)ovi->output_height;
+		if (ovi->colorspace == VIDEO_CS_2020_HLG) {
+			video->conversion_techs[0] = "I010_HLG_Y";
+			video->conversion_techs[1] = "I010_HLG_U";
+			video->conversion_techs[2] = "I010_HLG_V";
+			video->maximum_nits = 1000.f;
+		} else {
+			video->conversion_techs[0] = "I010_PQ_Y";
+			video->conversion_techs[1] = "I010_PQ_U";
+			video->conversion_techs[2] = "I010_PQ_V";
+		}
+		break;
+	case VIDEO_FORMAT_P010:
+		video->conversion_needed = true;
+		video->conversion_width_i = 1.f / (float)ovi->output_width;
+		video->conversion_height_i = 1.f / (float)ovi->output_height;
+		if (ovi->colorspace == VIDEO_CS_2020_HLG) {
+			video->conversion_techs[0] = "P010_HLG_Y";
+			video->conversion_techs[1] = "P010_HLG_UV";
+			video->maximum_nits = 1000.f;
+		} else {
+			video->conversion_techs[0] = "P010_PQ_Y";
+			video->conversion_techs[1] = "P010_PQ_UV";
+		}
 	}
 }
 
@@ -86,12 +115,16 @@ static bool obs_init_gpu_conversion(struct obs_video_info *ovi)
 	video->using_nv12_tex = ovi->output_format == VIDEO_FORMAT_NV12
 					? gs_nv12_available()
 					: false;
+	video->using_p010_tex = ovi->output_format == VIDEO_FORMAT_P010
+					? gs_p010_available()
+					: false;
 
 	if (!video->conversion_needed) {
 		blog(LOG_INFO, "GPU conversion not available for format: %u",
 		     (unsigned int)ovi->output_format);
 		video->gpu_conversion = false;
 		video->using_nv12_tex = false;
+		video->using_p010_tex = false;
 		blog(LOG_INFO, "NV12 texture support not available");
 		return true;
 	}
@@ -101,6 +134,11 @@ static bool obs_init_gpu_conversion(struct obs_video_info *ovi)
 	else
 		blog(LOG_INFO, "NV12 texture support not available");
 
+	if (video->using_p010_tex)
+		blog(LOG_INFO, "P010 texture support enabled");
+	else
+		blog(LOG_INFO, "P010 texture support not available");
+
 	video->convert_textures[0] = NULL;
 	video->convert_textures[1] = NULL;
 	video->convert_textures[2] = NULL;
@@ -116,6 +154,14 @@ static bool obs_init_gpu_conversion(struct obs_video_info *ovi)
 			    GS_RENDER_TARGET | GS_SHARED_KM_TEX)) {
 			return false;
 		}
+	} else if (video->using_p010_tex) {
+		if (!gs_texture_create_p010(
+			    &video->convert_textures_encode[0],
+			    &video->convert_textures_encode[1],
+			    ovi->output_width, ovi->output_height,
+			    GS_RENDER_TARGET | GS_SHARED_KM_TEX)) {
+			return false;
+		}
 	}
 #endif
 
@@ -161,6 +207,31 @@ static bool obs_init_gpu_conversion(struct obs_video_info *ovi)
 		if (!video->convert_textures[0] ||
 		    !video->convert_textures[1] || !video->convert_textures[2])
 			success = false;
+		break;
+	case VIDEO_FORMAT_I010:
+		video->convert_textures[0] =
+			gs_texture_create(ovi->output_width, ovi->output_height,
+					  GS_R16, 1, NULL, GS_RENDER_TARGET);
+		video->convert_textures[1] = gs_texture_create(
+			ovi->output_width / 2, ovi->output_height / 2, GS_R16,
+			1, NULL, GS_RENDER_TARGET);
+		video->convert_textures[2] = gs_texture_create(
+			ovi->output_width / 2, ovi->output_height / 2, GS_R16,
+			1, NULL, GS_RENDER_TARGET);
+		if (!video->convert_textures[0] ||
+		    !video->convert_textures[1] || !video->convert_textures[2])
+			success = false;
+		break;
+	case VIDEO_FORMAT_P010:
+		video->convert_textures[0] =
+			gs_texture_create(ovi->output_width, ovi->output_height,
+					  GS_R16, 1, NULL, GS_RENDER_TARGET);
+		video->convert_textures[1] = gs_texture_create(
+			ovi->output_width / 2, ovi->output_height / 2, GS_RG16,
+			1, NULL, GS_RENDER_TARGET);
+		if (!video->convert_textures[0] || !video->convert_textures[1])
+			success = false;
+		break;
 	}
 
 	if (!success) {
@@ -227,6 +298,30 @@ static bool obs_init_gpu_copy_surfaces(struct obs_video_info *ovi, size_t i)
 		if (!video->copy_surfaces[i][2])
 			return false;
 		break;
+	case VIDEO_FORMAT_I010:
+		video->copy_surfaces[i][0] = gs_stagesurface_create(
+			ovi->output_width, ovi->output_height, GS_R16);
+		if (!video->copy_surfaces[i][0])
+			return false;
+		video->copy_surfaces[i][1] = gs_stagesurface_create(
+			ovi->output_width / 2, ovi->output_height / 2, GS_R16);
+		if (!video->copy_surfaces[i][1])
+			return false;
+		video->copy_surfaces[i][2] = gs_stagesurface_create(
+			ovi->output_width / 2, ovi->output_height / 2, GS_R16);
+		if (!video->copy_surfaces[i][2])
+			return false;
+		break;
+	case VIDEO_FORMAT_P010:
+		video->copy_surfaces[i][0] = gs_stagesurface_create(
+			ovi->output_width, ovi->output_height, GS_R16);
+		if (!video->copy_surfaces[i][0])
+			return false;
+		video->copy_surfaces[i][1] = gs_stagesurface_create(
+			ovi->output_width / 2, ovi->output_height / 2, GS_RG16);
+		if (!video->copy_surfaces[i][1])
+			return false;
+		break;
 	default:
 		break;
 	}
@@ -250,6 +345,14 @@ static bool obs_init_textures(struct obs_video_info *ovi)
 				success = false;
 				break;
 			}
+		} else if (video->using_p010_tex) {
+			video->copy_surfaces_encode[i] =
+				gs_stagesurface_create_p010(ovi->output_width,
+							    ovi->output_height);
+			if (!video->copy_surfaces_encode[i]) {
+				success = false;
+				break;
+			}
 		}
 #endif
 
@@ -2657,6 +2760,12 @@ bool obs_nv12_tex_active(void)
 	return video->using_nv12_tex;
 }
 
+bool obs_p010_tex_active(void)
+{
+	struct obs_core_video *video = &obs->video;
+	return video->using_p010_tex;
+}
+
 /* ------------------------------------------------------------------------- */
 /* task stuff                                                                */
 

+ 3 - 0
libobs/obs.h

@@ -266,6 +266,7 @@ struct obs_source_frame {
 	float color_range_max[3];
 	bool flip;
 	uint8_t flags;
+	uint8_t trc; /* enum video_trc */
 
 	/* used internally by libobs */
 	volatile long refs;
@@ -286,6 +287,7 @@ struct obs_source_frame2 {
 	float color_range_max[3];
 	bool flip;
 	uint8_t flags;
+	uint8_t trc; /* enum video_trc */
 };
 
 /** Access to the argc/argv used to start OBS. What you see is what you get. */
@@ -813,6 +815,7 @@ EXPORT uint32_t obs_get_total_frames(void);
 EXPORT uint32_t obs_get_lagged_frames(void);
 
 EXPORT bool obs_nv12_tex_active(void);
+EXPORT bool obs_p010_tex_active(void);
 
 EXPORT void obs_apply_private_data(obs_data_t *settings);
 EXPORT void obs_set_private_data(obs_data_t *settings);

+ 21 - 4
plugins/obs-ffmpeg/obs-ffmpeg-av1.c

@@ -72,7 +72,15 @@ static const char *svt_av1_getname(void *unused)
 static void av1_video_info(void *data, struct video_scale_info *info)
 {
 	UNUSED_PARAMETER(data);
-	info->format = VIDEO_FORMAT_I420;
+
+	switch (info->format) {
+	case VIDEO_FORMAT_I010:
+	case VIDEO_FORMAT_P010:
+		info->format = VIDEO_FORMAT_I010;
+		break;
+	default:
+		info->format = VIDEO_FORMAT_I420;
+	}
 }
 
 static bool av1_init_codec(struct av1_encoder *enc)
@@ -190,21 +198,30 @@ static bool av1_update(struct av1_encoder *enc, obs_data_t *settings)
 
 	switch (info.colorspace) {
 	case VIDEO_CS_601:
-		enc->context->color_trc = AVCOL_TRC_SMPTE170M;
 		enc->context->color_primaries = AVCOL_PRI_SMPTE170M;
+		enc->context->color_trc = AVCOL_TRC_SMPTE170M;
 		enc->context->colorspace = AVCOL_SPC_SMPTE170M;
 		break;
 	case VIDEO_CS_DEFAULT:
 	case VIDEO_CS_709:
-		enc->context->color_trc = AVCOL_TRC_BT709;
 		enc->context->color_primaries = AVCOL_PRI_BT709;
+		enc->context->color_trc = AVCOL_TRC_BT709;
 		enc->context->colorspace = AVCOL_SPC_BT709;
 		break;
 	case VIDEO_CS_SRGB:
-		enc->context->color_trc = AVCOL_TRC_IEC61966_2_1;
 		enc->context->color_primaries = AVCOL_PRI_BT709;
+		enc->context->color_trc = AVCOL_TRC_IEC61966_2_1;
 		enc->context->colorspace = AVCOL_SPC_BT709;
 		break;
+	case VIDEO_CS_2020_PQ:
+		enc->context->color_primaries = AVCOL_PRI_BT2020;
+		enc->context->color_trc = AVCOL_TRC_SMPTE2084;
+		enc->context->colorspace = AVCOL_SPC_BT2020_NCL;
+		break;
+	case VIDEO_CS_2020_HLG:
+		enc->context->color_primaries = AVCOL_PRI_BT2020;
+		enc->context->color_trc = AVCOL_TRC_ARIB_STD_B67;
+		enc->context->colorspace = AVCOL_SPC_BT2020_NCL;
 	}
 
 	if (keyint_sec)

+ 4 - 36
plugins/obs-ffmpeg/obs-ffmpeg-formats.h

@@ -41,6 +41,10 @@ obs_to_ffmpeg_video_format(enum video_format format)
 		return AV_PIX_FMT_YUVA422P;
 	case VIDEO_FORMAT_YUVA:
 		return AV_PIX_FMT_YUVA444P;
+	case VIDEO_FORMAT_I010:
+		return AV_PIX_FMT_YUV420P10LE;
+	case VIDEO_FORMAT_P010:
+		return AV_PIX_FMT_P010LE;
 	case VIDEO_FORMAT_NONE:
 	case VIDEO_FORMAT_YVYU:
 	case VIDEO_FORMAT_AYUV:
@@ -51,42 +55,6 @@ obs_to_ffmpeg_video_format(enum video_format format)
 	return AV_PIX_FMT_NONE;
 }
 
-static inline enum video_format
-ffmpeg_to_obs_video_format(enum AVPixelFormat format)
-{
-	switch (format) {
-	case AV_PIX_FMT_YUV444P:
-		return VIDEO_FORMAT_I444;
-	case AV_PIX_FMT_YUV420P:
-		return VIDEO_FORMAT_I420;
-	case AV_PIX_FMT_NV12:
-		return VIDEO_FORMAT_NV12;
-	case AV_PIX_FMT_YUYV422:
-		return VIDEO_FORMAT_YUY2;
-	case AV_PIX_FMT_UYVY422:
-		return VIDEO_FORMAT_UYVY;
-	case AV_PIX_FMT_RGBA:
-		return VIDEO_FORMAT_RGBA;
-	case AV_PIX_FMT_BGRA:
-		return VIDEO_FORMAT_BGRA;
-	case AV_PIX_FMT_GRAY8:
-		return VIDEO_FORMAT_Y800;
-	case AV_PIX_FMT_BGR24:
-		return VIDEO_FORMAT_BGR3;
-	case AV_PIX_FMT_YUV422P:
-		return VIDEO_FORMAT_I422;
-	case AV_PIX_FMT_YUVA420P:
-		return VIDEO_FORMAT_I40A;
-	case AV_PIX_FMT_YUVA422P:
-		return VIDEO_FORMAT_I42A;
-	case AV_PIX_FMT_YUVA444P:
-		return VIDEO_FORMAT_YUVA;
-	case AV_PIX_FMT_NONE:
-	default:
-		return VIDEO_FORMAT_NONE;
-	}
-}
-
 static inline enum audio_format
 convert_ffmpeg_sample_format(enum AVSampleFormat format)
 {

+ 9 - 0
plugins/obs-ffmpeg/obs-ffmpeg-mux.c

@@ -144,6 +144,15 @@ static void add_video_encoder_params(struct ffmpeg_muxer *stream,
 		trc = AVCOL_TRC_IEC61966_2_1;
 		spc = AVCOL_SPC_BT709;
 		break;
+	case VIDEO_CS_2020_PQ:
+		pri = AVCOL_PRI_BT2020;
+		trc = AVCOL_TRC_SMPTE2084;
+		spc = AVCOL_SPC_BT2020_NCL;
+		break;
+	case VIDEO_CS_2020_HLG:
+		pri = AVCOL_PRI_BT2020;
+		trc = AVCOL_TRC_ARIB_STD_B67;
+		spc = AVCOL_SPC_BT2020_NCL;
 	}
 
 	const enum AVColorRange range = (info->range == VIDEO_RANGE_FULL)

+ 12 - 3
plugins/obs-ffmpeg/obs-ffmpeg-nvenc.c

@@ -258,21 +258,30 @@ static bool nvenc_update(struct nvenc_encoder *enc, obs_data_t *settings,
 
 	switch (info.colorspace) {
 	case VIDEO_CS_601:
-		enc->context->color_trc = AVCOL_TRC_SMPTE170M;
 		enc->context->color_primaries = AVCOL_PRI_SMPTE170M;
+		enc->context->color_trc = AVCOL_TRC_SMPTE170M;
 		enc->context->colorspace = AVCOL_SPC_SMPTE170M;
 		break;
 	case VIDEO_CS_DEFAULT:
 	case VIDEO_CS_709:
-		enc->context->color_trc = AVCOL_TRC_BT709;
 		enc->context->color_primaries = AVCOL_PRI_BT709;
+		enc->context->color_trc = AVCOL_TRC_BT709;
 		enc->context->colorspace = AVCOL_SPC_BT709;
 		break;
 	case VIDEO_CS_SRGB:
-		enc->context->color_trc = AVCOL_TRC_IEC61966_2_1;
 		enc->context->color_primaries = AVCOL_PRI_BT709;
+		enc->context->color_trc = AVCOL_TRC_IEC61966_2_1;
 		enc->context->colorspace = AVCOL_SPC_BT709;
 		break;
+	case VIDEO_CS_2020_PQ:
+		enc->context->color_primaries = AVCOL_PRI_BT2020;
+		enc->context->color_trc = AVCOL_TRC_SMPTE2084;
+		enc->context->colorspace = AVCOL_SPC_BT2020_NCL;
+		break;
+	case VIDEO_CS_2020_HLG:
+		enc->context->color_primaries = AVCOL_PRI_BT2020;
+		enc->context->color_trc = AVCOL_TRC_ARIB_STD_B67;
+		enc->context->colorspace = AVCOL_SPC_BT2020_NCL;
 	}
 
 	if (keyint_sec)

+ 15 - 8
plugins/obs-ffmpeg/obs-ffmpeg-output.c

@@ -1119,28 +1119,35 @@ static bool try_connect(struct ffmpeg_output *output)
 
 	config.color_range = voi->range == VIDEO_RANGE_FULL ? AVCOL_RANGE_JPEG
 							    : AVCOL_RANGE_MPEG;
+	config.colorspace = format_is_yuv(voi->format) ? AVCOL_SPC_BT709
+						       : AVCOL_SPC_RGB;
 	switch (voi->colorspace) {
 	case VIDEO_CS_601:
 		config.color_primaries = AVCOL_PRI_SMPTE170M;
 		config.color_trc = AVCOL_TRC_SMPTE170M;
+		config.colorspace = AVCOL_SPC_SMPTE170M;
 		break;
 	case VIDEO_CS_DEFAULT:
 	case VIDEO_CS_709:
 		config.color_primaries = AVCOL_PRI_BT709;
 		config.color_trc = AVCOL_TRC_BT709;
+		config.colorspace = AVCOL_SPC_BT709;
 		break;
 	case VIDEO_CS_SRGB:
 		config.color_primaries = AVCOL_PRI_BT709;
 		config.color_trc = AVCOL_TRC_IEC61966_2_1;
+		config.colorspace = AVCOL_SPC_BT709;
+		break;
+	case VIDEO_CS_2020_PQ:
+		config.color_primaries = AVCOL_PRI_BT2020;
+		config.color_trc = AVCOL_TRC_SMPTE2084;
+		config.colorspace = AVCOL_SPC_BT2020_NCL;
+		break;
+	case VIDEO_CS_2020_HLG:
+		config.color_primaries = AVCOL_PRI_BT2020;
+		config.color_trc = AVCOL_TRC_ARIB_STD_B67;
+		config.colorspace = AVCOL_SPC_BT2020_NCL;
 		break;
-	}
-
-	if (format_is_yuv(voi->format)) {
-		config.colorspace = (voi->colorspace == VIDEO_CS_601)
-					    ? AVCOL_SPC_SMPTE170M
-					    : AVCOL_SPC_BT709;
-	} else {
-		config.colorspace = AVCOL_SPC_RGB;
 	}
 
 	if (config.format == AV_PIX_FMT_NONE) {

+ 3 - 0
plugins/win-dshow/data/locale/en-US.ini

@@ -3,6 +3,9 @@ VideoCaptureDevice="Video Capture Device"
 Device="Device"
 ColorSpace="Color Space"
 ColorSpace.Default="Default"
+ColorSpace.709="Rec. 709"
+ColorSpace.601="Rec. 601"
+ColorSpace.2020="Rec. 2020"
 ColorRange="Color Range"
 ColorRange.Default="Default"
 ColorRange.Partial="Partial"

+ 7 - 0
plugins/win-dshow/ffmpeg-decode.c

@@ -149,8 +149,12 @@ static inline enum video_format convert_pixel_format(int f)
 		return VIDEO_FORMAT_RGBA;
 	case AV_PIX_FMT_BGRA:
 		return VIDEO_FORMAT_BGRA;
+	case AV_PIX_FMT_YUV420P10LE:
+		return VIDEO_FORMAT_I010;
 	case AV_PIX_FMT_BGR0:
 		return VIDEO_FORMAT_BGRX;
+	case AV_PIX_FMT_P010LE:
+		return VIDEO_FORMAT_P010;
 	default:;
 	}
 
@@ -289,6 +293,9 @@ convert_color_space(enum AVColorSpace s, enum AVColorTransferCharacteristic trc)
 	case AVCOL_SPC_SMPTE170M:
 	case AVCOL_SPC_SMPTE240M:
 		return VIDEO_CS_601;
+	case AVCOL_SPC_BT2020_NCL:
+		return (trc == AVCOL_TRC_ARIB_STD_B67) ? VIDEO_CS_2020_HLG
+						       : VIDEO_CS_2020_PQ;
 	default:
 		return VIDEO_CS_DEFAULT;
 	}

+ 21 - 3
plugins/win-dshow/win-dshow.cpp

@@ -78,6 +78,9 @@ using namespace DShow;
 #define TEXT_DEACTIVATE     obs_module_text("Deactivate")
 #define TEXT_COLOR_SPACE    obs_module_text("ColorSpace")
 #define TEXT_COLOR_DEFAULT  obs_module_text("ColorSpace.Default")
+#define TEXT_COLOR_709      obs_module_text("ColorSpace.709")
+#define TEXT_COLOR_601      obs_module_text("ColorSpace.601")
+#define TEXT_COLOR_2020     obs_module_text("ColorSpace.2020")
 #define TEXT_COLOR_RANGE    obs_module_text("ColorRange")
 #define TEXT_RANGE_DEFAULT  obs_module_text("ColorRange.Default")
 #define TEXT_RANGE_PARTIAL  obs_module_text("ColorRange.Partial")
@@ -437,6 +440,8 @@ static inline video_format ConvertVideoFormat(VideoFormat format)
 		return VIDEO_FORMAT_UYVY;
 	case VideoFormat::HDYC:
 		return VIDEO_FORMAT_UYVY;
+	case VideoFormat::P010:
+		return VIDEO_FORMAT_P010;
 	default:
 		return VIDEO_FORMAT_NONE;
 	}
@@ -545,6 +550,7 @@ void DShowInput::OnVideoData(const VideoConfig &config, unsigned char *data,
 	frame.format = ConvertVideoFormat(config.format);
 	frame.flip = flip;
 	frame.flags = OBS_SOURCE_FRAME_LINEAR_ALPHA;
+	frame.trc = VIDEO_TRC_DEFAULT;
 
 	/* YUV DIBS are always top-down */
 	if (config.format == VideoFormat::XRGB ||
@@ -592,6 +598,12 @@ void DShowInput::OnVideoData(const VideoConfig &config, unsigned char *data,
 		frame.data[0] = data;
 		frame.linesize[0] = cx;
 
+	} else if (videoConfig.format == VideoFormat::P010) {
+		frame.data[0] = data;
+		frame.data[1] = frame.data[0] + (cx * cy_abs) * 2;
+		frame.linesize[0] = cx * 2;
+		frame.linesize[1] = cx * 2;
+
 	} else {
 		/* TODO: other formats */
 		return;
@@ -1083,6 +1095,9 @@ DShowInput::GetColorSpace(obs_data_t *settings) const
 	if (astrcmpi(space, "601") == 0)
 		return VIDEO_CS_601;
 
+	if (astrcmpi(space, "2020") == 0)
+		return VIDEO_CS_2020_PQ;
+
 	return VIDEO_CS_DEFAULT;
 }
 
@@ -1348,6 +1363,7 @@ static const VideoFormatName videoFormatNames[] = {
 	{VideoFormat::NV12, "NV12"},
 	{VideoFormat::YV12, "YV12"},
 	{VideoFormat::Y800, "Y800"},
+	{VideoFormat::P010, "P010"},
 
 	/* packed YUV formats */
 	{VideoFormat::YVYU, "YVYU"},
@@ -1357,7 +1373,8 @@ static const VideoFormatName videoFormatNames[] = {
 
 	/* encoded formats */
 	{VideoFormat::MJPEG, "MJPEG"},
-	{VideoFormat::H264, "H264"}};
+	{VideoFormat::H264, "H264"},
+};
 
 static bool ResTypeChanged(obs_properties_t *props, obs_property_t *p,
 			   obs_data_t *settings);
@@ -1925,8 +1942,9 @@ static obs_properties_t *GetDShowProperties(void *obj)
 				    OBS_COMBO_TYPE_LIST,
 				    OBS_COMBO_FORMAT_STRING);
 	obs_property_list_add_string(p, TEXT_COLOR_DEFAULT, "default");
-	obs_property_list_add_string(p, "709", "709");
-	obs_property_list_add_string(p, "601", "601");
+	obs_property_list_add_string(p, TEXT_COLOR_709, "709");
+	obs_property_list_add_string(p, TEXT_COLOR_601, "601");
+	obs_property_list_add_string(p, TEXT_COLOR_2020, "2020");
 
 	p = obs_properties_add_list(ppts, COLOR_RANGE, TEXT_COLOR_RANGE,
 				    OBS_COMBO_TYPE_LIST,