@@ -479,7 +479,8 @@ void DShowInput::OnEncodedVideoData(enum AVCodecID id, unsigned char *data,
 	 * than 1920x1080. The reason why is because we want to strike
 	 * a reasonable balance between hardware and CPU usage. */
 	bool useHW = videoConfig.format != VideoFormat::MJPEG ||
-		     (videoConfig.cx * videoConfig.cy) > MAX_SW_RES_INT;
+		     (videoConfig.cx * videoConfig.cy_abs) >
+			     MAX_SW_RES_INT;
 	if (ffmpeg_decode_init(video_decoder, id, useHW) < 0) {
 		blog(LOG_WARNING, "Could not initialize video decoder");
 		return;
@@ -520,16 +521,15 @@ void DShowInput::OnVideoData(const VideoConfig &config, unsigned char *data,
 	}
 
 	const int cx = config.cx;
-	const int cy = config.cy;
+	const int cy_abs = config.cy_abs;
 
 	frame.timestamp = (uint64_t)startTime * 100;
 	frame.width = config.cx;
-	frame.height = config.cy;
+	frame.height = cy_abs;
 	frame.format = ConvertVideoFormat(config.format);
-	frame.flip = (config.format == VideoFormat::XRGB ||
-		      config.format == VideoFormat::ARGB);
+	frame.flip = flip;
 
-	if (flip)
+	if (config.cy_flip)
 		frame.flip = !frame.flip;
 
 	if (videoConfig.format == VideoFormat::XRGB ||
@@ -546,23 +546,23 @@ void DShowInput::OnVideoData(const VideoConfig &config, unsigned char *data,
 
 	} else if (videoConfig.format == VideoFormat::I420) {
 		frame.data[0] = data;
-		frame.data[1] = frame.data[0] + (cx * cy);
-		frame.data[2] = frame.data[1] + (cx * cy / 4);
+		frame.data[1] = frame.data[0] + (cx * cy_abs);
+		frame.data[2] = frame.data[1] + (cx * cy_abs / 4);
 		frame.linesize[0] = cx;
 		frame.linesize[1] = cx / 2;
 		frame.linesize[2] = cx / 2;
 
 	} else if (videoConfig.format == VideoFormat::YV12) {
 		frame.data[0] = data;
-		frame.data[2] = frame.data[0] + (cx * cy);
-		frame.data[1] = frame.data[2] + (cx * cy / 4);
+		frame.data[2] = frame.data[0] + (cx * cy_abs);
+		frame.data[1] = frame.data[2] + (cx * cy_abs / 4);
 		frame.linesize[0] = cx;
 		frame.linesize[1] = cx / 2;
 		frame.linesize[2] = cx / 2;
 
 	} else if (videoConfig.format == VideoFormat::NV12) {
 		frame.data[0] = data;
-		frame.data[1] = frame.data[0] + (cx * cy);
+		frame.data[1] = frame.data[0] + (cx * cy_abs);
 		frame.linesize[0] = cx;
 		frame.linesize[1] = cx;
 
@@ -910,7 +910,8 @@ bool DShowInput::UpdateVideoConfig(obs_data_t *settings)
 	videoConfig.path = id.path.c_str();
 	videoConfig.useDefaultConfig = resType == ResType_Preferred;
 	videoConfig.cx = cx;
-	videoConfig.cy = cy;
+	videoConfig.cy_abs = abs(cy);
+	videoConfig.cy_flip = cy < 0;
 	videoConfig.frameInterval = interval;
 	videoConfig.internalFormat = format;
 
@@ -950,11 +951,13 @@ bool DShowInput::UpdateVideoConfig(obs_data_t *settings)
 	     "\tvideo device: %s\n"
 	     "\tvideo path: %s\n"
 	     "\tresolution: %dx%d\n"
+	     "\tflip: %d\n"
 	     "\tfps: %0.2f (interval: %lld)\n"
 	     "\tformat: %s",
 	     obs_source_get_name(source), (const char *)name_utf8,
-	     (const char *)path_utf8, videoConfig.cx, videoConfig.cy, fps,
-	     videoConfig.frameInterval, formatName->array);
+	     (const char *)path_utf8, videoConfig.cx, videoConfig.cy_abs,
+	     (int)videoConfig.cy_flip, fps, videoConfig.frameInterval,
+	     formatName->array);
 
 	SetupBuffering(settings);
 
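Note (not part of the patch): the core of this change is splitting the signed DirectShow height into a magnitude (cy_abs) and an orientation flag (cy_flip), so that plane-size arithmetic always uses a positive height while vertical flipping is handled separately through frame.flip. Below is a minimal standalone sketch of that decomposition; DemoConfig, MakeDemoConfig and ChromaPlaneOffset are hypothetical names for illustration, not the real VideoConfig type from libdshowcapture.

#include <cstddef>
#include <cstdlib>

/* Hypothetical stand-ins for the VideoConfig fields this patch touches;
 * the real struct comes from libdshowcapture. */
struct DemoConfig {
	int cx;       /* width in pixels */
	int cy_abs;   /* absolute height, always positive */
	bool cy_flip; /* true when the driver reported a negative height */
};

/* Split a possibly negative height into magnitude + orientation, the same
 * way UpdateVideoConfig() above assigns videoConfig.cy_abs and
 * videoConfig.cy_flip. */
static DemoConfig MakeDemoConfig(int cx, int cy)
{
	return DemoConfig{cx, std::abs(cy), cy < 0};
}

/* Offset of the first chroma plane for the planar formats in the diff
 * (I420/YV12/NV12); it must be computed from the absolute height, since a
 * negative value would move the pointer before the start of the buffer. */
static size_t ChromaPlaneOffset(const DemoConfig &c)
{
	return (size_t)c.cx * (size_t)c.cy_abs;
}

In the patched OnVideoData() this is exactly what the cx * cy_abs terms do, while cy_flip only toggles frame.flip rather than entering any size calculation.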