{ "FilterGraphs": { "Graphs": [{ "Description": "[0:0]scale_qsv@f1=w=1920:h=800:format=nv12[f1_out0]", "Filters": [{ "Filter": { "Description": "QuickSync video scaling and format conversion", "HwDeviceContext": { "DeviceType": "qsv", "HasHwDeviceContext": true }, "Inputs": [{ "LinkType": "Input", "ChannelLayout": 0, "Channels": 0, "DestPadName": "default", "Format": "qsv", "Height": 800, "HwFramesContext": { "HasHwFramesContext": true, "Height": 800, "HwDeviceContext": { "DeviceType": "qsv", "HasHwDeviceContext": true }, "HwPixelFormat": "qsv", "SwPixelFormat": "p010le", "Width": 1920 }, "MediaTypeId": 0, "SampleRate": 0, "SAR": "1:1", "SourceName": "graph 0 input from stream 0:0", "SourcePadName": "default", "Width": 1920 } ], "Name": "scale_qsv@f1", "Name2": "scale_qsv", "Outputs": [{ "LinkType": "Output", "ChannelLayout": 0, "Channels": 0, "DestName": "format", "DestPadName": "default", "Format": "qsv", "Height": 800, "HwFramesContext": { "HasHwFramesContext": true, "Height": 800, "HwDeviceContext": { "DeviceType": "qsv", "HasHwDeviceContext": true }, "HwPixelFormat": "qsv", "SwPixelFormat": "nv12", "Width": 1920 }, "MediaTypeId": 0, "SampleRate": 0, "SAR": "1:1", "SourceName": "scale_qsv@f1", "Width": 1920 } ] } }, { "Filter": { "Description": "Buffer video frames, and make them accessible to the filterchain.", "Inputs": [ ], "Name": "graph 0 input from stream 0:0", "Name2": "buffer", "Outputs": [{ "LinkType": "Output", "ChannelLayout": 0, "Channels": 0, "DestName": "scale_qsv@f1", "DestPadName": "default", "Format": "qsv", "Height": 800, "HwFramesContext": { "HasHwFramesContext": true, "Height": 800, "HwDeviceContext": { "DeviceType": "qsv", "HasHwDeviceContext": true }, "HwPixelFormat": "qsv", "SwPixelFormat": "p010le", "Width": 1920 }, "MediaTypeId": 0, "SampleRate": 0, "SAR": "1:1", "SourceName": "graph 0 input from stream 0:0", "Width": 1920 } ] } }, { "Filter": { "Description": "Buffer video frames, and make them available to the end of the filter graph.", "Inputs": [{ "LinkType": "Input", "ChannelLayout": 0, "Channels": 0, "DestPadName": "default", "Format": "qsv", "Height": 800, "HwFramesContext": { "HasHwFramesContext": true, "Height": 800, "HwDeviceContext": { "DeviceType": "qsv", "HasHwDeviceContext": true }, "HwPixelFormat": "qsv", "SwPixelFormat": "nv12", "Width": 1920 }, "MediaTypeId": 0, "SampleRate": 0, "SAR": "1:1", "SourceName": "format", "SourcePadName": "default", "Width": 1920 } ], "Name": "out_0_0", "Name2": "buffersink", "Outputs": [ ] } }, { "Filter": { "Description": "Convert the input video to one of the specified pixel formats.", "Inputs": [{ "LinkType": "Input", "ChannelLayout": 0, "Channels": 0, "DestPadName": "default", "Format": "qsv", "Height": 800, "HwFramesContext": { "HasHwFramesContext": true, "Height": 800, "HwDeviceContext": { "DeviceType": "qsv", "HasHwDeviceContext": true }, "HwPixelFormat": "qsv", "SwPixelFormat": "nv12", "Width": 1920 }, "MediaTypeId": 0, "SampleRate": 0, "SAR": "1:1", "SourceName": "scale_qsv@f1", "SourcePadName": "default", "Width": 1920 } ], "Name": "format", "Name2": "format", "Outputs": [{ "LinkType": "Output", "ChannelLayout": 0, "Channels": 0, "DestName": "out_0_0", "DestPadName": "default", "Format": "qsv", "Height": 800, "HwFramesContext": { "HasHwFramesContext": true, "Height": 800, "HwDeviceContext": { "DeviceType": "qsv", "HasHwDeviceContext": true }, "HwPixelFormat": "qsv", "SwPixelFormat": "nv12", "Width": 1920 }, "MediaTypeId": 0, "SampleRate": 0, "SAR": "1:1", "SourceName": "format", "Width": 1920 } ] } } ], 
"GraphIndex": 0, "Inputs": [{ "LinkType": "Input", "ChannelLayout": 0, "Channels": 0, "Description": "Buffer video frames, and make them accessible to the filterchain.", "Format": "qsv", "Height": 800, "HwFramesContext": { "HasHwFramesContext": true, "Height": 800, "HwDeviceContext": { "DeviceType": "qsv", "HasHwDeviceContext": true }, "HwPixelFormat": "qsv", "SwPixelFormat": "p010le", "Width": 1920 }, "MediaType": "video", "MediaTypeId": 0, "Name1": "scale_qsv", "Name2": "graph 0 input from stream 0:0", "Name3": "buffer", "SampleRate": 0, "SAR": "1:1", "Width": 1920 } ], "Outputs": [{ "LinkType": "Output", "ChannelLayout": 0, "Channels": 0, "Description": "Buffer video frames, and make them available to the end of the filter graph.", "Format": "qsv", "Height": 800, "MediaType": "video", "MediaTypeId": 0, "Name1": "scale_qsv", "Name2": "out_0_0", "Name3": "buffersink", "SampleRate": 0, "Width": 1920 } ] }, { "Filters": [{ "Filter": { "Description": "Change input volume.", "HwDeviceContext": { "DeviceType": "qsv", "HasHwDeviceContext": true }, "Inputs": [{ "LinkType": "Input", "ChannelLayout": 1551, "Channels": 6, "ChannelString": "5.1(side)", "DestPadName": "default", "Height": 0, "MediaTypeId": 0, "SampleRate": 48000, "SourceName": "graph_1_in_0_1", "SourcePadName": "default", "Width": 0 } ], "Name": "Parsed_volume_0", "Name2": "volume", "Outputs": [{ "LinkType": "Output", "ChannelLayout": 1551, "Channels": 6, "ChannelString": "5.1(side)", "DestName": "auto_resampler_0", "DestPadName": "default", "Height": 0, "MediaTypeId": 0, "SampleRate": 48000, "SourceName": "Parsed_volume_0", "Width": 0 } ] } }, { "Filter": { "Description": "Buffer audio frames, and make them accessible to the filterchain.", "Inputs": [ ], "Name": "graph_1_in_0_1", "Name2": "abuffer", "Outputs": [{ "LinkType": "Output", "ChannelLayout": 1551, "Channels": 6, "ChannelString": "5.1(side)", "DestName": "Parsed_volume_0", "DestPadName": "default", "Height": 0, "MediaTypeId": 0, "SampleRate": 48000, "SourceName": "graph_1_in_0_1", "Width": 0 } ] } }, { "Filter": { "Description": "Buffer audio frames, and make them available to the end of the filter graph.", "Inputs": [{ "LinkType": "Input", "ChannelLayout": 3, "Channels": 2, "ChannelString": "stereo", "DestPadName": "default", "Height": 0, "MediaTypeId": 0, "SampleRate": 48000, "SourceName": "format_out_0_1", "SourcePadName": "default", "Width": 0 } ], "Name": "out_0_1", "Name2": "abuffersink", "Outputs": [ ] } }, { "Filter": { "Description": "Convert the input audio to one of the specified formats.", "Inputs": [{ "LinkType": "Input", "ChannelLayout": 3, "Channels": 2, "ChannelString": "stereo", "DestPadName": "default", "Height": 0, "MediaTypeId": 0, "SampleRate": 48000, "SourceName": "auto_resampler_0", "SourcePadName": "default", "Width": 0 } ], "Name": "format_out_0_1", "Name2": "aformat", "Outputs": [{ "LinkType": "Output", "ChannelLayout": 3, "Channels": 2, "ChannelString": "stereo", "DestName": "out_0_1", "DestPadName": "default", "Height": 0, "MediaTypeId": 0, "SampleRate": 48000, "SourceName": "format_out_0_1", "Width": 0 } ] } }, { "Filter": { "Description": "Resample audio data.", "Inputs": [{ "LinkType": "Input", "ChannelLayout": 1551, "Channels": 6, "ChannelString": "5.1(side)", "DestPadName": "default", "Height": 0, "MediaTypeId": 0, "SampleRate": 48000, "SourceName": "Parsed_volume_0", "SourcePadName": "default", "Width": 0 } ], "Name": "auto_resampler_0", "Name2": "aresample", "Outputs": [{ "LinkType": "Output", "ChannelLayout": 3, "Channels": 2, 
"ChannelString": "stereo", "DestName": "format_out_0_1", "DestPadName": "default", "Height": 0, "MediaTypeId": 0, "SampleRate": 48000, "SourceName": "auto_resampler_0", "Width": 0 } ] } } ], "GraphIndex": 1, "Inputs": [{ "LinkType": "Input", "ChannelLayout": 0, "Channels": 0, "Description": "Buffer audio frames, and make them accessible to the filterchain.", "Format": "gray", "Height": 0, "MediaType": "audio", "MediaTypeId": 1, "Name2": "graph_1_in_0_1", "Name3": "abuffer", "SampleRate": 0, "SAR": "0:1", "Width": 0 } ], "Outputs": [{ "LinkType": "Output", "ChannelLayout": 0, "Channels": 0, "Description": "Buffer audio frames, and make them available to the end of the filter graph.", "Format": "gray", "Height": 0, "MediaType": "audio", "MediaTypeId": 1, "Name2": "out_0_1", "Name3": "abuffersink", "SampleRate": 0, "Width": 0 } ] } ] } }