Commit de9f659f authored by Alexander Smorkalov

Several transforms added to sample IMFTransform.

parent 9e062871
@@ -40,7 +40,8 @@
     <Button x:Name="btnTakePhoto2" Click="btnTakePhoto_Click" IsEnabled="false" Margin="0,0,10,0">TakePhoto</Button>
 </StackPanel>
 <StackPanel Orientation="Horizontal" Grid.Row="2" Margin="0,10,0,0">
-    <CheckBox x:Name="chkAddRemoveEffect" Margin="0,0,10,0" Content="Grayscale Effect" IsEnabled="False" Checked="chkAddRemoveEffect_Checked" Unchecked="chkAddRemoveEffect_Unchecked"/>
+    <CheckBox x:Name="chkAddRemoveEffect" Margin="0,0,10,0" Content="Add Effect" IsEnabled="False" Checked="chkAddRemoveEffect_Checked" Unchecked="chkAddRemoveEffect_Unchecked"/>
+    <ComboBox Width="120"/>
 </StackPanel>
 </Grid>
@@ -122,7 +122,7 @@ void AdvancedCapture::ScenarioReset()
 void AdvancedCapture::SoundLevelChanged(Object^ sender, Object^ e)
 {
     create_task(Dispatcher->RunAsync(Windows::UI::Core::CoreDispatcherPriority::High, ref new Windows::UI::Core::DispatchedHandler([this]()
     {
         if(Windows::Media::MediaControl::SoundLevel != Windows::Media::SoundLevel::Muted)
         {
             ScenarioReset();
@@ -220,7 +220,7 @@ void AdvancedCapture::RecordLimitationExceeded(Windows::Media::Capture::MediaCap
 void AdvancedCapture::Failed(Windows::Media::Capture::MediaCapture ^currentCaptureObject, Windows::Media::Capture::MediaCaptureFailedEventArgs^ currentFailure)
 {
     String ^message = "Fatal error" + currentFailure->Message;
     create_task(Dispatcher->RunAsync(Windows::UI::Core::CoreDispatcherPriority::High,
         ref new Windows::UI::Core::DispatchedHandler([this, message]()
     {
         ShowStatusMessage(message);
@@ -325,7 +325,7 @@ void AdvancedCapture::btnTakePhoto_Click(Platform::Object^ sender, Windows::UI::
     EnableButton(false, "TakePhoto");
     auto currentRotation = GetCurrentPhotoRotation();
     task<StorageFile^>(KnownFolders::PicturesLibrary->CreateFileAsync(TEMP_PHOTO_FILE_NAME, Windows::Storage::CreationCollisionOption::GenerateUniqueName)).then([this, currentRotation](task<StorageFile^> getFileTask)
     {
         try
         {
@@ -520,7 +520,7 @@ void AdvancedCapture::lstEnumedDevices_SelectionChanged(Platform::Object^ sender
         }
     });
 }
 btnStartDevice2->IsEnabled = true;
 btnStartPreview2->IsEnabled = false;
 btnStartStopRecord2->IsEnabled = false;
@@ -581,12 +581,12 @@ void AdvancedCapture::EnumerateWebcamsAsync()
 }

 void AdvancedCapture::AddEffectToImageStream()
 {
     auto mediaCapture = m_mediaCaptureMgr.Get();
     Windows::Media::Capture::VideoDeviceCharacteristic charecteristic = mediaCapture->MediaCaptureSettings->VideoDeviceCharacteristic;

     if((charecteristic != Windows::Media::Capture::VideoDeviceCharacteristic::AllStreamsIdentical) &&
        (charecteristic != Windows::Media::Capture::VideoDeviceCharacteristic::PreviewPhotoStreamsIdentical) &&
        (charecteristic != Windows::Media::Capture::VideoDeviceCharacteristic::RecordPhotoStreamsIdentical))
     {
         Windows::Media::MediaProperties::IMediaEncodingProperties ^props = mediaCapture->VideoDeviceController->GetMediaStreamProperties(Windows::Media::Capture::MediaStreamType::Photo);
@@ -596,13 +596,13 @@ void AdvancedCapture::AddEffectToImageStream()
     Windows::Foundation::Collections::IVectorView<Windows::Media::MediaProperties::IMediaEncodingProperties^>^ supportedPropsList = mediaCapture->VideoDeviceController->GetAvailableMediaStreamProperties(Windows::Media::Capture::MediaStreamType::Photo);
     {
         unsigned int i = 0;
-        while (i< supportedPropsList->Size)
+        while (i < supportedPropsList->Size)
         {
             Windows::Media::MediaProperties::IMediaEncodingProperties^ props = supportedPropsList->GetAt(i);
             String^ s = props->Type;
             if(props->Type->Equals("Video"))
             {
                 task<void>(mediaCapture->VideoDeviceController->SetMediaStreamPropertiesAsync(Windows::Media::Capture::MediaStreamType::Photo,props)).then([this](task<void> changeTypeTask)
                 {
                     try
@@ -616,7 +616,7 @@ void AdvancedCapture::AddEffectToImageStream()
                     {
                         effectTask3.get();
                         m_bEffectAddedToPhoto = true;
                         ShowStatusMessage("Adding effect to photo stream successful");
                         chkAddRemoveEffect->IsEnabled = true;
                     }
@@ -633,8 +633,7 @@ void AdvancedCapture::AddEffectToImageStream()
                     {
                         ShowExceptionMessage(e);
                         chkAddRemoveEffect->IsEnabled = true;
                         chkAddRemoveEffect->IsChecked = false;
                     }
                 });
@@ -686,8 +685,8 @@ void AdvancedCapture::chkAddRemoveEffect_Checked(Platform::Object^ sender, Windo
             auto mediaCapture = m_mediaCaptureMgr.Get();
             Windows::Media::Capture::VideoDeviceCharacteristic charecteristic = mediaCapture->MediaCaptureSettings->VideoDeviceCharacteristic;
             ShowStatusMessage("Adding effect to preview stream successful");
             if((charecteristic != Windows::Media::Capture::VideoDeviceCharacteristic::AllStreamsIdentical) &&
                (charecteristic != Windows::Media::Capture::VideoDeviceCharacteristic::PreviewRecordStreamsIdentical))
             {
                 Windows::Media::MediaProperties::IMediaEncodingProperties ^props = mediaCapture->VideoDeviceController->GetMediaStreamProperties(Windows::Media::Capture::MediaStreamType::VideoRecord);
@@ -703,14 +702,14 @@ void AdvancedCapture::chkAddRemoveEffect_Checked(Platform::Object^ sender, Windo
                     m_bEffectAddedToRecord = true;
                     AddEffectToImageStream();
                     chkAddRemoveEffect->IsEnabled = true;
                 }
                 catch(Exception ^e)
                 {
                     ShowExceptionMessage(e);
                     chkAddRemoveEffect->IsEnabled = true;
                     chkAddRemoveEffect->IsChecked = false;
                 }
             });
         }
         else
         {
@@ -718,7 +717,7 @@ void AdvancedCapture::chkAddRemoveEffect_Checked(Platform::Object^ sender, Windo
             chkAddRemoveEffect->IsEnabled = true;
         }
     }
     else
     {
         AddEffectToImageStream();
@@ -777,7 +776,7 @@ void AdvancedCapture::chkAddRemoveEffect_Unchecked(Platform::Object^ sender, Win
         {
             ShowExceptionMessage(e);
             chkAddRemoveEffect->IsEnabled = true;
             chkAddRemoveEffect->IsChecked = true;
         }
     });
@@ -791,7 +790,7 @@ void AdvancedCapture::chkAddRemoveEffect_Unchecked(Platform::Object^ sender, Win
         {
             ShowExceptionMessage(e);
             chkAddRemoveEffect->IsEnabled = true;
             chkAddRemoveEffect->IsChecked = true;
         }
@@ -813,7 +812,7 @@ void AdvancedCapture::chkAddRemoveEffect_Unchecked(Platform::Object^ sender, Win
         {
             ShowExceptionMessage(e);
             chkAddRemoveEffect->IsEnabled = true;
             chkAddRemoveEffect->IsChecked = true;
         }
     });
@@ -821,7 +820,7 @@ void AdvancedCapture::chkAddRemoveEffect_Unchecked(Platform::Object^ sender, Win
     else
     {
         chkAddRemoveEffect->IsEnabled = true;
         chkAddRemoveEffect->IsChecked = true;
     }
 }
 catch (Exception ^e)
@@ -1032,3 +1031,9 @@ Windows::Media::Capture::VideoRotation AdvancedCapture::VideoRotationLookup(
     }
 }
+
+void SDKSample::MediaCapture::AdvancedCapture::EffectType_SelectionChanged(Platform::Object^ sender, Windows::UI::Xaml::Controls::SelectionChangedEventArgs^ e)
+{
+}
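The new handler is an empty stub in this commit. A minimal sketch of how it could eventually forward the ComboBox selection to the transform follows; the ComboBox name EffectTypeCombo, the property key, and the activatable class id are assumptions for illustration, not part of the commit:

// Hypothetical wiring, for illustration only. MediaCapture::AddEffectAsync takes an
// IPropertySet that the pipeline forwards to IMediaExtension::SetProperties, so the
// selected effect index could travel that way (the MFT would need matching
// SetProperties logic to copy it into its attribute store).
void SDKSample::MediaCapture::AdvancedCapture::EffectType_SelectionChanged(Platform::Object^ sender, Windows::UI::Xaml::Controls::SelectionChangedEventArgs^ e)
{
    auto effectSettings = ref new Windows::Foundation::Collections::PropertySet();
    // Key and value shape are assumptions; EffectTypeCombo is an assumed x:Name.
    effectSettings->Insert("{698649BE-8EAE-4551-A4CB-3EC98FBD3D86}", (uint32)EffectTypeCombo->SelectedIndex);
    // The effect would then be (re)added with these settings, e.g.:
    // m_mediaCaptureMgr.Get()->AddEffectAsync(Windows::Media::Capture::MediaStreamType::VideoPreview,
    //     "GrayscaleTransform.GrayscaleEffect", effectSettings);
}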
@@ -98,6 +98,7 @@ namespace SDKSample
         bool m_bRotateVideoOnOrientationChange;
         bool m_bReversePreviewRotation;
         Windows::Foundation::EventRegistrationToken m_orientationChangedEventToken;
+        void EffectType_SelectionChanged(Platform::Object^ sender, Windows::UI::Xaml::Controls::SelectionChangedEventArgs^ e);
     };
 }
 }
@@ -116,17 +116,7 @@
     <ColumnDefinition Width="Auto"/>
     <ColumnDefinition Width="*"/>
 </Grid.ColumnDefinitions>
-<Image Grid.Row="0" Source="Assets/microsoft-sdk.png" AutomationProperties.Name="Microsoft Logo" Stretch="None" HorizontalAlignment="Left"/>
-<TextBlock Style="{StaticResource FooterStyle}" Text="© Microsoft Corporation. All rights reserved." TextWrapping="Wrap" Grid.Row="1" HorizontalAlignment="Left"/>
-<StackPanel x:Name="FooterPanel" Orientation="Horizontal" Grid.Row="1" Grid.Column="1" HorizontalAlignment="Right">
-    <HyperlinkButton Content="Terms of use" Tag="http://www.microsoft.com/About/Legal/EN/US/IntellectualProperty/Copyright/default.aspx"
-        Click="Footer_Click" FontSize="12" Style="{StaticResource HyperlinkStyle}"/>
-    <TextBlock Text="|" Style="{StaticResource SeparatorStyle}" VerticalAlignment="Center"/>
-    <HyperlinkButton Content="Trademarks" Tag="http://www.microsoft.com/About/Legal/EN/US/IntellectualProperty/Trademarks/EN-US.aspx"
-        Click="Footer_Click" FontSize="12" Style="{StaticResource HyperlinkStyle}"/>
-    <TextBlock Text="|" Style="{StaticResource SeparatorStyle}" VerticalAlignment="Center"/>
-    <HyperlinkButton Content="Privacy Statement" Tag="http://privacy.microsoft.com" Click="Footer_Click" FontSize="12" Style="{StaticResource HyperlinkStyle}"/>
-</StackPanel>
+<StackPanel x:Name="FooterPanel" Orientation="Horizontal" Grid.Row="1" Grid.Column="1" HorizontalAlignment="Right"/>
 </Grid>
@@ -30,9 +30,9 @@ MFT_GRAYSCALE_DESTINATION_RECT (type = blob, UINT32[4] array)
 MFT_GRAYSCALE_SATURATION (type = double)
 Sets the saturation level. The nominal range is [0...1]. Values beyond 1.0f
 result in supersaturated colors. Values below 0.0f create inverted colors.

 MFT_GRAYSCALE_CHROMA_ROTATION (type = double)
 Rotates the chroma values of each pixel. The attribute value is the angle of
@@ -45,7 +45,7 @@ as a scaling transform.
 NOTES ON THE MFT IMPLEMENTATION

 1. The MFT has fixed streams: One input stream and one output stream.

 2. The MFT supports the following format: NV12 (see UpdateFormatInfo below).
@@ -56,34 +56,34 @@ NOTES ON THE MFT IMPLEMENTATION
 5. If both types are set, no type can be set until the current type is cleared.

 6. Preferred input types:
 (a) If the output type is set, that's the preferred type.
 (b) Otherwise, the preferred types are partial types, constructed from the
     list of supported subtypes.

 7. Preferred output types: As above.

 8. Streaming:
 The private BeginStreaming() method is called in response to the
 MFT_MESSAGE_NOTIFY_BEGIN_STREAMING message.
 If the client does not send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, the MFT calls
 BeginStreaming inside the first call to ProcessInput or ProcessOutput.
 This is a good approach for allocating resources that your MFT requires for
 streaming.

 9. The configuration attributes are applied in the BeginStreaming method. If the
 client changes the attributes during streaming, the change is ignored until
 streaming is stopped (either by changing the media types or by sending the
 MFT_MESSAGE_NOTIFY_END_STREAMING message) and then restarted.
 */

 // Video FOURCC codes.
 const DWORD FOURCC_NV12 = '21VN';

 // Static array of media types (preferred and accepted).
 const GUID g_MediaSubtypes[] =
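An aside on the FOURCC constant a few lines above: MSVC packs a multi-character literal with its first character in the most significant byte, so '21VN' holds the bytes of "NV12" in little-endian order. A hypothetical equivalent, not part of the commit, that some readers may find clearer:

// Same value as '21VN' under MSVC's multi-char literal packing (0x3231564E).
// MAKEFOURCC comes with the Windows SDK headers.
const DWORD FOURCC_NV12_ALT = MAKEFOURCC('N', 'V', '1', '2');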
@@ -124,11 +124,11 @@ inline T clamp(const T& val, const T& minVal, const T& maxVal)
 void TransformImage_NV12(
     const D2D1::Matrix3x2F& mat,
     const D2D_RECT_U& rcDest,
     _Inout_updates_(_Inexpressible_(2 * lDestStride * dwHeightInPixels)) BYTE *pDest,
     _In_ LONG lDestStride,
     _In_reads_(_Inexpressible_(2 * lSrcStride * dwHeightInPixels)) const BYTE* pSrc,
     _In_ LONG lSrcStride,
     _In_ DWORD dwWidthInPixels,
     _In_ DWORD dwHeightInPixels)
 {
     // NV12 is planar: Y plane, followed by packed U-V plane.
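For readers unfamiliar with NV12: the buffer is a full-resolution Y plane followed by a half-height plane of interleaved U/V byte pairs. A small sketch of the arithmetic this layout implies, using the parameter names from the signature above (the pointer names are illustrative):

// NV12 plane layout for a source of stride lSrcStride and height dwHeightInPixels:
//   bytes [0, stride*height)            : Y plane, one byte per pixel
//   bytes [stride*height, stride*height*3/2) : interleaved U,V at quarter resolution
const BYTE* pYPlane  = pSrc;
const BYTE* pUVPlane = pSrc + (size_t)lSrcStride * dwHeightInPixels;
const size_t cbFrame = (size_t)lSrcStride * dwHeightInPixels * 3 / 2;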
@@ -189,7 +189,7 @@ void TransformImage_NV12(
 CGrayscale::CGrayscale() :
     m_pSample(NULL), m_pInputType(NULL), m_pOutputType(NULL),
     m_imageWidthInPixels(0), m_imageHeightInPixels(0), m_cbImageSize(0),
     m_TransformType(Preview), m_rcDest(D2D1::RectU()), m_bStreamingInitialized(false),
     m_pAttributes(NULL)
 {
     InitializeCriticalSectionEx(&m_critSec, 3000, 0);
@@ -786,12 +786,12 @@ HRESULT CGrayscale::GetInputStatus(
         return MF_E_INVALIDSTREAMNUMBER;
     }

     // If an input sample is already queued, do not accept another sample until the
     // client calls ProcessOutput or Flush.
     //
     // NOTE: It is possible for an MFT to accept more than one input sample. For
     // example, this might be required in a video decoder if the frames do not
     // arrive in temporal order. In that case, the decoder must hold a queue of
     // samples. For the video effect, each sample is transformed independently, so
     // there is no reason to queue multiple input samples.
@@ -902,12 +902,12 @@ HRESULT CGrayscale::ProcessMessage(
     case MFT_MESSAGE_SET_D3D_MANAGER:
         // Sets a pointer to the IDirect3DDeviceManager9 interface.

         // The pipeline should never send this message unless the MFT sets the MF_SA_D3D_AWARE
         // attribute to TRUE. Because this MFT does not set MF_SA_D3D_AWARE, it is an error
         // to send the MFT_MESSAGE_SET_D3D_MANAGER message to the MFT. Return an error code in
         // this case.

         // NOTE: If this MFT were D3D-enabled, it would cache the IDirect3DDeviceManager9
         // pointer for use during streaming.

         hr = E_NOTIMPL;
@@ -972,7 +972,7 @@ HRESULT CGrayscale::ProcessInput(
     // The client must set input and output types before calling ProcessInput.
     if (!m_pInputType || !m_pOutputType)
     {
         hr = MF_E_NOTACCEPTING;
         goto done;
     }
@@ -1016,7 +1016,7 @@ HRESULT CGrayscale::ProcessOutput(
     // This MFT does not accept any flags for the dwFlags parameter.

     // The only defined flag is MFT_PROCESS_OUTPUT_DISCARD_WHEN_NO_BUFFER. This flag
     // applies only when the MFT marks an output stream as lazy or optional. But this
     // MFT has no lazy or optional streams, so the flag is not valid.
@@ -1266,7 +1266,7 @@ HRESULT CGrayscale::OnCheckMediaType(IMFMediaType *pmt)
         goto done;
     }

     // Reject single-field media types.
     UINT32 interlace = MFGetAttributeUINT32(pmt, MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
     if (interlace == MFVideoInterlace_FieldSingleUpper || interlace == MFVideoInterlace_FieldSingleLower)
     {
@@ -1350,10 +1350,13 @@ HRESULT CGrayscale::BeginStreaming()
         goto done;
     }

-    // Get the chroma transformations.
-    // float scale = (float)MFGetAttributeDouble(m_pAttributes, MFT_GRAYSCALE_SATURATION, 0.0f);
-    // float angle = (float)MFGetAttributeDouble(m_pAttributes, MFT_GRAYSCALE_CHROMA_ROTATION, 0.0f);
+    // Get the effect type.
+    UINT32 effect = MFGetAttributeUINT32(m_pAttributes, MFT_IMAGE_EFFECT, 1);
+    if ((effect >= 0) && (effect < InvalidEffect))
+    {
+        m_TransformType = (ProcessingType)effect;
+    }

     m_bStreamingInitialized = true;
 }
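Since BeginStreaming now reads MFT_IMAGE_EFFECT from the attribute store, a client holding the transform directly could select an effect as sketched below. This is a minimal sketch, assuming the MFT exposes its store through IMFTransform::GetAttributes (this sample keeps one in m_pAttributes); per note 9 above, the value takes effect when BeginStreaming next runs:

// Illustrative only: request the Canny effect before streaming starts.
HRESULT SelectEffect(IMFTransform *pMFT)
{
    IMFAttributes *pAttributes = NULL;
    HRESULT hr = pMFT->GetAttributes(&pAttributes);
    if (SUCCEEDED(hr))
    {
        // Canny is the ProcessingType enumerator from Grayscale.h.
        hr = pAttributes->SetUINT32(MFT_IMAGE_EFFECT, Canny);
        pAttributes->Release();
    }
    return hr;
}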
@@ -1363,7 +1366,7 @@ done:
     return hr;
 }

 // End streaming.
 // This method is called if the client sends an MFT_MESSAGE_NOTIFY_END_STREAMING
 // message, or when the media type changes. In general, it should be called whenever
@@ -1414,16 +1417,72 @@ HRESULT CGrayscale::OnProcessOutput(IMFMediaBuffer *pIn, IMFMediaBuffer *pOut)
         return hr;
     }

-    //(*m_pTransformFn)(m_transform, m_rcDest, pDest, lDestStride, pSrc, lSrcStride,
-    //    m_imageWidthInPixels, m_imageHeightInPixels);
-    cv::Mat InputFrame(m_imageHeightInPixels + m_imageHeightInPixels/2, m_imageWidthInPixels, CV_8UC1, pSrc, lSrcStride);
-    cv::Mat InputGreyScale(InputFrame, cv::Range(0, m_imageHeightInPixels), cv::Range(0, m_imageWidthInPixels));
-    cv::Mat OutputFrame(m_imageHeightInPixels + m_imageHeightInPixels/2, m_imageWidthInPixels, CV_8UC1, pDest, lDestStride);
-    OutputFrame.setTo(cv::Scalar(128));
-    cv::Mat OutputGreyScale(OutputFrame, cv::Range(0, m_imageHeightInPixels), cv::Range(0, m_imageWidthInPixels));
-    cv::Canny(InputGreyScale, OutputGreyScale, 80, 90);
+    cv::Mat InputFrame(m_imageHeightInPixels + m_imageHeightInPixels/2, m_imageWidthInPixels, CV_8UC1, pSrc, lSrcStride);
+    cv::Mat InputGreyScale(InputFrame, cv::Range(0, m_imageHeightInPixels), cv::Range(0, m_imageWidthInPixels));
+    cv::Mat OutputFrame(m_imageHeightInPixels + m_imageHeightInPixels/2, m_imageWidthInPixels, CV_8UC1, pDest, lDestStride);
+
+    switch (m_TransformType)
+    {
+    case Preview:
+        {
+            InputFrame.copyTo(OutputFrame);
+        } break;
+    case GrayScale:
+        {
+            OutputFrame.setTo(cv::Scalar(128));
+            cv::Mat OutputGreyScale(OutputFrame, cv::Range(0, m_imageHeightInPixels), cv::Range(0, m_imageWidthInPixels));
+            InputGreyScale.copyTo(OutputGreyScale);
+        } break;
+    case Canny:
+        {
+            OutputFrame.setTo(cv::Scalar(128));
+            cv::Mat OutputGreyScale(OutputFrame, cv::Range(0, m_imageHeightInPixels), cv::Range(0, m_imageWidthInPixels));
+            cv::Canny(InputGreyScale, OutputGreyScale, 80, 90);
+        } break;
+    case Sobel:
+        {
+            OutputFrame.setTo(cv::Scalar(128));
+            cv::Mat OutputGreyScale(OutputFrame, cv::Range(0, m_imageHeightInPixels), cv::Range(0, m_imageWidthInPixels));
+            cv::Sobel(InputGreyScale, OutputGreyScale, CV_8U, 1, 1);
+        } break;
+    case Histogram:
+        {
+            const int mHistSizeNum = 25;
+            const int channels[3][1] = {{0}, {1}, {2}};
+            const int mHistSize[] = {25};
+            const float baseRange[] = {0.f, 256.f};
+            const float* ranges[] = {baseRange};
+            const cv::Scalar mColorsRGB[] = { cv::Scalar(200, 0, 0, 255), cv::Scalar(0, 200, 0, 255),
+                                              cv::Scalar(0, 0, 200, 255) };
+            cv::Mat BgrFrame;
+            cv::cvtColor(InputFrame, BgrFrame, cv::COLOR_YUV420sp2BGR);
+            int thickness = (int) (BgrFrame.cols / (mHistSizeNum + 10) / 5);
+            if(thickness > 5) thickness = 5;
+            int offset = (int) ((BgrFrame.cols - (5*mHistSizeNum + 4*10)*thickness)/2);
+            // Draw one histogram per B, G, R channel.
+            for (int c = 0; c < 3; c++)
+            {
+                // calcHist produces a CV_32F histogram, so collect it in a cv::Mat
+                // and read the bins back with at<float>.
+                cv::Mat hist;
+                cv::calcHist(&BgrFrame, 1, channels[c], cv::Mat(), hist, 1, mHistSize, ranges);
+                cv::normalize(hist, hist, BgrFrame.rows/2, 0, cv::NORM_INF);
+                for(int h = 0; h < mHistSizeNum; h++)
+                {
+                    cv::Point mP1, mP2;
+                    mP1.x = mP2.x = offset + (c * (mHistSizeNum + 10) + h) * thickness;
+                    mP1.y = BgrFrame.rows - 1;
+                    mP2.y = mP1.y - 2 - (int)hist.at<float>(h);
+                    cv::line(BgrFrame, mP1, mP2, mColorsRGB[c], thickness);
+                }
+            }
+            // NOTE: COLOR_BGR2YUV_I420 emits planar I420, while the negotiated output
+            // format is NV12; the chroma plane layouts differ, so colors in this view
+            // may be off.
+            cv::cvtColor(BgrFrame, OutputFrame, cv::COLOR_BGR2YUV_I420);
+        } break;
+    default:
+        break;
+    }

     // Set the data size on the output buffer.
     hr = pOut->SetCurrentLength(m_cbImageSize);
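A worked example of the buffer geometry above, assuming a 640×480 NV12 frame with stride equal to width (an illustrative comment, not commit code):

// InputFrame  : 720 rows x 640 cols, CV_8UC1 = the whole NV12 buffer;
//               rows [0,480) hold the Y plane, rows [480,720) the interleaved UV pairs.
// InputGreyScale / OutputGreyScale : 480x640 headers over the Y rows only (no copy).
// OutputFrame.setTo(128) also fills the UV rows with 128, i.e. neutral chroma,
// which is why the GrayScale/Canny/Sobel branches render as grey video.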
@@ -1461,7 +1520,7 @@ HRESULT CGrayscale::UpdateFormatInfo()
     {
         goto done;
     }

     if (subtype != MFVideoFormat_NV12)
     {
         hr = E_UNEXPECTED;
         goto done;
@@ -1511,7 +1570,7 @@ HRESULT GetImageSize(DWORD fcc, UINT32 width, UINT32 height, DWORD* pcbImage)
     return hr;
 }

 // Get the default stride for a video format.
 HRESULT GetDefaultStride(IMFMediaType *pType, LONG *plStride)
 {
     LONG lStride = 0;
@@ -37,18 +37,18 @@ DEFINE_GUID(CLSID_GrayscaleMFT,
 // Configuration attributes

+// {698649BE-8EAE-4551-A4CB-3EC98FBD3D86}
+DEFINE_GUID(MFT_IMAGE_EFFECT,
+0x698649be, 0x8eae, 0x4551, 0xa4, 0xcb, 0x3e, 0xc9, 0x8f, 0xbd, 0x3d, 0x86);
+
 // {7BBBB051-133B-41F5-B6AA-5AFF9B33A2CB}
 DEFINE_GUID(MFT_GRAYSCALE_DESTINATION_RECT,
 0x7bbbb051, 0x133b, 0x41f5, 0xb6, 0xaa, 0x5a, 0xff, 0x9b, 0x33, 0xa2, 0xcb);

 enum ProcessingType
 {
     Preview,
     GrayScale,
     Canny,
-    Zoom,
-    Sepia
+    Sobel,
+    Histogram,
+    InvalidEffect
 };
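Because the enumerators take implicit values starting at zero, the attribute plumbing reads as follows (a reading note, not new code):

// Preview = 0, GrayScale = 1, Canny = 2, Sobel = 3, Histogram = 4, InvalidEffect = 5.
// Hence MFGetAttributeUINT32(m_pAttributes, MFT_IMAGE_EFFECT, 1) in BeginStreaming
// falls back to GrayScale when the client never sets the attribute, and
// InvalidEffect serves as the exclusive upper bound for the range check there.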
template <class T> void SafeRelease(T **ppT) template <class T> void SafeRelease(T **ppT)
...@@ -63,9 +63,9 @@ template <class T> void SafeRelease(T **ppT) ...@@ -63,9 +63,9 @@ template <class T> void SafeRelease(T **ppT)
// CGrayscale class: // CGrayscale class:
// Implements a grayscale video effect. // Implements a grayscale video effect.
class CGrayscale class CGrayscale
: public Microsoft::WRL::RuntimeClass< : public Microsoft::WRL::RuntimeClass<
Microsoft::WRL::RuntimeClassFlags< Microsoft::WRL::RuntimeClassType::WinRtClassicComMix >, Microsoft::WRL::RuntimeClassFlags< Microsoft::WRL::RuntimeClassType::WinRtClassicComMix >,
ABI::Windows::Media::IMediaExtension, ABI::Windows::Media::IMediaExtension,
IMFTransform > IMFTransform >
{ {
@@ -231,7 +231,7 @@ private:
     CRITICAL_SECTION m_critSec;

     // Transformation parameters
     ProcessingType m_TransformType;
     D2D_RECT_U m_rcDest; // Destination rectangle for the effect.

     // Streaming