diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..e2669d8 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,3 @@ +*.psht filter=lfs diff=lfs merge=lfs -text +*.mov filter=lfs diff=lfs merge=lfs -text +*.mp4 filter=lfs diff=lfs merge=lfs -text diff --git a/Config/DefaultEditor.ini b/Config/DefaultEditor.ini new file mode 100644 index 0000000..f341f2f --- /dev/null +++ b/Config/DefaultEditor.ini @@ -0,0 +1,9 @@ +[/Script/UnrealEd.LevelEditorViewportSettings] +bInvertMiddleMousePan=True +bTransparentBoxSelection=True +bUseDistanceScaledCameraSpeed=True + +[ContentBrowser] +ContentBrowserTab1.SourcesExpanded=True + + diff --git a/Config/DefaultEditorSettings.ini b/Config/DefaultEditorSettings.ini new file mode 100644 index 0000000..fe03c95 --- /dev/null +++ b/Config/DefaultEditorSettings.ini @@ -0,0 +1,17 @@ + +[/Script/VREditor.VRModeSettings] +bEnableAutoVREditMode=False +bAutokeySequences=True +InteractorHand=Right +bShowWorldMovementGrid=False +bShowWorldMovementPostProcess=False +bShowWorldScaleProgressBar=True +UIBrightness=1.500000 +GizmoScale=0.800000 +DoubleClickTime=0.250000 +TriggerPressedThreshold_Vive=0.330000 +TriggerPressedThreshold_Rift=0.500000 +bScaleWorldWithDynamicPivot=True +bAllowSimultaneousWorldScalingAndRotation=True + + diff --git a/Config/DefaultEngine.ini b/Config/DefaultEngine.ini new file mode 100644 index 0000000..521e625 --- /dev/null +++ b/Config/DefaultEngine.ini @@ -0,0 +1,156 @@ +[/Script/HardwareTargeting.HardwareTargetingSettings] +; quality settings +TargetedHardwareClass=Desktop +AppliedTargetedHardwareClass=Desktop +DefaultGraphicsPerformance=Maximum +AppliedDefaultGraphicsPerformance=Maximum + + +[/Script/Engine.RendererSettings] +; exposure control and tone mapping settings +r.Tonemapper.Sharpen=2 +r.DefaultFeature.AutoExposure.ExtendDefaultLuminanceRange=True +r.DefaultFeature.AutoExposure.ExtendDefaultLuminanceRange=True +r.DefaultFeature.Bloom=False +r.DefaultFeature.AutoExposure=False 
+;r.DefaultFeature.LocalExposure.HighlightContrastScale=0.8
+r.DefaultFeature.LocalExposure.HighlightContrastScale=1.0
+;r.DefaultFeature.LocalExposure.ShadowContrastScale=0.8
+r.DefaultFeature.LocalExposure.ShadowContrastScale=1.0
+r.DefaultFeature.MotionBlur=False
+r.SceneRenderTargetResizeMethod=2
+
+; dynamic gi settings
+r.LightPropagationVolume=0
+
+; quality settings
+r.ReflectionCaptureResolution=2048
+r.AllowStaticLighting=True
+r.HighResScreenshotDelay=8
+r.DefaultBackBufferPixelFormat=4
+r.AllowGlobalClipPlane=False
+r.GBufferFormat=3
+
+; shader settings
+r.PostProcessing.PropagateAlpha=1
+r.SupportSkyAtmosphereAffectsHeightFog=True
+r.DefaultFeature.LensFlare=True
+r.ClearCoatNormal=False
+r.NormalMapsForStaticLighting=False
+
+
+
+r.GenerateMeshDistanceFields=True
+
+r.DynamicGlobalIlluminationMethod=1
+
+r.ReflectionMethod=1
+
+r.SkinCache.CompileShaders=True
+
+r.RayTracing=True
+
+r.Shadow.Virtual.Enable=1
+RedChromaticityCoordinate=(X=0.708000,Y=0.292000)
+GreenChromaticityCoordinate=(X=0.170000,Y=0.797000)
+BlueChromaticityCoordinate=(X=0.131000,Y=0.046000)
+WhiteChromaticityCoordinate=(X=0.312700,Y=0.329000)
+WorkingColorSpaceChoice=Rec2020
+
+[/Script/EngineSettings.GameMapsSettings]
+;defines what level to be loaded by default in the editor and at playback time
+EditorStartupMap=/Game/Main.Main
+;EditorStartupMap=/Game/Main.Main
+
+[/Script/UnrealEd.UnrealEdEngine]
+; Remove Engine Template maps
+-TemplateMapInfos=(ThumbnailTexture=Texture2D'/Engine/Maps/Templates/Thumbnails/VR-Basic.VR-Basic',Map="/Engine/Maps/Templates/VR-Basic")
+
+
+; Allows for Hardware Accelerated Video Decoding
+
+[/Script/WmfMediaFactory.WmfMediaSettings]
+AllowNonStandardCodecs=True
+HardwareAcceleratedVideoDecoding=True
+LowLatency=False
+NativeAudioOut=False
+
+; Adds Virtual Scouting Widget for VR Scouting
+
+[/Script/VPUtilitiesEditor.VPUtilitiesEditorSettings]
+VirtualScoutingUI=/VirtualProductionUtilities/Editor/VirtualScoutingWidget.VirtualScoutingWidget_C
+FlightSpeed=0.500000
+GripNavSpeed=0.250000
+bUseMetric=False
+bUseTransformGizmo=False
+bUseGripInertiaDamping=True
+InertiaDamping=0.950000
+bIsHelperSystemEnabled=True
+ScoutingSubsystemEdititorUtilityActorClassPath=/VirtualProductionUtilities/VirtualProductionHelpers.VirtualProductionHelpers_C
+
+
+; Enables WebControl API
+WebControl.EnableServerOnStartup=1
+
+;Adds Remote Session for Vcam
+
+[RemoteSession]
++Channels=(Name=FRemoteSessionFrameBufferChannel,Mode=Write)
++Channels=(Name=FRemoteSessionInputChannel,Mode=Read)
++Channels=(Name=FRemoteSessionXRTrackingChannel,Mode=Read)
+
+; Setup for Multiuser
+
+[/Script/Concert.ConcertClientConfig]
+bIsHeadless=False
+bInstallEditorToolbarButton=True
+bAutoConnect=False
+DefaultServerURL=
+DefaultSessionName=
+DefaultSessionToRestore=
+DefaultSaveSessionAs=
+ClientSettings=(DisplayName="",AvatarColor=(R=1.000000,G=1.000000,B=1.000000,A=1.000000),DesktopAvatarActorClass=/ConcertSyncClient/DesktopPresence.DesktopPresence_C,VRAvatarActorClass=/ConcertSyncClient/VRPresence.VRPresence_C,DiscoveryTimeoutSeconds=5,SessionTickFrequencySeconds=1,LatencyCompensationMs=0.000000,Tags=)
+EndpointSettings=(bEnableLogging=False,PurgeProcessedMessageDelaySeconds=30,RemoteEndpointTimeoutSeconds=60)
+
+; Setup for Ndisplay
+
+[/Script/DisplayClusterEditor.DisplayClusterEditorSettings]
+bEnabled=True
+
+[/Script/WindowsTargetPlatform.WindowsTargetSettings]
+DefaultGraphicsRHI=DefaultGraphicsRHI_DX12
+;DefaultGraphicsRHI=DefaultGraphicsRHI_DX12
+-D3D12TargetedShaderFormats=PCD3D_SM5
++D3D12TargetedShaderFormats=PCD3D_SM6
+-D3D11TargetedShaderFormats=PCD3D_SM5
++D3D11TargetedShaderFormats=PCD3D_SM5
+
+[/Script/LinuxTargetPlatform.LinuxTargetSettings]
+-TargetedRHIs=SF_VULKAN_SM5
++TargetedRHIs=SF_VULKAN_SM6
+
+[/Script/Engine.Engine]
++ActiveGameNameRedirects=(OldGameName="TP_ME_BlankBP",NewGameName="/Script/VPTemplate")
++ActiveGameNameRedirects=(OldGameName="/Script/TP_ME_BlankBP",NewGameName="/Script/VPTemplate")
+TimecodeProviderClassName=/Game/TheStudio/Media/Sync/DanteAudioTimecodeProvider.DanteAudioTimecodeProvider_C +GenerateDefaultTimecodeFrameRate=(Numerator=25,Denominator=1) +CustomTimeStepClassName=/Game/TheStudio/Media/Sync/TestBlackmagic.TestBlackmagic_C +bGenerateDefaultTimecode=True +bSmoothFrameRate=True +bUseFixedFrameRate=True +FixedFrameRate=25.000000 + +[/Script/AndroidFileServerEditor.AndroidFileServerRuntimeSettings] +bEnablePlugin=True +bAllowNetworkConnection=True +SecurityToken=535ACFBC4221366775230DB093F16EEB +bIncludeInShipping=False +bAllowExternalStartInShipping=False +bCompileAFSProject=False +bUseCompression=False +bLogFiles=False +bReportStats=False +ConnectionType=USBOnly +bUseManualIPAddress=False +ManualIPAddress= + diff --git a/Config/DefaultGame.ini b/Config/DefaultGame.ini new file mode 100644 index 0000000..9b5401e --- /dev/null +++ b/Config/DefaultGame.ini @@ -0,0 +1,24 @@ +[/Script/EngineSettings.GeneralProjectSettings] +bUseBorderlessWindow=True + + +ProjectID=59FCA5EC424A54D5421213B1B85602B4 + +[StartupActions] +bAddPacks=True +InsertPack=(PackSource="StarterContent.upack",PackName="StarterContent") + +[SectionsToSave] ++Section=StartupActions + +[/Script/MediaFrameworkUtilities.MediaProfileSettings] +bApplyInCommandlet=False ++MediaSourceProxy=/Game/TheStudio/Media/Proxies/ProxyMediaSource_CineCamera.ProxyMediaSource_CineCamera ++MediaSourceProxy=/Game/TheStudio/Media/Proxies/ProxyMediaSource_CameraSDI3.ProxyMediaSource_CameraSDI3 ++MediaOutputProxy=/Game/TheStudio/Media/Proxies/ProxyMediaOutput_Monitor.ProxyMediaOutput_Monitor ++MediaOutputProxy=/Game/TheStudio/Media/Proxies/ProxyMediaOutput_Multiview.ProxyMediaOutput_Multiview +StartupMediaProfile=/Game/TheStudio/Media/Profiles/MediaProfile_Production.MediaProfile_Production + +[/Script/LiveLink.LiveLinkSettings] +DefaultLiveLinkPreset=/Game/TheStudio/Presets/LiveLinkhubPreset.LiveLinkhubPreset + diff --git a/Config/DefaultInput.ini b/Config/DefaultInput.ini new file mode 100644 
index 0000000..46e3d5f --- /dev/null +++ b/Config/DefaultInput.ini @@ -0,0 +1,88 @@ + + +[/Script/Engine.InputSettings] +-AxisConfig=(AxisKeyName="Gamepad_LeftX",AxisProperties=(DeadZone=0.25,Exponent=1.f,Sensitivity=1.f)) +-AxisConfig=(AxisKeyName="Gamepad_LeftY",AxisProperties=(DeadZone=0.25,Exponent=1.f,Sensitivity=1.f)) +-AxisConfig=(AxisKeyName="Gamepad_RightX",AxisProperties=(DeadZone=0.25,Exponent=1.f,Sensitivity=1.f)) +-AxisConfig=(AxisKeyName="Gamepad_RightY",AxisProperties=(DeadZone=0.25,Exponent=1.f,Sensitivity=1.f)) +-AxisConfig=(AxisKeyName="MouseX",AxisProperties=(DeadZone=0.f,Exponent=1.f,Sensitivity=0.07f)) +-AxisConfig=(AxisKeyName="MouseY",AxisProperties=(DeadZone=0.f,Exponent=1.f,Sensitivity=0.07f)) +-AxisConfig=(AxisKeyName="Mouse2D",AxisProperties=(DeadZone=0.f,Exponent=1.f,Sensitivity=0.07f)) ++AxisConfig=(AxisKeyName="Gamepad_LeftX",AxisProperties=(DeadZone=0.250000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="Gamepad_LeftY",AxisProperties=(DeadZone=0.250000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="Gamepad_RightX",AxisProperties=(DeadZone=0.250000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="Gamepad_RightY",AxisProperties=(DeadZone=0.250000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="MouseX",AxisProperties=(DeadZone=0.000000,Sensitivity=0.070000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="MouseY",AxisProperties=(DeadZone=0.000000,Sensitivity=0.070000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="Mouse2D",AxisProperties=(DeadZone=0.000000,Sensitivity=0.070000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="MouseWheelAxis",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="Gamepad_LeftTriggerAxis",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) 
++AxisConfig=(AxisKeyName="Gamepad_RightTriggerAxis",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="Gamepad_Special_Left_X",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="Gamepad_Special_Left_Y",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="Vive_Left_Trigger_Axis",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="Vive_Left_Trackpad_X",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="Vive_Left_Trackpad_Y",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="Vive_Right_Trigger_Axis",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="Vive_Right_Trackpad_X",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="Vive_Right_Trackpad_Y",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="MixedReality_Left_Trigger_Axis",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="MixedReality_Left_Thumbstick_X",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="MixedReality_Left_Thumbstick_Y",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="MixedReality_Left_Trackpad_X",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="MixedReality_Left_Trackpad_Y",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) 
++AxisConfig=(AxisKeyName="MixedReality_Right_Trigger_Axis",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="MixedReality_Right_Thumbstick_X",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="MixedReality_Right_Thumbstick_Y",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="MixedReality_Right_Trackpad_X",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="MixedReality_Right_Trackpad_Y",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="OculusTouch_Left_Grip_Axis",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="OculusTouch_Left_Trigger_Axis",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="OculusTouch_Left_Thumbstick_X",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="OculusTouch_Left_Thumbstick_Y",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="OculusTouch_Right_Grip_Axis",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="OculusTouch_Right_Trigger_Axis",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="OculusTouch_Right_Thumbstick_X",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="OculusTouch_Right_Thumbstick_Y",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) 
++AxisConfig=(AxisKeyName="ValveIndex_Left_Grip_Axis",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="ValveIndex_Left_Grip_Force",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="ValveIndex_Left_Trigger_Axis",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="ValveIndex_Left_Thumbstick_X",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="ValveIndex_Left_Thumbstick_Y",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="ValveIndex_Left_Trackpad_X",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="ValveIndex_Left_Trackpad_Y",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="ValveIndex_Left_Trackpad_Force",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="ValveIndex_Right_Grip_Axis",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="ValveIndex_Right_Grip_Force",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="ValveIndex_Right_Trigger_Axis",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="ValveIndex_Right_Thumbstick_X",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="ValveIndex_Right_Thumbstick_Y",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) 
++AxisConfig=(AxisKeyName="ValveIndex_Right_Trackpad_X",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="ValveIndex_Right_Trackpad_Y",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) ++AxisConfig=(AxisKeyName="ValveIndex_Right_Trackpad_Force",AxisProperties=(DeadZone=0.000000,Sensitivity=1.000000,Exponent=1.000000,bInvert=False)) +bAltEnterTogglesFullscreen=True +bF11TogglesFullscreen=True +bUseMouseForTouch=False +bEnableMouseSmoothing=True +bEnableFOVScaling=True +bCaptureMouseOnLaunch=True +bEnableLegacyInputScales=True +bEnableMotionControls=True +bFilterInputByPlatformUser=False +bEnableInputDeviceSubsystem=True +bShouldFlushPressedKeysOnViewportFocusLost=True +bEnableDynamicComponentInputBinding=True +bAlwaysShowTouchInterface=False +bShowConsoleOnFourFingerTap=True +bEnableGestureRecognizer=False +bUseAutocorrect=False +DefaultViewportMouseCaptureMode=CapturePermanently_IncludingInitialMouseDown +DefaultViewportMouseLockMode=LockOnCapture +FOVScale=0.011110 +DoubleClickTime=0.200000 +DefaultPlayerInputClass=/Script/EnhancedInput.EnhancedPlayerInput +DefaultInputComponentClass=/Script/EnhancedInput.EnhancedInputComponent +DefaultTouchInterface=/Engine/MobileResources/HUD/DefaultVirtualJoysticks.DefaultVirtualJoysticks +-ConsoleKeys=Tilde ++ConsoleKeys=Tilde + diff --git a/Config/DefaultLightmass.ini b/Config/DefaultLightmass.ini new file mode 100644 index 0000000..4ce1ed6 --- /dev/null +++ b/Config/DefaultLightmass.ini @@ -0,0 +1,22 @@ +; These are tweaked defaults for various lightmass solver and export settings +; Artist oriented lightmass settings are in the editor UI +; Documentation for all of these is in UnrealLightmass / Public / SceneExport.h +; This ini is reloaded every time a lighting build begins, no need to restart + +; Warning: overwriting this file with an old version will cause the editor to crash. 
This file must be in sync with the editor executable. +; Instead, create a DefaultLightmass.ini in your project and override just the values you need, then the overrides will continue to work on version upgrades. +; https://docs.unrealengine.com/latest/INT/Programming/Basics/ConfigurationFiles/ +; +; For example, in your project's Config/DefaultLightmass.ini: +; [DevOptions.PrecomputedDynamicObjectLighting] +; SurfaceLightSampleSpacing=400 + +[DevOptions.StaticLightingSettings] +bCompressLightmaps=False +NumIndirectLightingBounces=5 +NumSkyLightingBounces=5 +IndirectLightingSmoothness=0.9 +IndirectLightingQuality=2.0 + +[DevOptions.StaticLightingSceneConstants] +StaticLightingLevelScale=0.5 \ No newline at end of file diff --git a/Config/DefaultVirtualProductionUtilities.ini b/Config/DefaultVirtualProductionUtilities.ini new file mode 100644 index 0000000..0242350 --- /dev/null +++ b/Config/DefaultVirtualProductionUtilities.ini @@ -0,0 +1,13 @@ + +[/Script/VPUtilitiesEditor.VPUtilitiesEditorSettings] +VirtualScoutingUI=/VirtualProductionUtilities/Editor/VirtualScoutingWidget.VirtualScoutingWidget_C +FlightSpeed=0.500000 +GripNavSpeed=0.250000 +bUseMetric=False +bUseTransformGizmo=False +bUseGripInertiaDamping=True +InertiaDamping=0.950000 +bIsHelperSystemEnabled=True +ScoutingSubsystemEdititorUtilityActorClassPath=/VirtualProductionUtilities/VirtualProductionHelpers.VirtualProductionHelpers_C + + diff --git a/Config/Tags/VPRoles.ini b/Config/Tags/VPRoles.ini new file mode 100644 index 0000000..b5e4491 --- /dev/null +++ b/Config/Tags/VPRoles.ini @@ -0,0 +1,5 @@ +[/Script/GameplayTags.GameplayTagsList] +GameplayTagList=(Tag="3D Artist",DevComment="") +GameplayTagList=(Tag="nDisplay",DevComment="") +GameplayTagList=(Tag="Primary",DevComment="") + diff --git a/Config/Windows/WindowsEngine.ini b/Config/Windows/WindowsEngine.ini new file mode 100644 index 0000000..c3bc904 --- /dev/null +++ b/Config/Windows/WindowsEngine.ini @@ -0,0 +1,6 @@ +[/Script/Engine.Engine] 
+GameEngine=/Script/DisplayCluster.DisplayClusterGameEngine +UnrealEdEngine=/Script/DisplayClusterEditor.DisplayClusterEditorEngine +GameViewportClientClassName=/Script/DisplayCluster.DisplayClusterViewportClient + + diff --git a/Content/Levels/360VideoSphere.umap b/Content/Levels/360VideoSphere.umap new file mode 100644 index 0000000..11bc3df Binary files /dev/null and b/Content/Levels/360VideoSphere.umap differ diff --git a/Content/Levels/Lidinis.umap b/Content/Levels/Lidinis.umap new file mode 100644 index 0000000..340554d Binary files /dev/null and b/Content/Levels/Lidinis.umap differ diff --git a/Content/LidinisStreetMove.uasset b/Content/LidinisStreetMove.uasset new file mode 100644 index 0000000..e84f7ad Binary files /dev/null and b/Content/LidinisStreetMove.uasset differ diff --git a/Content/Main.umap b/Content/Main.umap new file mode 100644 index 0000000..d3148d1 Binary files /dev/null and b/Content/Main.umap differ diff --git a/Content/Media/Bundles/MediaBundle-01.uasset b/Content/Media/Bundles/MediaBundle-01.uasset new file mode 100644 index 0000000..e73c793 Binary files /dev/null and b/Content/Media/Bundles/MediaBundle-01.uasset differ diff --git a/Content/Media/Bundles/MediaBundle-01_InnerAssets/MI_MediaBundle-01.uasset b/Content/Media/Bundles/MediaBundle-01_InnerAssets/MI_MediaBundle-01.uasset new file mode 100644 index 0000000..7cdb2bc Binary files /dev/null and b/Content/Media/Bundles/MediaBundle-01_InnerAssets/MI_MediaBundle-01.uasset differ diff --git a/Content/Media/Bundles/MediaBundle-01_InnerAssets/MediaP_MediaBundle-01.uasset b/Content/Media/Bundles/MediaBundle-01_InnerAssets/MediaP_MediaBundle-01.uasset new file mode 100644 index 0000000..b984bec Binary files /dev/null and b/Content/Media/Bundles/MediaBundle-01_InnerAssets/MediaP_MediaBundle-01.uasset differ diff --git a/Content/Media/Bundles/MediaBundle-01_InnerAssets/RT_MediaBundle-01_LensDisplacement.uasset 
b/Content/Media/Bundles/MediaBundle-01_InnerAssets/RT_MediaBundle-01_LensDisplacement.uasset new file mode 100644 index 0000000..a9244bc Binary files /dev/null and b/Content/Media/Bundles/MediaBundle-01_InnerAssets/RT_MediaBundle-01_LensDisplacement.uasset differ diff --git a/Content/Media/Bundles/MediaBundle-01_InnerAssets/T_MediaBundle-01_BC.uasset b/Content/Media/Bundles/MediaBundle-01_InnerAssets/T_MediaBundle-01_BC.uasset new file mode 100644 index 0000000..55d3888 Binary files /dev/null and b/Content/Media/Bundles/MediaBundle-01_InnerAssets/T_MediaBundle-01_BC.uasset differ diff --git a/Content/Media/Bundles/MediaBundle-02.uasset b/Content/Media/Bundles/MediaBundle-02.uasset new file mode 100644 index 0000000..649dfc4 Binary files /dev/null and b/Content/Media/Bundles/MediaBundle-02.uasset differ diff --git a/Content/Media/Bundles/MediaBundle-02_InnerAssets/MI_MediaBundle-02.uasset b/Content/Media/Bundles/MediaBundle-02_InnerAssets/MI_MediaBundle-02.uasset new file mode 100644 index 0000000..d04e1a1 Binary files /dev/null and b/Content/Media/Bundles/MediaBundle-02_InnerAssets/MI_MediaBundle-02.uasset differ diff --git a/Content/Media/Bundles/MediaBundle-02_InnerAssets/MediaP_MediaBundle-02.uasset b/Content/Media/Bundles/MediaBundle-02_InnerAssets/MediaP_MediaBundle-02.uasset new file mode 100644 index 0000000..545b30a Binary files /dev/null and b/Content/Media/Bundles/MediaBundle-02_InnerAssets/MediaP_MediaBundle-02.uasset differ diff --git a/Content/Media/Bundles/MediaBundle-02_InnerAssets/RT_MediaBundle-02_LensDisplacement.uasset b/Content/Media/Bundles/MediaBundle-02_InnerAssets/RT_MediaBundle-02_LensDisplacement.uasset new file mode 100644 index 0000000..f50824a Binary files /dev/null and b/Content/Media/Bundles/MediaBundle-02_InnerAssets/RT_MediaBundle-02_LensDisplacement.uasset differ diff --git a/Content/Media/Bundles/MediaBundle-02_InnerAssets/T_MediaBundle-02_BC.uasset b/Content/Media/Bundles/MediaBundle-02_InnerAssets/T_MediaBundle-02_BC.uasset new 
file mode 100644 index 0000000..79b2fef Binary files /dev/null and b/Content/Media/Bundles/MediaBundle-02_InnerAssets/T_MediaBundle-02_BC.uasset differ diff --git a/Content/Media/Proxies/MediaOutput-01.uasset b/Content/Media/Proxies/MediaOutput-01.uasset new file mode 100644 index 0000000..36171cb Binary files /dev/null and b/Content/Media/Proxies/MediaOutput-01.uasset differ diff --git a/Content/Media/Proxies/MediaSource-01.uasset b/Content/Media/Proxies/MediaSource-01.uasset new file mode 100644 index 0000000..a85b12e Binary files /dev/null and b/Content/Media/Proxies/MediaSource-01.uasset differ diff --git a/Content/Media/Proxies/MediaSource-02.uasset b/Content/Media/Proxies/MediaSource-02.uasset new file mode 100644 index 0000000..d1e542e Binary files /dev/null and b/Content/Media/Proxies/MediaSource-02.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures.uasset new file mode 100644 index 0000000..780ac18 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v1.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v1.uasset new file mode 100644 index 0000000..21c5569 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v1.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v1_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v1_diffuse.uasset new file mode 100644 index 0000000..ef8820f Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v1_diffuse.uasset differ diff --git 
a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v2.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v2.uasset new file mode 100644 index 0000000..c19f307 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v2.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v2_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v2_diffuse.uasset new file mode 100644 index 0000000..bd8c550 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v2_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v3.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v3.uasset new file mode 100644 index 0000000..7798c7c Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v3.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v3_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v3_diffuse.uasset new file mode 100644 index 0000000..1c277ee Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v3_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v4.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v4.uasset new file mode 100644 index 0000000..dd1313e Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v4.uasset differ diff --git 
a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v4_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v4_diffuse.uasset new file mode 100644 index 0000000..3c9c6c0 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v4_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v5.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v5.uasset new file mode 100644 index 0000000..983c2a4 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v5.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v5_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v5_diffuse.uasset new file mode 100644 index 0000000..4a99eb0 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u1_v5_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v1.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v1.uasset new file mode 100644 index 0000000..36c03b4 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v1.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v1_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v1_diffuse.uasset new file mode 100644 index 0000000..6a45bbf Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v1_diffuse.uasset differ diff --git 
a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v2.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v2.uasset new file mode 100644 index 0000000..69ac85f Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v2.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v2_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v2_diffuse.uasset new file mode 100644 index 0000000..fb46c5e Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v2_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v3.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v3.uasset new file mode 100644 index 0000000..ab4eacf Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v3.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v3_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v3_diffuse.uasset new file mode 100644 index 0000000..8f37296 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v3_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v4.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v4.uasset new file mode 100644 index 0000000..4ed270c Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v4.uasset differ diff --git 
a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v4_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v4_diffuse.uasset new file mode 100644 index 0000000..5c6367e Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v4_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v5.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v5.uasset new file mode 100644 index 0000000..d023d03 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v5.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v5_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v5_diffuse.uasset new file mode 100644 index 0000000..345aa85 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u2_v5_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v1.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v1.uasset new file mode 100644 index 0000000..39d9daa Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v1.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v1_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v1_diffuse.uasset new file mode 100644 index 0000000..26c4c49 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v1_diffuse.uasset differ diff --git 
a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v2.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v2.uasset new file mode 100644 index 0000000..f224215 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v2.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v2_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v2_diffuse.uasset new file mode 100644 index 0000000..5760bb2 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v2_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v3.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v3.uasset new file mode 100644 index 0000000..f0066f5 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v3.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v3_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v3_diffuse.uasset new file mode 100644 index 0000000..30ec7e0 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v3_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v4.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v4.uasset new file mode 100644 index 0000000..1697462 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v4.uasset differ diff --git 
a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v4_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v4_diffuse.uasset new file mode 100644 index 0000000..863f817 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v4_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v5.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v5.uasset new file mode 100644 index 0000000..89a8ed0 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v5.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v5_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v5_diffuse.uasset new file mode 100644 index 0000000..5a23c18 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u3_v5_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v1.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v1.uasset new file mode 100644 index 0000000..fbbb465 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v1.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v1_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v1_diffuse.uasset new file mode 100644 index 0000000..6d6bd9c Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v1_diffuse.uasset differ diff --git 
a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v2.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v2.uasset new file mode 100644 index 0000000..533b30b Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v2.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v2_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v2_diffuse.uasset new file mode 100644 index 0000000..95d1af9 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v2_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v3.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v3.uasset new file mode 100644 index 0000000..6fbdb59 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v3.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v3_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v3_diffuse.uasset new file mode 100644 index 0000000..af8334b Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v3_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v4.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v4.uasset new file mode 100644 index 0000000..6a913fd Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v4.uasset differ diff --git 
a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v4_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v4_diffuse.uasset new file mode 100644 index 0000000..e8cc98b Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u4_v4_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v1.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v1.uasset new file mode 100644 index 0000000..7ba5ae9 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v1.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v1_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v1_diffuse.uasset new file mode 100644 index 0000000..8937508 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v1_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v2.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v2.uasset new file mode 100644 index 0000000..b52b117 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v2.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v2_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v2_diffuse.uasset new file mode 100644 index 0000000..c63b4d5 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v2_diffuse.uasset differ diff --git 
a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v3.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v3.uasset new file mode 100644 index 0000000..2916dcf Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v3.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v3_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v3_diffuse.uasset new file mode 100644 index 0000000..077cf2b Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v3_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v4.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v4.uasset new file mode 100644 index 0000000..9ade22e Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v4.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v4_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v4_diffuse.uasset new file mode 100644 index 0000000..54a04b3 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u5_v4_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v1.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v1.uasset new file mode 100644 index 0000000..5f964e1 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v1.uasset differ diff --git 
a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v1_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v1_diffuse.uasset new file mode 100644 index 0000000..24d509d Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v1_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v2.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v2.uasset new file mode 100644 index 0000000..e90dc2f Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v2.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v2_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v2_diffuse.uasset new file mode 100644 index 0000000..d5814b4 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v2_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v3.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v3.uasset new file mode 100644 index 0000000..b6d6a4a Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v3.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v3_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v3_diffuse.uasset new file mode 100644 index 0000000..1ec56b4 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v3_diffuse.uasset differ diff --git 
a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v4.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v4.uasset new file mode 100644 index 0000000..5e7c1ec Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v4.uasset differ diff --git a/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v4_diffuse.uasset b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v4_diffuse.uasset new file mode 100644 index 0000000..bbb7b19 Binary files /dev/null and b/Content/MovieProject/Assets/Environment/2_-_Laser_Textures/Lidinis_LaserTextures_u6_v4_diffuse.uasset differ diff --git a/Content/MovieProject/Assets/Environment/LidinisRepeat.uasset b/Content/MovieProject/Assets/Environment/LidinisRepeat.uasset new file mode 100644 index 0000000..6eef01e Binary files /dev/null and b/Content/MovieProject/Assets/Environment/LidinisRepeat.uasset differ diff --git a/Content/MovieProject/Levels/TestLevel1.umap b/Content/MovieProject/Levels/TestLevel1.umap new file mode 100644 index 0000000..0a51e40 Binary files /dev/null and b/Content/MovieProject/Levels/TestLevel1.umap differ diff --git a/Content/StarterContent/Architecture/Floor_400x400.uasset b/Content/StarterContent/Architecture/Floor_400x400.uasset new file mode 100644 index 0000000..08c4838 Binary files /dev/null and b/Content/StarterContent/Architecture/Floor_400x400.uasset differ diff --git a/Content/StarterContent/Architecture/Pillar_50x500.uasset b/Content/StarterContent/Architecture/Pillar_50x500.uasset new file mode 100644 index 0000000..46aed7d Binary files /dev/null and b/Content/StarterContent/Architecture/Pillar_50x500.uasset differ diff --git a/Content/StarterContent/Architecture/SM_AssetPlatform.uasset b/Content/StarterContent/Architecture/SM_AssetPlatform.uasset new file mode 100644 index 0000000..1f0f0be Binary files 
/dev/null and b/Content/StarterContent/Architecture/SM_AssetPlatform.uasset differ diff --git a/Content/StarterContent/Architecture/Wall_400x200.uasset b/Content/StarterContent/Architecture/Wall_400x200.uasset new file mode 100644 index 0000000..a7aeff7 Binary files /dev/null and b/Content/StarterContent/Architecture/Wall_400x200.uasset differ diff --git a/Content/StarterContent/Architecture/Wall_400x300.uasset b/Content/StarterContent/Architecture/Wall_400x300.uasset new file mode 100644 index 0000000..959fd41 Binary files /dev/null and b/Content/StarterContent/Architecture/Wall_400x300.uasset differ diff --git a/Content/StarterContent/Architecture/Wall_400x400.uasset b/Content/StarterContent/Architecture/Wall_400x400.uasset new file mode 100644 index 0000000..9ad5e36 Binary files /dev/null and b/Content/StarterContent/Architecture/Wall_400x400.uasset differ diff --git a/Content/StarterContent/Architecture/Wall_500x500.uasset b/Content/StarterContent/Architecture/Wall_500x500.uasset new file mode 100644 index 0000000..96e5e9b Binary files /dev/null and b/Content/StarterContent/Architecture/Wall_500x500.uasset differ diff --git a/Content/StarterContent/Architecture/Wall_Door_400x300.uasset b/Content/StarterContent/Architecture/Wall_Door_400x300.uasset new file mode 100644 index 0000000..8b60b28 Binary files /dev/null and b/Content/StarterContent/Architecture/Wall_Door_400x300.uasset differ diff --git a/Content/StarterContent/Architecture/Wall_Door_400x400.uasset b/Content/StarterContent/Architecture/Wall_Door_400x400.uasset new file mode 100644 index 0000000..55b60cb Binary files /dev/null and b/Content/StarterContent/Architecture/Wall_Door_400x400.uasset differ diff --git a/Content/StarterContent/Architecture/Wall_Window_400x300.uasset b/Content/StarterContent/Architecture/Wall_Window_400x300.uasset new file mode 100644 index 0000000..cb91768 Binary files /dev/null and b/Content/StarterContent/Architecture/Wall_Window_400x300.uasset differ diff --git 
a/Content/StarterContent/Architecture/Wall_Window_400x400.uasset b/Content/StarterContent/Architecture/Wall_Window_400x400.uasset new file mode 100644 index 0000000..1d2259d Binary files /dev/null and b/Content/StarterContent/Architecture/Wall_Window_400x400.uasset differ diff --git a/Content/StarterContent/Audio/Collapse01.uasset b/Content/StarterContent/Audio/Collapse01.uasset new file mode 100644 index 0000000..9c68be4 Binary files /dev/null and b/Content/StarterContent/Audio/Collapse01.uasset differ diff --git a/Content/StarterContent/Audio/Collapse02.uasset b/Content/StarterContent/Audio/Collapse02.uasset new file mode 100644 index 0000000..5bd3fec Binary files /dev/null and b/Content/StarterContent/Audio/Collapse02.uasset differ diff --git a/Content/StarterContent/Audio/Collapse_Cue.uasset b/Content/StarterContent/Audio/Collapse_Cue.uasset new file mode 100644 index 0000000..4548e1e Binary files /dev/null and b/Content/StarterContent/Audio/Collapse_Cue.uasset differ diff --git a/Content/StarterContent/Audio/Explosion01.uasset b/Content/StarterContent/Audio/Explosion01.uasset new file mode 100644 index 0000000..2ec32d8 Binary files /dev/null and b/Content/StarterContent/Audio/Explosion01.uasset differ diff --git a/Content/StarterContent/Audio/Explosion02.uasset b/Content/StarterContent/Audio/Explosion02.uasset new file mode 100644 index 0000000..98174ab Binary files /dev/null and b/Content/StarterContent/Audio/Explosion02.uasset differ diff --git a/Content/StarterContent/Audio/Explosion_Cue.uasset b/Content/StarterContent/Audio/Explosion_Cue.uasset new file mode 100644 index 0000000..7984faf Binary files /dev/null and b/Content/StarterContent/Audio/Explosion_Cue.uasset differ diff --git a/Content/StarterContent/Audio/Fire01.uasset b/Content/StarterContent/Audio/Fire01.uasset new file mode 100644 index 0000000..0c495f4 Binary files /dev/null and b/Content/StarterContent/Audio/Fire01.uasset differ diff --git a/Content/StarterContent/Audio/Fire01_Cue.uasset 
b/Content/StarterContent/Audio/Fire01_Cue.uasset new file mode 100644 index 0000000..86861b6 Binary files /dev/null and b/Content/StarterContent/Audio/Fire01_Cue.uasset differ diff --git a/Content/StarterContent/Audio/Fire_Sparks01.uasset b/Content/StarterContent/Audio/Fire_Sparks01.uasset new file mode 100644 index 0000000..1479dad Binary files /dev/null and b/Content/StarterContent/Audio/Fire_Sparks01.uasset differ diff --git a/Content/StarterContent/Audio/Fire_Sparks01_Cue.uasset b/Content/StarterContent/Audio/Fire_Sparks01_Cue.uasset new file mode 100644 index 0000000..f93b729 Binary files /dev/null and b/Content/StarterContent/Audio/Fire_Sparks01_Cue.uasset differ diff --git a/Content/StarterContent/Audio/Light01.uasset b/Content/StarterContent/Audio/Light01.uasset new file mode 100644 index 0000000..8e02bab Binary files /dev/null and b/Content/StarterContent/Audio/Light01.uasset differ diff --git a/Content/StarterContent/Audio/Light01_Cue.uasset b/Content/StarterContent/Audio/Light01_Cue.uasset new file mode 100644 index 0000000..a165870 Binary files /dev/null and b/Content/StarterContent/Audio/Light01_Cue.uasset differ diff --git a/Content/StarterContent/Audio/Light02.uasset b/Content/StarterContent/Audio/Light02.uasset new file mode 100644 index 0000000..907cd0e Binary files /dev/null and b/Content/StarterContent/Audio/Light02.uasset differ diff --git a/Content/StarterContent/Audio/Light02_Cue.uasset b/Content/StarterContent/Audio/Light02_Cue.uasset new file mode 100644 index 0000000..1a63569 Binary files /dev/null and b/Content/StarterContent/Audio/Light02_Cue.uasset differ diff --git a/Content/StarterContent/Audio/Smoke01.uasset b/Content/StarterContent/Audio/Smoke01.uasset new file mode 100644 index 0000000..023a555 Binary files /dev/null and b/Content/StarterContent/Audio/Smoke01.uasset differ diff --git a/Content/StarterContent/Audio/Smoke01_Cue.uasset b/Content/StarterContent/Audio/Smoke01_Cue.uasset new file mode 100644 index 0000000..b7d7bad Binary 
files /dev/null and b/Content/StarterContent/Audio/Smoke01_Cue.uasset differ diff --git a/Content/StarterContent/Audio/Starter_Background_Cue.uasset b/Content/StarterContent/Audio/Starter_Background_Cue.uasset new file mode 100644 index 0000000..fc9649f Binary files /dev/null and b/Content/StarterContent/Audio/Starter_Background_Cue.uasset differ diff --git a/Content/StarterContent/Audio/Starter_Birds01.uasset b/Content/StarterContent/Audio/Starter_Birds01.uasset new file mode 100644 index 0000000..900fc5e Binary files /dev/null and b/Content/StarterContent/Audio/Starter_Birds01.uasset differ diff --git a/Content/StarterContent/Audio/Starter_Music01.uasset b/Content/StarterContent/Audio/Starter_Music01.uasset new file mode 100644 index 0000000..d404003 Binary files /dev/null and b/Content/StarterContent/Audio/Starter_Music01.uasset differ diff --git a/Content/StarterContent/Audio/Starter_Music_Cue.uasset b/Content/StarterContent/Audio/Starter_Music_Cue.uasset new file mode 100644 index 0000000..1bba60d Binary files /dev/null and b/Content/StarterContent/Audio/Starter_Music_Cue.uasset differ diff --git a/Content/StarterContent/Audio/Starter_Wind05.uasset b/Content/StarterContent/Audio/Starter_Wind05.uasset new file mode 100644 index 0000000..36bf8d9 Binary files /dev/null and b/Content/StarterContent/Audio/Starter_Wind05.uasset differ diff --git a/Content/StarterContent/Audio/Starter_Wind06.uasset b/Content/StarterContent/Audio/Starter_Wind06.uasset new file mode 100644 index 0000000..0867c74 Binary files /dev/null and b/Content/StarterContent/Audio/Starter_Wind06.uasset differ diff --git a/Content/StarterContent/Audio/Steam01.uasset b/Content/StarterContent/Audio/Steam01.uasset new file mode 100644 index 0000000..a1ee8a7 Binary files /dev/null and b/Content/StarterContent/Audio/Steam01.uasset differ diff --git a/Content/StarterContent/Audio/Steam01_Cue.uasset b/Content/StarterContent/Audio/Steam01_Cue.uasset new file mode 100644 index 0000000..2ef161f Binary files 
/dev/null and b/Content/StarterContent/Audio/Steam01_Cue.uasset differ diff --git a/Content/StarterContent/Blueprints/Assets/FogBrightnessLUT.uasset b/Content/StarterContent/Blueprints/Assets/FogBrightnessLUT.uasset new file mode 100644 index 0000000..6babc5b Binary files /dev/null and b/Content/StarterContent/Blueprints/Assets/FogBrightnessLUT.uasset differ diff --git a/Content/StarterContent/Blueprints/Assets/M_LightStage_Arrows.uasset b/Content/StarterContent/Blueprints/Assets/M_LightStage_Arrows.uasset new file mode 100644 index 0000000..55151f5 Binary files /dev/null and b/Content/StarterContent/Blueprints/Assets/M_LightStage_Arrows.uasset differ diff --git a/Content/StarterContent/Blueprints/Assets/M_LightStage_Skybox_Black.uasset b/Content/StarterContent/Blueprints/Assets/M_LightStage_Skybox_Black.uasset new file mode 100644 index 0000000..1e8f009 Binary files /dev/null and b/Content/StarterContent/Blueprints/Assets/M_LightStage_Skybox_Black.uasset differ diff --git a/Content/StarterContent/Blueprints/Assets/M_LightStage_Skybox_HDRI.uasset b/Content/StarterContent/Blueprints/Assets/M_LightStage_Skybox_HDRI.uasset new file mode 100644 index 0000000..c81172b Binary files /dev/null and b/Content/StarterContent/Blueprints/Assets/M_LightStage_Skybox_HDRI.uasset differ diff --git a/Content/StarterContent/Blueprints/Assets/M_LightStage_Skybox_Master.uasset b/Content/StarterContent/Blueprints/Assets/M_LightStage_Skybox_Master.uasset new file mode 100644 index 0000000..e47d5e0 Binary files /dev/null and b/Content/StarterContent/Blueprints/Assets/M_LightStage_Skybox_Master.uasset differ diff --git a/Content/StarterContent/Blueprints/Assets/SM_Arrows.uasset b/Content/StarterContent/Blueprints/Assets/SM_Arrows.uasset new file mode 100644 index 0000000..94e6203 Binary files /dev/null and b/Content/StarterContent/Blueprints/Assets/SM_Arrows.uasset differ diff --git a/Content/StarterContent/Blueprints/Assets/Skybox.uasset 
b/Content/StarterContent/Blueprints/Assets/Skybox.uasset new file mode 100644 index 0000000..54dc67a Binary files /dev/null and b/Content/StarterContent/Blueprints/Assets/Skybox.uasset differ diff --git a/Content/StarterContent/Blueprints/Assets/SunlightColorLUT.uasset b/Content/StarterContent/Blueprints/Assets/SunlightColorLUT.uasset new file mode 100644 index 0000000..e2c3765 Binary files /dev/null and b/Content/StarterContent/Blueprints/Assets/SunlightColorLUT.uasset differ diff --git a/Content/StarterContent/Blueprints/BP_LightStudio.uasset b/Content/StarterContent/Blueprints/BP_LightStudio.uasset new file mode 100644 index 0000000..b85ba86 Binary files /dev/null and b/Content/StarterContent/Blueprints/BP_LightStudio.uasset differ diff --git a/Content/StarterContent/Blueprints/Blueprint_CeilingLight.uasset b/Content/StarterContent/Blueprints/Blueprint_CeilingLight.uasset new file mode 100644 index 0000000..b5b40a2 Binary files /dev/null and b/Content/StarterContent/Blueprints/Blueprint_CeilingLight.uasset differ diff --git a/Content/StarterContent/Blueprints/Blueprint_Effect_Explosion.uasset b/Content/StarterContent/Blueprints/Blueprint_Effect_Explosion.uasset new file mode 100644 index 0000000..3e92029 Binary files /dev/null and b/Content/StarterContent/Blueprints/Blueprint_Effect_Explosion.uasset differ diff --git a/Content/StarterContent/Blueprints/Blueprint_Effect_Fire.uasset b/Content/StarterContent/Blueprints/Blueprint_Effect_Fire.uasset new file mode 100644 index 0000000..a0c45b8 Binary files /dev/null and b/Content/StarterContent/Blueprints/Blueprint_Effect_Fire.uasset differ diff --git a/Content/StarterContent/Blueprints/Blueprint_Effect_Smoke.uasset b/Content/StarterContent/Blueprints/Blueprint_Effect_Smoke.uasset new file mode 100644 index 0000000..bb648c9 Binary files /dev/null and b/Content/StarterContent/Blueprints/Blueprint_Effect_Smoke.uasset differ diff --git a/Content/StarterContent/Blueprints/Blueprint_Effect_Sparks.uasset 
b/Content/StarterContent/Blueprints/Blueprint_Effect_Sparks.uasset new file mode 100644 index 0000000..1c27dc4 Binary files /dev/null and b/Content/StarterContent/Blueprints/Blueprint_Effect_Sparks.uasset differ diff --git a/Content/StarterContent/Blueprints/Blueprint_Effect_Steam.uasset b/Content/StarterContent/Blueprints/Blueprint_Effect_Steam.uasset new file mode 100644 index 0000000..45417c8 Binary files /dev/null and b/Content/StarterContent/Blueprints/Blueprint_Effect_Steam.uasset differ diff --git a/Content/StarterContent/Blueprints/Blueprint_WallSconce.uasset b/Content/StarterContent/Blueprints/Blueprint_WallSconce.uasset new file mode 100644 index 0000000..3f29873 Binary files /dev/null and b/Content/StarterContent/Blueprints/Blueprint_WallSconce.uasset differ diff --git a/Content/StarterContent/HDRI/HDRI_Epic_Courtyard_Daylight.uasset b/Content/StarterContent/HDRI/HDRI_Epic_Courtyard_Daylight.uasset new file mode 100644 index 0000000..9b1f1be Binary files /dev/null and b/Content/StarterContent/HDRI/HDRI_Epic_Courtyard_Daylight.uasset differ diff --git a/Content/StarterContent/Maps/Advanced_Lighting.umap b/Content/StarterContent/Maps/Advanced_Lighting.umap new file mode 100644 index 0000000..ebf6966 Binary files /dev/null and b/Content/StarterContent/Maps/Advanced_Lighting.umap differ diff --git a/Content/StarterContent/Maps/Minimal_Default.umap b/Content/StarterContent/Maps/Minimal_Default.umap new file mode 100644 index 0000000..378270c Binary files /dev/null and b/Content/StarterContent/Maps/Minimal_Default.umap differ diff --git a/Content/StarterContent/Maps/StarterMap.umap b/Content/StarterContent/Maps/StarterMap.umap new file mode 100644 index 0000000..3d55147 Binary files /dev/null and b/Content/StarterContent/Maps/StarterMap.umap differ diff --git a/Content/StarterContent/Materials/M_AssetPlatform.uasset b/Content/StarterContent/Materials/M_AssetPlatform.uasset new file mode 100644 index 0000000..4962777 Binary files /dev/null and 
b/Content/StarterContent/Materials/M_AssetPlatform.uasset differ diff --git a/Content/StarterContent/Materials/M_Basic_Floor.uasset b/Content/StarterContent/Materials/M_Basic_Floor.uasset new file mode 100644 index 0000000..6b05cb4 Binary files /dev/null and b/Content/StarterContent/Materials/M_Basic_Floor.uasset differ diff --git a/Content/StarterContent/Materials/M_Basic_Wall.uasset b/Content/StarterContent/Materials/M_Basic_Wall.uasset new file mode 100644 index 0000000..dd5ae24 Binary files /dev/null and b/Content/StarterContent/Materials/M_Basic_Wall.uasset differ diff --git a/Content/StarterContent/Materials/M_Brick_Clay_Beveled.uasset b/Content/StarterContent/Materials/M_Brick_Clay_Beveled.uasset new file mode 100644 index 0000000..2b8a349 Binary files /dev/null and b/Content/StarterContent/Materials/M_Brick_Clay_Beveled.uasset differ diff --git a/Content/StarterContent/Materials/M_Brick_Clay_New.uasset b/Content/StarterContent/Materials/M_Brick_Clay_New.uasset new file mode 100644 index 0000000..e519245 Binary files /dev/null and b/Content/StarterContent/Materials/M_Brick_Clay_New.uasset differ diff --git a/Content/StarterContent/Materials/M_Brick_Clay_Old.uasset b/Content/StarterContent/Materials/M_Brick_Clay_Old.uasset new file mode 100644 index 0000000..bd49323 Binary files /dev/null and b/Content/StarterContent/Materials/M_Brick_Clay_Old.uasset differ diff --git a/Content/StarterContent/Materials/M_Brick_Cut_Stone.uasset b/Content/StarterContent/Materials/M_Brick_Cut_Stone.uasset new file mode 100644 index 0000000..2ff69bc Binary files /dev/null and b/Content/StarterContent/Materials/M_Brick_Cut_Stone.uasset differ diff --git a/Content/StarterContent/Materials/M_Brick_Hewn_Stone.uasset b/Content/StarterContent/Materials/M_Brick_Hewn_Stone.uasset new file mode 100644 index 0000000..d1d7e67 Binary files /dev/null and b/Content/StarterContent/Materials/M_Brick_Hewn_Stone.uasset differ diff --git 
a/Content/StarterContent/Materials/M_Ceramic_Tile_Checker.uasset b/Content/StarterContent/Materials/M_Ceramic_Tile_Checker.uasset new file mode 100644 index 0000000..e5f0590 Binary files /dev/null and b/Content/StarterContent/Materials/M_Ceramic_Tile_Checker.uasset differ diff --git a/Content/StarterContent/Materials/M_CobbleStone_Pebble.uasset b/Content/StarterContent/Materials/M_CobbleStone_Pebble.uasset new file mode 100644 index 0000000..958dd1b Binary files /dev/null and b/Content/StarterContent/Materials/M_CobbleStone_Pebble.uasset differ diff --git a/Content/StarterContent/Materials/M_CobbleStone_Rough.uasset b/Content/StarterContent/Materials/M_CobbleStone_Rough.uasset new file mode 100644 index 0000000..fb1c6d3 Binary files /dev/null and b/Content/StarterContent/Materials/M_CobbleStone_Rough.uasset differ diff --git a/Content/StarterContent/Materials/M_CobbleStone_Smooth.uasset b/Content/StarterContent/Materials/M_CobbleStone_Smooth.uasset new file mode 100644 index 0000000..642aaaa Binary files /dev/null and b/Content/StarterContent/Materials/M_CobbleStone_Smooth.uasset differ diff --git a/Content/StarterContent/Materials/M_ColorGrid_LowSpec.uasset b/Content/StarterContent/Materials/M_ColorGrid_LowSpec.uasset new file mode 100644 index 0000000..4b25f74 Binary files /dev/null and b/Content/StarterContent/Materials/M_ColorGrid_LowSpec.uasset differ diff --git a/Content/StarterContent/Materials/M_Concrete_Grime.uasset b/Content/StarterContent/Materials/M_Concrete_Grime.uasset new file mode 100644 index 0000000..3be6070 Binary files /dev/null and b/Content/StarterContent/Materials/M_Concrete_Grime.uasset differ diff --git a/Content/StarterContent/Materials/M_Concrete_Panels.uasset b/Content/StarterContent/Materials/M_Concrete_Panels.uasset new file mode 100644 index 0000000..d843626 Binary files /dev/null and b/Content/StarterContent/Materials/M_Concrete_Panels.uasset differ diff --git a/Content/StarterContent/Materials/M_Concrete_Poured.uasset 
b/Content/StarterContent/Materials/M_Concrete_Poured.uasset new file mode 100644 index 0000000..34661da Binary files /dev/null and b/Content/StarterContent/Materials/M_Concrete_Poured.uasset differ diff --git a/Content/StarterContent/Materials/M_Concrete_Tiles.uasset b/Content/StarterContent/Materials/M_Concrete_Tiles.uasset new file mode 100644 index 0000000..26e7163 Binary files /dev/null and b/Content/StarterContent/Materials/M_Concrete_Tiles.uasset differ diff --git a/Content/StarterContent/Materials/M_Glass.uasset b/Content/StarterContent/Materials/M_Glass.uasset new file mode 100644 index 0000000..7168e4f Binary files /dev/null and b/Content/StarterContent/Materials/M_Glass.uasset differ diff --git a/Content/StarterContent/Materials/M_Ground_Grass.uasset b/Content/StarterContent/Materials/M_Ground_Grass.uasset new file mode 100644 index 0000000..1cbe2a5 Binary files /dev/null and b/Content/StarterContent/Materials/M_Ground_Grass.uasset differ diff --git a/Content/StarterContent/Materials/M_Ground_Gravel.uasset b/Content/StarterContent/Materials/M_Ground_Gravel.uasset new file mode 100644 index 0000000..7d3d68f Binary files /dev/null and b/Content/StarterContent/Materials/M_Ground_Gravel.uasset differ diff --git a/Content/StarterContent/Materials/M_Ground_Moss.uasset b/Content/StarterContent/Materials/M_Ground_Moss.uasset new file mode 100644 index 0000000..ff1f454 Binary files /dev/null and b/Content/StarterContent/Materials/M_Ground_Moss.uasset differ diff --git a/Content/StarterContent/Materials/M_Metal_Brushed_Nickel.uasset b/Content/StarterContent/Materials/M_Metal_Brushed_Nickel.uasset new file mode 100644 index 0000000..e34b606 Binary files /dev/null and b/Content/StarterContent/Materials/M_Metal_Brushed_Nickel.uasset differ diff --git a/Content/StarterContent/Materials/M_Metal_Burnished_Steel.uasset b/Content/StarterContent/Materials/M_Metal_Burnished_Steel.uasset new file mode 100644 index 0000000..5091518 Binary files /dev/null and 
b/Content/StarterContent/Materials/M_Metal_Burnished_Steel.uasset differ diff --git a/Content/StarterContent/Materials/M_Metal_Chrome.uasset b/Content/StarterContent/Materials/M_Metal_Chrome.uasset new file mode 100644 index 0000000..2997cde Binary files /dev/null and b/Content/StarterContent/Materials/M_Metal_Chrome.uasset differ diff --git a/Content/StarterContent/Materials/M_Metal_Copper.uasset b/Content/StarterContent/Materials/M_Metal_Copper.uasset new file mode 100644 index 0000000..2f3841c Binary files /dev/null and b/Content/StarterContent/Materials/M_Metal_Copper.uasset differ diff --git a/Content/StarterContent/Materials/M_Metal_Gold.uasset b/Content/StarterContent/Materials/M_Metal_Gold.uasset new file mode 100644 index 0000000..0fc3df3 Binary files /dev/null and b/Content/StarterContent/Materials/M_Metal_Gold.uasset differ diff --git a/Content/StarterContent/Materials/M_Metal_Rust.uasset b/Content/StarterContent/Materials/M_Metal_Rust.uasset new file mode 100644 index 0000000..8402144 Binary files /dev/null and b/Content/StarterContent/Materials/M_Metal_Rust.uasset differ diff --git a/Content/StarterContent/Materials/M_Metal_Steel.uasset b/Content/StarterContent/Materials/M_Metal_Steel.uasset new file mode 100644 index 0000000..e303340 Binary files /dev/null and b/Content/StarterContent/Materials/M_Metal_Steel.uasset differ diff --git a/Content/StarterContent/Materials/M_Rock_Basalt.uasset b/Content/StarterContent/Materials/M_Rock_Basalt.uasset new file mode 100644 index 0000000..d2cf1ba Binary files /dev/null and b/Content/StarterContent/Materials/M_Rock_Basalt.uasset differ diff --git a/Content/StarterContent/Materials/M_Rock_Marble_Polished.uasset b/Content/StarterContent/Materials/M_Rock_Marble_Polished.uasset new file mode 100644 index 0000000..d177e32 Binary files /dev/null and b/Content/StarterContent/Materials/M_Rock_Marble_Polished.uasset differ diff --git a/Content/StarterContent/Materials/M_Rock_Sandstone.uasset 
b/Content/StarterContent/Materials/M_Rock_Sandstone.uasset new file mode 100644 index 0000000..4ca7e31 Binary files /dev/null and b/Content/StarterContent/Materials/M_Rock_Sandstone.uasset differ diff --git a/Content/StarterContent/Materials/M_Rock_Slate.uasset b/Content/StarterContent/Materials/M_Rock_Slate.uasset new file mode 100644 index 0000000..53f6bb8 Binary files /dev/null and b/Content/StarterContent/Materials/M_Rock_Slate.uasset differ diff --git a/Content/StarterContent/Materials/M_Tech_Checker_Dot.uasset b/Content/StarterContent/Materials/M_Tech_Checker_Dot.uasset new file mode 100644 index 0000000..041b0f8 Binary files /dev/null and b/Content/StarterContent/Materials/M_Tech_Checker_Dot.uasset differ diff --git a/Content/StarterContent/Materials/M_Tech_Hex_Tile.uasset b/Content/StarterContent/Materials/M_Tech_Hex_Tile.uasset new file mode 100644 index 0000000..f102368 Binary files /dev/null and b/Content/StarterContent/Materials/M_Tech_Hex_Tile.uasset differ diff --git a/Content/StarterContent/Materials/M_Tech_Hex_Tile_Pulse.uasset b/Content/StarterContent/Materials/M_Tech_Hex_Tile_Pulse.uasset new file mode 100644 index 0000000..570262a Binary files /dev/null and b/Content/StarterContent/Materials/M_Tech_Hex_Tile_Pulse.uasset differ diff --git a/Content/StarterContent/Materials/M_Tech_Panel.uasset b/Content/StarterContent/Materials/M_Tech_Panel.uasset new file mode 100644 index 0000000..1afccae Binary files /dev/null and b/Content/StarterContent/Materials/M_Tech_Panel.uasset differ diff --git a/Content/StarterContent/Materials/M_Water_Lake.uasset b/Content/StarterContent/Materials/M_Water_Lake.uasset new file mode 100644 index 0000000..88a6333 Binary files /dev/null and b/Content/StarterContent/Materials/M_Water_Lake.uasset differ diff --git a/Content/StarterContent/Materials/M_Water_Ocean.uasset b/Content/StarterContent/Materials/M_Water_Ocean.uasset new file mode 100644 index 0000000..1ff0460 Binary files /dev/null and 
b/Content/StarterContent/Materials/M_Water_Ocean.uasset differ diff --git a/Content/StarterContent/Materials/M_Wood_Floor_Walnut_Polished.uasset b/Content/StarterContent/Materials/M_Wood_Floor_Walnut_Polished.uasset new file mode 100644 index 0000000..8bbba08 Binary files /dev/null and b/Content/StarterContent/Materials/M_Wood_Floor_Walnut_Polished.uasset differ diff --git a/Content/StarterContent/Materials/M_Wood_Floor_Walnut_Worn.uasset b/Content/StarterContent/Materials/M_Wood_Floor_Walnut_Worn.uasset new file mode 100644 index 0000000..229a294 Binary files /dev/null and b/Content/StarterContent/Materials/M_Wood_Floor_Walnut_Worn.uasset differ diff --git a/Content/StarterContent/Materials/M_Wood_Oak.uasset b/Content/StarterContent/Materials/M_Wood_Oak.uasset new file mode 100644 index 0000000..d722444 Binary files /dev/null and b/Content/StarterContent/Materials/M_Wood_Oak.uasset differ diff --git a/Content/StarterContent/Materials/M_Wood_Pine.uasset b/Content/StarterContent/Materials/M_Wood_Pine.uasset new file mode 100644 index 0000000..b02101c Binary files /dev/null and b/Content/StarterContent/Materials/M_Wood_Pine.uasset differ diff --git a/Content/StarterContent/Materials/M_Wood_Walnut.uasset b/Content/StarterContent/Materials/M_Wood_Walnut.uasset new file mode 100644 index 0000000..6e76341 Binary files /dev/null and b/Content/StarterContent/Materials/M_Wood_Walnut.uasset differ diff --git a/Content/StarterContent/Particles/Materials/M_Burst.uasset b/Content/StarterContent/Particles/Materials/M_Burst.uasset new file mode 100644 index 0000000..12caa0a Binary files /dev/null and b/Content/StarterContent/Particles/Materials/M_Burst.uasset differ diff --git a/Content/StarterContent/Particles/Materials/M_Dust_Particle.uasset b/Content/StarterContent/Particles/Materials/M_Dust_Particle.uasset new file mode 100644 index 0000000..4f665d0 Binary files /dev/null and b/Content/StarterContent/Particles/Materials/M_Dust_Particle.uasset differ diff --git 
a/Content/StarterContent/Particles/Materials/M_Fire_SubUV.uasset b/Content/StarterContent/Particles/Materials/M_Fire_SubUV.uasset new file mode 100644 index 0000000..73f3eaa Binary files /dev/null and b/Content/StarterContent/Particles/Materials/M_Fire_SubUV.uasset differ diff --git a/Content/StarterContent/Particles/Materials/M_Heat_Distortion.uasset b/Content/StarterContent/Particles/Materials/M_Heat_Distortion.uasset new file mode 100644 index 0000000..d34ee58 Binary files /dev/null and b/Content/StarterContent/Particles/Materials/M_Heat_Distortion.uasset differ diff --git a/Content/StarterContent/Particles/Materials/M_Radial_Gradient.uasset b/Content/StarterContent/Particles/Materials/M_Radial_Gradient.uasset new file mode 100644 index 0000000..9829b9a Binary files /dev/null and b/Content/StarterContent/Particles/Materials/M_Radial_Gradient.uasset differ diff --git a/Content/StarterContent/Particles/Materials/M_Spark.uasset b/Content/StarterContent/Particles/Materials/M_Spark.uasset new file mode 100644 index 0000000..6f6f699 Binary files /dev/null and b/Content/StarterContent/Particles/Materials/M_Spark.uasset differ diff --git a/Content/StarterContent/Particles/Materials/M_explosion_subUV.uasset b/Content/StarterContent/Particles/Materials/M_explosion_subUV.uasset new file mode 100644 index 0000000..1af76e6 Binary files /dev/null and b/Content/StarterContent/Particles/Materials/M_explosion_subUV.uasset differ diff --git a/Content/StarterContent/Particles/Materials/M_radial_ramp.uasset b/Content/StarterContent/Particles/Materials/M_radial_ramp.uasset new file mode 100644 index 0000000..b022826 Binary files /dev/null and b/Content/StarterContent/Particles/Materials/M_radial_ramp.uasset differ diff --git a/Content/StarterContent/Particles/Materials/M_smoke_subUV.uasset b/Content/StarterContent/Particles/Materials/M_smoke_subUV.uasset new file mode 100644 index 0000000..2086e5f Binary files /dev/null and 
b/Content/StarterContent/Particles/Materials/M_smoke_subUV.uasset differ diff --git a/Content/StarterContent/Particles/Materials/m_flare_01.uasset b/Content/StarterContent/Particles/Materials/m_flare_01.uasset new file mode 100644 index 0000000..824b44b Binary files /dev/null and b/Content/StarterContent/Particles/Materials/m_flare_01.uasset differ diff --git a/Content/StarterContent/Particles/P_Ambient_Dust.uasset b/Content/StarterContent/Particles/P_Ambient_Dust.uasset new file mode 100644 index 0000000..38b6cab Binary files /dev/null and b/Content/StarterContent/Particles/P_Ambient_Dust.uasset differ diff --git a/Content/StarterContent/Particles/P_Explosion.uasset b/Content/StarterContent/Particles/P_Explosion.uasset new file mode 100644 index 0000000..8f6e03a Binary files /dev/null and b/Content/StarterContent/Particles/P_Explosion.uasset differ diff --git a/Content/StarterContent/Particles/P_Fire.uasset b/Content/StarterContent/Particles/P_Fire.uasset new file mode 100644 index 0000000..3fb74a8 Binary files /dev/null and b/Content/StarterContent/Particles/P_Fire.uasset differ diff --git a/Content/StarterContent/Particles/P_Smoke.uasset b/Content/StarterContent/Particles/P_Smoke.uasset new file mode 100644 index 0000000..a6014df Binary files /dev/null and b/Content/StarterContent/Particles/P_Smoke.uasset differ diff --git a/Content/StarterContent/Particles/P_Sparks.uasset b/Content/StarterContent/Particles/P_Sparks.uasset new file mode 100644 index 0000000..8a6dc37 Binary files /dev/null and b/Content/StarterContent/Particles/P_Sparks.uasset differ diff --git a/Content/StarterContent/Particles/P_Steam_Lit.uasset b/Content/StarterContent/Particles/P_Steam_Lit.uasset new file mode 100644 index 0000000..3e96da7 Binary files /dev/null and b/Content/StarterContent/Particles/P_Steam_Lit.uasset differ diff --git a/Content/StarterContent/Props/MaterialSphere.uasset b/Content/StarterContent/Props/MaterialSphere.uasset new file mode 100644 index 0000000..6a2538f Binary 
files /dev/null and b/Content/StarterContent/Props/MaterialSphere.uasset differ diff --git a/Content/StarterContent/Props/Materials/M_Bush.uasset b/Content/StarterContent/Props/Materials/M_Bush.uasset new file mode 100644 index 0000000..eaa4a2a Binary files /dev/null and b/Content/StarterContent/Props/Materials/M_Bush.uasset differ diff --git a/Content/StarterContent/Props/Materials/M_Chair.uasset b/Content/StarterContent/Props/Materials/M_Chair.uasset new file mode 100644 index 0000000..e7446be Binary files /dev/null and b/Content/StarterContent/Props/Materials/M_Chair.uasset differ diff --git a/Content/StarterContent/Props/Materials/M_Door.uasset b/Content/StarterContent/Props/Materials/M_Door.uasset new file mode 100644 index 0000000..714d0f6 Binary files /dev/null and b/Content/StarterContent/Props/Materials/M_Door.uasset differ diff --git a/Content/StarterContent/Props/Materials/M_Frame.uasset b/Content/StarterContent/Props/Materials/M_Frame.uasset new file mode 100644 index 0000000..2a2dff8 Binary files /dev/null and b/Content/StarterContent/Props/Materials/M_Frame.uasset differ diff --git a/Content/StarterContent/Props/Materials/M_Lamp.uasset b/Content/StarterContent/Props/Materials/M_Lamp.uasset new file mode 100644 index 0000000..3eb6663 Binary files /dev/null and b/Content/StarterContent/Props/Materials/M_Lamp.uasset differ diff --git a/Content/StarterContent/Props/Materials/M_Rock.uasset b/Content/StarterContent/Props/Materials/M_Rock.uasset new file mode 100644 index 0000000..2d59537 Binary files /dev/null and b/Content/StarterContent/Props/Materials/M_Rock.uasset differ diff --git a/Content/StarterContent/Props/Materials/M_Shelf.uasset b/Content/StarterContent/Props/Materials/M_Shelf.uasset new file mode 100644 index 0000000..2444258 Binary files /dev/null and b/Content/StarterContent/Props/Materials/M_Shelf.uasset differ diff --git a/Content/StarterContent/Props/Materials/M_Statue.uasset b/Content/StarterContent/Props/Materials/M_Statue.uasset new 
file mode 100644 index 0000000..775313d Binary files /dev/null and b/Content/StarterContent/Props/Materials/M_Statue.uasset differ diff --git a/Content/StarterContent/Props/Materials/M_StatueGlass.uasset b/Content/StarterContent/Props/Materials/M_StatueGlass.uasset new file mode 100644 index 0000000..d1592b3 Binary files /dev/null and b/Content/StarterContent/Props/Materials/M_StatueGlass.uasset differ diff --git a/Content/StarterContent/Props/Materials/M_TableRound.uasset b/Content/StarterContent/Props/Materials/M_TableRound.uasset new file mode 100644 index 0000000..66b487c Binary files /dev/null and b/Content/StarterContent/Props/Materials/M_TableRound.uasset differ diff --git a/Content/StarterContent/Props/SM_Bush.uasset b/Content/StarterContent/Props/SM_Bush.uasset new file mode 100644 index 0000000..060a376 Binary files /dev/null and b/Content/StarterContent/Props/SM_Bush.uasset differ diff --git a/Content/StarterContent/Props/SM_Chair.uasset b/Content/StarterContent/Props/SM_Chair.uasset new file mode 100644 index 0000000..e062426 Binary files /dev/null and b/Content/StarterContent/Props/SM_Chair.uasset differ diff --git a/Content/StarterContent/Props/SM_CornerFrame.uasset b/Content/StarterContent/Props/SM_CornerFrame.uasset new file mode 100644 index 0000000..5710151 Binary files /dev/null and b/Content/StarterContent/Props/SM_CornerFrame.uasset differ diff --git a/Content/StarterContent/Props/SM_Couch.uasset b/Content/StarterContent/Props/SM_Couch.uasset new file mode 100644 index 0000000..cc7d408 Binary files /dev/null and b/Content/StarterContent/Props/SM_Couch.uasset differ diff --git a/Content/StarterContent/Props/SM_Door.uasset b/Content/StarterContent/Props/SM_Door.uasset new file mode 100644 index 0000000..22c9bfc Binary files /dev/null and b/Content/StarterContent/Props/SM_Door.uasset differ diff --git a/Content/StarterContent/Props/SM_DoorFrame.uasset b/Content/StarterContent/Props/SM_DoorFrame.uasset new file mode 100644 index 0000000..ab5e96f 
Binary files /dev/null and b/Content/StarterContent/Props/SM_DoorFrame.uasset differ diff --git a/Content/StarterContent/Props/SM_GlassWindow.uasset b/Content/StarterContent/Props/SM_GlassWindow.uasset new file mode 100644 index 0000000..93863f4 Binary files /dev/null and b/Content/StarterContent/Props/SM_GlassWindow.uasset differ diff --git a/Content/StarterContent/Props/SM_Lamp_Ceiling.uasset b/Content/StarterContent/Props/SM_Lamp_Ceiling.uasset new file mode 100644 index 0000000..4e55f04 Binary files /dev/null and b/Content/StarterContent/Props/SM_Lamp_Ceiling.uasset differ diff --git a/Content/StarterContent/Props/SM_Lamp_Wall.uasset b/Content/StarterContent/Props/SM_Lamp_Wall.uasset new file mode 100644 index 0000000..a1b112f Binary files /dev/null and b/Content/StarterContent/Props/SM_Lamp_Wall.uasset differ diff --git a/Content/StarterContent/Props/SM_PillarFrame.uasset b/Content/StarterContent/Props/SM_PillarFrame.uasset new file mode 100644 index 0000000..7cafefe Binary files /dev/null and b/Content/StarterContent/Props/SM_PillarFrame.uasset differ diff --git a/Content/StarterContent/Props/SM_PillarFrame300.uasset b/Content/StarterContent/Props/SM_PillarFrame300.uasset new file mode 100644 index 0000000..c681a0a Binary files /dev/null and b/Content/StarterContent/Props/SM_PillarFrame300.uasset differ diff --git a/Content/StarterContent/Props/SM_Rock.uasset b/Content/StarterContent/Props/SM_Rock.uasset new file mode 100644 index 0000000..419f4d5 Binary files /dev/null and b/Content/StarterContent/Props/SM_Rock.uasset differ diff --git a/Content/StarterContent/Props/SM_Shelf.uasset b/Content/StarterContent/Props/SM_Shelf.uasset new file mode 100644 index 0000000..7ebb820 Binary files /dev/null and b/Content/StarterContent/Props/SM_Shelf.uasset differ diff --git a/Content/StarterContent/Props/SM_Stairs.uasset b/Content/StarterContent/Props/SM_Stairs.uasset new file mode 100644 index 0000000..d0e84cc Binary files /dev/null and 
b/Content/StarterContent/Props/SM_Stairs.uasset differ diff --git a/Content/StarterContent/Props/SM_Statue.uasset b/Content/StarterContent/Props/SM_Statue.uasset new file mode 100644 index 0000000..d0e5b8d Binary files /dev/null and b/Content/StarterContent/Props/SM_Statue.uasset differ diff --git a/Content/StarterContent/Props/SM_TableRound.uasset b/Content/StarterContent/Props/SM_TableRound.uasset new file mode 100644 index 0000000..317b8ff Binary files /dev/null and b/Content/StarterContent/Props/SM_TableRound.uasset differ diff --git a/Content/StarterContent/Props/SM_WindowFrame.uasset b/Content/StarterContent/Props/SM_WindowFrame.uasset new file mode 100644 index 0000000..bfc9f90 Binary files /dev/null and b/Content/StarterContent/Props/SM_WindowFrame.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_Cone.uasset b/Content/StarterContent/Shapes/Shape_Cone.uasset new file mode 100644 index 0000000..200f0a2 Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_Cone.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_Cube.uasset b/Content/StarterContent/Shapes/Shape_Cube.uasset new file mode 100644 index 0000000..4d169fe Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_Cube.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_Cylinder.uasset b/Content/StarterContent/Shapes/Shape_Cylinder.uasset new file mode 100644 index 0000000..06541d9 Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_Cylinder.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_NarrowCapsule.uasset b/Content/StarterContent/Shapes/Shape_NarrowCapsule.uasset new file mode 100644 index 0000000..0322b31 Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_NarrowCapsule.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_Pipe.uasset b/Content/StarterContent/Shapes/Shape_Pipe.uasset new file mode 100644 index 0000000..f2549a3 Binary files /dev/null and 
b/Content/StarterContent/Shapes/Shape_Pipe.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_Pipe_180.uasset b/Content/StarterContent/Shapes/Shape_Pipe_180.uasset new file mode 100644 index 0000000..b152330 Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_Pipe_180.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_Pipe_90.uasset b/Content/StarterContent/Shapes/Shape_Pipe_90.uasset new file mode 100644 index 0000000..db79ad4 Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_Pipe_90.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_Plane.uasset b/Content/StarterContent/Shapes/Shape_Plane.uasset new file mode 100644 index 0000000..f338efa Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_Plane.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_QuadPyramid.uasset b/Content/StarterContent/Shapes/Shape_QuadPyramid.uasset new file mode 100644 index 0000000..e228f15 Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_QuadPyramid.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_Sphere.uasset b/Content/StarterContent/Shapes/Shape_Sphere.uasset new file mode 100644 index 0000000..0b7fc69 Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_Sphere.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_Torus.uasset b/Content/StarterContent/Shapes/Shape_Torus.uasset new file mode 100644 index 0000000..cfbaac4 Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_Torus.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_TriPyramid.uasset b/Content/StarterContent/Shapes/Shape_TriPyramid.uasset new file mode 100644 index 0000000..f28d857 Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_TriPyramid.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_Trim.uasset b/Content/StarterContent/Shapes/Shape_Trim.uasset new file mode 100644 index 0000000..0909161 Binary files /dev/null and 
b/Content/StarterContent/Shapes/Shape_Trim.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_Trim_90_In.uasset b/Content/StarterContent/Shapes/Shape_Trim_90_In.uasset new file mode 100644 index 0000000..6e33a90 Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_Trim_90_In.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_Trim_90_Out.uasset b/Content/StarterContent/Shapes/Shape_Trim_90_Out.uasset new file mode 100644 index 0000000..8125f77 Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_Trim_90_Out.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_Tube.uasset b/Content/StarterContent/Shapes/Shape_Tube.uasset new file mode 100644 index 0000000..b2e9a43 Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_Tube.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_Wedge_A.uasset b/Content/StarterContent/Shapes/Shape_Wedge_A.uasset new file mode 100644 index 0000000..b99fde7 Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_Wedge_A.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_Wedge_B.uasset b/Content/StarterContent/Shapes/Shape_Wedge_B.uasset new file mode 100644 index 0000000..2e8f430 Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_Wedge_B.uasset differ diff --git a/Content/StarterContent/Shapes/Shape_WideCapsule.uasset b/Content/StarterContent/Shapes/Shape_WideCapsule.uasset new file mode 100644 index 0000000..830b90d Binary files /dev/null and b/Content/StarterContent/Shapes/Shape_WideCapsule.uasset differ diff --git a/Content/StarterContent/Textures/T_Brick_Clay_Beveled_D.uasset b/Content/StarterContent/Textures/T_Brick_Clay_Beveled_D.uasset new file mode 100644 index 0000000..8ea9227 Binary files /dev/null and b/Content/StarterContent/Textures/T_Brick_Clay_Beveled_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Brick_Clay_Beveled_M.uasset b/Content/StarterContent/Textures/T_Brick_Clay_Beveled_M.uasset new file 
mode 100644 index 0000000..995cc82 Binary files /dev/null and b/Content/StarterContent/Textures/T_Brick_Clay_Beveled_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Brick_Clay_Beveled_N.uasset b/Content/StarterContent/Textures/T_Brick_Clay_Beveled_N.uasset new file mode 100644 index 0000000..bf6633d Binary files /dev/null and b/Content/StarterContent/Textures/T_Brick_Clay_Beveled_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Brick_Clay_New_D.uasset b/Content/StarterContent/Textures/T_Brick_Clay_New_D.uasset new file mode 100644 index 0000000..5d32850 Binary files /dev/null and b/Content/StarterContent/Textures/T_Brick_Clay_New_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Brick_Clay_New_M.uasset b/Content/StarterContent/Textures/T_Brick_Clay_New_M.uasset new file mode 100644 index 0000000..e386e9d Binary files /dev/null and b/Content/StarterContent/Textures/T_Brick_Clay_New_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Brick_Clay_New_N.uasset b/Content/StarterContent/Textures/T_Brick_Clay_New_N.uasset new file mode 100644 index 0000000..f4dc14a Binary files /dev/null and b/Content/StarterContent/Textures/T_Brick_Clay_New_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Brick_Clay_Old_D.uasset b/Content/StarterContent/Textures/T_Brick_Clay_Old_D.uasset new file mode 100644 index 0000000..acec520 Binary files /dev/null and b/Content/StarterContent/Textures/T_Brick_Clay_Old_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Brick_Clay_Old_N.uasset b/Content/StarterContent/Textures/T_Brick_Clay_Old_N.uasset new file mode 100644 index 0000000..49fac3f Binary files /dev/null and b/Content/StarterContent/Textures/T_Brick_Clay_Old_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Brick_Cut_Stone_D.uasset b/Content/StarterContent/Textures/T_Brick_Cut_Stone_D.uasset new file mode 100644 index 0000000..1f76e63 Binary files /dev/null and 
b/Content/StarterContent/Textures/T_Brick_Cut_Stone_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Brick_Cut_Stone_N.uasset b/Content/StarterContent/Textures/T_Brick_Cut_Stone_N.uasset new file mode 100644 index 0000000..9cf63ce Binary files /dev/null and b/Content/StarterContent/Textures/T_Brick_Cut_Stone_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Brick_Hewn_Stone_D.uasset b/Content/StarterContent/Textures/T_Brick_Hewn_Stone_D.uasset new file mode 100644 index 0000000..cf67614 Binary files /dev/null and b/Content/StarterContent/Textures/T_Brick_Hewn_Stone_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Brick_Hewn_Stone_M.uasset b/Content/StarterContent/Textures/T_Brick_Hewn_Stone_M.uasset new file mode 100644 index 0000000..81c5fcf Binary files /dev/null and b/Content/StarterContent/Textures/T_Brick_Hewn_Stone_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Brick_Hewn_Stone_N.uasset b/Content/StarterContent/Textures/T_Brick_Hewn_Stone_N.uasset new file mode 100644 index 0000000..27c49b5 Binary files /dev/null and b/Content/StarterContent/Textures/T_Brick_Hewn_Stone_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Burst_M.uasset b/Content/StarterContent/Textures/T_Burst_M.uasset new file mode 100644 index 0000000..35b26fc Binary files /dev/null and b/Content/StarterContent/Textures/T_Burst_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Bush_D.uasset b/Content/StarterContent/Textures/T_Bush_D.uasset new file mode 100644 index 0000000..d539936 Binary files /dev/null and b/Content/StarterContent/Textures/T_Bush_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Bush_N.uasset b/Content/StarterContent/Textures/T_Bush_N.uasset new file mode 100644 index 0000000..a9f1c7e Binary files /dev/null and b/Content/StarterContent/Textures/T_Bush_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Ceramic_Tile_M.uasset 
b/Content/StarterContent/Textures/T_Ceramic_Tile_M.uasset new file mode 100644 index 0000000..8214218 Binary files /dev/null and b/Content/StarterContent/Textures/T_Ceramic_Tile_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Ceramic_Tile_N.uasset b/Content/StarterContent/Textures/T_Ceramic_Tile_N.uasset new file mode 100644 index 0000000..343aeb8 Binary files /dev/null and b/Content/StarterContent/Textures/T_Ceramic_Tile_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Chair_M.uasset b/Content/StarterContent/Textures/T_Chair_M.uasset new file mode 100644 index 0000000..40e1e6f Binary files /dev/null and b/Content/StarterContent/Textures/T_Chair_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Chair_N.uasset b/Content/StarterContent/Textures/T_Chair_N.uasset new file mode 100644 index 0000000..af1c8a4 Binary files /dev/null and b/Content/StarterContent/Textures/T_Chair_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Checker_Noise_M.uasset b/Content/StarterContent/Textures/T_Checker_Noise_M.uasset new file mode 100644 index 0000000..58b5668 Binary files /dev/null and b/Content/StarterContent/Textures/T_Checker_Noise_M.uasset differ diff --git a/Content/StarterContent/Textures/T_CobbleStone_Pebble_D.uasset b/Content/StarterContent/Textures/T_CobbleStone_Pebble_D.uasset new file mode 100644 index 0000000..bc3db66 Binary files /dev/null and b/Content/StarterContent/Textures/T_CobbleStone_Pebble_D.uasset differ diff --git a/Content/StarterContent/Textures/T_CobbleStone_Pebble_M.uasset b/Content/StarterContent/Textures/T_CobbleStone_Pebble_M.uasset new file mode 100644 index 0000000..7a953e9 Binary files /dev/null and b/Content/StarterContent/Textures/T_CobbleStone_Pebble_M.uasset differ diff --git a/Content/StarterContent/Textures/T_CobbleStone_Pebble_N.uasset b/Content/StarterContent/Textures/T_CobbleStone_Pebble_N.uasset new file mode 100644 index 0000000..46da547 Binary files /dev/null and 
b/Content/StarterContent/Textures/T_CobbleStone_Pebble_N.uasset differ diff --git a/Content/StarterContent/Textures/T_CobbleStone_Rough_D.uasset b/Content/StarterContent/Textures/T_CobbleStone_Rough_D.uasset new file mode 100644 index 0000000..1bc4666 Binary files /dev/null and b/Content/StarterContent/Textures/T_CobbleStone_Rough_D.uasset differ diff --git a/Content/StarterContent/Textures/T_CobbleStone_Rough_N.uasset b/Content/StarterContent/Textures/T_CobbleStone_Rough_N.uasset new file mode 100644 index 0000000..935d1af Binary files /dev/null and b/Content/StarterContent/Textures/T_CobbleStone_Rough_N.uasset differ diff --git a/Content/StarterContent/Textures/T_CobbleStone_Smooth_D.uasset b/Content/StarterContent/Textures/T_CobbleStone_Smooth_D.uasset new file mode 100644 index 0000000..44e5597 Binary files /dev/null and b/Content/StarterContent/Textures/T_CobbleStone_Smooth_D.uasset differ diff --git a/Content/StarterContent/Textures/T_CobbleStone_Smooth_M.uasset b/Content/StarterContent/Textures/T_CobbleStone_Smooth_M.uasset new file mode 100644 index 0000000..165106f Binary files /dev/null and b/Content/StarterContent/Textures/T_CobbleStone_Smooth_M.uasset differ diff --git a/Content/StarterContent/Textures/T_CobbleStone_Smooth_N.uasset b/Content/StarterContent/Textures/T_CobbleStone_Smooth_N.uasset new file mode 100644 index 0000000..4207398 Binary files /dev/null and b/Content/StarterContent/Textures/T_CobbleStone_Smooth_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Concrete_Grime_D.uasset b/Content/StarterContent/Textures/T_Concrete_Grime_D.uasset new file mode 100644 index 0000000..d6667d2 Binary files /dev/null and b/Content/StarterContent/Textures/T_Concrete_Grime_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Concrete_Panels_D.uasset b/Content/StarterContent/Textures/T_Concrete_Panels_D.uasset new file mode 100644 index 0000000..75134e3 Binary files /dev/null and 
b/Content/StarterContent/Textures/T_Concrete_Panels_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Concrete_Panels_N.uasset b/Content/StarterContent/Textures/T_Concrete_Panels_N.uasset new file mode 100644 index 0000000..6db0432 Binary files /dev/null and b/Content/StarterContent/Textures/T_Concrete_Panels_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Concrete_Poured_D.uasset b/Content/StarterContent/Textures/T_Concrete_Poured_D.uasset new file mode 100644 index 0000000..f2d651c Binary files /dev/null and b/Content/StarterContent/Textures/T_Concrete_Poured_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Concrete_Poured_N.uasset b/Content/StarterContent/Textures/T_Concrete_Poured_N.uasset new file mode 100644 index 0000000..edc2e1b Binary files /dev/null and b/Content/StarterContent/Textures/T_Concrete_Poured_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Concrete_Tiles_D.uasset b/Content/StarterContent/Textures/T_Concrete_Tiles_D.uasset new file mode 100644 index 0000000..b98f3a7 Binary files /dev/null and b/Content/StarterContent/Textures/T_Concrete_Tiles_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Concrete_Tiles_M.uasset b/Content/StarterContent/Textures/T_Concrete_Tiles_M.uasset new file mode 100644 index 0000000..cc1fe76 Binary files /dev/null and b/Content/StarterContent/Textures/T_Concrete_Tiles_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Concrete_Tiles_N.uasset b/Content/StarterContent/Textures/T_Concrete_Tiles_N.uasset new file mode 100644 index 0000000..d5d6b3a Binary files /dev/null and b/Content/StarterContent/Textures/T_Concrete_Tiles_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Concrete_Tiles_Variation_M.uasset b/Content/StarterContent/Textures/T_Concrete_Tiles_Variation_M.uasset new file mode 100644 index 0000000..5edef8f Binary files /dev/null and b/Content/StarterContent/Textures/T_Concrete_Tiles_Variation_M.uasset differ diff 
--git a/Content/StarterContent/Textures/T_Detail_Rocky_N.uasset b/Content/StarterContent/Textures/T_Detail_Rocky_N.uasset new file mode 100644 index 0000000..a80dc77 Binary files /dev/null and b/Content/StarterContent/Textures/T_Detail_Rocky_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Door_M.uasset b/Content/StarterContent/Textures/T_Door_M.uasset new file mode 100644 index 0000000..b5eec21 Binary files /dev/null and b/Content/StarterContent/Textures/T_Door_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Door_N.uasset b/Content/StarterContent/Textures/T_Door_N.uasset new file mode 100644 index 0000000..93e2c30 Binary files /dev/null and b/Content/StarterContent/Textures/T_Door_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Dust_Particle_D.uasset b/Content/StarterContent/Textures/T_Dust_Particle_D.uasset new file mode 100644 index 0000000..d80b3a0 Binary files /dev/null and b/Content/StarterContent/Textures/T_Dust_Particle_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Explosion_SubUV.uasset b/Content/StarterContent/Textures/T_Explosion_SubUV.uasset new file mode 100644 index 0000000..d30f842 Binary files /dev/null and b/Content/StarterContent/Textures/T_Explosion_SubUV.uasset differ diff --git a/Content/StarterContent/Textures/T_Fire_SubUV.uasset b/Content/StarterContent/Textures/T_Fire_SubUV.uasset new file mode 100644 index 0000000..6e87ac7 Binary files /dev/null and b/Content/StarterContent/Textures/T_Fire_SubUV.uasset differ diff --git a/Content/StarterContent/Textures/T_Fire_Tiled_D.uasset b/Content/StarterContent/Textures/T_Fire_Tiled_D.uasset new file mode 100644 index 0000000..825765f Binary files /dev/null and b/Content/StarterContent/Textures/T_Fire_Tiled_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Frame_M.uasset b/Content/StarterContent/Textures/T_Frame_M.uasset new file mode 100644 index 0000000..4aeac1c Binary files /dev/null and 
b/Content/StarterContent/Textures/T_Frame_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Frame_N.uasset b/Content/StarterContent/Textures/T_Frame_N.uasset new file mode 100644 index 0000000..b1a335d Binary files /dev/null and b/Content/StarterContent/Textures/T_Frame_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Gradinet_01.uasset b/Content/StarterContent/Textures/T_Gradinet_01.uasset new file mode 100644 index 0000000..a71d3dd Binary files /dev/null and b/Content/StarterContent/Textures/T_Gradinet_01.uasset differ diff --git a/Content/StarterContent/Textures/T_Ground_Grass_D.uasset b/Content/StarterContent/Textures/T_Ground_Grass_D.uasset new file mode 100644 index 0000000..0ce49d3 Binary files /dev/null and b/Content/StarterContent/Textures/T_Ground_Grass_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Ground_Grass_N.uasset b/Content/StarterContent/Textures/T_Ground_Grass_N.uasset new file mode 100644 index 0000000..c69da0d Binary files /dev/null and b/Content/StarterContent/Textures/T_Ground_Grass_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Ground_Gravel_D.uasset b/Content/StarterContent/Textures/T_Ground_Gravel_D.uasset new file mode 100644 index 0000000..3064541 Binary files /dev/null and b/Content/StarterContent/Textures/T_Ground_Gravel_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Ground_Gravel_N.uasset b/Content/StarterContent/Textures/T_Ground_Gravel_N.uasset new file mode 100644 index 0000000..e9f55a7 Binary files /dev/null and b/Content/StarterContent/Textures/T_Ground_Gravel_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Ground_Moss_N.uasset b/Content/StarterContent/Textures/T_Ground_Moss_N.uasset new file mode 100644 index 0000000..898e6ee Binary files /dev/null and b/Content/StarterContent/Textures/T_Ground_Moss_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Lamp_M.uasset b/Content/StarterContent/Textures/T_Lamp_M.uasset new file mode 
100644 index 0000000..e1958b0 Binary files /dev/null and b/Content/StarterContent/Textures/T_Lamp_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Lamp_N.uasset b/Content/StarterContent/Textures/T_Lamp_N.uasset new file mode 100644 index 0000000..77e47bd Binary files /dev/null and b/Content/StarterContent/Textures/T_Lamp_N.uasset differ diff --git a/Content/StarterContent/Textures/T_MacroVariation.uasset b/Content/StarterContent/Textures/T_MacroVariation.uasset new file mode 100644 index 0000000..dca6463 Binary files /dev/null and b/Content/StarterContent/Textures/T_MacroVariation.uasset differ diff --git a/Content/StarterContent/Textures/T_Metal_Aluminum_D.uasset b/Content/StarterContent/Textures/T_Metal_Aluminum_D.uasset new file mode 100644 index 0000000..225628a Binary files /dev/null and b/Content/StarterContent/Textures/T_Metal_Aluminum_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Metal_Copper_D.uasset b/Content/StarterContent/Textures/T_Metal_Copper_D.uasset new file mode 100644 index 0000000..df0cacb Binary files /dev/null and b/Content/StarterContent/Textures/T_Metal_Copper_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Metal_Gold_D.uasset b/Content/StarterContent/Textures/T_Metal_Gold_D.uasset new file mode 100644 index 0000000..0762c01 Binary files /dev/null and b/Content/StarterContent/Textures/T_Metal_Gold_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Metal_Gold_N.uasset b/Content/StarterContent/Textures/T_Metal_Gold_N.uasset new file mode 100644 index 0000000..938e7c3 Binary files /dev/null and b/Content/StarterContent/Textures/T_Metal_Gold_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Metal_Rust_D.uasset b/Content/StarterContent/Textures/T_Metal_Rust_D.uasset new file mode 100644 index 0000000..2a47b53 Binary files /dev/null and b/Content/StarterContent/Textures/T_Metal_Rust_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Metal_Rust_N.uasset 
b/Content/StarterContent/Textures/T_Metal_Rust_N.uasset new file mode 100644 index 0000000..c94e24d Binary files /dev/null and b/Content/StarterContent/Textures/T_Metal_Rust_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Metal_Steel_D.uasset b/Content/StarterContent/Textures/T_Metal_Steel_D.uasset new file mode 100644 index 0000000..bc8ee97 Binary files /dev/null and b/Content/StarterContent/Textures/T_Metal_Steel_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Metal_Steel_N.uasset b/Content/StarterContent/Textures/T_Metal_Steel_N.uasset new file mode 100644 index 0000000..bd90253 Binary files /dev/null and b/Content/StarterContent/Textures/T_Metal_Steel_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Perlin_Noise_M.uasset b/Content/StarterContent/Textures/T_Perlin_Noise_M.uasset new file mode 100644 index 0000000..a71f39d Binary files /dev/null and b/Content/StarterContent/Textures/T_Perlin_Noise_M.uasset differ diff --git a/Content/StarterContent/Textures/T_RockMesh_M.uasset b/Content/StarterContent/Textures/T_RockMesh_M.uasset new file mode 100644 index 0000000..91a8dbe Binary files /dev/null and b/Content/StarterContent/Textures/T_RockMesh_M.uasset differ diff --git a/Content/StarterContent/Textures/T_RockMesh_N.uasset b/Content/StarterContent/Textures/T_RockMesh_N.uasset new file mode 100644 index 0000000..cf18346 Binary files /dev/null and b/Content/StarterContent/Textures/T_RockMesh_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Rock_Basalt_D.uasset b/Content/StarterContent/Textures/T_Rock_Basalt_D.uasset new file mode 100644 index 0000000..15197b2 Binary files /dev/null and b/Content/StarterContent/Textures/T_Rock_Basalt_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Rock_Basalt_N.uasset b/Content/StarterContent/Textures/T_Rock_Basalt_N.uasset new file mode 100644 index 0000000..17ac16e Binary files /dev/null and b/Content/StarterContent/Textures/T_Rock_Basalt_N.uasset differ diff 
--git a/Content/StarterContent/Textures/T_Rock_Marble_Polished_D.uasset b/Content/StarterContent/Textures/T_Rock_Marble_Polished_D.uasset new file mode 100644 index 0000000..cc564e5 Binary files /dev/null and b/Content/StarterContent/Textures/T_Rock_Marble_Polished_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Rock_Sandstone_D.uasset b/Content/StarterContent/Textures/T_Rock_Sandstone_D.uasset new file mode 100644 index 0000000..efc7c83 Binary files /dev/null and b/Content/StarterContent/Textures/T_Rock_Sandstone_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Rock_Sandstone_N.uasset b/Content/StarterContent/Textures/T_Rock_Sandstone_N.uasset new file mode 100644 index 0000000..942331b Binary files /dev/null and b/Content/StarterContent/Textures/T_Rock_Sandstone_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Rock_Slate_D.uasset b/Content/StarterContent/Textures/T_Rock_Slate_D.uasset new file mode 100644 index 0000000..b460e81 Binary files /dev/null and b/Content/StarterContent/Textures/T_Rock_Slate_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Rock_Slate_N.uasset b/Content/StarterContent/Textures/T_Rock_Slate_N.uasset new file mode 100644 index 0000000..3eb6b14 Binary files /dev/null and b/Content/StarterContent/Textures/T_Rock_Slate_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Rock_Smooth_Granite_D.uasset b/Content/StarterContent/Textures/T_Rock_Smooth_Granite_D.uasset new file mode 100644 index 0000000..9c11db2 Binary files /dev/null and b/Content/StarterContent/Textures/T_Rock_Smooth_Granite_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Shelf_M.uasset b/Content/StarterContent/Textures/T_Shelf_M.uasset new file mode 100644 index 0000000..40bea03 Binary files /dev/null and b/Content/StarterContent/Textures/T_Shelf_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Shelf_N.uasset b/Content/StarterContent/Textures/T_Shelf_N.uasset new file mode 100644 index 
0000000..5cb6c71 Binary files /dev/null and b/Content/StarterContent/Textures/T_Shelf_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Single_Tile_N.uasset b/Content/StarterContent/Textures/T_Single_Tile_N.uasset new file mode 100644 index 0000000..ddc0828 Binary files /dev/null and b/Content/StarterContent/Textures/T_Single_Tile_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Smoke_SubUV.uasset b/Content/StarterContent/Textures/T_Smoke_SubUV.uasset new file mode 100644 index 0000000..5c8a2d9 Binary files /dev/null and b/Content/StarterContent/Textures/T_Smoke_SubUV.uasset differ diff --git a/Content/StarterContent/Textures/T_Smoke_Tiled_D.uasset b/Content/StarterContent/Textures/T_Smoke_Tiled_D.uasset new file mode 100644 index 0000000..aad7a5e Binary files /dev/null and b/Content/StarterContent/Textures/T_Smoke_Tiled_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Spark_Core.uasset b/Content/StarterContent/Textures/T_Spark_Core.uasset new file mode 100644 index 0000000..3881b88 Binary files /dev/null and b/Content/StarterContent/Textures/T_Spark_Core.uasset differ diff --git a/Content/StarterContent/Textures/T_Statue_M.uasset b/Content/StarterContent/Textures/T_Statue_M.uasset new file mode 100644 index 0000000..d43158a Binary files /dev/null and b/Content/StarterContent/Textures/T_Statue_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Statue_N.uasset b/Content/StarterContent/Textures/T_Statue_N.uasset new file mode 100644 index 0000000..787d2d5 Binary files /dev/null and b/Content/StarterContent/Textures/T_Statue_N.uasset differ diff --git a/Content/StarterContent/Textures/T_TableRound_M.uasset b/Content/StarterContent/Textures/T_TableRound_M.uasset new file mode 100644 index 0000000..a76573e Binary files /dev/null and b/Content/StarterContent/Textures/T_TableRound_M.uasset differ diff --git a/Content/StarterContent/Textures/T_TableRound_N.uasset b/Content/StarterContent/Textures/T_TableRound_N.uasset 
new file mode 100644 index 0000000..619c420 Binary files /dev/null and b/Content/StarterContent/Textures/T_TableRound_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Tech_Dot_M.uasset b/Content/StarterContent/Textures/T_Tech_Dot_M.uasset new file mode 100644 index 0000000..fb692b3 Binary files /dev/null and b/Content/StarterContent/Textures/T_Tech_Dot_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Tech_Dot_N.uasset b/Content/StarterContent/Textures/T_Tech_Dot_N.uasset new file mode 100644 index 0000000..50d81c2 Binary files /dev/null and b/Content/StarterContent/Textures/T_Tech_Dot_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Tech_Hex_Tile_M.uasset b/Content/StarterContent/Textures/T_Tech_Hex_Tile_M.uasset new file mode 100644 index 0000000..502170b Binary files /dev/null and b/Content/StarterContent/Textures/T_Tech_Hex_Tile_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Tech_Hex_Tile_N.uasset b/Content/StarterContent/Textures/T_Tech_Hex_Tile_N.uasset new file mode 100644 index 0000000..549633c Binary files /dev/null and b/Content/StarterContent/Textures/T_Tech_Hex_Tile_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Tech_Panel_M.uasset b/Content/StarterContent/Textures/T_Tech_Panel_M.uasset new file mode 100644 index 0000000..192a8e4 Binary files /dev/null and b/Content/StarterContent/Textures/T_Tech_Panel_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Tech_Panel_N.uasset b/Content/StarterContent/Textures/T_Tech_Panel_N.uasset new file mode 100644 index 0000000..b12087e Binary files /dev/null and b/Content/StarterContent/Textures/T_Tech_Panel_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Water_M.uasset b/Content/StarterContent/Textures/T_Water_M.uasset new file mode 100644 index 0000000..63e9b4e Binary files /dev/null and b/Content/StarterContent/Textures/T_Water_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Water_N.uasset 
b/Content/StarterContent/Textures/T_Water_N.uasset new file mode 100644 index 0000000..dc83c1b Binary files /dev/null and b/Content/StarterContent/Textures/T_Water_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Wood_Floor_Walnut_D.uasset b/Content/StarterContent/Textures/T_Wood_Floor_Walnut_D.uasset new file mode 100644 index 0000000..0598d9e Binary files /dev/null and b/Content/StarterContent/Textures/T_Wood_Floor_Walnut_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Wood_Floor_Walnut_M.uasset b/Content/StarterContent/Textures/T_Wood_Floor_Walnut_M.uasset new file mode 100644 index 0000000..1af473f Binary files /dev/null and b/Content/StarterContent/Textures/T_Wood_Floor_Walnut_M.uasset differ diff --git a/Content/StarterContent/Textures/T_Wood_Floor_Walnut_N.uasset b/Content/StarterContent/Textures/T_Wood_Floor_Walnut_N.uasset new file mode 100644 index 0000000..dae4bb4 Binary files /dev/null and b/Content/StarterContent/Textures/T_Wood_Floor_Walnut_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Wood_Oak_D.uasset b/Content/StarterContent/Textures/T_Wood_Oak_D.uasset new file mode 100644 index 0000000..c5494a9 Binary files /dev/null and b/Content/StarterContent/Textures/T_Wood_Oak_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Wood_Oak_N.uasset b/Content/StarterContent/Textures/T_Wood_Oak_N.uasset new file mode 100644 index 0000000..71543aa Binary files /dev/null and b/Content/StarterContent/Textures/T_Wood_Oak_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Wood_Pine_D.uasset b/Content/StarterContent/Textures/T_Wood_Pine_D.uasset new file mode 100644 index 0000000..eac50af Binary files /dev/null and b/Content/StarterContent/Textures/T_Wood_Pine_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Wood_Pine_N.uasset b/Content/StarterContent/Textures/T_Wood_Pine_N.uasset new file mode 100644 index 0000000..051a794 Binary files /dev/null and 
b/Content/StarterContent/Textures/T_Wood_Pine_N.uasset differ diff --git a/Content/StarterContent/Textures/T_Wood_Walnut_D.uasset b/Content/StarterContent/Textures/T_Wood_Walnut_D.uasset new file mode 100644 index 0000000..bc3c3df Binary files /dev/null and b/Content/StarterContent/Textures/T_Wood_Walnut_D.uasset differ diff --git a/Content/StarterContent/Textures/T_Wood_Walnut_N.uasset b/Content/StarterContent/Textures/T_Wood_Walnut_N.uasset new file mode 100644 index 0000000..856a1f8 Binary files /dev/null and b/Content/StarterContent/Textures/T_Wood_Walnut_N.uasset differ diff --git a/Content/StarterContent/Textures/T_ground_Moss_D.uasset b/Content/StarterContent/Textures/T_ground_Moss_D.uasset new file mode 100644 index 0000000..2b9ca5b Binary files /dev/null and b/Content/StarterContent/Textures/T_ground_Moss_D.uasset differ diff --git a/Content/TheStudio/LiveLinkHub/TheStudioLiveLinkHubConfig.json b/Content/TheStudio/LiveLinkHub/TheStudioLiveLinkHubConfig.json new file mode 100644 index 0000000..acecb7b Binary files /dev/null and b/Content/TheStudio/LiveLinkHub/TheStudioLiveLinkHubConfig.json differ diff --git a/Content/TheStudio/Media/Bundles/MB_CameraSDI2Input.uasset b/Content/TheStudio/Media/Bundles/MB_CameraSDI2Input.uasset new file mode 100644 index 0000000..a4ec0c9 Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_CameraSDI2Input.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_CameraSDI2Input_InnerAssets/MI_MB_CameraSDI2Input.uasset b/Content/TheStudio/Media/Bundles/MB_CameraSDI2Input_InnerAssets/MI_MB_CameraSDI2Input.uasset new file mode 100644 index 0000000..f27fef4 Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_CameraSDI2Input_InnerAssets/MI_MB_CameraSDI2Input.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_CameraSDI2Input_InnerAssets/MediaP_MB_CameraSDI2Input.uasset b/Content/TheStudio/Media/Bundles/MB_CameraSDI2Input_InnerAssets/MediaP_MB_CameraSDI2Input.uasset new file mode 100644 index 
0000000..f89c5ed Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_CameraSDI2Input_InnerAssets/MediaP_MB_CameraSDI2Input.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_CameraSDI2Input_InnerAssets/RT_MB_CameraSDI2Input_LensDisplacement.uasset b/Content/TheStudio/Media/Bundles/MB_CameraSDI2Input_InnerAssets/RT_MB_CameraSDI2Input_LensDisplacement.uasset new file mode 100644 index 0000000..5c3160f Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_CameraSDI2Input_InnerAssets/RT_MB_CameraSDI2Input_LensDisplacement.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_CameraSDI2Input_InnerAssets/T_MB_CameraSDI2Input_BC.uasset b/Content/TheStudio/Media/Bundles/MB_CameraSDI2Input_InnerAssets/T_MB_CameraSDI2Input_BC.uasset new file mode 100644 index 0000000..96ef054 Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_CameraSDI2Input_InnerAssets/T_MB_CameraSDI2Input_BC.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_CineCamera_Input.uasset b/Content/TheStudio/Media/Bundles/MB_CineCamera_Input.uasset new file mode 100644 index 0000000..e949166 Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_CineCamera_Input.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_CineCamera_Input_InnerAssets/MI_MB_CineCamera_Input.uasset b/Content/TheStudio/Media/Bundles/MB_CineCamera_Input_InnerAssets/MI_MB_CineCamera_Input.uasset new file mode 100644 index 0000000..022a2af Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_CineCamera_Input_InnerAssets/MI_MB_CineCamera_Input.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_CineCamera_Input_InnerAssets/MediaP_MB_CineCamera_Input.uasset b/Content/TheStudio/Media/Bundles/MB_CineCamera_Input_InnerAssets/MediaP_MB_CineCamera_Input.uasset new file mode 100644 index 0000000..4ea3b32 Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_CineCamera_Input_InnerAssets/MediaP_MB_CineCamera_Input.uasset differ diff --git 
a/Content/TheStudio/Media/Bundles/MB_CineCamera_Input_InnerAssets/RT_MB_CineCamera_Input_LensDisplacement.uasset b/Content/TheStudio/Media/Bundles/MB_CineCamera_Input_InnerAssets/RT_MB_CineCamera_Input_LensDisplacement.uasset new file mode 100644 index 0000000..c5f86c9 Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_CineCamera_Input_InnerAssets/RT_MB_CineCamera_Input_LensDisplacement.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_CineCamera_Input_InnerAssets/T_MB_CineCamera_Input_BC.uasset b/Content/TheStudio/Media/Bundles/MB_CineCamera_Input_InnerAssets/T_MB_CineCamera_Input_BC.uasset new file mode 100644 index 0000000..26d5122 Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_CineCamera_Input_InnerAssets/T_MB_CineCamera_Input_BC.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_Monitor_Preview.uasset b/Content/TheStudio/Media/Bundles/MB_Monitor_Preview.uasset new file mode 100644 index 0000000..8ef952e Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_Monitor_Preview.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_Monitor_Preview_InnerAssets/MI_MB_Monitor_Preview.uasset b/Content/TheStudio/Media/Bundles/MB_Monitor_Preview_InnerAssets/MI_MB_Monitor_Preview.uasset new file mode 100644 index 0000000..6599f66 Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_Monitor_Preview_InnerAssets/MI_MB_Monitor_Preview.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_Monitor_Preview_InnerAssets/MediaP_MB_Monitor_Preview.uasset b/Content/TheStudio/Media/Bundles/MB_Monitor_Preview_InnerAssets/MediaP_MB_Monitor_Preview.uasset new file mode 100644 index 0000000..05e1288 Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_Monitor_Preview_InnerAssets/MediaP_MB_Monitor_Preview.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_Monitor_Preview_InnerAssets/RT_MB_Monitor_Preview_LensDisplacement.uasset 
b/Content/TheStudio/Media/Bundles/MB_Monitor_Preview_InnerAssets/RT_MB_Monitor_Preview_LensDisplacement.uasset new file mode 100644 index 0000000..605443c Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_Monitor_Preview_InnerAssets/RT_MB_Monitor_Preview_LensDisplacement.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_Monitor_Preview_InnerAssets/T_MB_Monitor_Preview_BC.uasset b/Content/TheStudio/Media/Bundles/MB_Monitor_Preview_InnerAssets/T_MB_Monitor_Preview_BC.uasset new file mode 100644 index 0000000..39f459f Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_Monitor_Preview_InnerAssets/T_MB_Monitor_Preview_BC.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_Multiview_Display.uasset b/Content/TheStudio/Media/Bundles/MB_Multiview_Display.uasset new file mode 100644 index 0000000..6fbed16 Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_Multiview_Display.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_Multiview_Display_InnerAssets/MI_MB_Multiview_Display.uasset b/Content/TheStudio/Media/Bundles/MB_Multiview_Display_InnerAssets/MI_MB_Multiview_Display.uasset new file mode 100644 index 0000000..37754b9 Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_Multiview_Display_InnerAssets/MI_MB_Multiview_Display.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_Multiview_Display_InnerAssets/MediaP_MB_Multiview_Display.uasset b/Content/TheStudio/Media/Bundles/MB_Multiview_Display_InnerAssets/MediaP_MB_Multiview_Display.uasset new file mode 100644 index 0000000..5459d30 Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_Multiview_Display_InnerAssets/MediaP_MB_Multiview_Display.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_Multiview_Display_InnerAssets/RT_MB_Multiview_Display_LensDisplacement.uasset b/Content/TheStudio/Media/Bundles/MB_Multiview_Display_InnerAssets/RT_MB_Multiview_Display_LensDisplacement.uasset new file mode 100644 index 
0000000..0a55e90 Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_Multiview_Display_InnerAssets/RT_MB_Multiview_Display_LensDisplacement.uasset differ diff --git a/Content/TheStudio/Media/Bundles/MB_Multiview_Display_InnerAssets/T_MB_Multiview_Display_BC.uasset b/Content/TheStudio/Media/Bundles/MB_Multiview_Display_InnerAssets/T_MB_Multiview_Display_BC.uasset new file mode 100644 index 0000000..f6a7999 Binary files /dev/null and b/Content/TheStudio/Media/Bundles/MB_Multiview_Display_InnerAssets/T_MB_Multiview_Display_BC.uasset differ diff --git a/Content/TheStudio/Media/M_Multiview.uasset b/Content/TheStudio/Media/M_Multiview.uasset new file mode 100644 index 0000000..79e53c1 Binary files /dev/null and b/Content/TheStudio/Media/M_Multiview.uasset differ diff --git a/Content/TheStudio/Media/NDI/BP_NDIBroadcaster.uasset b/Content/TheStudio/Media/NDI/BP_NDIBroadcaster.uasset new file mode 100644 index 0000000..f584c07 Binary files /dev/null and b/Content/TheStudio/Media/NDI/BP_NDIBroadcaster.uasset differ diff --git a/Content/TheStudio/Media/NDI/BP_NDIReceiver.uasset b/Content/TheStudio/Media/NDI/BP_NDIReceiver.uasset new file mode 100644 index 0000000..9818c8a Binary files /dev/null and b/Content/TheStudio/Media/NDI/BP_NDIReceiver.uasset differ diff --git a/Content/TheStudio/Media/NDI/NDIBroadcastRenderTarget.uasset b/Content/TheStudio/Media/NDI/NDIBroadcastRenderTarget.uasset new file mode 100644 index 0000000..de9a9bf Binary files /dev/null and b/Content/TheStudio/Media/NDI/NDIBroadcastRenderTarget.uasset differ diff --git a/Content/TheStudio/Media/NDI/NDIMediaSender.uasset b/Content/TheStudio/Media/NDI/NDIMediaSender.uasset new file mode 100644 index 0000000..42c9ca2 Binary files /dev/null and b/Content/TheStudio/Media/NDI/NDIMediaSender.uasset differ diff --git a/Content/TheStudio/Media/NDI/NewEditorUtilityWidgetBlueprint.uasset b/Content/TheStudio/Media/NDI/NewEditorUtilityWidgetBlueprint.uasset new file mode 100644 index 0000000..4170186 Binary 
files /dev/null and b/Content/TheStudio/Media/NDI/NewEditorUtilityWidgetBlueprint.uasset differ diff --git a/Content/TheStudio/Media/NDI/NewNDIMediaTexture2D_Mat.uasset b/Content/TheStudio/Media/NDI/NewNDIMediaTexture2D_Mat.uasset new file mode 100644 index 0000000..c10f37b Binary files /dev/null and b/Content/TheStudio/Media/NDI/NewNDIMediaTexture2D_Mat.uasset differ diff --git a/Content/TheStudio/Media/NDI/Receiver/BP_NDIReceiver.uasset b/Content/TheStudio/Media/NDI/Receiver/BP_NDIReceiver.uasset new file mode 100644 index 0000000..13a0dfb Binary files /dev/null and b/Content/TheStudio/Media/NDI/Receiver/BP_NDIReceiver.uasset differ diff --git a/Content/TheStudio/Media/NDI/Receiver/M_NDIReceiverMaterial.uasset b/Content/TheStudio/Media/NDI/Receiver/M_NDIReceiverMaterial.uasset new file mode 100644 index 0000000..9970373 Binary files /dev/null and b/Content/TheStudio/Media/NDI/Receiver/M_NDIReceiverMaterial.uasset differ diff --git a/Content/TheStudio/Media/NDI/Receiver/NDIMediaReceiver.uasset b/Content/TheStudio/Media/NDI/Receiver/NDIMediaReceiver.uasset new file mode 100644 index 0000000..90f22d0 Binary files /dev/null and b/Content/TheStudio/Media/NDI/Receiver/NDIMediaReceiver.uasset differ diff --git a/Content/TheStudio/Media/NDI/Receiver/NewEditorUtilityWidgetBlueprint.uasset b/Content/TheStudio/Media/NDI/Receiver/NewEditorUtilityWidgetBlueprint.uasset new file mode 100644 index 0000000..7816e92 Binary files /dev/null and b/Content/TheStudio/Media/NDI/Receiver/NewEditorUtilityWidgetBlueprint.uasset differ diff --git a/Content/TheStudio/Media/NDI/Receiver/NewNDIMediaTexture2D.uasset b/Content/TheStudio/Media/NDI/Receiver/NewNDIMediaTexture2D.uasset new file mode 100644 index 0000000..5c6db62 Binary files /dev/null and b/Content/TheStudio/Media/NDI/Receiver/NewNDIMediaTexture2D.uasset differ diff --git a/Content/TheStudio/Media/NDI/Receiver/WBP_NDIReceiver.uasset b/Content/TheStudio/Media/NDI/Receiver/WBP_NDIReceiver.uasset new file mode 100644 index 
0000000..fb2cb5c Binary files /dev/null and b/Content/TheStudio/Media/NDI/Receiver/WBP_NDIReceiver.uasset differ diff --git a/Content/TheStudio/Media/Profiles/MediaProfile_Maintenance.uasset b/Content/TheStudio/Media/Profiles/MediaProfile_Maintenance.uasset new file mode 100644 index 0000000..2a92d4c Binary files /dev/null and b/Content/TheStudio/Media/Profiles/MediaProfile_Maintenance.uasset differ diff --git a/Content/TheStudio/Media/Profiles/MediaProfile_Production.uasset b/Content/TheStudio/Media/Profiles/MediaProfile_Production.uasset new file mode 100644 index 0000000..3378fb8 Binary files /dev/null and b/Content/TheStudio/Media/Profiles/MediaProfile_Production.uasset differ diff --git a/Content/TheStudio/Media/Profiles/MediaProfile_Rehearsal.uasset b/Content/TheStudio/Media/Profiles/MediaProfile_Rehearsal.uasset new file mode 100644 index 0000000..5d76a48 Binary files /dev/null and b/Content/TheStudio/Media/Profiles/MediaProfile_Rehearsal.uasset differ diff --git a/Content/TheStudio/Media/Proxies/ProxyMediaOutput_Monitor.uasset b/Content/TheStudio/Media/Proxies/ProxyMediaOutput_Monitor.uasset new file mode 100644 index 0000000..4eb4ad1 Binary files /dev/null and b/Content/TheStudio/Media/Proxies/ProxyMediaOutput_Monitor.uasset differ diff --git a/Content/TheStudio/Media/Proxies/ProxyMediaOutput_Multiview.uasset b/Content/TheStudio/Media/Proxies/ProxyMediaOutput_Multiview.uasset new file mode 100644 index 0000000..f12a867 Binary files /dev/null and b/Content/TheStudio/Media/Proxies/ProxyMediaOutput_Multiview.uasset differ diff --git a/Content/TheStudio/Media/Proxies/ProxyMediaSource_CameraSDI3.uasset b/Content/TheStudio/Media/Proxies/ProxyMediaSource_CameraSDI3.uasset new file mode 100644 index 0000000..0ba4424 Binary files /dev/null and b/Content/TheStudio/Media/Proxies/ProxyMediaSource_CameraSDI3.uasset differ diff --git a/Content/TheStudio/Media/Proxies/ProxyMediaSource_CineCamera.uasset b/Content/TheStudio/Media/Proxies/ProxyMediaSource_CineCamera.uasset 
new file mode 100644 index 0000000..7f91537 Binary files /dev/null and b/Content/TheStudio/Media/Proxies/ProxyMediaSource_CineCamera.uasset differ diff --git a/Content/TheStudio/Media/Sync/BlackMagicGencode.uasset b/Content/TheStudio/Media/Sync/BlackMagicGencode.uasset new file mode 100644 index 0000000..bb5a347 Binary files /dev/null and b/Content/TheStudio/Media/Sync/BlackMagicGencode.uasset differ diff --git a/Content/TheStudio/Media/Sync/DanteAudioTimecodeProvider.uasset b/Content/TheStudio/Media/Sync/DanteAudioTimecodeProvider.uasset new file mode 100644 index 0000000..a7096f4 Binary files /dev/null and b/Content/TheStudio/Media/Sync/DanteAudioTimecodeProvider.uasset differ diff --git a/Content/TheStudio/Media/Sync/TestBlackmagic.uasset b/Content/TheStudio/Media/Sync/TestBlackmagic.uasset new file mode 100644 index 0000000..71337bf Binary files /dev/null and b/Content/TheStudio/Media/Sync/TestBlackmagic.uasset differ diff --git a/Content/TheStudio/Media/VirtualCamera/M_VirtualCamera.uasset b/Content/TheStudio/Media/VirtualCamera/M_VirtualCamera.uasset new file mode 100644 index 0000000..a177677 Binary files /dev/null and b/Content/TheStudio/Media/VirtualCamera/M_VirtualCamera.uasset differ diff --git a/Content/TheStudio/Media/VirtualCamera/TextureTargetVirtualCamera.uasset b/Content/TheStudio/Media/VirtualCamera/TextureTargetVirtualCamera.uasset new file mode 100644 index 0000000..a68860f Binary files /dev/null and b/Content/TheStudio/Media/VirtualCamera/TextureTargetVirtualCamera.uasset differ diff --git a/Content/TheStudio/Presets/LiveLinkhubPreset.uasset b/Content/TheStudio/Presets/LiveLinkhubPreset.uasset new file mode 100644 index 0000000..5d7f7e9 Binary files /dev/null and b/Content/TheStudio/Presets/LiveLinkhubPreset.uasset differ diff --git a/Content/TheStudio/Switchboard/Switchboard.config.json b/Content/TheStudio/Switchboard/Switchboard.config.json new file mode 100644 index 0000000..23314f7 --- /dev/null +++ 
b/Content/TheStudio/Switchboard/Switchboard.config.json @@ -0,0 +1,237 @@ +{ + "project_name": "TheStudio", + "uproject": "D:\\Projects\\EasternMediterranean\\VPTemplate.uproject", + "engine_dir": "C:/Program Files/Epic Games/UE_5.5/Engine/", + "engine_sync_method": "Use Existing (do not sync/build)", + "maps_path": "", + "maps_filter": "*.umap", + "content_plugin_filters": [], + "listener_exe": "SwitchboardListener", + "sblhelper_exe": "SwitchboardListenerHelper", + "tracing_enabled": false, + "tracing_args": "default,concert,messaging,tasks", + "tracing_stat_events": true, + "osc_server_port": 6000, + "p4_enabled": false, + "p4_sync_path": "", + "p4_engine_path": "", + "source_control_workspace": null, + "muserver_command_line_arguments": "", + "muserver_server_name": "TheStudio_MU_Server", + "muserver_endpoint": "192.168.4.35:9030", + "multiuser_exe": "UnrealMultiUserServer", + "multiuserslate_exe": "UnrealMultiUserSlateServer", + "muserver_auto_launch": true, + "muserver_slate_mode": true, + "muserver_clean_history": false, + "muserver_auto_build": true, + "muserver_auto_endpoint": true, + "muserver_multicast_endpoint": "224.0.0.1:6666", + "muserver_auto_join": true, + "muserver_archive_dir": "", + "muserver_working_dir": "", + "current_level": "/Game/Levels/360VideoSphere", + "devices": { + "KiPro": { + "settings": { + "is_recording_device": true + } + }, + "LiveLinkFace": { + "settings": { + "osc_port": 8000 + } + }, + "Motive": { + "settings": { + "is_recording_device": true, + "motive_port": 1510 + } + }, + "MultiplayerServer": { + "settings": { + "executable_path": "", + "level_path": "", + "multiplayer_port": 7777, + "port": 2980 + } + }, + "Unreal": { + "settings": { + "is_recording_device": true, + "buffer_size": 1024, + "command_line_arguments": "", + "exec_cmds": [], + "dp_cvars": [], + "port": 2980, + "osc_port": 5500, + "roles_filename": "VPRoles.ini", + "stage_session_id": 0, + "editor_exe": "UnrealEditor.exe", + "max_gpu_count": 1, + 
"priority_modifier": "Normal", + "auto_decline_package_recovery": false, + "udpmessaging_unicast_endpoint": ":0", + "udpmessaging_extra_static_endpoints": "", + "udpmessaging_multicast_endpoint": "224.0.0.1:6666", + "log_download_dir": "", + "reflect_visibility_to_game": true, + "rsync_port": 8730, + "listener_inactive_timeout": 5, + "slate_allow_throttling": false, + "retrieve_logs": true, + "livelink_preset": "/Game/TheStudio/Presets/LiveLinkhubPreset.uasset", + "mediaprofile": "", + "lock_gpu_clock": false, + "use_sync_filters": false, + "included_sync_categories": [ + [ + "6703e989-d912-451d-93ad-b48de748d282", + "Content" + ], + [ + "f44b2d25-cbc0-4a8f-b6b3-e4a8125533dd", + "Platform Support: Linux" + ], + [ + "5206ccee-9024-4e36-8b89-f5f5a7d288d2", + "Platform Support: Win64" + ], + [ + "cfec942a-bb90-4f0c-accf-238ecaad9430", + "Source Code" + ] + ], + "custom_sync_view": "" + }, + "Editor_1": { + "address": "192.168.4.35", + "roles": [], + "ddc_build_platforms": [ + "Windows", + "WindowsEditor", + "Linux", + "LinuxEditor" + ], + "autojoin_mu_server": true, + "last_launch_command": "C:\\Program Files\\Epic Games\\UE_5.5\\Engine\\Binaries\\Win64\\UnrealEditor.exe \"D:\\Projects\\EasternMediterranean\\VPTemplate.uproject\" /Game/Levels/360VideoSphere Log=Editor_1.log -CONCERTRETRYAUTOCONNECTONERROR -CONCERTAUTOCONNECT -CONCERTSERVER=\"TheStudio_MU_Server_D+Ton9OT3hkk\" -CONCERTSESSION=\"MU_TheStudio_1\" -CONCERTDISPLAYNAME=\"Editor_1\" -ExecCmds=\"LiveLink.Preset.Apply Preset=/Game/TheStudio/Presets/LiveLinkhubPreset.LiveLinkhubPreset\" -StageFriendlyName=\"Editor_1\" -DPCVars=\"Slate.bAllowThrottling=0\" -ConcertReflectVisibility=1 -UDPMESSAGING_TRANSPORT_MULTICAST=\"224.0.0.1:6666\" -UDPMESSAGING_TRANSPORT_UNICAST=\"192.168.4.35:0\" -UDPMESSAGING_TRANSPORT_STATIC=\"192.168.4.35:9030\" -ini:Engine:[/Script/ConcertTakeRecorder.ConcertSessionRecordSettings]:LocalSettings=(bRecordOnClient=True)", + "last_log_path": 
"D:\\Projects\\EasternMediterranean\\Saved\\Logs\\Switchboard\\Editor_1.log", + "last_trace_path": "", + "exclude_from_build": false, + "exclude_from_insights": false, + "last_sync_filter_hash": "" + }, + "Node_1_Editor": { + "address": "192.168.4.15", + "roles": [], + "ddc_build_platforms": [ + "Windows", + "WindowsEditor", + "Linux", + "LinuxEditor" + ], + "autojoin_mu_server": true, + "last_launch_command": "C:\\Program Files\\Epic Games\\UE_5.5\\Engine\\Binaries\\Win64\\UnrealEditor.exe \"D:\\Projects\\EasternMediterranean\\VPTemplate.uproject\" /Game/Levels/Lidinis Log=Node_1_Editor.log -CONCERTRETRYAUTOCONNECTONERROR -CONCERTAUTOCONNECT -CONCERTSERVER=\"TheStudio_MU_Server_D+Ton9OT3hkk\" -CONCERTSESSION=\"MU_TheStudio_1\" -CONCERTDISPLAYNAME=\"Node_1_Editor\" -ExecCmds=\"LiveLink.Preset.Apply Preset=/Game/TheStudio/Presets/LiveLinkhubPreset.LiveLinkhubPreset\" -StageFriendlyName=\"Node_1_Editor\" -DPCVars=\"Slate.bAllowThrottling=0\" -ConcertReflectVisibility=1 -UDPMESSAGING_TRANSPORT_MULTICAST=\"224.0.0.1:6666\" -UDPMESSAGING_TRANSPORT_UNICAST=\"192.168.4.15:0\" -UDPMESSAGING_TRANSPORT_STATIC=\"192.168.4.35:9030\" -ini:Engine:[/Script/ConcertTakeRecorder.ConcertSessionRecordSettings]:LocalSettings=(bRecordOnClient=True)", + "last_log_path": "D:\\Projects\\EasternMediterranean\\Saved\\Logs\\Switchboard\\Node_1_Editor.log", + "last_trace_path": "", + "exclude_from_build": false, + "exclude_from_insights": false, + "last_sync_filter_hash": "" + } + }, + "nDisplay": { + "settings": { + "ndisplay_cfg_file": "D:/Projects/EasternMediterranean/Content/TheStudio/nDisplay/TheStudioNDisplayConfig.uasset", + "launch_mode": "Standalone (ICVFX)", + "packaged_game_path": "", + "use_all_available_cores": false, + "texture_streaming": true, + "sound": false, + "loading_screen": false, + "allow_python": false, + "incremental_gc_reachability": false, + "render_api": "dx12", + "multiplayer_mode": "None", + "dedicated_server_address": "127.0.0.1", + "dedicated_server_port": 
"7777", + "render_mode": "Mono", + "render_sync_policy": "Config", + "executable_filename": "UnrealEditor.exe", + "ndisplay_cmd_args": "", + "ndisplay_exec_cmds": [], + "ndisplay_dp_cvars": [], + "ndisplay_unattended": true, + "max_gpu_count": 1, + "priority_modifier": "Normal", + "populated_config_itemDatas": [], + "minimize_before_launch": true, + "primary_device_name": "Node_2", + "logging": { + "LogConcert": null, + "LogDisplayClusterCluster": null, + "LogDisplayClusterConfig": null, + "LogDisplayClusterEngine": null, + "LogDisplayClusterGame": null, + "LogDisplayClusterNetwork": null, + "LogDisplayClusterRender": null, + "LogDisplayClusterRenderSync": null, + "LogDisplayClusterViewport": null, + "LogDisplayClusterMedia": null, + "LogDisplayClusterStageMonitoringDWM": null, + "LogLiveLink": null, + "LogRemoteControl": null + }, + "udpmessaging_unicast_endpoint": ":0", + "udpmessaging_extra_static_endpoints": "", + "disable_ensures": true, + "disable_all_screen_messages": true, + "livelink_preset": "/Game/TheStudio/Presets/LiveLinkhubPreset.uasset", + "graphics_adapter": "Config", + "mediaprofile": "/Game/TheStudio/Media/Profiles/MediaProfile_Production.uasset", + "lock_gpu_clock": true, + "port": 2980, + "roles_filename": "VPRoles.ini", + "stage_session_id": 0 + }, + "Node_2": { + "address": "192.168.4.25", + "roles": [], + "ddc_build_platforms": [ + "Windows", + "WindowsEditor", + "Linux", + "LinuxEditor" + ], + "autojoin_mu_server": true, + "last_launch_command": "C:\\Program Files\\Epic Games\\UE_5.5\\Engine\\Binaries\\Win64\\UnrealEditor.exe \"D:\\Projects\\EasternMediterranean\\VPTemplate.uproject\" -game /Game/Levels/360VideoSphere -messaging -dc_cluster -nosplash -fixedseed -NoVerifyGC -noxrstereo -xrtrackingonly -RemoteControlIsHeadless -StageFriendlyName=\"Node_2\" -dc_cfg=\"C:\\Users\\1\\AppData\\Local\\Temp\\ndisplay\\8BACC80A43DD2A4CFBB6198B6FEB61DA.ndisplay\" -dx12 -dc_dev_mono -nosound -NoLoadingScreen -DisablePython -dc_node=Node_2 Log=Node_2.log 
-ini:Engine:[/Script/Engine.Engine]:GameEngine=/Script/DisplayCluster.DisplayClusterGameEngine,[/Script/Engine.Engine]:GameViewportClientClassName=/Script/DisplayCluster.DisplayClusterViewportClient,[/Script/Engine.UserInterfaceSettings]:bAllowHighDPIInGameMode=True -ini:Game:[/Script/EngineSettings.GeneralProjectSettings]:bUseBorderlessWindow=True -ini:Input:[/Script/Engine.InputSettings]:DefaultPlayerInputClass=/Script/DisplayCluster.DisplayClusterPlayerInput -unattended -NoScreenMessages -handleensurepercent=0 -UDPMESSAGING_TRANSPORT_MULTICAST=\"224.0.0.1:6666\" -UDPMESSAGING_TRANSPORT_UNICAST=\"192.168.4.25:0\" -UDPMESSAGING_TRANSPORT_STATIC=\"192.168.4.35:9030\" -ExecCmds=\"DisableAllScreenMessages,LiveLink.Preset.Apply Preset=/Game/TheStudio/Presets/LiveLinkhubPreset.LiveLinkhubPreset\" -windowed -forceres WinX=0 WinY=0 ResX=4608 ResY=1536 -CONCERTRETRYAUTOCONNECTONERROR -CONCERTAUTOCONNECT -CONCERTSERVER=\"TheStudio_MU_Server_D+Ton9OT3hkk\" -CONCERTSESSION=\"MU_TheStudio_1\" -CONCERTDISPLAYNAME=\"Node_2\" -CONCERTISHEADLESS -DPCVars=\"Slate.bAllowNotifications=0,p.Chaos.Solver.Deterministic=1,LevelInstance.ForceEditorWorldMode=1,MediaUtils.StartupProfile=/Game/TheStudio/Media/Profiles/MediaProfile_Production.MediaProfile_Production\" ", + "last_log_path": "D:\\Projects\\EasternMediterranean\\Saved\\Logs\\Switchboard\\Node_2.log", + "last_trace_path": "", + "exclude_from_build": false, + "exclude_from_insights": false, + "last_sync_filter_hash": "", + "ue_command_line": "C:\\Program Files\\Epic Games\\UE_5.5\\Engine\\Binaries\\Win64\\UnrealEditor.exe \"D:\\Projects\\EasternMediterranean\\VPTemplate.uproject\" -game /Game/Levels/360VideoSphere -messaging -dc_cluster -nosplash -fixedseed -NoVerifyGC -noxrstereo -xrtrackingonly -RemoteControlIsHeadless -StageFriendlyName=\"Node_2\" -dc_cfg=\"C:\\Users\\1\\AppData\\Local\\Temp\\ndisplay\\8BACC80A43DD2A4CFBB6198B6FEB61DA.ndisplay\" -dx12 -dc_dev_mono -nosound -NoLoadingScreen -DisablePython -dc_node=Node_2 
Log=Node_2.log -ini:Engine:[/Script/Engine.Engine]:GameEngine=/Script/DisplayCluster.DisplayClusterGameEngine,[/Script/Engine.Engine]:GameViewportClientClassName=/Script/DisplayCluster.DisplayClusterViewportClient,[/Script/Engine.UserInterfaceSettings]:bAllowHighDPIInGameMode=True -ini:Game:[/Script/EngineSettings.GeneralProjectSettings]:bUseBorderlessWindow=True -ini:Input:[/Script/Engine.InputSettings]:DefaultPlayerInputClass=/Script/DisplayCluster.DisplayClusterPlayerInput -unattended -NoScreenMessages -handleensurepercent=0 -UDPMESSAGING_TRANSPORT_MULTICAST=\"224.0.0.1:6666\" -UDPMESSAGING_TRANSPORT_UNICAST=\"192.168.4.25:0\" -UDPMESSAGING_TRANSPORT_STATIC=\"192.168.4.35:9030\" -ExecCmds=\"DisableAllScreenMessages,LiveLink.Preset.Apply Preset=/Game/TheStudio/Presets/LiveLinkhubPreset.LiveLinkhubPreset\" -windowed -forceres WinX=0 WinY=0 ResX=4608 ResY=1536 -CONCERTRETRYAUTOCONNECTONERROR -CONCERTAUTOCONNECT -CONCERTSERVER=\"TheStudio_MU_Server_D+Ton9OT3hkk\" -CONCERTSESSION=\"MU_TheStudio_1\" -CONCERTDISPLAYNAME=\"Node_2\" -CONCERTISHEADLESS -DPCVars=\"Slate.bAllowNotifications=0,p.Chaos.Solver.Deterministic=1,LevelInstance.ForceEditorWorldMode=1,MediaUtils.StartupProfile=/Game/TheStudio/Media/Profiles/MediaProfile_Production.MediaProfile_Production\" ", + "window_position": [ + 0, + 0 + ], + "window_resolution": [ + 4608, + 1536 + ], + "fullscreen": false, + "headless": false, + "config_graphics_adapter": -1 + } + }, + "Shogun": { + "settings": { + "is_recording_device": true + } + }, + "SoundDevices": { + "settings": { + "is_recording_device": true + } + } + } +} \ No newline at end of file diff --git a/Content/TheStudio/nDisplay/TheStudioNDisplayConfig.uasset b/Content/TheStudio/nDisplay/TheStudioNDisplayConfig.uasset new file mode 100644 index 0000000..ea52194 Binary files /dev/null and b/Content/TheStudio/nDisplay/TheStudioNDisplayConfig.uasset differ diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..cde4ac6 --- /dev/null +++ b/LICENSE @@ 
-0,0 +1,10 @@ +This is free and unencumbered software released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or distribute this software, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means. + +In jurisdictions that recognize copyright laws, the author or authors of this software dedicate any and all copyright interest in the software to the public domain. We make this dedication for the benefit of the public at large and to the detriment of our heirs and +successors. We intend this dedication to be an overt act of relinquishment in perpetuity of all present and future rights to this software under copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +For more information, please refer to diff --git a/Plugins/NDIIO/Content/Editor/Icons/NDIBroadcastActorIcon_x20.png b/Plugins/NDIIO/Content/Editor/Icons/NDIBroadcastActorIcon_x20.png new file mode 100644 index 0000000..37326b5 Binary files /dev/null and b/Plugins/NDIIO/Content/Editor/Icons/NDIBroadcastActorIcon_x20.png differ diff --git a/Plugins/NDIIO/Content/Editor/Icons/NDIBroadcastActorIcon_x64.png b/Plugins/NDIIO/Content/Editor/Icons/NDIBroadcastActorIcon_x64.png new file mode 100644 index 0000000..49b96c7 Binary files /dev/null and b/Plugins/NDIIO/Content/Editor/Icons/NDIBroadcastActorIcon_x64.png differ diff --git a/Plugins/NDIIO/Content/Editor/Icons/NDIReceiveActorIcon_x20.png b/Plugins/NDIIO/Content/Editor/Icons/NDIReceiveActorIcon_x20.png new file mode 100644 index 0000000..2120da7 Binary files /dev/null and b/Plugins/NDIIO/Content/Editor/Icons/NDIReceiveActorIcon_x20.png differ diff --git a/Plugins/NDIIO/Content/Editor/Icons/NDIReceiveActorIcon_x64.png b/Plugins/NDIIO/Content/Editor/Icons/NDIReceiveActorIcon_x64.png new file mode 100644 index 0000000..67485b4 Binary files /dev/null and b/Plugins/NDIIO/Content/Editor/Icons/NDIReceiveActorIcon_x64.png differ diff --git a/Plugins/NDIIO/Content/Editor/Icons/NDIReceiverIcon_x20.png b/Plugins/NDIIO/Content/Editor/Icons/NDIReceiverIcon_x20.png new file mode 100644 index 0000000..a31be27 Binary files /dev/null and b/Plugins/NDIIO/Content/Editor/Icons/NDIReceiverIcon_x20.png differ diff --git a/Plugins/NDIIO/Content/Editor/Icons/NDIReceiverIcon_x64.png b/Plugins/NDIIO/Content/Editor/Icons/NDIReceiverIcon_x64.png new file mode 100644 index 0000000..68505b3 Binary files /dev/null and b/Plugins/NDIIO/Content/Editor/Icons/NDIReceiverIcon_x64.png differ diff --git a/Plugins/NDIIO/Content/Editor/Icons/NDISenderIcon_x20.png b/Plugins/NDIIO/Content/Editor/Icons/NDISenderIcon_x20.png new file mode 100644 index 0000000..06aec97 Binary files /dev/null and b/Plugins/NDIIO/Content/Editor/Icons/NDISenderIcon_x20.png differ 
diff --git a/Plugins/NDIIO/Content/Editor/Icons/NDISenderIcon_x64.png b/Plugins/NDIIO/Content/Editor/Icons/NDISenderIcon_x64.png new file mode 100644 index 0000000..4c6c5e5 Binary files /dev/null and b/Plugins/NDIIO/Content/Editor/Icons/NDISenderIcon_x64.png differ diff --git a/Plugins/NDIIO/Content/Editor/Icons/NDISoundWaveIcon_x20.png b/Plugins/NDIIO/Content/Editor/Icons/NDISoundWaveIcon_x20.png new file mode 100644 index 0000000..af81c39 Binary files /dev/null and b/Plugins/NDIIO/Content/Editor/Icons/NDISoundWaveIcon_x20.png differ diff --git a/Plugins/NDIIO/Content/Editor/Icons/NDISoundWaveIcon_x64.png b/Plugins/NDIIO/Content/Editor/Icons/NDISoundWaveIcon_x64.png new file mode 100644 index 0000000..40864ec Binary files /dev/null and b/Plugins/NDIIO/Content/Editor/Icons/NDISoundWaveIcon_x64.png differ diff --git a/Plugins/NDIIO/Content/Editor/Icons/NDIVideoTextureIcon_x20.png b/Plugins/NDIIO/Content/Editor/Icons/NDIVideoTextureIcon_x20.png new file mode 100644 index 0000000..85d7571 Binary files /dev/null and b/Plugins/NDIIO/Content/Editor/Icons/NDIVideoTextureIcon_x20.png differ diff --git a/Plugins/NDIIO/Content/Editor/Icons/NDIVideoTextureIcon_x64.png b/Plugins/NDIIO/Content/Editor/Icons/NDIVideoTextureIcon_x64.png new file mode 100644 index 0000000..d061665 Binary files /dev/null and b/Plugins/NDIIO/Content/Editor/Icons/NDIVideoTextureIcon_x64.png differ diff --git a/Plugins/NDIIO/Content/Materials/NDI_UI_SourceMaterial.uasset b/Plugins/NDIIO/Content/Materials/NDI_UI_SourceMaterial.uasset new file mode 100644 index 0000000..0eccc25 Binary files /dev/null and b/Plugins/NDIIO/Content/Materials/NDI_UI_SourceMaterial.uasset differ diff --git a/Plugins/NDIIO/Content/Materials/NDI_Unlit_SourceMaterial.uasset b/Plugins/NDIIO/Content/Materials/NDI_Unlit_SourceMaterial.uasset new file mode 100644 index 0000000..13d6513 Binary files /dev/null and b/Plugins/NDIIO/Content/Materials/NDI_Unlit_SourceMaterial.uasset differ diff --git a/Plugins/NDIIO/Documentation/NDI IO 
Plugin for Unreal Engine Documentation.pdf b/Plugins/NDIIO/Documentation/NDI IO Plugin for Unreal Engine Documentation.pdf new file mode 100644 index 0000000..e6d8792 Binary files /dev/null and b/Plugins/NDIIO/Documentation/NDI IO Plugin for Unreal Engine Documentation.pdf differ diff --git a/Plugins/NDIIO/Documentation/Quick Start.pdf b/Plugins/NDIIO/Documentation/Quick Start.pdf new file mode 100644 index 0000000..ec687b3 Binary files /dev/null and b/Plugins/NDIIO/Documentation/Quick Start.pdf differ diff --git a/Plugins/NDIIO/Documentation/Release Notes.pdf b/Plugins/NDIIO/Documentation/Release Notes.pdf new file mode 100644 index 0000000..da59f3f Binary files /dev/null and b/Plugins/NDIIO/Documentation/Release Notes.pdf differ diff --git a/Plugins/NDIIO/NDIIOPlugin.uplugin b/Plugins/NDIIO/NDIIOPlugin.uplugin new file mode 100644 index 0000000..bc7fe56 --- /dev/null +++ b/Plugins/NDIIO/NDIIOPlugin.uplugin @@ -0,0 +1,58 @@ +{ + "FileVersion": 3, + "Version": 11, + "VersionName": "3.8", + "FriendlyName": "NDI IO Plugin", + "Description": "Exposes Core NDI Support for integration into Unreal Engine Applications", + "Category": "Virtual Production", + "CreatedBy": "Vizrt NDI AB", + "CreatedByURL": "https://www.ndi.video", + "DocsURL": "", + "MarketplaceURL": "", + "SupportURL": "", + "EngineVersion": "5.5.0", + "CanContainContent": true, + "Installed": true, + "Modules": [ + { + "Name": "NDIIO", + "Type": "Runtime", + "LoadingPhase": "Default", + "PlatformAllowList": [ + "Win64", + "Linux", + "LinuxArm64" + ] + }, + { + "Name": "NDIIOEditor", + "Type": "Editor", + "LoadingPhase": "PostEngineInit", + "PlatformAllowList": [ + "Win64", + "Linux", + "LinuxArm64" + ] + }, + { + "Name": "NDIIOShaders", + "Type": "Runtime", + "LoadingPhase": "PostConfigInit", + "PlatformAllowList": [ + "Win64", + "Linux", + "LinuxArm64" + ] + } + ], + "Plugins": [ + { + "Name": "MediaIOFramework", + "Enabled": true + }, + { + "Name": "MediaFrameworkUtilities", + "Enabled": true + } + ] +} 
\ No newline at end of file diff --git a/Plugins/NDIIO/Resources/Icon128.png b/Plugins/NDIIO/Resources/Icon128.png new file mode 100644 index 0000000..44f0d3e Binary files /dev/null and b/Plugins/NDIIO/Resources/Icon128.png differ diff --git a/Plugins/NDIIO/Shaders/Private/NDIIOShaders.usf b/Plugins/NDIIO/Shaders/Private/NDIIOShaders.usf new file mode 100644 index 0000000..2454378 --- /dev/null +++ b/Plugins/NDIIO/Shaders/Private/NDIIOShaders.usf @@ -0,0 +1,237 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include "/Engine/Public/Platform.ush" +#include "/Engine/Generated/GeneratedUniformBuffers.ush" +#include "/Engine/Private/GammaCorrectionCommon.ush" + + +// Matches FNDIIOShaderPS::EColorCorrection enum +#define COLOR_CORRECTION_None 0 +#define COLOR_CORRECTION_sRGBToLinear 1 +#define COLOR_CORRECTION_LinearTosRGB 2 + + +void NDIIOMainVS( + in float4 InPosition : ATTRIBUTE0, + in float2 InUV : ATTRIBUTE1, + out float4 OutPosition : SV_POSITION, + out float2 OutUV : TEXCOORD0) +{ + OutPosition = InPosition; + OutUV = InUV; +} + + + +// Shader from 8 bits RGBA to 8 bits UYVY +void NDIIOBGRAtoUYVYPS( + float4 InPosition : SV_POSITION, + float2 InUV : TEXCOORD0, + out float4 OutColor : SV_Target0) +{ + float3x3 RGBToYCbCrMat = + { + 0.18300, 0.61398, 0.06201, + -0.10101, -0.33899, 0.43900, + 0.43902, -0.39900, -0.04001 + }; + float3 RGBToYCbCrVec = { 0.06302, 0.50198, 0.50203 }; + + float2 UV = NDIIOShaderUB.UVOffset + InUV * NDIIOShaderUB.UVScale; + float2 UVdelta = NDIIOShaderUB.UVScale * float2(2.0f/NDIIOShaderUB.OutputWidth, 1.0f/NDIIOShaderUB.OutputHeight); + float2 UV0 = UV + float2(-1.0f/4.0f, 0.0f) * UVdelta; + float2 UV1 = UV + float2( 1.0f/4.0f, 0.0f) * UVdelta; + + float3 YUV0 = RGBToYCbCrVec; + float3 YUV1 = RGBToYCbCrVec; + + 
if(all(UV0 >= float2(0,0)) && all(UV0 < float2(1,1))) + { + float4 RGBA0 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV0); + float3 RGB0 = (NDIIOShaderUB.ColorCorrection == COLOR_CORRECTION_LinearTosRGB) ? LinearToSrgb(RGBA0.xyz) : RGBA0.xyz; + YUV0 = mul(RGBToYCbCrMat, RGB0) + RGBToYCbCrVec; + } + if(all(UV1 >= float2(0,0)) && all(UV1 < float2(1,1))) + { + float4 RGBA1 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV1); + float3 RGB1 = (NDIIOShaderUB.ColorCorrection == COLOR_CORRECTION_LinearTosRGB) ? LinearToSrgb(RGBA1.xyz) : RGBA1.xyz; + YUV1 = mul(RGBToYCbCrMat, RGB1) + RGBToYCbCrVec; + } + + OutColor.xz = (YUV0.zy + YUV1.zy) / 2.f; + OutColor.y = YUV0.x; + OutColor.w = YUV1.x; +} + + +// Shader from 8 bits RGBA to 8 bits Alpha suitable for UYVA; even-numbered lines +void NDIIOBGRAtoAlphaEvenPS( + float4 InPosition : SV_POSITION, + float2 InUV : TEXCOORD0, + out float4 OutColor : SV_Target0) +{ + float2 UV = NDIIOShaderUB.UVOffset + InUV * NDIIOShaderUB.UVScale; + float2 UVdelta = NDIIOShaderUB.UVScale * float2(4.0f/NDIIOShaderUB.OutputWidth, 2.0f/NDIIOShaderUB.OutputHeight); + float2 UV0 = UV + float2(-3.0f/8.0f, -1.0f/4.0f) * UVdelta; + float2 UV1 = UV + float2(-1.0f/8.0f, -1.0f/4.0f) * UVdelta; + float2 UV2 = UV + float2( 1.0f/8.0f, -1.0f/4.0f) * UVdelta; + float2 UV3 = UV + float2( 3.0f/8.0f, -1.0f/4.0f) * UVdelta; + + float A0 = 0.0f; + float A1 = 0.0f; + float A2 = 0.0f; + float A3 = 0.0f; + + if(all(UV0 >= float2(0,0)) && all(UV0 < float2(1,1))) + { + float4 RGBA0 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV0); + A0 = RGBA0.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset; + } + if(all(UV1 >= float2(0,0)) && all(UV1 < float2(1,1))) + { + float4 RGBA1 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV1); + A1 = RGBA1.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset; + } + if(all(UV2 >= float2(0,0)) && all(UV2 < float2(1,1))) + { + float4 RGBA2 = 
NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV2); + A2 = RGBA2.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset; + } + if(all(UV3 >= float2(0,0)) && all(UV3 < float2(1,1))) + { + float4 RGBA3 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV3); + A3 = RGBA3.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset; + } + + OutColor.xyzw = float4(A2, A1, A0, A3); +} + + +// Shader from 8 bits RGBA to 8 bits Alpha suitable for UYVA; odd-numbered lines +void NDIIOBGRAtoAlphaOddPS( + float4 InPosition : SV_POSITION, + float2 InUV : TEXCOORD0, + out float4 OutColor : SV_Target0) +{ + float2 UV = NDIIOShaderUB.UVOffset + InUV * NDIIOShaderUB.UVScale; + float2 UVdelta = NDIIOShaderUB.UVScale * float2(4.0f/NDIIOShaderUB.OutputWidth, 2.0f/NDIIOShaderUB.OutputHeight); + float2 UV0 = UV + float2(-3.0f/8.0f, 1.0f/4.0f) * UVdelta; + float2 UV1 = UV + float2(-1.0f/8.0f, 1.0f/4.0f) * UVdelta; + float2 UV2 = UV + float2( 1.0f/8.0f, 1.0f/4.0f) * UVdelta; + float2 UV3 = UV + float2( 3.0f/8.0f, 1.0f/4.0f) * UVdelta; + + float A0 = 0.0f; + float A1 = 0.0f; + float A2 = 0.0f; + float A3 = 0.0f; + + if(all(UV0 >= float2(0,0)) && all(UV0 < float2(1,1))) + { + float4 RGBA0 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV0); + A0 = RGBA0.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset; + } + if(all(UV1 >= float2(0,0)) && all(UV1 < float2(1,1))) + { + float4 RGBA1 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV1); + A1 = RGBA1.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset; + } + if(all(UV2 >= float2(0,0)) && all(UV2 < float2(1,1))) + { + float4 RGBA2 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV2); + A2 = RGBA2.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset; + } + if(all(UV3 >= float2(0,0)) && all(UV3 < float2(1,1))) + { + float4 RGBA3 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV3); + A3 = RGBA3.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset; + } + + 
OutColor.xyzw = float4(A2, A1, A0, A3); +} + + +// Shader from 8 bits UYVY to 8 bits RGBA (alpha set to 1) +void NDIIOUYVYtoBGRAPS( + float4 InPosition : SV_POSITION, + float2 InUV : TEXCOORD0, + out float4 OutColor : SV_Target0) +{ + float3x3 YCbCrToRGBMat = + { + 1.16414, -0.0011, 1.7923, + 1.16390, -0.2131, -0.5342, + 1.16660, 2.1131, -0.0001 + }; + float3 YCbCrToRGBVec = { -0.9726, 0.3018, -1.1342 }; + + if(all(InUV >= float2(0,0)) && all(InUV < float2(1,1)) && all(InUV >= float2(0,0)) && all(InUV < float2(1,1))) + { + float4 UYVY = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerP, InUV); + + float PosX = 2.0f * InUV.x * NDIIOShaderUB.InputWidth; + float4 YUVA; + + float FracX = floor(PosX) % 2.0f; + YUVA.x = (1 - FracX) * UYVY.y + FracX * UYVY.w; + YUVA.yz = UYVY.zx; + YUVA.w = 1; + + OutColor.xyz = mul(YCbCrToRGBMat, YUVA.xyz) + YCbCrToRGBVec; + if(NDIIOShaderUB.ColorCorrection == COLOR_CORRECTION_sRGBToLinear) + OutColor.xyz = sRGBToLinear(OutColor.xyz); + OutColor.w = YUVA.w; + } + else + { + OutColor.xyz = YCbCrToRGBVec; + if(NDIIOShaderUB.ColorCorrection == COLOR_CORRECTION_sRGBToLinear) + OutColor.xyz = sRGBToLinear(OutColor.xyz); + OutColor.w = 1; + } +} + +// Shader from 8 bits UYVA to 8 bits RGBA +void NDIIOUYVAtoBGRAPS( + float4 InPosition : SV_POSITION, + float2 InUV : TEXCOORD0, + out float4 OutColor : SV_Target0) +{ + float3x3 YCbCrToRGBMat = + { + 1.16414, -0.0011, 1.7923, + 1.16390, -0.2131, -0.5342, + 1.16660, 2.1131, -0.0001 + }; + float3 YCbCrToRGBVec = { -0.9726, 0.3018, -1.1342 }; + + if(all(InUV >= float2(0,0)) && all(InUV < float2(1,1)) && all(InUV >= float2(0,0)) && all(InUV < float2(1,1))) + { + float4 UYVY = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerP, InUV); + float Alpha = NDIIOShaderUB.InputAlphaTarget.Sample(NDIIOShaderUB.SamplerP, InUV).w; + + float PosX = 2.0f * InUV.x * NDIIOShaderUB.InputWidth; + float4 YUVA; + + float FracX = floor(PosX) % 2.0f; + YUVA.x = (1 - FracX) * UYVY.y + FracX * UYVY.w; + YUVA.yz = 
UYVY.zx; + YUVA.w = Alpha; + + OutColor.xyz = mul(YCbCrToRGBMat, YUVA.xyz) + YCbCrToRGBVec; + if(NDIIOShaderUB.ColorCorrection == COLOR_CORRECTION_sRGBToLinear) + OutColor.xyz = sRGBToLinear(OutColor.xyz); + OutColor.w = YUVA.w; + } + else + { + OutColor.xyz = YCbCrToRGBVec; + if(NDIIOShaderUB.ColorCorrection == COLOR_CORRECTION_sRGBToLinear) + OutColor.xyz = sRGBToLinear(OutColor.xyz); + OutColor.w = 1; + } +} diff --git a/Plugins/NDIIO/Source/Core/Classes/Actors/NDIBroadcastActor.cpp b/Plugins/NDIIO/Source/Core/Classes/Actors/NDIBroadcastActor.cpp new file mode 100644 index 0000000..7dab868 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Actors/NDIBroadcastActor.cpp @@ -0,0 +1,81 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include + + + +ANDIBroadcastActor::ANDIBroadcastActor(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer) +{ + + this->ViewportCaptureComponent = ObjectInitializer.CreateDefaultSubobject(this, TEXT("ViewportCaptureComponent")); + this->ViewportCaptureComponent->AttachToComponent(this->RootComponent, FAttachmentTransformRules::KeepRelativeTransform); + + this->PTZController = ObjectInitializer.CreateDefaultSubobject(this, TEXT("PTZController")); +} + +void ANDIBroadcastActor::BeginPlay() +{ + Super::BeginPlay(); + + // validate the viewport capture component + if (IsValid(this->ViewportCaptureComponent)) + { + // Initialize the Capture Component with the media source + ViewportCaptureComponent->Initialize(this->NDIMediaSource); + } + + if (IsValid(this->PTZController)) + { + // Initialize the PTZ Controller with the media source + PTZController->Initialize(this->NDIMediaSource); + } + + if (IsValid(this->NDIMediaSource)) + { + this->NDIMediaSource->Initialize(this->SubmixCapture); + } +} + 
+FPTZState ANDIBroadcastActor::GetPTZStateFromUE() const +{ + FPTZState PTZState; + + PTZState.CameraTransform = GetActorTransform(); + FTransform Transform = FTransform::Identity; + if (IsValid(this->ViewportCaptureComponent)) + Transform = this->ViewportCaptureComponent->GetRelativeTransform(); + FQuat Rotation = Transform.GetRotation(); + FVector Euler = Rotation.Euler(); + PTZState.Pan = FMath::DegreesToRadians(Euler[2]); + PTZState.Tilt = FMath::DegreesToRadians(Euler[1]); + + if (IsValid(this->ViewportCaptureComponent)) + { + PTZState.FieldOfView = this->ViewportCaptureComponent->FOVAngle; + PTZState.FocusDistance = 1.f - 1.f / (this->ViewportCaptureComponent->PostProcessSettings.DepthOfFieldFocalDistance / 100.f + 1.f); + PTZState.bAutoFocus = (this->ViewportCaptureComponent->PostProcessSettings.bOverride_DepthOfFieldFocalDistance == true) ? false : true; + } + + return PTZState; +} + +void ANDIBroadcastActor::SetPTZStateToUE(const FPTZState& PTZState) +{ + SetActorTransform(PTZState.CameraTransform); + FVector Euler(0, FMath::RadiansToDegrees(PTZState.Tilt), FMath::RadiansToDegrees(PTZState.Pan)); + FQuat NewRotation = FQuat::MakeFromEuler(Euler); + + if (IsValid(this->ViewportCaptureComponent)) + { + this->ViewportCaptureComponent->SetRelativeLocationAndRotation(this->ViewportCaptureComponent->GetRelativeLocation(), NewRotation); + this->ViewportCaptureComponent->FOVAngle = PTZState.FieldOfView; + this->ViewportCaptureComponent->PostProcessSettings.DepthOfFieldFocalDistance = (1.f / FMath::Max(1 - PTZState.FocusDistance, 0.01f) - 1.f) * 100.f; + this->ViewportCaptureComponent->PostProcessSettings.DepthOfFieldFocalDistance = FMath::Max(this->ViewportCaptureComponent->PostProcessSettings.DepthOfFieldFocalDistance, SMALL_NUMBER); + this->ViewportCaptureComponent->PostProcessSettings.bOverride_DepthOfFieldFocalDistance = (PTZState.bAutoFocus == true) ? 
false : true; + } +} diff --git a/Plugins/NDIIO/Source/Core/Classes/Actors/NDIReceiveActor.cpp b/Plugins/NDIIO/Source/Core/Classes/Actors/NDIReceiveActor.cpp new file mode 100644 index 0000000..387468a --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Actors/NDIReceiveActor.cpp @@ -0,0 +1,369 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +ANDIReceiveActor::ANDIReceiveActor(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer) +{ + // Get the Engine's 'Plane' static mesh + static ConstructorHelpers::FObjectFinder MeshObject( + TEXT("StaticMesh'/Engine/BasicShapes/Plane.Plane'")); + static ConstructorHelpers::FObjectFinder MaterialObject( + TEXT("Material'/NDIIOPlugin/Materials/NDI_Unlit_SourceMaterial.NDI_Unlit_SourceMaterial'")); + + // Ensure that the object is valid + if (MeshObject.Object) + { + // Create the static mesh component visual + this->VideoMeshComponent = + ObjectInitializer.CreateDefaultSubobject(this, TEXT("VideoMeshComponent"), true); + + // setup the attachment and modify the position, rotation, and mesh properties + this->VideoMeshComponent->SetupAttachment(RootComponent); + this->VideoMeshComponent->SetStaticMesh(MeshObject.Object); + this->VideoMeshComponent->SetRelativeRotation(FQuat::MakeFromEuler(FVector(90.0f, 0.0f, 90.0f))); + this->VideoMeshComponent->SetRelativeScale3D(FVector(FrameWidth / 100.0f, FrameHeight / 100.0f, 1.0f)); + + this->VideoMeshComponent->SetCollisionResponseToAllChannels(ECR_Ignore); + this->VideoMeshComponent->SetCollisionEnabled(ECollisionEnabled::NoCollision); + this->VideoMeshComponent->SetCollisionObjectType(ECC_WorldDynamic); + + // This is object is mainly used 
for simple tests and things that don't require + // additional material shading support, store the an unlit source material to display + this->VideoMaterial = MaterialObject.Object; + + // If the material is valid + if (this->VideoMaterial) + { + // Set the Mesh Material to the Video Material + this->VideoMeshComponent->SetMaterial(0, this->VideoMaterial); + } + } + + this->AudioComponent = ObjectInitializer.CreateDefaultSubobject(this, TEXT("AudioComponent"), true); + this->AudioComponent->SetupAttachment(RootComponent); + this->AudioComponent->SetRelativeLocationAndRotation(FVector::ZeroVector, FRotator::ZeroRotator); + this->AudioComponent->SetRelativeScale3D(FVector::OneVector); + + this->bAllowTickBeforeBeginPlay = false; + this->PrimaryActorTick.bCanEverTick = true; +} + +void ANDIReceiveActor::BeginPlay() +{ + // call the base implementation for 'BeginPlay' + Super::BeginPlay(); + + // We need to validate that we have media source, so we can set the texture in the material instance + if (IsValid(this->NDIMediaSource)) + { + this->NDIMediaSource->Initialize(UNDIMediaReceiver::EUsage::Standalone); + + // Validate the Video Material Instance so we can set the texture used in the NDI Media source + if (IsValid(this->VideoMaterial)) + { + // create and set the instance material from the MaterialObject + VideoMaterialInstance = + this->VideoMeshComponent->CreateAndSetMaterialInstanceDynamicFromMaterial(0, this->VideoMaterial); + + // Ensure we have a valid material instance + if (IsValid(VideoMaterialInstance)) + { + // Enable/disable the color and/or alpha channels + this->VideoMaterialInstance->SetScalarParameterValue("Enable Video Texture", bEnableColor ? 1.0f : 0.0f); + this->VideoMaterialInstance->SetScalarParameterValue("Enable Video Alpha", bEnableAlpha ? 
1.0f : 0.0f); + + this->NDIMediaSource->UpdateMaterialTexture(VideoMaterialInstance, "Video Texture"); + } + } + + // Define the basic parameters for constructing temporary audio wave object + FString AudioSource = FString::Printf(TEXT("AudioSource_%s"), *GetFName().ToString().Right(1)); + FName AudioWaveName = FName(*AudioSource); + EObjectFlags Flags = RF_Public | RF_Standalone | RF_Transient | RF_MarkAsNative; + + // Construct a temporary audio sound wave to be played by this component + this->AudioSoundWave = NewObject(GetTransientPackage(), UNDIMediaSoundWave::StaticClass(), + AudioWaveName, Flags); + + // Ensure the validity of the temporary sound wave object + if (IsValid(this->AudioSoundWave)) + { + // Set the number of channels + bStoppedForChannelsMode = false; + ApplyChannelsMode(); + + // Set the sound of the Audio Component and Ensure playback + this->AudioComponent->SetSound(this->AudioSoundWave); + + // Ensure we register the audio wave object with the media. + this->NDIMediaSource->RegisterAudioWave(AudioSoundWave); + } + + if (this->NDIMediaSource->GetCurrentConnectionInformation().IsValid()) + { + if (IsValid(AudioComponent)) + { + // we should play the audio, if we want audio playback + if (bEnableAudioPlayback) + { + this->AudioComponent->Play(0.0f); + } + + // otherwise just stop + else + { + this->AudioComponent->Stop(); + this->bStoppedForChannelsMode = false; + } + } + } + + // Add a lambda to the OnReceiverConnected Event + else + this->NDIMediaSource->OnNDIReceiverConnectedEvent.AddWeakLambda(this, [&](UNDIMediaReceiver*) { + // Ensure that the audio component is valid + if (IsValid(AudioComponent)) + { + // we should play the audio, if we want audio playback + if (bEnableAudioPlayback) + { + this->AudioComponent->Play(0.0f); + } + + // otherwise just stop + else + { + this->AudioComponent->Stop(); + this->bStoppedForChannelsMode = false; + } + } + }); + } +} + +void ANDIReceiveActor::EndPlay(const EEndPlayReason::Type EndPlayReason) +{ + 
Super::EndPlay(EndPlayReason); + + this->bStoppedForChannelsMode = false; + + // Ensure we have a valid material instance + if (EndPlayReason == EEndPlayReason::EndPlayInEditor && IsValid(VideoMaterialInstance)) + { + // Ensure that the video texture is disabled + this->VideoMaterialInstance->SetScalarParameterValue("Enable Video Texture", 0.0f); + this->VideoMaterialInstance->SetScalarParameterValue("Enable Video Alpha", 0.0f); + } +} + +void ANDIReceiveActor::Tick(float DeltaTime) +{ + Super::Tick(DeltaTime); + + ApplyChannelsMode(); +} + +void ANDIReceiveActor::ApplyChannelsMode() +{ + if (IsValid(this->AudioComponent) && IsValid(this->NDIMediaSource) && IsValid(this->AudioSoundWave)) + { + int32 NewNumChannels = 0; + switch(this->AudioPlaybackChannels) + { + case ENDIAudioChannels::Mono: + NewNumChannels = 1; + break; + case ENDIAudioChannels::Stereo: + NewNumChannels = 2; + break; + case ENDIAudioChannels::Source: + NewNumChannels = this->NDIMediaSource->GetAudioChannels(); + break; + } + + if ((NewNumChannels != 0) && (NewNumChannels != this->AudioSoundWave->NumChannels)) + { + bool isPlaying = this->AudioComponent->IsPlaying(); + if (isPlaying) + { + this->AudioComponent->Stop(); + bStoppedForChannelsMode = true; + } + else + { + this->AudioSoundWave->NumChannels = NewNumChannels; + + if (bEnableAudioPlayback && bStoppedForChannelsMode) + { + this->AudioComponent->Play(0.0f); + } + bStoppedForChannelsMode = false; + } + } + } +} + +/** + Attempts to set the desired frame size in cm, represented in the virtual scene +*/ +void ANDIReceiveActor::SetFrameSize(FVector2D InFrameSize) +{ + // clamp the values to the lowest we'll allow + const float frame_height = FMath::Max(InFrameSize.Y, 0.00001f); + const float frame_width = FMath::Max(InFrameSize.X, 0.00001f); + + FrameWidth = frame_width; + FrameHeight = frame_height; + + // validate the static mesh component + if (IsValid(this->VideoMeshComponent)) + { + // change the scale of the video + 
this->VideoMeshComponent->SetRelativeScale3D(FVector(FrameWidth / 100.0f, FrameHeight / 100.0f, 1.0f)); + } +} + +void ANDIReceiveActor::SetFrameHeight(const float& InFrameHeight) +{ + // Clamp the Frame Height to a minimal value + FrameHeight = FMath::Max(InFrameHeight, 0.00001f); + + // Call the function to set the frame size with the newly clamped value + SetFrameSize(FVector2D(FrameWidth, FrameHeight)); +} + +void ANDIReceiveActor::SetFrameWidth(const float& InFrameWidth) +{ + // Clamp the Frame Width to a minimal value + FrameWidth = FMath::Max(InFrameWidth, 0.00001f); + + // Call the function to set the frame size with the newly clamped value + SetFrameSize(FVector2D(FrameWidth, FrameHeight)); +} + +void ANDIReceiveActor::UpdateAudioPlayback(const bool& Enabled) +{ + // Ensure validity and we are currently playing + if (IsValid(this->AudioComponent)) + { + // Stop playback when possible + if (Enabled) + { + // Start the playback + this->AudioComponent->Play(0.0f); + } + + // otherwise just stop playback (even if it's not playing) + else + this->AudioComponent->Stop(); + } +} + +void ANDIReceiveActor::UpdateAudioPlaybackChannels(const ENDIAudioChannels& Channels) +{} + + +void ANDIReceiveActor::EnableColor(const bool& Enabled) +{ + bEnableColor = Enabled; + if (IsValid(VideoMaterialInstance)) + { + // Enable/disable the color channels + this->VideoMaterialInstance->SetScalarParameterValue("Enable Video Texture", bEnableColor ? 1.0f : 0.0f); + } +} + +void ANDIReceiveActor::EnableAlpha(const bool& Enabled) +{ + bEnableAlpha = Enabled; + if (IsValid(VideoMaterialInstance)) + { + // Enable/disable the alpha channel + this->VideoMaterialInstance->SetScalarParameterValue("Enable Video Alpha", bEnableAlpha ? 
1.0f : 0.0f); + } +} + +/** + Returns the current frame size of the 'VideoMeshComponent' for this object +*/ +const FVector2D ANDIReceiveActor::GetFrameSize() const +{ + return FVector2D(FrameWidth, FrameHeight); +} + +#if WITH_EDITORONLY_DATA + +void ANDIReceiveActor::PreEditChange(FProperty* InProperty) +{ + // call the base class 'PreEditChange' + Super::PreEditChange(InProperty); +} + +void ANDIReceiveActor::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) +{ + // get the name of the property which changed + FName PropertyName = + (PropertyChangedEvent.Property != nullptr) ? PropertyChangedEvent.Property->GetFName() : NAME_None; + + // compare against the 'FrameHeight' property + if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, FrameHeight)) + { + // resize the frame + SetFrameSize(FVector2D(FrameWidth, FrameHeight)); + } + + // compare against the 'FrameWidth' property + else if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, FrameWidth)) + { + // resize the frame + SetFrameSize(FVector2D(FrameWidth, FrameHeight)); + } + + // compare against the 'bEnableAudioPlayback' property + else if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, bEnableAudioPlayback)) + { + // start or stop the audio playback depending on state + UpdateAudioPlayback(bEnableAudioPlayback); + } + + // compare against the 'AudioPlaybackChannels' property + else if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, AudioPlaybackChannels)) + { + // start or stop the audio playback depending on state + UpdateAudioPlaybackChannels(AudioPlaybackChannels); + } + + // compare against the 'bEnableColor' property + else if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, bEnableColor)) + { + // enable or disable color channels (if it exists) + EnableColor(bEnableColor); + } + + // compare against the 'bEnableAlpha' property + else if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, bEnableAlpha)) + { + // enable or 
disable alpha channel (if it exists) + EnableAlpha(bEnableAlpha); + } + + // call the base class 'PostEditChangeProperty' + Super::PostEditChangeProperty(PropertyChangedEvent); +} + +#endif diff --git a/Plugins/NDIIO/Source/Core/Classes/Assets/NDITimecodeProvider.cpp b/Plugins/NDIIO/Source/Core/Classes/Assets/NDITimecodeProvider.cpp new file mode 100644 index 0000000..1a59fc9 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Assets/NDITimecodeProvider.cpp @@ -0,0 +1,101 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include + + +UNDITimecodeProvider::UNDITimecodeProvider(const FObjectInitializer& ObjectInitializer) + : Super(ObjectInitializer) +{} + +bool UNDITimecodeProvider::FetchTimecode(FQualifiedFrameTime& OutFrameTime) +{ + FScopeLock Lock(&this->StateSyncContext); + + if (!IsValid(this->NDIMediaSource) || + (GetSynchronizationState() != ETimecodeProviderSynchronizationState::Synchronized)) + { + return false; + } + + OutFrameTime = this->MostRecentFrameTime; + + return true; +} + +ETimecodeProviderSynchronizationState UNDITimecodeProvider::GetSynchronizationState() const +{ + FScopeLock Lock(&this->StateSyncContext); + + if (!IsValid(this->NDIMediaSource)) + return ETimecodeProviderSynchronizationState::Closed; + + return this->State; +} + +bool UNDITimecodeProvider::Initialize(UEngine* InEngine) +{ + this->State = ETimecodeProviderSynchronizationState::Closed; + + if (!IsValid(this->NDIMediaSource)) + { + this->State = ETimecodeProviderSynchronizationState::Error; + return false; + } + + this->NDIMediaSource->Initialize(UNDIMediaReceiver::EUsage::Standalone); + + this->VideoCaptureEventHandle = this->NDIMediaSource->OnNDIReceiverVideoCaptureEvent.AddLambda([this](UNDIMediaReceiver* Receiver, const NDIlib_video_frame_v2_t& 
VideoFrame) + { + const FFrameRate Rate = Receiver->GetCurrentFrameRate(); + const FTimecode Timecode = Receiver->GetCurrentTimecode(); + + FScopeLock Lock(&this->StateSyncContext); + this->State = ETimecodeProviderSynchronizationState::Synchronized; + this->MostRecentFrameTime = FQualifiedFrameTime(Timecode, Rate); + }); + this->ConnectedEventHandle = this->NDIMediaSource->OnNDIReceiverConnectedEvent.AddLambda([this](UNDIMediaReceiver* Receiver) + { + FScopeLock Lock(&this->StateSyncContext); + this->State = ETimecodeProviderSynchronizationState::Synchronizing; + }); + this->DisconnectedEventHandle = this->NDIMediaSource->OnNDIReceiverDisconnectedEvent.AddLambda([this](UNDIMediaReceiver* Receiver) + { + FScopeLock Lock(&this->StateSyncContext); + this->State = ETimecodeProviderSynchronizationState::Closed; + }); + + return true; +} + +void UNDITimecodeProvider::Shutdown(UEngine* InEngine) +{ + ReleaseResources(); +} + + +void UNDITimecodeProvider::BeginDestroy() +{ + ReleaseResources(); + + Super::BeginDestroy(); +} + +void UNDITimecodeProvider::ReleaseResources() +{ + if(IsValid(this->NDIMediaSource)) + { + this->NDIMediaSource->OnNDIReceiverVideoCaptureEvent.Remove(this->VideoCaptureEventHandle); + this->NDIMediaSource->OnNDIReceiverConnectedEvent.Remove(this->ConnectedEventHandle); + this->NDIMediaSource->OnNDIReceiverDisconnectedEvent.Remove(this->DisconnectedEventHandle); + } + this->VideoCaptureEventHandle.Reset(); + this->ConnectedEventHandle.Reset(); + this->DisconnectedEventHandle.Reset(); + + this->State = ETimecodeProviderSynchronizationState::Closed; +} diff --git a/Plugins/NDIIO/Source/Core/Classes/Components/NDIBroadcastComponent.cpp b/Plugins/NDIIO/Source/Core/Classes/Components/NDIBroadcastComponent.cpp new file mode 100644 index 0000000..f10b4fc --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Components/NDIBroadcastComponent.cpp @@ -0,0 +1,153 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. 
+ + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include + +UNDIBroadcastComponent::UNDIBroadcastComponent(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer) +{} + +/** + Initialize this component with the media source required for sending NDI audio, video, and metadata. + Returns false if the MediaSource has already been set. This is usually the case when this component is + initialized in Blueprints. +*/ +bool UNDIBroadcastComponent::Initialize(UNDIMediaSender* InMediaSource) +{ + // is the media source already set? + if (this->NDIMediaSource == nullptr && InMediaSource != nullptr) + { + // we passed validation, so set the media source + this->NDIMediaSource = InMediaSource; + } + + // did we pass validation + return InMediaSource != nullptr && InMediaSource == NDIMediaSource; +} + +/** + Attempts to start broadcasting audio, video, and metadata via the 'NDIMediaSource' associated with this object + + @param ErrorMessage The error message received when the media source is unable to start broadcasting + @result Indicates whether this object successfully started broadcasting +*/ +bool UNDIBroadcastComponent::StartBroadcasting(FString& ErrorMessage) +{ + // validate the Media Source object + if (IsValid(NDIMediaSource)) + { + // call the media source implementation of the function + NDIMediaSource->Initialize(nullptr); + + // the underlying functionality will always return 'true' + return true; + } + + // We have no media source to broadcast + ErrorMessage = TEXT("No Media Source present to broadcast"); + + // looks like we don't have a media source to broadcast + return false; +} + +/** + Changes the name of the sender object as seen on the network for remote connections + + @param InSourceName The new name of the source to be identified as on the network +*/ +void 
UNDIBroadcastComponent::ChangeSourceName(const FString& InSourceName) +{ + // validate the Media Source object + if (IsValid(NDIMediaSource)) + { + // call the media source implementation of the function + NDIMediaSource->ChangeSourceName(InSourceName); + } +} + +/** + Attempts to change the Broadcast information associated with this media object + + @param InConfiguration The new configuration to broadcast +*/ +void UNDIBroadcastComponent::ChangeBroadcastConfiguration(const FNDIBroadcastConfiguration& InConfiguration) +{ + // validate the Media Source object + if (IsValid(NDIMediaSource)) + { + // call the media source implementation of the function + NDIMediaSource->ChangeBroadcastConfiguration(InConfiguration); + } +} + +/** + Attempts to change the RenderTarget used in sending video frames over NDI + + @param BroadcastTexture The texture to use as video, while broadcasting over NDI +*/ +void UNDIBroadcastComponent::ChangeBroadcastTexture(UTextureRenderTarget2D* BroadcastTexture) +{ + // validate the Media Source object + if (IsValid(NDIMediaSource)) + { + // call the media source implementation of the function + NDIMediaSource->ChangeVideoTexture(BroadcastTexture); + } +} + +/** + Determines the current tally information. 
If you specify a timeout then it will wait until it has + changed, otherwise it will simply poll it and return the current tally immediately + + @param IsOnPreview - A state indicating whether this source in on preview of a receiver + @param IsOnProgram - A state indicating whether this source is on program of a receiver +*/ +void UNDIBroadcastComponent::GetTallyInformation(bool& IsOnPreview, bool& IsOnProgram) +{ + // Initialize the properties + IsOnPreview = false; + IsOnProgram = false; + + // validate the Media Source object + if (IsValid(NDIMediaSource)) + { + // call the media source implementation of the function + NDIMediaSource->GetTallyInformation(IsOnPreview, IsOnProgram, 0); + } +} + +/** + Gets the current number of receivers connected to this source. This can be used to avoid rendering + when nothing is connected to the video source. which can significantly improve the efficiency if + you want to make a lot of sources available on the network + + @param Result The total number of connected receivers attached to the broadcast of this object +*/ +void UNDIBroadcastComponent::GetNumberOfConnections(int32& Result) +{ + // Initialize the property + Result = 0; + + // validate the Media Source object + if (IsValid(NDIMediaSource)) + { + // call the media source implementation of the function + NDIMediaSource->GetNumberOfConnections(Result); + } +} + +/** + Attempts to immediately stop sending frames over NDI to any connected receivers +*/ +void UNDIBroadcastComponent::StopBroadcasting() +{ + // validate the Media Source object + if (IsValid(NDIMediaSource)) + { + // call the media source implementation of the function + NDIMediaSource->Shutdown(); + } +} \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Classes/Components/NDIFinderComponent.cpp b/Plugins/NDIIO/Source/Core/Classes/Components/NDIFinderComponent.cpp new file mode 100644 index 0000000..9a55aa7 --- /dev/null +++ 
b/Plugins/NDIIO/Source/Core/Classes/Components/NDIFinderComponent.cpp @@ -0,0 +1,104 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include +#include + +UNDIFinderComponent::UNDIFinderComponent(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer) {} + +void UNDIFinderComponent::BeginPlay() +{ + Super::BeginPlay(); + + // Provide some sense of thread-safety + FScopeLock Lock(&CollectionSyncContext); + + // Update the NetworkSourceCollection with the sources that the service has already found + FNDIFinderService::UpdateSourceCollection(NetworkSourceCollection); + + // Ensure that we are subscribed to the collection changed notification so we can handle it locally + FNDIFinderService::EventOnNDISourceCollectionChanged.AddUObject( + this, &UNDIFinderComponent::OnNetworkSourceCollectionChangedEvent); +} + +void UNDIFinderComponent::EndPlay(const EEndPlayReason::Type EndPlayReason) +{ + Super::EndPlay(EndPlayReason); + + // Provide some sense of thread-safety + FScopeLock Lock(&CollectionSyncContext); + + // Empty the source collection + this->NetworkSourceCollection.Empty(0); + + // Ensure that we are no longer subscribed to collection change notifications + FNDIFinderService::EventOnNDISourceCollectionChanged.RemoveAll(this); +} + +/** + An Event handler for when the NDI Finder Service notifies listeners that changes have been + detected in the network source collection +*/ +void UNDIFinderComponent::OnNetworkSourceCollectionChangedEvent() +{ + // Since we don't poll the NDIFinderService for network sources, we subscribe to the change notification. + // Now we need to update the Network Source Collection, but we need to do it in a thread-safe way. 
+ + FScopeLock Lock(&CollectionSyncContext); + + // Check to determine if something actually changed within the collection. We don't want to trigger + // notifications unnecessarily. + if (FNDIFinderService::UpdateSourceCollection(NetworkSourceCollection)) + { + // Trigger the blueprint handling of the situation. + this->OnNetworkSourcesChangedEvent(); + + // If any listeners have subscribed broadcast any collection changes + if (this->OnNetworkSourcesChanged.IsBound()) + this->OnNetworkSourcesChanged.Broadcast(this); + } +} + +/** + Attempts to find a network source by the supplied name. + + @param ConnectionInformation An existing source information structure which contains the source name + @param InSourceName A string value representing the name of the source to find + @result A value indicating whether a source with the supplied name was found +*/ +const bool UNDIFinderComponent::FindNetworkSourceByName(FNDIConnectionInformation& ConnectionInformation, + FString InSourceName) +{ + // Lock the Collection so that we are working with a solid collection of items + FScopeLock Lock(&CollectionSyncContext); + + // Ensure we Reset the SourceInformation + ConnectionInformation.Reset(); + + for (const auto& connectionInfo : NetworkSourceCollection) + { + if (InSourceName.Equals(connectionInfo.SourceName, ESearchCase::IgnoreCase)) + { + ConnectionInformation = connectionInfo; + return true; + } + } + + return false; +} + +/** + Returns the current collection of sources found on the network +*/ +const TArray UNDIFinderComponent::GetNetworkSources() +{ + // Lock the current source collection + FScopeLock Lock(&CollectionSyncContext); + + // return the source collection + return this->NetworkSourceCollection; +} \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Classes/Components/NDIPTZControllerComponent.cpp b/Plugins/NDIIO/Source/Core/Classes/Components/NDIPTZControllerComponent.cpp new file mode 100644 index 0000000..b98273a --- /dev/null +++ 
b/Plugins/NDIIO/Source/Core/Classes/Components/NDIPTZControllerComponent.cpp @@ -0,0 +1,471 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include +#include + +#include + + +/** + Parsers for PTZ metadata +*/ + +class NDIXmlElementParser_ntk_ptz_pan_tilt_speed : public NDIXmlElementParser +{ +public: + NDIXmlElementParser_ntk_ptz_pan_tilt_speed(UPTZController* PTZControllerIn) + : PTZController(PTZControllerIn) + {} + + virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData) + { + PanSpeed = 0.0; + TiltSpeed = 0.0; + + return true; + } + + virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override + { + if(FCString::Strcmp(TEXT("pan_speed"), AttributeName) == 0) + { + PanSpeed = FCString::Atod(AttributeValue); + } + else if(FCString::Strcmp(TEXT("tilt_speed"), AttributeName) == 0) + { + TiltSpeed = FCString::Atod(AttributeValue); + } + + return true; + } + + virtual bool ProcessClose(const TCHAR* ElementName) override + { + PTZController->SetPTZPanTiltSpeed(PanSpeed, TiltSpeed); + + return true; + } + +protected: + UPTZController* PTZController; + + double PanSpeed { 0.0 }; + double TiltSpeed { 0.0 }; +}; + +class NDIXmlElementParser_ntk_ptz_zoom_speed : public NDIXmlElementParser +{ +public: + NDIXmlElementParser_ntk_ptz_zoom_speed(UPTZController* PTZControllerIn) + : PTZController(PTZControllerIn) + {} + + virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData) + { + ZoomSpeed = 0.0; + + return true; + } + + virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override + { + if(FCString::Strcmp(TEXT("zoom_speed"), AttributeName) == 0) + { + ZoomSpeed = FCString::Atod(AttributeValue); + } + + return true; + } + + virtual bool 
ProcessClose(const TCHAR* ElementName) override + { + PTZController->SetPTZZoomSpeed(ZoomSpeed); + + return true; + } + +protected: + UPTZController* PTZController; + + double ZoomSpeed { 0.0 }; +}; + +class NDIXmlElementParser_ntk_ptz_focus : public NDIXmlElementParser +{ +public: + NDIXmlElementParser_ntk_ptz_focus(UPTZController* PTZControllerIn) + : PTZController(PTZControllerIn) + {} + + virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData) + { + AutoMode = true; + Distance = 0.5; + + return true; + } + + virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override + { + if(FCString::Strcmp(TEXT("mode"), AttributeName) == 0) + { + if(FCString::Strcmp(TEXT("manual"), AttributeValue) == 0) + AutoMode = false; + } + else if(FCString::Strcmp(TEXT("distance"), AttributeName) == 0) + { + Distance = FCString::Atod(AttributeValue); + } + + return true; + } + + virtual bool ProcessClose(const TCHAR* ElementName) override + { + PTZController->SetPTZFocus(AutoMode, Distance); + + return true; + } + +protected: + UPTZController* PTZController; + + bool AutoMode { true }; + double Distance { 0.5 }; +}; + +class NDIXmlElementParser_ntk_ptz_store_preset : public NDIXmlElementParser +{ +public: + NDIXmlElementParser_ntk_ptz_store_preset(UPTZController* PTZControllerIn) + : PTZController(PTZControllerIn) + {} + + virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData) + { + StoreIndex = -1; + + return true; + } + + virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override + { + if(FCString::Strcmp(TEXT("index"), AttributeName) == 0) + { + StoreIndex = FCString::Atoi(AttributeValue); + } + + return true; + } + + virtual bool ProcessClose(const TCHAR* ElementName) override + { + if(StoreIndex >= 0) + { + PTZController->StorePTZState(StoreIndex); + } + + return true; + } + +protected: + UPTZController* PTZController; + + int StoreIndex { -1 }; +}; + +class 
NDIXmlElementParser_ntk_ptz_recall_preset : public NDIXmlElementParser +{ +public: + NDIXmlElementParser_ntk_ptz_recall_preset(UPTZController* PTZControllerIn) + : PTZController(PTZControllerIn) + {} + + virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData) + { + RecallIndex = -1; + + return true; + } + + virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override + { + if(FCString::Strcmp(TEXT("index"), AttributeName) == 0) + { + RecallIndex = FCString::Atoi(AttributeValue); + } + + return true; + } + + virtual bool ProcessClose(const TCHAR* ElementName) override + { + if(RecallIndex >= 0) + { + PTZController->RecallPTZState(RecallIndex); + } + + return true; + } + +protected: + UPTZController* PTZController; + + int RecallIndex { -1 }; +}; + + +/** + PTZ controller component +*/ +UPTZController::UPTZController() +{ + this->bWantsInitializeComponent = true; + + this->PrimaryComponentTick.bAllowTickOnDedicatedServer = false; + this->PrimaryComponentTick.bCanEverTick = true; + this->PrimaryComponentTick.bHighPriority = true; + this->PrimaryComponentTick.bRunOnAnyThread = false; + this->PrimaryComponentTick.bStartWithTickEnabled = true; + this->PrimaryComponentTick.bTickEvenWhenPaused = true; + + this->NDIMetadataParser = MakeShareable(new NDIXmlParser()); + this->NDIMetadataParser->AddElementParser("ntk_ptz_pan_tilt_speed", MakeShareable(new NDIXmlElementParser_ntk_ptz_pan_tilt_speed(this))); + this->NDIMetadataParser->AddElementParser("ntk_ptz_zoom_speed", MakeShareable(new NDIXmlElementParser_ntk_ptz_zoom_speed(this))); + this->NDIMetadataParser->AddElementParser("ntk_ptz_focus", MakeShareable(new NDIXmlElementParser_ntk_ptz_focus(this))); + this->NDIMetadataParser->AddElementParser("ntk_ptz_store_preset", MakeShareable(new NDIXmlElementParser_ntk_ptz_store_preset(this))); + this->NDIMetadataParser->AddElementParser("ntk_ptz_recall_preset", MakeShareable(new 
NDIXmlElementParser_ntk_ptz_recall_preset(this))); +} + +UPTZController::~UPTZController() +{} + +void UPTZController::InitializeComponent() +{ + Super::InitializeComponent(); + + if (IsValid(NDIMediaSource)) + { + // Ensure the PTZ controller is subscribed to the sender receiving metadata + this->NDIMediaSource->OnSenderMetaDataReceived.RemoveAll(this); + this->NDIMediaSource->OnSenderMetaDataReceived.AddDynamic(this, &UPTZController::ReceiveMetaDataFromSender); + } +} + +bool UPTZController::Initialize(UNDIMediaSender* InMediaSource) +{ + // is the media source already set? + if (this->NDIMediaSource == nullptr && InMediaSource != nullptr) + { + // we passed validation, so set the media source + this->NDIMediaSource = InMediaSource; + + // validate the Media Source object + if (IsValid(NDIMediaSource)) + { + // Ensure the PTZ controller is subscribed to the sender receiving metadata + this->NDIMediaSource->OnSenderMetaDataReceived.RemoveAll(this); + this->NDIMediaSource->OnSenderMetaDataReceived.AddDynamic(this, &UPTZController::ReceiveMetaDataFromSender); + } + } + + // did we pass validation + return InMediaSource != nullptr && InMediaSource == NDIMediaSource; +} + +void UPTZController::SetPTZPanTiltSpeed(float PanSpeed, float TiltSpeed) +{ + PTZPanSpeed = PanSpeed; + PTZTiltSpeed = TiltSpeed; + + OnPTZPanTiltSpeed.Broadcast(PanSpeed, TiltSpeed); +} + +void UPTZController::SetPTZZoomSpeed(float ZoomSpeed) +{ + PTZZoomSpeed = ZoomSpeed; + + OnPTZZoomSpeed.Broadcast(ZoomSpeed); +} + +void UPTZController::SetPTZFocus(bool AutoMode, float Distance) +{ + FPTZState PTZState = GetPTZStateFromUE(); + PTZState.FocusDistance = Distance; + PTZState.bAutoFocus = AutoMode; + SetPTZStateToUE(PTZState); + + OnPTZFocus.Broadcast(AutoMode, Distance); +} + +void UPTZController::StorePTZState(int Index) +{ + if((Index >= 0) && (Index < 256)) + { + FPTZState PTZState = GetPTZStateFromUE(); + + if(Index >= PTZStoredStates.Num()) + PTZStoredStates.SetNum(Index+1); + 
PTZStoredStates[Index] = PTZState; + + OnPTZStore.Broadcast(Index); + } +} + +void UPTZController::RecallPTZState(int Index) +{ + if((Index >= 0) && (Index < PTZStoredStates.Num())) + { + if(PTZRecallEasing > 0) + { + PTZStateInterp.PTZTargetState = PTZStoredStates[Index]; + PTZStateInterp.EasingDuration = PTZRecallEasing; + PTZStateInterp.EasingRemaining = PTZStateInterp.EasingDuration; + } + else + { + SetPTZStateToUE(PTZStoredStates[Index]); + } + } + + OnPTZRecall.Broadcast(Index); +} + +FPTZState UPTZController::GetPTZStateFromUE() const +{ + AActor* OwnerActor = GetOwner(); + + IPTZControllableInterface* ControllableObject = Cast(OwnerActor); + if (ControllableObject != nullptr) + { + return ControllableObject->GetPTZStateFromUE(); + } + else + { + FPTZState PTZState; + + FTransform Transform = OwnerActor->GetActorTransform(); + FVector Euler = Transform.GetRotation().Euler(); + PTZState.Pan = FMath::DegreesToRadians(Euler[2]); + PTZState.Tilt = FMath::DegreesToRadians(Euler[1]); + Transform.SetRotation(FQuat::MakeFromEuler(FVector(Euler[0], 0.f, 0.f))); + PTZState.CameraTransform = Transform; + + return PTZState; + } +} + +void UPTZController::SetPTZStateToUE(const FPTZState& PTZState) +{ + if (EnablePTZ == true) + { + AActor* OwnerActor = GetOwner(); + + IPTZControllableInterface* ControllableObject = Cast(OwnerActor); + if (ControllableObject != nullptr) + { + ControllableObject->SetPTZStateToUE(PTZState); + } + else + { + FTransform Transform = PTZState.CameraTransform; + FVector Euler = Transform.GetRotation().Euler(); + float Pitch = FMath::RadiansToDegrees(PTZState.Tilt); + float Yaw = FMath::RadiansToDegrees(PTZState.Pan); + Transform.SetRotation(FQuat::MakeFromEuler(FVector(Euler[0], Pitch, Yaw))); + OwnerActor->SetActorTransform(Transform); + } + } +} + + +void UPTZController::TickComponent(float DeltaTime, ELevelTick TickType, FActorComponentTickFunction* ThisTickFunction) +{ + Super::TickComponent(DeltaTime, TickType, ThisTickFunction); + + bool 
bUpdatePTZ = false; + + if(PTZStateInterp.EasingRemaining > 0) + bUpdatePTZ = true; + + if((PTZPanSpeed != 0) || (PTZTiltSpeed != 0) || (PTZZoomSpeed != 0)) + bUpdatePTZ = true; + + if(bUpdatePTZ) + { + FPTZState PTZState = GetPTZStateFromUE(); + + if(PTZStateInterp.EasingRemaining > 0) + { + float EasingDelta = FMath::Min(PTZStateInterp.EasingRemaining, DeltaTime); + + /** Interpolate from 0 to 1 using polynomial: + I(F) = a*F^3 + b*F^2 + c*F + d + with constraints: + Start and end points: I(0) = 0, I(1) = 1 + Smooth stop at end: I'(1) = 0 (velocity) + I''(1) = 0 (acceleration) + Solve to get: + a = 1, b = -3, c = 3, d = 0 + I(F) = F^3 - 3*F^2 + 3*F + */ + float EasingFrac = (PTZStateInterp.EasingRemaining > 0) ? (EasingDelta / PTZStateInterp.EasingRemaining) : 1; + float EasingInterp = EasingFrac*EasingFrac*EasingFrac - 3*EasingFrac*EasingFrac + 3*EasingFrac; + + PTZState.Pan = PTZState.Pan * (1 - EasingInterp) + PTZStateInterp.PTZTargetState.Pan * EasingInterp; + PTZState.Tilt = PTZState.Tilt * (1 - EasingInterp) + PTZStateInterp.PTZTargetState.Tilt * EasingInterp; + PTZState.FieldOfView = PTZState.FieldOfView * (1 - EasingInterp) + PTZStateInterp.PTZTargetState.FieldOfView * EasingInterp; + PTZState.FocusDistance = PTZState.FocusDistance * (1 - EasingInterp) + PTZStateInterp.PTZTargetState.FocusDistance * EasingInterp; + PTZState.CameraTransform.BlendWith(PTZStateInterp.PTZTargetState.CameraTransform, EasingInterp); + + PTZStateInterp.EasingRemaining -= EasingDelta; + } + + PTZState.FieldOfView -= FMath::RadiansToDegrees(PTZZoomSpeed) * DeltaTime; + if(PTZWithFoVLimit) + { + PTZState.FieldOfView = FMath::Clamp(PTZState.FieldOfView, PTZFoVMinLimit, PTZFoVMaxLimit); + } + PTZState.FieldOfView = FMath::Clamp(PTZState.FieldOfView, 5.f, 170.f); + + float MovementScale = PTZState.FieldOfView / 90.f; + + PTZState.Pan += PTZPanSpeed * DeltaTime * MovementScale * (bPTZPanInvert ? 
-1 : 1); + PTZState.Pan = FMath::Fmod(PTZState.Pan, 2*PI); + if(PTZWithPanLimit) + { + PTZState.Pan = FMath::Clamp(PTZState.Pan, FMath::DegreesToRadians(PTZPanMinLimit), FMath::DegreesToRadians(PTZPanMaxLimit)); + } + + PTZState.Tilt += PTZTiltSpeed * DeltaTime * MovementScale * (bPTZTiltInvert ? -1 : 1); + PTZState.Tilt = FMath::Fmod(PTZState.Tilt, 2*PI); + if(PTZWithTiltLimit) + { + PTZState.Tilt = FMath::Clamp(PTZState.Tilt, FMath::DegreesToRadians(PTZTiltMinLimit), FMath::DegreesToRadians(PTZTiltMaxLimit)); + } + + SetPTZStateToUE(PTZState); + } +} + + +void UPTZController::ReceiveMetaDataFromSender(UNDIMediaSender* Sender, FString Data) +{ + FText OutErrorMessage; + int32 OutErrorLineNumber; + + FFastXml::ParseXmlFile(this->NDIMetadataParser.Get(), + nullptr, // XmlFilePath + Data.GetCharArray().GetData(), // XmlFileContents + nullptr, // FeedbackContext + false, // bShowSlowTaskDialog + false, // bShowCancelButton + OutErrorMessage, // OutErrorMessage + OutErrorLineNumber // OutErrorLineNumber + ); +} diff --git a/Plugins/NDIIO/Source/Core/Classes/Components/NDIReceiverComponent.cpp b/Plugins/NDIIO/Source/Core/Classes/Components/NDIReceiverComponent.cpp new file mode 100644 index 0000000..07c7c80 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Components/NDIReceiverComponent.cpp @@ -0,0 +1,126 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include + +UNDIReceiverComponent::UNDIReceiverComponent(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer) {} + +/** + Initialize this component with the media source required for receiving NDI audio, video, and metadata. + Returns false, if the MediaSource is already been set. This is usually the case when this component is + initialized in Blueprints. 
+*/ +bool UNDIReceiverComponent::Initialize(UNDIMediaReceiver* InMediaSource) +{ + if (this->NDIMediaSource == nullptr && InMediaSource != nullptr) + { + this->NDIMediaSource = InMediaSource; + } + + return InMediaSource != nullptr && InMediaSource == NDIMediaSource; +} + +/** + Begin receiving NDI audio, video, and metadata frames +*/ +bool UNDIReceiverComponent::StartReceiver(const FNDIConnectionInformation& InConnectionInformation) +{ + if (IsValid(this->NDIMediaSource)) + { + // Call to the Media Source's function to initialize (hopefully with valid connection information) + if (NDIMediaSource->Initialize(InConnectionInformation, UNDIMediaReceiver::EUsage::Standalone)) + { + // FNDIConnectionService::RegisterReceiver(this->NDIMediaSource); + return true; + } + } + + return false; +} + +/** + Attempt to change the connection for which to get audio, video, and metadata frame from +*/ +void UNDIReceiverComponent::ChangeConnection(const FNDIConnectionInformation& InConnectionInformation) +{ + // Ensure a valid source to change the connection on + if (IsValid(this->NDIMediaSource)) + { + // Call the underlying function + NDIMediaSource->ChangeConnection(InConnectionInformation); + } +} + +/** + This will add a metadata frame and return immediately, having scheduled the frame asynchronously +*/ +void UNDIReceiverComponent::SendMetadataFrame(const FString& metadata) +{ + // Ensure a valid source to send metadata from + if (IsValid(this->NDIMediaSource)) + { + // Call the underlying function + NDIMediaSource->SendMetadataFrame(metadata); + } +} + +/** + This will setup the up-stream tally notifications. 
If no streams are connected, it will automatically send + the tally state upon connection +*/ +void UNDIReceiverComponent::SendTallyInformation(const bool& IsOnPreview, const bool& IsOnProgram) +{ + if (IsValid(this->NDIMediaSource)) + { + NDIMediaSource->SendTallyInformation(IsOnPreview, IsOnProgram); + } +} + +/** + Attempts to stop receiving audio, video, and metadata frame from the connected source +*/ +void UNDIReceiverComponent::ShutdownReceiver() +{ + if (IsValid(this->NDIMediaSource)) + { + NDIMediaSource->Shutdown(); + } +} + +/** + Returns the current framerate of the connected source +*/ +FFrameRate UNDIReceiverComponent::GetCurrentFrameRate() const +{ + return IsValid(NDIMediaSource) ? NDIMediaSource->GetCurrentFrameRate() : FFrameRate(60, 1); +} + +/** + Returns the current timecode of the connected source +*/ +FTimecode UNDIReceiverComponent::GetCurrentTimecode() const +{ + return IsValid(NDIMediaSource) + ? NDIMediaSource->GetCurrentTimecode() + : FTimecode::FromTimespan(FTimespan::FromMilliseconds(0.0), FFrameRate(60, 1), false, true); +} + +/** + Returns the current connection information of the connected source +*/ +FNDIConnectionInformation UNDIReceiverComponent::GetCurrentConnectionInformation() const +{ + return IsValid(NDIMediaSource) ? NDIMediaSource->GetCurrentConnectionInformation() : FNDIConnectionInformation(); +} + +/** + Returns the current performance data of the receiver while connected to the source +*/ +FNDIReceiverPerformanceData UNDIReceiverComponent::GetPerformanceData() const +{ + return IsValid(NDIMediaSource) ? 
NDIMediaSource->GetPerformanceData() : FNDIReceiverPerformanceData(); +} \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Classes/Components/NDITriCasterExtComponent.cpp b/Plugins/NDIIO/Source/Core/Classes/Components/NDITriCasterExtComponent.cpp new file mode 100644 index 0000000..58f63bc --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Components/NDITriCasterExtComponent.cpp @@ -0,0 +1,340 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include + +#include + +#include + +#include + + +/** + Parsers for TriCasterExt metadata +*/ + +class NDIXmlElementParser_tricaster_ext : public NDIXmlElementParser +{ +public: + NDIXmlElementParser_tricaster_ext(UTriCasterExtComponent* TriCasterExtComponentIn) + : TriCasterExtComponent(TriCasterExtComponentIn) + {} + + virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData) + { + TCData.Value = FString(); + TCData.KeyValues.Empty(); + + return true; + } + + virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override + { + if(FCString::Strcmp(TEXT("name"), AttributeName) == 0) + {} + else if(FCString::Strcmp(TEXT("value"), AttributeName) == 0) + { + TCData.Value = FString(AttributeValue); + } + else + { + TCData.KeyValues.Add(FName(AttributeName), FString(AttributeValue)); + } + + return true; + } + + virtual bool ProcessClose(const TCHAR* ElementName) override + { + if(TCData.Value == "ndiio") + { + FString* ActorNamePtr = TCData.KeyValues.Find("actor"); + FString* PropertyNamePtr = TCData.KeyValues.Find("property"); + FString* PropertyValueStrPtr = TCData.KeyValues.Find("propertyvalue"); + FString* ComponentNamePtr = TCData.KeyValues.Find("component"); + FString* EasingDurationPtr = TCData.KeyValues.Find("easing"); + + 
if((ActorNamePtr != nullptr) && (PropertyNamePtr != nullptr) && (PropertyValueStrPtr != nullptr)) + { + FString PropertyBaseName, PropertyElementName; + if(!PropertyNamePtr->Split(TEXT(":"), &PropertyBaseName, &PropertyElementName)) + PropertyBaseName = *PropertyNamePtr; + + FTimespan EasingDuration = 0; + if(EasingDurationPtr != nullptr) + { + double Seconds = FCString::Atod(**EasingDurationPtr); + EasingDuration = FTimespan::FromSeconds(Seconds); + } + + for(TActorIterator ActorItr(TriCasterExtComponent->GetWorld()); ActorItr; ++ActorItr) + { + AActor* Actor = *ActorItr; + if(Actor->GetName() == *ActorNamePtr) + { + UObject* FoundObject = nullptr; + FProperty* FoundProperty = nullptr; + + if(ComponentNamePtr != nullptr) + { + TInlineComponentArray PrimComponents; + Actor->GetComponents(PrimComponents, true); + for(auto& CompIt : PrimComponents) + { + if(CompIt->GetName() == *ComponentNamePtr) + { + FProperty* Property = CompIt->GetClass()->FindPropertyByName(*PropertyBaseName); + if(Property) + { + FoundObject = CompIt; + FoundProperty = Property; + break; + } + } + } + } + else + { + FProperty* ActorProperty = Actor->GetClass()->FindPropertyByName(*PropertyBaseName); + if(ActorProperty) + { + FoundObject = Actor; + FoundProperty = ActorProperty; + } + else + { + TInlineComponentArray PrimComponents; + Actor->GetComponents(PrimComponents, true); + + for(auto& CompIt : PrimComponents) + { + FProperty* CompProperty = CompIt->GetClass()->FindPropertyByName(*PropertyBaseName); + if(CompProperty) + { + FoundObject = CompIt; + FoundProperty = CompProperty; + break; + } + } + } + } + + if(FoundObject && FoundProperty) + { + TriCasterExtComponent->TriCasterExt(Actor, FoundObject, FoundProperty, PropertyElementName, *PropertyValueStrPtr, EasingDuration); + break; + } + } + } + } + } + + TriCasterExtComponent->TriCasterExtCustom(TCData); + + return true; + } + +protected: + UTriCasterExtComponent* TriCasterExtComponent; + + FTriCasterExt TCData; +}; +// +// +// +// +// + + 
+ +UTriCasterExtComponent::UTriCasterExtComponent() +{ + this->bWantsInitializeComponent = true; + + this->PrimaryComponentTick.bAllowTickOnDedicatedServer = false; + this->PrimaryComponentTick.bCanEverTick = true; + this->PrimaryComponentTick.bHighPriority = true; + this->PrimaryComponentTick.bRunOnAnyThread = false; + this->PrimaryComponentTick.bStartWithTickEnabled = true; + this->PrimaryComponentTick.bTickEvenWhenPaused = true; + + this->NDIMetadataParser = MakeShareable(new NDIXmlParser()); + NDIMetadataParser->AddElementParser("tricaster_ext", MakeShareable(new NDIXmlElementParser_tricaster_ext(this))); +} + +UTriCasterExtComponent::~UTriCasterExtComponent() +{} + +void UTriCasterExtComponent::InitializeComponent() +{ + Super::InitializeComponent(); + + if (IsValid(NDIMediaSource)) + { + // Ensure the TriCasterExt component is subscribed to the sender receiving metadata + this->NDIMediaSource->OnSenderMetaDataReceived.RemoveAll(this); + this->NDIMediaSource->OnSenderMetaDataReceived.AddDynamic(this, &UTriCasterExtComponent::ReceiveMetaDataFromSender); + } +} + +bool UTriCasterExtComponent::Initialize(UNDIMediaSender* InMediaSource) +{ + // is the media source already set? 
+ if (this->NDIMediaSource == nullptr && InMediaSource != nullptr) + { + // we passed validation, so set the media source + this->NDIMediaSource = InMediaSource; + + // validate the Media Source object + if (IsValid(NDIMediaSource)) + { + // Ensure the TriCasterExt component is subscribed to the sender receiving metadata + this->NDIMediaSource->OnSenderMetaDataReceived.RemoveAll(this); + this->NDIMediaSource->OnSenderMetaDataReceived.AddDynamic(this, &UTriCasterExtComponent::ReceiveMetaDataFromSender); + } + } + + // did we pass validation + return InMediaSource != nullptr && InMediaSource == NDIMediaSource; +} + +void UTriCasterExtComponent::TriCasterExt(AActor* Actor, UObject* Object, FProperty* Property, FString PropertyElementName, FString PropertyValueStr, FTimespan EasingDuration) +{ + if(Actor && Object && Property) + { + FTriCasterExtInterp Interp; + Interp.Actor = Actor; + Interp.Object = Object; + Interp.Property = Property; + Interp.PropertyElementName = PropertyElementName; + Interp.PropertyValueStr = PropertyValueStr; + Interp.EasingDuration = EasingDuration.GetTotalSeconds(); + Interp.EasingRemaining = Interp.EasingDuration; + + TriCasterExtInterp.Add(Interp); + } + + OnTriCasterExt.Broadcast(Actor, Object, PropertyElementName, PropertyValueStr, EasingDuration); +} + +void UTriCasterExtComponent::TriCasterExtCustom(const FTriCasterExt& TCData) +{ + OnTriCasterExtCustom.Broadcast(TCData); +} + + +void UTriCasterExtComponent::TickComponent(float DeltaTime, ELevelTick TickType, FActorComponentTickFunction* ThisTickFunction) +{ + Super::TickComponent(DeltaTime, TickType, ThisTickFunction); + + for(int32 i = 0; i < TriCasterExtInterp.Num(); ++i) + { + FTriCasterExtInterp& Interp = TriCasterExtInterp[i]; + + float EasingDelta = FMath::Min(Interp.EasingRemaining, DeltaTime); + + void* Data = Interp.Property->ContainerPtrToValuePtr(Interp.Object); + if(Data) + { + bool Done = false; + +#if WITH_EDITOR + Interp.Object->PreEditChange(Interp.Property); + 
Interp.Actor->PreEditChange(Interp.Property); +#endif + + if(FNumericProperty* NumericProperty = CastField(Interp.Property)) + { + double PropertyValue = NumericProperty->GetFloatingPointPropertyValue(Data); + double TargetValue = FCString::Atod(*Interp.PropertyValueStr); + + double EasingFrac = (Interp.EasingRemaining > 0) ? (EasingDelta / Interp.EasingRemaining) : 1; + double EasingInterp = 3*EasingFrac - 3*EasingFrac*EasingFrac + EasingFrac*EasingFrac*EasingFrac; + + double NewValue = PropertyValue * (1 - EasingInterp) + TargetValue * EasingInterp; + NumericProperty->SetFloatingPointPropertyValue(Data, NewValue); + Done = true; + } + else if(FStructProperty* StructProperty = CastField(Interp.Property)) + { + FProperty* FieldProperty = FindFProperty(StructProperty->Struct, *(Interp.PropertyElementName)); + if(FNumericProperty* StructNumericProperty = CastField(FieldProperty)) + { + void* FieldData = FieldProperty->ContainerPtrToValuePtr(Data); + double PropertyValue = StructNumericProperty->GetFloatingPointPropertyValue(FieldData); + double TargetValue = FCString::Atod(*Interp.PropertyValueStr); + + double EasingFrac = (Interp.EasingRemaining > 0) ? 
(EasingDelta / Interp.EasingRemaining) : 1; + double EasingInterp = 3*EasingFrac - 3*EasingFrac*EasingFrac + EasingFrac*EasingFrac*EasingFrac; + + double NewValue = PropertyValue * (1 - EasingInterp) + TargetValue * EasingInterp; + StructNumericProperty->SetFloatingPointPropertyValue(FieldData, NewValue); + Done = true; + } + } + + if(!Done) + { + FString ImportText; + if(!Interp.PropertyElementName.IsEmpty()) + ImportText = "(" + Interp.PropertyElementName + "=" + Interp.PropertyValueStr + ")"; + else + ImportText = Interp.PropertyValueStr; + Interp.Property->ImportText_Direct(*ImportText, Data, Interp.Object, 0); + } + + UActorComponent* ActorComponent = Cast(Interp.Object); + if(ActorComponent) + { + if((Interp.Property->GetFName() == TEXT("RelativeLocation")) || + (Interp.Property->GetFName() == TEXT("RelativeRotation")) || + (Interp.Property->GetFName() == TEXT("RelativeScale3D"))) + { + ActorComponent->UpdateComponentToWorld(); + } + } +#if (ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION < 3) // Before 5.3 + if(Interp.Property->HasAnyPropertyFlags(CPF_Interp)) + Interp.Object->PostInterpChange(Interp.Property); +#endif + +#if WITH_EDITOR + TArray ModifiedObjects; + ModifiedObjects.Add(Interp.Actor); + FPropertyChangedEvent PropertyChangedEvent(Interp.Property, EPropertyChangeType::ValueSet, MakeArrayView(ModifiedObjects)); + FEditPropertyChain PropertyChain; + PropertyChain.AddHead(Interp.Property); + FPropertyChangedChainEvent PropertyChangedChainEvent(PropertyChain, PropertyChangedEvent); + + Interp.Object->PostEditChangeChainProperty(PropertyChangedChainEvent); + Interp.Actor->PostEditChangeChainProperty(PropertyChangedChainEvent); +#endif + } + + Interp.EasingRemaining -= EasingDelta; + if(Interp.EasingRemaining == 0) + TriCasterExtInterp.RemoveAtSwap(i); + } +} + + +void UTriCasterExtComponent::ReceiveMetaDataFromSender(UNDIMediaSender* Sender, FString Data) +{ + FText OutErrorMessage; + int32 OutErrorLineNumber; + + 
FFastXml::ParseXmlFile(this->NDIMetadataParser.Get(), + nullptr, // XmlFilePath + Data.GetCharArray().GetData(), // XmlFileContents + nullptr, // FeedbackContext + false, // bShowSlowTaskDialog + false, // bShowCancelButton + OutErrorMessage, // OutErrorMessage + OutErrorLineNumber // OutErrorLineNumber + ); +} diff --git a/Plugins/NDIIO/Source/Core/Classes/Components/NDIViewportCaptureComponent.cpp b/Plugins/NDIIO/Source/Core/Classes/Components/NDIViewportCaptureComponent.cpp new file mode 100644 index 0000000..24a796f --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Components/NDIViewportCaptureComponent.cpp @@ -0,0 +1,264 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +UNDIViewportCaptureComponent::UNDIViewportCaptureComponent(const FObjectInitializer& ObjectInitializer) + : Super(ObjectInitializer) +{ + this->bWantsInitializeComponent = true; + this->CaptureSource = ESceneCaptureSource::SCS_FinalToneCurveHDR; + this->PostProcessSettings.bOverride_DepthOfFieldFocalDistance = true; + this->PostProcessSettings.DepthOfFieldFocalDistance = 10000.f; +} + +UNDIViewportCaptureComponent::~UNDIViewportCaptureComponent() +{} + +void UNDIViewportCaptureComponent::InitializeComponent() +{ + Super::InitializeComponent(); + + // validate the Media Source object + if (IsValid(NDIMediaSource)) + { + // define default capture values + const auto& capture_size = !bOverrideBroadcastSettings ? NDIMediaSource->GetFrameSize() : CaptureSize; + const auto& capture_rate = !bOverrideBroadcastSettings ? 
NDIMediaSource->GetFrameRate() : CaptureRate; + + // change the capture sizes as necessary + ChangeCaptureSettings(capture_size, capture_rate); + + // ensure we are subscribed to the broadcast configuration changed event + this->NDIMediaSource->OnBroadcastConfigurationChanged.RemoveAll(this); + this->NDIMediaSource->OnBroadcastConfigurationChanged.AddDynamic( + this, &UNDIViewportCaptureComponent::OnBroadcastConfigurationChanged); + } +} + +void UNDIViewportCaptureComponent::UninitializeComponent() +{ + if (IsValid(NDIMediaSource)) + { + if (IsValid(TextureTarget)) + { + NDIMediaSource->ChangeVideoTexture(nullptr); + } + } + + Super::UninitializeComponent(); +} + +bool UNDIViewportCaptureComponent::Initialize(UNDIMediaSender* InMediaSource) +{ + // is the media source already set? + if (this->NDIMediaSource == nullptr && InMediaSource != nullptr) + { + // we passed validation, so set the media source + this->NDIMediaSource = InMediaSource; + + // validate the Media Source object + if (IsValid(NDIMediaSource)) + { + // define default capture values + const auto& capture_size = !bOverrideBroadcastSettings ? NDIMediaSource->GetFrameSize() : CaptureSize; + const auto& capture_rate = !bOverrideBroadcastSettings ? 
NDIMediaSource->GetFrameRate() : CaptureRate; + + // change the capture sizes as necessary + ChangeCaptureSettings(capture_size, capture_rate); + + // ensure we are subscribed to the broadcast configuration changed event + this->NDIMediaSource->OnBroadcastConfigurationChanged.RemoveAll(this); + this->NDIMediaSource->OnBroadcastConfigurationChanged.AddDynamic( + this, &UNDIViewportCaptureComponent::OnBroadcastConfigurationChanged); + } + } + + // did we pass validation + return InMediaSource != nullptr && InMediaSource == NDIMediaSource; +} + +/** + Changes the name of the sender object as seen on the network for remote connections + + @param InSourceName The new name of the source to be identified as on the network +*/ +void UNDIViewportCaptureComponent::ChangeSourceName(const FString& InSourceName) +{ + // validate the Media Source object + if (IsValid(NDIMediaSource)) + { + // call the media source implementation of the function + NDIMediaSource->ChangeSourceName(InSourceName); + } +} + +/** + Attempts to change the Broadcast information associated with this media object + + @param InConfiguration The new configuration to broadcast +*/ +void UNDIViewportCaptureComponent::ChangeBroadcastConfiguration(const FNDIBroadcastConfiguration& InConfiguration) +{ + // validate the Media Source object + if (IsValid(NDIMediaSource)) + { + // call the media source implementation of the function + NDIMediaSource->ChangeBroadcastConfiguration(InConfiguration); + } +} + +/** + Attempts to change the RenderTarget used in sending video frames over NDI + + @param BroadcastTexture The texture to use as video, while broadcasting over NDI +*/ +void UNDIViewportCaptureComponent::ChangeBroadcastTexture(UTextureRenderTarget2D* BroadcastTexture) +{ + // ensure we have some thread-safety + FScopeLock Lock(&UpdateRenderContext); + + this->TextureTarget = BroadcastTexture; +} + +/** + Change the capture settings of the viewport capture + + @param InCaptureSize The Capture size of the frame to 
capture of the viewport + @param InCaptureRate A framerate at which to capture frames of the viewport +*/ +void UNDIViewportCaptureComponent::ChangeCaptureSettings(FIntPoint InCaptureSize, FFrameRate InCaptureRate) +{ + // clamp our viewport capture size + int32 capture_width = FMath::Max(InCaptureSize.X, 64); + int32 capture_height = FMath::Max(InCaptureSize.Y, 64); + + // set the capture size + this->CaptureSize = FIntPoint(capture_width, capture_height); + + // set the capture rate + this->CaptureRate = InCaptureRate; + + // clamp the maximum capture rate to something reasonable + float capture_rate_max = 1 / 1000.0f; + float capture_rate = CaptureRate.Denominator / (float)CaptureRate.Numerator; + + // set the primary tick interval to the sensible capture rate + this->PrimaryComponentTick.TickInterval = capture_rate >= capture_rate_max ? capture_rate : -1.0f; + + // ensure we have some thread-safety + FScopeLock Lock(&UpdateRenderContext); + + if (!IsValid(this->TextureTarget)) + { + this->TextureTarget = NewObject( + GetTransientPackage(), UTextureRenderTarget2D::StaticClass(), NAME_None, RF_Transient | RF_MarkAsNative); + this->TextureTarget->UpdateResource(); + } + this->TextureTarget->ResizeTarget(this->CaptureSize.X, this->CaptureSize.Y); +} + +/** + Determines the current tally information. 
If you specify a timeout then it will wait until it has + changed, otherwise it will simply poll it and return the current tally immediately + + @param IsOnPreview - A state indicating whether this source in on preview of a receiver + @param IsOnProgram - A state indicating whether this source is on program of a receiver +*/ +void UNDIViewportCaptureComponent::GetTallyInformation(bool& IsOnPreview, bool& IsOnProgram) +{ + // Initialize the properties + IsOnPreview = false; + IsOnProgram = false; + + // validate the Media Source object + if (IsValid(NDIMediaSource)) + { + // call the media source implementation of the function + NDIMediaSource->GetTallyInformation(IsOnPreview, IsOnProgram, 0); + } +} + +/** + Gets the current number of receivers connected to this source. This can be used to avoid rendering + when nothing is connected to the video source. which can significantly improve the efficiency if + you want to make a lot of sources available on the network + + @param Result The total number of connected receivers attached to the broadcast of this object +*/ +void UNDIViewportCaptureComponent::GetNumberOfConnections(int32& Result) +{ + // Initialize the property + Result = 0; + + // validate the Media Source object + if (IsValid(NDIMediaSource)) + { + // call the media source implementation of the function + NDIMediaSource->GetNumberOfConnections(Result); + } +} + + +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later +void UNDIViewportCaptureComponent::UpdateSceneCaptureContents(FSceneInterface* Scene, ISceneRenderBuilder& SceneRenderBuilder) +#else +void UNDIViewportCaptureComponent::UpdateSceneCaptureContents(FSceneInterface* Scene) +#endif +{ + // ensure we have some thread-safety + FScopeLock Lock(&UpdateRenderContext); + + if (TextureTarget == nullptr) + return; + + if (IsValid(NDIMediaSource)) + { + NDIMediaSource->ChangeVideoTexture(TextureTarget); + + // Some capture sources treat alpha as 
opacity, some sources use transparency. + // Alpha in NDI is opacity. Reverse the alpha mapping to always get opacity. + bool flip_alpha = (CaptureSource == SCS_SceneColorHDR) || (CaptureSource == SCS_SceneColorHDRNoAlpha) || + (CaptureSource == SCS_SceneDepth) || (CaptureSource == SCS_Normal) || + (CaptureSource == SCS_BaseColor); + if (flip_alpha == false) + NDIMediaSource->ChangeAlphaRemap(AlphaMin, AlphaMax); + else + NDIMediaSource->ChangeAlphaRemap(AlphaMax, AlphaMin); + + // Do the actual capturing +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later + Super::UpdateSceneCaptureContents(Scene, SceneRenderBuilder); +#else + Super::UpdateSceneCaptureContents(Scene); +#endif + } +} + +void UNDIViewportCaptureComponent::OnBroadcastConfigurationChanged(UNDIMediaSender* Sender) +{ + // If we are not overriding the broadcast settings and the sender is valid + if (!bOverrideBroadcastSettings && IsValid(Sender)) + { + // change the capture sizes as necessary + ChangeCaptureSettings(Sender->GetFrameSize(), Sender->GetFrameRate()); + } +} diff --git a/Plugins/NDIIO/Source/Core/Classes/NDIIOPluginModule.cpp b/Plugins/NDIIO/Source/Core/Classes/NDIIOPluginModule.cpp new file mode 100644 index 0000000..f61fae4 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/NDIIOPluginModule.cpp @@ -0,0 +1,305 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#include + +#include +#include +#include +#include "Player/NDIMediaPlayer.h" +#include + +#include + +#include +#include + +#include +#include + +// Meaning the plugin is being compiled with the editor +#if WITH_EDITOR + +#include "ThumbnailRendering/ThumbnailManager.h" +#include "ThumbnailRendering/TextureThumbnailRenderer.h" + +#include +#include + +#include + +#endif + +#define LOCTEXT_NAMESPACE "FNDIIOPluginModule" + + +void FNDIIOPluginModule::StartupModule() +{ + // Doubly Ensure that this handle is nullptr + NDI_LIB_HANDLE = nullptr; + + if (LoadModuleDependencies()) + { +#if UE_EDITOR + + if (ISettingsModule* SettingsModule = FModuleManager::GetModulePtr("Settings")) + { + SettingsModule->RegisterSettings( + "Project", "Plugins", "NDI", LOCTEXT("NDISettingsName", "Vizrt NDI"), + LOCTEXT("NDISettingsDescription", "Vizrt NDI(R) Engine Intergration Settings"), + GetMutableDefault()); + } + + // Ensure that the thumbnail for the 'NDI Media Texture2D' is being updated, as the texture is being used. + UThumbnailManager::Get().RegisterCustomRenderer(UNDIMediaTexture2D::StaticClass(), + UTextureThumbnailRenderer::StaticClass()); + +#endif + + // Construct our Services + this->NDIFinderService = MakeShareable(new FNDIFinderService()); + this->NDIConnectionService = MakeShareable(new FNDIConnectionService()); + + // Start the service + if (NDIFinderService.IsValid()) + NDIFinderService->Start(); + + // Start the service + if (NDIConnectionService.IsValid()) + NDIConnectionService->Start(); + } + else + { +#if PLATFORM_WINDOWS + // Write an error message to the log. + UE_LOG(LogWindows, Error, + TEXT("Unable to load \"Processing.NDI.Lib.x64.dll\" from the NDI 6 Runtime Directory.")); + +#if UE_EDITOR + + const FText& WarningMessage = + LOCTEXT("NDIRuntimeMissing", + "Cannot find \"Processing.NDI.Lib.x64.dll\" from the NDI 6 Runtime Directory. 
" + "Continued usage of the plugin can cause instability within the editor.\r\n\r\n" + + "Please refer to the 'NDI IO Plugin for Unreal Engine Quickstart Guide' " + "for additional information related to installation instructions for this plugin.\r\n\r\n"); + + // Open a message box, showing that things will not work since the NDI Runtime Directory cannot be found + if (FMessageDialog::Open(EAppMsgType::OkCancel, EAppReturnType::Ok, WarningMessage) == EAppReturnType::Ok) + { + FString URLResult = FString(""); + FPlatformProcess::LaunchURL(*FString("https://ndi.video/sdk/"), nullptr, &URLResult); + } + +#endif +#endif + +#if (PLATFORM_LINUX || PLATFORM_LINUXARM64) + // Write an error message to the log. + UE_LOG(LogLinux, Error, + TEXT("Unable to load \"" NDILIB_LIBRARY_NAME "\" from the NDI 6 Runtime.")); + +#if UE_EDITOR + + const FText& WarningMessage = + LOCTEXT("NDIRuntimeMissing", + "Cannot find \"" NDILIB_LIBRARY_NAME "\" from the NDI 6 Runtime. " + "Continued usage of the plugin can cause instability within the editor.\r\n\r\n" + + "Please refer to the 'NDI IO Plugin for Unreal Engine Quickstart Guide' " + "for additional information related to installation instructions for this plugin.\r\n\r\n"); + + // Open a message box, showing that things will not work since the NDI Runtime Directory cannot be found + if (FMessageDialog::Open(EAppMsgType::OkCancel, EAppReturnType::Ok, WarningMessage) == EAppReturnType::Ok) + { + FString URLResult = FString(""); + FPlatformProcess::LaunchURL(*FString("https://ndi.video/sdk/"), nullptr, &URLResult); + } + +#endif +#endif + } + + + // supported platforms + SupportedPlatforms.Add(TEXT("Windows")); + SupportedPlatforms.Add(TEXT("Linux")); + SupportedPlatforms.Add(TEXT("LinuxAArch64")); + + // supported schemes + SupportedUriSchemes.Add(TEXT("ndiio")); + + // register player factory + auto MediaModule = FModuleManager::LoadModulePtr("Media"); + + if (MediaModule != nullptr) + { + MediaModule->RegisterPlayerFactory(*this); + } 
+ + FApp::SetUnfocusedVolumeMultiplier(1.f); +} + +void FNDIIOPluginModule::ShutdownModule() +{ + // unregister player factory + auto MediaModule = FModuleManager::GetModulePtr("Media"); + + if (MediaModule != nullptr) + { + MediaModule->UnregisterPlayerFactory(*this); + } + + + if (NDIFinderService.IsValid()) + NDIFinderService->Shutdown(); + + ShutdownModuleDependencies(); +} + +bool FNDIIOPluginModule::BeginBroadcastingActiveViewport() +{ + // Ensure we have a valid service + if (NDIConnectionService.IsValid()) + { + // perform the requested functionality + return NDIConnectionService->BeginBroadcastingActiveViewport(); + } + + return false; +} + +void FNDIIOPluginModule::StopBroadcastingActiveViewport() +{ + // Ensure we have a valid service + if (NDIConnectionService.IsValid()) + { + // perform the requested functionality + NDIConnectionService->StopBroadcastingActiveViewport(); + } +} + + + + +//~ IMediaPlayerFactory interface +bool FNDIIOPluginModule::CanPlayUrl(const FString& Url, const IMediaOptions* /*Options*/, TArray* /*OutWarnings*/, TArray* OutErrors) const +{ + FString Scheme; + FString Location; + + // check scheme + if (!Url.Split(TEXT("://"), &Scheme, &Location, ESearchCase::CaseSensitive)) + { + if (OutErrors != nullptr) + { + OutErrors->Add(LOCTEXT("NoSchemeFound", "No URI scheme found")); + } + + return false; + } + + if (!SupportedUriSchemes.Contains(Scheme)) + { + if (OutErrors != nullptr) + { + OutErrors->Add(FText::Format(LOCTEXT("SchemeNotSupported", "The URI scheme '{0}' is not supported"), FText::FromString(Scheme))); + } + + return false; + } + + return true; +} + +TSharedPtr FNDIIOPluginModule::CreatePlayer(IMediaEventSink& EventSink) +{ + return MakeShared(EventSink); +} + +FText FNDIIOPluginModule::GetDisplayName() const +{ + return LOCTEXT("MediaPlayerDisplayName", "NDI Interface"); +} + +FName FNDIIOPluginModule::GetPlayerName() const +{ + static FName PlayerName(TEXT("NDIMedia")); + return PlayerName; +} + +FGuid 
FNDIIOPluginModule::GetPlayerPluginGUID() const +{ + static FGuid PlayerPluginGUID(0x71b13c2b, 0x70874965, 0x8a0e23f7, 0x5be6698f); + return PlayerPluginGUID; +} + +const TArray& FNDIIOPluginModule::GetSupportedPlatforms() const +{ + return SupportedPlatforms; +} + +bool FNDIIOPluginModule::SupportsFeature(EMediaFeature Feature) const +{ + return Feature == EMediaFeature::AudioSamples || + Feature == EMediaFeature::MetadataTracks || + Feature == EMediaFeature::VideoSamples; +} + + + + +bool FNDIIOPluginModule::LoadModuleDependencies() +{ +#if PLATFORM_WINDOWS + // Get the Binaries File Location + const FString env_variable = TEXT(NDILIB_REDIST_FOLDER); + const FString binaries_path = FPlatformMisc::GetEnvironmentVariable(*env_variable) + "/Processing.NDI.Lib.x64.dll"; + + // We can't validate if it's valid, but we can determine if it's explicitly not. + if (binaries_path.Len() > 0) + { + // Load the DLL + this->NDI_LIB_HANDLE = FPlatformProcess::GetDllHandle(*binaries_path); + + // Not required, but "correct" (see the SDK documentation) + if (this->NDI_LIB_HANDLE != nullptr && !NDIlib_initialize()) + { + // We were unable to initialize the library, so lets free the handle + FPlatformProcess::FreeDllHandle(this->NDI_LIB_HANDLE); + this->NDI_LIB_HANDLE = nullptr; + } + } + + // Did we successfully load the NDI library? 
+ return this->NDI_LIB_HANDLE != nullptr; +#endif + +#if (PLATFORM_LINUX || PLATFORM_LINUXARM64) + return true; +#endif +} + +void FNDIIOPluginModule::ShutdownModuleDependencies() +{ +#if PLATFORM_WINDOWS + if (this->NDI_LIB_HANDLE != nullptr) + { + NDIlib_destroy(); + FPlatformProcess::FreeDllHandle(this->NDI_LIB_HANDLE); + this->NDI_LIB_HANDLE = nullptr; + } +#endif + +#if (PLATFORM_LINUX || PLATFORM_LINUXARM64) +#endif +} + +#undef LOCTEXT_NAMESPACE + +IMPLEMENT_MODULE(FNDIIOPluginModule, NDIIO); diff --git a/Plugins/NDIIO/Source/Core/Classes/Objects/Libraries/NDIIOLibrary.cpp b/Plugins/NDIIO/Source/Core/Classes/Objects/Libraries/NDIIOLibrary.cpp new file mode 100644 index 0000000..58fdfa5 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Objects/Libraries/NDIIOLibrary.cpp @@ -0,0 +1,201 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include +#include +#include + +#include + +const TArray UNDIIOLibrary::K2_GetNDISourceCollection() +{ + // Return the FinderServices current network source collection + return FNDIFinderService::GetNetworkSourceCollection(); +} + +const bool UNDIIOLibrary::K2_FindNetworkSourceByName(UObject* WorldContextObject, + FNDIConnectionInformation& ConnectionInformation, + FString InSourceName) +{ + // Ensure that the passed in information is empty + ConnectionInformation.Reset(); + + // Get the current network source collection from the finder service. + const TArray NetworkSourceCollection = FNDIFinderService::GetNetworkSourceCollection(); + + // Get the current number of network source items in the collection + int32 final_count = NetworkSourceCollection.Num(); + + // Ensure we have a wide range of items to search through. 
+ int32 last_index = final_count; + + // Since the Source collection returned is already sorted alphabetically do a binary search to speed things up. + // We are only going to do comparisons that are necessary using O(log(n)) time complexity + for (int32 current_index = 0; current_index < last_index; /* current_index changed in loop */) + { + // Ensure that the index is valid (this will protect against negative values) + if (NetworkSourceCollection.IsValidIndex(current_index)) + { + // Get the source reference from the collection + FNDIConnectionInformation source_info = NetworkSourceCollection[current_index]; + + // do a comparison against the requested SourceName + if (int32 comparitor_value = InSourceName.Compare(source_info.SourceName, ESearchCase::IgnoreCase)) + { + // Our search says that our source name is less than the info we checked (Compare returned negative) + if (comparitor_value <= 0) + { + // set the last index to the current index + last_index = current_index; + + // get halfway between the last index and the 0th index + current_index = last_index / 2; + } + + // Our search says that our source name is greater than the info we checked (Compare returned positive) + else if (comparitor_value > 0) + { + // move up half the number of items within the collection + current_index = (last_index + current_index + 1) / 2; + } + } + + // We found a comparable source. 
+ else + { + // Set the source information structure + ConnectionInformation = source_info; + + // return success + return true; + } + } + + // Something weird happened (maybe the first check was larger than the search term); just return a fail + else + return false; + } + + return false; +} + +bool UNDIIOLibrary::K2_BeginBroadcastingActiveViewport(UObject* WorldContextObject) +{ + // Get the plugin module for the owner of this object + if (FNDIIOPluginModule* PluginModule = FModuleManager::GetModulePtr("NDIIO")) + { + // Call the underlying functionality + return PluginModule->BeginBroadcastingActiveViewport(); + } + + return false; +} + +void UNDIIOLibrary::K2_StopBroadcastingActiveViewport(UObject* WorldContextObject) +{ + // Get the plugin module for the owner of this object + if (FNDIIOPluginModule* PluginModule = FModuleManager::GetModulePtr("NDIIO")) + { + // Call the underlying functionality + PluginModule->StopBroadcastingActiveViewport(); + } +} + +UNDIMediaReceiver* UNDIIOLibrary::K2_GetNDIMediaReceiver(UNDIMediaReceiver* Receiver) +{ + return Receiver; +} + +UNDIMediaSender* UNDIIOLibrary::K2_GetNDIMediaSender(UNDIMediaSender* Sender) +{ + return Sender; +} + + +const TArray UNDIIOLibrary::K2_ParseNDIMetaData(FString Data) +{ + class Parser : public IFastXmlCallback + { + public: + Parser(TArray& ElementsIn) + : Elements(ElementsIn) + {} + + virtual ~Parser() + {} + + virtual bool ProcessXmlDeclaration(const TCHAR* ElementData, int32 XmlFileLineNumber) override + { + return true; + } + + virtual bool ProcessElement(const TCHAR* ElementName, const TCHAR* ElementData, int32 XmlFileLineNumber) override + { + if(CurrentElementStack.Num() > 0) + return false; + + FNDIMetaDataElement NewElement; + NewElement.ElementName = ElementName; + NewElement.Data = ElementData; + + if(CurrentElementStack.Num() == 0) + { + Elements.Push(NewElement); + CurrentElementStack.Push(&Elements.Last()); + } + + return true; + } + + virtual bool ProcessAttribute(const TCHAR* 
AttributeName, const TCHAR* AttributeValue) override + { + check(CurrentElementStack.Num() > 0); + + FNDIMetaDataElement* CurrentElement = CurrentElementStack.Last(); + + CurrentElement->Attributes.Add(AttributeName, AttributeValue); + + return true; + } + + virtual bool ProcessClose(const TCHAR* ElementName) override + { + check(CurrentElementStack.Num() > 0); + + CurrentElementStack.Pop(); + + return true; + } + + virtual bool ProcessComment(const TCHAR* Comment) override + { + return true; + } + + private: + TArray& Elements; + TArray CurrentElementStack; + }; + + TArray Elements; + Parser Parser(Elements); + + FText OutErrorMessage; + int32 OutErrorLineNumber; + + FFastXml::ParseXmlFile(&Parser, + nullptr, // XmlFilePath + Data.GetCharArray().GetData(), // XmlFileContents + nullptr, // FeedbackContext + false, // bShowSlowTaskDialog + false, // bShowCancelButton + OutErrorMessage, // OutErrorMessage + OutErrorLineNumber // OutErrorLineNumber + ); + + return Elements; +} diff --git a/Plugins/NDIIO/Source/Core/Classes/Objects/Media/NDIMediaReceiver.cpp b/Plugins/NDIIO/Source/Core/Classes/Objects/Media/NDIMediaReceiver.cpp new file mode 100644 index 0000000..944228c --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Objects/Media/NDIMediaReceiver.cpp @@ -0,0 +1,1525 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "NDIShaders.h" + +#if WITH_EDITOR +#include +#endif + +#include + +UNDIMediaReceiver::UNDIMediaReceiver() +{ + this->InternalVideoTexture = NewObject(GetTransientPackage(), UNDIMediaTexture2D::StaticClass(), NAME_None, RF_Transient | RF_MarkAsNative); +} + +/** + Attempts to perform initialization logic for creating a receiver through the NDI sdk api +*/ +bool UNDIMediaReceiver::Initialize(const FNDIConnectionInformation& InConnectionInformation, UNDIMediaReceiver::EUsage InUsage) +{ + if (this->p_receive_instance == nullptr) + { + if (IsValid(this->InternalVideoTexture)) + this->InternalVideoTexture->UpdateResource(); + + // create a non-connected receiver instance + NDIlib_recv_create_v3_t settings; + settings.allow_video_fields = false; + settings.bandwidth = NDIlib_recv_bandwidth_highest; + settings.color_format = NDIlib_recv_color_format_fastest; + + p_receive_instance = NDIlib_recv_create_v3(&settings); + + // check if it was successful + if (p_receive_instance != nullptr) + { + // If the incoming connection information is valid + if (InConnectionInformation.IsValid()) + { + //// Alright we created a non-connected receiver. 
Lets actually connect + ChangeConnection(InConnectionInformation); + } + + if (InUsage == UNDIMediaReceiver::EUsage::Standalone) + { + this->OnNDIReceiverVideoCaptureEvent.Remove(VideoCaptureEventHandle); + VideoCaptureEventHandle = this->OnNDIReceiverVideoCaptureEvent.AddLambda([this](UNDIMediaReceiver* receiver, const NDIlib_video_frame_v2_t& video_frame) + { + FTextureRHIRef ConversionTexture = this->DisplayFrame(video_frame); + if (ConversionTexture != nullptr) + { + if ((GetVideoTextureResource() != nullptr) && (GetVideoTextureResource()->TextureRHI != ConversionTexture)) + { + GetVideoTextureResource()->TextureRHI = ConversionTexture; + RHIUpdateTextureReference(this->VideoTexture->TextureReference.TextureReferenceRHI, ConversionTexture); + } + if ((GetInternalVideoTextureResource() != nullptr) && (GetInternalVideoTextureResource()->TextureRHI != ConversionTexture)) + { + GetInternalVideoTextureResource()->TextureRHI = ConversionTexture; + RHIUpdateTextureReference(this->InternalVideoTexture->TextureReference.TextureReferenceRHI, ConversionTexture); + } + } + }); + + // We don't want to limit the engine rendering speed to the sync rate of the connection hook + // into the core delegates render thread 'EndFrame' + FCoreDelegates::OnEndFrameRT.Remove(FrameEndRTHandle); + FrameEndRTHandle.Reset(); + FrameEndRTHandle = FCoreDelegates::OnEndFrameRT.AddLambda([this]() + { + while(this->CaptureConnectedMetadata()) + ; // Potential improvement: limit how much metadata is processed, to avoid appearing to lock up due to a metadata flood + this->CaptureConnectedVideo(); + }); + +#if UE_EDITOR + // We don't want to provide perceived issues with the plugin not working so + // when we get a Pre-exit message, forcefully shutdown the receiver + FCoreDelegates::OnPreExit.AddWeakLambda(this, [&]() { + this->Shutdown(); + FCoreDelegates::OnPreExit.RemoveAll(this); + }); + + // We handle this in the 'Play In Editor' versions as well. 
+ FEditorDelegates::PrePIEEnded.AddWeakLambda(this, [&](const bool) { + this->Shutdown(); + FEditorDelegates::PrePIEEnded.RemoveAll(this); + }); +#endif + } + + return true; + } + } + + return false; +} + +bool UNDIMediaReceiver::Initialize(UNDIMediaReceiver::EUsage InUsage) +{ + return Initialize(ConnectionSetting, InUsage); +} + + +void UNDIMediaReceiver::StartConnection() +{ + FScopeLock RenderLock(&RenderSyncContext); + FScopeLock AudioLock(&AudioSyncContext); + FScopeLock MetadataLock(&MetadataSyncContext); + + if (this->ConnectionInformation.IsValid()) + { + // Create a non-connected receiver instance + NDIlib_recv_create_v3_t settings; + settings.allow_video_fields = true; + settings.bandwidth = this->ConnectionInformation; + settings.color_format = NDIlib_recv_color_format_fastest; + + // Do the conversion on the connection information + // Beware of the limited lifetime of TCHAR_TO_UTF8 values + NDIlib_source_t connection; + std::string SourceNameStr(TCHAR_TO_UTF8(*this->ConnectionInformation.GetNDIName())); + connection.p_ndi_name = SourceNameStr.c_str(); + std::string UrlStr(TCHAR_TO_UTF8(*this->ConnectionInformation.Url)); + connection.p_url_address = UrlStr.c_str(); + + // Create a receiver and connect to the source + auto* receive_instance = NDIlib_recv_create_v3(&settings); + NDIlib_recv_connect(receive_instance, &connection); + + // Get rid of existing connection + StopConnection(); + + // set the receiver to the new connection + p_receive_instance = receive_instance; + + // create a new frame sync instance + p_framesync_instance = NDIlib_framesync_create(p_receive_instance); + } +} + +void UNDIMediaReceiver::StopConnection() +{ + FScopeLock RenderLock(&RenderSyncContext); + FScopeLock AudioLock(&AudioSyncContext); + FScopeLock MetadataLock(&MetadataSyncContext); + + // destroy the framesync instance + if (p_framesync_instance != nullptr) + NDIlib_framesync_destroy(p_framesync_instance); + p_framesync_instance = nullptr; + + // Free the receiver + 
if (p_receive_instance != nullptr) + NDIlib_recv_destroy(p_receive_instance); + p_receive_instance = nullptr; +} + +/** + Attempts to change the connection to another NDI sender source +*/ +void UNDIMediaReceiver::ChangeConnection(const FNDIConnectionInformation& InConnectionInformation) +{ + // Ensure some thread-safety because our 'Capture Connected Video' function is called on the render thread + FScopeLock RenderLock(&RenderSyncContext); + FScopeLock AudioLock(&AudioSyncContext); + FScopeLock MetadataLock(&MetadataSyncContext); + + // We should only worry about connections that are already created + if (p_receive_instance != nullptr) + { + // Set the connection information for the requested new connection + if (this->ConnectionInformation != InConnectionInformation) + { + bool bSourceChanged = false; + if(this->ConnectionInformation.SourceName != InConnectionInformation.SourceName) + bSourceChanged = true; + if(this->ConnectionInformation.Url != InConnectionInformation.Url) + bSourceChanged = true; + if(this->ConnectionInformation.MachineName != InConnectionInformation.MachineName) + bSourceChanged = true; + if(this->ConnectionInformation.StreamName != InConnectionInformation.StreamName) + bSourceChanged = true; + + bool bBandwidthChanged = false; + if(this->ConnectionInformation.Bandwidth != InConnectionInformation.Bandwidth) + bBandwidthChanged = true; + + bool bMutingChanged = false; + if(this->ConnectionInformation.bMuteAudio != InConnectionInformation.bMuteAudio) + bMutingChanged = true; + if(this->ConnectionInformation.bMuteVideo != InConnectionInformation.bMuteVideo) + bMutingChanged = true; + + this->ConnectionInformation = InConnectionInformation; + + if (this->ConnectionInformation.IsValid()) + { + if (bSourceChanged || bBandwidthChanged || (p_receive_instance == nullptr) || (p_framesync_instance == nullptr)) + { + // Connection information is valid, and something has changed that requires the connection to be remade + + StartConnection(); + } + } + 
else + { + // Requested connection is invalid, indicating we should close the current connection + + StopConnection(); + } + } + } +} + +/** + Attempts to change the Video Texture object used as the video frame capture object +*/ +void UNDIMediaReceiver::ChangeVideoTexture(UNDIMediaTexture2D* InVideoTexture) +{ + FScopeLock Lock(&RenderSyncContext); + + if (IsValid(this->VideoTexture)) + { + // make sure that the old texture is not referencing the rendering of this texture + this->VideoTexture->UpdateTextureReference(FRHICommandListExecutor::GetImmediateCommandList(), nullptr); + } + if (IsValid(this->InternalVideoTexture)) + { + // make sure that the old texture is not referencing the rendering of this texture + this->InternalVideoTexture->UpdateTextureReference(FRHICommandListExecutor::GetImmediateCommandList(), nullptr); + } + + // Just copy the new texture here. + this->VideoTexture = InVideoTexture; +} + +/** + Attempts to generate the pcm data required by the 'AudioWave' object + We will generate mono audio, down-mixing if the source has multiple channels +*/ +int32 UNDIMediaReceiver::GeneratePCMData(UNDIMediaSoundWave* AudioWave, uint8* PCMData, const int32 SamplesNeeded) +{ + FScopeLock Lock(&AudioSyncContext); + + int32 samples_generated = 0; + int32 requested_frame_rate = IsValid(AudioWave) ? AudioWave->GetSampleRateForCurrentPlatform() : 48000; + int32 requested_no_channels = IsValid(AudioWave) ? 
AudioWave->NumChannels : 1; + int32 requested_no_frames = SamplesNeeded / requested_no_channels; + + if ((p_framesync_instance != nullptr) && (ConnectionInformation.bMuteAudio == false)) + { + int available_no_frames = NDIlib_framesync_audio_queue_depth(p_framesync_instance); // Samples per channel + + if (available_no_frames > 0) + { + NDIlib_audio_frame_v2_t audio_frame; + NDIlib_framesync_capture_audio(p_framesync_instance, &audio_frame, requested_frame_rate, 0, FMath::Min(available_no_frames, requested_no_frames)); + + if (requested_no_channels == audio_frame.no_channels) + { + // Convert to PCM + for (int32 channel_index = 0; channel_index < requested_no_channels; ++channel_index) + { + const float* channel_data = reinterpret_cast(reinterpret_cast(audio_frame.p_data) + channel_index * audio_frame.channel_stride_in_bytes); + uint8* pcm_data = PCMData + channel_index * sizeof(int16); + + for (int32 sample_index = 0; sample_index < audio_frame.no_samples; ++sample_index) + { + // convert float to int16 + int32 sample_int32 = FMath::RoundToInt(*channel_data * 32767.0f); + // perform clamp between different integer types + int16 sample = sample_int32 < INT16_MIN ? INT16_MIN : sample_int32 > INT16_MAX ? 
INT16_MAX : sample_int32; + + pcm_data[0] = sample & 0xff; + pcm_data[1] = (sample >> 8) & 0xff; + + ++channel_data; + pcm_data += requested_no_channels * sizeof(int16); + } + } + } + + else if (requested_no_channels < audio_frame.no_channels) + { + // Add extra channels to all common channels + + const int32 no_extra_channels = audio_frame.no_channels - requested_no_channels; + + for (int32 src_channel_index = requested_no_channels; src_channel_index < audio_frame.no_channels; ++src_channel_index) + { + const float* src_channel_data = reinterpret_cast(reinterpret_cast(audio_frame.p_data) + src_channel_index * audio_frame.channel_stride_in_bytes); + for (int32 dst_channel_index = 0; dst_channel_index < requested_no_channels; ++dst_channel_index) + { + float* dst_channel_data = reinterpret_cast(reinterpret_cast(audio_frame.p_data) + dst_channel_index * audio_frame.channel_stride_in_bytes); + for (int32 sample_index = 0; sample_index < audio_frame.no_samples; ++sample_index) + { + dst_channel_data[sample_index] += src_channel_data[sample_index]; + } + } + } + + // Convert to PCM, taking care of any normalization + for (int32 channel_index = 0; channel_index < requested_no_channels; ++channel_index) + { + const float* channel_data = reinterpret_cast(reinterpret_cast(audio_frame.p_data) + channel_index * audio_frame.channel_stride_in_bytes); + uint8* pcm_data = PCMData + channel_index * sizeof(int16); + + for (int32 sample_index = 0; sample_index < audio_frame.no_samples; ++sample_index) + { + // normalize and convert float to int16 + int32 sample_int32 = FMath::RoundToInt(*channel_data / (no_extra_channels+1) * 32767.0f); + // perform clamp between different integer types + int16 sample = sample_int32 < INT16_MIN ? INT16_MIN : sample_int32 > INT16_MAX ? 
INT16_MAX : sample_int32; + + pcm_data[0] = sample & 0xff; + pcm_data[1] = (sample >> 8) & 0xff; + + ++channel_data; + pcm_data += requested_no_channels * sizeof(int16); + } + } + } + + else if (requested_no_channels > audio_frame.no_channels) + { + // Copy common channels + + // Convert to PCM, taking care of any normalization + for (int32 channel_index = 0; channel_index < audio_frame.no_channels; ++channel_index) + { + const float* channel_data = reinterpret_cast(reinterpret_cast(audio_frame.p_data) + channel_index * audio_frame.channel_stride_in_bytes); + uint8* pcm_data = PCMData + channel_index * sizeof(int16); + + for (int32 sample_index = 0; sample_index < audio_frame.no_samples; ++sample_index) + { + // normalize and convert float to int16 + int32 sample_int32 = FMath::RoundToInt(*channel_data * 32767.0f); + // perform clamp between different integer types + int16 sample = sample_int32 < INT16_MIN ? INT16_MIN : sample_int32 > INT16_MAX ? INT16_MAX : sample_int32; + + pcm_data[0] = sample & 0xff; + pcm_data[1] = (sample >> 8) & 0xff; + + ++channel_data; + pcm_data += requested_no_channels * sizeof(int16); + } + } + + // Average source channels to duplicate to extra channels + + for (int32 sample_index = 0; sample_index < audio_frame.no_samples; ++sample_index) + { + float sample_value = 0.f; + for (int32 src_channel_index = 0; src_channel_index < audio_frame.no_channels; ++src_channel_index) + { + const float* src_channel_data = reinterpret_cast(reinterpret_cast(audio_frame.p_data) + src_channel_index * audio_frame.channel_stride_in_bytes); + sample_value += src_channel_data[sample_index]; + } + + // normalize and convert float to int16 + int32 sample_int32 = FMath::RoundToInt(sample_value / audio_frame.no_channels * 32767.0f); + // perform clamp between different integer types + int16 sample = sample_int32 < INT16_MIN ? INT16_MIN : sample_int32 > INT16_MAX ? 
INT16_MAX : sample_int32; + + for (int32 dst_channel_index = audio_frame.no_channels; dst_channel_index < requested_no_channels; ++dst_channel_index) + { + uint8* pcm_data = PCMData + dst_channel_index * sizeof(int16) + sample_index * requested_no_channels * sizeof(int16); + pcm_data[0] = sample & 0xff; + pcm_data[1] = (sample >> 8) & 0xff; + } + } + } + + samples_generated = audio_frame.no_samples * requested_no_channels; + + // clean up our audio frame + NDIlib_framesync_free_audio(p_framesync_instance, &audio_frame); + } + else + { + const int32 available_samples = FMath::Min(128 * requested_no_channels, SamplesNeeded); + + FMemory::Memset(PCMData, 0, available_samples * sizeof(int16)); + + samples_generated = available_samples; + } + } + + return samples_generated; +} + +int32 UNDIMediaReceiver::GetAudioChannels() +{ + FScopeLock Lock(&AudioSyncContext); + + int32 no_channels = 0; + + if ((p_framesync_instance != nullptr) && (ConnectionInformation.bMuteAudio == false)) + { + int available_no_frames = NDIlib_framesync_audio_queue_depth(p_framesync_instance); // Samples per channel + + if (available_no_frames > 0) + { + NDIlib_audio_frame_v2_t audio_frame; + NDIlib_framesync_capture_audio(p_framesync_instance, &audio_frame, 48000, 0, 0); + no_channels = audio_frame.no_channels; + } + } + + return no_channels; +} + +/** + Attempts to register a sound wave object with this object +*/ +void UNDIMediaReceiver::RegisterAudioWave(UNDIMediaSoundWave* InAudioWave) +{ + FScopeLock Lock(&AudioSyncContext); + + // Determine if the audio wave being passed into this object is valid + if (IsValid(InAudioWave)) + { + // Only add sources which are not already a part of this receiver + if (!AudioSourceCollection.ContainsByPredicate( + [&](UNDIMediaSoundWave* Source) { return Source == InAudioWave; })) + { + + AudioSourceCollection.Add(InAudioWave); + InAudioWave->SetConnectionSource(this); + } + } +} + +/** + This will send a metadata frame to the sender + The data is expected to 
be valid XML +*/ +void UNDIMediaReceiver::SendMetadataFrame(const FString& Data) +{ + FScopeLock Lock(&MetadataSyncContext); + + if (p_receive_instance != nullptr) + { + NDIlib_metadata_frame_t metadata; + std::string DataStr(TCHAR_TO_UTF8(*Data)); + metadata.p_data = const_cast(DataStr.c_str()); + metadata.length = DataStr.length(); + metadata.timecode = FDateTime::Now().GetTimeOfDay().GetTicks(); + + NDIlib_recv_send_metadata(p_receive_instance, &metadata); + } +} + +/** + This will send a metadata frame to the sender + The data will be formatted as: +*/ +void UNDIMediaReceiver::SendMetadataFrameAttr(const FString& Element, const FString& ElementData) +{ + FString Data = "<" + Element + ">" + ElementData + ""; + SendMetadataFrame(Data); +} + +/** + This will send a metadata frame to the sender + The data will be formatted as: +*/ +void UNDIMediaReceiver::SendMetadataFrameAttrs(const FString& Element, const TMap& Attributes) +{ + FString Data = "<" + Element; + + for(const auto& Attribute : Attributes) + { + Data += " " + Attribute.Key + "=\"" + Attribute.Value + "\""; + } + + Data += "/>"; + + SendMetadataFrame(Data); +} + + +/** + This will set the up-stream tally notifications. If no streams are connected, it will automatically + send the tally state upon connection +*/ +void UNDIMediaReceiver::SendTallyInformation(const bool& IsOnPreview, const bool& IsOnProgram) +{ + // Currently unsupported +} + +/** + Attempts to immediately stop receiving frames from the connected NDI sender +*/ +void UNDIMediaReceiver::Shutdown() +{ + ENQUEUE_RENDER_COMMAND(NDIMediaReceiver_ShutdownRT)([this](FRHICommandListImmediate& RHICmdList) + { + this->RenderTarget.SafeRelease(); + this->RenderTargetDescriptor = FPooledRenderTargetDesc(); + }); + + this->OnNDIReceiverVideoCaptureEvent.Remove(VideoCaptureEventHandle); + VideoCaptureEventHandle.Reset(); + + // Unregister render thread frame end delegate lambda. 
+ FCoreDelegates::OnEndFrameRT.Remove(FrameEndRTHandle); + FrameEndRTHandle.Reset(); + + // Move audio source collection to temporary, so that cleanup can be done without + // holding the lock (which could otherwise cause a deadlock if UNDIMediaSoundWave + // is still generating PCM data) + TArray OldAudioSourceCollection; + { + FScopeLock AudioLock(&AudioSyncContext); + + OldAudioSourceCollection = MoveTemp(AudioSourceCollection); + } + + // get the number of available audio sources within the collection + int32 source_count = OldAudioSourceCollection.Num(); + + // iterate the collection of available audio sources + for (int32 iter = source_count - 1; iter >= 0; --iter) + { + // Define and Determine the validity of an item within the collection + if (auto* AudioWave = OldAudioSourceCollection[iter]) + { + // ensure that we remove the audio source reference + OldAudioSourceCollection.RemoveAt(iter); + + // Remove ourselves from the Audio wave object which is trying to render audio frames + // as fast as possible + AudioWave->SetConnectionSource(nullptr); + } + } + + { + FScopeLock RenderLock(&RenderSyncContext); + FScopeLock AudioLock(&AudioSyncContext); + FScopeLock MetadataLock(&MetadataSyncContext); + + if (p_receive_instance != nullptr) + { + if (p_framesync_instance != nullptr) + { + NDIlib_framesync_destroy(p_framesync_instance); + p_framesync_instance = nullptr; + } + + NDIlib_recv_destroy(p_receive_instance); + p_receive_instance = nullptr; + } + } + + // Reset the connection status of this object + SetIsCurrentlyConnected(false); + + this->ConnectionInformation.Reset(); + this->PerformanceData.Reset(); + this->FrameRate = FFrameRate(60, 1); + this->Resolution = FIntPoint(0, 0); + this->Timecode = FTimecode(0, FrameRate, true, true); +} + +/** + Remove the AudioWave object from this object (if it was previously registered) + + @param InAudioWave An NDIMediaSoundWave object registered with this object +*/ +void 
UNDIMediaReceiver::UnregisterAudioWave(UNDIMediaSoundWave* InAudioWave) +{ + FScopeLock Lock(&AudioSyncContext); + + // Determine if the audio wave being passed into this object is valid + if (IsValid(InAudioWave)) + { + // We don't care about the order of the collection, + // we only care to remove the object as fast as possible + this->AudioSourceCollection.RemoveSwap(InAudioWave); + } +} + +/** + Updates the DynamicMaterial with the VideoTexture of this object +*/ +void UNDIMediaReceiver::UpdateMaterialTexture(UMaterialInstanceDynamic* MaterialInstance, FString ParameterName) +{ + // Ensure that both the material instance and the video texture are valid + if (IsValid(MaterialInstance)) + { + if (IsValid(this->VideoTexture)) + { + // Call the function to set the texture parameter with the proper texture + MaterialInstance->SetTextureParameterValue(FName(*ParameterName), this->VideoTexture); + } + else if (IsValid(this->InternalVideoTexture)) + { + // Call the function to set the texture parameter with the proper texture + MaterialInstance->SetTextureParameterValue(FName(*ParameterName), this->InternalVideoTexture); + } + } +} + +/** + Called before destroying the object. This is called immediately upon deciding to destroy the object, + to allow the object to begin an asynchronous cleanup process. + */ +void UNDIMediaReceiver::BeginDestroy() +{ + // Call the shutdown procedure here. + this->Shutdown(); + + // Call the base implementation of 'BeginDestroy' + Super::BeginDestroy(); +} + +/** + Attempts to capture a video frame from the connected source. If a new frame is captured, broadcast it to + interested receivers through the capture event. +*/ +bool UNDIMediaReceiver::CaptureConnectedVideo() +{ + // This function is called on the Engine's Main Rendering Thread. Be very careful when doing stuff here. + // Make sure things are done quick and efficient. 
+ + // Ensure thread safety + FScopeLock Lock(&RenderSyncContext); + + bool bHaveCaptured = false; + + // check for our frame sync object and that we are actually connected to the end point + if ((p_framesync_instance != nullptr) && (ConnectionInformation.bMuteVideo == false)) + { + // Using a frame-sync we can always get data which is the magic and it will adapt + // to the frame-rate that it is being called with. + NDIlib_video_frame_v2_t video_frame; + NDIlib_framesync_capture_video(p_framesync_instance, &video_frame, NDIlib_frame_format_type_progressive); + + // Update our Performance Metrics + GatherPerformanceMetrics(); + + if (video_frame.p_data) + { + // Ensure that we inform all those interested when the stream starts up + SetIsCurrentlyConnected(true); + + // Update the Framerate, if it has changed + this->FrameRate.Numerator = video_frame.frame_rate_N; + this->FrameRate.Denominator = video_frame.frame_rate_D; + + // Update the Resolution + this->Resolution.X = video_frame.xres; + this->Resolution.Y = video_frame.yres; + + if (bSyncTimecodeToSource) + { + int64_t SourceTime = video_frame.timecode % 864000000000; // Modulo the number of 100ns intervals in 24 hours + // Update the timecode from the current 'SourceTime' value + this->Timecode = FTimecode::FromTimespan(FTimespan::FromSeconds(SourceTime / (float)1e+7), FrameRate, + FTimecode::IsDropFormatTimecodeSupported(FrameRate), + true // use roll-over timecode + ); + } + else + { + int64_t SystemTime = FDateTime::Now().GetTimeOfDay().GetTicks(); + // Update the timecode from the current 'SystemTime' value + this->Timecode = FTimecode::FromTimespan(FTimespan::FromSeconds(SystemTime / (float)1e+7), FrameRate, + FTimecode::IsDropFormatTimecodeSupported(FrameRate), + true // use roll-over timecode + ); + } + + // Redraw if: + // - timestamp is undefined, or + // - timestamp has changed, or + // - frame format type has changed (e.g. 
different field) + if ((video_frame.timestamp == NDIlib_recv_timestamp_undefined) || + (video_frame.timestamp != LastFrameTimestamp) || + (video_frame.frame_format_type != LastFrameFormatType)) + { + bHaveCaptured = true; + + LastFrameTimestamp = video_frame.timestamp; + LastFrameFormatType = video_frame.frame_format_type; + + OnNDIReceiverVideoCaptureEvent.Broadcast(this, video_frame); + + OnReceiverVideoReceived.Broadcast(this); + + if (video_frame.p_metadata) + { + FString Data(UTF8_TO_TCHAR(video_frame.p_metadata)); + OnReceiverMetaDataReceived.Broadcast(this, Data, true); + } + } + } + + // Release the video. You could keep the frame if you want and release it later. + NDIlib_framesync_free_video(p_framesync_instance, &video_frame); + } + + return bHaveCaptured; +} + + +/** + Attempts to capture an audio frame from the connected source. If a new frame is captured, broadcast it to + interested receivers through the capture event. +*/ +bool UNDIMediaReceiver::CaptureConnectedAudio() +{ + FScopeLock Lock(&AudioSyncContext); + + bool bHaveCaptured = false; + + if ((p_framesync_instance != nullptr) && (ConnectionInformation.bMuteAudio == false)) + { + int no_samples = NDIlib_framesync_audio_queue_depth(p_framesync_instance); + + // Using a frame-sync we can always get data which is the magic and it will adapt + // to the frame-rate that it is being called with. 
+ NDIlib_audio_frame_v2_t audio_frame; + NDIlib_framesync_capture_audio(p_framesync_instance, &audio_frame, 0, 0, no_samples); + + if (audio_frame.p_data) + { + // Ensure that we inform all those interested when the stream starts up + SetIsCurrentlyConnected(true); + + const int32 available_samples = audio_frame.no_samples * audio_frame.no_channels; + + if (available_samples > 0) + { + bHaveCaptured = true; + + OnNDIReceiverAudioCaptureEvent.Broadcast(this, audio_frame); + + OnReceiverAudioReceived.Broadcast(this); + } + } + + // Release the audio frame + NDIlib_framesync_free_audio(p_framesync_instance, &audio_frame); + } + + return bHaveCaptured; +} + + +bool UNDIMediaReceiver::CaptureConnectedMetadata() +{ + FScopeLock Lock(&MetadataSyncContext); + + bool bHaveCaptured = false; + + if (p_receive_instance != nullptr) + { + NDIlib_metadata_frame_t metadata; + NDIlib_frame_type_e frame_type = NDIlib_recv_capture_v3(p_receive_instance, nullptr, nullptr, &metadata, 0); + if (frame_type == NDIlib_frame_type_metadata) + { + if (metadata.p_data) + { + // Ensure that we inform all those interested when the stream starts up + SetIsCurrentlyConnected(true); + + if (metadata.length > 0) + { + bHaveCaptured = true; + + OnNDIReceiverMetadataCaptureEvent.Broadcast(this, metadata); + + FString Data(UTF8_TO_TCHAR(metadata.p_data)); + OnReceiverMetaDataReceived.Broadcast(this, Data, false); + } + } + + NDIlib_recv_free_metadata(p_receive_instance, &metadata); + } + } + + return bHaveCaptured; +} + + +void UNDIMediaReceiver::SetIsCurrentlyConnected(bool bConnected) +{ + if (bConnected != bIsCurrentlyConnected) + { + FScopeLock Lock(&ConnectionSyncContext); + + if (bConnected != bIsCurrentlyConnected) + { + bIsCurrentlyConnected = bConnected; + + if (bConnected == true) + { + if (OnNDIReceiverConnectedEvent.IsBound()) + { + AsyncTask(ENamedThreads::GameThread, [&]() { + // Broadcast the event + OnNDIReceiverConnectedEvent.Broadcast(this); + }); + } + } + else + { + if 
(OnNDIReceiverDisconnectedEvent.IsBound()) + { + AsyncTask(ENamedThreads::GameThread, [&]() { + // Broadcast the event + OnNDIReceiverDisconnectedEvent.Broadcast(this); + }); + } + } + } + } +} + + +/** + Attempts to immediately update the 'VideoTexture' object with the last capture video frame + from the connected source +*/ +FTextureRHIRef UNDIMediaReceiver::DisplayFrame(const NDIlib_video_frame_v2_t& video_frame) +{ + // we need a command list to work with + FRHICommandListImmediate& RHICmdList = FRHICommandListExecutor::GetImmediateCommandList(); + + // Actually draw the video frame from cpu to gpu + switch(video_frame.frame_format_type) + { + case NDIlib_frame_format_type_progressive: + if(video_frame.FourCC == NDIlib_FourCC_video_type_UYVY) + return DrawProgressiveVideoFrame(RHICmdList, video_frame); + else if(video_frame.FourCC == NDIlib_FourCC_video_type_UYVA) + return DrawProgressiveVideoFrameAlpha(RHICmdList, video_frame); + break; + case NDIlib_frame_format_type_field_0: + case NDIlib_frame_format_type_field_1: + if(video_frame.FourCC == NDIlib_FourCC_video_type_UYVY) + return DrawInterlacedVideoFrame(RHICmdList, video_frame); + else if(video_frame.FourCC == NDIlib_FourCC_video_type_UYVA) + return DrawInterlacedVideoFrameAlpha(RHICmdList, video_frame); + break; + } + + return nullptr; +} + +/** + Perform the color conversion (if any) and bit copy from the gpu +*/ +FTextureRHIRef UNDIMediaReceiver::DrawProgressiveVideoFrame(FRHICommandListImmediate& RHICmdList, const NDIlib_video_frame_v2_t& Result) +{ + // Ensure thread safety + FScopeLock Lock(&RenderSyncContext); + + FTextureRHIRef TargetableTexture; + + // check for our frame sync object and that we are actually connected to the end point + if (p_framesync_instance != nullptr) + { + // Initialize the frame size parameter + FIntPoint FrameSize = FIntPoint(Result.xres, Result.yres); + + if (!RenderTarget.IsValid() || !RenderTargetDescriptor.IsValid() || + RenderTargetDescriptor.GetSize() != 
FIntVector(FrameSize.X, FrameSize.Y, 0) || + DrawMode != EDrawMode::Progressive) + { + // Create the RenderTarget descriptor + RenderTargetDescriptor = FPooledRenderTargetDesc::Create2DDesc( + FrameSize, PF_B8G8R8A8, FClearValueBinding::None, TexCreate_None, TexCreate_RenderTargetable | TexCreate_SRGB, false); + + // Update the shader resource for the 'SourceTexture' + // The source texture will be given UYVY data, so make it half-width + const FRHITextureCreateDesc CreateDesc = FRHITextureCreateDesc::Create2D(TEXT("NDIMediaReceiverProgressiveSourceTexture")) + .SetExtent(FrameSize.X / 2, FrameSize.Y) + .SetFormat(PF_B8G8R8A8) + .SetNumMips(1) + .SetFlags(ETextureCreateFlags::RenderTargetable | ETextureCreateFlags::Dynamic); + + SourceTexture = RHICreateTexture(CreateDesc); + + // Find a free target-able texture from the render pool + GRenderTargetPool.FindFreeElement(RHICmdList, RenderTargetDescriptor, RenderTarget, TEXT("NDIIO")); + + DrawMode = EDrawMode::Progressive; + } + + TargetableTexture = RenderTarget->GetRHI(); + + // Initialize the Graphics Pipeline State Object + FGraphicsPipelineStateInitializer GraphicsPSOInit; + + // Initialize the Render pass with the conversion texture + FRHITexture* ConversionTexture = TargetableTexture.GetReference(); + FRHIRenderPassInfo RPInfo(ConversionTexture, ERenderTargetActions::DontLoad_Store); + + // configure media shaders + FGlobalShaderMap* ShaderMap = GetGlobalShaderMap(GMaxRHIFeatureLevel); + + // construct the shaders + TShaderMapRef VertexShader(ShaderMap); + TShaderMapRef ConvertShader(ShaderMap); + +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later + FBufferRHIRef VertexBuffer = CreateTempMediaVertexBuffer(RHICmdList); +#else + FBufferRHIRef VertexBuffer = CreateTempMediaVertexBuffer(); +#endif + + // Needs to be called *before* ApplyCachedRenderTargets, since BeginRenderPass is caching the render targets. 
+ RHICmdList.BeginRenderPass(RPInfo, TEXT("NDI Recv Color Conversion")); + + // do as it suggests + RHICmdList.ApplyCachedRenderTargets(GraphicsPSOInit); + + // set the state objects + GraphicsPSOInit.DepthStencilState = TStaticDepthStencilState::GetRHI(); + GraphicsPSOInit.RasterizerState = TStaticRasterizerState<>::GetRHI(); + GraphicsPSOInit.BlendState = TStaticBlendStateWriteMask::GetRHI(); + // perform binding operations for the shaders to be used + GraphicsPSOInit.BoundShaderState.VertexDeclarationRHI = GMediaVertexDeclaration.VertexDeclarationRHI; + GraphicsPSOInit.BoundShaderState.VertexShaderRHI = VertexShader.GetVertexShader(); + GraphicsPSOInit.BoundShaderState.PixelShaderRHI = ConvertShader.GetPixelShader(); + // Going to draw triangle strips + GraphicsPSOInit.PrimitiveType = PT_TriangleStrip; + + // Ensure the pipeline state is set to the one we've configured + SetGraphicsPipelineState(RHICmdList, GraphicsPSOInit, 0); + + // set the stream source + RHICmdList.SetStreamSource(0, VertexBuffer, 0); + + // set the texture parameter of the conversion shader + FNDIIOShaderUYVYtoBGRAPS::Params Params(SourceTexture, SourceTexture, FrameSize, + FVector2D(0, 0), FVector2D(1, 1), + bPerformsRGBtoLinear ? 
FNDIIOShaderPS::EColorCorrection::sRGBToLinear : FNDIIOShaderPS::EColorCorrection::None, + FVector2D(0.f, 1.f)); + ConvertShader->SetParameters(RHICmdList, Params); + + // Create the update region structure + FUpdateTextureRegion2D Region(0, 0, 0, 0, FrameSize.X/2, FrameSize.Y); + + // Set the Pixel data of the NDI Frame to the SourceTexture + RHIUpdateTexture2D(SourceTexture, 0, Region, Result.line_stride_in_bytes, (uint8*&)Result.p_data); + + // begin our drawing + { + RHICmdList.SetViewport(0, 0, 0.0f, FrameSize.X, FrameSize.Y, 1.0f); + RHICmdList.DrawPrimitive(0, 2, 1); + } + + RHICmdList.EndRenderPass(); + } + + return TargetableTexture; +} + +FTextureRHIRef UNDIMediaReceiver::DrawProgressiveVideoFrameAlpha(FRHICommandListImmediate& RHICmdList, const NDIlib_video_frame_v2_t& Result) +{ + // Ensure thread safety + FScopeLock Lock(&RenderSyncContext); + + FTextureRHIRef TargetableTexture; + + // check for our frame sync object and that we are actually connected to the end point + if (p_framesync_instance != nullptr) + { + // Initialize the frame size parameter + FIntPoint FrameSize = FIntPoint(Result.xres, Result.yres); + + if (!RenderTarget.IsValid() || !RenderTargetDescriptor.IsValid() || + RenderTargetDescriptor.GetSize() != FIntVector(FrameSize.X, FrameSize.Y, 0) || + DrawMode != EDrawMode::ProgressiveAlpha) + { + // Create the RenderTarget descriptor + RenderTargetDescriptor = FPooledRenderTargetDesc::Create2DDesc( + FrameSize, PF_B8G8R8A8, FClearValueBinding::None, TexCreate_None, TexCreate_RenderTargetable | TexCreate_SRGB, false); + + // Update the shader resource for the 'SourceTexture' + // The source texture will be given UYVY data, so make it half-width + const FRHITextureCreateDesc CreateDesc = FRHITextureCreateDesc::Create2D(TEXT("NDIMediaReceiverProgressiveAlphaSourceTexture")) + .SetExtent(FrameSize.X / 2, FrameSize.Y) + .SetFormat(PF_B8G8R8A8) + .SetNumMips(1) + .SetFlags(ETextureCreateFlags::RenderTargetable | ETextureCreateFlags::Dynamic); + + 
SourceTexture = RHICreateTexture(CreateDesc); + + const FRHITextureCreateDesc CreateAlphaDesc = FRHITextureCreateDesc::Create2D(TEXT("NDIMediaReceiverProgressiveAlphaSourceAlphaTexture")) + .SetExtent(FrameSize.X, FrameSize.Y) + .SetFormat(PF_A8) + .SetNumMips(1) + .SetFlags(ETextureCreateFlags::RenderTargetable | ETextureCreateFlags::Dynamic); + + SourceAlphaTexture = RHICreateTexture(CreateAlphaDesc); + + // Find a free target-able texture from the render pool + GRenderTargetPool.FindFreeElement(RHICmdList, RenderTargetDescriptor, RenderTarget, TEXT("NDIIO")); + + DrawMode = EDrawMode::ProgressiveAlpha; + } + + TargetableTexture = RenderTarget->GetRHI(); + + // Initialize the Graphics Pipeline State Object + FGraphicsPipelineStateInitializer GraphicsPSOInit; + + // Initialize the Render pass with the conversion texture + FRHITexture* ConversionTexture = TargetableTexture.GetReference(); + FRHIRenderPassInfo RPInfo(ConversionTexture, ERenderTargetActions::DontLoad_Store); + + // configure media shaders + FGlobalShaderMap* ShaderMap = GetGlobalShaderMap(GMaxRHIFeatureLevel); + + // construct the shaders + TShaderMapRef VertexShader(ShaderMap); + TShaderMapRef ConvertShader(ShaderMap); + +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later + FBufferRHIRef VertexBuffer = CreateTempMediaVertexBuffer(RHICmdList); +#else + FBufferRHIRef VertexBuffer = CreateTempMediaVertexBuffer(); +#endif + + // Needs to be called *before* ApplyCachedRenderTargets, since BeginRenderPass is caching the render targets. 
+ RHICmdList.BeginRenderPass(RPInfo, TEXT("NDI Recv Color Conversion")); + + // do as it suggests + RHICmdList.ApplyCachedRenderTargets(GraphicsPSOInit); + + // set the state objects + GraphicsPSOInit.DepthStencilState = TStaticDepthStencilState::GetRHI(); + GraphicsPSOInit.RasterizerState = TStaticRasterizerState<>::GetRHI(); + GraphicsPSOInit.BlendState = TStaticBlendStateWriteMask::GetRHI(); + // perform binding operations for the shaders to be used + GraphicsPSOInit.BoundShaderState.VertexDeclarationRHI = GMediaVertexDeclaration.VertexDeclarationRHI; + GraphicsPSOInit.BoundShaderState.VertexShaderRHI = VertexShader.GetVertexShader(); + GraphicsPSOInit.BoundShaderState.PixelShaderRHI = ConvertShader.GetPixelShader(); + // Going to draw triangle strips + GraphicsPSOInit.PrimitiveType = PT_TriangleStrip; + + // Ensure the pipeline state is set to the one we've configured + SetGraphicsPipelineState(RHICmdList, GraphicsPSOInit, 0); + + // set the stream source + RHICmdList.SetStreamSource(0, VertexBuffer, 0); + + // set the texture parameter of the conversion shader + //bool bHasAlpha = (Result.FourCC == NDIlib_FourCC_video_type_UYVA) ? true : false; + FNDIIOShaderUYVAtoBGRAPS::Params Params(SourceTexture, SourceAlphaTexture, FrameSize, + FVector2D(0, 0), FVector2D(1, 1), + bPerformsRGBtoLinear ? 
FNDIIOShaderPS::EColorCorrection::sRGBToLinear : FNDIIOShaderPS::EColorCorrection::None, + FVector2D(0.f, 1.f)); + ConvertShader->SetParameters(RHICmdList, Params); + + // Create the update region structure + FUpdateTextureRegion2D Region(0, 0, 0, 0, FrameSize.X/2, FrameSize.Y); + FUpdateTextureRegion2D AlphaRegion(0, 0, 0, 0, FrameSize.X, FrameSize.Y); + + // Set the Pixel data of the NDI Frame to the SourceTexture + RHIUpdateTexture2D(SourceTexture, 0, Region, Result.line_stride_in_bytes, (uint8*&)Result.p_data); + RHIUpdateTexture2D(SourceAlphaTexture, 0, AlphaRegion, FrameSize.X, ((uint8*&)Result.p_data)+FrameSize.Y*Result.line_stride_in_bytes); + + // begin our drawing + { + RHICmdList.SetViewport(0, 0, 0.0f, FrameSize.X, FrameSize.Y, 1.0f); + RHICmdList.DrawPrimitive(0, 2, 1); + } + + RHICmdList.EndRenderPass(); + } + + return TargetableTexture; +} + + +FTextureRHIRef UNDIMediaReceiver::DrawInterlacedVideoFrame(FRHICommandListImmediate& RHICmdList, const NDIlib_video_frame_v2_t& Result) +{ + // Ensure thread safety + FScopeLock Lock(&RenderSyncContext); + + FTextureRHIRef TargetableTexture; + + // check for our frame sync object and that we are actually connected to the end point + if (p_framesync_instance != nullptr) + { + // Initialize the frame size parameter + FIntPoint FieldSize = FIntPoint(Result.xres, Result.yres); + FIntPoint FrameSize = FIntPoint(Result.xres, Result.yres*2); + + if (!RenderTarget.IsValid() || !RenderTargetDescriptor.IsValid() || + RenderTargetDescriptor.GetSize() != FIntVector(FrameSize.X, FrameSize.Y, 0) || + DrawMode != EDrawMode::Interlaced) + { + // Create the RenderTarget descriptor + RenderTargetDescriptor = FPooledRenderTargetDesc::Create2DDesc( + FrameSize, PF_B8G8R8A8, FClearValueBinding::None, TexCreate_None, TexCreate_RenderTargetable | TexCreate_SRGB, false); + + // Update the shader resource for the 'SourceTexture' + // The source texture will be given UYVY data, so make it half-width + const FRHITextureCreateDesc 
CreateDesc = FRHITextureCreateDesc::Create2D(TEXT("NDIMediaReceiverInterlacedSourceTexture")) + .SetExtent(FieldSize.X / 2, FieldSize.Y) + .SetFormat(PF_B8G8R8A8) + .SetNumMips(1) + .SetFlags(ETextureCreateFlags::RenderTargetable | ETextureCreateFlags::Dynamic); + + SourceTexture = RHICreateTexture(CreateDesc); + + // Find a free target-able texture from the render pool + GRenderTargetPool.FindFreeElement(RHICmdList, RenderTargetDescriptor, RenderTarget, TEXT("NDIIO")); + + DrawMode = EDrawMode::Interlaced; + } + + TargetableTexture = RenderTarget->GetRHI(); + + // Initialize the Graphics Pipeline State Object + FGraphicsPipelineStateInitializer GraphicsPSOInit; + + // Initialize the Render pass with the conversion texture + FRHITexture* ConversionTexture = TargetableTexture.GetReference(); + FRHIRenderPassInfo RPInfo(ConversionTexture, ERenderTargetActions::DontLoad_Store); + + // configure media shaders + FGlobalShaderMap* ShaderMap = GetGlobalShaderMap(GMaxRHIFeatureLevel); + + // construct the shaders + TShaderMapRef VertexShader(ShaderMap); + TShaderMapRef ConvertShader(ShaderMap); + + float FieldUVOffset = (Result.frame_format_type == NDIlib_frame_format_type_field_1) ? 0.5f/Result.yres : 0.f; +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later + FBufferRHIRef VertexBuffer = CreateTempMediaVertexBuffer(RHICmdList, 0.f, 1.f, 0.f-FieldUVOffset, 1.f-FieldUVOffset); +#else + FBufferRHIRef VertexBuffer = CreateTempMediaVertexBuffer(0.f, 1.f, 0.f-FieldUVOffset, 1.f-FieldUVOffset); +#endif + + // Needs to be called *before* ApplyCachedRenderTargets, since BeginRenderPass is caching the render targets. 
+ RHICmdList.BeginRenderPass(RPInfo, TEXT("NDI Recv Color Conversion")); + + // do as it suggests + RHICmdList.ApplyCachedRenderTargets(GraphicsPSOInit); + + // set the state objects + GraphicsPSOInit.DepthStencilState = TStaticDepthStencilState::GetRHI(); + GraphicsPSOInit.RasterizerState = TStaticRasterizerState<>::GetRHI(); + GraphicsPSOInit.BlendState = TStaticBlendStateWriteMask::GetRHI(); + // perform binding operations for the shaders to be used + GraphicsPSOInit.BoundShaderState.VertexDeclarationRHI = GMediaVertexDeclaration.VertexDeclarationRHI; + GraphicsPSOInit.BoundShaderState.VertexShaderRHI = VertexShader.GetVertexShader(); + GraphicsPSOInit.BoundShaderState.PixelShaderRHI = ConvertShader.GetPixelShader(); + // Going to draw triangle strips + GraphicsPSOInit.PrimitiveType = PT_TriangleStrip; + + // Ensure the pipeline state is set to the one we've configured + SetGraphicsPipelineState(RHICmdList, GraphicsPSOInit, 0); + + // set the stream source + RHICmdList.SetStreamSource(0, VertexBuffer, 0); + + // set the texture parameter of the conversion shader + FNDIIOShaderUYVYtoBGRAPS::Params Params(SourceTexture, SourceTexture, FrameSize, + FVector2D(0, 0), FVector2D(1, 1), + bPerformsRGBtoLinear ? 
FNDIIOShaderPS::EColorCorrection::sRGBToLinear : FNDIIOShaderPS::EColorCorrection::None, + FVector2D(0.f, 1.f)); + ConvertShader->SetParameters(RHICmdList, Params); + + // Create the update region structure + FUpdateTextureRegion2D Region(0, 0, 0, 0, FieldSize.X/2, FieldSize.Y); + + // Set the Pixel data of the NDI Frame to the SourceTexture + RHIUpdateTexture2D(SourceTexture, 0, Region, Result.line_stride_in_bytes, (uint8*&)Result.p_data); + + // begin our drawing + { + RHICmdList.SetViewport(0, 0, 0.0f, FrameSize.X, FrameSize.Y, 1.0f); + RHICmdList.DrawPrimitive(0, 2, 1); + } + + RHICmdList.EndRenderPass(); + } + + return TargetableTexture; +} + +FTextureRHIRef UNDIMediaReceiver::DrawInterlacedVideoFrameAlpha(FRHICommandListImmediate& RHICmdList, const NDIlib_video_frame_v2_t& Result) +{ + // Ensure thread safety + FScopeLock Lock(&RenderSyncContext); + + FTextureRHIRef TargetableTexture; + + // check for our frame sync object and that we are actually connected to the end point + if (p_framesync_instance != nullptr) + { + // Initialize the frame size parameter + FIntPoint FieldSize = FIntPoint(Result.xres, Result.yres); + FIntPoint FrameSize = FIntPoint(Result.xres, Result.yres*2); + + if (!RenderTarget.IsValid() || !RenderTargetDescriptor.IsValid() || + RenderTargetDescriptor.GetSize() != FIntVector(FrameSize.X, FrameSize.Y, 0) || + DrawMode != EDrawMode::InterlacedAlpha) + { + // Create the RenderTarget descriptor + RenderTargetDescriptor = FPooledRenderTargetDesc::Create2DDesc( + FrameSize, PF_B8G8R8A8, FClearValueBinding::None, TexCreate_None, TexCreate_RenderTargetable | TexCreate_SRGB, false); + + // Update the shader resource for the 'SourceTexture' + // The source texture will be given UYVY data, so make it half-width + const FRHITextureCreateDesc CreateDesc = FRHITextureCreateDesc::Create2D(TEXT("NDIMediaReceiverInterlacedAlphaSourceTexture")) + .SetExtent(FieldSize.X / 2, FieldSize.Y) + .SetFormat(PF_B8G8R8A8) + .SetNumMips(1) + 
.SetFlags(ETextureCreateFlags::RenderTargetable | ETextureCreateFlags::Dynamic); + + SourceTexture = RHICreateTexture(CreateDesc); + + const FRHITextureCreateDesc CreateAlphaDesc = FRHITextureCreateDesc::Create2D(TEXT("NDIMediaReceiverInterlacedAlphaSourceAlphaTexture")) + .SetExtent(FieldSize.X, FieldSize.Y) + .SetFormat(PF_A8) + .SetNumMips(1) + .SetFlags(ETextureCreateFlags::RenderTargetable | ETextureCreateFlags::Dynamic); + + SourceAlphaTexture = RHICreateTexture(CreateAlphaDesc); + + // Find a free target-able texture from the render pool + GRenderTargetPool.FindFreeElement(RHICmdList, RenderTargetDescriptor, RenderTarget, TEXT("NDIIO")); + + DrawMode = EDrawMode::InterlacedAlpha; + } + + TargetableTexture = RenderTarget->GetRHI(); + + // Initialize the Graphics Pipeline State Object + FGraphicsPipelineStateInitializer GraphicsPSOInit; + + // Initialize the Render pass with the conversion texture + FRHITexture* ConversionTexture = TargetableTexture.GetReference(); + FRHIRenderPassInfo RPInfo(ConversionTexture, ERenderTargetActions::DontLoad_Store); + + // configure media shaders + FGlobalShaderMap* ShaderMap = GetGlobalShaderMap(GMaxRHIFeatureLevel); + + // construct the shaders + TShaderMapRef VertexShader(ShaderMap); + TShaderMapRef ConvertShader(ShaderMap); + + float FieldUVOffset = (Result.frame_format_type == NDIlib_frame_format_type_field_1) ? 0.5f/Result.yres : 0.f; +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later + FBufferRHIRef VertexBuffer = CreateTempMediaVertexBuffer(RHICmdList, 0.f, 1.f, 0.f-FieldUVOffset, 1.f-FieldUVOffset); +#else + FBufferRHIRef VertexBuffer = CreateTempMediaVertexBuffer(0.f, 1.f, 0.f-FieldUVOffset, 1.f-FieldUVOffset); +#endif + + // Needs to be called *before* ApplyCachedRenderTargets, since BeginRenderPass is caching the render targets. 
+ RHICmdList.BeginRenderPass(RPInfo, TEXT("NDI Recv Color Conversion")); + + // do as it suggests + RHICmdList.ApplyCachedRenderTargets(GraphicsPSOInit); + + // set the state objects + GraphicsPSOInit.DepthStencilState = TStaticDepthStencilState::GetRHI(); + GraphicsPSOInit.RasterizerState = TStaticRasterizerState<>::GetRHI(); + GraphicsPSOInit.BlendState = TStaticBlendStateWriteMask::GetRHI(); + // perform binding operations for the shaders to be used + GraphicsPSOInit.BoundShaderState.VertexDeclarationRHI = GMediaVertexDeclaration.VertexDeclarationRHI; + GraphicsPSOInit.BoundShaderState.VertexShaderRHI = VertexShader.GetVertexShader(); + GraphicsPSOInit.BoundShaderState.PixelShaderRHI = ConvertShader.GetPixelShader(); + // Going to draw triangle strips + GraphicsPSOInit.PrimitiveType = PT_TriangleStrip; + + // Ensure the pipeline state is set to the one we've configured + SetGraphicsPipelineState(RHICmdList, GraphicsPSOInit, 0); + + // set the stream source + RHICmdList.SetStreamSource(0, VertexBuffer, 0); + + // set the texture parameter of the conversion shader + FNDIIOShaderUYVAtoBGRAPS::Params Params(SourceTexture, SourceAlphaTexture, FrameSize, + FVector2D(0, 0), FVector2D(1, 1), + bPerformsRGBtoLinear ? 
FNDIIOShaderPS::EColorCorrection::sRGBToLinear : FNDIIOShaderPS::EColorCorrection::None, + FVector2D(0.f, 1.f)); + ConvertShader->SetParameters(RHICmdList, Params); + + // Create the update region structure + FUpdateTextureRegion2D Region(0, 0, 0, 0, FieldSize.X/2, FieldSize.Y); + FUpdateTextureRegion2D AlphaRegion(0, 0, 0, 0, FieldSize.X, FieldSize.Y); + + // Set the Pixel data of the NDI Frame to the SourceTexture + RHIUpdateTexture2D(SourceTexture, 0, Region, Result.line_stride_in_bytes, (uint8*&)Result.p_data); + RHIUpdateTexture2D(SourceAlphaTexture, 0, AlphaRegion, FieldSize.X, ((uint8*&)Result.p_data)+FieldSize.Y*Result.line_stride_in_bytes); + + // begin our drawing + { + RHICmdList.SetViewport(0, 0, 0.0f, FrameSize.X, FrameSize.Y, 1.0f); + RHICmdList.DrawPrimitive(0, 2, 1); + } + + RHICmdList.EndRenderPass(); + } + + return TargetableTexture; +} + +/** + Attempts to gather the performance metrics of the connection to the remote source +*/ +void UNDIMediaReceiver::GatherPerformanceMetrics() +{ + // provide references to store the values + NDIlib_recv_performance_t stable_performance; + NDIlib_recv_performance_t dropped_performance; + + // get the performance values from the SDK + NDIlib_recv_get_performance(p_receive_instance, &stable_performance, &dropped_performance); + + // update our structure with the updated values + this->PerformanceData.AudioFrames = stable_performance.audio_frames; + this->PerformanceData.DroppedAudioFrames = dropped_performance.audio_frames; + this->PerformanceData.DroppedMetadataFrames = dropped_performance.metadata_frames; + this->PerformanceData.DroppedVideoFrames = dropped_performance.video_frames; + this->PerformanceData.MetadataFrames = stable_performance.metadata_frames; + this->PerformanceData.VideoFrames = stable_performance.video_frames; +} + +/** + Returns the current performance data of the receiver while connected to the source +*/ +const FNDIReceiverPerformanceData& UNDIMediaReceiver::GetPerformanceData() const +{ + 
return this->PerformanceData; +} + +/** + Returns a value indicating whether this object is currently connected to the sender source +*/ +const bool UNDIMediaReceiver::GetIsCurrentlyConnected() const +{ + if (p_receive_instance != nullptr) + return NDIlib_recv_get_no_connections(p_receive_instance) > 0 ? true : false; + else + return false; +} + +/** + Returns the current connection information of the connected source +*/ +const FNDIConnectionInformation& UNDIMediaReceiver::GetCurrentConnectionInformation() const +{ + return this->ConnectionInformation; +} + +/** + Returns the current timecode of the connected source +*/ +const FTimecode& UNDIMediaReceiver::GetCurrentTimecode() const +{ + return this->Timecode; +} + +/** + Set whether or not a sRGB to Linear conversion is made +*/ +void UNDIMediaReceiver::PerformsRGBToLinearConversion(bool Value) +{ + this->bPerformsRGBtoLinear = Value; +} + +/** + Returns the current framerate of the connected source +*/ +const FFrameRate& UNDIMediaReceiver::GetCurrentFrameRate() const +{ + return this->FrameRate; +} + +const FIntPoint& UNDIMediaReceiver::GetCurrentResolution() const +{ + return this->Resolution; +} + + +FString UNDIMediaReceiver::GetUrl() const +{ + if(!ConnectionInformation.SourceName.IsEmpty()) + return "ndiio://" + ConnectionInformation.SourceName; + else if(!ConnectionSetting.SourceName.IsEmpty()) + return "ndiio://" + ConnectionSetting.SourceName; + else if(!ConnectionInformation.Url.IsEmpty()) + return "ndiio://" + ConnectionInformation.Url; + else if(!ConnectionSetting.Url.IsEmpty()) + return "ndiio://" + ConnectionSetting.Url; + else + return "ndiio://"; +} + +bool UNDIMediaReceiver::GetMediaOption(const FName& Key, bool DefaultValue) const +{ + if (Key == NDIMediaOption::IsNDIMediaReceiver) { return true; } + + return Super::GetMediaOption(Key, DefaultValue); +} + +int64 UNDIMediaReceiver::GetMediaOption(const FName& Key, int64 DefaultValue) const +{ + if (Key == 
FMediaIOCoreMediaOption::FrameRateNumerator) { return FrameRate.Numerator; } + if (Key == FMediaIOCoreMediaOption::FrameRateDenominator) { return FrameRate.Denominator; } + if (Key == FMediaIOCoreMediaOption::ResolutionWidth) { return Resolution.X; } + if (Key == FMediaIOCoreMediaOption::ResolutionHeight) { return Resolution.Y; } + + return Super::GetMediaOption(Key, DefaultValue); +} + +FString UNDIMediaReceiver::GetMediaOption(const FName& Key, const FString& DefaultValue) const +{ + return Super::GetMediaOption(Key, DefaultValue); +} + +bool UNDIMediaReceiver::HasMediaOption(const FName& Key) const +{ + if ( Key == NDIMediaOption::IsNDIMediaReceiver) + { + return true; + } + + if ( Key == FMediaIOCoreMediaOption::FrameRateNumerator + || Key == FMediaIOCoreMediaOption::FrameRateDenominator + || Key == FMediaIOCoreMediaOption::ResolutionWidth + || Key == FMediaIOCoreMediaOption::ResolutionHeight) + { + return true; + } + + return Super::HasMediaOption(Key); +} + + +FTextureResource* UNDIMediaReceiver::GetVideoTextureResource() const +{ + if(IsValid(this->VideoTexture)) + return this->VideoTexture->GetResource(); + + return nullptr; +} + +FTextureResource* UNDIMediaReceiver::GetInternalVideoTextureResource() const +{ + if(IsValid(this->InternalVideoTexture)) + return this->InternalVideoTexture->GetResource(); + + return nullptr; +} + + +#if WITH_EDITORONLY_DATA + +void UNDIMediaReceiver::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) +{ + // get the name of the property which changed + FName MemberPropertyName = + (PropertyChangedEvent.MemberProperty != nullptr) ? PropertyChangedEvent.MemberProperty->GetFName() : NAME_None; + FName PropertyName = + (PropertyChangedEvent.Property != nullptr) ? 
PropertyChangedEvent.Property->GetFName() : NAME_None; + + if (MemberPropertyName == GET_MEMBER_NAME_CHECKED(UNDIMediaReceiver, ConnectionSetting)) + { + if (PropertyName == GET_MEMBER_NAME_CHECKED(FNDIConnectionInformation, SourceName)) + { + ConnectionSetting.SourceName.Split(TEXT(" "), &ConnectionSetting.MachineName, &ConnectionSetting.StreamName); + ConnectionSetting.StreamName.RemoveFromStart("("); + ConnectionSetting.StreamName.RemoveFromEnd(")"); + } + + else if (PropertyName == GET_MEMBER_NAME_CHECKED(FNDIConnectionInformation, MachineName)) + { + if ((!ConnectionSetting.MachineName.IsEmpty()) && (!ConnectionSetting.StreamName.IsEmpty())) + ConnectionSetting.SourceName = ConnectionSetting.MachineName + " (" + ConnectionSetting.StreamName + ")"; + else + ConnectionSetting.SourceName = FString(""); + } + + else if (PropertyName == GET_MEMBER_NAME_CHECKED(FNDIConnectionInformation, StreamName)) + { + if ((!ConnectionSetting.MachineName.IsEmpty()) && (!ConnectionSetting.StreamName.IsEmpty())) + ConnectionSetting.SourceName = ConnectionSetting.MachineName + " (" + ConnectionSetting.StreamName + ")"; + else + ConnectionSetting.SourceName = FString(""); + } + } + + // call the base class 'PostEditChangeProperty' + Super::PostEditChangeProperty(PropertyChangedEvent); +} + +#endif diff --git a/Plugins/NDIIO/Source/Core/Classes/Objects/Media/NDIMediaSender.cpp b/Plugins/NDIIO/Source/Core/Classes/Objects/Media/NDIMediaSender.cpp new file mode 100644 index 0000000..8efea1b --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Objects/Media/NDIMediaSender.cpp @@ -0,0 +1,1368 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include +#include + +#include + +#include "NDIShaders.h" + +#if WITH_EDITOR +#include +#endif + +#include + + +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later + +static FBufferRHIRef CreateColorVertexBuffer(FRHICommandListImmediate& RHICmdList, const FIntPoint& FitFrameSize, const FIntPoint& DrawFrameSize, bool OutputAlpha) +{ +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later + FRHIBufferCreateDesc CreateDesc = + FRHIBufferCreateDesc::Create(TEXT("VertexBufferRHI"), sizeof(FMediaElementVertex) * 4, 0, EBufferUsageFlags::Volatile | EBufferUsageFlags::VertexBuffer) + .DetermineInitialState(); + FBufferRHIRef VertexBufferRHI = RHICmdList.CreateBuffer(CreateDesc); +#else + FRHIResourceCreateInfo CreateInfo(TEXT("VertexBufferRHI")); + FBufferRHIRef VertexBufferRHI = RHICmdList.CreateVertexBuffer(sizeof(FMediaElementVertex) * 4, BUF_Volatile, CreateInfo); +#endif + void* VoidPtr = RHICmdList.LockBuffer(VertexBufferRHI, 0, sizeof(FMediaElementVertex) * 4, RLM_WriteOnly); + + FMediaElementVertex* Vertices = (FMediaElementVertex*)VoidPtr; + if (OutputAlpha == false) + { + Vertices[0].Position.Set(-1.0f, 1.0f, 1.0f, 1.0f); // Top Left + Vertices[1].Position.Set( 1.0f, 1.0f, 1.0f, 1.0f); // Top Right + Vertices[2].Position.Set(-1.0f, -1.0f, 1.0f, 1.0f); // Bottom Left + Vertices[3].Position.Set( 1.0f, -1.0f, 1.0f, 1.0f); // Bottom Right + } + else + { + Vertices[0].Position.Set(-1.0f, 1.0f, 1.0f, 1.0f); // Top Left + Vertices[1].Position.Set( 1.0f, 1.0f, 1.0f, 1.0f); // Top Right + Vertices[2].Position.Set(-1.0f, -1.0f/3.0f, 1.0f, 1.0f); // Bottom Left + Vertices[3].Position.Set( 1.0f, -1.0f/3.0f, 1.0f, 1.0f); // Bottom Right + } + + Vertices[0].TextureCoordinate.Set(0.0f, 0.0f); + Vertices[1].TextureCoordinate.Set(1.0f, 0.0f); + 
Vertices[2].TextureCoordinate.Set(0.0f, 1.0f); + Vertices[3].TextureCoordinate.Set(1.0f, 1.0f); + + RHICmdList.UnlockBuffer(VertexBufferRHI); + + return VertexBufferRHI; +} + +static FBufferRHIRef CreateAlphaEvenVertexBuffer(FRHICommandListImmediate& RHICmdList, const FIntPoint& FitFrameSize, const FIntPoint& DrawFrameSize, bool OutputAlpha) +{ +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later + FRHIBufferCreateDesc CreateDesc = + FRHIBufferCreateDesc::Create(TEXT("VertexBufferRHI"), sizeof(FMediaElementVertex) * 4, 0, EBufferUsageFlags::Volatile | EBufferUsageFlags::VertexBuffer) + .DetermineInitialState(); + FBufferRHIRef VertexBufferRHI = RHICmdList.CreateBuffer(CreateDesc); +#else + FRHIResourceCreateInfo CreateInfo(TEXT("VertexBufferRHI")); + FBufferRHIRef VertexBufferRHI = RHICmdList.CreateVertexBuffer(sizeof(FMediaElementVertex) * 4, BUF_Volatile, CreateInfo); +#endif + + void* VoidPtr = RHICmdList.LockBuffer(VertexBufferRHI, 0, sizeof(FMediaElementVertex) * 4, RLM_WriteOnly); + + FMediaElementVertex* Vertices = (FMediaElementVertex*)VoidPtr; + if (OutputAlpha == false) + { + Vertices[0].Position.Set(-1.0f, -1.0f, 1.0f, 1.0f); // Top Left + Vertices[1].Position.Set( 0.0f, -1.0f, 1.0f, 1.0f); // Top Right + Vertices[2].Position.Set(-1.0f, -1.0f, 1.0f, 1.0f); // Bottom Left + Vertices[3].Position.Set( 0.0f, -1.0f, 1.0f, 1.0f); // Bottom Right + } + else + { + Vertices[0].Position.Set(-1.0f, -1.0f/3.0f, 1.0f, 1.0f); // Top Left + Vertices[1].Position.Set( 0.0f, -1.0f/3.0f, 1.0f, 1.0f); // Top Right + Vertices[2].Position.Set(-1.0f, -1.0f, 1.0f, 1.0f); // Bottom Left + Vertices[3].Position.Set( 0.0f, -1.0f, 1.0f, 1.0f); // Bottom Right + } + + Vertices[0].TextureCoordinate.Set(0.0f, 0.0f); + Vertices[1].TextureCoordinate.Set(1.0f, 0.0f); + Vertices[2].TextureCoordinate.Set(0.0f, 1.0f); + Vertices[3].TextureCoordinate.Set(1.0f, 1.0f); + + RHICmdList.UnlockBuffer(VertexBufferRHI); + + return 
	VertexBufferRHI;
}

/**
	Creates a volatile 4-vertex triangle-strip buffer covering the RIGHT half of the target,
	used to draw the odd-numbered alpha lines of a UYVA frame (UE 5.6+ code path).
	NOTE(review): when OutputAlpha is false all four Y positions are -1.0, i.e. a zero-area
	quad so nothing is drawn — presumably intentional since this pass only matters for alpha
	output; confirm. FitFrameSize/DrawFrameSize are accepted but unused in this builder.
*/
static FBufferRHIRef CreateAlphaOddVertexBuffer(FRHICommandListImmediate& RHICmdList, const FIntPoint& FitFrameSize, const FIntPoint& DrawFrameSize, bool OutputAlpha)
{
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later
	// UE 5.6+ buffer-creation API
	FRHIBufferCreateDesc CreateDesc =
		FRHIBufferCreateDesc::Create(TEXT("VertexBufferRHI"), sizeof(FMediaElementVertex) * 4, 0, EBufferUsageFlags::Volatile | EBufferUsageFlags::VertexBuffer)
		.DetermineInitialState();
	FBufferRHIRef VertexBufferRHI = RHICmdList.CreateBuffer(CreateDesc);
#else
	FRHIResourceCreateInfo CreateInfo(TEXT("VertexBufferRHI"));
	FBufferRHIRef VertexBufferRHI = RHICmdList.CreateVertexBuffer(sizeof(FMediaElementVertex) * 4, BUF_Volatile, CreateInfo);
#endif

	void* VoidPtr = RHICmdList.LockBuffer(VertexBufferRHI, 0, sizeof(FMediaElementVertex) * 4, RLM_WriteOnly);

	FMediaElementVertex* Vertices = (FMediaElementVertex*)VoidPtr;
	if (OutputAlpha == false)
	{
		Vertices[0].Position.Set( 0.0f, -1.0f, 1.0f, 1.0f); // Top Left
		Vertices[1].Position.Set( 1.0f, -1.0f, 1.0f, 1.0f); // Top Right
		Vertices[2].Position.Set( 0.0f, -1.0f, 1.0f, 1.0f); // Bottom Left
		Vertices[3].Position.Set( 1.0f, -1.0f, 1.0f, 1.0f); // Bottom Right
	}
	else
	{
		// Alpha rows live in the bottom third of the height-extended readback texture
		Vertices[0].Position.Set( 0.0f, -1.0f/3.0f, 1.0f, 1.0f); // Top Left
		Vertices[1].Position.Set( 1.0f, -1.0f/3.0f, 1.0f, 1.0f); // Top Right
		Vertices[2].Position.Set( 0.0f, -1.0f, 1.0f, 1.0f); // Bottom Left
		Vertices[3].Position.Set( 1.0f, -1.0f, 1.0f, 1.0f); // Bottom Right
	}

	Vertices[0].TextureCoordinate.Set(0.0f, 0.0f);
	Vertices[1].TextureCoordinate.Set(1.0f, 0.0f);
	Vertices[2].TextureCoordinate.Set(0.0f, 1.0f);
	Vertices[3].TextureCoordinate.Set(1.0f, 1.0f);

	RHICmdList.UnlockBuffer(VertexBufferRHI);

	return VertexBufferRHI;
}

#elif ENGINE_MAJOR_VERSION == 5 // Before 5.3

/**
	Creates the volatile quad used for the main color (UYVY) pass, pre-5.3 RHI API.
	When alpha output is enabled the color image occupies the top two-thirds of the
	extended target (Y from 1.0 down to -1/3); otherwise it is a full-screen quad.
*/
static FBufferRHIRef CreateColorVertexBuffer(FRHICommandListImmediate& RHICmdList, const FIntPoint& FitFrameSize, const FIntPoint& DrawFrameSize, bool OutputAlpha)
{
	FRHIResourceCreateInfo CreateInfo(TEXT("VertexBufferRHI"));
	FBufferRHIRef VertexBufferRHI = RHICreateVertexBuffer(sizeof(FMediaElementVertex) * 4, BUF_Volatile, CreateInfo);

	void* VoidPtr = RHILockBuffer(VertexBufferRHI, 0, sizeof(FMediaElementVertex) * 4, RLM_WriteOnly);

	FMediaElementVertex* Vertices = (FMediaElementVertex*)VoidPtr;
	if (OutputAlpha == false)
	{
		Vertices[0].Position.Set(-1.0f, 1.0f, 1.0f, 1.0f); // Top Left
		Vertices[1].Position.Set( 1.0f, 1.0f, 1.0f, 1.0f); // Top Right
		Vertices[2].Position.Set(-1.0f, -1.0f, 1.0f, 1.0f); // Bottom Left
		Vertices[3].Position.Set( 1.0f, -1.0f, 1.0f, 1.0f); // Bottom Right
	}
	else
	{
		Vertices[0].Position.Set(-1.0f, 1.0f, 1.0f, 1.0f); // Top Left
		Vertices[1].Position.Set( 1.0f, 1.0f, 1.0f, 1.0f); // Top Right
		Vertices[2].Position.Set(-1.0f, -1.0f/3.0f, 1.0f, 1.0f); // Bottom Left
		Vertices[3].Position.Set( 1.0f, -1.0f/3.0f, 1.0f, 1.0f); // Bottom Right
	}

	Vertices[0].TextureCoordinate.Set(0.0f, 0.0f);
	Vertices[1].TextureCoordinate.Set(1.0f, 0.0f);
	Vertices[2].TextureCoordinate.Set(0.0f, 1.0f);
	Vertices[3].TextureCoordinate.Set(1.0f, 1.0f);

	RHIUnlockBuffer(VertexBufferRHI);

	return VertexBufferRHI;
}

/**
	Creates the volatile quad for the even-numbered alpha lines (LEFT half of the target),
	pre-5.3 RHI API. Degenerate (zero-area) when OutputAlpha is false — see note on the
	odd-line builder above.
*/
static FBufferRHIRef CreateAlphaEvenVertexBuffer(FRHICommandListImmediate& RHICmdList, const FIntPoint& FitFrameSize, const FIntPoint& DrawFrameSize, bool OutputAlpha)
{
	FRHIResourceCreateInfo CreateInfo(TEXT("VertexBufferRHI"));
	FBufferRHIRef VertexBufferRHI = RHICreateVertexBuffer(sizeof(FMediaElementVertex) * 4, BUF_Volatile, CreateInfo);

	void* VoidPtr = RHILockBuffer(VertexBufferRHI, 0, sizeof(FMediaElementVertex) * 4, RLM_WriteOnly);

	FMediaElementVertex* Vertices = (FMediaElementVertex*)VoidPtr;
	if (OutputAlpha == false)
	{
		Vertices[0].Position.Set(-1.0f, -1.0f, 1.0f, 1.0f); // Top Left
		Vertices[1].Position.Set( 0.0f, -1.0f, 1.0f, 1.0f); // Top Right
		Vertices[2].Position.Set(-1.0f, -1.0f, 1.0f, 1.0f); // Bottom Left
		Vertices[3].Position.Set( 0.0f, -1.0f, 1.0f, 1.0f); // Bottom Right
	}
	else
	{
		Vertices[0].Position.Set(-1.0f, -1.0f/3.0f, 1.0f, 1.0f); // Top Left
		Vertices[1].Position.Set( 0.0f, -1.0f/3.0f, 1.0f, 1.0f); // Top Right
		Vertices[2].Position.Set(-1.0f, -1.0f, 1.0f, 1.0f); // Bottom Left
		Vertices[3].Position.Set( 0.0f, -1.0f, 1.0f, 1.0f); // Bottom Right
	}

	Vertices[0].TextureCoordinate.Set(0.0f, 0.0f);
	Vertices[1].TextureCoordinate.Set(1.0f, 0.0f);
	Vertices[2].TextureCoordinate.Set(0.0f, 1.0f);
	Vertices[3].TextureCoordinate.Set(1.0f, 1.0f);

	RHIUnlockBuffer(VertexBufferRHI);

	return VertexBufferRHI;
}

/**
	Creates the volatile quad for the odd-numbered alpha lines (RIGHT half of the target),
	pre-5.3 RHI API. Degenerate (zero-area) when OutputAlpha is false.
*/
static FBufferRHIRef CreateAlphaOddVertexBuffer(FRHICommandListImmediate& RHICmdList, const FIntPoint& FitFrameSize, const FIntPoint& DrawFrameSize, bool OutputAlpha)
{
	FRHIResourceCreateInfo CreateInfo(TEXT("VertexBufferRHI"));
	FBufferRHIRef VertexBufferRHI = RHICreateVertexBuffer(sizeof(FMediaElementVertex) * 4, BUF_Volatile, CreateInfo);

	void* VoidPtr = RHILockBuffer(VertexBufferRHI, 0, sizeof(FMediaElementVertex) * 4, RLM_WriteOnly);

	FMediaElementVertex* Vertices = (FMediaElementVertex*)VoidPtr;
	if (OutputAlpha == false)
	{
		Vertices[0].Position.Set( 0.0f, -1.0f, 1.0f, 1.0f); // Top Left
		Vertices[1].Position.Set( 1.0f, -1.0f, 1.0f, 1.0f); // Top Right
		Vertices[2].Position.Set( 0.0f, -1.0f, 1.0f, 1.0f); // Bottom Left
		Vertices[3].Position.Set( 1.0f, -1.0f, 1.0f, 1.0f); // Bottom Right
	}
	else
	{
		Vertices[0].Position.Set( 0.0f, -1.0f/3.0f, 1.0f, 1.0f); // Top Left
		Vertices[1].Position.Set( 1.0f, -1.0f/3.0f, 1.0f, 1.0f); // Top Right
		Vertices[2].Position.Set( 0.0f, -1.0f, 1.0f, 1.0f); // Bottom Left
		Vertices[3].Position.Set( 1.0f, -1.0f, 1.0f, 1.0f); // Bottom Right
	}

	Vertices[0].TextureCoordinate.Set(0.0f, 0.0f);
	Vertices[1].TextureCoordinate.Set(1.0f, 0.0f);
	Vertices[2].TextureCoordinate.Set(0.0f, 1.0f);
	Vertices[3].TextureCoordinate.Set(1.0f, 1.0f);

	RHIUnlockBuffer(VertexBufferRHI);

	return VertexBufferRHI;
}

#else
	#error "Unsupported engine major version"
#endif


// Default constructor; all real setup happens in Initialize().
UNDIMediaSender::UNDIMediaSender(const FObjectInitializer& ObjectInitializer)
	: Super(ObjectInitializer)
{}


/**
	Attempts to perform initialization logic for creating a sender through the NDI(R) sdk api.
	Idempotent: does nothing if a sender instance already exists. On success, hooks the
	audio-capture and render-thread video delegates, and (in-editor) registers shutdown
	handlers for PreExit and PIE-end so the sender is torn down cleanly.
*/
void UNDIMediaSender::Initialize(USoundSubmix* SubmixCapture)
{
	if (this->p_send_instance == nullptr)
	{
		// Create valid settings to be seen on the network
		CreateSender();

		// If it's valid then lets do some engine related setup
		if (p_send_instance != nullptr)
		{
			// Update the Render Target Configuration
			ChangeRenderTargetConfiguration(FrameSize, FrameRate);

			// Send audio frames at the end of the 'update' loop
			FNDIConnectionService::AddAudioSender(this, SubmixCapture, &UNDIMediaSender::TrySendAudioFrame);

			// We don't want to limit the engine rendering speed to the sync rate of the connection hook
			// into the core delegates render thread 'EndFrame'
			FNDIConnectionService::EventOnSendVideoFrame.AddUObject(this, &UNDIMediaSender::TrySendVideoFrame);

			// Initialize the 'LastRender' timecode
			LastRenderTime = FTimecode::FromTimespan(0, FrameRate, FTimecode::IsDropFormatTimecodeSupported(FrameRate),
													 true // use roll-over timecode
			);

#if UE_EDITOR

			// We don't want to provide perceived issues with the plugin not working so
			// when we get a Pre-exit message, forcefully shutdown the receiver
			FCoreDelegates::OnPreExit.AddWeakLambda(this, [&]() {
				this->Shutdown();
				FCoreDelegates::OnPreExit.RemoveAll(this);
			});

			// We handle this in the 'Play In Editor' versions as well.
			FEditorDelegates::PrePIEEnded.AddWeakLambda(this, [&](const bool) {
				this->Shutdown();
				FEditorDelegates::PrePIEEnded.RemoveAll(this);
			});

#endif
		}
	}
}

/**
	Lazily creates the small fallback texture bound to the conversion shaders when the real
	source must be released (see DrawRenderTarget). No-op if already created.
*/
void UNDIMediaSender::PrepareDefaultTexture()
{
	if (!DefaultVideoTextureRHI.IsValid())
	{
		// Default to 240p
		static int32 DefaultWidth = 352;
		static int32 DefaultHeight = 240;

		// Set the default video texture to reference nothing
		// NOTE(review): template argument appears stripped by extraction — presumably
		// TRefCountPtr<FRHITexture> (or IPooledRenderTarget); verify against plugin history.
		TRefCountPtr RenderableTexture;

		const FRHITextureCreateDesc CreateDesc = FRHITextureCreateDesc::Create2D(TEXT("NDIMediaSenderInitializeTexture"))
			.SetExtent(DefaultWidth, DefaultHeight)
			.SetFormat(PF_B8G8R8A8)
			.SetNumMips(1)
			.SetFlags(ETextureCreateFlags::RenderTargetable)
			.SetClearValue(FClearValueBinding(FLinearColor(0.0f, 0.0f, 0.0f)));

		RenderableTexture = RHICreateTexture(CreateDesc);

		DefaultVideoTextureRHI = (FTextureRHIRef&)RenderableTexture;
	}
}

/**
	(Re)creates the NDI send instance using the current SourceName, destroying any previous
	instance first. Also advertises PTZ capability metadata based on bEnablePTZ.
	Returns true if the sender was created successfully.
*/
bool UNDIMediaSender::CreateSender()
{
	if (p_send_instance != nullptr)
	{
		// free up the old sender instance
		NDIlib_send_destroy(p_send_instance);

		p_send_instance = nullptr;
	}

	// Create valid settings to be seen on the network
	NDIlib_send_create_t settings;
	settings.clock_audio = false;
	settings.clock_video = false;
	// Beware of the limited lifetime of TCHAR_TO_UTF8 values
	std::string SourceNameStr(TCHAR_TO_UTF8(*this->SourceName));
	settings.p_ndi_name = SourceNameStr.c_str();

	// create the instance and store it
	p_send_instance = NDIlib_send_create(&settings);

	if (p_send_instance != nullptr)
	{
		// We are going to mark this as if it was a PTZ camera.
		// NOTE(review): the const_cast template argument and the XML capability strings were
		// stripped by extraction — presumably const_cast<char*>("<ndi_capabilities ntk_ptz=\"true\"/>")
		// and the "false" variant; confirm against the original plugin source.
		NDIlib_metadata_frame_t NDI_capabilities;
		if (bEnablePTZ == true)
			NDI_capabilities.p_data = const_cast("");
		else
			NDI_capabilities.p_data = const_cast("");
		NDIlib_send_add_connection_metadata(p_send_instance, &NDI_capabilities);
	}

	return p_send_instance != nullptr ?
	true : false;
}


/**
	Changes the name of the sender object as seen on the network for remote connections.
	Flushes pending readback frames under both sync locks, then recreates the sender with
	the new name.
*/
void UNDIMediaSender::ChangeSourceName(const FString& InSourceName)
{
	this->SourceName = InSourceName;

	if (p_send_instance != nullptr)
	{
		FScopeLock AudioLock(&AudioSyncContext);
		FScopeLock RenderLock(&RenderSyncContext);

		// Get the command list interface
		FRHICommandListImmediate& RHICmdList = FRHICommandListExecutor::GetImmediateCommandList();

		// send an empty frame over NDI to be able to cleanup the buffers
		ReadbackTextures.Flush(RHICmdList, p_send_instance);

		CreateSender();
	}
}

/**
	Attempts to change the Broadcast information associated with this media object.
	Sets bIsChangingBroadcastSize for the duration so the audio/video send paths skip work
	while buffers are being rebuilt.
*/
void UNDIMediaSender::ChangeBroadcastConfiguration(const FNDIBroadcastConfiguration& InConfiguration)
{
	bIsChangingBroadcastSize = true;

	// Determine if we need to prevent the audio / video threads from updating frames
	if (p_send_instance != nullptr)
	{
		FScopeLock AudioLock(&AudioSyncContext);
		FScopeLock RenderLock(&RenderSyncContext);

		// Get the command list interface
		FRHICommandListImmediate& RHICmdList = FRHICommandListExecutor::GetImmediateCommandList();

		// send an empty frame over NDI to be able to cleanup the buffers
		ReadbackTextures.Flush(RHICmdList, p_send_instance);
	}

	// Change the render target configuration based on the incoming configuration
	ChangeRenderTargetConfiguration(InConfiguration.FrameSize, InConfiguration.FrameRate);

	bIsChangingBroadcastSize = false;
}

/**
	This will attempt to generate an audio frame, add the frame to the stack and return immediately,
	having scheduled the frame asynchronously.
*/
void UNDIMediaSender::TrySendAudioFrame(int64 time_code, float* AudioData, int32 NumSamples, int32 NumChannels, const int32 SampleRate, double AudioClock)
{
	if (bEnableAudio && (p_send_instance != nullptr) && (!bIsChangingBroadcastSize))
	{
		FScopeTryLock Lock(&AudioSyncContext);
		// Ignore audio while changes are being made;
		if (Lock.IsLocked())
		{
			// Only do the conversion work if at least one receiver is connected
			if (NDIlib_send_get_no_connections(p_send_instance, 0) > 0)
			{
				// Convert from the interleaved audio that Unreal Engine produces

				NDIlib_audio_frame_interleaved_32f_t NDI_interleaved_audio_frame;
				NDI_interleaved_audio_frame.timecode = time_code;
				NDI_interleaved_audio_frame.sample_rate = SampleRate;
				NDI_interleaved_audio_frame.no_channels = NumChannels;
				NDI_interleaved_audio_frame.no_samples = NumSamples / NumChannels;
				NDI_interleaved_audio_frame.p_data = AudioData;

				NDIlib_audio_frame_v2_t NDI_audio_frame;
				// NOTE(review): Reset(NumSamples) empties the array while reserving capacity;
				// GetData() then points at that reserved allocation which the NDI util fills.
				// Works, but SetNumUninitialized would state the intent explicitly — confirm.
				SendAudioData.Reset(NumSamples);
				NDI_audio_frame.p_data = SendAudioData.GetData();
				NDI_audio_frame.channel_stride_in_bytes = (NumSamples / NumChannels) * sizeof(float);

				// De-interleave into planar float audio for NDI; presumably this also fills the
				// destination frame's rate/channel/sample fields from the source — verify with SDK docs.
				NDIlib_util_audio_from_interleaved_32f_v2(&NDI_interleaved_audio_frame, &NDI_audio_frame);


				OnSenderAudioPreSend.Broadcast(this);

				NDIlib_send_send_audio_v2(p_send_instance, &NDI_audio_frame);

				OnSenderAudioSent.Broadcast(this);
			}
		}
	}
}

/**
	This will attempt to generate a video frame, add the frame to the stack and return immediately,
	having scheduled the frame asynchronously.
*/
void UNDIMediaSender::TrySendVideoFrame(int64 time_code)
{
	// This function is called on the Engine's Main Rendering Thread. Be very careful when doing stuff here.
	// Make sure things are done quick and efficient.

	if (p_send_instance != nullptr && !bIsChangingBroadcastSize)
	{
		FScopeLock Lock(&RenderSyncContext);

		// Drain any metadata sent to us by receivers before producing the frame
		while(GetMetadataFrame())
			; // Potential improvement: limit how much metadata is processed, to avoid appearing to lock up due to a metadata flood

		if (GetRenderTargetResource() != nullptr)
		{
			// Alright time to perform the magic :D
			if (NDIlib_send_get_no_connections(p_send_instance, 0) > 0)
			{
				// time_code is in 100ns ticks (1e7 per second)
				FTimecode RenderTimecode =
					FTimecode::FromTimespan(FTimespan::FromSeconds(time_code / (float)1e+7), FrameRate,
											FTimecode::IsDropFormatTimecodeSupported(FrameRate),
											true // use roll-over timecode
					);

				// Skip duplicate frames: only send when the frame counter has advanced
				if (RenderTimecode.Frames != LastRenderTime.Frames)
				{
					// Get the command list interface
					FRHICommandListImmediate& RHICmdList = FRHICommandListExecutor::GetImmediateCommandList();

					// alright, lets hope the render target hasn't changed sizes
					NDI_video_frame.timecode = time_code;

					// performing color conversion if necessary and copy pixels into the data buffer for sending
					if (DrawRenderTarget(RHICmdList))
					{
						int32 Width = 0, Height = 0, LineStride = 0;

						// Map the staging surface so we can copy the buffer for the NDI SDK to use
						ReadbackTextures.Map(RHICmdList, Width, Height, LineStride);
						// Width and height are the size of the readback texture, and not the framesize represented
						// Readback texture is used in 4:2:2 format, so actual width in pixels is double
						Width *= 2;
						// Readback texture may be extended in height to accomodate alpha values; remove it
						if (ReadbackTexturesHaveAlpha == true)
							Height = (2*Height) / 3;

						NDI_video_frame.line_stride_in_bytes = LineStride;

						// If we don't have a draw result, ensure we send an empty frame and resize our frame
						if (FrameSize != FIntPoint(Width, Height))
						{
							// send an empty frame over NDI to be able to cleanup the buffers
							ReadbackTextures.Flush(RHICmdList, p_send_instance);

							// Do not hold the lock when going into ChangeRenderTargetConfiguration()
							Lock.Unlock();

							// Change the render target configuration based on what the RHI determines the size to be
							ChangeRenderTargetConfiguration(FIntPoint(Width, Height), this->FrameRate);
						}
						else
						{
							OnSenderVideoPreSend.Broadcast(this);

							// send the frame over NDI
							ReadbackTextures.Send(RHICmdList, p_send_instance, NDI_video_frame);

							// Update the Last Render Time to the current Render Timecode
							LastRenderTime = RenderTimecode;

							OnSenderVideoSent.Broadcast(this);
						}
					}
				}
			}
		}
	}
}

/**
	Perform the color conversion (if any) and bit copy from the gpu.
	Draws the source render target into a pooled target in UYVY layout (plus, when
	OutputAlpha is set, even/odd alpha line passes for UYVA), letter/pillar-boxing to
	preserve aspect ratio, then resolves the result into the CPU-readback textures.
	Returns true if a valid source texture was drawn.
	NOTE(review): several template arguments below (TRefCountPtr, TShaderMapRef,
	TStaticDepthStencilState, TStaticBlendStateWriteMask) appear stripped by extraction;
	the shader types can be inferred from the ::Params usages — restore before compiling.
*/
bool UNDIMediaSender::DrawRenderTarget(FRHICommandListImmediate& RHICmdList)
{
	bool DrawResult = false;

	// We should only do conversions and pixel copies, if we have something to work with
	if (!bIsChangingBroadcastSize && (GetRenderTargetResource() != nullptr))
	{
		// Get the underlying texture to use for the color conversion
		FTextureRHIRef SourceTexture = (FTextureRHIRef&)GetRenderTargetResource()->TextureRHI;

		// Validate the Source Texture
		if (SourceTexture.IsValid())
		{
			// We have something to draw
			DrawResult = true;

			TRefCountPtr RenderTargetTexturePooled;

			// Find a free target-able texture from the render pool
			GRenderTargetPool.FindFreeElement(RHICmdList, RenderTargetDescriptor, RenderTargetTexturePooled, TEXT("NDIIO"));

			FRHITexture* TargetableTexture = RenderTargetTexturePooled->GetRHI();

			PrepareDefaultTexture();

			// Get the target size of the conversion
			FIntPoint TargetSize = SourceTexture->GetSizeXY();

			// Calculate the rectangle in which to draw the source, maintaining aspect ratio
			float FrameRatio = FrameSize.X / (float)FrameSize.Y;
			float TargetRatio = TargetSize.X / (float)TargetSize.Y;

			FIntPoint NewFrameSize = FrameSize;

			if (TargetRatio > FrameRatio)
			{
				// letterbox
				NewFrameSize.Y = FMath::RoundToInt(FrameSize.X / TargetRatio);
			}
			else if (TargetRatio < FrameRatio)
			{
				// pillarbox
				NewFrameSize.X = FMath::RoundToInt(FrameSize.Y * TargetRatio);
			}

			// UV rectangle centering the scaled source within the output frame
			float ULeft = (NewFrameSize.X - FrameSize.X) / (float)(2*NewFrameSize.X);
			float URight = (NewFrameSize.X + FrameSize.X) / (float)(2*NewFrameSize.X);
			float VTop = (NewFrameSize.Y - FrameSize.Y) / (float)(2*NewFrameSize.Y);
			float VBottom = (NewFrameSize.Y + FrameSize.Y) / (float)(2*NewFrameSize.Y);

			FBufferRHIRef ColorVertexBuffer = CreateColorVertexBuffer(RHICmdList, FrameSize, NewFrameSize, this->OutputAlpha);
			FBufferRHIRef AlphaEvenVertexBuffer = CreateAlphaEvenVertexBuffer(RHICmdList, FrameSize, NewFrameSize, this->OutputAlpha);
			FBufferRHIRef AlphaOddVertexBuffer = CreateAlphaOddVertexBuffer(RHICmdList, FrameSize, NewFrameSize, this->OutputAlpha);

			// Initialize the Graphics Pipeline State Object
			FGraphicsPipelineStateInitializer GraphicsPSOInit;

			// Configure shaders
			FGlobalShaderMap* ShaderMap = GetGlobalShaderMap(GMaxRHIFeatureLevel);

			// Construct the shaders
			TShaderMapRef VertexShader(ShaderMap);
			TShaderMapRef ConvertShader(ShaderMap);
			TShaderMapRef ConvertAlphaEvenShader(ShaderMap);
			TShaderMapRef ConvertAlphaOddShader(ShaderMap);

			// Scaled drawing pass with conversion to UYVY
			{
				// Initialize the Render pass with the conversion texture
				FRHITexture* ConversionTexture = TargetableTexture;
				FRHIRenderPassInfo RPInfo(ConversionTexture, ERenderTargetActions::DontLoad_Store);

				RHICmdList.BeginRenderPass(RPInfo, TEXT("NDI Send Scaling Conversion"));

				// Do as it suggests
				RHICmdList.ApplyCachedRenderTargets(GraphicsPSOInit);
				// Set the state objects
				GraphicsPSOInit.DepthStencilState = TStaticDepthStencilState::GetRHI();
				GraphicsPSOInit.RasterizerState = TStaticRasterizerState<>::GetRHI();
				GraphicsPSOInit.BlendState = TStaticBlendStateWriteMask::GetRHI();
				// Perform binding operations for the shaders to be used
				GraphicsPSOInit.BoundShaderState.VertexDeclarationRHI = GMediaVertexDeclaration.VertexDeclarationRHI;
				GraphicsPSOInit.BoundShaderState.VertexShaderRHI = VertexShader.GetVertexShader();
				GraphicsPSOInit.BoundShaderState.PixelShaderRHI = ConvertShader.GetPixelShader();
				// Going to draw triangle strips
				GraphicsPSOInit.PrimitiveType = PT_TriangleStrip;

				// Ensure the pipeline state is set to the one we've configured
				SetGraphicsPipelineState(RHICmdList, GraphicsPSOInit, 0);

				// Set the stream source
				RHICmdList.SetStreamSource(0, ColorVertexBuffer, 0);

				// Set the texture parameter of the conversion shader
				FNDIIOShaderBGRAtoUYVYPS::Params Params(SourceTexture, DefaultVideoTextureRHI, FrameSize,
					FVector2D(ULeft, VTop), FVector2D(URight-ULeft, VBottom-VTop),
					bPerformLinearTosRGB ? FNDIIOShaderPS::EColorCorrection::LinearTosRGB : FNDIIOShaderPS::EColorCorrection::None,
					FVector2D(this->AlphaMin, this->AlphaMax));
				ConvertShader->SetParameters(RHICmdList, Params);

				// Draw the texture
				RHICmdList.DrawPrimitive(0, 2, 1);

				// Release the reference to SourceTexture from the shader
				// The SourceTexture may be the viewport's backbuffer, and Unreal does not like
				// extra references to the backbuffer when the viewport is resized
				Params.InputTarget = DefaultVideoTextureRHI;
				ConvertShader->SetParameters(RHICmdList, Params);

				RHICmdList.EndRenderPass();
			}

			// Scaled drawing pass with conversion to the alpha part of UYVA
			if (this->OutputAlpha == true)
			{
				// Alpha even-numbered lines
				{
					// Initialize the Render pass with the conversion texture
					FRHITexture* ConversionTexture = TargetableTexture;
					FRHIRenderPassInfo RPInfo(ConversionTexture, ERenderTargetActions::DontLoad_Store);

					RHICmdList.BeginRenderPass(RPInfo, TEXT("NDI Send Scaling Conversion"));

					// Do as it suggests
					RHICmdList.ApplyCachedRenderTargets(GraphicsPSOInit);
					// Set the state objects
					GraphicsPSOInit.DepthStencilState = TStaticDepthStencilState::GetRHI();
					GraphicsPSOInit.RasterizerState = TStaticRasterizerState<>::GetRHI();
					GraphicsPSOInit.BlendState = TStaticBlendStateWriteMask::GetRHI();
					// Perform binding operations for the shaders to be used
					GraphicsPSOInit.BoundShaderState.VertexDeclarationRHI = GMediaVertexDeclaration.VertexDeclarationRHI;
					GraphicsPSOInit.BoundShaderState.VertexShaderRHI = VertexShader.GetVertexShader();
					GraphicsPSOInit.BoundShaderState.PixelShaderRHI = ConvertAlphaEvenShader.GetPixelShader();
					// Going to draw triangle strips
					GraphicsPSOInit.PrimitiveType = PT_TriangleStrip;

					// Ensure the pipeline state is set to the one we've configured
					SetGraphicsPipelineState(RHICmdList, GraphicsPSOInit, 0);

					// Set the stream source
					RHICmdList.SetStreamSource(0, AlphaEvenVertexBuffer, 0);

					// Set the texture parameter of the conversion shader
					FNDIIOShaderBGRAtoAlphaEvenPS::Params Params(SourceTexture, DefaultVideoTextureRHI, FrameSize,
						FVector2D(ULeft, VTop), FVector2D(URight-ULeft, VBottom-VTop),
						bPerformLinearTosRGB ? FNDIIOShaderPS::EColorCorrection::LinearTosRGB : FNDIIOShaderPS::EColorCorrection::None,
						FVector2D(this->AlphaMin, this->AlphaMax));
					ConvertAlphaEvenShader->SetParameters(RHICmdList, Params);

					// Draw the texture
					RHICmdList.DrawPrimitive(0, 2, 1);

					// Release the reference to SourceTexture from the shader
					// The SourceTexture may be the viewport's backbuffer, and Unreal does not like
					// extra references to the backbuffer when the viewport is resized
					Params.InputTarget = DefaultVideoTextureRHI;
					ConvertAlphaEvenShader->SetParameters(RHICmdList, Params);

					RHICmdList.EndRenderPass();
				}

				// Alpha odd-numbered lines
				{
					// Initialize the Render pass with the conversion texture
					FRHITexture* ConversionTexture = TargetableTexture;
					FRHIRenderPassInfo RPInfo(ConversionTexture, ERenderTargetActions::DontLoad_Store);

					RHICmdList.BeginRenderPass(RPInfo, TEXT("NDI Send Scaling Conversion"));

					// Do as it suggests
					RHICmdList.ApplyCachedRenderTargets(GraphicsPSOInit);
					// Set the state objects
					GraphicsPSOInit.DepthStencilState = TStaticDepthStencilState::GetRHI();
					GraphicsPSOInit.RasterizerState = TStaticRasterizerState<>::GetRHI();
					GraphicsPSOInit.BlendState = TStaticBlendStateWriteMask::GetRHI();
					// Perform binding operations for the shaders to be used
					GraphicsPSOInit.BoundShaderState.VertexDeclarationRHI = GMediaVertexDeclaration.VertexDeclarationRHI;
					GraphicsPSOInit.BoundShaderState.VertexShaderRHI = VertexShader.GetVertexShader();
					GraphicsPSOInit.BoundShaderState.PixelShaderRHI = ConvertAlphaOddShader.GetPixelShader();
					// Going to draw triangle strips
					GraphicsPSOInit.PrimitiveType = PT_TriangleStrip;

					// Ensure the pipeline state is set to the one we've configured
					SetGraphicsPipelineState(RHICmdList, GraphicsPSOInit, 0);

					// Set the stream source
					RHICmdList.SetStreamSource(0, AlphaOddVertexBuffer, 0);

					// Set the texture parameter of the conversion shader
					FNDIIOShaderBGRAtoAlphaOddPS::Params Params(SourceTexture, DefaultVideoTextureRHI, FrameSize,
						FVector2D(ULeft, VTop), FVector2D(URight-ULeft, VBottom-VTop),
						bPerformLinearTosRGB ? FNDIIOShaderPS::EColorCorrection::LinearTosRGB : FNDIIOShaderPS::EColorCorrection::None,
						FVector2D(this->AlphaMin, this->AlphaMax));
					ConvertAlphaOddShader->SetParameters(RHICmdList, Params);

					// Draw the texture
					RHICmdList.DrawPrimitive(0, 2, 1);

					// Release the reference to SourceTexture from the shader
					// The SourceTexture may be the viewport's backbuffer, and Unreal does not like
					// extra references to the backbuffer when the viewport is resized
					Params.InputTarget = DefaultVideoTextureRHI;
					ConvertAlphaOddShader->SetParameters(RHICmdList, Params);

					RHICmdList.EndRenderPass();
				}
			}

			// Copy to resolve target...
			// This is by far the most expensive in terms of cost, since we are having to pull
			// data from the gpu, while in the render thread.
			ReadbackTextures.Resolve(RHICmdList, TargetableTexture, FResolveRect(0, 0, FrameSize.X/2,FrameSize.Y), FResolveRect(0, 0, FrameSize.X/2,FrameSize.Y));

			// Force all the drawing to be done here and now
			RHICmdList.ImmediateFlush(EImmediateFlushType::FlushRHIThreadFlushResources);
		}
	}

	return DrawResult;
}

/**
	Change the render target configuration based on the passed in parameters

	@param InFrameSize The frame size to resize the render target to
	@param InFrameRate The frame rate at which we should be sending frames via NDI
*/
void UNDIMediaSender::ChangeRenderTargetConfiguration(FIntPoint InFrameSize, FFrameRate InFrameRate)
{
	FScopeLock RenderLock(&RenderSyncContext);

	// Ensure that the frame size matches what we are told the frame size is
	this->FrameSize = InFrameSize;
	this->FrameRate = InFrameRate;

	// Reiterate the properties that the frame needs to be when sent
	NDI_video_frame.xres = FrameSize.X;
	NDI_video_frame.yres = FrameSize.Y;
	NDI_video_frame.line_stride_in_bytes = 0;
	NDI_video_frame.frame_rate_D = FrameRate.Denominator;
	NDI_video_frame.frame_rate_N = FrameRate.Numerator;
	NDI_video_frame.FourCC = this->OutputAlpha ? NDIlib_FourCC_type_UYVA : NDIlib_FourCC_type_UYVY;

	// Size of the readback texture in UYVY format, optionally with alpha
	// (half-width because UYVY packs 2 pixels per 32-bit texel; +50% height for UYVA alpha rows)
	FIntPoint UYVYTextureSize(FrameSize.X/2, FrameSize.Y + (this->OutputAlpha ? FrameSize.Y/2 : 0));

	// Create readback textures, suitably sized for UYVY
	this->ReadbackTextures.Create(UYVYTextureSize);
	this->ReadbackTexturesHaveAlpha = this->OutputAlpha;

	// Create the RenderTarget descriptor, suitably sized for UYVY
	RenderTargetDescriptor = FPooledRenderTargetDesc::Create2DDesc(UYVYTextureSize, PF_B8G8R8A8, FClearValueBinding::None,
		TexCreate_None, TexCreate_RenderTargetable, false);

	// If our RenderTarget is valid change the size
	if (IsValid(this->RenderTarget))
	{
		// Ensure that our render target is the same size as we expect
		this->RenderTarget->ResizeTarget(FrameSize.X, FrameSize.Y);
	}

	// Do not hold a lock when broadcasting, as it calls outside of the sender's context
	RenderLock.Unlock();

	// determine if the notifier is bound
	if (this->OnBroadcastConfigurationChanged.IsBound())
	{
		// broadcast the notification to all interested parties
		OnBroadcastConfigurationChanged.Broadcast(this);
	}
}


/**
	This will send a metadata frame to all receivers
	The data is expected to be valid XML
*/
void UNDIMediaSender::SendMetadataFrame(const FString& Data, bool AttachToVideoFrame)
{
	if (p_send_instance != nullptr)
	{
		if(AttachToVideoFrame == true)
		{
			// Attach the metadata to the next video frame to be sent
			FScopeLock RenderLock(&RenderSyncContext);
			this->ReadbackTextures.AddMetaData(Data);
		}
		else
		{
			OnSenderMetaDataPreSend.Broadcast(this);

			// Send the metadata separate from the video frame
			// NOTE(review): const_cast template argument stripped by extraction — presumably
			// const_cast<char*>(DataStr.c_str()); confirm.
			NDIlib_metadata_frame_t metadata;
			std::string DataStr(TCHAR_TO_UTF8(*Data));
			metadata.p_data = const_cast(DataStr.c_str());
			metadata.length = DataStr.length();
			metadata.timecode = FDateTime::Now().GetTimeOfDay().GetTicks();

			NDIlib_send_send_metadata(p_send_instance, &metadata);

			OnSenderMetaDataSent.Broadcast(this);
		}
	}
}

/**
	This will send a metadata frame to all receivers
	The data will be formatted as: <Element>ElementData</Element>
	(NOTE(review): the closing-tag concatenation below appears stripped by extraction —
	presumably + "</" + Element + ">"; confirm against the original plugin source.)
*/
void UNDIMediaSender::SendMetadataFrameAttr(const FString& Element, const FString& ElementData, bool AttachToVideoFrame)
{
	FString Data = "<" + Element + ">" + ElementData + "";
	SendMetadataFrame(Data, AttachToVideoFrame);
}

/**
	This will send a metadata frame to all receivers
	The data will be formatted as a self-closing element with key="value" attributes.
	(NOTE(review): TMap template arguments stripped by extraction — presumably
	TMap<FString,FString>; confirm.)
*/
void UNDIMediaSender::SendMetadataFrameAttrs(const FString& Element, const TMap& Attributes, bool AttachToVideoFrame)
{
	FString Data = "<" + Element;

	for(const auto& Attribute : Attributes)
	{
		Data += " " + Attribute.Key + "=\"" + Attribute.Value + "\"";
	}

	Data += "/>";

	SendMetadataFrame(Data, AttachToVideoFrame);
}


/**
	Attempts to get a metadata frame from the sender.
	If there is one, the data is broadcast through OnSenderMetaDataReceived.
	Returns true if metadata was received, false otherwise.
*/
bool UNDIMediaSender::GetMetadataFrame()
{
	bool bProcessed = false;

	if (p_send_instance != nullptr)
	{
		NDIlib_metadata_frame_t metadata;
		// timeout of 0: poll without blocking
		if(NDIlib_send_capture(p_send_instance, &metadata, 0) == NDIlib_frame_type_metadata)
		{
			if ((metadata.p_data != nullptr) && (metadata.length > 0))
			{
				FString Data(UTF8_TO_TCHAR(metadata.p_data));
				OnSenderMetaDataReceived.Broadcast(this, Data);
			}
			// Frame must be returned to the SDK after use
			NDIlib_send_free_metadata(p_send_instance, &metadata);

			bProcessed = true;
		}
	}

	return bProcessed;
}

/**
	Attempts to change the RenderTarget used in sending video frames over NDI
*/
void UNDIMediaSender::ChangeVideoTexture(UTextureRenderTarget2D* VideoTexture)
{
	// Wait render thread so that we can do something
	FScopeLock RenderLock(&RenderSyncContext);

	// Set our Render Target to the incoming video texture
	this->RenderTarget = VideoTexture;
}

/**
	Change the alpha remapping settings
*/
void UNDIMediaSender::ChangeAlphaRemap(float AlphaMinIn, float AlphaMaxIn)
{
	// Wait render thread so that we can do something
	FScopeLock RenderLock(&RenderSyncContext);

	this->AlphaMin = AlphaMinIn;
	this->AlphaMax = AlphaMaxIn;
}

/**
+ Determines the current tally information. If you specify a timeout then it will wait until it has + changed, otherwise it will simply poll it and return the current tally immediately + + @param IsOnPreview - A state indicating whether this source in on preview of a receiver + @param IsOnProgram - A state indicating whether this source is on program of a receiver + @param TimeOut - Indicates the amount of time to wait (in milliseconds) until a change has occurred +*/ +void UNDIMediaSender::GetTallyInformation(bool& IsOnPreview, bool& IsOnProgram, uint32 Timeout) +{ + // reset the parameters with the default values + IsOnPreview = IsOnProgram = false; + + // validate our sender object + if (p_send_instance != nullptr) + { + // construct a tally structure + NDIlib_tally_t tally_info; + + // retrieve the tally information from the SDK + NDIlib_send_get_tally(p_send_instance, &tally_info, 0); + + // perform a copy from the tally info object to our parameters + IsOnPreview = tally_info.on_preview; + IsOnProgram = tally_info.on_program; + } +} + +/** + Gets the current number of receivers connected to this source. This can be used to avoid rendering + when nothing is connected to the video source. 
which can significantly improve the efficiency if + you want to make a lot of sources available on the network +*/ +void UNDIMediaSender::GetNumberOfConnections(int32& Result) +{ + // reset the result + Result = 0; + + // have we created a sender object + if (p_send_instance != nullptr) + { + // call the SDK to get the current number of connection for the sender instance of this object + Result = NDIlib_send_get_no_connections(p_send_instance, 0); + } +} + +/** + Attempts to immediately stop sending frames over NDI to any connected receivers +*/ +void UNDIMediaSender::Shutdown() +{ + // Perform cleanup on the audio related materials + { + FScopeLock Lock(&AudioSyncContext); + + // Remove the handler for the send audio frame + FNDIConnectionService::RemoveAudioSender(this); + } + + // Perform cleanup on the renderer related materials + { + FScopeLock RenderLock(&RenderSyncContext); + + // destroy the sender + if (p_send_instance != nullptr) + { + // Get the command list interface + FRHICommandListImmediate& RHICmdList = FRHICommandListExecutor::GetImmediateCommandList(); + + // send an empty frame over NDI to be able to cleanup the buffers + this->ReadbackTextures.Flush(RHICmdList, p_send_instance); + + NDIlib_send_destroy(p_send_instance); + p_send_instance = nullptr; + } + + this->DefaultVideoTextureRHI.SafeRelease(); + + this->ReadbackTextures.Destroy(); + + this->RenderTargetDescriptor.Reset(); + } +} + +/** + Called before destroying the object. This is called immediately upon deciding to destroy the object, + to allow the object to begin an asynchronous cleanup process. + */ +void UNDIMediaSender::BeginDestroy() +{ + // Call the shutdown procedure here. 
+ this->Shutdown(); + + // Call the base implementation of 'BeginDestroy' + Super::BeginDestroy(); +} + +/** + Set whether or not a Linear to sRGB conversion is made +*/ +void UNDIMediaSender::PerformLinearTosRGBConversion(bool Value) +{ + this->bPerformLinearTosRGB = Value; +} + +/** + Set whether or not to enable PTZ support +*/ +void UNDIMediaSender::EnablePTZ(bool Value) +{ + this->bEnablePTZ = Value; +} + +/** + Returns the Render Target used for sending a frame over NDI +*/ +UTextureRenderTarget2D* UNDIMediaSender::GetRenderTarget() +{ + return this->RenderTarget; +} + + +FTextureResource* UNDIMediaSender::GetRenderTargetResource() const +{ + if(IsValid(this->RenderTarget)) + return this->RenderTarget->GetResource(); + + return nullptr; +} + + + +/** + A texture with CPU readback +*/ + +/** + Check that the MappedTexture is not mapped, and the readback texture has been destroyed. +*/ +UNDIMediaSender::MappedTexture::~MappedTexture() +{ + check(Texture.IsValid() == false); + check(pData == nullptr); +} + +/** + Create the readback texture. If the texture was already created it will first be destroyed. + The MappedTexture must currently not be mapped. +*/ +void UNDIMediaSender::MappedTexture::Create(FIntPoint InFrameSize) +{ + Destroy(); + + FrameSize = InFrameSize; +} + +/** + Destroy the readback texture (if not already destroyed). The MappedTexture must currently not be mapped. 
*/
void UNDIMediaSender::MappedTexture::Destroy()
{
	check(pData == nullptr);

	if (Texture.IsValid())
	{
		Texture.SafeRelease();
		Texture = nullptr;
	}
	pData = nullptr;

	check(Texture.IsValid() == false);
	check(pData == nullptr);
}

/**
	Lazily (re)creates the CPU-readback RHI texture at the recorded FrameSize.
	No-op if a texture of the right size already exists; otherwise the old texture
	is destroyed and a fresh CPUReadback-flagged B8G8R8A8 texture is created.
	Must not be called while mapped.
*/
void UNDIMediaSender::MappedTexture::PrepareTexture()
{
	if (Texture.IsValid() && (Texture->GetSizeXY() == FrameSize))
		return;

	Destroy();

	check(Texture.IsValid() == false);
	check(pData == nullptr);

	const FRHITextureCreateDesc CreateDesc = FRHITextureCreateDesc::Create2D(TEXT("NDIMediaSenderMappedTexture"))
		.SetExtent(FrameSize.X, FrameSize.Y)
		.SetFormat(PF_B8G8R8A8)
		.SetNumMips(1)
		.SetFlags(ETextureCreateFlags::CPUReadback);
	Texture = RHICreateTexture(CreateDesc);

	pData = nullptr;

	check(Texture.IsValid() == true);
	check(pData == nullptr);
}

// Returns the logical frame size this readback texture was created for.
FIntPoint UNDIMediaSender::MappedTexture::GetSizeXY() const
{
	return FrameSize;
}

/**
	Resolve the source texture to the readback texture. The readback texture must have been created.
	The MappedTexture must currently not be mapped.
	NOTE(review): the Rect/DestRect parameters are currently ignored — a full-texture
	CopyTexture is performed regardless; confirm this is intentional.
*/
void UNDIMediaSender::MappedTexture::Resolve(FRHICommandListImmediate& RHICmdList, FRHITexture* SourceTextureRHI, const FResolveRect& Rect, const FResolveRect& DestRect)
{
	PrepareTexture();

	check(Texture.IsValid() == true);
	check(pData == nullptr);
	check(SourceTextureRHI != nullptr);

	// Copy to resolve target...
	// This is by far the most expensive in terms of cost, since we are having to pull
	// data from the gpu, while in the render thread.
	RHICmdList.CopyTexture(SourceTextureRHI, Texture, FRHICopyTextureInfo());
}

/**
	Map the readback texture so that its content can be read by the CPU.
	The readback texture must have been created. The MappedTexture must currently not be mapped.
+*/ +void UNDIMediaSender::MappedTexture::Map(FRHICommandListImmediate& RHICmdList, int32& OutWidth, int32& OutHeight, int32& OutLineStride) +{ + check(Texture.IsValid() == true); + check(pData == nullptr); + + // Map the staging surface so we can copy the buffer for the NDI SDK to use + int32 MappedWidth = 0, MappedHeight = 0; + RHICmdList.MapStagingSurface(Texture, pData, MappedWidth, MappedHeight); + OutWidth = FrameSize.X; + OutHeight = FrameSize.Y; + OutLineStride = MappedWidth * 4; + + check(pData != nullptr); +} + +/** + Return a pointer to the mapped readback texture content. + The MappedTexture must currently be mapped. +*/ +void* UNDIMediaSender::MappedTexture::MappedData() const +{ + check(pData != nullptr); + + return pData; +} + +/** + Unmap the readback texture (if currently mapped). +*/ +void UNDIMediaSender::MappedTexture::Unmap(FRHICommandListImmediate& RHICmdList) +{ + if(pData != nullptr) + { + check(Texture.IsValid() == true); + + RHICmdList.UnmapStagingSurface(Texture); + pData = nullptr; + } + + MetaData.clear(); + + check(pData == nullptr); +} + + +/** + Adds metadata to the texture +*/ +void UNDIMediaSender::MappedTexture::AddMetaData(const FString& Data) +{ + std::string DataStr(TCHAR_TO_UTF8(*Data)); + MetaData += DataStr; +} + +/** + Gets the metadata for the texture +*/ +const std::string& UNDIMediaSender::MappedTexture::GetMetaData() const +{ + return MetaData; +} + + +/** + Class for managing the sending of mapped texture data to an NDI video stream. + Sending is done asynchronously, so mapping and unmapping of texture data must + be managed so that CPU accessible texture content remains valid until the + sending of the frame is guaranteed to have been completed. This is achieved + by double-buffering readback textures. +*/ + +/** + Create the mapped texture sender. If the mapped texture sender was already created + it will first be destroyed. No texture must currently be mapped. 
+*/ +void UNDIMediaSender::MappedTextureASyncSender::Create(FIntPoint InFrameSize) +{ + Destroy(); + + MappedTexture& CurrentMappedTexture = MappedTextures[CurrentIndex]; + CurrentMappedTexture.Create(InFrameSize); + + MappedTexture& PreviousMappedTexture = MappedTextures[1-CurrentIndex]; + PreviousMappedTexture.Create(InFrameSize); +} + +/** + Destroy the mapped texture sender (if not already destroyed). No texture must currently be mapped. +*/ +void UNDIMediaSender::MappedTextureASyncSender::Destroy() +{ + MappedTexture& CurrentMappedTexture = MappedTextures[CurrentIndex]; + CurrentMappedTexture.Destroy(); + + MappedTexture& PreviousMappedTexture = MappedTextures[1-CurrentIndex]; + PreviousMappedTexture.Destroy(); +} + +FIntPoint UNDIMediaSender::MappedTextureASyncSender::GetSizeXY() const +{ + const MappedTexture& CurrentMappedTexture = MappedTextures[CurrentIndex]; + return CurrentMappedTexture.GetSizeXY(); +} + +/** + Resolve the source texture to the current texture of the mapped texture sender. + The mapped texture sender must have been created. The current texture must currently not be mapped. +*/ +void UNDIMediaSender::MappedTextureASyncSender::Resolve(FRHICommandListImmediate& RHICmdList, FRHITexture* SourceTextureRHI, const FResolveRect& Rect, const FResolveRect& DestRect) +{ + // Copy to resolve target... + // This is by far the most expensive in terms of cost, since we are having to pull + // data from the gpu, while in the render thread. + MappedTexture& CurrentMappedTexture = MappedTextures[CurrentIndex]; + CurrentMappedTexture.Resolve(RHICmdList, SourceTextureRHI, Rect, DestRect); +} + +/** + Map the current texture of the mapped texture sender so that its content can be read by the CPU. + The mapped texture sender must have been created. The current texture must currently not be mapped. 
+*/ +void UNDIMediaSender::MappedTextureASyncSender::Map(FRHICommandListImmediate& RHICmdList, int32& OutWidth, int32& OutHeight, int32& OutLineStride) +{ + // Map the staging surface so we can copy the buffer for the NDI SDK to use + MappedTexture& CurrentMappedTexture = MappedTextures[CurrentIndex]; + CurrentMappedTexture.Map(RHICmdList, OutWidth, OutHeight, OutLineStride); +} + +/** + Send the current texture of the mapped texture sender to an NDI video stream, then swaps the textures. + The mapped texture sender must have been created. The current texture must currently be mapped. +*/ +void UNDIMediaSender::MappedTextureASyncSender::Send(FRHICommandListImmediate& RHICmdList, NDIlib_send_instance_t p_send_instance_in, NDIlib_video_frame_v2_t& p_video_data) +{ + // Send the currently mapped data to an NDI stream asynchronously + + check(p_send_instance_in != nullptr); + + MappedTexture& CurrentMappedTexture = MappedTextures[CurrentIndex]; + + p_video_data.p_data = (uint8_t*)CurrentMappedTexture.MappedData(); + + auto& MetaData = CurrentMappedTexture.GetMetaData(); + if(MetaData.empty() == false) + { + p_video_data.p_metadata = MetaData.c_str(); + } + else + { + p_video_data.p_metadata = nullptr; + } + + NDIlib_send_send_video_async_v2(p_send_instance_in, &p_video_data); + + // After send_video_async returns, the frame sent before this one is guaranteed to have been processed + // So the texture for the previous frame can be unmapped + MappedTexture& PreviousMappedTexture = MappedTextures[1-CurrentIndex]; + PreviousMappedTexture.Unmap(RHICmdList); + + // Switch the current and previous textures + CurrentIndex = 1 - CurrentIndex; +} + +/** + Flushes the NDI video stream, and unmaps the textures (if mapped) +*/ +void UNDIMediaSender::MappedTextureASyncSender::Flush(FRHICommandListImmediate& RHICmdList, NDIlib_send_instance_t p_send_instance_in) +{ + // Flush the asynchronous NDI stream and unmap all the textures + + check(p_send_instance_in != nullptr); + + 
NDIlib_send_send_video_async_v2(p_send_instance_in, nullptr); + + // After send_video_async returns, the frame sent before this one is guaranteed to have been processed + // So the texture for the previous frame can be unmapped + MappedTexture& PreviousMappedTexture = MappedTextures[1-CurrentIndex]; + PreviousMappedTexture.Unmap(RHICmdList); + + // As the send queue was flushed, also unmap the current frame as it is not used + MappedTexture& CurrentMappedTexture = MappedTextures[CurrentIndex]; + CurrentMappedTexture.Unmap(RHICmdList); + + // Switch the current and previous textures + CurrentIndex = 1 - CurrentIndex; +} + +/** + Adds metadata to the current texture +*/ +void UNDIMediaSender::MappedTextureASyncSender::AddMetaData(const FString& Data) +{ + MappedTexture& CurrentMappedTexture = MappedTextures[CurrentIndex]; + CurrentMappedTexture.AddMetaData(Data); +} diff --git a/Plugins/NDIIO/Source/Core/Classes/Objects/Media/NDIMediaSoundWave.cpp b/Plugins/NDIIO/Source/Core/Classes/Objects/Media/NDIMediaSoundWave.cpp new file mode 100644 index 0000000..af0fade --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Objects/Media/NDIMediaSoundWave.cpp @@ -0,0 +1,77 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#include +#include + + +UNDIMediaSoundWave::UNDIMediaSoundWave(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer) +{ + // Set the Default Values for this object + this->bLooping = false; + this->NumChannels = 1; + this->SampleRate = 48000; + + this->Duration = INDEFINITELY_LOOPING_DURATION; +} + +/** + Set the Media Source of this object, so that when this object is called to 'GeneratePCMData' by the engine + we can request the media source to provide the pcm data from the current connected source +*/ +void UNDIMediaSoundWave::SetConnectionSource(UNDIMediaReceiver* InMediaSource) +{ + // Ensure there is no thread contention for generating pcm data from the connection source + FScopeLock Lock(&SyncContext); + + // Do we have a media source object to work with + if (this->MediaSource != nullptr) + { + // Are we already registered with the incoming media source object + if (this->MediaSource != InMediaSource) + { + // It doesn't look like we are registered with the incoming, make sure + // to unregistered with the previous source + this->MediaSource->UnregisterAudioWave(this); + } + } + + // Ensure we have a reference to the media source object + this->MediaSource = InMediaSource; +} + +/** + Called by the engine to generate pcm data to be 'heard' by audio listener objects +*/ +int32 UNDIMediaSoundWave::OnGeneratePCMAudio(TArray& OutAudio, int32 NumSamples) +{ + // Ensure there is no thread contention for generating pcm data from the connection source + FScopeLock Lock(&SyncContext); + + // set the default value, in case we have no connection source + int32 samples_generated = 0; + + OutAudio.Reset(); + OutAudio.AddZeroed(NumSamples * sizeof(int16)); + + // check the connection source and continue + if (this->MediaSource != nullptr) + { + samples_generated = MediaSource->GeneratePCMData(this, OutAudio.GetData(), NumSamples); + } + + // return to the engine the number of samples actually generated + return samples_generated; +} + +bool 
UNDIMediaSoundWave::IsReadyForFinishDestroy() +{ + // Ensure that there is no thread contention for generating data + FScopeLock Lock(&SyncContext); + + return USoundWaveProcedural::IsReadyForFinishDestroy(); +} diff --git a/Plugins/NDIIO/Source/Core/Classes/Objects/Media/NDIMediaTexture2D.cpp b/Plugins/NDIIO/Source/Core/Classes/Objects/Media/NDIMediaTexture2D.cpp new file mode 100644 index 0000000..c5a643a --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Objects/Media/NDIMediaTexture2D.cpp @@ -0,0 +1,153 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include +#include +#include + +UNDIMediaTexture2D::UNDIMediaTexture2D(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer) +{ + this->SetMyResource(nullptr); +} + +void UNDIMediaTexture2D::UpdateTextureReference(FRHICommandList& RHICmdList, FTextureRHIRef Reference) +{ + if (GetMyResource() != nullptr) + { + if (Reference.IsValid() && GetMyResource()->TextureRHI != Reference) + { + GetMyResource()->TextureRHI = (FTextureRHIRef&)Reference; + RHIUpdateTextureReference(TextureReference.TextureReferenceRHI, GetMyResource()->TextureRHI); + } + else if (!Reference.IsValid()) + { + if (FNDIMediaTextureResource* TextureResource = static_cast(this->GetMyResource())) + { + ENQUEUE_RENDER_COMMAND(FNDIMediaTexture2DUpdateTextureReference) + ([this](FRHICommandListImmediate& RHICmdList) { + + static int32 DefaultWidth = 1280; + static int32 DefaultHeight = 720; + + // Set the default video texture to reference nothing + TRefCountPtr RenderableTexture; + + const FRHITextureCreateDesc CreateDesc = FRHITextureCreateDesc::Create2D(TEXT("NDIMediaTexture2DUpdateTextureReference")) + .SetExtent(DefaultWidth, DefaultHeight) + .SetFormat(EPixelFormat::PF_B8G8R8A8) + .SetNumMips(1) + 
.SetFlags(ETextureCreateFlags::Dynamic | ETextureCreateFlags::RenderTargetable) + .SetClearValue(FClearValueBinding(FLinearColor(0.0f, 0.0f, 0.0f))); + + RenderableTexture = RHICreateTexture(CreateDesc); + + GetMyResource()->TextureRHI = (FTextureRHIRef&)RenderableTexture; + + RHIUpdateTextureReference(TextureReference.TextureReferenceRHI, GetMyResource()->TextureRHI); + }); + + // Make sure _RenderThread is executed before continuing + FlushRenderingCommands(); + } + } + } +} + +FTextureResource* UNDIMediaTexture2D::CreateResource() +{ + if (this->GetMyResource() != nullptr) + { + delete this->GetMyResource(); + this->SetMyResource(nullptr); + } + + if (FNDIMediaTextureResource* TextureResource = new FNDIMediaTextureResource(this)) + { + this->SetMyResource(TextureResource); + + ENQUEUE_RENDER_COMMAND(FNDIMediaTexture2DUpdateTextureReference) + ([this](FRHICommandListImmediate& RHICmdList) { + + static int32 DefaultWidth = 1280; + static int32 DefaultHeight = 720; + + // Set the default video texture to reference nothing + TRefCountPtr RenderableTexture; + + const FRHITextureCreateDesc CreateDesc = FRHITextureCreateDesc::Create2D(TEXT("NDIMediaTexture2DCreateResourceTexture")) + .SetExtent(DefaultWidth, DefaultHeight) + .SetFormat(EPixelFormat::PF_B8G8R8A8) + .SetNumMips(1) + .SetFlags(ETextureCreateFlags::Dynamic | ETextureCreateFlags::RenderTargetable) + .SetClearValue(FClearValueBinding(FLinearColor(0.0f, 0.0f, 0.0f))); + + RenderableTexture = RHICreateTexture(CreateDesc); + + GetMyResource()->TextureRHI = (FTextureRHIRef&)RenderableTexture; + + RHIUpdateTextureReference(TextureReference.TextureReferenceRHI, GetMyResource()->TextureRHI); + }); + } + + return this->GetMyResource(); +} + +void UNDIMediaTexture2D::GetResourceSizeEx(FResourceSizeEx& CumulativeResourceSize) +{ + Super::GetResourceSizeEx(CumulativeResourceSize); + + if (FNDIMediaTextureResource* CurrentResource = static_cast(this->GetMyResource())) + { + 
CumulativeResourceSize.AddUnknownMemoryBytes(CurrentResource->GetResourceSize()); + } +} + +float UNDIMediaTexture2D::GetSurfaceHeight() const +{ + return GetMyResource() != nullptr ? GetMyResource()->GetSizeY() : 0.0f; +} + +float UNDIMediaTexture2D::GetSurfaceWidth() const +{ + return GetMyResource() != nullptr ? GetMyResource()->GetSizeX() : 0.0f; +} + +float UNDIMediaTexture2D::GetSurfaceDepth() const +{ + return 0.0f; +} + +uint32 UNDIMediaTexture2D::GetSurfaceArraySize() const +{ + return 0; +} + +EMaterialValueType UNDIMediaTexture2D::GetMaterialType() const +{ + return MCT_Texture2D; +} + + +ETextureClass UNDIMediaTexture2D::GetTextureClass() const +{ + return ETextureClass::Other2DNoSource; +} + +void UNDIMediaTexture2D::SetMyResource(FTextureResource* ResourceIn) +{ + SetResource(ResourceIn); +} + +FTextureResource* UNDIMediaTexture2D::GetMyResource() +{ + return GetResource(); +} + +const FTextureResource* UNDIMediaTexture2D::GetMyResource() const +{ + return GetResource(); +} diff --git a/Plugins/NDIIO/Source/Core/Classes/Objects/Media/NDIMediaTextureResource.cpp b/Plugins/NDIIO/Source/Core/Classes/Objects/Media/NDIMediaTextureResource.cpp new file mode 100644 index 0000000..098bf84 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Objects/Media/NDIMediaTextureResource.cpp @@ -0,0 +1,73 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#include + +#include +#include +#include +#include +#include + +/** + Constructs a new instance of this object specifying a media texture owner + + @param Owner The media object used as the owner for this object +*/ +FNDIMediaTextureResource::FNDIMediaTextureResource(UNDIMediaTexture2D* Owner) +{ + this->MediaTexture = Owner; +} + +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later +void FNDIMediaTextureResource::InitRHI(FRHICommandListBase& RHICmdList) +#else +void FNDIMediaTextureResource::InitDynamicRHI() +#endif +{ + if (this->MediaTexture != nullptr) + { + FSamplerStateInitializerRHI SamplerStateInitializer( + (ESamplerFilter)UDeviceProfileManager::Get().GetActiveProfile()->GetTextureLODSettings()->GetSamplerFilter( + MediaTexture), + AM_Border, AM_Border, AM_Wrap); + + SamplerStateRHI = RHICreateSamplerState(SamplerStateInitializer); + } +} + +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later +void FNDIMediaTextureResource::ReleaseRHI() +#else +void FNDIMediaTextureResource::ReleaseDynamicRHI() +#endif +{ + // Release the TextureRHI bound by this object + this->TextureRHI.SafeRelease(); + + // Ensure that we have a owning media texture + if (this->MediaTexture != nullptr) + { + // Remove the texture reference associated with the owner texture object + RHIUpdateTextureReference(MediaTexture->TextureReference.TextureReferenceRHI, nullptr); + } +} + +SIZE_T FNDIMediaTextureResource::GetResourceSize() +{ + return CalcTextureSize(GetSizeX(), GetSizeY(), EPixelFormat::PF_A8R8G8B8, 1); +} + +uint32 FNDIMediaTextureResource::GetSizeX() const +{ + return this->TextureRHI.IsValid() ? TextureRHI->GetSizeXYZ().X : 0; +} + +uint32 FNDIMediaTextureResource::GetSizeY() const +{ + return this->TextureRHI.IsValid() ? 
TextureRHI->GetSizeXYZ().Y : 0; +} \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Classes/Player/NDIMediaPlayer.cpp b/Plugins/NDIIO/Source/Core/Classes/Player/NDIMediaPlayer.cpp new file mode 100644 index 0000000..1392435 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Player/NDIMediaPlayer.cpp @@ -0,0 +1,492 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + + +#include "NDIMediaPlayer.h" + +#include +#include +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later +#include +#endif +#include +#include +#include +#include + + +#define LOCTEXT_NAMESPACE "FNDIMediaPlayer" + + + +// An NDI-derived media texture sample, representing a frame of video +class NDIMediaTextureSample : public FMediaIOCoreTextureSampleBase, public IMediaTextureSampleConverter +{ + using Super = FMediaIOCoreTextureSampleBase; + +public: + + NDIMediaTextureSample() = default; + virtual ~NDIMediaTextureSample() = default; + + bool Initialize(const NDIlib_video_frame_v2_t& InVideoFrame, FTimespan InTime, UNDIMediaReceiver* InReceiver) + { + FreeSample(); + + VideoFrame = InVideoFrame; + Receiver = InReceiver; + + if (InVideoFrame.FourCC == NDIlib_FourCC_video_type_UYVY) + SetBuffer(InVideoFrame.p_data, InVideoFrame.line_stride_in_bytes * InVideoFrame.yres); + else if (InVideoFrame.FourCC == NDIlib_FourCC_video_type_UYVA) + SetBuffer(InVideoFrame.p_data, InVideoFrame.line_stride_in_bytes * InVideoFrame.yres + + InVideoFrame.xres*InVideoFrame.yres); + else + return false; + + VideoFrame.p_data = Buffer.GetData(); + + SetProperties(InVideoFrame.line_stride_in_bytes, InVideoFrame.xres, InVideoFrame.yres, EMediaTextureSampleFormat::CharUYVY, + InTime, FFrameRate(InVideoFrame.frame_rate_N, 
InVideoFrame.frame_rate_D), FTimecode(), + true); + + return true; + } + + virtual const FMatrix& GetYUVToRGBMatrix() const override + { + return MediaShaders::YuvToRgbRec709Scaled; + } + +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later + virtual void CopyConfiguration(const TSharedPtr& SourceSample) override + { + Super::CopyConfiguration(SourceSample); + + if (SourceSample.IsValid()) + { + TSharedPtr NDISamplePtr = StaticCastSharedPtr(SourceSample); + VideoFrame = NDISamplePtr->VideoFrame; + Receiver = NDISamplePtr->Receiver; + } + } +#endif + + virtual uint32 GetConverterInfoFlags() const override + { + return ConverterInfoFlags_WillCreateOutputTexture; + } + +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 5)) // 5.5 or later + virtual bool Convert(FRHICommandListImmediate& RHICmdList, FTextureRHIRef& InDstTexture, const FConversionHints& Hints) override + { + if (!Receiver) + return false; + + FTextureRHIRef DstTexture(Receiver->DisplayFrame(VideoFrame)); + InDstTexture = DstTexture; + + return true; + } +#else + virtual bool Convert(FTexture2DRHIRef & InDstTexture, const FConversionHints & Hints) override + { + if (!Receiver) + return false; + + FTexture2DRHIRef DstTexture(Receiver->DisplayFrame(VideoFrame)); + InDstTexture = DstTexture; + + return true; + } +#endif + +private: + NDIlib_video_frame_v2_t VideoFrame; + UNDIMediaReceiver* Receiver { nullptr }; + //FMediaTimeStamp Time; + //std::vector Data; +}; + +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later +class NDIMediaTextureSampleConverter : public FMediaIOCoreTextureSampleConverter +{ + using Super = FMediaIOCoreTextureSampleConverter; + +public: + + NDIMediaTextureSampleConverter() = default; + virtual ~NDIMediaTextureSampleConverter() = default; + + virtual void Setup(const TSharedPtr& InSample) override + { + 
FMediaIOCoreTextureSampleConverter::Setup(InSample); + JITRProxySample = InSample; + } + + virtual uint32 GetConverterInfoFlags() const override + { + return ConverterInfoFlags_WillCreateOutputTexture; + } + +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 5)) // 5.5 or later + virtual bool Convert(FRHICommandListImmediate& RHICmdList, FTextureRHIRef& InDstTexture, const FConversionHints& Hints) override + { + if (FMediaIOCoreTextureSampleConverter::Convert(RHICmdList, InDstTexture, Hints)) + { + TSharedPtr SamplePtr = JITRProxySample.Pin(); + if (SamplePtr.IsValid()) + { + TSharedPtr NDISamplePtr = StaticCastSharedPtr(SamplePtr); + return NDISamplePtr->Convert(RHICmdList, InDstTexture, Hints); + } + } + + return false; + } +#else + virtual bool Convert(FTexture2DRHIRef& InDstTexture, const FConversionHints& Hints) override + { + if (FMediaIOCoreTextureSampleConverter::Convert(InDstTexture, Hints)) + { + TSharedPtr SamplePtr = JITRProxySample.Pin(); + if (SamplePtr.IsValid()) + { + TSharedPtr NDISamplePtr = StaticCastSharedPtr(SamplePtr); + return NDISamplePtr->Convert(InDstTexture, Hints); + } + } + + return false; + } +#endif + +private: + TWeakPtr JITRProxySample; +}; +#endif + + +class NDIMediaTextureSamplePool : public TMediaObjectPool +{}; + + +// An NDI-derived media audio sample, representing a frame of audio +class NDIMediaAudioSample : public FMediaIOCoreAudioSampleBase +{ + using Super = FMediaIOCoreAudioSampleBase; + +public: +}; + +class NDIMediaAudioSamplePool : public TMediaObjectPool +{}; + + + +FNDIMediaPlayer::FNDIMediaPlayer(IMediaEventSink& InEventSink) + : Super(InEventSink) + , NDIPlayerState(EMediaState::Closed) + , EventSink(InEventSink) + , TextureSamplePool(new NDIMediaTextureSamplePool) + , AudioSamplePool(new NDIMediaAudioSamplePool) +{} + + +FNDIMediaPlayer::~FNDIMediaPlayer() +{ + Close(); + + delete TextureSamplePool; + delete AudioSamplePool; +} + + +FGuid 
FNDIMediaPlayer::GetPlayerPluginGUID() const +{ + static FGuid PlayerPluginGUID(0x71b13c2b, 0x70874965, 0x8a0e23f7, 0x5be6698f); + return PlayerPluginGUID; +} + + +bool FNDIMediaPlayer::Open(const FString& Url, const IMediaOptions* Options) +{ + if (!Super::Open(Url, Options)) + { + return false; + } + + MaxNumVideoFrameBuffer = Options->GetMediaOption(NDIMediaOption::MaxVideoFrameBuffer, (int64)8); + MaxNumAudioFrameBuffer = Options->GetMediaOption(NDIMediaOption::MaxAudioFrameBuffer, (int64)8); + MaxNumMetadataFrameBuffer = Options->GetMediaOption(NDIMediaOption::MaxAncillaryFrameBuffer, (int64)8); + + // Setup our different supported channels based on source settings + SetupSampleChannels(); + + // If the player is opened with an NDIMediaReceiver, use that. Otherwise create an internal one. + bool bIsNDIMediaReceiver = Options->HasMediaOption(NDIMediaOption::IsNDIMediaReceiver); + if (bIsNDIMediaReceiver) + { + Receiver = static_cast(const_cast(Options)); + bInternalReceiver = false; + } + else + { + Receiver = NewObject(); + bInternalReceiver = true; + } + + // Hook into the video and audio captures + Receiver->OnNDIReceiverVideoCaptureEvent.Remove(VideoCaptureEventHandle); + VideoCaptureEventHandle = Receiver->OnNDIReceiverVideoCaptureEvent.AddLambda([this](UNDIMediaReceiver* receiver, const NDIlib_video_frame_v2_t& video_frame) + { + this->DisplayFrame(video_frame); + }); + Receiver->OnNDIReceiverAudioCaptureEvent.Remove(AudioCaptureEventHandle); + AudioCaptureEventHandle = Receiver->OnNDIReceiverAudioCaptureEvent.AddLambda([this](UNDIMediaReceiver* receiver, const NDIlib_audio_frame_v2_t& audio_frame) + { + this->PlayAudio(audio_frame); + }); + + // Control the player's state based on the receiver connecting and disconnecting + Receiver->OnNDIReceiverConnectedEvent.Remove(ConnectedEventHandle); + ConnectedEventHandle = Receiver->OnNDIReceiverConnectedEvent.AddLambda([this](UNDIMediaReceiver* receiver) + { + this->NDIPlayerState = EMediaState::Playing; + }); 
+ Receiver->OnNDIReceiverDisconnectedEvent.Remove(DisconnectedEventHandle); + DisconnectedEventHandle = Receiver->OnNDIReceiverDisconnectedEvent.AddLambda([this](UNDIMediaReceiver* receiver) + { + this->NDIPlayerState = EMediaState::Closed; + }); + + + // Get ready to connect + CurrentState = EMediaState::Preparing; + NDIPlayerState = EMediaState::Preparing; + EventSink.ReceiveMediaEvent(EMediaEvent::MediaConnecting); + + // Start up the receiver under the player's control. + // Use the provided URL as the source if given, otherwise use the connection info set for the receiver + FString Scheme; + FString Location; + if (Url.Split(TEXT("://"), &Scheme, &Location, ESearchCase::CaseSensitive)) + { + FNDIConnectionInformation ConnectionInformation = Receiver->ConnectionSetting; + ConnectionInformation.SourceName = Location; + Receiver->Initialize(ConnectionInformation, UNDIMediaReceiver::EUsage::Controlled); + } + else + { + Receiver->Initialize(UNDIMediaReceiver::EUsage::Controlled); + } + + return true; +} + + +void FNDIMediaPlayer::Close() +{ + NDIPlayerState = EMediaState::Closed; + + if (Receiver != nullptr) + { + // Disconnect from receiver events + Receiver->OnNDIReceiverVideoCaptureEvent.Remove(VideoCaptureEventHandle); + VideoCaptureEventHandle.Reset(); + Receiver->OnNDIReceiverAudioCaptureEvent.Remove(AudioCaptureEventHandle); + AudioCaptureEventHandle.Reset(); + Receiver->OnNDIReceiverConnectedEvent.Remove(ConnectedEventHandle); + ConnectedEventHandle.Reset(); + Receiver->OnNDIReceiverDisconnectedEvent.Remove(DisconnectedEventHandle); + DisconnectedEventHandle.Reset(); + + // Shut down the receiver + Receiver->Shutdown(); + + // If the player created the receiver, destroy the receiver + if (bInternalReceiver) + Receiver->ConditionalBeginDestroy(); + + Receiver = nullptr; + bInternalReceiver = false; + } + + TextureSamplePool->Reset(); + AudioSamplePool->Reset(); + + Super::Close(); +} + + +void FNDIMediaPlayer::TickInput(FTimespan DeltaTime, FTimespan 
Timecode) +{ + // Update player state + EMediaState NewState = NDIPlayerState; + + if (NewState != CurrentState) + { + CurrentState = NewState; + if (CurrentState == EMediaState::Playing) + { + EventSink.ReceiveMediaEvent(EMediaEvent::TracksChanged); + EventSink.ReceiveMediaEvent(EMediaEvent::MediaOpened); + EventSink.ReceiveMediaEvent(EMediaEvent::PlaybackResumed); + } + else if (NewState == EMediaState::Error) + { + EventSink.ReceiveMediaEvent(EMediaEvent::MediaOpenFailed); + Close(); + } + } + + if (CurrentState != EMediaState::Playing) + { + return; + } + + TickTimeManagement(); +} + + +void FNDIMediaPlayer::TickFetch(FTimespan DeltaTime, FTimespan Timecode) +{ + Super::TickFetch(DeltaTime, Timecode); + + if ((CurrentState == EMediaState::Preparing) || (CurrentState == EMediaState::Playing)) + { + if (Receiver != nullptr) + { + // Ask receiver to capture a new frame of video and audio. + // Will call DisplayFrame() and PlayAudio() through capture event. + Receiver->CaptureConnectedAudio(); + Receiver->CaptureConnectedVideo(); + } + } + + if (CurrentState == EMediaState::Playing) + { + ProcessFrame(); + VerifyFrameDropCount(); + } +} + + +void FNDIMediaPlayer::ProcessFrame() +{ + if (CurrentState == EMediaState::Playing) + { + // No need to lock here. That info is only used for debug information. 
+ //AudioTrackFormat.NumChannels = 0;//NDIThreadAudioChannels; + //AudioTrackFormat.SampleRate = 0;//NDIThreadAudioSampleRate; + } +} + + +void FNDIMediaPlayer::DisplayFrame(const NDIlib_video_frame_v2_t& video_frame) +{ + auto TextureSample = TextureSamplePool->AcquireShared(); + + if (TextureSample->Initialize(video_frame, FTimespan::FromSeconds(GetPlatformSeconds()), Receiver)) + { +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later + AddVideoSample(TextureSample); +#else + Samples->AddVideo(TextureSample); +#endif + } +} + + +void FNDIMediaPlayer::PlayAudio(const NDIlib_audio_frame_v2_t& audio_frame) +{ + auto AudioSample = AudioSamplePool->AcquireShared(); + + // UE wants 32bit signed interleaved audio data, so need to convert the NDI audio. + // Fortunately the NDI library has a utility function to do that. + + // Get a buffer to convert to + const int32 available_samples = audio_frame.no_samples * audio_frame.no_channels; + void* SampleBuffer = AudioSample->RequestBuffer(available_samples); + + if (SampleBuffer != nullptr) + { + // Format to convert to + NDIlib_audio_frame_interleaved_32s_t audio_frame_32s( + audio_frame.sample_rate, + audio_frame.no_channels, + audio_frame.no_samples, + audio_frame.timecode, + 20, + static_cast(SampleBuffer)); + + // Convert received NDI audio + NDIlib_util_audio_to_interleaved_32s_v2(&audio_frame, &audio_frame_32s); + + // Supply converted audio data + if (AudioSample->SetProperties(available_samples + , audio_frame_32s.no_channels + , audio_frame_32s.sample_rate + , FTimespan::FromSeconds(GetPlatformSeconds()) + , TOptional())) + { + Samples->AddAudio(AudioSample); + } + } +} + + +void FNDIMediaPlayer::VerifyFrameDropCount() +{ +} + + +bool FNDIMediaPlayer::IsHardwareReady() const +{ + return NDIPlayerState == EMediaState::Playing ? 
true : false; +} + + +void FNDIMediaPlayer::SetupSampleChannels() +{ + FMediaIOSamplingSettings VideoSettings = BaseSettings; + VideoSettings.BufferSize = MaxNumVideoFrameBuffer; + Samples->InitializeVideoBuffer(VideoSettings); + + FMediaIOSamplingSettings AudioSettings = BaseSettings; + AudioSettings.BufferSize = MaxNumAudioFrameBuffer; + Samples->InitializeAudioBuffer(AudioSettings); + + FMediaIOSamplingSettings MetadataSettings = BaseSettings; + MetadataSettings.BufferSize = MaxNumMetadataFrameBuffer; + Samples->InitializeMetadataBuffer(MetadataSettings); +} + + +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later +TSharedPtr FNDIMediaPlayer::AcquireTextureSample_AnyThread() const +{ + return TextureSamplePool->AcquireShared(); +} + +TSharedPtr FNDIMediaPlayer::CreateTextureSampleConverter() const +{ + return MakeShared(); +} +#endif + + +//~ ITimedDataInput interface +#if WITH_EDITOR +const FSlateBrush* FNDIMediaPlayer::GetDisplayIcon() const +{ + return nullptr; +} +#endif + + +#undef LOCTEXT_NAMESPACE diff --git a/Plugins/NDIIO/Source/Core/Classes/Player/NDIMediaPlayer.h b/Plugins/NDIIO/Source/Core/Classes/Player/NDIMediaPlayer.h new file mode 100644 index 0000000..f258627 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Player/NDIMediaPlayer.h @@ -0,0 +1,75 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#pragma once + +#include +#include +#include + + +class FNDIMediaPlayer : public FMediaIOCorePlayerBase +{ + using Super = FMediaIOCorePlayerBase; + +public: + FNDIMediaPlayer(IMediaEventSink& InEventSink); + + virtual ~FNDIMediaPlayer(); + + + //~ IMediaPlayer interface + virtual FGuid GetPlayerPluginGUID() const override; + virtual bool Open(const FString& Url, const IMediaOptions* Options) override; + virtual void Close() override; + + virtual void TickInput(FTimespan DeltaTime, FTimespan Timecode) override; + virtual void TickFetch(FTimespan DeltaTime, FTimespan Timecode) override; + +protected: + virtual bool IsHardwareReady() const override; + virtual void SetupSampleChannels() override; +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later + virtual TSharedPtr AcquireTextureSample_AnyThread() const override; + virtual TSharedPtr CreateTextureSampleConverter() const override; +#endif + + void DisplayFrame(const NDIlib_video_frame_v2_t& video_frame); + void PlayAudio(const NDIlib_audio_frame_v2_t& audio_frame); + + void ProcessFrame(); + void VerifyFrameDropCount(); + +public: + //~ ITimedDataInput interface +#if WITH_EDITOR + virtual const FSlateBrush* GetDisplayIcon() const override; +#endif + +private: + /** Max sample count our different buffer can hold. Taken from MediaSource */ + int32 MaxNumAudioFrameBuffer = 0; + int32 MaxNumMetadataFrameBuffer = 0; + int32 MaxNumVideoFrameBuffer = 0; + + /** Current state of the media player. */ + EMediaState NDIPlayerState = EMediaState::Closed; + + /** The media event handler. 
*/ + IMediaEventSink& EventSink; + + UNDIMediaReceiver* Receiver = nullptr; + bool bInternalReceiver = true; + + FDelegateHandle VideoCaptureEventHandle; + FDelegateHandle AudioCaptureEventHandle; + FDelegateHandle ConnectedEventHandle; + FDelegateHandle DisconnectedEventHandle; + + class NDIMediaTextureSamplePool* TextureSamplePool; + class NDIMediaAudioSamplePool* AudioSamplePool; +}; diff --git a/Plugins/NDIIO/Source/Core/Classes/Services/NDIConnectionService.cpp b/Plugins/NDIIO/Source/Core/Classes/Services/NDIConnectionService.cpp new file mode 100644 index 0000000..00cef43 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Services/NDIConnectionService.cpp @@ -0,0 +1,404 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#if WITH_EDITOR + +#include + +#endif + +/** Define Global Accessors */ + +FNDIConnectionServiceSendVideoEvent FNDIConnectionService::EventOnSendVideoFrame; +TMap FNDIConnectionService::SubmixSendAudioFrameEvents; + + +FCriticalSection FNDIConnectionService::AudioSyncContext; +FCriticalSection FNDIConnectionService::RenderSyncContext; + +/** ************************ **/ + +/** + Constructs a new instance of this object +*/ +FNDIConnectionService::FNDIConnectionService() {} + +// Begin the service +bool FNDIConnectionService::Start() +{ + if (!bIsInitialized) + { + bIsInitialized = true; + + // Define some basic properties + FNDIBroadcastConfiguration Configuration; + FString BroadcastName = TEXT("Unreal Engine"); + EObjectFlags Flags = RF_Public | RF_Standalone | RF_Transient | RF_MarkAsNative; + + bool bBeginBroadcastOnPlay = false; + + // Load the plugin settings for broadcasting the active viewport + if (auto* 
CoreSettings = NewObject()) + { + // Define the configuration properties + Configuration.FrameRate = CoreSettings->BroadcastRate; + Configuration.FrameSize = FIntPoint(FMath::Clamp(CoreSettings->PreferredFrameSize.X, 240, 3840), + FMath::Clamp(CoreSettings->PreferredFrameSize.Y, 240, 3840)); + + // Set the broadcast name + BroadcastName = CoreSettings->ApplicationStreamName; + + bBeginBroadcastOnPlay = CoreSettings->bBeginBroadcastOnPlay; + + // clean-up the settings object + CoreSettings->ConditionalBeginDestroy(); + CoreSettings = nullptr; + } + + /** Construct the Active Viewport video texture */ + this->VideoTexture = NewObject( + GetTransientPackage(), UTextureRenderTarget2D::StaticClass(), TEXT("NDIViewportVideoTexture"), Flags); + + /** Construct the active viewport sender */ + this->ActiveViewportSender = NewObject(GetTransientPackage(), UNDIMediaSender::StaticClass(), + TEXT("NDIViewportSender"), Flags); + + VideoTexture->UpdateResource(); + + // Update the active viewport sender, with the properties defined in the settings configuration + this->ActiveViewportSender->ChangeSourceName(BroadcastName); + this->ActiveViewportSender->ChangeVideoTexture(VideoTexture); + this->ActiveViewportSender->ChangeBroadcastConfiguration(Configuration); + + // Hook into the core for the end of frame handlers + FCoreDelegates::OnEndFrameRT.AddRaw(this, &FNDIConnectionService::OnEndRenderFrame); + + if (!GIsEditor) + { + FCoreDelegates::OnPostEngineInit.AddRaw(this, &FNDIConnectionService::OnPostEngineInit); + FCoreDelegates::OnEnginePreExit.AddRaw(this, &FNDIConnectionService::OnEnginePreExit); + if (bBeginBroadcastOnPlay) + BeginBroadcastingActiveViewport(); + } +#if WITH_EDITOR + else + { + FEditorDelegates::PostPIEStarted.AddLambda([this](const bool Success) { + if (auto* CoreSettings = NewObject()) + { + if (CoreSettings->bBeginBroadcastOnPlay == true) + BeginBroadcastingActiveViewport(); + else + BeginAudioCapture(); + + // clean-up the settings object + 
CoreSettings->ConditionalBeginDestroy(); + CoreSettings = nullptr; + } + bIsInPIEMode = true; + }); + FEditorDelegates::PrePIEEnded.AddLambda([this](const bool Success) { StopBroadcastingActiveViewport(); }); + } +#endif + } + + return true; +} + +// Stop the service +void FNDIConnectionService::Shutdown() +{ + // Wait for the sync context locks + FScopeLock AudioLock(&AudioSyncContext); + FScopeLock RenderLock(&RenderSyncContext); + + // reset the initialization properties + bIsInitialized = false; + + StopAudioCapture(); + + // unbind our handlers for our frame events + FCoreDelegates::OnEndFrame.RemoveAll(this); + FCoreDelegates::OnEndFrameRT.RemoveAll(this); + + // Cleanup the broadcasting of the active viewport + StopBroadcastingActiveViewport(); +} + + +// Handler for when the render thread frame has ended +void FNDIConnectionService::OnEndRenderFrame() +{ + FScopeLock Lock(&RenderSyncContext); + + if (bIsInitialized) + { + int64 ticks = FDateTime::Now().GetTimeOfDay().GetTicks(); + + if (FNDIConnectionService::EventOnSendVideoFrame.IsBound()) + { + FNDIConnectionService::EventOnSendVideoFrame.Broadcast(ticks); + } + } +} + +void FNDIConnectionService::BeginAudioCapture() +{ + if (bIsInitialized) + { + if (!bIsAudioInitialized) + { + if (GEngine) + { + FAudioDeviceHandle AudioDevice = GEngine->GetActiveAudioDevice(); + if (AudioDevice.IsValid()) + { +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 4)) // 5.4 or later + for (auto& SendAudioEvent : SubmixSendAudioFrameEvents) + { + if (SendAudioEvent.Key == nullptr) + AudioDevice->RegisterSubmixBufferListener(AsShared(), AudioDevice->GetMainSubmixObject()); + else + AudioDevice->RegisterSubmixBufferListener(AsShared(), *SendAudioEvent.Key); + } +#else + AudioDevice->RegisterSubmixBufferListener(this); +#endif + bIsAudioInitialized = true; + } + } + } + } +} + +void FNDIConnectionService::StopAudioCapture() +{ + if (bIsAudioInitialized) + { + if (GEngine) + { + 
FAudioDeviceHandle AudioDevice = GEngine->GetActiveAudioDevice(); + if (AudioDevice) + { +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 4)) // 5.4 or later + for (auto& SendAudioEvent : SubmixSendAudioFrameEvents) + { + if (SendAudioEvent.Key == nullptr) + AudioDevice->UnregisterSubmixBufferListener(AsShared(), AudioDevice->GetMainSubmixObject()); + else + AudioDevice->UnregisterSubmixBufferListener(AsShared(), *SendAudioEvent.Key); + } +#else + AudioDevice->UnregisterSubmixBufferListener(this); +#endif + } + } + bIsAudioInitialized = false; + } +} + +void FNDIConnectionService::OnPostEngineInit() +{ + BeginAudioCapture(); +} + +void FNDIConnectionService::OnEnginePreExit() +{ + StopAudioCapture(); +} + +bool FNDIConnectionService::BeginBroadcastingActiveViewport() +{ + if (!bIsBroadcastingActiveViewport && IsValid(ActiveViewportSender)) + { + // Load the plugin settings for broadcasting the active viewport + if (auto* CoreSettings = NewObject()) + { + // Define some basic properties + FNDIBroadcastConfiguration Configuration; + FString BroadcastName = TEXT("Unreal Engine"); + + // Define the configuration properties + Configuration.FrameRate = CoreSettings->BroadcastRate; + Configuration.FrameSize = FIntPoint(FMath::Clamp(CoreSettings->PreferredFrameSize.X, 240, 3840), + FMath::Clamp(CoreSettings->PreferredFrameSize.Y, 240, 3840)); + + // Set the broadcast name + BroadcastName = CoreSettings->ApplicationStreamName; + + // clean-up the settings object + CoreSettings->ConditionalBeginDestroy(); + CoreSettings = nullptr; + + // Update the active viewport sender, with the properties defined in the settings configuration + this->ActiveViewportSender->ChangeSourceName(BroadcastName); + this->ActiveViewportSender->ChangeBroadcastConfiguration(Configuration); + } + + // we don't want to perform the linear conversion for the active viewport, + // since it's already had the conversion completed by the engine before passing to the 
sender + ActiveViewportSender->PerformLinearTosRGBConversion(false); + + // Do not enable PTZ capabilities for active viewport sender + ActiveViewportSender->EnablePTZ(false); + + // Initialize the sender, this will automatically start rendering output via NDI + ActiveViewportSender->Initialize(nullptr); + + // We've initialized the active viewport + bIsBroadcastingActiveViewport = true; + + // However we need to update the 'Video Texture' to the active viewport back buffer... + FSlateApplication::Get().GetRenderer()->OnPreResizeWindowBackBuffer().AddRaw( + this, &FNDIConnectionService::OnActiveViewportBackbufferPreResize); + FSlateApplication::Get().GetRenderer()->OnBackBufferReadyToPresent().AddRaw( + this, &FNDIConnectionService::OnActiveViewportBackbufferReadyToPresent); + + BeginAudioCapture(); + } + + // always return true + return true; +} + +// Handler for when the active viewport back buffer has been resized +void FNDIConnectionService::OnActiveViewportBackbufferPreResize(void* Backbuffer) +{ + check(IsInGameThread()); + + // Ensure we have a valid video texture + FTextureResource* TextureResource = GetVideoTextureResource(); + if (TextureResource != nullptr) + { + FRenderCommandFence Fence; + + TextureResource->TextureRHI.SafeRelease(); + this->ActiveViewportSender->ChangeVideoTexture(VideoTexture); + + ENQUEUE_RENDER_COMMAND(FlushRHIThreadToUpdateTextureRenderTargetReference)( + [this](FRHICommandListImmediate& RHICmdList) + { + RHIUpdateTextureReference(VideoTexture->TextureReference.TextureReferenceRHI, nullptr); + RHICmdList.ImmediateFlush(EImmediateFlushType::FlushRHIThread); + }); + + // Wait for render thread to finish, so that renderthread texture references are updated + Fence.BeginFence(); + Fence.Wait(); + } +} + +// Handler for when the back buffer is read to present to the end user +void FNDIConnectionService::OnActiveViewportBackbufferReadyToPresent(SWindow& Window, + const FTextureRHIRef& Backbuffer) +{ + if (Window.GetType() == 
EWindowType::GameWindow || (Window.IsRegularWindow() && IsRunningInPIE())) + { + FTextureResource* TextureResource = GetVideoTextureResource(); + if (TextureResource != nullptr) + { + // Lets improve the performance a bit + if (TextureResource->TextureRHI != Backbuffer) + { + TextureResource->TextureRHI = (FTextureRHIRef&)Backbuffer; + this->ActiveViewportSender->ChangeVideoTexture(VideoTexture); + RHIUpdateTextureReference(VideoTexture->TextureReference.TextureReferenceRHI, Backbuffer); + } + } + } +} + +void FNDIConnectionService::StopBroadcastingActiveViewport() +{ + // Wait for the sync context locks + FScopeLock RenderLock(&RenderSyncContext); + + // reset the initialization properties + bIsInPIEMode = false; + + StopAudioCapture(); + + // Ensure that if the active viewport sender is active, that we shut it down + if (IsValid(this->ActiveViewportSender)) + { + FSlateApplication::Get().GetRenderer()->OnPreResizeWindowBackBuffer().RemoveAll(this); + FSlateApplication::Get().GetRenderer()->OnBackBufferReadyToPresent().RemoveAll(this); + + // shutdown the active viewport sender (just in case it was activated) + this->ActiveViewportSender->Shutdown(); + + // reset the broadcasting flag, so that we can restart the broadcast later + this->bIsBroadcastingActiveViewport = false; + + FTextureResource* TextureResource = GetVideoTextureResource(); + if (TextureResource != nullptr) + { + TextureResource->TextureRHI.SafeRelease(); + this->ActiveViewportSender->ChangeVideoTexture(VideoTexture); + } + } +} + + +FTextureResource* FNDIConnectionService::GetVideoTextureResource() const +{ + if(IsValid(this->VideoTexture)) + return this->VideoTexture->GetResource(); + + return nullptr; +} + + +void FNDIConnectionService::OnNewSubmixBuffer(const USoundSubmix* OwningSubmix, float* AudioData, int32 NumSamples, int32 NumChannels, const int32 SampleRate, double AudioClock) +{ + if (NumSamples > 0) + { + FScopeLock Lock(&AudioSyncContext); + + if (bIsAudioInitialized) + { + int64 ticks 
= FDateTime::Now().GetTimeOfDay().GetTicks(); + +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 4)) // 5.4 or later + FAudioDeviceHandle AudioDevice = GEngine->GetActiveAudioDevice(); + if (&AudioDevice->GetMainSubmixObject() == OwningSubmix) + OwningSubmix = nullptr; +#else + OwningSubmix = nullptr; +#endif + + FNDIConnectionServiceSendAudioEvent* SendAudioEvent = SubmixSendAudioFrameEvents.Find(OwningSubmix); + if (SendAudioEvent) + { + if (SendAudioEvent->IsBound()) + { + SendAudioEvent->Broadcast(ticks, AudioData, NumSamples, NumChannels, SampleRate, AudioClock); + } + } + } + } +} + +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 4)) // 5.4 or later +const FString& FNDIConnectionService::GetListenerName() const +{ + static const FString ListenerName(TEXT("NDIConnectionServiceListener")); + return ListenerName; +} +#endif diff --git a/Plugins/NDIIO/Source/Core/Classes/Services/NDIFinderService.cpp b/Plugins/NDIIO/Source/Core/Classes/Services/NDIFinderService.cpp new file mode 100644 index 0000000..fed298b --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Services/NDIFinderService.cpp @@ -0,0 +1,232 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#include +#include +#include +#include + +/** Define Global Accessors */ + +static NDIlib_find_instance_t NDI_FIND_INSTANCE = nullptr; +static FCriticalSection NDI_FIND_SYNC_CONTEXT; + +FNDIFinderService::FNDISourceCollectionChangedEvent FNDIFinderService::EventOnNDISourceCollectionChanged; + +TArray FNDIFinderService::NetworkSourceCollection = TArray(); + +/** ************************ **/ + +FNDIFinderService::FNDIFinderService() +{ + if (NDI_FIND_INSTANCE == nullptr) + { + FScopeLock Lock(&NDI_FIND_SYNC_CONTEXT); + + NDI_FIND_INSTANCE = NDIlib_find_create_v2(nullptr); + } +} + +// Begin the service +bool FNDIFinderService::Start() +{ + if (!bIsThreadRunning && p_RunnableThread == nullptr) + { + if (NDI_FIND_INSTANCE != nullptr) + { + this->bIsThreadRunning = true; + p_RunnableThread = FRunnableThread::Create(this, TEXT("FNDIFinderService_Tick"), 0, TPri_BelowNormal); + + return bIsThreadRunning = p_RunnableThread != nullptr; + } + } + + return false; +} + +/** FRunnable Interface implementation for 'Init' */ +bool FNDIFinderService::Init() +{ + return NDI_FIND_INSTANCE != nullptr; +} + +/** FRunnable Interface implementation for 'Stop' */ +uint32 FNDIFinderService::Run() +{ + static const uint32 find_wait_time = 500; + + if (NDI_FIND_INSTANCE == nullptr) + return 0; + + // Only update when we are suppose to run + while (bIsThreadRunning) + { + // Wait up to 'find_wait_time' (in milliseconds) to determine whether new sources have been added + if (!NDIlib_find_wait_for_sources(NDI_FIND_INSTANCE, find_wait_time)) + { + // alright the source collection has stopped updating, did we change the network source collection? 
+ if (UpdateNetworkSourceCollection()) + { + // Broadcast the even on the game thread for thread safety purposes + AsyncTask(ENamedThreads::GameThread, []() { + if (FNDIFinderService::EventOnNDISourceCollectionChanged.IsBound()) + FNDIFinderService::EventOnNDISourceCollectionChanged.Broadcast(); + }); + } + } + } + + // return success + return 1; +} + +/** FRunnable Interface implementation for 'Run' */ +void FNDIFinderService::Shutdown() +{ + if (p_RunnableThread != nullptr) + { + this->bIsThreadRunning = false; + + p_RunnableThread->WaitForCompletion(); + p_RunnableThread = nullptr; + } + + // Ensure we unload the finder instance + if (NDI_FIND_INSTANCE != nullptr) + NDIlib_find_destroy(NDI_FIND_INSTANCE); +} + +// Stop the service +void FNDIFinderService::Stop() +{ + Shutdown(); +} + +bool FNDIFinderService::UpdateNetworkSourceCollection() +{ + uint32 no_sources = 0; + bool bHasCollectionChanged = false; + + if (NDI_FIND_INSTANCE != nullptr) + { + const NDIlib_source_t* p_sources = NDIlib_find_get_current_sources(NDI_FIND_INSTANCE, &no_sources); + + // Change Scope + { + FScopeLock lock(&NDI_FIND_SYNC_CONTEXT); + + bHasCollectionChanged = FNDIFinderService::NetworkSourceCollection.Num() != no_sources; + + if (no_sources > 0 && p_sources != nullptr) + { + uint32 CurrentSourceCount = NetworkSourceCollection.Num(); + + for (uint32 iter = 0; iter < no_sources; iter++) + { + if (iter >= CurrentSourceCount) + { + NetworkSourceCollection.Add(FNDIConnectionInformation()); + } + + const NDIlib_source_t* SourceInformation = &p_sources[iter]; + FNDIConnectionInformation* CollectionSource = &NetworkSourceCollection[iter]; + + bHasCollectionChanged |= SourceInformation->p_url_address != CollectionSource->Url; + CollectionSource->Url = SourceInformation->p_url_address; + CollectionSource->SourceName = SourceInformation->p_ndi_name; + FString SourceName = SourceInformation->p_ndi_name; + SourceName.Split(TEXT(" "), &CollectionSource->MachineName, 
&CollectionSource->StreamName); + + // Now that the MachineName and StreamName have been split, cleanup the stream name + CollectionSource->StreamName.RemoveFromStart("("); + CollectionSource->StreamName.RemoveFromEnd(")"); + } + + if (CurrentSourceCount > no_sources) + { +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 5)) // 5.5 or later + NetworkSourceCollection.RemoveAt(no_sources, CurrentSourceCount - no_sources, EAllowShrinking::Yes); +#else + NetworkSourceCollection.RemoveAt(no_sources, CurrentSourceCount - no_sources, true); +#endif + bHasCollectionChanged = true; + } + } + else if (NetworkSourceCollection.Num() > 0) + { + NetworkSourceCollection.Empty(); + bHasCollectionChanged = true; + } + + bHasCollectionChanged |= NetworkSourceCollection.Num() != no_sources; + } + } + + return bHasCollectionChanged; +} + +/** Call to update an existing collection of network sources to match the current collection */ +bool FNDIFinderService::UpdateSourceCollection(TArray& InSourceCollection) +{ + bool bHasCollectionChanged = false; + + { + FScopeLock Lock(&NDI_FIND_SYNC_CONTEXT); + + const uint32& no_sources = NetworkSourceCollection.Num(); + bHasCollectionChanged = InSourceCollection.Num() != no_sources; + + if (no_sources > 0) + { + uint32 CurrentSourceCount = InSourceCollection.Num(); + + for (uint32 iter = 0; iter < no_sources; iter++) + { + if (iter >= CurrentSourceCount) + { + InSourceCollection.Add(FNDIConnectionInformation()); + CurrentSourceCount = InSourceCollection.Num(); + } + + FNDIConnectionInformation* CollectionSource = &InSourceCollection[iter]; + const FNDIConnectionInformation* SourceInformation = &NetworkSourceCollection[iter]; + + bHasCollectionChanged |= SourceInformation->Url != CollectionSource->Url; + + CollectionSource->Url = SourceInformation->Url; + CollectionSource->SourceName = SourceInformation->SourceName; + CollectionSource->MachineName = SourceInformation->MachineName; + 
CollectionSource->StreamName = SourceInformation->StreamName; + } + + if (CurrentSourceCount > no_sources) + { +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 5)) // 5.5 or later + InSourceCollection.RemoveAt(no_sources, CurrentSourceCount - no_sources, EAllowShrinking::Yes); +#else + InSourceCollection.RemoveAt(no_sources, CurrentSourceCount - no_sources, true); +#endif + bHasCollectionChanged = true; + } + } + else if (InSourceCollection.Num() > 0) + { + InSourceCollection.Empty(); + bHasCollectionChanged = true; + } + } + + return bHasCollectionChanged; +} + +/** Get the available sources on the network */ +const TArray FNDIFinderService::GetNetworkSourceCollection() +{ + FScopeLock Lock(&NDI_FIND_SYNC_CONTEXT); + + return FNDIFinderService::NetworkSourceCollection; +} diff --git a/Plugins/NDIIO/Source/Core/Classes/Structures/NDIBroadcastConfiguration.cpp b/Plugins/NDIIO/Source/Core/Classes/Structures/NDIBroadcastConfiguration.cpp new file mode 100644 index 0000000..5399f0f --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Structures/NDIBroadcastConfiguration.cpp @@ -0,0 +1,50 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#include + +/** Copies an existing instance to this object */ +FNDIBroadcastConfiguration::FNDIBroadcastConfiguration(const FNDIBroadcastConfiguration& other) +{ + // perform a deep copy of the 'other' structure and store the values in this object + this->FrameRate = other.FrameRate; + this->FrameSize = other.FrameSize; +} + +/** Copies existing instance properties to this object */ +FNDIBroadcastConfiguration& FNDIBroadcastConfiguration::operator=(const FNDIBroadcastConfiguration& other) +{ + // perform a deep copy of the 'other' structure + this->FrameRate = other.FrameRate; + this->FrameSize = other.FrameSize; + + // return the result of the copy + return *this; +} + +/** Compares this object to 'other' and returns a determination of whether they are equal */ +bool FNDIBroadcastConfiguration::operator==(const FNDIBroadcastConfiguration& other) const +{ + // return the value of a deep compare against the 'other' structure + return this->FrameRate == other.FrameRate && this->FrameSize == other.FrameSize; +} + +/** Attempts to serialize this object using an Archive object */ +FArchive& FNDIBroadcastConfiguration::Serialize(FArchive& Ar) +{ + // we want to make sure that we are able to serialize this object, over many different version of this structure + int32 current_version = 0; + + // serialize this structure + return Ar << current_version << this->FrameRate.Numerator << this->FrameRate.Denominator << this->FrameSize; +} + +/** Compares this object to 'other" and returns a determination of whether they are NOT equal */ +bool FNDIBroadcastConfiguration::operator!=(const FNDIBroadcastConfiguration& other) const +{ + return !(*this == other); +} \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Classes/Structures/NDIConnectionInformation.cpp b/Plugins/NDIIO/Source/Core/Classes/Structures/NDIConnectionInformation.cpp new file mode 100644 index 0000000..b867b70 --- /dev/null +++ 
b/Plugins/NDIIO/Source/Core/Classes/Structures/NDIConnectionInformation.cpp @@ -0,0 +1,111 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include + +#include + +/** Copies an existing instance to this object */ +FNDIConnectionInformation::FNDIConnectionInformation(const FNDIConnectionInformation& other) +{ + // perform a deep copy of the 'other' structure and store the values in this object + this->Bandwidth = other.Bandwidth; + this->MachineName = other.MachineName; + this->SourceName = other.SourceName; + this->StreamName = other.StreamName; + this->Url = other.Url; + this->bMuteAudio = other.bMuteAudio; + this->bMuteVideo = other.bMuteVideo; +} + +/** Copies existing instance properties to this object */ +FNDIConnectionInformation& FNDIConnectionInformation::operator=(const FNDIConnectionInformation& other) +{ + // perform a deep copy of the 'other' structure + this->Bandwidth = other.Bandwidth; + this->MachineName = other.MachineName; + this->SourceName = other.SourceName; + this->StreamName = other.StreamName; + this->Url = other.Url; + this->bMuteAudio = other.bMuteAudio; + this->bMuteVideo = other.bMuteVideo; + + // return the result of the copy + return *this; +} + +/** Compares this object to 'other' and returns a determination of whether they are equal */ +bool FNDIConnectionInformation::operator==(const FNDIConnectionInformation& other) const +{ + // return the value of a deep compare against the 'other' structure + return this->Bandwidth == other.Bandwidth && + this->MachineName == other.MachineName && this->SourceName == other.SourceName && + this->StreamName == other.StreamName && this->Url == other.Url && + this->bMuteAudio == other.bMuteAudio && this->bMuteVideo == other.bMuteVideo; +} + 
+FNDIConnectionInformation::operator NDIlib_recv_bandwidth_e() const +{ + return this->Bandwidth == ENDISourceBandwidth::MetadataOnly ? NDIlib_recv_bandwidth_metadata_only + : this->Bandwidth == ENDISourceBandwidth::AudioOnly ? NDIlib_recv_bandwidth_audio_only + : this->Bandwidth == ENDISourceBandwidth::Lowest ? NDIlib_recv_bandwidth_lowest + : NDIlib_recv_bandwidth_highest; +} + +/** Resets the current parameters to the default property values */ +void FNDIConnectionInformation::Reset() +{ + // Ensure we reset all the properties of this object to nominal default properties + this->Bandwidth = ENDISourceBandwidth::Highest; + this->MachineName = FString(""); + this->SourceName = FString(""); + this->StreamName = FString(""); + this->Url = FString(""); + this->bMuteAudio = false; + this->bMuteVideo = false; +} + +/** Attempts to serialize this object using an Archive object */ +FArchive& FNDIConnectionInformation::Serialize(FArchive& Ar) +{ + // we want to make sure that we are able to serialize this object, over many different version of this structure + int32 current_version = 0; + + // serialize this structure + return Ar << current_version << this->Bandwidth + << this->MachineName << this->SourceName << this->StreamName << this->Url + << this->bMuteAudio << this->bMuteVideo; +} + +/** Determines whether this object is valid connection information */ +bool FNDIConnectionInformation::IsValid() const +{ + // Need at least a source name and/or machine+stream name and/or a URL + return (!this->SourceName.IsEmpty()) || + ((!this->MachineName.IsEmpty()) && (!this->StreamName.IsEmpty())) || + (!this->Url.IsEmpty()); +} + +FString FNDIConnectionInformation::GetNDIName() const +{ + std::string source_name; + + if(!this->SourceName.IsEmpty()) + return this->SourceName; + + if ((!this->MachineName.IsEmpty()) && (!this->StreamName.IsEmpty())) + return this->MachineName + " (" + this->StreamName + ")"; + + return FString(); +} + + +/** Compares this object to 'other" and 
returns a determination of whether they are NOT equal */ +bool FNDIConnectionInformation::operator!=(const FNDIConnectionInformation& other) const +{ + return !(*this == other); +} diff --git a/Plugins/NDIIO/Source/Core/Classes/Structures/NDIReceiverPerformanceData.cpp b/Plugins/NDIIO/Source/Core/Classes/Structures/NDIReceiverPerformanceData.cpp new file mode 100644 index 0000000..b4a7202 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Classes/Structures/NDIReceiverPerformanceData.cpp @@ -0,0 +1,74 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include + +/** Copies an existing instance to this object */ +FNDIReceiverPerformanceData::FNDIReceiverPerformanceData(const FNDIReceiverPerformanceData& other) +{ + // perform a deep copy of the 'other' structure and store the values in this object + this->AudioFrames = other.AudioFrames; + this->DroppedAudioFrames = other.DroppedAudioFrames; + this->DroppedMetadataFrames = other.DroppedMetadataFrames; + this->DroppedVideoFrames = other.DroppedVideoFrames; + this->MetadataFrames = other.MetadataFrames; + this->VideoFrames = other.VideoFrames; +} + +/** Copies existing instance properties to this object */ +FNDIReceiverPerformanceData& FNDIReceiverPerformanceData::operator=(const FNDIReceiverPerformanceData& other) +{ + // perform a deep copy of the 'other' structure + this->AudioFrames = other.AudioFrames; + this->DroppedAudioFrames = other.DroppedAudioFrames; + this->DroppedMetadataFrames = other.DroppedMetadataFrames; + this->DroppedVideoFrames = other.DroppedVideoFrames; + this->MetadataFrames = other.MetadataFrames; + this->VideoFrames = other.VideoFrames; + + // return the result of the copy + return *this; +} + +/** Compares this object to 'other' and returns a determination of whether they 
are equal */ +bool FNDIReceiverPerformanceData::operator==(const FNDIReceiverPerformanceData& other) const +{ + // return the value of a deep compare against the 'other' structure + return this->AudioFrames == other.AudioFrames && this->DroppedAudioFrames == other.DroppedAudioFrames && + this->DroppedMetadataFrames == other.DroppedMetadataFrames && + this->DroppedVideoFrames == other.DroppedVideoFrames && this->MetadataFrames == other.MetadataFrames && + this->VideoFrames == other.VideoFrames; +} + +/** Resets the current parameters to the default property values */ +void FNDIReceiverPerformanceData::Reset() +{ + // Ensure we reset all the properties of this object to nominal default properties + this->AudioFrames = 0; + this->DroppedAudioFrames = 0; + this->DroppedMetadataFrames = 0; + this->DroppedVideoFrames = 0; + this->MetadataFrames = 0; + this->VideoFrames = 0; +} + +/** Attempts to serialize this object using an Archive object */ +FArchive& FNDIReceiverPerformanceData::Serialize(FArchive& Ar) +{ + // we want to make sure that we are able to serialize this object, over many different version of this structure + int32 current_version = 0; + + // serialize this structure + return Ar << current_version << this->AudioFrames << this->DroppedAudioFrames << this->DroppedMetadataFrames + << this->DroppedVideoFrames << this->MetadataFrames << this->VideoFrames; +} + +/** Compares this object to 'other" and returns a determination of whether they are NOT equal */ +bool FNDIReceiverPerformanceData::operator!=(const FNDIReceiverPerformanceData& other) const +{ + return !(*this == other); +} \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/NDIIO.Build.cs b/Plugins/NDIIO/Source/Core/NDIIO.Build.cs new file mode 100644 index 0000000..28011b1 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/NDIIO.Build.cs @@ -0,0 +1,106 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. 
+ + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +using System; +using System.IO; + +using UnrealBuildTool; + +public class NDIIO : ModuleRules +{ + public NDIIO(ReadOnlyTargetRules Target) : base(Target) + { +#if UE_5_2_OR_LATER + IWYUSupport = IWYUSupport.Full; +#else + bEnforceIWYU = true; +#endif + PCHUsage = PCHUsageMode.UseExplicitOrSharedPCHs; + + #region Public Includes + + // Include the Public include paths + if (Directory.Exists(Path.Combine(ModuleDirectory, "Public"))) + { + PublicIncludePaths.AddRange(new string[] { + // ... add public include paths required here ... + Path.Combine(ModuleDirectory, "Public" ), + }); + } + + // Define the public dependencies + PublicDependencyModuleNames.AddRange(new string[] { + "Engine", + "Core", + "CoreUObject", + "Projects", + "NDIIOShaders" + }); + + #endregion + + #region Private Includes + + if (Directory.Exists(Path.Combine(ModuleDirectory, "Private"))) + { + PrivateIncludePaths.AddRange(new string[] { + // ... add other private include paths required here ... 
+ Path.Combine(ModuleDirectory, "Private" ) + }); + } + + PrivateDependencyModuleNames.AddRange(new string[] { + "Renderer", + "RenderCore", + "RHI", + "Slate", + "SlateCore", + "UMG", + "ImageWrapper", + "AudioMixer", + "AudioExtensions", + + "InputCore", + + "Media", + "MediaAssets", + "MediaIOCore", + "MediaUtils", + "TimeManagement", + + "CinematicCamera", + + "XmlParser" + }); + + #endregion + + #region Editor Includes + + if (Target.bBuildEditor == true) + { + PrivateIncludePathModuleNames.AddRange(new string[] { + "AssetTools", + "TargetPlatform", + }); + + PrivateDependencyModuleNames.AddRange(new string[] { + "UnrealEd", + "AssetTools", + "MaterialUtilities" + }); + } + + #endregion + + #region ThirdParty Includes + + PublicDependencyModuleNames.Add("NDI"); + + #endregion + } +} diff --git a/Plugins/NDIIO/Source/Core/Public/Actors/NDIBroadcastActor.h b/Plugins/NDIIO/Source/Core/Public/Actors/NDIBroadcastActor.h new file mode 100644 index 0000000..691020a --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Actors/NDIBroadcastActor.h @@ -0,0 +1,59 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
*/

#pragma once

// NOTE(review): the angle-bracketed #include targets were stripped during extraction;
// the original header names must be restored before this file can compile.
#include
#include
#include
#include
#include

#include "NDIBroadcastActor.generated.h"

/**
	A quick and easy way to capture from the perspective of a camera that starts broadcasting the viewport
	immediately upon 'BeginPlay'
*/
UCLASS(BlueprintType, Blueprintable, Category = "NDI IO", META = (DisplayName = "NDI Broadcast Actor"))
class NDIIO_API ANDIBroadcastActor : public AActor, public IPTZControllableInterface
{
	GENERATED_UCLASS_BODY()

private:
	/**
		The NDI Media Sender representing the configuration of the network source to send audio, video, and metadata
	*/
	UPROPERTY(BlueprintReadWrite, EditInstanceOnly, Category = "NDI IO",
			  META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true))
	UNDIMediaSender* NDIMediaSource = nullptr;

	/**
		A component used to capture an additional viewport for broadcasting over NDI
	*/
	UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "NDI IO",
			  META = (DisplayName = "Viewport Capture Component", AllowPrivateAccess = true))
	UNDIViewportCaptureComponent* ViewportCaptureComponent = nullptr;

	// Submix used as the audio source of the broadcast — NOTE(review): exact routing is
	// not visible in this header; confirm against the implementation.
	UPROPERTY(BlueprintReadWrite, EditInstanceOnly, Category = "NDI IO",
			  META = (DisplayName = "Audio Submix Capture", AllowPrivateAccess = true))
	USoundSubmix* SubmixCapture = nullptr;

	/**
		Component used for PTZ control
	*/
	UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "NDI IO",
			  META = (DisplayName = "PTZ Controller", AllowPrivateAccess = true))
	UPTZController* PTZController = nullptr;

public:
	virtual void BeginPlay() override;

	// IPTZControllableInterface
	virtual FPTZState GetPTZStateFromUE() const override;
	virtual void SetPTZStateToUE(const FPTZState& PTZState) override;
};
diff --git a/Plugins/NDIIO/Source/Core/Public/Actors/NDIReceiveActor.h b/Plugins/NDIIO/Source/Core/Public/Actors/NDIReceiveActor.h new file mode 100644 index 0000000..ab07add --- /dev/null +++
b/Plugins/NDIIO/Source/Core/Public/Actors/NDIReceiveActor.h @@ -0,0 +1,129 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#pragma once + +#include +#include +#include +#include +#include +#include + +#include "NDIReceiveActor.generated.h" + +UCLASS(HideCategories = (Activation, Rendering, AssetUserData, Material, Attachment, Actor, Input, Cooking, LOD, + StaticMesh, Materials), + Category = "NDI IO", META = (DisplayName = "NDI Receive Actor")) +class NDIIO_API ANDIReceiveActor : public AActor +{ + GENERATED_UCLASS_BODY() + +private: + /** The desired width of the frame in cm, represented in the virtual scene */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Interp, BlueprintSetter = "SetFrameWidth", Category = "NDI IO", + META = (DisplayName = "Frame Width", AllowPrivateAccess = true)) + float FrameWidth = 177.778; + + /** The desired height of the frame in cm, represented in the virtual scene */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Interp, BlueprintSetter = "SetFrameHeight", Category = "NDI IO", + META = (DisplayName = "Frame Height", AllowPrivateAccess = true)) + float FrameHeight = 100.0f; + + /** + Indicates that this object should play the audio. + + *Note Audio played by this object will be played as a UI sound, and won't normalize the audio + if the same 'MediaSource' object is being used as the audio source on multiple receivers. 
+ */ + UPROPERTY(EditInstanceOnly, BlueprintSetter = "UpdateAudioPlayback", Category = "NDI IO", + META = (DisplayName = "Enable Audio Playback?", AllowPrivateAccess = true)) + bool bEnableAudioPlayback = false; + + UPROPERTY(EditInstanceOnly, BlueprintSetter = "UpdateAudioPlaybackChannels", Category = "NDI IO", + META = (DisplayName = "Audio Playback Channels", AllowPrivateAccess = true)) + ENDIAudioChannels AudioPlaybackChannels = ENDIAudioChannels::Mono; + + /** Enable/disable the use of the color channels (if there are any) */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, BlueprintSetter = "EnableColor", Category = "NDI IO", + META = (DisplayName = "Enable Color?", AllowPrivateAccess = true)) + bool bEnableColor = true; + + /** Enable/disable the use of the alpha channel (if there is one) */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, BlueprintSetter = "EnableAlpha", Category = "NDI IO", + META = (DisplayName = "Enable Alpha?", AllowPrivateAccess = true)) + bool bEnableAlpha = true; + + /** The Receiver object used to get Audio, Video, and Metadata from on the network */ + UPROPERTY(BlueprintReadWrite, EditInstanceOnly, Category = "NDI IO", + META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true)) + UNDIMediaReceiver* NDIMediaSource = nullptr; + + /** The component used to display the video received from the Media Sender object */ + UPROPERTY(Transient, META = (DisplayName = "Video Mesh Component")) + UStaticMeshComponent* VideoMeshComponent = nullptr; + + /** The component used to play the audio from the NDI Media source */ + UPROPERTY(BlueprintReadWrite, EditInstanceOnly, Category= "NDI IO", META = (DisplayName = "Audio Component", AllowPrivateAccess = true)) + UAudioComponent* AudioComponent = nullptr; + + /** The audio sound wave which receives the audio from the NDI Media source */ + UPROPERTY(Transient, META = (DisplayName = "Soundwave")) + UNDIMediaSoundWave* AudioSoundWave = nullptr; + +private: + /** The material we are trying to apply 
to the video mesh */ + class UMaterialInterface* VideoMaterial = nullptr; + + /** The dynamic material to apply to the plane object of this actor */ + UPROPERTY() + class UMaterialInstanceDynamic* VideoMaterialInstance = nullptr; + +public: + virtual void BeginPlay() override; + virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override; + virtual void Tick(float DeltaTime) override; + + /** + Attempts to set the desired frame size in cm, represented in the virtual scene + */ + void SetFrameSize(FVector2D InFrameSize); + + /** + Returns the current frame size of the 'VideoMeshComponent' for this object + */ + const FVector2D GetFrameSize() const; + +private: + UFUNCTION(BlueprintSetter) + void SetFrameHeight(const float& InFrameHeight); + + UFUNCTION(BlueprintSetter) + void SetFrameWidth(const float& InFrameWidth); + + UFUNCTION(BlueprintSetter) + void UpdateAudioPlayback(const bool& Enabled); + + UFUNCTION(BlueprintSetter) + void UpdateAudioPlaybackChannels(const ENDIAudioChannels& Channels); + + UFUNCTION(BlueprintSetter) + void EnableColor(const bool& Enabled); + + UFUNCTION(BlueprintSetter) + void EnableAlpha(const bool& Enabled); + +#if WITH_EDITORONLY_DATA + + virtual void PreEditChange(FProperty* InProperty) override; + virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; + +#endif + + void ApplyChannelsMode(); + bool bStoppedForChannelsMode = false; +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/Assets/NDITimecodeProvider.h b/Plugins/NDIIO/Source/Core/Public/Assets/NDITimecodeProvider.h new file mode 100644 index 0000000..bb228a7 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Assets/NDITimecodeProvider.h @@ -0,0 +1,52 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. 
	For more information, please review the license and the NDI SDK documentation.
*/

#pragma once

// NOTE(review): the angle-bracketed #include targets were stripped during extraction;
// the original header names must be restored before this file can compile.
#include

#include

#include "NDITimecodeProvider.generated.h"


/**
	Timecode provider from an NDI source
*/
UCLASS(Blueprintable, editinlinenew, meta=(DisplayName="NDI Timecode Provider"))
class NDIIO_API UNDITimecodeProvider : public UGenlockedTimecodeProvider
{
	GENERATED_UCLASS_BODY()

private:
	/** The Receiver object used to get timecodes from */
	UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "NDI IO",
			  META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true))
	UNDIMediaReceiver* NDIMediaSource = nullptr;

public:
	//~ UTimecodeProvider interface
	virtual bool FetchTimecode(FQualifiedFrameTime& OutFrameTime) override;
	virtual ETimecodeProviderSynchronizationState GetSynchronizationState() const override;
	virtual bool Initialize(class UEngine* InEngine) override;
	virtual void Shutdown(class UEngine* InEngine) override;

	//~ UObject interface
	virtual void BeginDestroy() override;

private:
	// Unregisters delegates and releases the receiver-related resources.
	void ReleaseResources();

private:
	// Handles for delegates registered on the NDI receiver; released in ReleaseResources().
	FDelegateHandle VideoCaptureEventHandle;
	FDelegateHandle ConnectedEventHandle;
	FDelegateHandle DisconnectedEventHandle;

	// Guards State, which is read from GetSynchronizationState() (const) and written on events.
	mutable FCriticalSection StateSyncContext;
	ETimecodeProviderSynchronizationState State = ETimecodeProviderSynchronizationState::Closed;
	FQualifiedFrameTime MostRecentFrameTime;
};
diff --git a/Plugins/NDIIO/Source/Core/Public/Components/NDIBroadcastComponent.h b/Plugins/NDIIO/Source/Core/Public/Components/NDIBroadcastComponent.h new file mode 100644 index 0000000..24cbfba --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Components/NDIBroadcastComponent.h @@ -0,0 +1,99 @@
/*
	Copyright (C) 2024 Vizrt NDI AB. All rights reserved.

	This file and its use within a Product is bound by the terms of NDI SDK license that was provided
	as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/

#pragma once

// NOTE(review): the angle-bracketed #include targets were stripped during extraction;
// the original header names must be restored before this file can compile.
#include

#include
#include
#include

#include "NDIBroadcastComponent.generated.h"

/**
	Provides a wrapper to allow you to modify an NDI Media Sender object from blueprints and perform broadcasting
	functionality
*/
UCLASS(BlueprintType, Blueprintable, Category = "NDI IO",
	   META = (DisplayName = "NDI Broadcast Component", BlueprintSpawnableComponent))
class NDIIO_API UNDIBroadcastComponent : public UActorComponent
{
	GENERATED_UCLASS_BODY()

private:
	/** The NDI Media Sender representing the configuration of the network source to send audio, video, and metadata */
	UPROPERTY(EditDefaultsOnly, Category = "Properties",
			  META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true))
	UNDIMediaSender* NDIMediaSource = nullptr;

public:
	/**
		Initialize this component with the media source required for sending NDI audio, video, and metadata.
		Returns false if the MediaSource has already been set. This is usually the case when this component is
		initialized in Blueprints.
	*/
	bool Initialize(UNDIMediaSender* InMediaSource = nullptr);

	/**
		Attempts to start broadcasting audio, video, and metadata via the 'NDIMediaSource' associated with this object

		@param ErrorMessage The error message received when the media source is unable to start broadcasting
		@result Indicates whether this object successfully started broadcasting
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Start Broadcasting"))
	bool StartBroadcasting(FString& ErrorMessage);

	/**
		Changes the name of the sender object as seen on the network for remote connections

		@param InSourceName The new name of the source to be identified as on the network
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Source Name"))
	void ChangeSourceName(const FString& InSourceName);

	/**
		Attempts to change the Broadcast information associated with this media object

		@param InConfiguration The new configuration to broadcast
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Broadcast Configuration"))
	void ChangeBroadcastConfiguration(const FNDIBroadcastConfiguration& InConfiguration);

	/**
		Attempts to change the RenderTarget used in sending video frames over NDI

		@param BroadcastTexture The texture to use as video, while broadcasting over NDI
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Broadcast Texture"))
	void ChangeBroadcastTexture(UTextureRenderTarget2D* BroadcastTexture = nullptr);

	/**
		Determines the current tally information.

		@param IsOnPreview - A state indicating whether this source is on preview of a receiver
		@param IsOnProgram - A state indicating whether this source is on program of a receiver
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Tally Information"))
	void GetTallyInformation(bool& IsOnPreview, bool& IsOnProgram);

	/**
		Gets the current number of receivers connected to this source. This can be used to avoid rendering
		when nothing is connected to the video source, which can significantly improve the efficiency if
		you want to make a lot of sources available on the network

		@param Result The total number of connected receivers attached to the broadcast of this object
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Number of Connections"))
	void GetNumberOfConnections(int32& Result);

	/**
		Attempts to immediately stop sending frames over NDI to any connected receivers
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Stop Broadcasting"))
	void StopBroadcasting();
};
\ No newline at end of file
diff --git a/Plugins/NDIIO/Source/Core/Public/Components/NDIFinderComponent.h b/Plugins/NDIIO/Source/Core/Public/Components/NDIFinderComponent.h new file mode 100644 index 0000000..b76a074 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Components/NDIFinderComponent.h @@ -0,0 +1,79 @@
/*
	Copyright (C) 2024 Vizrt NDI AB. All rights reserved.

	This file and its use within a Product is bound by the terms of NDI SDK license that was provided
	as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/

#pragma once

// NOTE(review): the angle-bracketed #include targets were stripped during extraction;
// the original header names must be restored before this file can compile.
#include

#include
#include

#include "NDIFinderComponent.generated.h"

/** Delegates **/

DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIFinderServiceCollectionChangedDelegate, UNDIFinderComponent*,
											InComponent);

/** ******************* **/

/**
	A component used for essential functionality when dealing with the finder service. Allowing you to
	get a collection of sources found on the network.
*/
UCLASS(BlueprintType, Blueprintable, Category = "NDI IO",
	   META = (DisplayName = "NDI Finder Component", BlueprintSpawnableComponent))
class NDIIO_API UNDIFinderComponent : public UActorComponent
{
	GENERATED_UCLASS_BODY()

public:
	/** A collection of the current sources and their information, found on the network */
	// NOTE(review): the TArray template argument was stripped during extraction — presumably
	// TArray<FNDIConnectionInformation> given FindNetworkSourceByName below; restore before compiling.
	UPROPERTY()
	TArray NetworkSourceCollection;

	/** A delegate which is broadcast when any change to the network source collection has been detected */
	UPROPERTY(BlueprintAssignable, META = (DisplayName = "On Network Sources Changed", AllowPrivateAccess = true))
	FNDIFinderServiceCollectionChangedDelegate OnNetworkSourcesChanged;

public:
	/**
		Attempts to find a network source by the supplied name.

		@param ConnectionInformation An existing source information structure which contains the source name
		@param InSourceName A string value representing the name of the source to find
		@result A value indicating whether a source with the supplied name was found
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Find Network Source by Name"))
	const bool FindNetworkSourceByName(FNDIConnectionInformation& ConnectionInformation,
									   FString InSourceName = FString(""));

	/**
		Returns the current collection of sources found on the network
	*/
	// NOTE(review): the return's TArray template argument was also stripped — see note above.
	UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO", META = (DisplayName = "Get Network Sources"))
	const TArray GetNetworkSources();

protected:
	virtual void BeginPlay() override;
	virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override;

	/** An override function for when the network source collection has been changed */
	UFUNCTION(BlueprintImplementableEvent, META = (DisplayName = "On Network Sources Changed Event"))
	void OnNetworkSourcesChangedEvent();

private:
	/**
		An Event handler for when the NDI Finder Service notifies listeners that changes have been
		detected in the network source collection
	*/
	UFUNCTION()
	virtual void OnNetworkSourceCollectionChangedEvent() final;

private:
	// Guards access to NetworkSourceCollection across finder-service callbacks and getters.
	FCriticalSection CollectionSyncContext;
};
\ No newline at end of file
diff --git a/Plugins/NDIIO/Source/Core/Public/Components/NDIPTZControllerComponent.h b/Plugins/NDIIO/Source/Core/Public/Components/NDIPTZControllerComponent.h new file mode 100644 index 0000000..463699c --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Components/NDIPTZControllerComponent.h @@ -0,0 +1,174 @@
/*
	Copyright (C) 2024 Vizrt NDI AB. All rights reserved.

	This file and its use within a Product is bound by the terms of NDI SDK license that was provided
	as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/

#pragma once

// NOTE(review): the angle-bracketed #include targets were stripped during extraction;
// the original header names must be restored before this file can compile.
#include
#include
#include
#include

#include "NDIPTZControllerComponent.generated.h"


// Snapshot of a camera's pan/tilt/zoom/focus state, storable as a preset and recallable over NDI.
USTRUCT(BlueprintType, Blueprintable, Category = "NDI IO", META = (DisplayName = "NDI PTZ State"))
struct NDIIO_API FPTZState
{
	GENERATED_USTRUCT_BODY()

	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PTZ")
	float Pan;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PTZ")
	float Tilt;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PTZ")
	float FieldOfView;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PTZ")
	float FocusDistance;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PTZ")
	bool bAutoFocus;

	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PTZ")
	FTransform CameraTransform;

	// Defaults: centered, 90 degree FoV, manual focus at 0.5.
	FPTZState()
		: Pan(0.f)
		, Tilt(0.f)
		, FieldOfView(90.f)
		, FocusDistance(0.5f)
		, bAutoFocus(false)
	{}
};


DECLARE_DYNAMIC_MULTICAST_DELEGATE_TwoParams(FNDIEventDelegate_OnPTZPanTiltSpeed, float, PanSpeed, float, TiltSpeed);
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIEventDelegate_OnPTZZoomSpeed, float, ZoomSpeed);
DECLARE_DYNAMIC_MULTICAST_DELEGATE_TwoParams(FNDIEventDelegate_OnPTZFocus, bool, AutoMode, float, Distance);
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIEventDelegate_OnPTZStore, int, Index);
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIEventDelegate_OnPTZRecall, int, Index);



UINTERFACE(BlueprintType, Blueprintable, Category = "NDI IO",
		   META = (DisplayName = "NDI PTZ Controllable", BlueprintSpawnableComponent))
class NDIIO_API UPTZControllableInterface : public UInterface
{
	GENERATED_BODY()
};

// Implemented by objects whose camera state can be driven by a UPTZController.
class IPTZControllableInterface
{
	GENERATED_BODY()

public:
	virtual FPTZState GetPTZStateFromUE() const = 0;
	virtual void SetPTZStateToUE(const FPTZState& PTZState) = 0;
};


// NOTE(review): unlike the other classes in this module, UPTZController carries no NDIIO_API
// export macro — confirm whether it is intentionally module-internal.
UCLASS(BlueprintType, Blueprintable, Category = "NDI IO",
	   META = (DisplayName = "NDI PTZ Controller", BlueprintSpawnableComponent))
class UPTZController : public UActorComponent
{
	GENERATED_BODY()

protected:
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Enable PTZ", AllowPrivateAccess = true), Category="PTZ")
	bool EnablePTZ = true;

	// Optional pan clamp range, in degrees.
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Pan Limit", AllowPrivateAccess = true), Category="PTZ")
	bool PTZWithPanLimit = false;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Pan Min Limit", UIMin="-180", UIMax="180", AllowPrivateAccess = true), Category="PTZ")
	float PTZPanMinLimit = -180.f;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Pan Max Limit", UIMin="-180", UIMax="180", AllowPrivateAccess = true), Category="PTZ")
	float PTZPanMaxLimit = 180.f;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Invert Pan", AllowPrivateAccess = true), Category="PTZ")
	bool bPTZPanInvert = true;

	// Optional tilt clamp range, in degrees.
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Tilt Limit", AllowPrivateAccess = true), Category="PTZ")
	bool PTZWithTiltLimit = true;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Tilt Min Limit", UIMin="-180", UIMax="180", AllowPrivateAccess = true), Category="PTZ")
	float PTZTiltMinLimit = -90.f;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Tilt Max Limit", UIMin="-180", UIMax="180", AllowPrivateAccess = true), Category="PTZ")
	float PTZTiltMaxLimit = 90.f;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Invert Tilt", AllowPrivateAccess = true), Category="PTZ")
	bool bPTZTiltInvert = false;

	// Optional field-of-view clamp range, in degrees.
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Field of View Limit", AllowPrivateAccess = true), Category="PTZ")
	bool PTZWithFoVLimit = false;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Field of View Min Limit", UIMin="5", UIMax="170", AllowPrivateAccess = true), Category="PTZ")
	float PTZFoVMinLimit = 5.f;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Field of View Max Limit", UIMin="5", UIMax="170", AllowPrivateAccess = true), Category="PTZ")
	float PTZFoVMaxLimit = 170.f;

	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Preset Recall Easing", UIMin="0", UIMax="60", AllowPrivateAccess = true), Category="PTZ")
	float PTZRecallEasing = 2.f;

	// Current movement speeds applied while ticking.
	UPROPERTY(BlueprintReadWrite, meta=(AllowPrivateAccess = true), Category="PTZ")
	float PTZPanSpeed = 0.f;
	UPROPERTY(BlueprintReadWrite, meta=(AllowPrivateAccess = true), Category="PTZ")
	float PTZTiltSpeed = 0.f;
	UPROPERTY(BlueprintReadWrite, meta=(AllowPrivateAccess = true), Category="PTZ")
	float PTZZoomSpeed = 0.f;

	// NOTE(review): the TArray template argument was stripped during extraction — presumably
	// TArray<FPTZState>; restore before compiling.
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="PTZ Presets", AllowPrivateAccess = true), Category="PTZ")
	TArray PTZStoredStates;

	UPROPERTY(BlueprintReadWrite, EditInstanceOnly, Category = "NDI IO", META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true))
	UNDIMediaSender* NDIMediaSource = nullptr;

	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On PTZ Pan Tilt Speed", AllowPrivateAccess = true))
	FNDIEventDelegate_OnPTZPanTiltSpeed OnPTZPanTiltSpeed;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On PTZ Zoom Speed", AllowPrivateAccess = true))
	FNDIEventDelegate_OnPTZZoomSpeed OnPTZZoomSpeed;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On PTZ Focus", AllowPrivateAccess = true))
	FNDIEventDelegate_OnPTZFocus OnPTZFocus;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On PTZ Store", AllowPrivateAccess = true))
	FNDIEventDelegate_OnPTZStore OnPTZStore;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On PTZ Recall", AllowPrivateAccess = true))
	FNDIEventDelegate_OnPTZRecall OnPTZRecall;

public:
	/** Call with the PTZ metadata received from an NDI media sender */
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Receive Metadata From Sender"))
	void ReceiveMetaDataFromSender(UNDIMediaSender* Sender, FString Data);

public:
	UPTZController();
	virtual ~UPTZController();

	/**
		Initialize this component with the required media source to receive metadata from.
		Returns false if the MediaSource has already been set. This is usually the case when this component is
		initialized in Blueprints.
	*/
	bool Initialize(UNDIMediaSender* InMediaSource = nullptr);

	void SetPTZPanTiltSpeed(float PanSpeed, float TiltSpeed);
	void SetPTZZoomSpeed(float ZoomSpeed);
	void SetPTZFocus(bool AutoMode, float Distance);
	void StorePTZState(int Index);
	void RecallPTZState(int Index);

	FPTZState GetPTZStateFromUE() const;
	void SetPTZStateToUE(const FPTZState& PTZState);

protected:
	virtual void InitializeComponent() override;

	virtual void TickComponent(float DeltaTime, ELevelTick TickType, FActorComponentTickFunction* ThisTickFunction) override;

protected:
	// NOTE(review): TSharedPtr template argument stripped during extraction; restore before compiling.
	TSharedPtr NDIMetadataParser;

	// In-flight interpolation toward a recalled preset; EasingRemaining counts down each tick.
	struct FPTZStateInterp
	{
		FPTZState PTZTargetState;
		float EasingDuration { 0 };
		float EasingRemaining { 0 };
	};
	FPTZStateInterp PTZStateInterp;
};
diff --git a/Plugins/NDIIO/Source/Core/Public/Components/NDIReceiverComponent.h b/Plugins/NDIIO/Source/Core/Public/Components/NDIReceiverComponent.h new file mode 100644 index 0000000..f5bd6a2 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Components/NDIReceiverComponent.h @@ -0,0 +1,97 @@
/*
	Copyright (C) 2024 Vizrt NDI AB. All rights reserved.

	This file and its use within a Product is bound by the terms of NDI SDK license that was provided
	as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/

#pragma once

// NOTE(review): the angle-bracketed #include targets were stripped during extraction;
// the original header names must be restored before this file can compile.
#include

#include
#include
#include

#include "NDIReceiverComponent.generated.h"

/**
	A component used to receive audio, video, and metadata over NDI
*/
UCLASS(BlueprintType, Blueprintable, Category = "NDI IO",
	   META = (DisplayName = "NDI Receiver Component", BlueprintSpawnableComponent))
class NDIIO_API UNDIReceiverComponent : public UActorComponent
{
	GENERATED_UCLASS_BODY()

private:
	/** The NDI Media Receiver representing the configuration of the network source to receive audio, video, and
	 * metadata from */
	UPROPERTY(EditDefaultsOnly, Category = "Properties",
			  META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true))
	UNDIMediaReceiver* NDIMediaSource = nullptr;

public:
	/**
		Initialize this component with the media source required for receiving NDI audio, video, and metadata.
		Returns false if the MediaSource has already been set. This is usually the case when this component is
		initialized in Blueprints.
	*/
	bool Initialize(UNDIMediaReceiver* InMediaSource = nullptr);

	/**
		Begin receiving NDI audio, video, and metadata frames
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Start Receiver"))
	bool StartReceiver(const FNDIConnectionInformation& InConnectionInformation);

	/**
		Attempt to change the connection for which to get audio, video, and metadata frame from
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Connection"))
	void ChangeConnection(const FNDIConnectionInformation& InConnectionInformation);

	/**
		This will add a metadata frame and return immediately, having scheduled the frame asynchronously
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Metadata Frame"))
	void SendMetadataFrame(const FString& metadata);

	/**
		This will set up the up-stream tally notifications. If no streams are connected, it will automatically send
		the tally state upon connection
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Tally Information"))
	void SendTallyInformation(const bool& IsOnPreview, const bool& IsOnProgram);

	/**
		Attempts to stop receiving audio, video, and metadata frame from the connected source
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Shutdown Receiver"))
	void ShutdownReceiver();

public:
	/**
		Returns the current framerate of the connected source
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Current Frame Rate"))
	FFrameRate GetCurrentFrameRate() const;

	/**
		Returns the current timecode of the connected source
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Current Timecode"))
	FTimecode GetCurrentTimecode() const;

	/**
		Returns the current connection information of the connected source
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Current Connection Information"))
	FNDIConnectionInformation GetCurrentConnectionInformation() const;

	/**
		Returns the current performance data of the receiver while connected to the source
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Performance Data"))
	FNDIReceiverPerformanceData GetPerformanceData() const;
};
\ No newline at end of file
diff --git a/Plugins/NDIIO/Source/Core/Public/Components/NDITriCasterExtComponent.h b/Plugins/NDIIO/Source/Core/Public/Components/NDITriCasterExtComponent.h new file mode 100644 index 0000000..849d476 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Components/NDITriCasterExtComponent.h @@ -0,0 +1,91 @@
/*
	Copyright (C) 2024 Vizrt NDI AB. All rights reserved.

	This file and its use within a Product is bound by the terms of NDI SDK license that was provided
	as part of the NDI SDK.
	For more information, please review the license and the NDI SDK documentation.
*/

#pragma once

// NOTE(review): the angle-bracketed #include targets were stripped during extraction;
// the original header names must be restored before this file can compile.
#include
#include
#include
#include

#include "NDITriCasterExtComponent.generated.h"


// Parsed TriCaster "ext" metadata payload: the raw value plus its key/value attributes.
USTRUCT(BlueprintType, Blueprintable, Category = "NDI IO", META = (DisplayName = "NDI TricasterExt"))
struct NDIIO_API FTriCasterExt
{
	GENERATED_USTRUCT_BODY()

	UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category="TricasterExt")
	FString Value;
	// NOTE(review): the TMap template arguments were stripped during extraction — presumably
	// TMap<FString, FString>; restore before compiling.
	UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category="TricasterExt")
	TMap KeyValues;
};


DECLARE_DYNAMIC_MULTICAST_DELEGATE_FiveParams(FNDIEventDelegate_OnTriCasterExt, AActor*, Actor, UObject*, Object, FString, PropertyElementName, FString, PropertyValueStr, FTimespan, EasingDuration);
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIEventDelegate_OnTriCasterExtCustom, const FTriCasterExt&, TCData);


// NOTE(review): unlike most classes in this module, UTriCasterExtComponent carries no NDIIO_API
// export macro — confirm whether it is intentionally module-internal.
UCLASS(BlueprintType, Blueprintable, Category = "NDI IO",
	   META = (DisplayName = "NDI TricasterExt Component", BlueprintSpawnableComponent))
class UTriCasterExtComponent : public UActorComponent
{
	GENERATED_BODY()

protected:
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Enable TricasterExt", AllowPrivateAccess = true), Category="TricasterExt")
	bool EnableTriCasterExt = true;

	UPROPERTY(BlueprintReadWrite, EditInstanceOnly, Category = "NDI IO", META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true))
	UNDIMediaSender* NDIMediaSource = nullptr;

	UPROPERTY(BlueprintAssignable, BlueprintCallable, Category="NDI Events", META = (DisplayName = "On TricasterExt", AllowPrivateAccess = true))
	FNDIEventDelegate_OnTriCasterExt OnTriCasterExt;
	UPROPERTY(BlueprintAssignable, BlueprintCallable, Category="NDI Events", META = (DisplayName = "On TricasterExt Custom", AllowPrivateAccess = true))
	FNDIEventDelegate_OnTriCasterExtCustom OnTriCasterExtCustom;

public:
	/** Call with the TriCasterExt metadata received from an NDI media sender */
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Receive Metadata From Sender"))
	void ReceiveMetaDataFromSender(UNDIMediaSender* Sender, FString Data);

public:
	UTriCasterExtComponent();
	virtual ~UTriCasterExtComponent();

	/**
		Initialize this component with the required media source to receive metadata from.
		Returns false if the MediaSource has already been set. This is usually the case when this component is
		initialized in Blueprints.
	*/
	bool Initialize(UNDIMediaSender* InMediaSource = nullptr);

	void TriCasterExt(AActor* Actor, UObject* Object, FProperty* Property, FString PropertyElementName, FString PropertyValueStr, FTimespan EasingDuration);
	void TriCasterExtCustom(const FTriCasterExt& TCData);

protected:
	virtual void InitializeComponent() override;

	virtual void TickComponent(float DeltaTime, ELevelTick TickType, FActorComponentTickFunction* ThisTickFunction) override;

protected:
	// NOTE(review): TSharedPtr template argument stripped during extraction; restore before compiling.
	TSharedPtr NDIMetadataParser;

	// NOTE(review): raw AActor*/UObject* pointers inside a plain struct held in a TArray are not
	// visible to Unreal's garbage collector — confirm lifetime is otherwise guaranteed.
	struct FTriCasterExtInterp
	{
		AActor* Actor;
		UObject* Object;
		FProperty* Property;
		FString PropertyElementName;
		FString PropertyValueStr;
		float EasingDuration;

		float EasingRemaining;
	};
	TArray TriCasterExtInterp;
};
diff --git a/Plugins/NDIIO/Source/Core/Public/Components/NDIViewportCaptureComponent.h b/Plugins/NDIIO/Source/Core/Public/Components/NDIViewportCaptureComponent.h new file mode 100644 index 0000000..ae33008 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Components/NDIViewportCaptureComponent.h @@ -0,0 +1,154 @@
/*
	Copyright (C) 2024 Vizrt NDI AB. All rights reserved.

	This file and its use within a Product is bound by the terms of NDI SDK license that was provided
	as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
+*/ + +#pragma once + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "NDIViewportCaptureComponent.generated.h" + + +/** + A component used to capture an additional viewport for broadcasting over NDI +*/ +UCLASS(BlueprintType, Blueprintable, Category = "NDI IO", + META = (DisplayName = "NDI Viewport Capture Component", BlueprintSpawnableComponent)) +class NDIIO_API UNDIViewportCaptureComponent : public USceneCaptureComponent2D +{ + GENERATED_UCLASS_BODY() + +private: + /** + If true, will allow you to override the capture settings by ignoring the default Broadcast Settings + in the NDI Media Sender, Potentially Requiring a texture rescale of the capture frame when broadcasting + over NDI. + */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Capture Settings", META = (AllowPrivateAccess = true)) + bool bOverrideBroadcastSettings = false; + + /** + Describes the Height and Width of the viewport frame to capture. 
+ */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Capture Settings", + META = (DisplayName = "Capture Size", AllowPrivateAccess = true, + EditCondition = "bOverrideBroadcastSettings")) + FIntPoint CaptureSize = FIntPoint(1280, 720); + + /** + Represents the desired number of frames (per second) to capture the viewport + */ + UPROPERTY(BlueprintReadwrite, EditAnywhere, Category = "Capture Settings", + META = (DisplayName = "Capture Rate", AllowPrivateAccess = true, + EditCondition = "bOverrideBroadcastSettings")) + FFrameRate CaptureRate = FFrameRate(60, 1); + + /** + The NDI Media Sender representing the configuration of the network source to send audio, video, and metadata + */ + UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "Properties", + META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true)) + UNDIMediaSender* NDIMediaSource = nullptr; + + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Capture Settings", + META = (DisplayName = "Alpha Remap Min", AllowPrivateAccess = true)) + float AlphaMin = 0.f; + + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Capture Settings", + META = (DisplayName = "Alpha Remap Max", AllowPrivateAccess = true)) + float AlphaMax = 1.f; + +public: + /** + Initialize this component with the media source required for sending NDI audio, video, and metadata. + Returns false, if the MediaSource is already been set. This is usually the case when this component is + initialized in Blueprints. 
+ */ + bool Initialize(UNDIMediaSender* InMediaSource = nullptr); + + /** + Changes the name of the sender object as seen on the network for remote connections + + @param InSourceName The new name of the source to be identified as on the network + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Source Name")) + void ChangeSourceName(const FString& InSourceName); + + /** + Attempts to change the Broadcast information associated with this media object + + @param InConfiguration The new configuration to broadcast + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Broadcast Configuration")) + void ChangeBroadcastConfiguration(const FNDIBroadcastConfiguration& InConfiguration); + + /** + Attempts to change the RenderTarget used in sending video frames over NDI + + @param BroadcastTexture The texture to use as video, while broadcasting over NDI + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Broadcast Texture")) + void ChangeBroadcastTexture(UTextureRenderTarget2D* BroadcastTexture = nullptr); + + /** + Change the capture settings of the viewport capture and overrides the NDI Media Sender settings + + @param InCaptureSize The Capture size of the frame to capture of the viewport + @param InCaptureRate A framerate at which to capture frames of the viewport + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Capture Settings")) + void ChangeCaptureSettings(FIntPoint InCaptureSize, FFrameRate InCaptureRate); + + /** + Determines the current tally information. 
If you specify a timeout then it will wait until it has + changed, otherwise it will simply poll it and return the current tally immediately + + @param IsOnPreview - A state indicating whether this source in on preview of a receiver + @param IsOnProgram - A state indicating whether this source is on program of a receiver + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Tally Information")) + void GetTallyInformation(bool& IsOnPreview, bool& IsOnProgram); + + /** + Gets the current number of receivers connected to this source. This can be used to avoid rendering + when nothing is connected to the video source. which can significantly improve the efficiency if + you want to make a lot of sources available on the network + + @param Result The total number of connected receivers attached to the broadcast of this object + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Number of Connections")) + void GetNumberOfConnections(int32& Result); + +protected: + virtual ~UNDIViewportCaptureComponent(); + + virtual void InitializeComponent() override; + virtual void UninitializeComponent() override; + +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later + virtual void UpdateSceneCaptureContents(FSceneInterface* Scene, ISceneRenderBuilder& SceneRenderBuilder) override; +#else + virtual void UpdateSceneCaptureContents(FSceneInterface* Scene) override; +#endif + +private: + UFUNCTION() + void OnBroadcastConfigurationChanged(UNDIMediaSender* Sender); + +private: + FCriticalSection UpdateRenderContext; +}; diff --git a/Plugins/NDIIO/Source/Core/Public/Enumerations/NDIAudioChannels.h b/Plugins/NDIIO/Source/Core/Public/Enumerations/NDIAudioChannels.h new file mode 100644 index 0000000..ba532e8 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Enumerations/NDIAudioChannels.h @@ -0,0 +1,28 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. 
+ + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#pragma once + +#include + +#include "NDIAudioChannels.generated.h" + +/** + Receiver Bandwidth modes +*/ +UENUM(BlueprintType, META = (DisplayName = "NDI Audio Channels")) +enum class ENDIAudioChannels : uint8 +{ + /** Mono. */ + Mono = 0x00 UMETA(DisplayName = "Mono"), + + /** Stereo. */ + Stereo = 0x01 UMETA(DisplayName = "Stereo"), + + /** Whatever the number of channels in the source is. */ + Source = 0x02 UMETA(DisplayName = "Source"), +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/Enumerations/NDISourceBandwidth.h b/Plugins/NDIIO/Source/Core/Public/Enumerations/NDISourceBandwidth.h new file mode 100644 index 0000000..dde65b5 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Enumerations/NDISourceBandwidth.h @@ -0,0 +1,31 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#pragma once + +#include + +#include "NDISourceBandwidth.generated.h" + +/** + Receiver Bandwidth modes +*/ +UENUM(BlueprintType, META = (DisplayName = "NDI Source Bandwidth")) +enum class ENDISourceBandwidth : uint8 +{ + /** Receive metadata. */ + MetadataOnly = 0x00 UMETA(DisplayName = "Metadata Only"), + + /** Receive metadata, audio */ + AudioOnly = 0x01 UMETA(DisplayName = "Audio Only"), + + /** Receive metadata, audio, video at a lower bandwidth and resolution. */ + Lowest = 0x02 UMETA(DisplayName = "Lowest"), + + // Receive metadata, audio, video at full resolution. 
+ Highest = 0x03 UMETA(DisplayName = "Highest") +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/NDIIOPluginAPI.h b/Plugins/NDIIO/Source/Core/Public/NDIIOPluginAPI.h new file mode 100644 index 0000000..b34a564 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/NDIIOPluginAPI.h @@ -0,0 +1,34 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#pragma once + +#include + +#include +#include +#include +#include + +#if PLATFORM_WINDOWS +#include +#endif + +#ifndef NDI_SDK_ENABLED +#error NDI(R) 6.x Runtime must be installed for the NDI(R) IO plugin to run properly. +#endif + +#ifdef NDI_SDK_ENABLED +#include +#include +#endif + +#if PLATFORM_WINDOWS +#include +#endif + +#define NDIIO_MODULE_NAME FName(TEXT("NDIIO")) \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/NDIIOPluginModule.h b/Plugins/NDIIO/Source/Core/Public/NDIIOPluginModule.h new file mode 100644 index 0000000..858ca57 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/NDIIOPluginModule.h @@ -0,0 +1,61 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#pragma once + +#include + +#include +#include +#include +#include + +#include +#include +#include +#include + +#include + +class NDIIO_API FNDIIOPluginModule + : public IModuleInterface + , public IMediaPlayerFactory +{ +public: + /** IModuleInterface implementation */ + virtual void StartupModule() override; + virtual void ShutdownModule() override; + + /** IMediaPlayerFactory implementation */ + virtual bool CanPlayUrl(const FString& Url, const IMediaOptions* /*Options*/, TArray* /*OutWarnings*/, TArray* OutErrors) const override; + virtual TSharedPtr CreatePlayer(IMediaEventSink& EventSink) override; + virtual FText GetDisplayName() const override; + virtual FName GetPlayerName() const override; + virtual FGuid GetPlayerPluginGUID() const override; + virtual const TArray& GetSupportedPlatforms() const override; + virtual bool SupportsFeature(EMediaFeature Feature) const override; + + + bool BeginBroadcastingActiveViewport(); + void StopBroadcastingActiveViewport(); + +private: + bool LoadModuleDependencies(); + void ShutdownModuleDependencies(); + +private: + TSharedPtr NDIFinderService = nullptr; + TSharedPtr NDIConnectionService = nullptr; + + void* NDI_LIB_HANDLE = nullptr; + + /** List of platforms that the media player support. */ + TArray SupportedPlatforms; + + /** List of supported URI schemes. */ + TArray SupportedUriSchemes; +}; diff --git a/Plugins/NDIIO/Source/Core/Public/NDIIOPluginSettings.h b/Plugins/NDIIO/Source/Core/Public/NDIIOPluginSettings.h new file mode 100644 index 0000000..b5596af --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/NDIIOPluginSettings.h @@ -0,0 +1,52 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#pragma once + +#include + +#include +#include + +#include "NDIIOPluginSettings.generated.h" + +/** + Settings for the Broadcasting of the Active Viewport configurable in the running editor of the application +*/ +UCLASS(Config = Engine, DefaultConfig) +class NDIIO_API UNDIIOPluginSettings : public UObject +{ + GENERATED_BODY() + +public: + UPROPERTY(VisibleAnywhere, Category = "NDI IO", META = (DisplayName = "Description", MultiLine = true)) + FString Decription = TEXT( + "These values define the 'Active Viewport' broadcast settings and does not define default values for outputs." + "\r\n" + "\r\nApplication Stream Name - The default name to use when broadcasting the Currently Active Viewport over " + "NDI." + "\r\nBroadcast Rate - Indicates the preferred frame rate to broadcast the Currently Active Viewport over NDI." + "\r\nPreferred FrameSize - Indicates the preferred frame size to broadcast the Currently Active Viewport over " + "NDI." + "\r\nBegin Broadcast On Play - Starts the broadcast of the Currently Active Viewport immediately on Play." + ); + + /** The default name to use when broadcasting the Currently Active Viewport over NDI. */ + UPROPERTY(Config, EditAnywhere, Category = "NDI IO") + FString ApplicationStreamName = FString("Unreal Engine"); + + /** Indicates the preferred frame rate to broadcast the Currently Active Viewport over NDI. */ + UPROPERTY(Config, EditAnywhere, Category = "NDI IO", META = (DisplayName = "Broadcast Rate")) + FFrameRate BroadcastRate = FFrameRate(60, 1); + + /** Indicates the preferred frame size to broadcast the Currently Active Viewport over NDI. 
*/ + UPROPERTY(Config, EditAnywhere, Category = "NDI IO", META = (DisplayName = "Preferred Broadcast Framesize")) + FIntPoint PreferredFrameSize = FIntPoint(1920, 1080); + + UPROPERTY(Config, EditAnywhere, Category = "NDI IO", META = (DisplayName = "Begin Broadcast On Play")) + bool bBeginBroadcastOnPlay = false; +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/Objects/Libraries/NDIBroadcastConfigurationLibrary.h b/Plugins/NDIIO/Source/Core/Public/Objects/Libraries/NDIBroadcastConfigurationLibrary.h new file mode 100644 index 0000000..b23b510 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Objects/Libraries/NDIBroadcastConfigurationLibrary.h @@ -0,0 +1,52 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#pragma once + +#include +#include +#include + +#include "NDIBroadcastConfigurationLibrary.generated.h" + +UCLASS(NotBlueprintable, BlueprintType, Category = "NDI IO", + META = (DisplayName = "NDI Broadcast Configuration Library")) +class NDIIO_API UNDIBroadcastConfigurationLibrary : public UBlueprintFunctionLibrary +{ + GENERATED_BODY() + +private: + /** + Returns a value indicating whether the two structures are comparably equal + + @param A The structure used as the source comparator + @param B The structure used as the target comparator + @return The resulting value of the comparator operator + */ + UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO", + META = (DisplayName = "Equals (NDI Broadcast Configuration)", + CompactNodeTitle = "==", Keywords = "= == Equals", AllowPrivateAccess = true)) + static bool K2_Compare_NDIBroadcastConfiguration(FNDIBroadcastConfiguration A, FNDIBroadcastConfiguration B) + { + return A == B; + } + + /** + Returns a value indicating whether the two structures are 
NOT comparably equal + + @param A The structure used as the source comparator + @param B The structure used as the target comparator + @return The resulting value of the comparator operator + */ + UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO", + META = (DisplayName = "Not Equals (NDI Broadcast Configuration)", + CompactNodeTitle = "!=", Keywords = "! != Not Equals", AllowPrivateAccess = true)) + static bool K2_Compare_Not_NDIBroadcastConfiguration(FNDIBroadcastConfiguration A, FNDIBroadcastConfiguration B) + { + return A != B; + } +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/Objects/Libraries/NDIConnectionInformationLibrary.h b/Plugins/NDIIO/Source/Core/Public/Objects/Libraries/NDIConnectionInformationLibrary.h new file mode 100644 index 0000000..2557aa4 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Objects/Libraries/NDIConnectionInformationLibrary.h @@ -0,0 +1,82 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#pragma once + +#include +#include +#include + +#include "NDIConnectionInformationLibrary.generated.h" + +UCLASS(NotBlueprintable, BlueprintType, Category = "NDI IO", + META = (DisplayName = "NDI Connection Information Library")) +class NDIIO_API UNDIConnectionInformationLibrary : public UBlueprintFunctionLibrary +{ + GENERATED_BODY() + +private: + /** + Returns a value indicating whether the two structures are comparably equal + + @param A The structure used as the source comparator + @param B The structure used as the target comparator + @return The resulting value of the comparator operator + */ + UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO", + META = (DisplayName = "Equals (NDI Connection Information)", + CompactNodeTitle = "==", Keywords = "= == Equals", AllowPrivateAccess = true)) + static bool K2_Compare_NDIConnectionInformation(FNDIConnectionInformation A, FNDIConnectionInformation B) + { + return A == B; + } + + /** + Returns a value indicating whether the two structures are NOT comparably equal + + @param A The structure used as the source comparator + @param B The structure used as the target comparator + @return The resulting value of the comparator operator + */ + UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO", + META = (DisplayName = "Not Equals (NDI Connection Information)", + CompactNodeTitle = "!=", Keywords = "! 
!= Not Equals", AllowPrivateAccess = true)) + static bool K2_Compare_Not_NDIConnectionInformation(FNDIConnectionInformation A, FNDIConnectionInformation B) + { + return A != B; + } + + /** + Returns a value indicating whether the property values of the supplied structure is valid + + @param ConnectionInformation The structure to validate + @return An indication of the supplied structures validity + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Is Valid?", AllowPrivateAccess = true)) + static bool K2_NDIConnectionInformation_IsValid(FNDIConnectionInformation& ConnectionInformation) + { + return ConnectionInformation.IsValid(); + } + + /** + Resets the structure's properties to their default values + + @param ConnectionInformation The structure to reset to the default value + @return The reference to the passed in structure after the 'reset' has been completed + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", + META = (DisplayName = "Reset Connection Information", AllowPrivateAccess = true)) + static UPARAM(ref) FNDIConnectionInformation& K2_NDIConnectionInformation_Reset( + UPARAM(ref) FNDIConnectionInformation& ConnectionInformation) + { + // call the underlying function to reset the properties of the object + ConnectionInformation.Reset(); + + // return the ConnectionInformation object reference + return ConnectionInformation; + } +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/Objects/Libraries/NDIIOLibrary.h b/Plugins/NDIIO/Source/Core/Public/Objects/Libraries/NDIIOLibrary.h new file mode 100644 index 0000000..58320d5 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Objects/Libraries/NDIIOLibrary.h @@ -0,0 +1,121 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#pragma once + +#include +#include + +#include +#include +#include + +#include "NDIIOLibrary.generated.h" + + +/** + An metadata element as returned by K2_ParseNDIMetaData() + Blueprints do not support recursive datastructures, so parsing metadata + with this will result in only the top-level elements being returned. +*/ +USTRUCT(BlueprintType) +struct FNDIMetaDataElement +{ + GENERATED_USTRUCT_BODY() + + UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category = "Metadata") + FString ElementName; + UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category = "Metadata") + TMap Attributes; + UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category = "Metadata") + FString Data; +}; + + +UCLASS(META = (DisplayName = "NDI IO Library")) +class NDIIO_API UNDIIOLibrary : public UBlueprintFunctionLibrary +{ + GENERATED_BODY() + +private: + /** + Retrieves a collection of NDI sources appearing on the network + + @return A collection of NDI Sources appearing on the network + */ + UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO", + META = (DisplayName = "Get NDI Source Collection", AllowPrivateAccess = true)) + static const TArray K2_GetNDISourceCollection(); + + /** + Attempts to search the NDI Source Collection for the source name, returning a result indicating + success with the ConnectionInformation parameter filled with the found connection + + @param ConnectionInformation The connection information for a successful find with the supplied InSourceName + @param InSourceName The name of the source to find within the collection of NDI sources + + @return The result of whether the search was successful + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", + META = (DisplayName = "Find Network Source by Name", DefaultToSelf = "WorldContextObject", + HidePin = "WorldContextObject", AllowPrivateAccess = true)) + static const bool K2_FindNetworkSourceByName(UObject* WorldContextObject, + FNDIConnectionInformation& ConnectionInformation, + FString InSourceName = 
FString("")); + +private: + /** + Attempts to start broadcasting the active viewport. The output of the active viewport is the current camera + that is actively being viewed (through), and does not have to be an NDI Broadcast Viewport Component. + + @return The result of whether broadcasting the active viewport was started + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", + META = (DisplayName = "Begin Broadcasting Active Viewport", DefaultToSelf = "WorldContextObject", + HidePin = "WorldContextObject", AllowPrivateAccess = true)) + static bool K2_BeginBroadcastingActiveViewport(UObject* WorldContextObject); + + /** + Will stop broadcasting the active viewport, which was started by a previous call to 'Begin Broadcasting Active + Viewport' + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", + META = (DisplayName = "Stop Broadcasting Active Viewport", DefaultToSelf = "WorldContextObject", + HidePin = "WorldContextObject", AllowPrivateAccess = true)) + static void K2_StopBroadcastingActiveViewport(UObject* WorldContextObject); + +private: + /** + Returns an NDI Media Receiver object + + @param Receiver The Receiver object to return + @return The selected Receiver object + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", + META = (DisplayName = "Get NDI Media Receiver", AllowPrivateAccess = true)) + static UPARAM(ref) UNDIMediaReceiver* K2_GetNDIMediaReceiver(UNDIMediaReceiver* Receiver = nullptr); + + /** + Returns an NDI Media Sender object + + @param Sender The Sender object to return + @return The selected Sender object + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", + META = (DisplayName = "Get NDI Media Sender", AllowPrivateAccess = true)) + static UPARAM(ref) UNDIMediaSender* K2_GetNDIMediaSender(UNDIMediaSender* Sender = nullptr); + +private: + /** + Parses a string as metadata + Blueprints do not support recursive datastructures, so parsing metadata + with this will result in only the top-level elements being returned. 
+ */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", + META = (DisplayName = "Parse NDI MetaData", AllowPrivateAccess = true)) + static const TArray K2_ParseNDIMetaData(FString Data); +}; diff --git a/Plugins/NDIIO/Source/Core/Public/Objects/Libraries/NDIReceiverPerformanceDataLibrary.h b/Plugins/NDIIO/Source/Core/Public/Objects/Libraries/NDIReceiverPerformanceDataLibrary.h new file mode 100644 index 0000000..3203192 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Objects/Libraries/NDIReceiverPerformanceDataLibrary.h @@ -0,0 +1,70 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#pragma once + +#include +#include +#include + +#include "NDIReceiverPerformanceDataLibrary.generated.h" + +UCLASS(NotBlueprintable, BlueprintType, Category = "NDI IO", + META = (DisplayName = "NDI Receiver Performance Data Library")) +class NDIIO_API UNDIReceiverPerformanceDataLibrary : public UBlueprintFunctionLibrary +{ + GENERATED_BODY() + +private: + /** + Returns a value indicating whether the two structures are comparably equal + + @param A The structure used as the source comparator + @param B The structure used as the target comparator + @return The resulting value of the comparator operator + */ + UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO", + META = (DisplayName = "Equals (NDI Receiver Performance Data)", + CompactNodeTitle = "==", Keywords = "= == Equals", AllowPrivateAccess = true)) + static bool K2_Compare_NDIReceiverPerformanceData(FNDIReceiverPerformanceData A, FNDIReceiverPerformanceData B) + { + return A == B; + } + + /** + Returns a value indicating whether the two structures are NOT comparably equal + + @param A The structure used as the source comparator + @param B The structure used as the target comparator + @return 
The resulting value of the comparator operator + */ + UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO", + META = (DisplayName = "Not Equals (NDI Receiver Performance Data)", + CompactNodeTitle = "!=", Keywords = "! != Not Equals", AllowPrivateAccess = true)) + static bool K2_Compare_Not_NDIReceiverPerformanceData(FNDIReceiverPerformanceData A, FNDIReceiverPerformanceData B) + { + return A != B; + } + + /** + Resets the structure's properties to their default values + + @param PerformanceData The structure to reset to the default value + @return The reference to the passed in structure after the 'reset' has been completed + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", + META = (DisplayName = "Reset Receiver Performance Data", AllowPrivateAccess = true)) + static UPARAM(ref) FNDIReceiverPerformanceData& K2_NDIReceiverPerformanceData_Reset( + UPARAM(ref) FNDIReceiverPerformanceData& PerformanceData) + { + // call the underlying function to reset the properties of the object + PerformanceData.Reset(); + + // return the Performance Data object reference + return PerformanceData; + } +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/Objects/Media/NDIMediaReceiver.h b/Plugins/NDIIO/Source/Core/Public/Objects/Media/NDIMediaReceiver.h new file mode 100644 index 0000000..d9a6fdb --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Objects/Media/NDIMediaReceiver.h @@ -0,0 +1,361 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#pragma once + +#include + +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include "NDIMediaReceiver.generated.h" + + +namespace NDIMediaOption +{ + static const FName IsNDIMediaReceiver("IsNDIMediaReceiver"); + static const FName MaxVideoFrameBuffer("MaxVideoFrameBuffer"); + static const FName MaxAudioFrameBuffer("MaxAudioFrameBuffer"); + static const FName MaxAncillaryFrameBuffer("MaxAncillaryFrameBuffer"); +} + + +/** + Delegates to notify that the NDIMediaReceiver has received a video, audio, or metadata frame +*/ +DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaReceiverVideoReceived, UNDIMediaReceiver*, Receiver); +DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaReceiverAudioReceived, UNDIMediaReceiver*, Receiver); +DECLARE_DYNAMIC_MULTICAST_DELEGATE_ThreeParams(FNDIMediaReceiverMetaDataReceived, UNDIMediaReceiver*, Receiver, FString, Data, bool, bAttachedToVideoFrame); + + +/** + A Media object representing the NDI Receiver for being able to receive Audio, Video, and Metadata over NDI +*/ +UCLASS(BlueprintType, Blueprintable, HideCategories = ("Platforms"), Category = "NDI IO", + HideCategories = ("Information"), AutoCollapseCategories = ("Content"), + META = (DisplayName = "NDI Media Receiver")) +class NDIIO_API UNDIMediaReceiver : public UTimeSynchronizableMediaSource +{ + GENERATED_BODY() + +public: + /** + Information describing detailed information about the sender this receiver is to connect to + */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Settings", + META = (DisplayName = "Connection", AllowPrivateAccess = true)) + FNDIConnectionInformation ConnectionSetting; + +private: + /** + The current frame count, seconds, minutes, and hours in time-code notation + */ + UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information", + META = (DisplayName = "Timecode", AllowPrivateAccess = true)) + FTimecode Timecode; + + /** + The desired number of frames (per second) for 
video to be displayed + */ + UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information", + META = (DisplayName = "Frame Rate", AllowPrivateAccess = true)) + FFrameRate FrameRate; + + /** + The width and height of the last received video frame + */ + UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information", + META = (DisplayName = "Resolution", AllowPrivateAccess = true)) + FIntPoint Resolution; + + /** + Indicates whether the timecode should be synced to the Source Timecode value + */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Settings", + META = (DisplayName = "Sync Timecode to Source", AllowPrivateAccess = true)) + bool bSyncTimecodeToSource = true; + + /** + Should perform the sRGB to Linear color space conversion + */ + UPROPERTY(BlueprintReadonly, VisibleAnywhere, Category = "Information", + META = (DisplayName = "Perform sRGB to Linear?", AllowPrivateAccess = true)) + bool bPerformsRGBtoLinear = true; + + /** + Information describing detailed information about the sender this receiver is currently connected to + */ + UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information", + META = (DisplayName = "Connection Information", AllowPrivateAccess = true)) + FNDIConnectionInformation ConnectionInformation; + + /** + Information describing detailed information about the receiver performance when connected to an NDI sender + */ + UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information", + META = (DisplayName = "Performance Data", AllowPrivateAccess = true)) + FNDIReceiverPerformanceData PerformanceData; + + /** + Provides an NDI Video Texture object to render videos frames from the source onto (optional) + */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, BlueprintSetter = "ChangeVideoTexture", Category = "Content", + AdvancedDisplay, META = (DisplayName = "Video Texture (optional)", AllowPrivateAccess = true)) + UNDIMediaTexture2D* VideoTexture = nullptr; + +public: + 
DECLARE_EVENT_OneParam(FNDIMediaReceiverConnectionEvent, FOnReceiverConnectionEvent, + UNDIMediaReceiver*) FOnReceiverConnectionEvent OnNDIReceiverConnectedEvent; + DECLARE_EVENT_OneParam(FNDIMediaReceiverDisconnectionEvent, FOnReceiverDisconnectionEvent, + UNDIMediaReceiver*) FOnReceiverDisconnectionEvent OnNDIReceiverDisconnectedEvent; + + DECLARE_EVENT_TwoParams(FNDIMediaReceiverVideoCaptureEvent, FOnReceiverVideoCaptureEvent, + UNDIMediaReceiver*, const NDIlib_video_frame_v2_t&) FOnReceiverVideoCaptureEvent OnNDIReceiverVideoCaptureEvent; + DECLARE_EVENT_TwoParams(FNDIMediaReceiverAudioCaptureEvent, FOnReceiverAudioCaptureEvent, + UNDIMediaReceiver*, const NDIlib_audio_frame_v2_t&) FOnReceiverAudioCaptureEvent OnNDIReceiverAudioCaptureEvent; + DECLARE_EVENT_TwoParams(FNDIMediaReceiverMetadataCaptureEvent, FOnReceiverMetadataCaptureEvent, + UNDIMediaReceiver*, const NDIlib_metadata_frame_t&) FOnReceiverMetadataCaptureEvent OnNDIReceiverMetadataCaptureEvent; + + UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On Video Received by Receiver", AllowPrivateAccess = true)) + FNDIMediaReceiverVideoReceived OnReceiverVideoReceived; + + UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On Audio Received by Receiver", AllowPrivateAccess = true)) + FNDIMediaReceiverAudioReceived OnReceiverAudioReceived; + + UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On MetaData Received by Receiver", AllowPrivateAccess = true)) + FNDIMediaReceiverMetaDataReceived OnReceiverMetaDataReceived; + +public: + + UNDIMediaReceiver(); + + /** + Called before destroying the object. This is called immediately upon deciding to destroy the object, + to allow the object to begin an asynchronous cleanup process. 
+ */ + void BeginDestroy() override; + + /** + Attempts to perform initialization logic for creating a receiver through the NDI sdk api + */ + enum class EUsage + { + Standalone, // The receiver automatically captures its own video frame every engine render frame + Controlled // The user of the receiver manually triggers capturing a frame through CaptureConnectedVideo/Audio() + }; + bool Initialize(const FNDIConnectionInformation& InConnectionInformation, EUsage InUsage); + bool Initialize(EUsage Inusage); + + /** + Attempt to (re-)start the connection + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Start Connection")) + void StartConnection(); + + /** + Stop the connection + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Stop Connection")) + void StopConnection(); + + /** + Attempts to change the connection to another NDI sender source + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Connection")) + void ChangeConnection(const FNDIConnectionInformation& InConnectionInformation); + + /** + Attempts to change the Video Texture object used as the video frame capture object + */ + UFUNCTION(BlueprintSetter) + void ChangeVideoTexture(UNDIMediaTexture2D* InVideoTexture = nullptr); + + /** + Attempts to generate the pcm data required by the 'AudioWave' object + */ + int32 GeneratePCMData(UNDIMediaSoundWave* AudioWave, uint8* PCMData, const int32 SamplesNeeded); + int32 GetAudioChannels(); + + /** + Attempts to register a sound wave object with this object + */ + void RegisterAudioWave(UNDIMediaSoundWave* InAudioWave = nullptr); + + /** + This will send a metadata frame to the sender + The data is expected to be valid XML + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Metadata To Sender")) + void SendMetadataFrame(const FString& Data); + /** + This will send a metadata frame to the sender + The data will be formatted as: ElementData + 
*/ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Metadata To Sender (Element + Data)")) + void SendMetadataFrameAttr(const FString& Element, const FString& ElementData); + /** + This will send a metadata frame to the sender + The data will be formatted as: + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Metadata To Sender (Element + Attributes)")) + void SendMetadataFrameAttrs(const FString& Element, const TMap& Attributes); + + /** + This will set the up-stream tally notifications. If no streams are connected, it will automatically + send the tally state upon connection + */ + void SendTallyInformation(const bool& IsOnPreview, const bool& IsOnProgram); + + /** + Attempts to immediately stop receiving frames from the connected NDI sender + */ + void Shutdown(); + + /** + Remove the AudioWave object from this object (if it was previously registered) + + @param InAudioWave An NDIMediaSoundWave object registered with this object + */ + void UnregisterAudioWave(UNDIMediaSoundWave* InAudioWave = nullptr); + + /** + Updates the DynamicMaterial with the VideoTexture of this object + */ + void UpdateMaterialTexture(class UMaterialInstanceDynamic* MaterialInstance, FString ParameterName); + + /** + Attempts to capture a frame from the connected source. If a new frame is captured, broadcast it to + interested receivers through the capture event. Returns true if new data was captured. 
+ */ + bool CaptureConnectedVideo(); + bool CaptureConnectedAudio(); + bool CaptureConnectedMetadata(); + + /** + Attempts to immediately update the 'VideoTexture' object with the captured video frame + */ + FTextureRHIRef DisplayFrame(const NDIlib_video_frame_v2_t& video_frame); + +private: + void SetIsCurrentlyConnected(bool bConnected); + + /** + Attempts to gather the performance metrics of the connection to the remote source + */ + void GatherPerformanceMetrics(); + +public: + /** + Set whether or not an sRGB to Linear conversion is made + */ + void PerformsRGBToLinearConversion(bool Value); + + /** + Returns the current framerate of the connected source + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Current Frame Rate")) + const FFrameRate& GetCurrentFrameRate() const; + + /** + Returns the current resolution of the connected source + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Current Resolution")) + const FIntPoint& GetCurrentResolution() const; + + /** + Returns the current timecode of the connected source + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Current Timecode")) + const FTimecode& GetCurrentTimecode() const; + + /** + Returns the current connection information of the connected source + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Current Connection Information")) + const FNDIConnectionInformation& GetCurrentConnectionInformation() const; + + /** + Returns the current performance data of the receiver while connected to the source + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Performance Data")) + const FNDIReceiverPerformanceData& GetPerformanceData() const; + + /** Returns a value indicating whether this object is currently connected to the sender source */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Is Currently Connected")) + const 
bool GetIsCurrentlyConnected() const; + +private: + /** + Perform the color conversion (if any) and bit copy from the gpu + */ + FTextureRHIRef DrawProgressiveVideoFrame(FRHICommandListImmediate& RHICmdList, const NDIlib_video_frame_v2_t& Result); + FTextureRHIRef DrawProgressiveVideoFrameAlpha(FRHICommandListImmediate& RHICmdList, const NDIlib_video_frame_v2_t& Result); + FTextureRHIRef DrawInterlacedVideoFrame(FRHICommandListImmediate& RHICmdList, const NDIlib_video_frame_v2_t& Result); + FTextureRHIRef DrawInterlacedVideoFrameAlpha(FRHICommandListImmediate& RHICmdList, const NDIlib_video_frame_v2_t& Result); + + virtual bool Validate() const override + { + return true; + } + virtual FString GetUrl() const override; + + FTextureResource* GetVideoTextureResource() const; + FTextureResource* GetInternalVideoTextureResource() const; + +#if WITH_EDITORONLY_DATA + virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; +#endif + +public: + virtual bool GetMediaOption(const FName& Key, bool DefaultValue) const override; + virtual int64 GetMediaOption(const FName& Key, int64 DefaultValue) const override; + virtual FString GetMediaOption(const FName& Key, const FString& DefaultValue) const override; + virtual bool HasMediaOption(const FName& Key) const override; + +private: + int64_t LastFrameTimestamp = 0; + NDIlib_frame_format_type_e LastFrameFormatType = NDIlib_frame_format_type_max; + + bool bIsCurrentlyConnected = false; + + NDIlib_recv_instance_t p_receive_instance = nullptr; + NDIlib_framesync_instance_t p_framesync_instance = nullptr; + + FCriticalSection RenderSyncContext; + FCriticalSection AudioSyncContext; + FCriticalSection MetadataSyncContext; + FCriticalSection ConnectionSyncContext; + + TArray AudioSourceCollection; + + UNDIMediaTexture2D* InternalVideoTexture = nullptr; + + FTextureRHIRef SourceTexture; + FTextureRHIRef SourceAlphaTexture; + FPooledRenderTargetDesc RenderTargetDescriptor; + TRefCountPtr RenderTarget; + 
enum class EDrawMode + { + Invalid, + Progressive, + ProgressiveAlpha, + Interlaced, + InterlacedAlpha + }; + EDrawMode DrawMode = EDrawMode::Invalid; + + FDelegateHandle FrameEndRTHandle; + FDelegateHandle VideoCaptureEventHandle; +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/Objects/Media/NDIMediaSender.h b/Plugins/NDIIO/Source/Core/Public/Objects/Media/NDIMediaSender.h new file mode 100644 index 0000000..25ef37d --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Objects/Media/NDIMediaSender.h @@ -0,0 +1,362 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#pragma once + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "NDIMediaSender.generated.h" + +/** + A delegate used for notifications on property changes on the NDIMediaSender object +*/ +DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaSenderPropertyChanged, UNDIMediaSender*, Sender); + +/** + A delegate used for notifications on the NDIMediaSender object receiving metadata +*/ +DECLARE_DYNAMIC_MULTICAST_DELEGATE_TwoParams(FNDIMediaSenderMetaDataReceived, UNDIMediaSender*, Sender, FString, Data); + +/** + Delegates to notify just before and after the NDIMediaSender sends a video, audio, or metadata frame +*/ +DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaSenderVideoPreSend, UNDIMediaSender*, Sender); +DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaSenderVideoSent, UNDIMediaSender*, Sender); + +DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaSenderAudioPreSend, UNDIMediaSender*, Sender); +DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaSenderAudioSent, UNDIMediaSender*, Sender); + 
+DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaSenderMetaDataPreSend, UNDIMediaSender*, Sender); +DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaSenderMetaDataSent, UNDIMediaSender*, Sender); + +/** + Defines a media object representing an NDI(R) Sender object. This object is used with the + NDI Broadcast Component to send Audio / Video / Metadata to a 'receiving' NDI object. +*/ +UCLASS(BlueprintType, Blueprintable, HideCategories = ("Platforms"), Category = "NDI IO", + HideCategories = ("Information"), AutoCollapseCategories = ("Content"), + META = (DisplayName = "NDI Sender Object")) +class NDIIO_API UNDIMediaSender : public UBaseMediaSource +{ + GENERATED_UCLASS_BODY() + +private: + /** Describes a user-friendly name of the output stream to differentiate from other output streams on the current + * machine */ + UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "Broadcast Settings", + META = (DisplayName = "Source Name", AllowPrivateAccess = true)) + FString SourceName = TEXT("Unreal Engine Output"); + + /** Describes the output frame size while sending video frame over NDI */ + UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "Broadcast Settings", + META = (DisplayName = "Frame Size", AllowPrivateAccess = true)) + FIntPoint FrameSize = FIntPoint(1920, 1080); + + /** Represents the desired number of frames (per second) for video to be sent over NDI */ + UPROPERTY(BlueprintReadwrite, EditDefaultsOnly, Category = "Broadcast Settings", + META = (DisplayName = "Frame Rate", AllowPrivateAccess = true)) + FFrameRate FrameRate = FFrameRate(60, 1); + + /** Sets whether or not to output an alpha channel */ + UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "Broadcast Settings", + META = (DisplayName="Output Alpha", AllowPrivateAccess = true)) + bool OutputAlpha = false; + + UPROPERTY(BlueprintReadonly, VisibleAnywhere, Category = "Broadcast Settings", + META = (DisplayName = "Alpha Remap Min", AllowPrivateAccess = true)) + float 
AlphaMin = 0.f; + + UPROPERTY(BlueprintReadonly, VisibleAnywhere, Category = "Broadcast Settings", + META = (DisplayName = "Alpha Remap Max", AllowPrivateAccess = true)) + float AlphaMax = 1.f; + + UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "Broadcast Settings", + META = (DisplayName="Enable Audio", AllowPrivateAccess = true)) + bool bEnableAudio = true; + + /** Sets whether or not to present PTZ capabilities */ + UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "Broadcast Settings", + META = (DisplayName="Enable PTZ", AllowPrivateAccess = true)) + bool bEnablePTZ = true; + + /** Indicates the texture to send over NDI (optional) */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Content", + AdvancedDisplay, META = (DisplayName = "Render Target (optional)", AllowPrivateAccess = true)) + UTextureRenderTarget2D* RenderTarget = nullptr; + + /** + Should perform the Linear to sRGB color space conversion + */ + UPROPERTY(BlueprintReadonly, VisibleAnywhere, Category = "Information", + META = (DisplayName = "Perform Linear to sRGB?", AllowPrivateAccess = true)) + bool bPerformLinearTosRGB = true; + +public: + UPROPERTY() + FNDIMediaSenderPropertyChanged OnBroadcastConfigurationChanged; + + UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On MetaData Received by Sender", AllowPrivateAccess = true)) + FNDIMediaSenderMetaDataReceived OnSenderMetaDataReceived; + + UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On Before Video Being Sent by Sender", AllowPrivateAccess = true)) + FNDIMediaSenderVideoPreSend OnSenderVideoPreSend; + + UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On Video Sent by Sender", AllowPrivateAccess = true)) + FNDIMediaSenderVideoSent OnSenderVideoSent; + + UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On Before Audio Being Sent by Sender", AllowPrivateAccess = true)) + FNDIMediaSenderAudioPreSend 
OnSenderAudioPreSend; + + UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On Audio Sent by Sender", AllowPrivateAccess = true)) + FNDIMediaSenderAudioSent OnSenderAudioSent; + + UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On Before MetaData Being Sent by Sender", AllowPrivateAccess = true)) + FNDIMediaSenderMetaDataPreSend OnSenderMetaDataPreSend; + + UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On MetaData Sent by Sender", AllowPrivateAccess = true)) + FNDIMediaSenderMetaDataSent OnSenderMetaDataSent; + +public: + /** + Attempts to perform initialization logic for creating a sender through the NDI(R) sdk api + */ + void Initialize(USoundSubmix* SubmixCapture); + + /** + Changes the name of the sender object as seen on the network for remote connections + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Source Name")) + void ChangeSourceName(const FString& InSourceName); + + /** + Attempts to change the Broadcast information associated with this media object + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Broadcast Configuration")) + void ChangeBroadcastConfiguration(const FNDIBroadcastConfiguration& InConfiguration); + + /** + This will send a metadata frame to all receivers + The data is expected to be valid XML + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Metadata To Receivers")) + void SendMetadataFrame(const FString& Data, bool AttachToVideoFrame = true); + /** + This will send a metadata frame to all receivers + The data will be formatted as: ElementData + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Metadata To Receivers (Element + Data)")) + void SendMetadataFrameAttr(const FString& Element, const FString& ElementData, bool AttachToVideoFrame = true); + /** + This will send a metadata frame to all receivers + The 
data will be formatted as: + */ + UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Metadata To Receivers (Element + Attributes)")) + void SendMetadataFrameAttrs(const FString& Element, const TMap& Attributes, bool AttachToVideoFrame = true); + + /** + Attempts to change the RenderTarget used in sending video frames over NDI + */ + void ChangeVideoTexture(UTextureRenderTarget2D* VideoTexture = nullptr); + + /** + Change the alpha remapping settings + */ + void ChangeAlphaRemap(float AlphaMinIn, float AlphaMaxIn); + + /** + Determines the current tally information. If you specify a timeout then it will wait until it has + changed, otherwise it will simply poll it and return the current tally immediately + + @param IsOnPreview - A state indicating whether this source is on preview of a receiver + @param IsOnProgram - A state indicating whether this source is on program of a receiver + @param Timeout - Indicates the amount of time to wait (in milliseconds) until a change has occurred + */ + void GetTallyInformation(bool& IsOnPreview, bool& IsOnProgram, uint32 Timeout = 0); + + /** + Gets the current number of receivers connected to this source. This can be used to avoid rendering + when nothing is connected to the video source, which can significantly improve the efficiency if + you want to make a lot of sources available on the network + */ + void GetNumberOfConnections(int32& Result); + + /** + Attempts to immediately stop sending frames over NDI to any connected receivers + */ + void Shutdown(); + + /** + Called before destroying the object. This is called immediately upon deciding to destroy the object, + to allow the object to begin an asynchronous cleanup process. 
+ */ + virtual void BeginDestroy() override; + + /** + Set whether or not a Linear to sRGB conversion is made + */ + void PerformLinearTosRGBConversion(bool Value); + + /** + Set whether or not to enable PTZ support + */ + void EnablePTZ(bool Value); + + /** + Returns the Render Target used for sending a frame over NDI + */ + UTextureRenderTarget2D* GetRenderTarget(); + + const FIntPoint& GetFrameSize() + { + return this->FrameSize; + } + + const FFrameRate& GetFrameRate() + { + return this->FrameRate; + } + +private: + + bool CreateSender(); + + /** + Attempts to get a metadata frame from the sender. + If there is one, the data is broadcast through OnSenderMetaDataReceived. + Returns true if metadata was received, false otherwise. + */ + bool GetMetadataFrame(); + + /** + This will attempt to generate an audio frame, add the frame to the stack and return immediately, + having scheduled the frame asynchronously. + */ + void TrySendAudioFrame(int64 time_code, float* AudioData, int32 NumSamples, int32 NumChannels, const int32 SampleRate, double AudioClock); + + /** + This will attempt to generate a video frame, add the frame to the stack and return immediately, + having scheduled the frame asynchronously. 
+ */ + void TrySendVideoFrame(int64 time_code = 0); + + /** + Perform the color conversion (if any) and bit copy from the gpu + */ + bool DrawRenderTarget(FRHICommandListImmediate& RHICmdList); + + /** + Change the render target configuration based on the passed in parameters + + @param InFrameSize The frame size to resize the render target to + @param InFrameRate The frame rate at which we should be sending frames via NDI + */ + void ChangeRenderTargetConfiguration(FIntPoint InFrameSize, FFrameRate InFrameRate); + + virtual bool Validate() const override + { + return true; + } + virtual FString GetUrl() const override + { + return FString(); + } + + FTextureResource* GetRenderTargetResource() const; + + void PrepareDefaultTexture(); + +private: + std::atomic bIsChangingBroadcastSize { false }; + + FTimecode LastRenderTime; + + FTextureRHIRef DefaultVideoTextureRHI; + + TArray SendAudioData; + + NDIlib_video_frame_v2_t NDI_video_frame; + NDIlib_send_instance_t p_send_instance = nullptr; + + FCriticalSection AudioSyncContext; + FCriticalSection RenderSyncContext; + + /** + A texture with CPU readback + */ + class MappedTexture + { + private: + FTextureRHIRef Texture = nullptr; + void* pData = nullptr; + std::string MetaData; + FIntPoint FrameSize; + + public: + ~MappedTexture(); + + void Create(FIntPoint FrameSize); + void Destroy(); + + FIntPoint GetSizeXY() const; + + void Resolve(FRHICommandListImmediate& RHICmdList, FRHITexture* SourceTextureRHI, const FResolveRect& Rect = FResolveRect(), const FResolveRect& DestRect = FResolveRect()); + + void Map(FRHICommandListImmediate& RHICmdList, int32& OutWidth, int32& OutHeight, int32& OutLineStride); + void* MappedData() const; + void Unmap(FRHICommandListImmediate& RHICmdList); + + void AddMetaData(const FString& Data); + const std::string& GetMetaData() const; + + private: + void PrepareTexture(); + }; + + /** + Class for managing the sending of mapped texture data to an NDI video stream. 
+ Sending is done asynchronously, so mapping and unmapping of texture data must + be managed so that CPU accessible texture content remains valid until the + sending of the frame is guaranteed to have been completed. This is achieved + by double-buffering readback textures. + */ + class MappedTextureASyncSender + { + private: + MappedTexture MappedTextures[2]; + int32 CurrentIndex = 0; + + public: + void Create(FIntPoint FrameSize); + void Destroy(); + + FIntPoint GetSizeXY() const; + + void Resolve(FRHICommandListImmediate& RHICmdList, FRHITexture* SourceTextureRHI, const FResolveRect& Rect = FResolveRect(), const FResolveRect& DestRect = FResolveRect()); + + void Map(FRHICommandListImmediate& RHICmdList, int32& OutWidth, int32& OutHeight, int32& OutLineStride); + void Send(FRHICommandListImmediate& RHICmdList, NDIlib_send_instance_t p_send_instance, NDIlib_video_frame_v2_t& p_video_data); + void Flush(FRHICommandListImmediate& RHICmdList, NDIlib_send_instance_t p_send_instance); + + void AddMetaData(const FString& Data); + }; + + MappedTextureASyncSender ReadbackTextures; + bool ReadbackTexturesHaveAlpha = false; + FPooledRenderTargetDesc RenderTargetDescriptor; +}; diff --git a/Plugins/NDIIO/Source/Core/Public/Objects/Media/NDIMediaSoundWave.h b/Plugins/NDIIO/Source/Core/Public/Objects/Media/NDIMediaSoundWave.h new file mode 100644 index 0000000..93683d7 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Objects/Media/NDIMediaSoundWave.h @@ -0,0 +1,42 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#pragma once + +#include +#include + +#include "NDIMediaSoundWave.generated.h" + +/** + Defines a SoundWave object used by an NDI Media Receiver object for capturing audio from + a network source +*/ +UCLASS(NotBlueprintable, Category = "NDI IO", META = (DisplayName = "NDI Media Sound Wave")) +class NDIIO_API UNDIMediaSoundWave : public USoundWaveProcedural +{ + GENERATED_UCLASS_BODY() + +public: + /** + Set the Media Source of this object, so that when this object is called to 'GeneratePCMData' by the engine + we can request the media source to provide the pcm data from the current connected source + */ + void SetConnectionSource(class UNDIMediaReceiver* InMediaSource = nullptr); + +protected: + /** + Called by the engine to generate pcm data to be 'heard' by audio listener objects + */ + virtual int32 OnGeneratePCMAudio(TArray& OutAudio, int32 NumSamples) override final; + + virtual bool IsReadyForFinishDestroy() override final; + +private: + FCriticalSection SyncContext; + class UNDIMediaReceiver* MediaSource = nullptr; +}; diff --git a/Plugins/NDIIO/Source/Core/Public/Objects/Media/NDIMediaTexture2D.h b/Plugins/NDIIO/Source/Core/Public/Objects/Media/NDIMediaTexture2D.h new file mode 100644 index 0000000..02cc3bc --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Objects/Media/NDIMediaTexture2D.h @@ -0,0 +1,49 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#pragma once + +#include +#include +#include +#include +#include + +#include "NDIMediaTexture2D.generated.h" + +/** + A Texture Object used by an NDI Media Receiver object for capturing video from + a network source +*/ +UCLASS(NotBlueprintType, NotBlueprintable, HideDropdown, + HideCategories = (ImportSettings, Compression, Texture, Adjustments, Compositing, LevelOfDetail, Object), + META = (DisplayName = "NDI Media Texture 2D")) +class NDIIO_API UNDIMediaTexture2D : public UTexture +{ + GENERATED_UCLASS_BODY() + +public: + virtual float GetSurfaceHeight() const override; + virtual float GetSurfaceWidth() const override; + + virtual float GetSurfaceDepth() const; + virtual uint32 GetSurfaceArraySize() const; + + virtual ETextureClass GetTextureClass() const; + + virtual void GetResourceSizeEx(FResourceSizeEx& CumulativeResourceSize) override; + virtual EMaterialValueType GetMaterialType() const override; + + virtual void UpdateTextureReference(FRHICommandList& RHICmdList, FTextureRHIRef Reference) final; + +private: + virtual class FTextureResource* CreateResource() override; + + void SetMyResource(FTextureResource* ResourceIn); + FTextureResource* GetMyResource(); + const FTextureResource* GetMyResource() const; +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/Objects/Media/NDIMediaTextureResource.h b/Plugins/NDIIO/Source/Core/Public/Objects/Media/NDIMediaTextureResource.h new file mode 100644 index 0000000..33adf1f --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Objects/Media/NDIMediaTextureResource.h @@ -0,0 +1,54 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#pragma once + +#include +#include +#include +#include + +/** + A Texture Resource object used by the NDIMediaTexture2D object for capturing video + from a network source +*/ +class NDIIO_API FNDIMediaTextureResource : public FTextureResource +{ +public: + /** + Constructs a new instance of this object specifying a media texture owner + + @param Owner The media object used as the owner for this object + */ + FNDIMediaTextureResource(class UNDIMediaTexture2D* Owner = nullptr); + +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later + /** FTextureResource Interface Implementation for 'InitDynamicRHI' */ + virtual void InitRHI(FRHICommandListBase& RHICmdList) override; + + /** FTextureResource Interface Implementation for 'ReleaseDynamicRHI' */ + virtual void ReleaseRHI() override; +#else + /** FTextureResource Interface Implementation for 'InitDynamicRHI' */ + virtual void InitDynamicRHI() override; + + /** FTextureResource Interface Implementation for 'ReleaseDynamicRHI' */ + virtual void ReleaseDynamicRHI() override; +#endif + + /** FTextureResource Interface Implementation for 'GetResourceSize' */ + SIZE_T GetResourceSize(); + + /** FTextureResource Interface Implementation for 'GetSizeX' */ + virtual uint32 GetSizeX() const override; + + /** FTextureResource Interface Implementation for 'GetSizeY' */ + virtual uint32 GetSizeY() const override; + +private: + class UNDIMediaTexture2D* MediaTexture = nullptr; +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/Services/NDIConnectionService.h b/Plugins/NDIIO/Source/Core/Public/Services/NDIConnectionService.h new file mode 100644 index 0000000..72ac4e5 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Services/NDIConnectionService.h @@ -0,0 +1,109 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. 
+ + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#pragma once + +#include +#include +#include +#include +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later +#include +#endif +#include + +DECLARE_EVENT_OneParam(FNDICoreDelegates, FNDIConnectionServiceSendVideoEvent, int64) +DECLARE_EVENT_SixParams(FNDICoreDelegates, FNDIConnectionServiceSendAudioEvent, int64, float*, int32, int32, const int32, double) + +/** + A service which runs and triggers updates for interested parties to be notified of + Audio and Video Frame events +*/ +class NDIIO_API FNDIConnectionService final : public ISubmixBufferListener +{ +public: + static FNDIConnectionServiceSendVideoEvent EventOnSendVideoFrame; +private: + static TMap SubmixSendAudioFrameEvents; + +public: + /** + Constructs a new instance of this object + */ + FNDIConnectionService(); + + // Begin the service + bool Start(); + + // Stop the service + void Shutdown(); + + bool BeginBroadcastingActiveViewport(); + void StopBroadcastingActiveViewport(); + + bool IsRunningInPIE() const + { + return bIsInPIEMode; + } + + template + static void AddAudioSender(UserClass* InUserObject, USoundSubmix* Submix, typename TMemFunPtrType::Type InFunc) + { + FScopeLock Lock(&AudioSyncContext); + + FNDIConnectionServiceSendAudioEvent& SendAudioEvent = SubmixSendAudioFrameEvents.FindOrAdd(Submix); + SendAudioEvent.AddUObject(InUserObject, InFunc); + } + + template + static void RemoveAudioSender(UserClass* InUserObject) + { + FScopeLock Lock(&AudioSyncContext); + + for (auto it = SubmixSendAudioFrameEvents.CreateIterator(); it; ++it) + { + it->Value.RemoveAll(InUserObject); + if (it->Value.IsBound() == false) + it.RemoveCurrent(); + } + } + +private: + // Handler for when the render thread frame has ended + void 
OnEndRenderFrame(); + + void BeginAudioCapture(); + void StopAudioCapture(); + + void OnPostEngineInit(); + void OnEnginePreExit(); + + // Handler for when the active viewport back buffer is about to be resized + void OnActiveViewportBackbufferPreResize(void* Backbuffer); + + // Handler for when the back buffer is read to present to the end user + void OnActiveViewportBackbufferReadyToPresent(SWindow& Window, const FTextureRHIRef& Backbuffer); + + FTextureResource* GetVideoTextureResource() const; + + virtual void OnNewSubmixBuffer(const USoundSubmix* OwningSubmix, float* AudioData, int32 NumSamples, int32 NumChannels, const int32 SampleRate, double AudioClock) override final; +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 4)) // 5.4 or later + virtual const FString& GetListenerName() const override final; +#endif + +private: + bool bIsInitialized = false; + bool bIsAudioInitialized = false; + bool bIsBroadcastingActiveViewport = false; + bool bIsInPIEMode = false; + + static FCriticalSection AudioSyncContext; + static FCriticalSection RenderSyncContext; + + UTextureRenderTarget2D* VideoTexture = nullptr; + class UNDIMediaSender* ActiveViewportSender = nullptr; +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/Services/NDIFinderService.h b/Plugins/NDIIO/Source/Core/Public/Services/NDIFinderService.h new file mode 100644 index 0000000..7aacf8a --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Services/NDIFinderService.h @@ -0,0 +1,62 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#pragma once + +#include + +#include +#include +#include + +/** + A Runnable object used for Finding NDI network Sources, and updating interested parties +*/ +class NDIIO_API FNDIFinderService : public FRunnable +{ +public: + FNDIFinderService(); + + // Begin the service + virtual bool Start(); + + // Stop the service + virtual void Shutdown(); + +public: + /** Get the available sources on the network */ + static const TArray GetNetworkSourceCollection(); + + /** Call to update an existing collection of network sources to match the current collection */ + static bool UpdateSourceCollection(TArray& InSourceCollection); + + /** Event which is triggered when the collection of network sources has changed */ + DECLARE_EVENT(FNDICoreDelegates, FNDISourceCollectionChangedEvent) + static FNDISourceCollectionChangedEvent EventOnNDISourceCollectionChanged; + +protected: + /** FRunnable Interface implementation for 'Init' */ + virtual bool Init() override; + + /** FRunnable Interface implementation for 'Stop' */ + virtual void Stop() override; + + /** FRunnable Interface implementation for 'Run' */ + virtual uint32 Run() override; + +private: + bool UpdateNetworkSourceCollection(); + +private: + bool bShouldWaitOneFrame = true; + bool bIsNetworkSourceCollectionDirty = false; + + FThreadSafeBool bIsThreadRunning; + FRunnableThread* p_RunnableThread = nullptr; + + static TArray NetworkSourceCollection; +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/Structures/NDIBroadcastConfiguration.h b/Plugins/NDIIO/Source/Core/Public/Structures/NDIBroadcastConfiguration.h new file mode 100644 index 0000000..b380548 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Structures/NDIBroadcastConfiguration.h @@ -0,0 +1,61 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. 
For more information, please review the license and the NDI SDK documentation. +*/ + +#pragma once + +#include +#include + +#include "NDIBroadcastConfiguration.generated.h" + +/** + Describes essential properties used for modifying the broadcast configuration of an Sender object +*/ +USTRUCT(BlueprintType, Blueprintable, Category = "NDI IO", META = (DisplayName = "NDI Broadcast Configuration")) +struct NDIIO_API FNDIBroadcastConfiguration +{ + GENERATED_USTRUCT_BODY() + +public: + /** Describes the output frame size while sending video frame over NDI */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Broadcast Settings", META = (DisplayName = "Frame Size")) + FIntPoint FrameSize = FIntPoint(1920, 1080); + + /** Represents the desired number of frames (per second) for video to be sent over NDI */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Broadcast Settings", META = (DisplayName = "Frame Rate")) + FFrameRate FrameRate = FFrameRate(60, 1); + +public: + /** Constructs a new instance of this object */ + FNDIBroadcastConfiguration() = default; + + /** Copies an existing instance to this object */ + FNDIBroadcastConfiguration(const FNDIBroadcastConfiguration& other); + + /** Copies existing instance properties to this object */ + FNDIBroadcastConfiguration& operator=(const FNDIBroadcastConfiguration& other); + + /** Destructs this object */ + virtual ~FNDIBroadcastConfiguration() = default; + + /** Compares this object to 'other' and returns a determination of whether they are equal */ + bool operator==(const FNDIBroadcastConfiguration& other) const; + + /** Compares this object to 'other" and returns a determination of whether they are NOT equal */ + bool operator!=(const FNDIBroadcastConfiguration& other) const; + +protected: + /** Attempts to serialize this object using an Archive object */ + virtual FArchive& Serialize(FArchive& Ar); + +private: + /** Operator override for serializing this object to an Archive object */ + friend class 
FArchive& operator<<(FArchive& Ar, FNDIBroadcastConfiguration& Input) + { + return Input.Serialize(Ar); + } +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/Structures/NDIConnectionInformation.h b/Plugins/NDIIO/Source/Core/Public/Structures/NDIConnectionInformation.h new file mode 100644 index 0000000..ebe8eb9 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Structures/NDIConnectionInformation.h @@ -0,0 +1,92 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#pragma once + +#include +#include +#include + +#include "NDIConnectionInformation.generated.h" + +/** + Describes essential properties used for connection objects over NDI +*/ +USTRUCT(BlueprintType, Blueprintable, Category = "NDI IO", META = (DisplayName = "NDI Connection Information")) +struct NDIIO_API FNDIConnectionInformation +{ + GENERATED_USTRUCT_BODY() + +public: + /** A user-friendly name of the source */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Properties", META = (DisplayName = "Source Name")) + FString SourceName = FString(""); + + /** The machine name of the source */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Properties", META = (DisplayName = "Machine Name")) + FString MachineName = FString(""); + + /** The stream name of the source */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Properties", META = (DisplayName = "Stream Name")) + FString StreamName = FString(""); + + /** A location on the network for which this source exists */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Properties", META = (DisplayName = "Url")) + FString Url = FString(""); + + /** Indicates the current bandwidth mode used for this connection */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = 
"Properties", META = (DisplayName = "Bandwidth")) + ENDISourceBandwidth Bandwidth = ENDISourceBandwidth::Highest; + + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Properties", META = (DisplayName = "Mute Audio")) + bool bMuteAudio = false; + + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Properties", META = (DisplayName = "Mute Video")) + bool bMuteVideo = false; + +public: + /** Constructs a new instance of this object */ + FNDIConnectionInformation() = default; + + /** Copies an existing instance to this object */ + FNDIConnectionInformation(const FNDIConnectionInformation& other); + + /** Copies existing instance properties to this object */ + FNDIConnectionInformation& operator=(const FNDIConnectionInformation& other); + + /** Destructs this object */ + virtual ~FNDIConnectionInformation() = default; + + /** Implicit conversion to a base NDI bandwidth value */ + operator NDIlib_recv_bandwidth_e() const; + + /** Compares this object to 'other' and returns a determination of whether they are equal */ + bool operator==(const FNDIConnectionInformation& other) const; + + /** Compares this object to 'other" and returns a determination of whether they are NOT equal */ + bool operator!=(const FNDIConnectionInformation& other) const; + +public: + /** Resets the current parameters to the default property values */ + void Reset(); + + /** Determines whether this object is valid connection information */ + bool IsValid() const; + + FString GetNDIName() const; + +protected: + /** Attempts to serialize this object using an Archive object */ + virtual FArchive& Serialize(FArchive& Ar); + +private: + /** Operator override for serializing this object to an Archive object */ + friend class FArchive& operator<<(FArchive& Ar, FNDIConnectionInformation& Input) + { + return Input.Serialize(Ar); + } +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/Structures/NDIReceiverPerformanceData.h 
b/Plugins/NDIIO/Source/Core/Public/Structures/NDIReceiverPerformanceData.h new file mode 100644 index 0000000..2ee0611 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Structures/NDIReceiverPerformanceData.h @@ -0,0 +1,97 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#pragma once + +#include +#include + +#include "NDIReceiverPerformanceData.generated.h" + +/** + A structure holding data allowing you to determine the current performance levels of the receiver with the + ability to detect whether frames has been dropped +*/ +USTRUCT(BlueprintType, Blueprintable, Category = "NDI IO", META = (DisplayName = "NDI Receiver Performance Data")) +struct NDIIO_API FNDIReceiverPerformanceData +{ + GENERATED_USTRUCT_BODY() + +public: + /** + The number of audio frames received from the NDI sender + */ + UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information", META = (DisplayName = "Audio Frames")) + int64 AudioFrames = 0; + + /** + The number of video frames dropped in transit from an NDI sender + */ + UPROPERTY(BlueprintReadonly, VisibleAnywhere, Category = "Information", + META = (DisplayName = "Dropped Video Frames")) + int64 DroppedVideoFrames = 0; + + /** + The number of audio frames dropped in transit from the NDI sender + */ + UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information", + META = (DisplayName = "Dropped Audio Frames")) + int64 DroppedAudioFrames = 0; + + /** + The number of metadata frames dropped in transit from the NDI sender + */ + UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information", + META = (DisplayName = "Dropped Metadata Frames")) + int64 DroppedMetadataFrames = 0; + + /** + The number of metadata frames received from the NDI sender + */ + UPROPERTY(BlueprintReadOnly, 
VisibleAnywhere, Category = "Information", META = (DisplayName = "Metadata Frames")) + int64 MetadataFrames = 0; + + /** + The number of video frames received from the NDI sender + */ + UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information", META = (DisplayName = "Video Frames")) + int64 VideoFrames = 0; + +public: + /** Constructs a new instance of this object */ + FNDIReceiverPerformanceData() = default; + + /** Copies an existing instance to this object */ + FNDIReceiverPerformanceData(const FNDIReceiverPerformanceData& other); + + /** Copies existing instance properties to this object */ + FNDIReceiverPerformanceData& operator=(const FNDIReceiverPerformanceData& other); + + /** Destructs this object */ + virtual ~FNDIReceiverPerformanceData() = default; + + /** Compares this object to 'other' and returns a determination of whether they are equal */ + bool operator==(const FNDIReceiverPerformanceData& other) const; + + /** Compares this object to 'other" and returns a determination of whether they are NOT equal */ + bool operator!=(const FNDIReceiverPerformanceData& other) const; + +public: + /** Resets the current parameters to the default property values */ + void Reset(); + +protected: + /** Attempts to serialize this object using an Archive object */ + virtual FArchive& Serialize(FArchive& Ar); + +private: + /** Operator override for serializing this object to an Archive object */ + friend class FArchive& operator<<(FArchive& Ar, FNDIReceiverPerformanceData& Input) + { + return Input.Serialize(Ar); + } +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Core/Public/Structures/NDIXml.h b/Plugins/NDIIO/Source/Core/Public/Structures/NDIXml.h new file mode 100644 index 0000000..c4ce475 --- /dev/null +++ b/Plugins/NDIIO/Source/Core/Public/Structures/NDIXml.h @@ -0,0 +1,131 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. 
+ + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#pragma once + +#include + +#include + +class NDIXmlElementParser +{ +public: + virtual ~NDIXmlElementParser() + {} + + // Start parsing this element + virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData) + { + return true; + } + + // Parse an attribute of this element + virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) + { + return true; + } + + // Start parsing a sub-element + virtual TSharedRef* ProcessElement(const TCHAR* ElementName, const TCHAR* ElementData) + { + return nullptr; + } + + // Finish parsing this element + virtual bool ProcessClose(const TCHAR* ElementName) + { + return true; + } +}; + +class NDIXmlElementParser_null : public NDIXmlElementParser +{ +public: +}; + + +class NDIXmlParser : public IFastXmlCallback +{ +public: + virtual ~NDIXmlParser() + {} + + + void AddElementParser(FName ElementName, TSharedRef ElementParser) + { + ElementParsers.Add(ElementName, ElementParser); + } + + virtual bool ProcessXmlDeclaration(const TCHAR* ElementData, int32 XmlFileLineNumber) override + { + return true; + } + + virtual bool ProcessElement(const TCHAR* ElementName, const TCHAR* ElementData, int32 XmlFileLineNumber) override + { + if(ElementParserStack.Num() == 0) + { + TSharedRef* ParserPtr = ElementParsers.Find(ElementName); + if(ParserPtr == nullptr) + ParserPtr = &NullParser; + + ElementParserStack.Push(*ParserPtr); + return (*ParserPtr)->ProcessOpen(ElementName, ElementData); + } + else + { + TSharedRef* ParserPtr = ElementParserStack.Last()->ProcessElement(ElementName, ElementData); + if(ParserPtr == nullptr) + ParserPtr = &NullParser; + + ElementParserStack.Push(*ParserPtr); + return (*ParserPtr)->ProcessOpen(ElementName, ElementData); + } + + //return false; + } + + virtual 
bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override + { + if(ElementParserStack.Num() == 0) + { + return true; + } + else + { + return ElementParserStack.Last()->ProcessAttribute(AttributeName, AttributeValue); + } + + //return false; + } + + virtual bool ProcessClose(const TCHAR* ElementName) override + { + if(ElementParserStack.Num() == 0) + { + return true; + } + else + { + auto Parser = ElementParserStack.Pop(); + return Parser->ProcessClose(ElementName); + } + + //return false; + } + + virtual bool ProcessComment(const TCHAR* Comment) override + { + return true; + } + +protected: + TMap > ElementParsers; + TArray > ElementParserStack; + + TSharedRef NullParser { MakeShareable(new NDIXmlElementParser_null()) }; +}; diff --git a/Plugins/NDIIO/Source/Editor/Classes/Factories/NDIMediaReceiverFactory.cpp b/Plugins/NDIIO/Source/Editor/Classes/Factories/NDIMediaReceiverFactory.cpp new file mode 100644 index 0000000..97f6ba1 --- /dev/null +++ b/Plugins/NDIIO/Source/Editor/Classes/Factories/NDIMediaReceiverFactory.cpp @@ -0,0 +1,36 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#include + +#include +#include + +#define LOCTEXT_NAMESPACE "NDIIOEditorMediaReceiverFactory" + +UNDIMediaReceiverFactory::UNDIMediaReceiverFactory(const FObjectInitializer& ObjectInitializer) + : Super(ObjectInitializer) { + + this->bCreateNew = true; + this->bEditAfterNew = true; + + this->SupportedClass = UNDIMediaReceiver::StaticClass(); +} + +FText UNDIMediaReceiverFactory::GetDisplayName() const { return LOCTEXT("NDIMediaReceiverFactoryDisplayName", "NDI Media Receiver"); } + +uint32 UNDIMediaReceiverFactory::GetMenuCategories() const +{ + return EAssetTypeCategories::Media; +} + +UObject* UNDIMediaReceiverFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +{ + return NewObject(InParent, InClass, InName, Flags | RF_Transactional); +} + +#undef LOCTEXT_NAMESPACE \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Editor/Classes/Factories/NDIMediaSenderFactory.cpp b/Plugins/NDIIO/Source/Editor/Classes/Factories/NDIMediaSenderFactory.cpp new file mode 100644 index 0000000..499d0d4 --- /dev/null +++ b/Plugins/NDIIO/Source/Editor/Classes/Factories/NDIMediaSenderFactory.cpp @@ -0,0 +1,33 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#include + +#include +#include + +#define LOCTEXT_NAMESPACE "NDIIOEditorMediaSenderFactory" + +UNDIMediaSenderFactory::UNDIMediaSenderFactory(const FObjectInitializer& ObjectInitializer) + : Super(ObjectInitializer) { + + bCreateNew = true; + bEditAfterNew = true; + + this->SupportedClass = UNDIMediaSender::StaticClass(); +} + +FText UNDIMediaSenderFactory::GetDisplayName() const { return LOCTEXT("NDIMediaSenderFactoryDisplayName", "NDI Media Sender"); } + +uint32 UNDIMediaSenderFactory::GetMenuCategories() const { return EAssetTypeCategories::Media; } + +UObject* UNDIMediaSenderFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +{ + return NewObject(InParent, InClass, InName, Flags | RF_Transactional); +} + +#undef LOCTEXT_NAMESPACE \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Editor/Classes/Factories/NDIMediaSoundWaveFactory.cpp b/Plugins/NDIIO/Source/Editor/Classes/Factories/NDIMediaSoundWaveFactory.cpp new file mode 100644 index 0000000..460c338 --- /dev/null +++ b/Plugins/NDIIO/Source/Editor/Classes/Factories/NDIMediaSoundWaveFactory.cpp @@ -0,0 +1,33 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#include + +#include +#include + +#define LOCTEXT_NAMESPACE "NDIIOEditorMediaSoundWaveFactory" + +UNDIMediaSoundWaveFactory::UNDIMediaSoundWaveFactory(const FObjectInitializer& ObjectInitializer) + : Super(ObjectInitializer) { + + this->bCreateNew = true; + this->bEditAfterNew = true; + + this->SupportedClass = UNDIMediaSoundWave::StaticClass(); +} + +FText UNDIMediaSoundWaveFactory::GetDisplayName() const { return LOCTEXT("NDIMediaSoundWaveFactoryDisplayName", "NDI Media Sound Wave"); } + +uint32 UNDIMediaSoundWaveFactory::GetMenuCategories() const { return EAssetTypeCategories::Sounds; } + +UObject* UNDIMediaSoundWaveFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +{ + return NewObject(InParent, InName, Flags | RF_Transactional); +} + +#undef LOCTEXT_NAMESPACE \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Editor/Classes/Factories/NDIMediaTexture2DFactory.cpp b/Plugins/NDIIO/Source/Editor/Classes/Factories/NDIMediaTexture2DFactory.cpp new file mode 100644 index 0000000..6a3c7a7 --- /dev/null +++ b/Plugins/NDIIO/Source/Editor/Classes/Factories/NDIMediaTexture2DFactory.cpp @@ -0,0 +1,40 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#include + +#include +#include +#include + +#define LOCTEXT_NAMESPACE "NDIIOEditorMediaSoundWaveFactory" + +UNDIMediaTexture2DFactory::UNDIMediaTexture2DFactory(const FObjectInitializer& ObjectInitializer) + : Super(ObjectInitializer) { + + this->bCreateNew = true; + this->bEditAfterNew = true; + + this->SupportedClass = UNDIMediaTexture2D::StaticClass(); +} + +FText UNDIMediaTexture2DFactory::GetDisplayName() const { return LOCTEXT("NDIMediaTexture2DFactoryDisplayName", "NDI Media Texture2D"); } + +uint32 UNDIMediaTexture2DFactory::GetMenuCategories() const { return EAssetTypeCategories::Textures; } + +UObject* UNDIMediaTexture2DFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +{ + if (UNDIMediaTexture2D* Resource = NewObject(InParent, InName, Flags | RF_Transactional)) + { + Resource->UpdateResource(); + return Resource; + } + + return nullptr; +} + +#undef LOCTEXT_NAMESPACE \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Editor/Classes/NDIIOEditorModule.cpp b/Plugins/NDIIO/Source/Editor/Classes/NDIIOEditorModule.cpp new file mode 100644 index 0000000..4dbb3d0 --- /dev/null +++ b/Plugins/NDIIO/Source/Editor/Classes/NDIIOEditorModule.cpp @@ -0,0 +1,136 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#include + +#include +#include +#include +#include +#include + +#include +#include + +#include + +#include + +#include "Widgets/NDIWidgets.h" + + +#define LOCTEXT_NAMESPACE "FNDIEditorModule" +#define IMAGE_BRUSH(RelativePath, ...) 
FSlateImageBrush(StyleInstance->RootToContentDir(RelativePath, TEXT(".png")), __VA_ARGS__) + +#define PLACEMENT_CATEGORY TEXT("NDI(R)") +#define PLACEMENT_LOCTEXT NSLOCTEXT("Vizrt", "NDI", "NDI(R)") +#define PLACEMENT_TEXT TEXT("PMNDI") + +void FNDIIOEditorModule::StartupModule() +{ + const FName& CategoryName = PLACEMENT_CATEGORY; + IPlacementModeModule& PlacementModeModule = IPlacementModeModule::Get(); + + const FVector2D Icon20x20(20.0f, 20.0f); + const FVector2D Icon64x64(64.0f, 64.0f); + + this->StyleInstance = MakeUnique("NDIEditorStyle"); + + if (IPlugin* NDIIOPlugin = IPluginManager::Get().FindPlugin("NDIIOPlugin").Get()) + { + StyleInstance->SetContentRoot(FPaths::Combine(NDIIOPlugin->GetContentDir(), TEXT("Editor/Icons"))); + + StyleInstance->Set("ClassThumbnail.NDIBroadcastActor", new IMAGE_BRUSH("NDIBroadcastActorIcon_x64", Icon64x64)); + StyleInstance->Set("ClassIcon.NDIBroadcastActor", new IMAGE_BRUSH("NDIBroadcastActorIcon_x20", Icon20x20)); + + StyleInstance->Set("ClassThumbnail.NDIReceiveActor", new IMAGE_BRUSH("NDIReceiveActorIcon_x64", Icon64x64)); + StyleInstance->Set("ClassIcon.NDIReceiveActor", new IMAGE_BRUSH("NDIReceiveActorIcon_x20", Icon20x20)); + + StyleInstance->Set("ClassThumbnail.NDIMediaReceiver", new IMAGE_BRUSH("NDIReceiverIcon_x64", Icon64x64)); + StyleInstance->Set("ClassIcon.NDIMediaReceiver", new IMAGE_BRUSH("NDIReceiverIcon_x20", Icon20x20)); + + StyleInstance->Set("ClassThumbnail.NDIMediaSender", new IMAGE_BRUSH("NDISenderIcon_x64", Icon64x64)); + StyleInstance->Set("ClassIcon.NDIMediaSender", new IMAGE_BRUSH("NDISenderIcon_x20", Icon20x20)); + + StyleInstance->Set("ClassThumbnail.NDIMediaSoundWave", new IMAGE_BRUSH("NDISoundWaveIcon_x64", Icon64x64)); + StyleInstance->Set("ClassIcon.NDIMediaSoundWave", new IMAGE_BRUSH("NDISoundWaveIcon_x20", Icon20x20)); + + StyleInstance->Set("ClassThumbnail.NDIMediaTexture2D", new IMAGE_BRUSH("NDIVideoTextureIcon_x64", Icon64x64)); + StyleInstance->Set("ClassIcon.NDIMediaTexture2D", new 
IMAGE_BRUSH("NDIVideoTextureIcon_x20", Icon20x20)); + + FSlateStyleRegistry::RegisterSlateStyle(*StyleInstance.Get()); + + PlacementModeModule.RegisterPlacementCategory( + FPlacementCategoryInfo( + PLACEMENT_LOCTEXT, + CategoryName, + PLACEMENT_TEXT, + 41, // FBuiltInPlacementCategories::Volumes() == 40 + true + ) + ); + } + + // Get the Registered Placement Category + if (const FPlacementCategoryInfo* PlacementCategoryInformation = PlacementModeModule.GetRegisteredPlacementCategory(CategoryName)) + { + // Register the NDI Broadcast Actor a placeable item within the editor + PlacementModeModule.RegisterPlaceableItem(PlacementCategoryInformation->UniqueHandle, MakeShareable( + new FPlaceableItem( + *UActorFactory::StaticClass(), +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later + FAssetData(GetDefault()), +#else + FAssetData(ANDIBroadcastActor::StaticClass()->ClassDefaultObject), +#endif + FName("ClassThumbnail.NDIBroadcastActor"), + NAME_None, + TOptional(), + 10, + NSLOCTEXT("Vizrt", "NDIBroadcastActor", "NDI Broadcast Actor") + )) + ); + + // Register the NDI Receive Actor a placeable item within the editor + PlacementModeModule.RegisterPlaceableItem(PlacementCategoryInformation->UniqueHandle, MakeShareable( + new FPlaceableItem( + *UActorFactory::StaticClass(), +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later + FAssetData(GetDefault()), +#else + FAssetData(ANDIReceiveActor::StaticClass()->ClassDefaultObject), +#endif + FName("ClassThumbnail.NDIReceiveActor"), + NAME_None, + TOptional(), + 20, + NSLOCTEXT("Vizrt", "NDIReceiveActor", "NDI Receive Actor") + )) + ); + } + + FPropertyEditorModule& PropertyModule = FModuleManager::LoadModuleChecked("PropertyEditor"); + PropertyModule.RegisterCustomPropertyTypeLayout(FNDIConnectionInformation::StaticStruct()->GetFName(), 
FOnGetPropertyTypeCustomizationInstance::CreateStatic(&FNDIConnectionInformationCustomization::MakeInstance)); +} + +void FNDIIOEditorModule::ShutdownModule() +{ + FPropertyEditorModule& PropertyModule = FModuleManager::LoadModuleChecked("PropertyEditor"); + PropertyModule.UnregisterCustomPropertyTypeLayout(FNDIConnectionInformation::StaticStruct()->GetFName()); + + FSlateStyleRegistry::UnRegisterSlateStyle(*StyleInstance.Get()); + StyleInstance.Reset(); + + IPlacementModeModule& PlacementModeModule = IPlacementModeModule::Get(); + PlacementModeModule.UnregisterPlacementCategory(PLACEMENT_CATEGORY); +} + +#undef PLACEMENT_CATEGORY +#undef PLACEMENT_LOCTEXT +#undef PLACEMENT_TEXT +#undef IMAGE_BRUSH +#undef LOCTEXT_NAMESPACE \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Editor/Classes/Widgets/NDIWidgets.cpp b/Plugins/NDIIO/Source/Editor/Classes/Widgets/NDIWidgets.cpp new file mode 100644 index 0000000..55efe24 --- /dev/null +++ b/Plugins/NDIIO/Source/Editor/Classes/Widgets/NDIWidgets.cpp @@ -0,0 +1,380 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#define LOCTEXT_NAMESPACE "UNDIWidgets" + + +/** + Organizes NDI sources into a tree +*/ +struct FNDISourceTreeItem +{ + TArray > Children; + FNDIConnectionInformation NDISource; + FText DisplayText; + bool IsExpanded { false }; + bool IsSelected { false }; + + FNDISourceTreeItem() + {} + + FNDISourceTreeItem(const FText& DisplayTextIn) + : DisplayText(DisplayTextIn) + {} + + FNDISourceTreeItem(const FNDIConnectionInformation& Source) + : NDISource(Source) + {} + + FNDISourceTreeItem(TSharedRef&& Child) + { + Children.Add(Child); + } + + static const TSharedRef* FindMachineNode(const FNDISourceTreeItem& RootNode, const FNDIConnectionInformation& SourceItem) + { + const TSharedRef* MachineNode = nullptr; + + if(!SourceItem.MachineName.IsEmpty()) + { + const FString& SearchName = SourceItem.MachineName; + MachineNode = RootNode.Children.FindByPredicate([&SearchName](const TSharedRef& Child) + { + if(Child->Children.Num() > 0) + return Child->Children[0]->NDISource.MachineName == SearchName; + else + return false; + }); + } + else if(!SourceItem.Url.IsEmpty()) + { + const FString& SearchName = SourceItem.Url; + MachineNode = RootNode.Children.FindByPredicate([&SearchName](const TSharedRef& Child) + { + if(Child->Children.Num() > 0) + return Child->Children[0]->NDISource.Url == SearchName; + else + return false; + }); + } + + return MachineNode; + } + + static const TSharedRef* FindStreamNodeInMachineNode(const TSharedRef& MachineNode, const FNDIConnectionInformation& SourceItem) + { + const TSharedRef* StreamNode = nullptr; + + if(!SourceItem.StreamName.IsEmpty()) + { + const FString& SearchName = SourceItem.StreamName; + StreamNode = MachineNode->Children.FindByPredicate([&SearchName](const TSharedRef& Child) + { + return Child->NDISource.StreamName == SearchName; + }); + } + else if(!SourceItem.Url.IsEmpty()) + { + const FString& SearchName = 
SourceItem.Url; + StreamNode = MachineNode->Children.FindByPredicate([&SearchName](const TSharedRef& Child) + { + return Child->NDISource.Url == SearchName; + }); + } + + return StreamNode; + } + + void SetFromSources(const TArray& SourceItems, const FText& SearchingTxt, bool StartExpanded) + { + FNDISourceTreeItem RootNode; + + // + // Build new tree + // + + for(int32 i = 0; i < SourceItems.Num(); ++i) + { + const TSharedRef* MachineNode = FindMachineNode(RootNode, SourceItems[i]); + + if(MachineNode != nullptr) + { + FNDISourceTreeItem* NewNode = new FNDISourceTreeItem(SourceItems[i]); + (*MachineNode)->Children.Add(MakeShareable(NewNode)); + } + else + { + FNDISourceTreeItem* NewNode = new FNDISourceTreeItem(SourceItems[i]); + FNDISourceTreeItem* NewMachineNode = new FNDISourceTreeItem(MakeShareable(NewNode)); + RootNode.Children.Add(MakeShareable(NewMachineNode)); + } + } + + // + // Preserve expansion and selection state by matching with old tree + // + + for(int32 i = 0; i < RootNode.Children.Num(); ++i) + { + const TSharedRef* OldMachineNode = FindMachineNode(*this, RootNode.Children[i]->Children[0]->NDISource); + if(OldMachineNode != nullptr) + { + RootNode.Children[i]->IsExpanded = (*OldMachineNode)->IsExpanded; + + for(int32 j = 0; j < RootNode.Children[i]->Children.Num(); ++j) + { + const TSharedRef* OldStreamNode = FindStreamNodeInMachineNode(*OldMachineNode, RootNode.Children[i]->Children[j]->NDISource); + if(OldStreamNode != nullptr) + { + RootNode.Children[i]->Children[j]->IsSelected = (*OldStreamNode)->IsSelected; + } + } + } + else + { + RootNode.Children[i]->IsExpanded = StartExpanded; + } + } + + if(RootNode.Children.Num() == 0) + { + RootNode.Children.Add(MakeShareable(new FNDISourceTreeItem(SearchingTxt))); + } + + + // + // Set to new tree + // + + *this = RootNode; + } +}; + + + +/** + A menu widget containing NDI sources +*/ + +DECLARE_DELEGATE_OneParam(FOnSourceClicked, FNDIConnectionInformation); + +class SNDISourcesMenu : public 
SCompoundWidget +{ +public: + SLATE_BEGIN_ARGS(SNDISourcesMenu) + : _OnSourceClicked() + {} + + SLATE_EVENT(FOnSourceClicked, OnSourceClicked) + + SLATE_END_ARGS() + + SNDISourcesMenu() + {} + + virtual ~SNDISourcesMenu() + { + FNDIFinderService::EventOnNDISourceCollectionChanged.Remove(SourceCollectionChangedEventHandle); + SourceCollectionChangedEventHandle.Reset(); + } + + void Construct(const FArguments& InArgs) + { + OnSourceClicked = InArgs._OnSourceClicked; + + ChildSlot + [ + SNew(SComboButton) + .ButtonContent() + [ + SNew(STextBlock) + .Font(IDetailLayoutBuilder::GetDetailFont()) + .ToolTipText(LOCTEXT("NDI Sources Tip", "Currently Available NDI Sources")) + .Text(LOCTEXT("NDI Sources", "NDI Sources")) + ] + .OnGetMenuContent_Lambda([this]() -> TSharedRef + { + FMenuBuilder MenuBuilder(true, nullptr); + + for (const auto& Sources : SourceTreeItems.Children) + ConstructSourceMenu(MenuBuilder, Sources.Get()); + + return MenuBuilder.MakeWidget(); + }) + ]; + + UpdateSources = true; + + FNDIFinderService::EventOnNDISourceCollectionChanged.Remove(SourceCollectionChangedEventHandle); + SourceCollectionChangedEventHandle.Reset(); + SourceCollectionChangedEventHandle = FNDIFinderService::EventOnNDISourceCollectionChanged.AddLambda([this]() + { + UpdateSources = true; + }); + } + + virtual void Tick(const FGeometry& AllottedGeometry, const double CurrentTime, const float DeltaTime) override + { + bool IsDifferent = false; + + if (UpdateSources.exchange(false)) + { + IsDifferent = FNDIFinderService::UpdateSourceCollection(SourceItems); + } + + if (SourceItems.Num() == 0) + { + FText NewSearchingTxt; + + double WholeTime = 0.0; + double FracTime = FMath::Modf(CurrentTime, &WholeTime); + if(FracTime < 1/4.0) + NewSearchingTxt = FText(LOCTEXT("NDI Sources Searching0", "Searching")); + else if(FracTime < 2/4.0) + NewSearchingTxt = FText(LOCTEXT("NDI Sources Searching1", "Searching.")); + else if(FracTime < 3/4.0) + NewSearchingTxt = FText(LOCTEXT("NDI Sources 
Searching2", "Searching..")); + else + NewSearchingTxt = FText(LOCTEXT("NDI Sources Searching3", "Searching...")); + + if(!NewSearchingTxt.EqualTo(SearchingTxt)) + { + SearchingTxt = NewSearchingTxt; + IsDifferent = true; + } + } + + if (IsDifferent) + { + SourceTreeItems.SetFromSources(SourceItems, SearchingTxt, false); + Invalidate(EInvalidateWidgetReason::PaintAndVolatility | EInvalidateWidgetReason::ChildOrder); + } + + SCompoundWidget::Tick(AllottedGeometry, CurrentTime, DeltaTime); + } + +protected: + void ConstructSourceMenu(FMenuBuilder& MenuBuilder, const FNDISourceTreeItem& SourceTreeItem) + { + if (SourceTreeItem.NDISource.IsValid()) + { + MenuBuilder.AddMenuEntry( + FText::FromString(SourceTreeItem.NDISource.StreamName), + FText::GetEmpty(), + FSlateIcon(), + FUIAction(FExecuteAction::CreateLambda([this,&SourceTreeItem]() + { + this->OnSourceClicked.ExecuteIfBound(SourceTreeItem.NDISource); + })), + NAME_None, + EUserInterfaceActionType::Button + ); + } + else if (SourceTreeItem.Children.Num() > 0) + { + MenuBuilder.AddSubMenu( + FText::FromString(SourceTreeItem.Children[0]->NDISource.MachineName), + FText::GetEmpty(), + FNewMenuDelegate::CreateLambda([this,&SourceTreeItem](FMenuBuilder& MenuBuilder) + { + for(const auto& ChildSource : SourceTreeItem.Children) + ConstructSourceMenu(MenuBuilder, ChildSource.Get()); + }) + ); + } + else if (!SourceTreeItem.DisplayText.IsEmpty()) + { + MenuBuilder.AddMenuEntry( + SourceTreeItem.DisplayText, + FText::GetEmpty(), + FSlateIcon(), + FUIAction(FExecuteAction::CreateLambda([this] + { + })), + NAME_None, + EUserInterfaceActionType::Button + ); + } + } + +private: + TArray SourceItems; + FText SearchingTxt; + FNDISourceTreeItem SourceTreeItems; + + FDelegateHandle SourceCollectionChangedEventHandle; + std::atomic_bool UpdateSources { false }; + + FOnSourceClicked OnSourceClicked; +}; + + +/** + Customization of NDIConnectionInformation property + by including a menu to select from currently available NDI sources 
+*/ + +TSharedRef FNDIConnectionInformationCustomization::MakeInstance() +{ + return MakeShareable(new FNDIConnectionInformationCustomization); +} + +void FNDIConnectionInformationCustomization::CustomizeHeader(TSharedRef PropertyHandle, FDetailWidgetRow& HeaderRow, IPropertyTypeCustomizationUtils& CustomizationUtils) +{ + HeaderRow.NameContent() + [ + PropertyHandle->CreatePropertyNameWidget() + ] + .ValueContent() + [ + SNew(SNDISourcesMenu) + .OnSourceClicked_Lambda([this,PropertyHandle](FNDIConnectionInformation Source) + { + TArray RawData; + PropertyHandle->AccessRawData(RawData); + FNDIConnectionInformation* ConnectionInformation = reinterpret_cast(RawData[0]); + if (ConnectionInformation != nullptr) + { + ConnectionInformation->Url = ""; + PropertyHandle->GetChildHandle("SourceName")->SetValue(Source.SourceName); + } + }) + ].IsEnabled(true); +} + +void FNDIConnectionInformationCustomization::CustomizeChildren(TSharedRef PropertyHandle, IDetailChildrenBuilder& ChildBuilder, IPropertyTypeCustomizationUtils& CustomizationUtils) +{ + TSharedPtr PropertyUtils = CustomizationUtils.GetPropertyUtilities(); + uint32 NumberOfChild; + if (PropertyHandle->GetNumChildren(NumberOfChild) == FPropertyAccess::Success) + { + for (uint32 Index = 0; Index < NumberOfChild; ++Index) + { + TSharedRef ChildPropertyHandle = PropertyHandle->GetChildHandle(Index).ToSharedRef(); + ChildBuilder.AddProperty(ChildPropertyHandle) + .ShowPropertyButtons(true) + .IsEnabled(MakeAttributeLambda([=] { return !PropertyHandle->IsEditConst() && PropertyUtils->IsPropertyEditingEnabled(); })); + } + } +} + + +#undef LOCTEXT_NAMESPACE diff --git a/Plugins/NDIIO/Source/Editor/NDIIOEditor.Build.cs b/Plugins/NDIIO/Source/Editor/NDIIOEditor.Build.cs new file mode 100644 index 0000000..877bd23 --- /dev/null +++ b/Plugins/NDIIO/Source/Editor/NDIIOEditor.Build.cs @@ -0,0 +1,92 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. 
+ + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +using System; +using System.IO; + +using UnrealBuildTool; + +public class NDIIOEditor : ModuleRules +{ + public NDIIOEditor(ReadOnlyTargetRules Target) : base(Target) + { +#if UE_5_2_OR_LATER + IWYUSupport = IWYUSupport.Full; +#else + bEnforceIWYU = true; +#endif + PCHUsage = PCHUsageMode.UseExplicitOrSharedPCHs; + + #region Public Includes + + if (Directory.Exists(Path.Combine(ModuleDirectory, "Public"))) + { + PublicIncludePaths.AddRange(new string[] { + // ... add public include paths required here ... + Path.Combine(ModuleDirectory, "Public" ), + }); + } + + PublicDependencyModuleNames.AddRange(new string[] { + "Engine", + "Core", + "CoreUObject" + }); + + #endregion + + if (Target.bBuildEditor == true) + { + #region Private Includes + + if (Directory.Exists(Path.Combine(ModuleDirectory, "Private"))) + { + PrivateIncludePaths.AddRange(new string[] { + // ... add other private include paths required here ... 
+ Path.Combine(ModuleDirectory, "Private" ), + Path.Combine(ModuleDirectory, "../Core/Private"), + }); + } + + #endregion + + PrivateIncludePathModuleNames.AddRange(new string[] { + "AssetTools", + "TargetPlatform", + }); + + PrivateDependencyModuleNames.AddRange(new string[] { + "Projects", + "UnrealEd", + "AssetTools", + "MaterialUtilities", + "Renderer", + "RenderCore", + "PlacementMode", + "CinematicCamera", + + "RHI", + "Slate", + "SlateCore", + "UMG", + "ImageWrapper", + + "Media", + "MediaAssets", + "MediaUtils", + + "AssetTools", + "TargetPlatform", + "PropertyEditor", + "DetailCustomizations", + "EditorStyle", + + "NDIIO" + }); + } + } +} diff --git a/Plugins/NDIIO/Source/Editor/Public/Factories/NDIMediaReceiverFactory.h b/Plugins/NDIIO/Source/Editor/Public/Factories/NDIMediaReceiverFactory.h new file mode 100644 index 0000000..d97a88c --- /dev/null +++ b/Plugins/NDIIO/Source/Editor/Public/Factories/NDIMediaReceiverFactory.h @@ -0,0 +1,30 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#pragma once + +#include +#include +#include + +#include "NDIMediaReceiverFactory.generated.h" + +/** + Factory Class used to create assets via content browser for NDI Receiver objects +*/ +UCLASS() +class NDIIOEDITOR_API UNDIMediaReceiverFactory : public UFactory +{ + GENERATED_UCLASS_BODY() + + public: + virtual FText GetDisplayName() const override; + virtual uint32 GetMenuCategories() const override; + + virtual bool ShouldShowInNewMenu() const override { return true; } + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; +}; diff --git a/Plugins/NDIIO/Source/Editor/Public/Factories/NDIMediaSenderFactory.h b/Plugins/NDIIO/Source/Editor/Public/Factories/NDIMediaSenderFactory.h new file mode 100644 index 0000000..2a4bdd5 --- /dev/null +++ b/Plugins/NDIIO/Source/Editor/Public/Factories/NDIMediaSenderFactory.h @@ -0,0 +1,29 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#pragma once + +#include +#include +#include + +#include "NDIMediaSenderFactory.generated.h" + +/** + Factory Class used to create assets via content browser for NDI Sender objects +*/ +UCLASS() +class NDIIOEDITOR_API UNDIMediaSenderFactory : public UFactory +{ + GENERATED_UCLASS_BODY() + + public: + virtual FText GetDisplayName() const override; + virtual uint32 GetMenuCategories() const override; + + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Editor/Public/Factories/NDIMediaSoundWaveFactory.h b/Plugins/NDIIO/Source/Editor/Public/Factories/NDIMediaSoundWaveFactory.h new file mode 100644 index 0000000..7a36b31 --- /dev/null +++ b/Plugins/NDIIO/Source/Editor/Public/Factories/NDIMediaSoundWaveFactory.h @@ -0,0 +1,29 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#pragma once + +#include +#include +#include + +#include "NDIMediaSoundWaveFactory.generated.h" + +/** + Factory Class used to create assets via content browser for NDI Sound Wave objects +*/ +UCLASS() +class NDIIOEDITOR_API UNDIMediaSoundWaveFactory : public UFactory +{ + GENERATED_UCLASS_BODY() + + public: + virtual FText GetDisplayName() const override; + virtual uint32 GetMenuCategories() const override; + + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Editor/Public/Factories/NDIMediaTexture2DFactory.h b/Plugins/NDIIO/Source/Editor/Public/Factories/NDIMediaTexture2DFactory.h new file mode 100644 index 0000000..2eeaa4a --- /dev/null +++ b/Plugins/NDIIO/Source/Editor/Public/Factories/NDIMediaTexture2DFactory.h @@ -0,0 +1,29 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#pragma once + +#include +#include +#include + +#include "NDIMediaTexture2DFactory.generated.h" + +/** + Factory Class used to create assets via content browser for NDI Texture2D objects +*/ +UCLASS() +class NDIIOEDITOR_API UNDIMediaTexture2DFactory : public UFactory +{ + GENERATED_UCLASS_BODY() + + public: + virtual FText GetDisplayName() const override; + virtual uint32 GetMenuCategories() const override; + + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; +}; \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Editor/Public/NDIIOEditorAPI.h b/Plugins/NDIIO/Source/Editor/Public/NDIIOEditorAPI.h new file mode 100644 index 0000000..89f5051 --- /dev/null +++ b/Plugins/NDIIO/Source/Editor/Public/NDIIOEditorAPI.h @@ -0,0 +1,12 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#pragma once + +#include + +#define NDIIO_EDITOR_MODULE_NAME FName(TEXT("NDIIOEditor")) \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Editor/Public/NDIIOEditorModule.h b/Plugins/NDIIO/Source/Editor/Public/NDIIOEditorModule.h new file mode 100644 index 0000000..68bfb13 --- /dev/null +++ b/Plugins/NDIIO/Source/Editor/Public/NDIIOEditorModule.h @@ -0,0 +1,25 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#pragma once + +#include + +#include +#include + +class NDIIOEDITOR_API FNDIIOEditorModule : public IModuleInterface +{ + public: + virtual void StartupModule() override; + virtual void ShutdownModule() override; + + private: + TUniquePtr StyleInstance; +}; + +IMPLEMENT_MODULE(FNDIIOEditorModule, NDIIOEditor) \ No newline at end of file diff --git a/Plugins/NDIIO/Source/Editor/Public/Widgets/NDIWidgets.h b/Plugins/NDIIO/Source/Editor/Public/Widgets/NDIWidgets.h new file mode 100644 index 0000000..3e5bca2 --- /dev/null +++ b/Plugins/NDIIO/Source/Editor/Public/Widgets/NDIWidgets.h @@ -0,0 +1,29 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#pragma once + +#include + +#include + + +/** + Customization of NDIConnectionInformation property + by including a menu to select from currently available NDI sources +*/ +class FNDIConnectionInformationCustomization : public IPropertyTypeCustomization +{ +public: + static TSharedRef MakeInstance(); + + // IDetailCustomization interface + virtual void CustomizeHeader(TSharedRef PropertyHandle, FDetailWidgetRow& HeaderRow, IPropertyTypeCustomizationUtils& CustomizationUtils) override; + virtual void CustomizeChildren(TSharedRef PropertyHandle, IDetailChildrenBuilder& ChildBuilder, IPropertyTypeCustomizationUtils& CustomizationUtils) override; + +private: +}; diff --git a/Plugins/NDIIO/Source/Shaders/NDIIOShaders.build.cs b/Plugins/NDIIO/Source/Shaders/NDIIOShaders.build.cs new file mode 100644 index 0000000..118edc2 --- /dev/null +++ b/Plugins/NDIIO/Source/Shaders/NDIIOShaders.build.cs @@ -0,0 +1,38 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. 
For more information, please review the license and the NDI SDK documentation. +*/ + +using System; +using System.IO; + +using UnrealBuildTool; + +public class NDIIOShaders : ModuleRules +{ + public NDIIOShaders(ReadOnlyTargetRules Target) : base(Target) + { +#if UE_5_2_OR_LATER + IWYUSupport = IWYUSupport.Full; +#else + bEnforceIWYU = true; +#endif + PCHUsage = PCHUsageMode.UseExplicitOrSharedPCHs; + + PublicDependencyModuleNames.AddRange(new string[] { + "Engine", + "Core", + "CoreUObject", + "Projects", + "InputCore" + }); + + PrivateDependencyModuleNames.AddRange(new string[] { + "Renderer", + "RenderCore", + "RHI" + }); + } +} diff --git a/Plugins/NDIIO/Source/Shaders/Private/NDIShaders.cpp b/Plugins/NDIIO/Source/Shaders/Private/NDIShaders.cpp new file mode 100644 index 0000000..5e3d4de --- /dev/null +++ b/Plugins/NDIIO/Source/Shaders/Private/NDIShaders.cpp @@ -0,0 +1,114 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +#include "NDIShaders.h" + +#include "Modules/ModuleManager.h" +#include "Interfaces/IPluginManager.h" + +#include "Misc/Paths.h" +#include "Misc/EngineVersionComparison.h" + +#include "Engine/TextureRenderTarget2D.h" +#include "Engine/World.h" +#include "PipelineStateCache.h" +#include "SceneUtils.h" +#include "SceneInterface.h" + + + +BEGIN_GLOBAL_SHADER_PARAMETER_STRUCT(FNDIIOShaderUB, ) + SHADER_PARAMETER(uint32, InputWidth) + SHADER_PARAMETER(uint32, InputHeight) + SHADER_PARAMETER(uint32, OutputWidth) + SHADER_PARAMETER(uint32, OutputHeight) + SHADER_PARAMETER(FVector2f, UVOffset) + SHADER_PARAMETER(FVector2f, UVScale) + SHADER_PARAMETER(uint32, ColorCorrection) + SHADER_PARAMETER(float, AlphaScale) + SHADER_PARAMETER(float, AlphaOffset) + SHADER_PARAMETER_TEXTURE(Texture2D, InputTarget) + SHADER_PARAMETER_TEXTURE(Texture2D, InputAlphaTarget) + SHADER_PARAMETER_SAMPLER(SamplerState, SamplerP) + SHADER_PARAMETER_SAMPLER(SamplerState, SamplerB) + SHADER_PARAMETER_SAMPLER(SamplerState, SamplerT) +END_GLOBAL_SHADER_PARAMETER_STRUCT() + +IMPLEMENT_GLOBAL_SHADER_PARAMETER_STRUCT(FNDIIOShaderUB, "NDIIOShaderUB"); + +IMPLEMENT_GLOBAL_SHADER(FNDIIOShaderVS, "/Plugin/NDIIOPlugin/Private/NDIIOShaders.usf", "NDIIOMainVS", SF_Vertex); +IMPLEMENT_GLOBAL_SHADER(FNDIIOShaderBGRAtoUYVYPS, "/Plugin/NDIIOPlugin/Private/NDIIOShaders.usf", "NDIIOBGRAtoUYVYPS", SF_Pixel); +IMPLEMENT_GLOBAL_SHADER(FNDIIOShaderBGRAtoAlphaEvenPS, "/Plugin/NDIIOPlugin/Private/NDIIOShaders.usf", "NDIIOBGRAtoAlphaEvenPS", SF_Pixel); +IMPLEMENT_GLOBAL_SHADER(FNDIIOShaderBGRAtoAlphaOddPS, "/Plugin/NDIIOPlugin/Private/NDIIOShaders.usf", "NDIIOBGRAtoAlphaOddPS", SF_Pixel); +IMPLEMENT_GLOBAL_SHADER(FNDIIOShaderUYVYtoBGRAPS, "/Plugin/NDIIOPlugin/Private/NDIIOShaders.usf", "NDIIOUYVYtoBGRAPS", SF_Pixel); +IMPLEMENT_GLOBAL_SHADER(FNDIIOShaderUYVAtoBGRAPS, "/Plugin/NDIIOPlugin/Private/NDIIOShaders.usf", "NDIIOUYVAtoBGRAPS", SF_Pixel); + + + +void FNDIIOShaderPS::SetParameters(FRHICommandList& CommandList, 
const Params& params) +{ + FNDIIOShaderUB UB; + { + UB.InputWidth = params.InputTarget->GetSizeX(); + UB.InputHeight = params.InputTarget->GetSizeY(); + UB.OutputWidth = params.OutputSize.X; + UB.OutputHeight = params.OutputSize.Y; + UB.UVOffset = static_cast(params.UVOffset); + UB.UVScale = static_cast(params.UVScale); + UB.ColorCorrection = static_cast(params.ColorCorrection); + + /* + * Alpha' = Alpha * AlphaScale + AlphaOffset + * = (Alpha - AlphaMin) / (AlphaMax - AlphaMin) + * = Alpha / (AlphaMax - AlphaMin) - AlphaMin / (AlphaMax - AlphaMin) + * AlphaScale = 1 / (AlphaMax - AlphaMin) + * AlphaOffset = - AlphaMin / (AlphaMax - AlphaMin) + */ + float AlphaRange = params.AlphaMinMax[1] - params.AlphaMinMax[0]; + if (AlphaRange != 0.f) + { + UB.AlphaScale = 1.f / AlphaRange; + UB.AlphaOffset = - params.AlphaMinMax[0] / AlphaRange; + } + else + { + UB.AlphaScale = 0.f; + UB.AlphaOffset = -params.AlphaMinMax[0]; + } + + UB.InputTarget = params.InputTarget; + UB.InputAlphaTarget = params.InputAlphaTarget; + UB.SamplerP = TStaticSamplerState::GetRHI(); + UB.SamplerB = TStaticSamplerState::GetRHI(); + UB.SamplerT = TStaticSamplerState::GetRHI(); + } + + TUniformBufferRef Data = TUniformBufferRef::CreateUniformBufferImmediate(UB, UniformBuffer_SingleFrame); +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later + FRHIBatchedShaderParameters& BatchedParameters = CommandList.GetScratchShaderParameters(); + SetUniformBufferParameter(BatchedParameters, GetUniformBufferParameter(), Data); + CommandList.SetBatchedShaderParameters(CommandList.GetBoundPixelShader(), BatchedParameters); +#else + SetUniformBufferParameter(CommandList, CommandList.GetBoundPixelShader(), GetUniformBufferParameter(), Data); +#endif +} + + +class FNDIIOShaders : public INDIIOShaders +{ + /** IModuleInterface implementation */ + virtual void StartupModule() override + { + FString PluginShaderDir = 
FPaths::Combine(IPluginManager::Get().FindPlugin(TEXT("NDIIOPlugin"))->GetBaseDir(), TEXT("Shaders")); + AddShaderSourceDirectoryMapping(TEXT("/Plugin/NDIIOPlugin"), PluginShaderDir); + } + virtual void ShutdownModule() override + { + } +}; + +IMPLEMENT_MODULE( FNDIIOShaders, NDIIOShaders ) diff --git a/Plugins/NDIIO/Source/Shaders/Public/NDIShaders.h b/Plugins/NDIIO/Source/Shaders/Public/NDIShaders.h new file mode 100644 index 0000000..14c8e8e --- /dev/null +++ b/Plugins/NDIIO/Source/Shaders/Public/NDIShaders.h @@ -0,0 +1,138 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. +*/ + +#pragma once + +#include "CoreMinimal.h" +#include "RHI.h" +#include "RenderResource.h" +#include "Shader.h" +#include "GlobalShader.h" +#include "ShaderParameterUtils.h" +#include "RHIStaticStates.h" +#include "Misc/EngineVersionComparison.h" +#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 2)) // 5.2 or later +#include "DataDrivenShaderPlatformInfo.h" +#endif + +#include "Logging/LogMacros.h" + +DECLARE_LOG_CATEGORY_EXTERN(LogNDIIOShaders, Log, All); + + +class FNDIIOShaderVS : public FGlobalShader +{ + DECLARE_EXPORTED_SHADER_TYPE(FNDIIOShaderVS, Global, NDIIOSHADERS_API); + +public: + static bool ShouldCompilePermutation(const FGlobalShaderPermutationParameters& Parameters) + { + return IsFeatureLevelSupported(Parameters.Platform, ERHIFeatureLevel::ES3_1); + } + + FNDIIOShaderVS() + {} + + FNDIIOShaderVS(const ShaderMetaType::CompiledShaderInitializerType& Initializer) + : FGlobalShader(Initializer) + {} +}; + + +class FNDIIOShaderPS : public FGlobalShader +{ +public: + static bool ShouldCompilePermutation(const FGlobalShaderPermutationParameters& Parameters) + { + return IsFeatureLevelSupported(Parameters.Platform, 
ERHIFeatureLevel::ES3_1); + } + + FNDIIOShaderPS() + {} + + FNDIIOShaderPS(const ShaderMetaType::CompiledShaderInitializerType& Initializer) + : FGlobalShader(Initializer) + {} + + enum class EColorCorrection : uint32 + { + None = 0, + sRGBToLinear, + LinearTosRGB + }; + + struct Params + { + Params(const TRefCountPtr& InputTargetIn, const TRefCountPtr& InputAlphaTargetIn, FIntPoint OutputSizeIn, FVector2D UVOffsetIn, FVector2D UVScaleIn, EColorCorrection ColorCorrectionIn, FVector2D AlphaMinMaxIn) + : InputTarget(InputTargetIn) + , InputAlphaTarget(InputAlphaTargetIn) + , OutputSize(OutputSizeIn) + , UVOffset(UVOffsetIn) + , UVScale(UVScaleIn) + , ColorCorrection(ColorCorrectionIn) + , AlphaMinMax(AlphaMinMaxIn) + {} + + TRefCountPtr InputTarget; + TRefCountPtr InputAlphaTarget; + FIntPoint OutputSize; + FVector2D UVOffset; + FVector2D UVScale; + EColorCorrection ColorCorrection; + FVector2D AlphaMinMax; + }; + + NDIIOSHADERS_API void SetParameters(FRHICommandList& CommandList, const Params& params); + +protected: +}; + + +class FNDIIOShaderBGRAtoUYVYPS : public FNDIIOShaderPS +{ + DECLARE_EXPORTED_SHADER_TYPE(FNDIIOShaderBGRAtoUYVYPS, Global, NDIIOSHADERS_API); + +public: + using FNDIIOShaderPS::FNDIIOShaderPS; +}; + +class FNDIIOShaderBGRAtoAlphaEvenPS : public FNDIIOShaderPS +{ + DECLARE_EXPORTED_SHADER_TYPE(FNDIIOShaderBGRAtoAlphaEvenPS, Global, NDIIOSHADERS_API); + +public: + using FNDIIOShaderPS::FNDIIOShaderPS; +}; + +class FNDIIOShaderBGRAtoAlphaOddPS : public FNDIIOShaderPS +{ + DECLARE_EXPORTED_SHADER_TYPE(FNDIIOShaderBGRAtoAlphaOddPS, Global, NDIIOSHADERS_API); + +public: + using FNDIIOShaderPS::FNDIIOShaderPS; +}; + +class FNDIIOShaderUYVYtoBGRAPS : public FNDIIOShaderPS +{ + DECLARE_EXPORTED_SHADER_TYPE(FNDIIOShaderUYVYtoBGRAPS, Global, NDIIOSHADERS_API); + +public: + using FNDIIOShaderPS::FNDIIOShaderPS; +}; + +class FNDIIOShaderUYVAtoBGRAPS : public FNDIIOShaderPS +{ + DECLARE_EXPORTED_SHADER_TYPE(FNDIIOShaderUYVAtoBGRAPS, Global, NDIIOSHADERS_API); 
+ +public: + using FNDIIOShaderPS::FNDIIOShaderPS; +}; + +class INDIIOShaders : public IModuleInterface +{ +public: +}; diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.DynamicLoad.h b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.DynamicLoad.h new file mode 100644 index 0000000..4df80a8 --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.DynamicLoad.h @@ -0,0 +1,635 @@ +#pragma once + +// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review +// the SDK documentation for the description of the full license terms, which are also provided in the file +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any +// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be +// downloaded at http://ndi.video/ +// +//*********************************************************************************************************** +// +// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files(the "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +// following conditions : +// +// The above copyright notice and this permission notice shall be included in all copies or substantial +// portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO +// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +// THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// +//*********************************************************************************************************** + +typedef struct NDIlib_v6 { + // v1.5 + union { + bool (*initialize)(void); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_initialize)(void); + }; + + union { + void (*destroy)(void); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_destroy)(void); + }; + union { + const char* (*version)(void); + PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_version)(void); + }; + + union { + bool (*is_supported_CPU)(void); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_is_supported_CPU)(void); + }; + + union { + PROCESSINGNDILIB_DEPRECATED NDIlib_find_instance_t (*find_create)(const NDIlib_find_create_t* p_create_settings); + PROCESSINGNDILIB_DEPRECATED NDIlib_find_instance_t (*NDIlib_find_create)(const NDIlib_find_create_t* p_create_settings); + }; + + union { + NDIlib_find_instance_t (*find_create_v2)(const NDIlib_find_create_t* p_create_settings); + PROCESSINGNDILIB_DEPRECATED NDIlib_find_instance_t (*NDIlib_find_create_v2)(const NDIlib_find_create_t* p_create_settings); + }; + + union { + void (*find_destroy)(NDIlib_find_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_find_destroy)(NDIlib_find_instance_t p_instance); + }; + + union { + const NDIlib_source_t* (*find_get_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources, uint32_t timeout_in_ms); + PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_find_get_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources, uint32_t timeout_in_ms); + }; + + union { + NDIlib_send_instance_t (*send_create)(const NDIlib_send_create_t* p_create_settings); + PROCESSINGNDILIB_DEPRECATED NDIlib_send_instance_t 
(*NDIlib_send_create)(const NDIlib_send_create_t* p_create_settings); + }; + + union { + void (*send_destroy)(NDIlib_send_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_destroy)(NDIlib_send_instance_t p_instance); + }; + + union { + PROCESSINGNDILIB_DEPRECATED void (*send_send_video)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_video)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); + }; + + union { + PROCESSINGNDILIB_DEPRECATED void (*send_send_video_async)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_video_async)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); + }; + + union { + PROCESSINGNDILIB_DEPRECATED void (*send_send_audio)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_audio)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data); + }; + + union { + void (*send_send_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + }; + + union { + NDIlib_frame_type_e (*send_capture)(NDIlib_send_instance_t p_instance, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); + PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e (*NDIlib_send_capture)(NDIlib_send_instance_t p_instance, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); + }; + + union { + void (*send_free_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_free_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + }; + + union { + bool 
(*send_get_tally)(NDIlib_send_instance_t p_instance, NDIlib_tally_t* p_tally, uint32_t timeout_in_ms); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_send_get_tally)(NDIlib_send_instance_t p_instance, NDIlib_tally_t* p_tally, uint32_t timeout_in_ms); + }; + + union { + int (*send_get_no_connections)(NDIlib_send_instance_t p_instance, uint32_t timeout_in_ms); + PROCESSINGNDILIB_DEPRECATED int (*NDIlib_send_get_no_connections)(NDIlib_send_instance_t p_instance, uint32_t timeout_in_ms); + }; + + union { + void (*send_clear_connection_metadata)(NDIlib_send_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_clear_connection_metadata)(NDIlib_send_instance_t p_instance); + }; + + union { + void (*send_add_connection_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_add_connection_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + }; + + union { + void (*send_set_failover)(NDIlib_send_instance_t p_instance, const NDIlib_source_t* p_failover_source); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_set_failover)(NDIlib_send_instance_t p_instance, const NDIlib_source_t* p_failover_source); + }; + + union { + PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t (*recv_create_v2)(const NDIlib_recv_create_t* p_create_settings); + PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t (*NDIlib_recv_create_v2)(const NDIlib_recv_create_t* p_create_settings); + }; + + union { + PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t (*recv_create)(const NDIlib_recv_create_t* p_create_settings); + PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t (*NDIlib_recv_create)(const NDIlib_recv_create_t* p_create_settings); + }; + + union { + void (*recv_destroy)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_destroy)(NDIlib_recv_instance_t p_instance); + }; + + union { + PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e 
(*recv_capture)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_t* p_video_data, NDIlib_audio_frame_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); + PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e (*NDIlib_recv_capture)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_t* p_video_data, NDIlib_audio_frame_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); + }; + + union { + PROCESSINGNDILIB_DEPRECATED void (*recv_free_video)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_video)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); + }; + + union { + PROCESSINGNDILIB_DEPRECATED void (*recv_free_audio)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_audio)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data); + }; + + union { + void (*recv_free_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + }; + + union { + bool (*recv_send_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_send_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + }; + + union { + bool (*recv_set_tally)(NDIlib_recv_instance_t p_instance, const NDIlib_tally_t* p_tally); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_set_tally)(NDIlib_recv_instance_t p_instance, const NDIlib_tally_t* p_tally); + }; + + union { + void (*recv_get_performance)(NDIlib_recv_instance_t p_instance, NDIlib_recv_performance_t* p_total, NDIlib_recv_performance_t* p_dropped); + PROCESSINGNDILIB_DEPRECATED void 
(*NDIlib_recv_get_performance)(NDIlib_recv_instance_t p_instance, NDIlib_recv_performance_t* p_total, NDIlib_recv_performance_t* p_dropped); + }; + + union { + void (*recv_get_queue)(NDIlib_recv_instance_t p_instance, NDIlib_recv_queue_t* p_total); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_get_queue)(NDIlib_recv_instance_t p_instance, NDIlib_recv_queue_t* p_total); + }; + + union { + void (*recv_clear_connection_metadata)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_clear_connection_metadata)(NDIlib_recv_instance_t p_instance); + }; + + union { + void (*recv_add_connection_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_add_connection_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + }; + + union { + int (*recv_get_no_connections)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED int (*NDIlib_recv_get_no_connections)(NDIlib_recv_instance_t p_instance); + }; + + union { + NDIlib_routing_instance_t (*routing_create)(const NDIlib_routing_create_t* p_create_settings); + PROCESSINGNDILIB_DEPRECATED NDIlib_routing_instance_t (*NDIlib_routing_create)(const NDIlib_routing_create_t* p_create_settings); + }; + + union { + void (*routing_destroy)(NDIlib_routing_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_routing_destroy)(NDIlib_routing_instance_t p_instance); + }; + + union { + bool (*routing_change)(NDIlib_routing_instance_t p_instance, const NDIlib_source_t* p_source); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_routing_change)(NDIlib_routing_instance_t p_instance, const NDIlib_source_t* p_source); + }; + + union { + bool (*routing_clear)(NDIlib_routing_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_routing_clear)(NDIlib_routing_instance_t p_instance); + }; + + union { + void (*util_send_send_audio_interleaved_16s)(NDIlib_send_instance_t 
p_instance, const NDIlib_audio_frame_interleaved_16s_t* p_audio_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_send_send_audio_interleaved_16s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_16s_t* p_audio_data); + }; + + union { + PROCESSINGNDILIB_DEPRECATED void (*util_audio_to_interleaved_16s)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_to_interleaved_16s)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst); + }; + + union { + PROCESSINGNDILIB_DEPRECATED void (*util_audio_from_interleaved_16s)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_t* p_dst); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_from_interleaved_16s)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_t* p_dst); + }; + + // v2 + union { + bool (*find_wait_for_sources)(NDIlib_find_instance_t p_instance, uint32_t timeout_in_ms); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_find_wait_for_sources)(NDIlib_find_instance_t p_instance, uint32_t timeout_in_ms); + }; + + union { + const NDIlib_source_t* (*find_get_current_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources); + PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_find_get_current_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources); + }; + + union { + PROCESSINGNDILIB_DEPRECATED void (*util_audio_to_interleaved_32f)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_to_interleaved_32f)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst); + }; + + union { + PROCESSINGNDILIB_DEPRECATED void (*util_audio_from_interleaved_32f)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_t* p_dst); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_from_interleaved_32f)(const 
NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_t* p_dst); + }; + + union { + void (*util_send_send_audio_interleaved_32f)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32f_t* p_audio_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_send_send_audio_interleaved_32f)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32f_t* p_audio_data); + }; + + // v3 + union { + void (*recv_free_video_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_video_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); + }; + + union { + void (*recv_free_audio_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_audio_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data); + }; + + union { + NDIlib_frame_type_e (*recv_capture_v2)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v2_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data. + PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e (*NDIlib_recv_capture_v2)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v2_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data. 
+ }; + + union { + void (*send_send_video_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_video_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); + }; + + union { + void (*send_send_video_async_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_video_async_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); + }; + + union { + void (*send_send_audio_v2)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_audio_v2)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data); + }; + + union { + void (*util_audio_to_interleaved_16s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_to_interleaved_16s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst); + }; + + union { + void (*util_audio_from_interleaved_16s_v2)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_v2_t* p_dst); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_from_interleaved_16s_v2)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_v2_t* p_dst); + }; + + union { + void (*util_audio_to_interleaved_32f_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_to_interleaved_32f_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst); + }; + + union { + void (*util_audio_from_interleaved_32f_v2)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_v2_t* p_dst); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_from_interleaved_32f_v2)(const 
NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_v2_t* p_dst); + }; + + // V3.01 + union { + void (*recv_free_string)(NDIlib_recv_instance_t p_instance, const char* p_string); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_string)(NDIlib_recv_instance_t p_instance, const char* p_string); + }; + + union { + bool (*recv_ptz_is_supported)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_is_supported)(NDIlib_recv_instance_t p_instance); + }; + + union { + // This functionality is now provided via external NDI recording, see SDK documentation. + PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_is_supported)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_is_supported)(NDIlib_recv_instance_t p_instance); + }; + + union { + const char* (*recv_get_web_control)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_recv_get_web_control)(NDIlib_recv_instance_t p_instance); + }; + + union { + bool (*recv_ptz_zoom)(NDIlib_recv_instance_t p_instance, const float zoom_value); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_zoom)(NDIlib_recv_instance_t p_instance, const float zoom_value); + }; + + union { + bool (*recv_ptz_zoom_speed)(NDIlib_recv_instance_t p_instance, const float zoom_speed); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_zoom_speed)(NDIlib_recv_instance_t p_instance, const float zoom_speed); + }; + + union { + bool (*recv_ptz_pan_tilt)(NDIlib_recv_instance_t p_instance, const float pan_value, const float tilt_value); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_pan_tilt)(NDIlib_recv_instance_t p_instance, const float pan_value, const float tilt_value); + }; + + union { + bool (*recv_ptz_pan_tilt_speed)(NDIlib_recv_instance_t p_instance, const float pan_speed, const float tilt_speed); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_pan_tilt_speed)(NDIlib_recv_instance_t p_instance, const float 
pan_speed, const float tilt_speed); + }; + + union { + bool (*recv_ptz_store_preset)(NDIlib_recv_instance_t p_instance, const int preset_no); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_store_preset)(NDIlib_recv_instance_t p_instance, const int preset_no); + }; + + union { + bool (*recv_ptz_recall_preset)(NDIlib_recv_instance_t p_instance, const int preset_no, const float speed); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_recall_preset)(NDIlib_recv_instance_t p_instance, const int preset_no, const float speed); + }; + + union { + bool (*recv_ptz_auto_focus)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_auto_focus)(NDIlib_recv_instance_t p_instance); + }; + + union { + bool (*recv_ptz_focus)(NDIlib_recv_instance_t p_instance, const float focus_value); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_focus)(NDIlib_recv_instance_t p_instance, const float focus_value); + }; + + union { + bool (*recv_ptz_focus_speed)(NDIlib_recv_instance_t p_instance, const float focus_speed); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_focus_speed)(NDIlib_recv_instance_t p_instance, const float focus_speed); + }; + + union { + bool (*recv_ptz_white_balance_auto)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_white_balance_auto)(NDIlib_recv_instance_t p_instance); + }; + + union { + bool (*recv_ptz_white_balance_indoor)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_white_balance_indoor)(NDIlib_recv_instance_t p_instance); + }; + + union { + bool (*recv_ptz_white_balance_outdoor)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_white_balance_outdoor)(NDIlib_recv_instance_t p_instance); + }; + + union { + bool (*recv_ptz_white_balance_oneshot)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_white_balance_oneshot)(NDIlib_recv_instance_t p_instance); + 
}; + + union { + bool (*recv_ptz_white_balance_manual)(NDIlib_recv_instance_t p_instance, const float red, const float blue); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_white_balance_manual)(NDIlib_recv_instance_t p_instance, const float red, const float blue); + }; + + union { + bool (*recv_ptz_exposure_auto)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_exposure_auto)(NDIlib_recv_instance_t p_instance); + }; + + union { + bool (*recv_ptz_exposure_manual)(NDIlib_recv_instance_t p_instance, const float exposure_level); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_exposure_manual)(NDIlib_recv_instance_t p_instance, const float exposure_level); + }; + + union { + // This functionality is now provided via external NDI recording, see SDK documentation. + PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_start)(NDIlib_recv_instance_t p_instance, const char* p_filename_hint); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_start)(NDIlib_recv_instance_t p_instance, const char* p_filename_hint); + }; + + union { + // This functionality is now provided via external NDI recording, see SDK documentation. + PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_stop)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_stop)(NDIlib_recv_instance_t p_instance); + }; + + union { + // This functionality is now provided via external NDI recording, see SDK documentation. + PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_set_audio_level)(NDIlib_recv_instance_t p_instance, const float level_dB); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_set_audio_level)(NDIlib_recv_instance_t p_instance, const float level_dB); + }; + + union { // This functionality is now provided via external NDI recording, see SDK documentation. 
+ PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_is_recording)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_is_recording)(NDIlib_recv_instance_t p_instance); + }; + + union { + // This functionality is now provided via external NDI recording, see SDK documentation. + PROCESSINGNDILIB_DEPRECATED const char* (*recv_recording_get_filename)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_recv_recording_get_filename)(NDIlib_recv_instance_t p_instance); + }; + + union { + // This functionality is now provided via external NDI recording, see SDK documentation. + PROCESSINGNDILIB_DEPRECATED const char* (*recv_recording_get_error)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_recv_recording_get_error)(NDIlib_recv_instance_t p_instance); + }; + + union { + // This functionality is now provided via external NDI recording, see SDK documentation. + PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_get_times)(NDIlib_recv_instance_t p_instance, NDIlib_recv_recording_time_t* p_times); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_get_times)(NDIlib_recv_instance_t p_instance, NDIlib_recv_recording_time_t* p_times); + }; + + // v3.1 + union { + NDIlib_recv_instance_t (*recv_create_v3)(const NDIlib_recv_create_v3_t* p_create_settings); + PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t (*NDIlib_recv_create_v3)(const NDIlib_recv_create_v3_t* p_create_settings); + }; + + // v3.5 + union { + void (*recv_connect)(NDIlib_recv_instance_t p_instance, const NDIlib_source_t* p_src); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_connect)(NDIlib_recv_instance_t p_instance, const NDIlib_source_t* p_src); + }; + + // v3.6 + union { + NDIlib_framesync_instance_t (*framesync_create)(NDIlib_recv_instance_t p_receiver); + PROCESSINGNDILIB_DEPRECATED NDIlib_framesync_instance_t (*NDIlib_framesync_create)(NDIlib_recv_instance_t p_receiver); + }; + + 
union { + void (*framesync_destroy)(NDIlib_framesync_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_destroy)(NDIlib_framesync_instance_t p_instance); + }; + + union { + void (*framesync_capture_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data, int sample_rate, int no_channels, int no_samples); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_capture_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data, int sample_rate, int no_channels, int no_samples); + }; + + union { + void (*framesync_free_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_free_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data); + }; + + union { + void (*framesync_capture_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_frame_format_type_e field_type); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_capture_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_frame_format_type_e field_type); + }; + + union { + void (*framesync_free_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_free_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data); + }; + + union { + void (*util_send_send_audio_interleaved_32s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32s_t* p_audio_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_send_send_audio_interleaved_32s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32s_t* p_audio_data); + }; + + union { + void (*util_audio_to_interleaved_32s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32s_t* p_dst); + PROCESSINGNDILIB_DEPRECATED void 
(*NDIlib_util_audio_to_interleaved_32s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32s_t* p_dst); + }; + + union { + void (*util_audio_from_interleaved_32s_v2)(const NDIlib_audio_frame_interleaved_32s_t* p_src, NDIlib_audio_frame_v2_t* p_dst); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_from_interleaved_32s_v2)(const NDIlib_audio_frame_interleaved_32s_t* p_src, NDIlib_audio_frame_v2_t* p_dst); + }; + + // v3.8 + union { + const NDIlib_source_t* (*send_get_source_name)(NDIlib_send_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_send_get_source_name)(NDIlib_send_instance_t p_instance); + }; + + // v4.0 + union { + void (*send_send_audio_v3)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_audio_v3)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data); + }; + + union { + void (*util_V210_to_P216)(const NDIlib_video_frame_v2_t* p_src_v210, NDIlib_video_frame_v2_t* p_dst_p216); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_V210_to_P216)(const NDIlib_video_frame_v2_t* p_src_v210, NDIlib_video_frame_v2_t* p_dst_p216); + }; + + union { + void (*util_P216_to_V210)(const NDIlib_video_frame_v2_t* p_src_p216, NDIlib_video_frame_v2_t* p_dst_v210); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_P216_to_V210)(const NDIlib_video_frame_v2_t* p_src_p216, NDIlib_video_frame_v2_t* p_dst_v210); + }; + + // v4.1 + union { + int (*routing_get_no_connections)(NDIlib_routing_instance_t p_instance, uint32_t timeout_in_ms); + PROCESSINGNDILIB_DEPRECATED int (*NDIlib_routing_get_no_connections)(NDIlib_routing_instance_t p_instance, uint32_t timeout_in_ms); + }; + + union { + const NDIlib_source_t* (*routing_get_source_name)(NDIlib_routing_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_routing_get_source_name)(NDIlib_routing_instance_t p_instance); + }; + + union 
{ + NDIlib_frame_type_e (*recv_capture_v3)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v3_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data. + PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e (*NDIlib_recv_capture_v3)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v3_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data. + }; + + union { + void (*recv_free_audio_v3)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_audio_v3)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data); + }; + + union { + void (*framesync_capture_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data, int sample_rate, int no_channels, int no_samples); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_capture_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data, int sample_rate, int no_channels, int no_samples); + }; + + union { + void (*framesync_free_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data); + PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_free_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data); + }; + + union { + int (*framesync_audio_queue_depth)(NDIlib_framesync_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED int (*NDIlib_framesync_audio_queue_depth)(NDIlib_framesync_instance_t p_instance); + }; + + // v5 + union { + bool (*recv_ptz_exposure_manual_v2)(NDIlib_recv_instance_t p_instance, const float iris, const float gain, const float shutter_speed); + PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_exposure_manual_v2)(NDIlib_recv_instance_t p_instance, 
const float iris, const float gain, const float shutter_speed); + }; + + // v6.1 + bool (*util_audio_to_interleaved_16s_v3)(const NDIlib_audio_frame_v3_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst); + bool (*util_audio_from_interleaved_16s_v3)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_v3_t* p_dst); + bool (*util_audio_to_interleaved_32s_v3)(const NDIlib_audio_frame_v3_t* p_src, NDIlib_audio_frame_interleaved_32s_t* p_dst); + bool (*util_audio_from_interleaved_32s_v3)(const NDIlib_audio_frame_interleaved_32s_t* p_src, NDIlib_audio_frame_v3_t* p_dst); + bool (*util_audio_to_interleaved_32f_v3)(const NDIlib_audio_frame_v3_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst); + bool (*util_audio_from_interleaved_32f_v3)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_v3_t* p_dst); + + // v6.2 + bool (*recv_get_source_name)(NDIlib_recv_instance_t p_instance, const char** p_source_name, uint32_t timeout_in_ms); + + NDIlib_recv_advertiser_instance_t (*recv_advertiser_create)(const NDIlib_recv_advertiser_create_t* p_create_settings); + void (*recv_advertiser_destroy)(NDIlib_recv_advertiser_instance_t p_instance); + bool (*recv_advertiser_add_receiver)(NDIlib_recv_advertiser_instance_t p_instance, NDIlib_recv_instance_t p_receiver, bool allow_controlling, bool allow_monitoring, const char* p_input_group_name); + bool (*recv_advertiser_del_receiver)(NDIlib_recv_advertiser_instance_t p_instance, NDIlib_recv_instance_t p_receiver); + + NDIlib_recv_listener_instance_t (*recv_listener_create)(const NDIlib_recv_listener_create_t* p_create_settings); + void (*recv_listener_destroy)(NDIlib_recv_listener_instance_t p_instance); + bool (*recv_listener_is_connected)(NDIlib_recv_listener_instance_t p_instance); + const char* (*recv_listener_get_server_url)(NDIlib_recv_listener_instance_t p_instance); + const NDIlib_receiver_t* (*recv_listener_get_receivers)(NDIlib_recv_listener_instance_t p_instance, uint32_t* 
p_num_receivers); + bool (*recv_listener_wait_for_receivers)(NDIlib_recv_listener_instance_t p_instance, uint32_t timeout_in_ms); +} NDIlib_v6; + +typedef struct NDIlib_v6 NDIlib_v5; +typedef struct NDIlib_v6 NDIlib_v4_5; +typedef struct NDIlib_v6 NDIlib_v4; +typedef struct NDIlib_v6 NDIlib_v3; +typedef struct NDIlib_v6 NDIlib_v2; + +// Load the library. +PROCESSINGNDILIB_API +const NDIlib_v6* NDIlib_v6_load(void); + +// Load the library. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +const NDIlib_v5* NDIlib_v5_load(void); + +// Load the library. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +const NDIlib_v4_5* NDIlib_v4_5_load(void); + +// Load the library. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +const NDIlib_v4* NDIlib_v4_load(void); + +// Load the library. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +const NDIlib_v3* NDIlib_v3_load(void); + +// Load the library. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +const NDIlib_v2* NDIlib_v2_load(void); diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Find.h b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Find.h new file mode 100644 index 0000000..cba0792 --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Find.h @@ -0,0 +1,79 @@ +#pragma once + +// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review +// the SDK documentation for the description of the full license terms, which are also provided in the file +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any +// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be +// downloaded at http://ndi.video/ +// +//*********************************************************************************************************** +// +// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files(the "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +// following conditions : +// +// The above copyright notice and this permission notice shall be included in all copies or substantial +// portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO +// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +// THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// +//*********************************************************************************************************** + +// Structures and type definitions required by NDI finding. +// The reference to an instance of the finder. +struct NDIlib_find_instance_type; +typedef struct NDIlib_find_instance_type* NDIlib_find_instance_t; + +// The creation structure that is used when you are creating a finder. +typedef struct NDIlib_find_create_t { + // Do we want to include the list of NDI sources that are running on the local machine? If TRUE then + // local sources will be visible, if FALSE then they will not. + bool show_local_sources; + + // Which groups do you want to search in for sources. + const char* p_groups; + + // The list of additional IP addresses that exist that we should query for sources on. 
For instance, if + // you want to find the sources on a remote machine that is not on your local sub-net then you can put a + // comma separated list of those IP addresses here and those sources will be available locally even + // though they are not mDNS discoverable. An example might be "12.0.0.8,13.0.12.8". When none is + // specified the registry is used. + // Default = NULL; + const char* p_extra_ips; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_find_create_t( + bool show_local_sources_ = true, + const char* p_groups_ = NULL, + const char* p_extra_ips_ = NULL + ); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_find_create_t; + +//*********************************************************************************************************** +// Create a new finder instance. This will return NULL if it fails. +PROCESSINGNDILIB_API +NDIlib_find_instance_t NDIlib_find_create_v2(const NDIlib_find_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL)); + +// This will destroy an existing finder instance. +PROCESSINGNDILIB_API +void NDIlib_find_destroy(NDIlib_find_instance_t p_instance); + +// This function will recover the current set of sources (i.e. the ones that exist right this second). The +// char* memory buffers returned in NDIlib_source_t are valid until the next call to +// NDIlib_find_get_current_sources or a call to NDIlib_find_destroy. For a given NDIlib_find_instance_t, do +// not call NDIlib_find_get_current_sources asynchronously. +PROCESSINGNDILIB_API +const NDIlib_source_t* NDIlib_find_get_current_sources(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources); + +// This will allow you to wait until the number of online sources have changed. 
+PROCESSINGNDILIB_API +bool NDIlib_find_wait_for_sources(NDIlib_find_instance_t p_instance, uint32_t timeout_in_ms); diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.FrameSync.h b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.FrameSync.h new file mode 100644 index 0000000..8933346 --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.FrameSync.h @@ -0,0 +1,172 @@ +#pragma once + +// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review +// the SDK documentation for the description of the full license terms, which are also provided in the file +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any +// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be +// downloaded at http://ndi.video/ +// +//*********************************************************************************************************** +// +// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files(the "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +// following conditions : +// +// The above copyright notice and this permission notice shall be included in all copies or substantial +// portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO +// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +// THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// +//*********************************************************************************************************** + +// It is important when using video to realize that often you are using difference clocks for different parts +// of the signal chain. Within NDI, the sender can send at the clock rate that it wants and the receiver will +// receive it at that rate. The receiver however is very unlikely to share the exact same clock rate in many +// cases. For instance, bear in mind that computer clocks rely on crystals which while all rated for the same +// frequency are still not exact. If you sending computer has an audio clock that it "thinks" is 48000Hz, to +// the receiver computer that has a different audio clock this might be 48001Hz or 47998Hz. While these +// differences might appear small they accumulate over time and can cause audio to either slightly drift out +// of sync (it is receiving more audio sample than it needs to play back) or might cause audio glitches +// because it is not receiving enough audio samples. While we have described the situation for audio, the +// same exact problem occurs for video sources; it is commonly thought that this can be solved by simply +// having a "frame buffer" and that displaying the "most recently received video frame" will solve these +// timing discrepancies. Unfortunately this is not the case and when it is done because of the variance in +// clock timings, it is very common the video will appear the "jitter" when the sending and receiving clocks +// are almost in alignment. 
The solution to these problems is to implement a "time base corrector" for the +// video clock which is a device that uses hysteresis to know when the best time is to either drop or insert +// a video frame such that the video is most likely to play back smoothly, and audio should be dynamically +// audio sampled (with a high order resampling filter) to adaptively track any clocking differences. +// Implementing these components is very difficult to get entirely correct under all scenarios and this +// implementation is provided to facilitate this and help people who are building real time video +// applications to receive audio and video without needing to undertake the full complexity of implementing +// such clock devices. +// +// Another way to look at what this class does is that it transforms "push" sources (i.e. NDI sources in +// which the data is pushed from the sender to the receiver) into "pull" sources in which a host application +// is pulling the data down-stream. The frame-sync automatically tracks all clocks to achieve the best video +// performance doing this operation. +// +// In addition to time-base correction operations, these implementations also will automatically detect and +// correct timing jitter that might occur. This will internally correct for timing anomalies that might be +// caused by network, sender or receiver side timing errors caused by CPU limitations, network bandwidth +// fluctuations, etc... +// +// A very common use of a frame-synchronizer might be if you are displaying video on screen timed to the GPU +// v-sync, you should use such a device to convert from the incoming time-base into the time-base of the GPU. +// +// The following are common times that you want to use a frame-synchronizer +// Video playback on screen : Yes, you want the clock to be synced with vertical refresh. +// Audio playback through sound card : Yes you want the clock to be synced with your sound card clock. 
+// Video mixing : Yes you want the input video clocks to all be synced to your output video clock. +// Audio mixing : Yes, you want all input audio clocks to be brought into sync with your output +// audio clock. +// Recording a single channel : No, you want to record the signal in it's raw form without +// any re-clocking. +// Recording multiple channels : Maybe. If you want to sync some input channels to match a master clock +// so that they can be ISO edited, then you might want a frame-sync. + +// The type instance for a frame-synchronizer. +struct NDIlib_framesync_instance_type; +typedef struct NDIlib_framesync_instance_type* NDIlib_framesync_instance_t; + +// Create a frame synchronizer instance that can be used to get frames from a receiver. Once this receiver +// has been bound to a frame-sync then you should use it in order to receive video frames. You can continue +// to use the underlying receiver for other operations (tally, PTZ, etc...). Note that it remains your +// responsibility to destroy the receiver even when a frame-sync is using it. You should always destroy the +// receiver after the frame-sync has been destroyed. +// +PROCESSINGNDILIB_API +NDIlib_framesync_instance_t NDIlib_framesync_create(NDIlib_recv_instance_t p_receiver); + +// Destroy a frame-sync implementation. +PROCESSINGNDILIB_API +void NDIlib_framesync_destroy(NDIlib_framesync_instance_t p_instance); + +// This function will pull audio samples from the frame-sync queue. This function will always return data +// immediately, inserting silence if no current audio data is present. You should call this at the rate that +// you want audio and it will automatically adapt the incoming audio signal to match the rate at which you +// are calling by using dynamic audio sampling. Note that you have no obligation that your requested sample +// rate, no channels and no samples match the incoming signal and all combinations of conversions +// are supported. 
+// +// If you wish to know what the current incoming audio format is, then you can make a call with the +// parameters set to zero and it will then return the associated settings. For instance a call as follows: +// +// NDIlib_framesync_capture_audio(p_instance, p_audio_data, 0, 0, 0); +// +// will return in p_audio_data the current received audio format if there is one or sample_rate and +// no_channels equal to zero if there is not one. At any time you can specify sample_rate and no_channels as +// zero and it will return the current received audio format. +// +PROCESSINGNDILIB_API +void NDIlib_framesync_capture_audio( + NDIlib_framesync_instance_t p_instance, + NDIlib_audio_frame_v2_t* p_audio_data, + int sample_rate, int no_channels, int no_samples +); +PROCESSINGNDILIB_API +void NDIlib_framesync_capture_audio_v2( + NDIlib_framesync_instance_t p_instance, + NDIlib_audio_frame_v3_t* p_audio_data, + int sample_rate, int no_channels, int no_samples +); + +// Free audio returned by NDIlib_framesync_capture_audio. +PROCESSINGNDILIB_API +void NDIlib_framesync_free_audio(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data); + +// Free audio returned by NDIlib_framesync_capture_audio_v2. +PROCESSINGNDILIB_API +void NDIlib_framesync_free_audio_v2(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data); + +// This function will tell you the approximate current depth of the audio queue to give you an indication +// of the number of audio samples you can request. Note that if you should treat the results of this function +// with some care because in reality the frame-sync API is meant to dynamically resample audio to match the +// rate that you are calling it. If you have an inaccurate clock then this function can be useful. +// for instance : +// +// while(true) +// { int no_samples = NDIlib_framesync_audio_queue_depth(p_instance); +// NDIlib_framesync_capture_audio( ... ); +// play_audio( ... 
) +// NDIlib_framesync_free_audio( ... ) +// inaccurate_sleep( 33ms ); +// } +// +// Obviously because audio is being received in real-time there is no guarantee after the call that the +// number is correct since new samples might have been captured in that time. On synchronous use of this +// function however this will be the minimum number of samples in the queue at any later time until +// NDIlib_framesync_capture_audio is called. +// +PROCESSINGNDILIB_API +int NDIlib_framesync_audio_queue_depth(NDIlib_framesync_instance_t p_instance); + +// This function will pull video samples from the frame-sync queue. This function will always immediately +// return a video sample by using time-base correction. You can specify the desired field type which is then +// used to return the best possible frame. Note that field based frame-synchronization means that the +// frame-synchronizer attempts to match the fielded input phase with the frame requests so that you have the +// most correct possible field ordering on output. Note that the same frame can be returned multiple times. +// +// If no video frame has ever been received, this will return NDIlib_video_frame_v2_t as an empty (all zero) +// structure. The reason for this is that it allows you to determine that there has not yet been any video +// and act accordingly. For instance you might want to display a constant frame output at a particular video +// format, or black. +// +PROCESSINGNDILIB_API +void NDIlib_framesync_capture_video( + NDIlib_framesync_instance_t p_instance, + NDIlib_video_frame_v2_t* p_video_data, + NDIlib_frame_format_type_e field_type NDILIB_CPP_DEFAULT_VALUE(NDIlib_frame_format_type_progressive) +); + +// Free video returned by NDIlib_framesync_capture_video. 
+PROCESSINGNDILIB_API +void NDIlib_framesync_free_video(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data); diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Lib.cplusplus.h b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Lib.cplusplus.h new file mode 100644 index 0000000..0c7d973 --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Lib.cplusplus.h @@ -0,0 +1,129 @@ +#pragma once + +// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review +// the SDK documentation for the description of the full license terms, which are also provided in the file +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any +// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be +// downloaded at http://ndi.video/ +// +//*********************************************************************************************************** +// +// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files(the "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +// following conditions : +// +// The above copyright notice and this permission notice shall be included in all copies or substantial +// portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO +// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +// THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// +//*********************************************************************************************************** + + +// C++ implementations of default constructors are here to avoid them needing to be inline with all of the +// rest of the code. + +#ifdef __clang__ +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" +#endif + +// All the structs used and reasonable defaults are here +inline NDIlib_source_t::NDIlib_source_t(const char* p_ndi_name_, const char* p_url_address_) + : p_ndi_name(p_ndi_name_), p_url_address(p_url_address_) {} + +inline NDIlib_video_frame_v2_t::NDIlib_video_frame_v2_t(int xres_, int yres_, NDIlib_FourCC_video_type_e FourCC_, int frame_rate_N_, int frame_rate_D_, + float picture_aspect_ratio_, NDIlib_frame_format_type_e frame_format_type_, + int64_t timecode_, uint8_t* p_data_, int line_stride_in_bytes_, const char* p_metadata_, int64_t timestamp_) + : xres(xres_), yres(yres_), FourCC(FourCC_), frame_rate_N(frame_rate_N_), frame_rate_D(frame_rate_D_), + picture_aspect_ratio(picture_aspect_ratio_), frame_format_type(frame_format_type_), + timecode(timecode_), p_data(p_data_), line_stride_in_bytes(line_stride_in_bytes_), p_metadata(p_metadata_), timestamp(timestamp_) {} + +inline NDIlib_audio_frame_v2_t::NDIlib_audio_frame_v2_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_, float* p_data_, + int channel_stride_in_bytes_, const char* p_metadata_, int64_t timestamp_) + : sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_), + p_data(p_data_), channel_stride_in_bytes(channel_stride_in_bytes_), p_metadata(p_metadata_), timestamp(timestamp_) {} + +inline 
NDIlib_audio_frame_v3_t::NDIlib_audio_frame_v3_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_, + NDIlib_FourCC_audio_type_e FourCC_, uint8_t* p_data_, int channel_stride_in_bytes_, + const char* p_metadata_, int64_t timestamp_) + : sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_), + FourCC(FourCC_), p_data(p_data_), channel_stride_in_bytes(channel_stride_in_bytes_), + p_metadata(p_metadata_), timestamp(timestamp_) {} + +inline NDIlib_video_frame_t::NDIlib_video_frame_t(int xres_, int yres_, NDIlib_FourCC_video_type_e FourCC_, int frame_rate_N_, int frame_rate_D_, + float picture_aspect_ratio_, NDIlib_frame_format_type_e frame_format_type_, + int64_t timecode_, uint8_t* p_data_, int line_stride_in_bytes_) + : xres(xres_), yres(yres_), FourCC(FourCC_), frame_rate_N(frame_rate_N_), frame_rate_D(frame_rate_D_), + picture_aspect_ratio(picture_aspect_ratio_), frame_format_type(frame_format_type_), + timecode(timecode_), p_data(p_data_), line_stride_in_bytes(line_stride_in_bytes_) {} + +inline NDIlib_audio_frame_t::NDIlib_audio_frame_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_, float* p_data_, + int channel_stride_in_bytes_) + : sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_), + p_data(p_data_), channel_stride_in_bytes(channel_stride_in_bytes_) {} + +inline NDIlib_metadata_frame_t::NDIlib_metadata_frame_t(int length_, int64_t timecode_, char* p_data_) + : length(length_), timecode(timecode_), p_data(p_data_) {} + +inline NDIlib_tally_t::NDIlib_tally_t(bool on_program_, bool on_preview_) + : on_program(on_program_), on_preview(on_preview_) {} + +inline NDIlib_routing_create_t::NDIlib_routing_create_t(const char* p_ndi_name_, const char* p_groups_) + : p_ndi_name(p_ndi_name_), p_groups(p_groups_) {} + +inline NDIlib_recv_create_v3_t::NDIlib_recv_create_v3_t(const NDIlib_source_t source_to_connect_to_, 
NDIlib_recv_color_format_e color_format_, + NDIlib_recv_bandwidth_e bandwidth_, bool allow_video_fields_, const char* p_ndi_name_) + : source_to_connect_to(source_to_connect_to_), color_format(color_format_), bandwidth(bandwidth_), allow_video_fields(allow_video_fields_), p_ndi_recv_name(p_ndi_name_) {} + +inline NDIlib_recv_create_t::NDIlib_recv_create_t(const NDIlib_source_t source_to_connect_to_, NDIlib_recv_color_format_e color_format_, + NDIlib_recv_bandwidth_e bandwidth_, bool allow_video_fields_) + : source_to_connect_to(source_to_connect_to_), color_format(color_format_), bandwidth(bandwidth_), allow_video_fields(allow_video_fields_) {} + +inline NDIlib_recv_performance_t::NDIlib_recv_performance_t(void) + : video_frames(0), audio_frames(0), metadata_frames(0) {} + +inline NDIlib_recv_queue_t::NDIlib_recv_queue_t(void) + : video_frames(0), audio_frames(0), metadata_frames(0) {} + +inline NDIlib_recv_recording_time_t::NDIlib_recv_recording_time_t(void) + : no_frames(0), start_time(0), last_time(0) {} + +inline NDIlib_send_create_t::NDIlib_send_create_t(const char* p_ndi_name_, const char* p_groups_, bool clock_video_, bool clock_audio_) + : p_ndi_name(p_ndi_name_), p_groups(p_groups_), clock_video(clock_video_), clock_audio(clock_audio_) {} + +inline NDIlib_find_create_t::NDIlib_find_create_t(bool show_local_sources_, const char* p_groups_, const char* p_extra_ips_) + : show_local_sources(show_local_sources_), p_groups(p_groups_), p_extra_ips(p_extra_ips_) {} + +inline NDIlib_audio_frame_interleaved_16s_t::NDIlib_audio_frame_interleaved_16s_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_, int reference_level_, int16_t* p_data_) + : sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_), + reference_level(reference_level_), p_data(p_data_) {} + +inline NDIlib_audio_frame_interleaved_32s_t::NDIlib_audio_frame_interleaved_32s_t(int sample_rate_, int no_channels_, int no_samples_, int64_t 
timecode_, int reference_level_, int32_t* p_data_) + : sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_), + reference_level(reference_level_), p_data(p_data_) {} + +inline NDIlib_audio_frame_interleaved_32f_t::NDIlib_audio_frame_interleaved_32f_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_, float* p_data_) + : sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_), p_data(p_data_) {} + +inline NDIlib_recv_advertiser_create_t::NDIlib_recv_advertiser_create_t(const char* p_url_address) + : p_url_address(p_url_address) {} + +inline NDIlib_recv_listener_create_t::NDIlib_recv_listener_create_t(const char* p_url_address) + : p_url_address(p_url_address) {} + +inline NDIlib_receiver_t::NDIlib_receiver_t(void) + : p_uuid(NULL), p_name(NULL), p_input_uuid(NULL), p_input_name(NULL), p_address(NULL), + p_streams(NULL), num_streams(0), p_commands(NULL), num_commands(0), events_subscribed(false) {} + +#ifdef __clang__ +#pragma clang diagnostic pop +#endif diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Lib.h b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Lib.h new file mode 100644 index 0000000..8f106c2 --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Lib.h @@ -0,0 +1,165 @@ +#pragma once + +// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review +// the SDK documentation for the description of the full license terms, which are also provided in the file +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any +// part of this SDK is acknowledgment that you agree to the SDK license terms. 
The full NDI SDK may be +// downloaded at http://ndi.video/ +// +//*********************************************************************************************************** +// +// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files(the "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +// following conditions : +// +// The above copyright notice and this permission notice shall be included in all copies or substantial +// portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO +// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +// THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+// +//*********************************************************************************************************** + +#ifdef PROCESSINGNDILIB_STATIC +# ifdef __cplusplus +# define PROCESSINGNDILIB_API extern "C" +# else // __cplusplus +# define PROCESSINGNDILIB_API +# endif // __cplusplus +#else // PROCESSINGNDILIB_STATIC +# ifdef _WIN32 +# ifdef PROCESSINGNDILIB_EXPORTS +# ifdef __cplusplus +# define PROCESSINGNDILIB_API extern "C" __declspec(dllexport) +# else // __cplusplus +# define PROCESSINGNDILIB_API __declspec(dllexport) +# endif // __cplusplus +# else // PROCESSINGNDILIB_EXPORTS +# ifdef __cplusplus +# define PROCESSINGNDILIB_API extern "C" __declspec(dllimport) +# else // __cplusplus +# define PROCESSINGNDILIB_API __declspec(dllimport) +# endif // __cplusplus +# ifdef _WIN64 +# define NDILIB_LIBRARY_NAME "Processing.NDI.Lib.x64.dll" +# define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V6" +# define NDILIB_REDIST_URL "http://ndi.link/NDIRedistV6" +# else // _WIN64 +# define NDILIB_LIBRARY_NAME "Processing.NDI.Lib.x86.dll" +# define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V6" +# define NDILIB_REDIST_URL "http://ndi.link/NDIRedistV6" +# endif // _WIN64 +# endif // PROCESSINGNDILIB_EXPORTS +# else // _WIN32 +# ifdef __APPLE__ +# define NDILIB_LIBRARY_NAME "libndi.dylib" +# define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V6" +# define NDILIB_REDIST_URL "http://ndi.link/NDIRedistV6Apple" +# else // __APPLE__ +# define NDILIB_LIBRARY_NAME "libndi.so.6" +# define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V6" +# define NDILIB_REDIST_URL "" +# endif // __APPLE__ +# ifdef __cplusplus +# define PROCESSINGNDILIB_API extern "C" __attribute((visibility("default"))) +# else // __cplusplus +# define PROCESSINGNDILIB_API __attribute((visibility("default"))) +# endif // __cplusplus +# endif // _WIN32 +#endif // PROCESSINGNDILIB_STATIC + +#ifndef PROCESSINGNDILIB_DEPRECATED +# ifdef _WIN32 +# ifdef _MSC_VER +# define PROCESSINGNDILIB_DEPRECATED __declspec(deprecated) +# else // _MSC_VER 
+# define PROCESSINGNDILIB_DEPRECATED __attribute((deprecated)) +# endif // _MSC_VER +# else // _WIN32 +# define PROCESSINGNDILIB_DEPRECATED +# endif // _WIN32 +#endif // PROCESSINGNDILIB_DEPRECATED + +#ifndef NDILIB_CPP_DEFAULT_CONSTRUCTORS +# ifdef __cplusplus +# define NDILIB_CPP_DEFAULT_CONSTRUCTORS 1 +# else // __cplusplus +# define NDILIB_CPP_DEFAULT_CONSTRUCTORS 0 +# endif // __cplusplus +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS + +#ifndef NDILIB_CPP_DEFAULT_VALUE +# ifdef __cplusplus +# define NDILIB_CPP_DEFAULT_VALUE(a) =(a) +# else // __cplusplus +# define NDILIB_CPP_DEFAULT_VALUE(a) +# endif // __cplusplus +#endif // NDILIB_CPP_DEFAULT_VALUE + +// Data structures shared by multiple SDKs. +#include "Processing.NDI.compat.h" +#include "Processing.NDI.structs.h" + +// This is not actually required, but will start and end the libraries which might get you slightly better +// performance in some cases. In general it is more "correct" to call these although it is not required. +// There is no way to call these that would have an adverse impact on anything (even calling destroy before +// you've deleted all your objects). This will return false if the CPU is not sufficiently capable to run +// NDILib currently NDILib requires SSE4.2 instructions (see documentation). You can verify a specific CPU +// against the library with a call to NDIlib_is_supported_CPU(). +PROCESSINGNDILIB_API +bool NDIlib_initialize(void); + +PROCESSINGNDILIB_API +void NDIlib_destroy(void); + +PROCESSINGNDILIB_API +const char* NDIlib_version(void); + +// Recover whether the current CPU in the system is capable of running NDILib. +PROCESSINGNDILIB_API +bool NDIlib_is_supported_CPU(void); + +// The finding (discovery API). +#include "Processing.NDI.Find.h" + +// The receiving video and audio API. +#include "Processing.NDI.Recv.h" + +// Extensions to support PTZ control, etc... +#include "Processing.NDI.Recv.ex.h" + +// The receiver advertiser API. 
+#include "Processing.NDI.RecvAdvertiser.h" + +// The receiver listener API. +#include "Processing.NDI.RecvListener.h" + +// The sending video API. +#include "Processing.NDI.Send.h" + +// The routing of inputs API. +#include "Processing.NDI.Routing.h" + +// Utility functions. +#include "Processing.NDI.utilities.h" + +// Deprecated structures and functions. +#include "Processing.NDI.deprecated.h" + +// The frame synchronizer. +#include "Processing.NDI.FrameSync.h" + +// Dynamic loading used for OSS libraries. +#include "Processing.NDI.DynamicLoad.h" + +// The C++ implementations. +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS +#include "Processing.NDI.Lib.cplusplus.h" +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Recv.ex.h b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Recv.ex.h new file mode 100644 index 0000000..fd4dbe8 --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Recv.ex.h @@ -0,0 +1,211 @@ +#pragma once + +// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review +// the SDK documentation for the description of the full license terms, which are also provided in the file +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any +// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be +// downloaded at http://ndi.video/ +// +//*********************************************************************************************************** +// +// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files(the "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +// following conditions : +// +// The above copyright notice and this permission notice shall be included in all copies or substantial +// portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO +// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +// THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// +//*********************************************************************************************************** + +// Has this receiver got PTZ control. Note that it might take a second or two after the connection for this +// value to be set. To avoid the need to poll this function, you can know when the value of this function +// might have changed when the NDILib_recv_capture* call would return NDIlib_frame_type_status_change. +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_is_supported(NDIlib_recv_instance_t p_instance); + +// Has this receiver got recording control. Note that it might take a second or two after the connection for +// this value to be set. To avoid the need to poll this function, you can know when the value of this +// function might have changed when the NDILib_recv_capture* call would return NDIlib_frame_type_status_change. 
+// +// Note on deprecation of this function: +// NDI version 4 includes the native ability to record all NDI streams using an external application that +// is provided with the SDK. This is better in many ways than the internal recording support which only +// ever supported remotely recording systems and NDI|HX. This functionality will be supported in the SDK +// for some time although we are recommending that you use the newer support which is more feature rich and +// supports the recording of all stream types, does not take CPU time to record NDI sources (it does not +// require any type of re-compression since it can just store the data in the file), it will synchronize +// all recorders on a system (and cross systems if NTP clock locking is used). +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +bool NDIlib_recv_recording_is_supported(NDIlib_recv_instance_t p_instance); + +// PTZ Controls. +// Zoom to an absolute value. +// zoom_value = 0.0 (zoomed in) ... 1.0 (zoomed out) +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_zoom(NDIlib_recv_instance_t p_instance, const float zoom_value); + +// Zoom at a particular speed. +// zoom_speed = -1.0 (zoom outwards) ... +1.0 (zoom inwards) +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_zoom_speed(NDIlib_recv_instance_t p_instance, const float zoom_speed); + +// Set the pan and tilt to an absolute value. +// pan_value = -1.0 (left) ... 0.0 (centered) ... +1.0 (right) +// tilt_value = -1.0 (bottom) ... 0.0 (centered) ... +1.0 (top) +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_pan_tilt(NDIlib_recv_instance_t p_instance, const float pan_value, const float tilt_value); + +// Set the pan and tilt direction and speed. +// pan_speed = -1.0 (moving right) ... 0.0 (stopped) ... +1.0 (moving left) +// tilt_speed = -1.0 (down) ... 0.0 (stopped) ... 
+1.0 (moving up) +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_pan_tilt_speed(NDIlib_recv_instance_t p_instance, const float pan_speed, const float tilt_speed); + +// Store the current position, focus, etc... as a preset. +// preset_no = 0 ... 99 +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_store_preset(NDIlib_recv_instance_t p_instance, const int preset_no); + +// Recall a preset, including position, focus, etc... +// preset_no = 0 ... 99 +// speed = 0.0(as slow as possible) ... 1.0(as fast as possible) The speed at which to move to the new preset. +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_recall_preset(NDIlib_recv_instance_t p_instance, const int preset_no, const float speed); + +// Put the camera in auto-focus. +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_auto_focus(NDIlib_recv_instance_t p_instance); + +// Focus to an absolute value. +// focus_value = 0.0 (focused to infinity) ... 1.0 (focused as close as possible) +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_focus(NDIlib_recv_instance_t p_instance, const float focus_value); + +// Focus at a particular speed. +// focus_speed = -1.0 (focus outwards) ... +1.0 (focus inwards) +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_focus_speed(NDIlib_recv_instance_t p_instance, const float focus_speed); + +// Put the camera in auto white balance mode. +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_white_balance_auto(NDIlib_recv_instance_t p_instance); + +// Put the camera in indoor white balance. +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_white_balance_indoor(NDIlib_recv_instance_t p_instance); + +// Put the camera in indoor white balance. +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_white_balance_outdoor(NDIlib_recv_instance_t p_instance); + +// Use the current brightness to automatically set the current white balance. +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_white_balance_oneshot(NDIlib_recv_instance_t p_instance); + +// Set the manual camera white balance using the R, B values. +// red = 0.0(not red) ... 
1.0(very red) +// blue = 0.0(not blue) ... 1.0(very blue) +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_white_balance_manual(NDIlib_recv_instance_t p_instance, const float red, const float blue); + +// Put the camera in auto-exposure mode. +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_exposure_auto(NDIlib_recv_instance_t p_instance); + +// Manually set the camera exposure iris. +// exposure_level = 0.0(dark) ... 1.0(light) +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_exposure_manual(NDIlib_recv_instance_t p_instance, const float exposure_level); + +// Manually set the camera exposure parameters. +// iris = 0.0(dark) ... 1.0(light) +// gain = 0.0(dark) ... 1.0(light) +// shutter_speed = 0.0(slow) ... 1.0(fast) +PROCESSINGNDILIB_API +bool NDIlib_recv_ptz_exposure_manual_v2( + NDIlib_recv_instance_t p_instance, + const float iris, const float gain, const float shutter_speed +); + +// Recording control. +// This will start recording. If the recorder was already recording then the message is ignored. A filename is +// passed in as a "hint". Since the recorder might already be recording (or might not allow complete +// flexibility over its filename), the filename might or might not be used. If the filename is empty, or not +// present, a name will be chosen automatically. If you do not wish to provide a filename hint you can simply +// pass NULL. +// +// See note above on deprecation and why this is, and how to replace this functionality. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +bool NDIlib_recv_recording_start(NDIlib_recv_instance_t p_instance, const char* p_filename_hint); + +// Stop recording. +// +// See note above on deprecation and why this is, and how to replace this functionality. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +bool NDIlib_recv_recording_stop(NDIlib_recv_instance_t p_instance); + +// This will control the audio level for the recording. dB is specified in decibels relative to the reference +// level of the source. 
Not all recording sources support controlling audio levels. For instance, a digital +// audio device would not be able to avoid clipping on sources already at the wrong level, thus might not +// support this message. +// +// See note above on deprecation and why this is, and how to replace this functionality. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +bool NDIlib_recv_recording_set_audio_level(NDIlib_recv_instance_t p_instance, const float level_dB); + +// This will determine if the source is currently recording. It will return true while recording is in +// progress and false when it is not. Because there is one recorder and multiple people might be connected to +// it, there is a chance that it is recording which was initiated by someone else. +// +// See note above on deprecation and why this is, and how to replace this functionality. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +bool NDIlib_recv_recording_is_recording(NDIlib_recv_instance_t p_instance); + +// Get the current filename for recording. When this is set it will return a non-NULL value which is owned by +// you and freed using NDIlib_recv_free_string. If a file was already being recorded by another client, the +// message will contain the name of that file. The filename contains a UNC path (when one is available) to +// the recorded file, and can be used to access the file on your local machine for playback. If a UNC path +// is not available, then this will represent the local filename. This will remain valid even after the file +// has stopped being recorded until the next file is started. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +const char* NDIlib_recv_recording_get_filename(NDIlib_recv_instance_t p_instance); + +// This will tell you whether there was a recording error and what that string is. When this is set it will +// return a non-NULL value which is owned by you and freed using NDIlib_recv_free_string. When there is no +// error it will return NULL. 
+// +// See note above on depreciation and why this is, and how to replace this functionality. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +const char* NDIlib_recv_recording_get_error(NDIlib_recv_instance_t p_instance); + +// In order to get the duration. +typedef struct NDIlib_recv_recording_time_t +{ + // The number of actual video frames recorded. + int64_t no_frames; + + // The starting time and current largest time of the record, in UTC time, at 100-nanosecond unit + // intervals. This allows you to know the record time irrespective of frame rate. For instance, + // last_time - start_time would give you the recording length in 100-nanosecond intervals. + int64_t start_time, last_time; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_recv_recording_time_t(void); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_recv_recording_time_t; + +// Get the current recording times. +// +// See note above on depreciation and why this is, and how to replace this functionality. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +bool NDIlib_recv_recording_get_times(NDIlib_recv_instance_t p_instance, NDIlib_recv_recording_time_t* p_times); diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Recv.h b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Recv.h new file mode 100644 index 0000000..c95ee49 --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Recv.h @@ -0,0 +1,297 @@ +#pragma once + +// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review +// the SDK documentation for the description of the full license terms, which are also provided in the file +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any +// part of this SDK is acknowledgment that you agree to the SDK license terms. 
The full NDI SDK may be +// downloaded at http://ndi.video/ +// +//*********************************************************************************************************** +// +// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files(the "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +// following conditions : +// +// The above copyright notice and this permission notice shall be included in all copies or substantial +// portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO +// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +// THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// +//*********************************************************************************************************** + +// Structures and type definitions required by NDI finding. +// The reference to an instance of the receiver. +struct NDIlib_recv_instance_type; +typedef struct NDIlib_recv_instance_type* NDIlib_recv_instance_t; + +typedef enum NDIlib_recv_bandwidth_e { + NDIlib_recv_bandwidth_metadata_only = -10, // Receive metadata. + NDIlib_recv_bandwidth_audio_only = 10, // Receive metadata, audio. + NDIlib_recv_bandwidth_lowest = 0, // Receive metadata, audio, video at a lower bandwidth and resolution. 
+ NDIlib_recv_bandwidth_highest = 100, // Receive metadata, audio, video at full resolution. + + // Make sure this is a 32-bit enumeration. + NDIlib_recv_bandwidth_max = 0x7fffffff +} NDIlib_recv_bandwidth_e; + +typedef enum NDIlib_recv_color_format_e { + // When there is no alpha channel, this mode delivers BGRX. + // When there is an alpha channel, this mode delivers BGRA. + NDIlib_recv_color_format_BGRX_BGRA = 0, + + // When there is no alpha channel, this mode delivers UYVY. + // When there is an alpha channel, this mode delivers BGRA. + NDIlib_recv_color_format_UYVY_BGRA = 1, + + // When there is no alpha channel, this mode delivers BGRX. + // When there is an alpha channel, this mode delivers RGBA. + NDIlib_recv_color_format_RGBX_RGBA = 2, + + // When there is no alpha channel, this mode delivers UYVY. + // When there is an alpha channel, this mode delivers RGBA. + NDIlib_recv_color_format_UYVY_RGBA = 3, + + // This format will try to decode the video using the fastest available color format for the incoming + // video signal. This format follows the following guidelines, although different platforms might + // vary slightly based on their capabilities and specific performance profiles. In general if you want + // the best performance this mode should be used. + // + // When using this format, you should consider than allow_video_fields is true, and individual fields + // will always be delivered. + // + // For most video sources on most platforms, this will follow the following conventions. + // No alpha channel : UYVY + // Alpha channel : UYVA + NDIlib_recv_color_format_fastest = 100, + + // This format will try to provide the video in the format that is the closest to native for the incoming + // codec yielding the highest quality. Specifically, this allows for receiving on 16bpp color from many + // sources. + // + // When using this format, you should consider than allow_video_fields is true, and individual fields + // will always be delivered. 
+ // + // For most video sources on most platforms, this will follow the following conventions + // No alpha channel : P216, or UYVY + // Alpha channel : PA16 or UYVA + NDIlib_recv_color_format_best = 101, + + // Legacy definitions for backwards compatibility. + NDIlib_recv_color_format_e_BGRX_BGRA = NDIlib_recv_color_format_BGRX_BGRA, + NDIlib_recv_color_format_e_UYVY_BGRA = NDIlib_recv_color_format_UYVY_BGRA, + NDIlib_recv_color_format_e_RGBX_RGBA = NDIlib_recv_color_format_RGBX_RGBA, + NDIlib_recv_color_format_e_UYVY_RGBA = NDIlib_recv_color_format_UYVY_RGBA, + +#ifdef _WIN32 + // For Windows we can support flipped images which is unfortunately something that Microsoft decided to + // do back in the old days. + NDIlib_recv_color_format_BGRX_BGRA_flipped = 1000 + NDIlib_recv_color_format_BGRX_BGRA, +#endif + + // Make sure this is a 32-bit enumeration. + NDIlib_recv_color_format_max = 0x7fffffff +} NDIlib_recv_color_format_e; + +// The creation structure that is used when you are creating a receiver. +typedef struct NDIlib_recv_create_v3_t { + // The source that you wish to connect to. + NDIlib_source_t source_to_connect_to; + + // Your preference of color space. See above. + NDIlib_recv_color_format_e color_format; + + // The bandwidth setting that you wish to use for this video source. Bandwidth controlled by changing + // both the compression level and the resolution of the source. A good use for low bandwidth is working + // on WIFI connections. + NDIlib_recv_bandwidth_e bandwidth; + + // When this flag is FALSE, all video that you receive will be progressive. For sources that provide + // fields, this is de-interlaced on the receiving side (because we cannot change what the up-stream + // source was actually rendering. This is provided as a convenience to down-stream sources that do not + // wish to understand fielded video. There is almost no performance impact of using this function. + bool allow_video_fields; + + // The name of the NDI receiver to create. 
This is a NULL terminated UTF8 string and should be the name + // of receive channel that you have. This is in many ways symmetric with the name of senders, so this + // might be "Channel 1" on your system. If this is NULL then it will use the filename of your application + // indexed with the number of the instance number of this receiver. + const char* p_ndi_recv_name; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_recv_create_v3_t( + const NDIlib_source_t source_to_connect_to_ = NDIlib_source_t(), + NDIlib_recv_color_format_e color_format_ = NDIlib_recv_color_format_UYVY_BGRA, + NDIlib_recv_bandwidth_e bandwidth_ = NDIlib_recv_bandwidth_highest, + bool allow_video_fields_ = true, + const char* p_ndi_name_ = NULL + ); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS + +} NDIlib_recv_create_v3_t; + + +// This allows you determine the current performance levels of the receiving to be able to detect whether +// frames have been dropped. +typedef struct NDIlib_recv_performance_t { + // The number of video frames. + int64_t video_frames; + + // The number of audio frames. + int64_t audio_frames; + + // The number of metadata frames. + int64_t metadata_frames; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_recv_performance_t(void); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_recv_performance_t; + +// Get the current queue depths. +typedef struct NDIlib_recv_queue_t { + // The number of video frames. + int video_frames; + + // The number of audio frames. + int audio_frames; + + // The number of metadata frames. + int metadata_frames; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_recv_queue_t(void); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_recv_queue_t; + +//************************************************************************************************************************** +// Create a new receiver instance. This will return NULL if it fails. 
If you create this with the default +// settings (NULL) then it will automatically determine a receiver name. +PROCESSINGNDILIB_API +NDIlib_recv_instance_t NDIlib_recv_create_v3(const NDIlib_recv_create_v3_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL)); + +// This will destroy an existing receiver instance. +PROCESSINGNDILIB_API +void NDIlib_recv_destroy(NDIlib_recv_instance_t p_instance); + +// This function allows you to change the connection to another video source, you can also disconnect it by +// specifying a NULL here. This allows you to preserve a receiver without needing to. +PROCESSINGNDILIB_API +void NDIlib_recv_connect(NDIlib_recv_instance_t p_instance, const NDIlib_source_t* p_src NDILIB_CPP_DEFAULT_VALUE(NULL)); + +// This will allow you to receive video, audio and metadata frames. Any of the buffers can be NULL, in which +// case data of that type will not be captured in this call. This call can be called simultaneously on +// separate threads, so it is entirely possible to receive audio, video, metadata all on separate threads. +// This function will return NDIlib_frame_type_none if no data is received within the specified timeout and +// NDIlib_frame_type_error if the connection is lost. Buffers captured with this must be freed with the +// appropriate free function below. +PROCESSINGNDILIB_API +NDIlib_frame_type_e NDIlib_recv_capture_v2( + NDIlib_recv_instance_t p_instance, // The library instance. + NDIlib_video_frame_v2_t* p_video_data, // The video data received (can be NULL). + NDIlib_audio_frame_v2_t* p_audio_data, // The audio data received (can be NULL). + NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL). + uint32_t timeout_in_ms // The amount of time in milliseconds to wait for data. +); + +// This will allow you to receive video, audio and metadata frames. Any of the buffers can be NULL, in which +// case data of that type will not be captured in this call. 
This call can be called simultaneously on +// separate threads, so it is entirely possible to receive audio, video, metadata all on separate threads. +// This function will return NDIlib_frame_type_none if no data is received within the specified timeout and +// NDIlib_frame_type_error if the connection is lost. Buffers captured with this must be freed with the +// appropriate free function below. +PROCESSINGNDILIB_API +NDIlib_frame_type_e NDIlib_recv_capture_v3( + NDIlib_recv_instance_t p_instance, // The library instance. + NDIlib_video_frame_v2_t* p_video_data, // The video data received (can be NULL). + NDIlib_audio_frame_v3_t* p_audio_data, // The audio data received (can be NULL). + NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL). + uint32_t timeout_in_ms // The amount of time in milliseconds to wait for data. +); + +// Free the buffers returned by capture for video. +PROCESSINGNDILIB_API +void NDIlib_recv_free_video_v2(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); + +// Free the buffers returned by capture for audio. +PROCESSINGNDILIB_API +void NDIlib_recv_free_audio_v2(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data); + +// Free the buffers returned by capture for audio. +PROCESSINGNDILIB_API +void NDIlib_recv_free_audio_v3(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data); + +// Free the buffers returned by capture for metadata. +PROCESSINGNDILIB_API +void NDIlib_recv_free_metadata(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + +// This will free a string that was allocated and returned by NDIlib_recv (for instance the +// NDIlib_recv_get_web_control) function. +PROCESSINGNDILIB_API +void NDIlib_recv_free_string(NDIlib_recv_instance_t p_instance, const char* p_string); + +// This function will send a meta message to the source that we are connected too. 
This returns FALSE if we +// are not currently connected to anything. +PROCESSINGNDILIB_API +bool NDIlib_recv_send_metadata(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + +// Set the up-stream tally notifications. This returns FALSE if we are not currently connected to anything. +// That said, the moment that we do connect to something it will automatically be sent the tally state. +PROCESSINGNDILIB_API +bool NDIlib_recv_set_tally(NDIlib_recv_instance_t p_instance, const NDIlib_tally_t* p_tally); + +// Get the current performance structures. This can be used to determine if you have been calling +// NDIlib_recv_capture fast enough, or if your processing of data is not keeping up with real-time. The total +// structure will give you the total frame counts received, the dropped structure will tell you how many +// frames have been dropped. Either of these could be NULL. +PROCESSINGNDILIB_API +void NDIlib_recv_get_performance( + NDIlib_recv_instance_t p_instance, + NDIlib_recv_performance_t* p_total, NDIlib_recv_performance_t* p_dropped +); + +// This will allow you to determine the current queue depth for all of the frame sources at any time. +PROCESSINGNDILIB_API +void NDIlib_recv_get_queue(NDIlib_recv_instance_t p_instance, NDIlib_recv_queue_t* p_total); + +// Connection based metadata is data that is sent automatically each time a new connection is received. You +// queue all of these up and they are sent on each connection. To reset them you need to clear them all and +// set them up again. +PROCESSINGNDILIB_API +void NDIlib_recv_clear_connection_metadata(NDIlib_recv_instance_t p_instance); + +// Add a connection metadata string to the list of what is sent on each new connection. If someone is already +// connected then this string will be sent to them immediately. 
+PROCESSINGNDILIB_API +void NDIlib_recv_add_connection_metadata(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + +// Is this receiver currently connected to a source on the other end, or has the source not yet been found or +// is no longer online. This will normally return 0 or 1. +PROCESSINGNDILIB_API +int NDIlib_recv_get_no_connections(NDIlib_recv_instance_t p_instance); + +// Get the URL that might be used for configuration of this input. Note that it might take a second or two +// after the connection for this value to be set. This function will return NULL if there is no web control +// user interface. You should call NDIlib_recv_free_string to free the string that is returned by this +// function. The returned value will be a fully formed URL, for instance "http://10.28.1.192/configuration/". +// To avoid the need to poll this function, you can know when the value of this function might have changed +// when the NDILib_recv_capture* call would return NDIlib_frame_type_status_change. +PROCESSINGNDILIB_API +const char* NDIlib_recv_get_web_control(NDIlib_recv_instance_t p_instance); + +// Retrieve the name of the current NDI source that the NDI receiver is connected to. This will return false +// if there has been no change in the source information since the last call. If p_source_name is NULL, then +// the name of the current NDI source will not be returned. If p_source_name is not NULL, then the name of +// the current source will be returned, however, the returned value can be NULL if the NDI receiver is +// currently not connected to a source. If the returned pointer is not NULL, then you should call +// NDIlib_recv_free_string to free the string that is returned by this function. A timeout value can be given +// to wait until a change occurs. If waiting is not desired, then use a timeout of 0. 
+PROCESSINGNDILIB_API +bool NDIlib_recv_get_source_name(NDIlib_recv_instance_t p_instance, const char** p_source_name, uint32_t timeout_in_ms NDILIB_CPP_DEFAULT_VALUE(0)); diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.RecvAdvertiser.h b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.RecvAdvertiser.h new file mode 100644 index 0000000..8c2794e --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.RecvAdvertiser.h @@ -0,0 +1,79 @@ +#pragma once + +// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review +// the SDK documentation for the description of the full license terms, which are also provided in the file +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any +// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be +// downloaded at http://ndi.video/ +// +//*********************************************************************************************************** +// +// Copyright (C) 2023-2024 Vizrt NDI AB. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files(the "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +// following conditions : +// +// The above copyright notice and this permission notice shall be included in all copies or substantial +// portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO +// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +// THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// +//*********************************************************************************************************** + +// The type instance for a receiver advertiser. +struct NDIlib_recv_advertiser_instance_type; +typedef struct NDIlib_recv_advertiser_instance_type* NDIlib_recv_advertiser_instance_t; + +typedef struct NDIlib_recv_advertiser_create_t { + // The URL address of the NDI Discovery Server to connect to. If NULL, then the default NDI discovery + // server will be used. If there is no discovery server available, then the receiver advertiser will not + // be able to be instantiated and the create function will return NULL. The format of this field is + // expected to be the hostname or IP address, optionally followed by a colon and a port number. If the + // port number is not specified, then port 5959 will be used. For example, + // 127.0.0.1:5959 + // or + // 127.0.0.1 + // or + // hostname:5959 + // This field can also specify multiple addresses separated by commas for redundancy support. + const char* p_url_address; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_recv_advertiser_create_t( + const char* p_url_address = NULL + ); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_recv_advertiser_create_t; + +// Create an instance of the receiver advertiser. This will return NULL if it fails to create the advertiser. +PROCESSINGNDILIB_API +NDIlib_recv_advertiser_instance_t NDIlib_recv_advertiser_create(const NDIlib_recv_advertiser_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL)); + +// Destroy an instance of the receiver advertiser. 
+PROCESSINGNDILIB_API +void NDIlib_recv_advertiser_destroy(NDIlib_recv_advertiser_instance_t p_instance); + +// Add the receiver to the list of receivers that are being advertised. Returns false if the receiver has +// been previously registered. +PROCESSINGNDILIB_API +bool NDIlib_recv_advertiser_add_receiver( + NDIlib_recv_advertiser_instance_t p_instance, + NDIlib_recv_instance_t p_receiver, + bool allow_controlling, bool allow_monitoring, + const char* p_input_group_name NDILIB_CPP_DEFAULT_VALUE(NULL) +); + +// Remove the receiver from the list of receivers that are being advertised. Returns false if the receiver +// was not previously registered. +PROCESSINGNDILIB_API +bool NDIlib_recv_advertiser_del_receiver( + NDIlib_recv_advertiser_instance_t p_instance, + NDIlib_recv_instance_t p_receiver +); diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.RecvListener.h b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.RecvListener.h new file mode 100644 index 0000000..c7b858a --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.RecvListener.h @@ -0,0 +1,141 @@ +#pragma once + +// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review +// the SDK documentation for the description of the full license terms, which are also provided in the file +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any +// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be +// downloaded at http://ndi.video/ +// +//*********************************************************************************************************** +// +// Copyright (C) 2023-2024 Vizrt NDI AB. All rights reserved. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files(the "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +// following conditions : +// +// The above copyright notice and this permission notice shall be included in all copies or substantial +// portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO +// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +// THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// +//*********************************************************************************************************** + +// The type instance for a receiver listener. +struct NDIlib_recv_listener_instance_type; +typedef struct NDIlib_recv_listener_instance_type* NDIlib_recv_listener_instance_t; + +typedef struct NDIlib_recv_listener_create_t { + // The URL address of the NDI Discovery Server to connect to. If NULL, then the default NDI discovery + // server will be used. If there is no discovery server available, then the receiver listener will not + // be able to be instantiated and the create function will return NULL. The format of this field is + // expected to be the hostname or IP address, optionally followed by a colon and a port number. If the + // port number is not specified, then port 5959 will be used. 
For example, + // 127.0.0.1:5959 + // or + // 127.0.0.1 + // or + // hostname:5959 + // If this field is a comma-separated list, then only the first address will be used. + const char* p_url_address; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_recv_listener_create_t( + const char* p_url_address = NULL + ); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_recv_listener_create_t; + +// Create an instance of the receiver listener. This will return NULL if it fails to create the listener. +PROCESSINGNDILIB_API +NDIlib_recv_listener_instance_t NDIlib_recv_listener_create(const NDIlib_recv_listener_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL)); + +// Destroy an instance of the receiver listener. +PROCESSINGNDILIB_API +void NDIlib_recv_listener_destroy(NDIlib_recv_listener_instance_t p_instance); + +// Returns true if the receiver listener is actively connected to the configured NDI Discovery Server. +PROCESSINGNDILIB_API +bool NDIlib_recv_listener_is_connected(NDIlib_recv_listener_instance_t p_instance); + +// Retrieve the URL address of the NDI Discovery Server that the receiver listener is connected to. This can +// return NULL if the instance pointer is invalid. +PROCESSINGNDILIB_API +const char* NDIlib_recv_listener_get_server_url(NDIlib_recv_listener_instance_t p_instance); + +// The types of streams that a receiver can receive from the source it's connected to. +typedef enum NDIlib_receiver_type_e { + NDIlib_receiver_type_none = 0, + NDIlib_receiver_type_metadata = 1, + NDIlib_receiver_type_video = 2, + NDIlib_receiver_type_audio = 3, + + // Make sure this is a 32-bit enumeration. + NDIlib_receiver_type_max = 0x7fffffff +} NDIlib_receiver_type_e; + +// The types of commands that a receiver can process. +typedef enum NDIlib_receiver_command_e { + NDIlib_receiver_command_none = 0, + + // A receiver can be told to connect to a specific source. + NDIlib_receiver_command_connect = 1, + + // Make sure this is a 32-bit enumeration. 
+ NDIlib_receiver_command_max = 0x7fffffff +} NDIlib_receiver_command_e; + +// Describes a receiver that has been discovered. +typedef struct NDIlib_receiver_t { + // The unique identifier for the receiver on the network. + const char* p_uuid; + + // The human-readable name of the receiver. + const char* p_name; + + // The unique identifier for the input group that the receiver belongs to. + const char* p_input_uuid; + + // The human-readable name of the input group that the receiver belongs to. + const char* p_input_name; + + // The known IP address of the receiver. + const char* p_address; + + // An array of streams that the receiver is set to receive. The last entry in this list will be + // NDIlib_receiver_type_none. + NDIlib_receiver_type_e* p_streams; + + // How many elements are in the p_streams array, excluding the NDIlib_receiver_type_none entry. + uint32_t num_streams; + + // An array of commands that the receiver can process. The last entry in this list will be + // NDIlib_receiver_command_none. + NDIlib_receiver_command_e* p_commands; + + // How many elements are in the p_commands array, excluding the NDIlib_receiver_command_none entry. + uint32_t num_commands; + + // Are we currently subscribed for receive events? + bool events_subscribed; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_receiver_t(void); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_receiver_t; + +// Retrieves the current list of advertised receivers. The memory for the returned structure is only valid +// until the next call or when destroy is called. For a given NDIlib_recv_listener_instance_t, do not call +// NDIlib_recv_listener_get_receivers asynchronously. +PROCESSINGNDILIB_API +const NDIlib_receiver_t* NDIlib_recv_listener_get_receivers(NDIlib_recv_listener_instance_t p_instance, uint32_t* p_num_receivers); + +// This will allow you to wait until the number of online receivers has changed. 
+PROCESSINGNDILIB_API +bool NDIlib_recv_listener_wait_for_receivers(NDIlib_recv_listener_instance_t p_instance, uint32_t timeout_in_ms); diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Routing.h b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Routing.h new file mode 100644 index 0000000..0c63200 --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Routing.h @@ -0,0 +1,75 @@ +#pragma once + +// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review +// the SDK documentation for the description of the full license terms, which are also provided in the file +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any +// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be +// downloaded at http://ndi.video/ +// +//*********************************************************************************************************** +// +// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files(the "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +// following conditions : +// +// The above copyright notice and this permission notice shall be included in all copies or substantial +// portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO +// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +// THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// +//*********************************************************************************************************** + +// Structures and type definitions required by NDI routing. +// The reference to an instance of the router. +struct NDIlib_routing_instance_type; +typedef struct NDIlib_routing_instance_type* NDIlib_routing_instance_t; + +// The creation structure that is used when you are creating a sender. +typedef struct NDIlib_routing_create_t +{ + // The name of the NDI source to create. This is a NULL terminated UTF8 string. + const char* p_ndi_name; + + // What groups should this source be part of. + const char* p_groups; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_routing_create_t(const char* p_ndi_name_ = NULL, const char* p_groups_ = NULL); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_routing_create_t; + +// Create an NDI routing source. +PROCESSINGNDILIB_API +NDIlib_routing_instance_t NDIlib_routing_create(const NDIlib_routing_create_t* p_create_settings); + +// Destroy and NDI routing source. +PROCESSINGNDILIB_API +void NDIlib_routing_destroy(NDIlib_routing_instance_t p_instance); + +// Change the routing of this source to another destination. +PROCESSINGNDILIB_API +bool NDIlib_routing_change(NDIlib_routing_instance_t p_instance, const NDIlib_source_t* p_source); + +// Change the routing of this source to another destination. +PROCESSINGNDILIB_API +bool NDIlib_routing_clear(NDIlib_routing_instance_t p_instance); + +// Get the current number of receivers connected to this source. This can be used to avoid even rendering +// when nothing is connected to the video source. 
which can significantly improve the efficiency if you want +// to make a lot of sources available on the network. If you specify a timeout that is not 0 then it will +// wait until there are connections for this amount of time. +PROCESSINGNDILIB_API +int NDIlib_routing_get_no_connections(NDIlib_routing_instance_t p_instance, uint32_t timeout_in_ms); + +// Retrieve the source information for the given router instance. This pointer is valid until +// NDIlib_routing_destroy is called. +PROCESSINGNDILIB_API +const NDIlib_source_t* NDIlib_routing_get_source_name(NDIlib_routing_instance_t p_instance); diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Send.h b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Send.h new file mode 100644 index 0000000..b9ef850 --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.Send.h @@ -0,0 +1,145 @@ +#pragma once + +// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review +// the SDK documentation for the description of the full license terms, which are also provided in the file +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any +// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be +// downloaded at http://ndi.video/ +// +//*********************************************************************************************************** +// +// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files(the "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +// following conditions : +// +// The above copyright notice and this permission notice shall be included in all copies or substantial +// portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO +// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +// THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// +//*********************************************************************************************************** + +// Structures and type definitions required by NDI sending. +// The reference to an instance of the sender. +struct NDIlib_send_instance_type; +typedef struct NDIlib_send_instance_type* NDIlib_send_instance_t; + +// The creation structure that is used when you are creating a sender. +typedef struct NDIlib_send_create_t { + // The name of the NDI source to create. This is a NULL terminated UTF8 string. + const char* p_ndi_name; + + // What groups should this source be part of. NULL means default. + const char* p_groups; + + // Do you want audio and video to "clock" themselves. When they are clocked then by adding video frames, + // they will be rate limited to match the current frame rate that you are submitting at. The same is true + // for audio. 
In general if you are submitting video and audio off a single thread then you should only + // clock one of them (video is probably the better of the two to clock off). If you are submitting audio + // and video of separate threads then having both clocked can be useful. + bool clock_video, clock_audio; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_send_create_t( + const char* p_ndi_name_ = NULL, + const char* p_groups_ = NULL, + bool clock_video_ = true, bool clock_audio_ = true + ); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_send_create_t; + +// Create a new sender instance. This will return NULL if it fails. If you specify leave p_create_settings +// null then the sender will be created with default settings. +PROCESSINGNDILIB_API +NDIlib_send_instance_t NDIlib_send_create(const NDIlib_send_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL)); + +// This will destroy an existing finder instance. +PROCESSINGNDILIB_API +void NDIlib_send_destroy(NDIlib_send_instance_t p_instance); + +// This will add a video frame. +PROCESSINGNDILIB_API +void NDIlib_send_send_video_v2(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); + +// This will add a video frame and will return immediately, having scheduled the frame to be displayed. All +// processing and sending of the video will occur asynchronously. The memory accessed by NDIlib_video_frame_t +// cannot be freed or re-used by the caller until a synchronizing event has occurred. In general the API is +// better able to take advantage of asynchronous processing than you might be able to by simple having a +// separate thread to submit frames. +// +// This call is particularly beneficial when processing BGRA video since it allows any color conversion, +// compression and network sending to all be done on separate threads from your main rendering thread. 
+// +// Synchronizing events are : +// - a call to NDIlib_send_send_video +// - a call to NDIlib_send_send_video_async with another frame to be sent +// - a call to NDIlib_send_send_video with p_video_data=NULL +// - a call to NDIlib_send_destroy +PROCESSINGNDILIB_API +void NDIlib_send_send_video_async_v2(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); + +// This will add an audio frame. +PROCESSINGNDILIB_API +void NDIlib_send_send_audio_v2(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data); + +// This will add an audio frame. +PROCESSINGNDILIB_API +void NDIlib_send_send_audio_v3(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data); + +// This will add a metadata frame. +PROCESSINGNDILIB_API +void NDIlib_send_send_metadata(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + +// This allows you to receive metadata from the other end of the connection. +PROCESSINGNDILIB_API +NDIlib_frame_type_e NDIlib_send_capture( + NDIlib_send_instance_t p_instance, // The instance data. + NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL). + uint32_t timeout_in_ms // The amount of time in milliseconds to wait for data. +); + +// Free the buffers returned by capture for metadata. +PROCESSINGNDILIB_API +void NDIlib_send_free_metadata(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + +// Determine the current tally sate. If you specify a timeout then it will wait until it has changed, +// otherwise it will simply poll it and return the current tally immediately. The return value is whether +// anything has actually change (true) or whether it timed out (false) +PROCESSINGNDILIB_API +bool NDIlib_send_get_tally(NDIlib_send_instance_t p_instance, NDIlib_tally_t* p_tally, uint32_t timeout_in_ms); + +// Get the current number of receivers connected to this source. 
This can be used to avoid even rendering +// when nothing is connected to the video source. which can significantly improve the efficiency if you want +// to make a lot of sources available on the network. If you specify a timeout that is not 0 then it will +// wait until there are connections for this amount of time. +PROCESSINGNDILIB_API +int NDIlib_send_get_no_connections(NDIlib_send_instance_t p_instance, uint32_t timeout_in_ms); + +// Connection based metadata is data that is sent automatically each time a new connection is received. You +// queue all of these up and they are sent on each connection. To reset them you need to clear them all and +// set them up again. +PROCESSINGNDILIB_API +void NDIlib_send_clear_connection_metadata(NDIlib_send_instance_t p_instance); + +// Add a connection metadata string to the list of what is sent on each new connection. If someone is already +// connected then this string will be sent to them immediately. +PROCESSINGNDILIB_API +void NDIlib_send_add_connection_metadata(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + +// This will assign a new fail-over source for this video source. What this means is that if this video +// source was to fail any receivers would automatically switch over to use this source, unless this source +// then came back online. You can specify NULL to clear the source. +PROCESSINGNDILIB_API +void NDIlib_send_set_failover(NDIlib_send_instance_t p_instance, const NDIlib_source_t* p_failover_source); + +// Retrieve the source information for the given sender instance. This pointer is valid until NDIlib_send_destroy is called. 
+PROCESSINGNDILIB_API +const NDIlib_source_t* NDIlib_send_get_source_name(NDIlib_send_instance_t p_instance); diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.compat.h b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.compat.h new file mode 100644 index 0000000..ff44499 --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.compat.h @@ -0,0 +1,39 @@ +#pragma once + +// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review +// the SDK documentation for the description of the full license terms, which are also provided in the file +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any +// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be +// downloaded at http://ndi.video/ +// +//*********************************************************************************************************** +// +// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files(the "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +// following conditions : +// +// The above copyright notice and this permission notice shall be included in all copies or substantial +// portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO +// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +// THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// +//*********************************************************************************************************** + +#ifndef __cplusplus +#include +#endif + +#include + +#ifndef INFINITE +//#define INFINITE INFINITE +static const uint32_t INFINITE = 0xFFFFFFFF; +#endif diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.deprecated.h b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.deprecated.h new file mode 100644 index 0000000..4ad2137 --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.deprecated.h @@ -0,0 +1,230 @@ +#pragma once + +// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review +// the SDK documentation for the description of the full license terms, which are also provided in the file +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any +// part of this SDK is acknowledgment that you agree to the SDK license terms. 
The full NDI SDK may be +// downloaded at http://ndi.video/ +// +//*********************************************************************************************************** +// +// +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files(the "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +// following conditions : +// +// The above copyright notice and this permission notice shall be included in all copies or substantial +// portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO +// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +// THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// +//*********************************************************************************************************** + +// This describes a video frame +PROCESSINGNDILIB_DEPRECATED +typedef struct NDIlib_video_frame_t { + // The resolution of this frame. + int xres, yres; + + // What FourCC this is with. This can be two values. + NDIlib_FourCC_video_type_e FourCC; + + // What is the frame rate of this frame. + // For instance NTSC is 30000,1001 = 30000/1001 = 29.97 fps + int frame_rate_N, frame_rate_D; + + // What is the picture aspect ratio of this frame. + // For instance 16.0/9.0 = 1.778 is 16:9 video. If this is zero, then square pixels are assumed (xres/yres). 
+ float picture_aspect_ratio; + + // Is this a fielded frame, or is it progressive. + NDIlib_frame_format_type_e frame_format_type; + + // The timecode of this frame in 100-nanosecond intervals. + int64_t timecode; + + // The video data itself. + uint8_t* p_data; + + // The inter-line stride of the video data, in bytes. + int line_stride_in_bytes; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_video_frame_t( + int xres_ = 0, int yres_ = 0, + NDIlib_FourCC_video_type_e FourCC_ = NDIlib_FourCC_type_UYVY, + int frame_rate_N_ = 30000, int frame_rate_D_ = 1001, + float picture_aspect_ratio_ = 0.0f, + NDIlib_frame_format_type_e frame_format_type_ = NDIlib_frame_format_type_progressive, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + uint8_t* p_data_ = NULL, int line_stride_in_bytes_ = 0 + ); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_video_frame_t; + +// This describes an audio frame +PROCESSINGNDILIB_DEPRECATED +typedef struct NDIlib_audio_frame_t { + // The sample-rate of this buffer. + int sample_rate; + + // The number of audio channels. + int no_channels; + + // The number of audio samples per channel. + int no_samples; + + // The timecode of this frame in 100-nanosecond intervals. + int64_t timecode; + + // The audio data. + float* p_data; + + // The inter channel stride of the audio channels, in bytes. + int channel_stride_in_bytes; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_audio_frame_t( + int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + float* p_data_ = NULL, int channel_stride_in_bytes_ = 0 + ); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_audio_frame_t; + +// For legacy reasons I called this the wrong thing. For backwards compatibility. 
+PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +NDIlib_find_instance_t NDIlib_find_create2(const NDIlib_find_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL)); + +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +NDIlib_find_instance_t NDIlib_find_create(const NDIlib_find_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL)); + +// DEPRECATED. This function is basically exactly the following and was confusing to use. +// if ((!timeout_in_ms) || (NDIlib_find_wait_for_sources(timeout_in_ms))) +// return NDIlib_find_get_current_sources(p_instance, p_no_sources); +// return NULL; +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +const NDIlib_source_t* NDIlib_find_get_sources(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources, uint32_t timeout_in_ms); + +// The creation structure that is used when you are creating a receiver. +PROCESSINGNDILIB_DEPRECATED +typedef struct NDIlib_recv_create_t { + // The source that you wish to connect to. + NDIlib_source_t source_to_connect_to; + + // Your preference of color space. See above. + NDIlib_recv_color_format_e color_format; + + // The bandwidth setting that you wish to use for this video source. Bandwidth + // controlled by changing both the compression level and the resolution of the source. + // A good use for low bandwidth is working on WIFI connections. + NDIlib_recv_bandwidth_e bandwidth; + + // When this flag is FALSE, all video that you receive will be progressive. For sources that provide + // fields, this is de-interlaced on the receiving side (because we cannot change what the up-stream + // source was actually rendering. This is provided as a convenience to down-stream sources that do not + // wish to understand fielded video. There is almost no performance impact of using this function. 
+ bool allow_video_fields; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_recv_create_t( + const NDIlib_source_t source_to_connect_to_ = NDIlib_source_t(), + NDIlib_recv_color_format_e color_format_ = NDIlib_recv_color_format_UYVY_BGRA, + NDIlib_recv_bandwidth_e bandwidth_ = NDIlib_recv_bandwidth_highest, + bool allow_video_fields_ = true + ); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_recv_create_t; + +// This function is deprecated, please use NDIlib_recv_create_v3 if you can. Using this function will +// continue to work, and be supported for backwards compatibility. If the input parameter is NULL it will be +// created with default settings and an automatically determined receiver name. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +NDIlib_recv_instance_t NDIlib_recv_create_v2(const NDIlib_recv_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL)); + +// For legacy reasons I called this the wrong thing. For backwards compatibility. If the input parameter is +// NULL it will be created with default settings and an automatically determined receiver name. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +NDIlib_recv_instance_t NDIlib_recv_create2(const NDIlib_recv_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL)); + +// This function is deprecated, please use NDIlib_recv_create_v3 if you can. Using this function will +// continue to work, and be supported for backwards compatibility. This version sets bandwidth to highest and +// allow fields to true. If the input parameter is NULL it will be created with default settings and an +// automatically determined receiver name. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +NDIlib_recv_instance_t NDIlib_recv_create(const NDIlib_recv_create_t* p_create_settings); + +// This will allow you to receive video, audio and metadata frames. Any of the buffers can be NULL, in which +// case data of that type will not be captured in this call. 
This call can be called simultaneously on +// separate threads, so it is entirely possible to receive audio, video, metadata all on separate threads. +// This function will return NDIlib_frame_type_none if no data is received within the specified timeout and +// NDIlib_frame_type_error if the connection is lost. Buffers captured with this must be freed with the +// appropriate free function below. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +NDIlib_frame_type_e NDIlib_recv_capture( + NDIlib_recv_instance_t p_instance, // The library instance. + NDIlib_video_frame_t* p_video_data, // The video data received (can be NULL). + NDIlib_audio_frame_t* p_audio_data, // The audio data received (can be NULL). + NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL). + uint32_t timeout_in_ms // The amount of time in milliseconds to wait for data. +); + +// Free the buffers returned by capture for video. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +void NDIlib_recv_free_video(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); + +// Free the buffers returned by capture for audio. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +void NDIlib_recv_free_audio(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data); + +// This will add a video frame. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +void NDIlib_send_send_video(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); + +// This will add a video frame and will return immediately, having scheduled the frame to be displayed. All +// processing and sending of the video will occur asynchronously. The memory accessed by NDIlib_video_frame_t +// cannot be freed or re-used by the caller until a synchronizing event has occurred. In general the API is +// better able to take advantage of asynchronous processing than you might be able to by simple having a +// separate thread to submit frames. 
+// +// This call is particularly beneficial when processing BGRA video since it allows any color conversion, +// compression and network sending to all be done on separate threads from your main rendering thread. +// +// Synchronizing events are : +// - a call to NDIlib_send_send_video +// - a call to NDIlib_send_send_video_async with another frame to be sent +// - a call to NDIlib_send_send_video with p_video_data=NULL +// - a call to NDIlib_send_destroy +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +void NDIlib_send_send_video_async(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); + +// This will add an audio frame +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +void NDIlib_send_send_audio(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data); + +// Convert an planar floating point audio buffer into a interleaved short audio buffer. +// IMPORTANT : You must allocate the space for the samples in the destination to allow for your own memory management. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +void NDIlib_util_audio_to_interleaved_16s(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst); + +// Convert an interleaved short audio buffer audio buffer into a planar floating point one. +// IMPORTANT : You must allocate the space for the samples in the destination to allow for your own memory management. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +void NDIlib_util_audio_from_interleaved_16s(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_t* p_dst); + +// Convert an planar floating point audio buffer into a interleaved floating point audio buffer. +// IMPORTANT : You must allocate the space for the samples in the destination to allow for your own memory management. 
+PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +void NDIlib_util_audio_to_interleaved_32f(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst); + +// Convert an interleaved floating point audio buffer into a planar floating point one. +// IMPORTANT : You must allocate the space for the samples in the destination to allow for your own memory management. +PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED +void NDIlib_util_audio_from_interleaved_32f(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_t* p_dst); diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.structs.h b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.structs.h new file mode 100644 index 0000000..5a96053 --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.structs.h @@ -0,0 +1,387 @@ +#pragma once + +// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review +// the SDK documentation for the description of the full license terms, which are also provided in the file +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any +// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be +// downloaded at http://ndi.video/ +// +//*********************************************************************************************************** +// +// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files(the "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +// following conditions : +// +// The above copyright notice and this permission notice shall be included in all copies or substantial +// portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO +// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +// THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// +//*********************************************************************************************************** + +#ifndef NDI_LIB_FOURCC +#define NDI_LIB_FOURCC(ch0, ch1, ch2, ch3) \ + ((uint32_t)(uint8_t)(ch0) | ((uint32_t)(uint8_t)(ch1) << 8) | ((uint32_t)(uint8_t)(ch2) << 16) | ((uint32_t)(uint8_t)(ch3) << 24)) +#endif + +// An enumeration to specify the type of a packet returned by the functions. +typedef enum NDIlib_frame_type_e { + // What frame type is this? + NDIlib_frame_type_none = 0, + NDIlib_frame_type_video = 1, + NDIlib_frame_type_audio = 2, + NDIlib_frame_type_metadata = 3, + NDIlib_frame_type_error = 4, + + // This indicates that the settings on this input have changed. This value will be returned from one of + // the NDIlib_recv_capture functions when the device is known to have new settings, for instance the web + // URL has changed or the device is now known to be a PTZ camera. 
+ NDIlib_frame_type_status_change = 100, + + // This indicates that the source has changed. This value will be returned from one of the + // NDIlib_recv_capture functions when the source that the receiver is connected to has changed. + NDIlib_frame_type_source_change = 101, + + // Make sure this is a 32-bit enumeration. + NDIlib_frame_type_max = 0x7fffffff +} NDIlib_frame_type_e; + +// FourCC values for video frames. +typedef enum NDIlib_FourCC_video_type_e { + // YCbCr color space using 4:2:2. + NDIlib_FourCC_video_type_UYVY = NDI_LIB_FOURCC('U', 'Y', 'V', 'Y'), + NDIlib_FourCC_type_UYVY = NDIlib_FourCC_video_type_UYVY, + + // YCbCr + Alpha color space, using 4:2:2:4. + // In memory there are two separate planes. The first is a regular + // UYVY 4:2:2 buffer. Immediately following this in memory is a + // alpha channel buffer. + NDIlib_FourCC_video_type_UYVA = NDI_LIB_FOURCC('U', 'Y', 'V', 'A'), + NDIlib_FourCC_type_UYVA = NDIlib_FourCC_video_type_UYVA, + + // YCbCr color space using 4:2:2 in 16bpp. + // In memory this is a semi-planar format. This is identical to a 16bpp version of the NV16 format. + // The first buffer is a 16bpp luminance buffer. + // Immediately after this is an interleaved buffer of 16bpp Cb, Cr pairs. + NDIlib_FourCC_video_type_P216 = NDI_LIB_FOURCC('P', '2', '1', '6'), + NDIlib_FourCC_type_P216 = NDIlib_FourCC_video_type_P216, + + // YCbCr color space with an alpha channel, using 4:2:2:4. + // In memory this is a semi-planar format. + // The first buffer is a 16bpp luminance buffer. + // Immediately after this is an interleaved buffer of 16bpp Cb, Cr pairs. + // Immediately after is a single buffer of 16bpp alpha channel. + NDIlib_FourCC_video_type_PA16 = NDI_LIB_FOURCC('P', 'A', '1', '6'), + NDIlib_FourCC_type_PA16 = NDIlib_FourCC_video_type_PA16, + + // Planar 8bit 4:2:0 video format. + // The first buffer is an 8bpp luminance buffer. + // Immediately following this is a 8bpp Cr buffer. + // Immediately following this is a 8bpp Cb buffer. 
+ NDIlib_FourCC_video_type_YV12 = NDI_LIB_FOURCC('Y', 'V', '1', '2'), + NDIlib_FourCC_type_YV12 = NDIlib_FourCC_video_type_YV12, + + // The first buffer is an 8bpp luminance buffer. + // Immediately following this is a 8bpp Cb buffer. + // Immediately following this is a 8bpp Cr buffer. + NDIlib_FourCC_video_type_I420 = NDI_LIB_FOURCC('I', '4', '2', '0'), + NDIlib_FourCC_type_I420 = NDIlib_FourCC_video_type_I420, + + // Planar 8bit 4:2:0 video format. + // The first buffer is an 8bpp luminance buffer. + // Immediately following this is in interleaved buffer of 8bpp Cb, Cr pairs + NDIlib_FourCC_video_type_NV12 = NDI_LIB_FOURCC('N', 'V', '1', '2'), + NDIlib_FourCC_type_NV12 = NDIlib_FourCC_video_type_NV12, + + // Planar 8bit, 4:4:4:4 video format. + // Color ordering in memory is blue, green, red, alpha + NDIlib_FourCC_video_type_BGRA = NDI_LIB_FOURCC('B', 'G', 'R', 'A'), + NDIlib_FourCC_type_BGRA = NDIlib_FourCC_video_type_BGRA, + + // Planar 8bit, 4:4:4 video format, packed into 32bit pixels. + // Color ordering in memory is blue, green, red, 255 + NDIlib_FourCC_video_type_BGRX = NDI_LIB_FOURCC('B', 'G', 'R', 'X'), + NDIlib_FourCC_type_BGRX = NDIlib_FourCC_video_type_BGRX, + + // Planar 8bit, 4:4:4:4 video format. + // Color ordering in memory is red, green, blue, alpha + NDIlib_FourCC_video_type_RGBA = NDI_LIB_FOURCC('R', 'G', 'B', 'A'), + NDIlib_FourCC_type_RGBA = NDIlib_FourCC_video_type_RGBA, + + // Planar 8bit, 4:4:4 video format, packed into 32bit pixels. + // Color ordering in memory is red, green, blue, 255. + NDIlib_FourCC_video_type_RGBX = NDI_LIB_FOURCC('R', 'G', 'B', 'X'), + NDIlib_FourCC_type_RGBX = NDIlib_FourCC_video_type_RGBX, + + // Make sure this is a 32-bit enumeration. + NDIlib_FourCC_video_type_max = 0x7fffffff +} NDIlib_FourCC_video_type_e; + +// Really for backwards compatibility. +PROCESSINGNDILIB_DEPRECATED +typedef NDIlib_FourCC_video_type_e NDIlib_FourCC_type_e; + +// FourCC values for audio frames. 
+typedef enum NDIlib_FourCC_audio_type_e { + // Planar 32-bit floating point. Be sure to specify the channel stride. + NDIlib_FourCC_audio_type_FLTP = NDI_LIB_FOURCC('F', 'L', 'T', 'p'), + NDIlib_FourCC_type_FLTP = NDIlib_FourCC_audio_type_FLTP, + + // Make sure this is a 32-bit enumeration. + NDIlib_FourCC_audio_type_max = 0x7fffffff +} NDIlib_FourCC_audio_type_e; + +typedef enum NDIlib_frame_format_type_e { + // A progressive frame. + NDIlib_frame_format_type_progressive = 1, + + // A fielded frame with the field 0 being on the even lines and field 1 being + // on the odd lines. + NDIlib_frame_format_type_interleaved = 0, + + // Individual fields. + NDIlib_frame_format_type_field_0 = 2, + NDIlib_frame_format_type_field_1 = 3, + + // Make sure this is a 32-bit enumeration. + NDIlib_frame_format_type_max = 0x7fffffff +} NDIlib_frame_format_type_e; + +// When you specify this as a timecode, the timecode will be synthesized for you. This may be used when +// sending video, audio or metadata. If you never specify a timecode at all, asking for each to be +// synthesized, then this will use the current system time as the starting timecode and then generate +// synthetic ones, keeping your streams exactly in sync as long as the frames you are sending do not deviate +// from the system time in any meaningful way. In practice this means that if you never specify timecodes +// that they will always be generated for you correctly. Timecodes coming from different senders on the same +// machine will always be in sync with each other when working in this way. If you have NTP installed on your +// local network, then streams can be synchronized between multiple machines with very high precision. +// +// If you specify a timecode at a particular frame (audio or video), then ask for all subsequent ones to be +// synthesized. 
The subsequent ones will be generated to continue this sequence maintaining the correct +// relationship both the between streams and samples generated, avoiding them deviating in time from the +// timecode that you specified in any meaningful way. +// +// If you specify timecodes on one stream (e.g. video) and ask for the other stream (audio) to be +// synthesized, the correct timecodes will be generated for the other stream and will be synthesize exactly +// to match (they are not quantized inter-streams) the correct sample positions. +// +// When you send metadata messages and ask for the timecode to be synthesized, then it is chosen to match the +// closest audio or video frame timecode so that it looks close to something you might want ... unless there +// is no sample that looks close in which a timecode is synthesized from the last ones known and the time +// since it was sent. +static const int64_t NDIlib_send_timecode_synthesize = INT64_MAX; + +// If the time-stamp is not available (i.e. a version of a sender before v2.5). +static const int64_t NDIlib_recv_timestamp_undefined = INT64_MAX; + +// This is a descriptor of a NDI source available on the network. +typedef struct NDIlib_source_t { + // A UTF8 string that provides a user readable name for this source. This can be used for serialization, + // etc... and comprises the machine name and the source name on that machine. In the form, + // MACHINE_NAME (NDI_SOURCE_NAME) + // If you specify this parameter either as NULL, or an EMPTY string then the specific IP address and port + // number from below is used. + const char* p_ndi_name; + + // A UTF8 string that provides the actual network address and any parameters. This is not meant to be + // application readable and might well change in the future. This can be NULL if you do not know it and + // the API internally will instantiate a finder that is used to discover it even if it is not yet + // available on the network. 
+ union { // The current way of addressing the value. + const char* p_url_address; + + // We used to use an IP address before we used the more general URL notification this is now + // depreciated but maintained for compatibility. + PROCESSINGNDILIB_DEPRECATED const char* p_ip_address; + }; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_source_t(const char* p_ndi_name_ = NULL, const char* p_url_address_ = NULL); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_source_t; + +// This describes a video frame. +typedef struct NDIlib_video_frame_v2_t { + // The resolution of this frame. + int xres, yres; + + // What FourCC describing the type of data for this frame. + NDIlib_FourCC_video_type_e FourCC; + + // What is the frame rate of this frame. + // For instance NTSC is 30000,1001 = 30000/1001 = 29.97 fps. + int frame_rate_N, frame_rate_D; + + // What is the picture aspect ratio of this frame. + // For instance 16.0/9.0 = 1.778 is 16:9 video + // 0 means square pixels. + float picture_aspect_ratio; + + // Is this a fielded frame, or is it progressive. + NDIlib_frame_format_type_e frame_format_type; + + // The timecode of this frame in 100-nanosecond intervals. + int64_t timecode; + + // The video data itself. + uint8_t* p_data; + + union { // If the FourCC is not a compressed type, then this will be the inter-line stride of the video data + // in bytes. If the stride is 0, then it will default to sizeof(one pixel)*xres. + int line_stride_in_bytes; + + // If the FourCC is a compressed type, then this will be the size of the p_data buffer in bytes. + int data_size_in_bytes; + }; + + // Per frame metadata for this frame. This is a NULL terminated UTF8 string that should be in XML format. + // If you do not want any metadata then you may specify NULL here. 
+ const char* p_metadata; // Present in >= v2.5 + + // This is only valid when receiving a frame and is specified as a 100-nanosecond time that was the exact + // moment that the frame was submitted by the sending side and is generated by the SDK. If this value is + // NDIlib_recv_timestamp_undefined then this value is not available and is NDIlib_recv_timestamp_undefined. + int64_t timestamp; // Present in >= v2.5 + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_video_frame_v2_t( + int xres_ = 0, int yres_ = 0, + NDIlib_FourCC_video_type_e FourCC_ = NDIlib_FourCC_video_type_UYVY, + int frame_rate_N_ = 30000, int frame_rate_D_ = 1001, + float picture_aspect_ratio_ = 0.0f, + NDIlib_frame_format_type_e frame_format_type_ = NDIlib_frame_format_type_progressive, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + uint8_t* p_data_ = NULL, int line_stride_in_bytes_ = 0, + const char* p_metadata_ = NULL, + int64_t timestamp_ = 0 + ); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_video_frame_v2_t; + +// This describes an audio frame. +typedef struct NDIlib_audio_frame_v2_t { + // The sample-rate of this buffer. + int sample_rate; + + // The number of audio channels. + int no_channels; + + // The number of audio samples per channel. + int no_samples; + + // The timecode of this frame in 100-nanosecond intervals. + int64_t timecode; + + // The audio data. + float* p_data; + + // The inter channel stride of the audio channels, in bytes. + int channel_stride_in_bytes; + + // Per frame metadata for this frame. This is a NULL terminated UTF8 string that should be in XML format. + // If you do not want any metadata then you may specify NULL here. + const char* p_metadata; // Present in >= v2.5 + + // This is only valid when receiving a frame and is specified as a 100-nanosecond time that was the exact + // moment that the frame was submitted by the sending side and is generated by the SDK. 
If this value is + // NDIlib_recv_timestamp_undefined then this value is not available and is NDIlib_recv_timestamp_undefined. + int64_t timestamp; // Present in >= v2.5 + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_audio_frame_v2_t( + int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + float* p_data_ = NULL, int channel_stride_in_bytes_ = 0, + const char* p_metadata_ = NULL, + int64_t timestamp_ = 0 + ); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_audio_frame_v2_t; + +// This describes an audio frame. +typedef struct NDIlib_audio_frame_v3_t { + // The sample-rate of this buffer. + int sample_rate; + + // The number of audio channels. + int no_channels; + + // The number of audio samples per channel. + int no_samples; + + // The timecode of this frame in 100-nanosecond intervals. + int64_t timecode; + + // What FourCC describing the type of data for this frame. + NDIlib_FourCC_audio_type_e FourCC; + + // The audio data. + uint8_t* p_data; + + union { + // If the FourCC is not a compressed type and the audio format is planar, then this will be the + // stride in bytes for a single channel. + int channel_stride_in_bytes; + + // If the FourCC is a compressed type, then this will be the size of the p_data buffer in bytes. + int data_size_in_bytes; + }; + + // Per frame metadata for this frame. This is a NULL terminated UTF8 string that should be in XML format. + // If you do not want any metadata then you may specify NULL here. + const char* p_metadata; + + // This is only valid when receiving a frame and is specified as a 100-nanosecond time that was the exact + // moment that the frame was submitted by the sending side and is generated by the SDK. If this value is + // NDIlib_recv_timestamp_undefined then this value is not available and is NDIlib_recv_timestamp_undefined. 
+ int64_t timestamp; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_audio_frame_v3_t( + int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + NDIlib_FourCC_audio_type_e FourCC_ = NDIlib_FourCC_audio_type_FLTP, + uint8_t* p_data_ = NULL, int channel_stride_in_bytes_ = 0, + const char* p_metadata_ = NULL, + int64_t timestamp_ = 0 + ); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_audio_frame_v3_t; + +// The data description for metadata. +typedef struct NDIlib_metadata_frame_t { + // The length of the string in UTF8 characters. This includes the NULL terminating character. If this is + // 0, then the length is assume to be the length of a NULL terminated string. + int length; + + // The timecode of this frame in 100-nanosecond intervals. + int64_t timecode; + + // The metadata as a UTF8 XML string. This is a NULL terminated string. + char* p_data; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_metadata_frame_t(int length_ = 0, int64_t timecode_ = NDIlib_send_timecode_synthesize, char* p_data_ = NULL); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_metadata_frame_t; + +// Tally structures +typedef struct NDIlib_tally_t { + // Is this currently on program output. + bool on_program; + + // Is this currently on preview output. + bool on_preview; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_tally_t(bool on_program_ = false, bool on_preview_ = false); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_tally_t; diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.utilities.h b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.utilities.h new file mode 100644 index 0000000..842461e --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/Includes/Processing.NDI.utilities.h @@ -0,0 +1,258 @@ +#pragma once + +// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. 
Please review +// the SDK documentation for the description of the full license terms, which are also provided in the file +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any +// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be +// downloaded at http://ndi.video/ +// +//*********************************************************************************************************** +// +// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files(the "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the +// following conditions : +// +// The above copyright notice and this permission notice shall be included in all copies or substantial +// portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO +// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +// THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// +//*********************************************************************************************************** + +// Because many applications like submitting 16-bit interleaved audio, these functions will convert in and +// out of that format. 
It is important to note that the NDI SDK does define fully audio levels, something +// that most applications that you use do not. Specifically, the floating-point range, -1.0 to +1.0, is +// defined as a professional audio reference level of +4 dBU. If we take 16-bit audio and scale it into this +// range it is almost always correct for sending and will cause no problems. For receiving however it is not +// at all uncommon that the user has audio that exceeds reference level and in this case it is likely that +// audio exceeds the reference level and so if you are not careful you will end up having audio clipping when +// you use the 16-bit range. + +// This describes an audio frame. +typedef struct NDIlib_audio_frame_interleaved_16s_t { + // The sample-rate of this buffer. + int sample_rate; + + // The number of audio channels. + int no_channels; + + // The number of audio samples per channel. + int no_samples; + + // The timecode of this frame in 100-nanosecond intervals. + int64_t timecode; + + // The audio reference level in dB. This specifies how many dB above the reference level (+4 dBU) is the + // full range of 16-bit audio. If you do not understand this and want to just use numbers: + // - If you are sending audio, specify +0 dB. Most common applications produce audio at reference level. + // - If receiving audio, specify +20 dB. This means that the full 16-bit range corresponds to + // professional level audio with 20 dB of headroom. Note that if you are writing it into a file it + // might sound soft because you have 20 dB of headroom before clipping. + int reference_level; + + // The audio data, interleaved 16-bit samples. 
+ int16_t* p_data; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_audio_frame_interleaved_16s_t( + int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + int reference_level_ = 0, + int16_t* p_data_ = NULL + ); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_audio_frame_interleaved_16s_t; + +// This describes an audio frame. +typedef struct NDIlib_audio_frame_interleaved_32s_t { + // The sample-rate of this buffer. + int sample_rate; + + // The number of audio channels. + int no_channels; + + // The number of audio samples per channel. + int no_samples; + + // The timecode of this frame in 100-nanosecond intervals. + int64_t timecode; + + // The audio reference level in dB. This specifies how many dB above the reference level (+4 dBU) is the + // full range of 32-bit audio. If you do not understand this and want to just use numbers: + // - If you are sending audio, specify +0 dB. Most common applications produce audio at reference level. + // - If receiving audio, specify +20 dB. This means that the full 32-bit range corresponds to + // professional level audio with 20 dB of headroom. Note that if you are writing it into a file it + // might sound soft because you have 20 dB of headroom before clipping. + int reference_level; + + // The audio data, interleaved 32-bit samples. + int32_t* p_data; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_audio_frame_interleaved_32s_t( + int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + int reference_level_ = 0, + int32_t* p_data_ = NULL + ); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_audio_frame_interleaved_32s_t; + +// This describes an audio frame. +typedef struct NDIlib_audio_frame_interleaved_32f_t { + // The sample-rate of this buffer. + int sample_rate; + + // The number of audio channels. + int no_channels; + + // The number of audio samples per channel. 
+ int no_samples; + + // The timecode of this frame in 100-nanosecond intervals. + int64_t timecode; + + // The audio data, interleaved 32-bit floating-point samples. + float* p_data; + +#if NDILIB_CPP_DEFAULT_CONSTRUCTORS + NDIlib_audio_frame_interleaved_32f_t( + int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + float* p_data_ = NULL + ); +#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS +} NDIlib_audio_frame_interleaved_32f_t; + +// This will add an audio frame in interleaved 16-bit. +PROCESSINGNDILIB_API +void NDIlib_util_send_send_audio_interleaved_16s( + NDIlib_send_instance_t p_instance, + const NDIlib_audio_frame_interleaved_16s_t* p_audio_data +); + +// This will add an audio frame in interleaved 32-bit. +PROCESSINGNDILIB_API +void NDIlib_util_send_send_audio_interleaved_32s( + NDIlib_send_instance_t p_instance, + const NDIlib_audio_frame_interleaved_32s_t* p_audio_data +); + +// This will add an audio frame in interleaved floating point. +PROCESSINGNDILIB_API +void NDIlib_util_send_send_audio_interleaved_32f( + NDIlib_send_instance_t p_instance, + const NDIlib_audio_frame_interleaved_32f_t* p_audio_data +); + +// Convert to interleaved 16-bit. +PROCESSINGNDILIB_API +void NDIlib_util_audio_to_interleaved_16s_v2( + const NDIlib_audio_frame_v2_t* p_src, + NDIlib_audio_frame_interleaved_16s_t* p_dst +); + +// Convert to interleaved 16-bit. The FourCC of the source audio frame must be NDIlib_FourCC_audio_type_FLTP. +// Returns true if the conversion was successful. +PROCESSINGNDILIB_API +bool NDIlib_util_audio_to_interleaved_16s_v3( + const NDIlib_audio_frame_v3_t* p_src, + NDIlib_audio_frame_interleaved_16s_t* p_dst +); + +// Convert from interleaved 16-bit. +PROCESSINGNDILIB_API +void NDIlib_util_audio_from_interleaved_16s_v2( + const NDIlib_audio_frame_interleaved_16s_t* p_src, + NDIlib_audio_frame_v2_t* p_dst +); + +// Convert from interleaved 16-bit. 
The FourCC of the destination audio frame must be +// NDIlib_FourCC_audio_type_FLTP and its p_data allocated accordingly. +// Returns true if the conversion was successful. +PROCESSINGNDILIB_API +bool NDIlib_util_audio_from_interleaved_16s_v3( + const NDIlib_audio_frame_interleaved_16s_t* p_src, + NDIlib_audio_frame_v3_t* p_dst +); + +// Convert to interleaved 32-bit. +PROCESSINGNDILIB_API +void NDIlib_util_audio_to_interleaved_32s_v2( + const NDIlib_audio_frame_v2_t* p_src, + NDIlib_audio_frame_interleaved_32s_t* p_dst +); + +// Convert to interleaved 32-bit. The FourCC of the source audio frame must be NDIlib_FourCC_audio_type_FLTP. +// Returns true if the conversion was successful. +PROCESSINGNDILIB_API +bool NDIlib_util_audio_to_interleaved_32s_v3( + const NDIlib_audio_frame_v3_t* p_src, + NDIlib_audio_frame_interleaved_32s_t* p_dst +); + +// Convert from interleaved 32-bit. +PROCESSINGNDILIB_API +void NDIlib_util_audio_from_interleaved_32s_v2( + const NDIlib_audio_frame_interleaved_32s_t* p_src, + NDIlib_audio_frame_v2_t* p_dst +); + +// Convert from interleaved 32-bit. The FourCC of the destination audio frame must be +// NDIlib_FourCC_audio_type_FLTP and its p_data allocated accordingly. +// Returns true if the conversion was successful. +PROCESSINGNDILIB_API +bool NDIlib_util_audio_from_interleaved_32s_v3( + const NDIlib_audio_frame_interleaved_32s_t* p_src, + NDIlib_audio_frame_v3_t* p_dst +); + +// Convert to interleaved floating point. +PROCESSINGNDILIB_API +void NDIlib_util_audio_to_interleaved_32f_v2( + const NDIlib_audio_frame_v2_t* p_src, + NDIlib_audio_frame_interleaved_32f_t* p_dst +); + +// Convert to interleaved floating point. The FourCC of the source audio frame must be +// NDIlib_FourCC_audio_type_FLTP. Returns true if the conversion was successful. 
+PROCESSINGNDILIB_API +bool NDIlib_util_audio_to_interleaved_32f_v3( + const NDIlib_audio_frame_v3_t* p_src, + NDIlib_audio_frame_interleaved_32f_t* p_dst +); + +// Convert from interleaved floating point. +PROCESSINGNDILIB_API +void NDIlib_util_audio_from_interleaved_32f_v2( + const NDIlib_audio_frame_interleaved_32f_t* p_src, + NDIlib_audio_frame_v2_t* p_dst +); + +// Convert from interleaved floating point. The FourCC of the destination audio frame must be +// NDIlib_FourCC_audio_type_FLTP and its p_data allocated accordingly. +// Returns true if the conversion was successful. +PROCESSINGNDILIB_API +bool NDIlib_util_audio_from_interleaved_32f_v3( + const NDIlib_audio_frame_interleaved_32f_t* p_src, + NDIlib_audio_frame_v3_t* p_dst +); + +// This is a helper function that you may use to convert from 10-bit packed UYVY into 16-bit semi-planar. The +// FourCC on the source is ignored in this function since we do not define a V210 format in NDI. You must +// make sure that there is memory and a stride allocated in p_dst. +PROCESSINGNDILIB_API +void NDIlib_util_V210_to_P216(const NDIlib_video_frame_v2_t* p_src_v210, NDIlib_video_frame_v2_t* p_dst_p216); + +// This converts from 16-bit semi-planar to 10-bit. You must make sure that there is memory and a stride +// allocated in p_dst. +PROCESSINGNDILIB_API +void NDIlib_util_P216_to_V210(const NDIlib_video_frame_v2_t* p_src_p216, NDIlib_video_frame_v2_t* p_dst_v210); diff --git a/Plugins/NDIIO/Source/ThirdParty/NDI/NDI.Build.cs b/Plugins/NDIIO/Source/ThirdParty/NDI/NDI.Build.cs new file mode 100644 index 0000000..2cabb11 --- /dev/null +++ b/Plugins/NDIIO/Source/ThirdParty/NDI/NDI.Build.cs @@ -0,0 +1,45 @@ +/* + Copyright (C) 2024 Vizrt NDI AB. All rights reserved. + + This file and its use within a Product is bound by the terms of NDI SDK license that was provided + as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation. 
+*/ + +using System; +using System.IO; + +using UnrealBuildTool; + +public class NDI : ModuleRules +{ + public NDI(ReadOnlyTargetRules Target) : base(Target) + { + Type = ModuleType.External; + + if (Target.Platform == UnrealTargetPlatform.Win64) + { + PublicIncludePaths.Add(Path.Combine(ModuleDirectory, "Includes")); + + PublicAdditionalLibraries.Add(Path.Combine(ModuleDirectory, "Libraries/Win64", "Processing.NDI.Lib.x64.lib")); + + // Delay-load the DLL from the runtime directory (determined at runtime) + PublicDelayLoadDLLs.Add("Processing.NDI.Lib.x64.dll"); + + // Ensure that we define our c++ define + PublicDefinitions.Add("NDI_SDK_ENABLED"); + } + else if ((Target.Platform == UnrealTargetPlatform.Linux) + || ((Target.Version.MajorVersion == 4) && (Target.Platform.ToString() == "LinuxAArch64")) + || ((Target.Version.MajorVersion == 5) && (Target.Platform.ToString() == "LinuxArm64")) + ) + { + PublicIncludePaths.Add(Path.Combine(ModuleDirectory, "Includes")); + + PublicAdditionalLibraries.Add(Path.Combine(ModuleDirectory, "Libraries/Linux", "libndi.so")); + RuntimeDependencies.Add(Path.Combine("$(BinaryOutputDir)", "libndi.so.6"), Path.Combine(ModuleDirectory, "Libraries/Linux", "libndi.so.6")); + + // Ensure that we define our c++ define + PublicDefinitions.Add("NDI_SDK_ENABLED"); + } + } +} diff --git a/README.md b/README.md index 329b904..b28d2e1 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,3 @@ -# VPDemo +# VPDevtemplate -adfgadfc \ No newline at end of file +This is the repo for the development of the Virtual Production project \ No newline at end of file diff --git a/VPTemplate.uproject b/VPTemplate.uproject new file mode 100644 index 0000000..b04ac51 --- /dev/null +++ b/VPTemplate.uproject @@ -0,0 +1,239 @@ +{ + "FileVersion": 3, + "EngineAssociation": "5.5", + "Category": "", + "Description": "", + "Plugins": [ + { + "Name": "OpenXR", + "Enabled": true, + "SupportedTargetPlatforms": [ + "Win64", + "Linux", + "Android" + ] + }, + { + "Name": 
"VirtualScouting", + "Enabled": true, + "SupportedTargetPlatforms": [ + "Win64" + ] + }, + { + "Name": "PythonScriptPlugin", + "Enabled": true + }, + { + "Name": "HDRIBackdrop", + "Enabled": true + }, + { + "Name": "SunPosition", + "Enabled": true + }, + { + "Name": "SequencerScripting", + "Enabled": true + }, + { + "Name": "Takes", + "Enabled": true + }, + { + "Name": "VirtualProductionUtilities", + "Enabled": true + }, + { + "Name": "VirtualCamera", + "Enabled": true + }, + { + "Name": "LiveLink", + "Enabled": true + }, + { + "Name": "Composure", + "Enabled": true + }, + { + "Name": "MediaFrameworkUtilities", + "Enabled": true + }, + { + "Name": "MediaIOFramework", + "Enabled": true + }, + { + "Name": "RemoteControl", + "Enabled": true + }, + { + "Name": "nDisplay", + "Enabled": true, + "SupportedTargetPlatforms": [ + "Win64", + "Linux" + ] + }, + { + "Name": "AjaMedia", + "Enabled": true, + "SupportedTargetPlatforms": [ + "Win64" + ], + "MarketplaceURL": "com.epicgames.launcher://ue/marketplace/content/91088fb17fb04b6b9eda41aaf8fda21e" + }, + { + "Name": "BlackmagicMedia", + "Enabled": true, + "SupportedTargetPlatforms": [ + "Win64", + "Linux" + ], + "MarketplaceURL": "com.epicgames.launcher://ue/marketplace/content/433d28bdfb764ec5b2e60c4f45d5cb5c" + }, + { + "Name": "AppleProResMedia", + "Enabled": true, + "SupportedTargetPlatforms": [ + "Win64" + ] + }, + { + "Name": "HAPMedia", + "Enabled": true, + "SupportedTargetPlatforms": [ + "Win64" + ] + }, + { + "Name": "AppleARKit", + "Enabled": true, + "SupportedTargetPlatforms": [ + "IOS", + "Win64", + "Mac", + "Linux", + "Android" + ] + }, + { + "Name": "AppleARKitFaceSupport", + "Enabled": true, + "SupportedTargetPlatforms": [ + "IOS", + "Win64", + "Mac", + "Linux", + "Android" + ] + }, + { + "Name": "PixelStreaming", + "Enabled": true + }, + { + "Name": "VariantManager", + "Enabled": true + }, + { + "Name": "DMXProtocol", + "Enabled": true + }, + { + "Name": "MovieRenderPipeline", + "Enabled": true + }, + { + 
"Name": "Switchboard", + "Enabled": true + }, + { + "Name": "LiveLinkHub", + "Enabled": true + }, + { + "Name": "ICVFX", + "Enabled": true + }, + { + "Name": "LedWallCalibration", + "Enabled": true + }, + { + "Name": "LiveLinkFreeD", + "Enabled": true + }, + { + "Name": "LiveLinkLens", + "Enabled": true + }, + { + "Name": "LiveLinkMasterLockit", + "Enabled": true + }, + { + "Name": "StageMonitoring", + "Enabled": true + }, + { + "Name": "TextureShare", + "Enabled": true + }, + { + "Name": "TimedDataMonitor", + "Enabled": true + }, + { + "Name": "OpenXRViveTracker", + "Enabled": true, + "SupportedTargetPlatforms": [ + "Win64", + "Linux", + "Android" + ] + }, + { + "Name": "LiveLinkXR", + "Enabled": true, + "SupportedTargetPlatforms": [ + "Win64" + ] + }, + { + "Name": "LiveLinkOverNDisplay", + "Enabled": true, + "SupportedTargetPlatforms": [ + "Win64", + "Linux" + ] + }, + { + "Name": "AudioCaptureTimecodeProvider", + "Enabled": true + }, + { + "Name": "Postshot", + "Enabled": true, + "SupportedTargetPlatforms": [ + "Win64" + ] + }, + { + "Name": "NDIMedia", + "Enabled": true, + "SupportedTargetPlatforms": [ + "Win64" + ] + }, + { + "Name": "NDIIOPlugin", + "Enabled": true + }, + { + "Name": "DMXPixelMapping", + "Enabled": true + } + ] +} \ No newline at end of file