Remove NDI plugin (no-plugin build)

This commit is contained in:
2025-09-04 16:44:45 +03:00
parent 12c22a75d4
commit f43f4f2b04
106 changed files with 0 additions and 14745 deletions

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.0 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.9 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 6.9 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 8.1 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 5.9 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.7 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 8.9 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 14 KiB

View File

@@ -1,58 +0,0 @@
{
"FileVersion": 3,
"Version": 11,
"VersionName": "3.8",
"FriendlyName": "NDI IO Plugin",
"Description": "Exposes Core NDI Support for integration into Unreal Engine Applications",
"Category": "Virtual Production",
"CreatedBy": "Vizrt NDI AB",
"CreatedByURL": "https://www.ndi.video",
"DocsURL": "",
"MarketplaceURL": "",
"SupportURL": "",
"EngineVersion": "5.5.0",
"CanContainContent": true,
"Installed": true,
"Modules": [
{
"Name": "NDIIO",
"Type": "Runtime",
"LoadingPhase": "Default",
"PlatformAllowList": [
"Win64",
"Linux",
"LinuxArm64"
]
},
{
"Name": "NDIIOEditor",
"Type": "Editor",
"LoadingPhase": "PostEngineInit",
"PlatformAllowList": [
"Win64",
"Linux",
"LinuxArm64"
]
},
{
"Name": "NDIIOShaders",
"Type": "Runtime",
"LoadingPhase": "PostConfigInit",
"PlatformAllowList": [
"Win64",
"Linux",
"LinuxArm64"
]
}
],
"Plugins": [
{
"Name": "MediaIOFramework",
"Enabled": true
},
{
"Name": "MediaFrameworkUtilities",
"Enabled": true
}
]
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.2 KiB

View File

@@ -1,237 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include "/Engine/Public/Platform.ush"
#include "/Engine/Generated/GeneratedUniformBuffers.ush"
#include "/Engine/Private/GammaCorrectionCommon.ush"
// Matches FNDIIOShaderPS::EColorCorrection enum
#define COLOR_CORRECTION_None 0
#define COLOR_CORRECTION_sRGBToLinear 1
#define COLOR_CORRECTION_LinearTosRGB 2
// Pass-through vertex shader: forwards the clip-space position and UV to the
// pixel stage unchanged. Used by all of the NDI conversion pixel shaders below.
void NDIIOMainVS(
	in float4 InPosition : ATTRIBUTE0,
	in float2 InUV : ATTRIBUTE1,
	out float4 OutPosition : SV_POSITION,
	out float2 OutUV : TEXCOORD0)
{
	OutPosition = InPosition;
	OutUV = InUV;
}
// Shader from 8 bits RGBA to 8 bits UYVY
// Packs two horizontally adjacent RGB pixels into one UYVY macro-pixel:
// chroma (Cb/Cr) is averaged across the pair, luma (Y) is kept per pixel.
void NDIIOBGRAtoUYVYPS(
	float4 InPosition : SV_POSITION,
	float2 InUV : TEXCOORD0,
	out float4 OutColor : SV_Target0)
{
	// RGB -> YCbCr conversion matrix and offset (presumably Rec.709
	// video-range coefficients — TODO confirm against the NDI SDK docs).
	float3x3 RGBToYCbCrMat =
	{
		0.18300, 0.61398, 0.06201,
		-0.10101, -0.33899, 0.43900,
		0.43902, -0.39900, -0.04001
	};
	float3 RGBToYCbCrVec = { 0.06302, 0.50198, 0.50203 };
	// Map the output texel back into the (offset/scaled) input UV space.
	float2 UV = NDIIOShaderUB.UVOffset + InUV * NDIIOShaderUB.UVScale;
	// One output texel covers two input pixels horizontally.
	float2 UVdelta = NDIIOShaderUB.UVScale * float2(2.0f/NDIIOShaderUB.OutputWidth, 1.0f/NDIIOShaderUB.OutputHeight);
	float2 UV0 = UV + float2(-1.0f/4.0f, 0.0f) * UVdelta;	// left pixel of the pair
	float2 UV1 = UV + float2( 1.0f/4.0f, 0.0f) * UVdelta;	// right pixel of the pair
	// Out-of-range samples fall back to the offset vector (i.e. black level).
	float3 YUV0 = RGBToYCbCrVec;
	float3 YUV1 = RGBToYCbCrVec;
	if(all(UV0 >= float2(0,0)) && all(UV0 < float2(1,1)))
	{
		float4 RGBA0 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV0);
		// Optional linear -> sRGB conversion before encoding to YCbCr.
		float3 RGB0 = (NDIIOShaderUB.ColorCorrection == COLOR_CORRECTION_LinearTosRGB) ? LinearToSrgb(RGBA0.xyz) : RGBA0.xyz;
		YUV0 = mul(RGBToYCbCrMat, RGB0) + RGBToYCbCrVec;
	}
	if(all(UV1 >= float2(0,0)) && all(UV1 < float2(1,1)))
	{
		float4 RGBA1 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV1);
		float3 RGB1 = (NDIIOShaderUB.ColorCorrection == COLOR_CORRECTION_LinearTosRGB) ? LinearToSrgb(RGBA1.xyz) : RGBA1.xyz;
		YUV1 = mul(RGBToYCbCrMat, RGB1) + RGBToYCbCrVec;
	}
	// Output packing: x = U (averaged), y = Y0, z = V (averaged), w = Y1.
	OutColor.xz = (YUV0.zy + YUV1.zy) / 2.f;
	OutColor.y = YUV0.x;
	OutColor.w = YUV1.x;
}
// Shader from 8 bits RGBA to 8 bits Alpha suitable for UYVA; even-numbered lines
// Packs the (scaled/offset) alpha of four horizontally adjacent input pixels,
// sampled from the upper half-line (y offset -1/4 texel), into one output texel.
void NDIIOBGRAtoAlphaEvenPS(
	float4 InPosition : SV_POSITION,
	float2 InUV : TEXCOORD0,
	out float4 OutColor : SV_Target0)
{
	// Map the output texel into input UV space; one output texel spans
	// 4 input pixels horizontally and 2 lines vertically.
	float2 UV = NDIIOShaderUB.UVOffset + InUV * NDIIOShaderUB.UVScale;
	float2 UVdelta = NDIIOShaderUB.UVScale * float2(4.0f/NDIIOShaderUB.OutputWidth, 2.0f/NDIIOShaderUB.OutputHeight);
	float2 UV0 = UV + float2(-3.0f/8.0f, -1.0f/4.0f) * UVdelta;
	float2 UV1 = UV + float2(-1.0f/8.0f, -1.0f/4.0f) * UVdelta;
	float2 UV2 = UV + float2( 1.0f/8.0f, -1.0f/4.0f) * UVdelta;
	float2 UV3 = UV + float2( 3.0f/8.0f, -1.0f/4.0f) * UVdelta;
	// Out-of-range samples contribute zero alpha.
	float A0 = 0.0f;
	float A1 = 0.0f;
	float A2 = 0.0f;
	float A3 = 0.0f;
	if(all(UV0 >= float2(0,0)) && all(UV0 < float2(1,1)))
	{
		float4 RGBA0 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV0);
		A0 = RGBA0.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset;
	}
	if(all(UV1 >= float2(0,0)) && all(UV1 < float2(1,1)))
	{
		float4 RGBA1 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV1);
		A1 = RGBA1.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset;
	}
	if(all(UV2 >= float2(0,0)) && all(UV2 < float2(1,1)))
	{
		float4 RGBA2 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV2);
		A2 = RGBA2.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset;
	}
	if(all(UV3 >= float2(0,0)) && all(UV3 < float2(1,1)))
	{
		float4 RGBA3 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV3);
		A3 = RGBA3.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset;
	}
	// NOTE(review): the (A2, A1, A0, A3) channel order presumably matches the
	// byte layout of the BGRA target texture — confirm against the CPU packing.
	OutColor.xyzw = float4(A2, A1, A0, A3);
}
// Shader from 8 bits RGBA to 8 bits Alpha suitable for UYVA; odd-numbered lines
// Identical to NDIIOBGRAtoAlphaEvenPS except it samples the lower half-line
// (y offset +1/4 texel instead of -1/4).
void NDIIOBGRAtoAlphaOddPS(
	float4 InPosition : SV_POSITION,
	float2 InUV : TEXCOORD0,
	out float4 OutColor : SV_Target0)
{
	// Map the output texel into input UV space; one output texel spans
	// 4 input pixels horizontally and 2 lines vertically.
	float2 UV = NDIIOShaderUB.UVOffset + InUV * NDIIOShaderUB.UVScale;
	float2 UVdelta = NDIIOShaderUB.UVScale * float2(4.0f/NDIIOShaderUB.OutputWidth, 2.0f/NDIIOShaderUB.OutputHeight);
	float2 UV0 = UV + float2(-3.0f/8.0f, 1.0f/4.0f) * UVdelta;
	float2 UV1 = UV + float2(-1.0f/8.0f, 1.0f/4.0f) * UVdelta;
	float2 UV2 = UV + float2( 1.0f/8.0f, 1.0f/4.0f) * UVdelta;
	float2 UV3 = UV + float2( 3.0f/8.0f, 1.0f/4.0f) * UVdelta;
	// Out-of-range samples contribute zero alpha.
	float A0 = 0.0f;
	float A1 = 0.0f;
	float A2 = 0.0f;
	float A3 = 0.0f;
	if(all(UV0 >= float2(0,0)) && all(UV0 < float2(1,1)))
	{
		float4 RGBA0 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV0);
		A0 = RGBA0.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset;
	}
	if(all(UV1 >= float2(0,0)) && all(UV1 < float2(1,1)))
	{
		float4 RGBA1 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV1);
		A1 = RGBA1.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset;
	}
	if(all(UV2 >= float2(0,0)) && all(UV2 < float2(1,1)))
	{
		float4 RGBA2 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV2);
		A2 = RGBA2.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset;
	}
	if(all(UV3 >= float2(0,0)) && all(UV3 < float2(1,1)))
	{
		float4 RGBA3 = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerT, UV3);
		A3 = RGBA3.w * NDIIOShaderUB.AlphaScale + NDIIOShaderUB.AlphaOffset;
	}
	// NOTE(review): the (A2, A1, A0, A3) channel order presumably matches the
	// byte layout of the BGRA target texture — confirm against the CPU packing.
	OutColor.xyzw = float4(A2, A1, A0, A3);
}
// Shader from 8 bits UYVY to 8 bits RGBA (alpha set to 1)
// Unpacks one UYVY macro-pixel (two output pixels share one chroma sample)
// and converts YCbCr -> RGB, with optional sRGB -> linear correction.
void NDIIOUYVYtoBGRAPS(
	float4 InPosition : SV_POSITION,
	float2 InUV : TEXCOORD0,
	out float4 OutColor : SV_Target0)
{
	// YCbCr -> RGB conversion matrix and offset (presumably Rec.709
	// video-range coefficients — TODO confirm against the NDI SDK docs).
	float3x3 YCbCrToRGBMat =
	{
		1.16414, -0.0011, 1.7923,
		1.16390, -0.2131, -0.5342,
		1.16660, 2.1131, -0.0001
	};
	float3 YCbCrToRGBVec = { -0.9726, 0.3018, -1.1342 };
	// Fixed: the original repeated the identical InUV range test twice,
	// joined by && — a single bounds check is equivalent.
	if(all(InUV >= float2(0,0)) && all(InUV < float2(1,1)))
	{
		float4 UYVY = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerP, InUV);
		// Select Y0 (even output column) or Y1 (odd output column) from the
		// macro-pixel based on the horizontal pixel parity.
		float PosX = 2.0f * InUV.x * NDIIOShaderUB.InputWidth;
		float4 YUVA;
		float FracX = floor(PosX) % 2.0f;
		YUVA.x = (1 - FracX) * UYVY.y + FracX * UYVY.w;
		YUVA.yz = UYVY.zx;
		YUVA.w = 1;
		OutColor.xyz = mul(YCbCrToRGBMat, YUVA.xyz) + YCbCrToRGBVec;
		if(NDIIOShaderUB.ColorCorrection == COLOR_CORRECTION_sRGBToLinear)
			OutColor.xyz = sRGBToLinear(OutColor.xyz);
		OutColor.w = YUVA.w;
	}
	else
	{
		// Out of range: emit the conversion of a zero YCbCr sample, opaque.
		OutColor.xyz = YCbCrToRGBVec;
		if(NDIIOShaderUB.ColorCorrection == COLOR_CORRECTION_sRGBToLinear)
			OutColor.xyz = sRGBToLinear(OutColor.xyz);
		OutColor.w = 1;
	}
}
// Shader from 8 bits UYVA to 8 bits RGBA
// Same as NDIIOUYVYtoBGRAPS, but the alpha is read from a second input
// texture (the alpha plane produced by the BGRAtoAlpha shaders).
void NDIIOUYVAtoBGRAPS(
	float4 InPosition : SV_POSITION,
	float2 InUV : TEXCOORD0,
	out float4 OutColor : SV_Target0)
{
	// YCbCr -> RGB conversion matrix and offset (presumably Rec.709
	// video-range coefficients — TODO confirm against the NDI SDK docs).
	float3x3 YCbCrToRGBMat =
	{
		1.16414, -0.0011, 1.7923,
		1.16390, -0.2131, -0.5342,
		1.16660, 2.1131, -0.0001
	};
	float3 YCbCrToRGBVec = { -0.9726, 0.3018, -1.1342 };
	// Fixed: the original repeated the identical InUV range test twice,
	// joined by && — a single bounds check is equivalent.
	if(all(InUV >= float2(0,0)) && all(InUV < float2(1,1)))
	{
		float4 UYVY = NDIIOShaderUB.InputTarget.Sample(NDIIOShaderUB.SamplerP, InUV);
		float Alpha = NDIIOShaderUB.InputAlphaTarget.Sample(NDIIOShaderUB.SamplerP, InUV).w;
		// Select Y0 (even output column) or Y1 (odd output column) from the
		// macro-pixel based on the horizontal pixel parity.
		float PosX = 2.0f * InUV.x * NDIIOShaderUB.InputWidth;
		float4 YUVA;
		float FracX = floor(PosX) % 2.0f;
		YUVA.x = (1 - FracX) * UYVY.y + FracX * UYVY.w;
		YUVA.yz = UYVY.zx;
		YUVA.w = Alpha;
		OutColor.xyz = mul(YCbCrToRGBMat, YUVA.xyz) + YCbCrToRGBVec;
		if(NDIIOShaderUB.ColorCorrection == COLOR_CORRECTION_sRGBToLinear)
			OutColor.xyz = sRGBToLinear(OutColor.xyz);
		OutColor.w = YUVA.w;
	}
	else
	{
		// Out of range: emit the conversion of a zero YCbCr sample, opaque.
		OutColor.xyz = YCbCrToRGBVec;
		if(NDIIOShaderUB.ColorCorrection == COLOR_CORRECTION_sRGBToLinear)
			OutColor.xyz = sRGBToLinear(OutColor.xyz);
		OutColor.w = 1;
	}
}

View File

@@ -1,81 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Actors/NDIBroadcastActor.h>
// Constructor: creates the viewport-capture and PTZ-controller subobjects.
ANDIBroadcastActor::ANDIBroadcastActor(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer)
{
	// Create the viewport capture component and parent it to the actor root.
	this->ViewportCaptureComponent = ObjectInitializer.CreateDefaultSubobject<UNDIViewportCaptureComponent>(this, TEXT("ViewportCaptureComponent"));
	this->ViewportCaptureComponent->AttachToComponent(this->RootComponent, FAttachmentTransformRules::KeepRelativeTransform);
	// PTZ (pan/tilt/zoom) controller subobject; initialized in BeginPlay.
	this->PTZController = ObjectInitializer.CreateDefaultSubobject<UPTZController>(this, TEXT("PTZController"));
}
// Wires the capture component, PTZ controller, and media source together
// when the actor enters play.
void ANDIBroadcastActor::BeginPlay()
{
	Super::BeginPlay();
	// validate the viewport capture component
	if (IsValid(this->ViewportCaptureComponent))
	{
		// Initialize the Capture Component with the media source
		ViewportCaptureComponent->Initialize(this->NDIMediaSource);
	}
	if (IsValid(this->PTZController))
	{
		// Initialize the PTZ Controller with the media source
		PTZController->Initialize(this->NDIMediaSource);
	}
	if (IsValid(this->NDIMediaSource))
	{
		// Initialize the media source itself with the submix-capture setting.
		this->NDIMediaSource->Initialize(this->SubmixCapture);
	}
}
// Builds a PTZ state snapshot from the actor's transform and the capture
// component's relative rotation / camera settings.
// Returns: the populated FPTZState (pan/tilt in radians).
FPTZState ANDIBroadcastActor::GetPTZStateFromUE() const
{
	FPTZState PTZState;
	PTZState.CameraTransform = GetActorTransform();

	// Consolidated: the original validated ViewportCaptureComponent twice;
	// everything that reads the component now lives under a single check.
	FTransform Transform = FTransform::Identity;
	if (IsValid(this->ViewportCaptureComponent))
	{
		Transform = this->ViewportCaptureComponent->GetRelativeTransform();

		PTZState.FieldOfView = this->ViewportCaptureComponent->FOVAngle;
		// Map focal distance (cm) into a normalized focus value in [0, 1).
		PTZState.FocusDistance = 1.f - 1.f / (this->ViewportCaptureComponent->PostProcessSettings.DepthOfFieldFocalDistance / 100.f + 1.f);
		// Auto-focus is reported when no explicit focal-distance override is set.
		PTZState.bAutoFocus = (this->ViewportCaptureComponent->PostProcessSettings.bOverride_DepthOfFieldFocalDistance == true) ? false : true;
	}

	// Euler[2] (yaw) -> pan, Euler[1] (pitch) -> tilt; both converted to radians.
	// With an invalid component this yields 0/0, as in the original.
	FQuat Rotation = Transform.GetRotation();
	FVector Euler = Rotation.Euler();
	PTZState.Pan = FMath::DegreesToRadians(Euler[2]);
	PTZState.Tilt = FMath::DegreesToRadians(Euler[1]);

	return PTZState;
}
// Applies a received PTZ state to the actor and its capture component.
// Inverse of GetPTZStateFromUE: pan/tilt are radians, focus is normalized.
void ANDIBroadcastActor::SetPTZStateToUE(const FPTZState& PTZState)
{
	SetActorTransform(PTZState.CameraTransform);
	// Rebuild the relative rotation from pan (yaw) and tilt (pitch).
	FVector Euler(0, FMath::RadiansToDegrees(PTZState.Tilt), FMath::RadiansToDegrees(PTZState.Pan));
	FQuat NewRotation = FQuat::MakeFromEuler(Euler);
	if (IsValid(this->ViewportCaptureComponent))
	{
		this->ViewportCaptureComponent->SetRelativeLocationAndRotation(this->ViewportCaptureComponent->GetRelativeLocation(), NewRotation);
		this->ViewportCaptureComponent->FOVAngle = PTZState.FieldOfView;
		// Invert the normalized focus value back to a focal distance in cm,
		// then clamp away zero so the post-process setting stays valid.
		this->ViewportCaptureComponent->PostProcessSettings.DepthOfFieldFocalDistance = (1.f / FMath::Max(1 - PTZState.FocusDistance, 0.01f) - 1.f) * 100.f;
		this->ViewportCaptureComponent->PostProcessSettings.DepthOfFieldFocalDistance = FMath::Max(this->ViewportCaptureComponent->PostProcessSettings.DepthOfFieldFocalDistance, SMALL_NUMBER);
		// Manual focus engages the focal-distance override; auto-focus clears it.
		this->ViewportCaptureComponent->PostProcessSettings.bOverride_DepthOfFieldFocalDistance = (PTZState.bAutoFocus == true) ? false : true;
	}
}

View File

@@ -1,369 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Actors/NDIReceiveActor.h>
#include <UObject/Package.h>
#include <AudioDevice.h>
#include <ActiveSound.h>
#include <Async/Async.h>
#include <Engine/StaticMesh.h>
#include <Kismet/GameplayStatics.h>
#include <Materials/MaterialInstanceDynamic.h>
#include <Objects/Media/NDIMediaTexture2D.h>
#include <UObject/ConstructorHelpers.h>
// Constructor: builds the video plane mesh, its unlit NDI material, and the
// audio component used for NDI audio playback.
ANDIReceiveActor::ANDIReceiveActor(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer)
{
	// Get the Engine's 'Plane' static mesh
	static ConstructorHelpers::FObjectFinder<UStaticMesh> MeshObject(
		TEXT("StaticMesh'/Engine/BasicShapes/Plane.Plane'"));
	static ConstructorHelpers::FObjectFinder<UMaterialInterface> MaterialObject(
		TEXT("Material'/NDIIOPlugin/Materials/NDI_Unlit_SourceMaterial.NDI_Unlit_SourceMaterial'"));
	// Ensure that the object is valid
	if (MeshObject.Object)
	{
		// Create the static mesh component visual
		this->VideoMeshComponent =
			ObjectInitializer.CreateDefaultSubobject<UStaticMeshComponent>(this, TEXT("VideoMeshComponent"), true);
		// setup the attachment and modify the position, rotation, and mesh properties
		this->VideoMeshComponent->SetupAttachment(RootComponent);
		this->VideoMeshComponent->SetStaticMesh(MeshObject.Object);
		this->VideoMeshComponent->SetRelativeRotation(FQuat::MakeFromEuler(FVector(90.0f, 0.0f, 90.0f)));
		// FrameWidth/FrameHeight are in cm; divide by 100 to scale the plane.
		this->VideoMeshComponent->SetRelativeScale3D(FVector(FrameWidth / 100.0f, FrameHeight / 100.0f, 1.0f));
		// The plane is purely visual; disable all collision.
		this->VideoMeshComponent->SetCollisionResponseToAllChannels(ECR_Ignore);
		this->VideoMeshComponent->SetCollisionEnabled(ECollisionEnabled::NoCollision);
		this->VideoMeshComponent->SetCollisionObjectType(ECC_WorldDynamic);
		// This object is mainly used for simple tests and things that don't require
		// additional material shading support; store an unlit source material to display.
		this->VideoMaterial = MaterialObject.Object;
		// If the material is valid
		if (this->VideoMaterial)
		{
			// Set the Mesh Material to the Video Material
			this->VideoMeshComponent->SetMaterial(0, this->VideoMaterial);
		}
	}
	// Audio component for NDI audio playback, attached at the actor origin.
	this->AudioComponent = ObjectInitializer.CreateDefaultSubobject<UAudioComponent>(this, TEXT("AudioComponent"), true);
	this->AudioComponent->SetupAttachment(RootComponent);
	this->AudioComponent->SetRelativeLocationAndRotation(FVector::ZeroVector, FRotator::ZeroRotator);
	this->AudioComponent->SetRelativeScale3D(FVector::OneVector);
	// Tick is required by ApplyChannelsMode (see Tick); only tick during play.
	this->bAllowTickBeforeBeginPlay = false;
	this->PrimaryActorTick.bCanEverTick = true;
}
// Connects the media source to the video material and sets up a transient
// sound wave for audio playback, starting/stopping audio as configured.
void ANDIReceiveActor::BeginPlay()
{
	// call the base implementation for 'BeginPlay'
	Super::BeginPlay();
	// We need to validate that we have media source, so we can set the texture in the material instance
	if (IsValid(this->NDIMediaSource))
	{
		this->NDIMediaSource->Initialize(UNDIMediaReceiver::EUsage::Standalone);
		// Validate the Video Material Instance so we can set the texture used in the NDI Media source
		if (IsValid(this->VideoMaterial))
		{
			// create and set the instance material from the MaterialObject
			VideoMaterialInstance =
				this->VideoMeshComponent->CreateAndSetMaterialInstanceDynamicFromMaterial(0, this->VideoMaterial);
			// Ensure we have a valid material instance
			if (IsValid(VideoMaterialInstance))
			{
				// Enable/disable the color and/or alpha channels
				this->VideoMaterialInstance->SetScalarParameterValue("Enable Video Texture", bEnableColor ? 1.0f : 0.0f);
				this->VideoMaterialInstance->SetScalarParameterValue("Enable Video Alpha", bEnableAlpha ? 1.0f : 0.0f);
				// Bind the receiver's video texture to the material parameter.
				this->NDIMediaSource->UpdateMaterialTexture(VideoMaterialInstance, "Video Texture");
			}
		}
		// Define the basic parameters for constructing temporary audio wave object
		// NOTE(review): Right(1) keeps only the last character of the actor name —
		// presumably a per-instance suffix; confirm this is intentional.
		FString AudioSource = FString::Printf(TEXT("AudioSource_%s"), *GetFName().ToString().Right(1));
		FName AudioWaveName = FName(*AudioSource);
		EObjectFlags Flags = RF_Public | RF_Standalone | RF_Transient | RF_MarkAsNative;
		// Construct a temporary audio sound wave to be played by this component
		this->AudioSoundWave = NewObject<UNDIMediaSoundWave>(GetTransientPackage(), UNDIMediaSoundWave::StaticClass(),
															 AudioWaveName, Flags);
		// Ensure the validity of the temporary sound wave object
		if (IsValid(this->AudioSoundWave))
		{
			// Set the number of channels
			bStoppedForChannelsMode = false;
			ApplyChannelsMode();
			// Set the sound of the Audio Component and Ensure playback
			this->AudioComponent->SetSound(this->AudioSoundWave);
			// Ensure we register the audio wave object with the media.
			this->NDIMediaSource->RegisterAudioWave(AudioSoundWave);
		}
		// Already connected: apply the playback setting immediately.
		if (this->NDIMediaSource->GetCurrentConnectionInformation().IsValid())
		{
			if (IsValid(AudioComponent))
			{
				// we should play the audio, if we want audio playback
				if (bEnableAudioPlayback)
				{
					this->AudioComponent->Play(0.0f);
				}
				// otherwise just stop
				else
				{
					this->AudioComponent->Stop();
					this->bStoppedForChannelsMode = false;
				}
			}
		}
		// Add a lambda to the OnReceiverConnected Event
		// Not yet connected: defer the same playback logic to the connect event.
		else
			this->NDIMediaSource->OnNDIReceiverConnectedEvent.AddWeakLambda(this, [&](UNDIMediaReceiver*) {
				// Ensure that the audio component is valid
				if (IsValid(AudioComponent))
				{
					// we should play the audio, if we want audio playback
					if (bEnableAudioPlayback)
					{
						this->AudioComponent->Play(0.0f);
					}
					// otherwise just stop
					else
					{
						this->AudioComponent->Stop();
						this->bStoppedForChannelsMode = false;
					}
				}
			});
	}
}
// Resets channel-switch state and, when leaving a PIE session, blanks out
// the video material so stale frames are not displayed in the editor.
void ANDIReceiveActor::EndPlay(const EEndPlayReason::Type EndPlayReason)
{
	Super::EndPlay(EndPlayReason);

	bStoppedForChannelsMode = false;

	if (IsValid(VideoMaterialInstance))
	{
		// Only blank the material when play-in-editor is ending.
		if (EndPlayReason == EEndPlayReason::EndPlayInEditor)
		{
			VideoMaterialInstance->SetScalarParameterValue("Enable Video Texture", 0.0f);
			VideoMaterialInstance->SetScalarParameterValue("Enable Video Alpha", 0.0f);
		}
	}
}
// Per-frame update: re-applies the audio channel mode each tick so channel
// count changes can be carried out across frames (see ApplyChannelsMode).
void ANDIReceiveActor::Tick(float DeltaTime)
{
	Super::Tick(DeltaTime);
	ApplyChannelsMode();
}
// Applies the configured AudioPlaybackChannels mode to the sound wave.
// Called every Tick; a channel-count change is carried out over two ticks:
// first stop playback, then (once stopped) change NumChannels and resume
// playback if this function was the one that stopped it.
void ANDIReceiveActor::ApplyChannelsMode()
{
	if (IsValid(this->AudioComponent) && IsValid(this->NDIMediaSource) && IsValid(this->AudioSoundWave))
	{
		// Resolve the desired channel count; 0 means "leave unchanged".
		int32 NewNumChannels = 0;
		switch(this->AudioPlaybackChannels)
		{
			case ENDIAudioChannels::Mono:
				NewNumChannels = 1;
				break;
			case ENDIAudioChannels::Stereo:
				NewNumChannels = 2;
				break;
			case ENDIAudioChannels::Source:
				// Follow whatever channel count the NDI source reports.
				NewNumChannels = this->NDIMediaSource->GetAudioChannels();
				break;
		}
		if ((NewNumChannels != 0) && (NewNumChannels != this->AudioSoundWave->NumChannels))
		{
			bool isPlaying = this->AudioComponent->IsPlaying();
			if (isPlaying)
			{
				// Phase 1: stop playback; remember we stopped it ourselves.
				this->AudioComponent->Stop();
				bStoppedForChannelsMode = true;
			}
			else
			{
				// Phase 2: safe to change the channel count while stopped.
				this->AudioSoundWave->NumChannels = NewNumChannels;
				if (bEnableAudioPlayback && bStoppedForChannelsMode)
				{
					this->AudioComponent->Play(0.0f);
				}
				bStoppedForChannelsMode = false;
			}
		}
	}
}
/**
	Attempts to set the desired frame size in cm, represented in the virtual scene
*/
void ANDIReceiveActor::SetFrameSize(FVector2D InFrameSize)
{
	// Clamp both dimensions to a tiny positive minimum and store them.
	FrameWidth = FMath::Max(InFrameSize.X, 0.00001f);
	FrameHeight = FMath::Max(InFrameSize.Y, 0.00001f);

	// Rescale the video plane to match (cm -> mesh scale, hence / 100).
	if (IsValid(VideoMeshComponent))
	{
		VideoMeshComponent->SetRelativeScale3D(FVector(FrameWidth / 100.0f, FrameHeight / 100.0f, 1.0f));
	}
}
// Sets only the frame height (cm, clamped to a tiny positive minimum) and
// routes through SetFrameSize so the plane mesh is rescaled as well.
void ANDIReceiveActor::SetFrameHeight(const float& InFrameHeight)
{
	FrameHeight = FMath::Max(InFrameHeight, 0.00001f);
	SetFrameSize(FVector2D(FrameWidth, FrameHeight));
}
// Sets only the frame width (cm, clamped to a tiny positive minimum) and
// routes through SetFrameSize so the plane mesh is rescaled as well.
void ANDIReceiveActor::SetFrameWidth(const float& InFrameWidth)
{
	FrameWidth = FMath::Max(InFrameWidth, 0.00001f);
	SetFrameSize(FVector2D(FrameWidth, FrameHeight));
}
// Starts or stops audio playback on the audio component.
// Stop is issued unconditionally when disabled (harmless if not playing).
void ANDIReceiveActor::UpdateAudioPlayback(const bool& Enabled)
{
	if (!IsValid(this->AudioComponent))
		return;

	if (Enabled)
		this->AudioComponent->Play(0.0f);
	else
		this->AudioComponent->Stop();
}
// Intentionally empty: channel-count changes are picked up every frame by
// ApplyChannelsMode() (invoked from Tick), so no immediate action is needed.
void ANDIReceiveActor::UpdateAudioPlaybackChannels(const ENDIAudioChannels& Channels)
{}
// Toggles the color (RGB) channel of the video material on or off.
void ANDIReceiveActor::EnableColor(const bool& Enabled)
{
	bEnableColor = Enabled;

	// Push the new state into the material instance, when one exists.
	if (IsValid(VideoMaterialInstance))
		VideoMaterialInstance->SetScalarParameterValue("Enable Video Texture", Enabled ? 1.0f : 0.0f);
}
// Toggles the alpha channel of the video material on or off.
void ANDIReceiveActor::EnableAlpha(const bool& Enabled)
{
	bEnableAlpha = Enabled;

	// Push the new state into the material instance, when one exists.
	if (IsValid(VideoMaterialInstance))
		VideoMaterialInstance->SetScalarParameterValue("Enable Video Alpha", Enabled ? 1.0f : 0.0f);
}
/**
	Returns the current frame size of the 'VideoMeshComponent' for this object
	(width, height) in cm.
*/
const FVector2D ANDIReceiveActor::GetFrameSize() const
{
	return FVector2D(FrameWidth, FrameHeight);
}
#if WITH_EDITORONLY_DATA
// Editor-only hook fired before a property changes; no custom handling needed.
void ANDIReceiveActor::PreEditChange(FProperty* InProperty)
{
	// call the base class 'PreEditChange'
	Super::PreEditChange(InProperty);
}
// Editor-only hook fired after a property changes; re-applies the changed
// setting (frame size, audio playback/channels, color/alpha enables).
void ANDIReceiveActor::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent)
{
	// get the name of the property which changed
	FName PropertyName =
		(PropertyChangedEvent.Property != nullptr) ? PropertyChangedEvent.Property->GetFName() : NAME_None;

	// FrameHeight / FrameWidth had two byte-identical branches; merged into one.
	if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, FrameHeight) ||
		PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, FrameWidth))
	{
		// resize the frame
		SetFrameSize(FVector2D(FrameWidth, FrameHeight));
	}
	// compare against the 'bEnableAudioPlayback' property
	else if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, bEnableAudioPlayback))
	{
		// start or stop the audio playback depending on state
		UpdateAudioPlayback(bEnableAudioPlayback);
	}
	// compare against the 'AudioPlaybackChannels' property
	else if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, AudioPlaybackChannels))
	{
		// notify of the channel-mode change (applied per-tick by ApplyChannelsMode)
		UpdateAudioPlaybackChannels(AudioPlaybackChannels);
	}
	// compare against the 'bEnableColor' property
	else if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, bEnableColor))
	{
		// enable or disable color channels (if it exists)
		EnableColor(bEnableColor);
	}
	// compare against the 'bEnableAlpha' property
	else if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, bEnableAlpha))
	{
		// enable or disable alpha channel (if it exists)
		EnableAlpha(bEnableAlpha);
	}
	// call the base class 'PostEditChangeProperty'
	Super::PostEditChangeProperty(PropertyChangedEvent);
}
#endif

View File

@@ -1,101 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Assets/NDITimecodeProvider.h>
// Default constructor; the provider is armed later via Initialize().
UNDITimecodeProvider::UNDITimecodeProvider(const FObjectInitializer& ObjectInitializer)
	: Super(ObjectInitializer)
{}
// Returns the most recent frame time captured from the NDI source.
// Fails when no media source is set or the provider is not yet synchronized.
bool UNDITimecodeProvider::FetchTimecode(FQualifiedFrameTime& OutFrameTime)
{
	FScopeLock Lock(&this->StateSyncContext);

	const bool bReady = IsValid(this->NDIMediaSource) &&
						(GetSynchronizationState() == ETimecodeProviderSynchronizationState::Synchronized);
	if (!bReady)
		return false;

	OutFrameTime = this->MostRecentFrameTime;
	return true;
}
// Reports the provider state under the state lock; with no media source the
// provider is effectively closed.
ETimecodeProviderSynchronizationState UNDITimecodeProvider::GetSynchronizationState() const
{
	FScopeLock Lock(&this->StateSyncContext);
	return IsValid(this->NDIMediaSource) ? this->State : ETimecodeProviderSynchronizationState::Closed;
}
// Engine hook: starts the provider by subscribing to the media receiver's
// capture/connect/disconnect events. Returns false (state = Error) when no
// media source is configured.
bool UNDITimecodeProvider::Initialize(UEngine* InEngine)
{
	this->State = ETimecodeProviderSynchronizationState::Closed;
	if (!IsValid(this->NDIMediaSource))
	{
		this->State = ETimecodeProviderSynchronizationState::Error;
		return false;
	}
	this->NDIMediaSource->Initialize(UNDIMediaReceiver::EUsage::Standalone);
	// Each captured video frame updates the latest timecode and marks the
	// provider synchronized. The receiver is queried before taking the lock.
	this->VideoCaptureEventHandle = this->NDIMediaSource->OnNDIReceiverVideoCaptureEvent.AddLambda([this](UNDIMediaReceiver* Receiver, const NDIlib_video_frame_v2_t& VideoFrame)
	{
		const FFrameRate Rate = Receiver->GetCurrentFrameRate();
		const FTimecode Timecode = Receiver->GetCurrentTimecode();
		FScopeLock Lock(&this->StateSyncContext);
		this->State = ETimecodeProviderSynchronizationState::Synchronized;
		this->MostRecentFrameTime = FQualifiedFrameTime(Timecode, Rate);
	});
	// Connection established: synchronizing until the first frame arrives.
	this->ConnectedEventHandle = this->NDIMediaSource->OnNDIReceiverConnectedEvent.AddLambda([this](UNDIMediaReceiver* Receiver)
	{
		FScopeLock Lock(&this->StateSyncContext);
		this->State = ETimecodeProviderSynchronizationState::Synchronizing;
	});
	// Connection lost: back to closed.
	this->DisconnectedEventHandle = this->NDIMediaSource->OnNDIReceiverDisconnectedEvent.AddLambda([this](UNDIMediaReceiver* Receiver)
	{
		FScopeLock Lock(&this->StateSyncContext);
		this->State = ETimecodeProviderSynchronizationState::Closed;
	});
	return true;
}
// Engine hook: tears down the provider's event subscriptions.
void UNDITimecodeProvider::Shutdown(UEngine* InEngine)
{
	ReleaseResources();
}
// Release event subscriptions before destruction (also covers the case
// where Shutdown was never called).
void UNDITimecodeProvider::BeginDestroy()
{
	ReleaseResources();
	Super::BeginDestroy();
}
// Unsubscribes from the media source's events, resets the cached delegate
// handles, and marks the provider closed. Safe to call more than once.
void UNDITimecodeProvider::ReleaseResources()
{
	if(IsValid(this->NDIMediaSource))
	{
		this->NDIMediaSource->OnNDIReceiverVideoCaptureEvent.Remove(this->VideoCaptureEventHandle);
		this->NDIMediaSource->OnNDIReceiverConnectedEvent.Remove(this->ConnectedEventHandle);
		this->NDIMediaSource->OnNDIReceiverDisconnectedEvent.Remove(this->DisconnectedEventHandle);
	}
	// Reset the handles even when the source is gone, so stale handles are
	// never reused on a later Initialize().
	this->VideoCaptureEventHandle.Reset();
	this->ConnectedEventHandle.Reset();
	this->DisconnectedEventHandle.Reset();
	this->State = ETimecodeProviderSynchronizationState::Closed;
}

View File

@@ -1,153 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Components/NDIBroadcastComponent.h>
// Default constructor; the media source is assigned later via Initialize().
UNDIBroadcastComponent::UNDIBroadcastComponent(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer)
{}
/**
	Initialize this component with the media source required for sending NDI audio, video, and metadata.
	Returns false if a different MediaSource has already been set, or if InMediaSource is null. This is
	usually the case when this component is initialized in Blueprints.
*/
bool UNDIBroadcastComponent::Initialize(UNDIMediaSender* InMediaSource)
{
	// Adopt the source only when none has been set yet and the new one is non-null.
	if (this->NDIMediaSource == nullptr && InMediaSource != nullptr)
	{
		// we passed validation, so set the media source
		this->NDIMediaSource = InMediaSource;
	}
	// Success when a non-null source was supplied and it is now the active source.
	return InMediaSource != nullptr && InMediaSource == NDIMediaSource;
}
/**
	Attempts to start broadcasting audio, video, and metadata via the 'NDIMediaSource' associated with this object

	@param ErrorMessage Receives an error description when no media source is available
	@result Whether broadcasting was successfully started
*/
bool UNDIBroadcastComponent::StartBroadcasting(FString& ErrorMessage)
{
	// Without a media source there is nothing to broadcast.
	if (!IsValid(NDIMediaSource))
	{
		ErrorMessage = TEXT("No Media Source present to broadcast");
		return false;
	}

	// Initializing the sender starts the broadcast; this path always succeeds.
	NDIMediaSource->Initialize(nullptr);
	return true;
}
/**
	Changes the name of the sender object as seen on the network for remote connections

	@param InSourceName The new name of the source to be identified as on the network
*/
void UNDIBroadcastComponent::ChangeSourceName(const FString& InSourceName)
{
	// Forward to the media source when one is set; otherwise a no-op.
	if (!IsValid(NDIMediaSource))
		return;

	NDIMediaSource->ChangeSourceName(InSourceName);
}
/**
	Attempts to change the Broadcast information associated with this media object

	@param InConfiguration The new configuration to broadcast
*/
void UNDIBroadcastComponent::ChangeBroadcastConfiguration(const FNDIBroadcastConfiguration& InConfiguration)
{
	// Forward to the media source when one is set; otherwise a no-op.
	if (!IsValid(NDIMediaSource))
		return;

	NDIMediaSource->ChangeBroadcastConfiguration(InConfiguration);
}
/**
	Attempts to change the RenderTarget used in sending video frames over NDI

	@param BroadcastTexture The texture to use as video, while broadcasting over NDI
*/
void UNDIBroadcastComponent::ChangeBroadcastTexture(UTextureRenderTarget2D* BroadcastTexture)
{
	// Forward to the media source when one is set; otherwise a no-op.
	if (!IsValid(NDIMediaSource))
		return;

	NDIMediaSource->ChangeVideoTexture(BroadcastTexture);
}
/**
	Determines the current tally information, polled immediately (timeout of 0)

	@param IsOnPreview - A state indicating whether this source is on preview of a receiver
	@param IsOnProgram - A state indicating whether this source is on program of a receiver
*/
void UNDIBroadcastComponent::GetTallyInformation(bool& IsOnPreview, bool& IsOnProgram)
{
	// Default both outputs to 'off' so they are defined even without a source.
	IsOnPreview = false;
	IsOnProgram = false;

	if (!IsValid(NDIMediaSource))
		return;

	// Poll the source immediately (timeout = 0).
	NDIMediaSource->GetTallyInformation(IsOnPreview, IsOnProgram, 0);
}
/**
	Gets the current number of receivers connected to this source. This can be used to avoid rendering
	when nothing is connected to the video source, which can significantly improve efficiency when
	making many sources available on the network.

	@param Result The total number of connected receivers attached to the broadcast of this object
*/
void UNDIBroadcastComponent::GetNumberOfConnections(int32& Result)
{
	// Default to zero so the output is defined even without a source.
	Result = 0;

	if (!IsValid(NDIMediaSource))
		return;

	NDIMediaSource->GetNumberOfConnections(Result);
}
/**
	Attempts to immediately stop sending frames over NDI to any connected receivers
*/
void UNDIBroadcastComponent::StopBroadcasting()
{
	// Shut the sender down when a media source is set; otherwise a no-op.
	if (!IsValid(NDIMediaSource))
		return;

	NDIMediaSource->Shutdown();
}

View File

@@ -1,104 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Components/NDIFinderComponent.h>
#include <Services/NDIFinderService.h>
// Default constructor; source-collection wiring happens in BeginPlay.
UNDIFinderComponent::UNDIFinderComponent(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer) {}
// Seeds the local source collection from the finder service and subscribes to
// subsequent collection-change notifications.
void UNDIFinderComponent::BeginPlay()
{
	Super::BeginPlay();

	// Guard the collection against concurrent access while we (re)populate it
	FScopeLock Lock(&CollectionSyncContext);

	// Copy in whatever sources the finder service has already discovered,
	// so the component starts with an up-to-date view of the network
	FNDIFinderService::UpdateSourceCollection(NetworkSourceCollection);

	// Subscribe to collection-change notifications so later updates are handled locally
	FNDIFinderService::EventOnNDISourceCollectionChanged.AddUObject(
		this, &UNDIFinderComponent::OnNetworkSourceCollectionChangedEvent);
}
// Releases the local source collection and detaches from the finder service.
// @param EndPlayReason Why play is ending (forwarded to the base class)
void UNDIFinderComponent::EndPlay(const EEndPlayReason::Type EndPlayReason)
{
	Super::EndPlay(EndPlayReason);

	// Guard the collection while it is being emptied
	FScopeLock Lock(&CollectionSyncContext);

	// Drop all cached sources (and their storage)
	this->NetworkSourceCollection.Empty(0);

	// Unsubscribe so no further change notifications reach this component
	FNDIFinderService::EventOnNDISourceCollectionChanged.RemoveAll(this);
}
/**
An Event handler for when the NDI Finder Service notifies listeners that changes have been
detected in the network source collection
*/
// Handler invoked by FNDIFinderService whenever its network source collection changes.
// Refreshes the local copy and, only if something actually changed, raises both the
// blueprint event and the multicast delegate.
void UNDIFinderComponent::OnNetworkSourceCollectionChangedEvent()
{
	// We don't poll the service; instead this callback keeps our copy in sync.
	// Take the lock so the refresh is thread-safe.
	FScopeLock Lock(&CollectionSyncContext);

	// UpdateSourceCollection returns whether the collection actually changed;
	// skip all notifications when nothing differs to avoid redundant broadcasts
	if (FNDIFinderService::UpdateSourceCollection(NetworkSourceCollection))
	{
		// Blueprint-implementable hook first
		this->OnNetworkSourcesChangedEvent();

		// Then the multicast delegate, if anyone is listening
		if (this->OnNetworkSourcesChanged.IsBound())
			this->OnNetworkSourcesChanged.Broadcast(this);
	}
}
/**
Attempts to find a network source by the supplied name.
@param ConnectionInformation An existing source information structure which contains the source name
@param InSourceName A string value representing the name of the source to find
@result A value indicating whether a source with the supplied name was found
*/
const bool UNDIFinderComponent::FindNetworkSourceByName(FNDIConnectionInformation& ConnectionInformation,
														FString InSourceName)
{
	// Hold the lock so the collection cannot change mid-search
	FScopeLock Lock(&CollectionSyncContext);

	// Always hand back a cleared structure when no match is found
	ConnectionInformation.Reset();

	// Case-insensitive search for a source whose name matches the request
	const int32 FoundIndex = NetworkSourceCollection.IndexOfByPredicate(
		[&InSourceName](const FNDIConnectionInformation& Candidate) {
			return InSourceName.Equals(Candidate.SourceName, ESearchCase::IgnoreCase);
		});

	if (FoundIndex != INDEX_NONE)
	{
		ConnectionInformation = NetworkSourceCollection[FoundIndex];
		return true;
	}
	return false;
}
/**
Returns the current collection of sources found on the network
*/
// Returns a snapshot (by value) of the sources currently known on the network;
// copying under the lock keeps callers safe from concurrent collection updates.
const TArray<FNDIConnectionInformation> UNDIFinderComponent::GetNetworkSources()
{
	// Lock so the copy is taken from a consistent collection state
	FScopeLock Lock(&CollectionSyncContext);

	// Returning by value produces the thread-safe snapshot
	return this->NetworkSourceCollection;
}

View File

@@ -1,471 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Components/NDIPTZControllerComponent.h>
#include <GameFramework/Actor.h>
#include <Structures/NDIXml.h>
/**
Parsers for PTZ metadata
*/
class NDIXmlElementParser_ntk_ptz_pan_tilt_speed : public NDIXmlElementParser
{
public:
NDIXmlElementParser_ntk_ptz_pan_tilt_speed(UPTZController* PTZControllerIn)
: PTZController(PTZControllerIn)
{}
virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData)
{
PanSpeed = 0.0;
TiltSpeed = 0.0;
return true;
}
virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override
{
if(FCString::Strcmp(TEXT("pan_speed"), AttributeName) == 0)
{
PanSpeed = FCString::Atod(AttributeValue);
}
else if(FCString::Strcmp(TEXT("tilt_speed"), AttributeName) == 0)
{
TiltSpeed = FCString::Atod(AttributeValue);
}
return true;
}
virtual bool ProcessClose(const TCHAR* ElementName) override
{
PTZController->SetPTZPanTiltSpeed(PanSpeed, TiltSpeed);
return true;
}
protected:
UPTZController* PTZController;
double PanSpeed { 0.0 };
double TiltSpeed { 0.0 };
};
class NDIXmlElementParser_ntk_ptz_zoom_speed : public NDIXmlElementParser
{
public:
NDIXmlElementParser_ntk_ptz_zoom_speed(UPTZController* PTZControllerIn)
: PTZController(PTZControllerIn)
{}
virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData)
{
ZoomSpeed = 0.0;
return true;
}
virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override
{
if(FCString::Strcmp(TEXT("zoom_speed"), AttributeName) == 0)
{
ZoomSpeed = FCString::Atod(AttributeValue);
}
return true;
}
virtual bool ProcessClose(const TCHAR* ElementName) override
{
PTZController->SetPTZZoomSpeed(ZoomSpeed);
return true;
}
protected:
UPTZController* PTZController;
double ZoomSpeed { 0.0 };
};
class NDIXmlElementParser_ntk_ptz_focus : public NDIXmlElementParser
{
public:
NDIXmlElementParser_ntk_ptz_focus(UPTZController* PTZControllerIn)
: PTZController(PTZControllerIn)
{}
virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData)
{
AutoMode = true;
Distance = 0.5;
return true;
}
virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override
{
if(FCString::Strcmp(TEXT("mode"), AttributeName) == 0)
{
if(FCString::Strcmp(TEXT("manual"), AttributeValue) == 0)
AutoMode = false;
}
else if(FCString::Strcmp(TEXT("distance"), AttributeName) == 0)
{
Distance = FCString::Atod(AttributeValue);
}
return true;
}
virtual bool ProcessClose(const TCHAR* ElementName) override
{
PTZController->SetPTZFocus(AutoMode, Distance);
return true;
}
protected:
UPTZController* PTZController;
bool AutoMode { true };
double Distance { 0.5 };
};
class NDIXmlElementParser_ntk_ptz_store_preset : public NDIXmlElementParser
{
public:
NDIXmlElementParser_ntk_ptz_store_preset(UPTZController* PTZControllerIn)
: PTZController(PTZControllerIn)
{}
virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData)
{
StoreIndex = -1;
return true;
}
virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override
{
if(FCString::Strcmp(TEXT("index"), AttributeName) == 0)
{
StoreIndex = FCString::Atoi(AttributeValue);
}
return true;
}
virtual bool ProcessClose(const TCHAR* ElementName) override
{
if(StoreIndex >= 0)
{
PTZController->StorePTZState(StoreIndex);
}
return true;
}
protected:
UPTZController* PTZController;
int StoreIndex { -1 };
};
class NDIXmlElementParser_ntk_ptz_recall_preset : public NDIXmlElementParser
{
public:
NDIXmlElementParser_ntk_ptz_recall_preset(UPTZController* PTZControllerIn)
: PTZController(PTZControllerIn)
{}
virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData)
{
RecallIndex = -1;
return true;
}
virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override
{
if(FCString::Strcmp(TEXT("index"), AttributeName) == 0)
{
RecallIndex = FCString::Atoi(AttributeValue);
}
return true;
}
virtual bool ProcessClose(const TCHAR* ElementName) override
{
if(RecallIndex >= 0)
{
PTZController->RecallPTZState(RecallIndex);
}
return true;
}
protected:
UPTZController* PTZController;
int RecallIndex { -1 };
};
/**
PTZ controller component
*/
// Configures ticking and registers one XML parser per supported PTZ metadata element.
UPTZController::UPTZController()
{
	// Run InitializeComponent so the metadata subscription can be established there
	this->bWantsInitializeComponent = true;

	// Tick on the game thread every frame, even while paused, so PTZ motion and
	// easing stay responsive; no ticking on dedicated servers (nothing to render)
	this->PrimaryComponentTick.bAllowTickOnDedicatedServer = false;
	this->PrimaryComponentTick.bCanEverTick = true;
	this->PrimaryComponentTick.bHighPriority = true;
	this->PrimaryComponentTick.bRunOnAnyThread = false;
	this->PrimaryComponentTick.bStartWithTickEnabled = true;
	this->PrimaryComponentTick.bTickEvenWhenPaused = true;

	// Each element parser calls back into this controller when its element closes
	this->NDIMetadataParser = MakeShareable(new NDIXmlParser());
	this->NDIMetadataParser->AddElementParser("ntk_ptz_pan_tilt_speed", MakeShareable(new NDIXmlElementParser_ntk_ptz_pan_tilt_speed(this)));
	this->NDIMetadataParser->AddElementParser("ntk_ptz_zoom_speed", MakeShareable(new NDIXmlElementParser_ntk_ptz_zoom_speed(this)));
	this->NDIMetadataParser->AddElementParser("ntk_ptz_focus", MakeShareable(new NDIXmlElementParser_ntk_ptz_focus(this)));
	this->NDIMetadataParser->AddElementParser("ntk_ptz_store_preset", MakeShareable(new NDIXmlElementParser_ntk_ptz_store_preset(this)));
	this->NDIMetadataParser->AddElementParser("ntk_ptz_recall_preset", MakeShareable(new NDIXmlElementParser_ntk_ptz_recall_preset(this)));
}
// Default destructor; the shared metadata parser is released by its TSharedPtr
UPTZController::~UPTZController()
{}
// Subscribes this controller to the media sender's metadata-received delegate
// (when a media source was assigned in the editor/defaults).
void UPTZController::InitializeComponent()
{
	Super::InitializeComponent();

	if (IsValid(NDIMediaSource))
	{
		// RemoveAll first so re-initialization cannot double-subscribe
		this->NDIMediaSource->OnSenderMetaDataReceived.RemoveAll(this);
		this->NDIMediaSource->OnSenderMetaDataReceived.AddDynamic(this, &UPTZController::ReceiveMetaDataFromSender);
	}
}
/**
	Assigns the media sender this controller listens to, if none is set yet, and
	subscribes to its metadata delegate. Returns true only when the component ends
	up referencing the exact sender passed in.
*/
bool UPTZController::Initialize(UNDIMediaSender* InMediaSource)
{
	// A null argument can never succeed
	if (InMediaSource == nullptr)
		return false;

	// Adopt the sender only when no media source has been assigned yet
	if (this->NDIMediaSource == nullptr)
	{
		this->NDIMediaSource = InMediaSource;

		if (IsValid(NDIMediaSource))
		{
			// Re-subscribe cleanly so metadata reaches this controller exactly once
			this->NDIMediaSource->OnSenderMetaDataReceived.RemoveAll(this);
			this->NDIMediaSource->OnSenderMetaDataReceived.AddDynamic(this, &UPTZController::ReceiveMetaDataFromSender);
		}
	}

	// Success means the stored source is the one the caller provided
	return NDIMediaSource == InMediaSource;
}
// Stores the requested pan/tilt speeds (consumed each TickComponent) and
// notifies listeners via the OnPTZPanTiltSpeed delegate.
void UPTZController::SetPTZPanTiltSpeed(float PanSpeed, float TiltSpeed)
{
	PTZPanSpeed = PanSpeed;
	PTZTiltSpeed = TiltSpeed;
	OnPTZPanTiltSpeed.Broadcast(PanSpeed, TiltSpeed);
}
// Stores the requested zoom speed (consumed each TickComponent) and
// notifies listeners via the OnPTZZoomSpeed delegate.
void UPTZController::SetPTZZoomSpeed(float ZoomSpeed)
{
	PTZZoomSpeed = ZoomSpeed;
	OnPTZZoomSpeed.Broadcast(ZoomSpeed);
}
// Applies the focus settings immediately: reads the owner's current PTZ state,
// overwrites focus distance and auto-focus mode, writes the state back, then
// notifies listeners via the OnPTZFocus delegate.
void UPTZController::SetPTZFocus(bool AutoMode, float Distance)
{
	FPTZState PTZState = GetPTZStateFromUE();
	PTZState.FocusDistance = Distance;
	PTZState.bAutoFocus = AutoMode;
	SetPTZStateToUE(PTZState);
	OnPTZFocus.Broadcast(AutoMode, Distance);
}
void UPTZController::StorePTZState(int Index)
{
if((Index >= 0) && (Index < 256))
{
FPTZState PTZState = GetPTZStateFromUE();
if(Index >= PTZStoredStates.Num())
PTZStoredStates.SetNum(Index+1);
PTZStoredStates[Index] = PTZState;
OnPTZStore.Broadcast(Index);
}
}
// Recalls the PTZ preset at Index. When PTZRecallEasing > 0 the transition is
// scheduled as an interpolation handled in TickComponent; otherwise the stored
// state is applied immediately. Out-of-range indices apply nothing.
// NOTE(review): OnPTZRecall is broadcast even for invalid indices (unlike
// StorePTZState, which only broadcasts on success) — confirm this is intended.
void UPTZController::RecallPTZState(int Index)
{
	if((Index >= 0) && (Index < PTZStoredStates.Num()))
	{
		if(PTZRecallEasing > 0)
		{
			// Defer: TickComponent eases toward this target over EasingDuration seconds
			PTZStateInterp.PTZTargetState = PTZStoredStates[Index];
			PTZStateInterp.EasingDuration = PTZRecallEasing;
			PTZStateInterp.EasingRemaining = PTZStateInterp.EasingDuration;
		}
		else
		{
			// No easing configured: snap straight to the stored state
			SetPTZStateToUE(PTZStoredStates[Index]);
		}
	}
	OnPTZRecall.Broadcast(Index);
}
/**
	Builds a PTZ state snapshot from the owning actor.
	If the owner implements IPTZControllableInterface the snapshot is delegated to it;
	otherwise pan/tilt are derived from the actor's yaw/pitch (stored as radians) and
	the camera transform keeps only the roll component of the rotation.
	@return The current PTZ state, or a default-constructed state when the component
	        has no owning actor.
*/
FPTZState UPTZController::GetPTZStateFromUE() const
{
	AActor* OwnerActor = GetOwner();

	// Fix: guard against a missing owner — the previous code dereferenced
	// OwnerActor unchecked in the fallback branch below
	if (OwnerActor == nullptr)
	{
		return FPTZState();
	}

	if (IPTZControllableInterface* ControllableObject = Cast<IPTZControllableInterface>(OwnerActor))
	{
		// The owner knows how to describe its own PTZ state
		return ControllableObject->GetPTZStateFromUE();
	}

	// Fallback: derive the state from the actor transform.
	// Euler() yields degrees as (X=roll, Y=pitch, Z=yaw)
	FPTZState PTZState;
	FTransform Transform = OwnerActor->GetActorTransform();
	FVector Euler = Transform.GetRotation().Euler();
	PTZState.Pan = FMath::DegreesToRadians(Euler[2]);
	PTZState.Tilt = FMath::DegreesToRadians(Euler[1]);
	// Keep only roll in the stored camera transform; pan/tilt are tracked separately
	Transform.SetRotation(FQuat::MakeFromEuler(FVector(Euler[0], 0.f, 0.f)));
	PTZState.CameraTransform = Transform;
	return PTZState;
}
/**
	Applies a PTZ state to the owning actor; a no-op when EnablePTZ is false.
	Delegates to IPTZControllableInterface when the owner implements it; otherwise
	rebuilds the actor rotation from the state's pan/tilt (as yaw/pitch) on top of
	the stored camera transform's roll.
	@param PTZState The state to apply
*/
void UPTZController::SetPTZStateToUE(const FPTZState& PTZState)
{
	// PTZ control can be disabled wholesale via this flag
	if (EnablePTZ != true)
	{
		return;
	}

	AActor* OwnerActor = GetOwner();

	// Fix: guard against a missing owner — the previous code dereferenced
	// OwnerActor unchecked in the fallback branch below
	if (OwnerActor == nullptr)
	{
		return;
	}

	if (IPTZControllableInterface* ControllableObject = Cast<IPTZControllableInterface>(OwnerActor))
	{
		// The owner applies the state itself
		ControllableObject->SetPTZStateToUE(PTZState);
	}
	else
	{
		// Rebuild the rotation: keep the transform's roll, replace pitch/yaw
		// with the state's tilt/pan (radians -> degrees for MakeFromEuler)
		FTransform Transform = PTZState.CameraTransform;
		FVector Euler = Transform.GetRotation().Euler();
		float Pitch = FMath::RadiansToDegrees(PTZState.Tilt);
		float Yaw = FMath::RadiansToDegrees(PTZState.Pan);
		Transform.SetRotation(FQuat::MakeFromEuler(FVector(Euler[0], Pitch, Yaw)));
		OwnerActor->SetActorTransform(Transform);
	}
}
// Per-frame PTZ update: advances any pending preset-recall easing and then applies
// the active pan/tilt/zoom speeds, clamping against the configured limits.
// Skips all work when there is neither easing in flight nor a non-zero speed.
void UPTZController::TickComponent(float DeltaTime, ELevelTick TickType, FActorComponentTickFunction* ThisTickFunction)
{
	Super::TickComponent(DeltaTime, TickType, ThisTickFunction);

	// Only touch the PTZ state when something is actually in motion
	bool bUpdatePTZ = false;
	if(PTZStateInterp.EasingRemaining > 0)
		bUpdatePTZ = true;
	if((PTZPanSpeed != 0) || (PTZTiltSpeed != 0) || (PTZZoomSpeed != 0))
		bUpdatePTZ = true;

	if(bUpdatePTZ)
	{
		FPTZState PTZState = GetPTZStateFromUE();

		// --- Preset-recall easing toward PTZTargetState ---
		if(PTZStateInterp.EasingRemaining > 0)
		{
			// Never step past the remaining easing time
			float EasingDelta = FMath::Min(PTZStateInterp.EasingRemaining, DeltaTime);
			/** Interpolate from 0 to 1 using polynomial:
				    I(F) = a*F^3 + b*F^2 + c*F + d
				with constraints:
				    Start and end points: I(0) = 0, I(1) = 1
				    Smooth stop at end:   I'(1) = 0 (velocity)
				                          I''(1) = 0 (acceleration)
				Solve to get:
				    a = 1, b = -3, c = 3, d = 0
				    I(F) = F^3 - 3*F^2 + 3*F
			*/
			float EasingFrac = (PTZStateInterp.EasingRemaining > 0) ? (EasingDelta / PTZStateInterp.EasingRemaining) : 1;
			float EasingInterp = EasingFrac*EasingFrac*EasingFrac - 3*EasingFrac*EasingFrac + 3*EasingFrac;
			// Lerp each channel toward the target by the eased fraction
			PTZState.Pan = PTZState.Pan * (1 - EasingInterp) + PTZStateInterp.PTZTargetState.Pan * EasingInterp;
			PTZState.Tilt = PTZState.Tilt * (1 - EasingInterp) + PTZStateInterp.PTZTargetState.Tilt * EasingInterp;
			PTZState.FieldOfView = PTZState.FieldOfView * (1 - EasingInterp) + PTZStateInterp.PTZTargetState.FieldOfView * EasingInterp;
			PTZState.FocusDistance = PTZState.FocusDistance * (1 - EasingInterp) + PTZStateInterp.PTZTargetState.FocusDistance * EasingInterp;
			PTZState.CameraTransform.BlendWith(PTZStateInterp.PTZTargetState.CameraTransform, EasingInterp);
			PTZStateInterp.EasingRemaining -= EasingDelta;
		}

		// --- Zoom: narrower FoV = zoomed in; clamp to optional user limit, then hard 5..170 ---
		PTZState.FieldOfView -= FMath::RadiansToDegrees(PTZZoomSpeed) * DeltaTime;
		if(PTZWithFoVLimit)
		{
			PTZState.FieldOfView = FMath::Clamp(PTZState.FieldOfView, PTZFoVMinLimit, PTZFoVMaxLimit);
		}
		PTZState.FieldOfView = FMath::Clamp(PTZState.FieldOfView, 5.f, 170.f);

		// Scale pan/tilt motion with the FoV so movement feels consistent while zoomed in
		float MovementScale = PTZState.FieldOfView / 90.f;

		// --- Pan: apply speed (optionally inverted), wrap to one revolution, clamp to limits ---
		PTZState.Pan += PTZPanSpeed * DeltaTime * MovementScale * (bPTZPanInvert ? -1 : 1);
		PTZState.Pan = FMath::Fmod(PTZState.Pan, 2*PI);
		if(PTZWithPanLimit)
		{
			PTZState.Pan = FMath::Clamp(PTZState.Pan, FMath::DegreesToRadians(PTZPanMinLimit), FMath::DegreesToRadians(PTZPanMaxLimit));
		}

		// --- Tilt: same treatment as pan ---
		PTZState.Tilt += PTZTiltSpeed * DeltaTime * MovementScale * (bPTZTiltInvert ? -1 : 1);
		PTZState.Tilt = FMath::Fmod(PTZState.Tilt, 2*PI);
		if(PTZWithTiltLimit)
		{
			PTZState.Tilt = FMath::Clamp(PTZState.Tilt, FMath::DegreesToRadians(PTZTiltMinLimit), FMath::DegreesToRadians(PTZTiltMaxLimit));
		}

		SetPTZStateToUE(PTZState);
	}
}
// Delegate handler for metadata arriving from the bound media sender: feeds the
// XML payload through the registered PTZ element parsers. Parse errors are
// currently discarded (OutErrorMessage/OutErrorLineNumber are not inspected).
void UPTZController::ReceiveMetaDataFromSender(UNDIMediaSender* Sender, FString Data)
{
	FText OutErrorMessage;
	int32 OutErrorLineNumber;
	FFastXml::ParseXmlFile(this->NDIMetadataParser.Get(),
	                       nullptr,                       // XmlFilePath
	                       Data.GetCharArray().GetData(), // XmlFileContents
	                       nullptr,                       // FeedbackContext
	                       false,                         // bShowSlowTaskDialog
	                       false,                         // bShowCancelButton
	                       OutErrorMessage,               // OutErrorMessage
	                       OutErrorLineNumber             // OutErrorLineNumber
	                       );
}

View File

@@ -1,126 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Components/NDIReceiverComponent.h>
UNDIReceiverComponent::UNDIReceiverComponent(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer) {}
/**
Initialize this component with the media source required for receiving NDI audio, video, and metadata.
	Returns false if the MediaSource has already been set. This is usually the case when this component is
initialized in Blueprints.
*/
/**
	Binds this component to a media receiver, if none is bound yet.
	Returns true only when the component ends up referencing the receiver passed in.
*/
bool UNDIReceiverComponent::Initialize(UNDIMediaReceiver* InMediaSource)
{
	// Only adopt the supplied receiver when none has been assigned yet
	const bool bCanAssign = (NDIMediaSource == nullptr) && (InMediaSource != nullptr);
	if (bCanAssign)
	{
		NDIMediaSource = InMediaSource;
	}
	// Success means the component now references exactly the receiver passed in
	return (InMediaSource != nullptr) && (NDIMediaSource == InMediaSource);
}
/**
Begin receiving NDI audio, video, and metadata frames
*/
/**
	Starts receiving NDI frames using the supplied connection information.
	@return true when the underlying receiver initialized successfully
*/
bool UNDIReceiverComponent::StartReceiver(const FNDIConnectionInformation& InConnectionInformation)
{
	// No media receiver bound: nothing can be started
	if (!IsValid(this->NDIMediaSource))
		return false;

	// Initialize the receiver in standalone mode with the supplied connection details
	return NDIMediaSource->Initialize(InConnectionInformation, UNDIMediaReceiver::EUsage::Standalone);
}
/**
Attempt to change the connection for which to get audio, video, and metadata frame from
*/
void UNDIReceiverComponent::ChangeConnection(const FNDIConnectionInformation& InConnectionInformation)
{
	// No media receiver bound: nothing to reconnect
	if (!IsValid(this->NDIMediaSource))
		return;

	// Forward the new connection details to the receiver
	NDIMediaSource->ChangeConnection(InConnectionInformation);
}
/**
This will add a metadata frame and return immediately, having scheduled the frame asynchronously
*/
void UNDIReceiverComponent::SendMetadataFrame(const FString& metadata)
{
	// No media receiver bound: drop the metadata silently
	if (!IsValid(this->NDIMediaSource))
		return;

	// Queue the metadata frame on the receiver (asynchronous)
	NDIMediaSource->SendMetadataFrame(metadata);
}
/**
	This will set up the upstream tally notifications. If no streams are connected, it will automatically send
the tally state upon connection
*/
void UNDIReceiverComponent::SendTallyInformation(const bool& IsOnPreview, const bool& IsOnProgram)
{
	// No media receiver bound: nothing to notify
	if (!IsValid(this->NDIMediaSource))
		return;

	// Forward the tally state upstream via the receiver
	NDIMediaSource->SendTallyInformation(IsOnPreview, IsOnProgram);
}
/**
Attempts to stop receiving audio, video, and metadata frame from the connected source
*/
void UNDIReceiverComponent::ShutdownReceiver()
{
	// No media receiver bound: nothing to shut down
	if (!IsValid(this->NDIMediaSource))
		return;

	NDIMediaSource->Shutdown();
}
/**
Returns the current framerate of the connected source
*/
FFrameRate UNDIReceiverComponent::GetCurrentFrameRate() const
{
	// Query the bound receiver when available
	if (IsValid(NDIMediaSource))
	{
		return NDIMediaSource->GetCurrentFrameRate();
	}
	// Fallback when no receiver is bound: 60 fps
	return FFrameRate(60, 1);
}
/**
Returns the current timecode of the connected source
*/
FTimecode UNDIReceiverComponent::GetCurrentTimecode() const
{
	// Query the bound receiver when available
	if (IsValid(NDIMediaSource))
	{
		return NDIMediaSource->GetCurrentTimecode();
	}
	// Fallback when no receiver is bound: zero timecode at 60 fps
	return FTimecode::FromTimespan(FTimespan::FromMilliseconds(0.0), FFrameRate(60, 1), false, true);
}
/**
Returns the current connection information of the connected source
*/
FNDIConnectionInformation UNDIReceiverComponent::GetCurrentConnectionInformation() const
{
	// Query the bound receiver when available
	if (IsValid(NDIMediaSource))
	{
		return NDIMediaSource->GetCurrentConnectionInformation();
	}
	// Fallback when no receiver is bound: default-constructed (empty) information
	return FNDIConnectionInformation();
}
/**
Returns the current performance data of the receiver while connected to the source
*/
FNDIReceiverPerformanceData UNDIReceiverComponent::GetPerformanceData() const
{
	// Query the bound receiver when available
	if (IsValid(NDIMediaSource))
	{
		return NDIMediaSource->GetPerformanceData();
	}
	// Fallback when no receiver is bound: default-constructed (empty) data
	return FNDIReceiverPerformanceData();
}

View File

@@ -1,340 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Components/NDITriCasterExtComponent.h>
#include <Structures/NDIXml.h>
#include <Misc/EngineVersionComparison.h>
#include <EngineUtils.h>
/**
Parsers for TriCasterExt metadata
*/
class NDIXmlElementParser_tricaster_ext : public NDIXmlElementParser
{
public:
NDIXmlElementParser_tricaster_ext(UTriCasterExtComponent* TriCasterExtComponentIn)
: TriCasterExtComponent(TriCasterExtComponentIn)
{}
virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData)
{
TCData.Value = FString();
TCData.KeyValues.Empty();
return true;
}
virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override
{
if(FCString::Strcmp(TEXT("name"), AttributeName) == 0)
{}
else if(FCString::Strcmp(TEXT("value"), AttributeName) == 0)
{
TCData.Value = FString(AttributeValue);
}
else
{
TCData.KeyValues.Add(FName(AttributeName), FString(AttributeValue));
}
return true;
}
virtual bool ProcessClose(const TCHAR* ElementName) override
{
if(TCData.Value == "ndiio")
{
FString* ActorNamePtr = TCData.KeyValues.Find("actor");
FString* PropertyNamePtr = TCData.KeyValues.Find("property");
FString* PropertyValueStrPtr = TCData.KeyValues.Find("propertyvalue");
FString* ComponentNamePtr = TCData.KeyValues.Find("component");
FString* EasingDurationPtr = TCData.KeyValues.Find("easing");
if((ActorNamePtr != nullptr) && (PropertyNamePtr != nullptr) && (PropertyValueStrPtr != nullptr))
{
FString PropertyBaseName, PropertyElementName;
if(!PropertyNamePtr->Split(TEXT(":"), &PropertyBaseName, &PropertyElementName))
PropertyBaseName = *PropertyNamePtr;
FTimespan EasingDuration = 0;
if(EasingDurationPtr != nullptr)
{
double Seconds = FCString::Atod(**EasingDurationPtr);
EasingDuration = FTimespan::FromSeconds(Seconds);
}
for(TActorIterator<AActor> ActorItr(TriCasterExtComponent->GetWorld()); ActorItr; ++ActorItr)
{
AActor* Actor = *ActorItr;
if(Actor->GetName() == *ActorNamePtr)
{
UObject* FoundObject = nullptr;
FProperty* FoundProperty = nullptr;
if(ComponentNamePtr != nullptr)
{
TInlineComponentArray<UActorComponent*> PrimComponents;
Actor->GetComponents(PrimComponents, true);
for(auto& CompIt : PrimComponents)
{
if(CompIt->GetName() == *ComponentNamePtr)
{
FProperty* Property = CompIt->GetClass()->FindPropertyByName(*PropertyBaseName);
if(Property)
{
FoundObject = CompIt;
FoundProperty = Property;
break;
}
}
}
}
else
{
FProperty* ActorProperty = Actor->GetClass()->FindPropertyByName(*PropertyBaseName);
if(ActorProperty)
{
FoundObject = Actor;
FoundProperty = ActorProperty;
}
else
{
TInlineComponentArray<UActorComponent*> PrimComponents;
Actor->GetComponents(PrimComponents, true);
for(auto& CompIt : PrimComponents)
{
FProperty* CompProperty = CompIt->GetClass()->FindPropertyByName(*PropertyBaseName);
if(CompProperty)
{
FoundObject = CompIt;
FoundProperty = CompProperty;
break;
}
}
}
}
if(FoundObject && FoundProperty)
{
TriCasterExtComponent->TriCasterExt(Actor, FoundObject, FoundProperty, PropertyElementName, *PropertyValueStrPtr, EasingDuration);
break;
}
}
}
}
}
TriCasterExtComponent->TriCasterExtCustom(TCData);
return true;
}
protected:
UTriCasterExtComponent* TriCasterExtComponent;
FTriCasterExt TCData;
};
// <tricaster_ext name="net1" value="ndiio" actor="LightSource" property="Intensity" propertyvalue="1.234" />
// <tricaster_ext name="net1" value="ndiio" actor="LightSource" component="LightComponent0" property="Intensity" propertyvalue="1.234" />
// <tricaster_ext name="net1" value="ndiio" actor="LightSource" property="RelativeLocation" propertyvalue="(X=1,Y=2,Z=3)" />
// <tricaster_ext name="net1" value="ndiio" actor="LightSource" property="RelativeLocation" propertyvalue="(X=1)" />
// <tricaster_ext name="net1" value="ndiio" actor="LightSource" property="RelativeLocation:Y" propertyvalue="2" easing="5.3"/>
// Configures ticking (needed to advance property interpolations every frame)
// and registers the tricaster_ext metadata parser.
UTriCasterExtComponent::UTriCasterExtComponent()
{
	// Run InitializeComponent so the metadata subscription can be established there
	this->bWantsInitializeComponent = true;

	// Tick on the game thread every frame, even while paused; no ticking on
	// dedicated servers (nothing to render there)
	this->PrimaryComponentTick.bAllowTickOnDedicatedServer = false;
	this->PrimaryComponentTick.bCanEverTick = true;
	this->PrimaryComponentTick.bHighPriority = true;
	this->PrimaryComponentTick.bRunOnAnyThread = false;
	this->PrimaryComponentTick.bStartWithTickEnabled = true;
	this->PrimaryComponentTick.bTickEvenWhenPaused = true;

	// The parser calls back into this component when a tricaster_ext element closes
	this->NDIMetadataParser = MakeShareable(new NDIXmlParser());
	NDIMetadataParser->AddElementParser("tricaster_ext", MakeShareable(new NDIXmlElementParser_tricaster_ext(this)));
}
// Default destructor; the shared metadata parser is released by its TSharedPtr
UTriCasterExtComponent::~UTriCasterExtComponent()
{}
// Subscribes this component to the media sender's metadata-received delegate
// (when a media source was assigned in the editor/defaults).
void UTriCasterExtComponent::InitializeComponent()
{
	Super::InitializeComponent();

	if (IsValid(NDIMediaSource))
	{
		// RemoveAll first so re-initialization cannot double-subscribe
		this->NDIMediaSource->OnSenderMetaDataReceived.RemoveAll(this);
		this->NDIMediaSource->OnSenderMetaDataReceived.AddDynamic(this, &UTriCasterExtComponent::ReceiveMetaDataFromSender);
	}
}
/**
	Assigns the media sender this component listens to, if none is set yet, and
	subscribes to its metadata delegate. Returns true only when the component ends
	up referencing the exact sender passed in.
*/
bool UTriCasterExtComponent::Initialize(UNDIMediaSender* InMediaSource)
{
	// A null argument can never succeed
	if (InMediaSource == nullptr)
		return false;

	// Adopt the sender only when no media source has been assigned yet
	if (this->NDIMediaSource == nullptr)
	{
		this->NDIMediaSource = InMediaSource;

		if (IsValid(NDIMediaSource))
		{
			// Re-subscribe cleanly so metadata reaches this component exactly once
			this->NDIMediaSource->OnSenderMetaDataReceived.RemoveAll(this);
			this->NDIMediaSource->OnSenderMetaDataReceived.AddDynamic(this, &UTriCasterExtComponent::ReceiveMetaDataFromSender);
		}
	}

	// Success means the stored source is the one the caller provided
	return NDIMediaSource == InMediaSource;
}
// Schedules a property update: queues an interpolation entry that TickComponent
// eases toward PropertyValueStr over EasingDuration (a zero duration is applied
// on the next tick). Always broadcasts OnTriCasterExt, even when the inputs are
// incomplete and nothing was queued.
// @param Actor               Actor owning the target (used for edit-change notifications)
// @param Object              Object that actually holds the property (actor or component)
// @param Property            Reflection handle of the property to modify
// @param PropertyElementName Optional struct-member name ("" for whole property)
// @param PropertyValueStr    Target value in UE text-import form
// @param EasingDuration      Transition length; 0 applies immediately
void UTriCasterExtComponent::TriCasterExt(AActor* Actor, UObject* Object, FProperty* Property, FString PropertyElementName, FString PropertyValueStr, FTimespan EasingDuration)
{
	// Only queue when the full target triple is present
	if(Actor && Object && Property)
	{
		FTriCasterExtInterp Interp;
		Interp.Actor = Actor;
		Interp.Object = Object;
		Interp.Property = Property;
		Interp.PropertyElementName = PropertyElementName;
		Interp.PropertyValueStr = PropertyValueStr;
		Interp.EasingDuration = EasingDuration.GetTotalSeconds();
		Interp.EasingRemaining = Interp.EasingDuration;
		TriCasterExtInterp.Add(Interp);
	}
	OnTriCasterExt.Broadcast(Actor, Object, PropertyElementName, PropertyValueStr, EasingDuration);
}
// Surfaces the raw attribute data of a tricaster_ext element to any listeners;
// called for every parsed element regardless of its "value" attribute.
void UTriCasterExtComponent::TriCasterExtCustom(const FTriCasterExt& TCData)
{
	OnTriCasterExtCustom.Broadcast(TCData);
}
/**
	Advances all pending property interpolations by DeltaTime.
	Numeric properties (and single numeric fields of struct properties addressed as
	"Struct:Field") are eased toward their target with a smooth-stop cubic; all
	other properties fall back to UE's text importer (no easing). After each write
	the engine's change notifications are fired so editor UI and transforms update.
	Finished entries are removed from the queue.
*/
void UTriCasterExtComponent::TickComponent(float DeltaTime, ELevelTick TickType, FActorComponentTickFunction* ThisTickFunction)
{
	Super::TickComponent(DeltaTime, TickType, ThisTickFunction);

	for(int32 i = 0; i < TriCasterExtInterp.Num(); ++i)
	{
		FTriCasterExtInterp& Interp = TriCasterExtInterp[i];

		// Never step past the remaining easing time
		float EasingDelta = FMath::Min(Interp.EasingRemaining, DeltaTime);

		void* Data = Interp.Property->ContainerPtrToValuePtr<void>(Interp.Object);
		if(Data)
		{
			bool Done = false;

#if WITH_EDITOR
			// Give the object/actor the standard pre-change notification (editor builds)
			Interp.Object->PreEditChange(Interp.Property);
			Interp.Actor->PreEditChange(Interp.Property);
#endif

			if(FNumericProperty* NumericProperty = CastField<FNumericProperty>(Interp.Property))
			{
				// Plain numeric property: ease toward the target.
				// Smooth-stop cubic I(F) = 3F - 3F^2 + F^3 (I(0)=0, I(1)=1, I'(1)=I''(1)=0)
				double PropertyValue = NumericProperty->GetFloatingPointPropertyValue(Data);
				double TargetValue = FCString::Atod(*Interp.PropertyValueStr);
				double EasingFrac = (Interp.EasingRemaining > 0) ? (EasingDelta / Interp.EasingRemaining) : 1;
				double EasingInterp = 3*EasingFrac - 3*EasingFrac*EasingFrac + EasingFrac*EasingFrac*EasingFrac;
				double NewValue = PropertyValue * (1 - EasingInterp) + TargetValue * EasingInterp;
				NumericProperty->SetFloatingPointPropertyValue(Data, NewValue);
				Done = true;
			}
			else if(FStructProperty* StructProperty = CastField<FStructProperty>(Interp.Property))
			{
				// Struct property addressed as "Struct:Field": ease just that numeric field
				FProperty* FieldProperty = FindFProperty<FProperty>(StructProperty->Struct, *(Interp.PropertyElementName));
				if(FNumericProperty* StructNumericProperty = CastField<FNumericProperty>(FieldProperty))
				{
					void* FieldData = FieldProperty->ContainerPtrToValuePtr<void>(Data);
					double PropertyValue = StructNumericProperty->GetFloatingPointPropertyValue(FieldData);
					double TargetValue = FCString::Atod(*Interp.PropertyValueStr);
					double EasingFrac = (Interp.EasingRemaining > 0) ? (EasingDelta / Interp.EasingRemaining) : 1;
					double EasingInterp = 3*EasingFrac - 3*EasingFrac*EasingFrac + EasingFrac*EasingFrac*EasingFrac;
					double NewValue = PropertyValue * (1 - EasingInterp) + TargetValue * EasingInterp;
					StructNumericProperty->SetFloatingPointPropertyValue(FieldData, NewValue);
					Done = true;
				}
			}

			if(!Done)
			{
				// Non-numeric target: use UE's text importer (no easing possible).
				// "(Field=Value)" syntax updates a single member of a struct property.
				FString ImportText;
				if(!Interp.PropertyElementName.IsEmpty())
					ImportText = "(" + Interp.PropertyElementName + "=" + Interp.PropertyValueStr + ")";
				else
					ImportText = Interp.PropertyValueStr;
				Interp.Property->ImportText_Direct(*ImportText, Data, Interp.Object, 0);
			}

			// Transform-related component properties need an explicit world-transform refresh
			UActorComponent* ActorComponent = Cast<UActorComponent>(Interp.Object);
			if(ActorComponent)
			{
				if((Interp.Property->GetFName() == TEXT("RelativeLocation")) ||
				   (Interp.Property->GetFName() == TEXT("RelativeRotation")) ||
				   (Interp.Property->GetFName() == TEXT("RelativeScale3D")))
				{
					ActorComponent->UpdateComponentToWorld();
				}
			}

#if (ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION < 3) // Before 5.3
			if(Interp.Property->HasAnyPropertyFlags(CPF_Interp))
				Interp.Object->PostInterpChange(Interp.Property);
#endif

#if WITH_EDITOR
			// Fire the full post-edit chain so editor UI / construction scripts update
			TArray<const UObject*> ModifiedObjects;
			ModifiedObjects.Add(Interp.Actor);
			FPropertyChangedEvent PropertyChangedEvent(Interp.Property, EPropertyChangeType::ValueSet, MakeArrayView(ModifiedObjects));
			FEditPropertyChain PropertyChain;
			PropertyChain.AddHead(Interp.Property);
			FPropertyChangedChainEvent PropertyChangedChainEvent(PropertyChain, PropertyChangedEvent);
			Interp.Object->PostEditChangeChainProperty(PropertyChangedChainEvent);
			Interp.Actor->PostEditChangeChainProperty(PropertyChangedChainEvent);
#endif
		}

		Interp.EasingRemaining -= EasingDelta;
		// Fix: use <= 0 (robust float compare) and step the index back after
		// RemoveAtSwap — the swapped-in last element lands in slot i and would
		// otherwise be skipped until the next frame.
		if(Interp.EasingRemaining <= 0)
		{
			TriCasterExtInterp.RemoveAtSwap(i);
			--i;
		}
	}
}
// Delegate handler for metadata arriving from the bound media sender: feeds the
// XML payload through the tricaster_ext parser. Parse errors are currently
// discarded (OutErrorMessage/OutErrorLineNumber are not inspected).
void UTriCasterExtComponent::ReceiveMetaDataFromSender(UNDIMediaSender* Sender, FString Data)
{
	FText OutErrorMessage;
	int32 OutErrorLineNumber;
	FFastXml::ParseXmlFile(this->NDIMetadataParser.Get(),
	                       nullptr,                       // XmlFilePath
	                       Data.GetCharArray().GetData(), // XmlFileContents
	                       nullptr,                       // FeedbackContext
	                       false,                         // bShowSlowTaskDialog
	                       false,                         // bShowCancelButton
	                       OutErrorMessage,               // OutErrorMessage
	                       OutErrorLineNumber             // OutErrorLineNumber
	                       );
}

View File

@@ -1,264 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Components/NDIViewportCaptureComponent.h>
#include <Rendering/RenderingCommon.h>
#include <SceneView.h>
#include <SceneViewExtension.h>
#include <CanvasTypes.h>
#include <EngineModule.h>
#include <LegacyScreenPercentageDriver.h>
#include <RenderResource.h>
#include <UnrealClient.h>
#include <Engine/Engine.h>
#include <EngineUtils.h>
#include <Misc/CoreDelegates.h>
#include <Engine/TextureRenderTarget2D.h>
#include <UObject/Package.h>
// Configures the scene capture defaults used for NDI broadcasting.
UNDIViewportCaptureComponent::UNDIViewportCaptureComponent(const FObjectInitializer& ObjectInitializer)
	: Super(ObjectInitializer)
{
	// Run InitializeComponent so capture settings / delegates can be wired up there
	this->bWantsInitializeComponent = true;

	// Capture post-tonemap HDR output for broadcast
	this->CaptureSource = ESceneCaptureSource::SCS_FinalToneCurveHDR;

	// Push focal distance far out so depth-of-field blur does not affect the capture
	this->PostProcessSettings.bOverride_DepthOfFieldFocalDistance = true;
	this->PostProcessSettings.DepthOfFieldFocalDistance = 10000.f;
}
// Default destructor; delegate cleanup happens in UninitializeComponent
UNDIViewportCaptureComponent::~UNDIViewportCaptureComponent()
{}
// Applies the initial capture settings (taken from the media source unless
// overridden locally) and subscribes to broadcast configuration changes.
void UNDIViewportCaptureComponent::InitializeComponent()
{
	Super::InitializeComponent();

	// validate the Media Source object
	if (IsValid(NDIMediaSource))
	{
		// Use the sender's frame size/rate unless this component overrides them
		const auto& capture_size = !bOverrideBroadcastSettings ? NDIMediaSource->GetFrameSize() : CaptureSize;
		const auto& capture_rate = !bOverrideBroadcastSettings ? NDIMediaSource->GetFrameRate() : CaptureRate;

		// change the capture sizes as necessary
		ChangeCaptureSettings(capture_size, capture_rate);

		// Re-subscribe cleanly so configuration changes reach this component exactly once
		this->NDIMediaSource->OnBroadcastConfigurationChanged.RemoveAll(this);
		this->NDIMediaSource->OnBroadcastConfigurationChanged.AddDynamic(
			this, &UNDIViewportCaptureComponent::OnBroadcastConfigurationChanged);
	}
}
void UNDIViewportCaptureComponent::UninitializeComponent()
{
	// Detach our render target from the sender before the component is torn down
	if (IsValid(NDIMediaSource) && IsValid(TextureTarget))
	{
		NDIMediaSource->ChangeVideoTexture(nullptr);
	}

	Super::UninitializeComponent();
}
// Binds this component to a media sender (if none is bound yet), applies the
// initial capture settings, and subscribes to broadcast configuration changes.
// Returns true only when the component ends up referencing the sender passed in.
bool UNDIViewportCaptureComponent::Initialize(UNDIMediaSender* InMediaSource)
{
	// is the media source already set?
	if (this->NDIMediaSource == nullptr && InMediaSource != nullptr)
	{
		// we passed validation, so set the media source
		this->NDIMediaSource = InMediaSource;

		// validate the Media Source object
		if (IsValid(NDIMediaSource))
		{
			// Use the sender's frame size/rate unless this component overrides them
			const auto& capture_size = !bOverrideBroadcastSettings ? NDIMediaSource->GetFrameSize() : CaptureSize;
			const auto& capture_rate = !bOverrideBroadcastSettings ? NDIMediaSource->GetFrameRate() : CaptureRate;

			// change the capture sizes as necessary
			ChangeCaptureSettings(capture_size, capture_rate);

			// Re-subscribe cleanly so configuration changes reach this component exactly once
			this->NDIMediaSource->OnBroadcastConfigurationChanged.RemoveAll(this);
			this->NDIMediaSource->OnBroadcastConfigurationChanged.AddDynamic(
				this, &UNDIViewportCaptureComponent::OnBroadcastConfigurationChanged);
		}
	}

	// did we pass validation
	return InMediaSource != nullptr && InMediaSource == NDIMediaSource;
}
/**
Changes the name of the sender object as seen on the network for remote connections
@param InSourceName The new name of the source to be identified as on the network
*/
void UNDIViewportCaptureComponent::ChangeSourceName(const FString& InSourceName)
{
	// Forward the rename to the media sender, if one is assigned.
	if (!IsValid(NDIMediaSource))
	{
		return;
	}

	NDIMediaSource->ChangeSourceName(InSourceName);
}
/**
Attempts to change the Broadcast information associated with this media object
@param InConfiguration The new configuration to broadcast
*/
void UNDIViewportCaptureComponent::ChangeBroadcastConfiguration(const FNDIBroadcastConfiguration& InConfiguration)
{
	// Forward the configuration change to the media sender, if one is assigned.
	if (!IsValid(NDIMediaSource))
	{
		return;
	}

	NDIMediaSource->ChangeBroadcastConfiguration(InConfiguration);
}
/**
Attempts to change the RenderTarget used in sending video frames over NDI
@param BroadcastTexture The texture to use as video, while broadcasting over NDI
*/
void UNDIViewportCaptureComponent::ChangeBroadcastTexture(UTextureRenderTarget2D* BroadcastTexture)
{
	// Guard against concurrent access from the capture/render path.
	FScopeLock RenderLock(&UpdateRenderContext);

	TextureTarget = BroadcastTexture;
}
/**
Change the capture settings of the viewport capture
@param InCaptureSize The Capture size of the frame to capture of the viewport
@param InCaptureRate A framerate at which to capture frames of the viewport
*/
void UNDIViewportCaptureComponent::ChangeCaptureSettings(FIntPoint InCaptureSize, FFrameRate InCaptureRate)
{
	// clamp our viewport capture size to a sensible minimum of 64x64
	const int32 capture_width = FMath::Max(InCaptureSize.X, 64);
	const int32 capture_height = FMath::Max(InCaptureSize.Y, 64);

	// set the capture size
	this->CaptureSize = FIntPoint(capture_width, capture_height);

	// set the capture rate
	this->CaptureRate = InCaptureRate;

	// clamp the maximum capture rate to something reasonable (1 ms interval == 1000 fps)
	const float capture_rate_max = 1 / 1000.0f;

	// Seconds-per-frame of the requested rate. Guard against a zero (or negative)
	// numerator, which previously caused a division by zero and an inf/NaN tick
	// interval; fall back to ticking every frame in that case.
	float capture_rate = -1.0f;
	if (CaptureRate.Numerator > 0)
	{
		capture_rate = CaptureRate.Denominator / (float)CaptureRate.Numerator;
	}

	// set the primary tick interval to the sensible capture rate
	// (a negative interval means "tick every frame" for actor components)
	this->PrimaryComponentTick.TickInterval = capture_rate >= capture_rate_max ? capture_rate : -1.0f;

	// ensure we have some thread-safety
	FScopeLock Lock(&UpdateRenderContext);

	// lazily create a transient render target to capture into, then resize to match
	if (!IsValid(this->TextureTarget))
	{
		this->TextureTarget = NewObject<UTextureRenderTarget2D>(
			GetTransientPackage(), UTextureRenderTarget2D::StaticClass(), NAME_None, RF_Transient | RF_MarkAsNative);
		this->TextureTarget->UpdateResource();
	}
	this->TextureTarget->ResizeTarget(this->CaptureSize.X, this->CaptureSize.Y);
}
/**
Determines the current tally information. If you specify a timeout then it will wait until it has
changed, otherwise it will simply poll it and return the current tally immediately
@param IsOnPreview - A state indicating whether this source in on preview of a receiver
@param IsOnProgram - A state indicating whether this source is on program of a receiver
*/
void UNDIViewportCaptureComponent::GetTallyInformation(bool& IsOnPreview, bool& IsOnProgram)
{
	// Default both flags so callers get a defined answer with no media source.
	IsOnPreview = false;
	IsOnProgram = false;

	if (!IsValid(NDIMediaSource))
	{
		return;
	}

	// Poll immediately (timeout of 0) rather than waiting for a tally change.
	NDIMediaSource->GetTallyInformation(IsOnPreview, IsOnProgram, 0);
}
/**
Gets the current number of receivers connected to this source. This can be used to avoid rendering
when nothing is connected to the video source. which can significantly improve the efficiency if
you want to make a lot of sources available on the network
@param Result The total number of connected receivers attached to the broadcast of this object
*/
void UNDIViewportCaptureComponent::GetNumberOfConnections(int32& Result)
{
	// Default to zero so callers get a defined answer with no media source.
	Result = 0;

	if (!IsValid(NDIMediaSource))
	{
		return;
	}

	NDIMediaSource->GetNumberOfConnections(Result);
}
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later
void UNDIViewportCaptureComponent::UpdateSceneCaptureContents(FSceneInterface* Scene, ISceneRenderBuilder& SceneRenderBuilder)
#else
void UNDIViewportCaptureComponent::UpdateSceneCaptureContents(FSceneInterface* Scene)
#endif
{
	// ensure we have some thread-safety
	FScopeLock Lock(&UpdateRenderContext);

	// without a render target there is nothing to capture into; skip this frame
	if (TextureTarget == nullptr)
		return;

	if (IsValid(NDIMediaSource))
	{
		// hand the render target to the sender so the captured frame is broadcast
		NDIMediaSource->ChangeVideoTexture(TextureTarget);

		// Some capture sources treat alpha as opacity, some sources use transparency.
		// Alpha in NDI is opacity. Reverse the alpha mapping to always get opacity.
		bool flip_alpha = (CaptureSource == SCS_SceneColorHDR) || (CaptureSource == SCS_SceneColorHDRNoAlpha) ||
						  (CaptureSource == SCS_SceneDepth) || (CaptureSource == SCS_Normal) ||
						  (CaptureSource == SCS_BaseColor);
		if (flip_alpha == false)
			NDIMediaSource->ChangeAlphaRemap(AlphaMin, AlphaMax);
		else
			NDIMediaSource->ChangeAlphaRemap(AlphaMax, AlphaMin);

		// Do the actual capturing
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later
		Super::UpdateSceneCaptureContents(Scene, SceneRenderBuilder);
#else
		Super::UpdateSceneCaptureContents(Scene);
#endif
	}
}
void UNDIViewportCaptureComponent::OnBroadcastConfigurationChanged(UNDIMediaSender* Sender)
{
	// Keep the capture settings in sync with the sender, unless the user overrides them.
	if (bOverrideBroadcastSettings || !IsValid(Sender))
	{
		return;
	}

	ChangeCaptureSettings(Sender->GetFrameSize(), Sender->GetFrameRate());
}

View File

@@ -1,305 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <NDIIOPluginModule.h>
#include <Modules/ModuleManager.h>
#include <IMediaModule.h>
#include <NDIIOPluginAPI.h>
#include "Player/NDIMediaPlayer.h"
#include <Misc/Paths.h>
#include <GenericPlatform/GenericPlatformMisc.h>
#include <Services/NDIConnectionService.h>
#include <Services/NDIFinderService.h>
#include <Misc/MessageDialog.h>
#include <Misc/EngineVersionComparison.h>
// Meaning the plugin is being compiled with the editor
#if WITH_EDITOR
#include "ThumbnailRendering/ThumbnailManager.h"
#include "ThumbnailRendering/TextureThumbnailRenderer.h"
#include <ISettingsModule.h>
#include <Editor.h>
#include <Objects/Media/NDIMediaTexture2D.h>
#endif
#define LOCTEXT_NAMESPACE "FNDIIOPluginModule"
void FNDIIOPluginModule::StartupModule()
{
	// Doubly Ensure that this handle is nullptr
	NDI_LIB_HANDLE = nullptr;

	// Try to load and initialize the NDI runtime library before starting any services.
	if (LoadModuleDependencies())
	{
#if UE_EDITOR
		// Expose the plugin settings under Project Settings -> Plugins -> NDI.
		if (ISettingsModule* SettingsModule = FModuleManager::GetModulePtr<ISettingsModule>("Settings"))
		{
			SettingsModule->RegisterSettings(
				"Project", "Plugins", "NDI", LOCTEXT("NDISettingsName", "Vizrt NDI"),
				LOCTEXT("NDISettingsDescription", "Vizrt NDI(R) Engine Intergration Settings"),
				GetMutableDefault<UNDIIOPluginSettings>());
		}

		// Ensure that the thumbnail for the 'NDI Media Texture2D' is being updated, as the texture is being used.
		UThumbnailManager::Get().RegisterCustomRenderer(UNDIMediaTexture2D::StaticClass(),
														UTextureThumbnailRenderer::StaticClass());
#endif

		// Construct our Services
		this->NDIFinderService = MakeShareable(new FNDIFinderService());
		this->NDIConnectionService = MakeShareable(new FNDIConnectionService());

		// Start the service
		if (NDIFinderService.IsValid())
			NDIFinderService->Start();

		// Start the service
		if (NDIConnectionService.IsValid())
			NDIConnectionService->Start();
	}
	else
	{
		// The NDI runtime could not be loaded: log the failure and, in the editor,
		// point the user at the SDK download page.
#if PLATFORM_WINDOWS
		// Write an error message to the log.
		UE_LOG(LogWindows, Error,
			   TEXT("Unable to load \"Processing.NDI.Lib.x64.dll\" from the NDI 6 Runtime Directory."));

#if UE_EDITOR
		const FText& WarningMessage =
			LOCTEXT("NDIRuntimeMissing",
					"Cannot find \"Processing.NDI.Lib.x64.dll\" from the NDI 6 Runtime Directory. "
					"Continued usage of the plugin can cause instability within the editor.\r\n\r\n"
					"Please refer to the 'NDI IO Plugin for Unreal Engine Quickstart Guide' "
					"for additional information related to installation instructions for this plugin.\r\n\r\n");

		// Open a message box, showing that things will not work since the NDI Runtime Directory cannot be found
		if (FMessageDialog::Open(EAppMsgType::OkCancel, EAppReturnType::Ok, WarningMessage) == EAppReturnType::Ok)
		{
			FString URLResult = FString("");
			FPlatformProcess::LaunchURL(*FString("https://ndi.video/sdk/"), nullptr, &URLResult);
		}
#endif
#endif

#if (PLATFORM_LINUX || PLATFORM_LINUXARM64)
		// Write an error message to the log.
		UE_LOG(LogLinux, Error,
			   TEXT("Unable to load \"" NDILIB_LIBRARY_NAME "\" from the NDI 6 Runtime."));

#if UE_EDITOR
		const FText& WarningMessage =
			LOCTEXT("NDIRuntimeMissing",
					"Cannot find \"" NDILIB_LIBRARY_NAME "\" from the NDI 6 Runtime. "
					"Continued usage of the plugin can cause instability within the editor.\r\n\r\n"
					"Please refer to the 'NDI IO Plugin for Unreal Engine Quickstart Guide' "
					"for additional information related to installation instructions for this plugin.\r\n\r\n");

		// Open a message box, showing that things will not work since the NDI Runtime Directory cannot be found
		if (FMessageDialog::Open(EAppMsgType::OkCancel, EAppReturnType::Ok, WarningMessage) == EAppReturnType::Ok)
		{
			FString URLResult = FString("");
			FPlatformProcess::LaunchURL(*FString("https://ndi.video/sdk/"), nullptr, &URLResult);
		}
#endif
#endif
	}

	// supported platforms (queried through GetSupportedPlatforms for the media framework)
	SupportedPlatforms.Add(TEXT("Windows"));
	SupportedPlatforms.Add(TEXT("Linux"));
	SupportedPlatforms.Add(TEXT("LinuxAArch64"));

	// supported schemes (checked in CanPlayUrl)
	SupportedUriSchemes.Add(TEXT("ndiio"));

	// register player factory
	auto MediaModule = FModuleManager::LoadModulePtr<IMediaModule>("Media");
	if (MediaModule != nullptr)
	{
		MediaModule->RegisterPlayerFactory(*this);
	}

	// Keep application volume at full when the window loses focus, so NDI audio keeps flowing.
	FApp::SetUnfocusedVolumeMultiplier(1.f);
}
void FNDIIOPluginModule::ShutdownModule()
{
	// unregister player factory
	auto MediaModule = FModuleManager::GetModulePtr<IMediaModule>("Media");
	if (MediaModule != nullptr)
	{
		MediaModule->UnregisterPlayerFactory(*this);
	}

	// Stop the finder service started in StartupModule.
	// NOTE(review): NDIConnectionService is started in StartupModule but is never
	// shut down here -- confirm whether it needs a matching Shutdown() call.
	if (NDIFinderService.IsValid())
		NDIFinderService->Shutdown();

	// Tear down the NDI runtime / release the DLL handle.
	ShutdownModuleDependencies();
}
bool FNDIIOPluginModule::BeginBroadcastingActiveViewport()
{
	// Forward to the connection service when available; otherwise report failure.
	if (!NDIConnectionService.IsValid())
	{
		return false;
	}

	return NDIConnectionService->BeginBroadcastingActiveViewport();
}
void FNDIIOPluginModule::StopBroadcastingActiveViewport()
{
	// Forward to the connection service when available; a missing service is a no-op.
	if (!NDIConnectionService.IsValid())
	{
		return;
	}

	NDIConnectionService->StopBroadcastingActiveViewport();
}
//~ IMediaPlayerFactory interface
bool FNDIIOPluginModule::CanPlayUrl(const FString& Url, const IMediaOptions* /*Options*/, TArray<FText>* /*OutWarnings*/, TArray<FText>* OutErrors) const
{
	// Split the URL into "scheme://location"; reject URLs without a scheme marker.
	FString Scheme;
	FString Location;
	if (!Url.Split(TEXT("://"), &Scheme, &Location, ESearchCase::CaseSensitive))
	{
		if (OutErrors != nullptr)
		{
			OutErrors->Add(LOCTEXT("NoSchemeFound", "No URI scheme found"));
		}
		return false;
	}

	// Only the schemes registered in StartupModule are playable.
	const bool bSchemeSupported = SupportedUriSchemes.Contains(Scheme);
	if (!bSchemeSupported && OutErrors != nullptr)
	{
		OutErrors->Add(FText::Format(LOCTEXT("SchemeNotSupported", "The URI scheme '{0}' is not supported"), FText::FromString(Scheme)));
	}

	return bSchemeSupported;
}
TSharedPtr<IMediaPlayer, ESPMode::ThreadSafe> FNDIIOPluginModule::CreatePlayer(IMediaEventSink& EventSink)
{
	// Build a fresh NDI media player bound to the given event sink.
	TSharedPtr<IMediaPlayer, ESPMode::ThreadSafe> NewPlayer = MakeShared<FNDIMediaPlayer, ESPMode::ThreadSafe>(EventSink);
	return NewPlayer;
}
// Human-readable player name shown in media framework UI.
FText FNDIIOPluginModule::GetDisplayName() const
{
	return LOCTEXT("MediaPlayerDisplayName", "NDI Interface");
}
// Stable internal identifier for this player factory.
FName FNDIIOPluginModule::GetPlayerName() const
{
	static FName PlayerName(TEXT("NDIMedia"));
	return PlayerName;
}
// Fixed GUID identifying this player plugin to the media framework.
FGuid FNDIIOPluginModule::GetPlayerPluginGUID() const
{
	static FGuid PlayerPluginGUID(0x71b13c2b, 0x70874965, 0x8a0e23f7, 0x5be6698f);
	return PlayerPluginGUID;
}
// Platform names populated in StartupModule (Windows, Linux, LinuxAArch64).
const TArray<FString>& FNDIIOPluginModule::GetSupportedPlatforms() const
{
	return SupportedPlatforms;
}
bool FNDIIOPluginModule::SupportsFeature(EMediaFeature Feature) const
{
	// NDI streams provide audio, metadata and video; all other features are unsupported.
	switch (Feature)
	{
		case EMediaFeature::AudioSamples:
		case EMediaFeature::MetadataTracks:
		case EMediaFeature::VideoSamples:
			return true;
		default:
			return false;
	}
}
// Loads (Windows) and initializes the NDI runtime library.
// Returns true when the runtime is available for use.
bool FNDIIOPluginModule::LoadModuleDependencies()
{
#if PLATFORM_WINDOWS
	// Resolve the NDI runtime directory from the redistributable environment variable.
	const FString env_variable = TEXT(NDILIB_REDIST_FOLDER);
	const FString runtime_folder = FPlatformMisc::GetEnvironmentVariable(*env_variable);

	// Only attempt the load when the environment variable is actually set.
	// Previously the length check ran after appending the DLL name, so it could
	// never fail and an unset variable produced a bogus root-relative path.
	if (runtime_folder.Len() > 0)
	{
		const FString binaries_path = runtime_folder + "/Processing.NDI.Lib.x64.dll";

		// Load the DLL
		this->NDI_LIB_HANDLE = FPlatformProcess::GetDllHandle(*binaries_path);

		// Not required, but "correct" (see the SDK documentation)
		if (this->NDI_LIB_HANDLE != nullptr && !NDIlib_initialize())
		{
			// We were unable to initialize the library, so lets free the handle
			FPlatformProcess::FreeDllHandle(this->NDI_LIB_HANDLE);
			this->NDI_LIB_HANDLE = nullptr;
		}
	}

	// Did we successfully load the NDI library?
	return this->NDI_LIB_HANDLE != nullptr;
#endif

#if (PLATFORM_LINUX || PLATFORM_LINUXARM64)
	// On Linux the runtime is resolved by the dynamic linker; nothing to load here.
	return true;
#endif
}
void FNDIIOPluginModule::ShutdownModuleDependencies()
{
#if PLATFORM_WINDOWS
	// Tear down the NDI runtime and release the manually-loaded DLL handle.
	if (this->NDI_LIB_HANDLE != nullptr)
	{
		NDIlib_destroy();
		FPlatformProcess::FreeDllHandle(this->NDI_LIB_HANDLE);
		this->NDI_LIB_HANDLE = nullptr;
	}
#endif

#if (PLATFORM_LINUX || PLATFORM_LINUXARM64)
	// Nothing was dynamically loaded in LoadModuleDependencies on Linux, so there is
	// nothing to release here.
#endif
}
#undef LOCTEXT_NAMESPACE
IMPLEMENT_MODULE(FNDIIOPluginModule, NDIIO);

View File

@@ -1,201 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Objects/Libraries/NDIIOLibrary.h>
#include <Services/NDIFinderService.h>
#include <NDIIOPluginModule.h>
#include <FastXml.h>
const TArray<FNDIConnectionInformation> UNDIIOLibrary::K2_GetNDISourceCollection()
{
	// Hand back a copy of the finder service's current network source collection.
	TArray<FNDIConnectionInformation> SourceCollection = FNDIFinderService::GetNetworkSourceCollection();
	return SourceCollection;
}
// Looks up a discovered NDI source by name (case-insensitive). On success the
// source's connection information is copied into ConnectionInformation and true
// is returned; otherwise ConnectionInformation is left reset and false is returned.
const bool UNDIIOLibrary::K2_FindNetworkSourceByName(UObject* WorldContextObject,
													 FNDIConnectionInformation& ConnectionInformation,
													 FString InSourceName)
{
	// Ensure that the passed in information is empty
	ConnectionInformation.Reset();

	// Get the current network source collection from the finder service.
	const TArray<FNDIConnectionInformation> NetworkSourceCollection = FNDIFinderService::GetNetworkSourceCollection();

	// Get the current number of network source items in the collection
	int32 final_count = NetworkSourceCollection.Num();

	// Ensure we have a wide range of items to search through.
	int32 last_index = final_count;

	// Since the Source collection returned is already sorted alphabetically do a binary search to speed things up.
	// We are only going to do comparisons that are necessary using O(log(n)) time complexity
	// NOTE(review): this search only tracks an upper bound ('last_index'), not a lower
	// bound, so it is not a textbook binary search -- verify it cannot miss a match,
	// especially when the case-insensitive compare order differs from the sort order.
	for (int32 current_index = 0; current_index < last_index; /* current_index changed in loop */)
	{
		// Ensure that the index is valid (this will protect against negative values)
		if (NetworkSourceCollection.IsValidIndex(current_index))
		{
			// Get the source reference from the collection
			FNDIConnectionInformation source_info = NetworkSourceCollection[current_index];

			// do a comparison against the requested SourceName; non-zero means "not equal"
			if (int32 comparitor_value = InSourceName.Compare(source_info.SourceName, ESearchCase::IgnoreCase))
			{
				// Our search says that our source name is greater than the info we checked
				if (comparitor_value <= 0)
				{
					// set the last index to the current index
					last_index = current_index;

					// get halfway between the last index and the 0th index
					current_index = last_index / 2;
				}
				// Our search says that our source name is less than the info we checked
				else if (comparitor_value > 0)
				{
					// move up half the number of items within the collection
					current_index = (last_index + current_index + 1) / 2;
				}
			}
			// We found a comparable source.
			else
			{
				// Set the source information structure
				ConnectionInformation = source_info;

				// return success
				return true;
			}
		}
		// Something weird happened (maybe the first check was larger than the search term); just return a fail
		else
			return false;
	}

	return false;
}
bool UNDIIOLibrary::K2_BeginBroadcastingActiveViewport(UObject* WorldContextObject)
{
	// Resolve the NDIIO module and forward the request; false when the module is absent.
	FNDIIOPluginModule* PluginModule = FModuleManager::GetModulePtr<FNDIIOPluginModule>("NDIIO");
	if (PluginModule == nullptr)
	{
		return false;
	}

	return PluginModule->BeginBroadcastingActiveViewport();
}
void UNDIIOLibrary::K2_StopBroadcastingActiveViewport(UObject* WorldContextObject)
{
	// Resolve the NDIIO module and forward the request; a missing module is a no-op.
	FNDIIOPluginModule* PluginModule = FModuleManager::GetModulePtr<FNDIIOPluginModule>("NDIIO");
	if (PluginModule == nullptr)
	{
		return;
	}

	PluginModule->StopBroadcastingActiveViewport();
}
// Identity passthrough -- presumably a Blueprint convenience node for pin conversion;
// returns the receiver unchanged.
UNDIMediaReceiver* UNDIIOLibrary::K2_GetNDIMediaReceiver(UNDIMediaReceiver* Receiver)
{
	return Receiver;
}
// Identity passthrough -- presumably a Blueprint convenience node for pin conversion;
// returns the sender unchanged.
UNDIMediaSender* UNDIIOLibrary::K2_GetNDIMediaSender(UNDIMediaSender* Sender)
{
	return Sender;
}
// Parses an NDI metadata XML string into an array of top-level elements
// (element name, character data, and attribute map).
const TArray<FNDIMetaDataElement> UNDIIOLibrary::K2_ParseNDIMetaData(FString Data)
{
	// Minimal FastXml callback that records only top-level elements.
	class Parser : public IFastXmlCallback
	{
	public:
		Parser(TArray<FNDIMetaDataElement>& ElementsIn)
			: Elements(ElementsIn)
		{}

		virtual ~Parser()
		{}

		virtual bool ProcessXmlDeclaration(const TCHAR* ElementData, int32 XmlFileLineNumber) override
		{
			return true;
		}

		virtual bool ProcessElement(const TCHAR* ElementName, const TCHAR* ElementData, int32 XmlFileLineNumber) override
		{
			// NOTE(review): returning false on a nested element aborts the remainder of
			// the parse rather than merely skipping the child -- confirm this is the
			// intended handling of metadata containing nested XML.
			if(CurrentElementStack.Num() > 0)
				return false;

			FNDIMetaDataElement NewElement;
			NewElement.ElementName = ElementName;
			NewElement.Data = ElementData;

			// Record the top-level element and make it current so attributes attach to it.
			if(CurrentElementStack.Num() == 0)
			{
				Elements.Push(NewElement);
				CurrentElementStack.Push(&Elements.Last());
			}

			return true;
		}

		virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override
		{
			check(CurrentElementStack.Num() > 0);
			FNDIMetaDataElement* CurrentElement = CurrentElementStack.Last();
			CurrentElement->Attributes.Add(AttributeName, AttributeValue);
			return true;
		}

		virtual bool ProcessClose(const TCHAR* ElementName) override
		{
			check(CurrentElementStack.Num() > 0);
			CurrentElementStack.Pop();
			return true;
		}

		virtual bool ProcessComment(const TCHAR* Comment) override
		{
			return true;
		}

	private:
		// Output storage, owned by the caller.
		TArray<FNDIMetaDataElement>& Elements;
		// Stack of currently-open elements (depth is at most 1 given the guard above).
		TArray<FNDIMetaDataElement*> CurrentElementStack;
	};

	TArray<FNDIMetaDataElement> Elements;
	Parser Parser(Elements);

	FText OutErrorMessage;
	int32 OutErrorLineNumber;
	FFastXml::ParseXmlFile(&Parser,
						   nullptr,	// XmlFilePath
						   Data.GetCharArray().GetData(),	// XmlFileContents
						   nullptr,	// FeedbackContext
						   false,	// bShowSlowTaskDialog
						   false,	// bShowCancelButton
						   OutErrorMessage,	// OutErrorMessage
						   OutErrorLineNumber	// OutErrorLineNumber
	);

	return Elements;
}

View File

@@ -1,77 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Objects/Media/NDIMediaSoundWave.h>
#include <Objects/Media/NDIMediaReceiver.h>
UNDIMediaSoundWave::UNDIMediaSoundWave(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer)
{
	// Defaults: a non-looping, mono, 48 kHz procedural stream with no fixed end point.
	bLooping = false;
	NumChannels = 1;
	SampleRate = 48000;
	Duration = INDEFINITELY_LOOPING_DURATION;
}
/**
Set the Media Source of this object, so that when this object is called to 'GeneratePCMData' by the engine
we can request the media source to provide the pcm data from the current connected source
*/
void UNDIMediaSoundWave::SetConnectionSource(UNDIMediaReceiver* InMediaSource)
{
	// Ensure there is no thread contention for generating pcm data from the connection source
	FScopeLock Lock(&SyncContext);

	// Do we have a media source object to work with
	if (this->MediaSource != nullptr)
	{
		// Are we already registered with the incoming media source object
		if (this->MediaSource != InMediaSource)
		{
			// It doesn't look like we are registered with the incoming, make sure
			// to unregistered with the previous source
			this->MediaSource->UnregisterAudioWave(this);
		}
	}

	// Ensure we have a reference to the media source object.
	// NOTE(review): no matching Register call is made here for the new source --
	// registration presumably happens on the receiver side; verify against callers.
	this->MediaSource = InMediaSource;
}
/**
Called by the engine to generate pcm data to be 'heard' by audio listener objects
*/
int32 UNDIMediaSoundWave::OnGeneratePCMAudio(TArray<uint8>& OutAudio, int32 NumSamples)
{
	// Serialize against SetConnectionSource() and destruction.
	FScopeLock Lock(&SyncContext);

	// Pre-fill the output with silence (16-bit samples) in case no source is connected.
	OutAudio.Reset();
	OutAudio.AddZeroed(NumSamples * sizeof(int16));

	// Without a connected receiver there is nothing to generate.
	if (this->MediaSource == nullptr)
	{
		return 0;
	}

	// Ask the connected receiver to fill the buffer and report how many samples it produced.
	return MediaSource->GeneratePCMData(this, OutAudio.GetData(), NumSamples);
}
bool UNDIMediaSoundWave::IsReadyForFinishDestroy()
{
	// Hold the sync lock so destruction readiness is not evaluated mid-generation.
	FScopeLock Lock(&SyncContext);

	const bool bReady = USoundWaveProcedural::IsReadyForFinishDestroy();
	return bReady;
}

View File

@@ -1,153 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Objects/Media/NDIMediaTexture2D.h>
#include <Objects/Media/NDIMediaTextureResource.h>
#include <Misc/EngineVersionComparison.h>
UNDIMediaTexture2D::UNDIMediaTexture2D(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer)
{
	// Start without a render resource; one is created on demand in CreateResource().
	SetMyResource(nullptr);
}
// Points this texture's RHI reference at the supplied texture. When the supplied
// reference is invalid, a default 1280x720 BGRA placeholder is created on the render
// thread instead so the texture never samples a dead reference.
void UNDIMediaTexture2D::UpdateTextureReference(FRHICommandList& RHICmdList, FTextureRHIRef Reference)
{
	if (GetMyResource() != nullptr)
	{
		if (Reference.IsValid() && GetMyResource()->TextureRHI != Reference)
		{
			// Adopt the incoming texture and retarget our RHI texture reference at it.
			GetMyResource()->TextureRHI = (FTextureRHIRef&)Reference;
			RHIUpdateTextureReference(TextureReference.TextureReferenceRHI, GetMyResource()->TextureRHI);
		}
		else if (!Reference.IsValid())
		{
			if (FNDIMediaTextureResource* TextureResource = static_cast<FNDIMediaTextureResource*>(this->GetMyResource()))
			{
				ENQUEUE_RENDER_COMMAND(FNDIMediaTexture2DUpdateTextureReference)
				([this](FRHICommandListImmediate& RHICmdList) {
					static int32 DefaultWidth = 1280;
					static int32 DefaultHeight = 720;

					// Set the default video texture to reference nothing
					TRefCountPtr<FRHITexture> RenderableTexture;

					const FRHITextureCreateDesc CreateDesc = FRHITextureCreateDesc::Create2D(TEXT("NDIMediaTexture2DUpdateTextureReference"))
						.SetExtent(DefaultWidth, DefaultHeight)
						.SetFormat(EPixelFormat::PF_B8G8R8A8)
						.SetNumMips(1)
						.SetFlags(ETextureCreateFlags::Dynamic | ETextureCreateFlags::RenderTargetable)
						.SetClearValue(FClearValueBinding(FLinearColor(0.0f, 0.0f, 0.0f)));

					RenderableTexture = RHICreateTexture(CreateDesc);

					GetMyResource()->TextureRHI = (FTextureRHIRef&)RenderableTexture;
					RHIUpdateTextureReference(TextureReference.TextureReferenceRHI, GetMyResource()->TextureRHI);
				});

				// Make sure _RenderThread is executed before continuing
				FlushRenderingCommands();
			}
		}
	}
}
// Creates (replacing any previous) the texture resource backing this media texture
// and enqueues creation of a default 1280x720 BGRA placeholder on the render thread
// so the texture has something valid to sample before the first NDI frame arrives.
FTextureResource* UNDIMediaTexture2D::CreateResource()
{
	// Release any resource we previously owned before building a replacement.
	if (this->GetMyResource() != nullptr)
	{
		delete this->GetMyResource();
		this->SetMyResource(nullptr);
	}

	// 'new' never returns nullptr (it throws on failure), so the previous
	// 'if (... = new ...)' guard was always true and has been removed.
	FNDIMediaTextureResource* TextureResource = new FNDIMediaTextureResource(this);
	this->SetMyResource(TextureResource);

	ENQUEUE_RENDER_COMMAND(FNDIMediaTexture2DUpdateTextureReference)
	([this](FRHICommandListImmediate& RHICmdList) {
		static int32 DefaultWidth = 1280;
		static int32 DefaultHeight = 720;

		// Set the default video texture to reference nothing
		TRefCountPtr<FRHITexture> RenderableTexture;

		const FRHITextureCreateDesc CreateDesc = FRHITextureCreateDesc::Create2D(TEXT("NDIMediaTexture2DCreateResourceTexture"))
			.SetExtent(DefaultWidth, DefaultHeight)
			.SetFormat(EPixelFormat::PF_B8G8R8A8)
			.SetNumMips(1)
			.SetFlags(ETextureCreateFlags::Dynamic | ETextureCreateFlags::RenderTargetable)
			.SetClearValue(FClearValueBinding(FLinearColor(0.0f, 0.0f, 0.0f)));

		RenderableTexture = RHICreateTexture(CreateDesc);

		GetMyResource()->TextureRHI = (FTextureRHIRef&)RenderableTexture;
		RHIUpdateTextureReference(TextureReference.TextureReferenceRHI, GetMyResource()->TextureRHI);
	});

	return this->GetMyResource();
}
void UNDIMediaTexture2D::GetResourceSizeEx(FResourceSizeEx& CumulativeResourceSize)
{
	Super::GetResourceSizeEx(CumulativeResourceSize);

	// Account for the texture memory held by our media texture resource, when present.
	FNDIMediaTextureResource* CurrentResource = static_cast<FNDIMediaTextureResource*>(this->GetMyResource());
	if (CurrentResource != nullptr)
	{
		CumulativeResourceSize.AddUnknownMemoryBytes(CurrentResource->GetResourceSize());
	}
}
float UNDIMediaTexture2D::GetSurfaceHeight() const
{
return GetMyResource() != nullptr ? GetMyResource()->GetSizeY() : 0.0f;
}
float UNDIMediaTexture2D::GetSurfaceWidth() const
{
return GetMyResource() != nullptr ? GetMyResource()->GetSizeX() : 0.0f;
}
// 2D texture: there is no depth dimension.
float UNDIMediaTexture2D::GetSurfaceDepth() const
{
	return 0.0f;
}
// Not a texture array, so the array size is 0.
uint32 UNDIMediaTexture2D::GetSurfaceArraySize() const
{
	return 0;
}
// Samples in materials as a plain 2D texture.
EMaterialValueType UNDIMediaTexture2D::GetMaterialType() const
{
	return MCT_Texture2D;
}
// Reported as a 2D texture with no source asset backing it.
ETextureClass UNDIMediaTexture2D::GetTextureClass() const
{
	return ETextureClass::Other2DNoSource;
}
// Thin wrapper over UTexture::SetResource, kept so this class has a single
// indirection point for resource assignment.
void UNDIMediaTexture2D::SetMyResource(FTextureResource* ResourceIn)
{
	SetResource(ResourceIn);
}
// Thin wrapper over UTexture::GetResource (mutable access).
FTextureResource* UNDIMediaTexture2D::GetMyResource()
{
	return GetResource();
}
// Thin wrapper over UTexture::GetResource (read-only access).
const FTextureResource* UNDIMediaTexture2D::GetMyResource() const
{
	return GetResource();
}

View File

@@ -1,73 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Objects/Media/NDIMediaTextureResource.h>
#include <RHI.h>
#include <DeviceProfiles/DeviceProfile.h>
#include <DeviceProfiles/DeviceProfileManager.h>
#include <Objects/Media/NDIMediaTexture2D.h>
#include <RenderUtils.h>
/**
Constructs a new instance of this object specifying a media texture owner
@param Owner The media object used as the owner for this object
*/
FNDIMediaTextureResource::FNDIMediaTextureResource(UNDIMediaTexture2D* Owner)
	: MediaTexture(Owner)
{
	// Owner is retained so Init/Release can manipulate its texture reference.
}
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
void FNDIMediaTextureResource::InitRHI(FRHICommandListBase& RHICmdList)
#else
void FNDIMediaTextureResource::InitDynamicRHI()
#endif
{
	// Create the sampler for the owning media texture. The filter mode comes from the
	// active device profile's texture LOD settings; U/V use border addressing, W wraps.
	if (this->MediaTexture != nullptr)
	{
		FSamplerStateInitializerRHI SamplerStateInitializer(
			(ESamplerFilter)UDeviceProfileManager::Get().GetActiveProfile()->GetTextureLODSettings()->GetSamplerFilter(
				MediaTexture),
			AM_Border, AM_Border, AM_Wrap);
		SamplerStateRHI = RHICreateSamplerState(SamplerStateInitializer);
	}
}
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
void FNDIMediaTextureResource::ReleaseRHI()
#else
void FNDIMediaTextureResource::ReleaseDynamicRHI()
#endif
{
	// Release the TextureRHI bound by this object
	this->TextureRHI.SafeRelease();

	// Ensure that we have a owning media texture
	if (this->MediaTexture != nullptr)
	{
		// Remove the texture reference associated with the owner texture object
		RHIUpdateTextureReference(MediaTexture->TextureReference.TextureReferenceRHI, nullptr);
	}
}
// Approximate GPU memory footprint: assumes a 32-bit-per-pixel layout (PF_A8R8G8B8)
// with a single mip level.
SIZE_T FNDIMediaTextureResource::GetResourceSize()
{
	return CalcTextureSize(GetSizeX(), GetSizeY(), EPixelFormat::PF_A8R8G8B8, 1);
}
uint32 FNDIMediaTextureResource::GetSizeX() const
{
	// Width of the bound RHI texture; 0 when nothing is bound.
	if (!this->TextureRHI.IsValid())
	{
		return 0;
	}
	return TextureRHI->GetSizeXYZ().X;
}
uint32 FNDIMediaTextureResource::GetSizeY() const
{
	// Height of the bound RHI texture; 0 when nothing is bound.
	if (!this->TextureRHI.IsValid())
	{
		return 0;
	}
	return TextureRHI->GetSizeXYZ().Y;
}

View File

@@ -1,492 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include "NDIMediaPlayer.h"
#include <MediaIOCoreSamples.h>
#include <MediaIOCoreTextureSampleBase.h>
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
#include <MediaIOCoreTextureSampleConverter.h>
#endif
#include <MediaIOCoreAudioSampleBase.h>
#include <IMediaEventSink.h>
#include <IMediaTextureSampleConverter.h>
#include <Misc/EngineVersionComparison.h>
#define LOCTEXT_NAMESPACE "FNDIMediaPlayer"
// An NDI-derived media texture sample, representing a frame of video
class NDIMediaTextureSample : public FMediaIOCoreTextureSampleBase, public IMediaTextureSampleConverter
{
	using Super = FMediaIOCoreTextureSampleBase;

public:

	NDIMediaTextureSample() = default;
	virtual ~NDIMediaTextureSample() = default;

	// Copies an NDI video frame into this sample's owned buffer and records the
	// receiver that will later convert/display it. Only the UYVY and UYVA FourCC
	// formats are accepted; any other format returns false.
	bool Initialize(const NDIlib_video_frame_v2_t& InVideoFrame, FTimespan InTime, UNDIMediaReceiver* InReceiver)
	{
		FreeSample();

		VideoFrame = InVideoFrame;
		Receiver = InReceiver;

		if (InVideoFrame.FourCC == NDIlib_FourCC_video_type_UYVY)
			SetBuffer(InVideoFrame.p_data, InVideoFrame.line_stride_in_bytes * InVideoFrame.yres);
		else if (InVideoFrame.FourCC == NDIlib_FourCC_video_type_UYVA)
			// UYVA carries an additional xres*yres byte alpha plane after the UYVY data
			SetBuffer(InVideoFrame.p_data, InVideoFrame.line_stride_in_bytes * InVideoFrame.yres +
											   InVideoFrame.xres*InVideoFrame.yres);
		else
			return false;

		// Re-point the frame at our owned copy of the pixel data
		VideoFrame.p_data = Buffer.GetData();

		SetProperties(InVideoFrame.line_stride_in_bytes, InVideoFrame.xres, InVideoFrame.yres, EMediaTextureSampleFormat::CharUYVY,
					  InTime, FFrameRate(InVideoFrame.frame_rate_N, InVideoFrame.frame_rate_D), FTimecode(),
					  true);

		return true;
	}

	// Rec.709 scaled-range YUV-to-RGB conversion for the UYVY data.
	virtual const FMatrix& GetYUVToRGBMatrix() const override
	{
		return MediaShaders::YuvToRgbRec709Scaled;
	}

#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
	// Copies base configuration plus our NDI-specific frame/receiver state from
	// another sample (used by the JITR proxy-sample path).
	virtual void CopyConfiguration(const TSharedPtr<FMediaIOCoreTextureSampleBase>& SourceSample) override
	{
		Super::CopyConfiguration(SourceSample);

		if (SourceSample.IsValid())
		{
			TSharedPtr<NDIMediaTextureSample> NDISamplePtr = StaticCastSharedPtr<NDIMediaTextureSample>(SourceSample);
			VideoFrame = NDISamplePtr->VideoFrame;
			Receiver = NDISamplePtr->Receiver;
		}
	}
#endif

	// This converter produces its own output texture rather than writing into one
	// supplied by the media framework.
	virtual uint32 GetConverterInfoFlags() const override
	{
		return ConverterInfoFlags_WillCreateOutputTexture;
	}

#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 5)) // 5.5 or later
	// Asks the receiver to display the stored frame and hands its texture back as the output.
	virtual bool Convert(FRHICommandListImmediate& RHICmdList, FTextureRHIRef& InDstTexture, const FConversionHints& Hints) override
	{
		if (!Receiver)
			return false;

		FTextureRHIRef DstTexture(Receiver->DisplayFrame(VideoFrame));
		InDstTexture = DstTexture;

		return true;
	}
#else
	// Pre-5.5 variant of the conversion entry point (no command list parameter).
	virtual bool Convert(FTexture2DRHIRef & InDstTexture, const FConversionHints & Hints) override
	{
		if (!Receiver)
			return false;

		FTexture2DRHIRef DstTexture(Receiver->DisplayFrame(VideoFrame));
		InDstTexture = DstTexture;

		return true;
	}
#endif

private:
	// Frame description whose p_data points into this sample's owned Buffer.
	NDIlib_video_frame_v2_t VideoFrame;
	// Receiver used to perform the actual frame display/conversion (not owned).
	UNDIMediaReceiver* Receiver { nullptr };
	//FMediaTimeStamp Time;
	//std::vector<uint8_t> Data;
};
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
// Converter used on the 5.3+ JITR path: after the base conversion succeeds it
// forwards to the proxied NDIMediaTextureSample's own Convert implementation.
class NDIMediaTextureSampleConverter : public FMediaIOCoreTextureSampleConverter
{
	using Super = FMediaIOCoreTextureSampleConverter;

public:

	NDIMediaTextureSampleConverter() = default;
	virtual ~NDIMediaTextureSampleConverter() = default;

	// Remembers the sample being converted (weakly, to avoid extending its lifetime).
	virtual void Setup(const TSharedPtr<FMediaIOCoreTextureSampleBase>& InSample) override
	{
		FMediaIOCoreTextureSampleConverter::Setup(InSample);

		JITRProxySample = InSample;
	}

	// The wrapped sample creates its own output texture.
	virtual uint32 GetConverterInfoFlags() const override
	{
		return ConverterInfoFlags_WillCreateOutputTexture;
	}

#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 5)) // 5.5 or later
	virtual bool Convert(FRHICommandListImmediate& RHICmdList, FTextureRHIRef& InDstTexture, const FConversionHints& Hints) override
	{
		if (FMediaIOCoreTextureSampleConverter::Convert(RHICmdList, InDstTexture, Hints))
		{
			// Delegate the NDI-specific conversion to the proxied sample, if still alive.
			TSharedPtr<FMediaIOCoreTextureSampleBase> SamplePtr = JITRProxySample.Pin();
			if (SamplePtr.IsValid())
			{
				TSharedPtr<NDIMediaTextureSample> NDISamplePtr = StaticCastSharedPtr<NDIMediaTextureSample>(SamplePtr);
				return NDISamplePtr->Convert(RHICmdList, InDstTexture, Hints);
			}
		}

		return false;
	}
#else
	virtual bool Convert(FTexture2DRHIRef& InDstTexture, const FConversionHints& Hints) override
	{
		if (FMediaIOCoreTextureSampleConverter::Convert(InDstTexture, Hints))
		{
			// Delegate the NDI-specific conversion to the proxied sample, if still alive.
			TSharedPtr<FMediaIOCoreTextureSampleBase> SamplePtr = JITRProxySample.Pin();
			if (SamplePtr.IsValid())
			{
				TSharedPtr<NDIMediaTextureSample> NDISamplePtr = StaticCastSharedPtr<NDIMediaTextureSample>(SamplePtr);
				return NDISamplePtr->Convert(InDstTexture, Hints);
			}
		}

		return false;
	}
#endif

private:
	// Weak reference to the sample handed to Setup(); pinned during Convert().
	TWeakPtr<FMediaIOCoreTextureSampleBase> JITRProxySample;
};
#endif
// Recycling pool of video texture samples (avoids per-frame heap allocations).
class NDIMediaTextureSamplePool : public TMediaObjectPool<NDIMediaTextureSample>
{};
// An NDI-derived media audio sample, representing a frame of audio.
// All behavior is inherited from FMediaIOCoreAudioSampleBase; see
// FNDIMediaPlayer::PlayAudio() for how it is filled.
class NDIMediaAudioSample : public FMediaIOCoreAudioSampleBase
{
	using Super = FMediaIOCoreAudioSampleBase;
public:
};
// Recycling pool of audio samples (avoids per-frame heap allocations).
class NDIMediaAudioSamplePool : public TMediaObjectPool<NDIMediaAudioSample>
{};
// Constructs the player in the Closed state and allocates the sample pools.
// The event sink is kept by reference and used to report media events back
// to the engine.
FNDIMediaPlayer::FNDIMediaPlayer(IMediaEventSink& InEventSink)
	: Super(InEventSink)
	, NDIPlayerState(EMediaState::Closed)
	, EventSink(InEventSink)
	, TextureSamplePool(new NDIMediaTextureSamplePool)
	, AudioSamplePool(new NDIMediaAudioSamplePool)
{}
// Closes the player first (detaching all receiver delegates) so no capture
// callback can touch a pool after it has been deleted.
FNDIMediaPlayer::~FNDIMediaPlayer()
{
	Close();
	delete TextureSamplePool;
	delete AudioSamplePool;
}
// Stable unique identifier of this player plugin; the media framework uses
// it to match media sources to this player implementation.
FGuid FNDIMediaPlayer::GetPlayerPluginGUID() const
{
	static FGuid PlayerPluginGUID(0x71b13c2b, 0x70874965, 0x8a0e23f7, 0x5be6698f);
	return PlayerPluginGUID;
}
// Opens the player on the given URL/options: configures sample buffering,
// binds to (or creates) an NDI receiver, wires the capture/connection
// delegates, and starts the receiver under the player's control.
// Returns false only if the base class rejects the open.
bool FNDIMediaPlayer::Open(const FString& Url, const IMediaOptions* Options)
{
	if (!Super::Open(Url, Options))
	{
		return false;
	}
	// Buffer capacities come from the media source options; default to 8 frames each.
	MaxNumVideoFrameBuffer = Options->GetMediaOption(NDIMediaOption::MaxVideoFrameBuffer, (int64)8);
	MaxNumAudioFrameBuffer = Options->GetMediaOption(NDIMediaOption::MaxAudioFrameBuffer, (int64)8);
	MaxNumMetadataFrameBuffer = Options->GetMediaOption(NDIMediaOption::MaxAncillaryFrameBuffer, (int64)8);
	// Setup our different supported channels based on source settings
	SetupSampleChannels();
	// If the player is opened with an NDIMediaReceiver, use that. Otherwise create an internal one.
	bool bIsNDIMediaReceiver = Options->HasMediaOption(NDIMediaOption::IsNDIMediaReceiver);
	if (bIsNDIMediaReceiver)
	{
		// NOTE(review): assumes that when IsNDIMediaReceiver is set, the
		// IMediaOptions object *is* the UNDIMediaReceiver — verify against the
		// media source implementation before changing this cast.
		Receiver = static_cast<UNDIMediaReceiver*>(const_cast<IMediaOptions*>(Options));
		bInternalReceiver = false;
	}
	else
	{
		Receiver = NewObject<UNDIMediaReceiver>();
		bInternalReceiver = true;
	}
	// Hook into the video and audio captures.
	// Remove-before-add keeps the binding unique if Open() is called again.
	Receiver->OnNDIReceiverVideoCaptureEvent.Remove(VideoCaptureEventHandle);
	VideoCaptureEventHandle = Receiver->OnNDIReceiverVideoCaptureEvent.AddLambda([this](UNDIMediaReceiver* receiver, const NDIlib_video_frame_v2_t& video_frame)
	{
		this->DisplayFrame(video_frame);
	});
	Receiver->OnNDIReceiverAudioCaptureEvent.Remove(AudioCaptureEventHandle);
	AudioCaptureEventHandle = Receiver->OnNDIReceiverAudioCaptureEvent.AddLambda([this](UNDIMediaReceiver* receiver, const NDIlib_audio_frame_v2_t& audio_frame)
	{
		this->PlayAudio(audio_frame);
	});
	// Control the player's state based on the receiver connecting and disconnecting
	Receiver->OnNDIReceiverConnectedEvent.Remove(ConnectedEventHandle);
	ConnectedEventHandle = Receiver->OnNDIReceiverConnectedEvent.AddLambda([this](UNDIMediaReceiver* receiver)
	{
		this->NDIPlayerState = EMediaState::Playing;
	});
	Receiver->OnNDIReceiverDisconnectedEvent.Remove(DisconnectedEventHandle);
	DisconnectedEventHandle = Receiver->OnNDIReceiverDisconnectedEvent.AddLambda([this](UNDIMediaReceiver* receiver)
	{
		this->NDIPlayerState = EMediaState::Closed;
	});
	// Get ready to connect; TickInput() promotes the state once the receiver connects.
	CurrentState = EMediaState::Preparing;
	NDIPlayerState = EMediaState::Preparing;
	EventSink.ReceiveMediaEvent(EMediaEvent::MediaConnecting);
	// Start up the receiver under the player's control.
	// Use the provided URL as the source if given, otherwise use the connection info set for the receiver
	FString Scheme;
	FString Location;
	if (Url.Split(TEXT("://"), &Scheme, &Location, ESearchCase::CaseSensitive))
	{
		// URL of the form "scheme://source" — everything after "://" names the NDI source.
		FNDIConnectionInformation ConnectionInformation = Receiver->ConnectionSetting;
		ConnectionInformation.SourceName = Location;
		Receiver->Initialize(ConnectionInformation, UNDIMediaReceiver::EUsage::Controlled);
	}
	else
	{
		Receiver->Initialize(UNDIMediaReceiver::EUsage::Controlled);
	}
	return true;
}
// Closes the player: unbinds all receiver delegates, shuts the receiver down
// (destroying it if this player created it), clears the sample pools, and
// lets the base class finish the close.
void FNDIMediaPlayer::Close()
{
	NDIPlayerState = EMediaState::Closed;
	if (Receiver != nullptr)
	{
		// Disconnect from receiver events
		Receiver->OnNDIReceiverVideoCaptureEvent.Remove(VideoCaptureEventHandle);
		VideoCaptureEventHandle.Reset();
		Receiver->OnNDIReceiverAudioCaptureEvent.Remove(AudioCaptureEventHandle);
		AudioCaptureEventHandle.Reset();
		Receiver->OnNDIReceiverConnectedEvent.Remove(ConnectedEventHandle);
		ConnectedEventHandle.Reset();
		Receiver->OnNDIReceiverDisconnectedEvent.Remove(DisconnectedEventHandle);
		DisconnectedEventHandle.Reset();
		// Shut down the receiver
		Receiver->Shutdown();
		// If the player created the receiver, destroy the receiver
		if (bInternalReceiver)
			Receiver->ConditionalBeginDestroy();
		Receiver = nullptr;
		bInternalReceiver = false;
	}
	TextureSamplePool->Reset();
	AudioSamplePool->Reset();
	Super::Close();
}
// Game-thread tick: mirrors the receiver-driven state (NDIPlayerState) into
// the player's CurrentState, emitting the matching media events on a
// transition, then runs time management while actively playing.
void FNDIMediaPlayer::TickInput(FTimespan DeltaTime, FTimespan Timecode)
{
	const EMediaState PolledState = NDIPlayerState;
	if (PolledState != CurrentState)
	{
		CurrentState = PolledState;
		switch (PolledState)
		{
			case EMediaState::Playing:
				// The receiver connected: announce the stream to the engine.
				EventSink.ReceiveMediaEvent(EMediaEvent::TracksChanged);
				EventSink.ReceiveMediaEvent(EMediaEvent::MediaOpened);
				EventSink.ReceiveMediaEvent(EMediaEvent::PlaybackResumed);
				break;
			case EMediaState::Error:
				// Opening failed: report the failure and tear the player down.
				EventSink.ReceiveMediaEvent(EMediaEvent::MediaOpenFailed);
				Close();
				break;
			default:
				break;
		}
	}
	// Time management only runs while actively playing (Close() above may
	// have changed CurrentState again, so re-check it here).
	if (CurrentState == EMediaState::Playing)
	{
		TickTimeManagement();
	}
}
// Fetch tick: while preparing or playing, pulls the next audio/video frames
// from the receiver (which feeds DisplayFrame()/PlayAudio() via the capture
// delegates bound in Open()), then performs per-frame bookkeeping.
void FNDIMediaPlayer::TickFetch(FTimespan DeltaTime, FTimespan Timecode)
{
	Super::TickFetch(DeltaTime, Timecode);
	if ((CurrentState == EMediaState::Preparing) || (CurrentState == EMediaState::Playing))
	{
		if (Receiver != nullptr)
		{
			// Ask receiver to capture a new frame of video and audio.
			// Will call DisplayFrame() and PlayAudio() through capture event.
			Receiver->CaptureConnectedAudio();
			Receiver->CaptureConnectedVideo();
		}
	}
	if (CurrentState == EMediaState::Playing)
	{
		ProcessFrame();
		VerifyFrameDropCount();
	}
}
// Per-frame processing hook. Currently a no-op: the commented-out lines show
// debug track-format updates that were deliberately disabled.
void FNDIMediaPlayer::ProcessFrame()
{
	if (CurrentState == EMediaState::Playing)
	{
		// No need to lock here. That info is only used for debug information.
		//AudioTrackFormat.NumChannels = 0;//NDIThreadAudioChannels;
		//AudioTrackFormat.SampleRate = 0;//NDIThreadAudioSampleRate;
	}
}
// Receiver video-capture callback: wraps the captured NDI frame in a pooled
// texture sample (stamped with the current platform time) and queues it for
// the media framework. Frames that fail to initialize are dropped.
void FNDIMediaPlayer::DisplayFrame(const NDIlib_video_frame_v2_t& video_frame)
{
	auto TextureSample = TextureSamplePool->AcquireShared();
	if (TextureSample->Initialize(video_frame, FTimespan::FromSeconds(GetPlatformSeconds()), Receiver))
	{
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
		AddVideoSample(TextureSample);
#else
		Samples->AddVideo(TextureSample);
#endif
	}
}
// Receiver audio-capture callback: converts the planar float NDI audio into
// the 32-bit signed interleaved layout UE expects and queues it as an audio
// sample. Frames are dropped if a buffer cannot be obtained or the sample
// properties are rejected.
void FNDIMediaPlayer::PlayAudio(const NDIlib_audio_frame_v2_t& audio_frame)
{
	auto AudioSample = AudioSamplePool->AcquireShared();
	// UE wants 32bit signed interleaved audio data, so need to convert the NDI audio.
	// Fortunately the NDI library has a utility function to do that.
	// Get a buffer to convert to
	const int32 available_samples = audio_frame.no_samples * audio_frame.no_channels;
	void* SampleBuffer = AudioSample->RequestBuffer(available_samples);
	if (SampleBuffer != nullptr)
	{
		// Format to convert to
		// NOTE(review): the 20 is presumably the NDI reference level in dB —
		// confirm against NDIlib_audio_frame_interleaved_32s_t in the NDI SDK.
		NDIlib_audio_frame_interleaved_32s_t audio_frame_32s(
			audio_frame.sample_rate,
			audio_frame.no_channels,
			audio_frame.no_samples,
			audio_frame.timecode,
			20,
			static_cast<int32_t*>(SampleBuffer));
		// Convert received NDI audio
		NDIlib_util_audio_to_interleaved_32s_v2(&audio_frame, &audio_frame_32s);
		// Supply converted audio data, stamped with the current platform time
		if (AudioSample->SetProperties(available_samples
		                             , audio_frame_32s.no_channels
		                             , audio_frame_32s.sample_rate
		                             , FTimespan::FromSeconds(GetPlatformSeconds())
		                             , TOptional<FTimecode>()))
		{
			Samples->AddAudio(AudioSample);
		}
	}
}
// Currently a no-op: frame-drop accounting is not implemented for NDI sources.
void FNDIMediaPlayer::VerifyFrameDropCount()
{
}
// Reports whether the underlying NDI receiver is connected and delivering
// frames; the player counts as "hardware ready" only while Playing.
// (The comparison already yields a bool, so the former `? true : false` was redundant.)
bool FNDIMediaPlayer::IsHardwareReady() const
{
	return NDIPlayerState == EMediaState::Playing;
}
// Initializes the video/audio/metadata sample queues from the base sampling
// settings, overriding only the per-channel buffer size with the values read
// from the media options in Open().
void FNDIMediaPlayer::SetupSampleChannels()
{
	FMediaIOSamplingSettings VideoSettings = BaseSettings;
	VideoSettings.BufferSize = MaxNumVideoFrameBuffer;
	Samples->InitializeVideoBuffer(VideoSettings);
	FMediaIOSamplingSettings AudioSettings = BaseSettings;
	AudioSettings.BufferSize = MaxNumAudioFrameBuffer;
	Samples->InitializeAudioBuffer(AudioSettings);
	FMediaIOSamplingSettings MetadataSettings = BaseSettings;
	MetadataSettings.BufferSize = MaxNumMetadataFrameBuffer;
	Samples->InitializeMetadataBuffer(MetadataSettings);
}
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
// Hands the media framework a pooled texture sample; safe to call from any
// thread (the pool provides the synchronization).
TSharedPtr<FMediaIOCoreTextureSampleBase> FNDIMediaPlayer::AcquireTextureSample_AnyThread() const
{
	return TextureSamplePool->AcquireShared();
}
// Factory for the JITR converter that forwards conversion to the NDI sample.
TSharedPtr<FMediaIOCoreTextureSampleConverter> FNDIMediaPlayer::CreateTextureSampleConverter() const
{
	return MakeShared<NDIMediaTextureSampleConverter>();
}
#endif
//~ ITimedDataInput interface
#if WITH_EDITOR
// No custom editor icon is provided for NDI timed-data inputs.
const FSlateBrush* FNDIMediaPlayer::GetDisplayIcon() const
{
	return nullptr;
}
#endif
#undef LOCTEXT_NAMESPACE

View File

@@ -1,75 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <NDIIOPluginAPI.h>
#include <Objects/Media/NDIMediaReceiver.h>
#include <MediaIOCorePlayerBase.h>
// Media player that plays NDI network streams through the MediaIOCore
// framework. Video/audio arrive via an UNDIMediaReceiver (either supplied by
// the media source or created internally) and are queued as pooled samples.
class FNDIMediaPlayer : public FMediaIOCorePlayerBase
{
	using Super = FMediaIOCorePlayerBase;
public:
	FNDIMediaPlayer(IMediaEventSink& InEventSink);
	virtual ~FNDIMediaPlayer();
	//~ IMediaPlayer interface
	virtual FGuid GetPlayerPluginGUID() const override;
	virtual bool Open(const FString& Url, const IMediaOptions* Options) override;
	virtual void Close() override;
	virtual void TickInput(FTimespan DeltaTime, FTimespan Timecode) override;
	virtual void TickFetch(FTimespan DeltaTime, FTimespan Timecode) override;
protected:
	virtual bool IsHardwareReady() const override;
	virtual void SetupSampleChannels() override;
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
	virtual TSharedPtr<FMediaIOCoreTextureSampleBase> AcquireTextureSample_AnyThread() const override;
	virtual TSharedPtr<FMediaIOCoreTextureSampleConverter> CreateTextureSampleConverter() const override;
#endif
	// Receiver capture callbacks: queue one video / audio frame respectively.
	void DisplayFrame(const NDIlib_video_frame_v2_t& video_frame);
	void PlayAudio(const NDIlib_audio_frame_v2_t& audio_frame);
	// Per-frame bookkeeping hooks called from TickFetch().
	void ProcessFrame();
	void VerifyFrameDropCount();
public:
	//~ ITimedDataInput interface
#if WITH_EDITOR
	virtual const FSlateBrush* GetDisplayIcon() const override;
#endif
private:
	/** Max sample count our different buffer can hold. Taken from MediaSource */
	int32 MaxNumAudioFrameBuffer = 0;
	int32 MaxNumMetadataFrameBuffer = 0;
	int32 MaxNumVideoFrameBuffer = 0;
	/** Current state of the media player (driven by receiver connect/disconnect events). */
	EMediaState NDIPlayerState = EMediaState::Closed;
	/** The media event handler. */
	IMediaEventSink& EventSink;
	// Source of frames; owned only when bInternalReceiver is true.
	UNDIMediaReceiver* Receiver = nullptr;
	// True when this player created Receiver itself (and must destroy it on Close()).
	bool bInternalReceiver = true;
	// Delegate handles for the receiver events bound in Open().
	FDelegateHandle VideoCaptureEventHandle;
	FDelegateHandle AudioCaptureEventHandle;
	FDelegateHandle ConnectedEventHandle;
	FDelegateHandle DisconnectedEventHandle;
	// Sample pools owned by this player (deleted in the destructor).
	class NDIMediaTextureSamplePool* TextureSamplePool;
	class NDIMediaAudioSamplePool* AudioSamplePool;
};

View File

@@ -1,404 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Services/NDIConnectionService.h>
#include <UObject/UObjectGlobals.h>
#include <UObject/Package.h>
#include <Misc/CoreDelegates.h>
#include <NDIIOPluginSettings.h>
#include <Objects/Media/NDIMediaSender.h>
#include <Framework/Application/SlateApplication.h>
#include <Misc/EngineVersionComparison.h>
#include <Engine/Engine.h>
#include <TextureResource.h>
#if WITH_EDITOR
#include <Editor.h>
#endif
/** Define Global Accessors */
// Delegate fired on the render thread each frame so senders can push video.
FNDIConnectionServiceSendVideoEvent FNDIConnectionService::EventOnSendVideoFrame;
// Per-submix audio delegates; a nullptr key stands for the main submix.
TMap<USoundSubmix*, FNDIConnectionServiceSendAudioEvent> FNDIConnectionService::SubmixSendAudioFrameEvents;
// Guards audio capture state between the audio thread and game thread.
FCriticalSection FNDIConnectionService::AudioSyncContext;
// Guards render-thread access to the viewport sender state.
FCriticalSection FNDIConnectionService::RenderSyncContext;
/** ************************ **/
/**
	Constructs a new instance of this object. All real setup happens in Start().
*/
FNDIConnectionService::FNDIConnectionService() {}
// Begin the service: reads the plugin settings, creates the transient video
// texture and active-viewport sender, hooks the end-of-frame render callback,
// and (depending on editor/game mode) arranges for broadcasting/audio capture
// to start. Idempotent — subsequent calls are no-ops. Always returns true.
bool FNDIConnectionService::Start()
{
	if (!bIsInitialized)
	{
		bIsInitialized = true;
		// Define some basic properties
		FNDIBroadcastConfiguration Configuration;
		FString BroadcastName = TEXT("Unreal Engine");
		EObjectFlags Flags = RF_Public | RF_Standalone | RF_Transient | RF_MarkAsNative;
		bool bBeginBroadcastOnPlay = false;
		// Load the plugin settings for broadcasting the active viewport
		if (auto* CoreSettings = NewObject<UNDIIOPluginSettings>())
		{
			// Define the configuration properties (frame size clamped to 240..3840)
			Configuration.FrameRate = CoreSettings->BroadcastRate;
			Configuration.FrameSize = FIntPoint(FMath::Clamp(CoreSettings->PreferredFrameSize.X, 240, 3840),
												FMath::Clamp(CoreSettings->PreferredFrameSize.Y, 240, 3840));
			// Set the broadcast name
			BroadcastName = CoreSettings->ApplicationStreamName;
			bBeginBroadcastOnPlay = CoreSettings->bBeginBroadcastOnPlay;
			// clean-up the settings object
			CoreSettings->ConditionalBeginDestroy();
			CoreSettings = nullptr;
		}
		/** Construct the Active Viewport video texture */
		this->VideoTexture = NewObject<UTextureRenderTarget2D>(
			GetTransientPackage(), UTextureRenderTarget2D::StaticClass(), TEXT("NDIViewportVideoTexture"), Flags);
		/** Construct the active viewport sender */
		this->ActiveViewportSender = NewObject<UNDIMediaSender>(GetTransientPackage(), UNDIMediaSender::StaticClass(),
																TEXT("NDIViewportSender"), Flags);
		VideoTexture->UpdateResource();
		// Update the active viewport sender, with the properties defined in the settings configuration
		this->ActiveViewportSender->ChangeSourceName(BroadcastName);
		this->ActiveViewportSender->ChangeVideoTexture(VideoTexture);
		this->ActiveViewportSender->ChangeBroadcastConfiguration(Configuration);
		// Hook into the core for the end of frame handlers
		FCoreDelegates::OnEndFrameRT.AddRaw(this, &FNDIConnectionService::OnEndRenderFrame);
		if (!GIsEditor)
		{
			// Standalone game: audio capture starts once the engine is initialized,
			// and optionally begin broadcasting immediately.
			FCoreDelegates::OnPostEngineInit.AddRaw(this, &FNDIConnectionService::OnPostEngineInit);
			FCoreDelegates::OnEnginePreExit.AddRaw(this, &FNDIConnectionService::OnEnginePreExit);
			if (bBeginBroadcastOnPlay)
				BeginBroadcastingActiveViewport();
		}
#if WITH_EDITOR
		else
		{
			// Editor: start/stop broadcasting around PIE sessions, re-reading the
			// settings each time PIE starts (they may have changed in the meantime).
			FEditorDelegates::PostPIEStarted.AddLambda([this](const bool Success) {
				if (auto* CoreSettings = NewObject<UNDIIOPluginSettings>())
				{
					if (CoreSettings->bBeginBroadcastOnPlay == true)
						BeginBroadcastingActiveViewport();
					else
						BeginAudioCapture();
					// clean-up the settings object
					CoreSettings->ConditionalBeginDestroy();
					CoreSettings = nullptr;
				}
				bIsInPIEMode = true;
			});
			FEditorDelegates::PrePIEEnded.AddLambda([this](const bool Success) { StopBroadcastingActiveViewport(); });
		}
#endif
	}
	return true;
}
// Stop the service: takes both sync locks so no audio/render callback is in
// flight, stops audio capture, unhooks the frame delegates, and shuts down
// the active-viewport broadcast.
void FNDIConnectionService::Shutdown()
{
	// Wait for the sync context locks
	FScopeLock AudioLock(&AudioSyncContext);
	FScopeLock RenderLock(&RenderSyncContext);
	// reset the initialization properties
	bIsInitialized = false;
	StopAudioCapture();
	// unbind our handlers for our frame events
	FCoreDelegates::OnEndFrame.RemoveAll(this);
	FCoreDelegates::OnEndFrameRT.RemoveAll(this);
	// Cleanup the broadcasting of the active viewport
	StopBroadcastingActiveViewport();
}
// Handler for when the render thread frame has ended
void FNDIConnectionService::OnEndRenderFrame()
{
FScopeLock Lock(&RenderSyncContext);
if (bIsInitialized)
{
int64 ticks = FDateTime::Now().GetTimeOfDay().GetTicks();
if (FNDIConnectionService::EventOnSendVideoFrame.IsBound())
{
FNDIConnectionService::EventOnSendVideoFrame.Broadcast(ticks);
}
}
}
// Registers this service as a submix buffer listener on the active audio
// device so OnNewSubmixBuffer() starts receiving audio. On 5.4+ one
// registration is made per entry in SubmixSendAudioFrameEvents (nullptr key
// maps to the main submix); older engines use the single-listener API.
// No-op unless the service is initialized, and idempotent once audio is up.
void FNDIConnectionService::BeginAudioCapture()
{
	if (bIsInitialized)
	{
		if (!bIsAudioInitialized)
		{
			if (GEngine)
			{
				FAudioDeviceHandle AudioDevice = GEngine->GetActiveAudioDevice();
				if (AudioDevice.IsValid())
				{
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 4)) // 5.4 or later
					for (auto& SendAudioEvent : SubmixSendAudioFrameEvents)
					{
						if (SendAudioEvent.Key == nullptr)
							AudioDevice->RegisterSubmixBufferListener(AsShared(), AudioDevice->GetMainSubmixObject());
						else
							AudioDevice->RegisterSubmixBufferListener(AsShared(), *SendAudioEvent.Key);
					}
#else
					AudioDevice->RegisterSubmixBufferListener(this);
#endif
					bIsAudioInitialized = true;
				}
			}
		}
	}
}
// Mirror of BeginAudioCapture(): unregisters the submix buffer listener(s)
// from the active audio device and clears the audio-initialized flag.
// No-op when audio capture was never started.
void FNDIConnectionService::StopAudioCapture()
{
	if (bIsAudioInitialized)
	{
		if (GEngine)
		{
			FAudioDeviceHandle AudioDevice = GEngine->GetActiveAudioDevice();
			if (AudioDevice)
			{
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 4)) // 5.4 or later
				for (auto& SendAudioEvent : SubmixSendAudioFrameEvents)
				{
					if (SendAudioEvent.Key == nullptr)
						AudioDevice->UnregisterSubmixBufferListener(AsShared(), AudioDevice->GetMainSubmixObject());
					else
						AudioDevice->UnregisterSubmixBufferListener(AsShared(), *SendAudioEvent.Key);
				}
#else
				AudioDevice->UnregisterSubmixBufferListener(this);
#endif
			}
		}
		bIsAudioInitialized = false;
	}
}
// Standalone-game hook (bound in Start()): the audio device exists once the
// engine has initialized, so start capturing audio now.
void FNDIConnectionService::OnPostEngineInit()
{
	BeginAudioCapture();
}
// Standalone-game hook (bound in Start()): release the audio-device listener
// before the engine tears the device down.
void FNDIConnectionService::OnEnginePreExit()
{
	StopAudioCapture();
}
// Starts broadcasting the active viewport over NDI: refreshes the sender
// configuration from the plugin settings, initializes the sender, and hooks
// the Slate back-buffer resize/present callbacks so the sender's video
// texture tracks the viewport. Also starts audio capture. Safe to call when
// already broadcasting (the body is skipped). Always returns true.
bool FNDIConnectionService::BeginBroadcastingActiveViewport()
{
	if (!bIsBroadcastingActiveViewport && IsValid(ActiveViewportSender))
	{
		// Load the plugin settings for broadcasting the active viewport
		if (auto* CoreSettings = NewObject<UNDIIOPluginSettings>())
		{
			// Define some basic properties
			FNDIBroadcastConfiguration Configuration;
			FString BroadcastName = TEXT("Unreal Engine");
			// Define the configuration properties (frame size clamped to 240..3840)
			Configuration.FrameRate = CoreSettings->BroadcastRate;
			Configuration.FrameSize = FIntPoint(FMath::Clamp(CoreSettings->PreferredFrameSize.X, 240, 3840),
												FMath::Clamp(CoreSettings->PreferredFrameSize.Y, 240, 3840));
			// Set the broadcast name
			BroadcastName = CoreSettings->ApplicationStreamName;
			// clean-up the settings object
			CoreSettings->ConditionalBeginDestroy();
			CoreSettings = nullptr;
			// Update the active viewport sender, with the properties defined in the settings configuration
			this->ActiveViewportSender->ChangeSourceName(BroadcastName);
			this->ActiveViewportSender->ChangeBroadcastConfiguration(Configuration);
		}
		// we don't want to perform the linear conversion for the active viewport,
		// since it's already had the conversion completed by the engine before passing to the sender
		ActiveViewportSender->PerformLinearTosRGBConversion(false);
		// Do not enable PTZ capabilities for active viewport sender
		ActiveViewportSender->EnablePTZ(false);
		// Initialize the sender, this will automatically start rendering output via NDI
		ActiveViewportSender->Initialize(nullptr);
		// We've initialized the active viewport
		bIsBroadcastingActiveViewport = true;
		// However we need to update the 'Video Texture' to the active viewport back buffer...
		FSlateApplication::Get().GetRenderer()->OnPreResizeWindowBackBuffer().AddRaw(
			this, &FNDIConnectionService::OnActiveViewportBackbufferPreResize);
		FSlateApplication::Get().GetRenderer()->OnBackBufferReadyToPresent().AddRaw(
			this, &FNDIConnectionService::OnActiveViewportBackbufferReadyToPresent);
		BeginAudioCapture();
	}
	// always return true
	return true;
}
// Handler for when the active viewport back buffer has been resized.
// Releases the video texture's RHI reference (which still points at the old
// back buffer) and blocks on a render fence so the render thread has dropped
// its references before the resize proceeds. The next present re-binds the
// new back buffer in OnActiveViewportBackbufferReadyToPresent().
void FNDIConnectionService::OnActiveViewportBackbufferPreResize(void* Backbuffer)
{
	check(IsInGameThread());
	// Ensure we have a valid video texture
	FTextureResource* TextureResource = GetVideoTextureResource();
	if (TextureResource != nullptr)
	{
		FRenderCommandFence Fence;
		TextureResource->TextureRHI.SafeRelease();
		this->ActiveViewportSender->ChangeVideoTexture(VideoTexture);
		ENQUEUE_RENDER_COMMAND(FlushRHIThreadToUpdateTextureRenderTargetReference)(
			[this](FRHICommandListImmediate& RHICmdList)
			{
				// Clear the texture reference and force the RHI thread to catch up
				// so no stale back-buffer reference survives the resize.
				RHIUpdateTextureReference(VideoTexture->TextureReference.TextureReferenceRHI, nullptr);
				RHICmdList.ImmediateFlush(EImmediateFlushType::FlushRHIThread);
			});
		// Wait for render thread to finish, so that renderthread texture references are updated
		Fence.BeginFence();
		Fence.Wait();
	}
}
// Handler for when the back buffer is ready to present to the end user.
// For the game window (or any regular window while in PIE), points the video
// texture at the window's back buffer so the NDI sender streams exactly what
// is presented. The RHI reference is only rebound when the back buffer changed.
void FNDIConnectionService::OnActiveViewportBackbufferReadyToPresent(SWindow& Window,
																	 const FTextureRHIRef& Backbuffer)
{
	if (Window.GetType() == EWindowType::GameWindow || (Window.IsRegularWindow() && IsRunningInPIE()))
	{
		FTextureResource* TextureResource = GetVideoTextureResource();
		if (TextureResource != nullptr)
		{
			// Lets improve the performance a bit: only rebind when the buffer changed
			if (TextureResource->TextureRHI != Backbuffer)
			{
				// NOTE(review): C-style const-stripping cast of the back buffer
				// reference — consider ConstCast, but behavior is relied upon here.
				TextureResource->TextureRHI = (FTextureRHIRef&)Backbuffer;
				this->ActiveViewportSender->ChangeVideoTexture(VideoTexture);
				RHIUpdateTextureReference(VideoTexture->TextureReference.TextureReferenceRHI, Backbuffer);
			}
		}
	}
}
// Stops broadcasting the active viewport: stops audio capture, removes the
// Slate back-buffer callbacks, shuts down the sender, and releases the video
// texture's reference to the (no longer ours) back buffer.
void FNDIConnectionService::StopBroadcastingActiveViewport()
{
	// Wait for the sync context locks
	FScopeLock RenderLock(&RenderSyncContext);
	// reset the initialization properties
	bIsInPIEMode = false;
	StopAudioCapture();
	// Ensure that if the active viewport sender is active, that we shut it down
	if (IsValid(this->ActiveViewportSender))
	{
		FSlateApplication::Get().GetRenderer()->OnPreResizeWindowBackBuffer().RemoveAll(this);
		FSlateApplication::Get().GetRenderer()->OnBackBufferReadyToPresent().RemoveAll(this);
		// shutdown the active viewport sender (just in case it was activated)
		this->ActiveViewportSender->Shutdown();
		// reset the broadcasting flag, so that we can restart the broadcast later
		this->bIsBroadcastingActiveViewport = false;
		FTextureResource* TextureResource = GetVideoTextureResource();
		if (TextureResource != nullptr)
		{
			TextureResource->TextureRHI.SafeRelease();
			this->ActiveViewportSender->ChangeVideoTexture(VideoTexture);
		}
	}
}
// Returns the render resource of the viewport video texture, or nullptr when
// the texture object is not (or no longer) valid.
FTextureResource* FNDIConnectionService::GetVideoTextureResource() const
{
	return IsValid(this->VideoTexture) ? this->VideoTexture->GetResource() : nullptr;
}
// Submix buffer listener callback (audio render thread): forwards captured
// audio to the delegate registered for the owning submix. On 5.4+ the main
// submix is normalized to the nullptr key before the lookup; pre-5.4 only the
// main-submix (nullptr) delegate ever receives data.
void FNDIConnectionService::OnNewSubmixBuffer(const USoundSubmix* OwningSubmix, float* AudioData, int32 NumSamples, int32 NumChannels, const int32 SampleRate, double AudioClock)
{
	if (NumSamples > 0)
	{
		FScopeLock Lock(&AudioSyncContext);
		if (bIsAudioInitialized)
		{
			int64 ticks = FDateTime::Now().GetTimeOfDay().GetTicks();
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 4)) // 5.4 or later
			FAudioDeviceHandle AudioDevice = GEngine->GetActiveAudioDevice();
			if (&AudioDevice->GetMainSubmixObject() == OwningSubmix)
				OwningSubmix = nullptr;
#else
			OwningSubmix = nullptr;
#endif
			FNDIConnectionServiceSendAudioEvent* SendAudioEvent = SubmixSendAudioFrameEvents.Find(OwningSubmix);
			if (SendAudioEvent)
			{
				if (SendAudioEvent->IsBound())
				{
					SendAudioEvent->Broadcast(ticks, AudioData, NumSamples, NumChannels, SampleRate, AudioClock);
				}
			}
		}
	}
}
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 4)) // 5.4 or later
// Name reported to the audio mixer for this submix buffer listener (5.4+ API).
const FString& FNDIConnectionService::GetListenerName() const
{
	static const FString ListenerName(TEXT("NDIConnectionServiceListener"));
	return ListenerName;
}
#endif

View File

@@ -1,232 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Services/NDIFinderService.h>
#include <Async/Async.h>
#include <Misc/EngineVersionComparison.h>
#include <NDIIOPluginAPI.h>
/** Define Global Accessors */
// Shared NDI finder handle: created in the service constructor, destroyed in Shutdown().
static NDIlib_find_instance_t NDI_FIND_INSTANCE = nullptr;
// Guards NDI_FIND_INSTANCE and NetworkSourceCollection across threads.
static FCriticalSection NDI_FIND_SYNC_CONTEXT;
// Broadcast on the game thread whenever the discovered source list changes.
FNDIFinderService::FNDISourceCollectionChangedEvent FNDIFinderService::EventOnNDISourceCollectionChanged;
// Cached list of NDI sources on the network, maintained by the finder thread.
TArray<FNDIConnectionInformation> FNDIFinderService::NetworkSourceCollection = TArray<FNDIConnectionInformation>();
/** ************************ **/
// Lazily creates the process-wide NDI finder instance on first construction.
// NOTE(review): the null check happens before the lock is taken, so two
// threads constructing services concurrently could race — confirm services
// are only constructed from the game thread.
FNDIFinderService::FNDIFinderService()
{
	if (NDI_FIND_INSTANCE == nullptr)
	{
		FScopeLock Lock(&NDI_FIND_SYNC_CONTEXT);
		NDI_FIND_INSTANCE = NDIlib_find_create_v2(nullptr);
	}
}
// Begin the service: spawns the finder thread running Run(). The running
// flag is deliberately set *before* Create(), because the new thread's loop
// checks it immediately; it is then corrected to reflect whether the thread
// actually started. Returns false when already started or no finder exists.
bool FNDIFinderService::Start()
{
	if (!bIsThreadRunning && p_RunnableThread == nullptr)
	{
		if (NDI_FIND_INSTANCE != nullptr)
		{
			this->bIsThreadRunning = true;
			p_RunnableThread = FRunnableThread::Create(this, TEXT("FNDIFinderService_Tick"), 0, TPri_BelowNormal);
			return bIsThreadRunning = p_RunnableThread != nullptr;
		}
	}
	return false;
}
/** FRunnable Interface implementation for 'Init' */
// The thread may only run if the global finder instance was created successfully.
bool FNDIFinderService::Init()
{
	return NDI_FIND_INSTANCE != nullptr;
}
/** FRunnable Interface implementation for 'Run' */
// Finder thread loop: waits on the NDI finder for source-list activity. When
// a wait interval passes with no new activity (the list has settled), the
// cached collection is refreshed, and a change event is broadcast on the
// game thread if anything actually changed.
uint32 FNDIFinderService::Run()
{
	static const uint32 find_wait_time = 500;
	if (NDI_FIND_INSTANCE == nullptr)
		return 0;
	// Only update while we are supposed to be running (cleared by Shutdown())
	while (bIsThreadRunning)
	{
		// Wait up to 'find_wait_time' (in milliseconds) to determine whether new sources have been added
		if (!NDIlib_find_wait_for_sources(NDI_FIND_INSTANCE, find_wait_time))
		{
			// alright the source collection has stopped updating, did we change the network source collection?
			if (UpdateNetworkSourceCollection())
			{
				// Broadcast the event on the game thread for thread safety purposes
				AsyncTask(ENamedThreads::GameThread, []() {
					if (FNDIFinderService::EventOnNDISourceCollectionChanged.IsBound())
						FNDIFinderService::EventOnNDISourceCollectionChanged.Broadcast();
				});
			}
		}
	}
	// return success
	return 1;
}
// Service shutdown: signals the Run() loop to exit, blocks until the finder
// thread completes, then destroys the global NDI finder instance.
// Fix: NDI_FIND_INSTANCE is now reset to nullptr after NDIlib_find_destroy —
// previously a second Shutdown() (Stop() delegates here) would destroy a
// stale handle, and the constructor's lazy re-creation check would never fire.
void FNDIFinderService::Shutdown()
{
	if (p_RunnableThread != nullptr)
	{
		// Signal the Run() loop to exit and wait for the thread to finish.
		this->bIsThreadRunning = false;
		p_RunnableThread->WaitForCompletion();
		p_RunnableThread = nullptr;
	}
	// Ensure we unload the finder instance and clear the handle so it cannot
	// be destroyed twice.
	if (NDI_FIND_INSTANCE != nullptr)
	{
		NDIlib_find_destroy(NDI_FIND_INSTANCE);
		NDI_FIND_INSTANCE = nullptr;
	}
}
// Stop the service (FRunnable 'Stop'): delegates to Shutdown(), which joins
// the finder thread and destroys the finder instance.
void FNDIFinderService::Stop()
{
	Shutdown();
}
// Synchronizes the cached NetworkSourceCollection with the finder's current
// source list: entries are updated in place, new ones appended, and excess
// ones removed. Source names of the form "MACHINE (stream)" are split into
// MachineName/StreamName. Returns true when the cached collection changed.
// Thread-safe: the collection is modified under NDI_FIND_SYNC_CONTEXT.
bool FNDIFinderService::UpdateNetworkSourceCollection()
{
	uint32 no_sources = 0;
	bool bHasCollectionChanged = false;
	if (NDI_FIND_INSTANCE != nullptr)
	{
		// The returned array is owned by the finder; valid until the next NDI call.
		const NDIlib_source_t* p_sources = NDIlib_find_get_current_sources(NDI_FIND_INSTANCE, &no_sources);
		// Change Scope
		{
			FScopeLock lock(&NDI_FIND_SYNC_CONTEXT);
			bHasCollectionChanged = FNDIFinderService::NetworkSourceCollection.Num() != no_sources;
			if (no_sources > 0 && p_sources != nullptr)
			{
				uint32 CurrentSourceCount = NetworkSourceCollection.Num();
				for (uint32 iter = 0; iter < no_sources; iter++)
				{
					// Grow the collection as needed (one element per extra iteration)
					if (iter >= CurrentSourceCount)
					{
						NetworkSourceCollection.Add(FNDIConnectionInformation());
					}
					const NDIlib_source_t* SourceInformation = &p_sources[iter];
					FNDIConnectionInformation* CollectionSource = &NetworkSourceCollection[iter];
					// URL change is the signal that an entry was replaced in place
					bHasCollectionChanged |= SourceInformation->p_url_address != CollectionSource->Url;
					CollectionSource->Url = SourceInformation->p_url_address;
					CollectionSource->SourceName = SourceInformation->p_ndi_name;
					FString SourceName = SourceInformation->p_ndi_name;
					SourceName.Split(TEXT(" "), &CollectionSource->MachineName, &CollectionSource->StreamName);
					// Now that the MachineName and StreamName have been split, cleanup the stream name
					CollectionSource->StreamName.RemoveFromStart("(");
					CollectionSource->StreamName.RemoveFromEnd(")");
				}
				// Drop entries for sources that disappeared from the network
				if (CurrentSourceCount > no_sources)
				{
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 5)) // 5.5 or later
					NetworkSourceCollection.RemoveAt(no_sources, CurrentSourceCount - no_sources, EAllowShrinking::Yes);
#else
					NetworkSourceCollection.RemoveAt(no_sources, CurrentSourceCount - no_sources, true);
#endif
					bHasCollectionChanged = true;
				}
			}
			else if (NetworkSourceCollection.Num() > 0)
			{
				// No sources on the network any more: clear everything
				NetworkSourceCollection.Empty();
				bHasCollectionChanged = true;
			}
			bHasCollectionChanged |= NetworkSourceCollection.Num() != no_sources;
		}
	}
	return bHasCollectionChanged;
}
/** Call to update an existing collection of network sources to match the current collection */
// Synchronizes the caller-supplied collection with the cached
// NetworkSourceCollection: entries are updated in place, missing entries
// appended, and excess entries removed. Returns true when the supplied
// collection was modified. Thread-safe: reads under NDI_FIND_SYNC_CONTEXT.
// Fix: the source count is now held by value — the original bound a
// `const uint32&` to the temporary produced by converting Num()'s int32,
// which works only via lifetime extension and is misleading.
bool FNDIFinderService::UpdateSourceCollection(TArray<FNDIConnectionInformation>& InSourceCollection)
{
	bool bHasCollectionChanged = false;
	{
		FScopeLock Lock(&NDI_FIND_SYNC_CONTEXT);
		const uint32 no_sources = NetworkSourceCollection.Num();
		bHasCollectionChanged = InSourceCollection.Num() != no_sources;
		if (no_sources > 0)
		{
			uint32 CurrentSourceCount = InSourceCollection.Num();
			for (uint32 iter = 0; iter < no_sources; iter++)
			{
				// Grow the caller's collection as needed
				if (iter >= CurrentSourceCount)
				{
					InSourceCollection.Add(FNDIConnectionInformation());
					CurrentSourceCount = InSourceCollection.Num();
				}
				FNDIConnectionInformation* CollectionSource = &InSourceCollection[iter];
				const FNDIConnectionInformation* SourceInformation = &NetworkSourceCollection[iter];
				// URL change is the signal that an entry was replaced in place
				bHasCollectionChanged |= SourceInformation->Url != CollectionSource->Url;
				CollectionSource->Url = SourceInformation->Url;
				CollectionSource->SourceName = SourceInformation->SourceName;
				CollectionSource->MachineName = SourceInformation->MachineName;
				CollectionSource->StreamName = SourceInformation->StreamName;
			}
			// Drop entries no longer present in the cached collection
			if (CurrentSourceCount > no_sources)
			{
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 5)) // 5.5 or later
				InSourceCollection.RemoveAt(no_sources, CurrentSourceCount - no_sources, EAllowShrinking::Yes);
#else
				InSourceCollection.RemoveAt(no_sources, CurrentSourceCount - no_sources, true);
#endif
				bHasCollectionChanged = true;
			}
		}
		else if (InSourceCollection.Num() > 0)
		{
			// The network has no sources: clear the caller's collection too
			InSourceCollection.Empty();
			bHasCollectionChanged = true;
		}
	}
	return bHasCollectionChanged;
}
/** Get the available sources on the network */
// Returns a snapshot copy of the cached source list, taken under the finder
// lock so callers can iterate without racing the finder thread's updates.
const TArray<FNDIConnectionInformation> FNDIFinderService::GetNetworkSourceCollection()
{
	FScopeLock Lock(&NDI_FIND_SYNC_CONTEXT);
	TArray<FNDIConnectionInformation> SnapshotCopy(FNDIFinderService::NetworkSourceCollection);
	return SnapshotCopy;
}

View File

@@ -1,50 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Structures/NDIBroadcastConfiguration.h>
/** Copies an existing instance to this object */
FNDIBroadcastConfiguration::FNDIBroadcastConfiguration(const FNDIBroadcastConfiguration& other)
{
	// Delegate to the copy-assignment operator, which performs the same
	// member-wise deep copy of FrameRate and FrameSize.
	*this = other;
}
/** Copies existing instance properties to this object */
FNDIBroadcastConfiguration& FNDIBroadcastConfiguration::operator=(const FNDIBroadcastConfiguration& other)
{
	// Member-wise copy; the structure owns no external resources, so this
	// shallow copy is also a complete deep copy.
	FrameSize = other.FrameSize;
	FrameRate = other.FrameRate;
	return *this;
}
/** Compares this object to 'other' and returns a determination of whether they are equal */
bool FNDIBroadcastConfiguration::operator==(const FNDIBroadcastConfiguration& other) const
{
	// Configurations match only when both the frame rate and the frame size agree.
	const bool bSameRate = (FrameRate == other.FrameRate);
	const bool bSameSize = (FrameSize == other.FrameSize);
	return bSameRate && bSameSize;
}
/** Attempts to serialize this object using an Archive object */
FArchive& FNDIBroadcastConfiguration::Serialize(FArchive& Ar)
{
	// A version tag is serialized first so future revisions of this structure
	// can remain compatible with archives written by older builds.
	int32 current_version = 0;
	Ar << current_version;
	Ar << FrameRate.Numerator;
	Ar << FrameRate.Denominator;
	Ar << FrameSize;
	return Ar;
}
/** Compares this object to 'other" and returns a determination of whether they are NOT equal */
bool FNDIBroadcastConfiguration::operator!=(const FNDIBroadcastConfiguration& other) const
{
	// Defined as the logical negation of equality.
	return !(this->operator==(other));
}

View File

@@ -1,111 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Structures/NDIConnectionInformation.h>
#include <string>
/** Copies an existing instance to this object */
FNDIConnectionInformation::FNDIConnectionInformation(const FNDIConnectionInformation& other)
{
	// Duplicate every field of 'other' into this object.
	Bandwidth = other.Bandwidth;
	MachineName = other.MachineName;
	SourceName = other.SourceName;
	StreamName = other.StreamName;
	Url = other.Url;
	bMuteAudio = other.bMuteAudio;
	bMuteVideo = other.bMuteVideo;
}
/** Copies existing instance properties to this object */
FNDIConnectionInformation& FNDIConnectionInformation::operator=(const FNDIConnectionInformation& other)
{
	// Field-by-field copy; all members are value types, so self-assignment is safe.
	Bandwidth = other.Bandwidth;
	MachineName = other.MachineName;
	SourceName = other.SourceName;
	StreamName = other.StreamName;
	Url = other.Url;
	bMuteAudio = other.bMuteAudio;
	bMuteVideo = other.bMuteVideo;
	return *this;
}
/** Compares this object to 'other' and returns a determination of whether they are equal */
bool FNDIConnectionInformation::operator==(const FNDIConnectionInformation& other) const
{
	// Every property must match; bail out at the first mismatch.
	if (Bandwidth != other.Bandwidth)
		return false;
	if (MachineName != other.MachineName || SourceName != other.SourceName)
		return false;
	if (StreamName != other.StreamName || Url != other.Url)
		return false;
	return bMuteAudio == other.bMuteAudio && bMuteVideo == other.bMuteVideo;
}
/** Maps the UE-facing bandwidth enumeration onto the NDI SDK receiver bandwidth value */
FNDIConnectionInformation::operator NDIlib_recv_bandwidth_e() const
{
	switch (this->Bandwidth)
	{
		case ENDISourceBandwidth::MetadataOnly:
			return NDIlib_recv_bandwidth_metadata_only;
		case ENDISourceBandwidth::AudioOnly:
			return NDIlib_recv_bandwidth_audio_only;
		case ENDISourceBandwidth::Lowest:
			return NDIlib_recv_bandwidth_lowest;
		default:
			// Any other value (including ENDISourceBandwidth::Highest) maps to highest.
			return NDIlib_recv_bandwidth_highest;
	}
}
/** Resets the current parameters to the default property values */
void FNDIConnectionInformation::Reset()
{
// Ensure we reset all the properties of this object to nominal default properties
this->Bandwidth = ENDISourceBandwidth::Highest;
this->MachineName = FString("");
this->SourceName = FString("");
this->StreamName = FString("");
this->Url = FString("");
this->bMuteAudio = false;
this->bMuteVideo = false;
}
/** Attempts to serialize this object using an Archive object */
FArchive& FNDIConnectionInformation::Serialize(FArchive& Ar)
{
	// Serialize a leading version tag so future structure revisions can remain
	// compatible with archives written by older builds.
	int32 current_version = 0;
	Ar << current_version;
	Ar << Bandwidth;
	Ar << MachineName;
	Ar << SourceName;
	Ar << StreamName;
	Ar << Url;
	Ar << bMuteAudio;
	Ar << bMuteVideo;
	return Ar;
}
/** Determines whether this object is valid connection information */
bool FNDIConnectionInformation::IsValid() const
{
// Need at least a source name and/or machine+stream name and/or a URL
return (!this->SourceName.IsEmpty()) ||
((!this->MachineName.IsEmpty()) && (!this->StreamName.IsEmpty())) ||
(!this->Url.IsEmpty());
}
/**
	Builds the NDI name for this connection: the explicit source name when available,
	otherwise the canonical "Machine (Stream)" form, or an empty string when neither
	can be formed.
*/
FString FNDIConnectionInformation::GetNDIName() const
{
	// Fix: removed an unused local 'std::string source_name;' that served no purpose.

	// Prefer an explicitly supplied source name.
	if (!this->SourceName.IsEmpty())
		return this->SourceName;

	// Otherwise synthesize the name from the machine and stream names.
	if ((!this->MachineName.IsEmpty()) && (!this->StreamName.IsEmpty()))
		return this->MachineName + " (" + this->StreamName + ")";

	// Not enough information to produce a name.
	return FString();
}
/** Compares this object to 'other" and returns a determination of whether they are NOT equal */
bool FNDIConnectionInformation::operator!=(const FNDIConnectionInformation& other) const
{
	// Defined as the logical negation of equality.
	return !(this->operator==(other));
}

View File

@@ -1,74 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Structures/NDIReceiverPerformanceData.h>
/** Copies an existing instance to this object */
FNDIReceiverPerformanceData::FNDIReceiverPerformanceData(const FNDIReceiverPerformanceData& other)
{
	// Duplicate every frame counter of 'other' into this object.
	VideoFrames = other.VideoFrames;
	AudioFrames = other.AudioFrames;
	MetadataFrames = other.MetadataFrames;
	DroppedVideoFrames = other.DroppedVideoFrames;
	DroppedAudioFrames = other.DroppedAudioFrames;
	DroppedMetadataFrames = other.DroppedMetadataFrames;
}
/** Copies existing instance properties to this object */
FNDIReceiverPerformanceData& FNDIReceiverPerformanceData::operator=(const FNDIReceiverPerformanceData& other)
{
	// Copy all counters; self-assignment is harmless for these integral members.
	VideoFrames = other.VideoFrames;
	AudioFrames = other.AudioFrames;
	MetadataFrames = other.MetadataFrames;
	DroppedVideoFrames = other.DroppedVideoFrames;
	DroppedAudioFrames = other.DroppedAudioFrames;
	DroppedMetadataFrames = other.DroppedMetadataFrames;
	return *this;
}
/** Compares this object to 'other' and returns a determination of whether they are equal */
bool FNDIReceiverPerformanceData::operator==(const FNDIReceiverPerformanceData& other) const
{
	// All counters must match; bail out at the first mismatch.
	if (AudioFrames != other.AudioFrames || VideoFrames != other.VideoFrames)
		return false;
	if (MetadataFrames != other.MetadataFrames)
		return false;
	if (DroppedAudioFrames != other.DroppedAudioFrames || DroppedVideoFrames != other.DroppedVideoFrames)
		return false;
	return DroppedMetadataFrames == other.DroppedMetadataFrames;
}
/** Resets the current parameters to the default property values */
void FNDIReceiverPerformanceData::Reset()
{
// Ensure we reset all the properties of this object to nominal default properties
this->AudioFrames = 0;
this->DroppedAudioFrames = 0;
this->DroppedMetadataFrames = 0;
this->DroppedVideoFrames = 0;
this->MetadataFrames = 0;
this->VideoFrames = 0;
}
/** Attempts to serialize this object using an Archive object */
FArchive& FNDIReceiverPerformanceData::Serialize(FArchive& Ar)
{
	// Serialize a leading version tag so future structure revisions can remain
	// compatible with archives written by older builds.
	int32 current_version = 0;
	Ar << current_version;
	Ar << AudioFrames;
	Ar << DroppedAudioFrames;
	Ar << DroppedMetadataFrames;
	Ar << DroppedVideoFrames;
	Ar << MetadataFrames;
	Ar << VideoFrames;
	return Ar;
}
/** Compares this object to 'other" and returns a determination of whether they are NOT equal */
bool FNDIReceiverPerformanceData::operator!=(const FNDIReceiverPerformanceData& other) const
{
	// Defined as the logical negation of equality.
	return !(this->operator==(other));
}

View File

@@ -1,106 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
using System;
using System.IO;
using UnrealBuildTool;
/// <summary>
/// UnrealBuildTool module rules for the NDIIO runtime module: declares include
/// paths and the engine/editor/third-party modules this plugin links against.
/// </summary>
public class NDIIO : ModuleRules
{
	public NDIIO(ReadOnlyTargetRules Target) : base(Target)
	{
		// IWYUSupport replaced bEnforceIWYU in UE 5.2; keep both spellings so the
		// module builds on older engines as well.
#if UE_5_2_OR_LATER
		IWYUSupport = IWYUSupport.Full;
#else
		bEnforceIWYU = true;
#endif
		PCHUsage = PCHUsageMode.UseExplicitOrSharedPCHs;

		#region Public Includes

		// Include the Public include paths (only if the directory actually exists)
		if (Directory.Exists(Path.Combine(ModuleDirectory, "Public")))
		{
			PublicIncludePaths.AddRange(new string[] {
				// ... add public include paths required here ...
				Path.Combine(ModuleDirectory, "Public" ),
			});
		}

		// Define the public dependencies (modules whose headers appear in our public headers)
		PublicDependencyModuleNames.AddRange(new string[] {
			"Engine",
			"Core",
			"CoreUObject",
			"Projects",
			"NDIIOShaders"
		});

		#endregion

		#region Private Includes

		// Include the Private include paths (only if the directory actually exists)
		if (Directory.Exists(Path.Combine(ModuleDirectory, "Private")))
		{
			PrivateIncludePaths.AddRange(new string[] {
				// ... add other private include paths required here ...
				Path.Combine(ModuleDirectory, "Private" )
			});
		}

		// Modules used only by this module's implementation files
		PrivateDependencyModuleNames.AddRange(new string[] {
			"Renderer",
			"RenderCore",
			"RHI",
			"Slate",
			"SlateCore",
			"UMG",
			"ImageWrapper",
			"AudioMixer",
			"AudioExtensions",
			"InputCore",
			"Media",
			"MediaAssets",
			"MediaIOCore",
			"MediaUtils",
			"TimeManagement",
			"CinematicCamera",
			"XmlParser"
		});

		#endregion

		#region Editor Includes

		// Editor-only dependencies; only linked when building an editor target
		if (Target.bBuildEditor == true)
		{
			PrivateIncludePathModuleNames.AddRange(new string[] {
				"AssetTools",
				"TargetPlatform",
			});

			PrivateDependencyModuleNames.AddRange(new string[] {
				"UnrealEd",
				"AssetTools",
				"MaterialUtilities"
			});
		}

		#endregion

		#region ThirdParty Includes

		// The NDI SDK itself (wrapped as the "NDI" third-party module)
		PublicDependencyModuleNames.Add("NDI");

		#endregion
	}
}

View File

@@ -1,59 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <GameFramework/Actor.h>
#include <Components/NDIViewportCaptureComponent.h>
#include <Components/NDIPTZControllerComponent.h>
#include <Sound/SoundSubmix.h>
#include "NDIBroadcastActor.generated.h"
/**
	An actor that captures the scene from the perspective of a camera and begins
	broadcasting the viewport over NDI immediately upon 'BeginPlay'.
*/
UCLASS(BlueprintType, Blueprintable, Category = "NDI IO", META = (DisplayName = "NDI Broadcast Actor"))
class NDIIO_API ANDIBroadcastActor : public AActor, public IPTZControllableInterface
{
	GENERATED_UCLASS_BODY()

private:
	/**
		The NDI Media Sender representing the configuration of the network source to send audio, video, and metadata
	*/
	UPROPERTY(BlueprintReadWrite, EditInstanceOnly, Category = "NDI IO",
			  META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true))
	UNDIMediaSender* NDIMediaSource = nullptr;

	/**
		A component used to capture an additional viewport for broadcasting over NDI
	*/
	UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "NDI IO",
			  META = (DisplayName = "Viewport Capture Component", AllowPrivateAccess = true))
	UNDIViewportCaptureComponent* ViewportCaptureComponent = nullptr;

	/**
		The submix used as the audio capture source.
		NOTE(review): presumably mixed into the NDI broadcast's audio — confirm in the .cpp.
	*/
	UPROPERTY(BlueprintReadWrite, EditInstanceOnly, Category = "NDI IO",
			  META = (DisplayName = "Audio Submix Capture", AllowPrivateAccess = true))
	USoundSubmix* SubmixCapture = nullptr;

	/**
		Component used for PTZ control
	*/
	UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "NDI IO",
			  META = (DisplayName = "PTZ Controller", AllowPrivateAccess = true))
	UPTZController* PTZController = nullptr;

public:
	virtual void BeginPlay() override;

	// IPTZControllableInterface: report and apply this actor's camera state for PTZ control.
	virtual FPTZState GetPTZStateFromUE() const override;
	virtual void SetPTZStateToUE(const FPTZState& PTZState) override;
};

View File

@@ -1,129 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <GameFramework/Actor.h>
#include <Components/AudioComponent.h>
#include <Components/StaticMeshComponent.h>
#include <Components/NDIReceiverComponent.h>
#include <Enumerations/NDIAudioChannels.h>
#include "NDIReceiveActor.generated.h"
/**
	An actor that displays video received over NDI on a mesh in the scene and can
	optionally play back the received audio.
*/
UCLASS(HideCategories = (Activation, Rendering, AssetUserData, Material, Attachment, Actor, Input, Cooking, LOD,
						 StaticMesh, Materials),
	   Category = "NDI IO", META = (DisplayName = "NDI Receive Actor"))
class NDIIO_API ANDIReceiveActor : public AActor
{
	GENERATED_UCLASS_BODY()

private:
	/** The desired width of the frame in cm, represented in the virtual scene.
		Fix: use a float literal ('f' suffix) to avoid implicit double->float
		narrowing and match the style of 'FrameHeight'. The defaults form a 16:9 frame. */
	UPROPERTY(BlueprintReadWrite, EditAnywhere, Interp, BlueprintSetter = "SetFrameWidth", Category = "NDI IO",
			  META = (DisplayName = "Frame Width", AllowPrivateAccess = true))
	float FrameWidth = 177.778f;

	/** The desired height of the frame in cm, represented in the virtual scene */
	UPROPERTY(BlueprintReadWrite, EditAnywhere, Interp, BlueprintSetter = "SetFrameHeight", Category = "NDI IO",
			  META = (DisplayName = "Frame Height", AllowPrivateAccess = true))
	float FrameHeight = 100.0f;

	/**
		Indicates that this object should play the audio.

		*Note Audio played by this object will be played as a UI sound, and won't normalize the audio
		if the same 'MediaSource' object is being used as the audio source on multiple receivers.
	*/
	UPROPERTY(EditInstanceOnly, BlueprintSetter = "UpdateAudioPlayback", Category = "NDI IO",
			  META = (DisplayName = "Enable Audio Playback?", AllowPrivateAccess = true))
	bool bEnableAudioPlayback = false;

	/** Channel layout used when playing back the received audio */
	UPROPERTY(EditInstanceOnly, BlueprintSetter = "UpdateAudioPlaybackChannels", Category = "NDI IO",
			  META = (DisplayName = "Audio Playback Channels", AllowPrivateAccess = true))
	ENDIAudioChannels AudioPlaybackChannels = ENDIAudioChannels::Mono;

	/** Enable/disable the use of the color channels (if there are any) */
	UPROPERTY(BlueprintReadWrite, EditAnywhere, BlueprintSetter = "EnableColor", Category = "NDI IO",
			  META = (DisplayName = "Enable Color?", AllowPrivateAccess = true))
	bool bEnableColor = true;

	/** Enable/disable the use of the alpha channel (if there is one) */
	UPROPERTY(BlueprintReadWrite, EditAnywhere, BlueprintSetter = "EnableAlpha", Category = "NDI IO",
			  META = (DisplayName = "Enable Alpha?", AllowPrivateAccess = true))
	bool bEnableAlpha = true;

	/** The Receiver object used to get Audio, Video, and Metadata from on the network */
	UPROPERTY(BlueprintReadWrite, EditInstanceOnly, Category = "NDI IO",
			  META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true))
	UNDIMediaReceiver* NDIMediaSource = nullptr;

	/** The component used to display the video received from the Media Sender object */
	UPROPERTY(Transient, META = (DisplayName = "Video Mesh Component"))
	UStaticMeshComponent* VideoMeshComponent = nullptr;

	/** The component used to play the audio from the NDI Media source */
	UPROPERTY(BlueprintReadWrite, EditInstanceOnly, Category= "NDI IO", META = (DisplayName = "Audio Component", AllowPrivateAccess = true))
	UAudioComponent* AudioComponent = nullptr;

	/** The audio sound wave which receives the audio from the NDI Media source */
	UPROPERTY(Transient, META = (DisplayName = "Soundwave"))
	UNDIMediaSoundWave* AudioSoundWave = nullptr;

private:
	/** The material we are trying to apply to the video mesh */
	class UMaterialInterface* VideoMaterial = nullptr;

	/** The dynamic material to apply to the plane object of this actor */
	UPROPERTY()
	class UMaterialInstanceDynamic* VideoMaterialInstance = nullptr;

public:
	virtual void BeginPlay() override;
	virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override;
	virtual void Tick(float DeltaTime) override;

	/**
		Attempts to set the desired frame size in cm, represented in the virtual scene
	*/
	void SetFrameSize(FVector2D InFrameSize);

	/**
		Returns the current frame size of the 'VideoMeshComponent' for this object
	*/
	const FVector2D GetFrameSize() const;

private:
	// Blueprint setters backing the UPROPERTY declarations above.
	UFUNCTION(BlueprintSetter)
	void SetFrameHeight(const float& InFrameHeight);

	UFUNCTION(BlueprintSetter)
	void SetFrameWidth(const float& InFrameWidth);

	UFUNCTION(BlueprintSetter)
	void UpdateAudioPlayback(const bool& Enabled);

	UFUNCTION(BlueprintSetter)
	void UpdateAudioPlaybackChannels(const ENDIAudioChannels& Channels);

	UFUNCTION(BlueprintSetter)
	void EnableColor(const bool& Enabled);

	UFUNCTION(BlueprintSetter)
	void EnableAlpha(const bool& Enabled);

#if WITH_EDITORONLY_DATA
	virtual void PreEditChange(FProperty* InProperty) override;
	virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override;
#endif

	// Applies 'AudioPlaybackChannels' to the playback path; 'bStoppedForChannelsMode'
	// tracks whether playback was halted while the channel mode changes.
	void ApplyChannelsMode();
	bool bStoppedForChannelsMode = false;
};

View File

@@ -1,52 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <GenlockedTimecodeProvider.h>
#include <Objects/Media/NDIMediaReceiver.h>
#include "NDITimecodeProvider.generated.h"
/**
	Timecode provider that derives engine timecode from frames received by an
	NDI media receiver.
*/
UCLASS(Blueprintable, editinlinenew, meta=(DisplayName="NDI Timecode Provider"))
class NDIIO_API UNDITimecodeProvider : public UGenlockedTimecodeProvider
{
	GENERATED_UCLASS_BODY()

private:
	/** The Receiver object used to get timecodes from */
	UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "NDI IO",
			  META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true))
	UNDIMediaReceiver* NDIMediaSource = nullptr;

public:
	//~ UTimecodeProvider interface
	virtual bool FetchTimecode(FQualifiedFrameTime& OutFrameTime) override;
	virtual ETimecodeProviderSynchronizationState GetSynchronizationState() const override;
	virtual bool Initialize(class UEngine* InEngine) override;
	virtual void Shutdown(class UEngine* InEngine) override;

	//~ UObject interface
	virtual void BeginDestroy() override;

private:
	// Internal cleanup helper (presumably unbinds the event handles below and
	// detaches from the media source — see the implementation).
	void ReleaseResources();

private:
	// Handles for delegates bound to the media receiver's capture/connection events.
	FDelegateHandle VideoCaptureEventHandle;
	FDelegateHandle ConnectedEventHandle;
	FDelegateHandle DisconnectedEventHandle;

	// Guards 'State' and 'MostRecentFrameTime'; mutable so const accessors can lock.
	// NOTE(review): presumably these are updated from receiver callbacks while read
	// from the game thread — confirm in the .cpp.
	mutable FCriticalSection StateSyncContext;
	ETimecodeProviderSynchronizationState State = ETimecodeProviderSynchronizationState::Closed;
	FQualifiedFrameTime MostRecentFrameTime;
};

View File

@@ -1,99 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Components/ActorComponent.h>
#include <Objects/Media/NDIMediaSender.h>
#include <Structures/NDIBroadcastConfiguration.h>
#include "NDIBroadcastComponent.generated.h"
/**
	Provides a wrapper to allow you to modify an NDI Media Sender object from blueprints and perform broadcasting
	functionality
*/
UCLASS(BlueprintType, Blueprintable, Category = "NDI IO",
	   META = (DisplayName = "NDI Broadcast Component", BlueprintSpawnableComponent))
class NDIIO_API UNDIBroadcastComponent : public UActorComponent
{
	GENERATED_UCLASS_BODY()

private:
	/** The NDI Media Sender representing the configuration of the network source to send audio, video, and metadata */
	UPROPERTY(EditDefaultsOnly, Category = "Properties",
			  META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true))
	UNDIMediaSender* NDIMediaSource = nullptr;

public:
	/**
		Initialize this component with the media source required for sending NDI audio, video, and metadata.
		Returns false, if the MediaSource is already been set. This is usually the case when this component is
		initialized in Blueprints.
	*/
	bool Initialize(UNDIMediaSender* InMediaSource = nullptr);

	/**
		Attempts to start broadcasting audio, video, and metadata via the 'NDIMediaSource' associated with this object

		@param ErrorMessage The error message received when the media source is unable to start broadcasting
		@result Indicates whether this object successfully started broadcasting
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Start Broadcasting"))
	bool StartBroadcasting(FString& ErrorMessage);

	/**
		Changes the name of the sender object as seen on the network for remote connections

		@param InSourceName The new name of the source to be identified as on the network
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Source Name"))
	void ChangeSourceName(const FString& InSourceName);

	/**
		Attempts to change the Broadcast information associated with this media object

		@param InConfiguration The new configuration to broadcast
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Broadcast Configuration"))
	void ChangeBroadcastConfiguration(const FNDIBroadcastConfiguration& InConfiguration);

	/**
		Attempts to change the RenderTarget used in sending video frames over NDI

		@param BroadcastTexture The texture to use as video, while broadcasting over NDI
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Broadcast Texture"))
	void ChangeBroadcastTexture(UTextureRenderTarget2D* BroadcastTexture = nullptr);

	/**
		Determines the current tally information.

		@param IsOnPreview - A state indicating whether this source in on preview of a receiver
		@param IsOnProgram - A state indicating whether this source is on program of a receiver
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Tally Information"))
	void GetTallyInformation(bool& IsOnPreview, bool& IsOnProgram);

	/**
		Gets the current number of receivers connected to this source. This can be used to avoid rendering
		when nothing is connected to the video source. which can significantly improve the efficiency if
		you want to make a lot of sources available on the network

		@param Result The total number of connected receivers attached to the broadcast of this object
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Number of Connections"))
	void GetNumberOfConnections(int32& Result);

	/**
		Attempts to immediately stop sending frames over NDI to any connected receivers
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Stop Broadcasting"))
	void StopBroadcasting();
};

View File

@@ -1,79 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Components/ActorComponent.h>
#include <Structures/NDIConnectionInformation.h>
#include "NDIFinderComponent.generated.h"
/** Delegates **/
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIFinderServiceCollectionChangedDelegate, UNDIFinderComponent*,
InComponent);
/** ******************* **/
/**
	A component used for essential functionality when dealing with the finder service. Allowing you to
	get a collection of sources found on the network.
*/
UCLASS(BlueprintType, Blueprintable, Category = "NDI IO",
	   META = (DisplayName = "NDI Finder Component", BlueprintSpawnableComponent))
class NDIIO_API UNDIFinderComponent : public UActorComponent
{
	GENERATED_UCLASS_BODY()

public:
	/** A collection of the current sources and their information, found on the network */
	UPROPERTY()
	TArray<FNDIConnectionInformation> NetworkSourceCollection;

	/** A delegate which is broadcast when any change to the network source collection has been detected */
	UPROPERTY(BlueprintAssignable, META = (DisplayName = "On Network Sources Changed", AllowPrivateAccess = true))
	FNDIFinderServiceCollectionChangedDelegate OnNetworkSourcesChanged;

public:
	/**
		Attempts to find a network source by the supplied name.

		@param ConnectionInformation An existing source information structure which contains the source name
		@param InSourceName A string value representing the name of the source to find
		@result A value indicating whether a source with the supplied name was found
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Find Network Source by Name"))
	const bool FindNetworkSourceByName(FNDIConnectionInformation& ConnectionInformation,
									   FString InSourceName = FString(""));

	/**
		Returns the current collection of sources found on the network
	*/
	UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO", META = (DisplayName = "Get Network Sources"))
	const TArray<FNDIConnectionInformation> GetNetworkSources();

protected:
	virtual void BeginPlay() override;
	virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override;

	/** An override function for when the network source collection has been changed */
	UFUNCTION(BlueprintImplementableEvent, META = (DisplayName = "On Network Sources Changed Event"))
	void OnNetworkSourcesChangedEvent();

private:
	/**
		An Event handler for when the NDI Finder Service notifies listeners that changes have been
		detected in the network source collection
	*/
	UFUNCTION()
	virtual void OnNetworkSourceCollectionChangedEvent() final;

private:
	// Guards access to 'NetworkSourceCollection'.
	// NOTE(review): presumably the finder service notifies from another thread — confirm in the .cpp.
	FCriticalSection CollectionSyncContext;
};

View File

@@ -1,174 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <UObject/Interface.h>
#include <Components/ActorComponent.h>
#include <Objects/Media/NDIMediaSender.h>
#include "NDIPTZControllerComponent.generated.h"
/**
	Snapshot of a PTZ (pan/tilt/zoom) camera state that can be stored, recalled,
	and exchanged with a PTZ controller.
*/
USTRUCT(BlueprintType, Blueprintable, Category = "NDI IO", META = (DisplayName = "NDI PTZ State"))
struct NDIIO_API FPTZState
{
	GENERATED_USTRUCT_BODY()

	/** Pan angle (NOTE(review): presumably degrees, limited to +-180 by the controller — confirm) */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PTZ")
	float Pan;

	/** Tilt angle (NOTE(review): presumably degrees — confirm) */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PTZ")
	float Tilt;

	/** Camera field of view; defaults to 90 */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PTZ")
	float FieldOfView;

	/** Focus distance used when 'bAutoFocus' is false; defaults to 0.5 */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PTZ")
	float FocusDistance;

	/** Whether the camera should autofocus instead of using 'FocusDistance' */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PTZ")
	bool bAutoFocus;

	/** The camera's transform in the scene */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PTZ")
	FTransform CameraTransform;

	// 'CameraTransform' is deliberately absent from the initializer list; its
	// default constructor provides the identity transform.
	FPTZState()
		: Pan(0.f)
		, Tilt(0.f)
		, FieldOfView(90.f)
		, FocusDistance(0.5f)
		, bAutoFocus(false)
	{}
};
DECLARE_DYNAMIC_MULTICAST_DELEGATE_TwoParams(FNDIEventDelegate_OnPTZPanTiltSpeed, float, PanSpeed, float, TiltSpeed);
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIEventDelegate_OnPTZZoomSpeed, float, ZoomSpeed);
DECLARE_DYNAMIC_MULTICAST_DELEGATE_TwoParams(FNDIEventDelegate_OnPTZFocus, bool, AutoMode, float, Distance);
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIEventDelegate_OnPTZStore, int, Index);
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIEventDelegate_OnPTZRecall, int, Index);
/** UObject wrapper for IPTZControllableInterface (standard UE interface boilerplate). */
UINTERFACE(BlueprintType, Blueprintable, Category = "NDI IO",
		   META = (DisplayName = "NDI PTZ Controllable", BlueprintSpawnableComponent))
class NDIIO_API UPTZControllableInterface : public UInterface
{
	GENERATED_BODY()
};
/**
	Interface implemented by objects whose camera state can be driven by a PTZ
	controller: they report their current state and accept a new state.
*/
class IPTZControllableInterface
{
	GENERATED_BODY()

public:
	/** Returns the object's current camera state as a PTZ state snapshot. */
	virtual FPTZState GetPTZStateFromUE() const = 0;
	/** Applies the given PTZ state snapshot to the object's camera. */
	virtual void SetPTZStateToUE(const FPTZState& PTZState) = 0;
};
/**
	Component that interprets PTZ commands (received as metadata from an NDI media
	sender) and exposes limits, presets, and events for driving a controllable camera.
*/
UCLASS(BlueprintType, Blueprintable, Category = "NDI IO",
	   META = (DisplayName = "NDI PTZ Controller", BlueprintSpawnableComponent))
class UPTZController : public UActorComponent
{
	GENERATED_BODY()

protected:
	/** Master enable for PTZ control */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Enable PTZ", AllowPrivateAccess = true), Category="PTZ")
	bool EnablePTZ = true;

	// Pan limiting: optional clamp of the pan angle to [PTZPanMinLimit, PTZPanMaxLimit].
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Pan Limit", AllowPrivateAccess = true), Category="PTZ")
	bool PTZWithPanLimit = false;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Pan Min Limit", UIMin="-180", UIMax="180", AllowPrivateAccess = true), Category="PTZ")
	float PTZPanMinLimit = -180.f;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Pan Max Limit", UIMin="-180", UIMax="180", AllowPrivateAccess = true), Category="PTZ")
	float PTZPanMaxLimit = 180.f;
	/** Invert the pan direction (on by default) */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Invert Pan", AllowPrivateAccess = true), Category="PTZ")
	bool bPTZPanInvert = true;

	// Tilt limiting: optional clamp of the tilt angle to [PTZTiltMinLimit, PTZTiltMaxLimit].
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Tilt Limit", AllowPrivateAccess = true), Category="PTZ")
	bool PTZWithTiltLimit = true;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Tilt Min Limit", UIMin="-180", UIMax="180", AllowPrivateAccess = true), Category="PTZ")
	float PTZTiltMinLimit = -90.f;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Tilt Max Limit", UIMin="-180", UIMax="180", AllowPrivateAccess = true), Category="PTZ")
	float PTZTiltMaxLimit = 90.f;
	/** Invert the tilt direction (off by default) */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Invert Tilt", AllowPrivateAccess = true), Category="PTZ")
	bool bPTZTiltInvert = false;

	// Field-of-view (zoom) limiting: optional clamp to [PTZFoVMinLimit, PTZFoVMaxLimit].
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Field of View Limit", AllowPrivateAccess = true), Category="PTZ")
	bool PTZWithFoVLimit = false;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Field of View Min Limit", UIMin="5", UIMax="170", AllowPrivateAccess = true), Category="PTZ")
	float PTZFoVMinLimit = 5.f;
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Field of View Max Limit", UIMin="5", UIMax="170", AllowPrivateAccess = true), Category="PTZ")
	float PTZFoVMaxLimit = 170.f;

	/** Easing time applied when recalling a preset (NOTE(review): presumably seconds — confirm in TickComponent) */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Preset Recall Easing", UIMin="0", UIMax="60", AllowPrivateAccess = true), Category="PTZ")
	float PTZRecallEasing = 2.f;

	// Current motion speeds, driven by incoming PTZ commands.
	UPROPERTY(BlueprintReadWrite, meta=(AllowPrivateAccess = true), Category="PTZ")
	float PTZPanSpeed = 0.f;
	UPROPERTY(BlueprintReadWrite, meta=(AllowPrivateAccess = true), Category="PTZ")
	float PTZTiltSpeed = 0.f;
	UPROPERTY(BlueprintReadWrite, meta=(AllowPrivateAccess = true), Category="PTZ")
	float PTZZoomSpeed = 0.f;

	/** Preset slots for storing and recalling PTZ states */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="PTZ Presets", AllowPrivateAccess = true), Category="PTZ")
	TArray<FPTZState> PTZStoredStates;

	/** The NDI sender whose metadata carries the PTZ commands this controller reacts to */
	UPROPERTY(BlueprintReadWrite, EditInstanceOnly, Category = "NDI IO", META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true))
	UNDIMediaSender* NDIMediaSource = nullptr;

	// Blueprint-assignable events fired as PTZ commands are processed.
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On PTZ Pan Tilt Speed", AllowPrivateAccess = true))
	FNDIEventDelegate_OnPTZPanTiltSpeed OnPTZPanTiltSpeed;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On PTZ Zoom Speed", AllowPrivateAccess = true))
	FNDIEventDelegate_OnPTZZoomSpeed OnPTZZoomSpeed;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On PTZ Focus", AllowPrivateAccess = true))
	FNDIEventDelegate_OnPTZFocus OnPTZFocus;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On PTZ Store", AllowPrivateAccess = true))
	FNDIEventDelegate_OnPTZStore OnPTZStore;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On PTZ Recall", AllowPrivateAccess = true))
	FNDIEventDelegate_OnPTZRecall OnPTZRecall;

public:
	/** Call with the PTZ metadata received from an NDI media sender */
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Receive Metadata From Sender"))
	void ReceiveMetaDataFromSender(UNDIMediaSender* Sender, FString Data);

public:
	UPTZController();
	virtual ~UPTZController();

	/**
		Initialize this component with the required media source to receive metadata from.
		Returns false, if the MediaSource is already been set. This is usually the case when this component is
		initialized in Blueprints.
	*/
	bool Initialize(UNDIMediaSender* InMediaSource = nullptr);

	// Programmatic equivalents of the incoming PTZ commands.
	void SetPTZPanTiltSpeed(float PanSpeed, float TiltSpeed);
	void SetPTZZoomSpeed(float ZoomSpeed);
	void SetPTZFocus(bool AutoMode, float Distance);
	void StorePTZState(int Index);
	void RecallPTZState(int Index);

	/** Returns the controlled object's current PTZ state. */
	FPTZState GetPTZStateFromUE() const;
	/** Applies a PTZ state to the controlled object. */
	void SetPTZStateToUE(const FPTZState& PTZState);

protected:
	virtual void InitializeComponent() override;
	virtual void TickComponent(float DeltaTime, ELevelTick TickType, FActorComponentTickFunction* ThisTickFunction) override;

protected:
	// Parser for the PTZ metadata XML received from the sender.
	TSharedPtr<class NDIXmlParser> NDIMetadataParser;

	// In-flight preset recall: interpolation target plus remaining/total easing time.
	struct FPTZStateInterp
	{
		FPTZState PTZTargetState;
		float EasingDuration { 0 };
		float EasingRemaining { 0 };
	};
	FPTZStateInterp PTZStateInterp;
};

View File

@@ -1,97 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Components/ActorComponent.h>
#include <Structures/NDIConnectionInformation.h>
#include <Objects/Media/NDIMediaReceiver.h>
#include "NDIReceiverComponent.generated.h"
/**
A component used to receive audio, video, and metadata over NDI
*/
UCLASS(BlueprintType, Blueprintable, Category = "NDI IO",
META = (DisplayName = "NDI Receiver Component", BlueprintSpawnableComponent))
class NDIIO_API UNDIReceiverComponent : public UActorComponent
{
GENERATED_UCLASS_BODY()
private:
/** The NDI Media Receiver representing the configuration of the network source to receive audio, video, and
* metadata from */
UPROPERTY(EditDefaultsOnly, Category = "Properties",
META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true))
UNDIMediaReceiver* NDIMediaSource = nullptr;
public:
/**
Initialize this component with the media source required for receiving NDI audio, video, and metadata.
Returns false if the MediaSource has already been set. This is usually the case when this component is
initialized in Blueprints.
@param InMediaSource The receiver to bind to; may be left null (e.g. when the property was set in the editor).
*/
bool Initialize(UNDIMediaReceiver* InMediaSource = nullptr);
/**
Begin receiving NDI audio, video, and metadata frames
@param InConnectionInformation Describes the network source to connect to
@return Whether the receiver was started (presumably false when no valid media source is set — confirm in .cpp)
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Start Receiver"))
bool StartReceiver(const FNDIConnectionInformation& InConnectionInformation);
/**
Attempt to change the connection for which to get audio, video, and metadata frames from
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Connection"))
void ChangeConnection(const FNDIConnectionInformation& InConnectionInformation);
/**
This will add a metadata frame and return immediately, having scheduled the frame asynchronously
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Metadata Frame"))
void SendMetadataFrame(const FString& metadata);
/**
This will setup the up-stream tally notifications. If no streams are connected, it will automatically send
the tally state upon connection
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Tally Information"))
void SendTallyInformation(const bool& IsOnPreview, const bool& IsOnProgram);
/**
Attempts to stop receiving audio, video, and metadata frames from the connected source
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Shutdown Receiver"))
void ShutdownReceiver();
public:
/**
Returns the current framerate of the connected source
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Current Frame Rate"))
FFrameRate GetCurrentFrameRate() const;
/**
Returns the current timecode of the connected source
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Current Timecode"))
FTimecode GetCurrentTimecode() const;
/**
Returns the current connection information of the connected source
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Current Connection Information"))
FNDIConnectionInformation GetCurrentConnectionInformation() const;
/**
Returns the current performance data of the receiver while connected to the source
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Performance Data"))
FNDIReceiverPerformanceData GetPerformanceData() const;
};

View File

@@ -1,91 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <UObject/UnrealType.h>
#include <Objects/Media/NDIMediaSender.h>
#include <Components/ActorComponent.h>
#include "NDITriCasterExtComponent.generated.h"
USTRUCT(BlueprintType, Blueprintable, Category = "NDI IO", META = (DisplayName = "NDI TricasterExt"))
struct NDIIO_API FTriCasterExt
{
GENERATED_USTRUCT_BODY()
/** Raw value of the TriCasterExt metadata entry (presumably the element's text content — confirm in the parser). */
UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category="TricasterExt")
FString Value;
/** Attribute name/value pairs carried by the TriCasterExt metadata entry. */
UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category="TricasterExt")
TMap<FName,FString> KeyValues;
};
/** Fired for a TriCasterExt metadata entry that targets a named property of an actor/object, with an easing duration. */
DECLARE_DYNAMIC_MULTICAST_DELEGATE_FiveParams(FNDIEventDelegate_OnTriCasterExt, AActor*, Actor, UObject*, Object, FString, PropertyElementName, FString, PropertyValueStr, FTimespan, EasingDuration);
/** Fired for a custom TriCasterExt metadata entry, delivered as the parsed FTriCasterExt payload. */
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIEventDelegate_OnTriCasterExtCustom, const FTriCasterExt&, TCData);
/**
A component used to receive and dispatch TriCasterExt metadata arriving via an NDI media sender.
*/
UCLASS(BlueprintType, Blueprintable, Category = "NDI IO",
META = (DisplayName = "NDI TricasterExt Component", BlueprintSpawnableComponent))
class NDIIO_API UTriCasterExtComponent : public UActorComponent
{
GENERATED_BODY()
protected:
/** Enables TriCasterExt metadata handling (presumably gates processing in the .cpp — confirm). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, meta=(DisplayName="Enable TricasterExt", AllowPrivateAccess = true), Category="TricasterExt")
bool EnableTriCasterExt = true;
/** The NDI Media Sender whose metadata this component listens to. */
UPROPERTY(BlueprintReadWrite, EditInstanceOnly, Category = "NDI IO", META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true))
UNDIMediaSender* NDIMediaSource = nullptr;
/** Broadcast when a TriCasterExt entry targets a property of an actor/object. */
UPROPERTY(BlueprintAssignable, BlueprintCallable, Category="NDI Events", META = (DisplayName = "On TricasterExt", AllowPrivateAccess = true))
FNDIEventDelegate_OnTriCasterExt OnTriCasterExt;
/** Broadcast when a custom TriCasterExt entry is received. */
UPROPERTY(BlueprintAssignable, BlueprintCallable, Category="NDI Events", META = (DisplayName = "On TricasterExt Custom", AllowPrivateAccess = true))
FNDIEventDelegate_OnTriCasterExtCustom OnTriCasterExtCustom;
public:
/** Call with the TriCasterExt metadata received from an NDI media sender */
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Receive Metadata From Sender"))
void ReceiveMetaDataFromSender(UNDIMediaSender* Sender, FString Data);
public:
UTriCasterExtComponent();
virtual ~UTriCasterExtComponent();
/**
Initialize this component with the required media source to receive metadata from.
Returns false if the MediaSource has already been set. This is usually the case when this component is
initialized in Blueprints.
*/
bool Initialize(UNDIMediaSender* InMediaSource = nullptr);
/** Handle a property-targeted TriCasterExt entry (feeds the OnTriCasterExt event and/or interpolation state). */
void TriCasterExt(AActor* Actor, UObject* Object, FProperty* Property, FString PropertyElementName, FString PropertyValueStr, FTimespan EasingDuration);
/** Handle a custom TriCasterExt entry (feeds the OnTriCasterExtCustom event). */
void TriCasterExtCustom(const FTriCasterExt& TCData);
protected:
virtual void InitializeComponent() override;
virtual void TickComponent(float DeltaTime, ELevelTick TickType, FActorComponentTickFunction* ThisTickFunction) override;
protected:
/** Parser for incoming NDI XML metadata. */
TSharedPtr<class NDIXmlParser> NDIMetadataParser;
/**
Pending state for an eased property change, advanced in TickComponent.
NOTE(review): Actor/Object/Property are raw, non-UPROPERTY pointers, so the GC
cannot track them; they may dangle if the target is destroyed while an
interpolation is in flight. Confirm the .cpp validates them before use.
*/
struct FTriCasterExtInterp
{
AActor* Actor;
UObject* Object;
FProperty* Property;
FString PropertyElementName;
FString PropertyValueStr;
float EasingDuration;
float EasingRemaining;
};
TArray<FTriCasterExtInterp> TriCasterExtInterp;
};

View File

@@ -1,154 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <CineCameraComponent.h>
#include <Engine/TextureRenderTarget2D.h>
#include <Components/SceneCaptureComponent2D.h>
#include <Objects/Media/NDIMediaSender.h>
#include <Misc/FrameRate.h>
#include <Framework/Application/SlateApplication.h>
#include <SceneManagement.h>
#include <Slate/SceneViewport.h>
#include <Widgets/SViewport.h>
#include <Widgets/SWindow.h>
#include "NDIViewportCaptureComponent.generated.h"
/**
A component used to capture an additional viewport for broadcasting over NDI
*/
UCLASS(BlueprintType, Blueprintable, Category = "NDI IO",
META = (DisplayName = "NDI Viewport Capture Component", BlueprintSpawnableComponent))
class NDIIO_API UNDIViewportCaptureComponent : public USceneCaptureComponent2D
{
GENERATED_UCLASS_BODY()
private:
/**
If true, will allow you to override the capture settings by ignoring the default Broadcast Settings
in the NDI Media Sender, potentially requiring a texture rescale of the capture frame when broadcasting
over NDI.
*/
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Capture Settings", META = (AllowPrivateAccess = true))
bool bOverrideBroadcastSettings = false;
/**
Describes the Height and Width of the viewport frame to capture.
*/
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Capture Settings",
META = (DisplayName = "Capture Size", AllowPrivateAccess = true,
EditCondition = "bOverrideBroadcastSettings"))
FIntPoint CaptureSize = FIntPoint(1280, 720);
/**
Represents the desired number of frames (per second) to capture the viewport
*/
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Capture Settings",
META = (DisplayName = "Capture Rate", AllowPrivateAccess = true,
EditCondition = "bOverrideBroadcastSettings"))
FFrameRate CaptureRate = FFrameRate(60, 1);
/**
The NDI Media Sender representing the configuration of the network source to send audio, video, and metadata
*/
UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "Properties",
META = (DisplayName = "NDI Media Source", AllowPrivateAccess = true))
UNDIMediaSender* NDIMediaSource = nullptr;
/** Lower bound for remapping the captured frame's alpha channel (assumed; confirm in the capture implementation). */
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Capture Settings",
META = (DisplayName = "Alpha Remap Min", AllowPrivateAccess = true))
float AlphaMin = 0.f;
/** Upper bound for remapping the captured frame's alpha channel (assumed; confirm in the capture implementation). */
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Capture Settings",
META = (DisplayName = "Alpha Remap Max", AllowPrivateAccess = true))
float AlphaMax = 1.f;
public:
/**
Initialize this component with the media source required for sending NDI audio, video, and metadata.
Returns false if the MediaSource has already been set. This is usually the case when this component is
initialized in Blueprints.
*/
bool Initialize(UNDIMediaSender* InMediaSource = nullptr);
/**
Changes the name of the sender object as seen on the network for remote connections
@param InSourceName The new name of the source to be identified as on the network
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Source Name"))
void ChangeSourceName(const FString& InSourceName);
/**
Attempts to change the Broadcast information associated with this media object
@param InConfiguration The new configuration to broadcast
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Broadcast Configuration"))
void ChangeBroadcastConfiguration(const FNDIBroadcastConfiguration& InConfiguration);
/**
Attempts to change the RenderTarget used in sending video frames over NDI
@param BroadcastTexture The texture to use as video, while broadcasting over NDI
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Broadcast Texture"))
void ChangeBroadcastTexture(UTextureRenderTarget2D* BroadcastTexture = nullptr);
/**
Change the capture settings of the viewport capture and overrides the NDI Media Sender settings
@param InCaptureSize The Capture size of the frame to capture of the viewport
@param InCaptureRate A framerate at which to capture frames of the viewport
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Capture Settings"))
void ChangeCaptureSettings(FIntPoint InCaptureSize, FFrameRate InCaptureRate);
/**
Determines the current tally information. If you specify a timeout then it will wait until it has
changed, otherwise it will simply poll it and return the current tally immediately
@param IsOnPreview - A state indicating whether this source in on preview of a receiver
@param IsOnProgram - A state indicating whether this source is on program of a receiver
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Tally Information"))
void GetTallyInformation(bool& IsOnPreview, bool& IsOnProgram);
/**
Gets the current number of receivers connected to this source. This can be used to avoid rendering
when nothing is connected to the video source, which can significantly improve the efficiency if
you want to make a lot of sources available on the network
@param Result The total number of connected receivers attached to the broadcast of this object
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Number of Connections"))
void GetNumberOfConnections(int32& Result);
protected:
virtual ~UNDIViewportCaptureComponent();
virtual void InitializeComponent() override;
virtual void UninitializeComponent() override;
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later
virtual void UpdateSceneCaptureContents(FSceneInterface* Scene, ISceneRenderBuilder& SceneRenderBuilder) override;
#else
virtual void UpdateSceneCaptureContents(FSceneInterface* Scene) override;
#endif
private:
/** Bound to the sender; reacts to broadcast configuration changes. */
UFUNCTION()
void OnBroadcastConfigurationChanged(UNDIMediaSender* Sender);
private:
/** Guards updates of the capture/render state (exact protected state lives in the .cpp). */
FCriticalSection UpdateRenderContext;
};

View File

@@ -1,28 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include "NDIAudioChannels.generated.h"
/**
Audio channel configuration modes
(Header comment previously said "Receiver Bandwidth modes" — a copy-paste from NDISourceBandwidth.h.)
*/
UENUM(BlueprintType, META = (DisplayName = "NDI Audio Channels"))
enum class ENDIAudioChannels : uint8
{
/** Mono (single channel). */
Mono = 0x00 UMETA(DisplayName = "Mono"),
/** Stereo (two channels). */
Stereo = 0x01 UMETA(DisplayName = "Stereo"),
/** Whatever the number of channels in the source is. */
Source = 0x02 UMETA(DisplayName = "Source"),
};

View File

@@ -1,31 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include "NDISourceBandwidth.generated.h"
/**
Receiver Bandwidth modes
*/
UENUM(BlueprintType, META = (DisplayName = "NDI Source Bandwidth"))
enum class ENDISourceBandwidth : uint8
{
/** Receive metadata. */
MetadataOnly = 0x00 UMETA(DisplayName = "Metadata Only"),
/** Receive metadata, audio */
AudioOnly = 0x01 UMETA(DisplayName = "Audio Only"),
/** Receive metadata, audio, video at a lower bandwidth and resolution. */
Lowest = 0x02 UMETA(DisplayName = "Lowest"),
/** Receive metadata, audio, video at full resolution. */
Highest = 0x03 UMETA(DisplayName = "Highest")
};

View File

@@ -1,34 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <vector>
#include <algorithm>
#include <functional>
#include <chrono>
#if PLATFORM_WINDOWS
#include <Windows/AllowWindowsPlatformTypes.h>
#endif
#ifndef NDI_SDK_ENABLED
#error NDI(R) 6.x Runtime must be installed for the NDI(R) IO plugin to run properly.
#endif
#ifdef NDI_SDK_ENABLED
#include <Processing.NDI.Lib.h>
#include <Processing.NDI.Lib.cplusplus.h>
#endif
#if PLATFORM_WINDOWS
#include <Windows/HideWindowsPlatformTypes.h>
#endif
#define NDIIO_MODULE_NAME FName(TEXT("NDIIO"))

View File

@@ -1,61 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <vector>
#include <algorithm>
#include <functional>
#include <chrono>
#include <Engine/World.h>
#include <Interfaces/IPluginManager.h>
#include <Modules/ModuleManager.h>
#include <IMediaPlayerFactory.h>
#include <NDIIOPluginSettings.h>
/**
The NDIIO module: loads the NDI runtime, hosts the finder/connection services,
and acts as a media player factory for NDI sources.
*/
class NDIIO_API FNDIIOPluginModule
: public IModuleInterface
, public IMediaPlayerFactory
{
public:
/** IModuleInterface implementation */
virtual void StartupModule() override;
virtual void ShutdownModule() override;
/** IMediaPlayerFactory implementation */
virtual bool CanPlayUrl(const FString& Url, const IMediaOptions* /*Options*/, TArray<FText>* /*OutWarnings*/, TArray<FText>* OutErrors) const override;
virtual TSharedPtr<IMediaPlayer, ESPMode::ThreadSafe> CreatePlayer(IMediaEventSink& EventSink) override;
virtual FText GetDisplayName() const override;
virtual FName GetPlayerName() const override;
virtual FGuid GetPlayerPluginGUID() const override;
virtual const TArray<FString>& GetSupportedPlatforms() const override;
virtual bool SupportsFeature(EMediaFeature Feature) const override;
/** Start broadcasting the currently active viewport over NDI. */
bool BeginBroadcastingActiveViewport();
/** Stop a broadcast started by BeginBroadcastingActiveViewport(). */
void StopBroadcastingActiveViewport();
private:
bool LoadModuleDependencies();
void ShutdownModuleDependencies();
private:
TSharedPtr<class FNDIFinderService> NDIFinderService = nullptr;
TSharedPtr<class FNDIConnectionService> NDIConnectionService = nullptr;
/** Handle of the dynamically loaded NDI runtime library (presumably from FPlatformProcess::GetDllHandle — confirm in .cpp). */
void* NDI_LIB_HANDLE = nullptr;
/** List of platforms that the media player supports. */
TArray<FString> SupportedPlatforms;
/** List of supported URI schemes. */
TArray<FString> SupportedUriSchemes;
};

View File

@@ -1,52 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Misc/FrameRate.h>
#include <UObject/Object.h>
#include "NDIIOPluginSettings.generated.h"
/**
Settings for the Broadcasting of the Active Viewport configurable in the running editor of the application
*/
UCLASS(Config = Engine, DefaultConfig)
class NDIIO_API UNDIIOPluginSettings : public UObject
{
GENERATED_BODY()
public:
/**
Read-only multi-line description shown in the settings panel.
NOTE(review): the member name "Decription" is a typo ("Description"); it is part of the
reflected API and may be referenced by name elsewhere (e.g. customization code), so it
is left unchanged here — a rename needs a coordinated sweep.
*/
UPROPERTY(VisibleAnywhere, Category = "NDI IO", META = (DisplayName = "Description", MultiLine = true))
FString Decription = TEXT(
"These values define the 'Active Viewport' broadcast settings and does not define default values for outputs."
"\r\n"
"\r\nApplication Stream Name - The default name to use when broadcasting the Currently Active Viewport over "
"NDI."
"\r\nBroadcast Rate - Indicates the preferred frame rate to broadcast the Currently Active Viewport over NDI."
"\r\nPreferred FrameSize - Indicates the preferred frame size to broadcast the Currently Active Viewport over "
"NDI."
"\r\nBegin Broadcast On Play - Starts the broadcast of the Currently Active Viewport immediately on Play."
);
/** The default name to use when broadcasting the Currently Active Viewport over NDI. */
UPROPERTY(Config, EditAnywhere, Category = "NDI IO")
FString ApplicationStreamName = FString("Unreal Engine");
/** Indicates the preferred frame rate to broadcast the Currently Active Viewport over NDI. */
UPROPERTY(Config, EditAnywhere, Category = "NDI IO", META = (DisplayName = "Broadcast Rate"))
FFrameRate BroadcastRate = FFrameRate(60, 1);
/** Indicates the preferred frame size to broadcast the Currently Active Viewport over NDI. */
UPROPERTY(Config, EditAnywhere, Category = "NDI IO", META = (DisplayName = "Preferred Broadcast Framesize"))
FIntPoint PreferredFrameSize = FIntPoint(1920, 1080);
/** Starts the broadcast of the Currently Active Viewport immediately on Play. */
UPROPERTY(Config, EditAnywhere, Category = "NDI IO", META = (DisplayName = "Begin Broadcast On Play"))
bool bBeginBroadcastOnPlay = false;
};

View File

@@ -1,52 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Kismet/BlueprintFunctionLibrary.h>
#include <Structures/NDIBroadcastConfiguration.h>
#include "NDIBroadcastConfigurationLibrary.generated.h"
/** Blueprint function library providing comparison operators for FNDIBroadcastConfiguration. */
UCLASS(NotBlueprintable, BlueprintType, Category = "NDI IO",
META = (DisplayName = "NDI Broadcast Configuration Library"))
class NDIIO_API UNDIBroadcastConfigurationLibrary : public UBlueprintFunctionLibrary
{
GENERATED_BODY()
private:
/**
Returns a value indicating whether the two structures are comparably equal
@param A The structure used as the source comparator
@param B The structure used as the target comparator
@return The resulting value of the comparator operator
*/
UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO",
META = (DisplayName = "Equals (NDI Broadcast Configuration)",
CompactNodeTitle = "==", Keywords = "= == Equals", AllowPrivateAccess = true))
static bool K2_Compare_NDIBroadcastConfiguration(FNDIBroadcastConfiguration A, FNDIBroadcastConfiguration B)
{
return A == B;
}
/**
Returns a value indicating whether the two structures are NOT comparably equal
@param A The structure used as the source comparator
@param B The structure used as the target comparator
@return The resulting value of the comparator operator
*/
UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO",
META = (DisplayName = "Not Equals (NDI Broadcast Configuration)",
CompactNodeTitle = "!=", Keywords = "! != Not Equals", AllowPrivateAccess = true))
static bool K2_Compare_Not_NDIBroadcastConfiguration(FNDIBroadcastConfiguration A, FNDIBroadcastConfiguration B)
{
return A != B;
}
};

View File

@@ -1,82 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Kismet/BlueprintFunctionLibrary.h>
#include <Structures/NDIConnectionInformation.h>
#include "NDIConnectionInformationLibrary.generated.h"
/** Blueprint function library providing comparison, validation, and reset helpers for FNDIConnectionInformation. */
UCLASS(NotBlueprintable, BlueprintType, Category = "NDI IO",
META = (DisplayName = "NDI Connection Information Library"))
class NDIIO_API UNDIConnectionInformationLibrary : public UBlueprintFunctionLibrary
{
GENERATED_BODY()
private:
/**
Returns a value indicating whether the two structures are comparably equal
@param A The structure used as the source comparator
@param B The structure used as the target comparator
@return The resulting value of the comparator operator
*/
UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO",
META = (DisplayName = "Equals (NDI Connection Information)",
CompactNodeTitle = "==", Keywords = "= == Equals", AllowPrivateAccess = true))
static bool K2_Compare_NDIConnectionInformation(FNDIConnectionInformation A, FNDIConnectionInformation B)
{
return A == B;
}
/**
Returns a value indicating whether the two structures are NOT comparably equal
@param A The structure used as the source comparator
@param B The structure used as the target comparator
@return The resulting value of the comparator operator
*/
UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO",
META = (DisplayName = "Not Equals (NDI Connection Information)",
CompactNodeTitle = "!=", Keywords = "! != Not Equals", AllowPrivateAccess = true))
static bool K2_Compare_Not_NDIConnectionInformation(FNDIConnectionInformation A, FNDIConnectionInformation B)
{
return A != B;
}
/**
Returns a value indicating whether the property values of the supplied structure are valid
@param ConnectionInformation The structure to validate
@return An indication of the supplied structure's validity
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Is Valid?", AllowPrivateAccess = true))
static bool K2_NDIConnectionInformation_IsValid(FNDIConnectionInformation& ConnectionInformation)
{
return ConnectionInformation.IsValid();
}
/**
Resets the structure's properties to their default values
@param ConnectionInformation The structure to reset to the default value
@return The reference to the passed in structure after the 'reset' has been completed
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO",
META = (DisplayName = "Reset Connection Information", AllowPrivateAccess = true))
static UPARAM(ref) FNDIConnectionInformation& K2_NDIConnectionInformation_Reset(
UPARAM(ref) FNDIConnectionInformation& ConnectionInformation)
{
// call the underlying function to reset the properties of the object
ConnectionInformation.Reset();
// return the ConnectionInformation object reference
return ConnectionInformation;
}
};

View File

@@ -1,121 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Kismet/BlueprintFunctionLibrary.h>
#include <Structures/NDIConnectionInformation.h>
#include <Objects/Media/NDIMediaReceiver.h>
#include <Objects/Media/NDIMediaSender.h>
#include "NDIIOLibrary.generated.h"
/**
A metadata element as returned by K2_ParseNDIMetaData()
Blueprints do not support recursive datastructures, so parsing metadata
with this will result in only the top-level elements being returned.
*/
USTRUCT(BlueprintType)
struct FNDIMetaDataElement
{
GENERATED_USTRUCT_BODY()
/** Tag name of the parsed XML element. */
UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category = "Metadata")
FString ElementName;
/** Attribute name/value pairs of the parsed element. */
UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category = "Metadata")
TMap<FString,FString> Attributes;
/** Content of the parsed element (presumably its text data — confirm against the parser). */
UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category = "Metadata")
FString Data;
};
/** Blueprint function library exposing NDI source discovery, active-viewport broadcast control, and metadata parsing. */
UCLASS(META = (DisplayName = "NDI IO Library"))
class NDIIO_API UNDIIOLibrary : public UBlueprintFunctionLibrary
{
GENERATED_BODY()
private:
/**
Retrieves a collection of NDI sources appearing on the network
@return A collection of NDI Sources appearing on the network
*/
UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO",
META = (DisplayName = "Get NDI Source Collection", AllowPrivateAccess = true))
static const TArray<FNDIConnectionInformation> K2_GetNDISourceCollection();
/**
Attempts to search the NDI Source Collection for the source name, returning a result indicating
success with the ConnectionInformation parameter filled with the found connection
@param ConnectionInformation The connection information for a successful find with the supplied InSourceName
@param InSourceName The name of the source to find within the collection of NDI sources
@return The result of whether the search was successful
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO",
META = (DisplayName = "Find Network Source by Name", DefaultToSelf = "WorldContextObject",
HidePin = "WorldContextObject", AllowPrivateAccess = true))
static const bool K2_FindNetworkSourceByName(UObject* WorldContextObject,
FNDIConnectionInformation& ConnectionInformation,
FString InSourceName = FString(""));
private:
/**
Attempts to start broadcasting the active viewport. The output of the active viewport is the current camera
that is actively being viewed (through), and does not have to be an NDI Broadcast Viewport Component.
@return The result of whether broadcasting the active viewport was started
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO",
META = (DisplayName = "Begin Broadcasting Active Viewport", DefaultToSelf = "WorldContextObject",
HidePin = "WorldContextObject", AllowPrivateAccess = true))
static bool K2_BeginBroadcastingActiveViewport(UObject* WorldContextObject);
/**
Will stop broadcasting the active viewport, which was started by a previous call to 'Begin Broadcasting Active
Viewport'
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO",
META = (DisplayName = "Stop Broadcasting Active Viewport", DefaultToSelf = "WorldContextObject",
HidePin = "WorldContextObject", AllowPrivateAccess = true))
static void K2_StopBroadcastingActiveViewport(UObject* WorldContextObject);
private:
/**
Returns an NDI Media Receiver object
@param Receiver The Receiver object to return
@return The selected Receiver object
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO",
META = (DisplayName = "Get NDI Media Receiver", AllowPrivateAccess = true))
static UPARAM(ref) UNDIMediaReceiver* K2_GetNDIMediaReceiver(UNDIMediaReceiver* Receiver = nullptr);
/**
Returns an NDI Media Sender object
@param Sender The Sender object to return
@return The selected Sender object
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO",
META = (DisplayName = "Get NDI Media Sender", AllowPrivateAccess = true))
static UPARAM(ref) UNDIMediaSender* K2_GetNDIMediaSender(UNDIMediaSender* Sender = nullptr);
private:
/**
Parses a string as metadata
Blueprints do not support recursive datastructures, so parsing metadata
with this will result in only the top-level elements being returned.
@param Data The metadata string (XML) to parse
@return The top-level elements found in the metadata
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO",
META = (DisplayName = "Parse NDI MetaData", AllowPrivateAccess = true))
static const TArray<FNDIMetaDataElement> K2_ParseNDIMetaData(FString Data);
};

View File

@@ -1,70 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Kismet/BlueprintFunctionLibrary.h>
#include <Structures/NDIReceiverPerformanceData.h>
#include "NDIReceiverPerformanceDataLibrary.generated.h"
/** Blueprint function library providing comparison and reset helpers for FNDIReceiverPerformanceData. */
UCLASS(NotBlueprintable, BlueprintType, Category = "NDI IO",
META = (DisplayName = "NDI Receiver Performance Data Library"))
class NDIIO_API UNDIReceiverPerformanceDataLibrary : public UBlueprintFunctionLibrary
{
GENERATED_BODY()
private:
/**
Returns a value indicating whether the two structures are comparably equal
@param A The structure used as the source comparator
@param B The structure used as the target comparator
@return The resulting value of the comparator operator
*/
UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO",
META = (DisplayName = "Equals (NDI Receiver Performance Data)",
CompactNodeTitle = "==", Keywords = "= == Equals", AllowPrivateAccess = true))
static bool K2_Compare_NDIReceiverPerformanceData(FNDIReceiverPerformanceData A, FNDIReceiverPerformanceData B)
{
return A == B;
}
/**
Returns a value indicating whether the two structures are NOT comparably equal
@param A The structure used as the source comparator
@param B The structure used as the target comparator
@return The resulting value of the comparator operator
*/
UFUNCTION(BlueprintCallable, BlueprintPure, Category = "NDI IO",
META = (DisplayName = "Not Equals (NDI Receiver Performance Data)",
CompactNodeTitle = "!=", Keywords = "! != Not Equals", AllowPrivateAccess = true))
static bool K2_Compare_Not_NDIReceiverPerformanceData(FNDIReceiverPerformanceData A, FNDIReceiverPerformanceData B)
{
return A != B;
}
/**
Resets the structure's properties to their default values
@param PerformanceData The structure to reset to the default value
@return The reference to the passed in structure after the 'reset' has been completed
*/
UFUNCTION(BlueprintCallable, Category = "NDI IO",
META = (DisplayName = "Reset Receiver Performance Data", AllowPrivateAccess = true))
static UPARAM(ref) FNDIReceiverPerformanceData& K2_NDIReceiverPerformanceData_Reset(
UPARAM(ref) FNDIReceiverPerformanceData& PerformanceData)
{
// call the underlying function to reset the properties of the object
PerformanceData.Reset();
// return the Performance Data object reference
return PerformanceData;
}
};

View File

@@ -1,361 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <NDIIOPluginAPI.h>
#include <UObject/Object.h>
#include <Misc/Timecode.h>
#include <Misc/FrameRate.h>
#include <TimeSynchronizableMediaSource.h>
#include <RendererInterface.h>
#include <Objects/Media/NDIMediaSoundWave.h>
#include <Objects/Media/NDIMediaTexture2D.h>
#include <Structures/NDIConnectionInformation.h>
#include <Structures/NDIReceiverPerformanceData.h>
#include "NDIMediaReceiver.generated.h"
/**
	Media option keys published by the NDI receiver -- presumably queried through the
	UNDIMediaReceiver::GetMediaOption / HasMediaOption overrides; confirm in the implementation.
*/
namespace NDIMediaOption
{
	// Marker option identifying a media source as an NDI media receiver
	static const FName IsNDIMediaReceiver("IsNDIMediaReceiver");

	// Frame-buffer depth limits for the video, audio, and ancillary streams
	static const FName MaxVideoFrameBuffer("MaxVideoFrameBuffer");
	static const FName MaxAudioFrameBuffer("MaxAudioFrameBuffer");
	static const FName MaxAncillaryFrameBuffer("MaxAncillaryFrameBuffer");
}
/**
	Delegates to notify that the NDIMediaReceiver has received a video, audio, or metadata frame
*/
// Broadcast after the given receiver has captured a video frame
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaReceiverVideoReceived, UNDIMediaReceiver*, Receiver);
// Broadcast after the given receiver has captured an audio frame
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaReceiverAudioReceived, UNDIMediaReceiver*, Receiver);
// Broadcast after the given receiver has captured a metadata frame; 'Data' carries the metadata
// payload (typically XML) and 'bAttachedToVideoFrame' tells whether it arrived attached to a video frame
DECLARE_DYNAMIC_MULTICAST_DELEGATE_ThreeParams(FNDIMediaReceiverMetaDataReceived, UNDIMediaReceiver*, Receiver, FString, Data, bool, bAttachedToVideoFrame);
/**
	A Media object representing the NDI Receiver for being able to receive Audio, Video, and Metadata over NDI
*/
UCLASS(BlueprintType, Blueprintable, HideCategories = ("Platforms"), Category = "NDI IO",
	HideCategories = ("Information"), AutoCollapseCategories = ("Content"),
	META = (DisplayName = "NDI Media Receiver"))
class NDIIO_API UNDIMediaReceiver : public UTimeSynchronizableMediaSource
{
	GENERATED_BODY()

public:
	/**
		Information describing detailed information about the sender this receiver is to connect to
	*/
	UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Settings",
		META = (DisplayName = "Connection", AllowPrivateAccess = true))
	FNDIConnectionInformation ConnectionSetting;

private:
	/**
		The current frame count, seconds, minutes, and hours in time-code notation
	*/
	UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information",
		META = (DisplayName = "Timecode", AllowPrivateAccess = true))
	FTimecode Timecode;

	/**
		The desired number of frames (per second) for video to be displayed
	*/
	UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information",
		META = (DisplayName = "Frame Rate", AllowPrivateAccess = true))
	FFrameRate FrameRate;

	/**
		The width and height of the last received video frame
	*/
	UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information",
		META = (DisplayName = "Resolution", AllowPrivateAccess = true))
	FIntPoint Resolution;

	/**
		Indicates whether the timecode should be synced to the Source Timecode value
	*/
	UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Settings",
		META = (DisplayName = "Sync Timecode to Source", AllowPrivateAccess = true))
	bool bSyncTimecodeToSource = true;

	/**
		Should perform the sRGB to Linear color space conversion
	*/
	UPROPERTY(BlueprintReadonly, VisibleAnywhere, Category = "Information",
		META = (DisplayName = "Perform sRGB to Linear?", AllowPrivateAccess = true))
	bool bPerformsRGBtoLinear = true;

	/**
		Information describing detailed information about the sender this receiver is currently connected to
	*/
	UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information",
		META = (DisplayName = "Connection Information", AllowPrivateAccess = true))
	FNDIConnectionInformation ConnectionInformation;

	/**
		Information describing detailed information about the receiver performance when connected to an NDI sender
	*/
	UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information",
		META = (DisplayName = "Performance Data", AllowPrivateAccess = true))
	FNDIReceiverPerformanceData PerformanceData;

	/**
		Provides an NDI Video Texture object to render videos frames from the source onto (optional)
	*/
	UPROPERTY(BlueprintReadWrite, EditAnywhere, BlueprintSetter = "ChangeVideoTexture", Category = "Content",
		AdvancedDisplay, META = (DisplayName = "Video Texture (optional)", AllowPrivateAccess = true))
	UNDIMediaTexture2D* VideoTexture = nullptr;

public:
	// Native (C++) events fired on connection state changes and on captured frames.
	// The capture events hand out the raw NDI SDK frame structures.
	DECLARE_EVENT_OneParam(FNDIMediaReceiverConnectionEvent, FOnReceiverConnectionEvent,
		UNDIMediaReceiver*) FOnReceiverConnectionEvent OnNDIReceiverConnectedEvent;
	DECLARE_EVENT_OneParam(FNDIMediaReceiverDisconnectionEvent, FOnReceiverDisconnectionEvent,
		UNDIMediaReceiver*) FOnReceiverDisconnectionEvent OnNDIReceiverDisconnectedEvent;
	DECLARE_EVENT_TwoParams(FNDIMediaReceiverVideoCaptureEvent, FOnReceiverVideoCaptureEvent,
		UNDIMediaReceiver*, const NDIlib_video_frame_v2_t&) FOnReceiverVideoCaptureEvent OnNDIReceiverVideoCaptureEvent;
	DECLARE_EVENT_TwoParams(FNDIMediaReceiverAudioCaptureEvent, FOnReceiverAudioCaptureEvent,
		UNDIMediaReceiver*, const NDIlib_audio_frame_v2_t&) FOnReceiverAudioCaptureEvent OnNDIReceiverAudioCaptureEvent;
	DECLARE_EVENT_TwoParams(FNDIMediaReceiverMetadataCaptureEvent, FOnReceiverMetadataCaptureEvent,
		UNDIMediaReceiver*, const NDIlib_metadata_frame_t&) FOnReceiverMetadataCaptureEvent OnNDIReceiverMetadataCaptureEvent;

	// Blueprint-assignable notifications mirroring the native capture events above
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On Video Received by Receiver", AllowPrivateAccess = true))
	FNDIMediaReceiverVideoReceived OnReceiverVideoReceived;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On Audio Received by Receiver", AllowPrivateAccess = true))
	FNDIMediaReceiverAudioReceived OnReceiverAudioReceived;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On MetaData Received by Receiver", AllowPrivateAccess = true))
	FNDIMediaReceiverMetaDataReceived OnReceiverMetaDataReceived;

public:
	UNDIMediaReceiver();

	/**
		Called before destroying the object. This is called immediately upon deciding to destroy the object,
		to allow the object to begin an asynchronous cleanup process.
	*/
	void BeginDestroy() override;

	/**
		Attempts to perform initialization logic for creating a receiver through the NDI sdk api
	*/
	enum class EUsage
	{
		Standalone, // The receiver automatically captures its own video frame every engine render frame
		Controlled  // The user of the receiver manually triggers capturing a frame through CaptureConnectedVideo/Audio()
	};
	bool Initialize(const FNDIConnectionInformation& InConnectionInformation, EUsage InUsage);
	bool Initialize(EUsage InUsage);

	/**
		Attempt to (re-)start the connection
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Start Connection"))
	void StartConnection();

	/**
		Stop the connection
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Stop Connection"))
	void StopConnection();

	/**
		Attempts to change the connection to another NDI sender source
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Connection"))
	void ChangeConnection(const FNDIConnectionInformation& InConnectionInformation);

	/**
		Attempts to change the Video Texture object used as the video frame capture object
		@param InVideoTexture The texture to render received frames into; nullptr reverts to the internal texture
	*/
	UFUNCTION(BlueprintSetter)
	void ChangeVideoTexture(UNDIMediaTexture2D* InVideoTexture = nullptr);

	/**
		Attempts to generate the pcm data required by the 'AudioWave' object
		@param AudioWave The sound wave requesting data
		@param PCMData Destination buffer for the generated PCM data
		@param SamplesNeeded Number of samples requested by the engine
		@return The amount of PCM data generated (see implementation for exact semantics)
	*/
	int32 GeneratePCMData(UNDIMediaSoundWave* AudioWave, uint8* PCMData, const int32 SamplesNeeded);

	/** Returns the number of audio channels */
	int32 GetAudioChannels();

	/**
		Attempts to register a sound wave object with this object
	*/
	void RegisterAudioWave(UNDIMediaSoundWave* InAudioWave = nullptr);

	/**
		This will send a metadata frame to the sender
		The data is expected to be valid XML
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Metadata To Sender"))
	void SendMetadataFrame(const FString& Data);

	/**
		This will send a metadata frame to the sender
		The data will be formatted as: <Element>ElementData</Element>
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Metadata To Sender (Element + Data)"))
	void SendMetadataFrameAttr(const FString& Element, const FString& ElementData);

	/**
		This will send a metadata frame to the sender
		The data will be formatted as: <Element Key0="Value0" Key1="Value1" Keyn="Valuen"/>
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Metadata To Sender (Element + Attributes)"))
	void SendMetadataFrameAttrs(const FString& Element, const TMap<FString,FString>& Attributes);

	/**
		This will set the up-stream tally notifications. If no streams are connected, it will automatically
		send the tally state upon connection
	*/
	void SendTallyInformation(const bool& IsOnPreview, const bool& IsOnProgram);

	/**
		Attempts to immediately stop receiving frames from the connected NDI sender
	*/
	void Shutdown();

	/**
		Remove the AudioWave object from this object (if it was previously registered)
		@param InAudioWave An NDIMediaSoundWave object registered with this object
	*/
	void UnregisterAudioWave(UNDIMediaSoundWave* InAudioWave = nullptr);

	/**
		Updates the DynamicMaterial with the VideoTexture of this object
	*/
	void UpdateMaterialTexture(class UMaterialInstanceDynamic* MaterialInstance, FString ParameterName);

	/**
		Attempts to capture a frame from the connected source. If a new frame is captured, broadcast it to
		interested receivers through the capture event. Returns true if new data was captured.
	*/
	bool CaptureConnectedVideo();
	bool CaptureConnectedAudio();
	bool CaptureConnectedMetadata();

	/**
		Attempts to immediately update the 'VideoTexture' object with the captured video frame
		@param video_frame The raw NDI video frame to display
		@return The RHI texture the frame was drawn into
	*/
	FTextureRHIRef DisplayFrame(const NDIlib_video_frame_v2_t& video_frame);

private:
	// Updates the cached connection state (see bIsCurrentlyConnected)
	void SetIsCurrentlyConnected(bool bConnected);

	/**
		Attempts to gather the performance metrics of the connection to the remote source
	*/
	void GatherPerformanceMetrics();

public:
	/**
		Set whether or not the sRGB to Linear conversion is performed
	*/
	void PerformsRGBToLinearConversion(bool Value);

	/**
		Returns the current framerate of the connected source
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Current Frame Rate"))
	const FFrameRate& GetCurrentFrameRate() const;

	/**
		Returns the current resolution of the connected source
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Current Resolution"))
	const FIntPoint& GetCurrentResolution() const;

	/**
		Returns the current timecode of the connected source
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Current Timecode"))
	const FTimecode& GetCurrentTimecode() const;

	/**
		Returns the current connection information of the connected source
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Current Connection Information"))
	const FNDIConnectionInformation& GetCurrentConnectionInformation() const;

	/**
		Returns the current performance data of the receiver while connected to the source
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Get Performance Data"))
	const FNDIReceiverPerformanceData& GetPerformanceData() const;

	/** Returns a value indicating whether this object is currently connected to the sender source */
	// NOTE(review): the top-level 'const' on the bool return value has no effect; kept to match
	// the existing out-of-line definition
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Is Currently Connected"))
	const bool GetIsCurrentlyConnected() const;

private:
	/**
		Perform the color conversion (if any) and bit copy from the gpu
	*/
	FTextureRHIRef DrawProgressiveVideoFrame(FRHICommandListImmediate& RHICmdList, const NDIlib_video_frame_v2_t& Result);
	FTextureRHIRef DrawProgressiveVideoFrameAlpha(FRHICommandListImmediate& RHICmdList, const NDIlib_video_frame_v2_t& Result);
	FTextureRHIRef DrawInterlacedVideoFrame(FRHICommandListImmediate& RHICmdList, const NDIlib_video_frame_v2_t& Result);
	FTextureRHIRef DrawInterlacedVideoFrameAlpha(FRHICommandListImmediate& RHICmdList, const NDIlib_video_frame_v2_t& Result);

	// Media source validation -- this receiver is always considered valid
	virtual bool Validate() const override
	{
		return true;
	}
	virtual FString GetUrl() const override;

	// Accessors for the texture resources backing the external and internal video textures
	FTextureResource* GetVideoTextureResource() const;
	FTextureResource* GetInternalVideoTextureResource() const;

#if WITH_EDITORONLY_DATA
	virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override;
#endif

public:
	// Media option queries; see the NDIMediaOption namespace for the supported keys -- confirm in implementation
	virtual bool GetMediaOption(const FName& Key, bool DefaultValue) const override;
	virtual int64 GetMediaOption(const FName& Key, int64 DefaultValue) const override;
	virtual FString GetMediaOption(const FName& Key, const FString& DefaultValue) const override;
	virtual bool HasMediaOption(const FName& Key) const override;

private:
	// Timestamp and format of the last processed video frame -- presumably used to detect
	// new/changed frames; verify against the implementation
	int64_t LastFrameTimestamp = 0;
	NDIlib_frame_format_type_e LastFrameFormatType = NDIlib_frame_format_type_max;

	// Cached connection state, updated via SetIsCurrentlyConnected
	bool bIsCurrentlyConnected = false;

	// NDI SDK receiver and framesync instances (nullptr until initialized)
	NDIlib_recv_instance_t p_receive_instance = nullptr;
	NDIlib_framesync_instance_t p_framesync_instance = nullptr;

	// Guards for the render, audio, metadata, and connection state respectively
	FCriticalSection RenderSyncContext;
	FCriticalSection AudioSyncContext;
	FCriticalSection MetadataSyncContext;
	FCriticalSection ConnectionSyncContext;

	// Sound waves registered via RegisterAudioWave that consume this receiver's audio
	TArray<UNDIMediaSoundWave*> AudioSourceCollection;

	// Texture used when no external 'VideoTexture' has been supplied -- TODO confirm
	UNDIMediaTexture2D* InternalVideoTexture = nullptr;

	// GPU resources used while converting the received frame for display
	FTextureRHIRef SourceTexture;
	FTextureRHIRef SourceAlphaTexture;
	FPooledRenderTargetDesc RenderTargetDescriptor;
	TRefCountPtr<IPooledRenderTarget> RenderTarget;

	// Which conversion path was selected for drawing (see the DrawProgressive/Interlaced helpers)
	enum class EDrawMode
	{
		Invalid,
		Progressive,
		ProgressiveAlpha,
		Interlaced,
		InterlacedAlpha
	};
	EDrawMode DrawMode = EDrawMode::Invalid;

	// Delegate handles for the render-thread frame-end and video capture callbacks
	FDelegateHandle FrameEndRTHandle;
	FDelegateHandle VideoCaptureEventHandle;
};

View File

@@ -1,362 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <NDIIOPluginAPI.h>
#include <RendererInterface.h>
#include <UObject/Object.h>
#include <Misc/FrameRate.h>
#include <Engine/TextureRenderTarget2D.h>
#include <Sound/SoundSubmix.h>
#include <Structures/NDIBroadcastConfiguration.h>
#include <Objects/Media/NDIMediaTexture2D.h>
#include <BaseMediaSource.h>
#include <Misc/EngineVersionComparison.h>
#include <string>
#include "NDIMediaSender.generated.h"
/**
A delegate used for notifications on property changes on the NDIMediaSender object
*/
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaSenderPropertyChanged, UNDIMediaSender*, Sender);
/**
A delegate used for notifications on the NDIMediaSender object receiving metadata
*/
DECLARE_DYNAMIC_MULTICAST_DELEGATE_TwoParams(FNDIMediaSenderMetaDataReceived, UNDIMediaSender*, Sender, FString, Data);
/**
Delegates to notify just before and after the NDIMediaSender sends a video, audio, or metadata frame
*/
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaSenderVideoPreSend, UNDIMediaSender*, Sender);
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaSenderVideoSent, UNDIMediaSender*, Sender);
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaSenderAudioPreSend, UNDIMediaSender*, Sender);
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaSenderAudioSent, UNDIMediaSender*, Sender);
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaSenderMetaDataPreSend, UNDIMediaSender*, Sender);
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FNDIMediaSenderMetaDataSent, UNDIMediaSender*, Sender);
/**
	Defines a media object representing an NDI(R) Sender object. This object is used with the
	NDI Broadcast Component to send Audio / Video / Metadata to a 'receiving' NDI object.
*/
UCLASS(BlueprintType, Blueprintable, HideCategories = ("Platforms"), Category = "NDI IO",
	HideCategories = ("Information"), AutoCollapseCategories = ("Content"),
	META = (DisplayName = "NDI Sender Object"))
class NDIIO_API UNDIMediaSender : public UBaseMediaSource
{
	GENERATED_UCLASS_BODY()

private:
	/** Describes a user-friendly name of the output stream to differentiate from other output streams on the current
	 * machine */
	UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "Broadcast Settings",
		META = (DisplayName = "Source Name", AllowPrivateAccess = true))
	FString SourceName = TEXT("Unreal Engine Output");

	/** Describes the output frame size while sending video frame over NDI */
	UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "Broadcast Settings",
		META = (DisplayName = "Frame Size", AllowPrivateAccess = true))
	FIntPoint FrameSize = FIntPoint(1920, 1080);

	/** Represents the desired number of frames (per second) for video to be sent over NDI */
	UPROPERTY(BlueprintReadwrite, EditDefaultsOnly, Category = "Broadcast Settings",
		META = (DisplayName = "Frame Rate", AllowPrivateAccess = true))
	FFrameRate FrameRate = FFrameRate(60, 1);

	/** Sets whether or not to output an alpha channel */
	UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "Broadcast Settings",
		META = (DisplayName="Output Alpha", AllowPrivateAccess = true))
	bool OutputAlpha = false;

	/** Minimum of the alpha remapping range applied to outgoing frames (see ChangeAlphaRemap) */
	UPROPERTY(BlueprintReadonly, VisibleAnywhere, Category = "Broadcast Settings",
		META = (DisplayName = "Alpha Remap Min", AllowPrivateAccess = true))
	float AlphaMin = 0.f;

	/** Maximum of the alpha remapping range applied to outgoing frames (see ChangeAlphaRemap) */
	UPROPERTY(BlueprintReadonly, VisibleAnywhere, Category = "Broadcast Settings",
		META = (DisplayName = "Alpha Remap Max", AllowPrivateAccess = true))
	float AlphaMax = 1.f;

	/** Sets whether or not audio is enabled for this sender */
	UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "Broadcast Settings",
		META = (DisplayName="Enable Audio", AllowPrivateAccess = true))
	bool bEnableAudio = true;

	/** Sets whether or not to present PTZ capabilities */
	UPROPERTY(BlueprintReadWrite, EditDefaultsOnly, Category = "Broadcast Settings",
		META = (DisplayName="Enable PTZ", AllowPrivateAccess = true))
	bool bEnablePTZ = true;

	/** Indicates the texture to send over NDI (optional) */
	UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Content",
		AdvancedDisplay, META = (DisplayName = "Render Target (optional)", AllowPrivateAccess = true))
	UTextureRenderTarget2D* RenderTarget = nullptr;

	/**
		Should perform the Linear to sRGB color space conversion
	*/
	UPROPERTY(BlueprintReadonly, VisibleAnywhere, Category = "Information",
		META = (DisplayName = "Perform Linear to sRGB?", AllowPrivateAccess = true))
	bool bPerformLinearTosRGB = true;

public:
	// Fired when the broadcast configuration (frame size / frame rate) of this sender changes
	UPROPERTY()
	FNDIMediaSenderPropertyChanged OnBroadcastConfigurationChanged;

	// Blueprint-assignable notifications for received metadata and for the
	// pre-/post-send of video, audio, and metadata frames
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On MetaData Received by Sender", AllowPrivateAccess = true))
	FNDIMediaSenderMetaDataReceived OnSenderMetaDataReceived;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On Before Video Being Sent by Sender", AllowPrivateAccess = true))
	FNDIMediaSenderVideoPreSend OnSenderVideoPreSend;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On Video Sent by Sender", AllowPrivateAccess = true))
	FNDIMediaSenderVideoSent OnSenderVideoSent;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On Before Audio Being Sent by Sender", AllowPrivateAccess = true))
	FNDIMediaSenderAudioPreSend OnSenderAudioPreSend;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On Audio Sent by Sender", AllowPrivateAccess = true))
	FNDIMediaSenderAudioSent OnSenderAudioSent;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On Before MetaData Being Sent by Sender", AllowPrivateAccess = true))
	FNDIMediaSenderMetaDataPreSend OnSenderMetaDataPreSend;
	UPROPERTY(BlueprintAssignable, Category="NDI Events", META = (DisplayName = "On MetaData Sent by Sender", AllowPrivateAccess = true))
	FNDIMediaSenderMetaDataSent OnSenderMetaDataSent;

public:
	/**
		Attempts to perform initialization logic for creating a sender through the NDI(R) sdk api
		@param SubmixCapture The audio submix whose buffer is captured for sending -- TODO confirm usage
	*/
	void Initialize(USoundSubmix* SubmixCapture);

	/**
		Changes the name of the sender object as seen on the network for remote connections
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Source Name"))
	void ChangeSourceName(const FString& InSourceName);

	/**
		Attempts to change the Broadcast information associated with this media object
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Change Broadcast Configuration"))
	void ChangeBroadcastConfiguration(const FNDIBroadcastConfiguration& InConfiguration);

	/**
		This will send a metadata frame to all receivers
		The data is expected to be valid XML
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Metadata To Receivers"))
	void SendMetadataFrame(const FString& Data, bool AttachToVideoFrame = true);

	/**
		This will send a metadata frame to all receivers
		The data will be formatted as: <Element>ElementData</Element>
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Metadata To Receivers (Element + Data)"))
	void SendMetadataFrameAttr(const FString& Element, const FString& ElementData, bool AttachToVideoFrame = true);

	/**
		This will send a metadata frame to all receivers
		The data will be formatted as: <Element Key0="Value0" Key1="Value1" Keyn="Valuen"/>
	*/
	UFUNCTION(BlueprintCallable, Category = "NDI IO", META = (DisplayName = "Send Metadata To Receivers (Element + Attributes)"))
	void SendMetadataFrameAttrs(const FString& Element, const TMap<FString,FString>& Attributes, bool AttachToVideoFrame = true);

	/**
		Attempts to change the RenderTarget used in sending video frames over NDI
	*/
	void ChangeVideoTexture(UTextureRenderTarget2D* VideoTexture = nullptr);

	/**
		Change the alpha remapping settings
	*/
	void ChangeAlphaRemap(float AlphaMinIn, float AlphaMaxIn);

	/**
		Determines the current tally information. If you specify a timeout then it will wait until it has
		changed, otherwise it will simply poll it and return the current tally immediately
		@param IsOnPreview - A state indicating whether this source in on preview of a receiver
		@param IsOnProgram - A state indicating whether this source is on program of a receiver
		@param Timeout - Indicates the amount of time to wait (in milliseconds) until a change has occurred
	*/
	void GetTallyInformation(bool& IsOnPreview, bool& IsOnProgram, uint32 Timeout = 0);

	/**
		Gets the current number of receivers connected to this source. This can be used to avoid rendering
		when nothing is connected to the video source, which can significantly improve the efficiency if
		you want to make a lot of sources available on the network
	*/
	void GetNumberOfConnections(int32& Result);

	/**
		Attempts to immediately stop sending frames over NDI to any connected receivers
	*/
	void Shutdown();

	/**
		Called before destroying the object. This is called immediately upon deciding to destroy the object,
		to allow the object to begin an asynchronous cleanup process.
	*/
	virtual void BeginDestroy() override;

	/**
		Set whether or not a Linear to sRGB conversion is made
	*/
	void PerformLinearTosRGBConversion(bool Value);

	/**
		Set whether or not to enable PTZ support
	*/
	void EnablePTZ(bool Value);

	/**
		Returns the Render Target used for sending a frame over NDI
	*/
	UTextureRenderTarget2D* GetRenderTarget();

	/** Returns the configured output frame size */
	const FIntPoint& GetFrameSize()
	{
		return this->FrameSize;
	}

	/** Returns the configured output frame rate */
	const FFrameRate& GetFrameRate()
	{
		return this->FrameRate;
	}

private:
	// Creates the underlying NDI SDK sender instance (see p_send_instance)
	bool CreateSender();

	/**
		Attempts to get a metadata frame from the sender.
		If there is one, the data is broadcast through OnSenderMetaDataReceived.
		Returns true if metadata was received, false otherwise.
	*/
	bool GetMetadataFrame();

	/**
		This will attempt to generate an audio frame, add the frame to the stack and return immediately,
		having scheduled the frame asynchronously.
	*/
	void TrySendAudioFrame(int64 time_code, float* AudioData, int32 NumSamples, int32 NumChannels, const int32 SampleRate, double AudioClock);

	/**
		This will attempt to generate a video frame, add the frame to the stack and return immediately,
		having scheduled the frame asynchronously.
	*/
	void TrySendVideoFrame(int64 time_code = 0);

	/**
		Perform the color conversion (if any) and bit copy from the gpu
	*/
	bool DrawRenderTarget(FRHICommandListImmediate& RHICmdList);

	/**
		Change the render target configuration based on the passed in parameters
		@param InFrameSize The frame size to resize the render target to
		@param InFrameRate The frame rate at which we should be sending frames via NDI
	*/
	void ChangeRenderTargetConfiguration(FIntPoint InFrameSize, FFrameRate InFrameRate);

	// Media source validation -- this sender is always considered valid
	virtual bool Validate() const override
	{
		return true;
	}

	// This sender has no associated URL
	virtual FString GetUrl() const override
	{
		return FString();
	}

	// Accessor for the texture resource backing the render target
	FTextureResource* GetRenderTargetResource() const;

	// Prepares DefaultVideoTextureRHI for use when no render target content is available -- TODO confirm
	void PrepareDefaultTexture();

private:
	// Set while the broadcast output is being resized -- presumably checked by the send paths; verify
	std::atomic<bool> bIsChangingBroadcastSize { false };

	// Timecode of the most recently rendered frame
	FTimecode LastRenderTime;

	// Fallback video texture (see PrepareDefaultTexture)
	FTextureRHIRef DefaultVideoTextureRHI;

	// Audio sample buffer pending send -- presumably interleaved; verify against TrySendAudioFrame
	TArray<float> SendAudioData;

	// Reusable NDI video frame descriptor handed to the NDI SDK
	NDIlib_video_frame_v2_t NDI_video_frame;

	// NDI SDK sender instance (nullptr until created by CreateSender)
	NDIlib_send_instance_t p_send_instance = nullptr;

	// Guards for the audio-send and render/video-send state respectively
	FCriticalSection AudioSyncContext;
	FCriticalSection RenderSyncContext;

	/**
		A texture with CPU readback
	*/
	class MappedTexture
	{
	private:
		FTextureRHIRef Texture = nullptr;	// CPU-readable staging texture
		void* pData = nullptr;				// Pointer to the mapped texture memory while mapped, else nullptr
		std::string MetaData;				// Per-frame metadata accumulated via AddMetaData
		FIntPoint FrameSize;				// Dimensions of 'Texture'

	public:
		~MappedTexture();

		void Create(FIntPoint FrameSize);
		void Destroy();
		FIntPoint GetSizeXY() const;
		// Copies the source texture into the CPU-readable staging texture
		void Resolve(FRHICommandListImmediate& RHICmdList, FRHITexture* SourceTextureRHI, const FResolveRect& Rect = FResolveRect(), const FResolveRect& DestRect = FResolveRect());
		// Maps the staging texture for CPU access; outputs its dimensions and line stride
		void Map(FRHICommandListImmediate& RHICmdList, int32& OutWidth, int32& OutHeight, int32& OutLineStride);
		void* MappedData() const;
		void Unmap(FRHICommandListImmediate& RHICmdList);
		void AddMetaData(const FString& Data);
		const std::string& GetMetaData() const;

	private:
		void PrepareTexture();
	};

	/**
		Class for managing the sending of mapped texture data to an NDI video stream.
		Sending is done asynchronously, so mapping and unmapping of texture data must
		be managed so that CPU accessible texture content remains valid until the
		sending of the frame is guaranteed to have been completed. This is achieved
		by double-buffering readback textures.
	*/
	class MappedTextureASyncSender
	{
	private:
		MappedTexture MappedTextures[2];	// Double-buffered readback textures
		int32 CurrentIndex = 0;				// Index of the buffer currently in use

	public:
		void Create(FIntPoint FrameSize);
		void Destroy();
		FIntPoint GetSizeXY() const;
		void Resolve(FRHICommandListImmediate& RHICmdList, FRHITexture* SourceTextureRHI, const FResolveRect& Rect = FResolveRect(), const FResolveRect& DestRect = FResolveRect());
		void Map(FRHICommandListImmediate& RHICmdList, int32& OutWidth, int32& OutHeight, int32& OutLineStride);
		// Hands the current mapped buffer to the NDI SDK for asynchronous sending
		void Send(FRHICommandListImmediate& RHICmdList, NDIlib_send_instance_t p_send_instance, NDIlib_video_frame_v2_t& p_video_data);
		// Ensures any in-flight asynchronous send has completed
		void Flush(FRHICommandListImmediate& RHICmdList, NDIlib_send_instance_t p_send_instance);
		void AddMetaData(const FString& Data);
	};

	// Double-buffered CPU readback used to asynchronously hand frames to the NDI SDK
	MappedTextureASyncSender ReadbackTextures;
	// Whether the readback textures were created for alpha output -- TODO confirm
	bool ReadbackTexturesHaveAlpha = false;
	// Descriptor of the pooled render target used when drawing/converting the output frame
	FPooledRenderTargetDesc RenderTargetDescriptor;
};

View File

@@ -1,42 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Sound/SoundWaveProcedural.h>
#include "NDIMediaSoundWave.generated.h"
/**
	Defines a SoundWave object used by an NDI Media Receiver object for capturing audio from
	a network source
*/
UCLASS(NotBlueprintable, Category = "NDI IO", META = (DisplayName = "NDI Media Sound Wave"))
class NDIIO_API UNDIMediaSoundWave : public USoundWaveProcedural
{
	GENERATED_UCLASS_BODY()

public:
	/**
		Set the Media Source of this object, so that when this object is called to 'GeneratePCMData' by the engine
		we can request the media source to provide the pcm data from the current connected source
		@param InMediaSource The receiver to pull PCM data from; nullptr detaches this sound wave
	*/
	void SetConnectionSource(class UNDIMediaReceiver* InMediaSource = nullptr);

protected:
	/**
		Called by the engine to generate pcm data to be 'heard' by audio listener objects
		@param OutAudio Buffer receiving the generated PCM data
		@param NumSamples Number of samples requested by the engine
		@return The amount of PCM data generated (see implementation for exact semantics)
	*/
	virtual int32 OnGeneratePCMAudio(TArray<uint8>& OutAudio, int32 NumSamples) override final;

	// Keeps the object alive until it is safe to destroy -- presumably waits for the
	// audio thread to finish with this object; verify against the implementation
	virtual bool IsReadyForFinishDestroy() override final;

private:
	// Guards access to 'MediaSource' -- TODO confirm which threads contend here
	FCriticalSection SyncContext;
	// The receiver providing PCM data for this sound wave (nullptr when unattached)
	class UNDIMediaReceiver* MediaSource = nullptr;
};

View File

@@ -1,49 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Engine/Texture.h>
#include <Misc/EngineVersionComparison.h>
#include <RHI.h>
#include <RHICommandList.h>
#include "NDIMediaTexture2D.generated.h"
/**
	A Texture Object used by an NDI Media Receiver object for capturing video from
	a network source
*/
UCLASS(NotBlueprintType, NotBlueprintable, HideDropdown,
	HideCategories = (ImportSettings, Compression, Texture, Adjustments, Compositing, LevelOfDetail, Object),
	META = (DisplayName = "NDI Media Texture 2D"))
class NDIIO_API UNDIMediaTexture2D : public UTexture
{
	GENERATED_UCLASS_BODY()

public:
	// UTexture interface -- surface dimensions/class are reported from the underlying resource
	virtual float GetSurfaceHeight() const override;
	virtual float GetSurfaceWidth() const override;
	virtual float GetSurfaceDepth() const;
	virtual uint32 GetSurfaceArraySize() const;
	virtual ETextureClass GetTextureClass() const;
	virtual void GetResourceSizeEx(FResourceSizeEx& CumulativeResourceSize) override;
	virtual EMaterialValueType GetMaterialType() const override;

	/**
		Swaps the RHI texture this object's resource points at -- presumably invoked by the
		NDI receiver when a new video frame texture is available; confirm against the caller
	*/
	virtual void UpdateTextureReference(FRHICommandList& RHICmdList, FTextureRHIRef Reference) final;

private:
	/** Creates the texture resource backing this texture (see FNDIMediaTextureResource) */
	virtual class FTextureResource* CreateResource() override;

	// Internal accessors for the private resource pointer
	void SetMyResource(FTextureResource* ResourceIn);
	FTextureResource* GetMyResource();
	const FTextureResource* GetMyResource() const;
};

View File

@@ -1,54 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <ExternalTexture.h>
#include <TextureResource.h>
#include <Misc/EngineVersionComparison.h>
/**
	A Texture Resource object used by the NDIMediaTexture2D object for capturing video
	from a network source
*/
class NDIIO_API FNDIMediaTextureResource : public FTextureResource
{
public:
	/**
		Constructs a new instance of this object specifying a media texture owner
		@param Owner The media object used as the owner for this object
	*/
	FNDIMediaTextureResource(class UNDIMediaTexture2D* Owner = nullptr);

#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
	/** FTextureResource Interface Implementation for 'InitRHI' (replaces 'InitDynamicRHI' in UE 5.3+) */
	virtual void InitRHI(FRHICommandListBase& RHICmdList) override;

	/** FTextureResource Interface Implementation for 'ReleaseRHI' (replaces 'ReleaseDynamicRHI' in UE 5.3+) */
	virtual void ReleaseRHI() override;
#else
	/** FTextureResource Interface Implementation for 'InitDynamicRHI' */
	virtual void InitDynamicRHI() override;

	/** FTextureResource Interface Implementation for 'ReleaseDynamicRHI' */
	virtual void ReleaseDynamicRHI() override;
#endif

	/** Returns the resource's memory footprint; note this is not a virtual of FTextureResource */
	SIZE_T GetResourceSize();

	/** FTextureResource Interface Implementation for 'GetSizeX' */
	virtual uint32 GetSizeX() const override;

	/** FTextureResource Interface Implementation for 'GetSizeY' */
	virtual uint32 GetSizeY() const override;

private:
	// The owning media texture this resource was created for (may be nullptr)
	class UNDIMediaTexture2D* MediaTexture = nullptr;
};

View File

@@ -1,109 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Engine/TextureRenderTarget2D.h>
#include <AudioDevice.h>
#include <Misc/EngineVersionComparison.h>
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
#include <ISubmixBufferListener.h>
#endif
#include <Widgets/SWindow.h>
DECLARE_EVENT_OneParam(FNDICoreDelegates, FNDIConnectionServiceSendVideoEvent, int64)
DECLARE_EVENT_SixParams(FNDICoreDelegates, FNDIConnectionServiceSendAudioEvent, int64, float*, int32, int32, const int32, double)
/**
A service which runs and triggers updates for interested parties to be notified of
Audio and Video Frame events
*/
class NDIIO_API FNDIConnectionService final : public ISubmixBufferListener
{
public:
/** Event fired with the frame time code when a video frame should be sent */
static FNDIConnectionServiceSendVideoEvent EventOnSendVideoFrame;
private:
/** Per-submix audio send events; all access is guarded by 'AudioSyncContext' */
static TMap<USoundSubmix*, FNDIConnectionServiceSendAudioEvent> SubmixSendAudioFrameEvents;
public:
/**
Constructs a new instance of this object
*/
FNDIConnectionService();
// Begin the service
bool Start();
// Stop the service
void Shutdown();
/** Begins broadcasting the active viewport over NDI; returns whether broadcasting could be started */
bool BeginBroadcastingActiveViewport();
/** Stops broadcasting the active viewport over NDI */
void StopBroadcastingActiveViewport();
/** Returns whether the service currently considers itself in Play-In-Editor mode */
bool IsRunningInPIE() const
{
return bIsInPIEMode;
}
/**
Registers a member function of 'InUserObject' to receive captured audio for 'Submix'.
Thread-safe: takes 'AudioSyncContext' while mutating the event map.
*/
template <typename UserClass>
static void AddAudioSender(UserClass* InUserObject, USoundSubmix* Submix, typename TMemFunPtrType<false, UserClass, void (int64 /*time_code*/, float* /*AudioData*/, int32 /*NumSamples*/, int32 /*NumChannels*/, const int32 /*SampleRate*/, double /*AudioClock*/)>::Type InFunc)
{
FScopeLock Lock(&AudioSyncContext);
FNDIConnectionServiceSendAudioEvent& SendAudioEvent = SubmixSendAudioFrameEvents.FindOrAdd(Submix);
SendAudioEvent.AddUObject(InUserObject, InFunc);
}
/**
Removes 'InUserObject' from every submix audio event, discarding map entries
that end up with no remaining bindings. Thread-safe via 'AudioSyncContext'.
*/
template <typename UserClass>
static void RemoveAudioSender(UserClass* InUserObject)
{
FScopeLock Lock(&AudioSyncContext);
for (auto it = SubmixSendAudioFrameEvents.CreateIterator(); it; ++it)
{
it->Value.RemoveAll(InUserObject);
if (it->Value.IsBound() == false)
it.RemoveCurrent();
}
}
private:
// Handler for when the render thread frame has ended
void OnEndRenderFrame();
/** Starts submix audio capture for this service */
void BeginAudioCapture();
/** Stops submix audio capture for this service */
void StopAudioCapture();
/** Engine lifecycle hooks used to bring the service up and down */
void OnPostEngineInit();
void OnEnginePreExit();
// Handler for when the active viewport back buffer is about to be resized
void OnActiveViewportBackbufferPreResize(void* Backbuffer);
// Handler for when the back buffer is read to present to the end user
void OnActiveViewportBackbufferReadyToPresent(SWindow& Window, const FTextureRHIRef& Backbuffer);
/** Returns the render resource backing 'VideoTexture', if any */
FTextureResource* GetVideoTextureResource() const;
/** ISubmixBufferListener implementation: invoked by the audio mixer with captured submix buffers */
virtual void OnNewSubmixBuffer(const USoundSubmix* OwningSubmix, float* AudioData, int32 NumSamples, int32 NumChannels, const int32 SampleRate, double AudioClock) override final;
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 4)) // 5.4 or later
/** ISubmixBufferListener implementation (UE 5.4+): names this listener for engine diagnostics */
virtual const FString& GetListenerName() const override final;
#endif
private:
bool bIsInitialized = false;
bool bIsAudioInitialized = false;
bool bIsBroadcastingActiveViewport = false;
bool bIsInPIEMode = false;
/** Guards 'SubmixSendAudioFrameEvents' */
static FCriticalSection AudioSyncContext;
// NOTE(review): presumably guards render-thread-facing state (e.g. VideoTexture use) -- confirm in the .cpp
static FCriticalSection RenderSyncContext;
/** Render target receiving the broadcast video; non-owning raw pointer */
UTextureRenderTarget2D* VideoTexture = nullptr;
/** Sender used for the active viewport broadcast; non-owning raw pointer */
class UNDIMediaSender* ActiveViewportSender = nullptr;
};

View File

@@ -1,62 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <HAL/Runnable.h>
#include <HAL/ThreadSafeBool.h>
#include <Structures/NDIConnectionInformation.h>
/**
A Runnable object used for Finding NDI network Sources, and updating interested parties
*/
class NDIIO_API FNDIFinderService : public FRunnable
{
public:
/** Constructs a new instance of this service */
FNDIFinderService();
// Begin the service
virtual bool Start();
// Stop the service
virtual void Shutdown();
public:
/** Get the available sources on the network */
static const TArray<FNDIConnectionInformation> GetNetworkSourceCollection();
/** Call to update an existing collection of network sources to match the current collection */
static bool UpdateSourceCollection(TArray<FNDIConnectionInformation>& InSourceCollection);
/** Event which is triggered when the collection of network sources has changed */
DECLARE_EVENT(FNDICoreDelegates, FNDISourceCollectionChangedEvent)
static FNDISourceCollectionChangedEvent EventOnNDISourceCollectionChanged;
protected:
/** FRunnable Interface implementation for 'Init' */
virtual bool Init() override;
/** FRunnable Interface implementation for 'Stop' */
virtual void Stop() override;
/** FRunnable Interface implementation for 'Run' */
virtual uint32 Run() override;
private:
// Refreshes 'NetworkSourceCollection' from the network; presumably returns whether it changed -- TODO confirm in the .cpp
bool UpdateNetworkSourceCollection();
private:
bool bShouldWaitOneFrame = true;
// Set when the shared collection has changed and listeners should be notified
bool bIsNetworkSourceCollectionDirty = false;
/** True while the finder worker thread should keep running */
FThreadSafeBool bIsThreadRunning;
/** Worker thread executing Run() */
FRunnableThread* p_RunnableThread = nullptr;
/** Most recently discovered NDI sources; shared by all instances of the service */
static TArray<FNDIConnectionInformation> NetworkSourceCollection;
};

View File

@@ -1,61 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Misc/FrameRate.h>
#include "NDIBroadcastConfiguration.generated.h"
/**
Describes essential properties used for modifying the broadcast configuration of an Sender object
*/
USTRUCT(BlueprintType, Blueprintable, Category = "NDI IO", META = (DisplayName = "NDI Broadcast Configuration"))
struct NDIIO_API FNDIBroadcastConfiguration
{
GENERATED_USTRUCT_BODY()
public:
/** Describes the output frame size while sending video frame over NDI */
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Broadcast Settings", META = (DisplayName = "Frame Size"))
FIntPoint FrameSize = FIntPoint(1920, 1080);
/** Represents the desired number of frames (per second) for video to be sent over NDI */
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Broadcast Settings", META = (DisplayName = "Frame Rate"))
FFrameRate FrameRate = FFrameRate(60, 1);
public:
/** Constructs a new instance of this object */
FNDIBroadcastConfiguration() = default;
/** Copies an existing instance to this object */
FNDIBroadcastConfiguration(const FNDIBroadcastConfiguration& other);
/** Copies existing instance properties to this object */
FNDIBroadcastConfiguration& operator=(const FNDIBroadcastConfiguration& other);
/** Destructs this object */
virtual ~FNDIBroadcastConfiguration() = default;
/** Compares this object to 'other' and returns a determination of whether they are equal */
bool operator==(const FNDIBroadcastConfiguration& other) const;
/** Compares this object to 'other' and returns a determination of whether they are NOT equal */
bool operator!=(const FNDIBroadcastConfiguration& other) const;
protected:
/** Attempts to serialize this object using an Archive object */
virtual FArchive& Serialize(FArchive& Ar);
private:
/** Operator override for serializing this object to an Archive object (delegates to the virtual Serialize) */
friend class FArchive& operator<<(FArchive& Ar, FNDIBroadcastConfiguration& Input)
{
return Input.Serialize(Ar);
}
};

View File

@@ -1,92 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <NDIIOPluginAPI.h>
#include <Enumerations/NDISourceBandwidth.h>
#include <Serialization/Archive.h>
#include "NDIConnectionInformation.generated.h"
/**
Describes essential properties used for connection objects over NDI
*/
USTRUCT(BlueprintType, Blueprintable, Category = "NDI IO", META = (DisplayName = "NDI Connection Information"))
struct NDIIO_API FNDIConnectionInformation
{
GENERATED_USTRUCT_BODY()
public:
/** A user-friendly name of the source */
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Properties", META = (DisplayName = "Source Name"))
FString SourceName = FString("");
/** The machine name of the source */
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Properties", META = (DisplayName = "Machine Name"))
FString MachineName = FString("");
/** The stream name of the source */
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Properties", META = (DisplayName = "Stream Name"))
FString StreamName = FString("");
/** A location on the network for which this source exists */
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Properties", META = (DisplayName = "Url"))
FString Url = FString("");
/** Indicates the current bandwidth mode used for this connection */
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Properties", META = (DisplayName = "Bandwidth"))
ENDISourceBandwidth Bandwidth = ENDISourceBandwidth::Highest;
/** Whether audio from this connection should be muted */
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Properties", META = (DisplayName = "Mute Audio"))
bool bMuteAudio = false;
/** Whether video from this connection should be muted */
UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Properties", META = (DisplayName = "Mute Video"))
bool bMuteVideo = false;
public:
/** Constructs a new instance of this object */
FNDIConnectionInformation() = default;
/** Copies an existing instance to this object */
FNDIConnectionInformation(const FNDIConnectionInformation& other);
/** Copies existing instance properties to this object */
FNDIConnectionInformation& operator=(const FNDIConnectionInformation& other);
/** Destructs this object */
virtual ~FNDIConnectionInformation() = default;
/** Implicit conversion to a base NDI bandwidth value */
operator NDIlib_recv_bandwidth_e() const;
/** Compares this object to 'other' and returns a determination of whether they are equal */
bool operator==(const FNDIConnectionInformation& other) const;
/** Compares this object to 'other' and returns a determination of whether they are NOT equal */
bool operator!=(const FNDIConnectionInformation& other) const;
public:
/** Resets the current parameters to the default property values */
void Reset();
/** Determines whether this object is valid connection information */
bool IsValid() const;
// Returns the NDI-formatted name of this source; presumably combines machine and stream name -- TODO confirm in the .cpp
FString GetNDIName() const;
protected:
/** Attempts to serialize this object using an Archive object */
virtual FArchive& Serialize(FArchive& Ar);
private:
/** Operator override for serializing this object to an Archive object (delegates to the virtual Serialize) */
friend class FArchive& operator<<(FArchive& Ar, FNDIConnectionInformation& Input)
{
return Input.Serialize(Ar);
}
};

View File

@@ -1,97 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <NDIIOPluginAPI.h>
#include <Serialization/Archive.h>
#include "NDIReceiverPerformanceData.generated.h"
/**
A structure holding data allowing you to determine the current performance levels of the receiver with the
ability to detect whether frames has been dropped
*/
USTRUCT(BlueprintType, Blueprintable, Category = "NDI IO", META = (DisplayName = "NDI Receiver Performance Data"))
struct NDIIO_API FNDIReceiverPerformanceData
{
	GENERATED_USTRUCT_BODY()

	// Review note: 'Dropped Video Frames' previously used the misspelled specifier
	// 'BlueprintReadonly'; normalized to 'BlueprintReadOnly' to match the sibling properties.

public:
	/**
		The number of audio frames received from the NDI sender
	*/
	UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information", META = (DisplayName = "Audio Frames"))
	int64 AudioFrames = 0;

	/**
		The number of video frames dropped in transit from an NDI sender
	*/
	UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information",
			  META = (DisplayName = "Dropped Video Frames"))
	int64 DroppedVideoFrames = 0;

	/**
		The number of audio frames dropped in transit from the NDI sender
	*/
	UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information",
			  META = (DisplayName = "Dropped Audio Frames"))
	int64 DroppedAudioFrames = 0;

	/**
		The number of metadata frames dropped in transit from the NDI sender
	*/
	UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information",
			  META = (DisplayName = "Dropped Metadata Frames"))
	int64 DroppedMetadataFrames = 0;

	/**
		The number of metadata frames received from the NDI sender
	*/
	UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information", META = (DisplayName = "Metadata Frames"))
	int64 MetadataFrames = 0;

	/**
		The number of video frames received from the NDI sender
	*/
	UPROPERTY(BlueprintReadOnly, VisibleAnywhere, Category = "Information", META = (DisplayName = "Video Frames"))
	int64 VideoFrames = 0;

public:
	/** Constructs a new instance of this object */
	FNDIReceiverPerformanceData() = default;
	/** Copies an existing instance to this object */
	FNDIReceiverPerformanceData(const FNDIReceiverPerformanceData& other);
	/** Copies existing instance properties to this object */
	FNDIReceiverPerformanceData& operator=(const FNDIReceiverPerformanceData& other);
	/** Destructs this object */
	virtual ~FNDIReceiverPerformanceData() = default;
	/** Compares this object to 'other' and returns a determination of whether they are equal */
	bool operator==(const FNDIReceiverPerformanceData& other) const;
	/** Compares this object to 'other' and returns a determination of whether they are NOT equal */
	bool operator!=(const FNDIReceiverPerformanceData& other) const;

public:
	/** Resets the current parameters to the default property values */
	void Reset();

protected:
	/** Attempts to serialize this object using an Archive object */
	virtual FArchive& Serialize(FArchive& Ar);

private:
	/** Operator override for serializing this object to an Archive object (delegates to the virtual Serialize) */
	friend class FArchive& operator<<(FArchive& Ar, FNDIReceiverPerformanceData& Input)
	{
		return Input.Serialize(Ar);
	}
};

View File

@@ -1,131 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <FastXml.h>
/**
	Base class for per-element parsers driven by NDIXmlParser. The default
	implementation accepts every open/attribute/close event and requests no
	parser for sub-elements (so nested content is ignored).
*/
class NDIXmlElementParser
{
public:
virtual ~NDIXmlElementParser()
{}
// Start parsing this element (returning false aborts the FastXml parse)
virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData)
{
return true;
}
// Parse an attribute of this element
virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue)
{
return true;
}
// Start parsing a sub-element; return a parser to handle it, or nullptr to have the sub-tree ignored
virtual TSharedRef<NDIXmlElementParser>* ProcessElement(const TCHAR* ElementName, const TCHAR* ElementData)
{
return nullptr;
}
// Finish parsing this element
virtual bool ProcessClose(const TCHAR* ElementName)
{
return true;
}
};
/** Parser that inherits all default (accept-and-ignore) behavior; used as the fallback when no parser matches an element */
class NDIXmlElementParser_null : public NDIXmlElementParser
{
public:
};
class NDIXmlParser : public IFastXmlCallback
{
public:
virtual ~NDIXmlParser()
{}
void AddElementParser(FName ElementName, TSharedRef<NDIXmlElementParser> ElementParser)
{
ElementParsers.Add(ElementName, ElementParser);
}
virtual bool ProcessXmlDeclaration(const TCHAR* ElementData, int32 XmlFileLineNumber) override
{
return true;
}
virtual bool ProcessElement(const TCHAR* ElementName, const TCHAR* ElementData, int32 XmlFileLineNumber) override
{
if(ElementParserStack.Num() == 0)
{
TSharedRef<NDIXmlElementParser>* ParserPtr = ElementParsers.Find(ElementName);
if(ParserPtr == nullptr)
ParserPtr = &NullParser;
ElementParserStack.Push(*ParserPtr);
return (*ParserPtr)->ProcessOpen(ElementName, ElementData);
}
else
{
TSharedRef<NDIXmlElementParser>* ParserPtr = ElementParserStack.Last()->ProcessElement(ElementName, ElementData);
if(ParserPtr == nullptr)
ParserPtr = &NullParser;
ElementParserStack.Push(*ParserPtr);
return (*ParserPtr)->ProcessOpen(ElementName, ElementData);
}
//return false;
}
virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override
{
if(ElementParserStack.Num() == 0)
{
return true;
}
else
{
return ElementParserStack.Last()->ProcessAttribute(AttributeName, AttributeValue);
}
//return false;
}
virtual bool ProcessClose(const TCHAR* ElementName) override
{
if(ElementParserStack.Num() == 0)
{
return true;
}
else
{
auto Parser = ElementParserStack.Pop();
return Parser->ProcessClose(ElementName);
}
//return false;
}
virtual bool ProcessComment(const TCHAR* Comment) override
{
return true;
}
protected:
TMap<FName, TSharedRef<NDIXmlElementParser> > ElementParsers;
TArray<TSharedRef<NDIXmlElementParser> > ElementParserStack;
TSharedRef<NDIXmlElementParser> NullParser { MakeShareable(new NDIXmlElementParser_null()) };
};

View File

@@ -1,36 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Factories/NDIMediaReceiverFactory.h>
#include <AssetTypeCategories.h>
#include <Objects/Media/NDIMediaReceiver.h>
#define LOCTEXT_NAMESPACE "NDIIOEditorMediaReceiverFactory"
// Configures this factory to create new UNDIMediaReceiver assets from the editor UI.
UNDIMediaReceiverFactory::UNDIMediaReceiverFactory(const FObjectInitializer& ObjectInitializer)
	: Super(ObjectInitializer)
{
	bCreateNew = true;
	bEditAfterNew = true;
	SupportedClass = UNDIMediaReceiver::StaticClass();
}
// Returns the localized name displayed for this factory in the editor.
FText UNDIMediaReceiverFactory::GetDisplayName() const
{
	return LOCTEXT("NDIMediaReceiverFactoryDisplayName", "NDI Media Receiver");
}
// Places assets created by this factory under the "Media" asset category.
uint32 UNDIMediaReceiverFactory::GetMenuCategories() const { return EAssetTypeCategories::Media; }
// Instantiates a new, transactional UNDIMediaReceiver asset of the requested class.
UObject* UNDIMediaReceiverFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn)
{
	UNDIMediaReceiver* NewReceiver = NewObject<UNDIMediaReceiver>(InParent, InClass, InName, Flags | RF_Transactional);
	return NewReceiver;
}
#undef LOCTEXT_NAMESPACE

View File

@@ -1,33 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Factories/NDIMediaSenderFactory.h>
#include <AssetTypeCategories.h>
#include <Objects/Media/NDIMediaSender.h>
#define LOCTEXT_NAMESPACE "NDIIOEditorMediaSenderFactory"
// Configures this factory to create new UNDIMediaSender assets from the editor UI.
UNDIMediaSenderFactory::UNDIMediaSenderFactory(const FObjectInitializer& ObjectInitializer)
	: Super(ObjectInitializer)
{
	bCreateNew = true;
	bEditAfterNew = true;
	SupportedClass = UNDIMediaSender::StaticClass();
}
// Returns the localized name displayed for this factory in the editor.
FText UNDIMediaSenderFactory::GetDisplayName() const
{
	return LOCTEXT("NDIMediaSenderFactoryDisplayName", "NDI Media Sender");
}
// Places assets created by this factory under the "Media" asset category.
uint32 UNDIMediaSenderFactory::GetMenuCategories() const
{
	return EAssetTypeCategories::Media;
}
// Instantiates a new, transactional UNDIMediaSender asset of the requested class.
UObject* UNDIMediaSenderFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn)
{
	UNDIMediaSender* NewSender = NewObject<UNDIMediaSender>(InParent, InClass, InName, Flags | RF_Transactional);
	return NewSender;
}
#undef LOCTEXT_NAMESPACE

View File

@@ -1,33 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Factories/NDIMediaSoundWaveFactory.h>
#include <AssetTypeCategories.h>
#include <Objects/Media/NDIMediaSoundWave.h>
#define LOCTEXT_NAMESPACE "NDIIOEditorMediaSoundWaveFactory"
// Configures this factory to create new UNDIMediaSoundWave assets from the editor UI.
UNDIMediaSoundWaveFactory::UNDIMediaSoundWaveFactory(const FObjectInitializer& ObjectInitializer)
	: Super(ObjectInitializer)
{
	bCreateNew = true;
	bEditAfterNew = true;
	SupportedClass = UNDIMediaSoundWave::StaticClass();
}
// Returns the localized name displayed for this factory in the editor.
FText UNDIMediaSoundWaveFactory::GetDisplayName() const
{
	return LOCTEXT("NDIMediaSoundWaveFactoryDisplayName", "NDI Media Sound Wave");
}
// Places assets created by this factory under the "Sounds" asset category.
uint32 UNDIMediaSoundWaveFactory::GetMenuCategories() const
{
	return EAssetTypeCategories::Sounds;
}
// Instantiates a new, transactional UNDIMediaSoundWave asset.
// Fix: forward 'InClass' to NewObject (it was previously ignored), matching the
// sibling NDI factories so registered subclasses of UNDIMediaSoundWave are honored.
UObject* UNDIMediaSoundWaveFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn)
{
	return NewObject<UNDIMediaSoundWave>(InParent, InClass, InName, Flags | RF_Transactional);
}
#undef LOCTEXT_NAMESPACE

View File

@@ -1,40 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Factories/NDIMediaTexture2DFactory.h>
#include <AssetTypeCategories.h>
#include <Objects/Media/NDIMediaTexture2D.h>
#include <Misc/EngineVersionComparison.h>
#define LOCTEXT_NAMESPACE "NDIIOEditorMediaSoundWaveFactory"
// Configures this factory to create new UNDIMediaTexture2D assets from the editor UI.
UNDIMediaTexture2DFactory::UNDIMediaTexture2DFactory(const FObjectInitializer& ObjectInitializer)
	: Super(ObjectInitializer)
{
	bCreateNew = true;
	bEditAfterNew = true;
	SupportedClass = UNDIMediaTexture2D::StaticClass();
}
// Returns the localized name displayed for this factory in the editor.
FText UNDIMediaTexture2DFactory::GetDisplayName() const
{
	return LOCTEXT("NDIMediaTexture2DFactoryDisplayName", "NDI Media Texture2D");
}
// Places assets created by this factory under the "Textures" asset category.
uint32 UNDIMediaTexture2DFactory::GetMenuCategories() const
{
	return EAssetTypeCategories::Textures;
}
// Instantiates a new, transactional UNDIMediaTexture2D asset.
// Fix: forward 'InClass' to NewObject (it was previously ignored), matching the
// sibling NDI factories so registered subclasses of UNDIMediaTexture2D are honored.
UObject* UNDIMediaTexture2DFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn)
{
	if (UNDIMediaTexture2D* Resource = NewObject<UNDIMediaTexture2D>(InParent, InClass, InName, Flags | RF_Transactional))
	{
		// Build the render resource immediately so the asset is usable as soon as it is created
		Resource->UpdateResource();
		return Resource;
	}
	return nullptr;
}
#undef LOCTEXT_NAMESPACE

View File

@@ -1,136 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <NDIIOEditorModule.h>
#include <Editor.h>
#include <PropertyEditorModule.h>
#include <IPlacementModeModule.h>
#include <Interfaces/IPluginManager.h>
#include <Styling/SlateStyleRegistry.h>
#include <Actors/NDIReceiveActor.h>
#include <Actors/NDIBroadcastActor.h>
#include <Framework/Application/SlateApplication.h>
#include <Misc/EngineVersionComparison.h>
#include "Widgets/NDIWidgets.h"
#define LOCTEXT_NAMESPACE "FNDIEditorModule"
#define IMAGE_BRUSH(RelativePath, ...) FSlateImageBrush(StyleInstance->RootToContentDir(RelativePath, TEXT(".png")), __VA_ARGS__)
#define PLACEMENT_CATEGORY TEXT("NDI(R)")
#define PLACEMENT_LOCTEXT NSLOCTEXT("Vizrt", "NDI", "NDI(R)")
#define PLACEMENT_TEXT TEXT("PMNDI")
// Registers the editor-side integration: the Slate style set providing class
// icons/thumbnails for the NDI asset types, an "NDI(R)" placement-mode category
// with its placeable actors, and the custom details customization for
// FNDIConnectionInformation properties.
void FNDIIOEditorModule::StartupModule()
{
// Category name is used both for registration below and for the later lookup
const FName& CategoryName = PLACEMENT_CATEGORY;
IPlacementModeModule& PlacementModeModule = IPlacementModeModule::Get();
const FVector2D Icon20x20(20.0f, 20.0f);
const FVector2D Icon64x64(64.0f, 64.0f);
this->StyleInstance = MakeUnique<FSlateStyleSet>("NDIEditorStyle");
// Styling and the placement category are only registered when the plugin's content can be located
if (IPlugin* NDIIOPlugin = IPluginManager::Get().FindPlugin("NDIIOPlugin").Get())
{
// Icons live in the plugin's Content/Editor/Icons folder
StyleInstance->SetContentRoot(FPaths::Combine(NDIIOPlugin->GetContentDir(), TEXT("Editor/Icons")));
StyleInstance->Set("ClassThumbnail.NDIBroadcastActor", new IMAGE_BRUSH("NDIBroadcastActorIcon_x64", Icon64x64));
StyleInstance->Set("ClassIcon.NDIBroadcastActor", new IMAGE_BRUSH("NDIBroadcastActorIcon_x20", Icon20x20));
StyleInstance->Set("ClassThumbnail.NDIReceiveActor", new IMAGE_BRUSH("NDIReceiveActorIcon_x64", Icon64x64));
StyleInstance->Set("ClassIcon.NDIReceiveActor", new IMAGE_BRUSH("NDIReceiveActorIcon_x20", Icon20x20));
StyleInstance->Set("ClassThumbnail.NDIMediaReceiver", new IMAGE_BRUSH("NDIReceiverIcon_x64", Icon64x64));
StyleInstance->Set("ClassIcon.NDIMediaReceiver", new IMAGE_BRUSH("NDIReceiverIcon_x20", Icon20x20));
StyleInstance->Set("ClassThumbnail.NDIMediaSender", new IMAGE_BRUSH("NDISenderIcon_x64", Icon64x64));
StyleInstance->Set("ClassIcon.NDIMediaSender", new IMAGE_BRUSH("NDISenderIcon_x20", Icon20x20));
StyleInstance->Set("ClassThumbnail.NDIMediaSoundWave", new IMAGE_BRUSH("NDISoundWaveIcon_x64", Icon64x64));
StyleInstance->Set("ClassIcon.NDIMediaSoundWave", new IMAGE_BRUSH("NDISoundWaveIcon_x20", Icon20x20));
StyleInstance->Set("ClassThumbnail.NDIMediaTexture2D", new IMAGE_BRUSH("NDIVideoTextureIcon_x64", Icon64x64));
StyleInstance->Set("ClassIcon.NDIMediaTexture2D", new IMAGE_BRUSH("NDIVideoTextureIcon_x20", Icon20x20));
FSlateStyleRegistry::RegisterSlateStyle(*StyleInstance.Get());
// Add a dedicated "NDI(R)" category to the editor's placement panel
PlacementModeModule.RegisterPlacementCategory(
FPlacementCategoryInfo(
PLACEMENT_LOCTEXT,
CategoryName,
PLACEMENT_TEXT,
41, // FBuiltInPlacementCategories::Volumes() == 40
true
)
);
}
// Get the Registered Placement Category
// (null when the plugin content wasn't found and the category was never registered)
if (const FPlacementCategoryInfo* PlacementCategoryInformation = PlacementModeModule.GetRegisteredPlacementCategory(CategoryName))
{
// Register the NDI Broadcast Actor a placeable item within the editor
PlacementModeModule.RegisterPlaceableItem(PlacementCategoryInformation->UniqueHandle, MakeShareable(
new FPlaceableItem(
*UActorFactory::StaticClass(),
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later
FAssetData(GetDefault<ANDIBroadcastActor>()),
#else
FAssetData(ANDIBroadcastActor::StaticClass()->ClassDefaultObject),
#endif
FName("ClassThumbnail.NDIBroadcastActor"),
NAME_None,
TOptional<FLinearColor>(),
10,
NSLOCTEXT("Vizrt", "NDIBroadcastActor", "NDI Broadcast Actor")
))
);
// Register the NDI Receive Actor a placeable item within the editor
PlacementModeModule.RegisterPlaceableItem(PlacementCategoryInformation->UniqueHandle, MakeShareable(
new FPlaceableItem(
*UActorFactory::StaticClass(),
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later
FAssetData(GetDefault<ANDIReceiveActor>()),
#else
FAssetData(ANDIReceiveActor::StaticClass()->ClassDefaultObject),
#endif
FName("ClassThumbnail.NDIReceiveActor"),
NAME_None,
TOptional<FLinearColor>(),
20,
NSLOCTEXT("Vizrt", "NDIReceiveActor", "NDI Receive Actor")
))
);
}
// Use the custom layout for FNDIConnectionInformation wherever it appears in details panels
FPropertyEditorModule& PropertyModule = FModuleManager::LoadModuleChecked<FPropertyEditorModule>("PropertyEditor");
PropertyModule.RegisterCustomPropertyTypeLayout(FNDIConnectionInformation::StaticStruct()->GetFName(), FOnGetPropertyTypeCustomizationInstance::CreateStatic(&FNDIConnectionInformationCustomization::MakeInstance));
}
void FNDIIOEditorModule::ShutdownModule()
{
FPropertyEditorModule& PropertyModule = FModuleManager::LoadModuleChecked<FPropertyEditorModule>("PropertyEditor");
PropertyModule.UnregisterCustomPropertyTypeLayout(FNDIConnectionInformation::StaticStruct()->GetFName());
FSlateStyleRegistry::UnRegisterSlateStyle(*StyleInstance.Get());
StyleInstance.Reset();
IPlacementModeModule& PlacementModeModule = IPlacementModeModule::Get();
PlacementModeModule.UnregisterPlacementCategory(PLACEMENT_CATEGORY);
}
#undef PLACEMENT_CATEGORY
#undef PLACEMENT_LOCTEXT
#undef PLACEMENT_TEXT
#undef IMAGE_BRUSH
#undef LOCTEXT_NAMESPACE

View File

@@ -1,380 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Widgets/NDIWidgets.h>
#include <Services/NDIFinderService.h>
#include <DetailLayoutBuilder.h>
#include <DetailWidgetRow.h>
#include <Editor.h>
#include <IDetailChildrenBuilder.h>
#include <IPropertyUtilities.h>
#include <PropertyHandle.h>
#include <Framework/MultiBox/MultiBoxBuilder.h>
#include <Widgets/Input/SComboButton.h>
#include <atomic>
#define LOCTEXT_NAMESPACE "UNDIWidgets"
/**
Organizes NDI sources into a tree
*/
struct FNDISourceTreeItem
{
// Child nodes: for the root, one node per machine; for a machine node, one node per stream
TArray<TSharedRef<FNDISourceTreeItem> > Children;
// The NDI source this (leaf) node represents; default-constructed for non-leaf nodes
FNDIConnectionInformation NDISource;
// Placeholder text shown when this node carries no source (e.g. the "Searching..." entry)
FText DisplayText;
// UI state preserved across tree rebuilds
bool IsExpanded { false };
bool IsSelected { false };
FNDISourceTreeItem()
{}
// Constructs a placeholder node showing only text
FNDISourceTreeItem(const FText& DisplayTextIn)
: DisplayText(DisplayTextIn)
{}
// Constructs a leaf node for a discovered NDI source
FNDISourceTreeItem(const FNDIConnectionInformation& Source)
: NDISource(Source)
{}
// Constructs an intermediate (machine) node wrapping a single child
FNDISourceTreeItem(TSharedRef<FNDISourceTreeItem>&& Child)
{
Children.Add(Child);
}
// Finds the machine node in 'RootNode' whose streams belong to the same machine as
// 'SourceItem' (matched by MachineName, falling back to Url). Machine identity is read
// from a node's first child, so empty machine nodes never match. Returns null when absent.
static const TSharedRef<FNDISourceTreeItem>* FindMachineNode(const FNDISourceTreeItem& RootNode, const FNDIConnectionInformation& SourceItem)
{
const TSharedRef<FNDISourceTreeItem>* MachineNode = nullptr;
if(!SourceItem.MachineName.IsEmpty())
{
const FString& SearchName = SourceItem.MachineName;
MachineNode = RootNode.Children.FindByPredicate([&SearchName](const TSharedRef<FNDISourceTreeItem>& Child)
{
if(Child->Children.Num() > 0)
return Child->Children[0]->NDISource.MachineName == SearchName;
else
return false;
});
}
else if(!SourceItem.Url.IsEmpty())
{
const FString& SearchName = SourceItem.Url;
MachineNode = RootNode.Children.FindByPredicate([&SearchName](const TSharedRef<FNDISourceTreeItem>& Child)
{
if(Child->Children.Num() > 0)
return Child->Children[0]->NDISource.Url == SearchName;
else
return false;
});
}
return MachineNode;
}
// Finds the stream (leaf) node under 'MachineNode' matching 'SourceItem'
// (by StreamName, falling back to Url). Returns null when absent.
static const TSharedRef<FNDISourceTreeItem>* FindStreamNodeInMachineNode(const TSharedRef<FNDISourceTreeItem>& MachineNode, const FNDIConnectionInformation& SourceItem)
{
const TSharedRef<FNDISourceTreeItem>* StreamNode = nullptr;
if(!SourceItem.StreamName.IsEmpty())
{
const FString& SearchName = SourceItem.StreamName;
StreamNode = MachineNode->Children.FindByPredicate([&SearchName](const TSharedRef<FNDISourceTreeItem>& Child)
{
return Child->NDISource.StreamName == SearchName;
});
}
else if(!SourceItem.Url.IsEmpty())
{
const FString& SearchName = SourceItem.Url;
StreamNode = MachineNode->Children.FindByPredicate([&SearchName](const TSharedRef<FNDISourceTreeItem>& Child)
{
return Child->NDISource.Url == SearchName;
});
}
return StreamNode;
}
// Rebuilds this (root) node's machine/stream tree from a flat source list, carrying
// over expansion/selection state from the current tree where nodes still match.
// When no sources exist, a single placeholder node with 'SearchingTxt' is inserted.
// 'StartExpanded' controls the initial expansion of machines not seen before.
void SetFromSources(const TArray<FNDIConnectionInformation>& SourceItems, const FText& SearchingTxt, bool StartExpanded)
{
FNDISourceTreeItem RootNode;
//
// Build new tree
//
for(int32 i = 0; i < SourceItems.Num(); ++i)
{
// Append the stream to its machine's node, creating the machine node on first sight
const TSharedRef<FNDISourceTreeItem>* MachineNode = FindMachineNode(RootNode, SourceItems[i]);
if(MachineNode != nullptr)
{
FNDISourceTreeItem* NewNode = new FNDISourceTreeItem(SourceItems[i]);
(*MachineNode)->Children.Add(MakeShareable(NewNode));
}
else
{
FNDISourceTreeItem* NewNode = new FNDISourceTreeItem(SourceItems[i]);
FNDISourceTreeItem* NewMachineNode = new FNDISourceTreeItem(MakeShareable(NewNode));
RootNode.Children.Add(MakeShareable(NewMachineNode));
}
}
//
// Preserve expansion and selection state by matching with old tree
//
for(int32 i = 0; i < RootNode.Children.Num(); ++i)
{
const TSharedRef<FNDISourceTreeItem>* OldMachineNode = FindMachineNode(*this, RootNode.Children[i]->Children[0]->NDISource);
if(OldMachineNode != nullptr)
{
RootNode.Children[i]->IsExpanded = (*OldMachineNode)->IsExpanded;
for(int32 j = 0; j < RootNode.Children[i]->Children.Num(); ++j)
{
const TSharedRef<FNDISourceTreeItem>* OldStreamNode = FindStreamNodeInMachineNode(*OldMachineNode, RootNode.Children[i]->Children[j]->NDISource);
if(OldStreamNode != nullptr)
{
RootNode.Children[i]->Children[j]->IsSelected = (*OldStreamNode)->IsSelected;
}
}
}
else
{
RootNode.Children[i]->IsExpanded = StartExpanded;
}
}
if(RootNode.Children.Num() == 0)
{
RootNode.Children.Add(MakeShareable(new FNDISourceTreeItem(SearchingTxt)));
}
//
// Set to new tree
//
// Replace our contents wholesale (safe: all lookups into the old tree are done above)
*this = RootNode;
}
};
/**
A menu widget containing NDI sources
*/
DECLARE_DELEGATE_OneParam(FOnSourceClicked, FNDIConnectionInformation);
class SNDISourcesMenu : public SCompoundWidget
{
public:
SLATE_BEGIN_ARGS(SNDISourcesMenu)
: _OnSourceClicked()
{}
SLATE_EVENT(FOnSourceClicked, OnSourceClicked)
SLATE_END_ARGS()
/** Default constructor; all widget state is set up in Construct() */
SNDISourcesMenu()
{}
virtual ~SNDISourcesMenu()
{
// Stop listening for source-collection changes before this widget is destroyed
FNDIFinderService::EventOnNDISourceCollectionChanged.Remove(SourceCollectionChangedEventHandle);
SourceCollectionChangedEventHandle.Reset();
}
// Builds the combo button whose drop-down lists the currently discovered NDI
// sources, and subscribes to finder-service updates so the menu stays current.
void Construct(const FArguments& InArgs)
{
OnSourceClicked = InArgs._OnSourceClicked;
ChildSlot
[
SNew(SComboButton)
.ButtonContent()
[
SNew(STextBlock)
.Font(IDetailLayoutBuilder::GetDetailFont())
.ToolTipText(LOCTEXT("NDI Sources Tip", "Currently Available NDI Sources"))
.Text(LOCTEXT("NDI Sources", "NDI Sources"))
]
// Menu content is rebuilt from the cached source tree each time the button opens
.OnGetMenuContent_Lambda([this]() -> TSharedRef<SWidget>
{
FMenuBuilder MenuBuilder(true, nullptr);
for (const auto& Sources : SourceTreeItems.Children)
ConstructSourceMenu(MenuBuilder, Sources.Get());
return MenuBuilder.MakeWidget();
})
];
// Force an initial refresh on the next Tick
UpdateSources = true;
// Re-subscribe defensively (Construct may be called again on the same widget)
FNDIFinderService::EventOnNDISourceCollectionChanged.Remove(SourceCollectionChangedEventHandle);
SourceCollectionChangedEventHandle.Reset();
SourceCollectionChangedEventHandle = FNDIFinderService::EventOnNDISourceCollectionChanged.AddLambda([this]()
{
UpdateSources = true;
});
}
// Per-frame update: pulls the latest source list when flagged, animates the
// "Searching..." placeholder while no sources exist, and rebuilds the source
// tree (invalidating the widget) only when something actually changed.
virtual void Tick(const FGeometry& AllottedGeometry, const double CurrentTime, const float DeltaTime) override
{
bool IsDifferent = false;
// Atomically consume the update flag set by the finder-service callback
if (UpdateSources.exchange(false))
{
IsDifferent = FNDIFinderService::UpdateSourceCollection(SourceItems);
}
if (SourceItems.Num() == 0)
{
// Animate "Searching" -> "Searching..." over each second using the fractional time
FText NewSearchingTxt;
double WholeTime = 0.0;
double FracTime = FMath::Modf(CurrentTime, &WholeTime);
if(FracTime < 1/4.0)
NewSearchingTxt = FText(LOCTEXT("NDI Sources Searching0", "Searching"));
else if(FracTime < 2/4.0)
NewSearchingTxt = FText(LOCTEXT("NDI Sources Searching1", "Searching."));
else if(FracTime < 3/4.0)
NewSearchingTxt = FText(LOCTEXT("NDI Sources Searching2", "Searching.."));
else
NewSearchingTxt = FText(LOCTEXT("NDI Sources Searching3", "Searching..."));
if(!NewSearchingTxt.EqualTo(SearchingTxt))
{
SearchingTxt = NewSearchingTxt;
IsDifferent = true;
}
}
if (IsDifferent)
{
// Rebuild the displayed tree and repaint only when the sources or placeholder changed
SourceTreeItems.SetFromSources(SourceItems, SearchingTxt, false);
Invalidate(EInvalidateWidgetReason::PaintAndVolatility | EInvalidateWidgetReason::ChildOrder);
}
SCompoundWidget::Tick(AllottedGeometry, CurrentTime, DeltaTime);
}
protected:
void ConstructSourceMenu(FMenuBuilder& MenuBuilder, const FNDISourceTreeItem& SourceTreeItem)
{
if (SourceTreeItem.NDISource.IsValid())
{
MenuBuilder.AddMenuEntry(
FText::FromString(SourceTreeItem.NDISource.StreamName),
FText::GetEmpty(),
FSlateIcon(),
FUIAction(FExecuteAction::CreateLambda([this,&SourceTreeItem]()
{
this->OnSourceClicked.ExecuteIfBound(SourceTreeItem.NDISource);
})),
NAME_None,
EUserInterfaceActionType::Button
);
}
else if (SourceTreeItem.Children.Num() > 0)
{
MenuBuilder.AddSubMenu(
FText::FromString(SourceTreeItem.Children[0]->NDISource.MachineName),
FText::GetEmpty(),
FNewMenuDelegate::CreateLambda([this,&SourceTreeItem](FMenuBuilder& MenuBuilder)
{
for(const auto& ChildSource : SourceTreeItem.Children)
ConstructSourceMenu(MenuBuilder, ChildSource.Get());
})
);
}
else if (!SourceTreeItem.DisplayText.IsEmpty())
{
MenuBuilder.AddMenuEntry(
SourceTreeItem.DisplayText,
FText::GetEmpty(),
FSlateIcon(),
FUIAction(FExecuteAction::CreateLambda([this]
{
})),
NAME_None,
EUserInterfaceActionType::Button
);
}
}
private:
TArray<FNDIConnectionInformation> SourceItems;
FText SearchingTxt;
FNDISourceTreeItem SourceTreeItems;
FDelegateHandle SourceCollectionChangedEventHandle;
std::atomic_bool UpdateSources { false };
FOnSourceClicked OnSourceClicked;
};
/**
Customization of NDIConnectionInformation property
by including a menu to select from currently available NDI sources
*/
/**
	Factory entry point used when registering this customization with the
	property editor module. Allocates a fresh instance; ownership is
	transferred to the returned shared reference.
*/
TSharedRef<IPropertyTypeCustomization> FNDIConnectionInformationCustomization::MakeInstance()
{
	FNDIConnectionInformationCustomization* NewCustomization = new FNDIConnectionInformationCustomization();
	return MakeShareable(NewCustomization);
}
/**
	Customizes the header row of an FNDIConnectionInformation property:
	keeps the standard name widget, and replaces the value widget with an
	SNDISourcesMenu dropdown. Clicking a source clears the stored Url and
	writes the picked SourceName through the property system (so undo /
	notifications behave normally).
*/
void FNDIConnectionInformationCustomization::CustomizeHeader(TSharedRef<IPropertyHandle> PropertyHandle, FDetailWidgetRow& HeaderRow, IPropertyTypeCustomizationUtils& CustomizationUtils)
{
	HeaderRow.NameContent()
	[
		PropertyHandle->CreatePropertyNameWidget()
	]
	.ValueContent()
	[
		SNew(SNDISourcesMenu)
		.OnSourceClicked_Lambda([this,PropertyHandle](FNDIConnectionInformation Source)
		{
			TArray<void*> RawData;
			PropertyHandle->AccessRawData(RawData);
			// Fix: AccessRawData can legitimately produce an empty array (e.g. no
			// valid object selected); indexing RawData[0] unconditionally was an
			// out-of-bounds access in that case.
			if (RawData.Num() == 0)
			{
				return;
			}
			FNDIConnectionInformation* ConnectionInformation = reinterpret_cast<FNDIConnectionInformation*>(RawData[0]);
			if (ConnectionInformation != nullptr)
			{
				ConnectionInformation->Url = "";
				// Guard the child handle too: GetChildHandle returns a null pointer
				// if no child named "SourceName" exists.
				TSharedPtr<IPropertyHandle> SourceNameHandle = PropertyHandle->GetChildHandle("SourceName");
				if (SourceNameHandle.IsValid())
				{
					SourceNameHandle->SetValue(Source.SourceName);
				}
			}
		})
	].IsEnabled(true);
}
/**
	Adds every child property of the struct as a normal row. Each row is
	enabled only while the parent property is editable and property editing
	is enabled in the current details view.
*/
void FNDIConnectionInformationCustomization::CustomizeChildren(TSharedRef<IPropertyHandle> PropertyHandle, IDetailChildrenBuilder& ChildBuilder, IPropertyTypeCustomizationUtils& CustomizationUtils)
{
	TSharedPtr<IPropertyUtilities> PropertyUtils = CustomizationUtils.GetPropertyUtilities();
	uint32 NumberOfChild;
	if (PropertyHandle->GetNumChildren(NumberOfChild) == FPropertyAccess::Success)
	{
		for (uint32 Index = 0; Index < NumberOfChild; ++Index)
		{
			TSharedRef<IPropertyHandle> ChildPropertyHandle = PropertyHandle->GetChildHandle(Index).ToSharedRef();
			ChildBuilder.AddProperty(ChildPropertyHandle)
				.ShowPropertyButtons(true)
				// Explicit captures (instead of [=]) avoid implicitly capturing
				// 'this'; the attribute can outlive this customization object.
				// Fix: PropertyUtils is validated before dereferencing — the
				// original crashed if GetPropertyUtilities() returned null.
				.IsEnabled(MakeAttributeLambda([PropertyHandle, PropertyUtils] { return !PropertyHandle->IsEditConst() && PropertyUtils.IsValid() && PropertyUtils->IsPropertyEditingEnabled(); }));
		}
	}
}
#undef LOCTEXT_NAMESPACE

View File

@@ -1,92 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
using System;
using System.IO;
using UnrealBuildTool;
/// <summary>
/// Build rules for the NDI IO editor module. The module only pulls in its
/// editor dependencies when building an editor target.
/// </summary>
public class NDIIOEditor : ModuleRules
{
	public NDIIOEditor(ReadOnlyTargetRules Target) : base(Target)
	{
		// IWYU enforcement: the property was renamed in UE 5.2.
#if UE_5_2_OR_LATER
		IWYUSupport = IWYUSupport.Full;
#else
		bEnforceIWYU = true;
#endif
		PCHUsage = PCHUsageMode.UseExplicitOrSharedPCHs;

		#region Public Includes
		if (Directory.Exists(Path.Combine(ModuleDirectory, "Public")))
		{
			PublicIncludePaths.AddRange(new string[] {
				// ... add public include paths required here ...
				Path.Combine(ModuleDirectory, "Public" ),
			});
		}

		PublicDependencyModuleNames.AddRange(new string[] {
			"Engine",
			"Core",
			"CoreUObject"
		});
		#endregion

		// Editor-only dependencies; skipped entirely for non-editor targets.
		if (Target.bBuildEditor == true)
		{
			#region Private Includes
			if (Directory.Exists(Path.Combine(ModuleDirectory, "Private")))
			{
				PrivateIncludePaths.AddRange(new string[] {
					// ... add other private include paths required here ...
					Path.Combine(ModuleDirectory, "Private" ),
					Path.Combine(ModuleDirectory, "../Core/Private"),
				});
			}
			#endregion

			PrivateIncludePathModuleNames.AddRange(new string[] {
				"AssetTools",
				"TargetPlatform",
			});

			// Fix: "AssetTools" was previously listed twice in this array; each
			// dependency is now listed exactly once.
			PrivateDependencyModuleNames.AddRange(new string[] {
				"Projects",
				"UnrealEd",
				"AssetTools",
				"MaterialUtilities",
				"Renderer",
				"RenderCore",
				"PlacementMode",
				"CinematicCamera",
				"RHI",
				"Slate",
				"SlateCore",
				"UMG",
				"ImageWrapper",
				"Media",
				"MediaAssets",
				"MediaUtils",
				"TargetPlatform",
				"PropertyEditor",
				"DetailCustomizations",
				"EditorStyle",
				"NDIIO"
			});
		}
	}
}

View File

@@ -1,30 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Factories/Factory.h>
#include <UObject/Object.h>
#include "NDIMediaReceiverFactory.generated.h"
/**
Factory Class used to create assets via content browser for NDI Receiver objects
*/
UCLASS()
class NDIIOEDITOR_API UNDIMediaReceiverFactory : public UFactory
{
	GENERATED_UCLASS_BODY()
public:
	/** Display name shown in the editor's asset-creation UI. */
	virtual FText GetDisplayName() const override;
	/** Asset-menu categories this factory is registered under. */
	virtual uint32 GetMenuCategories() const override;
	/** Listed in the content browser's "Add New" menu. */
	virtual bool ShouldShowInNewMenu() const override { return true; }
	/** Creates a new NDI Media Receiver asset instance. */
	virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override;
};

View File

@@ -1,29 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Factories/Factory.h>
#include <UObject/Object.h>
#include "NDIMediaSenderFactory.generated.h"
/**
Factory Class used to create assets via content browser for NDI Sender objects
*/
UCLASS()
class NDIIOEDITOR_API UNDIMediaSenderFactory : public UFactory
{
	GENERATED_UCLASS_BODY()
public:
	/** Display name shown in the editor's asset-creation UI. */
	virtual FText GetDisplayName() const override;
	/** Asset-menu categories this factory is registered under. */
	virtual uint32 GetMenuCategories() const override;
	// NOTE(review): unlike UNDIMediaReceiverFactory, this factory does not
	// override ShouldShowInNewMenu() — confirm whether that omission is intended.
	/** Creates a new NDI Media Sender asset instance. */
	virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override;
};

View File

@@ -1,29 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Factories/Factory.h>
#include <UObject/Object.h>
#include "NDIMediaSoundWaveFactory.generated.h"
/**
Factory Class used to create assets via content browser for NDI Sound Wave objects
*/
UCLASS()
class NDIIOEDITOR_API UNDIMediaSoundWaveFactory : public UFactory
{
	GENERATED_UCLASS_BODY()
public:
	/** Display name shown in the editor's asset-creation UI. */
	virtual FText GetDisplayName() const override;
	/** Asset-menu categories this factory is registered under. */
	virtual uint32 GetMenuCategories() const override;
	/** Creates a new NDI Media Sound Wave asset instance. */
	virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override;
};

View File

@@ -1,29 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Factories/Factory.h>
#include <UObject/Object.h>
#include "NDIMediaTexture2DFactory.generated.h"
/**
Factory Class used to create assets via content browser for NDI Texture2D objects
*/
UCLASS()
class NDIIOEDITOR_API UNDIMediaTexture2DFactory : public UFactory
{
	GENERATED_UCLASS_BODY()
public:
	/** Display name shown in the editor's asset-creation UI. */
	virtual FText GetDisplayName() const override;
	/** Asset-menu categories this factory is registered under. */
	virtual uint32 GetMenuCategories() const override;
	/** Creates a new NDI Media Texture2D asset instance. */
	virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override;
};

View File

@@ -1,12 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#define NDIIO_EDITOR_MODULE_NAME FName(TEXT("NDIIOEditor"))

View File

@@ -1,25 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <Modules/ModuleManager.h>
#include <Styling/SlateStyle.h>
/** Module entry point for the NDI IO editor module. */
class NDIIOEDITOR_API FNDIIOEditorModule : public IModuleInterface
{
public:
	/** IModuleInterface startup hook (implementation lives in the module's .cpp). */
	virtual void StartupModule() override;
	/** IModuleInterface shutdown hook (implementation lives in the module's .cpp). */
	virtual void ShutdownModule() override;
private:
	// Slate style set owned for the lifetime of the module.
	TUniquePtr<FSlateStyleSet> StyleInstance;
};
// NOTE(review): IMPLEMENT_MODULE placed in a '#pragma once' header is unusual —
// including this header from more than one translation unit would produce
// duplicate module-registration symbols; confirm it is included exactly once.
IMPLEMENT_MODULE(FNDIIOEditorModule, NDIIOEditor)

View File

@@ -1,29 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <CoreMinimal.h>
#include <IPropertyTypeCustomization.h>
/**
Customization of NDIConnectionInformation property
by including a menu to select from currently available NDI sources
*/
class FNDIConnectionInformationCustomization : public IPropertyTypeCustomization
{
public:
	/** Factory used when registering this customization with the property editor module. */
	static TSharedRef<IPropertyTypeCustomization> MakeInstance();

	// IDetailCustomization interface
	/** Replaces the header row's value widget with an NDI source picker menu. */
	virtual void CustomizeHeader(TSharedRef<IPropertyHandle> PropertyHandle, FDetailWidgetRow& HeaderRow, IPropertyTypeCustomizationUtils& CustomizationUtils) override;
	/** Adds the struct's child properties as standard rows below the header. */
	virtual void CustomizeChildren(TSharedRef<IPropertyHandle> PropertyHandle, IDetailChildrenBuilder& ChildBuilder, IPropertyTypeCustomizationUtils& CustomizationUtils) override;
private:
};

View File

@@ -1,38 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
using System;
using System.IO;
using UnrealBuildTool;
/// <summary>
/// Build rules for the NDI IO shader module (global conversion shaders).
/// </summary>
public class NDIIOShaders : ModuleRules
{
	public NDIIOShaders(ReadOnlyTargetRules Target) : base(Target)
	{
		// IWYU enforcement: the property was renamed in UE 5.2.
#if UE_5_2_OR_LATER
		IWYUSupport = IWYUSupport.Full;
#else
		bEnforceIWYU = true;
#endif
		PCHUsage = PCHUsageMode.UseExplicitOrSharedPCHs;
		// Modules whose headers are part of this module's public surface.
		PublicDependencyModuleNames.AddRange(new string[] {
			"Engine",
			"Core",
			"CoreUObject",
			"Projects",
			"InputCore"
		});
		// Rendering modules needed to implement the global shaders.
		PrivateDependencyModuleNames.AddRange(new string[] {
			"Renderer",
			"RenderCore",
			"RHI"
		});
	}
}

View File

@@ -1,114 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include "NDIShaders.h"
#include "Modules/ModuleManager.h"
#include "Interfaces/IPluginManager.h"
#include "Misc/Paths.h"
#include "Misc/EngineVersionComparison.h"
#include "Engine/TextureRenderTarget2D.h"
#include "Engine/World.h"
#include "PipelineStateCache.h"
#include "SceneUtils.h"
#include "SceneInterface.h"
// CPU-side layout of the "NDIIOShaderUB" uniform buffer consumed by the NDI IO
// conversion shaders (populated in FNDIIOShaderPS::SetParameters).
BEGIN_GLOBAL_SHADER_PARAMETER_STRUCT(FNDIIOShaderUB, )
SHADER_PARAMETER(uint32, InputWidth)                   // Input texture width in texels
SHADER_PARAMETER(uint32, InputHeight)                  // Input texture height in texels
SHADER_PARAMETER(uint32, OutputWidth)                  // Output target width in texels
SHADER_PARAMETER(uint32, OutputHeight)                 // Output target height in texels
SHADER_PARAMETER(FVector2f, UVOffset)                  // UV offset applied when sampling the input
SHADER_PARAMETER(FVector2f, UVScale)                   // UV scale applied when sampling the input
SHADER_PARAMETER(uint32, ColorCorrection)              // FNDIIOShaderPS::EColorCorrection value
SHADER_PARAMETER(float, AlphaScale)                    // Alpha remap: Alpha' = Alpha * AlphaScale + AlphaOffset
SHADER_PARAMETER(float, AlphaOffset)                   // (second half of the alpha remap above)
SHADER_PARAMETER_TEXTURE(Texture2D, InputTarget)       // Source color texture
SHADER_PARAMETER_TEXTURE(Texture2D, InputAlphaTarget)  // Optional separate alpha texture
SHADER_PARAMETER_SAMPLER(SamplerState, SamplerP)       // Point sampler
SHADER_PARAMETER_SAMPLER(SamplerState, SamplerB)       // Bilinear sampler
SHADER_PARAMETER_SAMPLER(SamplerState, SamplerT)       // Trilinear sampler
END_GLOBAL_SHADER_PARAMETER_STRUCT()
// Binds the struct above to its HLSL-side name.
IMPLEMENT_GLOBAL_SHADER_PARAMETER_STRUCT(FNDIIOShaderUB, "NDIIOShaderUB");
IMPLEMENT_GLOBAL_SHADER(FNDIIOShaderVS, "/Plugin/NDIIOPlugin/Private/NDIIOShaders.usf", "NDIIOMainVS", SF_Vertex);
IMPLEMENT_GLOBAL_SHADER(FNDIIOShaderBGRAtoUYVYPS, "/Plugin/NDIIOPlugin/Private/NDIIOShaders.usf", "NDIIOBGRAtoUYVYPS", SF_Pixel);
IMPLEMENT_GLOBAL_SHADER(FNDIIOShaderBGRAtoAlphaEvenPS, "/Plugin/NDIIOPlugin/Private/NDIIOShaders.usf", "NDIIOBGRAtoAlphaEvenPS", SF_Pixel);
IMPLEMENT_GLOBAL_SHADER(FNDIIOShaderBGRAtoAlphaOddPS, "/Plugin/NDIIOPlugin/Private/NDIIOShaders.usf", "NDIIOBGRAtoAlphaOddPS", SF_Pixel);
IMPLEMENT_GLOBAL_SHADER(FNDIIOShaderUYVYtoBGRAPS, "/Plugin/NDIIOPlugin/Private/NDIIOShaders.usf", "NDIIOUYVYtoBGRAPS", SF_Pixel);
IMPLEMENT_GLOBAL_SHADER(FNDIIOShaderUYVAtoBGRAPS, "/Plugin/NDIIOPlugin/Private/NDIIOShaders.usf", "NDIIOUYVAtoBGRAPS", SF_Pixel);
/**
	Uploads the per-draw parameters into the "NDIIOShaderUB" uniform buffer and
	binds it to the currently bound pixel shader on the given command list.
*/
void FNDIIOShaderPS::SetParameters(FRHICommandList& CommandList, const Params& params)
{
	// Fill the CPU-side mirror of the uniform buffer.
	FNDIIOShaderUB ShaderData;
	ShaderData.InputWidth = params.InputTarget->GetSizeX();
	ShaderData.InputHeight = params.InputTarget->GetSizeY();
	ShaderData.OutputWidth = params.OutputSize.X;
	ShaderData.OutputHeight = params.OutputSize.Y;
	ShaderData.UVOffset = static_cast<FVector2f>(params.UVOffset);
	ShaderData.UVScale = static_cast<FVector2f>(params.UVScale);
	ShaderData.ColorCorrection = static_cast<uint32>(params.ColorCorrection);

	// Remap alpha from [AlphaMin, AlphaMax] to [0, 1]:
	//   Alpha' = Alpha * AlphaScale + AlphaOffset
	//          = (Alpha - AlphaMin) / (AlphaMax - AlphaMin)
	// hence AlphaScale  = 1 / (AlphaMax - AlphaMin)
	//       AlphaOffset = -AlphaMin / (AlphaMax - AlphaMin)
	const float AlphaMin = params.AlphaMinMax[0];
	const float AlphaRange = params.AlphaMinMax[1] - AlphaMin;
	if (AlphaRange != 0.f)
	{
		ShaderData.AlphaScale = 1.f / AlphaRange;
		ShaderData.AlphaOffset = -AlphaMin / AlphaRange;
	}
	else
	{
		// Degenerate range: avoid dividing by zero.
		ShaderData.AlphaScale = 0.f;
		ShaderData.AlphaOffset = -AlphaMin;
	}

	ShaderData.InputTarget = params.InputTarget;
	ShaderData.InputAlphaTarget = params.InputAlphaTarget;
	ShaderData.SamplerP = TStaticSamplerState<SF_Point>::GetRHI();
	ShaderData.SamplerB = TStaticSamplerState<SF_Bilinear>::GetRHI();
	ShaderData.SamplerT = TStaticSamplerState<SF_Trilinear>::GetRHI();

	// Single-frame lifetime: the buffer is only valid for this frame's submission.
	TUniformBufferRef<FNDIIOShaderUB> Data = TUniformBufferRef<FNDIIOShaderUB>::CreateUniformBufferImmediate(ShaderData, UniformBuffer_SingleFrame);
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
	FRHIBatchedShaderParameters& BatchedParameters = CommandList.GetScratchShaderParameters();
	SetUniformBufferParameter(BatchedParameters, GetUniformBufferParameter<FNDIIOShaderUB>(), Data);
	CommandList.SetBatchedShaderParameters(CommandList.GetBoundPixelShader(), BatchedParameters);
#else
	SetUniformBufferParameter(CommandList, CommandList.GetBoundPixelShader(), GetUniformBufferParameter<FNDIIOShaderUB>(), Data);
#endif
}
class FNDIIOShaders : public INDIIOShaders
{
/** IModuleInterface implementation */
virtual void StartupModule() override
{
FString PluginShaderDir = FPaths::Combine(IPluginManager::Get().FindPlugin(TEXT("NDIIOPlugin"))->GetBaseDir(), TEXT("Shaders"));
AddShaderSourceDirectoryMapping(TEXT("/Plugin/NDIIOPlugin"), PluginShaderDir);
}
virtual void ShutdownModule() override
{
}
};
IMPLEMENT_MODULE( FNDIIOShaders, NDIIOShaders )

View File

@@ -1,138 +0,0 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include "CoreMinimal.h"
#include "RHI.h"
#include "RenderResource.h"
#include "Shader.h"
#include "GlobalShader.h"
#include "ShaderParameterUtils.h"
#include "RHIStaticStates.h"
#include "Misc/EngineVersionComparison.h"
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 2)) // 5.2 or later
#include "DataDrivenShaderPlatformInfo.h"
#endif
#include "Logging/LogMacros.h"
DECLARE_LOG_CATEGORY_EXTERN(LogNDIIOShaders, Log, All);
/** Vertex shader shared by the NDI IO conversion passes ("NDIIOMainVS" in NDIIOShaders.usf). */
class FNDIIOShaderVS : public FGlobalShader
{
	DECLARE_EXPORTED_SHADER_TYPE(FNDIIOShaderVS, Global, NDIIOSHADERS_API);
public:
	// Compiled for any platform supporting the ES3.1 feature level or better.
	static bool ShouldCompilePermutation(const FGlobalShaderPermutationParameters& Parameters)
	{
		return IsFeatureLevelSupported(Parameters.Platform, ERHIFeatureLevel::ES3_1);
	}
	FNDIIOShaderVS()
	{}
	FNDIIOShaderVS(const ShaderMetaType::CompiledShaderInitializerType& Initializer)
		: FGlobalShader(Initializer)
	{}
};
/**
	Base class for the NDI IO conversion pixel shaders. Holds the shared
	parameter upload logic (SetParameters); concrete conversions derive from it.
*/
class FNDIIOShaderPS : public FGlobalShader
{
public:
	// Compiled for any platform supporting the ES3.1 feature level or better.
	static bool ShouldCompilePermutation(const FGlobalShaderPermutationParameters& Parameters)
	{
		return IsFeatureLevelSupported(Parameters.Platform, ERHIFeatureLevel::ES3_1);
	}
	FNDIIOShaderPS()
	{}
	FNDIIOShaderPS(const ShaderMetaType::CompiledShaderInitializerType& Initializer)
		: FGlobalShader(Initializer)
	{}
	// Color-space conversion selector forwarded to the shader via the uniform buffer.
	enum class EColorCorrection : uint32
	{
		None = 0,
		sRGBToLinear,
		LinearTosRGB
	};
	// Bundle of per-draw inputs for SetParameters().
	struct Params
	{
		Params(const TRefCountPtr<FRHITexture>& InputTargetIn, const TRefCountPtr<FRHITexture>& InputAlphaTargetIn, FIntPoint OutputSizeIn, FVector2D UVOffsetIn, FVector2D UVScaleIn, EColorCorrection ColorCorrectionIn, FVector2D AlphaMinMaxIn)
			: InputTarget(InputTargetIn)
			, InputAlphaTarget(InputAlphaTargetIn)
			, OutputSize(OutputSizeIn)
			, UVOffset(UVOffsetIn)
			, UVScale(UVScaleIn)
			, ColorCorrection(ColorCorrectionIn)
			, AlphaMinMax(AlphaMinMaxIn)
		{}
		TRefCountPtr<FRHITexture> InputTarget;       // Source color texture
		TRefCountPtr<FRHITexture> InputAlphaTarget;  // Optional separate alpha texture
		FIntPoint OutputSize;                        // Output target size in texels
		FVector2D UVOffset;                          // UV offset applied when sampling
		FVector2D UVScale;                           // UV scale applied when sampling
		EColorCorrection ColorCorrection;            // Color-space conversion mode
		FVector2D AlphaMinMax;                       // [min, max] alpha range remapped to [0, 1] (see SetParameters)
	};
	// Uploads 'params' into the shader's uniform buffer on the given command list.
	NDIIOSHADERS_API void SetParameters(FRHICommandList& CommandList, const Params& params);
protected:
};
/** Conversion pixel shader bound to the "NDIIOBGRAtoUYVYPS" entry point in NDIIOShaders.usf. */
class FNDIIOShaderBGRAtoUYVYPS : public FNDIIOShaderPS
{
	DECLARE_EXPORTED_SHADER_TYPE(FNDIIOShaderBGRAtoUYVYPS, Global, NDIIOSHADERS_API);
public:
	using FNDIIOShaderPS::FNDIIOShaderPS;
};
/** Conversion pixel shader bound to the "NDIIOBGRAtoAlphaEvenPS" entry point in NDIIOShaders.usf. */
class FNDIIOShaderBGRAtoAlphaEvenPS : public FNDIIOShaderPS
{
	DECLARE_EXPORTED_SHADER_TYPE(FNDIIOShaderBGRAtoAlphaEvenPS, Global, NDIIOSHADERS_API);
public:
	using FNDIIOShaderPS::FNDIIOShaderPS;
};
/** Conversion pixel shader bound to the "NDIIOBGRAtoAlphaOddPS" entry point in NDIIOShaders.usf. */
class FNDIIOShaderBGRAtoAlphaOddPS : public FNDIIOShaderPS
{
	DECLARE_EXPORTED_SHADER_TYPE(FNDIIOShaderBGRAtoAlphaOddPS, Global, NDIIOSHADERS_API);
public:
	using FNDIIOShaderPS::FNDIIOShaderPS;
};
/** Conversion pixel shader bound to the "NDIIOUYVYtoBGRAPS" entry point in NDIIOShaders.usf. */
class FNDIIOShaderUYVYtoBGRAPS : public FNDIIOShaderPS
{
	DECLARE_EXPORTED_SHADER_TYPE(FNDIIOShaderUYVYtoBGRAPS, Global, NDIIOSHADERS_API);
public:
	using FNDIIOShaderPS::FNDIIOShaderPS;
};
/** Conversion pixel shader bound to the "NDIIOUYVAtoBGRAPS" entry point in NDIIOShaders.usf. */
class FNDIIOShaderUYVAtoBGRAPS : public FNDIIOShaderPS
{
	DECLARE_EXPORTED_SHADER_TYPE(FNDIIOShaderUYVAtoBGRAPS, Global, NDIIOSHADERS_API);
public:
	using FNDIIOShaderPS::FNDIIOShaderPS;
};
/** Public module interface for NDIIOShaders; adds no members beyond IModuleInterface. */
class INDIIOShaders : public IModuleInterface
{
public:
};

View File

@@ -1,635 +0,0 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
typedef struct NDIlib_v6 {
// v1.5
union {
bool (*initialize)(void);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_initialize)(void);
};
union {
void (*destroy)(void);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_destroy)(void);
};
union {
const char* (*version)(void);
PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_version)(void);
};
union {
bool (*is_supported_CPU)(void);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_is_supported_CPU)(void);
};
union {
PROCESSINGNDILIB_DEPRECATED NDIlib_find_instance_t (*find_create)(const NDIlib_find_create_t* p_create_settings);
PROCESSINGNDILIB_DEPRECATED NDIlib_find_instance_t (*NDIlib_find_create)(const NDIlib_find_create_t* p_create_settings);
};
union {
NDIlib_find_instance_t (*find_create_v2)(const NDIlib_find_create_t* p_create_settings);
PROCESSINGNDILIB_DEPRECATED NDIlib_find_instance_t (*NDIlib_find_create_v2)(const NDIlib_find_create_t* p_create_settings);
};
union {
void (*find_destroy)(NDIlib_find_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_find_destroy)(NDIlib_find_instance_t p_instance);
};
union {
const NDIlib_source_t* (*find_get_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources, uint32_t timeout_in_ms);
PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_find_get_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources, uint32_t timeout_in_ms);
};
union {
NDIlib_send_instance_t (*send_create)(const NDIlib_send_create_t* p_create_settings);
PROCESSINGNDILIB_DEPRECATED NDIlib_send_instance_t (*NDIlib_send_create)(const NDIlib_send_create_t* p_create_settings);
};
union {
void (*send_destroy)(NDIlib_send_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_destroy)(NDIlib_send_instance_t p_instance);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*send_send_video)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_video)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*send_send_video_async)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_video_async)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*send_send_audio)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_audio)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data);
};
union {
void (*send_send_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
};
union {
NDIlib_frame_type_e (*send_capture)(NDIlib_send_instance_t p_instance, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms);
PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e (*NDIlib_send_capture)(NDIlib_send_instance_t p_instance, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms);
};
union {
void (*send_free_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_free_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
};
union {
bool (*send_get_tally)(NDIlib_send_instance_t p_instance, NDIlib_tally_t* p_tally, uint32_t timeout_in_ms);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_send_get_tally)(NDIlib_send_instance_t p_instance, NDIlib_tally_t* p_tally, uint32_t timeout_in_ms);
};
union {
int (*send_get_no_connections)(NDIlib_send_instance_t p_instance, uint32_t timeout_in_ms);
PROCESSINGNDILIB_DEPRECATED int (*NDIlib_send_get_no_connections)(NDIlib_send_instance_t p_instance, uint32_t timeout_in_ms);
};
union {
void (*send_clear_connection_metadata)(NDIlib_send_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_clear_connection_metadata)(NDIlib_send_instance_t p_instance);
};
union {
void (*send_add_connection_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_add_connection_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
};
union {
void (*send_set_failover)(NDIlib_send_instance_t p_instance, const NDIlib_source_t* p_failover_source);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_set_failover)(NDIlib_send_instance_t p_instance, const NDIlib_source_t* p_failover_source);
};
union {
PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t (*recv_create_v2)(const NDIlib_recv_create_t* p_create_settings);
PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t (*NDIlib_recv_create_v2)(const NDIlib_recv_create_t* p_create_settings);
};
union {
PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t (*recv_create)(const NDIlib_recv_create_t* p_create_settings);
PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t (*NDIlib_recv_create)(const NDIlib_recv_create_t* p_create_settings);
};
union {
void (*recv_destroy)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_destroy)(NDIlib_recv_instance_t p_instance);
};
union {
PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e (*recv_capture)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_t* p_video_data, NDIlib_audio_frame_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms);
PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e (*NDIlib_recv_capture)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_t* p_video_data, NDIlib_audio_frame_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*recv_free_video)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_t* p_video_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_video)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_t* p_video_data);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*recv_free_audio)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_audio)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data);
};
union {
void (*recv_free_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
};
union {
bool (*recv_send_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_send_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
};
union {
bool (*recv_set_tally)(NDIlib_recv_instance_t p_instance, const NDIlib_tally_t* p_tally);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_set_tally)(NDIlib_recv_instance_t p_instance, const NDIlib_tally_t* p_tally);
};
union {
void (*recv_get_performance)(NDIlib_recv_instance_t p_instance, NDIlib_recv_performance_t* p_total, NDIlib_recv_performance_t* p_dropped);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_get_performance)(NDIlib_recv_instance_t p_instance, NDIlib_recv_performance_t* p_total, NDIlib_recv_performance_t* p_dropped);
};
union {
void (*recv_get_queue)(NDIlib_recv_instance_t p_instance, NDIlib_recv_queue_t* p_total);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_get_queue)(NDIlib_recv_instance_t p_instance, NDIlib_recv_queue_t* p_total);
};
union {
void (*recv_clear_connection_metadata)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_clear_connection_metadata)(NDIlib_recv_instance_t p_instance);
};
union {
void (*recv_add_connection_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_add_connection_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
};
union {
int (*recv_get_no_connections)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED int (*NDIlib_recv_get_no_connections)(NDIlib_recv_instance_t p_instance);
};
union {
NDIlib_routing_instance_t (*routing_create)(const NDIlib_routing_create_t* p_create_settings);
PROCESSINGNDILIB_DEPRECATED NDIlib_routing_instance_t (*NDIlib_routing_create)(const NDIlib_routing_create_t* p_create_settings);
};
union {
void (*routing_destroy)(NDIlib_routing_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_routing_destroy)(NDIlib_routing_instance_t p_instance);
};
union {
bool (*routing_change)(NDIlib_routing_instance_t p_instance, const NDIlib_source_t* p_source);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_routing_change)(NDIlib_routing_instance_t p_instance, const NDIlib_source_t* p_source);
};
union {
bool (*routing_clear)(NDIlib_routing_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_routing_clear)(NDIlib_routing_instance_t p_instance);
};
union {
void (*util_send_send_audio_interleaved_16s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_16s_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_send_send_audio_interleaved_16s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_16s_t* p_audio_data);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*util_audio_to_interleaved_16s)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_to_interleaved_16s)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*util_audio_from_interleaved_16s)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_from_interleaved_16s)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_t* p_dst);
};
// v2
union {
bool (*find_wait_for_sources)(NDIlib_find_instance_t p_instance, uint32_t timeout_in_ms);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_find_wait_for_sources)(NDIlib_find_instance_t p_instance, uint32_t timeout_in_ms);
};
union {
const NDIlib_source_t* (*find_get_current_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources);
PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_find_get_current_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*util_audio_to_interleaved_32f)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_to_interleaved_32f)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*util_audio_from_interleaved_32f)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_from_interleaved_32f)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_t* p_dst);
};
union {
void (*util_send_send_audio_interleaved_32f)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32f_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_send_send_audio_interleaved_32f)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32f_t* p_audio_data);
};
// v3
union {
void (*recv_free_video_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_video_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);
};
union {
void (*recv_free_audio_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_audio_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data);
};
union {
NDIlib_frame_type_e (*recv_capture_v2)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v2_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data.
PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e (*NDIlib_recv_capture_v2)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v2_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data.
};
union {
void (*send_send_video_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_video_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);
};
union {
void (*send_send_video_async_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_video_async_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);
};
union {
void (*send_send_audio_v2)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_audio_v2)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data);
};
union {
void (*util_audio_to_interleaved_16s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_to_interleaved_16s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst);
};
union {
void (*util_audio_from_interleaved_16s_v2)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_v2_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_from_interleaved_16s_v2)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_v2_t* p_dst);
};
union {
void (*util_audio_to_interleaved_32f_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_to_interleaved_32f_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst);
};
union {
void (*util_audio_from_interleaved_32f_v2)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_v2_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_from_interleaved_32f_v2)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_v2_t* p_dst);
};
// V3.01
union {
void (*recv_free_string)(NDIlib_recv_instance_t p_instance, const char* p_string);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_string)(NDIlib_recv_instance_t p_instance, const char* p_string);
};
union {
bool (*recv_ptz_is_supported)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_is_supported)(NDIlib_recv_instance_t p_instance);
};
union {
// This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_is_supported)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_is_supported)(NDIlib_recv_instance_t p_instance);
};
union {
const char* (*recv_get_web_control)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_recv_get_web_control)(NDIlib_recv_instance_t p_instance);
};
union {
bool (*recv_ptz_zoom)(NDIlib_recv_instance_t p_instance, const float zoom_value);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_zoom)(NDIlib_recv_instance_t p_instance, const float zoom_value);
};
union {
bool (*recv_ptz_zoom_speed)(NDIlib_recv_instance_t p_instance, const float zoom_speed);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_zoom_speed)(NDIlib_recv_instance_t p_instance, const float zoom_speed);
};
union {
bool (*recv_ptz_pan_tilt)(NDIlib_recv_instance_t p_instance, const float pan_value, const float tilt_value);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_pan_tilt)(NDIlib_recv_instance_t p_instance, const float pan_value, const float tilt_value);
};
union {
bool (*recv_ptz_pan_tilt_speed)(NDIlib_recv_instance_t p_instance, const float pan_speed, const float tilt_speed);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_pan_tilt_speed)(NDIlib_recv_instance_t p_instance, const float pan_speed, const float tilt_speed);
};
union {
bool (*recv_ptz_store_preset)(NDIlib_recv_instance_t p_instance, const int preset_no);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_store_preset)(NDIlib_recv_instance_t p_instance, const int preset_no);
};
union {
bool (*recv_ptz_recall_preset)(NDIlib_recv_instance_t p_instance, const int preset_no, const float speed);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_recall_preset)(NDIlib_recv_instance_t p_instance, const int preset_no, const float speed);
};
union {
bool (*recv_ptz_auto_focus)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_auto_focus)(NDIlib_recv_instance_t p_instance);
};
union {
bool (*recv_ptz_focus)(NDIlib_recv_instance_t p_instance, const float focus_value);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_focus)(NDIlib_recv_instance_t p_instance, const float focus_value);
};
union {
bool (*recv_ptz_focus_speed)(NDIlib_recv_instance_t p_instance, const float focus_speed);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_focus_speed)(NDIlib_recv_instance_t p_instance, const float focus_speed);
};
union {
bool (*recv_ptz_white_balance_auto)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_white_balance_auto)(NDIlib_recv_instance_t p_instance);
};
union {
bool (*recv_ptz_white_balance_indoor)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_white_balance_indoor)(NDIlib_recv_instance_t p_instance);
};
union {
bool (*recv_ptz_white_balance_outdoor)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_white_balance_outdoor)(NDIlib_recv_instance_t p_instance);
};
union {
bool (*recv_ptz_white_balance_oneshot)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_white_balance_oneshot)(NDIlib_recv_instance_t p_instance);
};
union {
bool (*recv_ptz_white_balance_manual)(NDIlib_recv_instance_t p_instance, const float red, const float blue);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_white_balance_manual)(NDIlib_recv_instance_t p_instance, const float red, const float blue);
};
union {
bool (*recv_ptz_exposure_auto)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_exposure_auto)(NDIlib_recv_instance_t p_instance);
};
union {
bool (*recv_ptz_exposure_manual)(NDIlib_recv_instance_t p_instance, const float exposure_level);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_exposure_manual)(NDIlib_recv_instance_t p_instance, const float exposure_level);
};
union {
// This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_start)(NDIlib_recv_instance_t p_instance, const char* p_filename_hint);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_start)(NDIlib_recv_instance_t p_instance, const char* p_filename_hint);
};
union {
// This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_stop)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_stop)(NDIlib_recv_instance_t p_instance);
};
union {
// This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_set_audio_level)(NDIlib_recv_instance_t p_instance, const float level_dB);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_set_audio_level)(NDIlib_recv_instance_t p_instance, const float level_dB);
};
union { // This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_is_recording)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_is_recording)(NDIlib_recv_instance_t p_instance);
};
union {
// This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED const char* (*recv_recording_get_filename)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_recv_recording_get_filename)(NDIlib_recv_instance_t p_instance);
};
union {
// This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED const char* (*recv_recording_get_error)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_recv_recording_get_error)(NDIlib_recv_instance_t p_instance);
};
union {
// This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_get_times)(NDIlib_recv_instance_t p_instance, NDIlib_recv_recording_time_t* p_times);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_get_times)(NDIlib_recv_instance_t p_instance, NDIlib_recv_recording_time_t* p_times);
};
// v3.1
union {
NDIlib_recv_instance_t (*recv_create_v3)(const NDIlib_recv_create_v3_t* p_create_settings);
PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t (*NDIlib_recv_create_v3)(const NDIlib_recv_create_v3_t* p_create_settings);
};
// v3.5
union {
void (*recv_connect)(NDIlib_recv_instance_t p_instance, const NDIlib_source_t* p_src);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_connect)(NDIlib_recv_instance_t p_instance, const NDIlib_source_t* p_src);
};
// v3.6
union {
NDIlib_framesync_instance_t (*framesync_create)(NDIlib_recv_instance_t p_receiver);
PROCESSINGNDILIB_DEPRECATED NDIlib_framesync_instance_t (*NDIlib_framesync_create)(NDIlib_recv_instance_t p_receiver);
};
union {
void (*framesync_destroy)(NDIlib_framesync_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_destroy)(NDIlib_framesync_instance_t p_instance);
};
union {
void (*framesync_capture_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data, int sample_rate, int no_channels, int no_samples);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_capture_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data, int sample_rate, int no_channels, int no_samples);
};
union {
void (*framesync_free_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_free_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data);
};
union {
void (*framesync_capture_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_frame_format_type_e field_type);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_capture_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_frame_format_type_e field_type);
};
union {
void (*framesync_free_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_free_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data);
};
union {
void (*util_send_send_audio_interleaved_32s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32s_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_send_send_audio_interleaved_32s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32s_t* p_audio_data);
};
union {
void (*util_audio_to_interleaved_32s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32s_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_to_interleaved_32s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32s_t* p_dst);
};
union {
void (*util_audio_from_interleaved_32s_v2)(const NDIlib_audio_frame_interleaved_32s_t* p_src, NDIlib_audio_frame_v2_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_from_interleaved_32s_v2)(const NDIlib_audio_frame_interleaved_32s_t* p_src, NDIlib_audio_frame_v2_t* p_dst);
};
// v3.8
union {
const NDIlib_source_t* (*send_get_source_name)(NDIlib_send_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_send_get_source_name)(NDIlib_send_instance_t p_instance);
};
// v4.0
union {
void (*send_send_audio_v3)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_audio_v3)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data);
};
union {
void (*util_V210_to_P216)(const NDIlib_video_frame_v2_t* p_src_v210, NDIlib_video_frame_v2_t* p_dst_p216);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_V210_to_P216)(const NDIlib_video_frame_v2_t* p_src_v210, NDIlib_video_frame_v2_t* p_dst_p216);
};
union {
void (*util_P216_to_V210)(const NDIlib_video_frame_v2_t* p_src_p216, NDIlib_video_frame_v2_t* p_dst_v210);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_P216_to_V210)(const NDIlib_video_frame_v2_t* p_src_p216, NDIlib_video_frame_v2_t* p_dst_v210);
};
// v4.1
union {
int (*routing_get_no_connections)(NDIlib_routing_instance_t p_instance, uint32_t timeout_in_ms);
PROCESSINGNDILIB_DEPRECATED int (*NDIlib_routing_get_no_connections)(NDIlib_routing_instance_t p_instance, uint32_t timeout_in_ms);
};
union {
const NDIlib_source_t* (*routing_get_source_name)(NDIlib_routing_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_routing_get_source_name)(NDIlib_routing_instance_t p_instance);
};
union {
NDIlib_frame_type_e (*recv_capture_v3)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v3_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data.
PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e (*NDIlib_recv_capture_v3)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v3_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data.
};
union {
void (*recv_free_audio_v3)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_audio_v3)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data);
};
union {
void (*framesync_capture_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data, int sample_rate, int no_channels, int no_samples);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_capture_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data, int sample_rate, int no_channels, int no_samples);
};
union {
void (*framesync_free_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_free_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data);
};
union {
int (*framesync_audio_queue_depth)(NDIlib_framesync_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED int (*NDIlib_framesync_audio_queue_depth)(NDIlib_framesync_instance_t p_instance);
};
// v5
union {
bool (*recv_ptz_exposure_manual_v2)(NDIlib_recv_instance_t p_instance, const float iris, const float gain, const float shutter_speed);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_exposure_manual_v2)(NDIlib_recv_instance_t p_instance, const float iris, const float gain, const float shutter_speed);
};
// v6.1
bool (*util_audio_to_interleaved_16s_v3)(const NDIlib_audio_frame_v3_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst);
bool (*util_audio_from_interleaved_16s_v3)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_v3_t* p_dst);
bool (*util_audio_to_interleaved_32s_v3)(const NDIlib_audio_frame_v3_t* p_src, NDIlib_audio_frame_interleaved_32s_t* p_dst);
bool (*util_audio_from_interleaved_32s_v3)(const NDIlib_audio_frame_interleaved_32s_t* p_src, NDIlib_audio_frame_v3_t* p_dst);
bool (*util_audio_to_interleaved_32f_v3)(const NDIlib_audio_frame_v3_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst);
bool (*util_audio_from_interleaved_32f_v3)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_v3_t* p_dst);
// v6.2
bool (*recv_get_source_name)(NDIlib_recv_instance_t p_instance, const char** p_source_name, uint32_t timeout_in_ms);
NDIlib_recv_advertiser_instance_t (*recv_advertiser_create)(const NDIlib_recv_advertiser_create_t* p_create_settings);
void (*recv_advertiser_destroy)(NDIlib_recv_advertiser_instance_t p_instance);
bool (*recv_advertiser_add_receiver)(NDIlib_recv_advertiser_instance_t p_instance, NDIlib_recv_instance_t p_receiver, bool allow_controlling, bool allow_monitoring, const char* p_input_group_name);
bool (*recv_advertiser_del_receiver)(NDIlib_recv_advertiser_instance_t p_instance, NDIlib_recv_instance_t p_receiver);
NDIlib_recv_listener_instance_t (*recv_listener_create)(const NDIlib_recv_listener_create_t* p_create_settings);
void (*recv_listener_destroy)(NDIlib_recv_listener_instance_t p_instance);
bool (*recv_listener_is_connected)(NDIlib_recv_listener_instance_t p_instance);
const char* (*recv_listener_get_server_url)(NDIlib_recv_listener_instance_t p_instance);
const NDIlib_receiver_t* (*recv_listener_get_receivers)(NDIlib_recv_listener_instance_t p_instance, uint32_t* p_num_receivers);
bool (*recv_listener_wait_for_receivers)(NDIlib_recv_listener_instance_t p_instance, uint32_t timeout_in_ms);
} NDIlib_v6;
// Backward-compatibility aliases: every earlier interface version name refers to the same
// NDIlib_v6 structure. The v6 function table only ever appends new entries (see the
// "v2" ... "v6.2" sections above), so code written against an older, shorter prefix of
// the table remains layout-compatible with the current definition.
typedef struct NDIlib_v6 NDIlib_v5;
typedef struct NDIlib_v6 NDIlib_v4_5;
typedef struct NDIlib_v6 NDIlib_v4;
typedef struct NDIlib_v6 NDIlib_v3;
typedef struct NDIlib_v6 NDIlib_v2;
// Load the dynamic NDI library and return a pointer to its current (v6) function table.
PROCESSINGNDILIB_API
const NDIlib_v6* NDIlib_v6_load(void);
// Deprecated loader kept for source compatibility. NDIlib_v5 is a typedef of the v6
// table, so this exposes the same entry points under the older name.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
const NDIlib_v5* NDIlib_v5_load(void);
// Deprecated loader for the v4.5-era interface name (alias of the v6 table).
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
const NDIlib_v4_5* NDIlib_v4_5_load(void);
// Deprecated loader for the v4-era interface name (alias of the v6 table).
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
const NDIlib_v4* NDIlib_v4_load(void);
// Deprecated loader for the v3-era interface name (alias of the v6 table).
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
const NDIlib_v3* NDIlib_v3_load(void);
// Deprecated loader for the v2-era interface name (alias of the v6 table).
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
const NDIlib_v2* NDIlib_v2_load(void);

View File

@@ -1,79 +0,0 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// Structures and type definitions required by NDI finding.
// The reference to an instance of the finder. The implementation type is only
// forward-declared here, so NDIlib_find_instance_t is an opaque handle that client code
// creates and destroys exclusively through the NDIlib_find_* API calls below.
struct NDIlib_find_instance_type;
typedef struct NDIlib_find_instance_type* NDIlib_find_instance_t;
// The creation structure that is used when you are creating a finder.
// When compiled as C++ (NDILIB_CPP_DEFAULT_CONSTRUCTORS), the constructor below supplies
// the defaults noted on each field; C callers must initialize all fields themselves.
typedef struct NDIlib_find_create_t {
// Do we want to include the list of NDI sources that are running on the local machine? If TRUE then
// local sources will be visible, if FALSE then they will not.
// Default (via the C++ constructor) = true.
bool show_local_sources;
// Which groups do you want to search in for sources.
// Default (via the C++ constructor) = NULL.
const char* p_groups;
// The list of additional IP addresses that exist that we should query for sources on. For instance, if
// you want to find the sources on a remote machine that is not on your local sub-net then you can put a
// comma separated list of those IP addresses here and those sources will be available locally even
// though they are not mDNS discoverable. An example might be "12.0.0.8,13.0.12.8". When none is
// specified the registry is used.
// Default = NULL;
const char* p_extra_ips;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
// C++ convenience constructor providing the default values documented above.
NDIlib_find_create_t(
bool show_local_sources_ = true,
const char* p_groups_ = NULL,
const char* p_extra_ips_ = NULL
);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_find_create_t;
//***********************************************************************************************************
// Create a new finder instance. This will return NULL if it fails. Passing NULL for p_create_settings
// requests default settings (in C++ the NDILIB_CPP_DEFAULT_VALUE macro supplies NULL as the default
// argument).
PROCESSINGNDILIB_API
NDIlib_find_instance_t NDIlib_find_create_v2(const NDIlib_find_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL));
// This will destroy an existing finder instance.
PROCESSINGNDILIB_API
void NDIlib_find_destroy(NDIlib_find_instance_t p_instance);
// This function will recover the current set of sources (i.e. the ones that exist right this second). The
// char* memory buffers returned in NDIlib_source_t are valid until the next call to
// NDIlib_find_get_current_sources or a call to NDIlib_find_destroy. For a given NDIlib_find_instance_t, do
// not call NDIlib_find_get_current_sources asynchronously.
// Per the parameter naming, *p_no_sources receives the number of entries in the returned array.
PROCESSINGNDILIB_API
const NDIlib_source_t* NDIlib_find_get_current_sources(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources);
// This will allow you to wait until the number of online sources have changed. The wait is bounded by
// timeout_in_ms; presumably the boolean result reports whether a change occurred before the timeout —
// NOTE(review): confirm against the SDK documentation.
PROCESSINGNDILIB_API
bool NDIlib_find_wait_for_sources(NDIlib_find_instance_t p_instance, uint32_t timeout_in_ms);

View File

@@ -1,172 +0,0 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// It is important when using video to realize that often you are using difference clocks for different parts
// of the signal chain. Within NDI, the sender can send at the clock rate that it wants and the receiver will
// receive it at that rate. The receiver however is very unlikely to share the exact same clock rate in many
// cases. For instance, bear in mind that computer clocks rely on crystals which while all rated for the same
// frequency are still not exact. If you sending computer has an audio clock that it "thinks" is 48000Hz, to
// the receiver computer that has a different audio clock this might be 48001Hz or 47998Hz. While these
// differences might appear small they accumulate over time and can cause audio to either slightly drift out
// of sync (it is receiving more audio sample than it needs to play back) or might cause audio glitches
// because it is not receiving enough audio samples. While we have described the situation for audio, the
// same exact problem occurs for video sources; it is commonly thought that this can be solved by simply
// having a "frame buffer" and that displaying the "most recently received video frame" will solve these
// timing discrepancies. Unfortunately this is not the case and when it is done because of the variance in
// clock timings, it is very common the video will appear the "jitter" when the sending and receiving clocks
// are almost in alignment. The solution to these problems is to implement a "time base corrector" for the
// video clock which is a device that uses hysteresis to know when the best time is to either drop or insert
// a video frame such that the video is most likely to play back smoothly, and audio should be dynamically
// audio sampled (with a high order resampling filter) to adaptively track any clocking differences.
// Implementing these components is very difficult to get entirely correct under all scenarios and this
// implementation is provided to facilitate this and help people who are building real time video
// applications to receive audio and video without needing to undertake the full complexity of implementing
// such clock devices.
//
// Another way to look at what this class does is that it transforms "push" sources (i.e. NDI sources in
// which the data is pushed from the sender to the receiver) into "pull" sources in which a host application
// is pulling the data down-stream. The frame-sync automatically tracks all clocks to achieve the best video
// performance doing this operation.
//
// In addition to time-base correction operations, these implementations also will automatically detect and
// correct timing jitter that might occur. This will internally correct for timing anomalies that might be
// caused by network, sender or receiver side timing errors caused by CPU limitations, network bandwidth
// fluctuations, etc...
//
// A very common use of a frame-synchronizer might be if you are displaying video on screen timed to the GPU
// v-sync, you should use such a device to convert from the incoming time-base into the time-base of the GPU.
//
// The following are common times that you want to use a frame-synchronizer
// Video playback on screen : Yes, you want the clock to be synced with vertical refresh.
// Audio playback through sound card : Yes you want the clock to be synced with your sound card clock.
// Video mixing : Yes you want the input video clocks to all be synced to your output video clock.
// Audio mixing : Yes, you want all input audio clocks to be brought into sync with your output
// audio clock.
// Recording a single channel : No, you want to record the signal in its raw form without
// any re-clocking.
// Recording multiple channels : Maybe. If you want to sync some input channels to match a master clock
// so that they can be ISO edited, then you might want a frame-sync.
// The type instance for a frame-synchronizer. The implementation type is only
// forward-declared, so NDIlib_framesync_instance_t is an opaque handle managed entirely
// through the NDIlib_framesync_* API calls below.
struct NDIlib_framesync_instance_type;
typedef struct NDIlib_framesync_instance_type* NDIlib_framesync_instance_t;
// Create a frame synchronizer instance that can be used to get frames from a receiver. Once this receiver
// has been bound to a frame-sync then you should use it in order to receive video frames. You can continue
// to use the underlying receiver for other operations (tally, PTZ, etc...). Note that it remains your
// responsibility to destroy the receiver even when a frame-sync is using it. You should always destroy the
// receiver after the frame-sync has been destroyed.
//
PROCESSINGNDILIB_API
NDIlib_framesync_instance_t NDIlib_framesync_create(NDIlib_recv_instance_t p_receiver);
// Destroy a frame-sync implementation.
PROCESSINGNDILIB_API
void NDIlib_framesync_destroy(NDIlib_framesync_instance_t p_instance);
// This function will pull audio samples from the frame-sync queue. This function will always return data
// immediately, inserting silence if no current audio data is present. You should call this at the rate that
// you want audio and it will automatically adapt the incoming audio signal to match the rate at which you
// are calling by using dynamic audio sampling. Note that you have no obligation that your requested sample
// rate, no channels and no samples match the incoming signal and all combinations of conversions
// are supported.
//
// If you wish to know what the current incoming audio format is, then you can make a call with the
// parameters set to zero and it will then return the associated settings. For instance a call as follows:
//
// NDIlib_framesync_capture_audio(p_instance, p_audio_data, 0, 0, 0);
//
// will return in p_audio_data the current received audio format if there is one or sample_rate and
// no_channels equal to zero if there is not one. At any time you can specify sample_rate and no_channels as
// zero and it will return the current received audio format.
//
PROCESSINGNDILIB_API
void NDIlib_framesync_capture_audio(
NDIlib_framesync_instance_t p_instance,
NDIlib_audio_frame_v2_t* p_audio_data,
int sample_rate, int no_channels, int no_samples
);
// As NDIlib_framesync_capture_audio above, but the audio is returned as an
// NDIlib_audio_frame_v3_t. Frames captured with this call must be released with
// NDIlib_framesync_free_audio_v2 (see below).
PROCESSINGNDILIB_API
void NDIlib_framesync_capture_audio_v2(
NDIlib_framesync_instance_t p_instance,
NDIlib_audio_frame_v3_t* p_audio_data,
int sample_rate, int no_channels, int no_samples
);
// Free audio returned by NDIlib_framesync_capture_audio.
PROCESSINGNDILIB_API
void NDIlib_framesync_free_audio(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data);
// Free audio returned by NDIlib_framesync_capture_audio_v2.
PROCESSINGNDILIB_API
void NDIlib_framesync_free_audio_v2(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data);
// This function will tell you the approximate current depth of the audio queue to give you an indication
// of the number of audio samples you can request. Note that you should treat the results of this function
// with some care because in reality the frame-sync API is meant to dynamically resample audio to match the
// rate that you are calling it. If you have an inaccurate clock then this function can be useful,
// for instance:
//
// while(true)
// { int no_samples = NDIlib_framesync_audio_queue_depth(p_instance);
// NDIlib_framesync_capture_audio( ... );
// play_audio( ... )
// NDIlib_framesync_free_audio( ... )
// inaccurate_sleep( 33ms );
// }
//
// Obviously because audio is being received in real-time there is no guarantee after the call that the
// number is correct since new samples might have been captured in that time. On synchronous use of this
// function however this will be the minimum number of samples in the queue at any later time until
// NDIlib_framesync_capture_audio is called.
//
PROCESSINGNDILIB_API
int NDIlib_framesync_audio_queue_depth(NDIlib_framesync_instance_t p_instance);
// This function will pull video samples from the frame-sync queue. This function will always immediately
// return a video sample by using time-base correction. You can specify the desired field type which is then
// used to return the best possible frame. Note that field based frame-synchronization means that the
// frame-synchronizer attempts to match the fielded input phase with the frame requests so that you have the
// most correct possible field ordering on output. Note that the same frame can be returned multiple times.
//
// If no video frame has ever been received, this will return NDIlib_video_frame_v2_t as an empty (all zero)
// structure. The reason for this is that it allows you to determine that there has not yet been any video
// and act accordingly. For instance you might want to display a constant frame output at a particular video
// format, or black.
//
PROCESSINGNDILIB_API
void NDIlib_framesync_capture_video(
NDIlib_framesync_instance_t p_instance,
NDIlib_video_frame_v2_t* p_video_data,
NDIlib_frame_format_type_e field_type NDILIB_CPP_DEFAULT_VALUE(NDIlib_frame_format_type_progressive)
);
// Free video returned by NDIlib_framesync_capture_video.
PROCESSINGNDILIB_API
void NDIlib_framesync_free_video(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data);

View File

@@ -1,129 +0,0 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// C++ implementations of default constructors are here to avoid them needing to be inline with all of the
// rest of the code.
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
#endif
// All the structs used and reasonable defaults are here
// Inline definitions of the C++ default constructors declared on the public NDI structs.
// Each constructor simply copies its arguments (or the documented default values supplied
// at the declaration site) into the matching struct members; none performs any allocation
// or validation. NOTE(review): member-init order must continue to match the member
// declaration order in the corresponding struct definitions.
// Source descriptor: the NDI source name and its URL address (pointers are stored, not copied).
inline NDIlib_source_t::NDIlib_source_t(const char* p_ndi_name_, const char* p_url_address_)
: p_ndi_name(p_ndi_name_), p_url_address(p_url_address_) {}
// Uncompressed video frame (v2): resolution, FourCC, frame rate, aspect, field mode, timing and payload.
inline NDIlib_video_frame_v2_t::NDIlib_video_frame_v2_t(int xres_, int yres_, NDIlib_FourCC_video_type_e FourCC_, int frame_rate_N_, int frame_rate_D_,
float picture_aspect_ratio_, NDIlib_frame_format_type_e frame_format_type_,
int64_t timecode_, uint8_t* p_data_, int line_stride_in_bytes_, const char* p_metadata_, int64_t timestamp_)
: xres(xres_), yres(yres_), FourCC(FourCC_), frame_rate_N(frame_rate_N_), frame_rate_D(frame_rate_D_),
picture_aspect_ratio(picture_aspect_ratio_), frame_format_type(frame_format_type_),
timecode(timecode_), p_data(p_data_), line_stride_in_bytes(line_stride_in_bytes_), p_metadata(p_metadata_), timestamp(timestamp_) {}
// Planar floating-point audio frame (v2).
inline NDIlib_audio_frame_v2_t::NDIlib_audio_frame_v2_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_, float* p_data_,
int channel_stride_in_bytes_, const char* p_metadata_, int64_t timestamp_)
: sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_),
p_data(p_data_), channel_stride_in_bytes(channel_stride_in_bytes_), p_metadata(p_metadata_), timestamp(timestamp_) {}
// Audio frame (v3): adds an audio FourCC so the sample format is explicit.
inline NDIlib_audio_frame_v3_t::NDIlib_audio_frame_v3_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_,
NDIlib_FourCC_audio_type_e FourCC_, uint8_t* p_data_, int channel_stride_in_bytes_,
const char* p_metadata_, int64_t timestamp_)
: sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_),
FourCC(FourCC_), p_data(p_data_), channel_stride_in_bytes(channel_stride_in_bytes_),
p_metadata(p_metadata_), timestamp(timestamp_) {}
// Legacy (v1) video frame: like v2 but without metadata/timestamp.
inline NDIlib_video_frame_t::NDIlib_video_frame_t(int xres_, int yres_, NDIlib_FourCC_video_type_e FourCC_, int frame_rate_N_, int frame_rate_D_,
float picture_aspect_ratio_, NDIlib_frame_format_type_e frame_format_type_,
int64_t timecode_, uint8_t* p_data_, int line_stride_in_bytes_)
: xres(xres_), yres(yres_), FourCC(FourCC_), frame_rate_N(frame_rate_N_), frame_rate_D(frame_rate_D_),
picture_aspect_ratio(picture_aspect_ratio_), frame_format_type(frame_format_type_),
timecode(timecode_), p_data(p_data_), line_stride_in_bytes(line_stride_in_bytes_) {}
// Legacy (v1) audio frame: like v2 but without metadata/timestamp.
inline NDIlib_audio_frame_t::NDIlib_audio_frame_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_, float* p_data_,
int channel_stride_in_bytes_)
: sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_),
p_data(p_data_), channel_stride_in_bytes(channel_stride_in_bytes_) {}
// Metadata frame: length, timecode and a (typically XML) character payload.
inline NDIlib_metadata_frame_t::NDIlib_metadata_frame_t(int length_, int64_t timecode_, char* p_data_)
: length(length_), timecode(timecode_), p_data(p_data_) {}
// Tally state: program/preview flags.
inline NDIlib_tally_t::NDIlib_tally_t(bool on_program_, bool on_preview_)
: on_program(on_program_), on_preview(on_preview_) {}
// Routing-source creation settings: NDI name and optional groups.
inline NDIlib_routing_create_t::NDIlib_routing_create_t(const char* p_ndi_name_, const char* p_groups_)
: p_ndi_name(p_ndi_name_), p_groups(p_groups_) {}
// Receiver creation settings (v3); note p_ndi_name_ is stored into the p_ndi_recv_name member.
inline NDIlib_recv_create_v3_t::NDIlib_recv_create_v3_t(const NDIlib_source_t source_to_connect_to_, NDIlib_recv_color_format_e color_format_,
NDIlib_recv_bandwidth_e bandwidth_, bool allow_video_fields_, const char* p_ndi_name_)
: source_to_connect_to(source_to_connect_to_), color_format(color_format_), bandwidth(bandwidth_), allow_video_fields(allow_video_fields_), p_ndi_recv_name(p_ndi_name_) {}
// Legacy receiver creation settings (no receiver name).
inline NDIlib_recv_create_t::NDIlib_recv_create_t(const NDIlib_source_t source_to_connect_to_, NDIlib_recv_color_format_e color_format_,
NDIlib_recv_bandwidth_e bandwidth_, bool allow_video_fields_)
: source_to_connect_to(source_to_connect_to_), color_format(color_format_), bandwidth(bandwidth_), allow_video_fields(allow_video_fields_) {}
// Performance counters start at zero.
inline NDIlib_recv_performance_t::NDIlib_recv_performance_t(void)
: video_frames(0), audio_frames(0), metadata_frames(0) {}
// Queue-depth counters start at zero.
inline NDIlib_recv_queue_t::NDIlib_recv_queue_t(void)
: video_frames(0), audio_frames(0), metadata_frames(0) {}
// Recording-time values start at zero.
inline NDIlib_recv_recording_time_t::NDIlib_recv_recording_time_t(void)
: no_frames(0), start_time(0), last_time(0) {}
// Sender creation settings: name, groups and clocking behavior.
inline NDIlib_send_create_t::NDIlib_send_create_t(const char* p_ndi_name_, const char* p_groups_, bool clock_video_, bool clock_audio_)
: p_ndi_name(p_ndi_name_), p_groups(p_groups_), clock_video(clock_video_), clock_audio(clock_audio_) {}
// Finder creation settings: local-source visibility, groups and extra IPs to probe.
inline NDIlib_find_create_t::NDIlib_find_create_t(bool show_local_sources_, const char* p_groups_, const char* p_extra_ips_)
: show_local_sources(show_local_sources_), p_groups(p_groups_), p_extra_ips(p_extra_ips_) {}
// Interleaved 16-bit signed audio frame.
inline NDIlib_audio_frame_interleaved_16s_t::NDIlib_audio_frame_interleaved_16s_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_, int reference_level_, int16_t* p_data_)
: sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_),
reference_level(reference_level_), p_data(p_data_) {}
// Interleaved 32-bit signed audio frame.
inline NDIlib_audio_frame_interleaved_32s_t::NDIlib_audio_frame_interleaved_32s_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_, int reference_level_, int32_t* p_data_)
: sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_),
reference_level(reference_level_), p_data(p_data_) {}
// Interleaved 32-bit floating-point audio frame (no reference level needed).
inline NDIlib_audio_frame_interleaved_32f_t::NDIlib_audio_frame_interleaved_32f_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_, float* p_data_)
: sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_), p_data(p_data_) {}
// Receiver-advertiser creation settings: optional URL to advertise on.
inline NDIlib_recv_advertiser_create_t::NDIlib_recv_advertiser_create_t(const char* p_url_address)
: p_url_address(p_url_address) {}
// Receiver-listener creation settings: optional URL to listen on.
inline NDIlib_recv_listener_create_t::NDIlib_recv_listener_create_t(const char* p_url_address)
: p_url_address(p_url_address) {}
// Receiver description: all pointers NULL and all counts zero until filled in by the library.
inline NDIlib_receiver_t::NDIlib_receiver_t(void)
: p_uuid(NULL), p_name(NULL), p_input_uuid(NULL), p_input_name(NULL), p_address(NULL),
p_streams(NULL), num_streams(0), p_commands(NULL), num_commands(0), events_subscribed(false) {}
#ifdef __clang__
#pragma clang diagnostic pop
#endif

View File

@@ -1,165 +0,0 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// ---- Export / linkage configuration -----------------------------------------------------------
// PROCESSINGNDILIB_API decorates every public entry point. Static builds need only C linkage;
// shared builds additionally need dllexport/dllimport on Windows or default visibility elsewhere.
#ifdef PROCESSINGNDILIB_STATIC
#	ifdef __cplusplus
#		define PROCESSINGNDILIB_API extern "C"
#	else // __cplusplus
#		define PROCESSINGNDILIB_API
#	endif // __cplusplus
#else // PROCESSINGNDILIB_STATIC
#	ifdef _WIN32
#		ifdef PROCESSINGNDILIB_EXPORTS
#			ifdef __cplusplus
#				define PROCESSINGNDILIB_API extern "C" __declspec(dllexport)
#			else // __cplusplus
#				define PROCESSINGNDILIB_API __declspec(dllexport)
#			endif // __cplusplus
#		else // PROCESSINGNDILIB_EXPORTS
#			ifdef __cplusplus
#				define PROCESSINGNDILIB_API extern "C" __declspec(dllimport)
#			else // __cplusplus
#				define PROCESSINGNDILIB_API __declspec(dllimport)
#			endif // __cplusplus
// Importing consumers also get the runtime DLL name, the environment variable naming the
// redistributable folder, and the URL of the redistributable installer for this platform.
#			ifdef _WIN64
#				define NDILIB_LIBRARY_NAME "Processing.NDI.Lib.x64.dll"
#				define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V6"
#				define NDILIB_REDIST_URL "http://ndi.link/NDIRedistV6"
#			else // _WIN64
#				define NDILIB_LIBRARY_NAME "Processing.NDI.Lib.x86.dll"
#				define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V6"
#				define NDILIB_REDIST_URL "http://ndi.link/NDIRedistV6"
#			endif // _WIN64
#		endif // PROCESSINGNDILIB_EXPORTS
#	else // _WIN32
#		ifdef __APPLE__
#			define NDILIB_LIBRARY_NAME "libndi.dylib"
#			define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V6"
#			define NDILIB_REDIST_URL "http://ndi.link/NDIRedistV6Apple"
#		else // __APPLE__
// Linux/other: no redistributable URL is published; the runtime is expected to be installed.
#			define NDILIB_LIBRARY_NAME "libndi.so.6"
#			define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V6"
#			define NDILIB_REDIST_URL ""
#		endif // __APPLE__
#		ifdef __cplusplus
#			define PROCESSINGNDILIB_API extern "C" __attribute((visibility("default")))
#		else // __cplusplus
#			define PROCESSINGNDILIB_API __attribute((visibility("default")))
#		endif // __cplusplus
#	endif // _WIN32
#endif // PROCESSINGNDILIB_STATIC
// PROCESSINGNDILIB_DEPRECATED marks entry points scheduled for removal; only MSVC/GCC-style
// attributes are emitted, other toolchains get an empty definition.
#ifndef PROCESSINGNDILIB_DEPRECATED
#	ifdef _WIN32
#		ifdef _MSC_VER
#			define PROCESSINGNDILIB_DEPRECATED __declspec(deprecated)
#		else // _MSC_VER
#			define PROCESSINGNDILIB_DEPRECATED __attribute((deprecated))
#		endif // _MSC_VER
#	else // _WIN32
#		define PROCESSINGNDILIB_DEPRECATED
#	endif // _WIN32
#endif // PROCESSINGNDILIB_DEPRECATED
// When compiled as C++, structs gain default constructors and declarations gain default
// argument values; both collapse to nothing under plain C.
#ifndef NDILIB_CPP_DEFAULT_CONSTRUCTORS
#	ifdef __cplusplus
#		define NDILIB_CPP_DEFAULT_CONSTRUCTORS 1
#	else // __cplusplus
#		define NDILIB_CPP_DEFAULT_CONSTRUCTORS 0
#	endif // __cplusplus
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
#ifndef NDILIB_CPP_DEFAULT_VALUE
#	ifdef __cplusplus
#		define NDILIB_CPP_DEFAULT_VALUE(a) =(a)
#	else // __cplusplus
#		define NDILIB_CPP_DEFAULT_VALUE(a)
#	endif // __cplusplus
#endif // NDILIB_CPP_DEFAULT_VALUE
// Data structures shared by multiple SDKs.
#include "Processing.NDI.compat.h"
#include "Processing.NDI.structs.h"
// This is not actually required, but will start and end the libraries which might get you slightly better
// performance in some cases. In general it is more "correct" to call these although it is not required.
// There is no way to call these that would have an adverse impact on anything (even calling destroy before
// you've deleted all your objects). This will return false if the CPU is not sufficiently capable to run
// NDILib currently NDILib requires SSE4.2 instructions (see documentation). You can verify a specific CPU
// against the library with a call to NDIlib_is_supported_CPU().
PROCESSINGNDILIB_API
bool NDIlib_initialize(void);
PROCESSINGNDILIB_API
void NDIlib_destroy(void);
PROCESSINGNDILIB_API
const char* NDIlib_version(void);
// Recover whether the current CPU in the system is capable of running NDILib.
PROCESSINGNDILIB_API
bool NDIlib_is_supported_CPU(void);
// The finding (discovery API).
#include "Processing.NDI.Find.h"
// The receiving video and audio API.
#include "Processing.NDI.Recv.h"
// Extensions to support PTZ control, etc...
#include "Processing.NDI.Recv.ex.h"
// The receiver advertiser API.
#include "Processing.NDI.RecvAdvertiser.h"
// The receiver listener API.
#include "Processing.NDI.RecvListener.h"
// The sending video API.
#include "Processing.NDI.Send.h"
// The routing of inputs API.
#include "Processing.NDI.Routing.h"
// Utility functions.
#include "Processing.NDI.utilities.h"
// Deprecated structures and functions.
#include "Processing.NDI.deprecated.h"
// The frame synchronizer.
#include "Processing.NDI.FrameSync.h"
// Dynamic loading used for OSS libraries.
#include "Processing.NDI.DynamicLoad.h"
// The C++ implementations.
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
#include "Processing.NDI.Lib.cplusplus.h"
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS

View File

@@ -1,211 +0,0 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// Has this receiver got PTZ control. Note that it might take a second or two after the connection for this
// value to be set. To avoid the need to poll this function, you can know when the value of this function
// might have changed when the NDILib_recv_capture* call would return NDIlib_frame_type_status_change.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_is_supported(NDIlib_recv_instance_t p_instance);
// Has this receiver got recording control. Note that it might take a second or two after the connection for
// this value to be set. To avoid the need to poll this function, you can know when the value of this
// function might have changed when the NDILib_recv_capture* call would return NDIlib_frame_type_status_change.
//
// Note on deprecation of this function:
// NDI version 4 includes the native ability to record all NDI streams using an external application that
// is provided with the SDK. This is better in many ways than the internal recording support which only
// ever supported remotely recording systems and NDI|HX. This functionality will be supported in the SDK
// for some time although we are recommending that you use the newer support which is more feature rich and
// supports the recording of all stream types, does not take CPU time to record NDI sources (it does not
// require any type of re-compression since it can just store the data in the file), it will synchronize
// all recorders on a system (and cross systems if NTP clock locking is used).
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
bool NDIlib_recv_recording_is_supported(NDIlib_recv_instance_t p_instance);
// PTZ Controls.
// Zoom to an absolute value.
// zoom_value = 0.0 (zoomed in) ... 1.0 (zoomed out)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_zoom(NDIlib_recv_instance_t p_instance, const float zoom_value);
// Zoom at a particular speed.
// zoom_speed = -1.0 (zoom outwards) ... +1.0 (zoom inwards)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_zoom_speed(NDIlib_recv_instance_t p_instance, const float zoom_speed);
// Set the pan and tilt to an absolute value.
// pan_value = -1.0 (left) ... 0.0 (centered) ... +1.0 (right)
// tilt_value = -1.0 (bottom) ... 0.0 (centered) ... +1.0 (top)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_pan_tilt(NDIlib_recv_instance_t p_instance, const float pan_value, const float tilt_value);
// Set the pan and tilt direction and speed.
// pan_speed = -1.0 (moving right) ... 0.0 (stopped) ... +1.0 (moving left)
// tilt_speed = -1.0 (down) ... 0.0 (stopped) ... +1.0 (moving up)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_pan_tilt_speed(NDIlib_recv_instance_t p_instance, const float pan_speed, const float tilt_speed);
// Store the current position, focus, etc... as a preset.
// preset_no = 0 ... 99
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_store_preset(NDIlib_recv_instance_t p_instance, const int preset_no);
// Recall a preset, including position, focus, etc...
// preset_no = 0 ... 99
// speed = 0.0(as slow as possible) ... 1.0(as fast as possible) The speed at which to move to the new preset.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_recall_preset(NDIlib_recv_instance_t p_instance, const int preset_no, const float speed);
// Put the camera in auto-focus.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_auto_focus(NDIlib_recv_instance_t p_instance);
// Focus to an absolute value.
// focus_value = 0.0 (focused to infinity) ... 1.0 (focused as close as possible)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_focus(NDIlib_recv_instance_t p_instance, const float focus_value);
// Focus at a particular speed.
// focus_speed = -1.0 (focus outwards) ... +1.0 (focus inwards)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_focus_speed(NDIlib_recv_instance_t p_instance, const float focus_speed);
// Put the camera in auto white balance mode.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_white_balance_auto(NDIlib_recv_instance_t p_instance);
// Put the camera in indoor white balance.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_white_balance_indoor(NDIlib_recv_instance_t p_instance);
// Put the camera in outdoor white balance.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_white_balance_outdoor(NDIlib_recv_instance_t p_instance);
// Use the current brightness to automatically set the current white balance.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_white_balance_oneshot(NDIlib_recv_instance_t p_instance);
// Set the manual camera white balance using the R, B values.
// red = 0.0(not red) ... 1.0(very red)
// blue = 0.0(not blue) ... 1.0(very blue)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_white_balance_manual(NDIlib_recv_instance_t p_instance, const float red, const float blue);
// Put the camera in auto-exposure mode.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_exposure_auto(NDIlib_recv_instance_t p_instance);
// Manually set the camera exposure iris.
// exposure_level = 0.0(dark) ... 1.0(light)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_exposure_manual(NDIlib_recv_instance_t p_instance, const float exposure_level);
// Manually set the camera exposure parameters.
// iris = 0.0(dark) ... 1.0(light)
// gain = 0.0(dark) ... 1.0(light)
// shutter_speed = 0.0(slow) ... 1.0(fast)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_exposure_manual_v2(
NDIlib_recv_instance_t p_instance,
const float iris, const float gain, const float shutter_speed
);
// Recording control.
// This will start recording. If the recorder was already recording then the message is ignored. A filename
// is passed in as a "hint". Since the recorder might already be recording (or might not allow complete
// flexibility over its filename), the filename might or might not be used. If the filename is empty, or not
// present, a name will be chosen automatically. If you do not wish to provide a filename hint you can simply
// pass NULL.
//
// See note above on deprecation and why this is, and how to replace this functionality.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
bool NDIlib_recv_recording_start(NDIlib_recv_instance_t p_instance, const char* p_filename_hint);
// Stop recording.
//
// See note above on deprecation and why this is, and how to replace this functionality.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
bool NDIlib_recv_recording_stop(NDIlib_recv_instance_t p_instance);
// This will control the audio level for the recording. dB is specified in decibels relative to the reference
// level of the source. Not all recording sources support controlling audio levels. For instance, a digital
// audio device would not be able to avoid clipping on sources already at the wrong level, thus might not
// support this message.
//
// See note above on deprecation and why this is, and how to replace this functionality.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
bool NDIlib_recv_recording_set_audio_level(NDIlib_recv_instance_t p_instance, const float level_dB);
// This will determine if the source is currently recording. It will return true while recording is in
// progress and false when it is not. Because there is one recorder and multiple people might be connected to
// it, there is a chance that it is recording which was initiated by someone else.
//
// See note above on deprecation and why this is, and how to replace this functionality.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
bool NDIlib_recv_recording_is_recording(NDIlib_recv_instance_t p_instance);
// Get the current filename for recording. When this is set it will return a non-NULL value which is owned by
// you and freed using NDIlib_recv_free_string. If a file was already being recorded by another client, the
// message will contain the name of that file. The filename contains a UNC path (when one is available) to
// the recorded file, and can be used to access the file on your local machine for playback. If a UNC path
// is not available, then this will represent the local filename. This will remain valid even after the file
// has stopped being recorded until the next file is started.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
const char* NDIlib_recv_recording_get_filename(NDIlib_recv_instance_t p_instance);
// This will tell you whether there was a recording error and what that string is. When this is set it will
// return a non-NULL value which is owned by you and freed using NDIlib_recv_free_string. When there is no
// error it will return NULL.
//
// See note above on deprecation and why this is, and how to replace this functionality.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
const char* NDIlib_recv_recording_get_error(NDIlib_recv_instance_t p_instance);
// Describes the duration of a recording, filled in by NDIlib_recv_recording_get_times.
typedef struct NDIlib_recv_recording_time_t
{
// The number of actual video frames recorded.
int64_t no_frames;
// The starting time and current largest time of the record, in UTC time, at 100-nanosecond unit
// intervals. This allows you to know the record time irrespective of frame rate. For instance,
// last_time - start_time would give you the recording length in 100-nanosecond intervals.
int64_t start_time, last_time;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
// C++ default constructor; the inline definition zero-initializes all members.
NDIlib_recv_recording_time_t(void);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_recv_recording_time_t;
// Get the current recording times.
//
// See note above on deprecation and why this is, and how to replace this functionality.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
bool NDIlib_recv_recording_get_times(NDIlib_recv_instance_t p_instance, NDIlib_recv_recording_time_t* p_times);

View File

@@ -1,297 +0,0 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// Structures and type definitions required by NDI finding.
// The reference to an instance of the receiver. This is an opaque handle passed to every
// NDIlib_recv_* function declared in this header.
struct NDIlib_recv_instance_type;
typedef struct NDIlib_recv_instance_type* NDIlib_recv_instance_t;
// Bandwidth modes for a receiver: each mode selects which stream types are delivered
// and at what resolution. Note that the enumerators are not declared in numeric order.
typedef enum NDIlib_recv_bandwidth_e {
NDIlib_recv_bandwidth_metadata_only = -10, // Receive metadata.
NDIlib_recv_bandwidth_audio_only = 10, // Receive metadata, audio.
NDIlib_recv_bandwidth_lowest = 0, // Receive metadata, audio, video at a lower bandwidth and resolution.
NDIlib_recv_bandwidth_highest = 100, // Receive metadata, audio, video at full resolution.
// Make sure this is a 32-bit enumeration.
NDIlib_recv_bandwidth_max = 0x7fffffff
} NDIlib_recv_bandwidth_e;
// The pixel formats that a receiver may deliver video frames in
// (see NDIlib_recv_create_v3_t::color_format).
typedef enum NDIlib_recv_color_format_e {
// When there is no alpha channel, this mode delivers BGRX.
// When there is an alpha channel, this mode delivers BGRA.
NDIlib_recv_color_format_BGRX_BGRA = 0,
// When there is no alpha channel, this mode delivers UYVY.
// When there is an alpha channel, this mode delivers BGRA.
NDIlib_recv_color_format_UYVY_BGRA = 1,
// When there is no alpha channel, this mode delivers BGRX.
// When there is an alpha channel, this mode delivers RGBA.
NDIlib_recv_color_format_RGBX_RGBA = 2,
// When there is no alpha channel, this mode delivers UYVY.
// When there is an alpha channel, this mode delivers RGBA.
NDIlib_recv_color_format_UYVY_RGBA = 3,
// This format will try to decode the video using the fastest available color format for the incoming
// video signal. This format follows the following guidelines, although different platforms might
// vary slightly based on their capabilities and specific performance profiles. In general if you want
// the best performance this mode should be used.
//
// When using this format, you should assume that allow_video_fields is true, and individual fields
// will always be delivered.
//
// For most video sources on most platforms, this will follow the following conventions.
// No alpha channel : UYVY
// Alpha channel : UYVA
NDIlib_recv_color_format_fastest = 100,
// This format will try to provide the video in the format that is the closest to native for the incoming
// codec yielding the highest quality. Specifically, this allows for receiving on 16bpp color from many
// sources.
//
// When using this format, you should assume that allow_video_fields is true, and individual fields
// will always be delivered.
//
// For most video sources on most platforms, this will follow the following conventions
// No alpha channel : P216, or UYVY
// Alpha channel : PA16 or UYVA
NDIlib_recv_color_format_best = 101,
// Legacy definitions for backwards compatibility.
NDIlib_recv_color_format_e_BGRX_BGRA = NDIlib_recv_color_format_BGRX_BGRA,
NDIlib_recv_color_format_e_UYVY_BGRA = NDIlib_recv_color_format_UYVY_BGRA,
NDIlib_recv_color_format_e_RGBX_RGBA = NDIlib_recv_color_format_RGBX_RGBA,
NDIlib_recv_color_format_e_UYVY_RGBA = NDIlib_recv_color_format_UYVY_RGBA,
#ifdef _WIN32
// For Windows we can support flipped images which is unfortunately something that Microsoft decided to
// do back in the old days.
NDIlib_recv_color_format_BGRX_BGRA_flipped = 1000 + NDIlib_recv_color_format_BGRX_BGRA,
#endif
// Make sure this is a 32-bit enumeration.
NDIlib_recv_color_format_max = 0x7fffffff
} NDIlib_recv_color_format_e;
// The creation structure that is used when you are creating a receiver.
typedef struct NDIlib_recv_create_v3_t {
// The source that you wish to connect to.
NDIlib_source_t source_to_connect_to;
// Your preference of color space. See NDIlib_recv_color_format_e above.
NDIlib_recv_color_format_e color_format;
// The bandwidth setting that you wish to use for this video source. Bandwidth is controlled by changing
// both the compression level and the resolution of the source. A good use for low bandwidth is working
// on WIFI connections.
NDIlib_recv_bandwidth_e bandwidth;
// When this flag is FALSE, all video that you receive will be progressive. For sources that provide
// fields, this is de-interlaced on the receiving side (because we cannot change what the up-stream
// source was actually rendering). This is provided as a convenience to down-stream sources that do not
// wish to understand fielded video. There is almost no performance impact of using this function.
bool allow_video_fields;
// The name of the NDI receiver to create. This is a NULL terminated UTF8 string and should be the name
// of receive channel that you have. This is in many ways symmetric with the name of senders, so this
// might be "Channel 1" on your system. If this is NULL then it will use the filename of your application
// indexed with the number of the instance number of this receiver.
const char* p_ndi_recv_name;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
// C++ convenience constructor that fills in the recommended defaults
// (empty source, UYVY/BGRA color, highest bandwidth, fielded video allowed).
NDIlib_recv_create_v3_t(
const NDIlib_source_t source_to_connect_to_ = NDIlib_source_t(),
NDIlib_recv_color_format_e color_format_ = NDIlib_recv_color_format_UYVY_BGRA,
NDIlib_recv_bandwidth_e bandwidth_ = NDIlib_recv_bandwidth_highest,
bool allow_video_fields_ = true,
const char* p_ndi_name_ = NULL
);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_recv_create_v3_t;
// This allows you to determine the current performance levels of the receiver to be able to detect whether
// frames have been dropped. Filled in by NDIlib_recv_get_performance; counts are cumulative totals.
typedef struct NDIlib_recv_performance_t {
// The number of video frames.
int64_t video_frames;
// The number of audio frames.
int64_t audio_frames;
// The number of metadata frames.
int64_t metadata_frames;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
// C++ convenience constructor; zero-initializes the counters.
NDIlib_recv_performance_t(void);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_recv_performance_t;
// The current queue depths of a receiver, one count per stream type.
// Filled in by NDIlib_recv_get_queue.
typedef struct NDIlib_recv_queue_t {
// The number of video frames.
int video_frames;
// The number of audio frames.
int audio_frames;
// The number of metadata frames.
int metadata_frames;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
// C++ convenience constructor; zero-initializes the counters.
NDIlib_recv_queue_t(void);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_recv_queue_t;
//**************************************************************************************************************************
// Create a new receiver instance. This will return NULL if it fails. If you create this with the default
// settings (NULL) then it will automatically determine a receiver name.
PROCESSINGNDILIB_API
NDIlib_recv_instance_t NDIlib_recv_create_v3(const NDIlib_recv_create_v3_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL));
// This will destroy an existing receiver instance.
PROCESSINGNDILIB_API
void NDIlib_recv_destroy(NDIlib_recv_instance_t p_instance);
// This function allows you to change the connection to another video source; you can also disconnect it by
// specifying NULL here. This allows you to preserve a receiver without needing to recreate it when
// switching between sources.
PROCESSINGNDILIB_API
void NDIlib_recv_connect(NDIlib_recv_instance_t p_instance, const NDIlib_source_t* p_src NDILIB_CPP_DEFAULT_VALUE(NULL));
// This will allow you to receive video, audio and metadata frames. Any of the buffers can be NULL, in which
// case data of that type will not be captured in this call. This call can be called simultaneously on
// separate threads, so it is entirely possible to receive audio, video, metadata all on separate threads.
// This function will return NDIlib_frame_type_none if no data is received within the specified timeout and
// NDIlib_frame_type_error if the connection is lost. Buffers captured with this must be freed with the
// appropriate free function below.
PROCESSINGNDILIB_API
NDIlib_frame_type_e NDIlib_recv_capture_v2(
NDIlib_recv_instance_t p_instance, // The library instance.
NDIlib_video_frame_v2_t* p_video_data, // The video data received (can be NULL).
NDIlib_audio_frame_v2_t* p_audio_data, // The audio data received (can be NULL).
NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL).
uint32_t timeout_in_ms // The amount of time in milliseconds to wait for data.
);
// Same as NDIlib_recv_capture_v2, but delivers audio in the v3 frame format.
// Any of the buffers can be NULL, in which case data of that type will not be captured in this call. This
// call can be called simultaneously on separate threads, so it is entirely possible to receive audio,
// video, metadata all on separate threads. This function will return NDIlib_frame_type_none if no data is
// received within the specified timeout and NDIlib_frame_type_error if the connection is lost. Buffers
// captured with this must be freed with the appropriate free function below.
PROCESSINGNDILIB_API
NDIlib_frame_type_e NDIlib_recv_capture_v3(
NDIlib_recv_instance_t p_instance, // The library instance.
NDIlib_video_frame_v2_t* p_video_data, // The video data received (can be NULL).
NDIlib_audio_frame_v3_t* p_audio_data, // The audio data received (can be NULL).
NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL).
uint32_t timeout_in_ms // The amount of time in milliseconds to wait for data.
);
// Free the buffers returned by capture for video.
PROCESSINGNDILIB_API
void NDIlib_recv_free_video_v2(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);
// Free the buffers returned by capture for audio (v2 frame format).
PROCESSINGNDILIB_API
void NDIlib_recv_free_audio_v2(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data);
// Free the buffers returned by capture for audio (v3 frame format).
PROCESSINGNDILIB_API
void NDIlib_recv_free_audio_v3(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data);
// Free the buffers returned by capture for metadata.
PROCESSINGNDILIB_API
void NDIlib_recv_free_metadata(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
// This will free a string that was allocated and returned by an NDIlib_recv function (for instance the
// NDIlib_recv_get_web_control function).
PROCESSINGNDILIB_API
void NDIlib_recv_free_string(NDIlib_recv_instance_t p_instance, const char* p_string);
// This function will send a meta message to the source that we are connected to. This returns FALSE if we
// are not currently connected to anything.
PROCESSINGNDILIB_API
bool NDIlib_recv_send_metadata(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
// Set the up-stream tally notifications. This returns FALSE if we are not currently connected to anything.
// That said, the moment that we do connect to something it will automatically be sent the tally state.
PROCESSINGNDILIB_API
bool NDIlib_recv_set_tally(NDIlib_recv_instance_t p_instance, const NDIlib_tally_t* p_tally);
// Get the current performance structures. This can be used to determine if you have been calling
// NDIlib_recv_capture fast enough, or if your processing of data is not keeping up with real-time. The total
// structure will give you the total frame counts received, the dropped structure will tell you how many
// frames have been dropped. Either of these could be NULL.
PROCESSINGNDILIB_API
void NDIlib_recv_get_performance(
NDIlib_recv_instance_t p_instance,
NDIlib_recv_performance_t* p_total, NDIlib_recv_performance_t* p_dropped
);
// This will allow you to determine the current queue depth for all of the frame sources at any time.
PROCESSINGNDILIB_API
void NDIlib_recv_get_queue(NDIlib_recv_instance_t p_instance, NDIlib_recv_queue_t* p_total);
// Connection based metadata is data that is sent automatically each time a new connection is received. You
// queue all of these up and they are sent on each connection. To reset them you need to clear them all and
// set them up again.
PROCESSINGNDILIB_API
void NDIlib_recv_clear_connection_metadata(NDIlib_recv_instance_t p_instance);
// Add a connection metadata string to the list of what is sent on each new connection. If someone is already
// connected then this string will be sent to them immediately.
PROCESSINGNDILIB_API
void NDIlib_recv_add_connection_metadata(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
// Is this receiver currently connected to a source on the other end, or has the source not yet been found or
// is it no longer online. Returns the number of connections, which will normally be 0 or 1.
PROCESSINGNDILIB_API
int NDIlib_recv_get_no_connections(NDIlib_recv_instance_t p_instance);
// Get the URL that might be used for configuration of this input. Note that it might take a second or two
// after the connection for this value to be set. This function will return NULL if there is no web control
// user interface. You should call NDIlib_recv_free_string to free the string that is returned by this
// function. The returned value will be a fully formed URL, for instance "http://10.28.1.192/configuration/".
// To avoid the need to poll this function, you can know when the value of this function might have changed
// when the NDILib_recv_capture* call would return NDIlib_frame_type_status_change.
PROCESSINGNDILIB_API
const char* NDIlib_recv_get_web_control(NDIlib_recv_instance_t p_instance);
// Retrieve the name of the current NDI source that the NDI receiver is connected to. This will return false
// if there has been no change in the source information since the last call. If p_source_name is NULL, then
// the name of the current NDI source will not be returned. If p_source_name is not NULL, then the name of
// the current source will be returned, however, the returned value can be NULL if the NDI receiver is
// currently not connected to a source. If the returned pointer is not NULL, then you should call
// NDIlib_recv_free_string to free the string that is returned by this function. A timeout value can be given
// to wait until a change occurs. If waiting is not desired, then use a timeout of 0.
PROCESSINGNDILIB_API
bool NDIlib_recv_get_source_name(NDIlib_recv_instance_t p_instance, const char** p_source_name, uint32_t timeout_in_ms NDILIB_CPP_DEFAULT_VALUE(0));

View File

@@ -1,79 +0,0 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2024 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// The type instance for a receiver advertiser. This is an opaque handle: it is created with
// NDIlib_recv_advertiser_create and must be released with NDIlib_recv_advertiser_destroy.
struct NDIlib_recv_advertiser_instance_type;
typedef struct NDIlib_recv_advertiser_instance_type* NDIlib_recv_advertiser_instance_t;
// The creation structure that is used when you are creating a receiver advertiser.
typedef struct NDIlib_recv_advertiser_create_t {
// The URL address of the NDI Discovery Server to connect to. If NULL, then the default NDI discovery
// server will be used. If there is no discovery server available, then the receiver advertiser will not
// be able to be instantiated and the create function will return NULL. The format of this field is
// expected to be the hostname or IP address, optionally followed by a colon and a port number. If the
// port number is not specified, then port 5959 will be used. For example,
// 127.0.0.1:5959
// or
// 127.0.0.1
// or
// hostname:5959
// This field can also specify multiple addresses separated by commas for redundancy support.
const char* p_url_address;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
// C++ convenience constructor; defaults to the default NDI discovery server.
NDIlib_recv_advertiser_create_t(
const char* p_url_address = NULL
);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_recv_advertiser_create_t;
// Create an instance of the receiver advertiser. This will return NULL if it fails to create the advertiser.
PROCESSINGNDILIB_API
NDIlib_recv_advertiser_instance_t NDIlib_recv_advertiser_create(const NDIlib_recv_advertiser_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL));
// Destroy an instance of the receiver advertiser.
PROCESSINGNDILIB_API
void NDIlib_recv_advertiser_destroy(NDIlib_recv_advertiser_instance_t p_instance);
// Add the receiver to the list of receivers that are being advertised. Returns false if the receiver has
// been previously registered.
PROCESSINGNDILIB_API
bool NDIlib_recv_advertiser_add_receiver(
NDIlib_recv_advertiser_instance_t p_instance,
NDIlib_recv_instance_t p_receiver,
bool allow_controlling, bool allow_monitoring,
const char* p_input_group_name NDILIB_CPP_DEFAULT_VALUE(NULL)
);
// Remove the receiver from the list of receivers that are being advertised. Returns false if the receiver
// was not previously registered.
PROCESSINGNDILIB_API
bool NDIlib_recv_advertiser_del_receiver(
NDIlib_recv_advertiser_instance_t p_instance,
NDIlib_recv_instance_t p_receiver
);

View File

@@ -1,141 +0,0 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2024 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// The type instance for a receiver listener. This is an opaque handle: it is created with
// NDIlib_recv_listener_create and must be released with NDIlib_recv_listener_destroy.
struct NDIlib_recv_listener_instance_type;
typedef struct NDIlib_recv_listener_instance_type* NDIlib_recv_listener_instance_t;
// The creation structure that is used when you are creating a receiver listener.
typedef struct NDIlib_recv_listener_create_t {
// The URL address of the NDI Discovery Server to connect to. If NULL, then the default NDI discovery
// server will be used. If there is no discovery server available, then the receiver listener will not
// be able to be instantiated and the create function will return NULL. The format of this field is
// expected to be the hostname or IP address, optionally followed by a colon and a port number. If the
// port number is not specified, then port 5959 will be used. For example,
// 127.0.0.1:5959
// or
// 127.0.0.1
// or
// hostname:5959
// If this field is a comma-separated list, then only the first address will be used.
const char* p_url_address;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
// C++ convenience constructor; defaults to the default NDI discovery server.
NDIlib_recv_listener_create_t(
const char* p_url_address = NULL
);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_recv_listener_create_t;
// Create an instance of the receiver listener. This will return NULL if it fails to create the listener.
PROCESSINGNDILIB_API
NDIlib_recv_listener_instance_t NDIlib_recv_listener_create(const NDIlib_recv_listener_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL));
// Destroy an instance of the receiver listener.
PROCESSINGNDILIB_API
void NDIlib_recv_listener_destroy(NDIlib_recv_listener_instance_t p_instance);
// Returns true if the receiver listener is actively connected to the configured NDI Discovery Server.
PROCESSINGNDILIB_API
bool NDIlib_recv_listener_is_connected(NDIlib_recv_listener_instance_t p_instance);
// Retrieve the URL address of the NDI Discovery Server that the receiver listener is connected to. This can
// return NULL if the instance pointer is invalid.
PROCESSINGNDILIB_API
const char* NDIlib_recv_listener_get_server_url(NDIlib_recv_listener_instance_t p_instance);
// The types of streams that a receiver can receive from the source it's connected to.
// Used as the element type of NDIlib_receiver_t::p_streams.
typedef enum NDIlib_receiver_type_e {
NDIlib_receiver_type_none = 0, // Also used as the terminator of the p_streams array.
NDIlib_receiver_type_metadata = 1,
NDIlib_receiver_type_video = 2,
NDIlib_receiver_type_audio = 3,
// Make sure this is a 32-bit enumeration.
NDIlib_receiver_type_max = 0x7fffffff
} NDIlib_receiver_type_e;
// The types of commands that a receiver can process.
// Used as the element type of NDIlib_receiver_t::p_commands.
typedef enum NDIlib_receiver_command_e {
NDIlib_receiver_command_none = 0, // Also used as the terminator of the p_commands array.
// A receiver can be told to connect to a specific source.
NDIlib_receiver_command_connect = 1,
// Make sure this is a 32-bit enumeration.
NDIlib_receiver_command_max = 0x7fffffff
} NDIlib_receiver_command_e;
// Describes a receiver that has been discovered via the NDI Discovery Server.
// Returned in the array from NDIlib_recv_listener_get_receivers; the memory is owned by the
// listener instance and is only valid until the next call or until the listener is destroyed.
typedef struct NDIlib_receiver_t {
// The unique identifier for the receiver on the network.
const char* p_uuid;
// The human-readable name of the receiver.
const char* p_name;
// The unique identifier for the input group that the receiver belongs to.
const char* p_input_uuid;
// The human-readable name of the input group that the receiver belongs to.
const char* p_input_name;
// The known IP address of the receiver.
const char* p_address;
// An array of streams that the receiver is set to receive. The last entry in this list will be
// NDIlib_receiver_type_none.
NDIlib_receiver_type_e* p_streams;
// How many elements are in the p_streams array, excluding the NDIlib_receiver_type_none entry.
uint32_t num_streams;
// An array of commands that the receiver can process. The last entry in this list will be
// NDIlib_receiver_command_none.
NDIlib_receiver_command_e* p_commands;
// How many elements are in the p_commands array, excluding the NDIlib_receiver_command_none entry.
uint32_t num_commands;
// Are we currently subscribed for receive events?
bool events_subscribed;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
// C++ convenience constructor; zero/NULL-initializes the fields.
NDIlib_receiver_t(void);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_receiver_t;
// Retrieves the current list of advertised receivers. The memory for the returned structure is only valid
// until the next call or when destroy is called. For a given NDIlib_recv_listener_instance_t, do not call
// NDIlib_recv_listener_get_receivers asynchronously.
PROCESSINGNDILIB_API
const NDIlib_receiver_t* NDIlib_recv_listener_get_receivers(NDIlib_recv_listener_instance_t p_instance, uint32_t* p_num_receivers);
// This will allow you to wait until the number of online receivers has changed. Returns false if the
// timeout expired without a change occurring.
PROCESSINGNDILIB_API
bool NDIlib_recv_listener_wait_for_receivers(NDIlib_recv_listener_instance_t p_instance, uint32_t timeout_in_ms);

View File

@@ -1,75 +0,0 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// Structures and type definitions required by NDI routing.
// The reference to an instance of the router. This is an opaque handle: it is created with
// NDIlib_routing_create and must be released with NDIlib_routing_destroy.
struct NDIlib_routing_instance_type;
typedef struct NDIlib_routing_instance_type* NDIlib_routing_instance_t;
// The creation structure that is used when you are creating a router.
typedef struct NDIlib_routing_create_t
{
// The name of the NDI source to create. This is a NULL terminated UTF8 string.
const char* p_ndi_name;
// What groups should this source be part of.
const char* p_groups;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
// C++ convenience constructor; NULL name and groups select the library defaults.
NDIlib_routing_create_t(const char* p_ndi_name_ = NULL, const char* p_groups_ = NULL);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_routing_create_t;
// Create an NDI routing source.
PROCESSINGNDILIB_API
NDIlib_routing_instance_t NDIlib_routing_create(const NDIlib_routing_create_t* p_create_settings);
// Destroy an NDI routing source.
PROCESSINGNDILIB_API
void NDIlib_routing_destroy(NDIlib_routing_instance_t p_instance);
// Change the routing of this source to another destination.
PROCESSINGNDILIB_API
bool NDIlib_routing_change(NDIlib_routing_instance_t p_instance, const NDIlib_source_t* p_source);
// Clear the routing of this source so that it no longer routes any destination.
PROCESSINGNDILIB_API
bool NDIlib_routing_clear(NDIlib_routing_instance_t p_instance);
// Get the current number of receivers connected to this source. This can be used to avoid even rendering
// when nothing is connected to the video source, which can significantly improve the efficiency if you want
// to make a lot of sources available on the network. If you specify a timeout that is not 0 then it will
// wait until there are connections for this amount of time.
PROCESSINGNDILIB_API
int NDIlib_routing_get_no_connections(NDIlib_routing_instance_t p_instance, uint32_t timeout_in_ms);
// Retrieve the source information for the given router instance. This pointer is valid until
// NDIlib_routing_destroy is called.
PROCESSINGNDILIB_API
const NDIlib_source_t* NDIlib_routing_get_source_name(NDIlib_routing_instance_t p_instance);

Some files were not shown because too many files have changed in this diff Show More