First commit

2025-09-04 16:16:17 +03:00
parent 25a1a8d36a
commit ae934d9718
506 changed files with 15576 additions and 2 deletions

View File

@@ -0,0 +1,81 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Actors/NDIBroadcastActor.h>
ANDIBroadcastActor::ANDIBroadcastActor(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer)
{
this->ViewportCaptureComponent = ObjectInitializer.CreateDefaultSubobject<UNDIViewportCaptureComponent>(this, TEXT("ViewportCaptureComponent"));
this->ViewportCaptureComponent->AttachToComponent(this->RootComponent, FAttachmentTransformRules::KeepRelativeTransform);
this->PTZController = ObjectInitializer.CreateDefaultSubobject<UPTZController>(this, TEXT("PTZController"));
}
void ANDIBroadcastActor::BeginPlay()
{
Super::BeginPlay();
// validate the viewport capture component
if (IsValid(this->ViewportCaptureComponent))
{
// Initialize the Capture Component with the media source
ViewportCaptureComponent->Initialize(this->NDIMediaSource);
}
if (IsValid(this->PTZController))
{
// Initialize the PTZ Controller with the media source
PTZController->Initialize(this->NDIMediaSource);
}
if (IsValid(this->NDIMediaSource))
{
this->NDIMediaSource->Initialize(this->SubmixCapture);
}
}
FPTZState ANDIBroadcastActor::GetPTZStateFromUE() const
{
FPTZState PTZState;
PTZState.CameraTransform = GetActorTransform();
FTransform Transform = FTransform::Identity;
if (IsValid(this->ViewportCaptureComponent))
Transform = this->ViewportCaptureComponent->GetRelativeTransform();
FQuat Rotation = Transform.GetRotation();
FVector Euler = Rotation.Euler();
PTZState.Pan = FMath::DegreesToRadians(Euler[2]);
PTZState.Tilt = FMath::DegreesToRadians(Euler[1]);
if (IsValid(this->ViewportCaptureComponent))
{
PTZState.FieldOfView = this->ViewportCaptureComponent->FOVAngle;
PTZState.FocusDistance = 1.f - 1.f / (this->ViewportCaptureComponent->PostProcessSettings.DepthOfFieldFocalDistance / 100.f + 1.f);
PTZState.bAutoFocus = (this->ViewportCaptureComponent->PostProcessSettings.bOverride_DepthOfFieldFocalDistance == true) ? false : true;
}
return PTZState;
}
void ANDIBroadcastActor::SetPTZStateToUE(const FPTZState& PTZState)
{
SetActorTransform(PTZState.CameraTransform);
FVector Euler(0, FMath::RadiansToDegrees(PTZState.Tilt), FMath::RadiansToDegrees(PTZState.Pan));
FQuat NewRotation = FQuat::MakeFromEuler(Euler);
if (IsValid(this->ViewportCaptureComponent))
{
this->ViewportCaptureComponent->SetRelativeLocationAndRotation(this->ViewportCaptureComponent->GetRelativeLocation(), NewRotation);
this->ViewportCaptureComponent->FOVAngle = PTZState.FieldOfView;
this->ViewportCaptureComponent->PostProcessSettings.DepthOfFieldFocalDistance = (1.f / FMath::Max(1 - PTZState.FocusDistance, 0.01f) - 1.f) * 100.f;
this->ViewportCaptureComponent->PostProcessSettings.DepthOfFieldFocalDistance = FMath::Max(this->ViewportCaptureComponent->PostProcessSettings.DepthOfFieldFocalDistance, SMALL_NUMBER);
this->ViewportCaptureComponent->PostProcessSettings.bOverride_DepthOfFieldFocalDistance = (PTZState.bAutoFocus == true) ? false : true;
}
}
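A note on the focus mapping above: GetPTZStateFromUE normalizes the depth-of-field focal distance (in cm) into a 0..1 focus value with f = 1 - 1/(d/100 + 1), and SetPTZStateToUE inverts it, clamping so f = 1 cannot divide by zero. A minimal plain-C++ sketch of that round trip, using hypothetical helper names:

#include <algorithm>
#include <cassert>
#include <cmath>

// distance in cm -> normalized focus in [0, 1); mirrors GetPTZStateFromUE
static float FocusFromDistance(float DistanceCm)
{
    return 1.f - 1.f / (DistanceCm / 100.f + 1.f);
}

// normalized focus -> distance in cm; mirrors SetPTZStateToUE (clamped away from f == 1)
static float DistanceFromFocus(float Focus)
{
    return (1.f / std::max(1.f - Focus, 0.01f) - 1.f) * 100.f;
}

int main()
{
    // a 10 m focal distance (1000 cm) maps to ~0.909 and round-trips back
    const float Focus = FocusFromDistance(1000.f);
    assert(Focus > 0.90f && Focus < 0.92f);
    assert(std::fabs(DistanceFromFocus(Focus) - 1000.f) < 0.1f);
    return 0;
}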

View File

@@ -0,0 +1,369 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Actors/NDIReceiveActor.h>
#include <UObject/Package.h>
#include <AudioDevice.h>
#include <ActiveSound.h>
#include <Async/Async.h>
#include <Engine/StaticMesh.h>
#include <Kismet/GameplayStatics.h>
#include <Materials/MaterialInstanceDynamic.h>
#include <Objects/Media/NDIMediaTexture2D.h>
#include <UObject/ConstructorHelpers.h>
ANDIReceiveActor::ANDIReceiveActor(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer)
{
// Get the Engine's 'Plane' static mesh
static ConstructorHelpers::FObjectFinder<UStaticMesh> MeshObject(
TEXT("StaticMesh'/Engine/BasicShapes/Plane.Plane'"));
static ConstructorHelpers::FObjectFinder<UMaterialInterface> MaterialObject(
TEXT("Material'/NDIIOPlugin/Materials/NDI_Unlit_SourceMaterial.NDI_Unlit_SourceMaterial'"));
// Ensure that the object is valid
if (MeshObject.Object)
{
// Create the static mesh component visual
this->VideoMeshComponent =
ObjectInitializer.CreateDefaultSubobject<UStaticMeshComponent>(this, TEXT("VideoMeshComponent"), true);
// setup the attachment and modify the position, rotation, and mesh properties
this->VideoMeshComponent->SetupAttachment(RootComponent);
this->VideoMeshComponent->SetStaticMesh(MeshObject.Object);
this->VideoMeshComponent->SetRelativeRotation(FQuat::MakeFromEuler(FVector(90.0f, 0.0f, 90.0f)));
this->VideoMeshComponent->SetRelativeScale3D(FVector(FrameWidth / 100.0f, FrameHeight / 100.0f, 1.0f));
this->VideoMeshComponent->SetCollisionResponseToAllChannels(ECR_Ignore);
this->VideoMeshComponent->SetCollisionEnabled(ECollisionEnabled::NoCollision);
this->VideoMeshComponent->SetCollisionObjectType(ECC_WorldDynamic);
// This object is mainly used for simple tests and things that don't require
// additional material shading support, so store an unlit source material to display
this->VideoMaterial = MaterialObject.Object;
// If the material is valid
if (this->VideoMaterial)
{
// Set the Mesh Material to the Video Material
this->VideoMeshComponent->SetMaterial(0, this->VideoMaterial);
}
}
this->AudioComponent = ObjectInitializer.CreateDefaultSubobject<UAudioComponent>(this, TEXT("AudioComponent"), true);
this->AudioComponent->SetupAttachment(RootComponent);
this->AudioComponent->SetRelativeLocationAndRotation(FVector::ZeroVector, FRotator::ZeroRotator);
this->AudioComponent->SetRelativeScale3D(FVector::OneVector);
this->bAllowTickBeforeBeginPlay = false;
this->PrimaryActorTick.bCanEverTick = true;
}
void ANDIReceiveActor::BeginPlay()
{
// call the base implementation for 'BeginPlay'
Super::BeginPlay();
// We need to validate that we have a media source, so we can set the texture in the material instance
if (IsValid(this->NDIMediaSource))
{
this->NDIMediaSource->Initialize(UNDIMediaReceiver::EUsage::Standalone);
// Validate the Video Material Instance so we can set the texture used in the NDI Media source
if (IsValid(this->VideoMaterial))
{
// create and set the instance material from the MaterialObject
VideoMaterialInstance =
this->VideoMeshComponent->CreateAndSetMaterialInstanceDynamicFromMaterial(0, this->VideoMaterial);
// Ensure we have a valid material instance
if (IsValid(VideoMaterialInstance))
{
// Enable/disable the color and/or alpha channels
this->VideoMaterialInstance->SetScalarParameterValue("Enable Video Texture", bEnableColor ? 1.0f : 0.0f);
this->VideoMaterialInstance->SetScalarParameterValue("Enable Video Alpha", bEnableAlpha ? 1.0f : 0.0f);
this->NDIMediaSource->UpdateMaterialTexture(VideoMaterialInstance, "Video Texture");
}
}
// Define the basic parameters for constructing a temporary audio wave object
FString AudioSource = FString::Printf(TEXT("AudioSource_%s"), *GetFName().ToString().Right(1));
FName AudioWaveName = FName(*AudioSource);
EObjectFlags Flags = RF_Public | RF_Standalone | RF_Transient | RF_MarkAsNative;
// Construct a temporary audio sound wave to be played by this component
this->AudioSoundWave = NewObject<UNDIMediaSoundWave>(GetTransientPackage(), UNDIMediaSoundWave::StaticClass(),
AudioWaveName, Flags);
// Ensure the validity of the temporary sound wave object
if (IsValid(this->AudioSoundWave))
{
// Set the number of channels
bStoppedForChannelsMode = false;
ApplyChannelsMode();
// Set the sound of the Audio Component and Ensure playback
this->AudioComponent->SetSound(this->AudioSoundWave);
// Ensure we register the audio wave object with the media source.
this->NDIMediaSource->RegisterAudioWave(AudioSoundWave);
}
if (this->NDIMediaSource->GetCurrentConnectionInformation().IsValid())
{
if (IsValid(AudioComponent))
{
// we should play the audio, if we want audio playback
if (bEnableAudioPlayback)
{
this->AudioComponent->Play(0.0f);
}
// otherwise just stop
else
{
this->AudioComponent->Stop();
this->bStoppedForChannelsMode = false;
}
}
}
// Add a lambda to the OnReceiverConnected Event
else
this->NDIMediaSource->OnNDIReceiverConnectedEvent.AddWeakLambda(this, [&](UNDIMediaReceiver*) {
// Ensure that the audio component is valid
if (IsValid(AudioComponent))
{
// we should play the audio, if we want audio playback
if (bEnableAudioPlayback)
{
this->AudioComponent->Play(0.0f);
}
// otherwise just stop
else
{
this->AudioComponent->Stop();
this->bStoppedForChannelsMode = false;
}
}
});
}
}
void ANDIReceiveActor::EndPlay(const EEndPlayReason::Type EndPlayReason)
{
Super::EndPlay(EndPlayReason);
this->bStoppedForChannelsMode = false;
// Ensure we have a valid material instance
if (EndPlayReason == EEndPlayReason::EndPlayInEditor && IsValid(VideoMaterialInstance))
{
// Ensure that the video texture is disabled
this->VideoMaterialInstance->SetScalarParameterValue("Enable Video Texture", 0.0f);
this->VideoMaterialInstance->SetScalarParameterValue("Enable Video Alpha", 0.0f);
}
}
void ANDIReceiveActor::Tick(float DeltaTime)
{
Super::Tick(DeltaTime);
ApplyChannelsMode();
}
void ANDIReceiveActor::ApplyChannelsMode()
{
if (IsValid(this->AudioComponent) && IsValid(this->NDIMediaSource) && IsValid(this->AudioSoundWave))
{
int32 NewNumChannels = 0;
switch(this->AudioPlaybackChannels)
{
case ENDIAudioChannels::Mono:
NewNumChannels = 1;
break;
case ENDIAudioChannels::Stereo:
NewNumChannels = 2;
break;
case ENDIAudioChannels::Source:
NewNumChannels = this->NDIMediaSource->GetAudioChannels();
break;
}
if ((NewNumChannels != 0) && (NewNumChannels != this->AudioSoundWave->NumChannels))
{
bool isPlaying = this->AudioComponent->IsPlaying();
if (isPlaying)
{
this->AudioComponent->Stop();
bStoppedForChannelsMode = true;
}
else
{
this->AudioSoundWave->NumChannels = NewNumChannels;
if (bEnableAudioPlayback && bStoppedForChannelsMode)
{
this->AudioComponent->Play(0.0f);
}
bStoppedForChannelsMode = false;
}
}
}
}
/**
Attempts to set the desired frame size in cm, represented in the virtual scene
*/
void ANDIReceiveActor::SetFrameSize(FVector2D InFrameSize)
{
// clamp the values to the lowest we'll allow
const float frame_height = FMath::Max(InFrameSize.Y, 0.00001f);
const float frame_width = FMath::Max(InFrameSize.X, 0.00001f);
FrameWidth = frame_width;
FrameHeight = frame_height;
// validate the static mesh component
if (IsValid(this->VideoMeshComponent))
{
// change the scale of the video
this->VideoMeshComponent->SetRelativeScale3D(FVector(FrameWidth / 100.0f, FrameHeight / 100.0f, 1.0f));
}
}
void ANDIReceiveActor::SetFrameHeight(const float& InFrameHeight)
{
// Clamp the Frame Height to a minimal value
FrameHeight = FMath::Max(InFrameHeight, 0.00001f);
// Call the function to set the frame size with the newly clamped value
SetFrameSize(FVector2D(FrameWidth, FrameHeight));
}
void ANDIReceiveActor::SetFrameWidth(const float& InFrameWidth)
{
// Clamp the Frame Width to a minimal value
FrameWidth = FMath::Max(InFrameWidth, 0.00001f);
// Call the function to set the frame size with the newly clamped value
SetFrameSize(FVector2D(FrameWidth, FrameHeight));
}
void ANDIReceiveActor::UpdateAudioPlayback(const bool& Enabled)
{
// Ensure the audio component is valid
if (IsValid(this->AudioComponent))
{
// Start or stop playback depending on the requested state
if (Enabled)
{
// Start the playback
this->AudioComponent->Play(0.0f);
}
// otherwise just stop playback (even if it's not playing)
else
this->AudioComponent->Stop();
}
}
void ANDIReceiveActor::UpdateAudioPlaybackChannels(const ENDIAudioChannels& Channels)
{}
void ANDIReceiveActor::EnableColor(const bool& Enabled)
{
bEnableColor = Enabled;
if (IsValid(VideoMaterialInstance))
{
// Enable/disable the color channels
this->VideoMaterialInstance->SetScalarParameterValue("Enable Video Texture", bEnableColor ? 1.0f : 0.0f);
}
}
void ANDIReceiveActor::EnableAlpha(const bool& Enabled)
{
bEnableAlpha = Enabled;
if (IsValid(VideoMaterialInstance))
{
// Enable/disable the alpha channel
this->VideoMaterialInstance->SetScalarParameterValue("Enable Video Alpha", bEnableAlpha ? 1.0f : 0.0f);
}
}
/**
Returns the current frame size of the 'VideoMeshComponent' for this object
*/
const FVector2D ANDIReceiveActor::GetFrameSize() const
{
return FVector2D(FrameWidth, FrameHeight);
}
#if WITH_EDITORONLY_DATA
void ANDIReceiveActor::PreEditChange(FProperty* InProperty)
{
// call the base class 'PreEditChange'
Super::PreEditChange(InProperty);
}
void ANDIReceiveActor::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent)
{
// get the name of the property which changed
FName PropertyName =
(PropertyChangedEvent.Property != nullptr) ? PropertyChangedEvent.Property->GetFName() : NAME_None;
// compare against the 'FrameHeight' property
if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, FrameHeight))
{
// resize the frame
SetFrameSize(FVector2D(FrameWidth, FrameHeight));
}
// compare against the 'FrameWidth' property
else if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, FrameWidth))
{
// resize the frame
SetFrameSize(FVector2D(FrameWidth, FrameHeight));
}
// compare against the 'bEnableAudioPlayback' property
else if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, bEnableAudioPlayback))
{
// start or stop the audio playback depending on state
UpdateAudioPlayback(bEnableAudioPlayback);
}
// compare against the 'AudioPlaybackChannels' property
else if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, AudioPlaybackChannels))
{
// start or stop the audio playback depending on state
UpdateAudioPlaybackChannels(AudioPlaybackChannels);
}
// compare against the 'bEnableColor' property
else if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, bEnableColor))
{
// enable or disable color channels (if it exists)
EnableColor(bEnableColor);
}
// compare against the 'bEnableAlpha' property
else if (PropertyName == GET_MEMBER_NAME_CHECKED(ANDIReceiveActor, bEnableAlpha))
{
// enable or disable alpha channel (if it exists)
EnableAlpha(bEnableAlpha);
}
// call the base class 'PostEditChangeProperty'
Super::PostEditChangeProperty(PropertyChangedEvent);
}
#endif
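ApplyChannelsMode above changes the sound wave's channel count in two phases: if audio is playing when the desired count differs, it only stops playback and raises bStoppedForChannelsMode; on a later tick, with playback stopped, it applies the new NumChannels and resumes only if it was the one that stopped playback. A minimal sketch of that pattern with hypothetical stand-in types (not the Unreal classes):

#include <cassert>

// Hypothetical stand-in for the audio component / sound wave pair
struct FFakeAudioStream
{
    bool bPlaying = false;
    int NumChannels = 2;
};

// Two-phase channel switch: never change the format while the stream is playing
void ApplyChannels(FFakeAudioStream& Audio, int DesiredChannels, bool& bStoppedForSwitch)
{
    if (DesiredChannels == 0 || DesiredChannels == Audio.NumChannels)
        return; // nothing to change
    if (Audio.bPlaying)
    {
        Audio.bPlaying = false;   // phase 1: stop playback on this tick
        bStoppedForSwitch = true; // remember that the switch stopped it
    }
    else
    {
        Audio.NumChannels = DesiredChannels; // phase 2: safe to change the channel count
        if (bStoppedForSwitch)
            Audio.bPlaying = true;           // resume only if phase 1 stopped it
        bStoppedForSwitch = false;
    }
}

int main()
{
    FFakeAudioStream Audio{true, 2};
    bool bStopped = false;
    ApplyChannels(Audio, 6, bStopped); // tick N: stops playback
    assert(!Audio.bPlaying && bStopped);
    ApplyChannels(Audio, 6, bStopped); // tick N+1: applies 6 channels and resumes
    assert(Audio.bPlaying && Audio.NumChannels == 6);
    return 0;
}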

View File

@@ -0,0 +1,101 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Assets/NDITimecodeProvider.h>
UNDITimecodeProvider::UNDITimecodeProvider(const FObjectInitializer& ObjectInitializer)
: Super(ObjectInitializer)
{}
bool UNDITimecodeProvider::FetchTimecode(FQualifiedFrameTime& OutFrameTime)
{
FScopeLock Lock(&this->StateSyncContext);
if (!IsValid(this->NDIMediaSource) ||
(GetSynchronizationState() != ETimecodeProviderSynchronizationState::Synchronized))
{
return false;
}
OutFrameTime = this->MostRecentFrameTime;
return true;
}
ETimecodeProviderSynchronizationState UNDITimecodeProvider::GetSynchronizationState() const
{
FScopeLock Lock(&this->StateSyncContext);
if (!IsValid(this->NDIMediaSource))
return ETimecodeProviderSynchronizationState::Closed;
return this->State;
}
bool UNDITimecodeProvider::Initialize(UEngine* InEngine)
{
this->State = ETimecodeProviderSynchronizationState::Closed;
if (!IsValid(this->NDIMediaSource))
{
this->State = ETimecodeProviderSynchronizationState::Error;
return false;
}
this->NDIMediaSource->Initialize(UNDIMediaReceiver::EUsage::Standalone);
this->VideoCaptureEventHandle = this->NDIMediaSource->OnNDIReceiverVideoCaptureEvent.AddLambda([this](UNDIMediaReceiver* Receiver, const NDIlib_video_frame_v2_t& VideoFrame)
{
const FFrameRate Rate = Receiver->GetCurrentFrameRate();
const FTimecode Timecode = Receiver->GetCurrentTimecode();
FScopeLock Lock(&this->StateSyncContext);
this->State = ETimecodeProviderSynchronizationState::Synchronized;
this->MostRecentFrameTime = FQualifiedFrameTime(Timecode, Rate);
});
this->ConnectedEventHandle = this->NDIMediaSource->OnNDIReceiverConnectedEvent.AddLambda([this](UNDIMediaReceiver* Receiver)
{
FScopeLock Lock(&this->StateSyncContext);
this->State = ETimecodeProviderSynchronizationState::Synchronizing;
});
this->DisconnectedEventHandle = this->NDIMediaSource->OnNDIReceiverDisconnectedEvent.AddLambda([this](UNDIMediaReceiver* Receiver)
{
FScopeLock Lock(&this->StateSyncContext);
this->State = ETimecodeProviderSynchronizationState::Closed;
});
return true;
}
void UNDITimecodeProvider::Shutdown(UEngine* InEngine)
{
ReleaseResources();
}
void UNDITimecodeProvider::BeginDestroy()
{
ReleaseResources();
Super::BeginDestroy();
}
void UNDITimecodeProvider::ReleaseResources()
{
if(IsValid(this->NDIMediaSource))
{
this->NDIMediaSource->OnNDIReceiverVideoCaptureEvent.Remove(this->VideoCaptureEventHandle);
this->NDIMediaSource->OnNDIReceiverConnectedEvent.Remove(this->ConnectedEventHandle);
this->NDIMediaSource->OnNDIReceiverDisconnectedEvent.Remove(this->DisconnectedEventHandle);
}
this->VideoCaptureEventHandle.Reset();
this->ConnectedEventHandle.Reset();
this->DisconnectedEventHandle.Reset();
this->State = ETimecodeProviderSynchronizationState::Closed;
}
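A hedged usage sketch for the provider above: FetchTimecode only succeeds once the receiver has delivered at least one video frame (state Synchronized), so callers should treat it as optional. Provider is an assumed, already-initialized UNDITimecodeProvider pointer and the log category is illustrative only.

FQualifiedFrameTime FrameTime;
if (Provider->FetchTimecode(FrameTime))
{
    // FQualifiedFrameTime pairs a frame time with the frame rate reported by the NDI source
    UE_LOG(LogTemp, Log, TEXT("NDI timecode: %.3f s at %d/%d fps"),
        FrameTime.AsSeconds(), FrameTime.Rate.Numerator, FrameTime.Rate.Denominator);
}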

View File

@@ -0,0 +1,153 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Components/NDIBroadcastComponent.h>
UNDIBroadcastComponent::UNDIBroadcastComponent(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer)
{}
/**
Initialize this component with the media source required for sending NDI audio, video, and metadata.
Returns false if the MediaSource has already been set. This is usually the case when this component is
initialized in Blueprints.
*/
bool UNDIBroadcastComponent::Initialize(UNDIMediaSender* InMediaSource)
{
// is the media source already set?
if (this->NDIMediaSource == nullptr && InMediaSource != nullptr)
{
// we passed validation, so set the media source
this->NDIMediaSource = InMediaSource;
}
// did we pass validation
return InMediaSource != nullptr && InMediaSource == NDIMediaSource;
}
/**
Attempts to start broadcasting audio, video, and metadata via the 'NDIMediaSource' associated with this object
@param ErrorMessage The error message received when the media source is unable to start broadcasting
@result Indicates whether this object successfully started broadcasting
*/
bool UNDIBroadcastComponent::StartBroadcasting(FString& ErrorMessage)
{
// validate the Media Source object
if (IsValid(NDIMediaSource))
{
// call the media source implementation of the function
NDIMediaSource->Initialize(nullptr);
// the underlying functionality always returns 'true'
return true;
}
// We have no media source to broadcast
ErrorMessage = TEXT("No Media Source present to broadcast");
// looks like we don't have a media source to broadcast
return false;
}
/**
Changes the name of the sender object as seen on the network for remote connections
@param InSourceName The new name of the source to be identified as on the network
*/
void UNDIBroadcastComponent::ChangeSourceName(const FString& InSourceName)
{
// validate the Media Source object
if (IsValid(NDIMediaSource))
{
// call the media source implementation of the function
NDIMediaSource->ChangeSourceName(InSourceName);
}
}
/**
Attempts to change the Broadcast information associated with this media object
@param InConfiguration The new configuration to broadcast
*/
void UNDIBroadcastComponent::ChangeBroadcastConfiguration(const FNDIBroadcastConfiguration& InConfiguration)
{
// validate the Media Source object
if (IsValid(NDIMediaSource))
{
// call the media source implementation of the function
NDIMediaSource->ChangeBroadcastConfiguration(InConfiguration);
}
}
/**
Attempts to change the RenderTarget used in sending video frames over NDI
@param BroadcastTexture The texture to use as video, while broadcasting over NDI
*/
void UNDIBroadcastComponent::ChangeBroadcastTexture(UTextureRenderTarget2D* BroadcastTexture)
{
// validate the Media Source object
if (IsValid(NDIMediaSource))
{
// call the media source implementation of the function
NDIMediaSource->ChangeVideoTexture(BroadcastTexture);
}
}
/**
Determines the current tally information. If you specify a timeout then it will wait until it has
changed, otherwise it will simply poll it and return the current tally immediately
@param IsOnPreview - A state indicating whether this source is on preview of a receiver
@param IsOnProgram - A state indicating whether this source is on program of a receiver
*/
void UNDIBroadcastComponent::GetTallyInformation(bool& IsOnPreview, bool& IsOnProgram)
{
// Initialize the properties
IsOnPreview = false;
IsOnProgram = false;
// validate the Media Source object
if (IsValid(NDIMediaSource))
{
// call the media source implementation of the function
NDIMediaSource->GetTallyInformation(IsOnPreview, IsOnProgram, 0);
}
}
/**
Gets the current number of receivers connected to this source. This can be used to avoid rendering
when nothing is connected to the video source, which can significantly improve efficiency when
you want to make a lot of sources available on the network
@param Result The total number of connected receivers attached to the broadcast of this object
*/
void UNDIBroadcastComponent::GetNumberOfConnections(int32& Result)
{
// Initialize the property
Result = 0;
// validate the Media Source object
if (IsValid(NDIMediaSource))
{
// call the media source implementation of the function
NDIMediaSource->GetNumberOfConnections(Result);
}
}
/**
Attempts to immediately stop sending frames over NDI to any connected receivers
*/
void UNDIBroadcastComponent::StopBroadcasting()
{
// validate the Media Source object
if (IsValid(NDIMediaSource))
{
// call the media source implementation of the function
NDIMediaSource->Shutdown();
}
}
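A hedged usage sketch for the component above, with hypothetical actor and property names (AMyBroadcastActor, BroadcastComponent, and MediaSender are assumed to be declared elsewhere, the latter two as UPROPERTY members pointing at the component and a UNDIMediaSender asset):

void AMyBroadcastActor::BeginPlay()
{
    Super::BeginPlay();
    if (IsValid(BroadcastComponent) && IsValid(MediaSender))
    {
        // false if a different media source was already set (e.g. assigned in Blueprints)
        BroadcastComponent->Initialize(MediaSender);
        FString Error;
        if (!BroadcastComponent->StartBroadcasting(Error))
        {
            UE_LOG(LogTemp, Warning, TEXT("NDI broadcast failed: %s"), *Error);
        }
    }
}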

View File

@@ -0,0 +1,104 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Components/NDIFinderComponent.h>
#include <Services/NDIFinderService.h>
UNDIFinderComponent::UNDIFinderComponent(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer) {}
void UNDIFinderComponent::BeginPlay()
{
Super::BeginPlay();
// Provide some sense of thread-safety
FScopeLock Lock(&CollectionSyncContext);
// Update the NetworkSourceCollection with the sources that the service has already found
FNDIFinderService::UpdateSourceCollection(NetworkSourceCollection);
// Ensure that we are subscribed to the collection changed notification so we can handle it locally
FNDIFinderService::EventOnNDISourceCollectionChanged.AddUObject(
this, &UNDIFinderComponent::OnNetworkSourceCollectionChangedEvent);
}
void UNDIFinderComponent::EndPlay(const EEndPlayReason::Type EndPlayReason)
{
Super::EndPlay(EndPlayReason);
// Provide some sense of thread-safety
FScopeLock Lock(&CollectionSyncContext);
// Empty the source collection
this->NetworkSourceCollection.Empty(0);
// Ensure that we are no longer subscribed to collection change notifications
FNDIFinderService::EventOnNDISourceCollectionChanged.RemoveAll(this);
}
/**
An Event handler for when the NDI Finder Service notifies listeners that changes have been
detected in the network source collection
*/
void UNDIFinderComponent::OnNetworkSourceCollectionChangedEvent()
{
// Since we don't poll the NDIFinderService for network sources, we subscribe to the change notification.
// Now we need to update the Network Source Collection, but we need to do it in a thread-safe way.
FScopeLock Lock(&CollectionSyncContext);
// Check to determine if something actually changed within the collection. We don't want to trigger
// notifications unnecessarily.
if (FNDIFinderService::UpdateSourceCollection(NetworkSourceCollection))
{
// Trigger the blueprint handling of the situation.
this->OnNetworkSourcesChangedEvent();
// If any listeners have subscribed, broadcast the collection changes
if (this->OnNetworkSourcesChanged.IsBound())
this->OnNetworkSourcesChanged.Broadcast(this);
}
}
/**
Attempts to find a network source by the supplied name.
@param ConnectionInformation Receives the connection information of the matching source, if found
@param InSourceName A string value representing the name of the source to find
@result A value indicating whether a source with the supplied name was found
*/
const bool UNDIFinderComponent::FindNetworkSourceByName(FNDIConnectionInformation& ConnectionInformation,
FString InSourceName)
{
// Lock the Collection so that we are working with a solid collection of items
FScopeLock Lock(&CollectionSyncContext);
// Ensure we Reset the SourceInformation
ConnectionInformation.Reset();
for (const auto& connectionInfo : NetworkSourceCollection)
{
if (InSourceName.Equals(connectionInfo.SourceName, ESearchCase::IgnoreCase))
{
ConnectionInformation = connectionInfo;
return true;
}
}
return false;
}
/**
Returns the current collection of sources found on the network
*/
const TArray<FNDIConnectionInformation> UNDIFinderComponent::GetNetworkSources()
{
// Lock the current source collection
FScopeLock Lock(&CollectionSyncContext);
// return the source collection
return this->NetworkSourceCollection;
}
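A hedged usage sketch for the finder above: resolving a source by its advertised name and handing the result to a receiver. FinderComponent and ReceiverComponent are assumed component references and the source name is a placeholder.

FNDIConnectionInformation Connection;
if (FinderComponent->FindNetworkSourceByName(Connection, TEXT("MACHINE (My NDI Source)")))
{
    // Connection now holds the matched source; a receiver component can connect to it
    ReceiverComponent->StartReceiver(Connection);
}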

View File

@@ -0,0 +1,471 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Components/NDIPTZControllerComponent.h>
#include <GameFramework/Actor.h>
#include <Structures/NDIXml.h>
/**
Parsers for PTZ metadata
*/
class NDIXmlElementParser_ntk_ptz_pan_tilt_speed : public NDIXmlElementParser
{
public:
NDIXmlElementParser_ntk_ptz_pan_tilt_speed(UPTZController* PTZControllerIn)
: PTZController(PTZControllerIn)
{}
virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData)
{
PanSpeed = 0.0;
TiltSpeed = 0.0;
return true;
}
virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override
{
if(FCString::Strcmp(TEXT("pan_speed"), AttributeName) == 0)
{
PanSpeed = FCString::Atod(AttributeValue);
}
else if(FCString::Strcmp(TEXT("tilt_speed"), AttributeName) == 0)
{
TiltSpeed = FCString::Atod(AttributeValue);
}
return true;
}
virtual bool ProcessClose(const TCHAR* ElementName) override
{
PTZController->SetPTZPanTiltSpeed(PanSpeed, TiltSpeed);
return true;
}
protected:
UPTZController* PTZController;
double PanSpeed { 0.0 };
double TiltSpeed { 0.0 };
};
class NDIXmlElementParser_ntk_ptz_zoom_speed : public NDIXmlElementParser
{
public:
NDIXmlElementParser_ntk_ptz_zoom_speed(UPTZController* PTZControllerIn)
: PTZController(PTZControllerIn)
{}
virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData)
{
ZoomSpeed = 0.0;
return true;
}
virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override
{
if(FCString::Strcmp(TEXT("zoom_speed"), AttributeName) == 0)
{
ZoomSpeed = FCString::Atod(AttributeValue);
}
return true;
}
virtual bool ProcessClose(const TCHAR* ElementName) override
{
PTZController->SetPTZZoomSpeed(ZoomSpeed);
return true;
}
protected:
UPTZController* PTZController;
double ZoomSpeed { 0.0 };
};
class NDIXmlElementParser_ntk_ptz_focus : public NDIXmlElementParser
{
public:
NDIXmlElementParser_ntk_ptz_focus(UPTZController* PTZControllerIn)
: PTZController(PTZControllerIn)
{}
virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData)
{
AutoMode = true;
Distance = 0.5;
return true;
}
virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override
{
if(FCString::Strcmp(TEXT("mode"), AttributeName) == 0)
{
if(FCString::Strcmp(TEXT("manual"), AttributeValue) == 0)
AutoMode = false;
}
else if(FCString::Strcmp(TEXT("distance"), AttributeName) == 0)
{
Distance = FCString::Atod(AttributeValue);
}
return true;
}
virtual bool ProcessClose(const TCHAR* ElementName) override
{
PTZController->SetPTZFocus(AutoMode, Distance);
return true;
}
protected:
UPTZController* PTZController;
bool AutoMode { true };
double Distance { 0.5 };
};
class NDIXmlElementParser_ntk_ptz_store_preset : public NDIXmlElementParser
{
public:
NDIXmlElementParser_ntk_ptz_store_preset(UPTZController* PTZControllerIn)
: PTZController(PTZControllerIn)
{}
virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData)
{
StoreIndex = -1;
return true;
}
virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override
{
if(FCString::Strcmp(TEXT("index"), AttributeName) == 0)
{
StoreIndex = FCString::Atoi(AttributeValue);
}
return true;
}
virtual bool ProcessClose(const TCHAR* ElementName) override
{
if(StoreIndex >= 0)
{
PTZController->StorePTZState(StoreIndex);
}
return true;
}
protected:
UPTZController* PTZController;
int StoreIndex { -1 };
};
class NDIXmlElementParser_ntk_ptz_recall_preset : public NDIXmlElementParser
{
public:
NDIXmlElementParser_ntk_ptz_recall_preset(UPTZController* PTZControllerIn)
: PTZController(PTZControllerIn)
{}
virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData)
{
RecallIndex = -1;
return true;
}
virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override
{
if(FCString::Strcmp(TEXT("index"), AttributeName) == 0)
{
RecallIndex = FCString::Atoi(AttributeValue);
}
return true;
}
virtual bool ProcessClose(const TCHAR* ElementName) override
{
if(RecallIndex >= 0)
{
PTZController->RecallPTZState(RecallIndex);
}
return true;
}
protected:
UPTZController* PTZController;
int RecallIndex { -1 };
};
/**
PTZ controller component
*/
UPTZController::UPTZController()
{
this->bWantsInitializeComponent = true;
this->PrimaryComponentTick.bAllowTickOnDedicatedServer = false;
this->PrimaryComponentTick.bCanEverTick = true;
this->PrimaryComponentTick.bHighPriority = true;
this->PrimaryComponentTick.bRunOnAnyThread = false;
this->PrimaryComponentTick.bStartWithTickEnabled = true;
this->PrimaryComponentTick.bTickEvenWhenPaused = true;
this->NDIMetadataParser = MakeShareable(new NDIXmlParser());
this->NDIMetadataParser->AddElementParser("ntk_ptz_pan_tilt_speed", MakeShareable(new NDIXmlElementParser_ntk_ptz_pan_tilt_speed(this)));
this->NDIMetadataParser->AddElementParser("ntk_ptz_zoom_speed", MakeShareable(new NDIXmlElementParser_ntk_ptz_zoom_speed(this)));
this->NDIMetadataParser->AddElementParser("ntk_ptz_focus", MakeShareable(new NDIXmlElementParser_ntk_ptz_focus(this)));
this->NDIMetadataParser->AddElementParser("ntk_ptz_store_preset", MakeShareable(new NDIXmlElementParser_ntk_ptz_store_preset(this)));
this->NDIMetadataParser->AddElementParser("ntk_ptz_recall_preset", MakeShareable(new NDIXmlElementParser_ntk_ptz_recall_preset(this)));
}
UPTZController::~UPTZController()
{}
void UPTZController::InitializeComponent()
{
Super::InitializeComponent();
if (IsValid(NDIMediaSource))
{
// Ensure the PTZ controller is subscribed to the sender receiving metadata
this->NDIMediaSource->OnSenderMetaDataReceived.RemoveAll(this);
this->NDIMediaSource->OnSenderMetaDataReceived.AddDynamic(this, &UPTZController::ReceiveMetaDataFromSender);
}
}
bool UPTZController::Initialize(UNDIMediaSender* InMediaSource)
{
// is the media source already set?
if (this->NDIMediaSource == nullptr && InMediaSource != nullptr)
{
// we passed validation, so set the media source
this->NDIMediaSource = InMediaSource;
// validate the Media Source object
if (IsValid(NDIMediaSource))
{
// Ensure the PTZ controller is subscribed to the sender receiving metadata
this->NDIMediaSource->OnSenderMetaDataReceived.RemoveAll(this);
this->NDIMediaSource->OnSenderMetaDataReceived.AddDynamic(this, &UPTZController::ReceiveMetaDataFromSender);
}
}
// did we pass validation
return InMediaSource != nullptr && InMediaSource == NDIMediaSource;
}
void UPTZController::SetPTZPanTiltSpeed(float PanSpeed, float TiltSpeed)
{
PTZPanSpeed = PanSpeed;
PTZTiltSpeed = TiltSpeed;
OnPTZPanTiltSpeed.Broadcast(PanSpeed, TiltSpeed);
}
void UPTZController::SetPTZZoomSpeed(float ZoomSpeed)
{
PTZZoomSpeed = ZoomSpeed;
OnPTZZoomSpeed.Broadcast(ZoomSpeed);
}
void UPTZController::SetPTZFocus(bool AutoMode, float Distance)
{
FPTZState PTZState = GetPTZStateFromUE();
PTZState.FocusDistance = Distance;
PTZState.bAutoFocus = AutoMode;
SetPTZStateToUE(PTZState);
OnPTZFocus.Broadcast(AutoMode, Distance);
}
void UPTZController::StorePTZState(int Index)
{
if((Index >= 0) && (Index < 256))
{
FPTZState PTZState = GetPTZStateFromUE();
if(Index >= PTZStoredStates.Num())
PTZStoredStates.SetNum(Index+1);
PTZStoredStates[Index] = PTZState;
OnPTZStore.Broadcast(Index);
}
}
void UPTZController::RecallPTZState(int Index)
{
if((Index >= 0) && (Index < PTZStoredStates.Num()))
{
if(PTZRecallEasing > 0)
{
PTZStateInterp.PTZTargetState = PTZStoredStates[Index];
PTZStateInterp.EasingDuration = PTZRecallEasing;
PTZStateInterp.EasingRemaining = PTZStateInterp.EasingDuration;
}
else
{
SetPTZStateToUE(PTZStoredStates[Index]);
}
}
OnPTZRecall.Broadcast(Index);
}
FPTZState UPTZController::GetPTZStateFromUE() const
{
AActor* OwnerActor = GetOwner();
IPTZControllableInterface* ControllableObject = Cast<IPTZControllableInterface>(OwnerActor);
if (ControllableObject != nullptr)
{
return ControllableObject->GetPTZStateFromUE();
}
else
{
FPTZState PTZState;
FTransform Transform = OwnerActor->GetActorTransform();
FVector Euler = Transform.GetRotation().Euler();
PTZState.Pan = FMath::DegreesToRadians(Euler[2]);
PTZState.Tilt = FMath::DegreesToRadians(Euler[1]);
Transform.SetRotation(FQuat::MakeFromEuler(FVector(Euler[0], 0.f, 0.f)));
PTZState.CameraTransform = Transform;
return PTZState;
}
}
void UPTZController::SetPTZStateToUE(const FPTZState& PTZState)
{
if (EnablePTZ == true)
{
AActor* OwnerActor = GetOwner();
IPTZControllableInterface* ControllableObject = Cast<IPTZControllableInterface>(OwnerActor);
if (ControllableObject != nullptr)
{
ControllableObject->SetPTZStateToUE(PTZState);
}
else
{
FTransform Transform = PTZState.CameraTransform;
FVector Euler = Transform.GetRotation().Euler();
float Pitch = FMath::RadiansToDegrees(PTZState.Tilt);
float Yaw = FMath::RadiansToDegrees(PTZState.Pan);
Transform.SetRotation(FQuat::MakeFromEuler(FVector(Euler[0], Pitch, Yaw)));
OwnerActor->SetActorTransform(Transform);
}
}
}
void UPTZController::TickComponent(float DeltaTime, ELevelTick TickType, FActorComponentTickFunction* ThisTickFunction)
{
Super::TickComponent(DeltaTime, TickType, ThisTickFunction);
bool bUpdatePTZ = false;
if(PTZStateInterp.EasingRemaining > 0)
bUpdatePTZ = true;
if((PTZPanSpeed != 0) || (PTZTiltSpeed != 0) || (PTZZoomSpeed != 0))
bUpdatePTZ = true;
if(bUpdatePTZ)
{
FPTZState PTZState = GetPTZStateFromUE();
if(PTZStateInterp.EasingRemaining > 0)
{
float EasingDelta = FMath::Min(PTZStateInterp.EasingRemaining, DeltaTime);
/** Interpolate from 0 to 1 using polynomial:
I(F) = a*F^3 + b*F^2 + c*F + d
with constraints:
Start and end points: I(0) = 0, I(1) = 1
Smooth stop at end: I'(1) = 0 (velocity)
I''(1) = 0 (acceleration)
Solve to get:
a = 1, b = -3, c = 3, d = 0
I(F) = F^3 - 3*F^2 + 3*F
*/
float EasingFrac = (PTZStateInterp.EasingRemaining > 0) ? (EasingDelta / PTZStateInterp.EasingRemaining) : 1;
float EasingInterp = EasingFrac*EasingFrac*EasingFrac - 3*EasingFrac*EasingFrac + 3*EasingFrac;
PTZState.Pan = PTZState.Pan * (1 - EasingInterp) + PTZStateInterp.PTZTargetState.Pan * EasingInterp;
PTZState.Tilt = PTZState.Tilt * (1 - EasingInterp) + PTZStateInterp.PTZTargetState.Tilt * EasingInterp;
PTZState.FieldOfView = PTZState.FieldOfView * (1 - EasingInterp) + PTZStateInterp.PTZTargetState.FieldOfView * EasingInterp;
PTZState.FocusDistance = PTZState.FocusDistance * (1 - EasingInterp) + PTZStateInterp.PTZTargetState.FocusDistance * EasingInterp;
PTZState.CameraTransform.BlendWith(PTZStateInterp.PTZTargetState.CameraTransform, EasingInterp);
PTZStateInterp.EasingRemaining -= EasingDelta;
}
PTZState.FieldOfView -= FMath::RadiansToDegrees(PTZZoomSpeed) * DeltaTime;
if(PTZWithFoVLimit)
{
PTZState.FieldOfView = FMath::Clamp(PTZState.FieldOfView, PTZFoVMinLimit, PTZFoVMaxLimit);
}
PTZState.FieldOfView = FMath::Clamp(PTZState.FieldOfView, 5.f, 170.f);
float MovementScale = PTZState.FieldOfView / 90.f;
PTZState.Pan += PTZPanSpeed * DeltaTime * MovementScale * (bPTZPanInvert ? -1 : 1);
PTZState.Pan = FMath::Fmod(PTZState.Pan, 2*PI);
if(PTZWithPanLimit)
{
PTZState.Pan = FMath::Clamp(PTZState.Pan, FMath::DegreesToRadians(PTZPanMinLimit), FMath::DegreesToRadians(PTZPanMaxLimit));
}
PTZState.Tilt += PTZTiltSpeed * DeltaTime * MovementScale * (bPTZTiltInvert ? -1 : 1);
PTZState.Tilt = FMath::Fmod(PTZState.Tilt, 2*PI);
if(PTZWithTiltLimit)
{
PTZState.Tilt = FMath::Clamp(PTZState.Tilt, FMath::DegreesToRadians(PTZTiltMinLimit), FMath::DegreesToRadians(PTZTiltMaxLimit));
}
SetPTZStateToUE(PTZState);
}
}
void UPTZController::ReceiveMetaDataFromSender(UNDIMediaSender* Sender, FString Data)
{
FText OutErrorMessage;
int32 OutErrorLineNumber;
FFastXml::ParseXmlFile(this->NDIMetadataParser.Get(),
nullptr, // XmlFilePath
Data.GetCharArray().GetData(), // XmlFileContents
nullptr, // FeedbackContext
false, // bShowSlowTaskDialog
false, // bShowCancelButton
OutErrorMessage, // OutErrorMessage
OutErrorLineNumber // OutErrorLineNumber
);
}
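The recall easing in TickComponent above blends the current state toward the stored target every tick with I(F) = F^3 - 3*F^2 + 3*F, where F = DeltaTime / EasingRemaining. A small standalone sketch showing that iterating this per-tick blend lands exactly on the target when the remaining time runs out:

#include <algorithm>
#include <cstdio>

int main()
{
    double Value = 0.0;
    const double Target = 10.0;
    double Remaining = 2.0;        // seconds of easing left
    const double Dt = 1.0 / 60.0;  // fixed 60 Hz tick for the example
    while (Remaining > 0.0)
    {
        const double Delta = std::min(Dt, Remaining);
        const double F = Delta / Remaining;                 // fraction of the remaining time
        const double I = F * F * F - 3.0 * F * F + 3.0 * F; // ease-out cubic: I(1) = 1, I'(1) = I''(1) = 0
        Value = Value * (1.0 - I) + Target * I;
        Remaining -= Delta;
    }
    std::printf("final value = %f\n", Value); // the last step has F == 1, so Value ends on Target exactly
    return 0;
}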

View File

@@ -0,0 +1,126 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Components/NDIReceiverComponent.h>
UNDIReceiverComponent::UNDIReceiverComponent(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer) {}
/**
Initialize this component with the media source required for receiving NDI audio, video, and metadata.
Returns false if the MediaSource has already been set. This is usually the case when this component is
initialized in Blueprints.
*/
bool UNDIReceiverComponent::Initialize(UNDIMediaReceiver* InMediaSource)
{
if (this->NDIMediaSource == nullptr && InMediaSource != nullptr)
{
this->NDIMediaSource = InMediaSource;
}
return InMediaSource != nullptr && InMediaSource == NDIMediaSource;
}
/**
Begin receiving NDI audio, video, and metadata frames
*/
bool UNDIReceiverComponent::StartReceiver(const FNDIConnectionInformation& InConnectionInformation)
{
if (IsValid(this->NDIMediaSource))
{
// Call to the Media Source's function to initialize (hopefully with valid connection information)
if (NDIMediaSource->Initialize(InConnectionInformation, UNDIMediaReceiver::EUsage::Standalone))
{
// FNDIConnectionService::RegisterReceiver(this->NDIMediaSource);
return true;
}
}
return false;
}
/**
Attempts to change the connection from which to receive audio, video, and metadata frames
*/
void UNDIReceiverComponent::ChangeConnection(const FNDIConnectionInformation& InConnectionInformation)
{
// Ensure a valid source to change the connection on
if (IsValid(this->NDIMediaSource))
{
// Call the underlying function
NDIMediaSource->ChangeConnection(InConnectionInformation);
}
}
/**
This will add a metadata frame and return immediately, having scheduled the frame asynchronously
*/
void UNDIReceiverComponent::SendMetadataFrame(const FString& metadata)
{
// Ensure a valid source to send metadata from
if (IsValid(this->NDIMediaSource))
{
// Call the underlying function
NDIMediaSource->SendMetadataFrame(metadata);
}
}
/**
This will set up the upstream tally notifications. If no streams are connected, it will automatically send
the tally state upon connection
*/
void UNDIReceiverComponent::SendTallyInformation(const bool& IsOnPreview, const bool& IsOnProgram)
{
if (IsValid(this->NDIMediaSource))
{
NDIMediaSource->SendTallyInformation(IsOnPreview, IsOnProgram);
}
}
/**
Attempts to stop receiving audio, video, and metadata frames from the connected source
*/
void UNDIReceiverComponent::ShutdownReceiver()
{
if (IsValid(this->NDIMediaSource))
{
NDIMediaSource->Shutdown();
}
}
/**
Returns the current framerate of the connected source
*/
FFrameRate UNDIReceiverComponent::GetCurrentFrameRate() const
{
return IsValid(NDIMediaSource) ? NDIMediaSource->GetCurrentFrameRate() : FFrameRate(60, 1);
}
/**
Returns the current timecode of the connected source
*/
FTimecode UNDIReceiverComponent::GetCurrentTimecode() const
{
return IsValid(NDIMediaSource)
? NDIMediaSource->GetCurrentTimecode()
: FTimecode::FromTimespan(FTimespan::FromMilliseconds(0.0), FFrameRate(60, 1), false, true);
}
/**
Returns the current connection information of the connected source
*/
FNDIConnectionInformation UNDIReceiverComponent::GetCurrentConnectionInformation() const
{
return IsValid(NDIMediaSource) ? NDIMediaSource->GetCurrentConnectionInformation() : FNDIConnectionInformation();
}
/**
Returns the current performance data of the receiver while connected to the source
*/
FNDIReceiverPerformanceData UNDIReceiverComponent::GetPerformanceData() const
{
return IsValid(NDIMediaSource) ? NDIMediaSource->GetPerformanceData() : FNDIReceiverPerformanceData();
}
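A hedged sketch of polling the receiver getters above, for example from an actor tick. Every getter falls back to a safe default (60 fps, zero timecode, empty structs) when no media source is assigned, so no validity check is strictly required. ReceiverComponent is an assumed component reference.

const FTimecode Timecode = ReceiverComponent->GetCurrentTimecode();
const FFrameRate FrameRate = ReceiverComponent->GetCurrentFrameRate();
UE_LOG(LogTemp, Verbose, TEXT("NDI receiver at %s (%d/%d fps)"),
    *Timecode.ToString(), FrameRate.Numerator, FrameRate.Denominator);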

View File

@@ -0,0 +1,340 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Components/NDITriCasterExtComponent.h>
#include <Structures/NDIXml.h>
#include <Misc/EngineVersionComparison.h>
#include <EngineUtils.h>
/**
Parsers for TriCasterExt metadata
*/
class NDIXmlElementParser_tricaster_ext : public NDIXmlElementParser
{
public:
NDIXmlElementParser_tricaster_ext(UTriCasterExtComponent* TriCasterExtComponentIn)
: TriCasterExtComponent(TriCasterExtComponentIn)
{}
virtual bool ProcessOpen(const TCHAR* ElementName, const TCHAR* ElementData)
{
TCData.Value = FString();
TCData.KeyValues.Empty();
return true;
}
virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override
{
if(FCString::Strcmp(TEXT("name"), AttributeName) == 0)
{}
else if(FCString::Strcmp(TEXT("value"), AttributeName) == 0)
{
TCData.Value = FString(AttributeValue);
}
else
{
TCData.KeyValues.Add(FName(AttributeName), FString(AttributeValue));
}
return true;
}
virtual bool ProcessClose(const TCHAR* ElementName) override
{
if(TCData.Value == "ndiio")
{
FString* ActorNamePtr = TCData.KeyValues.Find("actor");
FString* PropertyNamePtr = TCData.KeyValues.Find("property");
FString* PropertyValueStrPtr = TCData.KeyValues.Find("propertyvalue");
FString* ComponentNamePtr = TCData.KeyValues.Find("component");
FString* EasingDurationPtr = TCData.KeyValues.Find("easing");
if((ActorNamePtr != nullptr) && (PropertyNamePtr != nullptr) && (PropertyValueStrPtr != nullptr))
{
FString PropertyBaseName, PropertyElementName;
if(!PropertyNamePtr->Split(TEXT(":"), &PropertyBaseName, &PropertyElementName))
PropertyBaseName = *PropertyNamePtr;
FTimespan EasingDuration = 0;
if(EasingDurationPtr != nullptr)
{
double Seconds = FCString::Atod(**EasingDurationPtr);
EasingDuration = FTimespan::FromSeconds(Seconds);
}
for(TActorIterator<AActor> ActorItr(TriCasterExtComponent->GetWorld()); ActorItr; ++ActorItr)
{
AActor* Actor = *ActorItr;
if(Actor->GetName() == *ActorNamePtr)
{
UObject* FoundObject = nullptr;
FProperty* FoundProperty = nullptr;
if(ComponentNamePtr != nullptr)
{
TInlineComponentArray<UActorComponent*> PrimComponents;
Actor->GetComponents(PrimComponents, true);
for(auto& CompIt : PrimComponents)
{
if(CompIt->GetName() == *ComponentNamePtr)
{
FProperty* Property = CompIt->GetClass()->FindPropertyByName(*PropertyBaseName);
if(Property)
{
FoundObject = CompIt;
FoundProperty = Property;
break;
}
}
}
}
else
{
FProperty* ActorProperty = Actor->GetClass()->FindPropertyByName(*PropertyBaseName);
if(ActorProperty)
{
FoundObject = Actor;
FoundProperty = ActorProperty;
}
else
{
TInlineComponentArray<UActorComponent*> PrimComponents;
Actor->GetComponents(PrimComponents, true);
for(auto& CompIt : PrimComponents)
{
FProperty* CompProperty = CompIt->GetClass()->FindPropertyByName(*PropertyBaseName);
if(CompProperty)
{
FoundObject = CompIt;
FoundProperty = CompProperty;
break;
}
}
}
}
if(FoundObject && FoundProperty)
{
TriCasterExtComponent->TriCasterExt(Actor, FoundObject, FoundProperty, PropertyElementName, *PropertyValueStrPtr, EasingDuration);
break;
}
}
}
}
}
TriCasterExtComponent->TriCasterExtCustom(TCData);
return true;
}
protected:
UTriCasterExtComponent* TriCasterExtComponent;
FTriCasterExt TCData;
};
// <tricaster_ext name="net1" value="ndiio" actor="LightSource" property="Intensity" propertyvalue="1.234" />
// <tricaster_ext name="net1" value="ndiio" actor="LightSource" component="LightComponent0" property="Intensity" propertyvalue="1.234" />
// <tricaster_ext name="net1" value="ndiio" actor="LightSource" property="RelativeLocation" propertyvalue="(X=1,Y=2,Z=3)" />
// <tricaster_ext name="net1" value="ndiio" actor="LightSource" property="RelativeLocation" propertyvalue="(X=1)" />
// <tricaster_ext name="net1" value="ndiio" actor="LightSource" property="RelativeLocation:Y" propertyvalue="2" easing="5.3"/>
UTriCasterExtComponent::UTriCasterExtComponent()
{
this->bWantsInitializeComponent = true;
this->PrimaryComponentTick.bAllowTickOnDedicatedServer = false;
this->PrimaryComponentTick.bCanEverTick = true;
this->PrimaryComponentTick.bHighPriority = true;
this->PrimaryComponentTick.bRunOnAnyThread = false;
this->PrimaryComponentTick.bStartWithTickEnabled = true;
this->PrimaryComponentTick.bTickEvenWhenPaused = true;
this->NDIMetadataParser = MakeShareable(new NDIXmlParser());
NDIMetadataParser->AddElementParser("tricaster_ext", MakeShareable(new NDIXmlElementParser_tricaster_ext(this)));
}
UTriCasterExtComponent::~UTriCasterExtComponent()
{}
void UTriCasterExtComponent::InitializeComponent()
{
Super::InitializeComponent();
if (IsValid(NDIMediaSource))
{
// Ensure the TriCasterExt component is subscribed to the sender receiving metadata
this->NDIMediaSource->OnSenderMetaDataReceived.RemoveAll(this);
this->NDIMediaSource->OnSenderMetaDataReceived.AddDynamic(this, &UTriCasterExtComponent::ReceiveMetaDataFromSender);
}
}
bool UTriCasterExtComponent::Initialize(UNDIMediaSender* InMediaSource)
{
// is the media source already set?
if (this->NDIMediaSource == nullptr && InMediaSource != nullptr)
{
// we passed validation, so set the media source
this->NDIMediaSource = InMediaSource;
// validate the Media Source object
if (IsValid(NDIMediaSource))
{
// Ensure the TriCasterExt component is subscribed to the sender receiving metadata
this->NDIMediaSource->OnSenderMetaDataReceived.RemoveAll(this);
this->NDIMediaSource->OnSenderMetaDataReceived.AddDynamic(this, &UTriCasterExtComponent::ReceiveMetaDataFromSender);
}
}
// did we pass validation
return InMediaSource != nullptr && InMediaSource == NDIMediaSource;
}
void UTriCasterExtComponent::TriCasterExt(AActor* Actor, UObject* Object, FProperty* Property, FString PropertyElementName, FString PropertyValueStr, FTimespan EasingDuration)
{
if(Actor && Object && Property)
{
FTriCasterExtInterp Interp;
Interp.Actor = Actor;
Interp.Object = Object;
Interp.Property = Property;
Interp.PropertyElementName = PropertyElementName;
Interp.PropertyValueStr = PropertyValueStr;
Interp.EasingDuration = EasingDuration.GetTotalSeconds();
Interp.EasingRemaining = Interp.EasingDuration;
TriCasterExtInterp.Add(Interp);
}
OnTriCasterExt.Broadcast(Actor, Object, PropertyElementName, PropertyValueStr, EasingDuration);
}
void UTriCasterExtComponent::TriCasterExtCustom(const FTriCasterExt& TCData)
{
OnTriCasterExtCustom.Broadcast(TCData);
}
void UTriCasterExtComponent::TickComponent(float DeltaTime, ELevelTick TickType, FActorComponentTickFunction* ThisTickFunction)
{
Super::TickComponent(DeltaTime, TickType, ThisTickFunction);
for(int32 i = 0; i < TriCasterExtInterp.Num(); ++i)
{
FTriCasterExtInterp& Interp = TriCasterExtInterp[i];
float EasingDelta = FMath::Min(Interp.EasingRemaining, DeltaTime);
void* Data = Interp.Property->ContainerPtrToValuePtr<void>(Interp.Object);
if(Data)
{
bool Done = false;
#if WITH_EDITOR
Interp.Object->PreEditChange(Interp.Property);
Interp.Actor->PreEditChange(Interp.Property);
#endif
if(FNumericProperty* NumericProperty = CastField<FNumericProperty>(Interp.Property))
{
double PropertyValue = NumericProperty->GetFloatingPointPropertyValue(Data);
double TargetValue = FCString::Atod(*Interp.PropertyValueStr);
double EasingFrac = (Interp.EasingRemaining > 0) ? (EasingDelta / Interp.EasingRemaining) : 1;
double EasingInterp = 3*EasingFrac - 3*EasingFrac*EasingFrac + EasingFrac*EasingFrac*EasingFrac;
double NewValue = PropertyValue * (1 - EasingInterp) + TargetValue * EasingInterp;
NumericProperty->SetFloatingPointPropertyValue(Data, NewValue);
Done = true;
}
else if(FStructProperty* StructProperty = CastField<FStructProperty>(Interp.Property))
{
FProperty* FieldProperty = FindFProperty<FProperty>(StructProperty->Struct, *(Interp.PropertyElementName));
if(FNumericProperty* StructNumericProperty = CastField<FNumericProperty>(FieldProperty))
{
void* FieldData = FieldProperty->ContainerPtrToValuePtr<void>(Data);
double PropertyValue = StructNumericProperty->GetFloatingPointPropertyValue(FieldData);
double TargetValue = FCString::Atod(*Interp.PropertyValueStr);
double EasingFrac = (Interp.EasingRemaining > 0) ? (EasingDelta / Interp.EasingRemaining) : 1;
double EasingInterp = 3*EasingFrac - 3*EasingFrac*EasingFrac + EasingFrac*EasingFrac*EasingFrac;
double NewValue = PropertyValue * (1 - EasingInterp) + TargetValue * EasingInterp;
StructNumericProperty->SetFloatingPointPropertyValue(FieldData, NewValue);
Done = true;
}
}
if(!Done)
{
FString ImportText;
if(!Interp.PropertyElementName.IsEmpty())
ImportText = "(" + Interp.PropertyElementName + "=" + Interp.PropertyValueStr + ")";
else
ImportText = Interp.PropertyValueStr;
Interp.Property->ImportText_Direct(*ImportText, Data, Interp.Object, 0);
}
UActorComponent* ActorComponent = Cast<UActorComponent>(Interp.Object);
if(ActorComponent)
{
if((Interp.Property->GetFName() == TEXT("RelativeLocation")) ||
(Interp.Property->GetFName() == TEXT("RelativeRotation")) ||
(Interp.Property->GetFName() == TEXT("RelativeScale3D")))
{
ActorComponent->UpdateComponentToWorld();
}
}
#if (ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION < 3) // Before 5.3
if(Interp.Property->HasAnyPropertyFlags(CPF_Interp))
Interp.Object->PostInterpChange(Interp.Property);
#endif
#if WITH_EDITOR
TArray<const UObject*> ModifiedObjects;
ModifiedObjects.Add(Interp.Actor);
FPropertyChangedEvent PropertyChangedEvent(Interp.Property, EPropertyChangeType::ValueSet, MakeArrayView(ModifiedObjects));
FEditPropertyChain PropertyChain;
PropertyChain.AddHead(Interp.Property);
FPropertyChangedChainEvent PropertyChangedChainEvent(PropertyChain, PropertyChangedEvent);
Interp.Object->PostEditChangeChainProperty(PropertyChangedChainEvent);
Interp.Actor->PostEditChangeChainProperty(PropertyChangedChainEvent);
#endif
}
Interp.EasingRemaining -= EasingDelta;
if(Interp.EasingRemaining == 0)
TriCasterExtInterp.RemoveAtSwap(i);
}
}
void UTriCasterExtComponent::ReceiveMetaDataFromSender(UNDIMediaSender* Sender, FString Data)
{
FText OutErrorMessage;
int32 OutErrorLineNumber;
FFastXml::ParseXmlFile(this->NDIMetadataParser.Get(),
nullptr, // XmlFilePath
Data.GetCharArray().GetData(), // XmlFileContents
nullptr, // FeedbackContext
false, // bShowSlowTaskDialog
false, // bShowCancelButton
OutErrorMessage, // OutErrorMessage
OutErrorLineNumber // OutErrorLineNumber
);
}
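For reference, a hedged sketch of building one of the metadata strings shown in the examples above from C++; how the string is actually injected depends on the TriCaster or sender-side setup, and the actor and property names are placeholders only:

const FString Metadata = FString::Printf(
    TEXT("<tricaster_ext name=\"net1\" value=\"ndiio\" actor=\"%s\" property=\"%s\" propertyvalue=\"%s\" easing=\"%s\"/>"),
    TEXT("LightSource"), TEXT("RelativeLocation:Y"), TEXT("2"), TEXT("5.3"));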

View File

@@ -0,0 +1,264 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Components/NDIViewportCaptureComponent.h>
#include <Rendering/RenderingCommon.h>
#include <SceneView.h>
#include <SceneViewExtension.h>
#include <CanvasTypes.h>
#include <EngineModule.h>
#include <LegacyScreenPercentageDriver.h>
#include <RenderResource.h>
#include <UnrealClient.h>
#include <Engine/Engine.h>
#include <EngineUtils.h>
#include <Misc/CoreDelegates.h>
#include <Engine/TextureRenderTarget2D.h>
#include <UObject/Package.h>
UNDIViewportCaptureComponent::UNDIViewportCaptureComponent(const FObjectInitializer& ObjectInitializer)
: Super(ObjectInitializer)
{
this->bWantsInitializeComponent = true;
this->CaptureSource = ESceneCaptureSource::SCS_FinalToneCurveHDR;
this->PostProcessSettings.bOverride_DepthOfFieldFocalDistance = true;
this->PostProcessSettings.DepthOfFieldFocalDistance = 10000.f;
}
UNDIViewportCaptureComponent::~UNDIViewportCaptureComponent()
{}
void UNDIViewportCaptureComponent::InitializeComponent()
{
Super::InitializeComponent();
// validate the Media Source object
if (IsValid(NDIMediaSource))
{
// define default capture values
const auto& capture_size = !bOverrideBroadcastSettings ? NDIMediaSource->GetFrameSize() : CaptureSize;
const auto& capture_rate = !bOverrideBroadcastSettings ? NDIMediaSource->GetFrameRate() : CaptureRate;
// change the capture sizes as necessary
ChangeCaptureSettings(capture_size, capture_rate);
// ensure we are subscribed to the broadcast configuration changed event
this->NDIMediaSource->OnBroadcastConfigurationChanged.RemoveAll(this);
this->NDIMediaSource->OnBroadcastConfigurationChanged.AddDynamic(
this, &UNDIViewportCaptureComponent::OnBroadcastConfigurationChanged);
}
}
void UNDIViewportCaptureComponent::UninitializeComponent()
{
if (IsValid(NDIMediaSource))
{
if (IsValid(TextureTarget))
{
NDIMediaSource->ChangeVideoTexture(nullptr);
}
}
Super::UninitializeComponent();
}
bool UNDIViewportCaptureComponent::Initialize(UNDIMediaSender* InMediaSource)
{
// is the media source already set?
if (this->NDIMediaSource == nullptr && InMediaSource != nullptr)
{
// we passed validation, so set the media source
this->NDIMediaSource = InMediaSource;
// validate the Media Source object
if (IsValid(NDIMediaSource))
{
// define default capture values
const auto& capture_size = !bOverrideBroadcastSettings ? NDIMediaSource->GetFrameSize() : CaptureSize;
const auto& capture_rate = !bOverrideBroadcastSettings ? NDIMediaSource->GetFrameRate() : CaptureRate;
// change the capture sizes as necessary
ChangeCaptureSettings(capture_size, capture_rate);
// ensure we are subscribed to the broadcast configuration changed event
this->NDIMediaSource->OnBroadcastConfigurationChanged.RemoveAll(this);
this->NDIMediaSource->OnBroadcastConfigurationChanged.AddDynamic(
this, &UNDIViewportCaptureComponent::OnBroadcastConfigurationChanged);
}
}
// did we pass validation
return InMediaSource != nullptr && InMediaSource == NDIMediaSource;
}
/**
Changes the name of the sender object as seen on the network for remote connections
@param InSourceName The new name of the source to be identified as on the network
*/
void UNDIViewportCaptureComponent::ChangeSourceName(const FString& InSourceName)
{
// validate the Media Source object
if (IsValid(NDIMediaSource))
{
// call the media source implementation of the function
NDIMediaSource->ChangeSourceName(InSourceName);
}
}
/**
Attempts to change the Broadcast information associated with this media object
@param InConfiguration The new configuration to broadcast
*/
void UNDIViewportCaptureComponent::ChangeBroadcastConfiguration(const FNDIBroadcastConfiguration& InConfiguration)
{
// validate the Media Source object
if (IsValid(NDIMediaSource))
{
// call the media source implementation of the function
NDIMediaSource->ChangeBroadcastConfiguration(InConfiguration);
}
}
/**
Attempts to change the RenderTarget used in sending video frames over NDI
@param BroadcastTexture The texture to use as video, while broadcasting over NDI
*/
void UNDIViewportCaptureComponent::ChangeBroadcastTexture(UTextureRenderTarget2D* BroadcastTexture)
{
// ensure we have some thread-safety
FScopeLock Lock(&UpdateRenderContext);
this->TextureTarget = BroadcastTexture;
}
/**
Change the capture settings of the viewport capture
@param InCaptureSize The Capture size of the frame to capture of the viewport
@param InCaptureRate A framerate at which to capture frames of the viewport
*/
void UNDIViewportCaptureComponent::ChangeCaptureSettings(FIntPoint InCaptureSize, FFrameRate InCaptureRate)
{
// clamp our viewport capture size
int32 capture_width = FMath::Max(InCaptureSize.X, 64);
int32 capture_height = FMath::Max(InCaptureSize.Y, 64);
// set the capture size
this->CaptureSize = FIntPoint(capture_width, capture_height);
// set the capture rate
this->CaptureRate = InCaptureRate;
	// the fastest permitted capture rate, expressed as a minimum tick interval of 1 millisecond
float capture_rate_max = 1 / 1000.0f;
float capture_rate = CaptureRate.Denominator / (float)CaptureRate.Numerator;
	// use the requested interval if it is at least 1 ms, otherwise tick every frame (-1)
this->PrimaryComponentTick.TickInterval = capture_rate >= capture_rate_max ? capture_rate : -1.0f;
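	// e.g. a 60000/1001 capture rate gives an interval of 1001/60000, roughly 0.0167 seconds between captures, well above the 1 ms floor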
// ensure we have some thread-safety
FScopeLock Lock(&UpdateRenderContext);
if (!IsValid(this->TextureTarget))
{
this->TextureTarget = NewObject<UTextureRenderTarget2D>(
GetTransientPackage(), UTextureRenderTarget2D::StaticClass(), NAME_None, RF_Transient | RF_MarkAsNative);
this->TextureTarget->UpdateResource();
}
this->TextureTarget->ResizeTarget(this->CaptureSize.X, this->CaptureSize.Y);
}
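// Example (illustrative): given a UNDIViewportCaptureComponent* named CaptureComponent, a 1920x1080, 60 fps capture could be requested with:
//     CaptureComponent->ChangeCaptureSettings(FIntPoint(1920, 1080), FFrameRate(60, 1));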
/**
	Determines the current tally information by polling the underlying media source and returning
	the current state immediately
	@param IsOnPreview - A state indicating whether this source is on preview of a receiver
	@param IsOnProgram - A state indicating whether this source is on program of a receiver
*/
void UNDIViewportCaptureComponent::GetTallyInformation(bool& IsOnPreview, bool& IsOnProgram)
{
// Initialize the properties
IsOnPreview = false;
IsOnProgram = false;
// validate the Media Source object
if (IsValid(NDIMediaSource))
{
// call the media source implementation of the function
NDIMediaSource->GetTallyInformation(IsOnPreview, IsOnProgram, 0);
}
}
/**
	Gets the current number of receivers connected to this source. This can be used to avoid rendering
	when nothing is connected to the video source, which can significantly improve efficiency when
	making a large number of sources available on the network
@param Result The total number of connected receivers attached to the broadcast of this object
*/
void UNDIViewportCaptureComponent::GetNumberOfConnections(int32& Result)
{
// Initialize the property
Result = 0;
// validate the Media Source object
if (IsValid(NDIMediaSource))
{
// call the media source implementation of the function
NDIMediaSource->GetNumberOfConnections(Result);
}
}
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later
void UNDIViewportCaptureComponent::UpdateSceneCaptureContents(FSceneInterface* Scene, ISceneRenderBuilder& SceneRenderBuilder)
#else
void UNDIViewportCaptureComponent::UpdateSceneCaptureContents(FSceneInterface* Scene)
#endif
{
// ensure we have some thread-safety
FScopeLock Lock(&UpdateRenderContext);
if (TextureTarget == nullptr)
return;
if (IsValid(NDIMediaSource))
{
NDIMediaSource->ChangeVideoTexture(TextureTarget);
// Some capture sources treat alpha as opacity, some sources use transparency.
// Alpha in NDI is opacity. Reverse the alpha mapping to always get opacity.
bool flip_alpha = (CaptureSource == SCS_SceneColorHDR) || (CaptureSource == SCS_SceneColorHDRNoAlpha) ||
(CaptureSource == SCS_SceneDepth) || (CaptureSource == SCS_Normal) ||
(CaptureSource == SCS_BaseColor);
if (flip_alpha == false)
NDIMediaSource->ChangeAlphaRemap(AlphaMin, AlphaMax);
else
NDIMediaSource->ChangeAlphaRemap(AlphaMax, AlphaMin);
// Do the actual capturing
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 6)) // 5.6 or later
Super::UpdateSceneCaptureContents(Scene, SceneRenderBuilder);
#else
Super::UpdateSceneCaptureContents(Scene);
#endif
}
}
void UNDIViewportCaptureComponent::OnBroadcastConfigurationChanged(UNDIMediaSender* Sender)
{
// If we are not overriding the broadcast settings and the sender is valid
if (!bOverrideBroadcastSettings && IsValid(Sender))
{
// change the capture sizes as necessary
ChangeCaptureSettings(Sender->GetFrameSize(), Sender->GetFrameRate());
}
}

View File

@@ -0,0 +1,305 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <NDIIOPluginModule.h>
#include <Modules/ModuleManager.h>
#include <IMediaModule.h>
#include <NDIIOPluginAPI.h>
#include "Player/NDIMediaPlayer.h"
#include <Misc/Paths.h>
#include <GenericPlatform/GenericPlatformMisc.h>
#include <Services/NDIConnectionService.h>
#include <Services/NDIFinderService.h>
#include <Misc/MessageDialog.h>
#include <Misc/EngineVersionComparison.h>
// Meaning the plugin is being compiled with the editor
#if WITH_EDITOR
#include "ThumbnailRendering/ThumbnailManager.h"
#include "ThumbnailRendering/TextureThumbnailRenderer.h"
#include <ISettingsModule.h>
#include <Editor.h>
#include <Objects/Media/NDIMediaTexture2D.h>
#endif
#define LOCTEXT_NAMESPACE "FNDIIOPluginModule"
void FNDIIOPluginModule::StartupModule()
{
// Doubly Ensure that this handle is nullptr
NDI_LIB_HANDLE = nullptr;
if (LoadModuleDependencies())
{
#if UE_EDITOR
if (ISettingsModule* SettingsModule = FModuleManager::GetModulePtr<ISettingsModule>("Settings"))
{
SettingsModule->RegisterSettings(
"Project", "Plugins", "NDI", LOCTEXT("NDISettingsName", "Vizrt NDI"),
LOCTEXT("NDISettingsDescription", "Vizrt NDI(R) Engine Intergration Settings"),
GetMutableDefault<UNDIIOPluginSettings>());
}
		// Register a texture thumbnail renderer so the 'NDI Media Texture2D' asset thumbnail stays up to date as the underlying texture is updated.
UThumbnailManager::Get().RegisterCustomRenderer(UNDIMediaTexture2D::StaticClass(),
UTextureThumbnailRenderer::StaticClass());
#endif
// Construct our Services
this->NDIFinderService = MakeShareable(new FNDIFinderService());
this->NDIConnectionService = MakeShareable(new FNDIConnectionService());
// Start the service
if (NDIFinderService.IsValid())
NDIFinderService->Start();
// Start the service
if (NDIConnectionService.IsValid())
NDIConnectionService->Start();
}
else
{
#if PLATFORM_WINDOWS
// Write an error message to the log.
UE_LOG(LogWindows, Error,
TEXT("Unable to load \"Processing.NDI.Lib.x64.dll\" from the NDI 6 Runtime Directory."));
#if UE_EDITOR
const FText& WarningMessage =
LOCTEXT("NDIRuntimeMissing",
"Cannot find \"Processing.NDI.Lib.x64.dll\" from the NDI 6 Runtime Directory. "
"Continued usage of the plugin can cause instability within the editor.\r\n\r\n"
"Please refer to the 'NDI IO Plugin for Unreal Engine Quickstart Guide' "
"for additional information related to installation instructions for this plugin.\r\n\r\n");
// Open a message box, showing that things will not work since the NDI Runtime Directory cannot be found
if (FMessageDialog::Open(EAppMsgType::OkCancel, EAppReturnType::Ok, WarningMessage) == EAppReturnType::Ok)
{
FString URLResult = FString("");
FPlatformProcess::LaunchURL(*FString("https://ndi.video/sdk/"), nullptr, &URLResult);
}
#endif
#endif
#if (PLATFORM_LINUX || PLATFORM_LINUXARM64)
// Write an error message to the log.
UE_LOG(LogLinux, Error,
TEXT("Unable to load \"" NDILIB_LIBRARY_NAME "\" from the NDI 6 Runtime."));
#if UE_EDITOR
const FText& WarningMessage =
LOCTEXT("NDIRuntimeMissing",
"Cannot find \"" NDILIB_LIBRARY_NAME "\" from the NDI 6 Runtime. "
"Continued usage of the plugin can cause instability within the editor.\r\n\r\n"
"Please refer to the 'NDI IO Plugin for Unreal Engine Quickstart Guide' "
"for additional information related to installation instructions for this plugin.\r\n\r\n");
// Open a message box, showing that things will not work since the NDI Runtime Directory cannot be found
if (FMessageDialog::Open(EAppMsgType::OkCancel, EAppReturnType::Ok, WarningMessage) == EAppReturnType::Ok)
{
FString URLResult = FString("");
FPlatformProcess::LaunchURL(*FString("https://ndi.video/sdk/"), nullptr, &URLResult);
}
#endif
#endif
}
// supported platforms
SupportedPlatforms.Add(TEXT("Windows"));
SupportedPlatforms.Add(TEXT("Linux"));
SupportedPlatforms.Add(TEXT("LinuxAArch64"));
// supported schemes
SupportedUriSchemes.Add(TEXT("ndiio"));
// register player factory
auto MediaModule = FModuleManager::LoadModulePtr<IMediaModule>("Media");
if (MediaModule != nullptr)
{
MediaModule->RegisterPlayerFactory(*this);
}
FApp::SetUnfocusedVolumeMultiplier(1.f);
}
void FNDIIOPluginModule::ShutdownModule()
{
// unregister player factory
auto MediaModule = FModuleManager::GetModulePtr<IMediaModule>("Media");
if (MediaModule != nullptr)
{
MediaModule->UnregisterPlayerFactory(*this);
}
if (NDIFinderService.IsValid())
NDIFinderService->Shutdown();
ShutdownModuleDependencies();
}
bool FNDIIOPluginModule::BeginBroadcastingActiveViewport()
{
// Ensure we have a valid service
if (NDIConnectionService.IsValid())
{
// perform the requested functionality
return NDIConnectionService->BeginBroadcastingActiveViewport();
}
return false;
}
void FNDIIOPluginModule::StopBroadcastingActiveViewport()
{
// Ensure we have a valid service
if (NDIConnectionService.IsValid())
{
// perform the requested functionality
NDIConnectionService->StopBroadcastingActiveViewport();
}
}
//~ IMediaPlayerFactory interface
bool FNDIIOPluginModule::CanPlayUrl(const FString& Url, const IMediaOptions* /*Options*/, TArray<FText>* /*OutWarnings*/, TArray<FText>* OutErrors) const
{
FString Scheme;
FString Location;
// check scheme
if (!Url.Split(TEXT("://"), &Scheme, &Location, ESearchCase::CaseSensitive))
{
if (OutErrors != nullptr)
{
OutErrors->Add(LOCTEXT("NoSchemeFound", "No URI scheme found"));
}
return false;
}
if (!SupportedUriSchemes.Contains(Scheme))
{
if (OutErrors != nullptr)
{
OutErrors->Add(FText::Format(LOCTEXT("SchemeNotSupported", "The URI scheme '{0}' is not supported"), FText::FromString(Scheme)));
}
return false;
}
return true;
}
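// Illustrative examples: "ndiio://MACHINE (Channel 1)" (hypothetical source name) passes both checks above;
// "rtsp://example" is rejected as an unsupported scheme, and a bare source name without "://" is rejected
// because no scheme can be found.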
TSharedPtr<IMediaPlayer, ESPMode::ThreadSafe> FNDIIOPluginModule::CreatePlayer(IMediaEventSink& EventSink)
{
return MakeShared<FNDIMediaPlayer, ESPMode::ThreadSafe>(EventSink);
}
FText FNDIIOPluginModule::GetDisplayName() const
{
return LOCTEXT("MediaPlayerDisplayName", "NDI Interface");
}
FName FNDIIOPluginModule::GetPlayerName() const
{
static FName PlayerName(TEXT("NDIMedia"));
return PlayerName;
}
FGuid FNDIIOPluginModule::GetPlayerPluginGUID() const
{
static FGuid PlayerPluginGUID(0x71b13c2b, 0x70874965, 0x8a0e23f7, 0x5be6698f);
return PlayerPluginGUID;
}
const TArray<FString>& FNDIIOPluginModule::GetSupportedPlatforms() const
{
return SupportedPlatforms;
}
bool FNDIIOPluginModule::SupportsFeature(EMediaFeature Feature) const
{
return Feature == EMediaFeature::AudioSamples ||
Feature == EMediaFeature::MetadataTracks ||
Feature == EMediaFeature::VideoSamples;
}
bool FNDIIOPluginModule::LoadModuleDependencies()
{
#if PLATFORM_WINDOWS
// Get the Binaries File Location
const FString env_variable = TEXT(NDILIB_REDIST_FOLDER);
const FString binaries_path = FPlatformMisc::GetEnvironmentVariable(*env_variable) + "/Processing.NDI.Lib.x64.dll";
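	// e.g. if the runtime environment variable points at "C:\Program Files\NDI\NDI 6 Runtime\v6" (illustrative path),
	// binaries_path resolves to "C:\Program Files\NDI\NDI 6 Runtime\v6/Processing.NDI.Lib.x64.dll"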
	// We can't verify here that the path is valid, but we can tell when it definitely is not (an empty path).
if (binaries_path.Len() > 0)
{
// Load the DLL
this->NDI_LIB_HANDLE = FPlatformProcess::GetDllHandle(*binaries_path);
// Not required, but "correct" (see the SDK documentation)
if (this->NDI_LIB_HANDLE != nullptr && !NDIlib_initialize())
{
// We were unable to initialize the library, so lets free the handle
FPlatformProcess::FreeDllHandle(this->NDI_LIB_HANDLE);
this->NDI_LIB_HANDLE = nullptr;
}
}
// Did we successfully load the NDI library?
return this->NDI_LIB_HANDLE != nullptr;
#endif
#if (PLATFORM_LINUX || PLATFORM_LINUXARM64)
return true;
#endif
}
void FNDIIOPluginModule::ShutdownModuleDependencies()
{
#if PLATFORM_WINDOWS
if (this->NDI_LIB_HANDLE != nullptr)
{
NDIlib_destroy();
FPlatformProcess::FreeDllHandle(this->NDI_LIB_HANDLE);
this->NDI_LIB_HANDLE = nullptr;
}
#endif
#if (PLATFORM_LINUX || PLATFORM_LINUXARM64)
#endif
}
#undef LOCTEXT_NAMESPACE
IMPLEMENT_MODULE(FNDIIOPluginModule, NDIIO);

View File

@@ -0,0 +1,201 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Objects/Libraries/NDIIOLibrary.h>
#include <Services/NDIFinderService.h>
#include <NDIIOPluginModule.h>
#include <FastXml.h>
const TArray<FNDIConnectionInformation> UNDIIOLibrary::K2_GetNDISourceCollection()
{
// Return the FinderServices current network source collection
return FNDIFinderService::GetNetworkSourceCollection();
}
const bool UNDIIOLibrary::K2_FindNetworkSourceByName(UObject* WorldContextObject,
FNDIConnectionInformation& ConnectionInformation,
FString InSourceName)
{
// Ensure that the passed in information is empty
ConnectionInformation.Reset();
// Get the current network source collection from the finder service.
const TArray<FNDIConnectionInformation> NetworkSourceCollection = FNDIFinderService::GetNetworkSourceCollection();
// Get the current number of network source items in the collection
int32 final_count = NetworkSourceCollection.Num();
// Ensure we have a wide range of items to search through.
int32 last_index = final_count;
	// Since the source collection returned is already sorted alphabetically, do a binary search to speed things up,
	// performing only the comparisons that are necessary (O(log n) time complexity)
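	// Invariant: 'last_index' is the exclusive upper bound of the remaining search range, while 'current_index'
	// is the probe position that is moved roughly halfway up or down after each comparison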
for (int32 current_index = 0; current_index < last_index; /* current_index changed in loop */)
{
// Ensure that the index is valid (this will protect against negative values)
if (NetworkSourceCollection.IsValidIndex(current_index))
{
// Get the source reference from the collection
FNDIConnectionInformation source_info = NetworkSourceCollection[current_index];
// do a comparison against the requested SourceName
if (int32 comparitor_value = InSourceName.Compare(source_info.SourceName, ESearchCase::IgnoreCase))
{
				// Our search term sorts before the entry we checked, so search the lower half
if (comparitor_value <= 0)
{
// set the last index to the current index
last_index = current_index;
// get halfway between the last index and the 0th index
current_index = last_index / 2;
}
				// Our search term sorts after the entry we checked, so search the upper half
else if (comparitor_value > 0)
{
// move up half the number of items within the collection
current_index = (last_index + current_index + 1) / 2;
}
}
			// The names match, so we found the requested source.
else
{
// Set the source information structure
ConnectionInformation = source_info;
// return success
return true;
}
}
		// The index went out of range, which should not happen; just return failure
else
return false;
}
return false;
}
bool UNDIIOLibrary::K2_BeginBroadcastingActiveViewport(UObject* WorldContextObject)
{
// Get the plugin module for the owner of this object
if (FNDIIOPluginModule* PluginModule = FModuleManager::GetModulePtr<FNDIIOPluginModule>("NDIIO"))
{
// Call the underlying functionality
return PluginModule->BeginBroadcastingActiveViewport();
}
return false;
}
void UNDIIOLibrary::K2_StopBroadcastingActiveViewport(UObject* WorldContextObject)
{
// Get the plugin module for the owner of this object
if (FNDIIOPluginModule* PluginModule = FModuleManager::GetModulePtr<FNDIIOPluginModule>("NDIIO"))
{
// Call the underlying functionality
PluginModule->StopBroadcastingActiveViewport();
}
}
UNDIMediaReceiver* UNDIIOLibrary::K2_GetNDIMediaReceiver(UNDIMediaReceiver* Receiver)
{
return Receiver;
}
UNDIMediaSender* UNDIIOLibrary::K2_GetNDIMediaSender(UNDIMediaSender* Sender)
{
return Sender;
}
const TArray<FNDIMetaDataElement> UNDIIOLibrary::K2_ParseNDIMetaData(FString Data)
{
class Parser : public IFastXmlCallback
{
public:
Parser(TArray<FNDIMetaDataElement>& ElementsIn)
: Elements(ElementsIn)
{}
virtual ~Parser()
{}
virtual bool ProcessXmlDeclaration(const TCHAR* ElementData, int32 XmlFileLineNumber) override
{
return true;
}
virtual bool ProcessElement(const TCHAR* ElementName, const TCHAR* ElementData, int32 XmlFileLineNumber) override
{
if(CurrentElementStack.Num() > 0)
return false;
FNDIMetaDataElement NewElement;
NewElement.ElementName = ElementName;
NewElement.Data = ElementData;
if(CurrentElementStack.Num() == 0)
{
Elements.Push(NewElement);
CurrentElementStack.Push(&Elements.Last());
}
return true;
}
virtual bool ProcessAttribute(const TCHAR* AttributeName, const TCHAR* AttributeValue) override
{
check(CurrentElementStack.Num() > 0);
FNDIMetaDataElement* CurrentElement = CurrentElementStack.Last();
CurrentElement->Attributes.Add(AttributeName, AttributeValue);
return true;
}
virtual bool ProcessClose(const TCHAR* ElementName) override
{
check(CurrentElementStack.Num() > 0);
CurrentElementStack.Pop();
return true;
}
virtual bool ProcessComment(const TCHAR* Comment) override
{
return true;
}
private:
TArray<FNDIMetaDataElement>& Elements;
TArray<FNDIMetaDataElement*> CurrentElementStack;
};
TArray<FNDIMetaDataElement> Elements;
Parser Parser(Elements);
FText OutErrorMessage;
int32 OutErrorLineNumber;
FFastXml::ParseXmlFile(&Parser,
nullptr, // XmlFilePath
Data.GetCharArray().GetData(), // XmlFileContents
nullptr, // FeedbackContext
false, // bShowSlowTaskDialog
false, // bShowCancelButton
OutErrorMessage, // OutErrorMessage
OutErrorLineNumber // OutErrorLineNumber
);
return Elements;
}
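// Illustrative example (hypothetical input): parsing <ndi_capabilities ntk_ptz="true" web_control="http://host/"/>
// yields a single FNDIMetaDataElement named "ndi_capabilities" carrying two attributes; only top-level elements are handled.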

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,77 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Objects/Media/NDIMediaSoundWave.h>
#include <Objects/Media/NDIMediaReceiver.h>
UNDIMediaSoundWave::UNDIMediaSoundWave(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer)
{
// Set the Default Values for this object
this->bLooping = false;
this->NumChannels = 1;
this->SampleRate = 48000;
this->Duration = INDEFINITELY_LOOPING_DURATION;
}
/**
	Sets the Media Source of this object, so that when the engine asks this object to generate PCM data
	we can request it from the currently connected source
*/
void UNDIMediaSoundWave::SetConnectionSource(UNDIMediaReceiver* InMediaSource)
{
// Ensure there is no thread contention for generating pcm data from the connection source
FScopeLock Lock(&SyncContext);
// Do we have a media source object to work with
if (this->MediaSource != nullptr)
{
// Are we already registered with the incoming media source object
if (this->MediaSource != InMediaSource)
{
		// We are switching to a different source, so make sure
		// to unregister from the previous source
this->MediaSource->UnregisterAudioWave(this);
}
}
// Ensure we have a reference to the media source object
this->MediaSource = InMediaSource;
}
/**
Called by the engine to generate pcm data to be 'heard' by audio listener objects
*/
int32 UNDIMediaSoundWave::OnGeneratePCMAudio(TArray<uint8>& OutAudio, int32 NumSamples)
{
// Ensure there is no thread contention for generating pcm data from the connection source
FScopeLock Lock(&SyncContext);
// set the default value, in case we have no connection source
int32 samples_generated = 0;
OutAudio.Reset();
OutAudio.AddZeroed(NumSamples * sizeof(int16));
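	// the buffer is zero-filled above, so silence is returned whenever no media source is connected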
// check the connection source and continue
if (this->MediaSource != nullptr)
{
samples_generated = MediaSource->GeneratePCMData(this, OutAudio.GetData(), NumSamples);
}
// return to the engine the number of samples actually generated
return samples_generated;
}
bool UNDIMediaSoundWave::IsReadyForFinishDestroy()
{
// Ensure that there is no thread contention for generating data
FScopeLock Lock(&SyncContext);
return USoundWaveProcedural::IsReadyForFinishDestroy();
}

View File

@@ -0,0 +1,153 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Objects/Media/NDIMediaTexture2D.h>
#include <Objects/Media/NDIMediaTextureResource.h>
#include <Misc/EngineVersionComparison.h>
UNDIMediaTexture2D::UNDIMediaTexture2D(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer)
{
this->SetMyResource(nullptr);
}
void UNDIMediaTexture2D::UpdateTextureReference(FRHICommandList& RHICmdList, FTextureRHIRef Reference)
{
if (GetMyResource() != nullptr)
{
if (Reference.IsValid() && GetMyResource()->TextureRHI != Reference)
{
GetMyResource()->TextureRHI = (FTextureRHIRef&)Reference;
RHIUpdateTextureReference(TextureReference.TextureReferenceRHI, GetMyResource()->TextureRHI);
}
else if (!Reference.IsValid())
{
if (FNDIMediaTextureResource* TextureResource = static_cast<FNDIMediaTextureResource*>(this->GetMyResource()))
{
ENQUEUE_RENDER_COMMAND(FNDIMediaTexture2DUpdateTextureReference)
([this](FRHICommandListImmediate& RHICmdList) {
static int32 DefaultWidth = 1280;
static int32 DefaultHeight = 720;
					// Point the video texture at a default placeholder texture instead of an NDI frame
TRefCountPtr<FRHITexture> RenderableTexture;
const FRHITextureCreateDesc CreateDesc = FRHITextureCreateDesc::Create2D(TEXT("NDIMediaTexture2DUpdateTextureReference"))
.SetExtent(DefaultWidth, DefaultHeight)
.SetFormat(EPixelFormat::PF_B8G8R8A8)
.SetNumMips(1)
.SetFlags(ETextureCreateFlags::Dynamic | ETextureCreateFlags::RenderTargetable)
.SetClearValue(FClearValueBinding(FLinearColor(0.0f, 0.0f, 0.0f)));
RenderableTexture = RHICreateTexture(CreateDesc);
GetMyResource()->TextureRHI = (FTextureRHIRef&)RenderableTexture;
RHIUpdateTextureReference(TextureReference.TextureReferenceRHI, GetMyResource()->TextureRHI);
});
// Make sure _RenderThread is executed before continuing
FlushRenderingCommands();
}
}
}
}
FTextureResource* UNDIMediaTexture2D::CreateResource()
{
if (this->GetMyResource() != nullptr)
{
delete this->GetMyResource();
this->SetMyResource(nullptr);
}
if (FNDIMediaTextureResource* TextureResource = new FNDIMediaTextureResource(this))
{
this->SetMyResource(TextureResource);
ENQUEUE_RENDER_COMMAND(FNDIMediaTexture2DUpdateTextureReference)
([this](FRHICommandListImmediate& RHICmdList) {
static int32 DefaultWidth = 1280;
static int32 DefaultHeight = 720;
			// Point the video texture at a default placeholder texture instead of an NDI frame
TRefCountPtr<FRHITexture> RenderableTexture;
const FRHITextureCreateDesc CreateDesc = FRHITextureCreateDesc::Create2D(TEXT("NDIMediaTexture2DCreateResourceTexture"))
.SetExtent(DefaultWidth, DefaultHeight)
.SetFormat(EPixelFormat::PF_B8G8R8A8)
.SetNumMips(1)
.SetFlags(ETextureCreateFlags::Dynamic | ETextureCreateFlags::RenderTargetable)
.SetClearValue(FClearValueBinding(FLinearColor(0.0f, 0.0f, 0.0f)));
RenderableTexture = RHICreateTexture(CreateDesc);
GetMyResource()->TextureRHI = (FTextureRHIRef&)RenderableTexture;
RHIUpdateTextureReference(TextureReference.TextureReferenceRHI, GetMyResource()->TextureRHI);
});
}
return this->GetMyResource();
}
void UNDIMediaTexture2D::GetResourceSizeEx(FResourceSizeEx& CumulativeResourceSize)
{
Super::GetResourceSizeEx(CumulativeResourceSize);
if (FNDIMediaTextureResource* CurrentResource = static_cast<FNDIMediaTextureResource*>(this->GetMyResource()))
{
CumulativeResourceSize.AddUnknownMemoryBytes(CurrentResource->GetResourceSize());
}
}
float UNDIMediaTexture2D::GetSurfaceHeight() const
{
return GetMyResource() != nullptr ? GetMyResource()->GetSizeY() : 0.0f;
}
float UNDIMediaTexture2D::GetSurfaceWidth() const
{
return GetMyResource() != nullptr ? GetMyResource()->GetSizeX() : 0.0f;
}
float UNDIMediaTexture2D::GetSurfaceDepth() const
{
return 0.0f;
}
uint32 UNDIMediaTexture2D::GetSurfaceArraySize() const
{
return 0;
}
EMaterialValueType UNDIMediaTexture2D::GetMaterialType() const
{
return MCT_Texture2D;
}
ETextureClass UNDIMediaTexture2D::GetTextureClass() const
{
return ETextureClass::Other2DNoSource;
}
void UNDIMediaTexture2D::SetMyResource(FTextureResource* ResourceIn)
{
SetResource(ResourceIn);
}
FTextureResource* UNDIMediaTexture2D::GetMyResource()
{
return GetResource();
}
const FTextureResource* UNDIMediaTexture2D::GetMyResource() const
{
return GetResource();
}

View File

@@ -0,0 +1,73 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Objects/Media/NDIMediaTextureResource.h>
#include <RHI.h>
#include <DeviceProfiles/DeviceProfile.h>
#include <DeviceProfiles/DeviceProfileManager.h>
#include <Objects/Media/NDIMediaTexture2D.h>
#include <RenderUtils.h>
/**
Constructs a new instance of this object specifying a media texture owner
@param Owner The media object used as the owner for this object
*/
FNDIMediaTextureResource::FNDIMediaTextureResource(UNDIMediaTexture2D* Owner)
{
this->MediaTexture = Owner;
}
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
void FNDIMediaTextureResource::InitRHI(FRHICommandListBase& RHICmdList)
#else
void FNDIMediaTextureResource::InitDynamicRHI()
#endif
{
if (this->MediaTexture != nullptr)
{
FSamplerStateInitializerRHI SamplerStateInitializer(
(ESamplerFilter)UDeviceProfileManager::Get().GetActiveProfile()->GetTextureLODSettings()->GetSamplerFilter(
MediaTexture),
AM_Border, AM_Border, AM_Wrap);
SamplerStateRHI = RHICreateSamplerState(SamplerStateInitializer);
}
}
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
void FNDIMediaTextureResource::ReleaseRHI()
#else
void FNDIMediaTextureResource::ReleaseDynamicRHI()
#endif
{
// Release the TextureRHI bound by this object
this->TextureRHI.SafeRelease();
	// Ensure that we have an owning media texture
if (this->MediaTexture != nullptr)
{
// Remove the texture reference associated with the owner texture object
RHIUpdateTextureReference(MediaTexture->TextureReference.TextureReferenceRHI, nullptr);
}
}
SIZE_T FNDIMediaTextureResource::GetResourceSize()
{
return CalcTextureSize(GetSizeX(), GetSizeY(), EPixelFormat::PF_A8R8G8B8, 1);
}
uint32 FNDIMediaTextureResource::GetSizeX() const
{
return this->TextureRHI.IsValid() ? TextureRHI->GetSizeXYZ().X : 0;
}
uint32 FNDIMediaTextureResource::GetSizeY() const
{
return this->TextureRHI.IsValid() ? TextureRHI->GetSizeXYZ().Y : 0;
}

View File

@@ -0,0 +1,492 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include "NDIMediaPlayer.h"
#include <MediaIOCoreSamples.h>
#include <MediaIOCoreTextureSampleBase.h>
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
#include <MediaIOCoreTextureSampleConverter.h>
#endif
#include <MediaIOCoreAudioSampleBase.h>
#include <IMediaEventSink.h>
#include <IMediaTextureSampleConverter.h>
#include <Misc/EngineVersionComparison.h>
#define LOCTEXT_NAMESPACE "FNDIMediaPlayer"
// An NDI-derived media texture sample, representing a frame of video
class NDIMediaTextureSample : public FMediaIOCoreTextureSampleBase, public IMediaTextureSampleConverter
{
using Super = FMediaIOCoreTextureSampleBase;
public:
NDIMediaTextureSample() = default;
virtual ~NDIMediaTextureSample() = default;
bool Initialize(const NDIlib_video_frame_v2_t& InVideoFrame, FTimespan InTime, UNDIMediaReceiver* InReceiver)
{
FreeSample();
VideoFrame = InVideoFrame;
Receiver = InReceiver;
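		// UYVY is packed 4:2:2 at 2 bytes per pixel, so line_stride_in_bytes * yres covers the whole frame;
		// UYVA appends a separate 8-bit alpha plane of xres * yres bytes after the UYVY data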
if (InVideoFrame.FourCC == NDIlib_FourCC_video_type_UYVY)
SetBuffer(InVideoFrame.p_data, InVideoFrame.line_stride_in_bytes * InVideoFrame.yres);
else if (InVideoFrame.FourCC == NDIlib_FourCC_video_type_UYVA)
SetBuffer(InVideoFrame.p_data, InVideoFrame.line_stride_in_bytes * InVideoFrame.yres +
InVideoFrame.xres*InVideoFrame.yres);
else
return false;
VideoFrame.p_data = Buffer.GetData();
SetProperties(InVideoFrame.line_stride_in_bytes, InVideoFrame.xres, InVideoFrame.yres, EMediaTextureSampleFormat::CharUYVY,
InTime, FFrameRate(InVideoFrame.frame_rate_N, InVideoFrame.frame_rate_D), FTimecode(),
true);
return true;
}
virtual const FMatrix& GetYUVToRGBMatrix() const override
{
return MediaShaders::YuvToRgbRec709Scaled;
}
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
virtual void CopyConfiguration(const TSharedPtr<FMediaIOCoreTextureSampleBase>& SourceSample) override
{
Super::CopyConfiguration(SourceSample);
if (SourceSample.IsValid())
{
TSharedPtr<NDIMediaTextureSample> NDISamplePtr = StaticCastSharedPtr<NDIMediaTextureSample>(SourceSample);
VideoFrame = NDISamplePtr->VideoFrame;
Receiver = NDISamplePtr->Receiver;
}
}
#endif
virtual uint32 GetConverterInfoFlags() const override
{
return ConverterInfoFlags_WillCreateOutputTexture;
}
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 5)) // 5.5 or later
virtual bool Convert(FRHICommandListImmediate& RHICmdList, FTextureRHIRef& InDstTexture, const FConversionHints& Hints) override
{
if (!Receiver)
return false;
FTextureRHIRef DstTexture(Receiver->DisplayFrame(VideoFrame));
InDstTexture = DstTexture;
return true;
}
#else
virtual bool Convert(FTexture2DRHIRef & InDstTexture, const FConversionHints & Hints) override
{
if (!Receiver)
return false;
FTexture2DRHIRef DstTexture(Receiver->DisplayFrame(VideoFrame));
InDstTexture = DstTexture;
return true;
}
#endif
private:
NDIlib_video_frame_v2_t VideoFrame;
UNDIMediaReceiver* Receiver { nullptr };
//FMediaTimeStamp Time;
//std::vector<uint8_t> Data;
};
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
class NDIMediaTextureSampleConverter : public FMediaIOCoreTextureSampleConverter
{
using Super = FMediaIOCoreTextureSampleConverter;
public:
NDIMediaTextureSampleConverter() = default;
virtual ~NDIMediaTextureSampleConverter() = default;
virtual void Setup(const TSharedPtr<FMediaIOCoreTextureSampleBase>& InSample) override
{
FMediaIOCoreTextureSampleConverter::Setup(InSample);
JITRProxySample = InSample;
}
virtual uint32 GetConverterInfoFlags() const override
{
return ConverterInfoFlags_WillCreateOutputTexture;
}
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 5)) // 5.5 or later
virtual bool Convert(FRHICommandListImmediate& RHICmdList, FTextureRHIRef& InDstTexture, const FConversionHints& Hints) override
{
if (FMediaIOCoreTextureSampleConverter::Convert(RHICmdList, InDstTexture, Hints))
{
TSharedPtr<FMediaIOCoreTextureSampleBase> SamplePtr = JITRProxySample.Pin();
if (SamplePtr.IsValid())
{
TSharedPtr<NDIMediaTextureSample> NDISamplePtr = StaticCastSharedPtr<NDIMediaTextureSample>(SamplePtr);
return NDISamplePtr->Convert(RHICmdList, InDstTexture, Hints);
}
}
return false;
}
#else
virtual bool Convert(FTexture2DRHIRef& InDstTexture, const FConversionHints& Hints) override
{
if (FMediaIOCoreTextureSampleConverter::Convert(InDstTexture, Hints))
{
TSharedPtr<FMediaIOCoreTextureSampleBase> SamplePtr = JITRProxySample.Pin();
if (SamplePtr.IsValid())
{
TSharedPtr<NDIMediaTextureSample> NDISamplePtr = StaticCastSharedPtr<NDIMediaTextureSample>(SamplePtr);
return NDISamplePtr->Convert(InDstTexture, Hints);
}
}
return false;
}
#endif
private:
TWeakPtr<FMediaIOCoreTextureSampleBase> JITRProxySample;
};
#endif
class NDIMediaTextureSamplePool : public TMediaObjectPool<NDIMediaTextureSample>
{};
// An NDI-derived media audio sample, representing a frame of audio
class NDIMediaAudioSample : public FMediaIOCoreAudioSampleBase
{
using Super = FMediaIOCoreAudioSampleBase;
public:
};
class NDIMediaAudioSamplePool : public TMediaObjectPool<NDIMediaAudioSample>
{};
FNDIMediaPlayer::FNDIMediaPlayer(IMediaEventSink& InEventSink)
: Super(InEventSink)
, NDIPlayerState(EMediaState::Closed)
, EventSink(InEventSink)
, TextureSamplePool(new NDIMediaTextureSamplePool)
, AudioSamplePool(new NDIMediaAudioSamplePool)
{}
FNDIMediaPlayer::~FNDIMediaPlayer()
{
Close();
delete TextureSamplePool;
delete AudioSamplePool;
}
FGuid FNDIMediaPlayer::GetPlayerPluginGUID() const
{
static FGuid PlayerPluginGUID(0x71b13c2b, 0x70874965, 0x8a0e23f7, 0x5be6698f);
return PlayerPluginGUID;
}
bool FNDIMediaPlayer::Open(const FString& Url, const IMediaOptions* Options)
{
if (!Super::Open(Url, Options))
{
return false;
}
MaxNumVideoFrameBuffer = Options->GetMediaOption(NDIMediaOption::MaxVideoFrameBuffer, (int64)8);
MaxNumAudioFrameBuffer = Options->GetMediaOption(NDIMediaOption::MaxAudioFrameBuffer, (int64)8);
MaxNumMetadataFrameBuffer = Options->GetMediaOption(NDIMediaOption::MaxAncillaryFrameBuffer, (int64)8);
// Setup our different supported channels based on source settings
SetupSampleChannels();
// If the player is opened with an NDIMediaReceiver, use that. Otherwise create an internal one.
bool bIsNDIMediaReceiver = Options->HasMediaOption(NDIMediaOption::IsNDIMediaReceiver);
if (bIsNDIMediaReceiver)
{
Receiver = static_cast<UNDIMediaReceiver*>(const_cast<IMediaOptions*>(Options));
bInternalReceiver = false;
}
else
{
Receiver = NewObject<UNDIMediaReceiver>();
bInternalReceiver = true;
}
// Hook into the video and audio captures
Receiver->OnNDIReceiverVideoCaptureEvent.Remove(VideoCaptureEventHandle);
VideoCaptureEventHandle = Receiver->OnNDIReceiverVideoCaptureEvent.AddLambda([this](UNDIMediaReceiver* receiver, const NDIlib_video_frame_v2_t& video_frame)
{
this->DisplayFrame(video_frame);
});
Receiver->OnNDIReceiverAudioCaptureEvent.Remove(AudioCaptureEventHandle);
AudioCaptureEventHandle = Receiver->OnNDIReceiverAudioCaptureEvent.AddLambda([this](UNDIMediaReceiver* receiver, const NDIlib_audio_frame_v2_t& audio_frame)
{
this->PlayAudio(audio_frame);
});
// Control the player's state based on the receiver connecting and disconnecting
Receiver->OnNDIReceiverConnectedEvent.Remove(ConnectedEventHandle);
ConnectedEventHandle = Receiver->OnNDIReceiverConnectedEvent.AddLambda([this](UNDIMediaReceiver* receiver)
{
this->NDIPlayerState = EMediaState::Playing;
});
Receiver->OnNDIReceiverDisconnectedEvent.Remove(DisconnectedEventHandle);
DisconnectedEventHandle = Receiver->OnNDIReceiverDisconnectedEvent.AddLambda([this](UNDIMediaReceiver* receiver)
{
this->NDIPlayerState = EMediaState::Closed;
});
// Get ready to connect
CurrentState = EMediaState::Preparing;
NDIPlayerState = EMediaState::Preparing;
EventSink.ReceiveMediaEvent(EMediaEvent::MediaConnecting);
// Start up the receiver under the player's control.
// Use the provided URL as the source if given, otherwise use the connection info set for the receiver
FString Scheme;
FString Location;
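	// The expected URL form is "<scheme>://<NDI source name>", e.g. "ndiio://MACHINE (Channel 1)" (hypothetical
	// source name); everything after "://" is used verbatim as the source name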
if (Url.Split(TEXT("://"), &Scheme, &Location, ESearchCase::CaseSensitive))
{
FNDIConnectionInformation ConnectionInformation = Receiver->ConnectionSetting;
ConnectionInformation.SourceName = Location;
Receiver->Initialize(ConnectionInformation, UNDIMediaReceiver::EUsage::Controlled);
}
else
{
Receiver->Initialize(UNDIMediaReceiver::EUsage::Controlled);
}
return true;
}
void FNDIMediaPlayer::Close()
{
NDIPlayerState = EMediaState::Closed;
if (Receiver != nullptr)
{
// Disconnect from receiver events
Receiver->OnNDIReceiverVideoCaptureEvent.Remove(VideoCaptureEventHandle);
VideoCaptureEventHandle.Reset();
Receiver->OnNDIReceiverAudioCaptureEvent.Remove(AudioCaptureEventHandle);
AudioCaptureEventHandle.Reset();
Receiver->OnNDIReceiverConnectedEvent.Remove(ConnectedEventHandle);
ConnectedEventHandle.Reset();
Receiver->OnNDIReceiverDisconnectedEvent.Remove(DisconnectedEventHandle);
DisconnectedEventHandle.Reset();
// Shut down the receiver
Receiver->Shutdown();
// If the player created the receiver, destroy the receiver
if (bInternalReceiver)
Receiver->ConditionalBeginDestroy();
Receiver = nullptr;
bInternalReceiver = false;
}
TextureSamplePool->Reset();
AudioSamplePool->Reset();
Super::Close();
}
void FNDIMediaPlayer::TickInput(FTimespan DeltaTime, FTimespan Timecode)
{
// Update player state
EMediaState NewState = NDIPlayerState;
if (NewState != CurrentState)
{
CurrentState = NewState;
if (CurrentState == EMediaState::Playing)
{
EventSink.ReceiveMediaEvent(EMediaEvent::TracksChanged);
EventSink.ReceiveMediaEvent(EMediaEvent::MediaOpened);
EventSink.ReceiveMediaEvent(EMediaEvent::PlaybackResumed);
}
else if (NewState == EMediaState::Error)
{
EventSink.ReceiveMediaEvent(EMediaEvent::MediaOpenFailed);
Close();
}
}
if (CurrentState != EMediaState::Playing)
{
return;
}
TickTimeManagement();
}
void FNDIMediaPlayer::TickFetch(FTimespan DeltaTime, FTimespan Timecode)
{
Super::TickFetch(DeltaTime, Timecode);
if ((CurrentState == EMediaState::Preparing) || (CurrentState == EMediaState::Playing))
{
if (Receiver != nullptr)
{
// Ask receiver to capture a new frame of video and audio.
// Will call DisplayFrame() and PlayAudio() through capture event.
Receiver->CaptureConnectedAudio();
Receiver->CaptureConnectedVideo();
}
}
if (CurrentState == EMediaState::Playing)
{
ProcessFrame();
VerifyFrameDropCount();
}
}
void FNDIMediaPlayer::ProcessFrame()
{
if (CurrentState == EMediaState::Playing)
{
// No need to lock here. That info is only used for debug information.
//AudioTrackFormat.NumChannels = 0;//NDIThreadAudioChannels;
//AudioTrackFormat.SampleRate = 0;//NDIThreadAudioSampleRate;
}
}
void FNDIMediaPlayer::DisplayFrame(const NDIlib_video_frame_v2_t& video_frame)
{
auto TextureSample = TextureSamplePool->AcquireShared();
if (TextureSample->Initialize(video_frame, FTimespan::FromSeconds(GetPlatformSeconds()), Receiver))
{
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
AddVideoSample(TextureSample);
#else
Samples->AddVideo(TextureSample);
#endif
}
}
void FNDIMediaPlayer::PlayAudio(const NDIlib_audio_frame_v2_t& audio_frame)
{
auto AudioSample = AudioSamplePool->AcquireShared();
// UE wants 32bit signed interleaved audio data, so need to convert the NDI audio.
// Fortunately the NDI library has a utility function to do that.
// Get a buffer to convert to
const int32 available_samples = audio_frame.no_samples * audio_frame.no_channels;
void* SampleBuffer = AudioSample->RequestBuffer(available_samples);
if (SampleBuffer != nullptr)
{
// Format to convert to
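		// Parameters: sample rate, channel count, samples per channel, timecode, audio reference level in dB
		// (20 dB is the commonly used headroom value), and the destination buffer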
NDIlib_audio_frame_interleaved_32s_t audio_frame_32s(
audio_frame.sample_rate,
audio_frame.no_channels,
audio_frame.no_samples,
audio_frame.timecode,
20,
static_cast<int32_t*>(SampleBuffer));
// Convert received NDI audio
NDIlib_util_audio_to_interleaved_32s_v2(&audio_frame, &audio_frame_32s);
// Supply converted audio data
if (AudioSample->SetProperties(available_samples
, audio_frame_32s.no_channels
, audio_frame_32s.sample_rate
, FTimespan::FromSeconds(GetPlatformSeconds())
, TOptional<FTimecode>()))
{
Samples->AddAudio(AudioSample);
}
}
}
void FNDIMediaPlayer::VerifyFrameDropCount()
{
}
bool FNDIMediaPlayer::IsHardwareReady() const
{
return NDIPlayerState == EMediaState::Playing ? true : false;
}
void FNDIMediaPlayer::SetupSampleChannels()
{
FMediaIOSamplingSettings VideoSettings = BaseSettings;
VideoSettings.BufferSize = MaxNumVideoFrameBuffer;
Samples->InitializeVideoBuffer(VideoSettings);
FMediaIOSamplingSettings AudioSettings = BaseSettings;
AudioSettings.BufferSize = MaxNumAudioFrameBuffer;
Samples->InitializeAudioBuffer(AudioSettings);
FMediaIOSamplingSettings MetadataSettings = BaseSettings;
MetadataSettings.BufferSize = MaxNumMetadataFrameBuffer;
Samples->InitializeMetadataBuffer(MetadataSettings);
}
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
TSharedPtr<FMediaIOCoreTextureSampleBase> FNDIMediaPlayer::AcquireTextureSample_AnyThread() const
{
return TextureSamplePool->AcquireShared();
}
TSharedPtr<FMediaIOCoreTextureSampleConverter> FNDIMediaPlayer::CreateTextureSampleConverter() const
{
return MakeShared<NDIMediaTextureSampleConverter>();
}
#endif
//~ ITimedDataInput interface
#if WITH_EDITOR
const FSlateBrush* FNDIMediaPlayer::GetDisplayIcon() const
{
return nullptr;
}
#endif
#undef LOCTEXT_NAMESPACE

View File

@@ -0,0 +1,75 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#pragma once
#include <NDIIOPluginAPI.h>
#include <Objects/Media/NDIMediaReceiver.h>
#include <MediaIOCorePlayerBase.h>
class FNDIMediaPlayer : public FMediaIOCorePlayerBase
{
using Super = FMediaIOCorePlayerBase;
public:
FNDIMediaPlayer(IMediaEventSink& InEventSink);
virtual ~FNDIMediaPlayer();
//~ IMediaPlayer interface
virtual FGuid GetPlayerPluginGUID() const override;
virtual bool Open(const FString& Url, const IMediaOptions* Options) override;
virtual void Close() override;
virtual void TickInput(FTimespan DeltaTime, FTimespan Timecode) override;
virtual void TickFetch(FTimespan DeltaTime, FTimespan Timecode) override;
protected:
virtual bool IsHardwareReady() const override;
virtual void SetupSampleChannels() override;
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 3)) // 5.3 or later
virtual TSharedPtr<FMediaIOCoreTextureSampleBase> AcquireTextureSample_AnyThread() const override;
virtual TSharedPtr<FMediaIOCoreTextureSampleConverter> CreateTextureSampleConverter() const override;
#endif
void DisplayFrame(const NDIlib_video_frame_v2_t& video_frame);
void PlayAudio(const NDIlib_audio_frame_v2_t& audio_frame);
void ProcessFrame();
void VerifyFrameDropCount();
public:
//~ ITimedDataInput interface
#if WITH_EDITOR
virtual const FSlateBrush* GetDisplayIcon() const override;
#endif
private:
/** Max sample count our different buffer can hold. Taken from MediaSource */
int32 MaxNumAudioFrameBuffer = 0;
int32 MaxNumMetadataFrameBuffer = 0;
int32 MaxNumVideoFrameBuffer = 0;
/** Current state of the media player. */
EMediaState NDIPlayerState = EMediaState::Closed;
/** The media event handler. */
IMediaEventSink& EventSink;
UNDIMediaReceiver* Receiver = nullptr;
bool bInternalReceiver = true;
FDelegateHandle VideoCaptureEventHandle;
FDelegateHandle AudioCaptureEventHandle;
FDelegateHandle ConnectedEventHandle;
FDelegateHandle DisconnectedEventHandle;
class NDIMediaTextureSamplePool* TextureSamplePool;
class NDIMediaAudioSamplePool* AudioSamplePool;
};

View File

@@ -0,0 +1,404 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Services/NDIConnectionService.h>
#include <UObject/UObjectGlobals.h>
#include <UObject/Package.h>
#include <Misc/CoreDelegates.h>
#include <NDIIOPluginSettings.h>
#include <Objects/Media/NDIMediaSender.h>
#include <Framework/Application/SlateApplication.h>
#include <Misc/EngineVersionComparison.h>
#include <Engine/Engine.h>
#include <TextureResource.h>
#if WITH_EDITOR
#include <Editor.h>
#endif
/** Define Global Accessors */
FNDIConnectionServiceSendVideoEvent FNDIConnectionService::EventOnSendVideoFrame;
TMap<USoundSubmix*, FNDIConnectionServiceSendAudioEvent> FNDIConnectionService::SubmixSendAudioFrameEvents;
FCriticalSection FNDIConnectionService::AudioSyncContext;
FCriticalSection FNDIConnectionService::RenderSyncContext;
/** ************************ **/
/**
Constructs a new instance of this object
*/
FNDIConnectionService::FNDIConnectionService() {}
// Begin the service
bool FNDIConnectionService::Start()
{
if (!bIsInitialized)
{
bIsInitialized = true;
// Define some basic properties
FNDIBroadcastConfiguration Configuration;
FString BroadcastName = TEXT("Unreal Engine");
EObjectFlags Flags = RF_Public | RF_Standalone | RF_Transient | RF_MarkAsNative;
bool bBeginBroadcastOnPlay = false;
// Load the plugin settings for broadcasting the active viewport
if (auto* CoreSettings = NewObject<UNDIIOPluginSettings>())
{
// Define the configuration properties
Configuration.FrameRate = CoreSettings->BroadcastRate;
Configuration.FrameSize = FIntPoint(FMath::Clamp(CoreSettings->PreferredFrameSize.X, 240, 3840),
FMath::Clamp(CoreSettings->PreferredFrameSize.Y, 240, 3840));
// Set the broadcast name
BroadcastName = CoreSettings->ApplicationStreamName;
bBeginBroadcastOnPlay = CoreSettings->bBeginBroadcastOnPlay;
// clean-up the settings object
CoreSettings->ConditionalBeginDestroy();
CoreSettings = nullptr;
}
/** Construct the Active Viewport video texture */
this->VideoTexture = NewObject<UTextureRenderTarget2D>(
GetTransientPackage(), UTextureRenderTarget2D::StaticClass(), TEXT("NDIViewportVideoTexture"), Flags);
/** Construct the active viewport sender */
this->ActiveViewportSender = NewObject<UNDIMediaSender>(GetTransientPackage(), UNDIMediaSender::StaticClass(),
TEXT("NDIViewportSender"), Flags);
VideoTexture->UpdateResource();
// Update the active viewport sender, with the properties defined in the settings configuration
this->ActiveViewportSender->ChangeSourceName(BroadcastName);
this->ActiveViewportSender->ChangeVideoTexture(VideoTexture);
this->ActiveViewportSender->ChangeBroadcastConfiguration(Configuration);
// Hook into the core for the end of frame handlers
FCoreDelegates::OnEndFrameRT.AddRaw(this, &FNDIConnectionService::OnEndRenderFrame);
if (!GIsEditor)
{
FCoreDelegates::OnPostEngineInit.AddRaw(this, &FNDIConnectionService::OnPostEngineInit);
FCoreDelegates::OnEnginePreExit.AddRaw(this, &FNDIConnectionService::OnEnginePreExit);
if (bBeginBroadcastOnPlay)
BeginBroadcastingActiveViewport();
}
#if WITH_EDITOR
else
{
FEditorDelegates::PostPIEStarted.AddLambda([this](const bool Success) {
if (auto* CoreSettings = NewObject<UNDIIOPluginSettings>())
{
if (CoreSettings->bBeginBroadcastOnPlay == true)
BeginBroadcastingActiveViewport();
else
BeginAudioCapture();
// clean-up the settings object
CoreSettings->ConditionalBeginDestroy();
CoreSettings = nullptr;
}
bIsInPIEMode = true;
});
FEditorDelegates::PrePIEEnded.AddLambda([this](const bool Success) { StopBroadcastingActiveViewport(); });
}
#endif
}
return true;
}
// Stop the service
void FNDIConnectionService::Shutdown()
{
// Wait for the sync context locks
FScopeLock AudioLock(&AudioSyncContext);
FScopeLock RenderLock(&RenderSyncContext);
// reset the initialization properties
bIsInitialized = false;
StopAudioCapture();
// unbind our handlers for our frame events
FCoreDelegates::OnEndFrame.RemoveAll(this);
FCoreDelegates::OnEndFrameRT.RemoveAll(this);
// Cleanup the broadcasting of the active viewport
StopBroadcastingActiveViewport();
}
// Handler for when the render thread frame has ended
void FNDIConnectionService::OnEndRenderFrame()
{
FScopeLock Lock(&RenderSyncContext);
if (bIsInitialized)
{
int64 ticks = FDateTime::Now().GetTimeOfDay().GetTicks();
if (FNDIConnectionService::EventOnSendVideoFrame.IsBound())
{
FNDIConnectionService::EventOnSendVideoFrame.Broadcast(ticks);
}
}
}
void FNDIConnectionService::BeginAudioCapture()
{
if (bIsInitialized)
{
if (!bIsAudioInitialized)
{
if (GEngine)
{
FAudioDeviceHandle AudioDevice = GEngine->GetActiveAudioDevice();
if (AudioDevice.IsValid())
{
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 4)) // 5.4 or later
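					// A nullptr submix key denotes the engine's main submix; register the listener on the main
					// submix object in that case, otherwise on the specific submix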
for (auto& SendAudioEvent : SubmixSendAudioFrameEvents)
{
if (SendAudioEvent.Key == nullptr)
AudioDevice->RegisterSubmixBufferListener(AsShared(), AudioDevice->GetMainSubmixObject());
else
AudioDevice->RegisterSubmixBufferListener(AsShared(), *SendAudioEvent.Key);
}
#else
AudioDevice->RegisterSubmixBufferListener(this);
#endif
bIsAudioInitialized = true;
}
}
}
}
}
void FNDIConnectionService::StopAudioCapture()
{
if (bIsAudioInitialized)
{
if (GEngine)
{
FAudioDeviceHandle AudioDevice = GEngine->GetActiveAudioDevice();
if (AudioDevice)
{
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 4)) // 5.4 or later
for (auto& SendAudioEvent : SubmixSendAudioFrameEvents)
{
if (SendAudioEvent.Key == nullptr)
AudioDevice->UnregisterSubmixBufferListener(AsShared(), AudioDevice->GetMainSubmixObject());
else
AudioDevice->UnregisterSubmixBufferListener(AsShared(), *SendAudioEvent.Key);
}
#else
AudioDevice->UnregisterSubmixBufferListener(this);
#endif
}
}
bIsAudioInitialized = false;
}
}
void FNDIConnectionService::OnPostEngineInit()
{
BeginAudioCapture();
}
void FNDIConnectionService::OnEnginePreExit()
{
StopAudioCapture();
}
bool FNDIConnectionService::BeginBroadcastingActiveViewport()
{
if (!bIsBroadcastingActiveViewport && IsValid(ActiveViewportSender))
{
// Load the plugin settings for broadcasting the active viewport
if (auto* CoreSettings = NewObject<UNDIIOPluginSettings>())
{
// Define some basic properties
FNDIBroadcastConfiguration Configuration;
FString BroadcastName = TEXT("Unreal Engine");
// Define the configuration properties
Configuration.FrameRate = CoreSettings->BroadcastRate;
Configuration.FrameSize = FIntPoint(FMath::Clamp(CoreSettings->PreferredFrameSize.X, 240, 3840),
FMath::Clamp(CoreSettings->PreferredFrameSize.Y, 240, 3840));
// Set the broadcast name
BroadcastName = CoreSettings->ApplicationStreamName;
// clean-up the settings object
CoreSettings->ConditionalBeginDestroy();
CoreSettings = nullptr;
// Update the active viewport sender, with the properties defined in the settings configuration
this->ActiveViewportSender->ChangeSourceName(BroadcastName);
this->ActiveViewportSender->ChangeBroadcastConfiguration(Configuration);
}
// we don't want to perform the linear conversion for the active viewport,
// since it's already had the conversion completed by the engine before passing to the sender
ActiveViewportSender->PerformLinearTosRGBConversion(false);
// Do not enable PTZ capabilities for active viewport sender
ActiveViewportSender->EnablePTZ(false);
// Initialize the sender, this will automatically start rendering output via NDI
ActiveViewportSender->Initialize(nullptr);
// We've initialized the active viewport
bIsBroadcastingActiveViewport = true;
// However we need to update the 'Video Texture' to the active viewport back buffer...
FSlateApplication::Get().GetRenderer()->OnPreResizeWindowBackBuffer().AddRaw(
this, &FNDIConnectionService::OnActiveViewportBackbufferPreResize);
FSlateApplication::Get().GetRenderer()->OnBackBufferReadyToPresent().AddRaw(
this, &FNDIConnectionService::OnActiveViewportBackbufferReadyToPresent);
BeginAudioCapture();
}
// always return true
return true;
}
// Handler for when the active viewport back buffer has been resized
void FNDIConnectionService::OnActiveViewportBackbufferPreResize(void* Backbuffer)
{
check(IsInGameThread());
// Ensure we have a valid video texture
FTextureResource* TextureResource = GetVideoTextureResource();
if (TextureResource != nullptr)
{
FRenderCommandFence Fence;
TextureResource->TextureRHI.SafeRelease();
this->ActiveViewportSender->ChangeVideoTexture(VideoTexture);
ENQUEUE_RENDER_COMMAND(FlushRHIThreadToUpdateTextureRenderTargetReference)(
[this](FRHICommandListImmediate& RHICmdList)
{
RHIUpdateTextureReference(VideoTexture->TextureReference.TextureReferenceRHI, nullptr);
RHICmdList.ImmediateFlush(EImmediateFlushType::FlushRHIThread);
});
// Wait for render thread to finish, so that renderthread texture references are updated
Fence.BeginFence();
Fence.Wait();
}
}
// Handler for when the back buffer is ready to present to the end user
void FNDIConnectionService::OnActiveViewportBackbufferReadyToPresent(SWindow& Window,
const FTextureRHIRef& Backbuffer)
{
if (Window.GetType() == EWindowType::GameWindow || (Window.IsRegularWindow() && IsRunningInPIE()))
{
FTextureResource* TextureResource = GetVideoTextureResource();
if (TextureResource != nullptr)
{
			// Only update the texture reference when the backbuffer has actually changed
if (TextureResource->TextureRHI != Backbuffer)
{
TextureResource->TextureRHI = (FTextureRHIRef&)Backbuffer;
this->ActiveViewportSender->ChangeVideoTexture(VideoTexture);
RHIUpdateTextureReference(VideoTexture->TextureReference.TextureReferenceRHI, Backbuffer);
}
}
}
}
void FNDIConnectionService::StopBroadcastingActiveViewport()
{
// Wait for the sync context locks
FScopeLock RenderLock(&RenderSyncContext);
// reset the initialization properties
bIsInPIEMode = false;
StopAudioCapture();
// Ensure that if the active viewport sender is active, that we shut it down
if (IsValid(this->ActiveViewportSender))
{
FSlateApplication::Get().GetRenderer()->OnPreResizeWindowBackBuffer().RemoveAll(this);
FSlateApplication::Get().GetRenderer()->OnBackBufferReadyToPresent().RemoveAll(this);
// shutdown the active viewport sender (just in case it was activated)
this->ActiveViewportSender->Shutdown();
// reset the broadcasting flag, so that we can restart the broadcast later
this->bIsBroadcastingActiveViewport = false;
FTextureResource* TextureResource = GetVideoTextureResource();
if (TextureResource != nullptr)
{
TextureResource->TextureRHI.SafeRelease();
this->ActiveViewportSender->ChangeVideoTexture(VideoTexture);
}
}
}
FTextureResource* FNDIConnectionService::GetVideoTextureResource() const
{
if(IsValid(this->VideoTexture))
return this->VideoTexture->GetResource();
return nullptr;
}
void FNDIConnectionService::OnNewSubmixBuffer(const USoundSubmix* OwningSubmix, float* AudioData, int32 NumSamples, int32 NumChannels, const int32 SampleRate, double AudioClock)
{
if (NumSamples > 0)
{
FScopeLock Lock(&AudioSyncContext);
if (bIsAudioInitialized)
{
int64 ticks = FDateTime::Now().GetTimeOfDay().GetTicks();
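			// Sends registered against the main submix are stored under a nullptr key (see BeginAudioCapture),
			// so normalize the owning submix before looking up its send event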
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 4)) // 5.4 or later
FAudioDeviceHandle AudioDevice = GEngine->GetActiveAudioDevice();
if (&AudioDevice->GetMainSubmixObject() == OwningSubmix)
OwningSubmix = nullptr;
#else
OwningSubmix = nullptr;
#endif
FNDIConnectionServiceSendAudioEvent* SendAudioEvent = SubmixSendAudioFrameEvents.Find(OwningSubmix);
if (SendAudioEvent)
{
if (SendAudioEvent->IsBound())
{
SendAudioEvent->Broadcast(ticks, AudioData, NumSamples, NumChannels, SampleRate, AudioClock);
}
}
}
}
}
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 4)) // 5.4 or later
const FString& FNDIConnectionService::GetListenerName() const
{
static const FString ListenerName(TEXT("NDIConnectionServiceListener"));
return ListenerName;
}
#endif

View File

@@ -0,0 +1,232 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Services/NDIFinderService.h>
#include <Async/Async.h>
#include <Misc/EngineVersionComparison.h>
#include <NDIIOPluginAPI.h>
/** Define Global Accessors */
static NDIlib_find_instance_t NDI_FIND_INSTANCE = nullptr;
static FCriticalSection NDI_FIND_SYNC_CONTEXT;
FNDIFinderService::FNDISourceCollectionChangedEvent FNDIFinderService::EventOnNDISourceCollectionChanged;
TArray<FNDIConnectionInformation> FNDIFinderService::NetworkSourceCollection = TArray<FNDIConnectionInformation>();
/** ************************ **/
FNDIFinderService::FNDIFinderService()
{
if (NDI_FIND_INSTANCE == nullptr)
{
FScopeLock Lock(&NDI_FIND_SYNC_CONTEXT);
NDI_FIND_INSTANCE = NDIlib_find_create_v2(nullptr);
}
}
// Begin the service
bool FNDIFinderService::Start()
{
if (!bIsThreadRunning && p_RunnableThread == nullptr)
{
if (NDI_FIND_INSTANCE != nullptr)
{
this->bIsThreadRunning = true;
p_RunnableThread = FRunnableThread::Create(this, TEXT("FNDIFinderService_Tick"), 0, TPri_BelowNormal);
return bIsThreadRunning = p_RunnableThread != nullptr;
}
}
return false;
}
/** FRunnable Interface implementation for 'Init' */
bool FNDIFinderService::Init()
{
return NDI_FIND_INSTANCE != nullptr;
}
/** FRunnable Interface implementation for 'Run' */
uint32 FNDIFinderService::Run()
{
static const uint32 find_wait_time = 500;
if (NDI_FIND_INSTANCE == nullptr)
return 0;
	// Only update while we are supposed to be running
while (bIsThreadRunning)
{
// Wait up to 'find_wait_time' (in milliseconds) to determine whether new sources have been added
if (!NDIlib_find_wait_for_sources(NDI_FIND_INSTANCE, find_wait_time))
{
			// the source list has settled for now; check whether the network source collection has changed
if (UpdateNetworkSourceCollection())
{
				// Broadcast the event on the game thread for thread safety purposes
AsyncTask(ENamedThreads::GameThread, []() {
if (FNDIFinderService::EventOnNDISourceCollectionChanged.IsBound())
FNDIFinderService::EventOnNDISourceCollectionChanged.Broadcast();
});
}
}
}
// return success
return 1;
}
/** Shuts down the finder thread and destroys the NDI find instance */
void FNDIFinderService::Shutdown()
{
if (p_RunnableThread != nullptr)
{
this->bIsThreadRunning = false;
p_RunnableThread->WaitForCompletion();
p_RunnableThread = nullptr;
}
// Ensure we unload the finder instance and clear the shared handle so it cannot be destroyed twice
if (NDI_FIND_INSTANCE != nullptr)
{
NDIlib_find_destroy(NDI_FIND_INSTANCE);
NDI_FIND_INSTANCE = nullptr;
}
}
// Stop the service
void FNDIFinderService::Stop()
{
Shutdown();
}
bool FNDIFinderService::UpdateNetworkSourceCollection()
{
uint32 no_sources = 0;
bool bHasCollectionChanged = false;
if (NDI_FIND_INSTANCE != nullptr)
{
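// Ask the finder for its current source list; the returned array is owned by the NDI library and remains valid until the next finder call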
const NDIlib_source_t* p_sources = NDIlib_find_get_current_sources(NDI_FIND_INSTANCE, &no_sources);
// Synchronize the cached collection with the sources reported by the finder while holding the lock
{
FScopeLock lock(&NDI_FIND_SYNC_CONTEXT);
bHasCollectionChanged = FNDIFinderService::NetworkSourceCollection.Num() != no_sources;
if (no_sources > 0 && p_sources != nullptr)
{
uint32 CurrentSourceCount = NetworkSourceCollection.Num();
for (uint32 iter = 0; iter < no_sources; iter++)
{
if (iter >= CurrentSourceCount)
{
NetworkSourceCollection.Add(FNDIConnectionInformation());
}
const NDIlib_source_t* SourceInformation = &p_sources[iter];
FNDIConnectionInformation* CollectionSource = &NetworkSourceCollection[iter];
bHasCollectionChanged |= SourceInformation->p_url_address != CollectionSource->Url;
CollectionSource->Url = SourceInformation->p_url_address;
CollectionSource->SourceName = SourceInformation->p_ndi_name;
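// NDI source names use the form "MACHINE_NAME (stream name)"; split on the first space to recover the two parts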
FString SourceName = SourceInformation->p_ndi_name;
SourceName.Split(TEXT(" "), &CollectionSource->MachineName, &CollectionSource->StreamName);
// Now that the MachineName and StreamName have been split, cleanup the stream name
CollectionSource->StreamName.RemoveFromStart(TEXT("("));
CollectionSource->StreamName.RemoveFromEnd(TEXT(")"));
}
if (CurrentSourceCount > no_sources)
{
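// The finder now reports fewer sources than we have cached, so trim the stale tail entries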
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 5)) // 5.5 or later
NetworkSourceCollection.RemoveAt(no_sources, CurrentSourceCount - no_sources, EAllowShrinking::Yes);
#else
NetworkSourceCollection.RemoveAt(no_sources, CurrentSourceCount - no_sources, true);
#endif
bHasCollectionChanged = true;
}
}
else if (NetworkSourceCollection.Num() > 0)
{
NetworkSourceCollection.Empty();
bHasCollectionChanged = true;
}
bHasCollectionChanged |= NetworkSourceCollection.Num() != no_sources;
}
}
return bHasCollectionChanged;
}
/** Call to update an existing collection of network sources to match the current collection */
bool FNDIFinderService::UpdateSourceCollection(TArray<FNDIConnectionInformation>& InSourceCollection)
{
bool bHasCollectionChanged = false;
{
FScopeLock Lock(&NDI_FIND_SYNC_CONTEXT);
const uint32 no_sources = NetworkSourceCollection.Num();
bHasCollectionChanged = InSourceCollection.Num() != no_sources;
if (no_sources > 0)
{
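// Grow the caller's array as needed and copy each cached entry across, noting whether any URL changed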
uint32 CurrentSourceCount = InSourceCollection.Num();
for (uint32 iter = 0; iter < no_sources; iter++)
{
if (iter >= CurrentSourceCount)
{
InSourceCollection.Add(FNDIConnectionInformation());
CurrentSourceCount = InSourceCollection.Num();
}
FNDIConnectionInformation* CollectionSource = &InSourceCollection[iter];
const FNDIConnectionInformation* SourceInformation = &NetworkSourceCollection[iter];
bHasCollectionChanged |= SourceInformation->Url != CollectionSource->Url;
CollectionSource->Url = SourceInformation->Url;
CollectionSource->SourceName = SourceInformation->SourceName;
CollectionSource->MachineName = SourceInformation->MachineName;
CollectionSource->StreamName = SourceInformation->StreamName;
}
if (CurrentSourceCount > no_sources)
{
#if (ENGINE_MAJOR_VERSION > 5) || ((ENGINE_MAJOR_VERSION == 5) && (ENGINE_MINOR_VERSION >= 5)) // 5.5 or later
InSourceCollection.RemoveAt(no_sources, CurrentSourceCount - no_sources, EAllowShrinking::Yes);
#else
InSourceCollection.RemoveAt(no_sources, CurrentSourceCount - no_sources, true);
#endif
bHasCollectionChanged = true;
}
}
else if (InSourceCollection.Num() > 0)
{
InSourceCollection.Empty();
bHasCollectionChanged = true;
}
}
return bHasCollectionChanged;
}
/** Get the available sources on the network */
const TArray<FNDIConnectionInformation> FNDIFinderService::GetNetworkSourceCollection()
{
FScopeLock Lock(&NDI_FIND_SYNC_CONTEXT);
return FNDIFinderService::NetworkSourceCollection;
}


@@ -0,0 +1,50 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Structures/NDIBroadcastConfiguration.h>
/** Copies an existing instance to this object */
FNDIBroadcastConfiguration::FNDIBroadcastConfiguration(const FNDIBroadcastConfiguration& other)
{
// perform a deep copy of the 'other' structure and store the values in this object
this->FrameRate = other.FrameRate;
this->FrameSize = other.FrameSize;
}
/** Copies existing instance properties to this object */
FNDIBroadcastConfiguration& FNDIBroadcastConfiguration::operator=(const FNDIBroadcastConfiguration& other)
{
// perform a deep copy of the 'other' structure
this->FrameRate = other.FrameRate;
this->FrameSize = other.FrameSize;
// return the result of the copy
return *this;
}
/** Compares this object to 'other' and returns a determination of whether they are equal */
bool FNDIBroadcastConfiguration::operator==(const FNDIBroadcastConfiguration& other) const
{
// return the value of a deep compare against the 'other' structure
return this->FrameRate == other.FrameRate && this->FrameSize == other.FrameSize;
}
/** Attempts to serialize this object using an Archive object */
FArchive& FNDIBroadcastConfiguration::Serialize(FArchive& Ar)
{
// we want to make sure that we are able to serialize this object across many different versions of this structure
int32 current_version = 0;
// serialize this structure
return Ar << current_version << this->FrameRate.Numerator << this->FrameRate.Denominator << this->FrameSize;
}
/** Compares this object to 'other' and returns a determination of whether they are NOT equal */
bool FNDIBroadcastConfiguration::operator!=(const FNDIBroadcastConfiguration& other) const
{
return !(*this == other);
}


@@ -0,0 +1,111 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Structures/NDIConnectionInformation.h>
#include <string>
/** Copies an existing instance to this object */
FNDIConnectionInformation::FNDIConnectionInformation(const FNDIConnectionInformation& other)
{
// perform a deep copy of the 'other' structure and store the values in this object
this->Bandwidth = other.Bandwidth;
this->MachineName = other.MachineName;
this->SourceName = other.SourceName;
this->StreamName = other.StreamName;
this->Url = other.Url;
this->bMuteAudio = other.bMuteAudio;
this->bMuteVideo = other.bMuteVideo;
}
/** Copies existing instance properties to this object */
FNDIConnectionInformation& FNDIConnectionInformation::operator=(const FNDIConnectionInformation& other)
{
// perform a deep copy of the 'other' structure
this->Bandwidth = other.Bandwidth;
this->MachineName = other.MachineName;
this->SourceName = other.SourceName;
this->StreamName = other.StreamName;
this->Url = other.Url;
this->bMuteAudio = other.bMuteAudio;
this->bMuteVideo = other.bMuteVideo;
// return the result of the copy
return *this;
}
/** Compares this object to 'other' and returns a determination of whether they are equal */
bool FNDIConnectionInformation::operator==(const FNDIConnectionInformation& other) const
{
// return the value of a deep compare against the 'other' structure
return this->Bandwidth == other.Bandwidth &&
this->MachineName == other.MachineName && this->SourceName == other.SourceName &&
this->StreamName == other.StreamName && this->Url == other.Url &&
this->bMuteAudio == other.bMuteAudio && this->bMuteVideo == other.bMuteVideo;
}
FNDIConnectionInformation::operator NDIlib_recv_bandwidth_e() const
{
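// Map the plugin's bandwidth setting onto the corresponding NDI receiver bandwidth value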
return this->Bandwidth == ENDISourceBandwidth::MetadataOnly ? NDIlib_recv_bandwidth_metadata_only
: this->Bandwidth == ENDISourceBandwidth::AudioOnly ? NDIlib_recv_bandwidth_audio_only
: this->Bandwidth == ENDISourceBandwidth::Lowest ? NDIlib_recv_bandwidth_lowest
: NDIlib_recv_bandwidth_highest;
}
/** Resets the current parameters to the default property values */
void FNDIConnectionInformation::Reset()
{
// Ensure we reset all the properties of this object to nominal default properties
this->Bandwidth = ENDISourceBandwidth::Highest;
this->MachineName = FString("");
this->SourceName = FString("");
this->StreamName = FString("");
this->Url = FString("");
this->bMuteAudio = false;
this->bMuteVideo = false;
}
/** Attempts to serialize this object using an Archive object */
FArchive& FNDIConnectionInformation::Serialize(FArchive& Ar)
{
// we want to make sure that we are able to serialize this object across many different versions of this structure
int32 current_version = 0;
// serialize this structure
return Ar << current_version << this->Bandwidth
<< this->MachineName << this->SourceName << this->StreamName << this->Url
<< this->bMuteAudio << this->bMuteVideo;
}
/** Determines whether this object is valid connection information */
bool FNDIConnectionInformation::IsValid() const
{
// Need at least a source name and/or machine+stream name and/or a URL
return (!this->SourceName.IsEmpty()) ||
((!this->MachineName.IsEmpty()) && (!this->StreamName.IsEmpty())) ||
(!this->Url.IsEmpty());
}
FString FNDIConnectionInformation::GetNDIName() const
{
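// Prefer the explicit source name; otherwise rebuild it from the machine and stream names in the standard "MACHINE_NAME (stream name)" form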
if(!this->SourceName.IsEmpty())
return this->SourceName;
if ((!this->MachineName.IsEmpty()) && (!this->StreamName.IsEmpty()))
return this->MachineName + TEXT(" (") + this->StreamName + TEXT(")");
return FString();
}
/** Compares this object to 'other' and returns a determination of whether they are NOT equal */
bool FNDIConnectionInformation::operator!=(const FNDIConnectionInformation& other) const
{
return !(*this == other);
}


@@ -0,0 +1,74 @@
/*
Copyright (C) 2024 Vizrt NDI AB. All rights reserved.
This file and its use within a Product is bound by the terms of NDI SDK license that was provided
as part of the NDI SDK. For more information, please review the license and the NDI SDK documentation.
*/
#include <Structures/NDIReceiverPerformanceData.h>
/** Copies an existing instance to this object */
FNDIReceiverPerformanceData::FNDIReceiverPerformanceData(const FNDIReceiverPerformanceData& other)
{
// perform a deep copy of the 'other' structure and store the values in this object
this->AudioFrames = other.AudioFrames;
this->DroppedAudioFrames = other.DroppedAudioFrames;
this->DroppedMetadataFrames = other.DroppedMetadataFrames;
this->DroppedVideoFrames = other.DroppedVideoFrames;
this->MetadataFrames = other.MetadataFrames;
this->VideoFrames = other.VideoFrames;
}
/** Copies existing instance properties to this object */
FNDIReceiverPerformanceData& FNDIReceiverPerformanceData::operator=(const FNDIReceiverPerformanceData& other)
{
// perform a deep copy of the 'other' structure
this->AudioFrames = other.AudioFrames;
this->DroppedAudioFrames = other.DroppedAudioFrames;
this->DroppedMetadataFrames = other.DroppedMetadataFrames;
this->DroppedVideoFrames = other.DroppedVideoFrames;
this->MetadataFrames = other.MetadataFrames;
this->VideoFrames = other.VideoFrames;
// return the result of the copy
return *this;
}
/** Compares this object to 'other' and returns a determination of whether they are equal */
bool FNDIReceiverPerformanceData::operator==(const FNDIReceiverPerformanceData& other) const
{
// return the value of a deep compare against the 'other' structure
return this->AudioFrames == other.AudioFrames && this->DroppedAudioFrames == other.DroppedAudioFrames &&
this->DroppedMetadataFrames == other.DroppedMetadataFrames &&
this->DroppedVideoFrames == other.DroppedVideoFrames && this->MetadataFrames == other.MetadataFrames &&
this->VideoFrames == other.VideoFrames;
}
/** Resets the current parameters to the default property values */
void FNDIReceiverPerformanceData::Reset()
{
// Ensure we reset all the properties of this object to nominal default properties
this->AudioFrames = 0;
this->DroppedAudioFrames = 0;
this->DroppedMetadataFrames = 0;
this->DroppedVideoFrames = 0;
this->MetadataFrames = 0;
this->VideoFrames = 0;
}
/** Attempts to serialize this object using an Archive object */
FArchive& FNDIReceiverPerformanceData::Serialize(FArchive& Ar)
{
// we want to make sure that we are able to serialize this object across many different versions of this structure
int32 current_version = 0;
// serialize this structure
return Ar << current_version << this->AudioFrames << this->DroppedAudioFrames << this->DroppedMetadataFrames
<< this->DroppedVideoFrames << this->MetadataFrames << this->VideoFrames;
}
/** Compares this object to 'other' and returns a determination of whether they are NOT equal */
bool FNDIReceiverPerformanceData::operator!=(const FNDIReceiverPerformanceData& other) const
{
return !(*this == other);
}