Clearing up plugin issues

This commit is contained in:
2025-09-05 15:58:28 +03:00
parent 0b65461e3b
commit 3fe0c90dfb
136 changed files with 15080 additions and 0 deletions

View File

@@ -0,0 +1,635 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// Function-pointer table ("vtable") for the dynamically loaded NDI runtime, as returned by
// NDIlib_v6_load(). Every SDK entry point is exposed as a member of this struct.
//
// Each member up to the v5 section is an anonymous union that aliases two names for the SAME
// function-pointer slot: the short name (e.g. `initialize`) and the legacy `NDIlib_`-prefixed
// name (e.g. `NDIlib_initialize`), with the legacy spelling marked PROCESSINGNDILIB_DEPRECATED.
// Where the short name is ALSO marked deprecated, a newer replacement appears later in the
// table (e.g. `find_create` -> `find_create_v2`, `recv_capture` -> `recv_capture_v2`/`_v3`).
//
// NOTE(review): the member order appears to define the binary layout shared with the older
// NDIlib_v2..NDIlib_v5 consumers (see the typedef aliases following this struct) — do not
// reorder, remove, or insert members except by appending; confirm against the SDK docs.
typedef struct NDIlib_v6 {
// v1.5
// --- Library lifecycle and capability queries ---
union {
bool (*initialize)(void);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_initialize)(void);
};
union {
void (*destroy)(void);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_destroy)(void);
};
union {
const char* (*version)(void);
PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_version)(void);
};
union {
bool (*is_supported_CPU)(void);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_is_supported_CPU)(void);
};
// --- Source discovery (find) API ---
union {
PROCESSINGNDILIB_DEPRECATED NDIlib_find_instance_t (*find_create)(const NDIlib_find_create_t* p_create_settings);
PROCESSINGNDILIB_DEPRECATED NDIlib_find_instance_t (*NDIlib_find_create)(const NDIlib_find_create_t* p_create_settings);
};
union {
NDIlib_find_instance_t (*find_create_v2)(const NDIlib_find_create_t* p_create_settings);
PROCESSINGNDILIB_DEPRECATED NDIlib_find_instance_t (*NDIlib_find_create_v2)(const NDIlib_find_create_t* p_create_settings);
};
union {
void (*find_destroy)(NDIlib_find_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_find_destroy)(NDIlib_find_instance_t p_instance);
};
union {
const NDIlib_source_t* (*find_get_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources, uint32_t timeout_in_ms);
PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_find_get_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources, uint32_t timeout_in_ms);
};
// --- Sender (send) API ---
union {
NDIlib_send_instance_t (*send_create)(const NDIlib_send_create_t* p_create_settings);
PROCESSINGNDILIB_DEPRECATED NDIlib_send_instance_t (*NDIlib_send_create)(const NDIlib_send_create_t* p_create_settings);
};
union {
void (*send_destroy)(NDIlib_send_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_destroy)(NDIlib_send_instance_t p_instance);
};
// Deprecated v1 frame-type senders; superseded by the *_v2 / *_v3 members below.
union {
PROCESSINGNDILIB_DEPRECATED void (*send_send_video)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_video)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*send_send_video_async)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_video_async)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*send_send_audio)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_audio)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data);
};
union {
void (*send_send_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
};
union {
NDIlib_frame_type_e (*send_capture)(NDIlib_send_instance_t p_instance, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms);
PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e (*NDIlib_send_capture)(NDIlib_send_instance_t p_instance, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms);
};
union {
void (*send_free_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_free_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
};
union {
bool (*send_get_tally)(NDIlib_send_instance_t p_instance, NDIlib_tally_t* p_tally, uint32_t timeout_in_ms);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_send_get_tally)(NDIlib_send_instance_t p_instance, NDIlib_tally_t* p_tally, uint32_t timeout_in_ms);
};
union {
int (*send_get_no_connections)(NDIlib_send_instance_t p_instance, uint32_t timeout_in_ms);
PROCESSINGNDILIB_DEPRECATED int (*NDIlib_send_get_no_connections)(NDIlib_send_instance_t p_instance, uint32_t timeout_in_ms);
};
union {
void (*send_clear_connection_metadata)(NDIlib_send_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_clear_connection_metadata)(NDIlib_send_instance_t p_instance);
};
union {
void (*send_add_connection_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_add_connection_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
};
union {
void (*send_set_failover)(NDIlib_send_instance_t p_instance, const NDIlib_source_t* p_failover_source);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_set_failover)(NDIlib_send_instance_t p_instance, const NDIlib_source_t* p_failover_source);
};
// --- Receiver (recv) API ---
union {
PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t (*recv_create_v2)(const NDIlib_recv_create_t* p_create_settings);
PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t (*NDIlib_recv_create_v2)(const NDIlib_recv_create_t* p_create_settings);
};
union {
PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t (*recv_create)(const NDIlib_recv_create_t* p_create_settings);
PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t (*NDIlib_recv_create)(const NDIlib_recv_create_t* p_create_settings);
};
union {
void (*recv_destroy)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_destroy)(NDIlib_recv_instance_t p_instance);
};
union {
PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e (*recv_capture)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_t* p_video_data, NDIlib_audio_frame_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms);
PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e (*NDIlib_recv_capture)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_t* p_video_data, NDIlib_audio_frame_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*recv_free_video)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_t* p_video_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_video)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_t* p_video_data);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*recv_free_audio)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_audio)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data);
};
union {
void (*recv_free_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
};
union {
bool (*recv_send_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_send_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
};
union {
bool (*recv_set_tally)(NDIlib_recv_instance_t p_instance, const NDIlib_tally_t* p_tally);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_set_tally)(NDIlib_recv_instance_t p_instance, const NDIlib_tally_t* p_tally);
};
union {
void (*recv_get_performance)(NDIlib_recv_instance_t p_instance, NDIlib_recv_performance_t* p_total, NDIlib_recv_performance_t* p_dropped);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_get_performance)(NDIlib_recv_instance_t p_instance, NDIlib_recv_performance_t* p_total, NDIlib_recv_performance_t* p_dropped);
};
union {
void (*recv_get_queue)(NDIlib_recv_instance_t p_instance, NDIlib_recv_queue_t* p_total);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_get_queue)(NDIlib_recv_instance_t p_instance, NDIlib_recv_queue_t* p_total);
};
union {
void (*recv_clear_connection_metadata)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_clear_connection_metadata)(NDIlib_recv_instance_t p_instance);
};
union {
void (*recv_add_connection_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_add_connection_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
};
union {
int (*recv_get_no_connections)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED int (*NDIlib_recv_get_no_connections)(NDIlib_recv_instance_t p_instance);
};
// --- Routing API ---
union {
NDIlib_routing_instance_t (*routing_create)(const NDIlib_routing_create_t* p_create_settings);
PROCESSINGNDILIB_DEPRECATED NDIlib_routing_instance_t (*NDIlib_routing_create)(const NDIlib_routing_create_t* p_create_settings);
};
union {
void (*routing_destroy)(NDIlib_routing_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_routing_destroy)(NDIlib_routing_instance_t p_instance);
};
union {
bool (*routing_change)(NDIlib_routing_instance_t p_instance, const NDIlib_source_t* p_source);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_routing_change)(NDIlib_routing_instance_t p_instance, const NDIlib_source_t* p_source);
};
union {
bool (*routing_clear)(NDIlib_routing_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_routing_clear)(NDIlib_routing_instance_t p_instance);
};
// --- Audio interleaving utility functions ---
union {
void (*util_send_send_audio_interleaved_16s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_16s_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_send_send_audio_interleaved_16s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_16s_t* p_audio_data);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*util_audio_to_interleaved_16s)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_to_interleaved_16s)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*util_audio_from_interleaved_16s)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_from_interleaved_16s)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_t* p_dst);
};
// v2
union {
bool (*find_wait_for_sources)(NDIlib_find_instance_t p_instance, uint32_t timeout_in_ms);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_find_wait_for_sources)(NDIlib_find_instance_t p_instance, uint32_t timeout_in_ms);
};
union {
const NDIlib_source_t* (*find_get_current_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources);
PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_find_get_current_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*util_audio_to_interleaved_32f)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_to_interleaved_32f)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst);
};
union {
PROCESSINGNDILIB_DEPRECATED void (*util_audio_from_interleaved_32f)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_from_interleaved_32f)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_t* p_dst);
};
union {
void (*util_send_send_audio_interleaved_32f)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32f_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_send_send_audio_interleaved_32f)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32f_t* p_audio_data);
};
// v3
union {
void (*recv_free_video_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_video_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);
};
union {
void (*recv_free_audio_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_audio_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data);
};
union {
NDIlib_frame_type_e (*recv_capture_v2)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v2_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data.
PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e (*NDIlib_recv_capture_v2)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v2_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data.
};
union {
void (*send_send_video_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_video_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);
};
union {
void (*send_send_video_async_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_video_async_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);
};
union {
void (*send_send_audio_v2)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_audio_v2)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data);
};
union {
void (*util_audio_to_interleaved_16s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_to_interleaved_16s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst);
};
union {
void (*util_audio_from_interleaved_16s_v2)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_v2_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_from_interleaved_16s_v2)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_v2_t* p_dst);
};
union {
void (*util_audio_to_interleaved_32f_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_to_interleaved_32f_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst);
};
union {
void (*util_audio_from_interleaved_32f_v2)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_v2_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_from_interleaved_32f_v2)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_v2_t* p_dst);
};
// V3.01
union {
void (*recv_free_string)(NDIlib_recv_instance_t p_instance, const char* p_string);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_string)(NDIlib_recv_instance_t p_instance, const char* p_string);
};
// PTZ (pan/tilt/zoom) camera control over a receiver connection.
union {
bool (*recv_ptz_is_supported)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_is_supported)(NDIlib_recv_instance_t p_instance);
};
union {
// This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_is_supported)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_is_supported)(NDIlib_recv_instance_t p_instance);
};
union {
const char* (*recv_get_web_control)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_recv_get_web_control)(NDIlib_recv_instance_t p_instance);
};
union {
bool (*recv_ptz_zoom)(NDIlib_recv_instance_t p_instance, const float zoom_value);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_zoom)(NDIlib_recv_instance_t p_instance, const float zoom_value);
};
union {
bool (*recv_ptz_zoom_speed)(NDIlib_recv_instance_t p_instance, const float zoom_speed);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_zoom_speed)(NDIlib_recv_instance_t p_instance, const float zoom_speed);
};
union {
bool (*recv_ptz_pan_tilt)(NDIlib_recv_instance_t p_instance, const float pan_value, const float tilt_value);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_pan_tilt)(NDIlib_recv_instance_t p_instance, const float pan_value, const float tilt_value);
};
union {
bool (*recv_ptz_pan_tilt_speed)(NDIlib_recv_instance_t p_instance, const float pan_speed, const float tilt_speed);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_pan_tilt_speed)(NDIlib_recv_instance_t p_instance, const float pan_speed, const float tilt_speed);
};
union {
bool (*recv_ptz_store_preset)(NDIlib_recv_instance_t p_instance, const int preset_no);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_store_preset)(NDIlib_recv_instance_t p_instance, const int preset_no);
};
union {
bool (*recv_ptz_recall_preset)(NDIlib_recv_instance_t p_instance, const int preset_no, const float speed);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_recall_preset)(NDIlib_recv_instance_t p_instance, const int preset_no, const float speed);
};
union {
bool (*recv_ptz_auto_focus)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_auto_focus)(NDIlib_recv_instance_t p_instance);
};
union {
bool (*recv_ptz_focus)(NDIlib_recv_instance_t p_instance, const float focus_value);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_focus)(NDIlib_recv_instance_t p_instance, const float focus_value);
};
union {
bool (*recv_ptz_focus_speed)(NDIlib_recv_instance_t p_instance, const float focus_speed);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_focus_speed)(NDIlib_recv_instance_t p_instance, const float focus_speed);
};
union {
bool (*recv_ptz_white_balance_auto)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_white_balance_auto)(NDIlib_recv_instance_t p_instance);
};
union {
bool (*recv_ptz_white_balance_indoor)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_white_balance_indoor)(NDIlib_recv_instance_t p_instance);
};
union {
bool (*recv_ptz_white_balance_outdoor)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_white_balance_outdoor)(NDIlib_recv_instance_t p_instance);
};
union {
bool (*recv_ptz_white_balance_oneshot)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_white_balance_oneshot)(NDIlib_recv_instance_t p_instance);
};
union {
bool (*recv_ptz_white_balance_manual)(NDIlib_recv_instance_t p_instance, const float red, const float blue);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_white_balance_manual)(NDIlib_recv_instance_t p_instance, const float red, const float blue);
};
union {
bool (*recv_ptz_exposure_auto)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_exposure_auto)(NDIlib_recv_instance_t p_instance);
};
union {
bool (*recv_ptz_exposure_manual)(NDIlib_recv_instance_t p_instance, const float exposure_level);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_exposure_manual)(NDIlib_recv_instance_t p_instance, const float exposure_level);
};
// Deprecated in-SDK recording API (retained only as layout placeholders).
union {
// This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_start)(NDIlib_recv_instance_t p_instance, const char* p_filename_hint);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_start)(NDIlib_recv_instance_t p_instance, const char* p_filename_hint);
};
union {
// This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_stop)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_stop)(NDIlib_recv_instance_t p_instance);
};
union {
// This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_set_audio_level)(NDIlib_recv_instance_t p_instance, const float level_dB);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_set_audio_level)(NDIlib_recv_instance_t p_instance, const float level_dB);
};
union { // This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_is_recording)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_is_recording)(NDIlib_recv_instance_t p_instance);
};
union {
// This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED const char* (*recv_recording_get_filename)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_recv_recording_get_filename)(NDIlib_recv_instance_t p_instance);
};
union {
// This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED const char* (*recv_recording_get_error)(NDIlib_recv_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_recv_recording_get_error)(NDIlib_recv_instance_t p_instance);
};
union {
// This functionality is now provided via external NDI recording, see SDK documentation.
PROCESSINGNDILIB_DEPRECATED bool (*recv_recording_get_times)(NDIlib_recv_instance_t p_instance, NDIlib_recv_recording_time_t* p_times);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_recording_get_times)(NDIlib_recv_instance_t p_instance, NDIlib_recv_recording_time_t* p_times);
};
// v3.1
union {
NDIlib_recv_instance_t (*recv_create_v3)(const NDIlib_recv_create_v3_t* p_create_settings);
PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t (*NDIlib_recv_create_v3)(const NDIlib_recv_create_v3_t* p_create_settings);
};
// v3.5
union {
void (*recv_connect)(NDIlib_recv_instance_t p_instance, const NDIlib_source_t* p_src);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_connect)(NDIlib_recv_instance_t p_instance, const NDIlib_source_t* p_src);
};
// v3.6
// --- Frame synchronizer (framesync) API ---
union {
NDIlib_framesync_instance_t (*framesync_create)(NDIlib_recv_instance_t p_receiver);
PROCESSINGNDILIB_DEPRECATED NDIlib_framesync_instance_t (*NDIlib_framesync_create)(NDIlib_recv_instance_t p_receiver);
};
union {
void (*framesync_destroy)(NDIlib_framesync_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_destroy)(NDIlib_framesync_instance_t p_instance);
};
union {
void (*framesync_capture_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data, int sample_rate, int no_channels, int no_samples);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_capture_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data, int sample_rate, int no_channels, int no_samples);
};
union {
void (*framesync_free_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_free_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data);
};
union {
void (*framesync_capture_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_frame_format_type_e field_type);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_capture_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_frame_format_type_e field_type);
};
union {
void (*framesync_free_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_free_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data);
};
union {
void (*util_send_send_audio_interleaved_32s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32s_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_send_send_audio_interleaved_32s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32s_t* p_audio_data);
};
union {
void (*util_audio_to_interleaved_32s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32s_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_to_interleaved_32s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32s_t* p_dst);
};
union {
void (*util_audio_from_interleaved_32s_v2)(const NDIlib_audio_frame_interleaved_32s_t* p_src, NDIlib_audio_frame_v2_t* p_dst);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_audio_from_interleaved_32s_v2)(const NDIlib_audio_frame_interleaved_32s_t* p_src, NDIlib_audio_frame_v2_t* p_dst);
};
// v3.8
union {
const NDIlib_source_t* (*send_get_source_name)(NDIlib_send_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_send_get_source_name)(NDIlib_send_instance_t p_instance);
};
// v4.0
union {
void (*send_send_audio_v3)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_send_send_audio_v3)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data);
};
union {
void (*util_V210_to_P216)(const NDIlib_video_frame_v2_t* p_src_v210, NDIlib_video_frame_v2_t* p_dst_p216);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_V210_to_P216)(const NDIlib_video_frame_v2_t* p_src_v210, NDIlib_video_frame_v2_t* p_dst_p216);
};
union {
void (*util_P216_to_V210)(const NDIlib_video_frame_v2_t* p_src_p216, NDIlib_video_frame_v2_t* p_dst_v210);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_util_P216_to_V210)(const NDIlib_video_frame_v2_t* p_src_p216, NDIlib_video_frame_v2_t* p_dst_v210);
};
// v4.1
union {
int (*routing_get_no_connections)(NDIlib_routing_instance_t p_instance, uint32_t timeout_in_ms);
PROCESSINGNDILIB_DEPRECATED int (*NDIlib_routing_get_no_connections)(NDIlib_routing_instance_t p_instance, uint32_t timeout_in_ms);
};
union {
const NDIlib_source_t* (*routing_get_source_name)(NDIlib_routing_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_routing_get_source_name)(NDIlib_routing_instance_t p_instance);
};
union {
NDIlib_frame_type_e (*recv_capture_v3)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v3_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data.
PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e (*NDIlib_recv_capture_v3)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v3_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data.
};
union {
void (*recv_free_audio_v3)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_recv_free_audio_v3)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data);
};
union {
void (*framesync_capture_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data, int sample_rate, int no_channels, int no_samples);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_capture_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data, int sample_rate, int no_channels, int no_samples);
};
union {
void (*framesync_free_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data);
PROCESSINGNDILIB_DEPRECATED void (*NDIlib_framesync_free_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data);
};
union {
int (*framesync_audio_queue_depth)(NDIlib_framesync_instance_t p_instance);
PROCESSINGNDILIB_DEPRECATED int (*NDIlib_framesync_audio_queue_depth)(NDIlib_framesync_instance_t p_instance);
};
// v5
union {
bool (*recv_ptz_exposure_manual_v2)(NDIlib_recv_instance_t p_instance, const float iris, const float gain, const float shutter_speed);
PROCESSINGNDILIB_DEPRECATED bool (*NDIlib_recv_ptz_exposure_manual_v2)(NDIlib_recv_instance_t p_instance, const float iris, const float gain, const float shutter_speed);
};
// v6.1
// Entries from v6.1 onward are plain function pointers: there is no legacy
// NDIlib_-prefixed spelling, so no union alias is needed.
bool (*util_audio_to_interleaved_16s_v3)(const NDIlib_audio_frame_v3_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst);
bool (*util_audio_from_interleaved_16s_v3)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_v3_t* p_dst);
bool (*util_audio_to_interleaved_32s_v3)(const NDIlib_audio_frame_v3_t* p_src, NDIlib_audio_frame_interleaved_32s_t* p_dst);
bool (*util_audio_from_interleaved_32s_v3)(const NDIlib_audio_frame_interleaved_32s_t* p_src, NDIlib_audio_frame_v3_t* p_dst);
bool (*util_audio_to_interleaved_32f_v3)(const NDIlib_audio_frame_v3_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst);
bool (*util_audio_from_interleaved_32f_v3)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_v3_t* p_dst);
// v6.2
// Receiver advertiser/listener API for discovering and monitoring receivers.
bool (*recv_get_source_name)(NDIlib_recv_instance_t p_instance, const char** p_source_name, uint32_t timeout_in_ms);
NDIlib_recv_advertiser_instance_t (*recv_advertiser_create)(const NDIlib_recv_advertiser_create_t* p_create_settings);
void (*recv_advertiser_destroy)(NDIlib_recv_advertiser_instance_t p_instance);
bool (*recv_advertiser_add_receiver)(NDIlib_recv_advertiser_instance_t p_instance, NDIlib_recv_instance_t p_receiver, bool allow_controlling, bool allow_monitoring, const char* p_input_group_name);
bool (*recv_advertiser_del_receiver)(NDIlib_recv_advertiser_instance_t p_instance, NDIlib_recv_instance_t p_receiver);
NDIlib_recv_listener_instance_t (*recv_listener_create)(const NDIlib_recv_listener_create_t* p_create_settings);
void (*recv_listener_destroy)(NDIlib_recv_listener_instance_t p_instance);
bool (*recv_listener_is_connected)(NDIlib_recv_listener_instance_t p_instance);
const char* (*recv_listener_get_server_url)(NDIlib_recv_listener_instance_t p_instance);
const NDIlib_receiver_t* (*recv_listener_get_receivers)(NDIlib_recv_listener_instance_t p_instance, uint32_t* p_num_receivers);
bool (*recv_listener_wait_for_receivers)(NDIlib_recv_listener_instance_t p_instance, uint32_t timeout_in_ms);
} NDIlib_v6;
// Backwards-compatibility aliases: the older interface names all refer to the same
// struct declaration as NDIlib_v6, so code written against NDIlib_v2..v5 compiles
// unchanged against this header.
typedef struct NDIlib_v6 NDIlib_v5;
typedef struct NDIlib_v6 NDIlib_v4_5;
typedef struct NDIlib_v6 NDIlib_v4;
typedef struct NDIlib_v6 NDIlib_v3;
typedef struct NDIlib_v6 NDIlib_v2;
// Load the library.
PROCESSINGNDILIB_API
const NDIlib_v6* NDIlib_v6_load(void);
// Load the library.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
const NDIlib_v5* NDIlib_v5_load(void);
// Load the library.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
const NDIlib_v4_5* NDIlib_v4_5_load(void);
// Load the library.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
const NDIlib_v4* NDIlib_v4_load(void);
// Load the library.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
const NDIlib_v3* NDIlib_v3_load(void);
// Load the library.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
const NDIlib_v2* NDIlib_v2_load(void);

View File

@@ -0,0 +1,79 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// Structures and type definitions required by NDI finding.
// The reference to an instance of the finder (opaque handle).
struct NDIlib_find_instance_type;
typedef struct NDIlib_find_instance_type* NDIlib_find_instance_t;
// The creation structure that is used when you are creating a finder.
typedef struct NDIlib_find_create_t {
// Do we want to include the list of NDI sources that are running on the local machine? If TRUE then
// local sources will be visible, if FALSE then they will not.
// Default = true;
bool show_local_sources;
// Which groups do you want to search in for sources.
// Default = NULL;
const char* p_groups;
// The list of additional IP addresses that exist that we should query for sources on. For instance, if
// you want to find the sources on a remote machine that is not on your local sub-net then you can put a
// comma separated list of those IP addresses here and those sources will be available locally even
// though they are not mDNS discoverable. An example might be "12.0.0.8,13.0.12.8". When none is
// specified the registry is used.
// Default = NULL;
const char* p_extra_ips;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
// C++ convenience constructor supplying the defaults documented above.
NDIlib_find_create_t(
bool show_local_sources_ = true,
const char* p_groups_ = NULL,
const char* p_extra_ips_ = NULL
);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_find_create_t;
//***********************************************************************************************************
// Create a new finder instance. This will return NULL if it fails. Passing NULL for p_create_settings
// (the C++ default) uses the defaults documented on NDIlib_find_create_t.
PROCESSINGNDILIB_API
NDIlib_find_instance_t NDIlib_find_create_v2(const NDIlib_find_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL));
// This will destroy an existing finder instance.
PROCESSINGNDILIB_API
void NDIlib_find_destroy(NDIlib_find_instance_t p_instance);
// This function will recover the current set of sources (i.e. the ones that exist right this second). The
// char* memory buffers returned in NDIlib_source_t are valid until the next call to
// NDIlib_find_get_current_sources or a call to NDIlib_find_destroy. For a given NDIlib_find_instance_t, do
// not call NDIlib_find_get_current_sources asynchronously.
// The number of sources in the returned array is written to *p_no_sources.
PROCESSINGNDILIB_API
const NDIlib_source_t* NDIlib_find_get_current_sources(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources);
// This will allow you to wait until the number of online sources have changed, bounded by timeout_in_ms
// milliseconds.
PROCESSINGNDILIB_API
bool NDIlib_find_wait_for_sources(NDIlib_find_instance_t p_instance, uint32_t timeout_in_ms);

View File

@@ -0,0 +1,172 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// It is important when using video to realize that often you are using different clocks for different parts
// of the signal chain. Within NDI, the sender can send at the clock rate that it wants and the receiver will
// receive it at that rate. The receiver however is very unlikely to share the exact same clock rate in many
// cases. For instance, bear in mind that computer clocks rely on crystals which while all rated for the same
// frequency are still not exact. If your sending computer has an audio clock that it "thinks" is 48000Hz, to
// the receiver computer that has a different audio clock this might be 48001Hz or 47998Hz. While these
// differences might appear small they accumulate over time and can cause audio to either slightly drift out
// of sync (it is receiving more audio samples than it needs to play back) or might cause audio glitches
// because it is not receiving enough audio samples. While we have described the situation for audio, the
// same exact problem occurs for video sources; it is commonly thought that this can be solved by simply
// having a "frame buffer" and that displaying the "most recently received video frame" will solve these
// timing discrepancies. Unfortunately this is not the case and when it is done because of the variance in
// clock timings, it is very common that the video will appear to "jitter" when the sending and receiving clocks
// are almost in alignment. The solution to these problems is to implement a "time base corrector" for the
// video clock which is a device that uses hysteresis to know when the best time is to either drop or insert
// a video frame such that the video is most likely to play back smoothly, and audio should be dynamically
// audio sampled (with a high order resampling filter) to adaptively track any clocking differences.
// Implementing these components is very difficult to get entirely correct under all scenarios and this
// implementation is provided to facilitate this and help people who are building real time video
// applications to receive audio and video without needing to undertake the full complexity of implementing
// such clock devices.
//
// Another way to look at what this class does is that it transforms "push" sources (i.e. NDI sources in
// which the data is pushed from the sender to the receiver) into "pull" sources in which a host application
// is pulling the data down-stream. The frame-sync automatically tracks all clocks to achieve the best video
// performance doing this operation.
//
// In addition to time-base correction operations, these implementations also will automatically detect and
// correct timing jitter that might occur. This will internally correct for timing anomalies that might be
// caused by network, sender or receiver side timing errors caused by CPU limitations, network bandwidth
// fluctuations, etc...
//
// A very common use of a frame-synchronizer might be if you are displaying video on screen timed to the GPU
// v-sync, you should use such a device to convert from the incoming time-base into the time-base of the GPU.
//
// The following are common times that you want to use a frame-synchronizer
// Video playback on screen : Yes, you want the clock to be synced with vertical refresh.
// Audio playback through sound card : Yes you want the clock to be synced with your sound card clock.
// Video mixing : Yes you want the input video clocks to all be synced to your output video clock.
// Audio mixing : Yes, you want all input audio clocks to be brought into sync with your output
// audio clock.
// Recording a single channel : No, you want to record the signal in its raw form without
// any re-clocking.
// Recording multiple channels : Maybe. If you want to sync some input channels to match a master clock
// so that they can be ISO edited, then you might want a frame-sync.
// The type instance for a frame-synchronizer (opaque handle).
struct NDIlib_framesync_instance_type;
typedef struct NDIlib_framesync_instance_type* NDIlib_framesync_instance_t;
// Create a frame synchronizer instance that can be used to get frames from a receiver. Once this receiver
// has been bound to a frame-sync then you should use it in order to receive video frames. You can continue
// to use the underlying receiver for other operations (tally, PTZ, etc...). Note that it remains your
// responsibility to destroy the receiver even when a frame-sync is using it. You should always destroy the
// receiver after the frame-sync has been destroyed.
//
PROCESSINGNDILIB_API
NDIlib_framesync_instance_t NDIlib_framesync_create(NDIlib_recv_instance_t p_receiver);
// Destroy a frame-sync implementation. Per the note above, destroy the frame-sync before destroying the
// receiver it was bound to.
PROCESSINGNDILIB_API
void NDIlib_framesync_destroy(NDIlib_framesync_instance_t p_instance);
// This function will pull audio samples from the frame-sync queue. This function will always return data
// immediately, inserting silence if no current audio data is present. You should call this at the rate that
// you want audio and it will automatically adapt the incoming audio signal to match the rate at which you
// are calling by using dynamic audio sampling. Note that there is no requirement that your requested
// sample_rate, no_channels and no_samples match the incoming signal; all combinations of conversions
// are supported.
//
// If you wish to know what the current incoming audio format is, then you can make a call with the
// parameters set to zero and it will then return the associated settings. For instance a call as follows:
//
// NDIlib_framesync_capture_audio(p_instance, p_audio_data, 0, 0, 0);
//
// will return in p_audio_data the current received audio format if there is one or sample_rate and
// no_channels equal to zero if there is not one. At any time you can specify sample_rate and no_channels as
// zero and it will return the current received audio format.
//
PROCESSINGNDILIB_API
void NDIlib_framesync_capture_audio(
NDIlib_framesync_instance_t p_instance,
NDIlib_audio_frame_v2_t* p_audio_data,
int sample_rate, int no_channels, int no_samples
);
// Same operation as NDIlib_framesync_capture_audio, but the result is returned in an
// NDIlib_audio_frame_v3_t structure.
PROCESSINGNDILIB_API
void NDIlib_framesync_capture_audio_v2(
NDIlib_framesync_instance_t p_instance,
NDIlib_audio_frame_v3_t* p_audio_data,
int sample_rate, int no_channels, int no_samples
);
// Free audio returned by NDIlib_framesync_capture_audio.
PROCESSINGNDILIB_API
void NDIlib_framesync_free_audio(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data);
// Free audio returned by NDIlib_framesync_capture_audio_v2.
PROCESSINGNDILIB_API
void NDIlib_framesync_free_audio_v2(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data);
// This function will tell you the approximate current depth of the audio queue to give you an indication
// of the number of audio samples you can request. Note that you should treat the results of this function
// with some care because in reality the frame-sync API is meant to dynamically resample audio to match the
// rate that you are calling it. If you have an inaccurate clock then this function can be useful,
// for instance :
//
// while(true)
// { int no_samples = NDIlib_framesync_audio_queue_depth(p_instance);
// NDIlib_framesync_capture_audio( ... );
// play_audio( ... )
// NDIlib_framesync_free_audio( ... )
// inaccurate_sleep( 33ms );
// }
//
// Obviously because audio is being received in real-time there is no guarantee after the call that the
// number is correct since new samples might have been captured in that time. On synchronous use of this
// function however this will be the minimum number of samples in the queue at any later time until
// NDIlib_framesync_capture_audio is called.
//
PROCESSINGNDILIB_API
int NDIlib_framesync_audio_queue_depth(NDIlib_framesync_instance_t p_instance);
// This function will pull video samples from the frame-sync queue. This function will always immediately
// return a video sample by using time-base correction. You can specify the desired field type which is then
// used to return the best possible frame. Note that field based frame-synchronization means that the
// frame-synchronizer attempts to match the fielded input phase with the frame requests so that you have the
// most correct possible field ordering on output. Note that the same frame can be returned multiple times.
//
// If no video frame has ever been received, this will return NDIlib_video_frame_v2_t as an empty (all zero)
// structure. The reason for this is that it allows you to determine that there has not yet been any video
// and act accordingly. For instance you might want to display a constant frame output at a particular video
// format, or black.
//
PROCESSINGNDILIB_API
void NDIlib_framesync_capture_video(
NDIlib_framesync_instance_t p_instance,
NDIlib_video_frame_v2_t* p_video_data,
NDIlib_frame_format_type_e field_type NDILIB_CPP_DEFAULT_VALUE(NDIlib_frame_format_type_progressive)
);
// Free video returned by NDIlib_framesync_capture_video.
PROCESSINGNDILIB_API
void NDIlib_framesync_free_video(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data);

View File

@@ -0,0 +1,129 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// C++ implementations of default constructors are here to avoid them needing to be inline with all of the
// rest of the code.
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
#endif
// All the structs used and reasonable defaults are here. Each constructor simply forwards its arguments
// into the corresponding public fields; the default argument values live on the in-struct declarations,
// which are not visible in this file.
inline NDIlib_source_t::NDIlib_source_t(const char* p_ndi_name_, const char* p_url_address_)
: p_ndi_name(p_ndi_name_), p_url_address(p_url_address_) {}
// Frame types (v2/v3).
inline NDIlib_video_frame_v2_t::NDIlib_video_frame_v2_t(int xres_, int yres_, NDIlib_FourCC_video_type_e FourCC_, int frame_rate_N_, int frame_rate_D_,
float picture_aspect_ratio_, NDIlib_frame_format_type_e frame_format_type_,
int64_t timecode_, uint8_t* p_data_, int line_stride_in_bytes_, const char* p_metadata_, int64_t timestamp_)
: xres(xres_), yres(yres_), FourCC(FourCC_), frame_rate_N(frame_rate_N_), frame_rate_D(frame_rate_D_),
picture_aspect_ratio(picture_aspect_ratio_), frame_format_type(frame_format_type_),
timecode(timecode_), p_data(p_data_), line_stride_in_bytes(line_stride_in_bytes_), p_metadata(p_metadata_), timestamp(timestamp_) {}
inline NDIlib_audio_frame_v2_t::NDIlib_audio_frame_v2_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_, float* p_data_,
int channel_stride_in_bytes_, const char* p_metadata_, int64_t timestamp_)
: sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_),
p_data(p_data_), channel_stride_in_bytes(channel_stride_in_bytes_), p_metadata(p_metadata_), timestamp(timestamp_) {}
inline NDIlib_audio_frame_v3_t::NDIlib_audio_frame_v3_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_,
NDIlib_FourCC_audio_type_e FourCC_, uint8_t* p_data_, int channel_stride_in_bytes_,
const char* p_metadata_, int64_t timestamp_)
: sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_),
FourCC(FourCC_), p_data(p_data_), channel_stride_in_bytes(channel_stride_in_bytes_),
p_metadata(p_metadata_), timestamp(timestamp_) {}
// Legacy (deprecated) frame types; these are the ones guarded by the clang deprecation pragma above.
inline NDIlib_video_frame_t::NDIlib_video_frame_t(int xres_, int yres_, NDIlib_FourCC_video_type_e FourCC_, int frame_rate_N_, int frame_rate_D_,
float picture_aspect_ratio_, NDIlib_frame_format_type_e frame_format_type_,
int64_t timecode_, uint8_t* p_data_, int line_stride_in_bytes_)
: xres(xres_), yres(yres_), FourCC(FourCC_), frame_rate_N(frame_rate_N_), frame_rate_D(frame_rate_D_),
picture_aspect_ratio(picture_aspect_ratio_), frame_format_type(frame_format_type_),
timecode(timecode_), p_data(p_data_), line_stride_in_bytes(line_stride_in_bytes_) {}
inline NDIlib_audio_frame_t::NDIlib_audio_frame_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_, float* p_data_,
int channel_stride_in_bytes_)
: sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_),
p_data(p_data_), channel_stride_in_bytes(channel_stride_in_bytes_) {}
inline NDIlib_metadata_frame_t::NDIlib_metadata_frame_t(int length_, int64_t timecode_, char* p_data_)
: length(length_), timecode(timecode_), p_data(p_data_) {}
inline NDIlib_tally_t::NDIlib_tally_t(bool on_program_, bool on_preview_)
: on_program(on_program_), on_preview(on_preview_) {}
inline NDIlib_routing_create_t::NDIlib_routing_create_t(const char* p_ndi_name_, const char* p_groups_)
: p_ndi_name(p_ndi_name_), p_groups(p_groups_) {}
// Note: the p_ndi_name_ argument is stored in the p_ndi_recv_name field.
inline NDIlib_recv_create_v3_t::NDIlib_recv_create_v3_t(const NDIlib_source_t source_to_connect_to_, NDIlib_recv_color_format_e color_format_,
NDIlib_recv_bandwidth_e bandwidth_, bool allow_video_fields_, const char* p_ndi_name_)
: source_to_connect_to(source_to_connect_to_), color_format(color_format_), bandwidth(bandwidth_), allow_video_fields(allow_video_fields_), p_ndi_recv_name(p_ndi_name_) {}
inline NDIlib_recv_create_t::NDIlib_recv_create_t(const NDIlib_source_t source_to_connect_to_, NDIlib_recv_color_format_e color_format_,
NDIlib_recv_bandwidth_e bandwidth_, bool allow_video_fields_)
: source_to_connect_to(source_to_connect_to_), color_format(color_format_), bandwidth(bandwidth_), allow_video_fields(allow_video_fields_) {}
// Statistics/counters structures default to all-zero.
inline NDIlib_recv_performance_t::NDIlib_recv_performance_t(void)
: video_frames(0), audio_frames(0), metadata_frames(0) {}
inline NDIlib_recv_queue_t::NDIlib_recv_queue_t(void)
: video_frames(0), audio_frames(0), metadata_frames(0) {}
inline NDIlib_recv_recording_time_t::NDIlib_recv_recording_time_t(void)
: no_frames(0), start_time(0), last_time(0) {}
inline NDIlib_send_create_t::NDIlib_send_create_t(const char* p_ndi_name_, const char* p_groups_, bool clock_video_, bool clock_audio_)
: p_ndi_name(p_ndi_name_), p_groups(p_groups_), clock_video(clock_video_), clock_audio(clock_audio_) {}
inline NDIlib_find_create_t::NDIlib_find_create_t(bool show_local_sources_, const char* p_groups_, const char* p_extra_ips_)
: show_local_sources(show_local_sources_), p_groups(p_groups_), p_extra_ips(p_extra_ips_) {}
// Interleaved audio utility frame types.
inline NDIlib_audio_frame_interleaved_16s_t::NDIlib_audio_frame_interleaved_16s_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_, int reference_level_, int16_t* p_data_)
: sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_),
reference_level(reference_level_), p_data(p_data_) {}
inline NDIlib_audio_frame_interleaved_32s_t::NDIlib_audio_frame_interleaved_32s_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_, int reference_level_, int32_t* p_data_)
: sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_),
reference_level(reference_level_), p_data(p_data_) {}
inline NDIlib_audio_frame_interleaved_32f_t::NDIlib_audio_frame_interleaved_32f_t(int sample_rate_, int no_channels_, int no_samples_, int64_t timecode_, float* p_data_)
: sample_rate(sample_rate_), no_channels(no_channels_), no_samples(no_samples_), timecode(timecode_), p_data(p_data_) {}
// Receiver advertiser/listener creation structures (v6.2 API).
inline NDIlib_recv_advertiser_create_t::NDIlib_recv_advertiser_create_t(const char* p_url_address)
: p_url_address(p_url_address) {}
inline NDIlib_recv_listener_create_t::NDIlib_recv_listener_create_t(const char* p_url_address)
: p_url_address(p_url_address) {}
inline NDIlib_receiver_t::NDIlib_receiver_t(void)
: p_uuid(NULL), p_name(NULL), p_input_uuid(NULL), p_input_name(NULL), p_address(NULL),
p_streams(NULL), num_streams(0), p_commands(NULL), num_commands(0), events_subscribed(false) {}
#ifdef __clang__
#pragma clang diagnostic pop
#endif

View File

@@ -0,0 +1,165 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
#ifdef PROCESSINGNDILIB_STATIC
# ifdef __cplusplus
# define PROCESSINGNDILIB_API extern "C"
# else // __cplusplus
# define PROCESSINGNDILIB_API
# endif // __cplusplus
#else // PROCESSINGNDILIB_STATIC
# ifdef _WIN32
# ifdef PROCESSINGNDILIB_EXPORTS
# ifdef __cplusplus
# define PROCESSINGNDILIB_API extern "C" __declspec(dllexport)
# else // __cplusplus
# define PROCESSINGNDILIB_API __declspec(dllexport)
# endif // __cplusplus
# else // PROCESSINGNDILIB_EXPORTS
# ifdef __cplusplus
# define PROCESSINGNDILIB_API extern "C" __declspec(dllimport)
# else // __cplusplus
# define PROCESSINGNDILIB_API __declspec(dllimport)
# endif // __cplusplus
# ifdef _WIN64
# define NDILIB_LIBRARY_NAME "Processing.NDI.Lib.x64.dll"
# define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V6"
# define NDILIB_REDIST_URL "http://ndi.link/NDIRedistV6"
# else // _WIN64
# define NDILIB_LIBRARY_NAME "Processing.NDI.Lib.x86.dll"
# define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V6"
# define NDILIB_REDIST_URL "http://ndi.link/NDIRedistV6"
# endif // _WIN64
# endif // PROCESSINGNDILIB_EXPORTS
# else // _WIN32
# ifdef __APPLE__
# define NDILIB_LIBRARY_NAME "libndi.dylib"
# define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V6"
# define NDILIB_REDIST_URL "http://ndi.link/NDIRedistV6Apple"
# else // __APPLE__
# define NDILIB_LIBRARY_NAME "libndi.so.6"
# define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V6"
# define NDILIB_REDIST_URL ""
# endif // __APPLE__
# ifdef __cplusplus
# define PROCESSINGNDILIB_API extern "C" __attribute((visibility("default")))
# else // __cplusplus
# define PROCESSINGNDILIB_API __attribute((visibility("default")))
# endif // __cplusplus
# endif // _WIN32
#endif // PROCESSINGNDILIB_STATIC
#ifndef PROCESSINGNDILIB_DEPRECATED
# ifdef _WIN32
# ifdef _MSC_VER
# define PROCESSINGNDILIB_DEPRECATED __declspec(deprecated)
# else // _MSC_VER
# define PROCESSINGNDILIB_DEPRECATED __attribute((deprecated))
# endif // _MSC_VER
# else // _WIN32
# define PROCESSINGNDILIB_DEPRECATED
# endif // _WIN32
#endif // PROCESSINGNDILIB_DEPRECATED
#ifndef NDILIB_CPP_DEFAULT_CONSTRUCTORS
# ifdef __cplusplus
# define NDILIB_CPP_DEFAULT_CONSTRUCTORS 1
# else // __cplusplus
# define NDILIB_CPP_DEFAULT_CONSTRUCTORS 0
# endif // __cplusplus
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
#ifndef NDILIB_CPP_DEFAULT_VALUE
# ifdef __cplusplus
# define NDILIB_CPP_DEFAULT_VALUE(a) =(a)
# else // __cplusplus
# define NDILIB_CPP_DEFAULT_VALUE(a)
# endif // __cplusplus
#endif // NDILIB_CPP_DEFAULT_VALUE
// Data structures shared by multiple SDKs.
#include "Processing.NDI.compat.h"
#include "Processing.NDI.structs.h"
// This is not actually required, but will start and end the libraries which might get you slightly better
// performance in some cases. In general it is more "correct" to call these although it is not required.
// There is no way to call these that would have an adverse impact on anything (even calling destroy before
// you've deleted all your objects). This will return false if the CPU is not sufficiently capable to run
// NDILib. Currently NDILib requires SSE4.2 instructions (see documentation). You can verify a specific CPU
// against the library with a call to NDIlib_is_supported_CPU().
PROCESSINGNDILIB_API
bool NDIlib_initialize(void);
// Shut the library down again; safe to call at any time (see the note above).
PROCESSINGNDILIB_API
void NDIlib_destroy(void);
// Recover the version string of the library.
PROCESSINGNDILIB_API
const char* NDIlib_version(void);
// Recover whether the current CPU in the system is capable of running NDILib.
PROCESSINGNDILIB_API
bool NDIlib_is_supported_CPU(void);
// The finding (discovery API).
#include "Processing.NDI.Find.h"
// The receiving video and audio API.
#include "Processing.NDI.Recv.h"
// Extensions to support PTZ control, etc...
#include "Processing.NDI.Recv.ex.h"
// The receiver advertiser API.
#include "Processing.NDI.RecvAdvertiser.h"
// The receiver listener API.
#include "Processing.NDI.RecvListener.h"
// The sending video API.
#include "Processing.NDI.Send.h"
// The routing of inputs API.
#include "Processing.NDI.Routing.h"
// Utility functions.
#include "Processing.NDI.utilities.h"
// Deprecated structures and functions.
#include "Processing.NDI.deprecated.h"
// The frame synchronizer.
#include "Processing.NDI.FrameSync.h"
// Dynamic loading used for OSS libraries.
#include "Processing.NDI.DynamicLoad.h"
// The C++ implementations.
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
#include "Processing.NDI.Lib.cplusplus.h"
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS

View File

@@ -0,0 +1,211 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// Has this receiver got PTZ control? Note that it might take a second or two after the connection for this
// value to be set. To avoid the need to poll this function, you can know when the value of this function
// might have changed when an NDIlib_recv_capture* call returns NDIlib_frame_type_status_change.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_is_supported(NDIlib_recv_instance_t p_instance);
// Has this receiver got recording control? Note that it might take a second or two after the connection for
// this value to be set. To avoid the need to poll this function, you can know when the value of this
// function might have changed when an NDIlib_recv_capture* call returns NDIlib_frame_type_status_change.
//
// Note on deprecation of this function:
// NDI version 4 includes the native ability to record all NDI streams using an external application that
// is provided with the SDK. This is better in many ways than the internal recording support, which only
// ever supported remotely recording systems and NDI|HX. This functionality will be supported in the SDK
// for some time, although we recommend that you use the newer support, which is more feature rich and
// supports the recording of all stream types, does not take CPU time to record NDI sources (it does not
// require any type of re-compression since it can just store the data in the file), and will synchronize
// all recorders on a system (and across systems if NTP clock locking is used).
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
bool NDIlib_recv_recording_is_supported(NDIlib_recv_instance_t p_instance);
// PTZ Controls.
// Zoom to an absolute value.
// zoom_value = 0.0 (zoomed in) ... 1.0 (zoomed out)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_zoom(NDIlib_recv_instance_t p_instance, const float zoom_value);
// Zoom at a particular speed.
// zoom_speed = -1.0 (zoom outwards) ... +1.0 (zoom inwards)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_zoom_speed(NDIlib_recv_instance_t p_instance, const float zoom_speed);
// Set the pan and tilt to an absolute value.
// pan_value = -1.0 (left) ... 0.0 (centered) ... +1.0 (right)
// tilt_value = -1.0 (bottom) ... 0.0 (centered) ... +1.0 (top)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_pan_tilt(NDIlib_recv_instance_t p_instance, const float pan_value, const float tilt_value);
// Set the pan and tilt direction and speed.
// pan_speed = -1.0 (moving right) ... 0.0 (stopped) ... +1.0 (moving left)
// tilt_speed = -1.0 (down) ... 0.0 (stopped) ... +1.0 (moving up)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_pan_tilt_speed(NDIlib_recv_instance_t p_instance, const float pan_speed, const float tilt_speed);
// Store the current position, focus, etc... as a preset.
// preset_no = 0 ... 99
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_store_preset(NDIlib_recv_instance_t p_instance, const int preset_no);
// Recall a preset, including position, focus, etc...
// preset_no = 0 ... 99
// speed = 0.0 (as slow as possible) ... 1.0 (as fast as possible). The speed at which to move to the new preset.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_recall_preset(NDIlib_recv_instance_t p_instance, const int preset_no, const float speed);
// Put the camera in auto-focus.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_auto_focus(NDIlib_recv_instance_t p_instance);
// Focus to an absolute value.
// focus_value = 0.0 (focused to infinity) ... 1.0 (focused as close as possible)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_focus(NDIlib_recv_instance_t p_instance, const float focus_value);
// Focus at a particular speed.
// focus_speed = -1.0 (focus outwards) ... +1.0 (focus inwards)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_focus_speed(NDIlib_recv_instance_t p_instance, const float focus_speed);
// Put the camera in auto white balance mode.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_white_balance_auto(NDIlib_recv_instance_t p_instance);
// Put the camera in indoor white balance.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_white_balance_indoor(NDIlib_recv_instance_t p_instance);
// Put the camera in outdoor white balance.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_white_balance_outdoor(NDIlib_recv_instance_t p_instance);
// Use the current brightness to automatically set the current white balance.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_white_balance_oneshot(NDIlib_recv_instance_t p_instance);
// Set the manual camera white balance using the R, B values.
// red = 0.0 (not red) ... 1.0 (very red)
// blue = 0.0 (not blue) ... 1.0 (very blue)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_white_balance_manual(NDIlib_recv_instance_t p_instance, const float red, const float blue);
// Put the camera in auto-exposure mode.
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_exposure_auto(NDIlib_recv_instance_t p_instance);
// Manually set the camera exposure iris.
// exposure_level = 0.0 (dark) ... 1.0 (light)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_exposure_manual(NDIlib_recv_instance_t p_instance, const float exposure_level);
// Manually set the camera exposure parameters.
// iris = 0.0 (dark) ... 1.0 (light)
// gain = 0.0 (dark) ... 1.0 (light)
// shutter_speed = 0.0 (slow) ... 1.0 (fast)
PROCESSINGNDILIB_API
bool NDIlib_recv_ptz_exposure_manual_v2(
NDIlib_recv_instance_t p_instance,
const float iris, const float gain, const float shutter_speed
);
// Recording control.
// This will start recording. If the recorder was already recording then the message is ignored. A filename
// is passed in as a "hint". Since the recorder might already be recording (or might not allow complete
// flexibility over its filename), the filename might or might not be used. If the filename is empty, or not
// present, a name will be chosen automatically. If you do not wish to provide a filename hint you can simply
// pass NULL.
//
// See the note above on deprecation, why this function is deprecated, and how to replace this functionality.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
bool NDIlib_recv_recording_start(NDIlib_recv_instance_t p_instance, const char* p_filename_hint);
// Stop recording.
//
// See the note above on deprecation, why this function is deprecated, and how to replace this functionality.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
bool NDIlib_recv_recording_stop(NDIlib_recv_instance_t p_instance);
// This will control the audio level for the recording. dB is specified in decibels relative to the reference
// level of the source. Not all recording sources support controlling audio levels. For instance, a digital
// audio device would not be able to avoid clipping on sources already at the wrong level, and thus might not
// support this message.
//
// See the note above on deprecation, why this function is deprecated, and how to replace this functionality.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
bool NDIlib_recv_recording_set_audio_level(NDIlib_recv_instance_t p_instance, const float level_dB);
// This will determine if the source is currently recording. It will return true while recording is in
// progress and false when it is not. Because there is one recorder and multiple people might be connected to
// it, there is a chance that a recording in progress was initiated by someone else.
//
// See the note above on deprecation, why this function is deprecated, and how to replace this functionality.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
bool NDIlib_recv_recording_is_recording(NDIlib_recv_instance_t p_instance);
// Get the current filename for recording. When this is set it will return a non-NULL value which is owned by
// you and freed using NDIlib_recv_free_string. If a file was already being recorded by another client, the
// message will contain the name of that file. The filename contains a UNC path (when one is available) to
// the recorded file, and can be used to access the file on your local machine for playback. If a UNC path
// is not available, then this will represent the local filename. This will remain valid even after the file
// has stopped being recorded, until the next file is started.
//
// See the note above on deprecation, why this function is deprecated, and how to replace this functionality.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
const char* NDIlib_recv_recording_get_filename(NDIlib_recv_instance_t p_instance);
// This will tell you whether there was a recording error and what that string is. When this is set it will
// return a non-NULL value which is owned by you and freed using NDIlib_recv_free_string. When there is no
// error it will return NULL.
//
// See the note above on deprecation, why this function is deprecated, and how to replace this functionality.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
const char* NDIlib_recv_recording_get_error(NDIlib_recv_instance_t p_instance);
// Structure used to query the current recording duration and timestamps.
typedef struct NDIlib_recv_recording_time_t
{
// The number of actual video frames recorded.
int64_t no_frames;
// The starting time and current largest time of the record, in UTC time, at 100-nanosecond unit
// intervals. This allows you to know the record time irrespective of frame rate. For instance,
// last_time - start_time would give you the recording length in 100-nanosecond intervals.
int64_t start_time, last_time;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
NDIlib_recv_recording_time_t(void);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_recv_recording_time_t;
// Get the current recording times.
//
// See the note above on deprecation, why this function is deprecated, and how to replace this functionality.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
bool NDIlib_recv_recording_get_times(NDIlib_recv_instance_t p_instance, NDIlib_recv_recording_time_t* p_times);

View File

@@ -0,0 +1,297 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// Structures and type definitions required by NDI receiving.
// The reference to an instance of the receiver.
struct NDIlib_recv_instance_type;
typedef struct NDIlib_recv_instance_type* NDIlib_recv_instance_t;
// The bandwidth at which a receiver chooses to receive a source (controls which streams are delivered and
// at what quality).
typedef enum NDIlib_recv_bandwidth_e {
NDIlib_recv_bandwidth_metadata_only = -10, // Receive metadata only.
NDIlib_recv_bandwidth_audio_only = 10, // Receive metadata, audio.
NDIlib_recv_bandwidth_lowest = 0, // Receive metadata, audio, video at a lower bandwidth and resolution.
NDIlib_recv_bandwidth_highest = 100, // Receive metadata, audio, video at full resolution.
// Make sure this is a 32-bit enumeration.
NDIlib_recv_bandwidth_max = 0x7fffffff
} NDIlib_recv_bandwidth_e;
// The color format that the receiver should deliver frames in. Each mode specifies the format used with
// and without an alpha channel.
typedef enum NDIlib_recv_color_format_e {
// When there is no alpha channel, this mode delivers BGRX.
// When there is an alpha channel, this mode delivers BGRA.
NDIlib_recv_color_format_BGRX_BGRA = 0,
// When there is no alpha channel, this mode delivers UYVY.
// When there is an alpha channel, this mode delivers BGRA.
NDIlib_recv_color_format_UYVY_BGRA = 1,
// When there is no alpha channel, this mode delivers BGRX.
// When there is an alpha channel, this mode delivers RGBA.
NDIlib_recv_color_format_RGBX_RGBA = 2,
// When there is no alpha channel, this mode delivers UYVY.
// When there is an alpha channel, this mode delivers RGBA.
NDIlib_recv_color_format_UYVY_RGBA = 3,
// This format will try to decode the video using the fastest available color format for the incoming
// video signal. This format follows the following guidelines, although different platforms might
// vary slightly based on their capabilities and specific performance profiles. In general if you want
// the best performance this mode should be used.
//
// When using this format, you should consider that allow_video_fields is true, and individual fields
// will always be delivered.
//
// For most video sources on most platforms, this will follow the following conventions.
// No alpha channel : UYVY
// Alpha channel : UYVA
NDIlib_recv_color_format_fastest = 100,
// This format will try to provide the video in the format that is the closest to native for the incoming
// codec yielding the highest quality. Specifically, this allows for receiving on 16bpp color from many
// sources.
//
// When using this format, you should consider that allow_video_fields is true, and individual fields
// will always be delivered.
//
// For most video sources on most platforms, this will follow the following conventions
// No alpha channel : P216, or UYVY
// Alpha channel : PA16 or UYVA
NDIlib_recv_color_format_best = 101,
// Legacy definitions for backwards compatibility.
NDIlib_recv_color_format_e_BGRX_BGRA = NDIlib_recv_color_format_BGRX_BGRA,
NDIlib_recv_color_format_e_UYVY_BGRA = NDIlib_recv_color_format_UYVY_BGRA,
NDIlib_recv_color_format_e_RGBX_RGBA = NDIlib_recv_color_format_RGBX_RGBA,
NDIlib_recv_color_format_e_UYVY_RGBA = NDIlib_recv_color_format_UYVY_RGBA,
#ifdef _WIN32
// For Windows we can support flipped images which is unfortunately something that Microsoft decided to
// do back in the old days.
NDIlib_recv_color_format_BGRX_BGRA_flipped = 1000 + NDIlib_recv_color_format_BGRX_BGRA,
#endif
// Make sure this is a 32-bit enumeration.
NDIlib_recv_color_format_max = 0x7fffffff
} NDIlib_recv_color_format_e;
// The creation structure that is used when you are creating a receiver.
typedef struct NDIlib_recv_create_v3_t {
// The source that you wish to connect to.
NDIlib_source_t source_to_connect_to;
// Your preference of color space. See the NDIlib_recv_color_format_e definition above.
NDIlib_recv_color_format_e color_format;
// The bandwidth setting that you wish to use for this video source. Bandwidth is controlled by changing
// both the compression level and the resolution of the source. A good use for low bandwidth is working
// on WIFI connections.
NDIlib_recv_bandwidth_e bandwidth;
// When this flag is FALSE, all video that you receive will be progressive. For sources that provide
// fields, this is de-interlaced on the receiving side (because we cannot change what the up-stream
// source was actually rendering). This is provided as a convenience to down-stream sources that do not
// wish to understand fielded video. There is almost no performance impact of using this function.
bool allow_video_fields;
// The name of the NDI receiver to create. This is a NULL terminated UTF8 string and should be the name
// of receive channel that you have. This is in many ways symmetric with the name of senders, so this
// might be "Channel 1" on your system. If this is NULL then it will use the filename of your application
// indexed with the number of the instance number of this receiver.
const char* p_ndi_recv_name;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
NDIlib_recv_create_v3_t(
const NDIlib_source_t source_to_connect_to_ = NDIlib_source_t(),
NDIlib_recv_color_format_e color_format_ = NDIlib_recv_color_format_UYVY_BGRA,
NDIlib_recv_bandwidth_e bandwidth_ = NDIlib_recv_bandwidth_highest,
bool allow_video_fields_ = true,
const char* p_ndi_name_ = NULL
);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_recv_create_v3_t;
// This allows you to determine the current performance levels of the receiver, to be able to detect whether
// frames have been dropped.
typedef struct NDIlib_recv_performance_t {
// The number of video frames.
int64_t video_frames;
// The number of audio frames.
int64_t audio_frames;
// The number of metadata frames.
int64_t metadata_frames;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
NDIlib_recv_performance_t(void);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_recv_performance_t;
// Get the current queue depths.
typedef struct NDIlib_recv_queue_t {
// The number of video frames.
int video_frames;
// The number of audio frames.
int audio_frames;
// The number of metadata frames.
int metadata_frames;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
NDIlib_recv_queue_t(void);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_recv_queue_t;
//**************************************************************************************************************************
// Create a new receiver instance. This will return NULL if it fails. If you create this with the default
// settings (NULL) then it will automatically determine a receiver name.
PROCESSINGNDILIB_API
NDIlib_recv_instance_t NDIlib_recv_create_v3(const NDIlib_recv_create_v3_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL));
// This will destroy an existing receiver instance.
PROCESSINGNDILIB_API
void NDIlib_recv_destroy(NDIlib_recv_instance_t p_instance);
// This function allows you to change the connection to another video source; you can also disconnect it by
// specifying NULL here. This allows you to preserve a receiver without needing to destroy and recreate it.
PROCESSINGNDILIB_API
void NDIlib_recv_connect(NDIlib_recv_instance_t p_instance, const NDIlib_source_t* p_src NDILIB_CPP_DEFAULT_VALUE(NULL));
// This will allow you to receive video, audio and metadata frames. Any of the buffers can be NULL, in which
// case data of that type will not be captured in this call. This call can be called simultaneously on
// separate threads, so it is entirely possible to receive audio, video, metadata all on separate threads.
// This function will return NDIlib_frame_type_none if no data is received within the specified timeout and
// NDIlib_frame_type_error if the connection is lost. Buffers captured with this must be freed with the
// appropriate free function below.
PROCESSINGNDILIB_API
NDIlib_frame_type_e NDIlib_recv_capture_v2(
NDIlib_recv_instance_t p_instance, // The library instance.
NDIlib_video_frame_v2_t* p_video_data, // The video data received (can be NULL).
NDIlib_audio_frame_v2_t* p_audio_data, // The audio data received (can be NULL).
NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL).
uint32_t timeout_in_ms // The amount of time in milliseconds to wait for data.
);
// This will allow you to receive video, audio and metadata frames. Any of the buffers can be NULL, in which
// case data of that type will not be captured in this call. This call can be called simultaneously on
// separate threads, so it is entirely possible to receive audio, video, metadata all on separate threads.
// This function will return NDIlib_frame_type_none if no data is received within the specified timeout and
// NDIlib_frame_type_error if the connection is lost. Buffers captured with this must be freed with the
// appropriate free function below.
PROCESSINGNDILIB_API
NDIlib_frame_type_e NDIlib_recv_capture_v3(
NDIlib_recv_instance_t p_instance, // The library instance.
NDIlib_video_frame_v2_t* p_video_data, // The video data received (can be NULL).
NDIlib_audio_frame_v3_t* p_audio_data, // The audio data received (can be NULL).
NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL).
uint32_t timeout_in_ms // The amount of time in milliseconds to wait for data.
);
// Free the buffers returned by capture for video.
PROCESSINGNDILIB_API
void NDIlib_recv_free_video_v2(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);
// Free the buffers returned by capture for audio (v2 audio frames).
PROCESSINGNDILIB_API
void NDIlib_recv_free_audio_v2(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data);
// Free the buffers returned by capture for audio (v3 audio frames).
PROCESSINGNDILIB_API
void NDIlib_recv_free_audio_v3(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data);
// Free the buffers returned by capture for metadata.
PROCESSINGNDILIB_API
void NDIlib_recv_free_metadata(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
// This will free a string that was allocated and returned by an NDIlib_recv function (for instance the
// NDIlib_recv_get_web_control function).
PROCESSINGNDILIB_API
void NDIlib_recv_free_string(NDIlib_recv_instance_t p_instance, const char* p_string);
// This function will send a meta message to the source that we are connected to. This returns FALSE if we
// are not currently connected to anything.
PROCESSINGNDILIB_API
bool NDIlib_recv_send_metadata(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
// Set the up-stream tally notifications. This returns FALSE if we are not currently connected to anything.
// That said, the moment that we do connect to something it will automatically be sent the tally state.
PROCESSINGNDILIB_API
bool NDIlib_recv_set_tally(NDIlib_recv_instance_t p_instance, const NDIlib_tally_t* p_tally);
// Get the current performance structures. This can be used to determine if you have been calling
// NDIlib_recv_capture fast enough, or if your processing of data is not keeping up with real-time. The total
// structure will give you the total frame counts received, the dropped structure will tell you how many
// frames have been dropped. Either of these could be NULL.
PROCESSINGNDILIB_API
void NDIlib_recv_get_performance(
NDIlib_recv_instance_t p_instance,
NDIlib_recv_performance_t* p_total, NDIlib_recv_performance_t* p_dropped
);
// This will allow you to determine the current queue depth for all of the frame sources at any time.
PROCESSINGNDILIB_API
void NDIlib_recv_get_queue(NDIlib_recv_instance_t p_instance, NDIlib_recv_queue_t* p_total);
// Connection based metadata is data that is sent automatically each time a new connection is received. You
// queue all of these up and they are sent on each connection. To reset them you need to clear them all and
// set them up again.
PROCESSINGNDILIB_API
void NDIlib_recv_clear_connection_metadata(NDIlib_recv_instance_t p_instance);
// Add a connection metadata string to the list of what is sent on each new connection. If someone is already
// connected then this string will be sent to them immediately.
PROCESSINGNDILIB_API
void NDIlib_recv_add_connection_metadata(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);
// Is this receiver currently connected to a source on the other end, or has the source not yet been found or
// is no longer online? This will normally return 0 or 1.
PROCESSINGNDILIB_API
int NDIlib_recv_get_no_connections(NDIlib_recv_instance_t p_instance);
// Get the URL that might be used for configuration of this input. Note that it might take a second or two
// after the connection for this value to be set. This function will return NULL if there is no web control
// user interface. You should call NDIlib_recv_free_string to free the string that is returned by this
// function. The returned value will be a fully formed URL, for instance "http://10.28.1.192/configuration/".
// To avoid the need to poll this function, you can know when the value of this function might have changed
// when an NDIlib_recv_capture* call returns NDIlib_frame_type_status_change.
PROCESSINGNDILIB_API
const char* NDIlib_recv_get_web_control(NDIlib_recv_instance_t p_instance);
// Retrieve the name of the current NDI source that the NDI receiver is connected to. This will return false
// if there has been no change in the source information since the last call. If p_source_name is NULL, then
// the name of the current NDI source will not be returned. If p_source_name is not NULL, then the name of
// the current source will be returned, however, the returned value can be NULL if the NDI receiver is
// currently not connected to a source. If the returned pointer is not NULL, then you should call
// NDIlib_recv_free_string to free the string that is returned by this function. A timeout value can be given
// to wait until a change occurs. If waiting is not desired, then use a timeout of 0.
PROCESSINGNDILIB_API
bool NDIlib_recv_get_source_name(NDIlib_recv_instance_t p_instance, const char** p_source_name, uint32_t timeout_in_ms NDILIB_CPP_DEFAULT_VALUE(0));

View File

@@ -0,0 +1,79 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2024 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// The type instance for a receiver advertiser.
struct NDIlib_recv_advertiser_instance_type;
typedef struct NDIlib_recv_advertiser_instance_type* NDIlib_recv_advertiser_instance_t;
// Creation settings for a receiver advertiser.
typedef struct NDIlib_recv_advertiser_create_t {
// The URL address of the NDI Discovery Server to connect to. If NULL, then the default NDI discovery
// server will be used. If there is no discovery server available, then the receiver advertiser will not
// be able to be instantiated and the create function will return NULL. The format of this field is
// expected to be the hostname or IP address, optionally followed by a colon and a port number. If the
// port number is not specified, then port 5959 will be used. For example,
// 127.0.0.1:5959
// or
// 127.0.0.1
// or
// hostname:5959
// This field can also specify multiple addresses separated by commas for redundancy support.
const char* p_url_address;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
NDIlib_recv_advertiser_create_t(
const char* p_url_address = NULL
);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_recv_advertiser_create_t;
// Create an instance of the receiver advertiser. This will return NULL if it fails to create the advertiser.
PROCESSINGNDILIB_API
NDIlib_recv_advertiser_instance_t NDIlib_recv_advertiser_create(const NDIlib_recv_advertiser_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL));
// Destroy an instance of the receiver advertiser.
PROCESSINGNDILIB_API
void NDIlib_recv_advertiser_destroy(NDIlib_recv_advertiser_instance_t p_instance);
// Add the receiver to the list of receivers that are being advertised. Returns false if the receiver has
// been previously registered.
PROCESSINGNDILIB_API
bool NDIlib_recv_advertiser_add_receiver(
NDIlib_recv_advertiser_instance_t p_instance,
NDIlib_recv_instance_t p_receiver,
bool allow_controlling, bool allow_monitoring,
const char* p_input_group_name NDILIB_CPP_DEFAULT_VALUE(NULL)
);
// Remove the receiver from the list of receivers that are being advertised. Returns false if the receiver
// was not previously registered.
PROCESSINGNDILIB_API
bool NDIlib_recv_advertiser_del_receiver(
NDIlib_recv_advertiser_instance_t p_instance,
NDIlib_recv_instance_t p_receiver
);

View File

@@ -0,0 +1,141 @@
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2024 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// Opaque handle to a receiver-listener instance. Obtained from NDIlib_recv_listener_create and
// released with NDIlib_recv_listener_destroy.
struct NDIlib_recv_listener_instance_type;
typedef struct NDIlib_recv_listener_instance_type* NDIlib_recv_listener_instance_t;

// Creation settings used when instantiating a receiver listener.
typedef struct NDIlib_recv_listener_create_t {
	// The URL address of the NDI Discovery Server to connect to. If NULL, then the default NDI discovery
	// server will be used. If there is no discovery server available, then the receiver listener will not
	// be able to be instantiated and the create function will return NULL. The format of this field is
	// expected to be the hostname or IP address, optionally followed by a colon and a port number. If the
	// port number is not specified, then port 5959 will be used. For example:
	//     127.0.0.1:5959
	//     127.0.0.1
	//     hostname:5959
	// If this field is a comma-separated list, then only the first address will be used.
	const char* p_url_address;

#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor; defaults select the default discovery server.
	NDIlib_recv_listener_create_t(
		const char* p_url_address = NULL
	);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_recv_listener_create_t;
// Create an instance of the receiver listener. This will return NULL if it fails to create the listener
// (for example, when no NDI Discovery Server can be reached -- see NDIlib_recv_listener_create_t).
PROCESSINGNDILIB_API
NDIlib_recv_listener_instance_t NDIlib_recv_listener_create(const NDIlib_recv_listener_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL));

// Destroy an instance of the receiver listener. The instance pointer is invalid after this call.
PROCESSINGNDILIB_API
void NDIlib_recv_listener_destroy(NDIlib_recv_listener_instance_t p_instance);

// Returns true if the receiver listener is actively connected to the configured NDI Discovery Server.
PROCESSINGNDILIB_API
bool NDIlib_recv_listener_is_connected(NDIlib_recv_listener_instance_t p_instance);

// Retrieve the URL address of the NDI Discovery Server that the receiver listener is connected to. This can
// return NULL if the instance pointer is invalid.
PROCESSINGNDILIB_API
const char* NDIlib_recv_listener_get_server_url(NDIlib_recv_listener_instance_t p_instance);
// The types of streams that a receiver can receive from the source it's connected to.
typedef enum NDIlib_receiver_type_e {
	// Terminator / "no stream" value; also marks the end of NDIlib_receiver_t::p_streams.
	NDIlib_receiver_type_none = 0,
	NDIlib_receiver_type_metadata = 1,
	NDIlib_receiver_type_video = 2,
	NDIlib_receiver_type_audio = 3,

	// Make sure this is a 32-bit enumeration.
	NDIlib_receiver_type_max = 0x7fffffff
} NDIlib_receiver_type_e;
// The types of commands that a receiver can process.
typedef enum NDIlib_receiver_command_e {
	// Terminator / "no command" value; also marks the end of NDIlib_receiver_t::p_commands.
	NDIlib_receiver_command_none = 0,

	// A receiver can be told to connect to a specific source.
	NDIlib_receiver_command_connect = 1,

	// Make sure this is a 32-bit enumeration.
	NDIlib_receiver_command_max = 0x7fffffff
} NDIlib_receiver_command_e;
// Describes a receiver that has been discovered. Instances of this structure are returned by
// NDIlib_recv_listener_get_receivers; the listener owns all of the pointed-to memory.
typedef struct NDIlib_receiver_t {
	// The unique identifier for the receiver on the network.
	const char* p_uuid;

	// The human-readable name of the receiver.
	const char* p_name;

	// The unique identifier for the input group that the receiver belongs to.
	const char* p_input_uuid;

	// The human-readable name of the input group that the receiver belongs to.
	const char* p_input_name;

	// The known IP address of the receiver.
	const char* p_address;

	// An array of streams that the receiver is set to receive. The last entry in this list will be
	// NDIlib_receiver_type_none.
	NDIlib_receiver_type_e* p_streams;

	// How many elements are in the p_streams array, excluding the terminating
	// NDIlib_receiver_type_none entry.
	uint32_t num_streams;

	// An array of commands that the receiver can process. The last entry in this list will be
	// NDIlib_receiver_command_none.
	NDIlib_receiver_command_e* p_commands;

	// How many elements are in the p_commands array, excluding the terminating
	// NDIlib_receiver_command_none entry.
	uint32_t num_commands;

	// Are we currently subscribed for receive events?
	bool events_subscribed;

#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor.
	NDIlib_receiver_t(void);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_receiver_t;
// Retrieves the current list of advertised receivers; the count is written to *p_num_receivers. The memory
// for the returned structure is only valid until the next call or when destroy is called. For a given
// NDIlib_recv_listener_instance_t, do not call NDIlib_recv_listener_get_receivers asynchronously.
PROCESSINGNDILIB_API
const NDIlib_receiver_t* NDIlib_recv_listener_get_receivers(NDIlib_recv_listener_instance_t p_instance, uint32_t* p_num_receivers);

// This will allow you to wait until the number of online receivers has changed. Returns false on timeout.
PROCESSINGNDILIB_API
bool NDIlib_recv_listener_wait_for_receivers(NDIlib_recv_listener_instance_t p_instance, uint32_t timeout_in_ms);

//***********************************************************************************************************
// (file boundary -- the declarations below originate from a separate NDI SDK header)
//***********************************************************************************************************
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// Structures and type definitions required by NDI routing.

// Opaque handle to an instance of the router. Obtained from NDIlib_routing_create and released with
// NDIlib_routing_destroy.
struct NDIlib_routing_instance_type;
typedef struct NDIlib_routing_instance_type* NDIlib_routing_instance_t;

// The creation structure that is used when you are creating a routing source.
typedef struct NDIlib_routing_create_t
{
	// The name of the NDI source to create. This is a NULL terminated UTF8 string.
	const char* p_ndi_name;

	// What groups should this source be part of.
	const char* p_groups;

#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor.
	NDIlib_routing_create_t(const char* p_ndi_name_ = NULL, const char* p_groups_ = NULL);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_routing_create_t;
// Create an NDI routing source.
PROCESSINGNDILIB_API
NDIlib_routing_instance_t NDIlib_routing_create(const NDIlib_routing_create_t* p_create_settings);

// Destroy an NDI routing source. The instance pointer is invalid after this call.
PROCESSINGNDILIB_API
void NDIlib_routing_destroy(NDIlib_routing_instance_t p_instance);

// Change the routing of this source to another destination.
PROCESSINGNDILIB_API
bool NDIlib_routing_change(NDIlib_routing_instance_t p_instance, const NDIlib_source_t* p_source);

// Clear the current routing of this source so that it no longer routes to a destination.
PROCESSINGNDILIB_API
bool NDIlib_routing_clear(NDIlib_routing_instance_t p_instance);

// Get the current number of receivers connected to this source. This can be used to avoid even rendering
// when nothing is connected to the video source, which can significantly improve the efficiency if you want
// to make a lot of sources available on the network. If you specify a timeout that is not 0 then it will
// wait until there are connections for this amount of time.
PROCESSINGNDILIB_API
int NDIlib_routing_get_no_connections(NDIlib_routing_instance_t p_instance, uint32_t timeout_in_ms);

// Retrieve the source information for the given router instance. This pointer is valid until
// NDIlib_routing_destroy is called.
PROCESSINGNDILIB_API
const NDIlib_source_t* NDIlib_routing_get_source_name(NDIlib_routing_instance_t p_instance);

//***********************************************************************************************************
// (file boundary -- the declarations below originate from a separate NDI SDK header)
//***********************************************************************************************************
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// Structures and type definitions required by NDI sending.

// Opaque handle to an instance of the sender. Obtained from NDIlib_send_create and released with
// NDIlib_send_destroy.
struct NDIlib_send_instance_type;
typedef struct NDIlib_send_instance_type* NDIlib_send_instance_t;

// The creation structure that is used when you are creating a sender.
typedef struct NDIlib_send_create_t {
	// The name of the NDI source to create. This is a NULL terminated UTF8 string.
	const char* p_ndi_name;

	// What groups should this source be part of. NULL means default.
	const char* p_groups;

	// Do you want audio and video to "clock" themselves. When they are clocked then by adding video frames,
	// they will be rate limited to match the current frame rate that you are submitting at. The same is true
	// for audio. In general if you are submitting video and audio off a single thread then you should only
	// clock one of them (video is probably the better of the two to clock off). If you are submitting audio
	// and video on separate threads then having both clocked can be useful.
	bool clock_video, clock_audio;

#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor; both streams are clocked by default.
	NDIlib_send_create_t(
		const char* p_ndi_name_ = NULL,
		const char* p_groups_ = NULL,
		bool clock_video_ = true, bool clock_audio_ = true
	);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_send_create_t;
// Create a new sender instance. This will return NULL if it fails. If you leave p_create_settings NULL
// then the sender will be created with default settings.
PROCESSINGNDILIB_API
NDIlib_send_instance_t NDIlib_send_create(const NDIlib_send_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL));

// This will destroy an existing sender instance. The instance pointer is invalid after this call.
PROCESSINGNDILIB_API
void NDIlib_send_destroy(NDIlib_send_instance_t p_instance);

// This will add a video frame.
PROCESSINGNDILIB_API
void NDIlib_send_send_video_v2(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);

// This will add a video frame and will return immediately, having scheduled the frame to be displayed. All
// processing and sending of the video will occur asynchronously. The memory accessed by NDIlib_video_frame_t
// cannot be freed or re-used by the caller until a synchronizing event has occurred. In general the API is
// better able to take advantage of asynchronous processing than you might be able to by simply having a
// separate thread to submit frames.
//
// This call is particularly beneficial when processing BGRA video since it allows any color conversion,
// compression and network sending to all be done on separate threads from your main rendering thread.
//
// Synchronizing events are :
// - a call to NDIlib_send_send_video
// - a call to NDIlib_send_send_video_async with another frame to be sent
// - a call to NDIlib_send_send_video with p_video_data=NULL
// - a call to NDIlib_send_destroy
PROCESSINGNDILIB_API
void NDIlib_send_send_video_async_v2(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data);

// This will add an audio frame.
PROCESSINGNDILIB_API
void NDIlib_send_send_audio_v2(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data);

// This will add an audio frame.
PROCESSINGNDILIB_API
void NDIlib_send_send_audio_v3(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data);

// This will add a metadata frame.
PROCESSINGNDILIB_API
void NDIlib_send_send_metadata(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);

// This allows you to receive metadata from the other end of the connection.
PROCESSINGNDILIB_API
NDIlib_frame_type_e NDIlib_send_capture(
	NDIlib_send_instance_t p_instance, // The instance data.
	NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL).
	uint32_t timeout_in_ms // The amount of time in milliseconds to wait for data.
);

// Free the buffers returned by capture for metadata.
PROCESSINGNDILIB_API
void NDIlib_send_free_metadata(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);

// Determine the current tally state. If you specify a timeout then it will wait until it has changed,
// otherwise it will simply poll it and return the current tally immediately. The return value is whether
// anything has actually changed (true) or whether it timed out (false).
PROCESSINGNDILIB_API
bool NDIlib_send_get_tally(NDIlib_send_instance_t p_instance, NDIlib_tally_t* p_tally, uint32_t timeout_in_ms);

// Get the current number of receivers connected to this source. This can be used to avoid even rendering
// when nothing is connected to the video source, which can significantly improve the efficiency if you want
// to make a lot of sources available on the network. If you specify a timeout that is not 0 then it will
// wait until there are connections for this amount of time.
PROCESSINGNDILIB_API
int NDIlib_send_get_no_connections(NDIlib_send_instance_t p_instance, uint32_t timeout_in_ms);

// Connection based metadata is data that is sent automatically each time a new connection is received. You
// queue all of these up and they are sent on each connection. To reset them you need to clear them all and
// set them up again.
PROCESSINGNDILIB_API
void NDIlib_send_clear_connection_metadata(NDIlib_send_instance_t p_instance);

// Add a connection metadata string to the list of what is sent on each new connection. If someone is already
// connected then this string will be sent to them immediately.
PROCESSINGNDILIB_API
void NDIlib_send_add_connection_metadata(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata);

// This will assign a new fail-over source for this video source. What this means is that if this video
// source was to fail any receivers would automatically switch over to use this source, unless this source
// then came back online. You can specify NULL to clear the source.
PROCESSINGNDILIB_API
void NDIlib_send_set_failover(NDIlib_send_instance_t p_instance, const NDIlib_source_t* p_failover_source);

// Retrieve the source information for the given sender instance. This pointer is valid until
// NDIlib_send_destroy is called.
PROCESSINGNDILIB_API
const NDIlib_source_t* NDIlib_send_get_source_name(NDIlib_send_instance_t p_instance);

//***********************************************************************************************************
// (file boundary -- the declarations below originate from a separate NDI SDK header)
//***********************************************************************************************************
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// Pull in the standard fixed-width integer and boolean types that the NDI type definitions rely on.
// <stdbool.h> is only needed for C; C++ has bool built in.
#ifndef __cplusplus
#include <stdbool.h>
#endif
#include <stdint.h>

// A timeout value meaning "wait forever". The value mirrors the Win32 INFINITE constant (0xFFFFFFFF);
// it is only defined here when the platform headers have not already provided an INFINITE macro.
#ifndef INFINITE
static const uint32_t INFINITE = 0xFFFFFFFF;
#endif

//***********************************************************************************************************
// (file boundary -- the declarations below originate from a separate NDI SDK header)
//***********************************************************************************************************
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// This describes a video frame. DEPRECATED: superseded by NDIlib_video_frame_v2_t; retained for
// backwards compatibility with the legacy capture/send entry points below.
PROCESSINGNDILIB_DEPRECATED
typedef struct NDIlib_video_frame_t {
	// The resolution of this frame.
	int xres, yres;

	// What FourCC pixel format this frame uses (see NDIlib_FourCC_video_type_e).
	NDIlib_FourCC_video_type_e FourCC;

	// What is the frame rate of this frame, expressed as a rational number.
	// For instance NTSC is 30000,1001 = 30000/1001 = 29.97 fps
	int frame_rate_N, frame_rate_D;

	// What is the picture aspect ratio of this frame.
	// For instance 16.0/9.0 = 1.778 is 16:9 video. If this is zero, then square pixels are assumed (xres/yres).
	float picture_aspect_ratio;

	// Is this a fielded frame, or is it progressive.
	NDIlib_frame_format_type_e frame_format_type;

	// The timecode of this frame in 100-nanosecond intervals.
	int64_t timecode;

	// The video data itself.
	uint8_t* p_data;

	// The inter-line stride of the video data, in bytes.
	int line_stride_in_bytes;

#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor; defaults describe an empty 29.97 fps progressive UYVY frame.
	NDIlib_video_frame_t(
		int xres_ = 0, int yres_ = 0,
		NDIlib_FourCC_video_type_e FourCC_ = NDIlib_FourCC_type_UYVY,
		int frame_rate_N_ = 30000, int frame_rate_D_ = 1001,
		float picture_aspect_ratio_ = 0.0f,
		NDIlib_frame_format_type_e frame_format_type_ = NDIlib_frame_format_type_progressive,
		int64_t timecode_ = NDIlib_send_timecode_synthesize,
		uint8_t* p_data_ = NULL, int line_stride_in_bytes_ = 0
	);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_video_frame_t;
// This describes an audio frame. DEPRECATED: superseded by NDIlib_audio_frame_v2_t/_v3_t; retained for
// backwards compatibility with the legacy capture/send entry points below.
PROCESSINGNDILIB_DEPRECATED
typedef struct NDIlib_audio_frame_t {
	// The sample-rate of this buffer.
	int sample_rate;

	// The number of audio channels.
	int no_channels;

	// The number of audio samples per channel.
	int no_samples;

	// The timecode of this frame in 100-nanosecond intervals.
	int64_t timecode;

	// The audio data, as planar floating point samples.
	float* p_data;

	// The inter channel stride of the audio channels, in bytes.
	int channel_stride_in_bytes;

#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor; defaults describe an empty 48 kHz stereo frame.
	NDIlib_audio_frame_t(
		int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0,
		int64_t timecode_ = NDIlib_send_timecode_synthesize,
		float* p_data_ = NULL, int channel_stride_in_bytes_ = 0
	);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_audio_frame_t;
// For legacy reasons I called this the wrong thing. For backwards compatibility.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
NDIlib_find_instance_t NDIlib_find_create2(const NDIlib_find_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL));

PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
NDIlib_find_instance_t NDIlib_find_create(const NDIlib_find_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL));

// DEPRECATED. This function is basically exactly the following and was confusing to use.
//     if ((!timeout_in_ms) || (NDIlib_find_wait_for_sources(p_instance, timeout_in_ms)))
//         return NDIlib_find_get_current_sources(p_instance, p_no_sources);
//     return NULL;
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
const NDIlib_source_t* NDIlib_find_get_sources(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources, uint32_t timeout_in_ms);

// The creation structure that is used when you are creating a receiver.
// DEPRECATED: superseded by the v3 creation structure; retained for backwards compatibility.
PROCESSINGNDILIB_DEPRECATED
typedef struct NDIlib_recv_create_t {
	// The source that you wish to connect to.
	NDIlib_source_t source_to_connect_to;

	// Your preference of color space. See above.
	NDIlib_recv_color_format_e color_format;

	// The bandwidth setting that you wish to use for this video source. Bandwidth is
	// controlled by changing both the compression level and the resolution of the source.
	// A good use for low bandwidth is working on WIFI connections.
	NDIlib_recv_bandwidth_e bandwidth;

	// When this flag is FALSE, all video that you receive will be progressive. For sources that provide
	// fields, this is de-interlaced on the receiving side (because we cannot change what the up-stream
	// source was actually rendering). This is provided as a convenience to down-stream sources that do not
	// wish to understand fielded video. There is almost no performance impact of using this function.
	bool allow_video_fields;

#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor.
	NDIlib_recv_create_t(
		const NDIlib_source_t source_to_connect_to_ = NDIlib_source_t(),
		NDIlib_recv_color_format_e color_format_ = NDIlib_recv_color_format_UYVY_BGRA,
		NDIlib_recv_bandwidth_e bandwidth_ = NDIlib_recv_bandwidth_highest,
		bool allow_video_fields_ = true
	);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_recv_create_t;

// This function is deprecated, please use NDIlib_recv_create_v3 if you can. Using this function will
// continue to work, and be supported for backwards compatibility. If the input parameter is NULL it will be
// created with default settings and an automatically determined receiver name.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
NDIlib_recv_instance_t NDIlib_recv_create_v2(const NDIlib_recv_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL));

// For legacy reasons I called this the wrong thing. For backwards compatibility. If the input parameter is
// NULL it will be created with default settings and an automatically determined receiver name.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
NDIlib_recv_instance_t NDIlib_recv_create2(const NDIlib_recv_create_t* p_create_settings NDILIB_CPP_DEFAULT_VALUE(NULL));

// This function is deprecated, please use NDIlib_recv_create_v3 if you can. Using this function will
// continue to work, and be supported for backwards compatibility. This version sets bandwidth to highest and
// allow fields to true. If the input parameter is NULL it will be created with default settings and an
// automatically determined receiver name.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
NDIlib_recv_instance_t NDIlib_recv_create(const NDIlib_recv_create_t* p_create_settings);
// This will allow you to receive video, audio and metadata frames. Any of the buffers can be NULL, in which
// case data of that type will not be captured in this call. This call can be called simultaneously on
// separate threads, so it is entirely possible to receive audio, video, metadata all on separate threads.
// This function will return NDIlib_frame_type_none if no data is received within the specified timeout and
// NDIlib_frame_type_error if the connection is lost. Buffers captured with this must be freed with the
// appropriate free function below.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
NDIlib_frame_type_e NDIlib_recv_capture(
	NDIlib_recv_instance_t p_instance, // The library instance.
	NDIlib_video_frame_t* p_video_data, // The video data received (can be NULL).
	NDIlib_audio_frame_t* p_audio_data, // The audio data received (can be NULL).
	NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL).
	uint32_t timeout_in_ms // The amount of time in milliseconds to wait for data.
);

// Free the buffers returned by capture for video.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
void NDIlib_recv_free_video(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_t* p_video_data);

// Free the buffers returned by capture for audio.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
void NDIlib_recv_free_audio(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data);

// This will add a video frame.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
void NDIlib_send_send_video(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data);

// This will add a video frame and will return immediately, having scheduled the frame to be displayed. All
// processing and sending of the video will occur asynchronously. The memory accessed by NDIlib_video_frame_t
// cannot be freed or re-used by the caller until a synchronizing event has occurred. In general the API is
// better able to take advantage of asynchronous processing than you might be able to by simply having a
// separate thread to submit frames.
//
// This call is particularly beneficial when processing BGRA video since it allows any color conversion,
// compression and network sending to all be done on separate threads from your main rendering thread.
//
// Synchronizing events are :
// - a call to NDIlib_send_send_video
// - a call to NDIlib_send_send_video_async with another frame to be sent
// - a call to NDIlib_send_send_video with p_video_data=NULL
// - a call to NDIlib_send_destroy
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
void NDIlib_send_send_video_async(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data);

// This will add an audio frame.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
void NDIlib_send_send_audio(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data);

// Convert a planar floating point audio buffer into an interleaved short audio buffer.
// IMPORTANT : You must allocate the space for the samples in the destination to allow for your own memory management.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
void NDIlib_util_audio_to_interleaved_16s(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst);

// Convert an interleaved short audio buffer into a planar floating point one.
// IMPORTANT : You must allocate the space for the samples in the destination to allow for your own memory management.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
void NDIlib_util_audio_from_interleaved_16s(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_t* p_dst);

// Convert a planar floating point audio buffer into an interleaved floating point audio buffer.
// IMPORTANT : You must allocate the space for the samples in the destination to allow for your own memory management.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
void NDIlib_util_audio_to_interleaved_32f(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst);

// Convert an interleaved floating point audio buffer into a planar floating point one.
// IMPORTANT : You must allocate the space for the samples in the destination to allow for your own memory management.
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED
void NDIlib_util_audio_from_interleaved_32f(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_t* p_dst);

//***********************************************************************************************************
// (file boundary -- the declarations below originate from a separate NDI SDK header)
//***********************************************************************************************************
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// Pack four 8-bit character codes into a single 32-bit FourCC value, with ch0 in the least significant
// byte. Each argument is truncated to uint8_t before widening, so only the low byte of each is used.
#ifndef NDI_LIB_FOURCC
#define NDI_LIB_FOURCC(ch0, ch1, ch2, ch3) \
	((uint32_t)(uint8_t)(ch0) | ((uint32_t)(uint8_t)(ch1) << 8) | ((uint32_t)(uint8_t)(ch2) << 16) | ((uint32_t)(uint8_t)(ch3) << 24))
#endif
// An enumeration to specify the type of a packet returned by the capture functions.
typedef enum NDIlib_frame_type_e {
	// What frame type is this?
	NDIlib_frame_type_none = 0,
	NDIlib_frame_type_video = 1,
	NDIlib_frame_type_audio = 2,
	NDIlib_frame_type_metadata = 3,
	NDIlib_frame_type_error = 4,

	// This indicates that the settings on this input have changed. This value will be returned from one of
	// the NDIlib_recv_capture functions when the device is known to have new settings, for instance the web
	// URL has changed or the device is now known to be a PTZ camera.
	NDIlib_frame_type_status_change = 100,

	// This indicates that the source has changed. This value will be returned from one of the
	// NDIlib_recv_capture functions when the source that the receiver is connected to has changed.
	NDIlib_frame_type_source_change = 101,

	// Make sure this is a 32-bit enumeration.
	NDIlib_frame_type_max = 0x7fffffff
} NDIlib_frame_type_e;
// FourCC values for video frames.
// Each value has a legacy NDIlib_FourCC_type_* alias kept for backwards compatibility
// (see the deprecated NDIlib_FourCC_type_e typedef below).
typedef enum NDIlib_FourCC_video_type_e {
	// YCbCr color space using 4:2:2.
	NDIlib_FourCC_video_type_UYVY = NDI_LIB_FOURCC('U', 'Y', 'V', 'Y'),
	NDIlib_FourCC_type_UYVY = NDIlib_FourCC_video_type_UYVY,
	// YCbCr + Alpha color space, using 4:2:2:4.
	// In memory there are two separate planes. The first is a regular
	// UYVY 4:2:2 buffer. Immediately following this in memory is an
	// alpha channel buffer.
	NDIlib_FourCC_video_type_UYVA = NDI_LIB_FOURCC('U', 'Y', 'V', 'A'),
	NDIlib_FourCC_type_UYVA = NDIlib_FourCC_video_type_UYVA,
	// YCbCr color space using 4:2:2 in 16bpp.
	// In memory this is a semi-planar format. This is identical to a 16bpp version of the NV16 format.
	// The first buffer is a 16bpp luminance buffer.
	// Immediately after this is an interleaved buffer of 16bpp Cb, Cr pairs.
	NDIlib_FourCC_video_type_P216 = NDI_LIB_FOURCC('P', '2', '1', '6'),
	NDIlib_FourCC_type_P216 = NDIlib_FourCC_video_type_P216,
	// YCbCr color space with an alpha channel, using 4:2:2:4.
	// In memory this is a semi-planar format.
	// The first buffer is a 16bpp luminance buffer.
	// Immediately after this is an interleaved buffer of 16bpp Cb, Cr pairs.
	// Immediately after is a single buffer of 16bpp alpha channel.
	NDIlib_FourCC_video_type_PA16 = NDI_LIB_FOURCC('P', 'A', '1', '6'),
	NDIlib_FourCC_type_PA16 = NDIlib_FourCC_video_type_PA16,
	// Planar 8bit 4:2:0 video format.
	// The first buffer is an 8bpp luminance buffer.
	// Immediately following this is a 8bpp Cr buffer.
	// Immediately following this is a 8bpp Cb buffer.
	NDIlib_FourCC_video_type_YV12 = NDI_LIB_FOURCC('Y', 'V', '1', '2'),
	NDIlib_FourCC_type_YV12 = NDIlib_FourCC_video_type_YV12,
	// Planar 8bit 4:2:0 video format (same as YV12 but with the chroma planes swapped).
	// The first buffer is an 8bpp luminance buffer.
	// Immediately following this is a 8bpp Cb buffer.
	// Immediately following this is a 8bpp Cr buffer.
	NDIlib_FourCC_video_type_I420 = NDI_LIB_FOURCC('I', '4', '2', '0'),
	NDIlib_FourCC_type_I420 = NDIlib_FourCC_video_type_I420,
	// Planar 8bit 4:2:0 video format.
	// The first buffer is an 8bpp luminance buffer.
	// Immediately following this is an interleaved buffer of 8bpp Cb, Cr pairs
	NDIlib_FourCC_video_type_NV12 = NDI_LIB_FOURCC('N', 'V', '1', '2'),
	NDIlib_FourCC_type_NV12 = NDIlib_FourCC_video_type_NV12,
	// NOTE(review): the four RGB formats below are labelled "Planar" in the original SDK wording,
	// but the per-pixel byte ordering described suggests packed 32-bit pixels — TODO confirm.
	// 8bit, 4:4:4:4 video format.
	// Color ordering in memory is blue, green, red, alpha
	NDIlib_FourCC_video_type_BGRA = NDI_LIB_FOURCC('B', 'G', 'R', 'A'),
	NDIlib_FourCC_type_BGRA = NDIlib_FourCC_video_type_BGRA,
	// 8bit, 4:4:4 video format, packed into 32bit pixels.
	// Color ordering in memory is blue, green, red, 255
	NDIlib_FourCC_video_type_BGRX = NDI_LIB_FOURCC('B', 'G', 'R', 'X'),
	NDIlib_FourCC_type_BGRX = NDIlib_FourCC_video_type_BGRX,
	// 8bit, 4:4:4:4 video format.
	// Color ordering in memory is red, green, blue, alpha
	NDIlib_FourCC_video_type_RGBA = NDI_LIB_FOURCC('R', 'G', 'B', 'A'),
	NDIlib_FourCC_type_RGBA = NDIlib_FourCC_video_type_RGBA,
	// 8bit, 4:4:4 video format, packed into 32bit pixels.
	// Color ordering in memory is red, green, blue, 255.
	NDIlib_FourCC_video_type_RGBX = NDI_LIB_FOURCC('R', 'G', 'B', 'X'),
	NDIlib_FourCC_type_RGBX = NDIlib_FourCC_video_type_RGBX,
	// Make sure this is a 32-bit enumeration.
	NDIlib_FourCC_video_type_max = 0x7fffffff
} NDIlib_FourCC_video_type_e;
// Really for backwards compatibility.
// NDIlib_FourCC_type_e is the legacy name of the video FourCC enumeration; new code should use
// NDIlib_FourCC_video_type_e directly.
PROCESSINGNDILIB_DEPRECATED
typedef NDIlib_FourCC_video_type_e NDIlib_FourCC_type_e;
// FourCC values for audio frames.
typedef enum NDIlib_FourCC_audio_type_e {
	// Planar 32-bit floating point. Be sure to specify the channel stride.
	// Note the lowercase 'p' in the FourCC ('F','L','T','p').
	NDIlib_FourCC_audio_type_FLTP = NDI_LIB_FOURCC('F', 'L', 'T', 'p'),
	// Legacy alias kept for backwards compatibility.
	NDIlib_FourCC_type_FLTP = NDIlib_FourCC_audio_type_FLTP,
	// Make sure this is a 32-bit enumeration.
	NDIlib_FourCC_audio_type_max = 0x7fffffff
} NDIlib_FourCC_audio_type_e;
// Describes whether a video frame is progressive, interlaced, or a single field.
typedef enum NDIlib_frame_format_type_e {
	// A progressive frame.
	NDIlib_frame_format_type_progressive = 1,
	// A fielded frame with the field 0 being on the even lines and field 1 being
	// on the odd lines. (Note this is value 0, listed out of numeric order.)
	NDIlib_frame_format_type_interleaved = 0,
	// Individual fields.
	NDIlib_frame_format_type_field_0 = 2,
	NDIlib_frame_format_type_field_1 = 3,
	// Make sure this is a 32-bit enumeration.
	NDIlib_frame_format_type_max = 0x7fffffff
} NDIlib_frame_format_type_e;
// When you specify this as a timecode, the timecode will be synthesized for you. This may be used when
// sending video, audio or metadata. If you never specify a timecode at all, asking for each to be
// synthesized, then this will use the current system time as the starting timecode and then generate
// synthetic ones, keeping your streams exactly in sync as long as the frames you are sending do not deviate
// from the system time in any meaningful way. In practice this means that if you never specify timecodes
// that they will always be generated for you correctly. Timecodes coming from different senders on the same
// machine will always be in sync with each other when working in this way. If you have NTP installed on your
// local network, then streams can be synchronized between multiple machines with very high precision.
//
// If you specify a timecode at a particular frame (audio or video), then ask for all subsequent ones to be
// synthesized. The subsequent ones will be generated to continue this sequence maintaining the correct
// relationship both between the streams and the samples generated, avoiding them deviating in time from the
// timecode that you specified in any meaningful way.
//
// If you specify timecodes on one stream (e.g. video) and ask for the other stream (audio) to be
// synthesized, the correct timecodes will be generated for the other stream and will be synthesized exactly
// to match (they are not quantized inter-streams) the correct sample positions.
//
// When you send metadata messages and ask for the timecode to be synthesized, then it is chosen to match the
// closest audio or video frame timecode so that it looks close to something you might want ... unless there
// is no sample that looks close, in which case a timecode is synthesized from the last ones known and the
// time since it was sent.
static const int64_t NDIlib_send_timecode_synthesize = INT64_MAX;
// If the time-stamp is not available (i.e. a version of a sender before v2.5).
static const int64_t NDIlib_recv_timestamp_undefined = INT64_MAX;
// This is a descriptor of a NDI source available on the network.
typedef struct NDIlib_source_t {
	// A UTF8 string that provides a user readable name for this source. This can be used for serialization,
	// etc... and comprises the machine name and the source name on that machine. In the form,
	//     MACHINE_NAME (NDI_SOURCE_NAME)
	// If you specify this parameter either as NULL, or an EMPTY string then the specific IP address and port
	// number from below is used.
	const char* p_ndi_name;
	// A UTF8 string that provides the actual network address and any parameters. This is not meant to be
	// application readable and might well change in the future. This can be NULL if you do not know it and
	// the API internally will instantiate a finder that is used to discover it even if it is not yet
	// available on the network.
	union { // The current way of addressing the value.
		const char* p_url_address;
		// We used to use an IP address before we used the more general URL notation; this is now
		// deprecated but maintained for compatibility.
		PROCESSINGNDILIB_DEPRECATED const char* p_ip_address;
	};
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor; both members default to NULL.
	NDIlib_source_t(const char* p_ndi_name_ = NULL, const char* p_url_address_ = NULL);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_source_t;
// This describes a video frame.
typedef struct NDIlib_video_frame_v2_t {
	// The resolution of this frame.
	int xres, yres;
	// What FourCC describing the type of data for this frame.
	NDIlib_FourCC_video_type_e FourCC;
	// What is the frame rate of this frame.
	// For instance NTSC is 30000,1001 = 30000/1001 = 29.97 fps.
	int frame_rate_N, frame_rate_D;
	// What is the picture aspect ratio of this frame.
	// For instance 16.0/9.0 = 1.778 is 16:9 video
	// 0 means square pixels.
	float picture_aspect_ratio;
	// Is this a fielded frame, or is it progressive.
	NDIlib_frame_format_type_e frame_format_type;
	// The timecode of this frame in 100-nanosecond intervals.
	int64_t timecode;
	// The video data itself.
	uint8_t* p_data;
	union { // If the FourCC is not a compressed type, then this will be the inter-line stride of the video data
		// in bytes. If the stride is 0, then it will default to sizeof(one pixel)*xres.
		int line_stride_in_bytes;
		// If the FourCC is a compressed type, then this will be the size of the p_data buffer in bytes.
		int data_size_in_bytes;
	};
	// Per frame metadata for this frame. This is a NULL terminated UTF8 string that should be in XML format.
	// If you do not want any metadata then you may specify NULL here.
	const char* p_metadata; // Present in >= v2.5
	// This is only valid when receiving a frame and is specified as a 100-nanosecond time that was the exact
	// moment that the frame was submitted by the sending side and is generated by the SDK. If this value is
	// NDIlib_recv_timestamp_undefined then this value is not available.
	int64_t timestamp; // Present in >= v2.5
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor. Defaults describe an empty NTSC-rate (29.97 fps) progressive UYVY
	// frame with a synthesized timecode.
	NDIlib_video_frame_v2_t(
		int xres_ = 0, int yres_ = 0,
		NDIlib_FourCC_video_type_e FourCC_ = NDIlib_FourCC_video_type_UYVY,
		int frame_rate_N_ = 30000, int frame_rate_D_ = 1001,
		float picture_aspect_ratio_ = 0.0f,
		NDIlib_frame_format_type_e frame_format_type_ = NDIlib_frame_format_type_progressive,
		int64_t timecode_ = NDIlib_send_timecode_synthesize,
		uint8_t* p_data_ = NULL, int line_stride_in_bytes_ = 0,
		const char* p_metadata_ = NULL,
		int64_t timestamp_ = 0
	);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_video_frame_v2_t;
// This describes an audio frame (legacy v2 layout; planar 32-bit float only — see
// NDIlib_audio_frame_v3_t for the FourCC-tagged variant).
typedef struct NDIlib_audio_frame_v2_t {
	// The sample-rate of this buffer.
	int sample_rate;
	// The number of audio channels.
	int no_channels;
	// The number of audio samples per channel.
	int no_samples;
	// The timecode of this frame in 100-nanosecond intervals.
	int64_t timecode;
	// The audio data.
	float* p_data;
	// The inter channel stride of the audio channels, in bytes.
	int channel_stride_in_bytes;
	// Per frame metadata for this frame. This is a NULL terminated UTF8 string that should be in XML format.
	// If you do not want any metadata then you may specify NULL here.
	const char* p_metadata; // Present in >= v2.5
	// This is only valid when receiving a frame and is specified as a 100-nanosecond time that was the exact
	// moment that the frame was submitted by the sending side and is generated by the SDK. If this value is
	// NDIlib_recv_timestamp_undefined then this value is not available.
	int64_t timestamp; // Present in >= v2.5
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor. Defaults describe an empty 48 kHz stereo frame with a
	// synthesized timecode.
	NDIlib_audio_frame_v2_t(
		int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0,
		int64_t timecode_ = NDIlib_send_timecode_synthesize,
		float* p_data_ = NULL, int channel_stride_in_bytes_ = 0,
		const char* p_metadata_ = NULL,
		int64_t timestamp_ = 0
	);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_audio_frame_v2_t;
// This describes an audio frame (v3 layout; the sample format is tagged by a FourCC and the
// data pointer is raw bytes).
typedef struct NDIlib_audio_frame_v3_t {
	// The sample-rate of this buffer.
	int sample_rate;
	// The number of audio channels.
	int no_channels;
	// The number of audio samples per channel.
	int no_samples;
	// The timecode of this frame in 100-nanosecond intervals.
	int64_t timecode;
	// What FourCC describing the type of data for this frame.
	NDIlib_FourCC_audio_type_e FourCC;
	// The audio data.
	uint8_t* p_data;
	union {
		// If the FourCC is not a compressed type and the audio format is planar, then this will be the
		// stride in bytes for a single channel.
		int channel_stride_in_bytes;
		// If the FourCC is a compressed type, then this will be the size of the p_data buffer in bytes.
		int data_size_in_bytes;
	};
	// Per frame metadata for this frame. This is a NULL terminated UTF8 string that should be in XML format.
	// If you do not want any metadata then you may specify NULL here.
	const char* p_metadata;
	// This is only valid when receiving a frame and is specified as a 100-nanosecond time that was the exact
	// moment that the frame was submitted by the sending side and is generated by the SDK. If this value is
	// NDIlib_recv_timestamp_undefined then this value is not available.
	int64_t timestamp;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor. Defaults describe an empty 48 kHz stereo planar-float frame
	// with a synthesized timecode.
	NDIlib_audio_frame_v3_t(
		int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0,
		int64_t timecode_ = NDIlib_send_timecode_synthesize,
		NDIlib_FourCC_audio_type_e FourCC_ = NDIlib_FourCC_audio_type_FLTP,
		uint8_t* p_data_ = NULL, int channel_stride_in_bytes_ = 0,
		const char* p_metadata_ = NULL,
		int64_t timestamp_ = 0
	);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_audio_frame_v3_t;
// The data description for metadata.
typedef struct NDIlib_metadata_frame_t {
	// The length of the string in UTF8 characters. This includes the NULL terminating character. If this is
	// 0, then the length is assumed to be the length of a NULL terminated string.
	// NOTE(review): "characters" here likely means bytes of the UTF8 encoding — TODO confirm.
	int length;
	// The timecode of this frame in 100-nanosecond intervals.
	int64_t timecode;
	// The metadata as a UTF8 XML string. This is a NULL terminated string.
	char* p_data;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor; defaults to an empty frame with a synthesized timecode.
	NDIlib_metadata_frame_t(int length_ = 0, int64_t timecode_ = NDIlib_send_timecode_synthesize, char* p_data_ = NULL);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_metadata_frame_t;
// Tally structures
typedef struct NDIlib_tally_t {
	// Is this currently on program output.
	bool on_program;
	// Is this currently on preview output.
	bool on_preview;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor; both tally states default to off.
	NDIlib_tally_t(bool on_program_ = false, bool on_preview_ = false);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_tally_t;

// ---------------------------------------------------------------------------------------------------------
// (A second header file begins below; the git-diff markers that separated the two files in this
// extraction were replaced by this comment.)
// ---------------------------------------------------------------------------------------------------------
#pragma once
// NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review
// the SDK documentation for the description of the full license terms, which are also provided in the file
// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any
// part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be
// downloaded at http://ndi.video/
//
//***********************************************************************************************************
//
// Copyright (C) 2023-2025 Vizrt NDI AB. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files(the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions :
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
// EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
// THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//***********************************************************************************************************
// Because many applications like submitting 16-bit interleaved audio, these functions will convert in and
// out of that format. It is important to note that the NDI SDK does fully define audio levels, something
// that most applications that you use do not. Specifically, the floating-point range, -1.0 to +1.0, is
// defined as a professional audio reference level of +4 dBU. If we take 16-bit audio and scale it into this
// range it is almost always correct for sending and will cause no problems. For receiving however it is not
// at all uncommon that the user has audio that exceeds the reference level, and if you are not careful you
// will end up with audio clipping when you use the 16-bit range.
// This describes an audio frame with interleaved signed 16-bit samples.
typedef struct NDIlib_audio_frame_interleaved_16s_t {
	// The sample-rate of this buffer.
	int sample_rate;
	// The number of audio channels.
	int no_channels;
	// The number of audio samples per channel.
	int no_samples;
	// The timecode of this frame in 100-nanosecond intervals.
	int64_t timecode;
	// The audio reference level in dB. This specifies how many dB above the reference level (+4 dBU) is the
	// full range of 16-bit audio. If you do not understand this and want to just use numbers:
	// - If you are sending audio, specify +0 dB. Most common applications produce audio at reference level.
	// - If receiving audio, specify +20 dB. This means that the full 16-bit range corresponds to
	//   professional level audio with 20 dB of headroom. Note that if you are writing it into a file it
	//   might sound soft because you have 20 dB of headroom before clipping.
	int reference_level;
	// The audio data, interleaved 16-bit samples.
	int16_t* p_data;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor. Defaults describe an empty 48 kHz stereo frame at reference
	// level with a synthesized timecode.
	NDIlib_audio_frame_interleaved_16s_t(
		int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0,
		int64_t timecode_ = NDIlib_send_timecode_synthesize,
		int reference_level_ = 0,
		int16_t* p_data_ = NULL
	);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_audio_frame_interleaved_16s_t;
// This describes an audio frame with interleaved signed 32-bit samples.
typedef struct NDIlib_audio_frame_interleaved_32s_t {
	// The sample-rate of this buffer.
	int sample_rate;
	// The number of audio channels.
	int no_channels;
	// The number of audio samples per channel.
	int no_samples;
	// The timecode of this frame in 100-nanosecond intervals.
	int64_t timecode;
	// The audio reference level in dB. This specifies how many dB above the reference level (+4 dBU) is the
	// full range of 32-bit audio. If you do not understand this and want to just use numbers:
	// - If you are sending audio, specify +0 dB. Most common applications produce audio at reference level.
	// - If receiving audio, specify +20 dB. This means that the full 32-bit range corresponds to
	//   professional level audio with 20 dB of headroom. Note that if you are writing it into a file it
	//   might sound soft because you have 20 dB of headroom before clipping.
	int reference_level;
	// The audio data, interleaved 32-bit samples.
	int32_t* p_data;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor. Defaults describe an empty 48 kHz stereo frame at reference
	// level with a synthesized timecode.
	NDIlib_audio_frame_interleaved_32s_t(
		int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0,
		int64_t timecode_ = NDIlib_send_timecode_synthesize,
		int reference_level_ = 0,
		int32_t* p_data_ = NULL
	);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_audio_frame_interleaved_32s_t;
// This describes an audio frame with interleaved 32-bit floating-point samples.
// No reference_level field is needed: the float range -1.0 to +1.0 is already defined as +4 dBU
// (see the level discussion above).
typedef struct NDIlib_audio_frame_interleaved_32f_t {
	// The sample-rate of this buffer.
	int sample_rate;
	// The number of audio channels.
	int no_channels;
	// The number of audio samples per channel.
	int no_samples;
	// The timecode of this frame in 100-nanosecond intervals.
	int64_t timecode;
	// The audio data, interleaved 32-bit floating-point samples.
	float* p_data;
#if NDILIB_CPP_DEFAULT_CONSTRUCTORS
	// C++ convenience constructor. Defaults describe an empty 48 kHz stereo frame with a
	// synthesized timecode.
	NDIlib_audio_frame_interleaved_32f_t(
		int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0,
		int64_t timecode_ = NDIlib_send_timecode_synthesize,
		float* p_data_ = NULL
	);
#endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS
} NDIlib_audio_frame_interleaved_32f_t;
// This will add an audio frame in interleaved 16-bit.
// p_instance is the sender to submit to; p_audio_data describes the frame (see the
// reference_level notes on NDIlib_audio_frame_interleaved_16s_t).
PROCESSINGNDILIB_API
void NDIlib_util_send_send_audio_interleaved_16s(
	NDIlib_send_instance_t p_instance,
	const NDIlib_audio_frame_interleaved_16s_t* p_audio_data
);
// This will add an audio frame in interleaved 32-bit.
PROCESSINGNDILIB_API
void NDIlib_util_send_send_audio_interleaved_32s(
	NDIlib_send_instance_t p_instance,
	const NDIlib_audio_frame_interleaved_32s_t* p_audio_data
);
// This will add an audio frame in interleaved floating point.
PROCESSINGNDILIB_API
void NDIlib_util_send_send_audio_interleaved_32f(
	NDIlib_send_instance_t p_instance,
	const NDIlib_audio_frame_interleaved_32f_t* p_audio_data
);
// Convert to interleaved 16-bit from a v2 (planar float) audio frame.
// The caller provides p_dst; see the reference_level notes on the destination struct.
PROCESSINGNDILIB_API
void NDIlib_util_audio_to_interleaved_16s_v2(
	const NDIlib_audio_frame_v2_t* p_src,
	NDIlib_audio_frame_interleaved_16s_t* p_dst
);
// Convert to interleaved 16-bit. The FourCC of the source audio frame must be NDIlib_FourCC_audio_type_FLTP.
// Returns true if the conversion was successful.
PROCESSINGNDILIB_API
bool NDIlib_util_audio_to_interleaved_16s_v3(
	const NDIlib_audio_frame_v3_t* p_src,
	NDIlib_audio_frame_interleaved_16s_t* p_dst
);
// Convert from interleaved 16-bit into a v2 (planar float) audio frame.
PROCESSINGNDILIB_API
void NDIlib_util_audio_from_interleaved_16s_v2(
	const NDIlib_audio_frame_interleaved_16s_t* p_src,
	NDIlib_audio_frame_v2_t* p_dst
);
// Convert from interleaved 16-bit. The FourCC of the destination audio frame must be
// NDIlib_FourCC_audio_type_FLTP and its p_data allocated accordingly.
// Returns true if the conversion was successful.
PROCESSINGNDILIB_API
bool NDIlib_util_audio_from_interleaved_16s_v3(
	const NDIlib_audio_frame_interleaved_16s_t* p_src,
	NDIlib_audio_frame_v3_t* p_dst
);
// Convert to interleaved 32-bit from a v2 (planar float) audio frame.
PROCESSINGNDILIB_API
void NDIlib_util_audio_to_interleaved_32s_v2(
	const NDIlib_audio_frame_v2_t* p_src,
	NDIlib_audio_frame_interleaved_32s_t* p_dst
);
// Convert to interleaved 32-bit. The FourCC of the source audio frame must be NDIlib_FourCC_audio_type_FLTP.
// Returns true if the conversion was successful.
PROCESSINGNDILIB_API
bool NDIlib_util_audio_to_interleaved_32s_v3(
	const NDIlib_audio_frame_v3_t* p_src,
	NDIlib_audio_frame_interleaved_32s_t* p_dst
);
// Convert from interleaved 32-bit into a v2 (planar float) audio frame.
PROCESSINGNDILIB_API
void NDIlib_util_audio_from_interleaved_32s_v2(
	const NDIlib_audio_frame_interleaved_32s_t* p_src,
	NDIlib_audio_frame_v2_t* p_dst
);
// Convert from interleaved 32-bit. The FourCC of the destination audio frame must be
// NDIlib_FourCC_audio_type_FLTP and its p_data allocated accordingly.
// Returns true if the conversion was successful.
PROCESSINGNDILIB_API
bool NDIlib_util_audio_from_interleaved_32s_v3(
	const NDIlib_audio_frame_interleaved_32s_t* p_src,
	NDIlib_audio_frame_v3_t* p_dst
);
// Convert to interleaved floating point from a v2 (planar float) audio frame.
PROCESSINGNDILIB_API
void NDIlib_util_audio_to_interleaved_32f_v2(
	const NDIlib_audio_frame_v2_t* p_src,
	NDIlib_audio_frame_interleaved_32f_t* p_dst
);
// Convert to interleaved floating point. The FourCC of the source audio frame must be
// NDIlib_FourCC_audio_type_FLTP. Returns true if the conversion was successful.
PROCESSINGNDILIB_API
bool NDIlib_util_audio_to_interleaved_32f_v3(
	const NDIlib_audio_frame_v3_t* p_src,
	NDIlib_audio_frame_interleaved_32f_t* p_dst
);
// Convert from interleaved floating point into a v2 (planar float) audio frame.
PROCESSINGNDILIB_API
void NDIlib_util_audio_from_interleaved_32f_v2(
	const NDIlib_audio_frame_interleaved_32f_t* p_src,
	NDIlib_audio_frame_v2_t* p_dst
);
// Convert from interleaved floating point. The FourCC of the destination audio frame must be
// NDIlib_FourCC_audio_type_FLTP and its p_data allocated accordingly.
// Returns true if the conversion was successful.
PROCESSINGNDILIB_API
bool NDIlib_util_audio_from_interleaved_32f_v3(
	const NDIlib_audio_frame_interleaved_32f_t* p_src,
	NDIlib_audio_frame_v3_t* p_dst
);
// This is a helper function that you may use to convert from 10-bit packed UYVY into 16-bit semi-planar. The
// FourCC on the source is ignored in this function since we do not define a V210 format in NDI. You must
// make sure that there is memory and a stride allocated in p_dst.
PROCESSINGNDILIB_API
void NDIlib_util_V210_to_P216(const NDIlib_video_frame_v2_t* p_src_v210, NDIlib_video_frame_v2_t* p_dst_p216);
// This converts from 16-bit semi-planar (P216) to 10-bit packed UYVY (V210). You must make sure that there
// is memory and a stride allocated in p_dst.
PROCESSINGNDILIB_API
void NDIlib_util_P216_to_V210(const NDIlib_video_frame_v2_t* p_src_p216, NDIlib_video_frame_v2_t* p_dst_v210);