Need help with a texture / dynamic material for rendering a video stream onto it

Hi folks,

I'm pretty new to Unreal Engine, so I'm running into some trouble right now.
My goal: I want to access an Intel RealSense camera (D435) and stream its RGB feed to a dynamic texture/material.

So I started writing a plugin for third-party libraries following Valentin Kraft's tutorial "Unreal Engine: Including a Third-Party Library (on the example of the Point Cloud Library and Boost)", and it basically works (I can access the RealSense API without errors).
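
For context, a minimal standalone librealsense2 capture loop looks roughly like this (just a sketch of what I'm testing against; the include path may differ from the <rs.hpp> I use inside the plugin):

// Standalone librealsense2 sketch: open the default pipeline and pull a few color frames
#include <librealsense2/rs.hpp>
#include <iostream>

int main()
{
    rs2::pipeline pipe;          // owns the device and the streaming session
    pipe.start();                // default config enables the D435 color stream

    for (int i = 0; i < 30; ++i)
    {
        rs2::frameset frames = pipe.wait_for_frames();
        rs2::video_frame color = frames.get_color_frame();
        std::cout << "Color frame: " << color.get_width() << "x" << color.get_height()
                  << ", " << color.get_bytes_per_pixel() << " bytes per pixel" << std::endl;
    }
    return 0;
}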

Then I started writing an actor that provides a UFUNCTION to update a dynamic texture, using these threads as help: "Setting Up a Dynamic Texture" (C++ - Epic Developer Community Forums) and "UTexture2D::UpdateTextureRegions has no effect" (Asset Creation - Epic Developer Community Forums),
as well as the SRWorks plugin from HTC (https://developer.vive.com/resources/knowledgebase/intro-vive-srworks-sdk/), where they also stream the Vive Pro cameras to dynamic textures.

Now here is my code so far:


// Fill out your copyright notice in the Description page of Project Settings.

#pragma once

#include "CoreMinimal.h"
#include "GameFramework/Actor.h"
#include <Engine/Texture2D.h>
#include <rs.hpp>
#include "RealSenseComponent.generated.h"



UCLASS(ClassGroup = (Custom))
class REALSENSEPLUGIN_API ARealSenseComponent : public AActor
{
    GENERATED_BODY()

public:
    // Sets default values for this actor's properties
    ARealSenseComponent();

    UFUNCTION(BlueprintCallable, Category = "RealSense")
    void CreateUpdateableTexture(int img_width, int img_height);

    UFUNCTION(BlueprintCallable, Category = "RealSense")
    UTexture2D* ReceiveRGBFrame();

protected:
    // Called when the game starts
    virtual void BeginPlay() override;

    /** Receive a frame from the RealSense device and update the texture */
    bool receiveFrame();

    // Texture to update
    UTexture2D* TextureFromVideo = nullptr;
    FUpdateTextureRegion2D* textureVideoRegion = nullptr;

    // RealSense device pipeline
    rs2::pipeline* pipeline = nullptr;

    /** Update texture regions, from https://wiki.unrealengine.com/Dynamic_Textures */
    void UpdateTextureRegions(UTexture2D* Texture, int32 MipIndex, uint32 NumRegions, FUpdateTextureRegion2D* Regions, uint32 SrcPitch, uint32 SrcBpp, uint8* SrcData, bool bFreeData);

    // From the SRWorks plugin: cleanup callback passed to UpdateTextureRegions
    void TextureRegionCleanUp(uint8* rawData, const FUpdateTextureRegion2D* region) {};
    TFunction<void(uint8*, const FUpdateTextureRegion2D*)> texCleanUpFP = [this](uint8* rawData, const FUpdateTextureRegion2D* region) { TextureRegionCleanUp(rawData, region); };

public:
    // Called every frame
    virtual void Tick(float DeltaTime) override;

    bool cameraWorks = false;
};

And in the cpp file:



#include "RealSenseComponent.h"

ARealSenseComponent::ARealSenseComponent()
{
}

void ARealSenseComponent::CreateUpdateableTexture(int width, int height)
{
    TextureFromVideo = UTexture2D::CreateTransient(width, height);
    TextureFromVideo->AddToRoot();
    TextureFromVideo->UpdateResource();
    textureVideoRegion = new FUpdateTextureRegion2D(0, 0, 0, 0, width, height);
}

UTexture2D* ARealSenseComponent::ReceiveRGBFrame()
{
    if (cameraWorks) {
        TextureFromVideo->UpdateResource();
        if (!receiveFrame()) {
            UE_LOG(RealSenseLog, Error, TEXT("Could not receive a Frame from camera!"));
        }
        else {
            UE_LOG(RealSenseLog, Log, TEXT("Received Frame!"));
        }
    }
    else {
        UE_LOG(RealSenseLog, Log, TEXT("CameraWorks is false"));
    }
    return TextureFromVideo;
}

// Called when the game starts
void ARealSenseComponent::BeginPlay()
{
    Super::BeginPlay();

    if (IRealSenseModule::IsAvailable()) {
        int x = IRealSenseModule::Get().CheckRealsenseCamera();
        UE_LOG(RealSenseLog, Log, TEXT("CheckRealsenseCamera returned: %d"), x);
    }
    else {
        UE_LOG(RealSenseLog, Warning, TEXT("RealSenseModule not available"));
    }

    try {
        pipeline = new rs2::pipeline();
        pipeline->start();
        cameraWorks = true;
    }
    catch (const std::exception& e) {
        UE_LOG(RealSenseLog, Error, TEXT("Realsense initialization error: %s"), *FString(e.what()));
    }

    try {
        rs2::frameset frames = pipeline->wait_for_frames();
        rs2::video_frame colorFrame = frames.get_color_frame();
        int width = colorFrame.get_width();
        int height = colorFrame.get_height();
        CreateUpdateableTexture(width, height);
    }
    catch (const std::exception& e) {
        UE_LOG(RealSenseLog, Error, TEXT("Realsense video stream error: %s"), *FString(e.what()));
        cameraWorks = false;
    }
}

bool ARealSenseComponent::receiveFrame()
{
    try {
        rs2::frameset frames = pipeline->wait_for_frames();
        rs2::video_frame colorFrame = frames.get_color_frame();
        uint8* data = (uint8*)(colorFrame.get_data());
        int height = colorFrame.get_height();
        int width = colorFrame.get_width();
        int channels = colorFrame.get_bytes_per_pixel();
        int bits = colorFrame.get_bits_per_pixel();
        rs2_format format = colorFrame.get_profile().format();
        int end = width * height * channels;

        UE_LOG(RealSenseLog, Log, TEXT("Image Received. Resolution: %d/%d, Channels: %d, Format: %s"), width, height, channels, *FString(rs2_format_to_string(format)));
        UE_LOG(RealSenseLog, Log, TEXT("First/Last pixel: (%d/%d/%d), (%d/%d/%d)"), data[0], data[1], data[2], data[end - 3], data[end - 2], data[end - 1]);

        // Tried different updating methods
        TextureFromVideo->UpdateTextureRegions(DBL_MAX_10_EXP, 1, textureVideoRegion, static_cast<uint32>(width * bits), bits, data, texCleanUpFP);

        //UpdateTextureRegions(TextureFromVideo, DBL_MAX_10_EXP, 1, textureVideoRegion, static_cast<uint32>(width * bits), bits, data, false);
    }
    catch (const rs2::error& e) {
        UE_LOG(RealSenseLog, Error, TEXT("%s"), *FString(e.what()));
        cameraWorks = false;
    }
    catch (const std::exception& e) {
        UE_LOG(RealSenseLog, Error, TEXT("%s"), *FString(e.what()));
        cameraWorks = false;
    }

    return cameraWorks;
}


// Called every frame
void ARealSenseComponent::Tick(float DeltaTime)
{
    //TextureFromVideo->UpdateResource();
    Super::Tick(DeltaTime);
}

void ARealSenseComponent::UpdateTextureRegions(UTexture2D* Texture, int32 MipIndex, uint32 NumRegions, FUpdateTextureRegion2D* Regions, uint32 SrcPitch, uint32 SrcBpp, uint8* SrcData, bool bFreeData)
{
    if (Texture && Texture->Resource)
    {
        struct FUpdateTextureRegionsData
        {
            FTexture2DResource* Texture2DResource;
            int32 MipIndex;
            uint32 NumRegions;
            FUpdateTextureRegion2D* Regions;
            uint32 SrcPitch;
            uint32 SrcBpp;
            uint8* SrcData;
        };

        FUpdateTextureRegionsData* RegionData = new FUpdateTextureRegionsData;

        RegionData->Texture2DResource = (FTexture2DResource*)Texture->Resource;
        RegionData->MipIndex = MipIndex;
        RegionData->NumRegions = NumRegions;
        RegionData->Regions = Regions;
        RegionData->SrcPitch = SrcPitch;
        RegionData->SrcBpp = SrcBpp;
        RegionData->SrcData = SrcData;

        ENQUEUE_UNIQUE_RENDER_COMMAND_TWOPARAMETER(
            UpdateTextureRegionsData,
            FUpdateTextureRegionsData*, RegionData, RegionData,
            bool, bFreeData, bFreeData,
            {
                for (uint32 RegionIndex = 0; RegionIndex < RegionData->NumRegions; ++RegionIndex)
                {
                    int32 CurrentFirstMip = RegionData->Texture2DResource->GetCurrentFirstMip();
                    if (RegionData->MipIndex >= CurrentFirstMip)
                    {
                        RHIUpdateTexture2D(
                            RegionData->Texture2DResource->GetTexture2DRHI(),
                            RegionData->MipIndex - CurrentFirstMip,
                            RegionData->Regions[RegionIndex],
                            RegionData->SrcPitch,
                            RegionData->SrcData
                            + RegionData->Regions[RegionIndex].SrcY * RegionData->SrcPitch
                            + RegionData->Regions[RegionIndex].SrcX * RegionData->SrcBpp
                        );
                    }
                }
                if (bFreeData)
                {
                    FMemory::Free(RegionData->Regions);
                    FMemory::Free(RegionData->SrcData);
                }
                delete RegionData;
            });
    }
}
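
One thing I'm unsure about is the pixel format: UTexture2D::CreateTransient defaults to PF_B8G8R8A8 (4 bytes per pixel), while the RealSense color stream is, as far as I understand, RGB8 (3 bytes per pixel) by default, and I'm also not sure whether SrcBpp should be bytes rather than bits per pixel. If that is the problem, I imagine the frame would need to be repacked before the update, roughly like this (just a sketch of what I mean, this is NOT in my current code; ConvertRGB8ToBGRA8 and ConvertedBuffer are made-up names):

// Sketch: repack an RGB8 RealSense frame into a BGRA8 buffer matching PF_B8G8R8A8
TArray<uint8> ConvertedBuffer;

void ConvertRGB8ToBGRA8(const uint8* Src, int32 Width, int32 Height, TArray<uint8>& Dst)
{
    Dst.SetNumUninitialized(Width * Height * 4);
    for (int32 i = 0; i < Width * Height; ++i)
    {
        Dst[i * 4 + 0] = Src[i * 3 + 2]; // B
        Dst[i * 4 + 1] = Src[i * 3 + 1]; // G
        Dst[i * 4 + 2] = Src[i * 3 + 0]; // R
        Dst[i * 4 + 3] = 255;            // A
    }
}

// The update call would then use mip index 0, 4 bytes per pixel and a matching pitch, e.g.:
// ConvertRGB8ToBGRA8(data, width, height, ConvertedBuffer);
// UpdateTextureRegions(TextureFromVideo, 0, 1, textureVideoRegion,
//     static_cast<uint32>(width * 4), 4, ConvertedBuffer.GetData(), false);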

As learned from the thread "How do you change the texture on an object with blueprint?" (Rendering - Epic Developer Community Forums), I created a Blueprint with a plane, made its material dynamic, and try to update it every Tick with the texture returned by ReceiveRGBFrame.
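
In C++ terms, the Blueprint does roughly the following (just a sketch of the logic; BaseMaterial, PlaneMeshComponent, RealSenseActor and the "VideoTexture" parameter name stand in for the references and names my Blueprint and material actually use):

// Once, e.g. at BeginPlay: create a dynamic instance of the plane's material
UMaterialInstanceDynamic* DynMaterial = UMaterialInstanceDynamic::Create(BaseMaterial, this);
PlaneMeshComponent->SetMaterial(0, DynMaterial);

// Every Tick: fetch the latest frame and push it into the material's texture parameter
UTexture2D* Frame = RealSenseActor->ReceiveRGBFrame();
if (Frame)
{
    DynMaterial->SetTextureParameterValue(FName("VideoTexture"), Frame);
}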

But when I run the game, the material never gets updated. The camera itself streams fine, as the log output from receiveFrame shows.

I have spent way too much time and headaches trying to get this running, and now I don't know what else to try. Any suggestions, please?

Fixed: