Send image over UDP to Unreal Engine and use it as a Texture

Hi,

I want to send images over UDP from a Python script to Unreal and load it there and create a Texture in the next step out of it. I am currently receiving the image (or anything) but I can not create a texture out of it and apply it to my material.

Here is my simple udp script in Python:

import cv2, imutils, socket
import numpy as np
import time
import base64
import sys

BUFF_SIZE = 65536
server_socket = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
server_socket.setsockopt(socket.SOL_SOCKET,socket.SO_RCVBUF,BUFF_SIZE)
host_name = socket.gethostname()
host_ip = '127.0.0.1'#  socket.gethostbyname(host_name)
print(host_ip)
port = 54001
socket_address = (host_ip,port)
server_socket.bind(socket_address)
print('Listening at:',socket_address)

vid = cv2.VideoCapture(0) #  replace 'rocket.mp4' with 0 for webcam
fps,st,frames_to_count,cnt = (0,0,20,0)
while True:
    msg,client_addr = server_socket.recvfrom(BUFF_SIZE)
    print('GOT connection from ',client_addr)
    WIDTH=400
    while(vid.isOpened()):
        _,frame = vid.read()
        frame = imutils.resize(frame,width=WIDTH)
        encoded,buffer = cv2.imencode('.jpg',frame,[cv2.IMWRITE_JPEG_QUALITY,80])
        #message = base64.b64encode(buffer)
        print(sys.getsizeof(buffer))
        server_socket.sendto(bytes(buffer),client_addr)
        frame = cv2.putText(frame,'FPS: '+str(fps),(10,40),cv2.FONT_HERSHEY_SIMPLEX,0.7,(0,0,255),2)
        cv2.imshow('TRANSMITTING VIDEO',frame)
        key = cv2.waitKey(1) & 0xFF
        if key == ord('q'):
            server_socket.close()
            break
        if cnt == frames_to_count:
            try:
                fps = round(frames_to_count/(time.time()-st))
                st=time.time()
                cnt=0
            except:
                pass
        cnt+=1

And here is my C++ code for receiving the buffer and create a texture out of it:

TArray<uint8> Audp_module::Listen()
{
	TSharedRef<FInternetAddr> targetAddr = ISocketSubsystem::Get(PLATFORM_SOCKETSUBSYSTEM)->CreateInternetAddr();
	uint32 Size;
	TArray<uint8> Buffer;
	if(Socket != nullptr)
	{
		while (Socket->HasPendingData(Size))
		{
			int32 BytesRead = 0;

			ReceivedData.SetNumUninitialized(FMath::Min(Size, 65507u));
			Socket->RecvFrom(ReceivedData.GetData(), ReceivedData.Num(), BytesRead, *targetAddr);
			
			char ansiiData[65535];
			memcpy(ansiiData, ReceivedData.GetData(), BytesRead);
			ansiiData[BytesRead] = 0;
			
			FString data = ANSI_TO_TCHAR(ansiiData);
			//FString data = FString::FromInt(BytesRead);
			const auto encoded = FBase64::Encode(ReceivedData);
			GEngine->AddOnScreenDebugMessage(-1, 5.f, FColor::Red, encoded);
			UE_LOG(LogTemp, Verbose, TEXT("Received: %s"), *encoded);
			//FString data = ANSI_TO_TCHAR(ansiiData);
			GEngine->AddOnScreenDebugMessage(-1, 5.f, FColor::Red, "Message by UDP: " + data);
			FString Left, Right;
			// you need to remove Data:image....,
			encoded.Split(TEXT(","),&Left,&Right);
			
			int32 Utf8Length = FTCHARToUTF8_Convert::ConvertedLength(*encoded, encoded.Len());
			ReceivedData.SetNumUninitialized(Utf8Length);
			FTCHARToUTF8_Convert::Convert((UTF8CHAR*)ReceivedData.GetData(), ReceivedData.Num(), *encoded, encoded.Len());
			bool isDecode = FBase64::Decode(Right, ReceivedData);
			if (isDecode) {
				//TextureToUse = CreateBitTextureAtRuntime(Buffer);
				GEngine->AddOnScreenDebugMessage(-1, 5.f, FColor::Yellow, "Got a texture!!! ");
				return ReceivedData;
			}
			else {
				TextureToUse = nullptr;
				return ReceivedData;
			}
		}
	}
	else
	{
		Socket = FUdpSocketBuilder(SocketDescription)
				.AsNonBlocking()
				.AsReusable()
				.BoundToEndpoint(LocalEndpoint)
				.WithReceiveBufferSize(SendSize)
				.WithReceiveBufferSize(BufferSize)
				.WithBroadcast();
		return Buffer;
	}
	return Buffer;
	
}

UTexture2D* Audp_module::CreateBitTextureAtRuntime(TArray<uint8>& BGRA8PixelData, bool& IsValid)
{
	IsValid = false;
	//UTexture2D* LoadedT2D = nullptr;
	
	//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
      
	//Create T2D!
	if (ImageWrapper.IsValid() && ImageWrapper->SetCompressed(BGRA8PixelData.GetData(), BGRA8PixelData.Num()))
	{ 
		TArray<uint8> UncompressedBGRA;
		if (ImageWrapper->GetRaw(ERGBFormat::BGRA, 8, UncompressedBGRA))
		{
            
			//Valid?
			if(!TextureToUse) return nullptr;
			//~~~~~~~~~~~~~~
             
			//Copy!
			void* TextureData = TextureToUse->PlatformData->Mips[0].BulkData.Lock(LOCK_READ_WRITE);
			FMemory::Memcpy(TextureData, UncompressedBGRA.GetData(), UncompressedBGRA.Num());
			TextureToUse->PlatformData->Mips[0].BulkData.Unlock();

			//Update!
			TextureToUse->UpdateResource();
		}
	}
     
	// Success!
	IsValid = true;
	return TextureToUse;
}

UTexture2D* Audp_module::Base64_ToImage(TArray<uint8> Source)
{
	bool IsValid = false;
	return CreateBitTextureAtRuntime(Source, IsValid);

}

I call Listen() and Base64_ToImage() every Tick in Blueprint. Maybe that is also an issue but if I try to run it, it gets slower and slower over time and the Texture does not update. Here my Blueprint:

I am happy for any help or suggestions!

Best wishes