My colleague and I managed to fix this (mostly). Hold on to your hats!
We had to create our own base class in C++ for the Blueprint (Blueprint-only users, do not worry, you can find code to copy and paste below).
In the header, we created a UProperty for a scene capture component 2D that could be fed into a function. We also created a Calculate FOV function and an Update Portal View Projection Matrix Parameters function.
The Calculate FOV function takes a player controller as input. It checks whether an HMD is connected and, if so, grabs the FOV from the VR camera, since the FOV differs between playing in VR and on a regular monitor.
The Update Portal View Projection Matrix Parameters function takes a dynamic material instance, a player camera transform, and a player controller.
First of all we get the scene capture component 2D's capture size (x, y). We then grab the view matrix and view location from the player's camera transform, and swap the axes to match Unreal's coordinate space so that Z is up. Lastly we grab the scene capture component 2D's FOV.
NOW THE MATH BEGINS!
If the viewport is wider than it is tall, the XAxisMultiplier is 1 whilst the YAxisMultiplier is viewport.x / viewport.y.
Otherwise the XAxisMultiplier is viewport.y / viewport.x and the YAxisMultiplier is 1. (For example, a 1920x1080 capture gives an XAxisMultiplier of 1 and a YAxisMultiplier of 1920 / 1080 ≈ 1.78.)
We then create a projection matrix, feeding the FOV value, the axis multipliers, and the near and far clipping plane values (we used 10 and 1000) into the constructor.
Then a view projection matrix is created by multiplying the view matrix and the projection matrix.
We then break the VP matrix up into its column components (for the X, Y, Z and W axes) and feed them into our dynamic material instance as vector parameters, packing the matrix's bottom row into the alpha channels.
HERE IS ALL THE CODE SO FAR:
.h
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "GameFramework/Actor.h"
#include "Kismet/GameplayStatics.h"
#include "Camera/PlayerCameraManager.h"
#include "Engine/SceneCapture2D.h"
#include "Classes/Components/SceneCaptureComponent2D.h"
#include "MyTestPortalActor.generated.h"
class UMaterialInstanceDynamic;

UCLASS()
class AMyTestPortalActor : public AActor
{
    GENERATED_BODY()

public:
    // The scene capture component that renders the portal's view; assign it from the Blueprint
    UPROPERTY(VisibleAnywhere, BlueprintReadWrite, Category = "RDSceneCapComp")
    USceneCaptureComponent2D* RDSceneCapComp2D;

    // Sets default values for this actor's properties
    AMyTestPortalActor();

    // Called when the game starts or when spawned
    virtual void BeginPlay() override;

    // Called every frame
    virtual void Tick(float DeltaSeconds) override;

    // Returns the current camera FOV in degrees, taking a connected HMD into account
    UFUNCTION(BlueprintCallable, Category = "RDMatrix")
    float CalculateFOV(APlayerController* playerController);

    // Builds the view-projection matrix for the capture and feeds it into the material as vector parameters
    UFUNCTION(BlueprintCallable, Category = "RDMatrix")
    void UpdatePortalVPMParameters(UMaterialInstanceDynamic* material, FTransform PlayerCameraXForm, APlayerController* playerController);
};
.cpp
#include "MyTestPortalActor.h"
#include "Kismet/GameplayStatics.h"
#include "Classes/Components/SceneCaptureComponent2D.h"
#include "Classes/Engine/TextureRenderTarget2D.h"
#include "Classes/Camera/CameraComponent.h"
#include "Runtime/HeadMountedDisplay/Public/IHeadMountedDisplay.h"
// Sets default values
AMyTestPortalActor::AMyTestPortalActor()
{
    // Set this actor to call Tick() every frame. You can turn this off to improve performance if you don't need it.
    PrimaryActorTick.bCanEverTick = true;
    RDSceneCapComp2D = nullptr;
}
// Called when the game starts or when spawned
void AMyTestPortalActor::BeginPlay()
{
    Super::BeginPlay();
    this->SetTickGroup(TG_PostPhysics);
}
// Called every frame
void AMyTestPortalActor::Tick(float DeltaTime)
{
    Super::Tick(DeltaTime);
    this->SetTickGroup(TG_PostPhysics);
}
float AMyTestPortalActor::CalculateFOV(APlayerController* playerController)
{
    float fov = 90.0f;
    if (playerController != nullptr)
    {
        if (playerController->PlayerCameraManager != nullptr)
        {
            fov = playerController->PlayerCameraManager->GetFOVAngle();
        }
    }
    // FOV changes when we have a VR Headset enabled
    if (GEngine->HMDDevice.IsValid() && GEngine->IsStereoscopic3D())
    {
        float HFOV, VFOV;
        GEngine->HMDDevice->GetFieldOfView(HFOV, VFOV);
        if (VFOV > 0 && HFOV > 0)
        {
            fov = FMath::Max(HFOV, VFOV);
            // AspectRatio won't be used until bConstrainAspectRatio is set to true,
            // but it doesn't really matter since HMD calcs its own projection matrix.
            //OutViewInfo.AspectRatio = HFOV / VFOV;
            //OutViewInfo.bConstrainAspectRatio = true;
        }
    }
    return fov;
}
void AMyTestPortalActor::UpdatePortalVPMParameters(UMaterialInstanceDynamic* material, FTransform PlayerCameraXForm, APlayerController* playerController)
{
    // Bail out if we have nothing to read from or write to
    if (material == nullptr || RDSceneCapComp2D == nullptr || RDSceneCapComp2D->TextureTarget == nullptr)
    {
        return;
    }

    float captureSizeX = RDSceneCapComp2D->TextureTarget->GetSurfaceWidth();
    float captureSizeY = RDSceneCapComp2D->TextureTarget->GetSurfaceHeight();

    const FTransform& Transform = PlayerCameraXForm;
    FMatrix ViewMatrix = Transform.ToInverseMatrixWithScale();
    FVector ViewLocation = Transform.GetTranslation();

    // swap axis st. x=z,y=x,z=y (unreal coord space) so that z is up
    ViewMatrix = ViewMatrix * FMatrix(
        FPlane(0, 0, 1, 0),
        FPlane(1, 0, 0, 0),
        FPlane(0, 1, 0, 0),
        FPlane(0, 0, 0, 1));

    const float FOV = RDSceneCapComp2D->FOVAngle * (float)PI / 360.0f;

    // Build projection matrix
    float XAxisMultiplier;
    float YAxisMultiplier;
    if (captureSizeX > captureSizeY)
    {
        // if the viewport is wider than it is tall
        XAxisMultiplier = 1.0f;
        YAxisMultiplier = captureSizeX / captureSizeY;
    }
    else
    {
        // if the viewport is taller than it is wide
        XAxisMultiplier = captureSizeY / captureSizeX;
        YAxisMultiplier = 1.0f;
    }

    FMatrix ProjectionMatrix = FReversedZPerspectiveMatrix(
        FOV,
        FOV,
        XAxisMultiplier,
        YAxisMultiplier,
        10,
        1000
    );

    const FMatrix ViewProjectionMatrix = ViewMatrix * ProjectionMatrix;

    // Each column of the matrix goes into one vector parameter, with the bottom (W) row packed into the alpha channels
    FVector Xaxis = ViewProjectionMatrix.GetColumn(0);
    FVector Yaxis = ViewProjectionMatrix.GetColumn(1);
    FVector Zaxis = ViewProjectionMatrix.GetColumn(2);
    FVector Waxis = ViewProjectionMatrix.GetColumn(3);
    float XaxisW = ViewProjectionMatrix.M[3][0];
    float YaxisW = ViewProjectionMatrix.M[3][1];
    float ZaxisW = ViewProjectionMatrix.M[3][2];
    float WaxisW = ViewProjectionMatrix.M[3][3];

    material->SetVectorParameterValue("PortalVPM_Xaxis", FLinearColor(Xaxis.X, Xaxis.Y, Xaxis.Z, XaxisW));
    material->SetVectorParameterValue("PortalVPM_Yaxis", FLinearColor(Yaxis.X, Yaxis.Y, Yaxis.Z, YaxisW));
    material->SetVectorParameterValue("PortalVPM_Zaxis", FLinearColor(Zaxis.X, Zaxis.Y, Zaxis.Z, ZaxisW));
    material->SetVectorParameterValue("PortalVPM_Waxis", FLinearColor(Waxis.X, Waxis.Y, Waxis.Z, WaxisW));
}
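Both functions are BlueprintCallable, so you can simply call them every frame from your portal Blueprint (that is what we do). If you would rather stay in C++, here is a minimal sketch of how the Tick above could do the same job. Note that PortalMID, the dynamic material instance on the portal mesh, is a hypothetical member you would need to add yourself, so adapt the names to your own setup.

// Hypothetical example only: driving the update from C++ rather than Blueprint.
// Assumes the header also declares something like:
// UPROPERTY() UMaterialInstanceDynamic* PortalMID;
void AMyTestPortalActor::Tick(float DeltaTime)
{
    Super::Tick(DeltaTime);
    this->SetTickGroup(TG_PostPhysics);

    APlayerController* PC = UGameplayStatics::GetPlayerController(GetWorld(), 0);
    if (PC != nullptr && PC->PlayerCameraManager != nullptr && RDSceneCapComp2D != nullptr && PortalMID != nullptr)
    {
        // Keep the capture FOV in sync with the player's (possibly HMD-driven) FOV
        RDSceneCapComp2D->FOVAngle = CalculateFOV(PC);

        // Build the player camera transform and push the new view-projection matrix into the material
        const FTransform CameraXForm(PC->PlayerCameraManager->GetCameraRotation(), PC->PlayerCameraManager->GetCameraLocation());
        UpdatePortalVPMParameters(PortalMID, CameraXForm, PC);
    }
}

Either way, make sure the scene capture's FOVAngle is updated from CalculateFOV before the projection matrix is built, otherwise the VR and flat-screen views will not line up.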
Now how does the material know how to use these variables I wonder?
We created our own version of the engine's screen space UV function, as it does not work properly in VR.
So below is how we set up the material (with the function's output UVs going into the vertex shader's customised UVs rather than the pixel shader UVs).
And here is a screenshot of the function.
In order for the custom node to work, we added some HLSL that could make sense of the crazy variables we are feeding into it.
Here is the HLSL.
float4x4 vpm = float4x4(
    PortalVPM_Xaxis.x, PortalVPM_Yaxis.x, PortalVPM_Zaxis.x, PortalVPM_Waxis.x,
    PortalVPM_Xaxis.y, PortalVPM_Yaxis.y, PortalVPM_Zaxis.y, PortalVPM_Waxis.y,
    PortalVPM_Xaxis.z, PortalVPM_Yaxis.z, PortalVPM_Zaxis.z, PortalVPM_Waxis.z,
    PortalVPM_XaxisW,  PortalVPM_YaxisW,  PortalVPM_ZaxisW,  PortalVPM_WaxisW);
return mul(float4(In.xyz, 1), vpm);
As you can see, it takes all the parameters we are feeding in and rebuilds the view projection matrix to be used in the rest of the custom maths (the PortalVPM_XaxisW, PortalVPM_YaxisW, PortalVPM_ZaxisW and PortalVPM_WaxisW inputs are the alpha channels of the vector parameters we set from C++).
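If it helps to picture what that matrix is then used for, here is a rough CPU-side illustration (not our actual material graph, just a sketch of the standard clip-space maths) of how a world position multiplied by the view projection matrix becomes a 0-1 screen UV after the perspective divide:

// Illustration only: the conceptual steps that follow the custom node's mul().
// WorldPos is a point on the portal surface; ViewProjectionMatrix is the matrix
// built in UpdatePortalVPMParameters above.
FVector2D WorldPosToPortalUV(const FMatrix& ViewProjectionMatrix, const FVector& WorldPos)
{
    // Transform into clip space (same as mul(float4(In.xyz, 1), vpm) in the HLSL)
    const FVector4 Clip = ViewProjectionMatrix.TransformFVector4(FVector4(WorldPos, 1.0f));

    // Perspective divide gives normalised device coordinates roughly in [-1, 1]
    const float InvW = 1.0f / Clip.W;
    const FVector2D NDC(Clip.X * InvW, Clip.Y * InvW);

    // Remap to [0, 1] texture UVs; Y is flipped because texture V runs top-down
    return FVector2D(NDC.X * 0.5f + 0.5f, 1.0f - (NDC.Y * 0.5f + 0.5f));
}

The custom maths in the material does the equivalent divide and remap on the GPU, which is what makes the scene capture's render target line up with where the portal sits on screen.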
Admittedly this got a little out of my depth and my colleague helped out a lot. But if you have any questions about how or why things need to be done, please give me a shout and I will make sure I get an answer for you.