Is it possible merge / boolean two or more meshes using the project?
[=BulleTime;274445]
Is it possible merge / boolean two or more meshes using the project?
[/]
No, that is an incredibly complex operation and would require the use of an external library like the Carve library.
In case anyone’s interested, I made a version of this class that uses FDynamicMeshBuilder to render the geometry. It’s a little different from the option proposed here in that it uses Vertex and Index buffers to store its geometry information rather than a triangle list, which in my opinion is both easier to define and certainly more efficient. It’s obviously incomplete and doesn’t have collision or blueprint hooks, I’m merely posting it here to show how you could do that and to spark discussion. Since FDynamicMeshBuilder doesn’t implement access to its buffers or a copy constructor, you cannot use it directly as a data storage device, so I had to duplicate some of its functionality.
Component header:
#pragma once
#include "Components/MeshComponent.h"
#include "DynamicMeshBuilder.h"
#include "DynamicMeshComponent.generated.h"
/**
 * Mesh component that stores its geometry as explicit vertex and index
 * buffers (FDynamicMeshVertex + int32 indices) and renders it through an
 * FDynamicMeshBuilder-based scene proxy.
 *
 * NOTE(review): intentionally incomplete — no collision and no Blueprint
 * exposure; geometry is filled in from C++ via the Add* calls below.
 */
UCLASS()
class EMPTYTEST_API UDynamicMeshComponent : public UMeshComponent
{
GENERATED_BODY()
public:
// Begin UPrimitiveComponent interface.
virtual FPrimitiveSceneProxy* CreateSceneProxy() override;
// End UPrimitiveComponent interface.
// Begin UMeshComponent interface.
virtual int32 GetNumMaterials() const override;
// End UMeshComponent interface.
// Begin DynamicMeshBuilder functionality
// Appends one vertex built from its components and returns its index in the
// vertex buffer. The basis determinant sign is packed into TangentZ.W.
int32 AddVertex(
const FVector& InPosition,
const FVector2D& InTextureCoordinate,
const FVector& InTangentX,
const FVector& InTangentY,
const FVector& InTangentZ,
const FColor& InColor
);
// Appends a prebuilt vertex and returns its index in the vertex buffer.
int32 AddVertex(const FDynamicMeshVertex &InVertex);
// Appends a batch of vertices; returns the index of the first one added.
int32 AddVertices(const TArray<FDynamicMeshVertex>& InVertices);
// Appends one triangle as three indices into the vertex buffer.
void AddTriangle(int32 V0,int32 V1,int32 V2);
// Appends caller-built triangle-list index data.
void AddTriangles(const TArray<int32> &InIndices);
// End DynamicMeshBuilder functionality
// Both accessors return copies; the scene proxy snapshots them on creation.
TArray<FDynamicMeshVertex> GetVertices();
TArray<int32> GetIndices();
private:
// Begin USceneComponent interface.
virtual FBoxSphereBounds CalcBounds(const FTransform & LocalToWorld) const override;
// End USceneComponent interface.
TArray<FDynamicMeshVertex> _vertices;
TArray<int32> _indices;
// The proxy reads this component's buffers directly when constructed.
friend class FDynamicMeshSceneProxy;
};
Component implementation:
#include "EmptyTest.h"
#include "DynamicMeshComponent.h"
/**
 * Render-thread mirror of UDynamicMeshComponent. Copies the component's
 * vertex/index arrays at construction time and feeds them to an
 * FDynamicMeshBuilder in GetDynamicMeshElements each frame.
 */
class FDynamicMeshSceneProxy : public FPrimitiveSceneProxy
{
public:
    FDynamicMeshSceneProxy(UDynamicMeshComponent* Component)
        : FPrimitiveSceneProxy(Component)
        // Initializers listed in member declaration order so construction
        // order matches the list (avoids -Wreorder and future surprises).
        , _vertices(Component->GetVertices())
        , _indices(Component->GetIndices())
        , MaterialRelevance(Component->GetMaterialRelevance(GetScene().GetFeatureLevel()))
    {
        // Grab material; fall back to the engine default surface material so
        // the proxy can always render.
        Material = Component->GetMaterial(0);
        if(Material == nullptr)
        {
            Material = UMaterial::GetDefaultMaterial(MD_Surface);
        }
    }
    virtual void GetDynamicMeshElements(const TArray<const FSceneView*>& Views, const FSceneViewFamily& ViewFamily, uint32 VisibilityMap, FMeshElementCollector& Collector) const override
    {
        QUICK_SCOPE_CYCLE_COUNTER( STAT_DynamicMeshSceneProxy_GetDynamicMeshElements );
        // The material render proxy does not depend on the view, so resolve
        // it once instead of once per visible view.
        FMaterialRenderProxy* MaterialProxy = Material->GetRenderProxy(IsSelected());
        for (int32 ViewIndex = 0; ViewIndex < Views.Num(); ViewIndex++)
        {
            if (VisibilityMap & (1 << ViewIndex))
            {
                // A fresh builder is filled for every visible view each
                // frame; fine for small meshes, but large geometry would
                // benefit from cached GPU vertex/index buffers instead.
                FDynamicMeshBuilder MeshBuilder;
                MeshBuilder.AddVertices(_vertices);
                MeshBuilder.AddTriangles(_indices);
                MeshBuilder.GetMesh(GetLocalToWorld(), MaterialProxy, SDPG_World, false, false, ViewIndex, Collector);
            }
        }
    }
    // NOTE(review): no 'override' here on purpose — the base signature for
    // GetViewRelevance is const in newer engine versions; verify this still
    // overrides (and add const/override) against the engine version in use.
    virtual FPrimitiveViewRelevance GetViewRelevance(const FSceneView* View)
    {
        FPrimitiveViewRelevance Result;
        Result.bDrawRelevance = IsShown(View);
        Result.bShadowRelevance = IsShadowCast(View);
        Result.bDynamicRelevance = true;   // geometry is rebuilt dynamically
        MaterialRelevance.SetPrimitiveViewRelevance(Result);
        return Result;
    }
    virtual bool CanBeOccluded() const override
    {
        return !MaterialRelevance.bDisableDepthTest;
    }
    virtual uint32 GetMemoryFootprint(void) const
    {
        return(sizeof(*this) + GetAllocatedSize());
    }
    uint32 GetAllocatedSize(void) const
    {
        // Include the copied geometry so memory stats reflect this proxy's
        // real footprint, not just the base-class bookkeeping.
        return(FPrimitiveSceneProxy::GetAllocatedSize() + _vertices.GetAllocatedSize() + _indices.GetAllocatedSize());
    }
private:
    UMaterialInterface* Material;
    TArray<FDynamicMeshVertex> _vertices;
    TArray<int32> _indices;
    FMaterialRelevance MaterialRelevance;
};
int32 UDynamicMeshComponent::AddVertex(
    const FVector& InPosition,
    const FVector2D& InTextureCoordinate,
    const FVector& InTangentX,
    const FVector& InTangentY,
    const FVector& InTangentZ,
    const FColor& InColor
)
{
    // Assemble the vertex locally, then append it to the vertex buffer.
    FDynamicMeshVertex NewVertex;
    NewVertex.Position = InPosition;
    NewVertex.TextureCoordinate = InTextureCoordinate;
    NewVertex.TangentX = InTangentX;
    NewVertex.TangentZ = InTangentZ;
    // Store the sign of the basis determinant in TangentZ.W (-1 -> 0, +1 -> 255).
    NewVertex.TangentZ.Vector.W = GetBasisDeterminantSign( InTangentX, InTangentY, InTangentZ ) < 0 ? 0 : 255;
    NewVertex.Color = InColor;
    // TArray::Add returns the index of the newly appended element.
    return _vertices.Add(NewVertex);
}
// Appends a prebuilt vertex and returns its index in the vertex buffer.
int32 UDynamicMeshComponent::AddVertex(const FDynamicMeshVertex &InVertex)
{
    // TArray::Add copies the vertex and returns its index directly; the
    // previous placement-new left an unused pointer local behind.
    return _vertices.Add(InVertex);
}
// Appends a batch of vertices; returns the index the first new vertex landed at.
int32 UDynamicMeshComponent::AddVertices(const TArray<FDynamicMeshVertex>& InVertices)
{
    const int32 FirstNewIndex = _vertices.Num();
    _vertices.Append(InVertices);
    return FirstNewIndex;
}
// Appends one triangle to the index buffer. The corners are appended in the
// caller-supplied order, so winding order (and thus facing) is preserved.
void UDynamicMeshComponent::AddTriangle(int32 V0,int32 V1,int32 V2)
{
    const int32 Corners[3] = { V0, V1, V2 };
    for (int32 Corner : Corners)
    {
        _indices.Add(Corner);
    }
}
void UDynamicMeshComponent::AddTriangles(const TArray<int32> &InIndices)
{
_indices.Append(InIndices);
}
// Returns a copy of the vertex buffer. The scene proxy uses this to snapshot
// the geometry at proxy-creation time (the copy keeps the proxy independent
// of later component-side edits).
TArray<FDynamicMeshVertex> UDynamicMeshComponent::GetVertices()
{
return _vertices;
}
// Returns a copy of the index buffer (see GetVertices for why a copy).
TArray<int32> UDynamicMeshComponent::GetIndices()
{
return _indices;
}
// Creates the render-thread proxy; the renderer takes ownership of the
// returned object and calls this whenever the render state is (re)created.
FPrimitiveSceneProxy* UDynamicMeshComponent::CreateSceneProxy()
{
return new FDynamicMeshSceneProxy(this);
}
// This component only ever uses a single material slot (slot 0).
int32 UDynamicMeshComponent::GetNumMaterials() const
{
return 1;
}
// Computes the component's world-space bounds from the stored vertices.
// The original always returned HALF_WORLD_MAX bounds, which defeats
// frustum/occlusion culling and breaks shadow bounds for every instance.
FBoxSphereBounds UDynamicMeshComponent::CalcBounds(const FTransform & LocalToWorld) const
{
    if (_vertices.Num() == 0)
    {
        // No geometry yet: keep the old "always visible" placeholder so an
        // empty component is never culled away before it is filled in.
        FBoxSphereBounds NewBounds;
        NewBounds.Origin = FVector::ZeroVector;
        NewBounds.BoxExtent = FVector(HALF_WORLD_MAX,HALF_WORLD_MAX,HALF_WORLD_MAX);
        NewBounds.SphereRadius = FMath::Sqrt(3.0f * FMath::Square(HALF_WORLD_MAX));
        return NewBounds;
    }
    // Tight local-space box around the vertex positions, transformed into
    // world space for the renderer.
    FBox LocalBox(0);
    for (const FDynamicMeshVertex& Vertex : _vertices)
    {
        LocalBox += Vertex.Position;
    }
    return FBoxSphereBounds(LocalBox).TransformBy(LocalToWorld);
}
Usage example (two-sided triangle):
// Usage example: belongs in an actor constructor (it uses ObjectInitializer
// and ConstructorHelpers, which only work there).
UDynamicMeshComponent* mesh = ObjectInitializer.CreateDefaultSubobject<UDynamicMeshComponent>(this, TEXT("DynamicTriangle"));
static ConstructorHelpers::FObjectFinder<UMaterialInterface> material(TEXT("Material'/Game/Materials/BaseColor.BaseColor'"));
mesh->SetMaterial(0, material.Object);
// Orthonormal tangent basis shared by all three vertices.
FVector const vecX(1.f, 0.f, 0.f);
FVector const vecY(0.f, 1.f, 0.f);
FVector const vecZ(0.f, 0.f, 1.f);
// One triangle in the YZ plane; all UVs are (0,0) in this example.
mesh->AddVertex(FVector(0.f, 0.f, 0.f),FVector2D::ZeroVector, vecX, vecY, vecZ, FColor::Blue);
mesh->AddVertex(FVector(0.f, 100.f, 0.f),FVector2D::ZeroVector, vecX, vecY, vecZ, FColor::Blue);
mesh->AddVertex(FVector(0.f, 0.f, 100.f),FVector2D::ZeroVector, vecX, vecY, vecZ, FColor::Blue);
// Same triangle added with both winding orders so it is visible from both sides.
mesh->AddTriangle(0,1,2);
mesh->AddTriangle(0,2,1);
RootComponent = mesh;
Stuck trying to get ImpactPoint from LineTraceComponent in mesh-space
Can anyone explain what transforms are needed to get a collision’s ImpactPoint in mesh-space, i.e. exactly the same coordinates that are used in the triangle-list input.
// Trace 100000 units along the camera's view direction against the component.
const bool hit = customMesh->LineTraceComponent(OutHitResult, cameraManager->GetCameraLocation(), cameraManager->GetCameraLocation() + (cameraManager->GetCameraRotation().Vector()*100000.0), LineParams);
// NOTE(review): both attempts below use InverseTransformVector, which only
// applies the inverse rotation/scale (it ignores translation) — so it cannot
// map a world-space *position* like ImpactPoint into local space. That is
// why neither of these works (resolved further down with
// InverseTransformPosition).
auto point1 = customMesh->GetComponentToWorld().InverseTransformVector(OutHitResult.ImpactPoint);
auto point2 = customMesh->GetRelativeTransform().InverseTransformVector(OutHitResult.ImpactPoint);
I’ve tried a number of different transforms, including the two above as well as transforms from the parent object to the customMesh and still no luck. Any ideas?
Looks like the SceneProxy->GetLocalToWorld() matrix is what gets you the impact in mesh-space:
// InverseTransformPosition applies the full inverse transform (including
// translation), mapping the world-space impact point back into mesh space.
auto p = customMesh->SceneProxy->GetLocalToWorld().InverseTransformPosition(OutHitResult.ImpactPoint);
Hey guys! I wrote a heightmap generator and connected it to a Delaunay triangulator. Everything works just fine, but I don’t have a clue about UVs. How do I calculate them? Just by playing with the values I found that every triangle does indeed get a texture, but well, see the results:
Can you help me? I would really appreciate that!
Here is my current code:
// NOTE(review): this snippet depends on outer state that is not shown here:
// inStream, buffer, nodes, vectorPoints, mesh, and the UProceduralMeshComponent
// member. It reads a triangulation from a stream and builds the triangle list.
inStream >> vertexCount >> buffer >> buffer; // first element is the number of elements // skip second and third
TArray<FProceduralMeshTriangle> triangles;
// NOTE(review): despite the name, 'vertexCount' is used as the number of
// faces — each iteration reads one triangle (three 1-based node indices).
for (int i = 0; i < vertexCount; i++){
FProceduralMeshTriangle triangle;
FProceduralMeshVertex v0, v1, v2;
int i1, i2, i3;
// 'buffer' swallows the first token per line — presumably a face id from the
// triangulator output; verify against the actual file format.
inStream >> buffer >> i1 >> i2 >> i3;
// Convert the file's 1-based indices to 0-based array indices.
i1--;
i2--;
i3--;
// XY come from the triangulated node, Z is looked up in the heightmap grid.
v0.Position.X = nodes[i1].X;
v0.Position.Y = nodes[i1].Y;
v0.Position.Z = vectorPoints[nodes[i1].X][nodes[i1].Y].first;
//v0.Position.Z = 5;
// Hard-coded per-corner UVs: every triangle gets the same three texture
// corners regardless of its position — the cause of the scrambled texturing
// discussed below.
v0.U = 0.0;
v0.V = 0.5;
v1.Position.X = nodes[i2].X;
v1.Position.Y = nodes[i2].Y;
v1.Position.Z = vectorPoints[nodes[i2].X][nodes[i2].Y].first;
v1.U = 0.5;
v1.V = 0.0;
v2.Position.X = nodes[i3].X;
v2.Position.Y = nodes[i3].Y;
v2.Position.Z = vectorPoints[nodes[i3].X][nodes[i3].Y].first;
v2.U = 1.0;
v2.V = 0.5;
triangle.Vertex0 = v0;
triangle.Vertex1 = v1;
triangle.Vertex2 = v2;
triangles.Add(triangle);
}
// Constructor-only helpers: load a material and hand the triangle list to the
// procedural mesh component.
static ConstructorHelpers::FObjectFinder<UMaterial> Material(TEXT("Material'/Game/StarterContent/Materials/M_Wood_Walnut.M_Wood_Walnut'"));
mesh = CreateDefaultSubobject<UProceduralMeshComponent>(TEXT("ProceduralCube"));
mesh->SetMaterial(0, Material.Object);
mesh->SetProceduralMeshTriangles(triangles);
RootComponent = mesh;
Okay, I added the following code:
// Normalises each vertex's U across the mesh's X extent and mirrors it into V.
// FIX: the forum's italics markup ate the "[i]" subscripts, leaving the
// uncompilable "triangles*.Vertex0" etc.; the indexing is restored here.
// NOTE(review): V = 1 - U ties V to the X axis as well; for an XY-planar
// terrain V should presumably come from Position.Y (normalised over the Y
// extent) — likely the cause of the remaining distortion described below.
double range = (lowestX - highestX)*-1;   // total X extent of the mesh
double offset = 0 - lowestX;              // shift so X starts at 0
for (int i = 0; i < triangles.Num(); i++){
    FProceduralMeshVertex vT;
    vT = triangles[i].Vertex0;
    vT.U = (vT.Position.X + offset) / range;
    vT.V = 1 - vT.U;
    triangles[i].Vertex0 = vT;
    vT = triangles[i].Vertex1;
    vT.U = (vT.Position.X + offset) / range;
    vT.V = 1 - vT.U;
    triangles[i].Vertex1 = vT;
    vT = triangles[i].Vertex2;
    vT.U = (vT.Position.X + offset) / range;
    vT.V = 1 - vT.U;
    triangles[i].Vertex2 = vT;
}
Now it doesn’t look like a nuclear accident anymore, but the texture is stretched across the whooole mesh instead of repeating, and it is still slightly distorted…
Do you see the mistake? And how can I make the texture repeat itself instead of being stretched about the whole mesh? (It is a large landscape mesh)
You could skip the UV-calculation and use a triplanar material instead.
Or you can multiply your texture coordinates by a factor. For example 10.
How would I use a triplanar material? I’ve never heard of it (but I am generally very inexperienced with materials)
Okay, I think I solved it (inside the material). I used a technique I remembered from interior water materials, making the texture depend on its world location: (WorldPosition/1200) -> Mask RG -> Texture UVs
[=;274869]
How would I use a triplanar material? I’ve never heard of it (but I am generally very inexperienced with materials)
[/]
do an Internet search
Basically, you project the texture on the polies 3 times, once from each direction, and blend the results together with weights depending on the surface normal direction. Using world position instead of uv is definitely a good idea with procedural meshes I would say.
[=;274869]
How would I use a triplanar material? I’ve never heard of it (but I am generally very inexperienced with materials)
[/]
There is a tutorial on :
https://www…com/playlist?list=PLXePZFopdLKeBfbRnaojJj3sx7FAlRFbM
The procedural mesh code got added to master branch a few days ago as a .
https://.com/EpicGames/UnrealEngine/commit/5b75599e0096d94d05c1317944815234cb8e3966
Thanks, worked
Are the blue spots inside your terrain little “lakes”, that behave like water or is that just the texture that makes it look so?
Currently just a height-layered material but I am currently working on making it “real” water. Why did you ask?
Edit: But I will not make them really behave like water, because its an RTS Game. I mean, the lakes will have panned Materials and stuff but no water-physics calculation or what so ever
But is it possible that there are some problems with ray tracing on procedural meshes? Because oftentimes my HitResult gives me a position about ±300 off from the position I clicked (just on the Z axis; X and Y are correct), or just doesn’t give me a hit at all…
I’m searching for a way to implement water with the procedural mesh, similar to what you can do with a physics - water - volume but unfortunately that only works with BSPs as far as I found out. I’ll probably go with a second mesh per chunk that simulates kind of water of physics, although I don’t really like that approach. Therefore I thought you might have found a way to use the water volume with a runtime generated mesh
Concerning the RayTracing I can’t really help you but I think that it was already discussed in this thread, at least I read something similar somewhere in the forums a couple of days ago
Do you mean you have a procedural mesh and a part of it should be water, or you have a procedural mesh that should *be *the water?
Concerning the trace stuff, I read that it doesn’t seem to be possible yet… Then I have to implement it on my own D:
First thing would be great but I’ll probably do it so that I have two meshes - one for the solid terrain and one which will in fact *be *the water. But there are other things that I’ll implement first so there’s enough time for me to maybe improve that approach
wow, after two hours of implementing a fast, pure inline native C line-triangle-intersection algorithm I noticed the problem only occurs when spawned by Blueprint (and this was just for test purposes anyway). Anyway, I am happy
(concerning the water topic->)
I never heard of a single mesh with different physics simulations and materials (but that doesn’t mean it doesn’t exist :D)
So, whats wrong with a submesh?
And so you are also implementing your main landscape as a procedural mesh? It would be cool to stay in contact with someone who does similar things, for questions and stuff