最近在工作中遇见了游戏场景内3Dui和曲面ui的需求,大致看了下ue4 4.27的实现方式,简单来说就是将widget渲染到一个RT上,然后将RT赋给一个动态材质,在场景代理中动态构建一个mesh并使用这个动态材质。
看看实现,首先在beginplay或者component onregister时初始化窗口,这里就用的创建2dui的方式。
// Create the UMG widget instance on BeginPlay / OnRegister, using exactly the
// same path regular 2D UI uses (CreateWidget from a widget class).
void UWidgetComponent::InitWidget()
{
// Don't do any work if Slate is not initialized
if ( FSlateApplication::IsInitialized() )
{
UWorld* World = GetWorld();
// Only create once, and never while the world is being torn down
if ( WidgetClass && Widget == nullptr && World && !World->bIsTearingDown)
{
Widget = CreateWidget(GetWorld(), WidgetClass);
SetTickMode(TickMode);
}
。。。。。。
}
}
然后在tick中更新各组件
更新widget,将widget放入一个slatewindow以进行渲染,注意在最开始首先创建了动态材质,并在材质中将RT作为参数赋了进去。这里很有迷惑性,因为这时RT还没有创建、值为空,真正的RT要等后面UpdateRenderTarget时再赋值进去。
// Per-tick widget refresh: takes the Slate widget from the UMG widget and
// hosts it inside an SVirtualWindow so it can be rendered off-screen.
void UWidgetComponent::UpdateWidget()
{
。。。。。
// Grab the underlying Slate widget from the UMG widget
TSharedPtr<SWidget> NewSlateWidget;
if (Widget)
{
NewSlateWidget = Widget->TakeWidget();
}
// Create the SlateWindow if it doesn't exist yet.
// NOTE: UpdateMaterialInstance() runs here BEFORE the render target exists,
// so the "SlateUI" texture parameter is initially set with a null texture;
// the real RT is only bound later, in UpdateRenderTarget().
bool bNeededNewWindow = false;
if (!SlateWindow.IsValid())
{
UpdateMaterialInstance();
SlateWindow = SNew(SVirtualWindow).Size(CurrentDrawSize);
SlateWindow->SetIsFocusable(bWindowFocusable);
SlateWindow->SetVisibility(ConvertWindowVisibilityToVisibility(WindowVisibility));
RegisterWindow();
bNeededNewWindow = true;
}
。。。。。
// We Get here if we have a UMG Widget
if (NewSlateWidget.IsValid())
{
// Re-set the window content when the widget changed or a new window was created
if (NewSlateWidget != CurrentSlateWidget || bNeededNewWindow)
{
CurrentSlateWidget = NewSlateWidget;
SlateWindow->SetContent(NewSlateWidget.ToSharedRef());
。。。。。
}
}
。。。。。
}
// (Re)create the dynamic material instance from the component's base material
// (slot 0) and push the current parameter values into it.
void UWidgetComponent::UpdateMaterialInstance()
{
。。。。。
UMaterialInterface* BaseMaterial = GetMaterial(0);
MaterialInstance = UMaterialInstanceDynamic::Create(BaseMaterial, this);
UpdateMaterialInstanceParameters();
}
// Push the render target and tint/opacity settings into the dynamic material.
// NOTE: when this runs from UpdateMaterialInstance() during UpdateWidget(),
// RenderTarget is still null — the "SlateUI" parameter gets its real texture
// later, in UpdateRenderTarget().
void UWidgetComponent::UpdateMaterialInstanceParameters()
{
if ( MaterialInstance )
{
MaterialInstance->SetTextureParameterValue("SlateUI", RenderTarget);
MaterialInstance->SetVectorParameterValue("TintColorAndOpacity", TintColorAndOpacity);
MaterialInstance->SetScalarParameterValue("OpacityFromTexture", OpacityFromTexture);
}
}
更新完窗口后就将窗口绘制到RT上,核心函数就是高亮的两个:在UpdateRenderTarget中创建和初始化RT,然后将其赋给material,再使用FWidgetRenderer将窗口绘制到RT上。
// Render the Slate virtual window into the component's render target.
void UWidgetComponent::DrawWidgetToRenderTarget(float DeltaTime)
{
。。。。。
// Create/resize the RT first (this is also where the material finally gets it)
UpdateRenderTarget(CurrentDrawSize);
// The render target could be null if the current draw size is zero
if(RenderTarget)
{
bRedrawRequested = false;
// Rasterize the whole virtual window into the RT
WidgetRenderer->DrawWindow(
RenderTarget,
SlateWindow->GetHittestGrid(),
SlateWindow.ToSharedRef(),
DrawScale,
CurrentDrawSize,
DeltaTime);
。。。。。
}
}
// Lazily create the render target and, once it exists, bind it to the
// material's "SlateUI" texture parameter — this replaces the null texture set
// earlier by UpdateMaterialInstanceParameters().
void UWidgetComponent::UpdateRenderTarget(FIntPoint DesiredRenderTargetSize)
{
。。。。。
if ( RenderTarget == nullptr )
{
RenderTarget = NewObject<UTextureRenderTarget2D>(this);
。。。。。
if ( MaterialInstance )
{
MaterialInstance->SetTextureParameterValue("SlateUI", RenderTarget);
}
}
。。。。。
}
到这里就将窗口画面绘制到了RT上,而生成mesh并将其渲染到3D场景中是通过场景代理(SceneProxy)和MeshBuilder实现的。代理构造时获取窗口的材质和各参数。
详细看看平面窗口(plane)和曲面窗口(cylinder)是怎么计算的。
// Scene proxy for the 3D widget: snapshots the component's render target,
// dynamic material and geometry settings for use on the render thread.
FWidget3DSceneProxy( UWidgetComponent* InComponent, ISlate3DRenderer& InRenderer )
: FPrimitiveSceneProxy( InComponent )
, ArcAngle(FMath::DegreesToRadians(InComponent->GetCylinderArcAngle())) // cylinder arc, stored in radians
, Pivot( InComponent->GetPivot() )
, Renderer( InRenderer )
, RenderTarget( InComponent->GetRenderTarget() )
, MaterialInstance( InComponent->GetMaterialInstance() )
, BlendMode( InComponent->GetBlendMode() )
, GeometryMode(InComponent->GetGeometryMode())
, BodySetup(InComponent->GetBodySetup())
{
bWillEverBeLit = false;
MaterialRelevance = MaterialInstance->GetRelevance_Concurrent(GetScene().GetFeatureLevel());
}
// Builds the widget mesh dynamically every frame for each visible view,
// choosing plane or cylinder geometry based on the component setting.
virtual void GetDynamicMeshElements(const TArray<const FSceneView*>& Views, const FSceneViewFamily& ViewFamily, uint32 VisibilityMap, FMeshElementCollector& Collector) const override
{
#if WITH_EDITOR
const bool bWireframe = AllowDebugViewmodes() && ViewFamily.EngineShowFlags.Wireframe;
// One-frame wireframe proxy; ownership is handed to the collector below
auto WireframeMaterialInstance = new FColoredMaterialRenderProxy(
GEngine->WireframeMaterial ? GEngine->WireframeMaterial->GetRenderProxy() : nullptr,
FLinearColor(0, 0.5f, 1.f)
);
Collector.RegisterOneFrameMaterialProxy(WireframeMaterialInstance);
// Pick wireframe proxy in debug view modes, otherwise the widget's material
FMaterialRenderProxy* ParentMaterialProxy = nullptr;
if ( bWireframe )
{
ParentMaterialProxy = WireframeMaterialInstance;
}
else
{
ParentMaterialProxy = MaterialInstance->GetRenderProxy();
}
#else
FMaterialRenderProxy* ParentMaterialProxy = MaterialInstance->GetRenderProxy();
#endif
//FSpriteTextureOverrideRenderProxy* TextureOverrideMaterialProxy = new FSpriteTextureOverrideRenderProxy(ParentMaterialProxy,
// Current and previous local-to-world transforms (previous is used for motion vectors)
const FMatrix& ViewportLocalToWorld = GetLocalToWorld();
FMatrix PreviousLocalToWorld;
if (!GetScene().GetPreviousLocalToWorld(GetPrimitiveSceneInfo(), PreviousLocalToWorld))
{
PreviousLocalToWorld = GetLocalToWorld();
}
if( RenderTarget )
{
FTextureResource* TextureResource = RenderTarget->Resource;
if ( TextureResource )
{
// Branch on geometry mode; the two construction paths are shown below
if (GeometryMode == EWidgetGeometryMode::Plane)
{
。。。。。
}
else
{
ensure(GeometryMode == EWidgetGeometryMode::Cylinder);
。。。。。
}
}
}
。。。。。。。
}
计算平面窗口的mesh,比较简单,就构建两个三角形面片组成一个矩形平面
// Plane geometry: a single quad (two triangles) sized to the render target.
if (GeometryMode == EWidgetGeometryMode::Plane)
{
float U = -RenderTarget->SizeX * Pivot.X; // Corner offsets relative to the pivot; default pivot (0.5, 0.5) centers the quad
float V = -RenderTarget->SizeY * Pivot.Y;
float UL = RenderTarget->SizeX * (1.0f - Pivot.X);
float VL = RenderTarget->SizeY * (1.0f - Pivot.Y);
int32 VertexIndices[4]; // One rectangular plane = two triangle faces
for ( int32 ViewIndex = 0; ViewIndex < Views.Num(); ViewIndex++ )
{
FDynamicMeshBuilder MeshBuilder(Views[ViewIndex]->GetFeatureLevel());
if ( VisibilityMap & ( 1 << ViewIndex ) )
{
// Four vertices: position (note the leading minus), UV, tangent basis, color
VertexIndices[0] = MeshBuilder.AddVertex(-FVector(0, U, V ), FVector2D(0, 0), FVector(0, -1, 0), FVector(0, 0, -1), FVector(1, 0, 0), FColor::White);
VertexIndices[1] = MeshBuilder.AddVertex(-FVector(0, U, VL), FVector2D(0, 1), FVector(0, -1, 0), FVector(0, 0, -1), FVector(1, 0, 0), FColor::White);
VertexIndices[2] = MeshBuilder.AddVertex(-FVector(0, UL, VL), FVector2D(1, 1), FVector(0, -1, 0), FVector(0, 0, -1), FVector(1, 0, 0), FColor::White);
VertexIndices[3] = MeshBuilder.AddVertex(-FVector(0, UL, V), FVector2D(1, 0), FVector(0, -1, 0), FVector(0, 0, -1), FVector(1, 0, 0), FColor::White);
// Two triangles sharing the 0-2 diagonal
MeshBuilder.AddTriangle(VertexIndices[0], VertexIndices[1], VertexIndices[2]);
MeshBuilder.AddTriangle(VertexIndices[0], VertexIndices[2], VertexIndices[3]);
FDynamicMeshBuilderSettings Settings;
Settings.bDisableBackfaceCulling = false;
Settings.bReceivesDecals = true;
Settings.bUseSelectionOutline = true;
MeshBuilder.GetMesh(ViewportLocalToWorld, PreviousLocalToWorld, ParentMaterialProxy, SDPG_World, Settings, nullptr, ViewIndex, Collector, FHitProxyId());
}
}
}
窗口大概就是这么个情况,pivot位于中心
而曲面窗口的计算就比较复杂了,从2D上解释,本质是使用多条直线(弦)去逼近一段圆弧,这些弦相当于正多边形的若干条边,圆心和半径取自该正多边形的外接圆,结合下面的图可能比较清楚
本质上来说,每一个面片的xyz坐标为:
-
x = radius * cosθ
-
y = radius * sinθ
-
z = ±1/2RenderTarget->SizeY
解析看注释
// Cylinder geometry: approximate the arc with NumSegments flat quads (chords).
ensure(GeometryMode == EWidgetGeometryMode::Cylinder);
const int32 NumSegments = FMath::Lerp(4, 32, ArcAngle/PI); // Lerp segment count by ArcAngle/PI (same as degrees/180); the plane case needed only 2 triangles
const float Radius = RenderTarget->SizeX / ArcAngle; // arc length / arc angle = radius
const float Apothem = Radius * FMath::Cos(0.5f*ArcAngle); // Apothem (center-to-chord distance): the long leg of the right triangle whose hypotenuse is Radius; 0.5f exploits the arc's symmetry
const float ChordLength = 2.0f * Radius * FMath::Sin(0.5f*ArcAngle); // Chord length (NOT arc length): the short leg of that triangle, doubled because half the angle was used
const float PivotOffsetX = ChordLength * (0.5-Pivot.X); // Horizontal offset of the arc's midpoint driven by the pivot: <0.5 shifts one way, >0.5 the other
const float V = -RenderTarget->SizeY * Pivot.Y; // Vertical (2D Y, i.e. component-space Z) extents, computed exactly like the plane case
const float VL = RenderTarget->SizeY * (1.0f - Pivot.Y);
int32 VertexIndices[4];
for (int32 ViewIndex = 0; ViewIndex < Views.Num(); ViewIndex++)
{
FDynamicMeshBuilder MeshBuilder(Views[ViewIndex]->GetFeatureLevel());
if (VisibilityMap & (1 << ViewIndex))
{
const float RadiansPerStep = ArcAngle / NumSegments; // Arc covered by each quad
FVector LastTangentX; // Tangent basis of the previous segment, reused for the shared (left-edge) vertices so adjacent quads agree — presumably for smoother shading; TODO confirm
FVector LastTangentY;
FVector LastTangentZ;
for (int32 Segment = 0; Segment < NumSegments; Segment++ )
{
const float Angle = -ArcAngle / 2 + Segment * RadiansPerStep; // Sweep the quads from -ArcAngle/2 to +ArcAngle/2, left to right
const float NextAngle = Angle + RadiansPerStep; // Right edge of the current quad
// Polar to Cartesian
const float X0 = Radius * FMath::Cos(Angle) - Apothem; // Four corners; subtracting Apothem pins the widget's local origin onto the chord
const float Y0 = Radius * FMath::Sin(Angle); // Drop the -Apothem to make the component's origin the circle center instead
const float X1 = Radius * FMath::Cos(NextAngle) - Apothem;
const float Y1 = Radius * FMath::Sin(NextAngle);
const float U0 = static_cast<float>(Segment) / NumSegments; // UVs advance linearly per segment; note PivotOffsetX only shifts component-space Y, i.e. the window's horizontal placement
const float U1 = static_cast<float>(Segment+1) / NumSegments; // The rest builds the two triangles of this quad
const FVector Vertex0 = -FVector(X0, PivotOffsetX + Y0, V);
const FVector Vertex1 = -FVector(X0, PivotOffsetX + Y0, VL);
const FVector Vertex2 = -FVector(X1, PivotOffsetX + Y1, VL);
const FVector Vertex3 = -FVector(X1, PivotOffsetX + Y1, V);
// Tangent frame from the quad's own edges; Z is their cross product
FVector TangentX = Vertex3 - Vertex0;
TangentX.Normalize();
FVector TangentY = Vertex1 - Vertex0;
TangentY.Normalize();
FVector TangentZ = FVector::CrossProduct(TangentX, TangentY);
if (Segment == 0)
{
LastTangentX = TangentX;
LastTangentY = TangentY;
LastTangentZ = TangentZ;
}
// Left-edge vertices use the previous segment's tangents, right-edge the current ones
VertexIndices[0] = MeshBuilder.AddVertex(Vertex0, FVector2D(U0, 0), LastTangentX, LastTangentY, LastTangentZ, FColor::White);
VertexIndices[1] = MeshBuilder.AddVertex(Vertex1, FVector2D(U0, 1), LastTangentX, LastTangentY, LastTangentZ, FColor::White);
VertexIndices[2] = MeshBuilder.AddVertex(Vertex2, FVector2D(U1, 1), TangentX, TangentY, TangentZ, FColor::White);
VertexIndices[3] = MeshBuilder.AddVertex(Vertex3, FVector2D(U1, 0), TangentX, TangentY, TangentZ, FColor::White);
MeshBuilder.AddTriangle(VertexIndices[0], VertexIndices[1], VertexIndices[2]);
MeshBuilder.AddTriangle(VertexIndices[0], VertexIndices[2], VertexIndices[3]);
LastTangentX = TangentX;
LastTangentY = TangentY;
LastTangentZ = TangentZ;
}
FDynamicMeshBuilderSettings Settings;
Settings.bDisableBackfaceCulling = false;
Settings.bReceivesDecals = true;
Settings.bUseSelectionOutline = true;
MeshBuilder.GetMesh(ViewportLocalToWorld, PreviousLocalToWorld, ParentMaterialProxy, SDPG_World, Settings, nullptr, ViewIndex, Collector, FHitProxyId());
}
}
然后我们看看UWidgetInteractionComponent是怎么将射线检测点映射到3dui上的
// Convert the physical trace hit into widget-space coordinates, branching on
// the component's geometry mode.
if (TraceResult.HitWidgetComponent)
{
// @todo WASTED WORK: GetLocalHitLocation() gets called in GetHitWidgetPath();
if (TraceResult.HitWidgetComponent->GetGeometryMode() == EWidgetGeometryMode::Cylinder)
{
// Cylinder: remap the impact point onto the curved surface
TTuple<FVector, FVector2D> CylinderHitLocation = TraceResult.HitWidgetComponent->GetCylinderHitLocation(TraceResult.HitResult.ImpactPoint, WorldDirection);
TraceResult.HitResult.ImpactPoint = CylinderHitLocation.Get<0>();
TraceResult.LocalHitLocation = CylinderHitLocation.Get<1>();
}
else
{
ensure(TraceResult.HitWidgetComponent->GetGeometryMode() == EWidgetGeometryMode::Plane);
TraceResult.HitWidgetComponent->GetLocalHitLocation(TraceResult.HitResult.ImpactPoint, TraceResult.LocalHitLocation);
}
TraceResult.HitWidgetPath = FindHoveredWidgetPath(TraceResult);
}
平面就不用说了,看看Cylinder的情况,这里实际上是通过计算射线与圆的交点来做的。注意实际的碰撞检测点并不在曲面mesh上,而是在widget左右两边连线形成的矩形平面(弦平面)上,通过这个平面上的碰撞点和射线方向求出射线与圆(2D截面)的交点,从而映射到widget的曲面mesh上
算法原文在这
射线与球的相交 - 简书
origin是射线起点, dir是射线的方向向量。p0,p1是两个交点,center为圆心,半径为R,d为圆心到射线的距离 。
当射线和圆相交的时候,可以看到,球心 center 到射线 ray 的距离 d <= R,这个即为相交的条件。那么判断射线与球是否相交就转化为了对球心到射线的距离 d 的判断。
-
设圆心在射线上的投影为c',则 origin,center, c' 形成了一个直角三角形。
-
获得射线起点到圆心的向量 Voc = Vcenter - Vorigin
-
Voc 在射线方向上的投影长度为: Poc = Voc·dir
-
勾股定理:d·d = Voc·Voc - Poc·Poc
可以求出d的数值,
-
d < R,射线穿过圆,与圆有两个交点。
-
d = R,射线与圆相切,有一个交点为切点。
-
d > R,射线在圆外,没有交点。
计算的话就是两个方程:一条射线和球的解
射线方程:ray : P(t) = O + D·t ( t >= 0 ) //O=origin, D=direction, C=center, R=radius
圆的方程:sphere : sqr( P-C ) = R·R
射线方程表明的是如下一个点的集合P,当t从零增大时, D·t会沿着D向量的方向从零逐步变长,t 取值无限表示了射线单方向。从O点开始在D方向上无限个点构成了一条射线。球的方程表明了任何点P,只要到C点的距离等于半径R,则表明点在球面上,这么一个球面上的点的集合。
联立两个方程,试着求解 t
:
sqr( O + D·t - C ) = R·R
//D为单位向量
t·t + 2·(OC·D)·t + OC·OC - R·R = 0
最后化简得到
t·t + 2·(OC·D)·t + OC·OC - R·R = 0,这是一个关于t的一元二次方程
-
t0 = -(b + √Δ) / 2a
-
t1 = -(b - √Δ) / 2a
-
a = D·D = dot(D, D) = 1;
-
b = 2·OC·D = 2·dot(OC, D);
-
c = OC·OC - R·R = dot(OC, OC) - R·R;
-
判别式 Δ = sqr(b) - 4ac 如果判别式 Δ > 0,则表明球与射线相交。= 4·sqr( OC·D ) - 4·( OC·OC - R·R )= 4·( sqr( OC·D ) - OC·OC + R·R );
代码实现就是FindLineSphereIntersection函数,就是照着这个公式做的,没有区别。解析看注释
// Ray/sphere intersection relative to the sphere center (here used as a 2D
// ray/circle test since Z is always 0). Returns the parametric distance t of
// one intersection, or unset when the ray misses.
TOptional<float> FindLineSphereIntersection(const FVector& Start, const FVector& Dir, float Radius)
{
// Solutions exist at two possible locations:
// (Start + Dir * t) (dot) (Start + Dir * t) = Radius^2
// Dir(dot)Dir*t^2 + 2*Start(dot)Dir*t + Start(dot)Start - Radius^2 = 0
//
// Recognize quadratic form with:
const float a = FVector::DotProduct(Dir,Dir);
const float b = 2 * FVector::DotProduct(Start,Dir);
const float c = FVector::DotProduct(Start,Start) - Radius*Radius;
const float Discriminant = b*b - 4 * a * c;
if (Discriminant >= 0)
{
const float SqrtDiscr = FMath::Sqrt(Discriminant);
// Only the (-b + sqrt) root is returned here
const float Soln1 = (-b + SqrtDiscr) / (2 * a);
return Soln1;
}
。。。。。
}
// Map a world-space hit on the cylinder's collision shape back onto the curved
// widget surface; returns (world-space point on the surface, widget-space coord).
TTuple<FVector, FVector2D> UWidgetComponent::GetCylinderHitLocation(FVector WorldHitLocation, FVector WorldHitDirection) const
{
// Turn this on to see a visualization of cylindrical collision testing.
static const bool bDrawCollisionDebug = false;
ensure(GeometryMode == EWidgetGeometryMode::Cylinder);
FTransform ToWorld = GetComponentToWorld();
const FVector HitLocation_ComponentSpace = GetComponentTransform().InverseTransformPosition(WorldHitLocation); // Hit position into component space
const FVector HitDirection_ComponentSpace = GetComponentTransform().InverseTransformVector(WorldHitDirection); // Hit direction into component space
const float ArcAngleRadians = FMath::DegreesToRadians(GetCylinderArcAngle()); // Recompute the same cylinder parameters used when building the mesh
const float Radius = CurrentDrawSize.X / ArcAngleRadians;
const float Apothem = Radius * FMath::Cos(0.5f*ArcAngleRadians);
const float ChordLength = 2.0f * Radius * FMath::Sin(0.5f*ArcAngleRadians);
const float PivotOffsetX = ChordLength * (0.5-Pivot.X);
。。。。。。
const FVector HitLocation_CircleSpace( -Apothem, HitLocation_ComponentSpace.Y + PivotOffsetX, 0.0f ); // -Apothem mirrors the offset subtracted when the mesh was built; Z is handled separately below
const FVector HitDirection_CircleSpace( HitDirection_ComponentSpace.X, HitDirection_ComponentSpace.Y, 0.0f );
// DRAW HIT DIRECTION
。。。。。。
// Perform a ray vs. circle intersection test (effectively in 2D because Z coordinate is always 0)
const TOptional<float> Solution = FindLineSphereIntersection(HitLocation_CircleSpace, HitDirection_CircleSpace, Radius); // Core step: parametric distance t of the ray/circle intersection
if (Solution.IsSet())
{
const float Time = Solution.GetValue();
const FVector TrueHitLocation_CircleSpace = HitLocation_CircleSpace + HitDirection_CircleSpace * Time; // With t known, P(t) = O + D*t gives the point on the circle
。。。。。
// Determine the widget-space X hit coordinate.
// Endpoint1/2 are the angles of the arc's left and right edges; the hit
// angle's position between them yields a 0..1 fraction across the widget.
const float Endpoint1 = FMath::Fmod(FMath::Atan2(-0.5f*ChordLength, -Apothem) + 2*PI, 2*PI);
const float Endpoint2 = FMath::Fmod(FMath::Atan2(+0.5f*ChordLength, -Apothem) + 2*PI, 2*PI);
const float HitAngleRads = FMath::Fmod(FMath::Atan2(TrueHitLocation_CircleSpace.Y, TrueHitLocation_CircleSpace.X) + 2*PI, 2*PI);
const float HitAngleZeroToOne = (HitAngleRads - FMath::Min(Endpoint1, Endpoint2)) / FMath::Abs(Endpoint2 - Endpoint1); // Not a position yet: the fraction of the arc swept from the leftmost edge to the hit
// Determine the widget-space Y hit coordinate // Multiplying that fraction by the draw width maps the hit onto the widget's X axis
const FVector CylinderHitLocation_ComponentSpace = HitLocation_ComponentSpace + HitDirection_ComponentSpace*Time; // The surface only curves in XY; along Z it is flat, so the component-space ray at the same t gives Z directly
const float YHitLocation = (-CylinderHitLocation_ComponentSpace.Z + CurrentDrawSize.Y*Pivot.Y); // Z is negated, matching the flipped V/VL used when building the mesh
const FVector2D WidgetSpaceHitCoord = FVector2D(HitAngleZeroToOne * CurrentDrawSize.X, YHitLocation); // Final widget-space hit coordinate
return MakeTuple(GetComponentTransform().TransformPosition(CylinderHitLocation_ComponentSpace), WidgetSpaceHitCoord);
}
else
{
return MakeTuple(FVector::ZeroVector, FVector2D::ZeroVector);
}
}
了解原理后我们可以对widgetcomponent进行一些拓展,可以在代理中定义和实现自定义形状的mesh的widget,不过要在widgetinteractioncomponent中处理好映射。
ue4的cylinderwidget只有一个参数值angle去控制,半径是通过弧度和屏幕的实际绘制宽度DrawSize去自动计算改变的,我们可以新增两个参数,一个单独控制半径,一个单独控制角度。就可以实现下面这种效果。原图链接以及unity曲面ui实现:
https://medium.com/xrpractices/curved-user-interfaces-xr-90a143eb150a
可以通过控制代理中计算面片x值是否减去Apothem去控制widgetcomp的坐标,不减去时,widget组件的坐标原点就是在形成的曲面的圆心上,注意这时在widgetinteractioncomponent中计算映射时,Apothem应设为0,但最后计算Endpoint和角度区间时还是使用Apothem = radius*cosθ,因为角度计算和是否减去Apothem无关。
这是个人一点浅显的理解,有啥错误谢谢大佬们指点~