
3D objects aren't interpolated, just plain flat shaded.


@Morgan_Sundin wrote:

Isn't linear interpolation the default in the shader?

Whatever I do, my polygons are rendered flat shaded; the result looks more like 'nointerpolation'.
Here's an image of my 3D object.

And here's a part of my HLSL code.

struct VertexShaderInput
{
    float4 Position             : POSITION0;
    float4 Pos2DAsSeenByLight   : TEXCOORD1;
    float3 Normal               : NORMAL0;
    float2 TextureCoordinate    : TEXCOORD0;
};

struct VertexShaderOutput
{
    float4 Position             : POSITION0;    // The vertex position again
    float4 Position3D           : TEXCOORD2;
    float4 Pos2DAsSeenByLight   : TEXCOORD1;
    float2 TextureCoordinate    : TEXCOORD0;
    float3 Normal               : TEXCOORD3;
    float4 ParticleColor        : COLOR0;
};

struct SScenePixelToFrame
{
    float4 Color : COLOR0;
};
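As I understand it, outputs like these are interpolated linearly (and perspective-correct) across the triangle by default; to get flat shading on purpose you would have to opt in with an interpolation modifier. A minimal sketch of that, not taken from my project (the modifier syntax needs a Shader Model 4+ profile):

struct FlatShadedOutput
{
    float4 Position               : POSITION0;
    // With this modifier the whole triangle uses one vertex's value
    // instead of interpolating between the three vertices.
    nointerpolation float3 Normal : TEXCOORD3;
};

My struct above has no such modifier, which is why I expected smooth interpolation.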

VertexShaderOutput VertexShaderCommon(VertexShaderInput input, float4x4 instanceTransform, float4 instanceColor)
{
    VertexShaderOutput output;

    // Transform the object-space vertex position by the per-instance world matrix
    float4 worldPosition = mul(input.Position, instanceTransform);

    // Apply the camera view to it
    float4 viewPosition = mul(worldPosition, View);

    // And the projection, to get the clip-space (screen) position
    output.Position = mul(viewPosition, Projection);

    // And do the same from the light's point of view, for the shadow map lookup
    output.Pos2DAsSeenByLight = mul(worldPosition, xLightsViewProjection);

    // Rotate the vertex normal into world space (upper 3x3 of the instance transform)
    output.Normal = normalize(mul(input.Normal, (float3x3)instanceTransform));

    // Store the world-space position for lighting in the pixel shader
    output.Position3D = worldPosition;

    // Copy across the input texture coordinate.
    output.TextureCoordinate = input.TextureCoordinate;

    output.ParticleColor = instanceColor;

    return output;
}
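(As an aside, and not claiming this is the cause: casting the world matrix to float3x3 is fine for rotation and uniform scale, but if an instance transform contains non-uniform scale the normal should use the inverse transpose instead. A minimal sketch, assuming a hypothetical xWorldInverseTranspose parameter set from the CPU side:

// Hypothetical extra parameter, not in my effect: transpose(inverse(world))
float4x4 xWorldInverseTranspose;

float3 TransformNormalSafe(float3 normal)
{
    // Keeps normals perpendicular to the surface even under non-uniform scale;
    // with pure rotation/uniform scale this matches the plain cast above.
    return normalize(mul(normal, (float3x3)xWorldInverseTranspose));
}

My instances only use rotation and uniform scale, so I left the plain cast in.)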





SScenePixelToFrame PixelShaderFunction(VertexShaderOutput input) : COLOR0
{
    SScenePixelToFrame Output = (SScenePixelToFrame)0;

    float2 ProjectedTexCoords;
    ProjectedTexCoords[0] = input.Pos2DAsSeenByLight.x / input.Pos2DAsSeenByLight.w / 2.0f + 0.5f;
    ProjectedTexCoords[1] = -input.Pos2DAsSeenByLight.y / input.Pos2DAsSeenByLight.w / 2.0f + 0.5f;

    float diffuseLightingFactor = 0;
    if ((saturate(ProjectedTexCoords).x == ProjectedTexCoords.x) && (saturate(ProjectedTexCoords).y == ProjectedTexCoords.y))
    {
        float depthStoredInShadowMap = tex2D(ShadowMapSampler, ProjectedTexCoords).r;
        float realDistance = input.Pos2DAsSeenByLight.z / input.Pos2DAsSeenByLight.w;
        if ((realDistance - 1.0f / 100.0f) <= depthStoredInShadowMap)
        {
            diffuseLightingFactor = DotProduct(xLightPos, input.Position3D, input.Normal);
            diffuseLightingFactor = saturate(diffuseLightingFactor);
            diffuseLightingFactor *= xLightPower;

            // The light texture that will be projected onto the objects
            float lightTextureFactor = tex2D(LightShapeSampler, ProjectedTexCoords).r;
            diffuseLightingFactor *= lightTextureFactor;
        }
    }

    float4 baseColor = tex2D(TextureSampler, input.TextureCoordinate);
    Output.Color = baseColor * (diffuseLightingFactor + xAmbient);

    return Output;
}
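The DotProduct helper isn't included in the snippet above. Assuming it computes a standard N·L diffuse term, a minimal sketch would look like this (note the interpolated normal gets re-normalized, since interpolation leaves it slightly shorter than unit length; that affects smoothness, not flatness):

// Sketch only; I'm not claiming this is exactly what my DotProduct does.
float DiffuseFactor(float3 lightPos, float3 pos3D, float3 normal)
{
    float3 lightDir = normalize(lightPos - pos3D); // direction from surface point to light
    float3 n        = normalize(normal);           // renormalize the interpolated normal
    return saturate(dot(n, lightDir));             // clamp backfacing contribution to 0
}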

What am I doing wrong here? Why does this always come out with flat-shaded polygons?
Or might there be something wrong with my objects?

Regards, Morgan

