How does AutoShader calculate the normals?

I don't understand what is going on. ><
I was playing with the sample from the first post of this topic ([UPDATE] - Fur - Deferred shading),
and when I use normalization I get a different result from what I see in the default buffer.

Here is the modified sample (I added a make_aux_buffer procedure and a test shader, and switched to the standard models):
panda3d.org.ru/files/deferred_light.zip
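
In case the link dies, make_aux_buffer is roughly the following — a minimal sketch under my own assumptions (buffer name, size, and the extra camera), not necessarily line-for-line what is in the zip. It creates an offscreen buffer with one auxiliary RGBA bitplane, which is where the auto shader writes the view-space normals when the scene has an AuxBitplaneAttrib with ABOAuxNormal and setShaderAuto:

from panda3d.core import (FrameBufferProperties, WindowProperties,
                          GraphicsPipe, GraphicsOutput, Texture)

def make_aux_buffer(base, size=512):
    # Request a color plane, a depth plane, and one auxiliary RGBA plane.
    fb_props = FrameBufferProperties()
    fb_props.setRgbColor(True)
    fb_props.setDepthBits(1)
    fb_props.setAuxRgba(1)

    win_props = WindowProperties()
    win_props.setSize(size, size)

    # Offscreen buffer sharing the main window's GSG.
    buf = base.graphicsEngine.makeOutput(
        base.pipe, "aux-buffer", -2, fb_props, win_props,
        GraphicsPipe.BFRefuseWindow, base.win.getGsg(), base.win)

    # Bind textures to the color and aux planes so they can be inspected.
    color_tex, aux_tex = Texture(), Texture()
    buf.addRenderTexture(color_tex, GraphicsOutput.RTMBindOrCopy,
                         GraphicsOutput.RTPColor)
    buf.addRenderTexture(aux_tex, GraphicsOutput.RTMBindOrCopy,
                         GraphicsOutput.RTPAuxRgba0)

    # A camera that renders the normal scene graph into the buffer.
    base.makeCamera(buf)
    return buf, color_tex, aux_tex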

My test shader:

//Cg

void vshader(float4 vtx_position : POSITION,
             float4 vtx_normal   : NORMAL,
             uniform float4x4 mat_modelproj,
             uniform float4x4 tpose_view_to_model,
             out float4 l_position   : POSITION,
             out float4 l_eye_normal : TEXCOORD1)
{
    l_position = mul(mat_modelproj, vtx_position);
    // Normals go to view space with the inverse transpose of the
    // model-to-view matrix, which is the transpose of view-to-model,
    // i.e. Panda3D's tpose_view_to_model input.
    l_eye_normal.xyz = mul((float3x3)tpose_view_to_model, vtx_normal.xyz);
    l_eye_normal.w = 0;
}

void fshader(float3 l_eye_normal : TEXCOORD1,
             out float3 o_color : COLOR)
{
    // This is the line in question: renormalizing the interpolated normal.
    l_eye_normal = normalize(l_eye_normal);
    // Pack the [-1, 1] components into the [0, 1] color range.
    o_color.rgb = (l_eye_normal * 0.5) + float3(0.5, 0.5, 0.5);
}
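
For completeness, this is how I apply it — assuming the shader above is saved as normal_view.sha next to the script (the file name is just my choice):

# Apply the test shader to one of the standard models.
model = loader.loadModel("panda")   # any of the stock models will do
model.reparentTo(render)
model.setShader(loader.loadShader("normal_view.sha"))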