Given two normalized vectors *v*_{1} and *v*_{2} in R^{3} (3D space) the angle *a* in degrees between them is defined as follows:

d := dot(v_{1}, v_{2}), where d ∈ [-1, 1]

a := acos(d) * 180/PI

**Update (C# code example)**

To get the angle wrapped into the (-180, 180] range you can use the `SignedAngle` method from the code below:

// Returns the angle (degrees) between v1 and v2, passed through ClampAngle
// to wrap it into (-180, 180].
// NOTE(review): Angle() is acos-based and always returns a value in
// [0, 180], so ClampAngle never changes it here — as written this method
// can never return a negative ("signed") result. A true signed angle in 3D
// needs a reference axis/normal to define the sign; confirm intent.
public static float SignedAngle(Vector3 v1, Vector3 v2)
{
float a = Angle(v1, v2);
return ClampAngle(a);
}
// Unsigned angle in degrees between v1 and v2, in the range [0, 180].
// Both inputs are normalized first; the dot product is clamped to
// [-1, 1] so floating-point drift cannot push Math.Acos out of domain.
public static float Angle(Vector3 v1, Vector3 v2)
{
    float cosine = Dot(Normalize(v1), Normalize(v2));
    float clamped = Clamp(cosine, -1.0f, 1.0f);
    float radians = (float)Math.Acos(clamped);
    return radians * (180.0f / (float)Math.PI);
}
// Returns value scaled to unit length.
// Fix: the original divided unconditionally, so a zero-length input
// produced 0/0 = NaN components. The zero vector has no direction,
// so it is now returned unchanged instead.
public static Vector3 Normalize(Vector3 value)
{
    float length = Length(value);
    if (length == 0.0f)
    {
        // Guard against the NaN-producing division below.
        return value;
    }
    return value / length;
}
// Euclidean (L2) length of a: sqrt(x² + y² + z²).
public static float Length(Vector3 a)
{
    float sumOfSquares = a.x * a.x + a.y * a.y + a.z * a.z;
    return (float)Math.Sqrt(sumOfSquares);
}
// Dot (scalar) product of v1 and v2.
public static float Dot(Vector3 v1, Vector3 v2)
{
    float sum = v1.x * v2.x;
    sum += v1.y * v2.y;
    sum += v1.z * v2.z;
    return sum;
}
// Wraps an angle into (-180, 180] by applying at most one 360° shift
// in either direction. The two original corrections were mutually
// exclusive (a > 180 implies a - 360 > -180), so a single chained
// conditional expression is behaviorally identical.
public static float ClampAngle(float a)
{
    return a > 180.0f ? a - 360.0f
         : a < -180.0f ? a + 360.0f
         : a;
}
// Restricts value to the inclusive [min, max] range.
public static float Clamp(float value, float min, float max)
{
    if (value < min)
    {
        return min;
    }
    if (value > max)
    {
        return max;
    }
    return value;
}