Calculate latitude/longitude of bins during radar layer initialization

This commit is contained in:
Dan Paulat 2021-07-19 20:29:05 -05:00
parent 5ff7202c3e
commit aad37f9e01
2 changed files with 98 additions and 46 deletions

View file

@@ -1,17 +1,35 @@
#version 330 core
// NOTE(review): this is a diff rendering — the next attribute is the
// pre-change line this commit removes; it is superseded by aLatLong below
// (both cannot coexist at location 0 in a compiled shader).
layout (location = 0) in vec2 aPosition;
// Constants for spherical (Web) Mercator projection math.
#define DEGREES_MAX 360.0f
// Latitude clamp limit where Mercator y diverges — presumably
// atan(sinh(pi)) in degrees (the Web-Mercator square-map bound); confirm.
#define LATITUDE_MAX 85.051128779806604f
#define LONGITUDE_MAX 180.0f
#define PI 3.1415926535897932384626433f
// 180 / pi: converts radians to degrees.
#define RAD2DEG 57.295779513082320876798156332941f
// Per-vertex input: x = latitude, y = longitude, in degrees
// (x is clamped against LATITUDE_MAX, y is combined with LONGITUDE_MAX).
layout (location = 0) in vec2 aLatLong;
// Per-vertex texture coordinate, passed through to the fragment shader.
layout (location = 1) in vec2 aTexCoord;
// Model-view-projection matrix applied in main().
uniform mat4 uMVPMatrix;
// Subtracted from the projected vertex position — presumably the map
// center/origin in the same projected space; verify against caller.
uniform vec2 uMapScreenCoord;
out vec2 texCoord;
// Projects a (latitude, longitude) pair, given in degrees, into Mercator
// screen space. Latitude is clamped to the Mercator singularity limit so
// tan() never blows up at the poles; longitude maps linearly.
vec2 latLngToScreenCoordinate(in vec2 latLng)
{
    float lat = clamp(latLng.x, -LATITUDE_MAX, LATITUDE_MAX);
    // x: shift longitude from [-180, 180] to a non-negative range.
    float x = LONGITUDE_MAX + latLng.y;
    // y: Mercator ln(tan(pi/4 + phi/2)) — lat * PI / DEGREES_MAX is phi/2
    // in radians — converted back to degrees and offset by LONGITUDE_MAX.
    float y = -(LONGITUDE_MAX - RAD2DEG * log(tan(PI / 4 + lat * PI / DEGREES_MAX)));
    return vec2(x, y);
}
void main()
{
// Pass the texture coordinate to the fragment shader
texCoord = aTexCoord;
// Project the vertex lat/long into screen space and make it relative to
// the map's projected coordinate so values stay small near the view.
vec2 p = latLngToScreenCoordinate(aLatLong) - uMapScreenCoord;
// Transform the position to screen coordinates
// NOTE(review): diff artifact — the next statement is the pre-change line
// this commit removes; the line after it is the replacement.
gl_Position = uMVPMatrix * vec4(aPosition, 0.0f, 1.0f);
gl_Position = uMVPMatrix * vec4(p, 0.0f, 1.0f);
}