X-Git-Url: https://git.sesse.net/?p=movit;a=blobdiff_plain;f=init.h;fp=init.h;h=9c371e0316b4b6987a4a2e70ab995127d399e9d2;hp=a1e71dd54053b3f168f3c47267eba1512bd2096b;hb=c8c1721179a81ac5384fab773e59be544b57128b;hpb=9447b2d234394c1d966f77ed87271a3625a81cdd

diff --git a/init.h b/init.h
index a1e71dd..9c371e0 100644
--- a/init.h
+++ b/init.h
@@ -11,11 +11,17 @@ void init_movit();
 // Whether init_movit() has been called.
 extern bool movit_initialized;
 
-// An estimate on the number of different levels the linear texture interpolation
-// of the GPU can deliver. My Intel card seems to be limited to 2^6 levels here,
-// while a modern nVidia card (GTX 550 Ti) seem to use 2^8.
+// An estimate of the smallest difference between values that the
+// linear texture interpolation of the GPU can distinguish; i.e., for
+// a GPU with N-bit texture subpixel precision, this value will be 2^-N.
 //
-// We currently don't bother to test above 2^10.
+// From reading the little specs that exist and through practical tests,
+// the broad picture seems to be that Intel cards have 6-bit precision,
+// nVidia cards have 8-bit, and Radeon cards have 6-bit before R6xx
+// (at least when not using trilinear sampling), but can reach
+// 8-bit precision on R6xx or newer in some (unspecified) cases.
+//
+// We currently don't bother to test for more than 1024 levels.
 extern float movit_texel_subpixel_precision;
 
 // Whether the GPU in use supports GL_EXT_texture_sRGB.