gl_texture_cache: Avoid format views on Intel and AMD

Intel and AMD proprietary drivers are incapable of rendering to texture
views with a format different from the original texture's. Avoid creating
these at the cache level and emulate them with copies instead, at the cost
of extra memory.
ReinUsesLisp
2021-01-04 01:56:44 -03:00
parent 3a49c1a691
commit 7d904fef2e
11 changed files with 48 additions and 21 deletions
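To make the workaround concrete, here is a minimal sketch, in raw OpenGL, of what emulating a format view with a copy can look like. This is not yuzu's implementation: MakeAliasOrCopy and its parameters are hypothetical, 2D single-layer textures are assumed, and a real cache would track lifetimes and reuse allocations.

#include <algorithm>

#include <glad/glad.h>

// Hypothetical helper: return a texture that can be sampled or rendered as
// view_format. On healthy drivers this aliases the parent's storage with
// glTextureView; on broken drivers it allocates new storage and copies.
GLuint MakeAliasOrCopy(GLuint parent, GLenum view_format, GLsizei width,
                       GLsizei height, GLsizei num_levels, bool broken_views) {
    GLuint texture;
    if (!broken_views) {
        // glTextureView requires a fresh, never-bound name from glGenTextures.
        glGenTextures(1, &texture);
        glTextureView(texture, GL_TEXTURE_2D, parent, view_format, 0, num_levels, 0, 1);
        return texture;
    }
    // Workaround path: allocate separate storage and copy every mip level.
    // glCopyImageSubData only needs the two formats to be copy-compatible
    // (same texel size class), which is what MakeCopyTable in the diff below
    // validates.
    glCreateTextures(GL_TEXTURE_2D, 1, &texture);
    glTextureStorage2D(texture, num_levels, view_format, width, height);
    for (GLint level = 0; level < num_levels; ++level) {
        glCopyImageSubData(parent, GL_TEXTURE_2D, level, 0, 0, 0,
                           texture, GL_TEXTURE_2D, level, 0, 0, 0,
                           std::max(width >> level, 1),
                           std::max(height >> level, 1), 1);
    }
    return texture;
}

Unlike a true view, the copy does not alias the parent's memory: writes to either texture must be propagated with further copies, and both allocations stay alive. That duplication is the extra memory consumption the commit message refers to.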

src/video_core/compatible_formats.cpp

@@ -10,9 +10,7 @@
 #include "video_core/surface.h"
 
 namespace VideoCore::Surface {
 namespace {
 
 using Table = std::array<std::array<u64, 2>, MaxPixelFormat>;
 // Compatibility table taken from Table 3.X.2 in:
@@ -233,10 +231,13 @@ constexpr Table MakeCopyTable() {
     EnableRange(copy, COPY_CLASS_64_BITS);
     return copy;
 }
 
 } // Anonymous namespace
 
-bool IsViewCompatible(PixelFormat format_a, PixelFormat format_b) {
+bool IsViewCompatible(PixelFormat format_a, PixelFormat format_b, bool broken_views) {
+    if (broken_views) {
+        // If format views are broken, only accept formats that are identical.
+        return format_a == format_b;
+    }
     static constexpr Table TABLE = MakeViewTable();
     return IsSupported(TABLE, format_a, format_b);
 }
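A hedged sketch of a call site, assuming the broken-view detection is a per-vendor flag computed at device initialization (the exact query added elsewhere in this changeset may be named differently):

// Hypothetical caller: decide between a true view and an emulated one.
const bool broken_views = is_intel_proprietary || is_amd_proprietary;
if (VideoCore::Surface::IsViewCompatible(view_format, image_format, broken_views)) {
    // The driver can alias the existing allocation with a texture view.
} else {
    // Emulate the view: allocate a separate image and copy between them.
}

Passing the flag through the predicate keeps the policy in one place: callers never have to special-case vendors themselves, and when broken_views is set the table lookup is skipped entirely in favor of the strict format_a == format_b check.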