gl_shader_decompiler: Fix TXQ types
TXQ returns integer types. Shaders usually do:

    R0 = TXQ();                   // => int
    R0 = static_cast<float>(R0);

If we don't treat the result as an integer, the integer bit pattern in the register gets interpreted as a binary float and converted again, resulting in a corrupted number.
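A minimal C++ sketch of the failure mode, assuming C++20's std::bit_cast stands in for the decompiler's itof/ftoi bit-cast helpers:

    #include <bit>
    #include <cstdint>
    #include <iostream>

    int main() {
        const std::int32_t txq_result = 1024; // TXQ: integer texture width

        // Bug: storing the result as a float converts it (1024 -> 1024.0f),
        // so the register no longer holds the integer bit pattern.
        const float r0_wrong = static_cast<float>(txq_result);

        // The shader's own int->float step then reinterprets the register's
        // bits as an integer before converting; 1024.0f's bit pattern is
        // 0x44800000 = 1149239296.
        const auto i2f = [](float reg) {
            return static_cast<float>(std::bit_cast<std::int32_t>(reg));
        };
        std::cout << i2f(r0_wrong) << '\n'; // 1.14924e+09 -- corrupted

        // Fix: bit-cast the integer into the float register (what itof does),
        // so the later reinterpret-then-convert round trip recovers 1024.
        const float r0_fixed = std::bit_cast<float>(txq_result);
        std::cout << i2f(r0_fixed) << '\n'; // 1024
    }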
parent a6d5ff05dc
commit 88a3c05b7b
@@ -1196,11 +1196,12 @@ private:
         switch (meta->element) {
         case 0:
         case 1:
-            return "textureSize(" + sampler + ", " + lod + ')' + GetSwizzle(meta->element);
+            return "itof(int(textureSize(" + sampler + ", " + lod + ')' +
+                   GetSwizzle(meta->element) + "))";
         case 2:
             return "0";
         case 3:
-            return "textureQueryLevels(" + sampler + ')';
+            return "itof(textureQueryLevels(" + sampler + "))";
         }
         UNREACHABLE();
         return "0";
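For illustration, a sketch of the GLSL strings the patched branch emits, with hypothetical operands (sampler0, lod 0, swizzle .x); the real decompiler builds these from the instruction's metadata:

    #include <iostream>
    #include <string>

    int main() {
        const std::string sampler = "sampler0"; // hypothetical operands
        const std::string lod = "0";
        const std::string swizzle = ".x";       // GetSwizzle(0), assumed

        // Before: "textureSize(sampler0, 0).x" -- an int expression stored
        // into a float register through an implicit conversion.
        std::cout << "textureSize(" + sampler + ", " + lod + ')' + swizzle
                  << '\n';

        // After: "itof(int(textureSize(sampler0, 0).x))" -- the integer
        // bits are moved into the float register unchanged.
        std::cout << "itof(int(textureSize(" + sampler + ", " + lod + ')' +
                         swizzle + "))"
                  << '\n';
    }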