yuzu/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp

// Copyright 2021 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.
#include <string_view>
#include "shader_recompiler/backend/glsl/emit_context.h"
#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h"
#include "shader_recompiler/frontend/ir/modifiers.h"
#include "shader_recompiler/frontend/ir/value.h"
namespace Shader::Backend::GLSL {
namespace {
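// Resolves the GLSL identifier ("tex<binding>") of the sampler referenced by the texture
// descriptor index encoded in the instruction info. Buffer textures are not supported yet.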
std::string Texture(EmitContext& ctx, const IR::TextureInstInfo& info,
[[maybe_unused]] const IR::Value& index) {
if (info.type == TextureType::Buffer) {
throw NotImplementedException("TextureType::Buffer");
} else {
return fmt::format("tex{}", ctx.texture_bindings.at(info.descriptor_index));
}
}
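// Wraps an offset expression in the integer vector constructor matching the texture's
// dimensionality, as expected by the textureOffset-style overloads.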
std::string CastToIntVec(std::string_view value, const IR::TextureInstInfo& info) {
switch (info.type) {
case TextureType::Color1D:
return fmt::format("int({})", value);
case TextureType::ColorArray1D:
case TextureType::Color2D:
case TextureType::ColorArray2D:
return fmt::format("ivec2({})", value);
case TextureType::Color3D:
case TextureType::ColorCube:
return fmt::format("ivec3({})", value);
case TextureType::ColorArrayCube:
return fmt::format("ivec4({})", value);
default:
throw NotImplementedException("Offset type {}", info.type.Value());
}
}
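// Same idea as CastToIntVec, but for texelFetch coordinates, which carry the array layer as
// an extra integer component (e.g. a 2D array fetch takes ivec3 rather than ivec2).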
std::string TexelFetchCastToInt(std::string_view value, const IR::TextureInstInfo& info) {
switch (info.type) {
case TextureType::Color1D:
return fmt::format("int({})", value);
case TextureType::ColorArray1D:
case TextureType::Color2D:
return fmt::format("ivec2({})", value);
case TextureType::ColorArray2D:
case TextureType::Color3D:
case TextureType::ColorCube:
return fmt::format("ivec3({})", value);
case TextureType::ColorArrayCube:
return fmt::format("ivec4({})", value);
default:
throw NotImplementedException("Offset type {}", info.type.Value());
}
}
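// Returns the vector constructor used to pack sample coordinates together with the depth
// reference for shadow samplers (vec4 for 2D array and cube types, vec3 otherwise).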
std::string ShadowSamplerVecCast(TextureType type) {
switch (type) {
case TextureType::ColorArray2D:
case TextureType::ColorCube:
case TextureType::ColorArrayCube:
return "vec4";
default:
return "vec3";
}
}
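// Folds the two PTP offset operands of a gather into a constant ivec2[4] initializer for
// textureGatherOffsets. Only fully immediate offsets are handled; anything else is stubbed.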
std::string PtpOffsets(const IR::Value& offset, const IR::Value& offset2) {
const std::array values{offset.InstRecursive(), offset2.InstRecursive()};
if (!values[0]->AreAllArgsImmediates() || !values[1]->AreAllArgsImmediates()) {
// LOG_WARNING("Not all arguments in PTP are immediate, STUBBING");
return "";
}
const IR::Opcode opcode{values[0]->GetOpcode()};
if (opcode != values[1]->GetOpcode() || opcode != IR::Opcode::CompositeConstructU32x4) {
throw LogicError("Invalid PTP arguments");
}
auto read{[&](unsigned int a, unsigned int b) { return values[a]->Arg(b).U32(); }};
return fmt::format("ivec2[](ivec2({},{}),ivec2({},{}),ivec2({},{}),ivec2({},{}))", read(0, 0),
read(0, 1), read(0, 2), read(0, 3), read(1, 0), read(1, 1), read(1, 2),
read(1, 3));
}
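// Fetches the GetSparseFromOp pseudo-instruction associated with a texture operation, if any,
// and invalidates it so the emitter can write the residency result directly.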
IR::Inst* PrepareSparse(IR::Inst& inst) {
const auto sparse_inst{inst.GetAssociatedPseudoOperation(IR::Opcode::GetSparseFromOp)};
if (sparse_inst) {
sparse_inst->Invalidate();
}
return sparse_inst;
}
} // namespace
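// Implicit-LOD sampling relies on derivatives that only exist in fragment shaders; other
// stages fall back to textureLod with an explicit LOD of 0.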
void EmitImageSampleImplicitLod([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
[[maybe_unused]] const IR::Value& index,
[[maybe_unused]] std::string_view coords,
[[maybe_unused]] std::string_view bias_lc,
[[maybe_unused]] const IR::Value& offset) {
const auto info{inst.Flags<IR::TextureInstInfo>()};
if (info.has_lod_clamp) {
throw NotImplementedException("Lod clamp samples");
}
const auto texture{Texture(ctx, info, index)};
const auto bias{info.has_bias ? fmt::format(",{}", bias_lc) : ""};
const auto texel{ctx.reg_alloc.Define(inst, Type::F32x4)};
const auto sparse_inst{PrepareSparse(inst)};
if (!sparse_inst) {
if (!offset.IsEmpty()) {
ctx.Add("{}=textureOffset({},{},{}{});", texel, texture, coords,
CastToIntVec(ctx.reg_alloc.Consume(offset), info), bias);
} else {
if (ctx.stage == Stage::Fragment) {
ctx.Add("{}=texture({},{}{});", texel, texture, coords, bias);
} else {
ctx.Add("{}=textureLod({},{},0.0);", texel, texture, coords);
}
}
return;
}
// TODO: Query sparseTexels extension support
if (!offset.IsEmpty()) {
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureOffsetARB({},{},{},{}{}));",
*sparse_inst, texture, coords, CastToIntVec(ctx.reg_alloc.Consume(offset), info),
texel, bias);
} else {
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureARB({},{},{}{}));", *sparse_inst,
texture, coords, texel, bias);
}
}
void EmitImageSampleExplicitLod([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
[[maybe_unused]] const IR::Value& index,
[[maybe_unused]] std::string_view coords,
[[maybe_unused]] std::string_view lod_lc,
[[maybe_unused]] const IR::Value& offset) {
const auto info{inst.Flags<IR::TextureInstInfo>()};
if (info.has_bias) {
throw NotImplementedException("Bias texture samples");
}
if (info.has_lod_clamp) {
throw NotImplementedException("Lod clamp samples");
}
const auto texture{Texture(ctx, info, index)};
const auto texel{ctx.reg_alloc.Define(inst, Type::F32x4)};
const auto sparse_inst{PrepareSparse(inst)};
if (!sparse_inst) {
if (!offset.IsEmpty()) {
ctx.Add("{}=textureLodOffset({},{},{},{});", texel, texture, coords, lod_lc,
CastToIntVec(ctx.reg_alloc.Consume(offset), info));
} else {
ctx.Add("{}=textureLod({},{},{});", texel, texture, coords, lod_lc);
}
return;
}
// TODO: Query sparseTexels extension support
if (!offset.IsEmpty()) {
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTexelFetchOffsetARB({},{},int({}),{},{}));",
*sparse_inst, texture, CastToIntVec(coords, info), lod_lc,
CastToIntVec(ctx.reg_alloc.Consume(offset), info), texel);
} else {
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureLodARB({},{},{},{}));", *sparse_inst,
texture, coords, lod_lc, texel);
}
}
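// Depth-compare (shadow) samples pack the reference value into the coordinate vector; sparse
// residency, bias, LOD clamping and offsets are not supported for them yet.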
void EmitImageSampleDrefImplicitLod([[maybe_unused]] EmitContext& ctx,
[[maybe_unused]] IR::Inst& inst,
[[maybe_unused]] const IR::Value& index,
[[maybe_unused]] std::string_view coords,
[[maybe_unused]] std::string_view dref,
[[maybe_unused]] std::string_view bias_lc,
[[maybe_unused]] const IR::Value& offset) {
const auto info{inst.Flags<IR::TextureInstInfo>()};
const auto sparse_inst{PrepareSparse(inst)};
if (sparse_inst) {
throw NotImplementedException("Sparse texture samples");
}
if (info.has_bias) {
throw NotImplementedException("Bias texture samples");
}
if (info.has_lod_clamp) {
throw NotImplementedException("Lod clamp samples");
}
if (!offset.IsEmpty()) {
throw NotImplementedException("textureLodOffset");
}
const auto texture{Texture(ctx, info, index)};
const auto bias{info.has_bias ? fmt::format(",{}", bias_lc) : ""};
const auto cast{ShadowSamplerVecCast(info.type)};
if (ctx.stage == Stage::Fragment) {
ctx.AddF32("{}=texture({},{}({},{}){});", inst, texture, cast, coords, dref, bias);
} else {
ctx.AddF32("{}=textureLod({},{}({},{}),0.0);", inst, texture, cast, coords, dref);
}
}
void EmitImageSampleDrefExplicitLod([[maybe_unused]] EmitContext& ctx,
[[maybe_unused]] IR::Inst& inst,
[[maybe_unused]] const IR::Value& index,
[[maybe_unused]] std::string_view coords,
[[maybe_unused]] std::string_view dref,
[[maybe_unused]] std::string_view lod_lc,
[[maybe_unused]] const IR::Value& offset) {
const auto info{inst.Flags<IR::TextureInstInfo>()};
const auto sparse_inst{PrepareSparse(inst)};
if (sparse_inst) {
throw NotImplementedException("Sparse texture samples");
}
if (info.has_bias) {
throw NotImplementedException("Bias texture samples");
}
if (info.has_lod_clamp) {
throw NotImplementedException("Lod clamp samples");
}
if (!offset.IsEmpty()) {
throw NotImplementedException("textureLodOffset");
}
const auto texture{Texture(ctx, info, index)};
if (info.type == TextureType::ColorArrayCube) {
ctx.AddF32("{}=textureLod({},{},{},{});", inst, texture, coords, dref, lod_lc);
} else {
ctx.AddF32("{}=textureLod({},vec3({},{}),{});", inst, texture, coords, dref, lod_lc);
}
}
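// Gathers select between textureGather, textureGatherOffset and textureGatherOffsets (PTP)
// depending on which offset operands are present, with sparse ARB variants when residency
// information is requested.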
void EmitImageGather([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
[[maybe_unused]] const IR::Value& index,
[[maybe_unused]] std::string_view coords,
[[maybe_unused]] const IR::Value& offset,
[[maybe_unused]] const IR::Value& offset2) {
const auto info{inst.Flags<IR::TextureInstInfo>()};
const auto texture{Texture(ctx, info, index)};
const auto texel{ctx.reg_alloc.Define(inst, Type::F32x4)};
const auto sparse_inst{PrepareSparse(inst)};
if (!sparse_inst) {
if (offset.IsEmpty()) {
ctx.Add("{}=textureGather({},{},int({}));", texel, texture, coords,
info.gather_component);
return;
}
if (offset2.IsEmpty()) {
ctx.Add("{}=textureGatherOffset({},{},{},int({}));", texel, texture, coords,
CastToIntVec(ctx.reg_alloc.Consume(offset), info), info.gather_component);
return;
}
// PTP
const auto offsets{PtpOffsets(offset, offset2)};
ctx.Add("{}=textureGatherOffsets({},{},{},int({}));", texel, texture, coords, offsets,
info.gather_component);
return;
}
// TODO: Query sparseTexels extension support
if (offset.IsEmpty()) {
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherARB({},{},{},int({})));",
*sparse_inst, texture, coords, texel, info.gather_component);
return;
}
if (offset2.IsEmpty()) {
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},{},int({})));",
*sparse_inst, texture, coords,
CastToIntVec(ctx.reg_alloc.Consume(offset), info), texel, info.gather_component);
return;
}
// PTP
const auto offsets{PtpOffsets(offset, offset2)};
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetsARB({},{},{},{},int({})));",
*sparse_inst, texture, coords, offsets, texel,
info.gather_component);
}
void EmitImageGatherDref([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
[[maybe_unused]] const IR::Value& index,
[[maybe_unused]] std::string_view coords,
[[maybe_unused]] const IR::Value& offset,
[[maybe_unused]] const IR::Value& offset2,
[[maybe_unused]] std::string_view dref) {
const auto info{inst.Flags<IR::TextureInstInfo>()};
const auto texture{Texture(ctx, info, index)};
const auto texel{ctx.reg_alloc.Define(inst, Type::F32x4)};
const auto sparse_inst{PrepareSparse(inst)};
if (!sparse_inst) {
if (offset.IsEmpty()) {
ctx.Add("{}=textureGather({},{},{});", texel, texture, coords, dref);
return;
}
if (offset2.IsEmpty()) {
ctx.Add("{}=textureGatherOffset({},{},{},{});", texel, texture, coords, dref,
CastToIntVec(ctx.reg_alloc.Consume(offset), info));
return;
}
// PTP
const auto offsets{PtpOffsets(offset, offset2)};
ctx.Add("{}=textureGatherOffsets({},{},{},{});", texel, texture, coords, dref, offsets);
return;
}
// TODO: Query sparseTexels extension support
if (offset.IsEmpty()) {
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherARB({},{},{},{}));", *sparse_inst,
texture, coords, dref, texel);
return;
}
if (offset2.IsEmpty()) {
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},{},{}));",
*sparse_inst, texture, coords, dref,
CastToIntVec(ctx.reg_alloc.Consume(offset), info), texel);
return;
}
// PTP
const auto offsets{PtpOffsets(offset, offset2)};
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetsARB({},{},{},{},{}));",
*sparse_inst, texture, coords, dref, offsets, texel);
}
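// Texel fetches address the texture with unnormalized integer coordinates and an explicit
// mip level; the multisample operand is currently unused.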
void EmitImageFetch([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
[[maybe_unused]] const IR::Value& index,
[[maybe_unused]] std::string_view coords,
[[maybe_unused]] std::string_view offset, [[maybe_unused]] std::string_view lod,
[[maybe_unused]] std::string_view ms) {
const auto info{inst.Flags<IR::TextureInstInfo>()};
if (info.has_bias) {
throw NotImplementedException("Bias texture samples");
}
if (info.has_lod_clamp) {
throw NotImplementedException("Lod clamp samples");
}
const auto texture{Texture(ctx, info, index)};
const auto sparse_inst{PrepareSparse(inst)};
const auto texel{ctx.reg_alloc.Define(inst, Type::F32x4)};
if (!sparse_inst) {
if (!offset.empty()) {
ctx.Add("{}=texelFetchOffset({},{},int({}),{});", texel, texture,
TexelFetchCastToInt(coords, info), lod, TexelFetchCastToInt(offset, info));
} else {
ctx.Add("{}=texelFetch({},{},int({}));", texel, texture,
TexelFetchCastToInt(coords, info), lod);
}
return;
}
// TODO: Query sparseTexels extension support
if (!offset.empty()) {
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTexelFetchOffsetARB({},{},int({}),{},{}));",
*sparse_inst, texture, CastToIntVec(coords, info), lod,
CastToIntVec(offset, info), texel);
} else {
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTexelFetchARB({},{},{},{}));", *sparse_inst,
texture, CastToIntVec(coords, info), lod, texel);
}
}
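// Packs textureSize and textureQueryLevels into a uvec4, zero-filling the size components the
// texture type does not have.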
void EmitImageQueryDimensions([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
[[maybe_unused]] const IR::Value& index,
[[maybe_unused]] std::string_view lod) {
const auto info{inst.Flags<IR::TextureInstInfo>()};
const auto texture{Texture(ctx, info, index)};
switch (info.type) {
case TextureType::Color1D:
return ctx.AddU32x4(
"{}=uvec4(uint(textureSize({},int({}))),0u,0u,uint(textureQueryLevels({})));", inst,
texture, lod, texture);
case TextureType::ColorArray1D:
case TextureType::Color2D:
case TextureType::ColorCube:
return ctx.AddU32x4(
"{}=uvec4(uvec2(textureSize({},int({}))),0u,uint(textureQueryLevels({})));", inst,
texture, lod, texture);
case TextureType::ColorArray2D:
case TextureType::Color3D:
case TextureType::ColorArrayCube:
return ctx.AddU32x4(
"{}=uvec4(uvec3(textureSize({},int({}))),uint(textureQueryLevels({})));", inst, texture,
lod, texture);
case TextureType::Buffer:
throw NotImplementedException("Texture buffers");
}
throw LogicError("Unspecified image type {}", info.type.Value());
}
void EmitImageQueryLod([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
[[maybe_unused]] const IR::Value& index,
[[maybe_unused]] std::string_view coords) {
const auto info{inst.Flags<IR::TextureInstInfo>()};
const auto texture{Texture(ctx, info, index)};
return ctx.AddF32x4("{}=vec4(textureQueryLod({},{}),0.0,0.0);", inst, texture, coords);
}
void EmitImageGradient([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
[[maybe_unused]] const IR::Value& index,
[[maybe_unused]] std::string_view coords,
[[maybe_unused]] std::string_view derivates,
[[maybe_unused]] std::string_view offset,
[[maybe_unused]] std::string_view lod_clamp) {
throw NotImplementedException("GLSL Instruction");
}
void EmitImageRead([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
[[maybe_unused]] const IR::Value& index,
[[maybe_unused]] std::string_view coords) {
throw NotImplementedException("GLSL Instruction");
}
void EmitImageWrite([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
[[maybe_unused]] const IR::Value& index,
[[maybe_unused]] std::string_view coords,
[[maybe_unused]] std::string_view color) {
throw NotImplementedException("GLSL Instruction");
}
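// Bindless and bound image variants are not implemented by the GLSL backend yet.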
void EmitBindlessImageSampleImplicitLod(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBindlessImageSampleExplicitLod(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBindlessImageSampleDrefImplicitLod(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBindlessImageSampleDrefExplicitLod(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBindlessImageGather(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBindlessImageGatherDref(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBindlessImageFetch(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBindlessImageQueryDimensions(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBindlessImageQueryLod(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBindlessImageGradient(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBindlessImageRead(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBindlessImageWrite(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBoundImageSampleImplicitLod(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBoundImageSampleExplicitLod(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBoundImageSampleDrefImplicitLod(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBoundImageSampleDrefExplicitLod(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBoundImageGather(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBoundImageGatherDref(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBoundImageFetch(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBoundImageQueryDimensions(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBoundImageQueryLod(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBoundImageGradient(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBoundImageRead(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
void EmitBoundImageWrite(EmitContext&) {
throw NotImplementedException("GLSL Instruction");
}
} // namespace Shader::Backend::GLSL