shader: Initial implementation of an AST

ReinUsesLisp
2021-02-11 16:39:06 -03:00
committed by ameerj
parent 2930dccecc
commit 9170200a11
33 changed files with 1347 additions and 591 deletions

View File

@@ -17,38 +17,49 @@
#include "shader_recompiler/frontend/maxwell/location.h"
namespace Shader::Maxwell::Flow {
namespace {
struct Compare {
bool operator()(const Block& lhs, Location rhs) const noexcept {
return lhs.begin < rhs;
}
bool operator()(Location lhs, const Block& rhs) const noexcept {
return lhs < rhs.begin;
}
bool operator()(const Block& lhs, const Block& rhs) const noexcept {
return lhs.begin < rhs.begin;
}
};
} // Anonymous namespace
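The Compare helper above is what lets the new boost::intrusive block set be searched by a raw Location, without first constructing a Block key. A minimal, self-contained sketch of the same pattern (MiniBlock and MiniCompare are illustrative names, not part of this commit):

#include <boost/intrusive/set.hpp>
#include <cstdio>

namespace bi = boost::intrusive;

// Hook mode mirrors the Flow::Block change further down; normal_link skips
// safe-mode bookkeeping in exchange for the small speedup noted there.
struct MiniBlock : bi::set_base_hook<bi::link_mode<bi::normal_link>> {
    unsigned begin{};
    bool operator<(const MiniBlock& rhs) const noexcept { return begin < rhs.begin; }
};

struct MiniCompare {
    bool operator()(const MiniBlock& lhs, unsigned rhs) const noexcept { return lhs.begin < rhs; }
    bool operator()(unsigned lhs, const MiniBlock& rhs) const noexcept { return lhs < rhs.begin; }
};

int main() {
    MiniBlock a, b;
    a.begin = 0x00;
    b.begin = 0x10;
    bi::set<MiniBlock> blocks;
    blocks.insert(a);
    blocks.insert(b);
    // Look up a block by its start address directly; no temporary key object is built
    if (const auto it{blocks.find(0x10u, MiniCompare{})}; it != blocks.end()) {
        std::printf("found block starting at 0x%x\n", it->begin);
    }
    // First block that begins strictly after 0x00, i.e. "the next block"
    const auto next{blocks.upper_bound(0x00u, MiniCompare{})};
    std::printf("next block begins at 0x%x\n", next->begin);
}

This is the same shape as the find(pc, Compare{}) and upper_bound(pc, Compare{}) calls used later in this file.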
static u32 BranchOffset(Location pc, Instruction inst) {
return pc.Offset() + inst.branch.Offset() + 8;
}
static std::array<Block, 2> Split(Block&& block, Location pc, BlockId new_id) {
if (pc <= block.begin || pc >= block.end) {
static void Split(Block* old_block, Block* new_block, Location pc) {
if (pc <= old_block->begin || pc >= old_block->end) {
throw InvalidArgument("Invalid address to split={}", pc);
}
return {
Block{
.begin{block.begin},
.end{pc},
.end_class{EndClass::Branch},
.id{block.id},
.stack{block.stack},
.cond{true},
.branch_true{new_id},
.branch_false{UNREACHABLE_BLOCK_ID},
.imm_predecessors{},
},
Block{
.begin{pc},
.end{block.end},
.end_class{block.end_class},
.id{new_id},
.stack{std::move(block.stack)},
.cond{block.cond},
.branch_true{block.branch_true},
.branch_false{block.branch_false},
.imm_predecessors{},
},
*new_block = Block{
.begin{pc},
.end{old_block->end},
.end_class{old_block->end_class},
.stack{old_block->stack},
.cond{old_block->cond},
.branch_true{old_block->branch_true},
.branch_false{old_block->branch_false},
.ir{nullptr},
};
*old_block = Block{
.begin{old_block->begin},
.end{pc},
.end_class{EndClass::Branch},
.stack{std::move(old_block->stack)},
.cond{IR::Condition{true}},
.branch_true{new_block},
.branch_false{nullptr},
.ir{nullptr},
};
}
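Put concretely with hypothetical addresses: splitting a block that covers [0x00, 0x80) at pc 0x40 leaves the original pointer holding [0x00, 0x40) with an unconditional branch to the new block, while the new block inherits [0x40, 0x80) along with the old block's terminator. A stripped-down sketch of the same in-place rewrite (MiniBlock is illustrative, not the real Flow::Block):

struct MiniBlock {
    unsigned begin{};
    unsigned end{};
    MiniBlock* branch_true{nullptr};
};

void MiniSplit(MiniBlock* old_block, MiniBlock* new_block, unsigned pc) {
    *new_block = MiniBlock{pc, old_block->end, old_block->branch_true}; // second half keeps the old terminator
    *old_block = MiniBlock{old_block->begin, pc, new_block};            // first half falls through to the new block
}

Rewriting the blocks in place keeps every existing Block* edge pointing at the correct half of the split, which fits the pointer-based block graph this commit introduces.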
@@ -112,7 +123,7 @@ static bool HasFlowTest(Opcode opcode) {
static std::string NameOf(const Block& block) {
if (block.begin.IsVirtual()) {
return fmt::format("\"Virtual {}\"", block.id);
return fmt::format("\"Virtual {}\"", block.begin);
} else {
return fmt::format("\"{}\"", block.begin);
}
@@ -158,126 +169,23 @@ bool Block::Contains(Location pc) const noexcept {
Function::Function(Location start_address)
: entrypoint{start_address}, labels{{
.address{start_address},
.block_id{0},
.block{nullptr},
.stack{},
}} {}
void Function::BuildBlocksMap() {
const size_t num_blocks{NumBlocks()};
blocks_map.resize(num_blocks);
for (size_t block_index = 0; block_index < num_blocks; ++block_index) {
Block& block{blocks_data[block_index]};
blocks_map[block.id] = &block;
}
}
void Function::BuildImmediatePredecessors() {
for (const Block& block : blocks_data) {
if (block.branch_true != UNREACHABLE_BLOCK_ID) {
blocks_map[block.branch_true]->imm_predecessors.push_back(block.id);
}
if (block.branch_false != UNREACHABLE_BLOCK_ID) {
blocks_map[block.branch_false]->imm_predecessors.push_back(block.id);
}
}
}
void Function::BuildPostOrder() {
boost::container::small_vector<BlockId, 0x110> block_stack;
post_order_map.resize(NumBlocks());
Block& first_block{blocks_data[blocks.front()]};
first_block.post_order_visited = true;
block_stack.push_back(first_block.id);
const auto visit_branch = [&](BlockId block_id, BlockId branch_id) {
if (branch_id == UNREACHABLE_BLOCK_ID) {
return false;
}
if (blocks_map[branch_id]->post_order_visited) {
return false;
}
blocks_map[branch_id]->post_order_visited = true;
// Calling push_back twice is faster than insert on msvc
block_stack.push_back(block_id);
block_stack.push_back(branch_id);
return true;
};
while (!block_stack.empty()) {
const Block* const block{blocks_map[block_stack.back()]};
block_stack.pop_back();
if (!visit_branch(block->id, block->branch_true) &&
!visit_branch(block->id, block->branch_false)) {
post_order_map[block->id] = static_cast<u32>(post_order_blocks.size());
post_order_blocks.push_back(block->id);
}
}
}
void Function::BuildImmediateDominators() {
auto transform_block_id{std::views::transform([this](BlockId id) { return blocks_map[id]; })};
auto reverse_order_but_first{std::views::reverse | std::views::drop(1) | transform_block_id};
auto has_idom{std::views::filter([](Block* block) { return block->imm_dominator; })};
auto intersect{[this](Block* finger1, Block* finger2) {
while (finger1 != finger2) {
while (post_order_map[finger1->id] < post_order_map[finger2->id]) {
finger1 = finger1->imm_dominator;
}
while (post_order_map[finger2->id] < post_order_map[finger1->id]) {
finger2 = finger2->imm_dominator;
}
}
return finger1;
}};
for (Block& block : blocks_data) {
block.imm_dominator = nullptr;
}
Block* const start_block{&blocks_data[blocks.front()]};
start_block->imm_dominator = start_block;
bool changed{true};
while (changed) {
changed = false;
for (Block* const block : post_order_blocks | reverse_order_but_first) {
Block* new_idom{};
for (Block* predecessor : block->imm_predecessors | transform_block_id | has_idom) {
new_idom = new_idom ? intersect(predecessor, new_idom) : predecessor;
}
changed |= block->imm_dominator != new_idom;
block->imm_dominator = new_idom;
}
}
}
void Function::BuildDominanceFrontier() {
auto transform_block_id{std::views::transform([this](BlockId id) { return blocks_map[id]; })};
auto has_enough_predecessors{[](Block& block) { return block.imm_predecessors.size() >= 2; }};
for (Block& block : blocks_data | std::views::filter(has_enough_predecessors)) {
for (Block* current : block.imm_predecessors | transform_block_id) {
while (current != block.imm_dominator) {
current->dominance_frontiers.push_back(current->id);
current = current->imm_dominator;
}
}
}
}
CFG::CFG(Environment& env_, Location start_address) : env{env_} {
VisitFunctions(start_address);
for (Function& function : functions) {
function.BuildBlocksMap();
function.BuildImmediatePredecessors();
function.BuildPostOrder();
function.BuildImmediateDominators();
function.BuildDominanceFrontier();
}
}
void CFG::VisitFunctions(Location start_address) {
CFG::CFG(Environment& env_, ObjectPool<Block>& block_pool_, Location start_address)
: env{env_}, block_pool{block_pool_} {
functions.emplace_back(start_address);
functions.back().labels.back().block = block_pool.Create(Block{
.begin{start_address},
.end{start_address},
.end_class{EndClass::Branch},
.stack{},
.cond{IR::Condition{true}},
.branch_true{nullptr},
.branch_false{nullptr},
.ir{nullptr},
});
for (FunctionId function_id = 0; function_id < functions.size(); ++function_id) {
while (!functions[function_id].labels.empty()) {
Function& function{functions[function_id]};
@@ -294,35 +202,16 @@ void CFG::AnalyzeLabel(FunctionId function_id, Label& label) {
return;
}
// Try to find the next block
Function* function{&functions[function_id]};
Function* const function{&functions[function_id]};
Location pc{label.address};
const auto next{std::upper_bound(function->blocks.begin(), function->blocks.end(), pc,
[function](Location pc, u32 block_index) {
return pc < function->blocks_data[block_index].begin;
})};
const auto next_index{std::distance(function->blocks.begin(), next)};
const bool is_last{next == function->blocks.end()};
Location next_pc;
BlockId next_id{UNREACHABLE_BLOCK_ID};
if (!is_last) {
next_pc = function->blocks_data[*next].begin;
next_id = function->blocks_data[*next].id;
}
const auto next_it{function->blocks.upper_bound(pc, Compare{})};
const bool is_last{next_it == function->blocks.end()};
Block* const next{is_last ? nullptr : &*next_it};
// Insert before the next block
Block block{
.begin{pc},
.end{pc},
.end_class{EndClass::Branch},
.id{label.block_id},
.stack{std::move(label.stack)},
.cond{true},
.branch_true{UNREACHABLE_BLOCK_ID},
.branch_false{UNREACHABLE_BLOCK_ID},
.imm_predecessors{},
};
Block* const block{label.block};
// Analyze instructions until it reaches an already visited block or there's a branch
bool is_branch{false};
while (is_last || pc < next_pc) {
while (!next || pc < next->begin) {
is_branch = AnalyzeInst(block, function_id, pc) == AnalysisState::Branch;
if (is_branch) {
break;
@@ -332,43 +221,36 @@ void CFG::AnalyzeLabel(FunctionId function_id, Label& label) {
if (!is_branch) {
// If the block finished without a branch,
// it means that the next instruction is already visited, jump to it
block.end = pc;
block.cond = true;
block.branch_true = next_id;
block.branch_false = UNREACHABLE_BLOCK_ID;
block->end = pc;
block->cond = IR::Condition{true};
block->branch_true = next;
block->branch_false = nullptr;
}
// Function's pointer might be invalid, resolve it again
function = &functions[function_id];
const u32 new_block_index = static_cast<u32>(function->blocks_data.size());
function->blocks.insert(function->blocks.begin() + next_index, new_block_index);
function->blocks_data.push_back(std::move(block));
// Insert the new block
functions[function_id].blocks.insert(*block);
}
bool CFG::InspectVisitedBlocks(FunctionId function_id, const Label& label) {
const Location pc{label.address};
Function& function{functions[function_id]};
const auto it{std::ranges::find_if(function.blocks, [&function, pc](u32 block_index) {
return function.blocks_data[block_index].Contains(pc);
})};
const auto it{
std::ranges::find_if(function.blocks, [pc](auto& block) { return block.Contains(pc); })};
if (it == function.blocks.end()) {
// Address has not been visited
return false;
}
Block& block{function.blocks_data[*it]};
if (block.begin == pc) {
throw LogicError("Dangling branch");
Block* const visited_block{&*it};
if (visited_block->begin == pc) {
throw LogicError("Dangling block");
}
const u32 first_index{*it};
const u32 second_index{static_cast<u32>(function.blocks_data.size())};
const std::array new_indices{first_index, second_index};
std::array split_blocks{Split(std::move(block), pc, label.block_id)};
function.blocks_data[*it] = std::move(split_blocks[0]);
function.blocks_data.push_back(std::move(split_blocks[1]));
function.blocks.insert(function.blocks.erase(it), new_indices.begin(), new_indices.end());
Block* const new_block{label.block};
Split(visited_block, new_block, pc);
function.blocks.insert(it, *new_block);
return true;
}
CFG::AnalysisState CFG::AnalyzeInst(Block& block, FunctionId function_id, Location pc) {
CFG::AnalysisState CFG::AnalyzeInst(Block* block, FunctionId function_id, Location pc) {
const Instruction inst{env.ReadInstruction(pc.Offset())};
const Opcode opcode{Decode(inst.raw)};
switch (opcode) {
@@ -390,12 +272,12 @@ CFG::AnalysisState CFG::AnalyzeInst(Block& block, FunctionId function_id, Locati
AnalyzeBRX(block, pc, inst, IsAbsoluteJump(opcode));
break;
case Opcode::RET:
block.end_class = EndClass::Return;
block->end_class = EndClass::Return;
break;
default:
break;
}
block.end = pc;
block->end = pc;
return AnalysisState::Branch;
case Opcode::BRK:
case Opcode::CONT:
@@ -404,9 +286,9 @@ CFG::AnalysisState CFG::AnalyzeInst(Block& block, FunctionId function_id, Locati
if (!AnalyzeBranch(block, function_id, pc, inst, opcode)) {
return AnalysisState::Continue;
}
const auto [stack_pc, new_stack]{block.stack.Pop(OpcodeToken(opcode))};
block.branch_true = AddLabel(block, new_stack, stack_pc, function_id);
block.end = pc;
const auto [stack_pc, new_stack]{block->stack.Pop(OpcodeToken(opcode))};
block->branch_true = AddLabel(block, new_stack, stack_pc, function_id);
block->end = pc;
return AnalysisState::Branch;
}
case Opcode::PBK:
@@ -414,7 +296,7 @@ CFG::AnalysisState CFG::AnalyzeInst(Block& block, FunctionId function_id, Locati
case Opcode::PEXIT:
case Opcode::PLONGJMP:
case Opcode::SSY:
block.stack.Push(OpcodeToken(opcode), BranchOffset(pc, inst));
block->stack.Push(OpcodeToken(opcode), BranchOffset(pc, inst));
return AnalysisState::Continue;
case Opcode::EXIT:
return AnalyzeEXIT(block, function_id, pc, inst);
@@ -444,51 +326,51 @@ CFG::AnalysisState CFG::AnalyzeInst(Block& block, FunctionId function_id, Locati
return AnalysisState::Branch;
}
void CFG::AnalyzeCondInst(Block& block, FunctionId function_id, Location pc,
void CFG::AnalyzeCondInst(Block* block, FunctionId function_id, Location pc,
EndClass insn_end_class, IR::Condition cond) {
if (block.begin != pc) {
if (block->begin != pc) {
// If the block doesn't start in the conditional instruction
// mark it as a label to visit it later
block.end = pc;
block.cond = true;
block.branch_true = AddLabel(block, block.stack, pc, function_id);
block.branch_false = UNREACHABLE_BLOCK_ID;
block->end = pc;
block->cond = IR::Condition{true};
block->branch_true = AddLabel(block, block->stack, pc, function_id);
block->branch_false = nullptr;
return;
}
// Impersonate the visited block with a virtual block
// Jump from this virtual to the real conditional instruction and the next instruction
Function& function{functions[function_id]};
const BlockId conditional_block_id{++function.current_block_id};
function.blocks.push_back(static_cast<u32>(function.blocks_data.size()));
Block& virtual_block{function.blocks_data.emplace_back(Block{
.begin{}, // Virtual block
.end{},
// Create a virtual block and a conditional block
Block* const conditional_block{block_pool.Create()};
Block virtual_block{
.begin{block->begin.Virtual()},
.end{block->begin.Virtual()},
.end_class{EndClass::Branch},
.id{block.id}, // Impersonating
.stack{block.stack},
.stack{block->stack},
.cond{cond},
.branch_true{conditional_block_id},
.branch_false{UNREACHABLE_BLOCK_ID},
.imm_predecessors{},
})};
// Set the end properties of the conditional instruction and give it a new identity
Block& conditional_block{block};
conditional_block.end = pc;
conditional_block.end_class = insn_end_class;
conditional_block.id = conditional_block_id;
.branch_true{conditional_block},
.branch_false{nullptr},
.ir{nullptr},
};
// Save the contents of the visited block in the conditional block
*conditional_block = std::move(*block);
// Impersonate the visited block with a virtual block
*block = std::move(virtual_block);
// Set the end properties of the conditional instruction
conditional_block->end = pc;
conditional_block->end_class = insn_end_class;
// Add a label to the instruction after the conditional instruction
const BlockId endif_block_id{AddLabel(conditional_block, block.stack, pc + 1, function_id)};
Block* const endif_block{AddLabel(conditional_block, block->stack, pc + 1, function_id)};
// Branch to the next instruction from the virtual block
virtual_block.branch_false = endif_block_id;
block->branch_false = endif_block;
// And branch to it from the conditional instruction if it is a branch
if (insn_end_class == EndClass::Branch) {
conditional_block.cond = true;
conditional_block.branch_true = endif_block_id;
conditional_block.branch_false = UNREACHABLE_BLOCK_ID;
conditional_block->cond = IR::Condition{true};
conditional_block->branch_true = endif_block;
conditional_block->branch_false = nullptr;
}
// Finally insert the condition block into the list of blocks
functions[function_id].blocks.insert(*conditional_block);
}
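The impersonation is easier to follow with concrete, hypothetical addresses. Suppose a block begins directly on a conditional EXIT at 0x48:

// Shape of the transformation performed above (addresses are only examples).
//
//   predecessors ---> Block* (now the virtual block, begin = 0x48.Virtual() = 0x44)
//                       |-- cond      --> conditional block at 0x48 (end_class = Exit)
//                       |-- not cond  --> endif label at 0x50, the next instruction
//
//   When insn_end_class is Branch instead of Exit, the conditional block also
//   branches unconditionally to the same endif label.
//
// Because the virtual block reuses the original Block pointer and only its begin
// moves to the virtual address, predecessors that already branch to this block
// do not need their edges rewritten.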
bool CFG::AnalyzeBranch(Block& block, FunctionId function_id, Location pc, Instruction inst,
bool CFG::AnalyzeBranch(Block* block, FunctionId function_id, Location pc, Instruction inst,
Opcode opcode) {
if (inst.branch.is_cbuf) {
throw NotImplementedException("Branch with constant buffer offset");
@@ -500,21 +382,21 @@ bool CFG::AnalyzeBranch(Block& block, FunctionId function_id, Location pc, Instr
const bool has_flow_test{HasFlowTest(opcode)};
const IR::FlowTest flow_test{has_flow_test ? inst.branch.flow_test.Value() : IR::FlowTest::T};
if (pred != Predicate{true} || flow_test != IR::FlowTest::T) {
block.cond = IR::Condition(flow_test, static_cast<IR::Pred>(pred.index), pred.negated);
block.branch_false = AddLabel(block, block.stack, pc + 1, function_id);
block->cond = IR::Condition(flow_test, static_cast<IR::Pred>(pred.index), pred.negated);
block->branch_false = AddLabel(block, block->stack, pc + 1, function_id);
} else {
block.cond = true;
block->cond = IR::Condition{true};
}
return true;
}
void CFG::AnalyzeBRA(Block& block, FunctionId function_id, Location pc, Instruction inst,
void CFG::AnalyzeBRA(Block* block, FunctionId function_id, Location pc, Instruction inst,
bool is_absolute) {
const Location bra_pc{is_absolute ? inst.branch.Absolute() : BranchOffset(pc, inst)};
block.branch_true = AddLabel(block, block.stack, bra_pc, function_id);
block->branch_true = AddLabel(block, block->stack, bra_pc, function_id);
}
void CFG::AnalyzeBRX(Block&, Location, Instruction, bool is_absolute) {
void CFG::AnalyzeBRX(Block*, Location, Instruction, bool is_absolute) {
throw NotImplementedException("{}", is_absolute ? "JMX" : "BRX");
}
@@ -528,7 +410,7 @@ void CFG::AnalyzeCAL(Location pc, Instruction inst, bool is_absolute) {
}
}
CFG::AnalysisState CFG::AnalyzeEXIT(Block& block, FunctionId function_id, Location pc,
CFG::AnalysisState CFG::AnalyzeEXIT(Block* block, FunctionId function_id, Location pc,
Instruction inst) {
const IR::FlowTest flow_test{inst.branch.flow_test};
const Predicate pred{inst.Pred()};
@@ -537,41 +419,52 @@ CFG::AnalysisState CFG::AnalyzeEXIT(Block& block, FunctionId function_id, Locati
return AnalysisState::Continue;
}
if (pred != Predicate{true} || flow_test != IR::FlowTest::T) {
if (block.stack.Peek(Token::PEXIT).has_value()) {
if (block->stack.Peek(Token::PEXIT).has_value()) {
throw NotImplementedException("Conditional EXIT with PEXIT token");
}
const IR::Condition cond{flow_test, static_cast<IR::Pred>(pred.index), pred.negated};
AnalyzeCondInst(block, function_id, pc, EndClass::Exit, cond);
return AnalysisState::Branch;
}
if (const std::optional<Location> exit_pc{block.stack.Peek(Token::PEXIT)}) {
const Stack popped_stack{block.stack.Remove(Token::PEXIT)};
block.cond = true;
block.branch_true = AddLabel(block, popped_stack, *exit_pc, function_id);
block.branch_false = UNREACHABLE_BLOCK_ID;
if (const std::optional<Location> exit_pc{block->stack.Peek(Token::PEXIT)}) {
const Stack popped_stack{block->stack.Remove(Token::PEXIT)};
block->cond = IR::Condition{true};
block->branch_true = AddLabel(block, popped_stack, *exit_pc, function_id);
block->branch_false = nullptr;
return AnalysisState::Branch;
}
block.end = pc;
block.end_class = EndClass::Exit;
block->end = pc;
block->end_class = EndClass::Exit;
return AnalysisState::Branch;
}
BlockId CFG::AddLabel(const Block& block, Stack stack, Location pc, FunctionId function_id) {
Block* CFG::AddLabel(Block* block, Stack stack, Location pc, FunctionId function_id) {
Function& function{functions[function_id]};
if (block.begin == pc) {
return block.id;
if (block->begin == pc) {
// Jumps to itself
return block;
}
const auto target{std::ranges::find(function.blocks_data, pc, &Block::begin)};
if (target != function.blocks_data.end()) {
return target->id;
if (const auto it{function.blocks.find(pc, Compare{})}; it != function.blocks.end()) {
// Block already exists and it has been visited
return &*it;
}
const BlockId block_id{++function.current_block_id};
// TODO: FIX DANGLING BLOCKS
Block* const new_block{block_pool.Create(Block{
.begin{pc},
.end{pc},
.end_class{EndClass::Branch},
.stack{stack},
.cond{IR::Condition{true}},
.branch_true{nullptr},
.branch_false{nullptr},
.ir{nullptr},
})};
function.labels.push_back(Label{
.address{pc},
.block_id{block_id},
.block{new_block},
.stack{std::move(stack)},
});
return block_id;
return new_block;
}
std::string CFG::Dot() const {
@@ -581,18 +474,12 @@ std::string CFG::Dot() const {
for (const Function& function : functions) {
dot += fmt::format("\tsubgraph cluster_{} {{\n", function.entrypoint);
dot += fmt::format("\t\tnode [style=filled];\n");
for (const u32 block_index : function.blocks) {
const Block& block{function.blocks_data[block_index]};
for (const Block& block : function.blocks) {
const std::string name{NameOf(block)};
const auto add_branch = [&](BlockId branch_id, bool add_label) {
const auto it{std::ranges::find(function.blocks_data, branch_id, &Block::id)};
dot += fmt::format("\t\t{}->", name);
if (it == function.blocks_data.end()) {
dot += fmt::format("\"Unknown label {}\"", branch_id);
} else {
dot += NameOf(*it);
};
if (add_label && block.cond != true && block.cond != false) {
const auto add_branch = [&](Block* branch, bool add_label) {
dot += fmt::format("\t\t{}->{}", name, NameOf(*branch));
if (add_label && block.cond != IR::Condition{true} &&
block.cond != IR::Condition{false}) {
dot += fmt::format(" [label=\"{}\"]", block.cond);
}
dot += '\n';
@@ -600,10 +487,10 @@ std::string CFG::Dot() const {
dot += fmt::format("\t\t{};\n", name);
switch (block.end_class) {
case EndClass::Branch:
if (block.cond != false) {
if (block.cond != IR::Condition{false}) {
add_branch(block.branch_true, true);
}
if (block.cond != true) {
if (block.cond != IR::Condition{true}) {
add_branch(block.branch_false, false);
}
break;
@@ -619,12 +506,6 @@ std::string CFG::Dot() const {
node_uid);
++node_uid;
break;
case EndClass::Unreachable:
dot += fmt::format("\t\t{}->N{};\n", name, node_uid);
dot += fmt::format(
"\t\tN{} [label=\"Unreachable\"][shape=square][style=stripped];\n", node_uid);
++node_uid;
break;
}
}
if (function.entrypoint == 8) {
@@ -635,10 +516,11 @@ std::string CFG::Dot() const {
dot += "\t}\n";
}
if (!functions.empty()) {
if (functions.front().blocks.empty()) {
auto& function{functions.front()};
if (function.blocks.empty()) {
dot += "Start;\n";
} else {
dot += fmt::format("\tStart -> {};\n", NameOf(functions.front().blocks_data.front()));
dot += fmt::format("\tStart -> {};\n", NameOf(*function.blocks.begin()));
}
dot += fmt::format("\tStart [shape=diamond];\n");
}

View File

@@ -11,25 +11,27 @@
#include <vector>
#include <boost/container/small_vector.hpp>
#include <boost/intrusive/set.hpp>
#include "shader_recompiler/environment.h"
#include "shader_recompiler/frontend/ir/condition.h"
#include "shader_recompiler/frontend/maxwell/instruction.h"
#include "shader_recompiler/frontend/maxwell/location.h"
#include "shader_recompiler/frontend/maxwell/opcodes.h"
#include "shader_recompiler/object_pool.h"
namespace Shader::IR {
class Block;
}
namespace Shader::Maxwell::Flow {
using BlockId = u32;
using FunctionId = size_t;
constexpr BlockId UNREACHABLE_BLOCK_ID{static_cast<u32>(-1)};
enum class EndClass {
Branch,
Exit,
Return,
Unreachable,
};
enum class Token {
@@ -59,58 +61,37 @@ private:
boost::container::small_vector<StackEntry, 3> entries;
};
struct Block {
struct Block : boost::intrusive::set_base_hook<
// Normal link is ~2.5% faster compared to safe link
boost::intrusive::link_mode<boost::intrusive::normal_link>> {
[[nodiscard]] bool Contains(Location pc) const noexcept;
bool operator<(const Block& rhs) const noexcept {
return begin < rhs.begin;
}
Location begin;
Location end;
EndClass end_class;
BlockId id;
Stack stack;
IR::Condition cond;
BlockId branch_true;
BlockId branch_false;
boost::container::small_vector<BlockId, 4> imm_predecessors;
boost::container::small_vector<BlockId, 8> dominance_frontiers;
union {
bool post_order_visited{false};
Block* imm_dominator;
};
Block* branch_true;
Block* branch_false;
IR::Block* ir;
};
struct Label {
Location address;
BlockId block_id;
Block* block;
Stack stack;
};
struct Function {
Function(Location start_address);
void BuildBlocksMap();
void BuildImmediatePredecessors();
void BuildPostOrder();
void BuildImmediateDominators();
void BuildDominanceFrontier();
[[nodiscard]] size_t NumBlocks() const noexcept {
return static_cast<size_t>(current_block_id) + 1;
}
Location entrypoint;
BlockId current_block_id{0};
boost::container::small_vector<Label, 16> labels;
boost::container::small_vector<u32, 0x130> blocks;
boost::container::small_vector<Block, 0x130> blocks_data;
// Translates from BlockId to block index
boost::container::small_vector<Block*, 0x130> blocks_map;
boost::container::small_vector<u32, 0x130> post_order_blocks;
boost::container::small_vector<BlockId, 0x130> post_order_map;
boost::intrusive::set<Block> blocks;
};
class CFG {
@@ -120,7 +101,7 @@ class CFG {
};
public:
explicit CFG(Environment& env, Location start_address);
explicit CFG(Environment& env, ObjectPool<Block>& block_pool, Location start_address);
CFG& operator=(const CFG&) = delete;
CFG(const CFG&) = delete;
@@ -133,35 +114,37 @@ public:
[[nodiscard]] std::span<const Function> Functions() const noexcept {
return std::span(functions.data(), functions.size());
}
[[nodiscard]] std::span<Function> Functions() noexcept {
return std::span(functions.data(), functions.size());
}
private:
void VisitFunctions(Location start_address);
void AnalyzeLabel(FunctionId function_id, Label& label);
/// Inspect already visited blocks.
/// Return true when the block has already been visited
bool InspectVisitedBlocks(FunctionId function_id, const Label& label);
AnalysisState AnalyzeInst(Block& block, FunctionId function_id, Location pc);
AnalysisState AnalyzeInst(Block* block, FunctionId function_id, Location pc);
void AnalyzeCondInst(Block& block, FunctionId function_id, Location pc, EndClass insn_end_class,
void AnalyzeCondInst(Block* block, FunctionId function_id, Location pc, EndClass insn_end_class,
IR::Condition cond);
/// Return true when the branch instruction is confirmed to be a branch
bool AnalyzeBranch(Block& block, FunctionId function_id, Location pc, Instruction inst,
bool AnalyzeBranch(Block* block, FunctionId function_id, Location pc, Instruction inst,
Opcode opcode);
void AnalyzeBRA(Block& block, FunctionId function_id, Location pc, Instruction inst,
void AnalyzeBRA(Block* block, FunctionId function_id, Location pc, Instruction inst,
bool is_absolute);
void AnalyzeBRX(Block& block, Location pc, Instruction inst, bool is_absolute);
void AnalyzeBRX(Block* block, Location pc, Instruction inst, bool is_absolute);
void AnalyzeCAL(Location pc, Instruction inst, bool is_absolute);
AnalysisState AnalyzeEXIT(Block& block, FunctionId function_id, Location pc, Instruction inst);
AnalysisState AnalyzeEXIT(Block* block, FunctionId function_id, Location pc, Instruction inst);
/// Return the branch target block id
BlockId AddLabel(const Block& block, Stack stack, Location pc, FunctionId function_id);
Block* AddLabel(Block* block, Stack stack, Location pc, FunctionId function_id);
Environment& env;
ObjectPool<Block>& block_pool;
boost::container::small_vector<Function, 1> functions;
FunctionId current_function_id{0};
};
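Blocks are now allocated from an externally owned ObjectPool<Block> and the intrusive set merely links them, so every Block* edge stays stable for the lifetime of the pool. The pool itself (shader_recompiler/object_pool.h) is not part of this diff; a hypothetical stand-in with the same Create() shape, shown only to illustrate the ownership model:

#include <deque>
#include <utility>

// Illustrative only; the real ObjectPool<T> may differ.
template <typename T>
class MiniObjectPool {
public:
    template <typename... Args>
    T* Create(Args&&... args) {
        // std::deque never relocates existing elements on emplace_back,
        // so returned pointers stay valid for the pool's lifetime.
        return &storage.emplace_back(std::forward<Args>(args)...);
    }

private:
    std::deque<T> storage;
};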

View File

@@ -15,7 +15,7 @@
namespace Shader::Maxwell {
class Location {
static constexpr u32 VIRTUAL_OFFSET{std::numeric_limits<u32>::max()};
static constexpr u32 VIRTUAL_BIAS{4};
public:
constexpr Location() = default;
@@ -27,12 +27,18 @@ public:
Align();
}
constexpr Location Virtual() const noexcept {
Location virtual_location;
virtual_location.offset = offset - VIRTUAL_BIAS;
return virtual_location;
}
[[nodiscard]] constexpr u32 Offset() const noexcept {
return offset;
}
[[nodiscard]] constexpr bool IsVirtual() const {
return offset == VIRTUAL_OFFSET;
return offset % 8 == VIRTUAL_BIAS;
}
constexpr auto operator<=>(const Location&) const noexcept = default;
@@ -89,7 +95,7 @@ private:
offset -= 8 + (offset % 32 == 8 ? 8 : 0);
}
u32 offset{VIRTUAL_OFFSET};
u32 offset{0xcccccccc};
};
} // namespace Shader::Maxwell
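Real Maxwell code addresses are always multiples of 8, so subtracting VIRTUAL_BIAS{4} from a block's begin produces an offset no real instruction can have, which is exactly what the new IsVirtual() keys on; the virtual location also still sorts immediately before the block it impersonates. A quick check with a hypothetical address:

// Hypothetical real address 0x50; its virtual counterpart is 0x4c.
static_assert((0x50u - 4u) % 8u == 4u, "only Virtual() yields offsets with remainder 4 mod 8");
static_assert((0x50u - 4u) < 0x50u, "the virtual location sorts just before the real block");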

View File

@@ -4,57 +4,58 @@
#include <algorithm>
#include <memory>
#include <vector>
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/frontend/ir/structured_control_flow.h"
#include "shader_recompiler/frontend/maxwell/program.h"
#include "shader_recompiler/frontend/maxwell/termination_code.h"
#include "shader_recompiler/frontend/maxwell/translate/translate.h"
#include "shader_recompiler/ir_opt/passes.h"
namespace Shader::Maxwell {
namespace {
void TranslateCode(ObjectPool<IR::Inst>& inst_pool, ObjectPool<IR::Block>& block_pool,
Environment& env, const Flow::Function& cfg_function, IR::Function& function,
std::span<IR::Block*> block_map) {
IR::BlockList TranslateCode(ObjectPool<IR::Inst>& inst_pool, ObjectPool<IR::Block>& block_pool,
Environment& env, Flow::Function& cfg_function) {
const size_t num_blocks{cfg_function.blocks.size()};
function.blocks.reserve(num_blocks);
for (const Flow::BlockId block_id : cfg_function.blocks) {
const Flow::Block& flow_block{cfg_function.blocks_data[block_id]};
IR::Block* const ir_block{block_pool.Create(Translate(inst_pool, env, flow_block))};
block_map[flow_block.id] = ir_block;
function.blocks.emplace_back(ir_block);
}
}
void EmitTerminationInsts(const Flow::Function& cfg_function,
std::span<IR::Block* const> block_map) {
for (const Flow::BlockId block_id : cfg_function.blocks) {
const Flow::Block& flow_block{cfg_function.blocks_data[block_id]};
EmitTerminationCode(flow_block, block_map);
}
}
void TranslateFunction(ObjectPool<IR::Inst>& inst_pool, ObjectPool<IR::Block>& block_pool,
Environment& env, const Flow::Function& cfg_function,
IR::Function& function) {
std::vector<IR::Block*> block_map;
block_map.resize(cfg_function.blocks_data.size());
TranslateCode(inst_pool, block_pool, env, cfg_function, function, block_map);
EmitTerminationInsts(cfg_function, block_map);
std::vector<IR::Block*> blocks(cfg_function.blocks.size());
std::ranges::for_each(cfg_function.blocks, [&, i = size_t{0}](auto& cfg_block) mutable {
const u32 begin{cfg_block.begin.Offset()};
const u32 end{cfg_block.end.Offset()};
blocks[i] = block_pool.Create(inst_pool, begin, end);
cfg_block.ir = blocks[i];
++i;
});
std::ranges::for_each(cfg_function.blocks, [&, i = size_t{0}](auto& cfg_block) mutable {
IR::Block* const block{blocks[i]};
++i;
if (cfg_block.end_class != Flow::EndClass::Branch) {
block->SetReturn();
} else if (cfg_block.cond == IR::Condition{true}) {
block->SetBranch(cfg_block.branch_true->ir);
} else if (cfg_block.cond == IR::Condition{false}) {
block->SetBranch(cfg_block.branch_false->ir);
} else {
block->SetBranches(cfg_block.cond, cfg_block.branch_true->ir,
cfg_block.branch_false->ir);
}
});
return IR::VisitAST(inst_pool, block_pool, blocks,
[&](IR::Block* block) { Translate(env, block); });
}
} // Anonymous namespace
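TranslateCode now lowers a function in two passes: the first pass creates an IR::Block for every Flow::Block and caches it in cfg_block.ir, and only the second pass wires the terminators, so a forward branch can always dereference branch_true->ir or branch_false->ir. The same ordering concern, reduced to a toy example with hypothetical types:

#include <cstddef>
#include <vector>

struct SrcBlock { int target_index{-1}; };  // -1 means "no successor" (return)
struct DstBlock { DstBlock* branch{nullptr}; };

std::vector<DstBlock> Lower(const std::vector<SrcBlock>& src) {
    std::vector<DstBlock> dst(src.size());          // pass 1: create every destination block
    for (std::size_t i = 0; i < src.size(); ++i) {  // pass 2: wire edges; targets already exist
        if (src[i].target_index >= 0) {
            dst[i].branch = &dst[static_cast<std::size_t>(src[i].target_index)];
        }
    }
    return dst;
}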
IR::Program TranslateProgram(ObjectPool<IR::Inst>& inst_pool, ObjectPool<IR::Block>& block_pool,
Environment& env, const Flow::CFG& cfg) {
Environment& env, Flow::CFG& cfg) {
IR::Program program;
auto& functions{program.functions};
functions.reserve(cfg.Functions().size());
for (const Flow::Function& cfg_function : cfg.Functions()) {
TranslateFunction(inst_pool, block_pool, env, cfg_function, functions.emplace_back());
for (Flow::Function& cfg_function : cfg.Functions()) {
functions.push_back(IR::Function{
.blocks{TranslateCode(inst_pool, block_pool, env, cfg_function)},
});
}
fmt::print(stdout, "No optimizations: {}", IR::DumpProgram(program));
std::ranges::for_each(functions, Optimization::SsaRewritePass);
for (IR::Function& function : functions) {
Optimization::Invoke(Optimization::GlobalMemoryToStorageBufferPass, function);

View File

@@ -19,6 +19,6 @@ namespace Shader::Maxwell {
[[nodiscard]] IR::Program TranslateProgram(ObjectPool<IR::Inst>& inst_pool,
ObjectPool<IR::Block>& block_pool, Environment& env,
const Flow::CFG& cfg);
Flow::CFG& cfg);
} // namespace Shader::Maxwell

View File

@@ -1,86 +0,0 @@
// Copyright 2021 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.
#include <span>
#include "shader_recompiler/exception.h"
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/frontend/ir/ir_emitter.h"
#include "shader_recompiler/frontend/maxwell/control_flow.h"
#include "shader_recompiler/frontend/maxwell/termination_code.h"
namespace Shader::Maxwell {
static void EmitExit(IR::IREmitter& ir) {
ir.Exit();
}
static IR::U1 GetFlowTest(IR::FlowTest flow_test, IR::IREmitter& ir) {
switch (flow_test) {
case IR::FlowTest::T:
return ir.Imm1(true);
case IR::FlowTest::F:
return ir.Imm1(false);
case IR::FlowTest::NE:
// FIXME: Verify this
return ir.LogicalNot(ir.GetZFlag());
case IR::FlowTest::NaN:
// FIXME: Verify this
return ir.LogicalAnd(ir.GetSFlag(), ir.GetZFlag());
default:
throw NotImplementedException("Flow test {}", flow_test);
}
}
static IR::U1 GetCond(IR::Condition cond, IR::IREmitter& ir) {
const IR::FlowTest flow_test{cond.FlowTest()};
const auto [pred, pred_negated]{cond.Pred()};
if (pred == IR::Pred::PT && !pred_negated) {
return GetFlowTest(flow_test, ir);
}
if (flow_test == IR::FlowTest::T) {
return ir.GetPred(pred, pred_negated);
}
return ir.LogicalAnd(ir.GetPred(pred, pred_negated), GetFlowTest(flow_test, ir));
}
static void EmitBranch(const Flow::Block& flow_block, std::span<IR::Block* const> block_map,
IR::IREmitter& ir) {
const auto add_immediate_predecessor = [&](Flow::BlockId label) {
block_map[label]->AddImmediatePredecessor(&ir.block);
};
if (flow_block.cond == true) {
add_immediate_predecessor(flow_block.branch_true);
return ir.Branch(block_map[flow_block.branch_true]);
}
if (flow_block.cond == false) {
add_immediate_predecessor(flow_block.branch_false);
return ir.Branch(block_map[flow_block.branch_false]);
}
add_immediate_predecessor(flow_block.branch_true);
add_immediate_predecessor(flow_block.branch_false);
return ir.BranchConditional(GetCond(flow_block.cond, ir), block_map[flow_block.branch_true],
block_map[flow_block.branch_false]);
}
void EmitTerminationCode(const Flow::Block& flow_block, std::span<IR::Block* const> block_map) {
IR::Block* const block{block_map[flow_block.id]};
IR::IREmitter ir(*block);
switch (flow_block.end_class) {
case Flow::EndClass::Branch:
EmitBranch(flow_block, block_map, ir);
break;
case Flow::EndClass::Exit:
EmitExit(ir);
break;
case Flow::EndClass::Return:
ir.Return();
break;
case Flow::EndClass::Unreachable:
ir.Unreachable();
break;
}
}
} // namespace Shader::Maxwell

View File

@@ -1,17 +0,0 @@
// Copyright 2021 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.
#pragma once
#include <span>
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/frontend/maxwell/control_flow.h"
namespace Shader::Maxwell {
/// Emit termination instructions and collect immediate predecessors
void EmitTerminationCode(const Flow::Block& flow_block, std::span<IR::Block* const> block_map);
} // namespace Shader::Maxwell

View File

@@ -28,7 +28,7 @@ void SHL(TranslatorVisitor& v, u64 insn, const IR::U32& unsafe_shift) {
IR::U32 result;
if (shl.w != 0) {
// When .W is set, the shift value is wrapped
// To emulate this we just have to clamp it ourselves.
// To emulate this we just have to wrap it ourselves.
const IR::U32 shift{v.ir.BitwiseAnd(unsafe_shift, v.ir.Imm32(31))};
result = v.ir.ShiftLeftLogical(base, shift);
} else {

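The comment fix matters: masking with 31 wraps the shift amount modulo 32, it does not clamp it, and the two behaviours disagree for any amount above 31. A quick worked check (values are only examples):

#include <cstdint>
#include <cstdio>

int main() {
    const std::uint32_t base = 1;
    const std::uint32_t unsafe_shift = 33;
    const std::uint32_t wrapped = unsafe_shift & 31;                      // .W semantics: 33 -> 1
    const std::uint32_t clamped = unsafe_shift > 31 ? 31 : unsafe_shift;  // what "clamp" would mean
    std::printf("wrapped: %u, clamped: %u\n", base << wrapped, base << clamped);
    // prints "wrapped: 2, clamped: 2147483648"
}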
View File

@@ -23,14 +23,13 @@ static void Invoke(TranslatorVisitor& visitor, Location pc, u64 insn) {
}
}
IR::Block Translate(ObjectPool<IR::Inst>& inst_pool, Environment& env,
const Flow::Block& flow_block) {
IR::Block block{inst_pool, flow_block.begin.Offset(), flow_block.end.Offset()};
TranslatorVisitor visitor{env, block};
const Location pc_end{flow_block.end};
Location pc{flow_block.begin};
while (pc != pc_end) {
void Translate(Environment& env, IR::Block* block) {
if (block->IsVirtual()) {
return;
}
TranslatorVisitor visitor{env, *block};
const Location pc_end{block->LocationEnd()};
for (Location pc = block->LocationBegin(); pc != pc_end; ++pc) {
const u64 insn{env.ReadInstruction(pc.Offset())};
const Opcode opcode{Decode(insn)};
switch (opcode) {
@@ -43,9 +42,7 @@ IR::Block Translate(ObjectPool<IR::Inst>& inst_pool, Environment& env,
default:
throw LogicError("Invalid opcode {}", opcode);
}
++pc;
}
return block;
}
} // namespace Shader::Maxwell

View File

@@ -6,14 +6,9 @@
#include "shader_recompiler/environment.h"
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/frontend/ir/microinstruction.h"
#include "shader_recompiler/frontend/maxwell/control_flow.h"
#include "shader_recompiler/frontend/maxwell/location.h"
#include "shader_recompiler/object_pool.h"
namespace Shader::Maxwell {
[[nodiscard]] IR::Block Translate(ObjectPool<IR::Inst>& inst_pool, Environment& env,
const Flow::Block& flow_block);
void Translate(Environment& env, IR::Block* block);
} // namespace Shader::Maxwell