//===-- NVPTXReplaceImageHandles.cpp - Replace image handles for Fermi ----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// On Fermi, image handles are not supported. To work around this, we traverse
// the machine code and replace image handles with concrete symbols. For this
// to work reliably, inlining of all function calls must be performed.
//
//===----------------------------------------------------------------------===//

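// For illustration only (not part of the pass logic): a surface or texture
// access that was selected with a register handle, e.g. SULD_1D_I32_CLAMP_R
// or TEX_2D_F32_F32_RR, is mapped by the tables below to its immediate form
// (SULD_1D_I32_CLAMP_I / TEX_2D_F32_F32_II), and the handle operand is
// replaced with the symbol of the underlying texref/surfref.
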
#include "NVPTX.h"
|
|
#include "NVPTXMachineFunctionInfo.h"
|
|
#include "NVPTXSubtarget.h"
|
|
#include "NVPTXTargetMachine.h"
|
|
#include "MCTargetDesc/NVPTXBaseInfo.h"
|
|
#include "llvm/ADT/DenseSet.h"
|
|
#include "llvm/CodeGen/MachineFunction.h"
|
|
#include "llvm/CodeGen/MachineFunctionPass.h"
|
|
#include "llvm/CodeGen/MachineRegisterInfo.h"
|
|
#include "llvm/Support/raw_ostream.h"
|
|
|
|
using namespace llvm;
|
|
|
|
namespace {
class NVPTXReplaceImageHandles : public MachineFunctionPass {
private:
  static char ID;
  DenseSet<MachineInstr *> InstrsToRemove;

public:
  NVPTXReplaceImageHandles();

  bool runOnMachineFunction(MachineFunction &MF) override;

  StringRef getPassName() const override {
    return "NVPTX Replace Image Handles";
  }

private:
  bool processInstr(MachineInstr &MI);
  bool replaceImageHandle(MachineOperand &Op, MachineFunction &MF);
  bool findIndexForHandle(MachineOperand &Op, MachineFunction &MF,
                          unsigned &Idx);
};
}

char NVPTXReplaceImageHandles::ID = 0;

NVPTXReplaceImageHandles::NVPTXReplaceImageHandles()
    : MachineFunctionPass(ID) {}

bool NVPTXReplaceImageHandles::runOnMachineFunction(MachineFunction &MF) {
  bool Changed = false;
  InstrsToRemove.clear();

  for (MachineBasicBlock &MBB : MF)
    for (MachineInstr &MI : MBB)
      Changed |= processInstr(MI);

  // Now clean up any handle-access instructions.
  // This is needed in debug mode when code cleanup passes are not executed,
  // but we need the handle accesses to be eliminated because they are not
  // valid instructions when image handles are disabled.
  for (MachineInstr *MI : InstrsToRemove) {
    unsigned DefReg = MI->getOperand(0).getReg();
    // Only those that are not used can be removed.
    if (MF.getRegInfo().use_nodbg_empty(DefReg))
      MI->eraseFromParent();
  }
  return Changed;
}

static unsigned suldRegisterToIndexOpcode(unsigned RegOC) {
|
|
switch (RegOC) {
|
|
case NVPTX::SULD_1D_I8_CLAMP_R:
|
|
return NVPTX::SULD_1D_I8_CLAMP_I;
|
|
case NVPTX::SULD_1D_I16_CLAMP_R:
|
|
return NVPTX::SULD_1D_I16_CLAMP_I;
|
|
case NVPTX::SULD_1D_I32_CLAMP_R:
|
|
return NVPTX::SULD_1D_I32_CLAMP_I;
|
|
case NVPTX::SULD_1D_I64_CLAMP_R:
|
|
return NVPTX::SULD_1D_I64_CLAMP_I;
|
|
case NVPTX::SULD_1D_ARRAY_I8_CLAMP_R:
|
|
return NVPTX::SULD_1D_ARRAY_I8_CLAMP_I;
|
|
case NVPTX::SULD_1D_ARRAY_I16_CLAMP_R:
|
|
return NVPTX::SULD_1D_ARRAY_I16_CLAMP_I;
|
|
case NVPTX::SULD_1D_ARRAY_I32_CLAMP_R:
|
|
return NVPTX::SULD_1D_ARRAY_I32_CLAMP_I;
|
|
case NVPTX::SULD_1D_ARRAY_I64_CLAMP_R:
|
|
return NVPTX::SULD_1D_ARRAY_I64_CLAMP_I;
|
|
case NVPTX::SULD_2D_I8_CLAMP_R:
|
|
return NVPTX::SULD_2D_I8_CLAMP_I;
|
|
case NVPTX::SULD_2D_I16_CLAMP_R:
|
|
return NVPTX::SULD_2D_I16_CLAMP_I;
|
|
case NVPTX::SULD_2D_I32_CLAMP_R:
|
|
return NVPTX::SULD_2D_I32_CLAMP_I;
|
|
case NVPTX::SULD_2D_I64_CLAMP_R:
|
|
return NVPTX::SULD_2D_I64_CLAMP_I;
|
|
case NVPTX::SULD_2D_ARRAY_I8_CLAMP_R:
|
|
return NVPTX::SULD_2D_ARRAY_I8_CLAMP_I;
|
|
case NVPTX::SULD_2D_ARRAY_I16_CLAMP_R:
|
|
return NVPTX::SULD_2D_ARRAY_I16_CLAMP_I;
|
|
case NVPTX::SULD_2D_ARRAY_I32_CLAMP_R:
|
|
return NVPTX::SULD_2D_ARRAY_I32_CLAMP_I;
|
|
case NVPTX::SULD_2D_ARRAY_I64_CLAMP_R:
|
|
return NVPTX::SULD_2D_ARRAY_I64_CLAMP_I;
|
|
case NVPTX::SULD_3D_I8_CLAMP_R:
|
|
return NVPTX::SULD_3D_I8_CLAMP_I;
|
|
case NVPTX::SULD_3D_I16_CLAMP_R:
|
|
return NVPTX::SULD_3D_I16_CLAMP_I;
|
|
case NVPTX::SULD_3D_I32_CLAMP_R:
|
|
return NVPTX::SULD_3D_I32_CLAMP_I;
|
|
case NVPTX::SULD_3D_I64_CLAMP_R:
|
|
return NVPTX::SULD_3D_I64_CLAMP_I;
|
|
case NVPTX::SULD_1D_V2I8_CLAMP_R:
|
|
return NVPTX::SULD_1D_V2I8_CLAMP_I;
|
|
case NVPTX::SULD_1D_V2I16_CLAMP_R:
|
|
return NVPTX::SULD_1D_V2I16_CLAMP_I;
|
|
case NVPTX::SULD_1D_V2I32_CLAMP_R:
|
|
return NVPTX::SULD_1D_V2I32_CLAMP_I;
|
|
case NVPTX::SULD_1D_V2I64_CLAMP_R:
|
|
return NVPTX::SULD_1D_V2I64_CLAMP_I;
|
|
case NVPTX::SULD_1D_ARRAY_V2I8_CLAMP_R:
|
|
return NVPTX::SULD_1D_ARRAY_V2I8_CLAMP_I;
|
|
case NVPTX::SULD_1D_ARRAY_V2I16_CLAMP_R:
|
|
return NVPTX::SULD_1D_ARRAY_V2I16_CLAMP_I;
|
|
case NVPTX::SULD_1D_ARRAY_V2I32_CLAMP_R:
|
|
return NVPTX::SULD_1D_ARRAY_V2I32_CLAMP_I;
|
|
case NVPTX::SULD_1D_ARRAY_V2I64_CLAMP_R:
|
|
return NVPTX::SULD_1D_ARRAY_V2I64_CLAMP_I;
|
|
case NVPTX::SULD_2D_V2I8_CLAMP_R:
|
|
return NVPTX::SULD_2D_V2I8_CLAMP_I;
|
|
case NVPTX::SULD_2D_V2I16_CLAMP_R:
|
|
return NVPTX::SULD_2D_V2I16_CLAMP_I;
|
|
case NVPTX::SULD_2D_V2I32_CLAMP_R:
|
|
return NVPTX::SULD_2D_V2I32_CLAMP_I;
|
|
case NVPTX::SULD_2D_V2I64_CLAMP_R:
|
|
return NVPTX::SULD_2D_V2I64_CLAMP_I;
|
|
case NVPTX::SULD_2D_ARRAY_V2I8_CLAMP_R:
|
|
return NVPTX::SULD_2D_ARRAY_V2I8_CLAMP_I;
|
|
case NVPTX::SULD_2D_ARRAY_V2I16_CLAMP_R:
|
|
return NVPTX::SULD_2D_ARRAY_V2I16_CLAMP_I;
|
|
case NVPTX::SULD_2D_ARRAY_V2I32_CLAMP_R:
|
|
return NVPTX::SULD_2D_ARRAY_V2I32_CLAMP_I;
|
|
case NVPTX::SULD_2D_ARRAY_V2I64_CLAMP_R:
|
|
return NVPTX::SULD_2D_ARRAY_V2I64_CLAMP_I;
|
|
case NVPTX::SULD_3D_V2I8_CLAMP_R:
|
|
return NVPTX::SULD_3D_V2I8_CLAMP_I;
|
|
case NVPTX::SULD_3D_V2I16_CLAMP_R:
|
|
return NVPTX::SULD_3D_V2I16_CLAMP_I;
|
|
case NVPTX::SULD_3D_V2I32_CLAMP_R:
|
|
return NVPTX::SULD_3D_V2I32_CLAMP_I;
|
|
case NVPTX::SULD_3D_V2I64_CLAMP_R:
|
|
return NVPTX::SULD_3D_V2I64_CLAMP_I;
|
|
case NVPTX::SULD_1D_V4I8_CLAMP_R:
|
|
return NVPTX::SULD_1D_V4I8_CLAMP_I;
|
|
case NVPTX::SULD_1D_V4I16_CLAMP_R:
|
|
return NVPTX::SULD_1D_V4I16_CLAMP_I;
|
|
case NVPTX::SULD_1D_V4I32_CLAMP_R:
|
|
return NVPTX::SULD_1D_V4I32_CLAMP_I;
|
|
case NVPTX::SULD_1D_ARRAY_V4I8_CLAMP_R:
|
|
return NVPTX::SULD_1D_ARRAY_V4I8_CLAMP_I;
|
|
case NVPTX::SULD_1D_ARRAY_V4I16_CLAMP_R:
|
|
return NVPTX::SULD_1D_ARRAY_V4I16_CLAMP_I;
|
|
case NVPTX::SULD_1D_ARRAY_V4I32_CLAMP_R:
|
|
return NVPTX::SULD_1D_ARRAY_V4I32_CLAMP_I;
|
|
case NVPTX::SULD_2D_V4I8_CLAMP_R:
|
|
return NVPTX::SULD_2D_V4I8_CLAMP_I;
|
|
case NVPTX::SULD_2D_V4I16_CLAMP_R:
|
|
return NVPTX::SULD_2D_V4I16_CLAMP_I;
|
|
case NVPTX::SULD_2D_V4I32_CLAMP_R:
|
|
return NVPTX::SULD_2D_V4I32_CLAMP_I;
|
|
case NVPTX::SULD_2D_ARRAY_V4I8_CLAMP_R:
|
|
return NVPTX::SULD_2D_ARRAY_V4I8_CLAMP_I;
|
|
case NVPTX::SULD_2D_ARRAY_V4I16_CLAMP_R:
|
|
return NVPTX::SULD_2D_ARRAY_V4I16_CLAMP_I;
|
|
case NVPTX::SULD_2D_ARRAY_V4I32_CLAMP_R:
|
|
return NVPTX::SULD_2D_ARRAY_V4I32_CLAMP_I;
|
|
case NVPTX::SULD_3D_V4I8_CLAMP_R:
|
|
return NVPTX::SULD_3D_V4I8_CLAMP_I;
|
|
case NVPTX::SULD_3D_V4I16_CLAMP_R:
|
|
return NVPTX::SULD_3D_V4I16_CLAMP_I;
|
|
case NVPTX::SULD_3D_V4I32_CLAMP_R:
|
|
return NVPTX::SULD_3D_V4I32_CLAMP_I;
|
|
case NVPTX::SULD_1D_I8_TRAP_R:
|
|
return NVPTX::SULD_1D_I8_TRAP_I;
|
|
case NVPTX::SULD_1D_I16_TRAP_R:
|
|
return NVPTX::SULD_1D_I16_TRAP_I;
|
|
case NVPTX::SULD_1D_I32_TRAP_R:
|
|
return NVPTX::SULD_1D_I32_TRAP_I;
|
|
case NVPTX::SULD_1D_I64_TRAP_R:
|
|
return NVPTX::SULD_1D_I64_TRAP_I;
|
|
case NVPTX::SULD_1D_ARRAY_I8_TRAP_R:
|
|
return NVPTX::SULD_1D_ARRAY_I8_TRAP_I;
|
|
case NVPTX::SULD_1D_ARRAY_I16_TRAP_R:
|
|
return NVPTX::SULD_1D_ARRAY_I16_TRAP_I;
|
|
case NVPTX::SULD_1D_ARRAY_I32_TRAP_R:
|
|
return NVPTX::SULD_1D_ARRAY_I32_TRAP_I;
|
|
case NVPTX::SULD_1D_ARRAY_I64_TRAP_R:
|
|
return NVPTX::SULD_1D_ARRAY_I64_TRAP_I;
|
|
case NVPTX::SULD_2D_I8_TRAP_R:
|
|
return NVPTX::SULD_2D_I8_TRAP_I;
|
|
case NVPTX::SULD_2D_I16_TRAP_R:
|
|
return NVPTX::SULD_2D_I16_TRAP_I;
|
|
case NVPTX::SULD_2D_I32_TRAP_R:
|
|
return NVPTX::SULD_2D_I32_TRAP_I;
|
|
case NVPTX::SULD_2D_I64_TRAP_R:
|
|
return NVPTX::SULD_2D_I64_TRAP_I;
|
|
case NVPTX::SULD_2D_ARRAY_I8_TRAP_R:
|
|
return NVPTX::SULD_2D_ARRAY_I8_TRAP_I;
|
|
case NVPTX::SULD_2D_ARRAY_I16_TRAP_R:
|
|
return NVPTX::SULD_2D_ARRAY_I16_TRAP_I;
|
|
case NVPTX::SULD_2D_ARRAY_I32_TRAP_R:
|
|
return NVPTX::SULD_2D_ARRAY_I32_TRAP_I;
|
|
case NVPTX::SULD_2D_ARRAY_I64_TRAP_R:
|
|
return NVPTX::SULD_2D_ARRAY_I64_TRAP_I;
|
|
case NVPTX::SULD_3D_I8_TRAP_R:
|
|
return NVPTX::SULD_3D_I8_TRAP_I;
|
|
case NVPTX::SULD_3D_I16_TRAP_R:
|
|
return NVPTX::SULD_3D_I16_TRAP_I;
|
|
case NVPTX::SULD_3D_I32_TRAP_R:
|
|
return NVPTX::SULD_3D_I32_TRAP_I;
|
|
case NVPTX::SULD_3D_I64_TRAP_R:
|
|
return NVPTX::SULD_3D_I64_TRAP_I;
|
|
case NVPTX::SULD_1D_V2I8_TRAP_R:
|
|
return NVPTX::SULD_1D_V2I8_TRAP_I;
|
|
case NVPTX::SULD_1D_V2I16_TRAP_R:
|
|
return NVPTX::SULD_1D_V2I16_TRAP_I;
|
|
case NVPTX::SULD_1D_V2I32_TRAP_R:
|
|
return NVPTX::SULD_1D_V2I32_TRAP_I;
|
|
case NVPTX::SULD_1D_V2I64_TRAP_R:
|
|
return NVPTX::SULD_1D_V2I64_TRAP_I;
|
|
case NVPTX::SULD_1D_ARRAY_V2I8_TRAP_R:
|
|
return NVPTX::SULD_1D_ARRAY_V2I8_TRAP_I;
|
|
case NVPTX::SULD_1D_ARRAY_V2I16_TRAP_R:
|
|
return NVPTX::SULD_1D_ARRAY_V2I16_TRAP_I;
|
|
case NVPTX::SULD_1D_ARRAY_V2I32_TRAP_R:
|
|
return NVPTX::SULD_1D_ARRAY_V2I32_TRAP_I;
|
|
case NVPTX::SULD_1D_ARRAY_V2I64_TRAP_R:
|
|
return NVPTX::SULD_1D_ARRAY_V2I64_TRAP_I;
|
|
case NVPTX::SULD_2D_V2I8_TRAP_R:
|
|
return NVPTX::SULD_2D_V2I8_TRAP_I;
|
|
case NVPTX::SULD_2D_V2I16_TRAP_R:
|
|
return NVPTX::SULD_2D_V2I16_TRAP_I;
|
|
case NVPTX::SULD_2D_V2I32_TRAP_R:
|
|
return NVPTX::SULD_2D_V2I32_TRAP_I;
|
|
case NVPTX::SULD_2D_V2I64_TRAP_R:
|
|
return NVPTX::SULD_2D_V2I64_TRAP_I;
|
|
case NVPTX::SULD_2D_ARRAY_V2I8_TRAP_R:
|
|
return NVPTX::SULD_2D_ARRAY_V2I8_TRAP_I;
|
|
case NVPTX::SULD_2D_ARRAY_V2I16_TRAP_R:
|
|
return NVPTX::SULD_2D_ARRAY_V2I16_TRAP_I;
|
|
case NVPTX::SULD_2D_ARRAY_V2I32_TRAP_R:
|
|
return NVPTX::SULD_2D_ARRAY_V2I32_TRAP_I;
|
|
case NVPTX::SULD_2D_ARRAY_V2I64_TRAP_R:
|
|
return NVPTX::SULD_2D_ARRAY_V2I64_TRAP_I;
|
|
case NVPTX::SULD_3D_V2I8_TRAP_R:
|
|
return NVPTX::SULD_3D_V2I8_TRAP_I;
|
|
case NVPTX::SULD_3D_V2I16_TRAP_R:
|
|
return NVPTX::SULD_3D_V2I16_TRAP_I;
|
|
case NVPTX::SULD_3D_V2I32_TRAP_R:
|
|
return NVPTX::SULD_3D_V2I32_TRAP_I;
|
|
case NVPTX::SULD_3D_V2I64_TRAP_R:
|
|
return NVPTX::SULD_3D_V2I64_TRAP_I;
|
|
case NVPTX::SULD_1D_V4I8_TRAP_R:
|
|
return NVPTX::SULD_1D_V4I8_TRAP_I;
|
|
case NVPTX::SULD_1D_V4I16_TRAP_R:
|
|
return NVPTX::SULD_1D_V4I16_TRAP_I;
|
|
case NVPTX::SULD_1D_V4I32_TRAP_R:
|
|
return NVPTX::SULD_1D_V4I32_TRAP_I;
|
|
case NVPTX::SULD_1D_ARRAY_V4I8_TRAP_R:
|
|
return NVPTX::SULD_1D_ARRAY_V4I8_TRAP_I;
|
|
case NVPTX::SULD_1D_ARRAY_V4I16_TRAP_R:
|
|
return NVPTX::SULD_1D_ARRAY_V4I16_TRAP_I;
|
|
case NVPTX::SULD_1D_ARRAY_V4I32_TRAP_R:
|
|
return NVPTX::SULD_1D_ARRAY_V4I32_TRAP_I;
|
|
case NVPTX::SULD_2D_V4I8_TRAP_R:
|
|
return NVPTX::SULD_2D_V4I8_TRAP_I;
|
|
case NVPTX::SULD_2D_V4I16_TRAP_R:
|
|
return NVPTX::SULD_2D_V4I16_TRAP_I;
|
|
case NVPTX::SULD_2D_V4I32_TRAP_R:
|
|
return NVPTX::SULD_2D_V4I32_TRAP_I;
|
|
case NVPTX::SULD_2D_ARRAY_V4I8_TRAP_R:
|
|
return NVPTX::SULD_2D_ARRAY_V4I8_TRAP_I;
|
|
case NVPTX::SULD_2D_ARRAY_V4I16_TRAP_R:
|
|
return NVPTX::SULD_2D_ARRAY_V4I16_TRAP_I;
|
|
case NVPTX::SULD_2D_ARRAY_V4I32_TRAP_R:
|
|
return NVPTX::SULD_2D_ARRAY_V4I32_TRAP_I;
|
|
case NVPTX::SULD_3D_V4I8_TRAP_R:
|
|
return NVPTX::SULD_3D_V4I8_TRAP_I;
|
|
case NVPTX::SULD_3D_V4I16_TRAP_R:
|
|
return NVPTX::SULD_3D_V4I16_TRAP_I;
|
|
case NVPTX::SULD_3D_V4I32_TRAP_R:
|
|
return NVPTX::SULD_3D_V4I32_TRAP_I;
|
|
case NVPTX::SULD_1D_I8_ZERO_R:
|
|
return NVPTX::SULD_1D_I8_ZERO_I;
|
|
case NVPTX::SULD_1D_I16_ZERO_R:
|
|
return NVPTX::SULD_1D_I16_ZERO_I;
|
|
case NVPTX::SULD_1D_I32_ZERO_R:
|
|
return NVPTX::SULD_1D_I32_ZERO_I;
|
|
case NVPTX::SULD_1D_I64_ZERO_R:
|
|
return NVPTX::SULD_1D_I64_ZERO_I;
|
|
case NVPTX::SULD_1D_ARRAY_I8_ZERO_R:
|
|
return NVPTX::SULD_1D_ARRAY_I8_ZERO_I;
|
|
case NVPTX::SULD_1D_ARRAY_I16_ZERO_R:
|
|
return NVPTX::SULD_1D_ARRAY_I16_ZERO_I;
|
|
case NVPTX::SULD_1D_ARRAY_I32_ZERO_R:
|
|
return NVPTX::SULD_1D_ARRAY_I32_ZERO_I;
|
|
case NVPTX::SULD_1D_ARRAY_I64_ZERO_R:
|
|
return NVPTX::SULD_1D_ARRAY_I64_ZERO_I;
|
|
case NVPTX::SULD_2D_I8_ZERO_R:
|
|
return NVPTX::SULD_2D_I8_ZERO_I;
|
|
case NVPTX::SULD_2D_I16_ZERO_R:
|
|
return NVPTX::SULD_2D_I16_ZERO_I;
|
|
case NVPTX::SULD_2D_I32_ZERO_R:
|
|
return NVPTX::SULD_2D_I32_ZERO_I;
|
|
case NVPTX::SULD_2D_I64_ZERO_R:
|
|
return NVPTX::SULD_2D_I64_ZERO_I;
|
|
case NVPTX::SULD_2D_ARRAY_I8_ZERO_R:
|
|
return NVPTX::SULD_2D_ARRAY_I8_ZERO_I;
|
|
case NVPTX::SULD_2D_ARRAY_I16_ZERO_R:
|
|
return NVPTX::SULD_2D_ARRAY_I16_ZERO_I;
|
|
case NVPTX::SULD_2D_ARRAY_I32_ZERO_R:
|
|
return NVPTX::SULD_2D_ARRAY_I32_ZERO_I;
|
|
case NVPTX::SULD_2D_ARRAY_I64_ZERO_R:
|
|
return NVPTX::SULD_2D_ARRAY_I64_ZERO_I;
|
|
case NVPTX::SULD_3D_I8_ZERO_R:
|
|
return NVPTX::SULD_3D_I8_ZERO_I;
|
|
case NVPTX::SULD_3D_I16_ZERO_R:
|
|
return NVPTX::SULD_3D_I16_ZERO_I;
|
|
case NVPTX::SULD_3D_I32_ZERO_R:
|
|
return NVPTX::SULD_3D_I32_ZERO_I;
|
|
case NVPTX::SULD_3D_I64_ZERO_R:
|
|
return NVPTX::SULD_3D_I64_ZERO_I;
|
|
case NVPTX::SULD_1D_V2I8_ZERO_R:
|
|
return NVPTX::SULD_1D_V2I8_ZERO_I;
|
|
case NVPTX::SULD_1D_V2I16_ZERO_R:
|
|
return NVPTX::SULD_1D_V2I16_ZERO_I;
|
|
case NVPTX::SULD_1D_V2I32_ZERO_R:
|
|
return NVPTX::SULD_1D_V2I32_ZERO_I;
|
|
case NVPTX::SULD_1D_V2I64_ZERO_R:
|
|
return NVPTX::SULD_1D_V2I64_ZERO_I;
|
|
case NVPTX::SULD_1D_ARRAY_V2I8_ZERO_R:
|
|
return NVPTX::SULD_1D_ARRAY_V2I8_ZERO_I;
|
|
case NVPTX::SULD_1D_ARRAY_V2I16_ZERO_R:
|
|
return NVPTX::SULD_1D_ARRAY_V2I16_ZERO_I;
|
|
case NVPTX::SULD_1D_ARRAY_V2I32_ZERO_R:
|
|
return NVPTX::SULD_1D_ARRAY_V2I32_ZERO_I;
|
|
case NVPTX::SULD_1D_ARRAY_V2I64_ZERO_R:
|
|
return NVPTX::SULD_1D_ARRAY_V2I64_ZERO_I;
|
|
case NVPTX::SULD_2D_V2I8_ZERO_R:
|
|
return NVPTX::SULD_2D_V2I8_ZERO_I;
|
|
case NVPTX::SULD_2D_V2I16_ZERO_R:
|
|
return NVPTX::SULD_2D_V2I16_ZERO_I;
|
|
case NVPTX::SULD_2D_V2I32_ZERO_R:
|
|
return NVPTX::SULD_2D_V2I32_ZERO_I;
|
|
case NVPTX::SULD_2D_V2I64_ZERO_R:
|
|
return NVPTX::SULD_2D_V2I64_ZERO_I;
|
|
case NVPTX::SULD_2D_ARRAY_V2I8_ZERO_R:
|
|
return NVPTX::SULD_2D_ARRAY_V2I8_ZERO_I;
|
|
case NVPTX::SULD_2D_ARRAY_V2I16_ZERO_R:
|
|
return NVPTX::SULD_2D_ARRAY_V2I16_ZERO_I;
|
|
case NVPTX::SULD_2D_ARRAY_V2I32_ZERO_R:
|
|
return NVPTX::SULD_2D_ARRAY_V2I32_ZERO_I;
|
|
case NVPTX::SULD_2D_ARRAY_V2I64_ZERO_R:
|
|
return NVPTX::SULD_2D_ARRAY_V2I64_ZERO_I;
|
|
case NVPTX::SULD_3D_V2I8_ZERO_R:
|
|
return NVPTX::SULD_3D_V2I8_ZERO_I;
|
|
case NVPTX::SULD_3D_V2I16_ZERO_R:
|
|
return NVPTX::SULD_3D_V2I16_ZERO_I;
|
|
case NVPTX::SULD_3D_V2I32_ZERO_R:
|
|
return NVPTX::SULD_3D_V2I32_ZERO_I;
|
|
case NVPTX::SULD_3D_V2I64_ZERO_R:
|
|
return NVPTX::SULD_3D_V2I64_ZERO_I;
|
|
case NVPTX::SULD_1D_V4I8_ZERO_R:
|
|
return NVPTX::SULD_1D_V4I8_ZERO_I;
|
|
case NVPTX::SULD_1D_V4I16_ZERO_R:
|
|
return NVPTX::SULD_1D_V4I16_ZERO_I;
|
|
case NVPTX::SULD_1D_V4I32_ZERO_R:
|
|
return NVPTX::SULD_1D_V4I32_ZERO_I;
|
|
case NVPTX::SULD_1D_ARRAY_V4I8_ZERO_R:
|
|
return NVPTX::SULD_1D_ARRAY_V4I8_ZERO_I;
|
|
case NVPTX::SULD_1D_ARRAY_V4I16_ZERO_R:
|
|
return NVPTX::SULD_1D_ARRAY_V4I16_ZERO_I;
|
|
case NVPTX::SULD_1D_ARRAY_V4I32_ZERO_R:
|
|
return NVPTX::SULD_1D_ARRAY_V4I32_ZERO_I;
|
|
case NVPTX::SULD_2D_V4I8_ZERO_R:
|
|
return NVPTX::SULD_2D_V4I8_ZERO_I;
|
|
case NVPTX::SULD_2D_V4I16_ZERO_R:
|
|
return NVPTX::SULD_2D_V4I16_ZERO_I;
|
|
case NVPTX::SULD_2D_V4I32_ZERO_R:
|
|
return NVPTX::SULD_2D_V4I32_ZERO_I;
|
|
case NVPTX::SULD_2D_ARRAY_V4I8_ZERO_R:
|
|
return NVPTX::SULD_2D_ARRAY_V4I8_ZERO_I;
|
|
case NVPTX::SULD_2D_ARRAY_V4I16_ZERO_R:
|
|
return NVPTX::SULD_2D_ARRAY_V4I16_ZERO_I;
|
|
case NVPTX::SULD_2D_ARRAY_V4I32_ZERO_R:
|
|
return NVPTX::SULD_2D_ARRAY_V4I32_ZERO_I;
|
|
case NVPTX::SULD_3D_V4I8_ZERO_R:
|
|
return NVPTX::SULD_3D_V4I8_ZERO_I;
|
|
case NVPTX::SULD_3D_V4I16_ZERO_R:
|
|
return NVPTX::SULD_3D_V4I16_ZERO_I;
|
|
case NVPTX::SULD_3D_V4I32_ZERO_R:
|
|
return NVPTX::SULD_3D_V4I32_ZERO_I;
|
|
default:
|
|
llvm_unreachable("Unhandled SULD opcode");
|
|
}
|
|
}
|
|
|
|
static unsigned sustRegisterToIndexOpcode(unsigned RegOC) {
|
|
switch (RegOC) {
|
|
case NVPTX::SUST_B_1D_B8_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_B8_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_B16_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_B16_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_B32_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_B32_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_B64_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_B64_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_V2B8_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_V2B8_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_V2B16_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_V2B16_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_V2B32_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_V2B32_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_V2B64_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_V2B64_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_V4B8_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_V4B8_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_V4B16_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_V4B16_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_V4B32_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_V4B32_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_B8_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_B8_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_B16_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_B16_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_B32_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_B32_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_B64_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_B64_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V2B8_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V2B8_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V2B16_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V2B16_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V2B32_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V2B32_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V2B64_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V2B64_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V4B8_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V4B8_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V4B16_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V4B16_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V4B32_CLAMP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V4B32_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_B8_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_B8_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_B16_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_B16_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_B32_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_B32_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_B64_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_B64_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_V2B8_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_V2B8_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_V2B16_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_V2B16_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_V2B32_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_V2B32_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_V2B64_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_V2B64_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_V4B8_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_V4B8_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_V4B16_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_V4B16_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_V4B32_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_V4B32_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_B8_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_B8_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_B16_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_B16_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_B32_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_B32_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_B64_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_B64_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V2B8_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V2B8_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V2B16_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V2B16_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V2B32_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V2B32_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V2B64_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V2B64_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V4B8_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V4B8_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V4B16_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V4B16_CLAMP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V4B32_CLAMP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V4B32_CLAMP_I;
|
|
case NVPTX::SUST_B_3D_B8_CLAMP_R:
|
|
return NVPTX::SUST_B_3D_B8_CLAMP_I;
|
|
case NVPTX::SUST_B_3D_B16_CLAMP_R:
|
|
return NVPTX::SUST_B_3D_B16_CLAMP_I;
|
|
case NVPTX::SUST_B_3D_B32_CLAMP_R:
|
|
return NVPTX::SUST_B_3D_B32_CLAMP_I;
|
|
case NVPTX::SUST_B_3D_B64_CLAMP_R:
|
|
return NVPTX::SUST_B_3D_B64_CLAMP_I;
|
|
case NVPTX::SUST_B_3D_V2B8_CLAMP_R:
|
|
return NVPTX::SUST_B_3D_V2B8_CLAMP_I;
|
|
case NVPTX::SUST_B_3D_V2B16_CLAMP_R:
|
|
return NVPTX::SUST_B_3D_V2B16_CLAMP_I;
|
|
case NVPTX::SUST_B_3D_V2B32_CLAMP_R:
|
|
return NVPTX::SUST_B_3D_V2B32_CLAMP_I;
|
|
case NVPTX::SUST_B_3D_V2B64_CLAMP_R:
|
|
return NVPTX::SUST_B_3D_V2B64_CLAMP_I;
|
|
case NVPTX::SUST_B_3D_V4B8_CLAMP_R:
|
|
return NVPTX::SUST_B_3D_V4B8_CLAMP_I;
|
|
case NVPTX::SUST_B_3D_V4B16_CLAMP_R:
|
|
return NVPTX::SUST_B_3D_V4B16_CLAMP_I;
|
|
case NVPTX::SUST_B_3D_V4B32_CLAMP_R:
|
|
return NVPTX::SUST_B_3D_V4B32_CLAMP_I;
|
|
case NVPTX::SUST_B_1D_B8_TRAP_R:
|
|
return NVPTX::SUST_B_1D_B8_TRAP_I;
|
|
case NVPTX::SUST_B_1D_B16_TRAP_R:
|
|
return NVPTX::SUST_B_1D_B16_TRAP_I;
|
|
case NVPTX::SUST_B_1D_B32_TRAP_R:
|
|
return NVPTX::SUST_B_1D_B32_TRAP_I;
|
|
case NVPTX::SUST_B_1D_B64_TRAP_R:
|
|
return NVPTX::SUST_B_1D_B64_TRAP_I;
|
|
case NVPTX::SUST_B_1D_V2B8_TRAP_R:
|
|
return NVPTX::SUST_B_1D_V2B8_TRAP_I;
|
|
case NVPTX::SUST_B_1D_V2B16_TRAP_R:
|
|
return NVPTX::SUST_B_1D_V2B16_TRAP_I;
|
|
case NVPTX::SUST_B_1D_V2B32_TRAP_R:
|
|
return NVPTX::SUST_B_1D_V2B32_TRAP_I;
|
|
case NVPTX::SUST_B_1D_V2B64_TRAP_R:
|
|
return NVPTX::SUST_B_1D_V2B64_TRAP_I;
|
|
case NVPTX::SUST_B_1D_V4B8_TRAP_R:
|
|
return NVPTX::SUST_B_1D_V4B8_TRAP_I;
|
|
case NVPTX::SUST_B_1D_V4B16_TRAP_R:
|
|
return NVPTX::SUST_B_1D_V4B16_TRAP_I;
|
|
case NVPTX::SUST_B_1D_V4B32_TRAP_R:
|
|
return NVPTX::SUST_B_1D_V4B32_TRAP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_B8_TRAP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_B8_TRAP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_B16_TRAP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_B16_TRAP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_B32_TRAP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_B32_TRAP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_B64_TRAP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_B64_TRAP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V2B8_TRAP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V2B8_TRAP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V2B16_TRAP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V2B16_TRAP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V2B32_TRAP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V2B32_TRAP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V2B64_TRAP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V2B64_TRAP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V4B8_TRAP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V4B8_TRAP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V4B16_TRAP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V4B16_TRAP_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V4B32_TRAP_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V4B32_TRAP_I;
|
|
case NVPTX::SUST_B_2D_B8_TRAP_R:
|
|
return NVPTX::SUST_B_2D_B8_TRAP_I;
|
|
case NVPTX::SUST_B_2D_B16_TRAP_R:
|
|
return NVPTX::SUST_B_2D_B16_TRAP_I;
|
|
case NVPTX::SUST_B_2D_B32_TRAP_R:
|
|
return NVPTX::SUST_B_2D_B32_TRAP_I;
|
|
case NVPTX::SUST_B_2D_B64_TRAP_R:
|
|
return NVPTX::SUST_B_2D_B64_TRAP_I;
|
|
case NVPTX::SUST_B_2D_V2B8_TRAP_R:
|
|
return NVPTX::SUST_B_2D_V2B8_TRAP_I;
|
|
case NVPTX::SUST_B_2D_V2B16_TRAP_R:
|
|
return NVPTX::SUST_B_2D_V2B16_TRAP_I;
|
|
case NVPTX::SUST_B_2D_V2B32_TRAP_R:
|
|
return NVPTX::SUST_B_2D_V2B32_TRAP_I;
|
|
case NVPTX::SUST_B_2D_V2B64_TRAP_R:
|
|
return NVPTX::SUST_B_2D_V2B64_TRAP_I;
|
|
case NVPTX::SUST_B_2D_V4B8_TRAP_R:
|
|
return NVPTX::SUST_B_2D_V4B8_TRAP_I;
|
|
case NVPTX::SUST_B_2D_V4B16_TRAP_R:
|
|
return NVPTX::SUST_B_2D_V4B16_TRAP_I;
|
|
case NVPTX::SUST_B_2D_V4B32_TRAP_R:
|
|
return NVPTX::SUST_B_2D_V4B32_TRAP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_B8_TRAP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_B8_TRAP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_B16_TRAP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_B16_TRAP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_B32_TRAP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_B32_TRAP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_B64_TRAP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_B64_TRAP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V2B8_TRAP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V2B8_TRAP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V2B16_TRAP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V2B16_TRAP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V2B32_TRAP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V2B32_TRAP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V2B64_TRAP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V2B64_TRAP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V4B8_TRAP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V4B8_TRAP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V4B16_TRAP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V4B16_TRAP_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V4B32_TRAP_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V4B32_TRAP_I;
|
|
case NVPTX::SUST_B_3D_B8_TRAP_R:
|
|
return NVPTX::SUST_B_3D_B8_TRAP_I;
|
|
case NVPTX::SUST_B_3D_B16_TRAP_R:
|
|
return NVPTX::SUST_B_3D_B16_TRAP_I;
|
|
case NVPTX::SUST_B_3D_B32_TRAP_R:
|
|
return NVPTX::SUST_B_3D_B32_TRAP_I;
|
|
case NVPTX::SUST_B_3D_B64_TRAP_R:
|
|
return NVPTX::SUST_B_3D_B64_TRAP_I;
|
|
case NVPTX::SUST_B_3D_V2B8_TRAP_R:
|
|
return NVPTX::SUST_B_3D_V2B8_TRAP_I;
|
|
case NVPTX::SUST_B_3D_V2B16_TRAP_R:
|
|
return NVPTX::SUST_B_3D_V2B16_TRAP_I;
|
|
case NVPTX::SUST_B_3D_V2B32_TRAP_R:
|
|
return NVPTX::SUST_B_3D_V2B32_TRAP_I;
|
|
case NVPTX::SUST_B_3D_V2B64_TRAP_R:
|
|
return NVPTX::SUST_B_3D_V2B64_TRAP_I;
|
|
case NVPTX::SUST_B_3D_V4B8_TRAP_R:
|
|
return NVPTX::SUST_B_3D_V4B8_TRAP_I;
|
|
case NVPTX::SUST_B_3D_V4B16_TRAP_R:
|
|
return NVPTX::SUST_B_3D_V4B16_TRAP_I;
|
|
case NVPTX::SUST_B_3D_V4B32_TRAP_R:
|
|
return NVPTX::SUST_B_3D_V4B32_TRAP_I;
|
|
case NVPTX::SUST_B_1D_B8_ZERO_R:
|
|
return NVPTX::SUST_B_1D_B8_ZERO_I;
|
|
case NVPTX::SUST_B_1D_B16_ZERO_R:
|
|
return NVPTX::SUST_B_1D_B16_ZERO_I;
|
|
case NVPTX::SUST_B_1D_B32_ZERO_R:
|
|
return NVPTX::SUST_B_1D_B32_ZERO_I;
|
|
case NVPTX::SUST_B_1D_B64_ZERO_R:
|
|
return NVPTX::SUST_B_1D_B64_ZERO_I;
|
|
case NVPTX::SUST_B_1D_V2B8_ZERO_R:
|
|
return NVPTX::SUST_B_1D_V2B8_ZERO_I;
|
|
case NVPTX::SUST_B_1D_V2B16_ZERO_R:
|
|
return NVPTX::SUST_B_1D_V2B16_ZERO_I;
|
|
case NVPTX::SUST_B_1D_V2B32_ZERO_R:
|
|
return NVPTX::SUST_B_1D_V2B32_ZERO_I;
|
|
case NVPTX::SUST_B_1D_V2B64_ZERO_R:
|
|
return NVPTX::SUST_B_1D_V2B64_ZERO_I;
|
|
case NVPTX::SUST_B_1D_V4B8_ZERO_R:
|
|
return NVPTX::SUST_B_1D_V4B8_ZERO_I;
|
|
case NVPTX::SUST_B_1D_V4B16_ZERO_R:
|
|
return NVPTX::SUST_B_1D_V4B16_ZERO_I;
|
|
case NVPTX::SUST_B_1D_V4B32_ZERO_R:
|
|
return NVPTX::SUST_B_1D_V4B32_ZERO_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_B8_ZERO_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_B8_ZERO_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_B16_ZERO_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_B16_ZERO_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_B32_ZERO_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_B32_ZERO_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_B64_ZERO_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_B64_ZERO_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V2B8_ZERO_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V2B8_ZERO_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V2B16_ZERO_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V2B16_ZERO_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V2B32_ZERO_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V2B32_ZERO_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V2B64_ZERO_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V2B64_ZERO_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V4B8_ZERO_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V4B8_ZERO_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V4B16_ZERO_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V4B16_ZERO_I;
|
|
case NVPTX::SUST_B_1D_ARRAY_V4B32_ZERO_R:
|
|
return NVPTX::SUST_B_1D_ARRAY_V4B32_ZERO_I;
|
|
case NVPTX::SUST_B_2D_B8_ZERO_R:
|
|
return NVPTX::SUST_B_2D_B8_ZERO_I;
|
|
case NVPTX::SUST_B_2D_B16_ZERO_R:
|
|
return NVPTX::SUST_B_2D_B16_ZERO_I;
|
|
case NVPTX::SUST_B_2D_B32_ZERO_R:
|
|
return NVPTX::SUST_B_2D_B32_ZERO_I;
|
|
case NVPTX::SUST_B_2D_B64_ZERO_R:
|
|
return NVPTX::SUST_B_2D_B64_ZERO_I;
|
|
case NVPTX::SUST_B_2D_V2B8_ZERO_R:
|
|
return NVPTX::SUST_B_2D_V2B8_ZERO_I;
|
|
case NVPTX::SUST_B_2D_V2B16_ZERO_R:
|
|
return NVPTX::SUST_B_2D_V2B16_ZERO_I;
|
|
case NVPTX::SUST_B_2D_V2B32_ZERO_R:
|
|
return NVPTX::SUST_B_2D_V2B32_ZERO_I;
|
|
case NVPTX::SUST_B_2D_V2B64_ZERO_R:
|
|
return NVPTX::SUST_B_2D_V2B64_ZERO_I;
|
|
case NVPTX::SUST_B_2D_V4B8_ZERO_R:
|
|
return NVPTX::SUST_B_2D_V4B8_ZERO_I;
|
|
case NVPTX::SUST_B_2D_V4B16_ZERO_R:
|
|
return NVPTX::SUST_B_2D_V4B16_ZERO_I;
|
|
case NVPTX::SUST_B_2D_V4B32_ZERO_R:
|
|
return NVPTX::SUST_B_2D_V4B32_ZERO_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_B8_ZERO_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_B8_ZERO_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_B16_ZERO_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_B16_ZERO_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_B32_ZERO_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_B32_ZERO_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_B64_ZERO_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_B64_ZERO_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V2B8_ZERO_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V2B8_ZERO_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V2B16_ZERO_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V2B16_ZERO_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V2B32_ZERO_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V2B32_ZERO_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V2B64_ZERO_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V2B64_ZERO_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V4B8_ZERO_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V4B8_ZERO_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V4B16_ZERO_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V4B16_ZERO_I;
|
|
case NVPTX::SUST_B_2D_ARRAY_V4B32_ZERO_R:
|
|
return NVPTX::SUST_B_2D_ARRAY_V4B32_ZERO_I;
|
|
case NVPTX::SUST_B_3D_B8_ZERO_R:
|
|
return NVPTX::SUST_B_3D_B8_ZERO_I;
|
|
case NVPTX::SUST_B_3D_B16_ZERO_R:
|
|
return NVPTX::SUST_B_3D_B16_ZERO_I;
|
|
case NVPTX::SUST_B_3D_B32_ZERO_R:
|
|
return NVPTX::SUST_B_3D_B32_ZERO_I;
|
|
case NVPTX::SUST_B_3D_B64_ZERO_R:
|
|
return NVPTX::SUST_B_3D_B64_ZERO_I;
|
|
case NVPTX::SUST_B_3D_V2B8_ZERO_R:
|
|
return NVPTX::SUST_B_3D_V2B8_ZERO_I;
|
|
case NVPTX::SUST_B_3D_V2B16_ZERO_R:
|
|
return NVPTX::SUST_B_3D_V2B16_ZERO_I;
|
|
case NVPTX::SUST_B_3D_V2B32_ZERO_R:
|
|
return NVPTX::SUST_B_3D_V2B32_ZERO_I;
|
|
case NVPTX::SUST_B_3D_V2B64_ZERO_R:
|
|
return NVPTX::SUST_B_3D_V2B64_ZERO_I;
|
|
case NVPTX::SUST_B_3D_V4B8_ZERO_R:
|
|
return NVPTX::SUST_B_3D_V4B8_ZERO_I;
|
|
case NVPTX::SUST_B_3D_V4B16_ZERO_R:
|
|
return NVPTX::SUST_B_3D_V4B16_ZERO_I;
|
|
case NVPTX::SUST_B_3D_V4B32_ZERO_R:
|
|
return NVPTX::SUST_B_3D_V4B32_ZERO_I;
|
|
case NVPTX::SUST_P_1D_B8_TRAP_R:
|
|
return NVPTX::SUST_P_1D_B8_TRAP_I;
|
|
case NVPTX::SUST_P_1D_B16_TRAP_R:
|
|
return NVPTX::SUST_P_1D_B16_TRAP_I;
|
|
case NVPTX::SUST_P_1D_B32_TRAP_R:
|
|
return NVPTX::SUST_P_1D_B32_TRAP_I;
|
|
case NVPTX::SUST_P_1D_V2B8_TRAP_R:
|
|
return NVPTX::SUST_P_1D_V2B8_TRAP_I;
|
|
case NVPTX::SUST_P_1D_V2B16_TRAP_R:
|
|
return NVPTX::SUST_P_1D_V2B16_TRAP_I;
|
|
case NVPTX::SUST_P_1D_V2B32_TRAP_R:
|
|
return NVPTX::SUST_P_1D_V2B32_TRAP_I;
|
|
case NVPTX::SUST_P_1D_V4B8_TRAP_R:
|
|
return NVPTX::SUST_P_1D_V4B8_TRAP_I;
|
|
case NVPTX::SUST_P_1D_V4B16_TRAP_R:
|
|
return NVPTX::SUST_P_1D_V4B16_TRAP_I;
|
|
case NVPTX::SUST_P_1D_V4B32_TRAP_R:
|
|
return NVPTX::SUST_P_1D_V4B32_TRAP_I;
|
|
case NVPTX::SUST_P_1D_ARRAY_B8_TRAP_R:
|
|
return NVPTX::SUST_P_1D_ARRAY_B8_TRAP_I;
|
|
case NVPTX::SUST_P_1D_ARRAY_B16_TRAP_R:
|
|
return NVPTX::SUST_P_1D_ARRAY_B16_TRAP_I;
|
|
case NVPTX::SUST_P_1D_ARRAY_B32_TRAP_R:
|
|
return NVPTX::SUST_P_1D_ARRAY_B32_TRAP_I;
|
|
case NVPTX::SUST_P_1D_ARRAY_V2B8_TRAP_R:
|
|
return NVPTX::SUST_P_1D_ARRAY_V2B8_TRAP_I;
|
|
case NVPTX::SUST_P_1D_ARRAY_V2B16_TRAP_R:
|
|
return NVPTX::SUST_P_1D_ARRAY_V2B16_TRAP_I;
|
|
case NVPTX::SUST_P_1D_ARRAY_V2B32_TRAP_R:
|
|
return NVPTX::SUST_P_1D_ARRAY_V2B32_TRAP_I;
|
|
case NVPTX::SUST_P_1D_ARRAY_V4B8_TRAP_R:
|
|
return NVPTX::SUST_P_1D_ARRAY_V4B8_TRAP_I;
|
|
case NVPTX::SUST_P_1D_ARRAY_V4B16_TRAP_R:
|
|
return NVPTX::SUST_P_1D_ARRAY_V4B16_TRAP_I;
|
|
case NVPTX::SUST_P_1D_ARRAY_V4B32_TRAP_R:
|
|
return NVPTX::SUST_P_1D_ARRAY_V4B32_TRAP_I;
|
|
case NVPTX::SUST_P_2D_B8_TRAP_R:
|
|
return NVPTX::SUST_P_2D_B8_TRAP_I;
|
|
case NVPTX::SUST_P_2D_B16_TRAP_R:
|
|
return NVPTX::SUST_P_2D_B16_TRAP_I;
|
|
case NVPTX::SUST_P_2D_B32_TRAP_R:
|
|
return NVPTX::SUST_P_2D_B32_TRAP_I;
|
|
case NVPTX::SUST_P_2D_V2B8_TRAP_R:
|
|
return NVPTX::SUST_P_2D_V2B8_TRAP_I;
|
|
case NVPTX::SUST_P_2D_V2B16_TRAP_R:
|
|
return NVPTX::SUST_P_2D_V2B16_TRAP_I;
|
|
case NVPTX::SUST_P_2D_V2B32_TRAP_R:
|
|
return NVPTX::SUST_P_2D_V2B32_TRAP_I;
|
|
case NVPTX::SUST_P_2D_V4B8_TRAP_R:
|
|
return NVPTX::SUST_P_2D_V4B8_TRAP_I;
|
|
case NVPTX::SUST_P_2D_V4B16_TRAP_R:
|
|
return NVPTX::SUST_P_2D_V4B16_TRAP_I;
|
|
case NVPTX::SUST_P_2D_V4B32_TRAP_R:
|
|
return NVPTX::SUST_P_2D_V4B32_TRAP_I;
|
|
case NVPTX::SUST_P_2D_ARRAY_B8_TRAP_R:
|
|
return NVPTX::SUST_P_2D_ARRAY_B8_TRAP_I;
|
|
case NVPTX::SUST_P_2D_ARRAY_B16_TRAP_R:
|
|
return NVPTX::SUST_P_2D_ARRAY_B16_TRAP_I;
|
|
case NVPTX::SUST_P_2D_ARRAY_B32_TRAP_R:
|
|
return NVPTX::SUST_P_2D_ARRAY_B32_TRAP_I;
|
|
case NVPTX::SUST_P_2D_ARRAY_V2B8_TRAP_R:
|
|
return NVPTX::SUST_P_2D_ARRAY_V2B8_TRAP_I;
|
|
case NVPTX::SUST_P_2D_ARRAY_V2B16_TRAP_R:
|
|
return NVPTX::SUST_P_2D_ARRAY_V2B16_TRAP_I;
|
|
case NVPTX::SUST_P_2D_ARRAY_V2B32_TRAP_R:
|
|
return NVPTX::SUST_P_2D_ARRAY_V2B32_TRAP_I;
|
|
case NVPTX::SUST_P_2D_ARRAY_V4B8_TRAP_R:
|
|
return NVPTX::SUST_P_2D_ARRAY_V4B8_TRAP_I;
|
|
case NVPTX::SUST_P_2D_ARRAY_V4B16_TRAP_R:
|
|
return NVPTX::SUST_P_2D_ARRAY_V4B16_TRAP_I;
|
|
case NVPTX::SUST_P_2D_ARRAY_V4B32_TRAP_R:
|
|
return NVPTX::SUST_P_2D_ARRAY_V4B32_TRAP_I;
|
|
case NVPTX::SUST_P_3D_B8_TRAP_R:
|
|
return NVPTX::SUST_P_3D_B8_TRAP_I;
|
|
case NVPTX::SUST_P_3D_B16_TRAP_R:
|
|
return NVPTX::SUST_P_3D_B16_TRAP_I;
|
|
case NVPTX::SUST_P_3D_B32_TRAP_R:
|
|
return NVPTX::SUST_P_3D_B32_TRAP_I;
|
|
case NVPTX::SUST_P_3D_V2B8_TRAP_R:
|
|
return NVPTX::SUST_P_3D_V2B8_TRAP_I;
|
|
case NVPTX::SUST_P_3D_V2B16_TRAP_R:
|
|
return NVPTX::SUST_P_3D_V2B16_TRAP_I;
|
|
case NVPTX::SUST_P_3D_V2B32_TRAP_R:
|
|
return NVPTX::SUST_P_3D_V2B32_TRAP_I;
|
|
case NVPTX::SUST_P_3D_V4B8_TRAP_R:
|
|
return NVPTX::SUST_P_3D_V4B8_TRAP_I;
|
|
case NVPTX::SUST_P_3D_V4B16_TRAP_R:
|
|
return NVPTX::SUST_P_3D_V4B16_TRAP_I;
|
|
case NVPTX::SUST_P_3D_V4B32_TRAP_R:
|
|
return NVPTX::SUST_P_3D_V4B32_TRAP_I;
|
|
default:
|
|
llvm_unreachable("Unhandled SUST opcode");
|
|
}
|
|
}
|
|
|
|
static unsigned texRegisterToIndexOpcode(unsigned RegOC) {
|
|
switch (RegOC) {
|
|
case NVPTX::TEX_1D_F32_S32_RR:
|
|
return NVPTX::TEX_1D_F32_S32_IR;
|
|
case NVPTX::TEX_1D_F32_S32_RI:
|
|
return NVPTX::TEX_1D_F32_S32_II;
|
|
case NVPTX::TEX_1D_F32_F32_RR:
|
|
return NVPTX::TEX_1D_F32_F32_IR;
|
|
case NVPTX::TEX_1D_F32_F32_RI:
|
|
return NVPTX::TEX_1D_F32_F32_II;
|
|
case NVPTX::TEX_1D_F32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_1D_F32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_1D_F32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_1D_F32_F32_LEVEL_II;
|
|
case NVPTX::TEX_1D_F32_F32_GRAD_RR:
|
|
return NVPTX::TEX_1D_F32_F32_GRAD_IR;
|
|
case NVPTX::TEX_1D_F32_F32_GRAD_RI:
|
|
return NVPTX::TEX_1D_F32_F32_GRAD_II;
|
|
case NVPTX::TEX_1D_S32_S32_RR:
|
|
return NVPTX::TEX_1D_S32_S32_IR;
|
|
case NVPTX::TEX_1D_S32_S32_RI:
|
|
return NVPTX::TEX_1D_S32_S32_II;
|
|
case NVPTX::TEX_1D_S32_F32_RR:
|
|
return NVPTX::TEX_1D_S32_F32_IR;
|
|
case NVPTX::TEX_1D_S32_F32_RI:
|
|
return NVPTX::TEX_1D_S32_F32_II;
|
|
case NVPTX::TEX_1D_S32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_1D_S32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_1D_S32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_1D_S32_F32_LEVEL_II;
|
|
case NVPTX::TEX_1D_S32_F32_GRAD_RR:
|
|
return NVPTX::TEX_1D_S32_F32_GRAD_IR;
|
|
case NVPTX::TEX_1D_S32_F32_GRAD_RI:
|
|
return NVPTX::TEX_1D_S32_F32_GRAD_II;
|
|
case NVPTX::TEX_1D_U32_S32_RR:
|
|
return NVPTX::TEX_1D_U32_S32_IR;
|
|
case NVPTX::TEX_1D_U32_S32_RI:
|
|
return NVPTX::TEX_1D_U32_S32_II;
|
|
case NVPTX::TEX_1D_U32_F32_RR:
|
|
return NVPTX::TEX_1D_U32_F32_IR;
|
|
case NVPTX::TEX_1D_U32_F32_RI:
|
|
return NVPTX::TEX_1D_U32_F32_II;
|
|
case NVPTX::TEX_1D_U32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_1D_U32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_1D_U32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_1D_U32_F32_LEVEL_II;
|
|
case NVPTX::TEX_1D_U32_F32_GRAD_RR:
|
|
return NVPTX::TEX_1D_U32_F32_GRAD_IR;
|
|
case NVPTX::TEX_1D_U32_F32_GRAD_RI:
|
|
return NVPTX::TEX_1D_U32_F32_GRAD_II;
|
|
case NVPTX::TEX_1D_ARRAY_F32_S32_RR:
|
|
return NVPTX::TEX_1D_ARRAY_F32_S32_IR;
|
|
case NVPTX::TEX_1D_ARRAY_F32_S32_RI:
|
|
return NVPTX::TEX_1D_ARRAY_F32_S32_II;
|
|
case NVPTX::TEX_1D_ARRAY_F32_F32_RR:
|
|
return NVPTX::TEX_1D_ARRAY_F32_F32_IR;
|
|
case NVPTX::TEX_1D_ARRAY_F32_F32_RI:
|
|
return NVPTX::TEX_1D_ARRAY_F32_F32_II;
|
|
case NVPTX::TEX_1D_ARRAY_F32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_1D_ARRAY_F32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_1D_ARRAY_F32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_1D_ARRAY_F32_F32_LEVEL_II;
|
|
case NVPTX::TEX_1D_ARRAY_F32_F32_GRAD_RR:
|
|
return NVPTX::TEX_1D_ARRAY_F32_F32_GRAD_IR;
|
|
case NVPTX::TEX_1D_ARRAY_F32_F32_GRAD_RI:
|
|
return NVPTX::TEX_1D_ARRAY_F32_F32_GRAD_II;
|
|
case NVPTX::TEX_1D_ARRAY_S32_S32_RR:
|
|
return NVPTX::TEX_1D_ARRAY_S32_S32_IR;
|
|
case NVPTX::TEX_1D_ARRAY_S32_S32_RI:
|
|
return NVPTX::TEX_1D_ARRAY_S32_S32_II;
|
|
case NVPTX::TEX_1D_ARRAY_S32_F32_RR:
|
|
return NVPTX::TEX_1D_ARRAY_S32_F32_IR;
|
|
case NVPTX::TEX_1D_ARRAY_S32_F32_RI:
|
|
return NVPTX::TEX_1D_ARRAY_S32_F32_II;
|
|
case NVPTX::TEX_1D_ARRAY_S32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_1D_ARRAY_S32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_1D_ARRAY_S32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_1D_ARRAY_S32_F32_LEVEL_II;
|
|
case NVPTX::TEX_1D_ARRAY_S32_F32_GRAD_RR:
|
|
return NVPTX::TEX_1D_ARRAY_S32_F32_GRAD_IR;
|
|
case NVPTX::TEX_1D_ARRAY_S32_F32_GRAD_RI:
|
|
return NVPTX::TEX_1D_ARRAY_S32_F32_GRAD_II;
|
|
case NVPTX::TEX_1D_ARRAY_U32_S32_RR:
|
|
return NVPTX::TEX_1D_ARRAY_U32_S32_IR;
|
|
case NVPTX::TEX_1D_ARRAY_U32_S32_RI:
|
|
return NVPTX::TEX_1D_ARRAY_U32_S32_II;
|
|
case NVPTX::TEX_1D_ARRAY_U32_F32_RR:
|
|
return NVPTX::TEX_1D_ARRAY_U32_F32_IR;
|
|
case NVPTX::TEX_1D_ARRAY_U32_F32_RI:
|
|
return NVPTX::TEX_1D_ARRAY_U32_F32_II;
|
|
case NVPTX::TEX_1D_ARRAY_U32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_1D_ARRAY_U32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_1D_ARRAY_U32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_1D_ARRAY_U32_F32_LEVEL_II;
|
|
case NVPTX::TEX_1D_ARRAY_U32_F32_GRAD_RR:
|
|
return NVPTX::TEX_1D_ARRAY_U32_F32_GRAD_IR;
|
|
case NVPTX::TEX_1D_ARRAY_U32_F32_GRAD_RI:
|
|
return NVPTX::TEX_1D_ARRAY_U32_F32_GRAD_II;
|
|
case NVPTX::TEX_2D_F32_S32_RR:
|
|
return NVPTX::TEX_2D_F32_S32_IR;
|
|
case NVPTX::TEX_2D_F32_S32_RI:
|
|
return NVPTX::TEX_2D_F32_S32_II;
|
|
case NVPTX::TEX_2D_F32_F32_RR:
|
|
return NVPTX::TEX_2D_F32_F32_IR;
|
|
case NVPTX::TEX_2D_F32_F32_RI:
|
|
return NVPTX::TEX_2D_F32_F32_II;
|
|
case NVPTX::TEX_2D_F32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_2D_F32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_2D_F32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_2D_F32_F32_LEVEL_II;
|
|
case NVPTX::TEX_2D_F32_F32_GRAD_RR:
|
|
return NVPTX::TEX_2D_F32_F32_GRAD_IR;
|
|
case NVPTX::TEX_2D_F32_F32_GRAD_RI:
|
|
return NVPTX::TEX_2D_F32_F32_GRAD_II;
|
|
case NVPTX::TEX_2D_S32_S32_RR:
|
|
return NVPTX::TEX_2D_S32_S32_IR;
|
|
case NVPTX::TEX_2D_S32_S32_RI:
|
|
return NVPTX::TEX_2D_S32_S32_II;
|
|
case NVPTX::TEX_2D_S32_F32_RR:
|
|
return NVPTX::TEX_2D_S32_F32_IR;
|
|
case NVPTX::TEX_2D_S32_F32_RI:
|
|
return NVPTX::TEX_2D_S32_F32_II;
|
|
case NVPTX::TEX_2D_S32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_2D_S32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_2D_S32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_2D_S32_F32_LEVEL_II;
|
|
case NVPTX::TEX_2D_S32_F32_GRAD_RR:
|
|
return NVPTX::TEX_2D_S32_F32_GRAD_IR;
|
|
case NVPTX::TEX_2D_S32_F32_GRAD_RI:
|
|
return NVPTX::TEX_2D_S32_F32_GRAD_II;
|
|
case NVPTX::TEX_2D_U32_S32_RR:
|
|
return NVPTX::TEX_2D_U32_S32_IR;
|
|
case NVPTX::TEX_2D_U32_S32_RI:
|
|
return NVPTX::TEX_2D_U32_S32_II;
|
|
case NVPTX::TEX_2D_U32_F32_RR:
|
|
return NVPTX::TEX_2D_U32_F32_IR;
|
|
case NVPTX::TEX_2D_U32_F32_RI:
|
|
return NVPTX::TEX_2D_U32_F32_II;
|
|
case NVPTX::TEX_2D_U32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_2D_U32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_2D_U32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_2D_U32_F32_LEVEL_II;
|
|
case NVPTX::TEX_2D_U32_F32_GRAD_RR:
|
|
return NVPTX::TEX_2D_U32_F32_GRAD_IR;
|
|
case NVPTX::TEX_2D_U32_F32_GRAD_RI:
|
|
return NVPTX::TEX_2D_U32_F32_GRAD_II;
|
|
case NVPTX::TEX_2D_ARRAY_F32_S32_RR:
|
|
return NVPTX::TEX_2D_ARRAY_F32_S32_IR;
|
|
case NVPTX::TEX_2D_ARRAY_F32_S32_RI:
|
|
return NVPTX::TEX_2D_ARRAY_F32_S32_II;
|
|
case NVPTX::TEX_2D_ARRAY_F32_F32_RR:
|
|
return NVPTX::TEX_2D_ARRAY_F32_F32_IR;
|
|
case NVPTX::TEX_2D_ARRAY_F32_F32_RI:
|
|
return NVPTX::TEX_2D_ARRAY_F32_F32_II;
|
|
case NVPTX::TEX_2D_ARRAY_F32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_2D_ARRAY_F32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_2D_ARRAY_F32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_2D_ARRAY_F32_F32_LEVEL_II;
|
|
case NVPTX::TEX_2D_ARRAY_F32_F32_GRAD_RR:
|
|
return NVPTX::TEX_2D_ARRAY_F32_F32_GRAD_IR;
|
|
case NVPTX::TEX_2D_ARRAY_F32_F32_GRAD_RI:
|
|
return NVPTX::TEX_2D_ARRAY_F32_F32_GRAD_II;
|
|
case NVPTX::TEX_2D_ARRAY_S32_S32_RR:
|
|
return NVPTX::TEX_2D_ARRAY_S32_S32_IR;
|
|
case NVPTX::TEX_2D_ARRAY_S32_S32_RI:
|
|
return NVPTX::TEX_2D_ARRAY_S32_S32_II;
|
|
case NVPTX::TEX_2D_ARRAY_S32_F32_RR:
|
|
return NVPTX::TEX_2D_ARRAY_S32_F32_IR;
|
|
case NVPTX::TEX_2D_ARRAY_S32_F32_RI:
|
|
return NVPTX::TEX_2D_ARRAY_S32_F32_II;
|
|
case NVPTX::TEX_2D_ARRAY_S32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_2D_ARRAY_S32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_2D_ARRAY_S32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_2D_ARRAY_S32_F32_LEVEL_II;
|
|
case NVPTX::TEX_2D_ARRAY_S32_F32_GRAD_RR:
|
|
return NVPTX::TEX_2D_ARRAY_S32_F32_GRAD_IR;
|
|
case NVPTX::TEX_2D_ARRAY_S32_F32_GRAD_RI:
|
|
return NVPTX::TEX_2D_ARRAY_S32_F32_GRAD_II;
|
|
case NVPTX::TEX_2D_ARRAY_U32_S32_RR:
|
|
return NVPTX::TEX_2D_ARRAY_U32_S32_IR;
|
|
case NVPTX::TEX_2D_ARRAY_U32_S32_RI:
|
|
return NVPTX::TEX_2D_ARRAY_U32_S32_II;
|
|
case NVPTX::TEX_2D_ARRAY_U32_F32_RR:
|
|
return NVPTX::TEX_2D_ARRAY_U32_F32_IR;
|
|
case NVPTX::TEX_2D_ARRAY_U32_F32_RI:
|
|
return NVPTX::TEX_2D_ARRAY_U32_F32_II;
|
|
case NVPTX::TEX_2D_ARRAY_U32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_2D_ARRAY_U32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_2D_ARRAY_U32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_2D_ARRAY_U32_F32_LEVEL_II;
|
|
case NVPTX::TEX_2D_ARRAY_U32_F32_GRAD_RR:
|
|
return NVPTX::TEX_2D_ARRAY_U32_F32_GRAD_IR;
|
|
case NVPTX::TEX_2D_ARRAY_U32_F32_GRAD_RI:
|
|
return NVPTX::TEX_2D_ARRAY_U32_F32_GRAD_II;
|
|
case NVPTX::TEX_3D_F32_S32_RR:
|
|
return NVPTX::TEX_3D_F32_S32_IR;
|
|
case NVPTX::TEX_3D_F32_S32_RI:
|
|
return NVPTX::TEX_3D_F32_S32_II;
|
|
case NVPTX::TEX_3D_F32_F32_RR:
|
|
return NVPTX::TEX_3D_F32_F32_IR;
|
|
case NVPTX::TEX_3D_F32_F32_RI:
|
|
return NVPTX::TEX_3D_F32_F32_II;
|
|
case NVPTX::TEX_3D_F32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_3D_F32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_3D_F32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_3D_F32_F32_LEVEL_II;
|
|
case NVPTX::TEX_3D_F32_F32_GRAD_RR:
|
|
return NVPTX::TEX_3D_F32_F32_GRAD_IR;
|
|
case NVPTX::TEX_3D_F32_F32_GRAD_RI:
|
|
return NVPTX::TEX_3D_F32_F32_GRAD_II;
|
|
case NVPTX::TEX_3D_S32_S32_RR:
|
|
return NVPTX::TEX_3D_S32_S32_IR;
|
|
case NVPTX::TEX_3D_S32_S32_RI:
|
|
return NVPTX::TEX_3D_S32_S32_II;
|
|
case NVPTX::TEX_3D_S32_F32_RR:
|
|
return NVPTX::TEX_3D_S32_F32_IR;
|
|
case NVPTX::TEX_3D_S32_F32_RI:
|
|
return NVPTX::TEX_3D_S32_F32_II;
|
|
case NVPTX::TEX_3D_S32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_3D_S32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_3D_S32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_3D_S32_F32_LEVEL_II;
|
|
case NVPTX::TEX_3D_S32_F32_GRAD_RR:
|
|
return NVPTX::TEX_3D_S32_F32_GRAD_IR;
|
|
case NVPTX::TEX_3D_S32_F32_GRAD_RI:
|
|
return NVPTX::TEX_3D_S32_F32_GRAD_II;
|
|
case NVPTX::TEX_3D_U32_S32_RR:
|
|
return NVPTX::TEX_3D_U32_S32_IR;
|
|
case NVPTX::TEX_3D_U32_S32_RI:
|
|
return NVPTX::TEX_3D_U32_S32_II;
|
|
case NVPTX::TEX_3D_U32_F32_RR:
|
|
return NVPTX::TEX_3D_U32_F32_IR;
|
|
case NVPTX::TEX_3D_U32_F32_RI:
|
|
return NVPTX::TEX_3D_U32_F32_II;
|
|
case NVPTX::TEX_3D_U32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_3D_U32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_3D_U32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_3D_U32_F32_LEVEL_II;
|
|
case NVPTX::TEX_3D_U32_F32_GRAD_RR:
|
|
return NVPTX::TEX_3D_U32_F32_GRAD_IR;
|
|
case NVPTX::TEX_3D_U32_F32_GRAD_RI:
|
|
return NVPTX::TEX_3D_U32_F32_GRAD_II;
|
|
case NVPTX::TEX_CUBE_F32_F32_RR:
|
|
return NVPTX::TEX_CUBE_F32_F32_IR;
|
|
case NVPTX::TEX_CUBE_F32_F32_RI:
|
|
return NVPTX::TEX_CUBE_F32_F32_II;
|
|
case NVPTX::TEX_CUBE_F32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_CUBE_F32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_CUBE_F32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_CUBE_F32_F32_LEVEL_II;
|
|
case NVPTX::TEX_CUBE_S32_F32_RR:
|
|
return NVPTX::TEX_CUBE_S32_F32_IR;
|
|
case NVPTX::TEX_CUBE_S32_F32_RI:
|
|
return NVPTX::TEX_CUBE_S32_F32_II;
|
|
case NVPTX::TEX_CUBE_S32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_CUBE_S32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_CUBE_S32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_CUBE_S32_F32_LEVEL_II;
|
|
case NVPTX::TEX_CUBE_U32_F32_RR:
|
|
return NVPTX::TEX_CUBE_U32_F32_IR;
|
|
case NVPTX::TEX_CUBE_U32_F32_RI:
|
|
return NVPTX::TEX_CUBE_U32_F32_II;
|
|
case NVPTX::TEX_CUBE_U32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_CUBE_U32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_CUBE_U32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_CUBE_U32_F32_LEVEL_II;
|
|
case NVPTX::TEX_CUBE_ARRAY_F32_F32_RR:
|
|
return NVPTX::TEX_CUBE_ARRAY_F32_F32_IR;
|
|
case NVPTX::TEX_CUBE_ARRAY_F32_F32_RI:
|
|
return NVPTX::TEX_CUBE_ARRAY_F32_F32_II;
|
|
case NVPTX::TEX_CUBE_ARRAY_F32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_CUBE_ARRAY_F32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_CUBE_ARRAY_F32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_CUBE_ARRAY_F32_F32_LEVEL_II;
|
|
case NVPTX::TEX_CUBE_ARRAY_S32_F32_RR:
|
|
return NVPTX::TEX_CUBE_ARRAY_S32_F32_IR;
|
|
case NVPTX::TEX_CUBE_ARRAY_S32_F32_RI:
|
|
return NVPTX::TEX_CUBE_ARRAY_S32_F32_II;
|
|
case NVPTX::TEX_CUBE_ARRAY_S32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_CUBE_ARRAY_S32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_CUBE_ARRAY_S32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_CUBE_ARRAY_S32_F32_LEVEL_II;
|
|
case NVPTX::TEX_CUBE_ARRAY_U32_F32_RR:
|
|
return NVPTX::TEX_CUBE_ARRAY_U32_F32_IR;
|
|
case NVPTX::TEX_CUBE_ARRAY_U32_F32_RI:
|
|
return NVPTX::TEX_CUBE_ARRAY_U32_F32_II;
|
|
case NVPTX::TEX_CUBE_ARRAY_U32_F32_LEVEL_RR:
|
|
return NVPTX::TEX_CUBE_ARRAY_U32_F32_LEVEL_IR;
|
|
case NVPTX::TEX_CUBE_ARRAY_U32_F32_LEVEL_RI:
|
|
return NVPTX::TEX_CUBE_ARRAY_U32_F32_LEVEL_II;
|
|
case NVPTX::TLD4_R_2D_F32_F32_RR:
|
|
return NVPTX::TLD4_R_2D_F32_F32_IR;
|
|
case NVPTX::TLD4_R_2D_F32_F32_RI:
|
|
return NVPTX::TLD4_R_2D_F32_F32_II;
|
|
case NVPTX::TLD4_G_2D_F32_F32_RR:
|
|
return NVPTX::TLD4_G_2D_F32_F32_IR;
|
|
case NVPTX::TLD4_G_2D_F32_F32_RI:
|
|
return NVPTX::TLD4_G_2D_F32_F32_II;
|
|
case NVPTX::TLD4_B_2D_F32_F32_RR:
|
|
return NVPTX::TLD4_B_2D_F32_F32_IR;
|
|
case NVPTX::TLD4_B_2D_F32_F32_RI:
|
|
return NVPTX::TLD4_B_2D_F32_F32_II;
|
|
case NVPTX::TLD4_A_2D_F32_F32_RR:
|
|
return NVPTX::TLD4_A_2D_F32_F32_IR;
|
|
case NVPTX::TLD4_A_2D_F32_F32_RI:
|
|
return NVPTX::TLD4_A_2D_F32_F32_II;
|
|
case NVPTX::TLD4_R_2D_S32_F32_RR:
|
|
return NVPTX::TLD4_R_2D_S32_F32_IR;
|
|
case NVPTX::TLD4_R_2D_S32_F32_RI:
|
|
return NVPTX::TLD4_R_2D_S32_F32_II;
|
|
case NVPTX::TLD4_G_2D_S32_F32_RR:
|
|
return NVPTX::TLD4_G_2D_S32_F32_IR;
|
|
case NVPTX::TLD4_G_2D_S32_F32_RI:
|
|
return NVPTX::TLD4_G_2D_S32_F32_II;
|
|
case NVPTX::TLD4_B_2D_S32_F32_RR:
|
|
return NVPTX::TLD4_B_2D_S32_F32_IR;
|
|
case NVPTX::TLD4_B_2D_S32_F32_RI:
|
|
return NVPTX::TLD4_B_2D_S32_F32_II;
|
|
case NVPTX::TLD4_A_2D_S32_F32_RR:
|
|
return NVPTX::TLD4_A_2D_S32_F32_IR;
|
|
case NVPTX::TLD4_A_2D_S32_F32_RI:
|
|
return NVPTX::TLD4_A_2D_S32_F32_II;
|
|
case NVPTX::TLD4_R_2D_U32_F32_RR:
|
|
return NVPTX::TLD4_R_2D_U32_F32_IR;
|
|
case NVPTX::TLD4_R_2D_U32_F32_RI:
|
|
return NVPTX::TLD4_R_2D_U32_F32_II;
|
|
case NVPTX::TLD4_G_2D_U32_F32_RR:
|
|
return NVPTX::TLD4_G_2D_U32_F32_IR;
|
|
case NVPTX::TLD4_G_2D_U32_F32_RI:
|
|
return NVPTX::TLD4_G_2D_U32_F32_II;
|
|
case NVPTX::TLD4_B_2D_U32_F32_RR:
|
|
return NVPTX::TLD4_B_2D_U32_F32_IR;
|
|
case NVPTX::TLD4_B_2D_U32_F32_RI:
|
|
return NVPTX::TLD4_B_2D_U32_F32_II;
|
|
case NVPTX::TLD4_A_2D_U32_F32_RR:
|
|
return NVPTX::TLD4_A_2D_U32_F32_IR;
|
|
case NVPTX::TLD4_A_2D_U32_F32_RI:
|
|
return NVPTX::TLD4_A_2D_U32_F32_II;
|
|
case NVPTX::TEX_UNIFIED_1D_F32_S32_R:
|
|
return NVPTX::TEX_UNIFIED_1D_F32_S32_I;
|
|
case NVPTX::TEX_UNIFIED_1D_F32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_1D_F32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_1D_F32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_1D_F32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_1D_F32_F32_GRAD_R:
|
|
return NVPTX::TEX_UNIFIED_1D_F32_F32_GRAD_I;
|
|
case NVPTX::TEX_UNIFIED_1D_S32_S32_R:
|
|
return NVPTX::TEX_UNIFIED_1D_S32_S32_I;
|
|
case NVPTX::TEX_UNIFIED_1D_S32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_1D_S32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_1D_S32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_1D_S32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_1D_S32_F32_GRAD_R:
|
|
return NVPTX::TEX_UNIFIED_1D_S32_F32_GRAD_I;
|
|
case NVPTX::TEX_UNIFIED_1D_U32_S32_R:
|
|
return NVPTX::TEX_UNIFIED_1D_U32_S32_I;
|
|
case NVPTX::TEX_UNIFIED_1D_U32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_1D_U32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_1D_U32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_1D_U32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_1D_U32_F32_GRAD_R:
|
|
return NVPTX::TEX_UNIFIED_1D_U32_F32_GRAD_I;
|
|
case NVPTX::TEX_UNIFIED_1D_ARRAY_F32_S32_R:
|
|
return NVPTX::TEX_UNIFIED_1D_ARRAY_F32_S32_I;
|
|
case NVPTX::TEX_UNIFIED_1D_ARRAY_F32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_1D_ARRAY_F32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_1D_ARRAY_F32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_1D_ARRAY_F32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_1D_ARRAY_F32_F32_GRAD_R:
|
|
return NVPTX::TEX_UNIFIED_1D_ARRAY_F32_F32_GRAD_I;
|
|
case NVPTX::TEX_UNIFIED_1D_ARRAY_S32_S32_R:
|
|
return NVPTX::TEX_UNIFIED_1D_ARRAY_S32_S32_I;
|
|
case NVPTX::TEX_UNIFIED_1D_ARRAY_S32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_1D_ARRAY_S32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_1D_ARRAY_S32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_1D_ARRAY_S32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_1D_ARRAY_S32_F32_GRAD_R:
|
|
return NVPTX::TEX_UNIFIED_1D_ARRAY_S32_F32_GRAD_I;
|
|
case NVPTX::TEX_UNIFIED_1D_ARRAY_U32_S32_R:
|
|
return NVPTX::TEX_UNIFIED_1D_ARRAY_U32_S32_I;
|
|
case NVPTX::TEX_UNIFIED_1D_ARRAY_U32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_1D_ARRAY_U32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_1D_ARRAY_U32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_1D_ARRAY_U32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_1D_ARRAY_U32_F32_GRAD_R:
|
|
return NVPTX::TEX_UNIFIED_1D_ARRAY_U32_F32_GRAD_I;
|
|
case NVPTX::TEX_UNIFIED_2D_F32_S32_R:
|
|
return NVPTX::TEX_UNIFIED_2D_F32_S32_I;
|
|
case NVPTX::TEX_UNIFIED_2D_F32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_2D_F32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_2D_F32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_2D_F32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_2D_F32_F32_GRAD_R:
|
|
return NVPTX::TEX_UNIFIED_2D_F32_F32_GRAD_I;
|
|
case NVPTX::TEX_UNIFIED_2D_S32_S32_R:
|
|
return NVPTX::TEX_UNIFIED_2D_S32_S32_I;
|
|
case NVPTX::TEX_UNIFIED_2D_S32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_2D_S32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_2D_S32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_2D_S32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_2D_S32_F32_GRAD_R:
|
|
return NVPTX::TEX_UNIFIED_2D_S32_F32_GRAD_I;
|
|
case NVPTX::TEX_UNIFIED_2D_U32_S32_R:
|
|
return NVPTX::TEX_UNIFIED_2D_U32_S32_I;
|
|
case NVPTX::TEX_UNIFIED_2D_U32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_2D_U32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_2D_U32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_2D_U32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_2D_U32_F32_GRAD_R:
|
|
return NVPTX::TEX_UNIFIED_2D_U32_F32_GRAD_I;
|
|
case NVPTX::TEX_UNIFIED_2D_ARRAY_F32_S32_R:
|
|
return NVPTX::TEX_UNIFIED_2D_ARRAY_F32_S32_I;
|
|
case NVPTX::TEX_UNIFIED_2D_ARRAY_F32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_2D_ARRAY_F32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_2D_ARRAY_F32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_2D_ARRAY_F32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_2D_ARRAY_F32_F32_GRAD_R:
|
|
return NVPTX::TEX_UNIFIED_2D_ARRAY_F32_F32_GRAD_I;
|
|
case NVPTX::TEX_UNIFIED_2D_ARRAY_S32_S32_R:
|
|
return NVPTX::TEX_UNIFIED_2D_ARRAY_S32_S32_I;
|
|
case NVPTX::TEX_UNIFIED_2D_ARRAY_S32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_2D_ARRAY_S32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_2D_ARRAY_S32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_2D_ARRAY_S32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_2D_ARRAY_S32_F32_GRAD_R:
|
|
return NVPTX::TEX_UNIFIED_2D_ARRAY_S32_F32_GRAD_I;
|
|
case NVPTX::TEX_UNIFIED_2D_ARRAY_U32_S32_R:
|
|
return NVPTX::TEX_UNIFIED_2D_ARRAY_U32_S32_I;
|
|
case NVPTX::TEX_UNIFIED_2D_ARRAY_U32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_2D_ARRAY_U32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_2D_ARRAY_U32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_2D_ARRAY_U32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_2D_ARRAY_U32_F32_GRAD_R:
|
|
return NVPTX::TEX_UNIFIED_2D_ARRAY_U32_F32_GRAD_I;
|
|
case NVPTX::TEX_UNIFIED_3D_F32_S32_R:
|
|
return NVPTX::TEX_UNIFIED_3D_F32_S32_I;
|
|
case NVPTX::TEX_UNIFIED_3D_F32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_3D_F32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_3D_F32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_3D_F32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_3D_F32_F32_GRAD_R:
|
|
return NVPTX::TEX_UNIFIED_3D_F32_F32_GRAD_I;
|
|
case NVPTX::TEX_UNIFIED_3D_S32_S32_R:
|
|
return NVPTX::TEX_UNIFIED_3D_S32_S32_I;
|
|
case NVPTX::TEX_UNIFIED_3D_S32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_3D_S32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_3D_S32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_3D_S32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_3D_S32_F32_GRAD_R:
|
|
return NVPTX::TEX_UNIFIED_3D_S32_F32_GRAD_I;
|
|
case NVPTX::TEX_UNIFIED_3D_U32_S32_R:
|
|
return NVPTX::TEX_UNIFIED_3D_U32_S32_I;
|
|
case NVPTX::TEX_UNIFIED_3D_U32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_3D_U32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_3D_U32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_3D_U32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_3D_U32_F32_GRAD_R:
|
|
return NVPTX::TEX_UNIFIED_3D_U32_F32_GRAD_I;
|
|
case NVPTX::TEX_UNIFIED_CUBE_F32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_CUBE_F32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_CUBE_F32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_CUBE_F32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_CUBE_S32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_CUBE_S32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_CUBE_S32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_CUBE_S32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_CUBE_U32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_CUBE_U32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_CUBE_U32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_CUBE_U32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_CUBE_ARRAY_F32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_CUBE_ARRAY_F32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_CUBE_ARRAY_F32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_CUBE_ARRAY_F32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_CUBE_ARRAY_S32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_CUBE_ARRAY_S32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_CUBE_ARRAY_S32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_CUBE_ARRAY_S32_F32_LEVEL_I;
|
|
case NVPTX::TEX_UNIFIED_CUBE_ARRAY_U32_F32_R:
|
|
return NVPTX::TEX_UNIFIED_CUBE_ARRAY_U32_F32_I;
|
|
case NVPTX::TEX_UNIFIED_CUBE_ARRAY_U32_F32_LEVEL_R:
|
|
return NVPTX::TEX_UNIFIED_CUBE_ARRAY_U32_F32_LEVEL_I;
|
|
case NVPTX::TLD4_UNIFIED_R_2D_F32_F32_R:
|
|
return NVPTX::TLD4_UNIFIED_R_2D_F32_F32_I;
|
|
case NVPTX::TLD4_UNIFIED_G_2D_F32_F32_R:
|
|
return NVPTX::TLD4_UNIFIED_G_2D_F32_F32_I;
|
|
case NVPTX::TLD4_UNIFIED_B_2D_F32_F32_R:
|
|
return NVPTX::TLD4_UNIFIED_B_2D_F32_F32_I;
|
|
case NVPTX::TLD4_UNIFIED_A_2D_F32_F32_R:
|
|
return NVPTX::TLD4_UNIFIED_A_2D_F32_F32_I;
|
|
case NVPTX::TLD4_UNIFIED_R_2D_S32_F32_R:
|
|
return NVPTX::TLD4_UNIFIED_R_2D_S32_F32_I;
|
|
case NVPTX::TLD4_UNIFIED_G_2D_S32_F32_R:
|
|
return NVPTX::TLD4_UNIFIED_G_2D_S32_F32_I;
|
|
case NVPTX::TLD4_UNIFIED_B_2D_S32_F32_R:
|
|
return NVPTX::TLD4_UNIFIED_B_2D_S32_F32_I;
|
|
case NVPTX::TLD4_UNIFIED_A_2D_S32_F32_R:
|
|
return NVPTX::TLD4_UNIFIED_A_2D_S32_F32_I;
|
|
case NVPTX::TLD4_UNIFIED_R_2D_U32_F32_R:
|
|
return NVPTX::TLD4_UNIFIED_R_2D_U32_F32_I;
|
|
case NVPTX::TLD4_UNIFIED_G_2D_U32_F32_R:
|
|
return NVPTX::TLD4_UNIFIED_G_2D_U32_F32_I;
|
|
case NVPTX::TLD4_UNIFIED_B_2D_U32_F32_R:
|
|
return NVPTX::TLD4_UNIFIED_B_2D_U32_F32_I;
|
|
case NVPTX::TLD4_UNIFIED_A_2D_U32_F32_R:
|
|
return NVPTX::TLD4_UNIFIED_A_2D_U32_F32_I;
|
|
  default:
    llvm_unreachable("Unhandled TEX opcode");
  }
}
|
|
|
|
static unsigned samplerRegisterToIndexOpcode(unsigned RegOC) {
  switch (RegOC) {
  case NVPTX::TEX_1D_F32_S32_RR:
    return NVPTX::TEX_1D_F32_S32_RI;
  case NVPTX::TEX_1D_F32_S32_IR:
    return NVPTX::TEX_1D_F32_S32_II;
  case NVPTX::TEX_1D_F32_F32_RR:
    return NVPTX::TEX_1D_F32_F32_RI;
  case NVPTX::TEX_1D_F32_F32_IR:
    return NVPTX::TEX_1D_F32_F32_II;
  case NVPTX::TEX_1D_F32_F32_LEVEL_RR:
    return NVPTX::TEX_1D_F32_F32_LEVEL_RI;
  case NVPTX::TEX_1D_F32_F32_LEVEL_IR:
    return NVPTX::TEX_1D_F32_F32_LEVEL_II;
  case NVPTX::TEX_1D_F32_F32_GRAD_RR:
    return NVPTX::TEX_1D_F32_F32_GRAD_RI;
  case NVPTX::TEX_1D_F32_F32_GRAD_IR:
    return NVPTX::TEX_1D_F32_F32_GRAD_II;
  case NVPTX::TEX_1D_S32_S32_RR:
    return NVPTX::TEX_1D_S32_S32_RI;
  case NVPTX::TEX_1D_S32_S32_IR:
    return NVPTX::TEX_1D_S32_S32_II;
  case NVPTX::TEX_1D_S32_F32_RR:
    return NVPTX::TEX_1D_S32_F32_RI;
  case NVPTX::TEX_1D_S32_F32_IR:
    return NVPTX::TEX_1D_S32_F32_II;
  case NVPTX::TEX_1D_S32_F32_LEVEL_RR:
    return NVPTX::TEX_1D_S32_F32_LEVEL_RI;
  case NVPTX::TEX_1D_S32_F32_LEVEL_IR:
    return NVPTX::TEX_1D_S32_F32_LEVEL_II;
  case NVPTX::TEX_1D_S32_F32_GRAD_RR:
    return NVPTX::TEX_1D_S32_F32_GRAD_RI;
  case NVPTX::TEX_1D_S32_F32_GRAD_IR:
    return NVPTX::TEX_1D_S32_F32_GRAD_II;
  case NVPTX::TEX_1D_U32_S32_RR:
    return NVPTX::TEX_1D_U32_S32_RI;
  case NVPTX::TEX_1D_U32_S32_IR:
    return NVPTX::TEX_1D_U32_S32_II;
  case NVPTX::TEX_1D_U32_F32_RR:
    return NVPTX::TEX_1D_U32_F32_RI;
  case NVPTX::TEX_1D_U32_F32_IR:
    return NVPTX::TEX_1D_U32_F32_II;
  case NVPTX::TEX_1D_U32_F32_LEVEL_RR:
    return NVPTX::TEX_1D_U32_F32_LEVEL_RI;
  case NVPTX::TEX_1D_U32_F32_LEVEL_IR:
    return NVPTX::TEX_1D_U32_F32_LEVEL_II;
  case NVPTX::TEX_1D_U32_F32_GRAD_RR:
    return NVPTX::TEX_1D_U32_F32_GRAD_RI;
  case NVPTX::TEX_1D_U32_F32_GRAD_IR:
    return NVPTX::TEX_1D_U32_F32_GRAD_II;
  case NVPTX::TEX_1D_ARRAY_F32_S32_RR:
    return NVPTX::TEX_1D_ARRAY_F32_S32_RI;
  case NVPTX::TEX_1D_ARRAY_F32_S32_IR:
    return NVPTX::TEX_1D_ARRAY_F32_S32_II;
  case NVPTX::TEX_1D_ARRAY_F32_F32_RR:
    return NVPTX::TEX_1D_ARRAY_F32_F32_RI;
  case NVPTX::TEX_1D_ARRAY_F32_F32_IR:
    return NVPTX::TEX_1D_ARRAY_F32_F32_II;
  case NVPTX::TEX_1D_ARRAY_F32_F32_LEVEL_RR:
    return NVPTX::TEX_1D_ARRAY_F32_F32_LEVEL_RI;
  case NVPTX::TEX_1D_ARRAY_F32_F32_LEVEL_IR:
    return NVPTX::TEX_1D_ARRAY_F32_F32_LEVEL_II;
  case NVPTX::TEX_1D_ARRAY_F32_F32_GRAD_RR:
    return NVPTX::TEX_1D_ARRAY_F32_F32_GRAD_RI;
  case NVPTX::TEX_1D_ARRAY_F32_F32_GRAD_IR:
    return NVPTX::TEX_1D_ARRAY_F32_F32_GRAD_II;
  case NVPTX::TEX_1D_ARRAY_S32_S32_RR:
    return NVPTX::TEX_1D_ARRAY_S32_S32_RI;
  case NVPTX::TEX_1D_ARRAY_S32_S32_IR:
    return NVPTX::TEX_1D_ARRAY_S32_S32_II;
  case NVPTX::TEX_1D_ARRAY_S32_F32_RR:
    return NVPTX::TEX_1D_ARRAY_S32_F32_RI;
  case NVPTX::TEX_1D_ARRAY_S32_F32_IR:
    return NVPTX::TEX_1D_ARRAY_S32_F32_II;
  case NVPTX::TEX_1D_ARRAY_S32_F32_LEVEL_RR:
    return NVPTX::TEX_1D_ARRAY_S32_F32_LEVEL_RI;
  case NVPTX::TEX_1D_ARRAY_S32_F32_LEVEL_IR:
    return NVPTX::TEX_1D_ARRAY_S32_F32_LEVEL_II;
  case NVPTX::TEX_1D_ARRAY_S32_F32_GRAD_RR:
    return NVPTX::TEX_1D_ARRAY_S32_F32_GRAD_RI;
  case NVPTX::TEX_1D_ARRAY_S32_F32_GRAD_IR:
    return NVPTX::TEX_1D_ARRAY_S32_F32_GRAD_II;
  case NVPTX::TEX_1D_ARRAY_U32_S32_RR:
    return NVPTX::TEX_1D_ARRAY_U32_S32_RI;
  case NVPTX::TEX_1D_ARRAY_U32_S32_IR:
    return NVPTX::TEX_1D_ARRAY_U32_S32_II;
  case NVPTX::TEX_1D_ARRAY_U32_F32_RR:
    return NVPTX::TEX_1D_ARRAY_U32_F32_RI;
  case NVPTX::TEX_1D_ARRAY_U32_F32_IR:
    return NVPTX::TEX_1D_ARRAY_U32_F32_II;
  case NVPTX::TEX_1D_ARRAY_U32_F32_LEVEL_RR:
    return NVPTX::TEX_1D_ARRAY_U32_F32_LEVEL_RI;
  case NVPTX::TEX_1D_ARRAY_U32_F32_LEVEL_IR:
    return NVPTX::TEX_1D_ARRAY_U32_F32_LEVEL_II;
  case NVPTX::TEX_1D_ARRAY_U32_F32_GRAD_RR:
    return NVPTX::TEX_1D_ARRAY_U32_F32_GRAD_RI;
  case NVPTX::TEX_1D_ARRAY_U32_F32_GRAD_IR:
    return NVPTX::TEX_1D_ARRAY_U32_F32_GRAD_II;
  case NVPTX::TEX_2D_F32_S32_RR:
    return NVPTX::TEX_2D_F32_S32_RI;
  case NVPTX::TEX_2D_F32_S32_IR:
    return NVPTX::TEX_2D_F32_S32_II;
  case NVPTX::TEX_2D_F32_F32_RR:
    return NVPTX::TEX_2D_F32_F32_RI;
  case NVPTX::TEX_2D_F32_F32_IR:
    return NVPTX::TEX_2D_F32_F32_II;
  case NVPTX::TEX_2D_F32_F32_LEVEL_RR:
    return NVPTX::TEX_2D_F32_F32_LEVEL_RI;
  case NVPTX::TEX_2D_F32_F32_LEVEL_IR:
    return NVPTX::TEX_2D_F32_F32_LEVEL_II;
  case NVPTX::TEX_2D_F32_F32_GRAD_RR:
    return NVPTX::TEX_2D_F32_F32_GRAD_RI;
  case NVPTX::TEX_2D_F32_F32_GRAD_IR:
    return NVPTX::TEX_2D_F32_F32_GRAD_II;
  case NVPTX::TEX_2D_S32_S32_RR:
    return NVPTX::TEX_2D_S32_S32_RI;
  case NVPTX::TEX_2D_S32_S32_IR:
    return NVPTX::TEX_2D_S32_S32_II;
  case NVPTX::TEX_2D_S32_F32_RR:
    return NVPTX::TEX_2D_S32_F32_RI;
  case NVPTX::TEX_2D_S32_F32_IR:
    return NVPTX::TEX_2D_S32_F32_II;
  case NVPTX::TEX_2D_S32_F32_LEVEL_RR:
    return NVPTX::TEX_2D_S32_F32_LEVEL_RI;
  case NVPTX::TEX_2D_S32_F32_LEVEL_IR:
    return NVPTX::TEX_2D_S32_F32_LEVEL_II;
  case NVPTX::TEX_2D_S32_F32_GRAD_RR:
    return NVPTX::TEX_2D_S32_F32_GRAD_RI;
  case NVPTX::TEX_2D_S32_F32_GRAD_IR:
    return NVPTX::TEX_2D_S32_F32_GRAD_II;
  case NVPTX::TEX_2D_U32_S32_RR:
    return NVPTX::TEX_2D_U32_S32_RI;
  case NVPTX::TEX_2D_U32_S32_IR:
    return NVPTX::TEX_2D_U32_S32_II;
  case NVPTX::TEX_2D_U32_F32_RR:
    return NVPTX::TEX_2D_U32_F32_RI;
  case NVPTX::TEX_2D_U32_F32_IR:
    return NVPTX::TEX_2D_U32_F32_II;
  case NVPTX::TEX_2D_U32_F32_LEVEL_RR:
    return NVPTX::TEX_2D_U32_F32_LEVEL_RI;
  case NVPTX::TEX_2D_U32_F32_LEVEL_IR:
    return NVPTX::TEX_2D_U32_F32_LEVEL_II;
  case NVPTX::TEX_2D_U32_F32_GRAD_RR:
    return NVPTX::TEX_2D_U32_F32_GRAD_RI;
  case NVPTX::TEX_2D_U32_F32_GRAD_IR:
    return NVPTX::TEX_2D_U32_F32_GRAD_II;
  case NVPTX::TEX_2D_ARRAY_F32_S32_RR:
    return NVPTX::TEX_2D_ARRAY_F32_S32_RI;
  case NVPTX::TEX_2D_ARRAY_F32_S32_IR:
    return NVPTX::TEX_2D_ARRAY_F32_S32_II;
  case NVPTX::TEX_2D_ARRAY_F32_F32_RR:
    return NVPTX::TEX_2D_ARRAY_F32_F32_RI;
  case NVPTX::TEX_2D_ARRAY_F32_F32_IR:
    return NVPTX::TEX_2D_ARRAY_F32_F32_II;
  case NVPTX::TEX_2D_ARRAY_F32_F32_LEVEL_RR:
    return NVPTX::TEX_2D_ARRAY_F32_F32_LEVEL_RI;
  case NVPTX::TEX_2D_ARRAY_F32_F32_LEVEL_IR:
    return NVPTX::TEX_2D_ARRAY_F32_F32_LEVEL_II;
  case NVPTX::TEX_2D_ARRAY_F32_F32_GRAD_RR:
    return NVPTX::TEX_2D_ARRAY_F32_F32_GRAD_RI;
  case NVPTX::TEX_2D_ARRAY_F32_F32_GRAD_IR:
    return NVPTX::TEX_2D_ARRAY_F32_F32_GRAD_II;
  case NVPTX::TEX_2D_ARRAY_S32_S32_RR:
    return NVPTX::TEX_2D_ARRAY_S32_S32_RI;
  case NVPTX::TEX_2D_ARRAY_S32_S32_IR:
    return NVPTX::TEX_2D_ARRAY_S32_S32_II;
  case NVPTX::TEX_2D_ARRAY_S32_F32_RR:
    return NVPTX::TEX_2D_ARRAY_S32_F32_RI;
  case NVPTX::TEX_2D_ARRAY_S32_F32_IR:
    return NVPTX::TEX_2D_ARRAY_S32_F32_II;
  case NVPTX::TEX_2D_ARRAY_S32_F32_LEVEL_RR:
    return NVPTX::TEX_2D_ARRAY_S32_F32_LEVEL_RI;
  case NVPTX::TEX_2D_ARRAY_S32_F32_LEVEL_IR:
    return NVPTX::TEX_2D_ARRAY_S32_F32_LEVEL_II;
  case NVPTX::TEX_2D_ARRAY_S32_F32_GRAD_RR:
    return NVPTX::TEX_2D_ARRAY_S32_F32_GRAD_RI;
  case NVPTX::TEX_2D_ARRAY_S32_F32_GRAD_IR:
    return NVPTX::TEX_2D_ARRAY_S32_F32_GRAD_II;
  case NVPTX::TEX_2D_ARRAY_U32_S32_RR:
    return NVPTX::TEX_2D_ARRAY_U32_S32_RI;
  case NVPTX::TEX_2D_ARRAY_U32_S32_IR:
    return NVPTX::TEX_2D_ARRAY_U32_S32_II;
  case NVPTX::TEX_2D_ARRAY_U32_F32_RR:
    return NVPTX::TEX_2D_ARRAY_U32_F32_RI;
  case NVPTX::TEX_2D_ARRAY_U32_F32_IR:
    return NVPTX::TEX_2D_ARRAY_U32_F32_II;
  case NVPTX::TEX_2D_ARRAY_U32_F32_LEVEL_RR:
    return NVPTX::TEX_2D_ARRAY_U32_F32_LEVEL_RI;
  case NVPTX::TEX_2D_ARRAY_U32_F32_LEVEL_IR:
    return NVPTX::TEX_2D_ARRAY_U32_F32_LEVEL_II;
  case NVPTX::TEX_2D_ARRAY_U32_F32_GRAD_RR:
    return NVPTX::TEX_2D_ARRAY_U32_F32_GRAD_RI;
  case NVPTX::TEX_2D_ARRAY_U32_F32_GRAD_IR:
    return NVPTX::TEX_2D_ARRAY_U32_F32_GRAD_II;
  case NVPTX::TEX_3D_F32_S32_RR:
    return NVPTX::TEX_3D_F32_S32_RI;
  case NVPTX::TEX_3D_F32_S32_IR:
    return NVPTX::TEX_3D_F32_S32_II;
  case NVPTX::TEX_3D_F32_F32_RR:
    return NVPTX::TEX_3D_F32_F32_RI;
  case NVPTX::TEX_3D_F32_F32_IR:
    return NVPTX::TEX_3D_F32_F32_II;
  case NVPTX::TEX_3D_F32_F32_LEVEL_RR:
    return NVPTX::TEX_3D_F32_F32_LEVEL_RI;
  case NVPTX::TEX_3D_F32_F32_LEVEL_IR:
    return NVPTX::TEX_3D_F32_F32_LEVEL_II;
  case NVPTX::TEX_3D_F32_F32_GRAD_RR:
    return NVPTX::TEX_3D_F32_F32_GRAD_RI;
  case NVPTX::TEX_3D_F32_F32_GRAD_IR:
    return NVPTX::TEX_3D_F32_F32_GRAD_II;
  case NVPTX::TEX_3D_S32_S32_RR:
    return NVPTX::TEX_3D_S32_S32_RI;
  case NVPTX::TEX_3D_S32_S32_IR:
    return NVPTX::TEX_3D_S32_S32_II;
  case NVPTX::TEX_3D_S32_F32_RR:
    return NVPTX::TEX_3D_S32_F32_RI;
  case NVPTX::TEX_3D_S32_F32_IR:
    return NVPTX::TEX_3D_S32_F32_II;
  case NVPTX::TEX_3D_S32_F32_LEVEL_RR:
    return NVPTX::TEX_3D_S32_F32_LEVEL_RI;
  case NVPTX::TEX_3D_S32_F32_LEVEL_IR:
    return NVPTX::TEX_3D_S32_F32_LEVEL_II;
  case NVPTX::TEX_3D_S32_F32_GRAD_RR:
    return NVPTX::TEX_3D_S32_F32_GRAD_RI;
  case NVPTX::TEX_3D_S32_F32_GRAD_IR:
    return NVPTX::TEX_3D_S32_F32_GRAD_II;
  case NVPTX::TEX_3D_U32_S32_RR:
    return NVPTX::TEX_3D_U32_S32_RI;
  case NVPTX::TEX_3D_U32_S32_IR:
    return NVPTX::TEX_3D_U32_S32_II;
  case NVPTX::TEX_3D_U32_F32_RR:
    return NVPTX::TEX_3D_U32_F32_RI;
  case NVPTX::TEX_3D_U32_F32_IR:
    return NVPTX::TEX_3D_U32_F32_II;
  case NVPTX::TEX_3D_U32_F32_LEVEL_RR:
    return NVPTX::TEX_3D_U32_F32_LEVEL_RI;
  case NVPTX::TEX_3D_U32_F32_LEVEL_IR:
    return NVPTX::TEX_3D_U32_F32_LEVEL_II;
  case NVPTX::TEX_3D_U32_F32_GRAD_RR:
    return NVPTX::TEX_3D_U32_F32_GRAD_RI;
  case NVPTX::TEX_3D_U32_F32_GRAD_IR:
    return NVPTX::TEX_3D_U32_F32_GRAD_II;
  case NVPTX::TEX_CUBE_F32_F32_RR:
    return NVPTX::TEX_CUBE_F32_F32_RI;
  case NVPTX::TEX_CUBE_F32_F32_IR:
    return NVPTX::TEX_CUBE_F32_F32_II;
  case NVPTX::TEX_CUBE_F32_F32_LEVEL_RR:
    return NVPTX::TEX_CUBE_F32_F32_LEVEL_RI;
  case NVPTX::TEX_CUBE_F32_F32_LEVEL_IR:
    return NVPTX::TEX_CUBE_F32_F32_LEVEL_II;
  case NVPTX::TEX_CUBE_S32_F32_RR:
    return NVPTX::TEX_CUBE_S32_F32_RI;
  case NVPTX::TEX_CUBE_S32_F32_IR:
    return NVPTX::TEX_CUBE_S32_F32_II;
  case NVPTX::TEX_CUBE_S32_F32_LEVEL_RR:
    return NVPTX::TEX_CUBE_S32_F32_LEVEL_RI;
  case NVPTX::TEX_CUBE_S32_F32_LEVEL_IR:
    return NVPTX::TEX_CUBE_S32_F32_LEVEL_II;
  case NVPTX::TEX_CUBE_U32_F32_RR:
    return NVPTX::TEX_CUBE_U32_F32_RI;
  case NVPTX::TEX_CUBE_U32_F32_IR:
    return NVPTX::TEX_CUBE_U32_F32_II;
  case NVPTX::TEX_CUBE_U32_F32_LEVEL_RR:
    return NVPTX::TEX_CUBE_U32_F32_LEVEL_RI;
  case NVPTX::TEX_CUBE_U32_F32_LEVEL_IR:
    return NVPTX::TEX_CUBE_U32_F32_LEVEL_II;
  case NVPTX::TEX_CUBE_ARRAY_F32_F32_RR:
    return NVPTX::TEX_CUBE_ARRAY_F32_F32_RI;
  case NVPTX::TEX_CUBE_ARRAY_F32_F32_IR:
    return NVPTX::TEX_CUBE_ARRAY_F32_F32_II;
  case NVPTX::TEX_CUBE_ARRAY_F32_F32_LEVEL_RR:
    return NVPTX::TEX_CUBE_ARRAY_F32_F32_LEVEL_RI;
  case NVPTX::TEX_CUBE_ARRAY_F32_F32_LEVEL_IR:
    return NVPTX::TEX_CUBE_ARRAY_F32_F32_LEVEL_II;
  case NVPTX::TEX_CUBE_ARRAY_S32_F32_RR:
    return NVPTX::TEX_CUBE_ARRAY_S32_F32_RI;
  case NVPTX::TEX_CUBE_ARRAY_S32_F32_IR:
    return NVPTX::TEX_CUBE_ARRAY_S32_F32_II;
  case NVPTX::TEX_CUBE_ARRAY_S32_F32_LEVEL_RR:
    return NVPTX::TEX_CUBE_ARRAY_S32_F32_LEVEL_RI;
  case NVPTX::TEX_CUBE_ARRAY_S32_F32_LEVEL_IR:
    return NVPTX::TEX_CUBE_ARRAY_S32_F32_LEVEL_II;
  case NVPTX::TEX_CUBE_ARRAY_U32_F32_RR:
    return NVPTX::TEX_CUBE_ARRAY_U32_F32_RI;
  case NVPTX::TEX_CUBE_ARRAY_U32_F32_IR:
    return NVPTX::TEX_CUBE_ARRAY_U32_F32_II;
  case NVPTX::TEX_CUBE_ARRAY_U32_F32_LEVEL_RR:
    return NVPTX::TEX_CUBE_ARRAY_U32_F32_LEVEL_RI;
  case NVPTX::TEX_CUBE_ARRAY_U32_F32_LEVEL_IR:
    return NVPTX::TEX_CUBE_ARRAY_U32_F32_LEVEL_II;
  case NVPTX::TLD4_R_2D_F32_F32_RR:
    return NVPTX::TLD4_R_2D_F32_F32_RI;
  case NVPTX::TLD4_R_2D_F32_F32_IR:
    return NVPTX::TLD4_R_2D_F32_F32_II;
  case NVPTX::TLD4_G_2D_F32_F32_RR:
    return NVPTX::TLD4_G_2D_F32_F32_RI;
  case NVPTX::TLD4_G_2D_F32_F32_IR:
    return NVPTX::TLD4_G_2D_F32_F32_II;
  case NVPTX::TLD4_B_2D_F32_F32_RR:
    return NVPTX::TLD4_B_2D_F32_F32_RI;
  case NVPTX::TLD4_B_2D_F32_F32_IR:
    return NVPTX::TLD4_B_2D_F32_F32_II;
  case NVPTX::TLD4_A_2D_F32_F32_RR:
    return NVPTX::TLD4_A_2D_F32_F32_RI;
  case NVPTX::TLD4_A_2D_F32_F32_IR:
    return NVPTX::TLD4_A_2D_F32_F32_II;
  case NVPTX::TLD4_R_2D_S32_F32_RR:
    return NVPTX::TLD4_R_2D_S32_F32_RI;
  case NVPTX::TLD4_R_2D_S32_F32_IR:
    return NVPTX::TLD4_R_2D_S32_F32_II;
  case NVPTX::TLD4_G_2D_S32_F32_RR:
    return NVPTX::TLD4_G_2D_S32_F32_RI;
  case NVPTX::TLD4_G_2D_S32_F32_IR:
    return NVPTX::TLD4_G_2D_S32_F32_II;
  case NVPTX::TLD4_B_2D_S32_F32_RR:
    return NVPTX::TLD4_B_2D_S32_F32_RI;
  case NVPTX::TLD4_B_2D_S32_F32_IR:
    return NVPTX::TLD4_B_2D_S32_F32_II;
  case NVPTX::TLD4_A_2D_S32_F32_RR:
    return NVPTX::TLD4_A_2D_S32_F32_RI;
  case NVPTX::TLD4_A_2D_S32_F32_IR:
    return NVPTX::TLD4_A_2D_S32_F32_II;
  case NVPTX::TLD4_R_2D_U32_F32_RR:
    return NVPTX::TLD4_R_2D_U32_F32_RI;
  case NVPTX::TLD4_R_2D_U32_F32_IR:
    return NVPTX::TLD4_R_2D_U32_F32_II;
  case NVPTX::TLD4_G_2D_U32_F32_RR:
    return NVPTX::TLD4_G_2D_U32_F32_RI;
  case NVPTX::TLD4_G_2D_U32_F32_IR:
    return NVPTX::TLD4_G_2D_U32_F32_II;
  case NVPTX::TLD4_B_2D_U32_F32_RR:
    return NVPTX::TLD4_B_2D_U32_F32_RI;
  case NVPTX::TLD4_B_2D_U32_F32_IR:
    return NVPTX::TLD4_B_2D_U32_F32_II;
  case NVPTX::TLD4_A_2D_U32_F32_RR:
    return NVPTX::TLD4_A_2D_U32_F32_RI;
  case NVPTX::TLD4_A_2D_U32_F32_IR:
    return NVPTX::TLD4_A_2D_U32_F32_II;
  default:
    llvm_unreachable("Unhandled TEX opcode");
  };
}

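// Map a TXQ/SUQ query opcode that takes its texref/surfref in a register
// (*_R) to the form that takes it as an immediate index (*_I).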
static unsigned queryRegisterToIndexOpcode(unsigned RegOC) {
  switch (RegOC) {
  case NVPTX::TXQ_CHANNEL_ORDER_R:
    return NVPTX::TXQ_CHANNEL_ORDER_I;
  case NVPTX::TXQ_CHANNEL_DATA_TYPE_R:
    return NVPTX::TXQ_CHANNEL_DATA_TYPE_I;
  case NVPTX::TXQ_WIDTH_R:
    return NVPTX::TXQ_WIDTH_I;
  case NVPTX::TXQ_HEIGHT_R:
    return NVPTX::TXQ_HEIGHT_I;
  case NVPTX::TXQ_DEPTH_R:
    return NVPTX::TXQ_DEPTH_I;
  case NVPTX::TXQ_ARRAY_SIZE_R:
    return NVPTX::TXQ_ARRAY_SIZE_I;
  case NVPTX::TXQ_NUM_SAMPLES_R:
    return NVPTX::TXQ_NUM_SAMPLES_I;
  case NVPTX::TXQ_NUM_MIPMAP_LEVELS_R:
    return NVPTX::TXQ_NUM_MIPMAP_LEVELS_I;
  case NVPTX::SUQ_CHANNEL_ORDER_R:
    return NVPTX::SUQ_CHANNEL_ORDER_I;
  case NVPTX::SUQ_CHANNEL_DATA_TYPE_R:
    return NVPTX::SUQ_CHANNEL_DATA_TYPE_I;
  case NVPTX::SUQ_WIDTH_R:
    return NVPTX::SUQ_WIDTH_I;
  case NVPTX::SUQ_HEIGHT_R:
    return NVPTX::SUQ_HEIGHT_I;
  case NVPTX::SUQ_DEPTH_R:
    return NVPTX::SUQ_DEPTH_I;
  case NVPTX::SUQ_ARRAY_SIZE_R:
    return NVPTX::SUQ_ARRAY_SIZE_I;
  default:
    llvm_unreachable("Unhandled TXQ/SUQ opcode");
  };
}

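// Rewrite the handle operands of a single texture/surface instruction.
// Returns true if MI is a texture fetch, surface load/store, or
// texture/surface query (whether or not a handle operand was actually
// rewritten), and false otherwise.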
bool NVPTXReplaceImageHandles::processInstr(MachineInstr &MI) {
  MachineFunction &MF = *MI.getParent()->getParent();
  const MCInstrDesc &MCID = MI.getDesc();
  const NVPTXInstrInfo *TII = MF.getSubtarget<NVPTXSubtarget>().getInstrInfo();

  if (MCID.TSFlags & NVPTXII::IsTexFlag) {
    // This is a texture fetch, so operand 4 is a texref and operand 5 is
    // a samplerref
    MachineOperand &TexHandle = MI.getOperand(4);
    if (replaceImageHandle(TexHandle, MF))
      MI.setDesc(TII->get(texRegisterToIndexOpcode(MI.getOpcode())));

    if (!(MCID.TSFlags & NVPTXII::IsTexModeUnifiedFlag)) {
      MachineOperand &SampHandle = MI.getOperand(5);
      if (replaceImageHandle(SampHandle, MF))
        MI.setDesc(TII->get(samplerRegisterToIndexOpcode(MI.getOpcode())));
    }

    return true;
  } else if (MCID.TSFlags & NVPTXII::IsSuldMask) {
    unsigned VecSize =
        1 << (((MCID.TSFlags & NVPTXII::IsSuldMask) >> NVPTXII::IsSuldShift) - 1);

    // For a surface load of vector size N, the Nth operand will be the surfref
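    // (For example, a v4 load encodes 3 in the IsSuld field, so
    // VecSize = 1 << (3 - 1) = 4 and the surfref is operand 4.)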
    MachineOperand &SurfHandle = MI.getOperand(VecSize);

    if (replaceImageHandle(SurfHandle, MF))
      MI.setDesc(TII->get(suldRegisterToIndexOpcode(MI.getOpcode())));

    return true;
  } else if (MCID.TSFlags & NVPTXII::IsSustFlag) {
    // This is a surface store, so operand 0 is a surfref
    MachineOperand &SurfHandle = MI.getOperand(0);

    if (replaceImageHandle(SurfHandle, MF))
      MI.setDesc(TII->get(sustRegisterToIndexOpcode(MI.getOpcode())));

    return true;
  } else if (MCID.TSFlags & NVPTXII::IsSurfTexQueryFlag) {
    // This is a query, so operand 1 is a surfref/texref
    MachineOperand &Handle = MI.getOperand(1);

    if (replaceImageHandle(Handle, MF))
      MI.setDesc(TII->get(queryRegisterToIndexOpcode(MI.getOpcode())));

    return true;
  }

  return false;
}

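// Replace an image handle operand with its concrete handle index, if one can
// be found; returns true if the operand was rewritten.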
bool NVPTXReplaceImageHandles::replaceImageHandle(MachineOperand &Op,
                                                  MachineFunction &MF) {
  unsigned Idx;
  if (findIndexForHandle(Op, MF, Idx)) {
    Op.ChangeToImmediate(Idx);
    return true;
  }
  return false;
}

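// Trace the definition of the handle register back through copies/moves to
// the defining parameter load or texsurf_handles global, and report the image
// handle symbol index to use in its place.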
bool NVPTXReplaceImageHandles::
findIndexForHandle(MachineOperand &Op, MachineFunction &MF, unsigned &Idx) {
  const MachineRegisterInfo &MRI = MF.getRegInfo();
  NVPTXMachineFunctionInfo *MFI = MF.getInfo<NVPTXMachineFunctionInfo>();

  assert(Op.isReg() && "Handle is not in a reg?");

  // Which instruction defines the handle?
  MachineInstr &TexHandleDef = *MRI.getVRegDef(Op.getReg());

  switch (TexHandleDef.getOpcode()) {
  case NVPTX::LD_i64_avar: {
    // The handle is a parameter value being loaded, replace with the
    // parameter symbol
    const NVPTXTargetMachine &TM =
        static_cast<const NVPTXTargetMachine &>(MF.getTarget());
    if (TM.getDrvInterface() == NVPTX::CUDA) {
      // For CUDA, we preserve the param loads coming from function arguments
      return false;
    }

    assert(TexHandleDef.getOperand(6).isSymbol() && "Load is not a symbol!");
    StringRef Sym = TexHandleDef.getOperand(6).getSymbolName();
    std::string ParamBaseName = std::string(MF.getName());
    ParamBaseName += "_param_";
    assert(Sym.startswith(ParamBaseName) && "Invalid symbol reference");
    unsigned Param = atoi(Sym.data()+ParamBaseName.size());
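    // Rebuild the canonical "<function>_param_<N>" name from the parsed
    // index, e.g. "foo_param_1" for a hypothetical kernel "foo".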
    std::string NewSym;
    raw_string_ostream NewSymStr(NewSym);
    NewSymStr << MF.getName() << "_param_" << Param;

    InstrsToRemove.insert(&TexHandleDef);
    Idx = MFI->getImageHandleSymbolIndex(NewSymStr.str().c_str());
    return true;
  }
  case NVPTX::texsurf_handles: {
    // The handle is a global variable, replace with the global variable name
    assert(TexHandleDef.getOperand(1).isGlobal() && "Load is not a global!");
    const GlobalValue *GV = TexHandleDef.getOperand(1).getGlobal();
    assert(GV->hasName() && "Global sampler must be named!");
    InstrsToRemove.insert(&TexHandleDef);
    Idx = MFI->getImageHandleSymbolIndex(GV->getName().data());
    return true;
  }
  case NVPTX::nvvm_move_i64:
  case TargetOpcode::COPY: {
    bool Res = findIndexForHandle(TexHandleDef.getOperand(1), MF, Idx);
    if (Res) {
      InstrsToRemove.insert(&TexHandleDef);
    }
    return Res;
  }
  default:
    llvm_unreachable("Unknown instruction operating on handle");
  }
}

MachineFunctionPass *llvm::createNVPTXReplaceImageHandlesPass() {
  return new NVPTXReplaceImageHandles();
}