#pragma once

#include "util/types.hpp"
#include "util/atomic.hpp"

// Include asmjit with warnings ignored
#define ASMJIT_EMBED
#define ASMJIT_STATIC
#define ASMJIT_BUILD_DEBUG
#undef Bool

#ifdef _MSC_VER
#pragma warning(push, 0)
#include <asmjit/asmjit.h>
#pragma warning(pop)
#else
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wall"
#pragma GCC diagnostic ignored "-Wextra"
#pragma GCC diagnostic ignored "-Wold-style-cast"
#pragma GCC diagnostic ignored "-Wunused-parameter"
#pragma GCC diagnostic ignored "-Wstrict-aliasing"
#pragma GCC diagnostic ignored "-Wredundant-decls"
#pragma GCC diagnostic ignored "-Wnon-virtual-dtor"
#pragma GCC diagnostic ignored "-Weffc++"
#ifdef __clang__
#pragma GCC diagnostic ignored "-Wdeprecated-anon-enum-enum-conversion"
#pragma GCC diagnostic ignored "-Wcast-qual"
#else
#pragma GCC diagnostic ignored "-Wduplicated-branches"
#pragma GCC diagnostic ignored "-Wdeprecated-enum-enum-conversion"
#endif
#include <asmjit/asmjit.h>
#if defined(ARCH_ARM64)
#include <asmjit/a64.h>
#endif
#pragma GCC diagnostic pop
#endif

#include <array>
#include <functional>
#include <memory>
#include <string>
#include <string_view>
#include <unordered_map>
#include <util/v128.hpp>

#if defined(ARCH_X64)
using native_asm = asmjit::x86::Assembler;
using native_args = std::array<asmjit::x86::Gp, 4>;
#elif defined(ARCH_ARM64)
using native_asm = asmjit::a64::Assembler;
using native_args = std::array<asmjit::a64::Gp, 4>;
#endif

void jit_announce(uptr func, usz size, std::string_view name);

void jit_announce(auto* func, usz size, std::string_view name)
{
	jit_announce(uptr(func), size, name);
}
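
// Usage sketch (illustrative): announce a hand-emitted code region so that
// debuggers/profilers can resolve it by name ("buf" and "len" are hypothetical):
//
//   jit_announce(buf, len, "my_stub");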

enum class jit_class
{
	ppu_code,
	ppu_data,
	spu_code,
	spu_data,
};

struct jit_runtime_base
{
	jit_runtime_base() noexcept = default;
	virtual ~jit_runtime_base() = default;

	jit_runtime_base(const jit_runtime_base&) = delete;
	jit_runtime_base& operator=(const jit_runtime_base&) = delete;

	const asmjit::Environment& environment() const noexcept;
	void* _add(asmjit::CodeHolder* code) noexcept;
	virtual uchar* _alloc(usz size, usz align) noexcept = 0;
};

// ASMJIT runtime for emitting code in a single 2G region
struct jit_runtime final : jit_runtime_base
{
	jit_runtime();
	~jit_runtime() override;

	// Allocate executable memory
	uchar* _alloc(usz size, usz align) noexcept override;

	// Allocate memory
	static u8* alloc(usz size, usz align, bool exec = true) noexcept;

	// Should be called at least once after global initialization
	static void initialize();

	// Deallocate all memory
	static void finalize() noexcept;
};
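
// Usage sketch (illustrative): assumes jit_runtime::initialize() has already
// been called; allocates 128 bytes of executable memory from the shared region:
//
//   u8* ptr = jit_runtime::alloc(128, 16);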

namespace asmjit
{
	// Should only be used to build global functions
	jit_runtime_base& get_global_runtime();

	// Don't use directly
	class inline_runtime : public jit_runtime_base
	{
		uchar* m_data;
		usz m_size;

	public:
		inline_runtime(uchar* data, usz size);

		~inline_runtime();

		uchar* _alloc(usz size, usz align) noexcept override;
	};

	// Emit the retry loop adjacent to a transaction; returns the label to use as the xbegin fallback target (don't use xabort)
	template <typename F>
	[[nodiscard]] inline asmjit::Label build_transaction_enter(asmjit::x86::Assembler& c, asmjit::Label fallback, F func)
	{
		Label fall = c.newLabel();
		Label begin = c.newLabel();
		c.jmp(begin);
		c.bind(fall);

		// Don't repeat on zero status (may indicate syscall or interrupt)
		c.test(x86::eax, x86::eax);
		c.jz(fallback);

		// Only invoked after a failure (can fall through to proceed, or jump anywhere else)
		func();

		// Other bad statuses are ignored regardless of repeat flag (TODO)
		c.align(AlignMode::kCode, 16);
		c.bind(begin);
		return fall;

		// xbegin should be issued manually; this allows adding more checks before entering the transaction
	}
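
	// Usage sketch (illustrative): the caller issues xbegin itself, using the
	// returned label as the abort/fallback target ("fallback" handling and the
	// failure counter are hypothetical):
	//
	//   Label fall = build_transaction_enter(c, fallback, [&]
	//   {
	//       c.add(x86::r11, 1); // count failed attempts
	//   });
	//   c.xbegin(fall);
	//   // ... transactional body ...
	//   c.xend();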

	// Helper to spill RDX (EDX) register for RDTSC
	inline void build_swap_rdx_with(asmjit::x86::Assembler& c, std::array<x86::Gp, 4>& args, const asmjit::x86::Gp& with)
	{
#ifdef _WIN32
		c.xchg(args[1], with);
		args[1] = with;
#else
		c.xchg(args[2], with);
		args[2] = with;
#endif
	}

	// Get full RDTSC value into the chosen register (clobbers rax/rdx; with any other target, only rax is preserved)
	inline void build_get_tsc(asmjit::x86::Assembler& c, const asmjit::x86::Gp& to = asmjit::x86::rax)
	{
		if (&to != &x86::rax && &to != &x86::rdx)
		{
			// Swap to save its contents
			c.xchg(x86::rax, to);
		}

		c.rdtsc();
		c.shl(x86::rdx, 32);

		if (&to == &x86::rax)
		{
			c.or_(x86::rax, x86::rdx);
		}
		else if (&to == &x86::rdx)
		{
			c.or_(x86::rdx, x86::rax);
		}
		else
		{
			// Swap back; there may be a more efficient way to do this
			c.xchg(x86::rax, to);
			c.mov(to.r32(), to.r32());
			c.or_(to.r64(), x86::rdx);
		}
	}
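
	// Usage sketch (illustrative): read the full TSC into a caller-chosen
	// register; per the contract above, rax is preserved but rdx is clobbered:
	//
	//   build_get_tsc(c, x86::r10); // 64-bit TSC in r10
	//   c.ret();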

	inline void build_init_args_from_ghc(native_asm& c, native_args& args)
	{
#if defined(ARCH_X64)
		// TODO: handle case when args don't overlap with r13/rbp/r12/rbx
		c.mov(args[0], x86::r13);
		c.mov(args[1], x86::rbp);
		c.mov(args[2], x86::r12);
		c.mov(args[3], x86::rbx);
#else
		static_cast<void>(c);
		static_cast<void>(args);
#endif
	}

	inline void build_init_ghc_args(native_asm& c, native_args& args)
	{
#if defined(ARCH_X64)
		// TODO: handle case when args don't overlap with r13/rbp/r12/rbx
		c.mov(x86::r13, args[0]);
		c.mov(x86::rbp, args[1]);
		c.mov(x86::r12, args[2]);
		c.mov(x86::rbx, args[3]);
#else
		static_cast<void>(c);
		static_cast<void>(args);
#endif
	}
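
	// Note (assumption): these helpers bridge the GHC calling convention used
	// for JIT-compiled code (arguments pinned in r13/rbp/r12/rbx on x86-64)
	// and the native C ABI argument registers, e.g. in a thunk ("ghc_entry" is
	// hypothetical):
	//
	//   build_init_ghc_args(c, args); // native args -> GHC registers
	//   c.jmp(ghc_entry);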

#if defined(ARCH_X64)
	struct simd_builder : native_asm
	{
		std::unordered_map<v128, Label> consts;

		Operand v0, v1, v2, v3, v4, v5;

		uint vsize = 16;
		uint vmask = 0;

		simd_builder(CodeHolder* ch) noexcept;
		~simd_builder();

		void operator()() noexcept;

		void _init(uint new_vsize = 0);
		void vec_cleanup_ret();
		void vec_set_all_zeros(const Operand& v);
		void vec_set_all_ones(const Operand& v);
		void vec_set_const(const Operand& v, const v128& value);
		void vec_clobbering_test(u32 esize, const Operand& v, const Operand& rhs);
		void vec_broadcast_gpr(u32 esize, const Operand& v, const x86::Gp& r);

		// Return x86::ptr(base, index, X, 0) where the shift X is chosen to match esize
		x86::Mem ptr_scale_for_vec(u32 esize, const x86::Gp& base, const x86::Gp& index);

		void vec_load_unaligned(u32 esize, const Operand& v, const x86::Mem& src);
		void vec_store_unaligned(u32 esize, const Operand& v, const x86::Mem& dst);
		void vec_partial_move(u32 esize, const Operand& dst, const Operand& src);

		void _vec_binary_op(x86::Inst::Id sse_op, x86::Inst::Id vex_op, x86::Inst::Id evex_op, const Operand& dst, const Operand& lhs, const Operand& rhs);

		void vec_shuffle_xi8(const Operand& dst, const Operand& lhs, const Operand& rhs)
		{
			using enum x86::Inst::Id;
			_vec_binary_op(kIdPshufb, kIdVpshufb, kIdVpshufb, dst, lhs, rhs);
		}

		void vec_xor(u32, const Operand& dst, const Operand& lhs, const Operand& rhs)
		{
			using enum x86::Inst::Id;
			_vec_binary_op(kIdPxor, kIdVpxor, kIdVpxord, dst, lhs, rhs);
		}

		void vec_or(u32, const Operand& dst, const Operand& lhs, const Operand& rhs)
		{
			using enum x86::Inst::Id;
			_vec_binary_op(kIdPor, kIdVpor, kIdVpord, dst, lhs, rhs);
		}

		void vec_andn(u32, const Operand& dst, const Operand& lhs, const Operand& rhs)
		{
			using enum x86::Inst::Id;
			_vec_binary_op(kIdPandn, kIdVpandn, kIdVpandnd, dst, lhs, rhs);
		}

		void vec_umin(u32 esize, const Operand& dst, const Operand& lhs, const Operand& rhs);
		void vec_umax(u32 esize, const Operand& dst, const Operand& lhs, const Operand& rhs);
		void vec_cmp_eq(u32 esize, const Operand& dst, const Operand& lhs, const Operand& rhs);

		void vec_extract_high(u32 esize, const Operand& dst, const Operand& src);
		void vec_extract_gpr(u32 esize, const x86::Gp& dst, const Operand& src);

		simd_builder& keep_if_not_masked()
		{
			if (vmask && vmask < 8)
			{
				this->k(x86::KReg(vmask));
			}

			return *this;
		}

		simd_builder& zero_if_not_masked()
		{
			if (vmask && vmask < 8)
			{
				this->k(x86::KReg(vmask));
				this->z();
			}

			return *this;
		}

		void build_loop(u32 esize, const x86::Gp& reg_ctr, const x86::Gp& reg_cnt, auto&& build, auto&& reduce)
		{
			ensure((esize & (esize - 1)) == 0);
			ensure(esize <= vsize);

			Label body = this->newLabel();
			Label next = this->newLabel();
			Label exit = this->newLabel();

			const u32 step = vsize / esize;

			this->xor_(reg_ctr.r32(), reg_ctr.r32()); // Reset counter reg
			this->cmp(reg_cnt, step);
			this->jb(next); // If count < step, skip main loop body
			this->align(AlignMode::kCode, 16);
			this->bind(body);
			this->sub(reg_cnt, step);
			build();
			this->add(reg_ctr, step);
			this->cmp(reg_cnt, step);
			this->jae(body);
			this->bind(next);

			if (vmask)
			{
				// Build single last iteration (masked)
				this->test(reg_cnt, reg_cnt);
				this->jz(exit);

				if (esize == 1 && vsize == 64)
				{
					this->bzhi(reg_cnt.r64(), x86::Mem(consts[~u128()], 0), reg_cnt.r64());
					this->kmovq(x86::k7, reg_cnt.r64());
				}
				else
				{
					this->bzhi(reg_cnt.r32(), x86::Mem(consts[~u128()], 0), reg_cnt.r32());
					this->kmovd(x86::k7, reg_cnt.r32());
				}

				vmask = 7;
				build();

				// Unrolled reduction steps
				this->bind(exit);
				while (true)
				{
					vsize /= 2;
					if (vsize < esize)
						break;
					this->_init(vsize);
					reduce();
				}
			}
			else
			{
				// Build unrolled loop tail (reduced vector width)
				while (true)
				{
					vsize /= 2;
					if (vsize < esize)
						break;

					// Shall not clobber flags
					this->_init(vsize);
					reduce();

					if (vsize == esize)
					{
						// Last "iteration"
						this->test(reg_cnt, reg_cnt);
						this->jz(exit);
						build();
					}
					else
					{
						const u32 step = vsize / esize;
						Label next = this->newLabel();
						this->cmp(reg_cnt, step);
						this->jb(next);
						build();
						this->add(reg_ctr, step);
						this->sub(reg_cnt, step);
						this->bind(next);
					}
				}

				this->bind(exit);
			}

			this->_init(0);
		}
	};
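
	// Usage sketch (illustrative): process reg_cnt elements of size 1, indexed
	// by reg_ctr; the first lambda emits the (possibly masked) vector body, the
	// second folds vector halves at each reduced width ("src" is hypothetical):
	//
	//   simd_builder c(&code);
	//   c.build_loop(1, x86::rax, x86::rcx,
	//       [&] { c.zero_if_not_masked().vec_load_unaligned(1, c.v1, c.ptr_scale_for_vec(1, src, x86::rax)); /* consume v1 */ },
	//       [&] { c.vec_extract_high(1, c.v1, c.v0); /* fold v1 into v0 */ });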

	// for (; count > 0; ctr++, count--)
	inline void build_loop(native_asm& c, auto ctr, auto count, auto&& build)
	{
		asmjit::Label body = c.newLabel();
		asmjit::Label exit = c.newLabel();

		c.test(count, count);
		c.jz(exit);
		c.align(asmjit::AlignMode::kCode, 16);
		c.bind(body);
		build();
		c.inc(ctr);
		c.sub(count, 1);
		c.ja(body);
		c.bind(exit);
	}
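
	// Usage sketch (illustrative): copy "count" dwords ("src"/"dst" are
	// hypothetical base registers):
	//
	//   build_loop(c, x86::rax, x86::rcx, [&]
	//   {
	//       c.mov(x86::edx, x86::dword_ptr(src, x86::rax, 2));
	//       c.mov(x86::dword_ptr(dst, x86::rax, 2), x86::edx);
	//   });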

	inline void maybe_flush_lbr(native_asm& c, uint count = 2)
	{
		// Workaround for bad LBR callstacks that occur in some situations (mainly TSX): execute additional RETs
		Label next = c.newLabel();
		c.lea(x86::rcx, x86::qword_ptr(next));

		for (u32 i = 0; i < count; i++)
		{
			c.push(x86::rcx);
			c.sub(x86::rcx, 16);
		}

		for (u32 i = 0; i < count; i++)
		{
			c.ret();
			c.align(asmjit::AlignMode::kCode, 16);
		}

		c.bind(next);
	}
#endif
}

// Build runtime function with the native asmjit assembler
template <typename FT, typename Asm = native_asm, typename F>
inline FT build_function_asm(std::string_view name, F&& builder, ::jit_runtime* custom_runtime = nullptr)
{
#ifdef __APPLE__
	pthread_jit_write_protect_np(false);
#endif
	using namespace asmjit;

	auto& rt = custom_runtime ? *custom_runtime : get_global_runtime();

	CodeHolder code;
	code.init(rt.environment());

#if defined(ARCH_X64)
	native_args args;
#ifdef _WIN32
	args[0] = x86::rcx;
	args[1] = x86::rdx;
	args[2] = x86::r8;
	args[3] = x86::r9;
#else
	args[0] = x86::rdi;
	args[1] = x86::rsi;
	args[2] = x86::rdx;
	args[3] = x86::rcx;
#endif
#elif defined(ARCH_ARM64)
	native_args args;
	args[0] = a64::x0;
	args[1] = a64::x1;
	args[2] = a64::x2;
	args[3] = a64::x3;
#endif

	Asm compiler(&code);
	compiler.addEncodingOptions(EncodingOptions::kOptimizedAlign);
	if constexpr (std::is_invocable_r_v<bool, F, Asm&, native_args&>)
	{
		if (!builder(compiler, args))
			return nullptr;
	}
	else
	{
		builder(compiler, args);
	}

	if constexpr (std::is_invocable_r_v<void, Asm>)
	{
		// Finalization
		compiler();
	}

	const auto result = rt._add(&code);
	jit_announce(result, code.codeSize(), name);
	return reinterpret_cast<FT>(uptr(result));
}
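
// Usage sketch (illustrative): build a named, callable function at runtime;
// "get_tsc" and the signature are hypothetical:
//
//   const auto get_tsc = build_function_asm<u64(*)()>("get_tsc", [](native_asm& c, native_args&)
//   {
//   #if defined(ARCH_X64)
//       asmjit::build_get_tsc(c);
//       c.ret();
//   #endif
//   });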

#ifdef LLVM_AVAILABLE

namespace llvm
{
	class LLVMContext;
	class ExecutionEngine;
	class Module;
	class StringRef;
}

// Temporary compiler interface
class jit_compiler final
{
	// Local LLVM context
	std::unique_ptr<llvm::LLVMContext> m_context{};

	// Execution instance
	std::unique_ptr<llvm::ExecutionEngine> m_engine{};

	// Arch
	std::string m_cpu{};

	// Disk space left
	atomic_t<usz> m_disk_space = umax;

public:
	jit_compiler(const std::unordered_map<std::string, u64>& _link, const std::string& _cpu, u32 flags = 0);
	~jit_compiler();

	// Get LLVM context
	auto& get_context()
	{
		return *m_context;
	}

	auto& get_engine() const
	{
		return *m_engine;
	}

	// Add module (path to obj cache dir)
	void add(std::unique_ptr<llvm::Module> _module, const std::string& path);

	// Add module (not cached)
	void add(std::unique_ptr<llvm::Module> _module);

	// Add object (path to obj file)
	bool add(const std::string& path);

	// Update global mapping for a single value
	void update_global_mapping(const std::string& name, u64 addr);

	// Check object file
	static bool check(const std::string& path);

	// Finalize
	void fin();

	// Get compiled function address
	u64 get(const std::string& name);

	// Get CPU info
	static std::string cpu(const std::string& _cpu);

	// Get system triple (PPU)
	static std::string triple1();

	// Get system triple (SPU)
	static std::string triple2();

	bool add_sub_disk_space(ssz space);
};
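
// Usage sketch (illustrative): compile one module and resolve a symbol; the
// link map, module and names are hypothetical:
//
//   jit_compiler jit({}, jit_compiler::cpu(""));
//   jit.add(std::move(_module), cache_path); // or add(std::move(_module)) for no cache
//   jit.fin();
//   const u64 addr = jit.get("my_function");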

llvm::StringRef fallback_cpu_detection();

#endif // LLVM_AVAILABLE