public class AMD64MacroAssembler extends AMD64Assembler
| Modifier and Type | Class and Description |
|---|---|
static class |
AMD64MacroAssembler.ExtendMode |
Nested classes/interfaces inherited from class AMD64Assembler:
AMD64Assembler.AMD64BinaryArithmetic, AMD64Assembler.AMD64ImmOp, AMD64Assembler.AMD64MIOp, AMD64Assembler.AMD64MOp, AMD64Assembler.AMD64MROp, AMD64Assembler.AMD64Op, AMD64Assembler.AMD64RMIOp, AMD64Assembler.AMD64RMOp, AMD64Assembler.AMD64RROp, AMD64Assembler.AMD64Shift, AMD64Assembler.AMD64Z0Op, AMD64Assembler.ConditionFlag, AMD64Assembler.EvexGatherOp, AMD64Assembler.Options, AMD64Assembler.SSEMROp, AMD64Assembler.SSEOp, AMD64Assembler.VexAESOp, AMD64Assembler.VexFloatCompareOp, AMD64Assembler.VexGatherOp, AMD64Assembler.VexGeneralMoveOp, AMD64Assembler.VexGeneralPurposeRMOp, AMD64Assembler.VexGeneralPurposeRMVOp, AMD64Assembler.VexGeneralPurposeRVMOp, AMD64Assembler.VexMaskedMoveOp, AMD64Assembler.VexMoveMaskOp, AMD64Assembler.VexMoveOp, AMD64Assembler.VexMRIOp, AMD64Assembler.VexMROp, AMD64Assembler.VexOp, AMD64Assembler.VexRMIOp, AMD64Assembler.VexRMOp, AMD64Assembler.VexRRIOp, AMD64Assembler.VexRROp, AMD64Assembler.VexRVMConvertOp, AMD64Assembler.VexRVMIOp, AMD64Assembler.VexRVMOp, AMD64Assembler.VexRVMROp, AMD64Assembler.VexShiftOp

Nested classes/interfaces inherited from class AMD64BaseAssembler:
AMD64BaseAssembler.AddressDisplacementAnnotation, AMD64BaseAssembler.EVEXComparisonPredicate, AMD64BaseAssembler.EVEXPrefixConfig, AMD64BaseAssembler.EVEXTuple, AMD64BaseAssembler.OperandDataAnnotation, AMD64BaseAssembler.OperandSize, AMD64BaseAssembler.VEXPrefixConfig

Nested classes/interfaces inherited from class Assembler:
Assembler.CodeAnnotation, Assembler.InstructionCounter, Assembler.LabelHint

Fields inherited from class AMD64Assembler:
JCC_ERRATUM_MITIGATION_BOUNDARY, MODRM_IN_BYTES, OPCODE_IN_BYTES, P_0F, P_0F38, P_0F3A

Fields inherited from class AMD64BaseAssembler:
DEFAULT_DISP8_SCALE, force4ByteNonZeroDisplacements

Fields inherited from class Assembler:
codePatchingAnnotationConsumer

| Constructor and Description |
|---|
AMD64MacroAssembler(TargetDescription target) |
AMD64MacroAssembler(TargetDescription target,
OptionValues optionValues) |
AMD64MacroAssembler(TargetDescription target,
OptionValues optionValues,
boolean hasIntelJccErratum) |
| Modifier and Type | Method and Description |
|---|---|
void |
addqAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
alignBeforeCall(boolean align,
int prefixInstructionSize)
Emits alignment before a direct call to a fixed address.
|
void |
andlAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
applyMOpAndJcc(AMD64Assembler.AMD64MOp op,
AMD64BaseAssembler.OperandSize size,
Register dst,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmpAndJcc(AMD64BaseAssembler.OperandSize size,
Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmpAndJcc(AMD64BaseAssembler.OperandSize size,
Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp,
IntConsumer applyBeforeFusedPair) |
void |
cmpAndJcc(AMD64BaseAssembler.OperandSize size,
Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmpAndJcc(AMD64BaseAssembler.OperandSize size,
Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp,
boolean annotateImm,
IntConsumer applyBeforeFusedPair) |
void |
cmpAndJcc(AMD64BaseAssembler.OperandSize size,
Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmpAndJcc(AMD64BaseAssembler.OperandSize size,
Register src1,
Supplier<AMD64Address> src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget) |
void |
cmplAndJcc(Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmplAndJcc(Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmplAndJcc(Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmpptr(Register src1,
AMD64Address src2) |
void |
cmpptr(Register src1,
Register src2) |
int |
cmpqAndJcc(Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmpqAndJcc(Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
int |
cmpqAndJcc(Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
decqAndJcc(Register dst,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
decrementl(Register reg) |
void |
decrementl(Register reg,
int value) |
void |
decrementq(Register reg) |
void |
decrementq(Register reg,
int value) |
int |
directCall(long address,
Register scratch) |
int |
directJmp(long address,
Register scratch) |
void |
fcos(Register dest,
Register value,
AMD64Address tmp) |
void |
flog(Register dest,
Register value,
boolean base10,
AMD64Address tmp) |
void |
fpop() |
void |
fsin(Register dest,
Register value,
AMD64Address tmp) |
void |
ftan(Register dest,
Register value,
AMD64Address tmp) |
void |
incqAndJcc(Register dst,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
incrementl(Register reg,
int value) |
void |
incrementq(Register reg) |
void |
incrementq(Register reg,
int value) |
int |
indirectCall(Register callReg)
Emits an indirect call instruction.
|
int |
indirectCall(Register callReg,
boolean mitigateDecodingAsDirectCall)
Emits an indirect call instruction.
|
static boolean |
isAVX(AMD64 arch) |
static boolean |
isAVX(AMD64MacroAssembler asm) |
static void |
loadAndExtendAVX(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
AMD64Address.Scale scaleDst,
AMD64Address src,
AMD64Address.Scale scaleSrc) |
static void |
loadAndExtendSSE(AMD64MacroAssembler asm,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
AMD64Address.Scale scaleDst,
AMD64Address src,
AMD64Address.Scale scaleSrc) |
static void |
loadAndExtendSSE(AMD64MacroAssembler asm,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
AMD64Address.Scale scaleDst,
Register src,
AMD64Address.Scale scaleSrc) |
void |
movdbl(AMD64Address dst,
Register src) |
void |
movdbl(Register dst,
AMD64Address src) |
void |
movdbl(Register dst,
Register src) |
static void |
movdl(AMD64MacroAssembler asm,
Register dst,
AMD64Address src) |
static void |
movdl(AMD64MacroAssembler asm,
Register dst,
Register src) |
static void |
movdqu(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
AMD64Address dst,
Register src) |
static void |
movdqu(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
AMD64Address src) |
static void |
movdqu(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src) |
void |
movflt(AMD64Address dst,
Register src) |
void |
movflt(Register dst,
AMD64Address src) |
void |
movflt(Register dst,
Register src) |
static void |
movlhps(AMD64MacroAssembler asm,
Register dst,
Register src) |
void |
movlong(AMD64Address dst,
long src)
Non-atomic write of a 64-bit constant to memory.
|
void |
movptr(AMD64Address dst,
Register src) |
void |
movptr(Register dst,
AMD64Address src) |
static void |
movSZx(AMD64MacroAssembler asm,
AMD64Address.Scale scaleSrc,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
AMD64Address src)
Load one, two, four or eight bytes, according to
scaleSrc, into dst and zero-
or sign-extend depending on extendMode. |
static void |
movSZx(AMD64MacroAssembler asm,
AMD64BaseAssembler.OperandSize operandSize,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
AMD64Address src) |
static void |
packusdw(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src) |
static void |
packuswb(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src) |
static void |
palignr(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src,
int imm8) |
static void |
palignr(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src1,
Register src2,
int imm8) |
static void |
pand(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
AMD64Address src) |
static void |
pand(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src) |
static void |
pand(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src1,
Register src2) |
static void |
pandn(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src) |
static void |
pandU(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
AMD64Address src,
Register tmp)
PAND with unaligned memory operand.
|
static void |
pcmpeq(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
AMD64Address.Scale elementStride,
Register dst,
AMD64Address src)
Compares all packed bytes/words/dwords in
dst to src. |
static void |
pcmpeq(AMD64MacroAssembler asm,
AVXKind.AVXSize vectorSize,
AMD64Address.Scale elementStride,
Register dst,
Register src)
Compares all packed bytes/words/dwords in
dst to src. |
static void |
pcmpeq(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
JavaKind elementKind,
Register dst,
AMD64Address src)
Compares all packed bytes/words/dwords in
dst to src. |
static void |
pcmpeq(AMD64MacroAssembler asm,
AVXKind.AVXSize vectorSize,
JavaKind elementKind,
Register dst,
Register src)
Compares all packed bytes/words/dwords in
dst to src. |
static void |
pcmpeqb(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
AMD64Address src) |
static void |
pcmpeqb(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src) |
static void |
pcmpeqd(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
AMD64Address src) |
static void |
pcmpeqd(AMD64MacroAssembler asm,
AVXKind.AVXSize vectorSize,
Register dst,
Register src) |
static void |
pcmpeqw(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
AMD64Address src) |
static void |
pcmpeqw(AMD64MacroAssembler asm,
AVXKind.AVXSize vectorSize,
Register dst,
Register src) |
static void |
pcmpgtb(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src) |
static void |
pcmpgtd(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src) |
static void |
pmovmsk(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src) |
static void |
pmovSZx(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
AMD64Address.Scale scaleDst,
AMD64Address src,
AMD64Address.Scale scaleSrc) |
static void |
pmovSZx(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
AMD64Address.Scale scaleDst,
Register src,
AMD64Address.Scale scaleSrc) |
static void |
pmovSZx(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
AMD64Address.Scale scaleDst,
Register src,
AMD64Address.Scale scaleSrc,
int displacement) |
static void |
pmovSZx(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
AMD64MacroAssembler.ExtendMode extendMode,
AMD64Address.Scale scaleDst,
Register src,
AMD64Address.Scale scaleSrc,
Register index,
int displacement)
Load elements from address
(src, index, displacement) into vector register
dst, and zero- or sign-extend them to fit scaleDst. |
static void |
por(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src) |
static void |
pshufb(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
AMD64Address src) |
static void |
pshufb(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src) |
static void |
pslld(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
int imm8) |
static void |
pslld(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src,
int imm8) |
static void |
psllw(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
int imm8) |
static void |
psllw(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src,
int imm8) |
static void |
psrld(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
int imm8) |
static void |
psrld(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src,
int imm8) |
static void |
psrlw(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
int imm8) |
static void |
psrlw(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src,
int imm8) |
static void |
ptest(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst) |
static void |
ptest(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src) |
static void |
ptestU(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
AMD64Address src,
Register tmp)
PTEST with unaligned memory operand.
|
static void |
pxor(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src) |
static void |
pxor(AMD64MacroAssembler asm,
AVXKind.AVXSize size,
Register dst,
Register src1,
Register src2) |
void |
setl(AMD64Assembler.ConditionFlag cc,
Register dst) |
void |
setq(AMD64Assembler.ConditionFlag cc,
Register dst) |
void |
sublAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
sublAndJcc(Register dst,
Register src,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
subqAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
subqAndJcc(Register dst,
Register src,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testAndJcc(AMD64BaseAssembler.OperandSize size,
AMD64Address src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp,
IntConsumer applyBeforeFusedPair) |
void |
testAndJcc(AMD64BaseAssembler.OperandSize size,
Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testAndJcc(AMD64BaseAssembler.OperandSize size,
Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp,
IntConsumer applyBeforeFusedPair) |
void |
testAndJcc(AMD64BaseAssembler.OperandSize size,
Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testAndJcc(AMD64BaseAssembler.OperandSize size,
Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testbAndJcc(Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testbAndJcc(Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testlAndJcc(Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testlAndJcc(Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
int |
testqAndJcc(Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
xorlAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
addl, addl, addl, addpd, addpd, addq, addq, addq, addressInBytes, addsd, addsd, align, andl, andl, andpd, andpd, andq, andq, andq, bsfq, bsrl, bsrq, bswapl, bswapq, btrq, call, call, cdql, cdqq, clflush, clflushopt, cmovl, cmovl, cmovq, cmovq, cmpb, cmpl, cmpl, cmpl, cmpq, cmpq, cmpq, cmpw, cmpxchgb, cmpxchgl, cmpxchgq, cmpxchgw, cvtdq2pd, cvtsi2sdl, cvtsi2sdq, cvttpd2dq, cvttsd2sil, cvttsd2siq, cvttss2sil, decl, decl, decq, divsd, emit, emit, emit, emit, ensureUniquePC, ensureWithinBoundary, evmovdqu16, evmovdqu16, evmovdqu16, evmovdqu16, evmovdqu64, evmovdqu64, evpbroadcastw, evpcmpeqb, evpcmpgtb, evpcmpgtb, evpcmpuw, evpcmpuw, evpmovwb, evpmovwb, evpmovzxbw, evpmovzxbw, fcos, ffree, fincstp, fldd, fldlg2, fldln2, flds, fnstswAX, fprem, fptan, fsin, fstp, fstpd, fstps, fwait, fxch, fyl2x, getPlaceholder, getPrefixInBytes, getPrefixInBytes, getPrefixInBytes, getPrefixInBytes, hlt, illegal, imull, imulq, incl, incl, incq, int3, jcc, jcc, jcc, jcc, jccb, jmp, jmp, jmp, jmp, jmpb, jmpWithoutAlignment, kmovb, kmovb, kmovb, kmovd, kmovd, kmovd, kmovq, kmovq, kmovq, kmovw, kmovw, kmovw, kortestd, kortestq, ktestd, ktestq, lead, leaq, leave, lfence, lock, makeAddress, makeAddress, mayCrossBoundary, membar, mitigateJCCErratum, mitigateJCCErratum, movapd, movaps, movb, movb, movb, movddup, movdl, movdl, movdq, movdq, movdq, movdqu, movdqu, movdqu, movl, movl, movl, movl, movl, movl, movlhps, movlpd, movq, movq, movq, movq, movq, movq, movsbl, movsbl, movsbq, movsbq, movsd, movsd, movsd, movslq, movslq, movslq, movss, movss, movss, movswl, movswq, movw, movw, movw, movzbl, movzbl, movzbq, movzbq, movzwl, movzwq, mulpd, mulpd, mulsd, mulsd, mulss, negl, negq, nop, nop, notl, notq, nullCheck, orl, orl, orl, orq, orq, orq, packusdw, packuswb, paddd, paddq, palignr, pand, pand, pandn, patchJumpTarget, pause, pcmpeqb, pcmpeqb, pcmpeqd, pcmpeqd, pcmpeqw, pcmpeqw, pcmpestri, pcmpestri, pcmpgtb, pcmpgtd, pextrw, pinsrw, pmovmskb, pmovsxbd, pmovsxbd, pmovsxbq, pmovsxbq, pmovsxbw, 
pmovsxbw, pmovsxdq, pmovsxdq, pmovsxwd, pmovsxwd, pmovsxwq, pmovsxwq, pmovzxbd, pmovzxbd, pmovzxbq, pmovzxbq, pmovzxbw, pmovzxbw, pmovzxdq, pmovzxdq, pmovzxwd, pmovzxwd, pmovzxwq, pmovzxwq, pop, popcntl, popfq, por, prefetchnta, prefetcht0, prefetcht1, prefetcht2, prefetchw, pshufb, pshufb, pshufd, pshuflw, pslld, psllq, psllq, psllw, psrad, psrld, psrldq, psrlq, psrlw, psubd, psubusb, psubusb, ptest, ptest, punpcklbw, push, pushfq, pxor, rcpps, rdtsc, rdtscp, repStosb, repStosq, ret, sarl, sarl, sarq, sarq, sbbq, setb, sfence, shll, shll, shlq, shlq, shlxl, shrl, shrl, shrq, shrq, sqrtsd, subl, subl, subl, subpd, subq, subq, subqWide, subsd, subsd, testl, testl, testl, testq, testq, ucomisd, ucomiss, unpckhpd, unpcklpd, vmovdqu, vmovdqu, vmovdqu64, vmovdqu64, vpand, vpandn, vpcmpeqb, vpcmpeqd, vpcmpeqw, vpmovmskb, vpmovzxbw, vpor, vpslld, vpsllw, vpsrld, vpsrlw, vptest, vpxor, vpxor, vzeroupper, xaddb, xaddl, xaddq, xaddw, xchgb, xchgl, xchgq, xchgw, xorl, xorpd, xorpd, xorps, xorps, xorq

Methods inherited from class AMD64BaseAssembler:
addFeatures, annotatePatchingImmediate, emitModRM, emitModRM, emitOperandHelper, emitOperandHelper, emitOperandHelper, emitOperandHelper, emitVEX, encode, evexPrefix, evexPrefix, getFeatures, getLFlag, getRXB, getRXB, inRC, isAVX512Register, isCurrentRegionFeature, isVariableLengthAVX512Register, needsRex, needsRex, prefix, prefix, prefix, prefix, prefix, prefix, prefixb, prefixq, prefixq, prefixq, removeFeatures, rexw, setForce4ByteNonZeroDisplacements, simdPrefix, simdPrefix, simdPrefix, simdPrefix, supports, supportsCPUFeature, vexPrefix, vexPrefix, vexPrefix, vexPrefix, vexPrefix

Methods inherited from class Assembler:
bind, close, copy, createLabelName, emitByte, emitByte, emitInt, emitInt, emitLong, emitLong, emitShort, emitShort, emitString, emitString, emitString0, getByte, getInstructionCounter, getInt, getMachineCodeCallDisplacementOffset, getReturnAddressSize, getShort, inlineObjects, isTargetMP, nameOf, position, requestLabelHint, reset, setCodePatchingAnnotationConsumer

public AMD64MacroAssembler(TargetDescription target)
public AMD64MacroAssembler(TargetDescription target,
OptionValues optionValues)
public AMD64MacroAssembler(TargetDescription target,
OptionValues optionValues,
boolean hasIntelJccErratum)
public final void decrementq(Register reg)
public final void decrementq(Register reg,
int value)
public final void incrementq(Register reg)
public void incrementq(Register reg,
int value)
public final void movptr(Register dst,
AMD64Address src)
public final void movptr(AMD64Address dst, Register src)
public final void cmpptr(Register src1,
Register src2)
public final void cmpptr(Register src1,
AMD64Address src2)
public final void decrementl(Register reg)
public final void decrementl(Register reg,
int value)
public final void incrementl(Register reg,
int value)
public void movflt(Register dst,
Register src)
public void movflt(Register dst,
AMD64Address src)
public void movflt(AMD64Address dst, Register src)
public void movdbl(Register dst,
Register src)
public void movdbl(Register dst,
AMD64Address src)
public void movdbl(AMD64Address dst, Register src)
public final void movlong(AMD64Address dst, long src)
public final void setl(AMD64Assembler.ConditionFlag cc, Register dst)
public final void setq(AMD64Assembler.ConditionFlag cc, Register dst)
public final void flog(Register dest,
Register value,
boolean base10,
AMD64Address tmp)
public final void fsin(Register dest,
Register value,
AMD64Address tmp)
public final void fcos(Register dest,
Register value,
AMD64Address tmp)
public final void ftan(Register dest,
Register value,
AMD64Address tmp)
public final void fpop()
public void alignBeforeCall(boolean align,
int prefixInstructionSize)
Emits alignment before a direct call to a fixed address: 1) when align is true, the fixed
address, i.e., the displacement of the call instruction, should be aligned to 4 bytes; 2) when
useBranchesWithin32ByteBoundary is true, the call instruction should be aligned with a
32-byte boundary.

prefixInstructionSize - size of the additional instruction to be emitted before the call
instruction. This is used in the HotSpot inline cache convention, where a movq
instruction of the cached receiver type to the rax register must be emitted
before the call instruction.

public final int indirectCall(Register callReg)
public final int indirectCall(Register callReg,
boolean mitigateDecodingAsDirectCall)
The NativeCall::is_call_before(address pc) function in HotSpot determines that there
is a direct call instruction whose last byte is at pc - 1 if the byte at
pc - 5 is 0xE8. An indirect call can thus be incorrectly decoded as a direct call if
the preceding instructions match this pattern. To avoid this,
mitigateDecodingAsDirectCall == true will insert sufficient nops to avoid the false
decoding.

public final int directCall(long address,
Register scratch)
public final int directJmp(long address,
Register scratch)
public void applyMOpAndJcc(AMD64Assembler.AMD64MOp op, AMD64BaseAssembler.OperandSize size, Register dst, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void testAndJcc(AMD64BaseAssembler.OperandSize size, Register src, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void testlAndJcc(Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void testAndJcc(AMD64BaseAssembler.OperandSize size, AMD64Address src, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp, IntConsumer applyBeforeFusedPair)
public final void testAndJcc(AMD64BaseAssembler.OperandSize size, Register src1, Register src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void testlAndJcc(Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final int testqAndJcc(Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void testAndJcc(AMD64BaseAssembler.OperandSize size, Register src1, AMD64Address src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void testAndJcc(AMD64BaseAssembler.OperandSize size, Register src1, AMD64Address src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp, IntConsumer applyBeforeFusedPair)
public final void testbAndJcc(Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void testbAndJcc(Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void cmpAndJcc(AMD64BaseAssembler.OperandSize size, Register src, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void cmpAndJcc(AMD64BaseAssembler.OperandSize size, Register src, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp, boolean annotateImm, IntConsumer applyBeforeFusedPair)
public final void cmplAndJcc(Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void cmpqAndJcc(Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void cmpAndJcc(AMD64BaseAssembler.OperandSize size, Register src1, Register src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void cmpAndJcc(AMD64BaseAssembler.OperandSize size, Register src1, AMD64Address src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void cmplAndJcc(Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final int cmpqAndJcc(Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void cmpAndJcc(AMD64BaseAssembler.OperandSize size, Register src1, AMD64Address src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp, IntConsumer applyBeforeFusedPair)
public final void cmplAndJcc(Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final int cmpqAndJcc(Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void cmpAndJcc(AMD64BaseAssembler.OperandSize size, Register src1, Supplier<AMD64Address> src2, AMD64Assembler.ConditionFlag cc, Label branchTarget)
public final void andlAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void addqAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void sublAndJcc(Register dst,
Register src,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void subqAndJcc(Register dst,
Register src,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void sublAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void subqAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void incqAndJcc(Register dst,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void decqAndJcc(Register dst,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public final void xorlAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp)
public static void movSZx(AMD64MacroAssembler asm, AMD64BaseAssembler.OperandSize operandSize, AMD64MacroAssembler.ExtendMode extendMode, Register dst, AMD64Address src)
public static void movSZx(AMD64MacroAssembler asm, AMD64Address.Scale scaleSrc, AMD64MacroAssembler.ExtendMode extendMode, Register dst, AMD64Address src)
Load one, two, four or eight bytes, according to scaleSrc, into dst and zero-
or sign-extend depending on extendMode.

public static void pmovSZx(AMD64MacroAssembler asm, AVXKind.AVXSize size, AMD64MacroAssembler.ExtendMode extendMode, Register dst, AMD64Address.Scale scaleDst, Register src, AMD64Address.Scale scaleSrc, int displacement)
public static void pmovSZx(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, AMD64MacroAssembler.ExtendMode extendMode, AMD64Address.Scale scaleDst, Register src, AMD64Address.Scale scaleSrc, Register index, int displacement)
Load elements from address (src, index, displacement) into vector register
dst, and zero- or sign-extend them to fit scaleDst.

size - vector size. May be AVXKind.AVXSize.XMM or AVXKind.AVXSize.YMM.
dst - a XMM or YMM vector register.
scaleDst - target stride. Must be greater or equal to scaleSrc.
src - the source address.
scaleSrc - source stride. Must be smaller or equal to scaleDst.
index - address index offset, scaled by scaleSrc.
displacement - address displacement in bytes. If scaleDst is greater than
scaleSrc, this displacement is scaled by the ratio of the former and
latter scales, e.g. if scaleDst is AMD64Address.Scale.Times4 and
scaleSrc is AMD64Address.Scale.Times2, the displacement is halved.

public static void pmovSZx(AMD64MacroAssembler asm, AVXKind.AVXSize size, AMD64MacroAssembler.ExtendMode extendMode, Register dst, AMD64Address.Scale scaleDst, AMD64Address src, AMD64Address.Scale scaleSrc)
public static void pmovSZx(AMD64MacroAssembler asm, AVXKind.AVXSize size, AMD64MacroAssembler.ExtendMode extendMode, Register dst, AMD64Address.Scale scaleDst, Register src, AMD64Address.Scale scaleSrc)
public static void pmovmsk(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src)
public static void movdqu(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, AMD64Address src)
public static void movdqu(AMD64MacroAssembler asm, AVXKind.AVXSize size, AMD64Address dst, Register src)
public static void movdqu(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src)
public static void pcmpeq(AMD64MacroAssembler asm, AVXKind.AVXSize vectorSize, AMD64Address.Scale elementStride, Register dst, Register src)
Compares all packed bytes/words/dwords in dst to src. Matching values are set
to all ones (0xff, 0xffff, ...), non-matching values are set to zero.

public static void pcmpeq(AMD64MacroAssembler asm, AVXKind.AVXSize vectorSize, JavaKind elementKind, Register dst, Register src)
Compares all packed bytes/words/dwords in dst to src. Matching values are set
to all ones (0xff, 0xffff, ...), non-matching values are set to zero.

public static void pcmpeqb(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src)
public static void pcmpeqw(AMD64MacroAssembler asm, AVXKind.AVXSize vectorSize, Register dst, Register src)
public static void pcmpeqd(AMD64MacroAssembler asm, AVXKind.AVXSize vectorSize, Register dst, Register src)
public static void pcmpeq(AMD64MacroAssembler asm, AVXKind.AVXSize size, AMD64Address.Scale elementStride, Register dst, AMD64Address src)
Compares all packed bytes/words/dwords in dst to src. Matching values are set
to all ones (0xff, 0xffff, ...), non-matching values are set to zero.

public static void pcmpeq(AMD64MacroAssembler asm, AVXKind.AVXSize size, JavaKind elementKind, Register dst, AMD64Address src)
Compares all packed bytes/words/dwords in dst to src. Matching values are set
to all ones (0xff, 0xffff, ...), non-matching values are set to zero.

public static void pcmpeqb(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, AMD64Address src)
public static void pcmpeqw(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, AMD64Address src)
public static void pcmpeqd(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, AMD64Address src)
public static void pcmpgtb(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src)
public static void pcmpgtd(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src)
public static void loadAndExtendAVX(AMD64MacroAssembler asm, AVXKind.AVXSize size, AMD64MacroAssembler.ExtendMode extendMode, Register dst, AMD64Address.Scale scaleDst, AMD64Address src, AMD64Address.Scale scaleSrc)
public static void loadAndExtendSSE(AMD64MacroAssembler asm, AMD64MacroAssembler.ExtendMode extendMode, Register dst, AMD64Address.Scale scaleDst, AMD64Address src, AMD64Address.Scale scaleSrc)
public static void loadAndExtendSSE(AMD64MacroAssembler asm, AMD64MacroAssembler.ExtendMode extendMode, Register dst, AMD64Address.Scale scaleDst, Register src, AMD64Address.Scale scaleSrc)
public static void packuswb(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src)
public static void packusdw(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src)
public static void palignr(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src, int imm8)
public static void palignr(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src1, Register src2, int imm8)
public static void pand(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src)
public static void pand(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src1, Register src2)
public static void pand(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, AMD64Address src)
public static void pandU(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, AMD64Address src, Register tmp)
public static void pandn(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src)
public static void por(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src)
public static void pxor(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src)
public static void pxor(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src1, Register src2)
public static void psllw(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, int imm8)
public static void psllw(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src, int imm8)
public static void psrlw(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, int imm8)
public static void psrlw(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src, int imm8)
public static void pslld(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, int imm8)
public static void pslld(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src, int imm8)
public static void psrld(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, int imm8)
public static void psrld(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src, int imm8)
public static void pshufb(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src)
public static void pshufb(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, AMD64Address src)
public static void ptest(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst)
public static void ptest(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, Register src)
public static void ptestU(AMD64MacroAssembler asm, AVXKind.AVXSize size, Register dst, AMD64Address src, Register tmp)
public static void movlhps(AMD64MacroAssembler asm, Register dst, Register src)
public static void movdl(AMD64MacroAssembler asm, Register dst, Register src)
public static void movdl(AMD64MacroAssembler asm, Register dst, AMD64Address src)
public static boolean isAVX(AMD64MacroAssembler asm)
public static boolean isAVX(AMD64 arch)