Mirror of https://gitlab.alpinelinux.org/alpine/aports.git, synced 2025-08-05 21:37:15 +02:00
Backported patches [1][2] that add support for llvm-libunwind [3] on ppc64le.

[1] dc25db1eb7a3cbe5c1b58f5225606efe9218ccd0
[2] 2b4c5e067b7259ae5d60553c62e309a1780df570
[3] https://github.com/llvm-mirror/libunwind
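For orientation, here is a minimal sketch (not part of this commit) of what the backport enables on ppc64le: an application can walk its own stack through the public libunwind API once the patched library is installed. UNW_REG_IP maps to srr0 and UNW_REG_SP to r1 in the new Registers_ppc64 class; the helper name below is made up for illustration.

#include <libunwind.h>
#include <stdio.h>

/* Minimal sketch, assuming the patched llvm-libunwind is the installed unwinder. */
static void print_backtrace(void) {
  unw_context_t context;
  unw_cursor_t cursor;
  unw_getcontext(&context);                /* saved by the new ppc64 path in UnwindRegistersSave.S */
  unw_init_local(&cursor, &context);
  do {
    unw_word_t ip, sp;
    unw_get_reg(&cursor, UNW_REG_IP, &ip); /* srr0 in Registers_ppc64 */
    unw_get_reg(&cursor, UNW_REG_SP, &sp); /* r1 in Registers_ppc64 */
    printf("ip=%#llx sp=%#llx\n", (unsigned long long)ip, (unsigned long long)sp);
  } while (unw_step(&cursor) > 0);         /* DWARF-driven; compact encoding is not used on ppc64 */
}

Each unw_step goes through the DWARF unwinder, since the UnwindCursor.hpp hunk below makes compactSaysUseDwarf always return true for Registers_ppc64. The backported diff follows.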
--- a/src/AddressSpace.hpp
|
|
+++ b/src/AddressSpace.hpp
|
|
@@ -608,6 +608,13 @@
|
|
RemoteAddressSpace<Pointer32<BigEndian>> oas;
|
|
};
|
|
|
|
+/// unw_addr_space_ppc64 is the concrete instance that a unw_addr_space_t points
|
|
+/// to when examining a 64-bit PowerPC process.
|
|
+struct unw_addr_space_ppc64 : public unw_addr_space {
|
|
+ unw_addr_space_ppc64(task_t task) : oas(task) {}
|
|
+ RemoteAddressSpace<Pointer64<LittleEndian>> oas;
|
|
+};
|
|
+
|
|
#endif // UNW_REMOTE
|
|
|
|
} // namespace libunwind
|
|
--- a/src/Registers.hpp
|
|
+++ b/src/Registers.hpp
|
|
@@ -1046,6 +1046,646 @@
|
|
}
|
|
#endif // _LIBUNWIND_TARGET_PPC
|
|
|
|
+#if defined(_LIBUNWIND_TARGET_PPC64)
|
|
+/// Registers_ppc64 holds the register state of a thread in a 64-bit PowerPC
|
|
+/// process.
|
|
+class _LIBUNWIND_HIDDEN Registers_ppc64 {
|
|
+public:
|
|
+ Registers_ppc64();
|
|
+ Registers_ppc64(const void *registers);
|
|
+
|
|
+ bool validRegister(int num) const;
|
|
+ uint64_t getRegister(int num) const;
|
|
+ void setRegister(int num, uint64_t value);
|
|
+ bool validFloatRegister(int num) const;
|
|
+ double getFloatRegister(int num) const;
|
|
+ void setFloatRegister(int num, double value);
|
|
+ bool validVectorRegister(int num) const;
|
|
+ v128 getVectorRegister(int num) const;
|
|
+ void setVectorRegister(int num, v128 value);
|
|
+ const char *getRegisterName(int num);
|
|
+ void jumpto();
|
|
+ static int lastDwarfRegNum() { return 116; }
|
|
+
|
|
+ uint64_t getSP() const { return _registers.__r1; }
|
|
+ void setSP(uint64_t value) { _registers.__r1 = value; }
|
|
+ uint64_t getIP() const { return _registers.__srr0; }
|
|
+ void setIP(uint64_t value) { _registers.__srr0 = value; }
|
|
+
|
|
+private:
|
|
+ struct ppc64_thread_state_t {
|
|
+ uint64_t __srr0; /* Instruction address register (PC) */
|
|
+ uint64_t __srr1; /* Machine state register (supervisor) */
|
|
+ uint64_t __r0;
|
|
+ uint64_t __r1;
|
|
+ uint64_t __r2;
|
|
+ uint64_t __r3;
|
|
+ uint64_t __r4;
|
|
+ uint64_t __r5;
|
|
+ uint64_t __r6;
|
|
+ uint64_t __r7;
|
|
+ uint64_t __r8;
|
|
+ uint64_t __r9;
|
|
+ uint64_t __r10;
|
|
+ uint64_t __r11;
|
|
+ uint64_t __r12;
|
|
+ uint64_t __r13;
|
|
+ uint64_t __r14;
|
|
+ uint64_t __r15;
|
|
+ uint64_t __r16;
|
|
+ uint64_t __r17;
|
|
+ uint64_t __r18;
|
|
+ uint64_t __r19;
|
|
+ uint64_t __r20;
|
|
+ uint64_t __r21;
|
|
+ uint64_t __r22;
|
|
+ uint64_t __r23;
|
|
+ uint64_t __r24;
|
|
+ uint64_t __r25;
|
|
+ uint64_t __r26;
|
|
+ uint64_t __r27;
|
|
+ uint64_t __r28;
|
|
+ uint64_t __r29;
|
|
+ uint64_t __r30;
|
|
+ uint64_t __r31;
|
|
+ uint64_t __cr; /* Condition register */
|
|
+ uint64_t __xer; /* User's integer exception register */
|
|
+ uint64_t __lr; /* Link register */
|
|
+ uint64_t __ctr; /* Count register */
|
|
+ uint64_t __vrsave; /* Vector Save Register */
|
|
+ };
|
|
+
|
|
+ union ppc64_vsr_t {
|
|
+ struct asfloat_s {
|
|
+ double f;
|
|
+ uint64_t v2;
|
|
+ } asfloat;
|
|
+ v128 v;
|
|
+ };
|
|
+
|
|
+ ppc64_thread_state_t _registers;
|
|
+ ppc64_vsr_t _vectorScalarRegisters[64];
|
|
+
|
|
+ static int getVectorRegNum(int num);
|
|
+};
|
|
+
|
|
+inline Registers_ppc64::Registers_ppc64(const void *registers) {
|
|
+ static_assert((check_fit<Registers_ppc64, unw_context_t>::does_fit),
|
|
+ "ppc64 registers do not fit into unw_context_t");
|
|
+ memcpy(&_registers, static_cast<const uint8_t *>(registers),
|
|
+ sizeof(_registers));
|
|
+ static_assert(sizeof(_registers) == 312,
|
|
+ "expected vector scalar register offset to be 312");
|
|
+ memcpy(&_vectorScalarRegisters,
|
|
+ static_cast<const uint8_t *>(registers) + sizeof(_registers),
|
|
+ sizeof(_vectorScalarRegisters));
|
|
+ static_assert(sizeof(_registers) +
|
|
+ sizeof(_vectorScalarRegisters) == 1336,
|
|
+ "expected vector register offset to be 1336 bytes");
|
|
+}
|
|
+
|
|
+inline Registers_ppc64::Registers_ppc64() {
|
|
+ memset(&_registers, 0, sizeof(_registers));
|
|
+ memset(&_vectorScalarRegisters, 0, sizeof(_vectorScalarRegisters));
|
|
+}
|
|
+
|
|
+inline bool Registers_ppc64::validRegister(int regNum) const {
|
|
+ switch (regNum) {
|
|
+ case UNW_REG_IP:
|
|
+ case UNW_REG_SP:
|
|
+ case UNW_PPC64_XER:
|
|
+ case UNW_PPC64_LR:
|
|
+ case UNW_PPC64_CTR:
|
|
+ case UNW_PPC64_VRSAVE:
|
|
+ return true;
|
|
+ }
|
|
+
|
|
+ if (regNum >= UNW_PPC64_R0 && regNum <= UNW_PPC64_R31)
|
|
+ return true;
|
|
+ if (regNum >= UNW_PPC64_CR0 && regNum <= UNW_PPC64_CR7)
|
|
+ return true;
|
|
+
|
|
+ return false;
|
|
+}
|
|
+
|
|
+inline uint64_t Registers_ppc64::getRegister(int regNum) const {
|
|
+ switch (regNum) {
|
|
+ case UNW_REG_IP:
|
|
+ return _registers.__srr0;
|
|
+ case UNW_PPC64_R0:
|
|
+ return _registers.__r0;
|
|
+ case UNW_PPC64_R1:
|
|
+ case UNW_REG_SP:
|
|
+ return _registers.__r1;
|
|
+ case UNW_PPC64_R2:
|
|
+ return _registers.__r2;
|
|
+ case UNW_PPC64_R3:
|
|
+ return _registers.__r3;
|
|
+ case UNW_PPC64_R4:
|
|
+ return _registers.__r4;
|
|
+ case UNW_PPC64_R5:
|
|
+ return _registers.__r5;
|
|
+ case UNW_PPC64_R6:
|
|
+ return _registers.__r6;
|
|
+ case UNW_PPC64_R7:
|
|
+ return _registers.__r7;
|
|
+ case UNW_PPC64_R8:
|
|
+ return _registers.__r8;
|
|
+ case UNW_PPC64_R9:
|
|
+ return _registers.__r9;
|
|
+ case UNW_PPC64_R10:
|
|
+ return _registers.__r10;
|
|
+ case UNW_PPC64_R11:
|
|
+ return _registers.__r11;
|
|
+ case UNW_PPC64_R12:
|
|
+ return _registers.__r12;
|
|
+ case UNW_PPC64_R13:
|
|
+ return _registers.__r13;
|
|
+ case UNW_PPC64_R14:
|
|
+ return _registers.__r14;
|
|
+ case UNW_PPC64_R15:
|
|
+ return _registers.__r15;
|
|
+ case UNW_PPC64_R16:
|
|
+ return _registers.__r16;
|
|
+ case UNW_PPC64_R17:
|
|
+ return _registers.__r17;
|
|
+ case UNW_PPC64_R18:
|
|
+ return _registers.__r18;
|
|
+ case UNW_PPC64_R19:
|
|
+ return _registers.__r19;
|
|
+ case UNW_PPC64_R20:
|
|
+ return _registers.__r20;
|
|
+ case UNW_PPC64_R21:
|
|
+ return _registers.__r21;
|
|
+ case UNW_PPC64_R22:
|
|
+ return _registers.__r22;
|
|
+ case UNW_PPC64_R23:
|
|
+ return _registers.__r23;
|
|
+ case UNW_PPC64_R24:
|
|
+ return _registers.__r24;
|
|
+ case UNW_PPC64_R25:
|
|
+ return _registers.__r25;
|
|
+ case UNW_PPC64_R26:
|
|
+ return _registers.__r26;
|
|
+ case UNW_PPC64_R27:
|
|
+ return _registers.__r27;
|
|
+ case UNW_PPC64_R28:
|
|
+ return _registers.__r28;
|
|
+ case UNW_PPC64_R29:
|
|
+ return _registers.__r29;
|
|
+ case UNW_PPC64_R30:
|
|
+ return _registers.__r30;
|
|
+ case UNW_PPC64_R31:
|
|
+ return _registers.__r31;
|
|
+ case UNW_PPC64_CR0:
|
|
+ return (_registers.__cr & 0xF0000000);
|
|
+ case UNW_PPC64_CR1:
|
|
+ return (_registers.__cr & 0x0F000000);
|
|
+ case UNW_PPC64_CR2:
|
|
+ return (_registers.__cr & 0x00F00000);
|
|
+ case UNW_PPC64_CR3:
|
|
+ return (_registers.__cr & 0x000F0000);
|
|
+ case UNW_PPC64_CR4:
|
|
+ return (_registers.__cr & 0x0000F000);
|
|
+ case UNW_PPC64_CR5:
|
|
+ return (_registers.__cr & 0x00000F00);
|
|
+ case UNW_PPC64_CR6:
|
|
+ return (_registers.__cr & 0x000000F0);
|
|
+ case UNW_PPC64_CR7:
|
|
+ return (_registers.__cr & 0x0000000F);
|
|
+ case UNW_PPC64_XER:
|
|
+ return _registers.__xer;
|
|
+ case UNW_PPC64_LR:
|
|
+ return _registers.__lr;
|
|
+ case UNW_PPC64_CTR:
|
|
+ return _registers.__ctr;
|
|
+ case UNW_PPC64_VRSAVE:
|
|
+ return _registers.__vrsave;
|
|
+ }
|
|
+ _LIBUNWIND_ABORT("unsupported ppc64 register");
|
|
+}
|
|
+
|
|
+inline void Registers_ppc64::setRegister(int regNum, uint64_t value) {
|
|
+ switch (regNum) {
|
|
+ case UNW_REG_IP:
|
|
+ _registers.__srr0 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R0:
|
|
+ _registers.__r0 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R1:
|
|
+ case UNW_REG_SP:
|
|
+ _registers.__r1 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R2:
|
|
+ _registers.__r2 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R3:
|
|
+ _registers.__r3 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R4:
|
|
+ _registers.__r4 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R5:
|
|
+ _registers.__r5 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R6:
|
|
+ _registers.__r6 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R7:
|
|
+ _registers.__r7 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R8:
|
|
+ _registers.__r8 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R9:
|
|
+ _registers.__r9 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R10:
|
|
+ _registers.__r10 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R11:
|
|
+ _registers.__r11 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R12:
|
|
+ _registers.__r12 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R13:
|
|
+ _registers.__r13 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R14:
|
|
+ _registers.__r14 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R15:
|
|
+ _registers.__r15 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R16:
|
|
+ _registers.__r16 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R17:
|
|
+ _registers.__r17 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R18:
|
|
+ _registers.__r18 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R19:
|
|
+ _registers.__r19 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R20:
|
|
+ _registers.__r20 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R21:
|
|
+ _registers.__r21 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R22:
|
|
+ _registers.__r22 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R23:
|
|
+ _registers.__r23 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R24:
|
|
+ _registers.__r24 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R25:
|
|
+ _registers.__r25 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R26:
|
|
+ _registers.__r26 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R27:
|
|
+ _registers.__r27 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R28:
|
|
+ _registers.__r28 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R29:
|
|
+ _registers.__r29 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R30:
|
|
+ _registers.__r30 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_R31:
|
|
+ _registers.__r31 = value;
|
|
+ return;
|
|
+ case UNW_PPC64_CR0:
|
|
+ _registers.__cr &= 0x0FFFFFFF;
|
|
+ _registers.__cr |= (value & 0xF0000000);
|
|
+ return;
|
|
+ case UNW_PPC64_CR1:
|
|
+ _registers.__cr &= 0xF0FFFFFF;
|
|
+ _registers.__cr |= (value & 0x0F000000);
|
|
+ return;
|
|
+ case UNW_PPC64_CR2:
|
|
+ _registers.__cr &= 0xFF0FFFFF;
|
|
+ _registers.__cr |= (value & 0x00F00000);
|
|
+ return;
|
|
+ case UNW_PPC64_CR3:
|
|
+ _registers.__cr &= 0xFFF0FFFF;
|
|
+ _registers.__cr |= (value & 0x000F0000);
|
|
+ return;
|
|
+ case UNW_PPC64_CR4:
|
|
+ _registers.__cr &= 0xFFFF0FFF;
|
|
+ _registers.__cr |= (value & 0x0000F000);
|
|
+ return;
|
|
+ case UNW_PPC64_CR5:
|
|
+ _registers.__cr &= 0xFFFFF0FF;
|
|
+ _registers.__cr |= (value & 0x00000F00);
|
|
+ return;
|
|
+ case UNW_PPC64_CR6:
|
|
+ _registers.__cr &= 0xFFFFFF0F;
|
|
+ _registers.__cr |= (value & 0x000000F0);
|
|
+ return;
|
|
+ case UNW_PPC64_CR7:
|
|
+ _registers.__cr &= 0xFFFFFFF0;
|
|
+ _registers.__cr |= (value & 0x0000000F);
|
|
+ return;
|
|
+ case UNW_PPC64_XER:
|
|
+ _registers.__xer = value;
|
|
+ return;
|
|
+ case UNW_PPC64_LR:
|
|
+ _registers.__lr = value;
|
|
+ return;
|
|
+ case UNW_PPC64_CTR:
|
|
+ _registers.__ctr = value;
|
|
+ return;
|
|
+ case UNW_PPC64_VRSAVE:
|
|
+ _registers.__vrsave = value;
|
|
+ return;
|
|
+ }
|
|
+ _LIBUNWIND_ABORT("unsupported ppc64 register");
|
|
+}
|
|
+
|
|
+inline bool Registers_ppc64::validFloatRegister(int regNum) const {
|
|
+ return regNum >= UNW_PPC64_F0 && regNum <= UNW_PPC64_F31;
|
|
+}
|
|
+
|
|
+inline double Registers_ppc64::getFloatRegister(int regNum) const {
|
|
+ assert(validFloatRegister(regNum));
|
|
+ return _vectorScalarRegisters[regNum - UNW_PPC64_F0].asfloat.f;
|
|
+}
|
|
+
|
|
+inline void Registers_ppc64::setFloatRegister(int regNum, double value) {
|
|
+ assert(validFloatRegister(regNum));
|
|
+ _vectorScalarRegisters[regNum - UNW_PPC64_F0].asfloat.f = value;
|
|
+}
|
|
+
|
|
+inline bool Registers_ppc64::validVectorRegister(int regNum) const {
|
|
+#ifdef PPC64_HAS_VMX
|
|
+ if (regNum >= UNW_PPC64_VS0 && regNum <= UNW_PPC64_VS31)
|
|
+ return true;
|
|
+ if (regNum >= UNW_PPC64_VS32 && regNum <= UNW_PPC64_VS63)
|
|
+ return true;
|
|
+#else
|
|
+ if (regNum >= UNW_PPC64_V0 && regNum <= UNW_PPC64_V31)
|
|
+ return true;
|
|
+#endif
|
|
+ return false;
|
|
+}
|
|
+
|
|
+inline int Registers_ppc64::getVectorRegNum(int num)
|
|
+{
|
|
+ if (num >= UNW_PPC64_VS0 && num <= UNW_PPC64_VS31)
|
|
+ return num - UNW_PPC64_VS0;
|
|
+ else
|
|
+ return num - UNW_PPC64_VS32 + 32;
|
|
+}
|
|
+
|
|
+inline v128 Registers_ppc64::getVectorRegister(int regNum) const {
|
|
+ assert(validVectorRegister(regNum));
|
|
+ return _vectorScalarRegisters[getVectorRegNum(regNum)].v;
|
|
+}
|
|
+
|
|
+inline void Registers_ppc64::setVectorRegister(int regNum, v128 value) {
|
|
+ assert(validVectorRegister(regNum));
|
|
+ _vectorScalarRegisters[getVectorRegNum(regNum)].v = value;
|
|
+}
|
|
+
|
|
+inline const char *Registers_ppc64::getRegisterName(int regNum) {
|
|
+ switch (regNum) {
|
|
+ case UNW_REG_IP:
|
|
+ return "ip";
|
|
+ case UNW_REG_SP:
|
|
+ return "sp";
|
|
+ case UNW_PPC64_R0:
|
|
+ return "r0";
|
|
+ case UNW_PPC64_R1:
|
|
+ return "r1";
|
|
+ case UNW_PPC64_R2:
|
|
+ return "r2";
|
|
+ case UNW_PPC64_R3:
|
|
+ return "r3";
|
|
+ case UNW_PPC64_R4:
|
|
+ return "r4";
|
|
+ case UNW_PPC64_R5:
|
|
+ return "r5";
|
|
+ case UNW_PPC64_R6:
|
|
+ return "r6";
|
|
+ case UNW_PPC64_R7:
|
|
+ return "r7";
|
|
+ case UNW_PPC64_R8:
|
|
+ return "r8";
|
|
+ case UNW_PPC64_R9:
|
|
+ return "r9";
|
|
+ case UNW_PPC64_R10:
|
|
+ return "r10";
|
|
+ case UNW_PPC64_R11:
|
|
+ return "r11";
|
|
+ case UNW_PPC64_R12:
|
|
+ return "r12";
|
|
+ case UNW_PPC64_R13:
|
|
+ return "r13";
|
|
+ case UNW_PPC64_R14:
|
|
+ return "r14";
|
|
+ case UNW_PPC64_R15:
|
|
+ return "r15";
|
|
+ case UNW_PPC64_R16:
|
|
+ return "r16";
|
|
+ case UNW_PPC64_R17:
|
|
+ return "r17";
|
|
+ case UNW_PPC64_R18:
|
|
+ return "r18";
|
|
+ case UNW_PPC64_R19:
|
|
+ return "r19";
|
|
+ case UNW_PPC64_R20:
|
|
+ return "r20";
|
|
+ case UNW_PPC64_R21:
|
|
+ return "r21";
|
|
+ case UNW_PPC64_R22:
|
|
+ return "r22";
|
|
+ case UNW_PPC64_R23:
|
|
+ return "r23";
|
|
+ case UNW_PPC64_R24:
|
|
+ return "r24";
|
|
+ case UNW_PPC64_R25:
|
|
+ return "r25";
|
|
+ case UNW_PPC64_R26:
|
|
+ return "r26";
|
|
+ case UNW_PPC64_R27:
|
|
+ return "r27";
|
|
+ case UNW_PPC64_R28:
|
|
+ return "r28";
|
|
+ case UNW_PPC64_R29:
|
|
+ return "r29";
|
|
+ case UNW_PPC64_R30:
|
|
+ return "r30";
|
|
+ case UNW_PPC64_R31:
|
|
+ return "r31";
|
|
+ case UNW_PPC64_CR0:
|
|
+ return "cr0";
|
|
+ case UNW_PPC64_CR1:
|
|
+ return "cr1";
|
|
+ case UNW_PPC64_CR2:
|
|
+ return "cr2";
|
|
+ case UNW_PPC64_CR3:
|
|
+ return "cr3";
|
|
+ case UNW_PPC64_CR4:
|
|
+ return "cr4";
|
|
+ case UNW_PPC64_CR5:
|
|
+ return "cr5";
|
|
+ case UNW_PPC64_CR6:
|
|
+ return "cr6";
|
|
+ case UNW_PPC64_CR7:
|
|
+ return "cr7";
|
|
+ case UNW_PPC64_XER:
|
|
+ return "xer";
|
|
+ case UNW_PPC64_LR:
|
|
+ return "lr";
|
|
+ case UNW_PPC64_CTR:
|
|
+ return "ctr";
|
|
+ case UNW_PPC64_VRSAVE:
|
|
+ return "vrsave";
|
|
+ case UNW_PPC64_F0:
|
|
+ return "fp0";
|
|
+ case UNW_PPC64_F1:
|
|
+ return "fp1";
|
|
+ case UNW_PPC64_F2:
|
|
+ return "fp2";
|
|
+ case UNW_PPC64_F3:
|
|
+ return "fp3";
|
|
+ case UNW_PPC64_F4:
|
|
+ return "fp4";
|
|
+ case UNW_PPC64_F5:
|
|
+ return "fp5";
|
|
+ case UNW_PPC64_F6:
|
|
+ return "fp6";
|
|
+ case UNW_PPC64_F7:
|
|
+ return "fp7";
|
|
+ case UNW_PPC64_F8:
|
|
+ return "fp8";
|
|
+ case UNW_PPC64_F9:
|
|
+ return "fp9";
|
|
+ case UNW_PPC64_F10:
|
|
+ return "fp10";
|
|
+ case UNW_PPC64_F11:
|
|
+ return "fp11";
|
|
+ case UNW_PPC64_F12:
|
|
+ return "fp12";
|
|
+ case UNW_PPC64_F13:
|
|
+ return "fp13";
|
|
+ case UNW_PPC64_F14:
|
|
+ return "fp14";
|
|
+ case UNW_PPC64_F15:
|
|
+ return "fp15";
|
|
+ case UNW_PPC64_F16:
|
|
+ return "fp16";
|
|
+ case UNW_PPC64_F17:
|
|
+ return "fp17";
|
|
+ case UNW_PPC64_F18:
|
|
+ return "fp18";
|
|
+ case UNW_PPC64_F19:
|
|
+ return "fp19";
|
|
+ case UNW_PPC64_F20:
|
|
+ return "fp20";
|
|
+ case UNW_PPC64_F21:
|
|
+ return "fp21";
|
|
+ case UNW_PPC64_F22:
|
|
+ return "fp22";
|
|
+ case UNW_PPC64_F23:
|
|
+ return "fp23";
|
|
+ case UNW_PPC64_F24:
|
|
+ return "fp24";
|
|
+ case UNW_PPC64_F25:
|
|
+ return "fp25";
|
|
+ case UNW_PPC64_F26:
|
|
+ return "fp26";
|
|
+ case UNW_PPC64_F27:
|
|
+ return "fp27";
|
|
+ case UNW_PPC64_F28:
|
|
+ return "fp28";
|
|
+ case UNW_PPC64_F29:
|
|
+ return "fp29";
|
|
+ case UNW_PPC64_F30:
|
|
+ return "fp30";
|
|
+ case UNW_PPC64_F31:
|
|
+ return "fp31";
|
|
+ case UNW_PPC64_V0:
|
|
+ return "v0";
|
|
+ case UNW_PPC64_V1:
|
|
+ return "v1";
|
|
+ case UNW_PPC64_V2:
|
|
+ return "v2";
|
|
+ case UNW_PPC64_V3:
|
|
+ return "v3";
|
|
+ case UNW_PPC64_V4:
|
|
+ return "v4";
|
|
+ case UNW_PPC64_V5:
|
|
+ return "v5";
|
|
+ case UNW_PPC64_V6:
|
|
+ return "v6";
|
|
+ case UNW_PPC64_V7:
|
|
+ return "v7";
|
|
+ case UNW_PPC64_V8:
|
|
+ return "v8";
|
|
+ case UNW_PPC64_V9:
|
|
+ return "v9";
|
|
+ case UNW_PPC64_V10:
|
|
+ return "v10";
|
|
+ case UNW_PPC64_V11:
|
|
+ return "v11";
|
|
+ case UNW_PPC64_V12:
|
|
+ return "v12";
|
|
+ case UNW_PPC64_V13:
|
|
+ return "v13";
|
|
+ case UNW_PPC64_V14:
|
|
+ return "v14";
|
|
+ case UNW_PPC64_V15:
|
|
+ return "v15";
|
|
+ case UNW_PPC64_V16:
|
|
+ return "v16";
|
|
+ case UNW_PPC64_V17:
|
|
+ return "v17";
|
|
+ case UNW_PPC64_V18:
|
|
+ return "v18";
|
|
+ case UNW_PPC64_V19:
|
|
+ return "v19";
|
|
+ case UNW_PPC64_V20:
|
|
+ return "v20";
|
|
+ case UNW_PPC64_V21:
|
|
+ return "v21";
|
|
+ case UNW_PPC64_V22:
|
|
+ return "v22";
|
|
+ case UNW_PPC64_V23:
|
|
+ return "v23";
|
|
+ case UNW_PPC64_V24:
|
|
+ return "v24";
|
|
+ case UNW_PPC64_V25:
|
|
+ return "v25";
|
|
+ case UNW_PPC64_V26:
|
|
+ return "v26";
|
|
+ case UNW_PPC64_V27:
|
|
+ return "v27";
|
|
+ case UNW_PPC64_V28:
|
|
+ return "v28";
|
|
+ case UNW_PPC64_V29:
|
|
+ return "v29";
|
|
+ case UNW_PPC64_V30:
|
|
+ return "v30";
|
|
+ case UNW_PPC64_V31:
|
|
+ return "v31";
|
|
+ }
|
|
+ return "unknown register";
|
|
+}
|
|
+#endif // _LIBUNWIND_TARGET_PPC64
|
|
|
|
#if defined(_LIBUNWIND_TARGET_AARCH64)
|
|
/// Registers_arm64 holds the register state of a thread in a 64-bit arm
|
|
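Before the UnwindCursor.hpp changes, one note on the storage added above: Registers_ppc64 keeps a single 64-entry array of 16-byte VSX slots and serves both the float and the vector accessors from it (F0-F31 occupy the first doubleword of slots 0-31, while V0-V31 alias slots 32-63 via getVectorRegNum). The stand-alone sketch below illustrates that aliasing; the v128 stand-in and the main function are not code from the patch.

#include <cassert>
#include <cstdint>
#include <cstring>

struct v128 { std::uint32_t vec[4]; };     // local stand-in for libunwind's v128

union ppc64_vsr_t {                        // mirrors the union added to Registers.hpp
  struct asfloat_s {
    double f;                              // FPR view: first doubleword of the slot
    std::uint64_t v2;                      // second doubleword of the 16-byte slot
  } asfloat;
  v128 v;                                  // full 128-bit VSX/Altivec view
};

int main() {
  ppc64_vsr_t slots[64] = {};              // matches _vectorScalarRegisters[64]
  slots[1].asfloat.f = 3.5;                // what setFloatRegister(UNW_PPC64_F1, 3.5) does
  double back;
  std::memcpy(&back, &slots[1].v, sizeof back);  // read the same bytes through the v128 view
  assert(back == 3.5);                     // the two views share storage
  return 0;
}

This shared layout is also why the assembly below saves and restores each FPR with a 16-byte stride (PPC64_OFFS_FP + n * 16) instead of packing the doubles.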
--- a/src/UnwindCursor.hpp
|
|
+++ b/src/UnwindCursor.hpp
|
|
@@ -507,6 +507,12 @@
|
|
}
|
|
#endif
|
|
|
|
+#if defined(_LIBUNWIND_TARGET_PPC64)
|
|
+ int stepWithCompactEncoding(Registers_ppc64 &) {
|
|
+ return UNW_EINVAL;
|
|
+ }
|
|
+#endif
|
|
+
|
|
#if defined(_LIBUNWIND_TARGET_AARCH64)
|
|
int stepWithCompactEncoding(Registers_arm64 &) {
|
|
return CompactUnwinder_arm64<A>::stepWithCompactEncoding(
|
|
@@ -547,6 +553,12 @@
|
|
}
|
|
#endif
|
|
|
|
+#if defined(_LIBUNWIND_TARGET_PPC64)
|
|
+ bool compactSaysUseDwarf(Registers_ppc64 &, uint32_t *) const {
|
|
+ return true;
|
|
+ }
|
|
+#endif
|
|
+
|
|
#if defined(_LIBUNWIND_TARGET_AARCH64)
|
|
bool compactSaysUseDwarf(Registers_arm64 &, uint32_t *offset) const {
|
|
if ((_info.format & UNWIND_ARM64_MODE_MASK) == UNWIND_ARM64_MODE_DWARF) {
|
|
@@ -579,6 +591,12 @@
|
|
|
|
#if defined(_LIBUNWIND_TARGET_PPC)
|
|
compact_unwind_encoding_t dwarfEncoding(Registers_ppc &) const {
|
|
+ return 0;
|
|
+ }
|
|
+#endif
|
|
+
|
|
+#if defined(_LIBUNWIND_TARGET_PPC64)
|
|
+ compact_unwind_encoding_t dwarfEncoding(Registers_ppc64 &) const {
|
|
return 0;
|
|
}
|
|
#endif
|
|
--- a/src/UnwindRegistersRestore.S
|
|
+++ b/src/UnwindRegistersRestore.S
|
|
@@ -92,6 +92,271 @@
|
|
pop %rdi # rdi was saved here earlier
|
|
ret # rip was saved here
|
|
|
|
+#elif defined(__powerpc64__)
|
|
+
|
|
+DEFINE_LIBUNWIND_PRIVATE_FUNCTION(_ZN9libunwind15Registers_ppc646jumptoEv)
|
|
+//
|
|
+// void libunwind::Registers_ppc64::jumpto()
|
|
+//
|
|
+// On entry:
|
|
+// thread_state pointer is in r3
|
|
+//
|
|
+
|
|
+// load register (GPR)
|
|
+#define PPC64_LR(n) \
|
|
+ ld %r##n, (8 * (n + 2))(%r3)
|
|
+
|
|
+ // restore integral registers
|
|
+ // skip r0 for now
|
|
+ // skip r1 for now
|
|
+ PPC64_LR(2)
|
|
+ // skip r3 for now
|
|
+ // skip r4 for now
|
|
+ // skip r5 for now
|
|
+ PPC64_LR(6)
|
|
+ PPC64_LR(7)
|
|
+ PPC64_LR(8)
|
|
+ PPC64_LR(9)
|
|
+ PPC64_LR(10)
|
|
+ PPC64_LR(11)
|
|
+ PPC64_LR(12)
|
|
+ PPC64_LR(13)
|
|
+ PPC64_LR(14)
|
|
+ PPC64_LR(15)
|
|
+ PPC64_LR(16)
|
|
+ PPC64_LR(17)
|
|
+ PPC64_LR(18)
|
|
+ PPC64_LR(19)
|
|
+ PPC64_LR(20)
|
|
+ PPC64_LR(21)
|
|
+ PPC64_LR(22)
|
|
+ PPC64_LR(23)
|
|
+ PPC64_LR(24)
|
|
+ PPC64_LR(25)
|
|
+ PPC64_LR(26)
|
|
+ PPC64_LR(27)
|
|
+ PPC64_LR(28)
|
|
+ PPC64_LR(29)
|
|
+ PPC64_LR(30)
|
|
+ PPC64_LR(31)
|
|
+
|
|
+#ifdef PPC64_HAS_VMX
|
|
+
|
|
+ // restore VS registers
|
|
+ // (note that this also restores floating point registers and V registers,
|
|
+ // because part of VS is mapped to these registers)
|
|
+
|
|
+ addi %r4, %r3, PPC64_OFFS_FP
|
|
+
|
|
+// load VS register
|
|
+#define PPC64_LVS(n) \
|
|
+ lxvd2x %vs##n, 0, %r4 ;\
|
|
+ addi %r4, %r4, 16
|
|
+
|
|
+ // restore the first 32 VS regs (and also all floating point regs)
|
|
+ PPC64_LVS(0)
|
|
+ PPC64_LVS(1)
|
|
+ PPC64_LVS(2)
|
|
+ PPC64_LVS(3)
|
|
+ PPC64_LVS(4)
|
|
+ PPC64_LVS(5)
|
|
+ PPC64_LVS(6)
|
|
+ PPC64_LVS(7)
|
|
+ PPC64_LVS(8)
|
|
+ PPC64_LVS(9)
|
|
+ PPC64_LVS(10)
|
|
+ PPC64_LVS(11)
|
|
+ PPC64_LVS(12)
|
|
+ PPC64_LVS(13)
|
|
+ PPC64_LVS(14)
|
|
+ PPC64_LVS(15)
|
|
+ PPC64_LVS(16)
|
|
+ PPC64_LVS(17)
|
|
+ PPC64_LVS(18)
|
|
+ PPC64_LVS(19)
|
|
+ PPC64_LVS(20)
|
|
+ PPC64_LVS(21)
|
|
+ PPC64_LVS(22)
|
|
+ PPC64_LVS(23)
|
|
+ PPC64_LVS(24)
|
|
+ PPC64_LVS(25)
|
|
+ PPC64_LVS(26)
|
|
+ PPC64_LVS(27)
|
|
+ PPC64_LVS(28)
|
|
+ PPC64_LVS(29)
|
|
+ PPC64_LVS(30)
|
|
+ PPC64_LVS(31)
|
|
+
|
|
+ // use VRSAVE to conditionally restore the remaining VS regs,
|
|
+ // that are where the V regs are mapped
|
|
+
|
|
+ ld %r5, PPC64_OFFS_VRSAVE(%r3) // test VRsave
|
|
+ cmpwi %r5, 0
|
|
+ beq Lnovec
|
|
+
|
|
+// conditionally load VS
|
|
+#define PPC64_CLVS_BOTTOM(n) \
|
|
+ beq Ldone##n ;\
|
|
+ addi %r4, %r3, PPC64_OFFS_FP + n * 16 ;\
|
|
+ lxvd2x %vs##n, 0, %r4 ;\
|
|
+Ldone##n:
|
|
+
|
|
+#define PPC64_CLVSl(n) \
|
|
+ andis. %r0, %r5, (1<<(47-n)) ;\
|
|
+PPC64_CLVS_BOTTOM(n)
|
|
+
|
|
+#define PPC64_CLVSh(n) \
|
|
+ andi. %r0, %r5, (1<<(63-n)) ;\
|
|
+PPC64_CLVS_BOTTOM(n)
|
|
+
|
|
+ PPC64_CLVSl(32)
|
|
+ PPC64_CLVSl(33)
|
|
+ PPC64_CLVSl(34)
|
|
+ PPC64_CLVSl(35)
|
|
+ PPC64_CLVSl(36)
|
|
+ PPC64_CLVSl(37)
|
|
+ PPC64_CLVSl(38)
|
|
+ PPC64_CLVSl(39)
|
|
+ PPC64_CLVSl(40)
|
|
+ PPC64_CLVSl(41)
|
|
+ PPC64_CLVSl(42)
|
|
+ PPC64_CLVSl(43)
|
|
+ PPC64_CLVSl(44)
|
|
+ PPC64_CLVSl(45)
|
|
+ PPC64_CLVSl(46)
|
|
+ PPC64_CLVSl(47)
|
|
+ PPC64_CLVSh(48)
|
|
+ PPC64_CLVSh(49)
|
|
+ PPC64_CLVSh(50)
|
|
+ PPC64_CLVSh(51)
|
|
+ PPC64_CLVSh(52)
|
|
+ PPC64_CLVSh(53)
|
|
+ PPC64_CLVSh(54)
|
|
+ PPC64_CLVSh(55)
|
|
+ PPC64_CLVSh(56)
|
|
+ PPC64_CLVSh(57)
|
|
+ PPC64_CLVSh(58)
|
|
+ PPC64_CLVSh(59)
|
|
+ PPC64_CLVSh(60)
|
|
+ PPC64_CLVSh(61)
|
|
+ PPC64_CLVSh(62)
|
|
+ PPC64_CLVSh(63)
|
|
+
|
|
+#else
|
|
+
|
|
+// load FP register
|
|
+#define PPC64_LF(n) \
|
|
+ lfd %f##n, (PPC64_OFFS_FP + n * 16)(%r3)
|
|
+
|
|
+ // restore float registers
|
|
+ PPC64_LF(0)
|
|
+ PPC64_LF(1)
|
|
+ PPC64_LF(2)
|
|
+ PPC64_LF(3)
|
|
+ PPC64_LF(4)
|
|
+ PPC64_LF(5)
|
|
+ PPC64_LF(6)
|
|
+ PPC64_LF(7)
|
|
+ PPC64_LF(8)
|
|
+ PPC64_LF(9)
|
|
+ PPC64_LF(10)
|
|
+ PPC64_LF(11)
|
|
+ PPC64_LF(12)
|
|
+ PPC64_LF(13)
|
|
+ PPC64_LF(14)
|
|
+ PPC64_LF(15)
|
|
+ PPC64_LF(16)
|
|
+ PPC64_LF(17)
|
|
+ PPC64_LF(18)
|
|
+ PPC64_LF(19)
|
|
+ PPC64_LF(20)
|
|
+ PPC64_LF(21)
|
|
+ PPC64_LF(22)
|
|
+ PPC64_LF(23)
|
|
+ PPC64_LF(24)
|
|
+ PPC64_LF(25)
|
|
+ PPC64_LF(26)
|
|
+ PPC64_LF(27)
|
|
+ PPC64_LF(28)
|
|
+ PPC64_LF(29)
|
|
+ PPC64_LF(30)
|
|
+ PPC64_LF(31)
|
|
+
|
|
+ // restore vector registers if any are in use
|
|
+ ld %r5, PPC64_OFFS_VRSAVE(%r3) // test VRsave
|
|
+ cmpwi %r5, 0
|
|
+ beq Lnovec
|
|
+
|
|
+ subi %r4, %r1, 16
|
|
+ // r4 is now a 16-byte aligned pointer into the red zone
|
|
+ // the _vectorScalarRegisters may not be 16-byte aligned
|
|
+ // so copy via red zone temp buffer
|
|
+
|
|
+#define PPC64_CLV_UNALIGNED_BOTTOM(n) \
|
|
+ beq Ldone##n ;\
|
|
+ ld %r0, (PPC64_OFFS_V + n * 16)(%r3) ;\
|
|
+ std %r0, 0(%r4) ;\
|
|
+ ld %r0, (PPC64_OFFS_V + n * 16 + 8)(%r3) ;\
|
|
+ std %r0, 8(%r4) ;\
|
|
+ lvx %v##n, 0, %r4 ;\
|
|
+Ldone ## n:
|
|
+
|
|
+#define PPC64_CLV_UNALIGNEDl(n) \
|
|
+ andis. %r0, %r5, (1<<(15-n)) ;\
|
|
+PPC64_CLV_UNALIGNED_BOTTOM(n)
|
|
+
|
|
+#define PPC64_CLV_UNALIGNEDh(n) \
|
|
+ andi. %r0, %r5, (1<<(31-n)) ;\
|
|
+PPC64_CLV_UNALIGNED_BOTTOM(n)
|
|
+
|
|
+ PPC64_CLV_UNALIGNEDl(0)
|
|
+ PPC64_CLV_UNALIGNEDl(1)
|
|
+ PPC64_CLV_UNALIGNEDl(2)
|
|
+ PPC64_CLV_UNALIGNEDl(3)
|
|
+ PPC64_CLV_UNALIGNEDl(4)
|
|
+ PPC64_CLV_UNALIGNEDl(5)
|
|
+ PPC64_CLV_UNALIGNEDl(6)
|
|
+ PPC64_CLV_UNALIGNEDl(7)
|
|
+ PPC64_CLV_UNALIGNEDl(8)
|
|
+ PPC64_CLV_UNALIGNEDl(9)
|
|
+ PPC64_CLV_UNALIGNEDl(10)
|
|
+ PPC64_CLV_UNALIGNEDl(11)
|
|
+ PPC64_CLV_UNALIGNEDl(12)
|
|
+ PPC64_CLV_UNALIGNEDl(13)
|
|
+ PPC64_CLV_UNALIGNEDl(14)
|
|
+ PPC64_CLV_UNALIGNEDl(15)
|
|
+ PPC64_CLV_UNALIGNEDh(16)
|
|
+ PPC64_CLV_UNALIGNEDh(17)
|
|
+ PPC64_CLV_UNALIGNEDh(18)
|
|
+ PPC64_CLV_UNALIGNEDh(19)
|
|
+ PPC64_CLV_UNALIGNEDh(20)
|
|
+ PPC64_CLV_UNALIGNEDh(21)
|
|
+ PPC64_CLV_UNALIGNEDh(22)
|
|
+ PPC64_CLV_UNALIGNEDh(23)
|
|
+ PPC64_CLV_UNALIGNEDh(24)
|
|
+ PPC64_CLV_UNALIGNEDh(25)
|
|
+ PPC64_CLV_UNALIGNEDh(26)
|
|
+ PPC64_CLV_UNALIGNEDh(27)
|
|
+ PPC64_CLV_UNALIGNEDh(28)
|
|
+ PPC64_CLV_UNALIGNEDh(29)
|
|
+ PPC64_CLV_UNALIGNEDh(30)
|
|
+ PPC64_CLV_UNALIGNEDh(31)
|
|
+
|
|
+#endif
|
|
+
|
|
+Lnovec:
|
|
+ ld %r0, PPC64_OFFS_CR(%r3)
|
|
+ mtcr %r0
|
|
+ ld %r0, PPC64_OFFS_SRR0(%r3)
|
|
+ mtctr %r0
|
|
+
|
|
+ PPC64_LR(0)
|
|
+ PPC64_LR(5)
|
|
+ PPC64_LR(4)
|
|
+ PPC64_LR(1)
|
|
+ PPC64_LR(3)
|
|
+ bctr
|
|
+
|
|
|
|
#elif defined(__ppc__)
|
|
|
|
--- a/src/UnwindRegistersSave.S
|
|
+++ b/src/UnwindRegistersSave.S
|
|
@@ -96,6 +96,238 @@
|
|
DEFINE_LIBUNWIND_FUNCTION(unw_getcontext)
|
|
teq $0, $0
|
|
|
|
+
|
|
+#elif defined(__powerpc64__)
|
|
+
|
|
+//
|
|
+// extern int unw_getcontext(unw_context_t* thread_state)
|
|
+//
|
|
+// On entry:
|
|
+// thread_state pointer is in r3
|
|
+//
|
|
+DEFINE_LIBUNWIND_FUNCTION(unw_getcontext)
|
|
+
|
|
+// store register (GPR)
|
|
+#define PPC64_STR(n) \
|
|
+ std %r##n, (8 * (n + 2))(%r3)
|
|
+
|
|
+ // save GPRs
|
|
+ PPC64_STR(0)
|
|
+ mflr %r0
|
|
+ std %r0, PPC64_OFFS_SRR0(%r3) // store lr as srr0
|
|
+ PPC64_STR(1)
|
|
+ PPC64_STR(2)
|
|
+ PPC64_STR(3)
|
|
+ PPC64_STR(4)
|
|
+ PPC64_STR(5)
|
|
+ PPC64_STR(6)
|
|
+ PPC64_STR(7)
|
|
+ PPC64_STR(8)
|
|
+ PPC64_STR(9)
|
|
+ PPC64_STR(10)
|
|
+ PPC64_STR(11)
|
|
+ PPC64_STR(12)
|
|
+ PPC64_STR(13)
|
|
+ PPC64_STR(14)
|
|
+ PPC64_STR(15)
|
|
+ PPC64_STR(16)
|
|
+ PPC64_STR(17)
|
|
+ PPC64_STR(18)
|
|
+ PPC64_STR(19)
|
|
+ PPC64_STR(20)
|
|
+ PPC64_STR(21)
|
|
+ PPC64_STR(22)
|
|
+ PPC64_STR(23)
|
|
+ PPC64_STR(24)
|
|
+ PPC64_STR(25)
|
|
+ PPC64_STR(26)
|
|
+ PPC64_STR(27)
|
|
+ PPC64_STR(28)
|
|
+ PPC64_STR(29)
|
|
+ PPC64_STR(30)
|
|
+ PPC64_STR(31)
|
|
+
|
|
+ mfcr %r0
|
|
+ std %r0, PPC64_OFFS_CR(%r3)
|
|
+ mfxer %r0
|
|
+ std %r0, PPC64_OFFS_XER(%r3)
|
|
+ mflr %r0
|
|
+ std %r0, PPC64_OFFS_LR(%r3)
|
|
+ mfctr %r0
|
|
+ std %r0, PPC64_OFFS_CTR(%r3)
|
|
+ mfvrsave %r0
|
|
+ std %r0, PPC64_OFFS_VRSAVE(%r3)
|
|
+
|
|
+#ifdef PPC64_HAS_VMX
|
|
+ // save VS registers
|
|
+ // (note that this also saves floating point registers and V registers,
|
|
+ // because part of VS is mapped to these registers)
|
|
+
|
|
+ addi %r4, %r3, PPC64_OFFS_FP
|
|
+
|
|
+// store VS register
|
|
+#define PPC64_STVS(n) \
|
|
+ stxvd2x %vs##n, 0, %r4 ;\
|
|
+ addi %r4, %r4, 16
|
|
+
|
|
+ PPC64_STVS(0)
|
|
+ PPC64_STVS(1)
|
|
+ PPC64_STVS(2)
|
|
+ PPC64_STVS(3)
|
|
+ PPC64_STVS(4)
|
|
+ PPC64_STVS(5)
|
|
+ PPC64_STVS(6)
|
|
+ PPC64_STVS(7)
|
|
+ PPC64_STVS(8)
|
|
+ PPC64_STVS(9)
|
|
+ PPC64_STVS(10)
|
|
+ PPC64_STVS(11)
|
|
+ PPC64_STVS(12)
|
|
+ PPC64_STVS(13)
|
|
+ PPC64_STVS(14)
|
|
+ PPC64_STVS(15)
|
|
+ PPC64_STVS(16)
|
|
+ PPC64_STVS(17)
|
|
+ PPC64_STVS(18)
|
|
+ PPC64_STVS(19)
|
|
+ PPC64_STVS(20)
|
|
+ PPC64_STVS(21)
|
|
+ PPC64_STVS(22)
|
|
+ PPC64_STVS(23)
|
|
+ PPC64_STVS(24)
|
|
+ PPC64_STVS(25)
|
|
+ PPC64_STVS(26)
|
|
+ PPC64_STVS(27)
|
|
+ PPC64_STVS(28)
|
|
+ PPC64_STVS(29)
|
|
+ PPC64_STVS(30)
|
|
+ PPC64_STVS(31)
|
|
+ PPC64_STVS(32)
|
|
+ PPC64_STVS(33)
|
|
+ PPC64_STVS(34)
|
|
+ PPC64_STVS(35)
|
|
+ PPC64_STVS(36)
|
|
+ PPC64_STVS(37)
|
|
+ PPC64_STVS(38)
|
|
+ PPC64_STVS(39)
|
|
+ PPC64_STVS(40)
|
|
+ PPC64_STVS(41)
|
|
+ PPC64_STVS(42)
|
|
+ PPC64_STVS(43)
|
|
+ PPC64_STVS(44)
|
|
+ PPC64_STVS(45)
|
|
+ PPC64_STVS(46)
|
|
+ PPC64_STVS(47)
|
|
+ PPC64_STVS(48)
|
|
+ PPC64_STVS(49)
|
|
+ PPC64_STVS(50)
|
|
+ PPC64_STVS(51)
|
|
+ PPC64_STVS(52)
|
|
+ PPC64_STVS(53)
|
|
+ PPC64_STVS(54)
|
|
+ PPC64_STVS(55)
|
|
+ PPC64_STVS(56)
|
|
+ PPC64_STVS(57)
|
|
+ PPC64_STVS(58)
|
|
+ PPC64_STVS(59)
|
|
+ PPC64_STVS(60)
|
|
+ PPC64_STVS(61)
|
|
+ PPC64_STVS(62)
|
|
+ PPC64_STVS(63)
|
|
+
|
|
+#else
|
|
+
|
|
+// store FP register
|
|
+#define PPC64_STF(n) \
|
|
+ stfd %f##n, (PPC64_OFFS_FP + n * 16)(%r3)
|
|
+
|
|
+ // save float registers
|
|
+ PPC64_STF(0)
|
|
+ PPC64_STF(1)
|
|
+ PPC64_STF(2)
|
|
+ PPC64_STF(3)
|
|
+ PPC64_STF(4)
|
|
+ PPC64_STF(5)
|
|
+ PPC64_STF(6)
|
|
+ PPC64_STF(7)
|
|
+ PPC64_STF(8)
|
|
+ PPC64_STF(9)
|
|
+ PPC64_STF(10)
|
|
+ PPC64_STF(11)
|
|
+ PPC64_STF(12)
|
|
+ PPC64_STF(13)
|
|
+ PPC64_STF(14)
|
|
+ PPC64_STF(15)
|
|
+ PPC64_STF(16)
|
|
+ PPC64_STF(17)
|
|
+ PPC64_STF(18)
|
|
+ PPC64_STF(19)
|
|
+ PPC64_STF(20)
|
|
+ PPC64_STF(21)
|
|
+ PPC64_STF(22)
|
|
+ PPC64_STF(23)
|
|
+ PPC64_STF(24)
|
|
+ PPC64_STF(25)
|
|
+ PPC64_STF(26)
|
|
+ PPC64_STF(27)
|
|
+ PPC64_STF(28)
|
|
+ PPC64_STF(29)
|
|
+ PPC64_STF(30)
|
|
+ PPC64_STF(31)
|
|
+
|
|
+ // save vector registers
|
|
+
|
|
+ // Use 16 bytes below the stack pointer as an
|
|
+ // aligned buffer to save each vector register.
|
|
+ // Note that the stack pointer is always 16-byte aligned.
|
|
+ subi %r4, %r1, 16
|
|
+
|
|
+#define PPC64_STV_UNALIGNED(n) \
|
|
+ stvx %v##n, 0, %r4 ;\
|
|
+ ld %r5, 0(%r4) ;\
|
|
+ std %r5, (PPC64_OFFS_V + n * 16)(%r3) ;\
|
|
+ ld %r5, 8(%r4) ;\
|
|
+ std %r5, (PPC64_OFFS_V + n * 16 + 8)(%r3)
|
|
+
|
|
+ PPC64_STV_UNALIGNED(0)
|
|
+ PPC64_STV_UNALIGNED(1)
|
|
+ PPC64_STV_UNALIGNED(2)
|
|
+ PPC64_STV_UNALIGNED(3)
|
|
+ PPC64_STV_UNALIGNED(4)
|
|
+ PPC64_STV_UNALIGNED(5)
|
|
+ PPC64_STV_UNALIGNED(6)
|
|
+ PPC64_STV_UNALIGNED(7)
|
|
+ PPC64_STV_UNALIGNED(8)
|
|
+ PPC64_STV_UNALIGNED(9)
|
|
+ PPC64_STV_UNALIGNED(10)
|
|
+ PPC64_STV_UNALIGNED(11)
|
|
+ PPC64_STV_UNALIGNED(12)
|
|
+ PPC64_STV_UNALIGNED(13)
|
|
+ PPC64_STV_UNALIGNED(14)
|
|
+ PPC64_STV_UNALIGNED(15)
|
|
+ PPC64_STV_UNALIGNED(16)
|
|
+ PPC64_STV_UNALIGNED(17)
|
|
+ PPC64_STV_UNALIGNED(18)
|
|
+ PPC64_STV_UNALIGNED(19)
|
|
+ PPC64_STV_UNALIGNED(20)
|
|
+ PPC64_STV_UNALIGNED(21)
|
|
+ PPC64_STV_UNALIGNED(22)
|
|
+ PPC64_STV_UNALIGNED(23)
|
|
+ PPC64_STV_UNALIGNED(24)
|
|
+ PPC64_STV_UNALIGNED(25)
|
|
+ PPC64_STV_UNALIGNED(26)
|
|
+ PPC64_STV_UNALIGNED(27)
|
|
+ PPC64_STV_UNALIGNED(28)
|
|
+ PPC64_STV_UNALIGNED(29)
|
|
+ PPC64_STV_UNALIGNED(30)
|
|
+ PPC64_STV_UNALIGNED(31)
|
|
+
|
|
+#endif
|
|
+
|
|
+ li %r3, 0 // return UNW_ESUCCESS
|
|
+ blr
|
|
+
|
|
+
|
|
#elif defined(__ppc__)
|
|
|
|
;
|
|
--- a/include/__libunwind_config.h
|
|
+++ b/include/__libunwind_config.h
|
|
@@ -26,6 +26,11 @@
|
|
# define _LIBUNWIND_CONTEXT_SIZE 21
|
|
# define _LIBUNWIND_CURSOR_SIZE 33
|
|
# define _LIBUNWIND_HIGHEST_DWARF_REGISTER 17
|
|
+# elif defined(__powerpc64__)
|
|
+# define _LIBUNWIND_TARGET_PPC64 1
|
|
+# define _LIBUNWIND_CONTEXT_SIZE 167
|
|
+# define _LIBUNWIND_CURSOR_SIZE 179
|
|
+# define _LIBUNWIND_HIGHEST_DWARF_REGISTER 116
|
|
# elif defined(__ppc__)
|
|
# define _LIBUNWIND_TARGET_PPC 1
|
|
# define _LIBUNWIND_CONTEXT_SIZE 117
|
|
@@ -58,11 +63,12 @@
|
|
# define _LIBUNWIND_TARGET_I386
|
|
# define _LIBUNWIND_TARGET_X86_64 1
|
|
# define _LIBUNWIND_TARGET_PPC 1
|
|
+# define _LIBUNWIND_TARGET_PPC64 1
|
|
# define _LIBUNWIND_TARGET_AARCH64 1
|
|
# define _LIBUNWIND_TARGET_ARM 1
|
|
# define _LIBUNWIND_TARGET_OR1K 1
|
|
-# define _LIBUNWIND_CONTEXT_SIZE 128
|
|
-# define _LIBUNWIND_CURSOR_SIZE 140
|
|
+# define _LIBUNWIND_CONTEXT_SIZE 167
|
|
+# define _LIBUNWIND_CURSOR_SIZE 179
|
|
# define _LIBUNWIND_HIGHEST_DWARF_REGISTER 120
|
|
#endif // _LIBUNWIND_IS_NATIVE_ONLY
|
|
|
|
--- a/src/assembly.h
|
|
+++ b/src/assembly.h
|
|
@@ -16,7 +16,20 @@
|
|
#ifndef UNWIND_ASSEMBLY_H
|
|
#define UNWIND_ASSEMBLY_H
|
|
|
|
-#if defined(__POWERPC__) || defined(__powerpc__) || defined(__ppc__)
|
|
+#if defined(__powerpc64__)
|
|
+#define SEPARATOR ;
|
|
+#define PPC64_OFFS_SRR0 0
|
|
+#define PPC64_OFFS_CR 272
|
|
+#define PPC64_OFFS_XER 280
|
|
+#define PPC64_OFFS_LR 288
|
|
+#define PPC64_OFFS_CTR 296
|
|
+#define PPC64_OFFS_VRSAVE 304
|
|
+#define PPC64_OFFS_FP 312
|
|
+#define PPC64_OFFS_V 824
|
|
+#ifdef _ARCH_PWR8
|
|
+#define PPC64_HAS_VMX
|
|
+#endif
|
|
+#elif defined(__POWERPC__) || defined(__powerpc__) || defined(__ppc__)
|
|
#define SEPARATOR @
|
|
#elif defined(__arm64__)
|
|
#define SEPARATOR %%
|
|
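The offset constants just defined, the static_asserts for 312 and 1336 in the Registers.hpp hunk, and _LIBUNWIND_CONTEXT_SIZE 167 in __libunwind_config.h all fall out of the same layout arithmetic. A stand-alone sketch of that derivation (illustrative only, not code from the patch):

#include <cstdint>

// ppc64_thread_state_t is srr0, srr1, r0..r31, cr, xer, lr, ctr, vrsave,
// followed by 64 vector-scalar slots of 16 bytes each.
constexpr std::uint64_t kReg = 8;    // GPRs and special registers are 8 bytes
constexpr std::uint64_t kVsr = 16;   // each VSX slot is 16 bytes

constexpr std::uint64_t kOffsCr     = (2 + 32) * kReg;     // after srr0, srr1, r0..r31
constexpr std::uint64_t kOffsVrsave = kOffsCr + 4 * kReg;  // after cr, xer, lr, ctr
constexpr std::uint64_t kOffsFp     = kOffsVrsave + kReg;  // VSX array begins here
constexpr std::uint64_t kOffsV      = kOffsFp + 32 * kVsr; // slots 32..63 hold v0..v31
constexpr std::uint64_t kContext    = kOffsFp + 64 * kVsr; // whole saved context

static_assert(kOffsCr == 272 && kOffsVrsave == 304, "PPC64_OFFS_CR / PPC64_OFFS_VRSAVE");
static_assert(kOffsFp == 312 && kOffsV == 824, "PPC64_OFFS_FP / PPC64_OFFS_V");
static_assert(kContext == 1336 && kContext / 8 == 167, "_LIBUNWIND_CONTEXT_SIZE");

int main() { return 0; }

GPR n itself sits at 8 * (n + 2) from the start of the context, which is exactly the offset the PPC64_STR and PPC64_LR macros compute.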
--- a/src/config.h
|
|
+++ b/src/config.h
|
|
@@ -57,16 +57,20 @@
|
|
#define _LIBUNWIND_BUILD_SJLJ_APIS
|
|
#endif
|
|
|
|
-#if defined(__i386__) || defined(__x86_64__) || defined(__ppc__) || defined(__ppc64__)
|
|
+#if defined(__i386__) || defined(__x86_64__) || defined(__ppc__) || defined(__ppc64__) || defined(__powerpc64__)
|
|
#define _LIBUNWIND_SUPPORT_FRAME_APIS
|
|
#endif
|
|
|
|
#if defined(__i386__) || defined(__x86_64__) || \
|
|
- defined(__ppc__) || defined(__ppc64__) || \
|
|
+ defined(__ppc__) || defined(__ppc64__) || defined(__powerpc64__) || \
|
|
(!defined(__APPLE__) && defined(__arm__)) || \
|
|
(defined(__arm64__) || defined(__aarch64__)) || \
|
|
(defined(__APPLE__) && defined(__mips__))
|
|
#define _LIBUNWIND_BUILD_ZERO_COST_APIS
|
|
+#endif
|
|
+
|
|
+#if defined(__powerpc64__) && defined(_ARCH_PWR8)
|
|
+#define PPC64_HAS_VMX
|
|
#endif
|
|
|
|
#if defined(NDEBUG) && defined(_LIBUNWIND_IS_BAREMETAL)
|
|
--- a/src/libunwind.cpp
|
|
+++ b/src/libunwind.cpp
|
|
@@ -50,6 +50,8 @@
|
|
# define REGISTER_KIND Registers_x86
|
|
#elif defined(__x86_64__)
|
|
# define REGISTER_KIND Registers_x86_64
|
|
+#elif defined(__powerpc64__)
|
|
+# define REGISTER_KIND Registers_ppc64
|
|
#elif defined(__ppc__)
|
|
# define REGISTER_KIND Registers_ppc
|
|
#elif defined(__aarch64__)
|
|
--- a/include/libunwind.h
|
|
+++ b/include/libunwind.h
|
|
@@ -309,6 +309,190 @@
|
|
UNW_PPC_SPEFSCR = 112
|
|
};
|
|
|
|
+// 64-bit ppc register numbers
|
|
+enum {
|
|
+ UNW_PPC64_R0 = 0,
|
|
+ UNW_PPC64_R1 = 1,
|
|
+ UNW_PPC64_R2 = 2,
|
|
+ UNW_PPC64_R3 = 3,
|
|
+ UNW_PPC64_R4 = 4,
|
|
+ UNW_PPC64_R5 = 5,
|
|
+ UNW_PPC64_R6 = 6,
|
|
+ UNW_PPC64_R7 = 7,
|
|
+ UNW_PPC64_R8 = 8,
|
|
+ UNW_PPC64_R9 = 9,
|
|
+ UNW_PPC64_R10 = 10,
|
|
+ UNW_PPC64_R11 = 11,
|
|
+ UNW_PPC64_R12 = 12,
|
|
+ UNW_PPC64_R13 = 13,
|
|
+ UNW_PPC64_R14 = 14,
|
|
+ UNW_PPC64_R15 = 15,
|
|
+ UNW_PPC64_R16 = 16,
|
|
+ UNW_PPC64_R17 = 17,
|
|
+ UNW_PPC64_R18 = 18,
|
|
+ UNW_PPC64_R19 = 19,
|
|
+ UNW_PPC64_R20 = 20,
|
|
+ UNW_PPC64_R21 = 21,
|
|
+ UNW_PPC64_R22 = 22,
|
|
+ UNW_PPC64_R23 = 23,
|
|
+ UNW_PPC64_R24 = 24,
|
|
+ UNW_PPC64_R25 = 25,
|
|
+ UNW_PPC64_R26 = 26,
|
|
+ UNW_PPC64_R27 = 27,
|
|
+ UNW_PPC64_R28 = 28,
|
|
+ UNW_PPC64_R29 = 29,
|
|
+ UNW_PPC64_R30 = 30,
|
|
+ UNW_PPC64_R31 = 31,
|
|
+ UNW_PPC64_F0 = 32,
|
|
+ UNW_PPC64_F1 = 33,
|
|
+ UNW_PPC64_F2 = 34,
|
|
+ UNW_PPC64_F3 = 35,
|
|
+ UNW_PPC64_F4 = 36,
|
|
+ UNW_PPC64_F5 = 37,
|
|
+ UNW_PPC64_F6 = 38,
|
|
+ UNW_PPC64_F7 = 39,
|
|
+ UNW_PPC64_F8 = 40,
|
|
+ UNW_PPC64_F9 = 41,
|
|
+ UNW_PPC64_F10 = 42,
|
|
+ UNW_PPC64_F11 = 43,
|
|
+ UNW_PPC64_F12 = 44,
|
|
+ UNW_PPC64_F13 = 45,
|
|
+ UNW_PPC64_F14 = 46,
|
|
+ UNW_PPC64_F15 = 47,
|
|
+ UNW_PPC64_F16 = 48,
|
|
+ UNW_PPC64_F17 = 49,
|
|
+ UNW_PPC64_F18 = 50,
|
|
+ UNW_PPC64_F19 = 51,
|
|
+ UNW_PPC64_F20 = 52,
|
|
+ UNW_PPC64_F21 = 53,
|
|
+ UNW_PPC64_F22 = 54,
|
|
+ UNW_PPC64_F23 = 55,
|
|
+ UNW_PPC64_F24 = 56,
|
|
+ UNW_PPC64_F25 = 57,
|
|
+ UNW_PPC64_F26 = 58,
|
|
+ UNW_PPC64_F27 = 59,
|
|
+ UNW_PPC64_F28 = 60,
|
|
+ UNW_PPC64_F29 = 61,
|
|
+ UNW_PPC64_F30 = 62,
|
|
+ UNW_PPC64_F31 = 63,
|
|
+ // 64: reserved
|
|
+ UNW_PPC64_LR = 65,
|
|
+ UNW_PPC64_CTR = 66,
|
|
+ // 67: reserved
|
|
+ UNW_PPC64_CR0 = 68,
|
|
+ UNW_PPC64_CR1 = 69,
|
|
+ UNW_PPC64_CR2 = 70,
|
|
+ UNW_PPC64_CR3 = 71,
|
|
+ UNW_PPC64_CR4 = 72,
|
|
+ UNW_PPC64_CR5 = 73,
|
|
+ UNW_PPC64_CR6 = 74,
|
|
+ UNW_PPC64_CR7 = 75,
|
|
+ UNW_PPC64_XER = 76,
|
|
+ UNW_PPC64_V0 = 77,
|
|
+ UNW_PPC64_V1 = 78,
|
|
+ UNW_PPC64_V2 = 79,
|
|
+ UNW_PPC64_V3 = 80,
|
|
+ UNW_PPC64_V4 = 81,
|
|
+ UNW_PPC64_V5 = 82,
|
|
+ UNW_PPC64_V6 = 83,
|
|
+ UNW_PPC64_V7 = 84,
|
|
+ UNW_PPC64_V8 = 85,
|
|
+ UNW_PPC64_V9 = 86,
|
|
+ UNW_PPC64_V10 = 87,
|
|
+ UNW_PPC64_V11 = 88,
|
|
+ UNW_PPC64_V12 = 89,
|
|
+ UNW_PPC64_V13 = 90,
|
|
+ UNW_PPC64_V14 = 91,
|
|
+ UNW_PPC64_V15 = 92,
|
|
+ UNW_PPC64_V16 = 93,
|
|
+ UNW_PPC64_V17 = 94,
|
|
+ UNW_PPC64_V18 = 95,
|
|
+ UNW_PPC64_V19 = 96,
|
|
+ UNW_PPC64_V20 = 97,
|
|
+ UNW_PPC64_V21 = 98,
|
|
+ UNW_PPC64_V22 = 99,
|
|
+ UNW_PPC64_V23 = 100,
|
|
+ UNW_PPC64_V24 = 101,
|
|
+ UNW_PPC64_V25 = 102,
|
|
+ UNW_PPC64_V26 = 103,
|
|
+ UNW_PPC64_V27 = 104,
|
|
+ UNW_PPC64_V28 = 105,
|
|
+ UNW_PPC64_V29 = 106,
|
|
+ UNW_PPC64_V30 = 107,
|
|
+ UNW_PPC64_V31 = 108,
|
|
+ // 109, 111-113: OpenPOWER ELF V2 ABI: reserved
|
|
+ // Borrowing VRSAVE number from PPC32.
|
|
+ UNW_PPC64_VRSAVE = 109,
|
|
+ UNW_PPC64_VSCR = 110,
|
|
+ UNW_PPC64_TFHAR = 114,
|
|
+ UNW_PPC64_TFIAR = 115,
|
|
+ UNW_PPC64_TEXASR = 116,
|
|
+ UNW_PPC64_VS0 = UNW_PPC64_F0,
|
|
+ UNW_PPC64_VS1 = UNW_PPC64_F1,
|
|
+ UNW_PPC64_VS2 = UNW_PPC64_F2,
|
|
+ UNW_PPC64_VS3 = UNW_PPC64_F3,
|
|
+ UNW_PPC64_VS4 = UNW_PPC64_F4,
|
|
+ UNW_PPC64_VS5 = UNW_PPC64_F5,
|
|
+ UNW_PPC64_VS6 = UNW_PPC64_F6,
|
|
+ UNW_PPC64_VS7 = UNW_PPC64_F7,
|
|
+ UNW_PPC64_VS8 = UNW_PPC64_F8,
|
|
+ UNW_PPC64_VS9 = UNW_PPC64_F9,
|
|
+ UNW_PPC64_VS10 = UNW_PPC64_F10,
|
|
+ UNW_PPC64_VS11 = UNW_PPC64_F11,
|
|
+ UNW_PPC64_VS12 = UNW_PPC64_F12,
|
|
+ UNW_PPC64_VS13 = UNW_PPC64_F13,
|
|
+ UNW_PPC64_VS14 = UNW_PPC64_F14,
|
|
+ UNW_PPC64_VS15 = UNW_PPC64_F15,
|
|
+ UNW_PPC64_VS16 = UNW_PPC64_F16,
|
|
+ UNW_PPC64_VS17 = UNW_PPC64_F17,
|
|
+ UNW_PPC64_VS18 = UNW_PPC64_F18,
|
|
+ UNW_PPC64_VS19 = UNW_PPC64_F19,
|
|
+ UNW_PPC64_VS20 = UNW_PPC64_F20,
|
|
+ UNW_PPC64_VS21 = UNW_PPC64_F21,
|
|
+ UNW_PPC64_VS22 = UNW_PPC64_F22,
|
|
+ UNW_PPC64_VS23 = UNW_PPC64_F23,
|
|
+ UNW_PPC64_VS24 = UNW_PPC64_F24,
|
|
+ UNW_PPC64_VS25 = UNW_PPC64_F25,
|
|
+ UNW_PPC64_VS26 = UNW_PPC64_F26,
|
|
+ UNW_PPC64_VS27 = UNW_PPC64_F27,
|
|
+ UNW_PPC64_VS28 = UNW_PPC64_F28,
|
|
+ UNW_PPC64_VS29 = UNW_PPC64_F29,
|
|
+ UNW_PPC64_VS30 = UNW_PPC64_F30,
|
|
+ UNW_PPC64_VS31 = UNW_PPC64_F31,
|
|
+ UNW_PPC64_VS32 = UNW_PPC64_V0,
|
|
+ UNW_PPC64_VS33 = UNW_PPC64_V1,
|
|
+ UNW_PPC64_VS34 = UNW_PPC64_V2,
|
|
+ UNW_PPC64_VS35 = UNW_PPC64_V3,
|
|
+ UNW_PPC64_VS36 = UNW_PPC64_V4,
|
|
+ UNW_PPC64_VS37 = UNW_PPC64_V5,
|
|
+ UNW_PPC64_VS38 = UNW_PPC64_V6,
|
|
+ UNW_PPC64_VS39 = UNW_PPC64_V7,
|
|
+ UNW_PPC64_VS40 = UNW_PPC64_V8,
|
|
+ UNW_PPC64_VS41 = UNW_PPC64_V9,
|
|
+ UNW_PPC64_VS42 = UNW_PPC64_V10,
|
|
+ UNW_PPC64_VS43 = UNW_PPC64_V11,
|
|
+ UNW_PPC64_VS44 = UNW_PPC64_V12,
|
|
+ UNW_PPC64_VS45 = UNW_PPC64_V13,
|
|
+ UNW_PPC64_VS46 = UNW_PPC64_V14,
|
|
+ UNW_PPC64_VS47 = UNW_PPC64_V15,
|
|
+ UNW_PPC64_VS48 = UNW_PPC64_V16,
|
|
+ UNW_PPC64_VS49 = UNW_PPC64_V17,
|
|
+ UNW_PPC64_VS50 = UNW_PPC64_V18,
|
|
+ UNW_PPC64_VS51 = UNW_PPC64_V19,
|
|
+ UNW_PPC64_VS52 = UNW_PPC64_V20,
|
|
+ UNW_PPC64_VS53 = UNW_PPC64_V21,
|
|
+ UNW_PPC64_VS54 = UNW_PPC64_V22,
|
|
+ UNW_PPC64_VS55 = UNW_PPC64_V23,
|
|
+ UNW_PPC64_VS56 = UNW_PPC64_V24,
|
|
+ UNW_PPC64_VS57 = UNW_PPC64_V25,
|
|
+ UNW_PPC64_VS58 = UNW_PPC64_V26,
|
|
+ UNW_PPC64_VS59 = UNW_PPC64_V27,
|
|
+ UNW_PPC64_VS60 = UNW_PPC64_V28,
|
|
+ UNW_PPC64_VS61 = UNW_PPC64_V29,
|
|
+ UNW_PPC64_VS62 = UNW_PPC64_V30,
|
|
+ UNW_PPC64_VS63 = UNW_PPC64_V31
|
|
+};
|
|
+
|
|
// 64-bit ARM64 registers
|
|
enum {
|
|
UNW_ARM64_X0 = 0,
|