diff --git a/src/internal/bytealg/compare_arm64.s b/src/internal/bytealg/compare_arm64.s
index 9b6354715a..2bd38064c3 100644
--- a/src/internal/bytealg/compare_arm64.s
+++ b/src/internal/bytealg/compare_arm64.s
@@ -10,7 +10,7 @@ TEXT ·Compare(SB),NOSPLIT|NOFRAME,$0-56
 	MOVD	a_len+8(FP), R0
 	MOVD	b_base+24(FP), R3
 	MOVD	b_len+32(FP), R1
-	ADD	$56, RSP, R7
+	MOVD	$ret+48(FP), R7
 	B	cmpbody<>(SB)
 
 TEXT bytes·Compare(SB),NOSPLIT|NOFRAME,$0-56
@@ -18,7 +18,7 @@ TEXT bytes·Compare(SB),NOSPLIT|NOFRAME,$0-56
 	MOVD	a_len+8(FP), R0
 	MOVD	b_base+24(FP), R3
 	MOVD	b_len+32(FP), R1
-	ADD	$56, RSP, R7
+	MOVD	$ret+48(FP), R7
 	B	cmpbody<>(SB)
 
 TEXT runtime·cmpstring(SB),NOSPLIT|NOFRAME,$0-40
@@ -26,7 +26,7 @@ TEXT runtime·cmpstring(SB),NOSPLIT|NOFRAME,$0-40
 	MOVD	a_len+8(FP), R0
 	MOVD	b_base+16(FP), R3
 	MOVD	b_len+24(FP), R1
-	ADD	$40, RSP, R7
+	MOVD	$ret+32(FP), R7
 	B	cmpbody<>(SB)
 
 // On entry:
@@ -37,30 +37,98 @@ TEXT runtime·cmpstring(SB),NOSPLIT|NOFRAME,$0-40
 // R0 is the length of a
 // R1 is the length of b
 // R2 points to the start of a
 // R3 points to the start of b
 // R7 points to return value (-1/0/1 will be written here)
 //
 // On exit:
-// R4, R5, and R6 are clobbered
+// R4, R5, R6, R8, R9 and R10 are clobbered
TEXT cmpbody<>(SB),NOSPLIT|NOFRAME,$0-0
 	CMP	R2, R3
-	BEQ	samebytes // same starting pointers; compare lengths
+	BEQ	samebytes         // same starting pointers; compare lengths
 	CMP	R0, R1
-	CSEL	LT, R1, R0, R6 // R6 is min(R0, R1)
+	CSEL	LT, R1, R0, R6    // R6 is min(R0, R1)
 
-	ADD	R2, R6	// R2 is current byte in a, R6 is last byte in a to compare
-loop:
-	CMP	R2, R6
-	BEQ	samebytes // all compared bytes were the same; compare lengths
-	MOVBU.P	1(R2), R4
-	MOVBU.P	1(R3), R5
+	CMP	$0, R6
+	BEQ	samebytes
+	BIC	$0xf, R6, R10
+	CBZ	R10, small        // length < 16
+	ADD	R2, R10           // end of chunk16
+	// length >= 16
+chunk16_loop:
+	LDP.P	16(R2), (R4, R8)
+	LDP.P	16(R3), (R5, R9)
 	CMP	R4, R5
-	BEQ	loop
-	// bytes differed
+	BNE	cmp
+	CMP	R8, R9
+	BNE	cmpnext
+	CMP	R10, R2
+	BNE	chunk16_loop
+	AND	$0xf, R6, R6
+	CBZ	R6, samebytes
+	SUBS	$8, R6
+	BLT	tail
+	// the length of tail > 8 bytes
+	MOVD.P	8(R2), R4
+	MOVD.P	8(R3), R5
+	CMP	R4, R5
+	BNE	cmp
+	SUB	$8, R6
+	// compare last 8 bytes
+tail:
+	MOVD	(R2)(R6), R4
+	MOVD	(R3)(R6), R5
+	CMP	R4, R5
+	BEQ	samebytes
+cmp:
+	REV	R4, R4
+	REV	R5, R5
+	CMP	R4, R5
+ret:
 	MOVD	$1, R4
-	CSNEG	LT, R4, R4, R4
+	CNEG	HI, R4, R4
 	MOVD	R4, (R7)
 	RET
+small:
+	TBZ	$3, R6, lt_8
+	MOVD	(R2), R4
+	MOVD	(R3), R5
+	CMP	R4, R5
+	BNE	cmp
+	SUBS	$8, R6
+	BEQ	samebytes
+	ADD	$8, R2
+	ADD	$8, R3
+	SUB	$8, R6
+	B	tail
+lt_8:
+	TBZ	$2, R6, lt_4
+	MOVWU	(R2), R4
+	MOVWU	(R3), R5
+	CMPW	R4, R5
+	BNE	cmp
+	SUBS	$4, R6
+	BEQ	samebytes
+	ADD	$4, R2
+	ADD	$4, R3
+lt_4:
+	TBZ	$1, R6, lt_2
+	MOVHU	(R2), R4
+	MOVHU	(R3), R5
+	CMPW	R4, R5
+	BNE	cmp
+	ADD	$2, R2
+	ADD	$2, R3
+lt_2:
+	TBZ	$0, R6, samebytes
+one:
+	MOVBU	(R2), R4
+	MOVBU	(R3), R5
+	CMPW	R4, R5
+	BNE	ret
 samebytes:
-	MOVD	$1, R4
-	CMP	R0, R1
-	CSNEG	LT, R4, R4, R4
-	CSEL	EQ, ZR, R4, R4
+	CMP	R1, R0
+	CSET	NE, R4
+	CNEG	LO, R4, R4
 	MOVD	R4, (R7)
 	RET
+cmpnext:
+	REV	R8, R4
+	REV	R9, R5
+	CMP	R4, R5
+	B	ret
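
For reference, the rewritten cmpbody follows a common chunked-compare scheme: LDP loads 16 bytes of each operand per iteration, REV byte-reverses the first pair of differing words so that an unsigned word compare yields lexicographic byte order, the tail is finished with an overlapping 8-byte load, and lengths below 16 take a separate 8/4/2/1-byte ladder (small/lt_8/lt_4/lt_2/one). The Go sketch below illustrates only that control flow; compareRef and cmp8 are hypothetical names, the short-input ladder is collapsed into a byte loop, and this is not the package's actual Go source.

package main

import (
	"encoding/binary"
	"fmt"
	"math/bits"
)

// cmp8 mirrors the REV/REV/CMP sequence: byte-reversing a little-endian
// load moves the first byte in memory to the most significant position,
// so an unsigned integer compare gives lexicographic order.
// Called only when x != y.
func cmp8(x, y uint64) int {
	if bits.ReverseBytes64(x) > bits.ReverseBytes64(y) {
		return 1
	}
	return -1
}

// compareRef sketches cmpbody's control flow: 16-byte chunks
// (chunk16_loop), an overlapping 8-byte load for the tail (tail:), and
// a short-input path (small:, reduced here to a byte loop).
func compareRef(a, b []byte) int {
	n := len(a)
	if len(b) < n {
		n = len(b)
	}
	i := 0
	for ; n-i >= 16; i += 16 { // chunk16_loop: two 8-byte words per pass
		if x, y := binary.LittleEndian.Uint64(a[i:]), binary.LittleEndian.Uint64(b[i:]); x != y {
			return cmp8(x, y) // cmp
		}
		if x, y := binary.LittleEndian.Uint64(a[i+8:]), binary.LittleEndian.Uint64(b[i+8:]); x != y {
			return cmp8(x, y) // cmpnext
		}
	}
	if n >= 8 {
		if n-i > 8 { // tail longer than 8 bytes: one full word first
			if x, y := binary.LittleEndian.Uint64(a[i:]), binary.LittleEndian.Uint64(b[i:]); x != y {
				return cmp8(x, y)
			}
		}
		// tail: re-read the last 8 bytes with an overlapping load, as
		// MOVD (R2)(R6) does; re-read bytes are already known equal.
		if x, y := binary.LittleEndian.Uint64(a[n-8:]), binary.LittleEndian.Uint64(b[n-8:]); x != y {
			return cmp8(x, y)
		}
	} else {
		for ; i < n; i++ { // small: stands in for the 4/2/1-byte ladder
			if a[i] != b[i] {
				if a[i] > b[i] {
					return 1
				}
				return -1
			}
		}
	}
	// samebytes: all compared bytes equal, so order by length.
	switch {
	case len(a) > len(b):
		return 1
	case len(a) < len(b):
		return -1
	}
	return 0
}

func main() {
	fmt.Println(compareRef([]byte("abcdefghijklmnop"), []byte("abcdefghijklmnoq"))) // -1
	fmt.Println(compareRef([]byte("golang"), []byte("go")))                         // 1
	fmt.Println(compareRef([]byte("go"), []byte("go")))                             // 0
}

The overlapping tail load is what lets the assembly avoid a byte-at-a-time loop for the remainder: any bytes the final 8-byte load re-reads were already compared equal, so re-comparing them cannot change the result.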