/*
 * Copyright (C) 2013 Synopsys, Inc. (www.synopsys.com)
 * Copyright (C) 2007 ARC International (UK) LTD
 *
 * Licensed under the LGPL v2.1 or later, see the file COPYING.LIB in this tarball.
 */
#include <sysdep.h>
#include <features.h>

#ifdef __LITTLE_ENDIAN__
#define WORD2 r2
#define SHIFT r3
#else /* BIG ENDIAN */
#define WORD2 r3
#define SHIFT r2
#endif

ENTRY(memcmp)
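	/* r0 = s1, r1 = s2, r2 = byte count; the sign of the first byte
	 * difference is returned in r0.  As a rough C sketch of the
	 * reference semantics (annotation, not part of the original
	 * source; memcmp only guarantees the sign of the result):
	 *
	 *	int memcmp(const void *s1, const void *s2, size_t n)
	 *	{
	 *		const unsigned char *p = s1, *q = s2;
	 *		while (n--) {
	 *			if (*p != *q)
	 *				return *p - *q;
	 *			p++, q++;
	 *		}
	 *		return 0;
	 *	}
	 *
	 * If either pointer is misaligned, the low two address bits moved
	 * to the top of r12 by the asl make it at least 0x40000000, so
	 * brls falls back to the byte loop for any sane count; with both
	 * pointers aligned, r12 is zero and only a zero count takes that
	 * path.
	 */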
	or	r12,r0,r1
	asl_s	r12,r12,30
	sub	r3,r2,1
	brls	r2,r12,.Lbytewise
	ld	r4,[r0,0]
	ld	r5,[r1,0]
	lsr.f	lp_count,r3,3
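	/* Each zero-overhead-loop iteration compares two aligned word
	 * pairs, i.e. 8 bytes, hence lp_count = (count-1)/8.  Nothing in
	 * the loop body writes the status flags, so the carry produced
	 * here (bit 2 of count-1) is still live at the bhs_s after the
	 * loop.  (annotation)
	 */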
#ifdef __HS__
	/* In ARCv2 a branch can't be the last instruction in a zero
	 * overhead loop.
	 * So we move the branch to the start of the loop, duplicate it
	 * after the end, and set up r12 so that the branch isn't taken
	 * initially.
	 */
	mov_s	r12,WORD2
	lpne	.Loop_end
	brne	WORD2,r12,.Lodd
	ld	WORD2,[r0,4]
#else
	lpne	.Loop_end
	ld_s	WORD2,[r0,4]
#endif
	ld_s	r12,[r1,4]
	brne	r4,r5,.Leven
	ld.a	r4,[r0,8]
	ld.a	r5,[r1,8]
#ifdef __HS__
.Loop_end:
	brne	WORD2,r12,.Lodd
#else
	brne	WORD2,r12,.Lodd
.Loop_end:
#endif
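	/* No difference in the words compared so far.  Convert SHIFT
	 * from a byte count to a bit count; the carry still set by the
	 * lsr.f above selects whether the word pair already prefetched
	 * into r4/r5 must be checked before the final, possibly partial,
	 * word.  (annotation)
	 */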
	asl_s	SHIFT,SHIFT,3
	bhs_s	.Last_cmp
	brne	r4,r5,.Leven
	ld	r4,[r0,4]
	ld	r5,[r1,4]
#ifdef __LITTLE_ENDIAN__
	nop_s
	; one more load latency cycle
.Last_cmp:
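	/* Only the low (count-1)%4 + 1 bytes of the last word are valid.
	 * Planting a sentinel bit at position SHIFT (the lowest bit of
	 * the last valid byte) guarantees the lowest set bit of r0 lies
	 * at or below that byte, so garbage beyond the count can never
	 * decide the result.  (annotation)
	 */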
	xor	r0,r4,r5
	bset	r0,r0,SHIFT
	sub_s	r1,r0,1
	bic_s	r1,r1,r0
	norm	r1,r1
	b.d	.Leven_cmp
	and	r1,r1,24
.Leven:
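	/* r4 and r5 differ.  (r0-1) & ~r0 masks the bits below the
	 * lowest set bit of r0 = r4^r5; norm turns that into 31 minus
	 * the bit position, and anding with 24 rounds down to a multiple
	 * of 8.  Shifting left by that amount moves the first differing
	 * byte in memory (the least significant one on little endian)
	 * into the top byte of each word; the lsr by one below keeps the
	 * final subtraction from overflowing.  (annotation)
	 */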
	xor	r0,r4,r5
	sub_s	r1,r0,1
	bic_s	r1,r1,r0
	norm	r1,r1
	; slow track insn
	and	r1,r1,24
.Leven_cmp:
	asl	r2,r4,r1
	asl	r12,r5,r1
	lsr_s	r2,r2,1
	lsr_s	r12,r12,1
	j_s.d	[blink]
	sub	r0,r2,r12
	.balign	4
.Lodd:
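	/* The pair loaded at offset 4 (WORD2/r12) differs; isolate the
	 * first differing byte exactly as in .Leven.  (annotation)
	 */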
	xor	r0,WORD2,r12
	sub_s	r1,r0,1
	bic_s	r1,r1,r0
	norm	r1,r1
	; slow track insn
	and	r1,r1,24
	asl_s	r2,r2,r1
	asl_s	r12,r12,r1
	lsr_s	r2,r2,1
	lsr_s	r12,r12,1
	j_s.d	[blink]
	sub	r0,r2,r12
#else /* BIG ENDIAN */
.Last_cmp:
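	/* Big endian: the valid bytes of the last word are the most
	 * significant ones.  SHIFT (r2) still holds the byte count, so
	 * after the negate the lsr amount is (32 - 8*count) mod 32,
	 * which discards exactly the bytes beyond the count (a multiple
	 * of four keeps the whole word).  (annotation)
	 */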
	neg_s	SHIFT,SHIFT
	lsr	r4,r4,SHIFT
	lsr	r5,r5,SHIFT
	; slow track insn
.Leven:
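	/* On big endian word order matches memory order, so the unsigned
	 * word comparison decides directly: 1 if they differ, with bit 31
	 * set on borrow (r4 < r5) to make the result negative.
	 * (annotation)
	 */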
	sub.f	r0,r4,r5
	mov.ne	r0,1
	j_s.d	[blink]
	bset.cs	r0,r0,31
.Lodd:
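	/* Same scheme for a difference in the WORD2/r12 pair.
	 * (annotation)
	 */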
	cmp_s	WORD2,r12
	mov_s	r0,1
	j_s.d	[blink]
	bset.cs	r0,r0,31
#endif /* ENDIAN */
	.balign	4
.Lbytewise:
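	/* Unaligned or tiny input: two bytes per loop iteration,
	 * software-pipelined like the word loop, with
	 * lp_count = (count-1)/2.  (annotation)
	 */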
	breq	r2,0,.Lnil
	ldb	r4,[r0,0]
	ldb	r5,[r1,0]
	lsr.f	lp_count,r3
#ifdef __HS__
	mov	r12,r3
	lpne	.Lbyte_end
	brne	r3,r12,.Lbyte_odd
#else
	lpne	.Lbyte_end
#endif
	ldb_s	r3,[r0,1]
	ldb	r12,[r1,1]
	brne	r4,r5,.Lbyte_even
	ldb.a	r4,[r0,2]
	ldb.a	r5,[r1,2]
#ifdef __HS__
.Lbyte_end:
	brne	r3,r12,.Lbyte_odd
#else
	brne	r3,r12,.Lbyte_odd
.Lbyte_end:
#endif
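	/* The carry from "lsr.f lp_count,r3" (bit 0 of count-1) is still
	 * live here: if it is clear the count is odd and the byte pair
	 * prefetched into r4/r5 is the last one; otherwise one more pair
	 * must be fetched into r3/r12.  (annotation)
	 */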
	bcc	.Lbyte_even
	brne	r4,r5,.Lbyte_even
	ldb_s	r3,[r0,1]
	ldb_s	r12,[r1,1]
.Lbyte_odd:
	j_s.d	[blink]
	sub	r0,r3,r12
.Lbyte_even:
	j_s.d	[blink]
	sub	r0,r4,r5
.Lnil:
	j_s.d	[blink]
	mov	r0,0
END(memcmp)
libc_hidden_def(memcmp)

#ifdef __UCLIBC_SUSV3_LEGACY__
strong_alias(memcmp,bcmp)
#endif