! memcpy.S
/*
 * Copyright (C) 2016-2017 Andes Technology, Inc.
 * Licensed under the LGPL v2.1, see the file COPYING.LIB in this tarball.
 */
#include <sysdep.h>
!==========================================================
! void *memcpy(void *dst, const void *src, int n);
!
! dst: $r0
! src: $r1
! n  : $r2
! ret: $r0 - pointer to the memory area dst.
!==========================================================
! Strategy: byte-copy dst up to word alignment, word-copy dst up to a
! 32-byte (cache-line) boundary, then move whole cache lines with 8-word
! lmw.bim/smw.bim bursts, and finish the tail with word and byte copies.
! Scratch: $r3 (loop counts), $r4 (align counts / word data), $r5 (saved dst).
! NOTE(review): only dst is aligned; src may stay unaligned through the
! lmw.bim loads — presumably the core handles unaligned word access. TODO confirm.
	.weak	memcpy
ENTRY(memcpy)
	move	$r5, $r0		! save dst; restored into $r0 as the return value
	beq	$r0, $r1, .Lquit_memcpy	! dst == src: nothing to copy
	beqz	$r2, .Lquit_memcpy	! n == 0: nothing to copy
	srli	$r3, $r2, #5		! check if len < cache-line size 32
	beqz	$r3, .Lword_copy_entry	! short copy: skip all alignment work
	andi	$r4, $r0, #0x3		! check dst byte-alignment
	beqz	$r4, .Lunalign_word_copy_entry	! dst already word-aligned
	addi	$r4, $r4, #-4
	abs	$r4, $r4		! $r4 = 4 - (dst & 3) = un-aligned bytes to copy
	sub	$r2, $r2, $r4		! update remaining length $R2
.Lunalign_byte_copy:
	! copy $r4 bytes so dst becomes word-aligned
	lbi.bi	$r3, [$r1], #1		! *dst++ = *src++ (byte, post-increment)
	addi	$r4, $r4, #-1
	sbi.bi	$r3, [$r0], #1
	bnez	$r4, .Lunalign_byte_copy
	beqz	$r2, .Lquit_memcpy
.Lunalign_word_copy_entry:
	andi	$r3, $r0, 0x1f		! check dst's offset within a 32-byte cache line
	beqz	$r3, .Lcache_copy	! dst already cache-line aligned
	addi	$r3, $r3, #-32
	abs	$r3, $r3		! $r3 = 32 - (dst & 31) = bytes up to line boundary
	sub	$r2, $r2, $r3		! update remaining length $R2
.Lunalign_word_copy:
	! word-copy up to the cache-line boundary; $r3 is a multiple of 4
	! because dst is word-aligned here
	lmw.bim	$r4, [$r1], $r4		! load one word ($r4..$r4), post-increment src
	addi	$r3, $r3, #-4
	smw.bim	$r4, [$r0], $r4		! store one word, post-increment dst
	bnez	$r3, .Lunalign_word_copy
	beqz	$r2, .Lquit_memcpy
	addi	$r3, $r2, #-32		! if $r2 < cache_line, then go to .Lword_copy
	bltz	$r3, .Lword_copy_entry
.Lcache_copy:
	! bulk phase: move $r2 / 32 full cache lines, 8 words per iteration
	srli	$r3, $r2, #5
	beqz	$r3, .Lword_copy_entry
	pushm	$r6, $r13		! need $r6..$r13 as a 32-byte transfer buffer
	cfi_adjust_cfa_offset(32)
	cfi_rel_offset(r6, 0)
	cfi_rel_offset(r7, 4)
	cfi_rel_offset(r8, 8)
	cfi_rel_offset(r9, 12)
	cfi_rel_offset(r10, 16)
	cfi_rel_offset(r11, 20)
	cfi_rel_offset(r12, 24)
	cfi_rel_offset(r13, 28)
.L3:
	lmw.bim	$r6, [$r1], $r13	! load 8 words ($r6..$r13), post-increment src
	addi	$r3, $r3, #-1
	smw.bim	$r6, [$r0], $r13	! store 8 words, post-increment dst
	bnez	$r3, .L3
	popm	$r6, $r13		! restore callee-saved registers
	cfi_adjust_cfa_offset(-32)
	cfi_restore(r6)
	cfi_restore(r7)
	cfi_restore(r8)
	cfi_restore(r9)
	cfi_restore(r10)
	cfi_restore(r11)
	cfi_restore(r12)
	cfi_restore(r13)
.Lword_copy_entry:
	! tail phase: at most 31 bytes remain
	andi	$r2, $r2, #31		! remaining bytes after whole cache lines
	beqz	$r2, .Lquit_memcpy
	srli	$r3, $r2, #2		! $r3 = remaining whole words
	beqz	$r3, .Lbyte_copy
.Lword_copy:
	lmw.bim	$r4, [$r1], $r4		! load one word, post-increment src
	addi	$r3, $r3, #-1
	smw.bim	$r4, [$r0], $r4		! store one word, post-increment dst
	bnez	$r3, .Lword_copy
	andi	$r2, $r2, #3		! remaining bytes after whole words
	beqz	$r2, .Lquit_memcpy
.Lbyte_copy:
	! final 1-3 stray bytes
	lbi.bi	$r3, [$r1], #1
	addi	$r2, $r2, #-1
	sbi.bi	$r3, [$r0], #1
	bnez	$r2, .Lbyte_copy
.Lquit_memcpy:
	move	$r0, $r5		! return original dst pointer
	ret
END(memcpy)
libc_hidden_def(memcpy)