Lines Matching refs:rsi

45 and $0xfffffffffffffff0, %rsi /* force rsi 16 byte align */
51 pcmpeqb (%rsi), %xmm0 /* check 16 bytes in src for a null */
64 mov %rcx, %r9 /* rsi alignment offset */
73 pcmpeqb 16(%rsi), %xmm0 /* check next 16 bytes in src for a null */
89 mov (%rsi, %r9), %rdx
91 mov 8(%rsi, %r9), %rdx
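Lines 45-91 set up the classic SSE2 null-scan idiom: the source pointer is rounded down to a 16-byte boundary so that aligned loads can be used without ever crossing a page boundary, and pcmpeqb against an all-zero register marks the NUL bytes (the pmovmskb that turns this into a bitmask does not reference rsi and so is not shown). A minimal C sketch of the same idiom, assuming GCC/Clang intrinsics and __builtin_ctz; the function and variable names are mine, not from the listing:

    #include <emmintrin.h>   /* SSE2 */
    #include <stdint.h>

    /* Find the first NUL at or after s, mirroring lines 45-73:
     * align down, compare 16 bytes against zero, repeat.  Reading
     * the bytes below s is safe at the machine level (an aligned
     * 16-byte load cannot cross a page), though it is technically
     * out of bounds in strict C. */
    static const char *first_nul(const char *s)
    {
        uintptr_t off = (uintptr_t)s & 15;            /* alignment offset (r9) */
        const char *base = (const char *)((uintptr_t)s - off);  /* and $-16, %rsi */
        __m128i zero = _mm_setzero_si128();
        __m128i chunk = _mm_load_si128((const __m128i *)base);
        unsigned mask = (unsigned)_mm_movemask_epi8(_mm_cmpeq_epi8(chunk, zero));
        mask >>= off;                                 /* ignore bytes before s */
        while (mask == 0) {                           /* line 73: next 16 bytes */
            base += 16;
            off = 0;
            chunk = _mm_load_si128((const __m128i *)base);
            mask = (unsigned)_mm_movemask_epi8(_mm_cmpeq_epi8(chunk, zero));
        }
        return base + off + (unsigned)__builtin_ctz(mask);
    }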
95 * so far the destination rdi may be aligned by 16; re-calculate rsi and
97 * rcx is the offset of rsi
120 lea 16(%r9, %rsi), %rsi
122 and $0xfffffffffffffff0, %rsi /* force rsi 16 byte align */
124 and $0xf, %ecx /* new src offset is 0 if rsi/rdi have the same alignment */
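Lines 95-124 re-derive the working offsets once the destination is 16-byte aligned: lea 16(%r9, %rsi), %rsi steps past the chunk already copied, the and masks re-align rsi downward, and the low four bits land in rcx as the source's offset relative to the aligned destination. Offset 0 selects the movdqa fast path below; any other value selects the matching palignr case. A sketch of that computation, assuming the destination is already aligned (the name is mine):

    #include <stdint.h>

    /* Mirrors lines 120-124: align the source pointer down and return
     * its offset within the 16-byte chunk (the value kept in rcx). */
    static uintptr_t split_src(const char **src)
    {
        uintptr_t off = (uintptr_t)*src & 15;                     /* and $0xf, %ecx */
        *src = (const char *)((uintptr_t)*src & ~(uintptr_t)15);  /* and $-16, %rsi */
        return off;      /* 0 => src/dst share alignment */
    }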
152 movdqa (%rsi), %xmm1 /* fetch 16 bytes from src string */
154 add $16, %rsi
156 pcmpeqb (%rsi), %xmm0 /* check 16 bytes in src for a null */
167 movdqa (%rsi, %rcx), %xmm1
170 pcmpeqb (%rsi, %rcx), %xmm0
179 movdqa (%rsi, %rcx), %xmm1
182 pcmpeqb (%rsi, %rcx), %xmm0
191 movdqa (%rsi, %rcx), %xmm1
195 pcmpeqb (%rsi, %rcx), %xmm0
204 movdqa (%rsi, %rcx), %xmm1
207 pcmpeqb (%rsi, %rcx), %xmm0
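Lines 152-207 are the same-alignment fast path: with both pointers 16-byte aligned, each iteration is an aligned movdqa load and a pcmpeqb NUL test; the aligned stores reference rdi and so do not appear in this listing. A C sketch of the loop, assuming both pointers are 16-byte aligned on entry (names are mine):

    #include <emmintrin.h>

    /* Copy 16-byte chunks until one contains a NUL, then finish
     * byte-wise; returns a pointer to the NUL written into dst. */
    static char *copy_aligned16(char *dst, const char *src)
    {
        __m128i zero = _mm_setzero_si128();
        for (;;) {
            __m128i chunk = _mm_load_si128((const __m128i *)src);   /* movdqa */
            if (_mm_movemask_epi8(_mm_cmpeq_epi8(chunk, zero)))     /* pcmpeqb */
                break;                       /* NUL inside this chunk */
            _mm_store_si128((__m128i *)dst, chunk);
            src += 16;
            dst += 16;
        }
        while ((*dst++ = *src++) != '\0')    /* partial final chunk */
            ;
        return dst - 1;
    }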
218 * Based on the above operation, start from (%r9 + rsi) to the left of this cache
233 movdqa 16(%rsi, %rcx), %xmm3
243 #palignr $15, (%rsi, %rcx), %xmm3
254 movdqa 16(%rsi, %rcx), %xmm3
264 #palignr $15, (%rsi, %rcx), %xmm3
279 pcmpeqb 16(%rsi, %rcx), %xmm0
288 movdqa 16(%rsi, %rcx), %xmm3
289 movdqa (%rsi, %rcx), %xmm2
301 pcmpeqb 16(%rsi, %rcx), %xmm0
310 movdqa 16(%rsi, %rcx), %xmm3
311 movdqa (%rsi, %rcx), %xmm2
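Lines 218-311 are the first of fifteen unrolled cases for sources and destinations whose alignments differ; the fourteen blocks that follow repeat the same structure with palignr shift counts $14 down to $1. The idea is to keep two consecutive aligned 16-byte chunks of the source and let palignr splice the misaligned window out of them, so the store side can stay aligned. The commented-out #palignr lines preserve the memory-operand form that was replaced with an explicit movdqa into a register. A sketch of one splice step using the SSSE3 intrinsic; _mm_alignr_epi8 needs a compile-time shift count, which is exactly why the assembly carries one block per offset (names are mine):

    #include <tmmintrin.h>   /* SSSE3: palignr */

    /* One step of the misaligned path for shift 15: prev and next are
     * consecutive aligned chunks; the spliced result is the top byte
     * of prev followed by the low 15 bytes of next. */
    static __m128i splice15(__m128i prev, const char *src_aligned, char *dst)
    {
        __m128i next = _mm_load_si128((const __m128i *)src_aligned); /* movdqa 16(%rsi,%rcx) */
        __m128i out  = _mm_alignr_epi8(next, prev, 15);              /* palignr $15 */
        _mm_store_si128((__m128i *)dst, out);                        /* aligned store */
        return next;                                                 /* next becomes prev */
    }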
330 * Based on the above operation, start from (%r9 + rsi) to the left of this cache
345 movdqa 16(%rsi, %rcx), %xmm3
355 #palignr $14, (%rsi, %rcx), %xmm3
366 movdqa 16(%rsi, %rcx), %xmm3
376 #palignr $14, (%rsi, %rcx), %xmm3
390 pcmpeqb 16(%rsi, %rcx), %xmm0
399 movdqa 16(%rsi, %rcx), %xmm3
400 movdqa (%rsi, %rcx), %xmm2
413 pcmpeqb 16(%rsi, %rcx), %xmm0
422 movdqa 16(%rsi, %rcx), %xmm3
423 movdqa (%rsi, %rcx), %xmm2
442 * Based on the above operation, start from (%r9 + rsi) to the left of this cache
457 movdqa 16(%rsi, %rcx), %xmm3
467 #palignr $13, (%rsi, %rcx), %xmm3
478 movdqa 16(%rsi, %rcx), %xmm3
488 #palignr $13, (%rsi, %rcx), %xmm3
502 pcmpeqb 16(%rsi, %rcx), %xmm0
511 movdqa 16(%rsi, %rcx), %xmm3
512 movdqa (%rsi, %rcx), %xmm2
525 pcmpeqb 16(%rsi, %rcx), %xmm0
534 movdqa 16(%rsi, %rcx), %xmm3
535 movdqa (%rsi, %rcx), %xmm2
554 * Based on the above operation, start from (%r9 + rsi) to the left of this cache
569 movdqa 16(%rsi, %rcx), %xmm3
579 #palignr $12, (%rsi, %rcx), %xmm3
590 movdqa 16(%rsi, %rcx), %xmm3
600 #palignr $12, (%rsi, %rcx), %xmm3
614 pcmpeqb 16(%rsi, %rcx), %xmm0
623 movdqa 16(%rsi, %rcx), %xmm3
624 movdqa (%rsi, %rcx), %xmm2
637 pcmpeqb 16(%rsi, %rcx), %xmm0
646 movdqa 16(%rsi, %rcx), %xmm3
647 movdqa (%rsi, %rcx), %xmm2
666 * Based on the above operation, start from (%r9 + rsi) to the left of this cache
681 movdqa 16(%rsi, %rcx), %xmm3
691 #palignr $11, (%rsi, %rcx), %xmm3
702 movdqa 16(%rsi, %rcx), %xmm3
712 #palignr $11, (%rsi, %rcx), %xmm3
726 pcmpeqb 16(%rsi, %rcx), %xmm0
735 movdqa 16(%rsi, %rcx), %xmm3
736 movdqa (%rsi, %rcx), %xmm2
749 pcmpeqb 16(%rsi, %rcx), %xmm0
758 movdqa 16(%rsi, %rcx), %xmm3
759 movdqa (%rsi, %rcx), %xmm2
778 * Based on the above operation, start from (%r9 + rsi) to the left of this cache
793 movdqa 16(%rsi, %rcx), %xmm3
803 #palignr $10, (%rsi, %rcx), %xmm3
814 movdqa 16(%rsi, %rcx), %xmm3
824 #palignr $10, (%rsi, %rcx), %xmm3
838 pcmpeqb 16(%rsi, %rcx), %xmm0
847 movdqa 16(%rsi, %rcx), %xmm3
848 movdqa (%rsi, %rcx), %xmm2
861 pcmpeqb 16(%rsi, %rcx), %xmm0
870 movdqa 16(%rsi, %rcx), %xmm3
871 movdqa (%rsi, %rcx), %xmm2
890 * Based on the above operation, start from (%r9 + rsi) to the left of this cache
905 movdqa 16(%rsi, %rcx), %xmm3
915 #palignr $9, (%rsi, %rcx), %xmm3
926 movdqa 16(%rsi, %rcx), %xmm3
936 #palignr $9, (%rsi, %rcx), %xmm3
950 pcmpeqb 16(%rsi, %rcx), %xmm0
959 movdqa 16(%rsi, %rcx), %xmm3
960 movdqa (%rsi, %rcx), %xmm2
973 pcmpeqb 16(%rsi, %rcx), %xmm0
982 movdqa 16(%rsi, %rcx), %xmm3
983 movdqa (%rsi, %rcx), %xmm2
1002 * Based on the above operation, start from (%r9 + rsi) to the left of this cache
1017 movdqa 16(%rsi, %rcx), %xmm3
1027 #palignr $8, (%rsi, %rcx), %xmm3
1038 movdqa 16(%rsi, %rcx), %xmm3
1048 #palignr $8, (%rsi, %rcx), %xmm3
1062 pcmpeqb 16(%rsi, %rcx), %xmm0
1071 movdqa 16(%rsi, %rcx), %xmm3
1072 movdqa (%rsi, %rcx), %xmm2
1085 pcmpeqb 16(%rsi, %rcx), %xmm0
1094 movdqa 16(%rsi, %rcx), %xmm3
1095 movdqa (%rsi, %rcx), %xmm2
1114 * Based on the above operation, start from (%r9 + rsi) to the left of this cache
1129 movdqa 16(%rsi, %rcx), %xmm3
1139 #palignr $7, (%rsi, %rcx), %xmm3
1150 movdqa 16(%rsi, %rcx), %xmm3
1160 #palignr $7, (%rsi, %rcx), %xmm3
1174 pcmpeqb 16(%rsi, %rcx), %xmm0
1183 movdqa 16(%rsi, %rcx), %xmm3
1184 movdqa (%rsi, %rcx), %xmm2
1197 pcmpeqb 16(%rsi, %rcx), %xmm0
1206 movdqa 16(%rsi, %rcx), %xmm3
1207 movdqa (%rsi, %rcx), %xmm2
1226 * Based on the above operation, start from (%r9 + rsi) to the left of this cache
1241 movdqa 16(%rsi, %rcx), %xmm3
1251 #palignr $6, (%rsi, %rcx), %xmm3
1262 movdqa 16(%rsi, %rcx), %xmm3
1272 #palignr $6, (%rsi, %rcx), %xmm3
1286 pcmpeqb 16(%rsi, %rcx), %xmm0
1295 movdqa 16(%rsi, %rcx), %xmm3
1296 movdqa (%rsi, %rcx), %xmm2
1309 pcmpeqb 16(%rsi, %rcx), %xmm0
1318 movdqa 16(%rsi, %rcx), %xmm3
1319 movdqa (%rsi, %rcx), %xmm2
1338 * Based on the above operation, start from (%r9 + rsi) to the left of this cache
1353 movdqa 16(%rsi, %rcx), %xmm3
1363 #palignr $5, (%rsi, %rcx), %xmm3
1374 movdqa 16(%rsi, %rcx), %xmm3
1384 #palignr $5, (%rsi, %rcx), %xmm3
1398 pcmpeqb 16(%rsi, %rcx), %xmm0
1407 movdqa 16(%rsi, %rcx), %xmm3
1408 movdqa (%rsi, %rcx), %xmm2
1421 pcmpeqb 16(%rsi, %rcx), %xmm0
1430 movdqa 16(%rsi, %rcx), %xmm3
1431 movdqa (%rsi, %rcx), %xmm2
1450 * Based on the above operation, start from (%r9 + rsi) to the left of this cache
1465 movdqa 16(%rsi, %rcx), %xmm3
1475 #palignr $4, (%rsi, %rcx), %xmm3
1486 movdqa 16(%rsi, %rcx), %xmm3
1496 #palignr $4, (%rsi, %rcx), %xmm3
1510 pcmpeqb 16(%rsi, %rcx), %xmm0
1519 movdqa 16(%rsi, %rcx), %xmm3
1520 movdqa (%rsi, %rcx), %xmm2
1533 pcmpeqb 16(%rsi, %rcx), %xmm0
1542 movdqa 16(%rsi, %rcx), %xmm3
1543 movdqa (%rsi, %rcx), %xmm2
1562 * Based on the above operation, start from (%r9 + rsi) to the left of this cache
1577 movdqa 16(%rsi, %rcx), %xmm3
1587 #palignr $3, (%rsi, %rcx), %xmm3
1598 movdqa 16(%rsi, %rcx), %xmm3
1608 #palignr $3, (%rsi, %rcx), %xmm3
1622 pcmpeqb 16(%rsi, %rcx), %xmm0
1631 movdqa 16(%rsi, %rcx), %xmm3
1632 movdqa (%rsi, %rcx), %xmm2
1645 pcmpeqb 16(%rsi, %rcx), %xmm0
1654 movdqa 16(%rsi, %rcx), %xmm3
1655 movdqa (%rsi, %rcx), %xmm2
1674 * Based on the above operation, start from (%r9 + rsi) to the left of this cache
1689 movdqa 16(%rsi, %rcx), %xmm3
1699 #palignr $2, (%rsi, %rcx), %xmm3
1710 movdqa 16(%rsi, %rcx), %xmm3
1720 #palignr $2, (%rsi, %rcx), %xmm3
1734 pcmpeqb 16(%rsi, %rcx), %xmm0
1743 movdqa 16(%rsi, %rcx), %xmm3
1744 movdqa (%rsi, %rcx), %xmm2
1757 pcmpeqb 16(%rsi, %rcx), %xmm0
1766 movdqa 16(%rsi, %rcx), %xmm3
1767 movdqa (%rsi, %rcx), %xmm2
1786 * Based on the above operation, start from (%r9 + rsi) to the left of this cache
1801 movdqa 16(%rsi, %rcx), %xmm3
1811 #palignr $1, (%rsi, %rcx), %xmm3
1822 movdqa 16(%rsi, %rcx), %xmm3
1831 #palignr $1, (%rsi, %rcx), %xmm3
1845 pcmpeqb 16(%rsi, %rcx), %xmm0
1853 movdqa 16(%rsi, %rcx), %xmm3
1854 movdqa (%rsi, %rcx), %xmm2
1867 pcmpeqb 16(%rsi, %rcx), %xmm0
1876 movdqa 16(%rsi, %rcx), %xmm3
1877 movdqa (%rsi, %rcx), %xmm2
1900 add %r9, %rsi /* r9 holds offset of rsi */
1909 add %rcx, %rsi /* locate exact address for rsi */
1945 add %r9, %rsi /* next src char to copy */
1948 add %rcx, %rsi
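Lines 1900-1948 rebuild the exact byte address once the vector loop has located a NUL: r9 still holds the initial alignment offset and rcx the index within the current chunk, so two adds point rsi back at the precise next character to copy. The equivalent pointer arithmetic, with hypothetical names for the register values:

    #include <stdint.h>

    /* add %r9, %rsi; add %rcx, %rsi (lines 1900-1909) */
    static inline const char *exact_src(const char *aligned_base,
                                        uintptr_t r9_off, uintptr_t rcx_idx)
    {
        return aligned_base + r9_off + rcx_idx;
    }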
1983 mov (%rsi), %rcx
2023 mov (%rsi), %cl
2034 mov (%rsi), %cx
2045 mov (%rsi), %cx
2047 mov 1(%rsi), %cx
2058 mov (%rsi), %ecx
2069 mov (%rsi), %ecx
2071 mov 1(%rsi), %edx
2082 mov (%rsi), %ecx
2084 mov 2(%rsi), %edx
2095 mov (%rsi), %ecx
2097 mov 3(%rsi), %edx
2108 mov (%rsi), %rcx
2110 mov 5(%rsi), %edx
2140 mov (%rsi), %rcx
2142 mov 8(%rsi), %rdx
2153 mov (%rsi), %rcx
2155 mov 6(%rsi), %edx
2166 mov (%rsi), %rcx
2168 mov 7(%rsi), %edx
2179 mov (%rsi), %rcx
2181 mov 8(%rsi), %edx
2192 mov (%rsi), %rcx
2194 mov 5(%rsi), %rcx
2205 mov (%rsi), %rcx
2207 mov 6(%rsi), %rcx
2218 mov (%rsi), %rcx
2220 mov 7(%rsi), %rcx
2251 mov (%rsi), %rcx
2253 mov 8(%rsi), %rdx
2255 mov 16(%rsi), %rcx
2266 mov (%rsi), %rcx
2268 mov 8(%rsi), %rdx
2270 mov 16(%rsi), %cl
2281 mov (%rsi), %rcx
2283 mov 8(%rsi), %rdx
2285 mov 16(%rsi), %cx
2296 mov (%rsi), %rcx
2298 mov 8(%rsi), %rdx
2300 mov 15(%rsi), %ecx
2311 mov (%rsi), %rcx
2313 mov 8(%rsi), %rdx
2315 mov 16(%rsi), %ecx
2326 mov (%rsi), %rcx
2328 mov 8(%rsi), %rdx
2330 mov 13(%rsi), %rcx
2341 mov (%rsi), %rcx
2343 mov 8(%rsi), %rdx
2345 mov 14(%rsi), %rcx
2356 mov (%rsi), %rcx
2358 mov 8(%rsi), %rdx
2360 mov 15(%rsi), %rcx
2388 mov (%rsi), %rcx
2390 mov 8(%rsi), %rdx
2392 mov 16(%rsi), %rcx
2394 mov 24(%rsi), %rdx
2405 mov (%rsi), %rcx
2407 mov 8(%rsi), %rdx
2409 mov 16(%rsi), %rcx
2411 mov 21(%rsi), %edx
2422 mov (%rsi), %rcx
2424 mov 8(%rsi), %rdx
2426 mov 16(%rsi), %rcx
2428 mov 22(%rsi), %edx
2439 mov (%rsi), %rcx
2441 mov 8(%rsi), %rdx
2443 mov 16(%rsi), %rcx
2445 mov 23(%rsi), %edx
2456 mov (%rsi), %rcx
2458 mov 8(%rsi), %rdx
2460 mov 16(%rsi), %rcx
2462 mov 24(%rsi), %edx
2473 mov (%rsi), %rcx
2475 mov 8(%rsi), %rdx
2477 mov 16(%rsi), %rcx
2479 mov 21(%rsi), %rdx
2490 mov (%rsi), %rcx
2492 mov 8(%rsi), %rdx
2494 mov 16(%rsi), %rcx
2496 mov 22(%rsi), %rdx
2507 mov (%rsi), %rcx
2509 mov 8(%rsi), %rdx
2511 mov 16(%rsi), %rcx
2513 mov 23(%rsi), %rdx
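Lines 1983-2513 form the exit jump table: one case per residual length, each copying the remaining bytes (terminator included) with the widest loads that fit. Where the remainder is not a neat sum of power-of-two sizes, the loads simply overlap; for a 13-byte remainder, for instance, lines 2192-2194 issue 8-byte loads at offsets 0 and 5, re-reading bytes 5-7 instead of spending extra instructions on a 4+1 split. A C sketch of that one case, with memcpy standing in for the raw register moves (the name is mine):

    #include <string.h>

    /* Mirrors lines 2192-2194: copy a 13-byte remainder with two
     * 8-byte transfers that overlap by 3 bytes. */
    static void copy_tail13(char *dst, const char *src)
    {
        unsigned long long lo, hi;
        memcpy(&lo, src,     8);   /* mov  (%rsi), %rcx */
        memcpy(&hi, src + 5, 8);   /* mov 5(%rsi), %rcx */
        memcpy(dst,     &lo, 8);
        memcpy(dst + 5, &hi, 8);
    }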