Lines Matching defs:in

6  * For conditions of distribution and use, see copyright notice in zlib.h
16 * (8K-32K worked best for my 256K cpu cache) and how much overhead there is in
20 * I am confident that this version will work in the general case, but I have
30 * ID bit on eflags and then use the cpuid instruction) is used in many
41 * MMX mode because many of its x86 ALU instructions execute in .5 cycles and
43 * the input stream since the MMX code grabs bits in chunks of 32, which
51 * structure offsets which are hard-coded in this file. This was last tested
52 * with zlib-1.2.0, which is currently in beta testing; newer versions of this
140 * struct z_stream offsets, in zlib.h
150 * struct inflate_state offsets, in inflate.h
177 #define in 44 /* unsigned char* */
189 * typedef enum inflate_mode consts, in inflate.h
191 #define INFLATE_MODE_TYPE 11 /* state->mode flags enum-ed in inflate.h */
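
(A side note on the hard-coded offsets and constants above: as the comment at lines 51-52 warns, they are only valid for the zlib version they were derived from. The small helper below is not part of inffast.S; it is an assumed sketch that simply prints the real z_stream member offsets with offsetof so they can be compared against the #defines in the .S file. Checking the inflate_state offsets and the inflate_mode values the same way would additionally need zlib's private inflate.h.)

    #include <stddef.h>
    #include <stdio.h>
    #include "zlib.h"

    int main(void)
    {
        /* print the actual offsets of the z_stream members the .S file indexes */
        printf("next_in   %lu\n", (unsigned long)offsetof(z_stream, next_in));
        printf("avail_in  %lu\n", (unsigned long)offsetof(z_stream, avail_in));
        printf("next_out  %lu\n", (unsigned long)offsetof(z_stream, next_out));
        printf("avail_out %lu\n", (unsigned long)offsetof(z_stream, avail_out));
        printf("state     %lu\n", (unsigned long)offsetof(z_stream, state));
        return 0;
    }
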
242 /* in = strm->next_in;
244 * last = in + strm->avail_in - 11;
254 movl %eax, in(%esp)
319 movl in(%esp), in_r
322 ja .L_align_long /* if in < last */
324 addl $11, %ecx /* ecx = &in[ avail_in ] */
329 rep movsb /* memcpy( buf, in, avail_in ) */
333 leal buf(%esp), in_r /* in = buf */
334 movl in_r, last(%esp) /* last = in, do just one iteration */
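
(The instructions above implement the tail-input trick: once in has caught up with last, 11 or fewer input bytes remain, so they are copied into a zero-padded stack buffer (12 bytes assumed here, one more than the 11-byte lookahead) and last is pointed at in so the decode loop makes exactly one more pass without reading past the caller's buffer. A minimal C sketch using the buf/in/last/avail_in names from the comments; the function wrapper and parameter types are assumed:)

    #include <string.h>

    static void use_tail_buf(const unsigned char **in, const unsigned char **last,
                             unsigned avail_in, unsigned char buf[12])
    {
        memcpy(buf, *in, avail_in);               /* memcpy( buf, in, avail_in ) */
        memset(buf + avail_in, 0, 12 - avail_in); /* zero-pad the unread part */
        *in = buf;                                /* in = buf */
        *last = buf;                              /* last = in, one more iteration */
    }
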
387 cmpl $0x756e6547, %ebx /* check for GenuineIntel in ebx,ecx,edx */
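
(Line 387 is part of the runtime MMX probe: the .S toggles the ID bit in eflags to see whether cpuid is available, compares the "GenuineIntel" vendor string in %ebx/%edx/%ecx, and then tests the MMX feature bit from cpuid leaf 1. A hedged C equivalent using GCC/Clang's <cpuid.h>, an assumed helper rather than the eflags method the file itself uses:)

    #include <cpuid.h>
    #include <string.h>

    static int cpu_has_mmx(void)
    {
        unsigned int eax, ebx, ecx, edx;
        char vendor[13];

        if (!__get_cpuid(0, &eax, &ebx, &ecx, &edx))
            return 0;                      /* cpuid not available */
        memcpy(vendor + 0, &ebx, 4);       /* "Genu" == 0x756e6547 */
        memcpy(vendor + 4, &edx, 4);       /* "ineI" */
        memcpy(vendor + 8, &ecx, 4);       /* "ntel" */
        vendor[12] = '\0';
        if (strcmp(vendor, "GenuineIntel") != 0)
            return 0;
        if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
            return 0;
        return (edx & (1u << 23)) != 0;    /* CPUID.1:EDX bit 23 = MMX */
    }
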
426 /* while (in < last && out < end)
435 /* regs: %esi = in, %ebp = hold, %bl = bits, %edi = out
439 * hold |= *((unsigned short *)in)++ << bits;
448 lodsw /* ax = *(ushort *)in++ */
452 orl %eax, hold_r /* hold |= *((ushort *)in)++ << bits */
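
(The lodsw/shll/orl sequence above is the bit-buffer refill: whenever too few bits are left, a 16-bit chunk is read from in and ORed into hold above the bits already held. A C sketch of the same step, with memcpy standing in for the unaligned little-endian load that lodsw performs; names follow the comments and the wrapper is assumed:)

    #include <stdint.h>
    #include <string.h>

    /* call only while bits <= 15, so the shifted chunk fits in the 32-bit hold */
    static const unsigned char *refill16(const unsigned char *in,
                                         uint32_t *hold, unsigned *bits)
    {
        uint16_t chunk;
        memcpy(&chunk, in, sizeof(chunk));  /* chunk = *(ushort *)in (little endian) */
        in += 2;
        *hold |= (uint32_t)chunk << *bits;  /* hold |= *((ushort *)in)++ << bits */
        *bits += 16;                        /* bits += 16 */
        return in;
    }
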
461 /* regs: %esi = in, %ebp = hold, %bl = bits, %edi = out
484 /* regs: %esi = in, %ebp = hold, %bl = bits, %edi = out, %edx = len
491 * hold |= *((unsigned short *)in)++ << bits;
511 movb %cl, %ch /* stash op in ch, freeing cl */
513 lodsw /* ax = *(ushort *)in++ */
517 orl %eax, hold_r /* hold |= *((ushort *)in)++ << bits */
534 /* regs: %esi = in, %ebp = hold, %bl = bits, %edi = out, %edx = dist
537 * hold |= *((unsigned short *)in)++ << bits;
551 lodsw /* ax = *(ushort *)in++ */
555 orl %eax, hold_r /* hold |= *((ushort *)in)++ << bits */
575 * hold |= *((unsigned short *)in)++ << bits;
591 movb %cl, %ch /* stash op in ch, freeing cl */
593 lodsw /* ax = *(ushort *)in++ */
597 orl %eax, hold_r /* hold |= *((ushort *)in)++ << bits */
623 movl in_r, in(%esp) /* save in so from can use its reg */
645 movl in(%esp), in_r /* move in back to %esi, toss from */
848 /* regs: %esi = from (in is saved at in(%esp)), %ebp = hold, %bl = bits, %edi = out
856 * } while (in < last && out < end);
863 movl in(%esp), in_r /* move in back to %esi, toss from */
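
(The saves and reloads of in around lines 623-863 bracket the match copy: in is spilled to the stack so from can borrow %esi, the matched bytes are copied, and in is reloaded once the copy is done. A simplified C sketch of the copy itself, covering only the common case where the whole match lies inside the current output buffer (the real code also copies out of the sliding window); the function wrapper is assumed:)

    static void copy_match(unsigned char **out_p, unsigned dist, unsigned len)
    {
        unsigned char *out  = *out_p;
        unsigned char *from = out - dist;  /* from = out - dist */
        do {
            *out++ = *from++;              /* byte-at-a-time, may overlap when dist < len */
        } while (--len);
        *out_p = out;
    }
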
904 /* while (in < last && out < end)
923 por %mm7, hold_mm /* hold_mm |= *((uint *)in)++ << bits */
971 por %mm7, hold_mm /* hold_mm |= *((uint *)in)++ << bits */
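
(In the MMX path the same refill is done 32 bits at a time: the por instructions above OR a 32-bit chunk into the 64-bit hold kept in an MMX register, which is why that path needs more input lookahead, as the line-43 comment notes. A C sketch under the same little-endian assumption; the wrapper is assumed:)

    #include <stdint.h>
    #include <string.h>

    /* call only while bits <= 32, so the shifted chunk fits in the 64-bit hold */
    static const unsigned char *refill32(const unsigned char *in,
                                         uint64_t *hold, unsigned *bits)
    {
        uint32_t chunk;
        memcpy(&chunk, in, sizeof(chunk));  /* chunk = *(uint *)in (little endian) */
        in += 4;
        *hold |= (uint64_t)chunk << *bits;  /* hold_mm |= *((uint *)in)++ << bits */
        *bits += 32;                        /* bits += 32 */
        return in;
    }
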
1002 movl in_r, in(%esp) /* save in so from can use its reg */
1024 movl in(%esp), in_r /* move in back to %esi, toss from */
1160 movl in(%esp), in_r /* move in back to %esi, toss from */
1209 movl in(%esp), in_r /* from_r has in's reg, put in back */
1231 * bits = %ebp when mmx, and %ebx when non-mmx
1232 * hold = %hold_mm when mmx, and %ebp when non-mmx
1233 * in = %esi
1256 * in -= len;
1261 * strm->next_in = in;
1279 subl %ebx, in_r /* in -= buf */
1282 addl %ebx, in_r /* in += strm->next_in */
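
(The cleanup above does two things: whole bytes still sitting unread in the bit buffer are handed back to the input pointer, and if the final bytes were decoded out of the local buf copy, the offset into buf is translated back onto the caller's next_in. A C sketch with the names from the comments; the wrapper, the used_buf flag, and the parameter types are assumed, and the bits/hold adjustments that accompany this step in inffast.c are included for completeness:)

    static const unsigned char *restore_in(const unsigned char *in,
                                           const unsigned char *buf, int used_buf,
                                           const unsigned char *next_in,
                                           unsigned long *hold, unsigned *bits)
    {
        unsigned len = *bits >> 3;          /* len = bits >> 3 */
        in    -= len;                       /* in -= len */
        *bits -= len << 3;                  /* bits -= len << 3 */
        *hold &= (1UL << *bits) - 1;        /* drop the returned bytes from hold */
        if (used_buf)
            in = next_in + (in - buf);      /* in -= buf; in += strm->next_in */
        return in;                          /* caller stores this to strm->next_in */
    }
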
1317 /* strm->avail_in = in < last ? 11 + (last - in) : 11 - (in - last) */
1320 jbe .L_last_is_smaller /* if (in >= last) */
1322 subl in_r, last_r /* last -= in */
1327 subl last_r, in_r /* in -= last */
1328 negl in_r /* in = -in */
1329 addl $11, in_r /* in += 11 */
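
(Lines 1317-1329 recompute avail_in from the updated in. Since last was set to next_in + avail_in - 11 on entry, the bytes not yet consumed are always (last + 11) - in; the two branches just keep each pointer subtraction non-negative, matching the ternary in the line-1317 comment. A small C sketch, wrapper assumed:)

    static unsigned remaining_avail_in(const unsigned char *in,
                                       const unsigned char *last)
    {
        return (unsigned)(in < last ? 11 + (last - in)   /* in still short of last */
                                    : 11 - (in - last)); /* in ran past last (by at most 11) */
    }
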