#include <stddef.h>
#include "libavcodec/cabac.h"
+#include "cabac.h"
//FIXME use some macros to avoid duplicating get_cabac (cannot be done yet
//as that would make optimization work hard)
uint8_t *significant_coeff_ctx_base,
int *index, x86_reg last_off){
void *end= significant_coeff_ctx_base + max_coeff - 1;
- int minusstart= -(int)significant_coeff_ctx_base;
- int minusindex= 4-(int)index;
- int coeff_count;
- int low;
+ int minusstart= -(intptr_t)significant_coeff_ctx_base;
+ int minusindex= 4-(intptr_t)index;
+ int bit;
+ x86_reg coeff_count;
__asm__ volatile(
- "movl %a9(%4), %%esi \n\t"
- "movl %a10(%4), %3 \n\t"
-
"2: \n\t"
- BRANCHLESS_GET_CABAC("%%edx", "%4", "(%1)", "%3",
- "%w3", "%%esi", "%%eax", "%%al", "%a11")
+ BRANCHLESS_GET_CABAC("%4", "%6", "(%1)", "%3",
+ "%w3", "%5", "%k0", "%b0", "%a11")
- "test $1, %%edx \n\t"
+ "test $1, %4 \n\t"
" jz 3f \n\t"
- "add %8, %1 \n\t"
+ "add %10, %1 \n\t"
- BRANCHLESS_GET_CABAC("%%edx", "%4", "(%1)", "%3",
- "%w3", "%%esi", "%%eax", "%%al", "%a11")
+ BRANCHLESS_GET_CABAC("%4", "%6", "(%1)", "%3",
+ "%w3", "%5", "%k0", "%b0", "%a11")
- "sub %8, %1 \n\t"
- "mov %2, %%"REG_a" \n\t"
- "movl %5, %%ecx \n\t"
+ "sub %10, %1 \n\t"
+ "mov %2, %0 \n\t"
+ "movl %7, %%ecx \n\t"
"add %1, %%"REG_c" \n\t"
- "movl %%ecx, (%%"REG_a") \n\t"
+ "movl %%ecx, (%0) \n\t"
- "test $1, %%edx \n\t"
+ "test $1, %4 \n\t"
" jnz 4f \n\t"
- "add $4, %%"REG_a" \n\t"
- "mov %%"REG_a", %2 \n\t"
+ "add $4, %2 \n\t"
"3: \n\t"
"add $1, %1 \n\t"
- "cmp %6, %1 \n\t"
+ "cmp %8, %1 \n\t"
" jb 2b \n\t"
- "mov %2, %%"REG_a" \n\t"
- "movl %5, %%ecx \n\t"
+ "mov %2, %0 \n\t"
+ "movl %7, %%ecx \n\t"
"add %1, %%"REG_c" \n\t"
- "movl %%ecx, (%%"REG_a") \n\t"
+ "movl %%ecx, (%0) \n\t"
"4: \n\t"
- "add %7, %%eax \n\t"
- "shr $2, %%eax \n\t"
-
- "movl %%esi, %a9(%4) \n\t"
- "movl %3, %a10(%4) \n\t"
- :"=&a"(coeff_count), "+r"(significant_coeff_ctx_base), "+m"(index),
- "=&r"(low)
+ "add %9, %k0 \n\t"
+ "shr $2, %k0 \n\t"
+ :"=&q"(coeff_count), "+r"(significant_coeff_ctx_base), "+m"(index),
+ "+&r"(c->low), "=&r"(bit), "+&r"(c->range)
:"r"(c), "m"(minusstart), "m"(end), "m"(minusindex), "m"(last_off),
- "i"(offsetof(CABACContext, range)), "i"(offsetof(CABACContext, low)),
"i"(offsetof(CABACContext, bytestream))
- : "%"REG_c, "%edx", "%esi", "memory"
+ : "%"REG_c, "memory"
);
return coeff_count;
}
// NOTE(review): this region is a unified-diff hunk ('-' = removed line,
// '+' = added line, bare = unchanged context), NOT compilable C.  It shows
// decode_significance_8x8_x86() being reworked so the inline asm no longer
// hard-codes scratch registers (%%esi/%%edx/%%eax/%%edi) but instead lets
// the compiler allocate them via asm operands.  Apply with a patch tool;
// do not feed this text to a C compiler as-is.
static int decode_significance_8x8_x86(CABACContext *c,
uint8_t *significant_coeff_ctx_base,
// Interface change in the patch: the 'x86_reg last_off' offset argument is
// replaced by an explicit 'uint8_t *last_coeff_ctx_base' pointer, and the
// '(int)index' cast becomes '(intptr_t)index' — the (int) cast truncates
// pointers on 64-bit targets, so the new form is the 64-bit-safe one.
- int *index, x86_reg last_off, const uint8_t *sig_off){
- int minusindex= 4-(int)index;
- int coeff_count;
- int low;
+ int *index, uint8_t *last_coeff_ctx_base, const uint8_t *sig_off){
+ int minusindex= 4-(intptr_t)index;
+ int bit;
+ x86_reg coeff_count;
x86_reg last=0;
+ x86_reg state;
__asm__ volatile(
// Old code loaded c->range / c->low by hand through offsetof operands
// (%a9/%a10) into fixed registers and stored them back before exiting;
// the new code binds c->low and c->range directly as "+&r" read/write
// operands (see the constraint lists below), so these explicit moves go away.
- "movl %a9(%4), %%esi \n\t"
- "movl %a10(%4), %3 \n\t"
-
- "mov %1, %%"REG_D" \n\t"
+ "mov %1, %6 \n\t"
// Label 2: main loop, one significant_coeff_flag decode per iteration.
"2: \n\t"
- "mov %7, %%"REG_a" \n\t"
- "movzbl (%%"REG_a", %%"REG_D"), %%edi \n\t"
- "add %6, %%"REG_D" \n\t"
+ "mov %10, %0 \n\t"
+ "movzbl (%0, %6), %k6 \n\t"
+ "add %9, %6 \n\t"
// BRANCHLESS_GET_CABAC arguments change from hard-coded registers
// (%%edx, %%esi, %%eax, %%al) to operand references (%4, %5, %k0, %b0);
// the bytestream offsetof operand is renumbered %a11 -> %a12 because two
// operands were added to the constraint lists.
- BRANCHLESS_GET_CABAC("%%edx", "%4", "(%%"REG_D")", "%3",
- "%w3", "%%esi", "%%eax", "%%al", "%a11")
+ BRANCHLESS_GET_CABAC("%4", "%7", "(%6)", "%3",
+ "%w3", "%5", "%k0", "%b0", "%a12")
- "mov %1, %%edi \n\t"
- "test $1, %%edx \n\t"
+ "mov %1, %k6 \n\t"
+ "test $1, %4 \n\t"
" jz 3f \n\t"
// Coefficient significant: decode last_coeff_flag using the
// last_coeff_flag_offset_8x8 table.  Note the old code added two offsets
// (%6 then %8) while the new code adds a single pointer-based offset (%11)
// — consistent with last_off being replaced by last_coeff_ctx_base.
- "movzbl "MANGLE(last_coeff_flag_offset_8x8)"(%%edi), %%edi\n\t"
- "add %6, %%"REG_D" \n\t"
- "add %8, %%"REG_D" \n\t"
+ "movzbl "MANGLE(last_coeff_flag_offset_8x8)"(%k6), %k6\n\t"
+ "add %11, %6 \n\t"
- BRANCHLESS_GET_CABAC("%%edx", "%4", "(%%"REG_D")", "%3",
- "%w3", "%%esi", "%%eax", "%%al", "%a11")
+ BRANCHLESS_GET_CABAC("%4", "%7", "(%6)", "%3",
+ "%w3", "%5", "%k0", "%b0", "%a12")
// Store the current position through the *index output list.
- "mov %2, %%"REG_a" \n\t"
- "mov %1, %%edi \n\t"
- "movl %%edi, (%%"REG_a") \n\t"
+ "mov %2, %0 \n\t"
+ "mov %1, %k6 \n\t"
+ "movl %k6, (%0) \n\t"
- "test $1, %%edx \n\t"
+ "test $1, %4 \n\t"
" jnz 4f \n\t"
// New code advances the index pointer operand %2 in one instruction
// instead of the old load/add/store through %%eax.
- "add $4, %%"REG_a" \n\t"
- "mov %%"REG_a", %2 \n\t"
+ "add $4, %2 \n\t"
// Label 3: not significant (or not last) — step to the next of the 63
// scan positions and loop.
"3: \n\t"
- "addl $1, %%edi \n\t"
- "mov %%edi, %1 \n\t"
- "cmpl $63, %%edi \n\t"
+ "addl $1, %k6 \n\t"
+ "mov %k6, %1 \n\t"
+ "cmpl $63, %k6 \n\t"
" jb 2b \n\t"
// Fell out of the loop: record the final position as well.
- "mov %2, %%"REG_a" \n\t"
- "movl %%edi, (%%"REG_a") \n\t"
+ "mov %2, %0 \n\t"
+ "movl %k6, (%0) \n\t"
// Label 4: epilogue — turn the accumulated index-pointer delta into the
// coefficient count via (value + minusindex) >> 2 (each stored index is 4
// bytes).  The explicit store-back of range/low to the CABACContext is
// gone; the "+&r"(c->low) / "+&r"(c->range) constraints below make the
// compiler perform it.
"4: \n\t"
- "addl %5, %%eax \n\t"
- "shr $2, %%eax \n\t"
-
- "movl %%esi, %a9(%4) \n\t"
- "movl %3, %a10(%4) \n\t"
- :"=&a"(coeff_count),"+m"(last), "+m"(index), "=&r"(low)
- :"r"(c), "m"(minusindex), "m"(significant_coeff_ctx_base), "m"(sig_off), "m"(last_off),
- "i"(offsetof(CABACContext, range)), "i"(offsetof(CABACContext, low)),
+ "addl %8, %k0 \n\t"
+ "shr $2, %k0 \n\t"
// Constraint changes: coeff_count moves from the fixed "=&a" (eax) to any
// byte-addressable register "=&q"; c->low and c->range become direct
// read/write register operands; two fresh outputs ('bit', 'state') give
// the asm its scratch registers instead of clobbering edx/esi/edi.
+ :"=&q"(coeff_count),"+m"(last), "+m"(index), "+&r"(c->low), "=&r"(bit),
+ "+&r"(c->range), "=&r"(state)
+ :"r"(c), "m"(minusindex), "m"(significant_coeff_ctx_base), "m"(sig_off), "m"(last_coeff_ctx_base),
"i"(offsetof(CABACContext, bytestream))
// Clobber list shrinks accordingly: %edx/%esi/%edi no longer hard-coded.
- : "%"REG_c, "%edx", "%esi", "%"REG_D, "memory"
+ : "%"REG_c, "memory"
);
return coeff_count;
}