# Copyright 2005 Chris Thomasson
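#
# i686 (32-bit x86, GAS/AT&T syntax) lock-free primitives: double-width
# CAS, an MPMC stack, SMR (hazard-pointer) helpers, a dummy-node SPSC
# queue, and memory-barrier / atomic-RMW helpers. The C prototypes shown
# in the comments below are assumptions inferred from the cdecl argument
# offsets; they are not part of the original source.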


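# Assumed cdecl prototype (parameter names are illustrative):
#
#   int np_ac_i686_atomic_dwcas_fence(volatile void *dest,
#                                     void *comparand,
#                                     const void *exchange);
#
# Double-width (64-bit) CAS via LOCK CMPXCHG8B. Returns 0 on success; on
# failure the value observed at dest is written back into *comparand and
# the function returns 1.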
.align 16
.globl np_ac_i686_atomic_dwcas_fence
np_ac_i686_atomic_dwcas_fence:
  pushl %esi
  pushl %ebx
  # after the two pushes, the args sit at 12/16/20(%esp)
  movl 16(%esp), %esi      # esi = comparand
  movl (%esi), %eax        # edx:eax = expected 64-bit value
  movl 4(%esi), %edx
  movl 20(%esp), %esi      # esi = exchange
  movl (%esi), %ebx        # ecx:ebx = replacement 64-bit value
  movl 4(%esi), %ecx
  movl 12(%esp), %esi      # esi = dest
  lock cmpxchg8b (%esi)    # store ecx:ebx if *dest == edx:eax
  jne np_ac_i686_atomic_dwcas_fence_fail
  xorl %eax, %eax          # success: return 0
  popl %ebx
  popl %esi
  ret

np_ac_i686_atomic_dwcas_fence_fail:
  movl 16(%esp), %esi      # report the observed value back through comparand
  movl %eax, (%esi)
  movl %edx, 4(%esi)
  movl $1, %eax            # failure: return 1
  popl %ebx
  popl %esi
  ret




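# Assumed cdecl prototype (names illustrative):
#
#   void ac_i686_stack_mpmc_push_cas(volatile void *stack, void *node);
#
# Lock-free MPMC stack push: links node->next (the node's first word) to
# the observed head, then installs the node with a single-word CAS,
# retrying until it succeeds.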
.align 16
.globl ac_i686_stack_mpmc_push_cas
ac_i686_stack_mpmc_push_cas:
  movl 4(%esp), %edx       # edx = &stack->head
  movl (%edx), %eax        # eax = observed head
  movl 8(%esp), %ecx       # ecx = node to push

ac_i686_stack_mpmc_push_cas_retry:
  movl %eax, (%ecx)        # node->next = observed head
  lock cmpxchgl %ecx, (%edx)            # head = node if head still == eax
  jne ac_i686_stack_mpmc_push_cas_retry # eax now holds the fresh head; retry
  ret
 



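# Assumed cdecl prototype (names illustrative):
#
#   void *np_ac_i686_lfgc_smr_stack_mpmc_pop_dwcas(volatile void *stack,
#                                                  void **hazard);
#
# MPMC stack pop protected by SMR (hazard pointers). The observed head is
# published in *hazard and re-validated after an mfence before it is
# dereferenced; the pop itself is a DWCAS that advances the head and bumps
# the adjacent ABA counter together. Returns the popped node, or NULL if
# the stack was empty; *hazard is cleared before returning.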
.align 16
.globl np_ac_i686_lfgc_smr_stack_mpmc_pop_dwcas
np_ac_i686_lfgc_smr_stack_mpmc_pop_dwcas:
  pushl %esi
  pushl %ebx

np_ac_i686_lfgc_smr_stack_mpmc_pop_dwcas_reload:
  movl 12(%esp), %esi      # esi = stack anchor {head, aba}
  movl 4(%esi), %edx       # edx = ABA counter
  movl (%esi), %eax        # eax = observed head

np_ac_i686_lfgc_smr_stack_mpmc_pop_dwcas_retry:
  movl 16(%esp), %ebx      # ebx = hazard-pointer slot
  movl %eax, (%ebx)        # publish the observed head
  mfence                   # order the publish before the re-check
  cmpl (%esi), %eax        # head changed since the read?
  jne np_ac_i686_lfgc_smr_stack_mpmc_pop_dwcas_reload
  testl %eax, %eax         # empty stack?
  je np_ac_i686_lfgc_smr_stack_mpmc_pop_dwcas_fail
  movl (%eax), %ebx        # ebx = head->next (safe: head is hazard-protected)
  leal 1(%edx), %ecx       # ecx = aba + 1
  lock cmpxchg8b (%esi)    # swing the anchor to {next, aba + 1}
  jne np_ac_i686_lfgc_smr_stack_mpmc_pop_dwcas_retry # edx:eax reloaded; retry

np_ac_i686_lfgc_smr_stack_mpmc_pop_dwcas_fail:
  # shared epilogue: reached on success (eax = popped node) and on empty (eax = 0)
  movl 16(%esp), %esi
  xorl %ebx, %ebx
  movl %ebx, (%esi)        # clear the hazard pointer
  popl %ebx
  popl %esi
  ret




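# Assumed cdecl prototype (names illustrative):
#
#   void *np_ac_i686_stack_mpmc_pop_dwcas(volatile void *stack);
#
# MPMC stack pop without SMR protection: the observed head is dereferenced
# directly, so this is only safe when popped nodes are not reclaimed while
# a concurrent pop may still be touching them. Returns the popped node, or
# NULL if the stack was empty.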
.align 16
.globl np_ac_i686_stack_mpmc_pop_dwcas
np_ac_i686_stack_mpmc_pop_dwcas:
  pushl %esi
  pushl %ebx
  movl 12(%esp), %esi      # esi = stack anchor {head, aba}
  movl 4(%esi), %edx       # edx = ABA counter
  movl (%esi), %eax        # eax = observed head

np_ac_i686_stack_mpmc_pop_dwcas_retry:
  testl %eax, %eax         # empty stack?
  je np_ac_i686_stack_mpmc_pop_dwcas_fail
  movl (%eax), %ebx        # ebx = head->next
  leal 1(%edx), %ecx       # ecx = aba + 1
  lock cmpxchg8b (%esi)    # swing the anchor to {next, aba + 1}
  jne np_ac_i686_stack_mpmc_pop_dwcas_retry # edx:eax reloaded; retry

np_ac_i686_stack_mpmc_pop_dwcas_fail:
  # shared epilogue: also the success path (eax = popped node, or 0 if empty)
  popl %ebx
  popl %esi
  ret




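# Assumed cdecl prototype (names illustrative):
#
#   void *ac_i686_lfgc_smr_activate(void **hazard, void *volatile *shared);
#
# Acquires *shared under hazard-pointer protection: publishes the observed
# pointer in *hazard, fences, and re-reads until the two agree. Returns the
# safely acquired pointer.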
.align 16
.globl ac_i686_lfgc_smr_activate
ac_i686_lfgc_smr_activate:
  movl 4(%esp), %edx       # edx = hazard-pointer slot
  movl 8(%esp), %ecx       # ecx = shared pointer location

ac_i686_lfgc_smr_activate_reload:
  movl (%ecx), %eax        # eax = observed shared pointer
  movl %eax, (%edx)        # publish it as a hazard pointer
  mfence                   # order the publish before the re-check
  cmpl (%ecx), %eax        # shared pointer changed in the window?
  jne ac_i686_lfgc_smr_activate_reload
  ret                      # return the acquired pointer in eax




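# Assumed cdecl prototype (name illustrative):
#
#   void ac_i686_lfgc_smr_deactivate(void **hazard);
#
# Clears a hazard-pointer slot with a plain store.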
.align 16
.globl ac_i686_lfgc_smr_deactivate
ac_i686_lfgc_smr_deactivate:
  movl 4(%esp), %ecx       # ecx = hazard-pointer slot
  xorl %eax, %eax
  movl %eax, (%ecx)        # clear the hazard pointer
  ret




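# Assumed cdecl prototype (names illustrative):
#
#   void ac_i686_queue_spsc_push(void *queue, void *node);
#
# Single-producer push onto a dummy-node SPSC queue: queue->tail->next is
# pointed at the node, then queue->tail is advanced. The store ordering
# relies on x86 not reordering stores with older stores, hence the note
# about a possible sfence on future processors.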
.align 16
.globl ac_i686_queue_spsc_push
ac_i686_queue_spsc_push:
  movl 4(%esp), %eax       # eax = queue {head, tail}
  movl 8(%esp), %ecx       # ecx = node to append
  movl 4(%eax), %edx       # edx = queue->tail
  # sfence may be needed here for future x86
  movl %ecx, (%edx)        # tail->next = node
  movl %ecx, 4(%eax)       # queue->tail = node
  ret




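# Assumed cdecl prototype (names illustrative):
#
#   void *ac_i686_queue_spsc_pop(void *queue);
#
# Single-consumer pop from the dummy-node SPSC queue: empty when head ==
# tail; otherwise the payload word (at offset 12 in a node, per this
# layout) migrates from head->next into the old dummy, head advances, and
# the old dummy is returned. Returns NULL when the queue is empty.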
.align 16
.globl ac_i686_queue_spsc_pop
ac_i686_queue_spsc_pop:
  pushl %ebx
  movl 8(%esp), %ecx       # ecx = queue {head, tail}
  movl (%ecx), %eax        # eax = queue->head (current dummy node)
  cmpl 4(%ecx), %eax       # head == tail means the queue is empty
  je ac_i686_queue_spsc_pop_failed
  movl (%eax), %edx        # edx = head->next
  # lfence may be needed here for future x86
  movl 12(%edx), %ebx      # copy the payload word from next...
  movl %edx, (%ecx)        # queue->head = next (next becomes the new dummy)
  movl %ebx, 12(%eax)      # ...into the old dummy, which is returned
  popl %ebx
  ret

ac_i686_queue_spsc_pop_failed:
  xorl %eax, %eax          # empty: return NULL
  popl %ebx
  ret




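# Assumed prototype: void ac_i686_mb_fence(void);  (full memory barrier)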
.align 16
.globl ac_i686_mb_fence
ac_i686_mb_fence:
  mfence                   # full memory barrier
  ret




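# Assumed prototype: void ac_i686_mb_naked(void);  (no hardware fence)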
.align 16
.globl ac_i686_mb_naked
ac_i686_mb_naked:
  ret                      # no fence; the call boundary acts as a compiler barrier




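# Assumed cdecl prototype (names illustrative):
#
#   void ac_i686_mb_store_fence(volatile void *dest, void *value);
#
# Fenced store: an mfence orders all prior memory operations before the
# store to *dest.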
.align 16
.globl ac_i686_mb_store_fence
ac_i686_mb_store_fence:
  movl 4(%esp), %ecx       # ecx = dest
  movl 8(%esp), %eax       # eax = value
  mfence                   # order all prior memory operations before the store
  movl %eax, (%ecx)
  ret




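# Assumed cdecl prototype (names illustrative):
#
#   void ac_i686_mb_store_naked(volatile void *dest, void *value);
#
# Plain unfenced store to *dest.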
.align 16
.globl ac_i686_mb_store_naked
ac_i686_mb_store_naked:
  movl 4(%esp), %ecx       # ecx = dest
  movl 8(%esp), %eax       # eax = value
  movl %eax, (%ecx)        # plain store, no fence
  ret




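# Assumed cdecl prototype (names illustrative):
#
#   void *ac_i686_mb_load_fence(volatile void *src);
#
# Fenced load: returns *src, with an mfence ordering the load before any
# subsequent memory operations.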
.align 16
.globl ac_i686_mb_load_fence
ac_i686_mb_load_fence:
  movl 4(%esp), %ecx       # ecx = src
  movl (%ecx), %eax        # eax = loaded value (returned)
  mfence                   # order the load before subsequent memory operations
  ret




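# Assumed cdecl prototype (names illustrative):
#
#   void *ac_i686_mb_load_naked(volatile void *src);
#
# Plain unfenced load of *src.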
.align 16
.globl ac_i686_mb_load_naked
ac_i686_mb_load_naked:
  movl 4(%esp), %ecx       # ecx = src
  movl (%ecx), %eax        # plain load, no fence
  ret




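# Assumed cdecl prototype (names illustrative):
#
#   void *ac_i686_atomic_xchg_fence(volatile void *dest, void *value);
#
# Atomic exchange; XCHG with a memory operand is implicitly locked and
# acts as a full barrier. Returns the previous value.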
.align 16
.globl ac_i686_atomic_xchg_fence
ac_i686_atomic_xchg_fence:
  movl 4(%esp), %ecx       # ecx = dest
  movl 8(%esp), %eax       # eax = new value
  xchgl %eax, (%ecx)       # implicitly locked; eax = previous value
  ret




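# Assumed cdecl prototype (names illustrative; 32-bit operands):
#
#   long ac_i686_atomic_xadd_fence(volatile long *dest, long addend);
#
# Atomic fetch-and-add; returns the previous value.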
.align 16
.globl ac_i686_atomic_xadd_fence
ac_i686_atomic_xadd_fence:
  movl 4(%esp), %ecx       # ecx = dest
  movl 8(%esp), %eax       # eax = addend
  lock xaddl %eax, (%ecx)  # eax = previous value
  ret




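# Assumed cdecl prototype (names illustrative; 32-bit operand):
#
#   long ac_i686_atomic_inc_fence(volatile long *dest);
#
# Atomic increment; returns the new (incremented) value.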
.align 16
.globl ac_i686_atomic_inc_fence
ac_i686_atomic_inc_fence:
  movl 4(%esp), %ecx       # ecx = dest
  movl $1, %eax
  lock xaddl %eax, (%ecx)  # eax = previous value
  incl %eax                # return the incremented (new) value
  ret




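# Assumed cdecl prototype (names illustrative; 32-bit operand):
#
#   long ac_i686_atomic_dec_fence(volatile long *dest);
#
# Atomic decrement; returns the new (decremented) value.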
.align 16
.globl ac_i686_atomic_dec_fence
ac_i686_atomic_dec_fence:
  movl 4(%esp), %ecx       # ecx = dest
  movl $-1, %eax
  lock xaddl %eax, (%ecx)  # eax = previous value
  decl %eax                # return the decremented (new) value
  ret




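# Assumed cdecl prototype (names illustrative):
#
#   void *ac_i686_atomic_cas_fence(volatile void *dest, void *comparand,
#                                  void *exchange);
#
# Single-word CAS; returns the value observed at dest, which equals the
# comparand exactly when the swap succeeded.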
.align 16
.globl ac_i686_atomic_cas_fence
ac_i686_atomic_cas_fence:
  movl 4(%esp), %ecx       # ecx = dest
  movl 8(%esp), %eax       # eax = comparand
  movl 12(%esp), %edx      # edx = exchange value
  lock cmpxchgl %edx, (%ecx) # eax = observed value
  ret