/*
 * Copyright 2013, Paweł Dziepak, pdziepak@quarnos.org.
 * Distributed under the terms of the MIT License.
 *
 * Copyright 2003, Marcus Overhagen. All rights reserved.
 * Distributed under the terms of the MIT License.
 *
 * Copyright 2001, Travis Geiselbrecht. All rights reserved.
 * Distributed under the terms of the NewOS License.
 */


/*
 * 32-bit x86 atomic primitives, AT&T (GAS) syntax, cdecl calling
 * convention: all arguments on the stack, 32-bit results in %eax,
 * 64-bit results in %edx:%eax.  %eax, %ecx and %edx are scratch;
 * %ebx and %esi are callee-saved and therefore pushed where used.
 *
 * "lock addl $0, (%esp)" is the classic full-barrier idiom that works
 * on every IA-32 CPU (mfence requires SSE2).  The 64-bit variants are
 * built on lock cmpxchg8b compare-and-swap loops: cmpxchg8b compares
 * %edx:%eax with the memory operand and, on match, stores %ecx:%ebx;
 * ZF reports whether the exchange happened.
 */


#include <asm_defs.h>


.text


/* void atomic_set(int32* value, int32 newValue) */
FUNCTION(atomic_set):
	movl	4(%esp), %edx		/* %edx = value */
	movl	8(%esp), %eax		/* %eax = newValue */
	lock
	addl	$0, (%esp)		/* full memory barrier before the store */
	movl	%eax, (%edx)		/* *value = newValue */
	ret
FUNCTION_END(atomic_set)


/* int32 atomic_get_and_set(int32* value, int32 newValue) */
FUNCTION(atomic_get_and_set):
	movl	4(%esp), %edx		/* %edx = value */
	movl	8(%esp), %eax		/* %eax = newValue */
	xchg	%eax, (%edx)		/* xchg with memory implies lock; old value -> %eax */
	ret
FUNCTION_END(atomic_get_and_set)


/* int32 atomic_test_and_set(int32* value, int32 newValue,
	int32 testAgainst) */
FUNCTION(atomic_test_and_set):
	movl	4(%esp), %edx		/* %edx = value */
	movl	8(%esp), %ecx		/* %ecx = newValue */
	movl	12(%esp), %eax		/* %eax = testAgainst */
	lock
	cmpxchgl	%ecx, (%edx)	/* if (*value == %eax) *value = %ecx; old -> %eax */
	ret
FUNCTION_END(atomic_test_and_set)


/* int32 atomic_add(int32* value, int32 addValue) */
FUNCTION(atomic_add):
	movl	4(%esp), %edx		/* %edx = value */
	movl	8(%esp), %eax		/* %eax = addValue */
	lock
	xaddl	%eax, (%edx)		/* *value += %eax; previous value -> %eax */
	ret
FUNCTION_END(atomic_add)


/* int32 atomic_and(int32* value, int32 andValue) */
FUNCTION(atomic_and):
	movl	4(%esp), %edx		/* %edx = value */
.Lretry_and:
	movl	8(%esp), %ecx		/* %ecx = andValue */
	movl	(%edx), %eax		/* %eax = current *value (also the CAS expectation) */
	andl	%eax, %ecx		/* %ecx = current & andValue */
	lock
	cmpxchgl	%ecx, (%edx)	/* install %ecx if *value still == %eax */
	jnz	.Lretry_and		/* someone raced us; reload and retry */
	ret				/* old value is in %eax */
FUNCTION_END(atomic_and)


/* int32 atomic_or(int32* value, int32 orValue) */
FUNCTION(atomic_or):
	movl	4(%esp), %edx		/* %edx = value */
.Lretry_or:
	movl	8(%esp), %ecx		/* %ecx = orValue */
	movl	(%edx), %eax		/* %eax = current *value (also the CAS expectation) */
	orl	%eax, %ecx		/* %ecx = current | orValue */
	lock
	cmpxchgl	%ecx, (%edx)	/* install %ecx if *value still == %eax */
	jnz	.Lretry_or		/* someone raced us; reload and retry */
	ret				/* old value is in %eax */
FUNCTION_END(atomic_or)


/* int32 atomic_get(int32* value) */
FUNCTION(atomic_get):
	movl	4(%esp), %edx		/* %edx = value */
	movl	(%edx), %eax		/* %eax = *value */
	lock
	addl	$0, (%esp)		/* full memory barrier after the load */
	ret
FUNCTION_END(atomic_get)


/* void atomic_set64(int64* value, int64 newValue) */
FUNCTION(atomic_set64):
	push	%esi			/* callee-saved */
	push	%ebx			/* callee-saved; arg offsets shift by 8 */
	movl	12(%esp), %esi		/* %esi = value */
	movl	16(%esp), %ebx		/* %ebx = newValue low */
	movl	20(%esp), %ecx		/* %ecx = newValue high */
.Lretry_set64:
	movl	(%esi), %eax		/* %edx:%eax = current *value ... */
	movl	4(%esi), %edx		/* ... used as the CAS expectation */
	lock
	cmpxchg8b	(%esi)		/* store %ecx:%ebx if *value unchanged */
	jnz	.Lretry_set64		/* raced; reload and retry */
	pop	%ebx
	pop	%esi
	ret
FUNCTION_END(atomic_set64)


/* void atomic_get_and_set64(int64* value, int64 newValue) */
FUNCTION(atomic_get_and_set64):
	push	%esi			/* callee-saved */
	push	%ebx			/* callee-saved; arg offsets shift by 8 */
	movl	12(%esp), %esi		/* %esi = value */
	movl	16(%esp), %ebx		/* %ebx = newValue low */
	movl	20(%esp), %ecx		/* %ecx = newValue high */
.Lretry_get_and_set64:
	movl	(%esi), %eax		/* %edx:%eax = current *value ... */
	movl	4(%esi), %edx		/* ... used as the CAS expectation */
	lock
	cmpxchg8b	(%esi)		/* store %ecx:%ebx if *value unchanged */
	jnz	.Lretry_get_and_set64	/* raced; reload and retry */
	pop	%ebx
	pop	%esi
	ret				/* old value is in %edx:%eax */
FUNCTION_END(atomic_get_and_set64)


/* int64 atomic_test_and_set64(int64* value, int64 newValue,
	int64 testAgainst) */
FUNCTION(atomic_test_and_set64):
	push	%esi			/* callee-saved */
	push	%ebx			/* callee-saved; arg offsets shift by 8 */
	movl	12(%esp), %esi		/* %esi = value */
	movl	16(%esp), %ebx		/* %ebx = newValue low */
	movl	20(%esp), %ecx		/* %ecx = newValue high */
	movl	24(%esp), %eax		/* %eax = testAgainst low */
	movl	28(%esp), %edx		/* %edx = testAgainst high */
	lock
	cmpxchg8b	(%esi)		/* single attempt; no retry by design */
	pop	%ebx
	pop	%esi
	ret				/* old value is in %edx:%eax */
FUNCTION_END(atomic_test_and_set64)


/* int64 atomic_add64(int64* value, int64 addValue) */
FUNCTION(atomic_add64):
	push	%esi			/* callee-saved */
	push	%ebx			/* callee-saved; arg offsets shift by 8 */
	movl	12(%esp), %esi		/* %esi = value */
.Lretry_add64:
	movl	(%esi), %eax		/* %edx:%eax = current *value */
	movl	4(%esi), %edx
	movl	%eax, %ebx		/* %ecx:%ebx = copy of current ... */
	movl	%edx, %ecx
	addl	16(%esp), %ebx		/* ... plus addValue (64-bit add */
	adcl	20(%esp), %ecx		/* with carry into the high half) */
	lock
	cmpxchg8b	(%esi)		/* store sum if *value unchanged */
	jnz	.Lretry_add64		/* raced; reload and retry */
	pop	%ebx
	pop	%esi
	ret				/* old value is in %edx:%eax */
FUNCTION_END(atomic_add64)


/* int64 atomic_and64(int64* value, int64 andValue) */
FUNCTION(atomic_and64):
	push	%esi			/* callee-saved */
	push	%ebx			/* callee-saved; arg offsets shift by 8 */
	movl	12(%esp), %esi		/* %esi = value */
.Lretry_and64:
	movl	(%esi), %eax		/* %edx:%eax = current *value */
	movl	4(%esi), %edx
	movl	%eax, %ebx		/* %ecx:%ebx = copy of current ... */
	movl	%edx, %ecx
	andl	16(%esp), %ebx		/* ... ANDed with andValue, */
	andl	20(%esp), %ecx		/* both halves independently */
	lock
	cmpxchg8b	(%esi)		/* store result if *value unchanged */
	jnz	.Lretry_and64		/* raced; reload and retry */
	pop	%ebx
	pop	%esi
	ret				/* old value is in %edx:%eax */
FUNCTION_END(atomic_and64)


/* int64 atomic_or64(int64* value, int64 orValue) */
FUNCTION(atomic_or64):
	push	%esi			/* callee-saved */
	push	%ebx			/* callee-saved; arg offsets shift by 8 */
	movl	12(%esp), %esi		/* %esi = value */
.Lretry_or64:
	movl	(%esi), %eax		/* %edx:%eax = current *value */
	movl	4(%esi), %edx
	movl	%eax, %ebx		/* %ecx:%ebx = copy of current ... */
	movl	%edx, %ecx
	orl	16(%esp), %ebx		/* ... ORed with orValue, */
	orl	20(%esp), %ecx		/* both halves independently */
	lock
	cmpxchg8b	(%esi)		/* store result if *value unchanged */
	jnz	.Lretry_or64		/* raced; reload and retry */
	pop	%ebx
	pop	%esi
	ret				/* old value is in %edx:%eax */
FUNCTION_END(atomic_or64)


/* int64 atomic_get64(int64* value) */
FUNCTION(atomic_get64):
	push	%esi			/* callee-saved */
	push	%ebx			/* callee-saved; arg offsets shift by 8 */
	movl	12(%esp), %esi		/* %esi = value */
.Lretry_get64:
	movl	(%esi), %eax		/* %edx:%eax = current *value */
	movl	4(%esi), %edx
	movl	%eax, %ebx		/* write back the same value, so the */
	movl	%edx, %ecx		/* CAS is a pure atomic 64-bit read */
	lock
	cmpxchg8b	(%esi)
	jnz	.Lretry_get64		/* raced; reload and retry */
	pop	%ebx
	pop	%esi
	ret				/* value is in %edx:%eax */
FUNCTION_END(atomic_get64)