// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Atomic primitives for the Go runtime, written in Go (Plan 9 style)
// assembler syntax. Pointers are loaded with MOVL (4 bytes) while 64-bit
// values use MOVQ, so this targets an ILP32-on-x86-64 configuration
// (presumably GOARCH=amd64p32 — TODO confirm against the build tags /
// filename, which are not visible here).
//
// Conventions used throughout:
//   - ptr/old/new/val/delta+N(FP) are the Go argument-frame slots; the
//     $0-K frame size in each TEXT directive is 0 bytes of locals and
//     K bytes of arguments+results.
//   - CMPXCHG compares its memory operand against AX implicitly and
//     sets ZF on success, hence the MOV into AX before each CAS and the
//     SETEQ into the bool result slot afterwards.
//   - XCHG with a memory operand is implicitly locked on x86, so the
//     Store/StorepNoWB/Store64 routines need no explicit LOCK prefix;
//     the XCHG also acts as the required store barrier.

#include "textflag.h"

// bool Cas(int32 *val, int32 old, int32 new)
// Atomically:
//	if(*val == old){
//		*val = new;
//		return 1;
//	} else
//		return 0;
TEXT runtime∕internal∕atomic·Cas(SB), NOSPLIT, $0-17
	MOVL	ptr+0(FP), BX
	MOVL	old+4(FP), AX		// CMPXCHG compares 0(BX) against AX
	MOVL	new+8(FP), CX
	LOCK
	CMPXCHGL	CX, 0(BX)
	SETEQ	ret+16(FP)		// ZF set iff the exchange happened
	RET

// uintptr is 32 bits here, so the int32 Cas works unchanged.
TEXT runtime∕internal∕atomic·Casuintptr(SB), NOSPLIT, $0-17
	JMP	runtime∕internal∕atomic·Cas(SB)

// On x86 every LOCK CMPXCHG is already a full barrier, so the
// release-ordered variant is the same code.
TEXT runtime∕internal∕atomic·CasRel(SB), NOSPLIT, $0-17
	JMP	runtime∕internal∕atomic·Cas(SB)

TEXT runtime∕internal∕atomic·Loaduintptr(SB), NOSPLIT, $0-12
	JMP	runtime∕internal∕atomic·Load(SB)

TEXT runtime∕internal∕atomic·Loaduint(SB), NOSPLIT, $0-12
	JMP	runtime∕internal∕atomic·Load(SB)

TEXT runtime∕internal∕atomic·Storeuintptr(SB), NOSPLIT, $0-8
	JMP	runtime∕internal∕atomic·Store(SB)

TEXT runtime∕internal∕atomic·Loadint64(SB), NOSPLIT, $0-16
	JMP	runtime∕internal∕atomic·Load64(SB)

TEXT runtime∕internal∕atomic·Xaddint64(SB), NOSPLIT, $0-24
	JMP	runtime∕internal∕atomic·Xadd64(SB)

// bool Cas64(uint64 *val, uint64 old, uint64 new)
// Atomically:
//	if(*val == old){
//		*val = new;
//		return 1;
//	} else {
//		return 0;
//	}
TEXT runtime∕internal∕atomic·Cas64(SB), NOSPLIT, $0-25
	MOVL	ptr+0(FP), BX
	MOVQ	old+8(FP), AX		// CMPXCHGQ compares 0(BX) against AX
	MOVQ	new+16(FP), CX
	LOCK
	CMPXCHGQ	CX, 0(BX)
	SETEQ	ret+24(FP)
	RET

// bool Casp1(void **val, void *old, void *new)
// Atomically:
//	if(*val == old){
//		*val = new;
//		return 1;
//	} else
//		return 0;
// Pointers are 4 bytes, so this is the 32-bit CAS again. No write
// barrier is performed here; callers are responsible for that.
TEXT runtime∕internal∕atomic·Casp1(SB), NOSPLIT, $0-17
	MOVL	ptr+0(FP), BX
	MOVL	old+4(FP), AX
	MOVL	new+8(FP), CX
	LOCK
	CMPXCHGL	CX, 0(BX)
	SETEQ	ret+16(FP)
	RET

// uint32 Xadd(uint32 volatile *val, int32 delta)
// Atomically:
//	*val += delta;
//	return *val;
TEXT runtime∕internal∕atomic·Xadd(SB), NOSPLIT, $0-12
	MOVL	ptr+0(FP), BX
	MOVL	delta+4(FP), AX
	MOVL	AX, CX			// keep a copy: XADD leaves the OLD value in AX
	LOCK
	XADDL	AX, 0(BX)
	ADDL	CX, AX			// old + delta = new value, per the contract
	MOVL	AX, ret+8(FP)
	RET

// uint64 Xadd64(uint64 volatile *val, int64 delta)
// Same as Xadd but for 64-bit values.
TEXT runtime∕internal∕atomic·Xadd64(SB), NOSPLIT, $0-24
	MOVL	ptr+0(FP), BX
	MOVQ	delta+8(FP), AX
	MOVQ	AX, CX			// save delta; XADDQ returns the old value in AX
	LOCK
	XADDQ	AX, 0(BX)
	ADDQ	CX, AX			// reconstruct the new value for the caller
	MOVQ	AX, ret+16(FP)
	RET

TEXT runtime∕internal∕atomic·Xadduintptr(SB), NOSPLIT, $0-12
	JMP	runtime∕internal∕atomic·Xadd(SB)

// uint32 Xchg(uint32 volatile *val, uint32 new)
// Atomically swaps *val and new, returning the old *val.
// XCHG with memory is implicitly locked, so no LOCK prefix is needed.
TEXT runtime∕internal∕atomic·Xchg(SB), NOSPLIT, $0-12
	MOVL	ptr+0(FP), BX
	MOVL	new+4(FP), AX
	XCHGL	AX, 0(BX)
	MOVL	AX, ret+8(FP)		// AX now holds the previous value
	RET

// uint64 Xchg64(uint64 volatile *val, uint64 new)
// 64-bit swap. The pointer must be 8-byte aligned; misalignment is
// turned into a deliberate nil-dereference crash rather than a silent
// (cross-cache-line, non-atomic-looking) operation.
TEXT runtime∕internal∕atomic·Xchg64(SB), NOSPLIT, $0-24
	MOVL	ptr+0(FP), BX
	MOVQ	new+8(FP), AX
	TESTL	$7, BX			// low 3 bits must be zero for 8-byte alignment
	JZ	2(PC)
	MOVL	0, BX // crash when unaligned
	XCHGQ	AX, 0(BX)
	MOVQ	AX, ret+16(FP)
	RET

TEXT runtime∕internal∕atomic·Xchguintptr(SB), NOSPLIT, $0-12
	JMP	runtime∕internal∕atomic·Xchg(SB)

// StorepNoWB performs *ptr = val with no write barrier.
// The locked XCHG makes the store sequentially consistent; the stored
// old value in AX is simply discarded.
TEXT runtime∕internal∕atomic·StorepNoWB(SB), NOSPLIT, $0-8
	MOVL	ptr+0(FP), BX
	MOVL	val+4(FP), AX
	XCHGL	AX, 0(BX)
	RET

// Store performs an atomic 32-bit store via implicitly-locked XCHG.
TEXT runtime∕internal∕atomic·Store(SB), NOSPLIT, $0-8
	MOVL	ptr+0(FP), BX
	MOVL	val+4(FP), AX
	XCHGL	AX, 0(BX)
	RET

// On x86 the plain Store is already a release store (and stronger).
TEXT runtime∕internal∕atomic·StoreRel(SB), NOSPLIT, $0-8
	JMP	runtime∕internal∕atomic·Store(SB)

// Store64 performs an atomic 64-bit store via implicitly-locked XCHGQ.
// NOTE(review): unlike Xchg64 there is no 8-byte alignment check here —
// presumably callers guarantee alignment; confirm against callers.
TEXT runtime∕internal∕atomic·Store64(SB), NOSPLIT, $0-16
	MOVL	ptr+0(FP), BX
	MOVQ	val+8(FP), AX
	XCHGQ	AX, 0(BX)
	RET

// void runtime∕internal∕atomic·Or8(byte volatile*, byte);
// Atomically: *ptr |= val.
TEXT runtime∕internal∕atomic·Or8(SB), NOSPLIT, $0-5
	MOVL	ptr+0(FP), BX
	MOVB	val+4(FP), AX
	LOCK
	ORB	AX, 0(BX)
	RET

// void runtime∕internal∕atomic·And8(byte volatile*, byte);
// Atomically: *ptr &= val.
TEXT runtime∕internal∕atomic·And8(SB), NOSPLIT, $0-5
	MOVL	ptr+0(FP), BX
	MOVB	val+4(FP), AX
	LOCK
	ANDB	AX, 0(BX)
	RET