/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2015 RehiveTech. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of RehiveTech nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _RTE_ATOMIC_ARM32_H_
#define _RTE_ATOMIC_ARM32_H_

#ifndef RTE_FORCE_INTRINSICS
#error Platform must be built with CONFIG_RTE_FORCE_INTRINSICS
#endif

#ifdef __cplusplus
extern "C" {
#endif

#include "generic/rte_atomic.h"

/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 */
#define rte_mb()  __sync_synchronize()

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
#define rte_wmb() do { asm volatile ("dmb st" : : : "memory"); } while (0)

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
#define rte_rmb() __sync_synchronize()

/* SMP barriers: on ARM32 these map directly to the general barriers. */
#define rte_smp_mb() rte_mb()
#define rte_smp_wmb() rte_wmb()
#define rte_smp_rmb() rte_rmb()

/* I/O barriers: ordering with respect to device (I/O) memory accesses. */
#define rte_io_mb() rte_mb()
#define rte_io_wmb() rte_wmb()
#define rte_io_rmb() rte_rmb()

/* Coherent I/O barriers: ordering for coherent (DMA-capable) memory. */
#define rte_cio_wmb() rte_wmb()
#define rte_cio_rmb() rte_rmb()

#ifdef __cplusplus
}
#endif

#endif /* _RTE_ATOMIC_ARM32_H_ */
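
/*
 * Illustrative usage sketch (not part of the original header): a minimal
 * producer/consumer pairing showing where rte_smp_wmb()/rte_smp_rmb()
 * belong. The variables `data` and `flag` are hypothetical names
 * introduced for this example only.
 *
 *   static uint32_t data;
 *   static volatile uint32_t flag;
 *
 *   // Producer core: publish data, then raise the flag. The write
 *   // barrier keeps the store to `data` ordered before the store
 *   // to `flag`.
 *   data = 42;
 *   rte_smp_wmb();
 *   flag = 1;
 *
 *   // Consumer core: wait for the flag, then read data. The read
 *   // barrier keeps the load of `flag` ordered before the load
 *   // of `data`.
 *   while (flag == 0)
 *       ;
 *   rte_smp_rmb();
 *   // At this point `data` is guaranteed to read 42.
 */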