blob: f0e222f413463cf0478c195930b6ba605d9b47a7 (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
|
/* Portions taken from Linux arch arm */
#ifndef __ASM_SYSTEM_H
#define __ASM_SYSTEM_H
#include <xen/lib.h>
#include <public/arch-arm.h>
/*
 * Single-instruction helpers. All but nop() carry a "memory" clobber so
 * the compiler will not cache memory values in registers, or reorder
 * memory accesses, across them.
 */
#define nop() \
asm volatile ( "nop" )
/* Send-event / wait-for-event / wait-for-interrupt hints. */
#define sev() asm volatile("sev" : : : "memory")
#define wfe() asm volatile("wfe" : : : "memory")
#define wfi() asm volatile("wfi" : : : "memory")
/* Instruction synchronization barrier. */
#define isb() asm volatile("isb" : : : "memory")
/*
 * Data synchronization / data memory barriers. "scope" is stringized and
 * pasted directly into the instruction as its qualifier, selecting the
 * shareability domain and access type (e.g. sy, st, ld, ish, ishst, ishld).
 */
#define dsb(scope) asm volatile("dsb " #scope : : : "memory")
#define dmb(scope) asm volatile("dmb " #scope : : : "memory")
/* Mandatory barriers: full-system scope DSBs. */
#define mb() dsb(sy)
#ifdef CONFIG_ARM_64
#define rmb() dsb(ld)
#else
#define rmb() dsb(sy) /* 32-bit has no ld variant. */
#endif
#define wmb() dsb(st)
/*
 * SMP barriers: DMBs restricted to the inner-shareable domain, which is
 * sufficient for ordering observed between CPUs.
 */
#define smp_mb() dmb(ish)
#ifdef CONFIG_ARM_64
#define smp_rmb() dmb(ishld)
#else
#define smp_rmb() dmb(ish) /* 32-bit has no ishld variant. */
#endif
#define smp_wmb() dmb(ishst)
/*
 * This is used to ensure the compiler did actually allocate the register we
 * asked it for some inline assembly sequences. Apparently we can't trust
 * the compiler from one version to another so a bit of paranoia won't hurt.
 * This string is meant to be concatenated with the inline asm string and
 * will cause compilation to stop on mismatch.
 * (for details, see gcc PR 15089)
 */
#define __asmeq(x, y) ".ifnc " x "," y " ; .err ; .endif\n\t"
/* Pull in the 32-bit or 64-bit specific system.h definitions. */
#if defined(CONFIG_ARM_32)
# include <asm/arm32/system.h>
#elif defined(CONFIG_ARM_64)
# include <asm/arm64/system.h>
#else
# error "unknown ARM variant"
#endif
/* Atomically add v to *x; maps to the GCC full-barrier __sync builtin. */
#define arch_fetch_and_add(x, v) __sync_fetch_and_add(x, v)
/* Barriers used by common spinlock code around lock acquire/release. */
#define arch_lock_acquire_barrier() smp_mb()
#define arch_lock_release_barrier() smp_mb()
/*
 * Low-level context switch between two vcpus (implemented elsewhere).
 * NOTE(review): presumably returns the previously-running vcpu — confirm
 * against the arch-specific implementation.
 */
extern struct vcpu *__context_switch(struct vcpu *prev, struct vcpu *next);
#endif /* __ASM_SYSTEM_H */
/*
* Local variables:
* mode: C
* c-file-style: "BSD"
* c-basic-offset: 4
* indent-tabs-mode: nil
* End:
*/
|