direct-io.hg

view xen/include/asm-powerpc/processor.h @ 11487:4fdf5151b187

[POWERPC] merge with xen-unstable.hg
Signed-off-by: Hollis Blanchard <hollisb@us.ibm.com>
author hollisb@localhost
date Mon Sep 18 12:48:56 2006 -0500 (2006-09-18)
parents bc349d862a5d
children a817acb39386

/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 * Copyright (C) IBM Corp. 2005, 2006
 *
 * Authors: Hollis Blanchard <hollisb@us.ibm.com>
 */

#ifndef _ASM_PROCESSOR_H_
#define _ASM_PROCESSOR_H_

#include <xen/config.h>
#include <asm/reg_defs.h>
#include <asm/msr.h>
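
/*
 * Added note, not in the original source: IOBMP_BYTES presumably mirrors
 * the x86 I/O-port permission bitmap size (8192 bytes = 65536 ports), and
 * IOBMP_INVALID_OFFSET the "no bitmap" sentinel, kept so the common Xen
 * interface stays uniform across architectures.
 */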
#define IOBMP_BYTES 8192
#define IOBMP_INVALID_OFFSET 0x8000

/* hrfid (hypervisor return from interrupt doubleword); most assemblers
 * do not know this instruction, so emit its raw encoding. */
#define HRFID .long 0x4c000224

#ifndef __ASSEMBLY__
#include <xen/types.h>

struct domain;
struct vcpu;
struct cpu_user_regs;
extern void show_registers(struct cpu_user_regs *);
extern void show_execution_state(struct cpu_user_regs *);
extern void show_backtrace(ulong sp, ulong lr, ulong pc);
extern unsigned int cpu_extent_order(void);
extern unsigned int cpu_default_rma_order_pages(void);
extern int cpu_rma_valid(unsigned int log);
extern uint cpu_large_page_orders(uint *sizes, uint max);
extern void cpu_initialize(int cpuid);
extern void cpu_init_vcpu(struct vcpu *);
extern void save_cpu_sprs(struct vcpu *);
extern void load_cpu_sprs(struct vcpu *);

/* XXX this could also land us in GDB */
#define dump_execution_state() BUG()

extern void __warn(char *file, int line);
#define WARN() __warn(__FILE__, __LINE__)
#define WARN_ON(_p) do { if (_p) WARN(); } while ( 0 )
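
/*
 * Added note, not in the original source: ARCH_HAS_PREFETCH is advertised
 * to common code, but the hook below is a no-op; a real implementation
 * could use dcbt (data cache block touch) to give the cache a hint.
 */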
#define ARCH_HAS_PREFETCH
static inline void prefetch(const void *x) {;}
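
/*
 * Added note, not in the original source: sync is a full memory barrier,
 * ordering all prior storage accesses before all later ones; isync waits
 * for preceding instructions to complete and discards any prefetched
 * instructions, making it context synchronizing.
 */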
static __inline__ void sync(void)
{
    __asm__ __volatile__ ("sync");
}

static __inline__ void isync(void)
{
    __asm__ __volatile__ ("isync");
}

static inline ulong mfmsr(void) {
    ulong msr;
    __asm__ __volatile__ ("mfmsr %0" : "=&r"(msr));
    return msr;
}

static inline void nop(void) {
    __asm__ __volatile__ ("nop");
}
#define cpu_relax() nop()

static inline unsigned int mftbu(void)
{
    unsigned int tbu;
    __asm__ __volatile__ ("mftbu %0" : "=r" (tbu));
    return tbu;
}

static inline unsigned int mftbl(void)
{
    unsigned int tbl;
    __asm__ __volatile__ ("mftbl %0" : "=r" (tbl));
    return tbl;
}
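
/*
 * Illustrative sketch, not part of the original header: combining mftbu()
 * and mftbl() into one 64-bit time-base value needs the classic
 * upper/lower/upper loop so a carry between the two reads is caught.
 * The helper name below is hypothetical.
 */
static inline u64 example_mftb64(void)
{
    unsigned int hi, lo;

    do {
        hi = mftbu();
        lo = mftbl();
    } while (mftbu() != hi);
    return ((u64)hi << 32) | lo;
}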
static inline unsigned int mfdec(void)
{
    unsigned int tmp;
    __asm__ __volatile__ ("mfdec %0" : "=r"(tmp));
    return tmp;
}
static inline void mtdec(unsigned int ticks)
{
    __asm__ __volatile__ ("mtdec %0" : : "r" (ticks));
}
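
/*
 * Added note, not in the original source: the decrementer counts down at
 * the time-base frequency and raises a decrementer interrupt when it goes
 * negative, so writing it with mtdec() effectively programs the next
 * timer tick.
 */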
static inline u32 mfpvr(void) {
    u32 pvr;
    asm volatile("mfpvr %0" : "=&r" (pvr));
    return pvr;
}
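
/*
 * Added note, not in the original source: the upper 16 bits of the PVR
 * identify the processor version and the lower 16 bits its revision,
 * e.g. (mfpvr() >> 16) yields the version field.
 */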
static inline ulong mfr1(void)
{
    ulong r1;
    asm volatile("mr %0, 1" : "=&r" (r1));
    return r1;
}
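
/*
 * Added notes, not in the original source: GPR1 is the stack pointer in
 * the PowerPC ABI, so mfr1() above returns the current stack pointer.
 * SPRG0-SPRG3 below are scratch SPRs reserved for software; a hypervisor
 * typically uses them to stash a working register or a per-CPU pointer
 * during exception entry.
 */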
static inline void mtsprg0(ulong val)
{
    __asm__ __volatile__ ("mtspr %0, %1" : : "i"(SPRN_SPRG0), "r"(val));
}
static inline ulong mfsprg0(void)
{
    ulong val;
    __asm__ __volatile__ ("mfspr %0, %1" : "=r"(val) : "i"(SPRN_SPRG0));
    return val;
}

static inline void mtsprg1(ulong val)
{
    __asm__ __volatile__ ("mtspr %0, %1" : : "i"(SPRN_SPRG1), "r"(val));
}
static inline ulong mfsprg1(void)
{
    ulong val;
    __asm__ __volatile__ ("mfspr %0, %1" : "=r"(val) : "i"(SPRN_SPRG1));
    return val;
}

static inline void mtsprg2(ulong val)
{
    __asm__ __volatile__ ("mtspr %0, %1" : : "i"(SPRN_SPRG2), "r"(val));
}
static inline ulong mfsprg2(void)
{
    ulong val;
    __asm__ __volatile__ ("mfspr %0, %1" : "=r"(val) : "i"(SPRN_SPRG2));
    return val;
}

static inline void mtsprg3(ulong val)
{
    __asm__ __volatile__ ("mtspr %0, %1" : : "i"(SPRN_SPRG3), "r"(val));
}
static inline ulong mfsprg3(void)
{
    ulong val;
    __asm__ __volatile__ ("mfspr %0, %1" : "=r"(val) : "i"(SPRN_SPRG3));
    return val;
}
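
/*
 * Added note, not in the original source: SDR1 holds the real address and
 * size of the hashed page table used by the MMU.
 */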
static inline void mtsdr1(ulong val)
{
    __asm__ __volatile__ ("mtsdr1 %0" : : "r"(val));
}
static inline ulong mfsdr1(void)
{
    ulong val;
    __asm__ __volatile__ ("mfsdr1 %0" : "=r"(val));
    return val;
}
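
/*
 * Added note, not in the original source: on a data storage interrupt,
 * DAR holds the faulting effective address and DSISR the reason bits
 * describing the fault.
 */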
static inline void mtdar(ulong val)
{
    __asm__ __volatile__ ("mtspr %0, %1" : : "i"(SPRN_DAR), "r"(val));
}
static inline ulong mfdar(void)
{
    ulong val;
    __asm__ __volatile__ ("mfspr %0, %1" : "=r"(val) : "i"(SPRN_DAR));
    return val;
}

static inline void mtdsisr(ulong val)
{
    __asm__ __volatile__ ("mtspr %0, %1" : : "i"(SPRN_DSISR), "r"(val));
}
static inline unsigned mfdsisr(void)
{
    unsigned val;
    __asm__ __volatile__ ("mfspr %0, %1" : "=r"(val) : "i"(SPRN_DSISR));
    return val;
}
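
/*
 * Added note, not in the original source: Mambo is IBM's full-system
 * simulator; MSR_MAMBO appears to be a software-defined MSR bit that lets
 * Xen detect it is running under simulation.
 */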
#ifdef CONFIG_MAMBO
static inline int on_mambo(void)
{
    return !!(mfmsr() & MSR_MAMBO);
}
#else /* CONFIG_MAMBO */
static inline int on_mambo(void) { return 0; }
#endif

#endif /* __ASSEMBLY__ */

#include <asm/powerpc64/processor.h>

#endif