ia64/xen-unstable

view xen/common/bitmap.c @ 17062:0769835cf50f

x86 shadow: Reduce scope of shadow lock.

emulate_map_dest doesn't require holding lock, since
only shadow related operation possibly involved is to
remove shadow which is less frequent and can acquire
lock inside. Rest are either guest table walk or
per-vcpu monitor table manipulation

Signed-off-by: Kevin Tian <kevin.tian@intel.com>
author Keir Fraser <keir.fraser@citrix.com>
date Thu Feb 14 10:33:12 2008 +0000 (2008-02-14)
parents 7c455af5998a
children
line source
1 /*
2 * lib/bitmap.c
3 * Helper functions for bitmap.h.
4 *
5 * This source code is licensed under the GNU General Public License,
6 * Version 2. See the file COPYING for more details.
7 */
8 #include <xen/config.h>
9 #include <xen/types.h>
10 #include <xen/errno.h>
11 #include <xen/bitmap.h>
12 #include <xen/bitops.h>
13 #include <asm/byteorder.h>
15 /*
 * bitmaps provide an array of bits, implemented using an
17 * array of unsigned longs. The number of valid bits in a
18 * given bitmap does _not_ need to be an exact multiple of
19 * BITS_PER_LONG.
20 *
21 * The possible unused bits in the last, partially used word
22 * of a bitmap are 'don't care'. The implementation makes
23 * no particular effort to keep them zero. It ensures that
24 * their value will not affect the results of any operation.
25 * The bitmap operations that return Boolean (bitmap_empty,
26 * for example) or scalar (bitmap_weight, for example) results
27 * carefully filter out these unused bits from impacting their
28 * results.
29 *
30 * These operations actually hold to a slightly stronger rule:
31 * if you don't input any bitmaps to these ops that have some
32 * unused bits set, then they won't output any set unused bits
33 * in output bitmaps.
34 *
35 * The byte ordering of bitmaps is more natural on little
36 * endian architectures. See the big-endian headers
37 * include/asm-ppc64/bitops.h and include/asm-s390/bitops.h
38 * for the best explanations of this ordering.
39 */
41 int __bitmap_empty(const unsigned long *bitmap, int bits)
42 {
43 int k, lim = bits/BITS_PER_LONG;
44 for (k = 0; k < lim; ++k)
45 if (bitmap[k])
46 return 0;
48 if (bits % BITS_PER_LONG)
49 if (bitmap[k] & BITMAP_LAST_WORD_MASK(bits))
50 return 0;
52 return 1;
53 }
54 EXPORT_SYMBOL(__bitmap_empty);
56 int __bitmap_full(const unsigned long *bitmap, int bits)
57 {
58 int k, lim = bits/BITS_PER_LONG;
59 for (k = 0; k < lim; ++k)
60 if (~bitmap[k])
61 return 0;
63 if (bits % BITS_PER_LONG)
64 if (~bitmap[k] & BITMAP_LAST_WORD_MASK(bits))
65 return 0;
67 return 1;
68 }
69 EXPORT_SYMBOL(__bitmap_full);
71 int __bitmap_equal(const unsigned long *bitmap1,
72 const unsigned long *bitmap2, int bits)
73 {
74 int k, lim = bits/BITS_PER_LONG;
75 for (k = 0; k < lim; ++k)
76 if (bitmap1[k] != bitmap2[k])
77 return 0;
79 if (bits % BITS_PER_LONG)
80 if ((bitmap1[k] ^ bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
81 return 0;
83 return 1;
84 }
85 EXPORT_SYMBOL(__bitmap_equal);
87 void __bitmap_complement(unsigned long *dst, const unsigned long *src, int bits)
88 {
89 int k, lim = bits/BITS_PER_LONG;
90 for (k = 0; k < lim; ++k)
91 dst[k] = ~src[k];
93 if (bits % BITS_PER_LONG)
94 dst[k] = ~src[k] & BITMAP_LAST_WORD_MASK(bits);
95 }
96 EXPORT_SYMBOL(__bitmap_complement);
98 /*
99 * __bitmap_shift_right - logical right shift of the bits in a bitmap
100 * @dst - destination bitmap
101 * @src - source bitmap
102 * @nbits - shift by this many bits
103 * @bits - bitmap size, in bits
104 *
105 * Shifting right (dividing) means moving bits in the MS -> LS bit
106 * direction. Zeros are fed into the vacated MS positions and the
107 * LS bits shifted off the bottom are lost.
108 */
109 void __bitmap_shift_right(unsigned long *dst,
110 const unsigned long *src, int shift, int bits)
111 {
112 int k, lim = BITS_TO_LONGS(bits), left = bits % BITS_PER_LONG;
113 int off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
114 unsigned long mask = (1UL << left) - 1;
115 for (k = 0; off + k < lim; ++k) {
116 unsigned long upper, lower;
118 /*
119 * If shift is not word aligned, take lower rem bits of
120 * word above and make them the top rem bits of result.
121 */
122 if (!rem || off + k + 1 >= lim)
123 upper = 0;
124 else {
125 upper = src[off + k + 1];
126 if (off + k + 1 == lim - 1 && left)
127 upper &= mask;
128 }
129 lower = src[off + k];
130 if (left && off + k == lim - 1)
131 lower &= mask;
132 dst[k] = upper << (BITS_PER_LONG - rem) | lower >> rem;
133 if (left && k == lim - 1)
134 dst[k] &= mask;
135 }
136 if (off)
137 memset(&dst[lim - off], 0, off*sizeof(unsigned long));
138 }
139 EXPORT_SYMBOL(__bitmap_shift_right);
142 /*
143 * __bitmap_shift_left - logical left shift of the bits in a bitmap
144 * @dst - destination bitmap
145 * @src - source bitmap
146 * @nbits - shift by this many bits
147 * @bits - bitmap size, in bits
148 *
149 * Shifting left (multiplying) means moving bits in the LS -> MS
150 * direction. Zeros are fed into the vacated LS bit positions
151 * and those MS bits shifted off the top are lost.
152 */
154 void __bitmap_shift_left(unsigned long *dst,
155 const unsigned long *src, int shift, int bits)
156 {
157 int k, lim = BITS_TO_LONGS(bits), left = bits % BITS_PER_LONG;
158 int off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
159 for (k = lim - off - 1; k >= 0; --k) {
160 unsigned long upper, lower;
162 /*
163 * If shift is not word aligned, take upper rem bits of
164 * word below and make them the bottom rem bits of result.
165 */
166 if (rem && k > 0)
167 lower = src[k - 1];
168 else
169 lower = 0;
170 upper = src[k];
171 if (left && k == lim - 1)
172 upper &= (1UL << left) - 1;
173 dst[k + off] = lower >> (BITS_PER_LONG - rem) | upper << rem;
174 if (left && k + off == lim - 1)
175 dst[k + off] &= (1UL << left) - 1;
176 }
177 if (off)
178 memset(dst, 0, off*sizeof(unsigned long));
179 }
180 EXPORT_SYMBOL(__bitmap_shift_left);
/*
 * __bitmap_and - compute the bitwise AND of two bitmaps
 * @dst: destination bitmap (may alias a source)
 * @bitmap1: first operand
 * @bitmap2: second operand
 * @bits: number of valid bits in each bitmap
 */
void __bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
		const unsigned long *bitmap2, int bits)
{
	int i;
	const int words = BITS_TO_LONGS(bits);

	for (i = 0; i < words; i++)
		dst[i] = bitmap1[i] & bitmap2[i];
}
EXPORT_SYMBOL(__bitmap_and);
/*
 * __bitmap_or - compute the bitwise OR of two bitmaps
 * @dst: destination bitmap (may alias a source)
 * @bitmap1: first operand
 * @bitmap2: second operand
 * @bits: number of valid bits in each bitmap
 */
void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
		const unsigned long *bitmap2, int bits)
{
	int i;
	const int words = BITS_TO_LONGS(bits);

	for (i = 0; i < words; i++)
		dst[i] = bitmap1[i] | bitmap2[i];
}
EXPORT_SYMBOL(__bitmap_or);
/*
 * __bitmap_xor - compute the bitwise XOR of two bitmaps
 * @dst: destination bitmap (may alias a source)
 * @bitmap1: first operand
 * @bitmap2: second operand
 * @bits: number of valid bits in each bitmap
 */
void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
		const unsigned long *bitmap2, int bits)
{
	int i;
	const int words = BITS_TO_LONGS(bits);

	for (i = 0; i < words; i++)
		dst[i] = bitmap1[i] ^ bitmap2[i];
}
EXPORT_SYMBOL(__bitmap_xor);
/*
 * __bitmap_andnot - compute bitmap1 AND NOT bitmap2 (set difference)
 * @dst: destination bitmap (may alias a source)
 * @bitmap1: minuend
 * @bitmap2: subtrahend (bits to clear)
 * @bits: number of valid bits in each bitmap
 */
void __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
		const unsigned long *bitmap2, int bits)
{
	int i;
	const int words = BITS_TO_LONGS(bits);

	for (i = 0; i < words; i++)
		dst[i] = bitmap1[i] & ~bitmap2[i];
}
EXPORT_SYMBOL(__bitmap_andnot);
226 int __bitmap_intersects(const unsigned long *bitmap1,
227 const unsigned long *bitmap2, int bits)
228 {
229 int k, lim = bits/BITS_PER_LONG;
230 for (k = 0; k < lim; ++k)
231 if (bitmap1[k] & bitmap2[k])
232 return 1;
234 if (bits % BITS_PER_LONG)
235 if ((bitmap1[k] & bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
236 return 1;
237 return 0;
238 }
239 EXPORT_SYMBOL(__bitmap_intersects);
241 int __bitmap_subset(const unsigned long *bitmap1,
242 const unsigned long *bitmap2, int bits)
243 {
244 int k, lim = bits/BITS_PER_LONG;
245 for (k = 0; k < lim; ++k)
246 if (bitmap1[k] & ~bitmap2[k])
247 return 0;
249 if (bits % BITS_PER_LONG)
250 if ((bitmap1[k] & ~bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
251 return 0;
252 return 1;
253 }
254 EXPORT_SYMBOL(__bitmap_subset);
#if BITS_PER_LONG == 32
/*
 * __bitmap_weight - count the number of set bits in a bitmap
 * @bitmap: bitmap to scan
 * @bits: number of valid bits in @bitmap
 *
 * Unused bits in a partial final word are masked off before counting,
 * so their value cannot affect the result.
 */
int __bitmap_weight(const unsigned long *bitmap, int bits)
{
	int k, w = 0, lim = bits/BITS_PER_LONG;
	/* Population count of each whole word. */
	for (k = 0; k < lim; k++)
		w += hweight32(bitmap[k]);
	/* Partial final word: mask out the bits beyond @bits first. */
	if (bits % BITS_PER_LONG)
		w += hweight32(bitmap[k] & BITMAP_LAST_WORD_MASK(bits));
	return w;
}
#else
/* 64-bit variant: identical logic using the 64-bit popcount helper. */
int __bitmap_weight(const unsigned long *bitmap, int bits)
{
	int k, w = 0, lim = bits/BITS_PER_LONG;
	for (k = 0; k < lim; k++)
		w += hweight64(bitmap[k]);
	if (bits % BITS_PER_LONG)
		w += hweight64(bitmap[k] & BITMAP_LAST_WORD_MASK(bits));
	return w;
}
#endif
EXPORT_SYMBOL(__bitmap_weight);
/*
 * Bitmap printing & parsing functions: first version by Bill Irwin,
 * second version by Paul Jackson, third by Joe Korty.
 */
/* Bits rendered per comma-separated hex chunk (8 hex digits). */
#define CHUNKSZ 32
/* Number of bits needed to hold val (position of highest set bit). */
#define nbits_to_hold_value(val) fls(val)
/* Round val up to a multiple of modulus; modulus must be a power of 2. */
#define roundup_power2(val,modulus) (((val) + (modulus) - 1) & ~((modulus) - 1))
/* Hex digit character to value.  NOTE(review): evaluates c more than
 * once - do not pass arguments with side effects. */
#define unhex(c) (isdigit(c) ? (c - '0') : (toupper(c) - 'A' + 10))
#define BASEDEC 10 /* fancier cpuset lists input in decimal */
/**
 * bitmap_scnprintf - convert bitmap to an ASCII hex string.
 * @buf: byte buffer into which string is placed
 * @buflen: reserved size of @buf, in bytes
 * @maskp: pointer to bitmap to convert
 * @nmaskbits: size of bitmap, in bits
 *
 * Exactly @nmaskbits bits are displayed.  Hex digits are grouped into
 * comma-separated sets of eight digits per set.  Returns the length of
 * the string placed in @buf.
 */
int bitmap_scnprintf(char *buf, unsigned int buflen,
	const unsigned long *maskp, int nmaskbits)
{
	int i, word, bit, len = 0;
	unsigned long val;
	const char *sep = "";
	int chunksz;
	u32 chunkmask;
	/*
	 * The first (most significant) chunk printed may be partial;
	 * every following chunk is a full CHUNKSZ bits wide.
	 */
	chunksz = nmaskbits & (CHUNKSZ - 1);
	if (chunksz == 0)
		chunksz = CHUNKSZ;
	/* Start at the lowest bit index of the most significant chunk. */
	i = roundup_power2(nmaskbits, CHUNKSZ) - CHUNKSZ;
	for (; i >= 0; i -= CHUNKSZ) {
		/* 1ULL keeps the shift defined when chunksz == 32. */
		chunkmask = ((1ULL << chunksz) - 1);
		word = i / BITS_PER_LONG;
		bit = i % BITS_PER_LONG;
		val = (maskp[word] >> bit) & chunkmask;
		/* Zero-pad to one hex digit per 4 bits of this chunk. */
		len += scnprintf(buf+len, buflen-len, "%s%0*lx", sep,
			(chunksz+3)/4, val);
		chunksz = CHUNKSZ;
		sep = ",";
	}
	return len;
}
EXPORT_SYMBOL(bitmap_scnprintf);
/*
 * bscnl_emit(buf, buflen, rbot, rtop, len)
 *
 * Helper routine for bitmap_scnlistprintf().  Appends a decimal
 * number (when rbot == rtop) or a "rbot-rtop" range to buf, with a
 * comma prefix when output already exists, suppressing output past
 * buf+buflen.  Returns the length of what would be written to buf
 * if it all fit.
 */
static inline int bscnl_emit(char *buf, int buflen, int rbot, int rtop, int len)
{
	if (len > 0)
		len += scnprintf(buf + len, buflen - len, ",");
	len += (rbot == rtop)
		? scnprintf(buf + len, buflen - len, "%d", rbot)
		: scnprintf(buf + len, buflen - len, "%d-%d", rbot, rtop);
	return len;
}
/**
 * bitmap_scnlistprintf - convert bitmap to list format ASCII string
 * @buf: byte buffer into which string is placed
 * @buflen: reserved size of @buf, in bytes
 * @maskp: pointer to bitmap to convert
 * @nmaskbits: size of bitmap, in bits
 *
 * Output format is a comma-separated list of decimal numbers and
 * ranges.  Consecutively set bits are shown as two hyphen-separated
 * decimal numbers, the smallest and largest bit numbers set in
 * the range.  Output format is compatible with the format
 * accepted as input by bitmap_parselist().
 *
 * The return value is the number of characters which would be
 * generated for the given input, excluding the trailing '\0', as
 * per ISO C99.
 */
int bitmap_scnlistprintf(char *buf, unsigned int buflen,
	const unsigned long *maskp, int nmaskbits)
{
	int len = 0;
	/* current bit is 'cur', most recently seen range is [rbot, rtop] */
	int cur, rbot, rtop;
	rbot = cur = find_first_bit(maskp, nmaskbits);
	while (cur < nmaskbits) {
		rtop = cur;
		cur = find_next_bit(maskp, nmaskbits, cur+1);
		/*
		 * Emit the pending range once the next set bit is not
		 * adjacent to it, or when no set bits remain.
		 */
		if (cur >= nmaskbits || cur > rtop + 1) {
			len = bscnl_emit(buf, buflen, rbot, rtop, len);
			rbot = cur;
		}
	}
	return len;
}
EXPORT_SYMBOL(bitmap_scnlistprintf);
390 /**
391 * bitmap_find_free_region - find a contiguous aligned mem region
392 * @bitmap: an array of unsigned longs corresponding to the bitmap
393 * @bits: number of bits in the bitmap
394 * @order: region size to find (size is actually 1<<order)
395 *
396 * This is used to allocate a memory region from a bitmap. The idea is
397 * that the region has to be 1<<order sized and 1<<order aligned (this
398 * makes the search algorithm much faster).
399 *
400 * The region is marked as set bits in the bitmap if a free one is
401 * found.
402 *
403 * Returns either beginning of region or negative error
404 */
405 int bitmap_find_free_region(unsigned long *bitmap, int bits, int order)
406 {
407 unsigned long mask;
408 int pages = 1 << order;
409 int i;
411 if(pages > BITS_PER_LONG)
412 return -EINVAL;
414 /* make a mask of the order */
415 mask = (1ul << (pages - 1));
416 mask += mask - 1;
418 /* run up the bitmap pages bits at a time */
419 for (i = 0; i < bits; i += pages) {
420 int index = i/BITS_PER_LONG;
421 int offset = i - (index * BITS_PER_LONG);
422 if((bitmap[index] & (mask << offset)) == 0) {
423 /* set region in bimap */
424 bitmap[index] |= (mask << offset);
425 return i;
426 }
427 }
428 return -ENOMEM;
429 }
430 EXPORT_SYMBOL(bitmap_find_free_region);
432 /**
433 * bitmap_release_region - release allocated bitmap region
434 * @bitmap: a pointer to the bitmap
435 * @pos: the beginning of the region
436 * @order: the order of the bits to release (number is 1<<order)
437 *
438 * This is the complement to __bitmap_find_free_region and releases
439 * the found region (by clearing it in the bitmap).
440 */
441 void bitmap_release_region(unsigned long *bitmap, int pos, int order)
442 {
443 int pages = 1 << order;
444 unsigned long mask = (1ul << (pages - 1));
445 int index = pos/BITS_PER_LONG;
446 int offset = pos - (index * BITS_PER_LONG);
447 mask += mask - 1;
448 bitmap[index] &= ~(mask << offset);
449 }
450 EXPORT_SYMBOL(bitmap_release_region);
452 int bitmap_allocate_region(unsigned long *bitmap, int pos, int order)
453 {
454 int pages = 1 << order;
455 unsigned long mask = (1ul << (pages - 1));
456 int index = pos/BITS_PER_LONG;
457 int offset = pos - (index * BITS_PER_LONG);
459 /* We don't do regions of pages > BITS_PER_LONG. The
460 * algorithm would be a simple look for multiple zeros in the
461 * array, but there's no driver today that needs this. If you
462 * trip this BUG(), you get to code it... */
463 BUG_ON(pages > BITS_PER_LONG);
464 mask += mask - 1;
465 if (bitmap[index] & (mask << offset))
466 return -EBUSY;
467 bitmap[index] |= (mask << offset);
468 return 0;
469 }
470 EXPORT_SYMBOL(bitmap_allocate_region);
#ifdef __BIG_ENDIAN
/*
 * bitmap_long_to_byte - serialise a bitmap into a byte array,
 * least-significant byte of each word first, independent of the
 * host's native byte order.
 * @bp: destination byte array (at least (nbits+7)/8 bytes)
 * @lp: source array of unsigned longs
 * @nbits: number of bits to copy
 */
void bitmap_long_to_byte(uint8_t *bp, const unsigned long *lp, int nbits)
{
	unsigned long l;
	int i, j, b;
	for (i = 0, b = 0; nbits > 0; i++, b += sizeof(l)) {
		l = lp[i];
		/* Emit each word low byte first, stopping after nbits. */
		for (j = 0; (j < sizeof(l)) && (nbits > 0); j++) {
			bp[b+j] = l;	/* implicit truncation keeps the low byte */
			l >>= 8;
			nbits -= 8;
		}
	}
}
/*
 * bitmap_byte_to_long - inverse of bitmap_long_to_byte: rebuild an
 * array of unsigned longs from a little-endian byte serialisation.
 * @lp: destination array of unsigned longs
 * @bp: source byte array
 * @nbits: number of bits to copy
 */
void bitmap_byte_to_long(unsigned long *lp, const uint8_t *bp, int nbits)
{
	unsigned long l;
	int i, j, b;
	for (i = 0, b = 0; nbits > 0; i++, b += sizeof(l)) {
		l = 0;
		/* Assemble each word from its bytes, low byte first. */
		for (j = 0; (j < sizeof(l)) && (nbits > 0); j++) {
			l |= (unsigned long)bp[b+j] << (j*8);
			nbits -= 8;
		}
		lp[i] = l;
	}
}
#elif defined(__LITTLE_ENDIAN)
/*
 * On little-endian hosts the in-memory layout of the long array is
 * already the byte serialisation, so a plain copy suffices.
 */
void bitmap_long_to_byte(uint8_t *bp, const unsigned long *lp, int nbits)
{
	memcpy(bp, lp, (nbits+7)/8);
}
void bitmap_byte_to_long(unsigned long *lp, const uint8_t *bp, int nbits)
{
	/* We may need to pad the final longword with zeroes. */
	if (nbits & (BITS_PER_LONG-1))
		lp[BITS_TO_LONGS(nbits)-1] = 0;
	memcpy(lp, bp, (nbits+7)/8);
}
#endif