GitHub Repository: torvalds/linux
Path: blob/master/lib/bitmap.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * lib/bitmap.c
 * Helper functions for bitmap.h.
 */

#include <linux/bitmap.h>
#include <linux/bitops.h>
#include <linux/ctype.h>
#include <linux/device.h>
#include <linux/export.h>
#include <linux/slab.h>

/**
 * DOC: bitmap introduction
 *
 * bitmaps provide an array of bits, implemented using an
 * array of unsigned longs.  The number of valid bits in a
 * given bitmap does _not_ need to be an exact multiple of
 * BITS_PER_LONG.
 *
 * The possible unused bits in the last, partially used word
 * of a bitmap are 'don't care'.  The implementation makes
 * no particular effort to keep them zero.  It ensures that
 * their value will not affect the results of any operation.
 * The bitmap operations that return Boolean (bitmap_empty,
 * for example) or scalar (bitmap_weight, for example) results
 * carefully filter out these unused bits from impacting their
 * results.
 *
 * The byte ordering of bitmaps is more natural on little
 * endian architectures.  See the big-endian headers
 * include/asm-ppc64/bitops.h and include/asm-s390/bitops.h
 * for the best explanations of this ordering.
 */
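/*
 * Editorial illustration (not part of the original file): a minimal sketch
 * of the convention described above, assuming the usual <linux/bitmap.h>
 * helpers.  A 70-bit bitmap occupies two unsigned longs on a 64-bit
 * machine; the top 58 bits of the second word are "don't care" and are
 * masked out by the Boolean and scalar operations:
 *
 *      DECLARE_BITMAP(map, 70);        // 2 x unsigned long on 64-bit
 *
 *      bitmap_fill(map, 70);           // may leave tail bits set
 *      bitmap_weight(map, 70);         // returns 70, tail bits ignored
 *      bitmap_clear(map, 0, 70);
 *      bitmap_empty(map, 70);          // true, whatever the tail holds
 */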

bool __bitmap_equal(const unsigned long *bitmap1,
                    const unsigned long *bitmap2, unsigned int bits)
{
        unsigned int k, lim = bits/BITS_PER_LONG;
        for (k = 0; k < lim; ++k)
                if (bitmap1[k] != bitmap2[k])
                        return false;

        if (bits % BITS_PER_LONG)
                if ((bitmap1[k] ^ bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
                        return false;

        return true;
}
EXPORT_SYMBOL(__bitmap_equal);

bool __bitmap_or_equal(const unsigned long *bitmap1,
                       const unsigned long *bitmap2,
                       const unsigned long *bitmap3,
                       unsigned int bits)
{
        unsigned int k, lim = bits / BITS_PER_LONG;
        unsigned long tmp;

        for (k = 0; k < lim; ++k) {
                if ((bitmap1[k] | bitmap2[k]) != bitmap3[k])
                        return false;
        }

        if (!(bits % BITS_PER_LONG))
                return true;

        tmp = (bitmap1[k] | bitmap2[k]) ^ bitmap3[k];
        return (tmp & BITMAP_LAST_WORD_MASK(bits)) == 0;
}

void __bitmap_complement(unsigned long *dst, const unsigned long *src, unsigned int bits)
{
        unsigned int k, lim = BITS_TO_LONGS(bits);
        for (k = 0; k < lim; ++k)
                dst[k] = ~src[k];
}
EXPORT_SYMBOL(__bitmap_complement);

/**
 * __bitmap_shift_right - logical right shift of the bits in a bitmap
 * @dst : destination bitmap
 * @src : source bitmap
 * @shift : shift by this many bits
 * @nbits : bitmap size, in bits
 *
 * Shifting right (dividing) means moving bits in the MS -> LS bit
 * direction.  Zeros are fed into the vacated MS positions and the
 * LS bits shifted off the bottom are lost.
 */
void __bitmap_shift_right(unsigned long *dst, const unsigned long *src,
                        unsigned shift, unsigned nbits)
{
        unsigned k, lim = BITS_TO_LONGS(nbits);
        unsigned off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
        unsigned long mask = BITMAP_LAST_WORD_MASK(nbits);
        for (k = 0; off + k < lim; ++k) {
                unsigned long upper, lower;

                /*
                 * If shift is not word aligned, take lower rem bits of
                 * word above and make them the top rem bits of result.
                 */
                if (!rem || off + k + 1 >= lim)
                        upper = 0;
                else {
                        upper = src[off + k + 1];
                        if (off + k + 1 == lim - 1)
                                upper &= mask;
                        upper <<= (BITS_PER_LONG - rem);
                }
                lower = src[off + k];
                if (off + k == lim - 1)
                        lower &= mask;
                lower >>= rem;
                dst[k] = lower | upper;
        }
        if (off)
                memset(&dst[lim - off], 0, off*sizeof(unsigned long));
}
EXPORT_SYMBOL(__bitmap_shift_right);
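/*
 * Editorial illustration (not part of the original file): the
 * double-underscore variant above is normally reached through the
 * bitmap_shift_right() wrapper.  Assuming a 16-bit map with bits 0 and 9
 * set, shifting right by 4 drops bit 0 off the bottom and moves bit 9
 * down to bit 5:
 *
 *      DECLARE_BITMAP(src, 16);
 *      DECLARE_BITMAP(dst, 16);
 *
 *      bitmap_zero(src, 16);
 *      set_bit(0, src);
 *      set_bit(9, src);
 *      bitmap_shift_right(dst, src, 4, 16);    // dst now has only bit 5 set
 */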


/**
 * __bitmap_shift_left - logical left shift of the bits in a bitmap
 * @dst : destination bitmap
 * @src : source bitmap
 * @shift : shift by this many bits
 * @nbits : bitmap size, in bits
 *
 * Shifting left (multiplying) means moving bits in the LS -> MS
 * direction.  Zeros are fed into the vacated LS bit positions
 * and those MS bits shifted off the top are lost.
 */

void __bitmap_shift_left(unsigned long *dst, const unsigned long *src,
                        unsigned int shift, unsigned int nbits)
{
        int k;
        unsigned int lim = BITS_TO_LONGS(nbits);
        unsigned int off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
        for (k = lim - off - 1; k >= 0; --k) {
                unsigned long upper, lower;

                /*
                 * If shift is not word aligned, take upper rem bits of
                 * word below and make them the bottom rem bits of result.
                 */
                if (rem && k > 0)
                        lower = src[k - 1] >> (BITS_PER_LONG - rem);
                else
                        lower = 0;
                upper = src[k] << rem;
                dst[k + off] = lower | upper;
        }
        if (off)
                memset(dst, 0, off*sizeof(unsigned long));
}
EXPORT_SYMBOL(__bitmap_shift_left);

/**
 * bitmap_cut() - remove bit region from bitmap and right shift remaining bits
 * @dst: destination bitmap, might overlap with src
 * @src: source bitmap
 * @first: start bit of region to be removed
 * @cut: number of bits to remove
 * @nbits: bitmap size, in bits
 *
 * Set the n-th bit of @dst iff the n-th bit of @src is set and
 * n is less than @first, or the m-th bit of @src is set for any
 * m such that @first <= n < nbits, and m = n + @cut.
 *
 * In pictures, example for a big-endian 32-bit architecture:
 *
 * The @src bitmap is::
 *
 *         31                                   63
 *         |                                    |
 *  10000000 11000001 11110010 00010101  10000000 11000001 01110010 00010101
 *                   |  |              |                                    |
 *                  16  14             0                                   32
 *
 * if @cut is 3, and @first is 14, bits 14-16 in @src are cut and @dst is::
 *
 *         31                                   63
 *         |                                    |
 *  10110000 00011000 00110010 00010101  00010000 00011000 00101110 01000010
 *                    |              |                                    |
 *                    14 (bit 17     0                                   32
 *                     from @src)
 *
 * Note that @dst and @src might overlap partially or entirely.
 *
 * This is implemented in the obvious way, with a shift and carry
 * step for each moved bit. Optimisation is left as an exercise
 * for the compiler.
 */
void bitmap_cut(unsigned long *dst, const unsigned long *src,
                unsigned int first, unsigned int cut, unsigned int nbits)
{
        unsigned int len = BITS_TO_LONGS(nbits);
        unsigned long keep = 0, carry;
        int i;

        if (first % BITS_PER_LONG) {
                keep = src[first / BITS_PER_LONG] &
                       (~0UL >> (BITS_PER_LONG - first % BITS_PER_LONG));
        }

        memmove(dst, src, len * sizeof(*dst));

        while (cut--) {
                for (i = first / BITS_PER_LONG; i < len; i++) {
                        if (i < len - 1)
                                carry = dst[i + 1] & 1UL;
                        else
                                carry = 0;

                        dst[i] = (dst[i] >> 1) | (carry << (BITS_PER_LONG - 1));
                }
        }

        dst[first / BITS_PER_LONG] &= ~0UL << (first % BITS_PER_LONG);
        dst[first / BITS_PER_LONG] |= keep;
}
EXPORT_SYMBOL(bitmap_cut);

bool __bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
                  const unsigned long *bitmap2, unsigned int bits)
{
        unsigned int k;
        unsigned int lim = bits/BITS_PER_LONG;
        unsigned long result = 0;

        for (k = 0; k < lim; k++)
                result |= (dst[k] = bitmap1[k] & bitmap2[k]);
        if (bits % BITS_PER_LONG)
                result |= (dst[k] = bitmap1[k] & bitmap2[k] &
                           BITMAP_LAST_WORD_MASK(bits));
        return result != 0;
}
EXPORT_SYMBOL(__bitmap_and);
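/*
 * Editorial note (not part of the original file): unlike the void OR/XOR
 * helpers below, __bitmap_and() reports whether the result has any bit
 * set, so callers can combine "dst = a & b" with an emptiness check.
 * A hypothetical sketch via the bitmap_and() wrapper:
 *
 *      DECLARE_BITMAP(a, 128);
 *      DECLARE_BITMAP(b, 128);
 *      DECLARE_BITMAP(dst, 128);
 *
 *      if (bitmap_and(dst, a, b, 128))
 *              pr_debug("a and b overlap\n");  // dst holds the overlap
 */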

void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
                 const unsigned long *bitmap2, unsigned int bits)
{
        unsigned int k;
        unsigned int nr = BITS_TO_LONGS(bits);

        for (k = 0; k < nr; k++)
                dst[k] = bitmap1[k] | bitmap2[k];
}
EXPORT_SYMBOL(__bitmap_or);

void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
                  const unsigned long *bitmap2, unsigned int bits)
{
        unsigned int k;
        unsigned int nr = BITS_TO_LONGS(bits);

        for (k = 0; k < nr; k++)
                dst[k] = bitmap1[k] ^ bitmap2[k];
}
EXPORT_SYMBOL(__bitmap_xor);

bool __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
                     const unsigned long *bitmap2, unsigned int bits)
{
        unsigned int k;
        unsigned int lim = bits/BITS_PER_LONG;
        unsigned long result = 0;

        for (k = 0; k < lim; k++)
                result |= (dst[k] = bitmap1[k] & ~bitmap2[k]);
        if (bits % BITS_PER_LONG)
                result |= (dst[k] = bitmap1[k] & ~bitmap2[k] &
                           BITMAP_LAST_WORD_MASK(bits));
        return result != 0;
}
EXPORT_SYMBOL(__bitmap_andnot);

void __bitmap_replace(unsigned long *dst,
                      const unsigned long *old, const unsigned long *new,
                      const unsigned long *mask, unsigned int nbits)
{
        unsigned int k;
        unsigned int nr = BITS_TO_LONGS(nbits);

        for (k = 0; k < nr; k++)
                dst[k] = (old[k] & ~mask[k]) | (new[k] & mask[k]);
}
EXPORT_SYMBOL(__bitmap_replace);

bool __bitmap_intersects(const unsigned long *bitmap1,
                         const unsigned long *bitmap2, unsigned int bits)
{
        unsigned int k, lim = bits/BITS_PER_LONG;
        for (k = 0; k < lim; ++k)
                if (bitmap1[k] & bitmap2[k])
                        return true;

        if (bits % BITS_PER_LONG)
                if ((bitmap1[k] & bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
                        return true;
        return false;
}
EXPORT_SYMBOL(__bitmap_intersects);

bool __bitmap_subset(const unsigned long *bitmap1,
                     const unsigned long *bitmap2, unsigned int bits)
{
        unsigned int k, lim = bits/BITS_PER_LONG;
        for (k = 0; k < lim; ++k)
                if (bitmap1[k] & ~bitmap2[k])
                        return false;

        if (bits % BITS_PER_LONG)
                if ((bitmap1[k] & ~bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
                        return false;
        return true;
}
EXPORT_SYMBOL(__bitmap_subset);

#define BITMAP_WEIGHT(FETCH, bits)                                      \
({                                                                      \
        unsigned int __bits = (bits), idx, w = 0;                       \
                                                                        \
        for (idx = 0; idx < __bits / BITS_PER_LONG; idx++)              \
                w += hweight_long(FETCH);                               \
                                                                        \
        if (__bits % BITS_PER_LONG)                                     \
                w += hweight_long((FETCH) & BITMAP_LAST_WORD_MASK(__bits)); \
                                                                        \
        w;                                                              \
})

unsigned int __bitmap_weight(const unsigned long *bitmap, unsigned int bits)
{
        return BITMAP_WEIGHT(bitmap[idx], bits);
}
EXPORT_SYMBOL(__bitmap_weight);

unsigned int __bitmap_weight_and(const unsigned long *bitmap1,
                                 const unsigned long *bitmap2, unsigned int bits)
{
        return BITMAP_WEIGHT(bitmap1[idx] & bitmap2[idx], bits);
}
EXPORT_SYMBOL(__bitmap_weight_and);

unsigned int __bitmap_weight_andnot(const unsigned long *bitmap1,
                                    const unsigned long *bitmap2, unsigned int bits)
{
        return BITMAP_WEIGHT(bitmap1[idx] & ~bitmap2[idx], bits);
}
EXPORT_SYMBOL(__bitmap_weight_andnot);
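/*
 * Editorial note (not part of the original file): BITMAP_WEIGHT() is a
 * statement-expression macro; the FETCH argument is re-expanded inside
 * the loop with the local variable 'idx' in scope, so
 * __bitmap_weight_and() above effectively runs:
 *
 *      for (idx = 0; idx < bits / BITS_PER_LONG; idx++)
 *              w += hweight_long(bitmap1[idx] & bitmap2[idx]);
 *
 * followed by one hweight_long() on the masked partial tail word.
 */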

void __bitmap_set(unsigned long *map, unsigned int start, int len)
{
        unsigned long *p = map + BIT_WORD(start);
        const unsigned int size = start + len;
        int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
        unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);

        while (len - bits_to_set >= 0) {
                *p |= mask_to_set;
                len -= bits_to_set;
                bits_to_set = BITS_PER_LONG;
                mask_to_set = ~0UL;
                p++;
        }
        if (len) {
                mask_to_set &= BITMAP_LAST_WORD_MASK(size);
                *p |= mask_to_set;
        }
}
EXPORT_SYMBOL(__bitmap_set);
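/*
 * Editorial illustration (not part of the original file): callers normally
 * use the bitmap_set()/bitmap_clear() wrappers, which fall back to these
 * out-of-line helpers in the general case.  Assuming a 128-bit map,
 * setting 20 bits starting at bit 60 touches both words: the first word
 * gets bits 60-63, the second gets bits 0-15:
 *
 *      DECLARE_BITMAP(map, 128);
 *
 *      bitmap_zero(map, 128);
 *      bitmap_set(map, 60, 20);        // bits 60..79 now set
 */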

void __bitmap_clear(unsigned long *map, unsigned int start, int len)
{
        unsigned long *p = map + BIT_WORD(start);
        const unsigned int size = start + len;
        int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
        unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);

        while (len - bits_to_clear >= 0) {
                *p &= ~mask_to_clear;
                len -= bits_to_clear;
                bits_to_clear = BITS_PER_LONG;
                mask_to_clear = ~0UL;
                p++;
        }
        if (len) {
                mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
                *p &= ~mask_to_clear;
        }
}
EXPORT_SYMBOL(__bitmap_clear);

/**
 * bitmap_find_next_zero_area_off - find a contiguous aligned zero area
 * @map: The address to base the search on
 * @size: The bitmap size in bits
 * @start: The bitnumber to start searching at
 * @nr: The number of zeroed bits we're looking for
 * @align_mask: Alignment mask for zero area
 * @align_offset: Alignment offset for zero area.
 *
 * The @align_mask should be one less than a power of 2; the effect is that
 * the bit offset of all zero areas this function finds plus @align_offset
 * is a multiple of that power of 2.
 */
unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
                                             unsigned long size,
                                             unsigned long start,
                                             unsigned int nr,
                                             unsigned long align_mask,
                                             unsigned long align_offset)
{
        unsigned long index, end, i;
again:
        index = find_next_zero_bit(map, size, start);

        /* Align allocation */
        index = __ALIGN_MASK(index + align_offset, align_mask) - align_offset;

        end = index + nr;
        if (end > size)
                return end;
        i = find_next_bit(map, end, index);
        if (i < end) {
                start = i + 1;
                goto again;
        }
        return index;
}
EXPORT_SYMBOL(bitmap_find_next_zero_area_off);
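/*
 * Editorial illustration (not part of the original file): the common
 * caller is the bitmap_find_next_zero_area() wrapper, which passes
 * @align_offset = 0.  Assuming an allocator looking for 8 free slots at
 * an 8-slot-aligned offset in a 256-bit map:
 *
 *      DECLARE_BITMAP(map, 256);
 *      unsigned long pos;
 *
 *      pos = bitmap_find_next_zero_area(map, 256, 0, 8, 7);
 *      if (pos >= 256)
 *              return -ENOMEM;         // no aligned run of 8 zero bits
 *      bitmap_set(map, pos, 8);        // claim the region
 *
 * The mask 7 (one less than 8) forces 'pos' to be a multiple of 8.
 */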

/**
 * bitmap_pos_to_ord - find ordinal of set bit at given position in bitmap
 * @buf: pointer to a bitmap
 * @pos: a bit position in @buf (0 <= @pos < @nbits)
 * @nbits: number of valid bit positions in @buf
 *
 * Map the bit at position @pos in @buf (of length @nbits) to the
 * ordinal of which set bit it is.  If it is not set or if @pos
 * is not a valid bit position, map to -1.
 *
 * If for example, just bits 4 through 7 are set in @buf, then @pos
 * values 4 through 7 will get mapped to 0 through 3, respectively,
 * and other @pos values will get mapped to -1.  When @pos value 7
 * gets mapped to (returns) @ord value 3 in this example, that means
 * that bit 7 is the 3rd (starting with 0th) set bit in @buf.
 *
 * The bit positions 0 through @nbits are valid positions in @buf.
 */
static int bitmap_pos_to_ord(const unsigned long *buf, unsigned int pos, unsigned int nbits)
{
        if (pos >= nbits || !test_bit(pos, buf))
                return -1;

        return bitmap_weight(buf, pos);
}

/**
 * bitmap_remap - Apply map defined by a pair of bitmaps to another bitmap
 * @dst: remapped result
 * @src: subset to be remapped
 * @old: defines domain of map
 * @new: defines range of map
 * @nbits: number of bits in each of these bitmaps
 *
 * Let @old and @new define a mapping of bit positions, such that
 * whatever position is held by the n-th set bit in @old is mapped
 * to the n-th set bit in @new.  In the more general case, allowing
 * for the possibility that the weight 'w' of @new is less than the
 * weight of @old, map the position of the n-th set bit in @old to
 * the position of the m-th set bit in @new, where m == n % w.
 *
 * If either of the @old and @new bitmaps are empty, or if @src and
 * @dst point to the same location, then this routine copies @src
 * to @dst.
 *
 * The positions of unset bits in @old are mapped to themselves
 * (the identity map).
 *
 * Apply the above specified mapping to @src, placing the result in
 * @dst, clearing any bits previously set in @dst.
 *
 * For example, let's say that @old has bits 4 through 7 set, and
 * @new has bits 12 through 15 set.  This defines the mapping of bit
 * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, and of all other
 * bit positions unchanged.  So if say @src comes into this routine
 * with bits 1, 5 and 7 set, then @dst should leave with bits 1,
 * 13 and 15 set.
 */
void bitmap_remap(unsigned long *dst, const unsigned long *src,
                  const unsigned long *old, const unsigned long *new,
                  unsigned int nbits)
{
        unsigned int oldbit, w;

        if (dst == src)         /* following doesn't handle inplace remaps */
                return;
        bitmap_zero(dst, nbits);

        w = bitmap_weight(new, nbits);
        for_each_set_bit(oldbit, src, nbits) {
                int n = bitmap_pos_to_ord(old, oldbit, nbits);

                if (n < 0 || w == 0)
                        set_bit(oldbit, dst);   /* identity map */
                else
                        set_bit(find_nth_bit(new, nbits, n % w), dst);
        }
}
EXPORT_SYMBOL(bitmap_remap);

/**
 * bitmap_bitremap - Apply map defined by a pair of bitmaps to a single bit
 * @oldbit: bit position to be mapped
 * @old: defines domain of map
 * @new: defines range of map
 * @bits: number of bits in each of these bitmaps
 *
 * Let @old and @new define a mapping of bit positions, such that
 * whatever position is held by the n-th set bit in @old is mapped
 * to the n-th set bit in @new.  In the more general case, allowing
 * for the possibility that the weight 'w' of @new is less than the
 * weight of @old, map the position of the n-th set bit in @old to
 * the position of the m-th set bit in @new, where m == n % w.
 *
 * The positions of unset bits in @old are mapped to themselves
 * (the identity map).
 *
 * Apply the above specified mapping to bit position @oldbit, returning
 * the new bit position.
 *
 * For example, let's say that @old has bits 4 through 7 set, and
 * @new has bits 12 through 15 set.  This defines the mapping of bit
 * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, and of all other
 * bit positions unchanged.  So if say @oldbit is 5, then this routine
 * returns 13.
 */
int bitmap_bitremap(int oldbit, const unsigned long *old,
                    const unsigned long *new, int bits)
{
        int w = bitmap_weight(new, bits);
        int n = bitmap_pos_to_ord(old, oldbit, bits);
        if (n < 0 || w == 0)
                return oldbit;
        else
                return find_nth_bit(new, bits, n % w);
}
EXPORT_SYMBOL(bitmap_bitremap);

#ifdef CONFIG_NUMA
/**
 * bitmap_onto - translate one bitmap relative to another
 * @dst: resulting translated bitmap
 * @orig: original untranslated bitmap
 * @relmap: bitmap relative to which translated
 * @bits: number of bits in each of these bitmaps
 *
 * Set the n-th bit of @dst iff there exists some m such that the
 * n-th bit of @relmap is set, the m-th bit of @orig is set, and
 * the n-th bit of @relmap is also the m-th _set_ bit of @relmap.
 * (If you understood the previous sentence the first time you
 * read it, you're overqualified for your current job.)
 *
 * In other words, @orig is mapped onto (surjectively) @dst,
 * using the map { <n, m> | the n-th bit of @relmap is the
 * m-th set bit of @relmap }.
 *
 * Any set bits in @orig above bit number W, where W is the
 * weight of (number of set bits in) @relmap are mapped nowhere.
 * In particular, if for all bits m set in @orig, m >= W, then
 * @dst will end up empty.  In situations where the possibility
 * of such an empty result is not desired, one way to avoid it is
 * to use the bitmap_fold() operator, below, to first fold the
 * @orig bitmap over itself so that all its set bits x are in the
 * range 0 <= x < W.  The bitmap_fold() operator does this by
 * setting the bit (m % W) in @dst, for each bit (m) set in @orig.
 *
 * Example [1] for bitmap_onto():
 *  Let's say @relmap has bits 30-39 set, and @orig has bits
 *  1, 3, 5, 7, 9 and 11 set.  Then on return from this routine,
 *  @dst will have bits 31, 33, 35, 37 and 39 set.
 *
 *  When bit 0 is set in @orig, it means turn on the bit in
 *  @dst corresponding to whatever is the first bit (if any)
 *  that is turned on in @relmap.  Since bit 0 was off in the
 *  above example, we leave off that bit (bit 30) in @dst.
 *
 *  When bit 1 is set in @orig (as in the above example), it
 *  means turn on the bit in @dst corresponding to whatever
 *  is the second bit that is turned on in @relmap.  The second
 *  bit in @relmap that was turned on in the above example was
 *  bit 31, so we turned on bit 31 in @dst.
 *
 *  Similarly, we turned on bits 33, 35, 37 and 39 in @dst,
 *  because they were the 4th, 6th, 8th and 10th set bits
 *  set in @relmap, and the 4th, 6th, 8th and 10th bits of
 *  @orig (i.e. bits 3, 5, 7 and 9) were also set.
 *
 *  When bit 11 is set in @orig, it means turn on the bit in
 *  @dst corresponding to whatever is the twelfth bit that is
 *  turned on in @relmap.  In the above example, there were
 *  only ten bits turned on in @relmap (30..39), so the fact
 *  that bit 11 was set in @orig had no effect on @dst.
 *
 * Example [2] for bitmap_fold() + bitmap_onto():
 *  Let's say @relmap has these ten bits set::
 *
 *      40 41 42 43 45 48 53 61 74 95
 *
 *  (for the curious, that's 40 plus the first ten terms of the
 *  Fibonacci sequence.)
 *
 *  Further let's say we use the following code, invoking
 *  bitmap_fold() then bitmap_onto(), as suggested above to
 *  avoid the possibility of an empty @dst result::
 *
 *      unsigned long *tmp;     // a temporary bitmap's bits
 *
 *      bitmap_fold(tmp, orig, bitmap_weight(relmap, bits), bits);
 *      bitmap_onto(dst, tmp, relmap, bits);
 *
 *  Then this table shows what various values of @dst would be, for
 *  various @orig's.  I list the zero-based positions of each set bit.
 *  The tmp column shows the intermediate result, as computed by
 *  using bitmap_fold() to fold the @orig bitmap modulo ten
 *  (the weight of @relmap):
 *
 *      =============== ============== =================
 *      @orig           tmp            @dst
 *      0               0              40
 *      1               1              41
 *      9               9              95
 *      10              0              40 [#f1]_
 *      1 3 5 7         1 3 5 7        41 43 48 61
 *      0 1 2 3 4       0 1 2 3 4      40 41 42 43 45
 *      0 9 18 27       0 9 8 7        40 61 74 95
 *      0 10 20 30      0              40
 *      0 11 22 33      0 1 2 3        40 41 42 43
 *      0 12 24 36      0 2 4 6        40 42 45 53
 *      78 102 211      1 2 8          41 42 74 [#f1]_
 *      =============== ============== =================
 *
 * .. [#f1]
 *
 *     For these marked lines, if we hadn't first done bitmap_fold()
 *     into tmp, then the @dst result would have been empty.
 *
 * If either of @orig or @relmap is empty (no set bits), then @dst
 * will be returned empty.
 *
 * If (as explained above) the only set bits in @orig are in positions
 * m where m >= W, (where W is the weight of @relmap) then @dst will
 * once again be returned empty.
 *
 * All bits in @dst not set by the above rule are cleared.
 */
void bitmap_onto(unsigned long *dst, const unsigned long *orig,
                 const unsigned long *relmap, unsigned int bits)
{
        unsigned int n, m;      /* same meaning as in above comment */

        if (dst == orig)        /* following doesn't handle inplace mappings */
                return;
        bitmap_zero(dst, bits);

        /*
         * The following code is a more efficient, but less
         * obvious, equivalent to the loop:
         *      for (m = 0; m < bitmap_weight(relmap, bits); m++) {
         *              n = find_nth_bit(orig, bits, m);
         *              if (test_bit(m, orig))
         *                      set_bit(n, dst);
         *      }
         */

        m = 0;
        for_each_set_bit(n, relmap, bits) {
                /* m == bitmap_pos_to_ord(relmap, n, bits) */
                if (test_bit(m, orig))
                        set_bit(n, dst);
                m++;
        }
}

/**
 * bitmap_fold - fold larger bitmap into smaller, modulo specified size
 * @dst: resulting smaller bitmap
 * @orig: original larger bitmap
 * @sz: specified size
 * @nbits: number of bits in each of these bitmaps
 *
 * For each bit oldbit in @orig, set bit oldbit mod @sz in @dst.
 * Clear all other bits in @dst.  See further the comment and
 * Example [2] for bitmap_onto() for why and how to use this.
 */
void bitmap_fold(unsigned long *dst, const unsigned long *orig,
                 unsigned int sz, unsigned int nbits)
{
        unsigned int oldbit;

        if (dst == orig)        /* following doesn't handle inplace mappings */
                return;
        bitmap_zero(dst, nbits);

        for_each_set_bit(oldbit, orig, nbits)
                set_bit(oldbit % sz, dst);
}
#endif /* CONFIG_NUMA */

unsigned long *bitmap_alloc(unsigned int nbits, gfp_t flags)
{
        return kmalloc_array(BITS_TO_LONGS(nbits), sizeof(unsigned long),
                             flags);
}
EXPORT_SYMBOL(bitmap_alloc);

unsigned long *bitmap_zalloc(unsigned int nbits, gfp_t flags)
{
        return bitmap_alloc(nbits, flags | __GFP_ZERO);
}
EXPORT_SYMBOL(bitmap_zalloc);

unsigned long *bitmap_alloc_node(unsigned int nbits, gfp_t flags, int node)
{
        return kmalloc_array_node(BITS_TO_LONGS(nbits), sizeof(unsigned long),
                                  flags, node);
}
EXPORT_SYMBOL(bitmap_alloc_node);

unsigned long *bitmap_zalloc_node(unsigned int nbits, gfp_t flags, int node)
{
        return bitmap_alloc_node(nbits, flags | __GFP_ZERO, node);
}
EXPORT_SYMBOL(bitmap_zalloc_node);

void bitmap_free(const unsigned long *bitmap)
{
        kfree(bitmap);
}
EXPORT_SYMBOL(bitmap_free);
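/*
 * Editorial illustration (not part of the original file): typical lifecycle
 * of a heap-allocated bitmap using the helpers above, assuming a context
 * where sleeping allocation is allowed:
 *
 *      unsigned long *map;
 *
 *      map = bitmap_zalloc(1024, GFP_KERNEL);  // 1024 bits, zeroed
 *      if (!map)
 *              return -ENOMEM;
 *
 *      set_bit(42, map);
 *      ...
 *      bitmap_free(map);                       // plain kfree() underneath
 */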

static void devm_bitmap_free(void *data)
{
        unsigned long *bitmap = data;

        bitmap_free(bitmap);
}

unsigned long *devm_bitmap_alloc(struct device *dev,
                                 unsigned int nbits, gfp_t flags)
{
        unsigned long *bitmap;
        int ret;

        bitmap = bitmap_alloc(nbits, flags);
        if (!bitmap)
                return NULL;

        ret = devm_add_action_or_reset(dev, devm_bitmap_free, bitmap);
        if (ret)
                return NULL;

        return bitmap;
}
EXPORT_SYMBOL_GPL(devm_bitmap_alloc);

unsigned long *devm_bitmap_zalloc(struct device *dev,
                                  unsigned int nbits, gfp_t flags)
{
        return devm_bitmap_alloc(dev, nbits, flags | __GFP_ZERO);
}
EXPORT_SYMBOL_GPL(devm_bitmap_zalloc);

#if BITS_PER_LONG == 64
/**
 * bitmap_from_arr32 - copy the contents of u32 array of bits to bitmap
 * @bitmap: array of unsigned longs, the destination bitmap
 * @buf: array of u32 (in host byte order), the source bitmap
 * @nbits: number of bits in @bitmap
 */
void bitmap_from_arr32(unsigned long *bitmap, const u32 *buf, unsigned int nbits)
{
        unsigned int i, halfwords;

        halfwords = DIV_ROUND_UP(nbits, 32);
        for (i = 0; i < halfwords; i++) {
                bitmap[i/2] = (unsigned long) buf[i];
                if (++i < halfwords)
                        bitmap[i/2] |= ((unsigned long) buf[i]) << 32;
        }

        /* Clear tail bits in last word beyond nbits. */
        if (nbits % BITS_PER_LONG)
                bitmap[(halfwords - 1) / 2] &= BITMAP_LAST_WORD_MASK(nbits);
}
EXPORT_SYMBOL(bitmap_from_arr32);

/**
 * bitmap_to_arr32 - copy the contents of bitmap to a u32 array of bits
 * @buf: array of u32 (in host byte order), the dest bitmap
 * @bitmap: array of unsigned longs, the source bitmap
 * @nbits: number of bits in @bitmap
 */
void bitmap_to_arr32(u32 *buf, const unsigned long *bitmap, unsigned int nbits)
{
        unsigned int i, halfwords;

        halfwords = DIV_ROUND_UP(nbits, 32);
        for (i = 0; i < halfwords; i++) {
                buf[i] = (u32) (bitmap[i/2] & UINT_MAX);
                if (++i < halfwords)
                        buf[i] = (u32) (bitmap[i/2] >> 32);
        }

        /* Clear tail bits in last element of array beyond nbits. */
        if (nbits % BITS_PER_LONG)
                buf[halfwords - 1] &= (u32) (UINT_MAX >> ((-nbits) & 31));
}
EXPORT_SYMBOL(bitmap_to_arr32);
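/*
 * Editorial note (not part of the original file): these conversions exist
 * because some user-visible interfaces pass bitmaps as u32 arrays, while
 * the in-kernel word layout depends on BITS_PER_LONG.  On 32-bit kernels
 * bitmap.h provides bitmap_from_arr32()/bitmap_to_arr32() as plain copies,
 * since the layouts already match, so callers can use them unconditionally.
 * A hypothetical round trip:
 *
 *      DECLARE_BITMAP(map, 100);
 *      u32 buf[DIV_ROUND_UP(100, 32)];
 *
 *      bitmap_to_arr32(buf, map, 100);         // export, tail bits cleared
 *      bitmap_from_arr32(map, buf, 100);       // import back
 */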
#endif

#if BITS_PER_LONG == 32
/**
 * bitmap_from_arr64 - copy the contents of u64 array of bits to bitmap
 * @bitmap: array of unsigned longs, the destination bitmap
 * @buf: array of u64 (in host byte order), the source bitmap
 * @nbits: number of bits in @bitmap
 */
void bitmap_from_arr64(unsigned long *bitmap, const u64 *buf, unsigned int nbits)
{
        int n;

        for (n = nbits; n > 0; n -= 64) {
                u64 val = *buf++;

                *bitmap++ = val;
                if (n > 32)
                        *bitmap++ = val >> 32;
        }

        /*
         * Clear tail bits in the last word beyond nbits.
         *
         * Negative index is OK because here we point to the word next
         * to the last word of the bitmap, except for nbits == 0, which
         * is tested implicitly.
         */
        if (nbits % BITS_PER_LONG)
                bitmap[-1] &= BITMAP_LAST_WORD_MASK(nbits);
}
EXPORT_SYMBOL(bitmap_from_arr64);

/**
 * bitmap_to_arr64 - copy the contents of bitmap to a u64 array of bits
 * @buf: array of u64 (in host byte order), the dest bitmap
 * @bitmap: array of unsigned longs, the source bitmap
 * @nbits: number of bits in @bitmap
 */
void bitmap_to_arr64(u64 *buf, const unsigned long *bitmap, unsigned int nbits)
{
        const unsigned long *end = bitmap + BITS_TO_LONGS(nbits);

        while (bitmap < end) {
                *buf = *bitmap++;
                if (bitmap < end)
                        *buf |= (u64)(*bitmap++) << 32;
                buf++;
        }

        /* Clear tail bits in the last element of array beyond nbits. */
        if (nbits % 64)
                buf[-1] &= GENMASK_ULL((nbits - 1) % 64, 0);
}
EXPORT_SYMBOL(bitmap_to_arr64);
#endif