Diffstat (limited to 'arch/powerpc')
-rw-r--r--  arch/powerpc/sysdev/msi_bitmap.c  36
1 file changed, 25 insertions, 11 deletions
diff --git a/arch/powerpc/sysdev/msi_bitmap.c b/arch/powerpc/sysdev/msi_bitmap.c
index a7c7a9fc7530..0c75214b6f92 100644
--- a/arch/powerpc/sysdev/msi_bitmap.c
+++ b/arch/powerpc/sysdev/msi_bitmap.c
@@ -20,32 +20,37 @@ int msi_bitmap_alloc_hwirqs(struct msi_bitmap *bmp, int num)
 	int offset, order = get_count_order(num);
 
 	spin_lock_irqsave(&bmp->lock, flags);
-	/*
-	 * This is fast, but stricter than we need. We might want to add
-	 * a fallback routine which does a linear search with no alignment.
-	 */
-	offset = bitmap_find_free_region(bmp->bitmap, bmp->irq_count, order);
+
+	offset = bitmap_find_next_zero_area(bmp->bitmap, bmp->irq_count, 0,
+					    num, (1 << order) - 1);
+	if (offset > bmp->irq_count)
+		goto err;
+
+	bitmap_set(bmp->bitmap, offset, num);
 	spin_unlock_irqrestore(&bmp->lock, flags);
 
-	pr_debug("msi_bitmap: allocated 0x%x (2^%d) at offset 0x%x\n",
-		 num, order, offset);
+	pr_debug("msi_bitmap: allocated 0x%x at offset 0x%x\n", num, offset);
 
 	return offset;
+err:
+	spin_unlock_irqrestore(&bmp->lock, flags);
+	return -ENOMEM;
 }
+EXPORT_SYMBOL(msi_bitmap_alloc_hwirqs);
 
 void msi_bitmap_free_hwirqs(struct msi_bitmap *bmp, unsigned int offset,
 			    unsigned int num)
 {
 	unsigned long flags;
-	int order = get_count_order(num);
 
-	pr_debug("msi_bitmap: freeing 0x%x (2^%d) at offset 0x%x\n",
-		 num, order, offset);
+	pr_debug("msi_bitmap: freeing 0x%x at offset 0x%x\n",
+		 num, offset);
 
 	spin_lock_irqsave(&bmp->lock, flags);
-	bitmap_release_region(bmp->bitmap, offset, order);
+	bitmap_clear(bmp->bitmap, offset, num);
 	spin_unlock_irqrestore(&bmp->lock, flags);
 }
+EXPORT_SYMBOL(msi_bitmap_free_hwirqs);
 
 void msi_bitmap_reserve_hwirq(struct msi_bitmap *bmp, unsigned int hwirq)
 {
@@ -180,6 +185,15 @@ static void __init test_basics(void)
 	msi_bitmap_free_hwirqs(&bmp, size / 2, 1);
 	check(msi_bitmap_alloc_hwirqs(&bmp, 1) == size / 2);
 
+	/* Check we get a naturally aligned offset */
+	check(msi_bitmap_alloc_hwirqs(&bmp, 2) % 2 == 0);
+	check(msi_bitmap_alloc_hwirqs(&bmp, 4) % 4 == 0);
+	check(msi_bitmap_alloc_hwirqs(&bmp, 8) % 8 == 0);
+	check(msi_bitmap_alloc_hwirqs(&bmp, 9) % 16 == 0);
+	check(msi_bitmap_alloc_hwirqs(&bmp, 3) % 4 == 0);
+	check(msi_bitmap_alloc_hwirqs(&bmp, 7) % 8 == 0);
+	check(msi_bitmap_alloc_hwirqs(&bmp, 121) % 128 == 0);
+
 	msi_bitmap_free(&bmp);
 
 	/* Clients may check bitmap == NULL for "not-allocated" */
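
The change above replaces the old power-of-two allocator: msi_bitmap_alloc_hwirqs() now calls bitmap_find_next_zero_area() with an alignment mask of (1 << order) - 1 and then bitmap_set(), so a request for num hardware IRQs still lands on a naturally aligned offset but only marks num bits busy, rather than reserving a whole 2^order block as bitmap_find_free_region() did; msi_bitmap_free_hwirqs() correspondingly clears exactly num bits. The new test_basics() checks exercise that alignment (e.g. a 9-IRQ request starts on a multiple of 16). What follows is a minimal userspace sketch of the same search-and-set idea, not the kernel implementation; the helper names and the bool-array bitmap are inventions of the sketch.

/*
 * Minimal userspace sketch (not kernel code) of the new allocation
 * scheme: find the first naturally aligned run of 'num' free bits
 * and mark only those 'num' bits busy. Helper names are invented
 * for this sketch.
 */
#include <stdio.h>
#include <stdbool.h>

#define NBITS 1024

static bool bitmap[NBITS];              /* one entry per hwirq, false = free */

/* Alignment mask for the smallest power of two >= num, i.e. (1 << order) - 1. */
static unsigned int align_mask(unsigned int num)
{
	unsigned int order = 0;

	while ((1u << order) < num)
		order++;
	return (1u << order) - 1;
}

/* First mask-aligned offset with 'num' consecutive free bits, or -1. */
static int find_zero_area(unsigned int num, unsigned int mask)
{
	for (unsigned int start = 0; start + num <= NBITS;
	     start = (start + mask + 1) & ~mask) {
		unsigned int i;

		for (i = 0; i < num && !bitmap[start + i]; i++)
			;
		if (i == num)
			return start;
	}
	return -1;
}

static int alloc_hwirqs(unsigned int num)
{
	int offset = find_zero_area(num, align_mask(num));

	if (offset < 0)
		return -1;                      /* stands in for -ENOMEM */
	for (unsigned int i = 0; i < num; i++)
		bitmap[offset + i] = true;      /* only 'num' bits, not 2^order */
	return offset;
}

int main(void)
{
	/* Alignment behaviour comparable to the new test_basics() checks. */
	printf("9 irqs -> offset %d\n", alloc_hwirqs(9));   /* 0: 16-aligned */
	printf("3 irqs -> offset %d\n", alloc_hwirqs(3));   /* 12: first free 4-aligned run */
	printf("7 irqs -> offset %d\n", alloc_hwirqs(7));   /* 16: first free 8-aligned run */
	return 0;
}

Under the old bitmap_find_free_region() behaviour the 9-IRQ allocation would have consumed the entire 16-bit block, so the later 3-IRQ request could not have landed at offset 12; the exact-size bitmap_set() is what frees up those leftover bits.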