@@ -174,14 +174,19 @@ int __pci_read_base(struct pci_dev *dev, enum pci_bar_type type,
 	pci_read_config_dword(dev, pos, &sz);
 	pci_write_config_dword(dev, pos, l);
 
+	if (!sz)
+		goto fail;	/* BAR not implemented */
+
 	/*
 	 * All bits set in sz means the device isn't working properly.
-	 * If the BAR isn't implemented, all bits must be 0. If it's a
-	 * memory BAR or a ROM, bit 0 must be clear; if it's an io BAR, bit
-	 * 1 must be clear.
+	 * If it's a memory BAR or a ROM, bit 0 must be clear; if it's
+	 * an io BAR, bit 1 must be clear.
 	 */
-	if (!sz || sz == 0xffffffff)
+	if (sz == 0xffffffff) {
+		dev_err(&dev->dev, "reg %x: invalid size %#x; broken device?\n",
+			pos, sz);
 		goto fail;
+	}
 
 	/*
 	 * I don't know how l can have all bits set. Copied from old code.
@@ -244,13 +249,17 @@ int __pci_read_base(struct pci_dev *dev, enum pci_bar_type type,
 			pos, res);
 		}
 	} else {
-		sz = pci_size(l, sz, mask);
+		u32 size = pci_size(l, sz, mask);
 
-		if (!sz)
+		if (!size) {
+			dev_err(&dev->dev, "reg %x: invalid size "
+				"(l %#x sz %#x mask %#x); broken device?",
+				pos, l, sz, mask);
 			goto fail;
+		}
 
 		res->start = l;
-		res->end = l + sz;
+		res->end = l + size;
 
 		dev_printk(KERN_DEBUG, &dev->dev, "reg %x: %pR\n", pos, res);
 	}
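
For reference, the decode that pci_size() performs on the (l, sz, mask) triple follows the standard PCI BAR sizing scheme: write all 1s to the BAR, read it back, and the lowest writable bit gives the decode granularity. Below is a minimal standalone sketch of that decode under stated assumptions; the helper name bar_extent(), the example register values, and the main() harness are illustrative and not part of this patch.

#include <stdint.h>
#include <stdio.h>

/*
 * Hypothetical helper mirroring the pci_size() decode used above:
 * "base" is the original BAR value, "maxbase" is what the BAR reads back
 * after all 1s were written to it, and "mask" strips the type bits
 * (~0xfULL for memory BARs, ~0x3ULL for I/O BARs).  Returns the extent
 * (size - 1), i.e. what the patch adds to res->start to get res->end,
 * or 0 when the readback cannot encode a valid size.
 */
static uint64_t bar_extent(uint64_t base, uint64_t maxbase, uint64_t mask)
{
	uint64_t size = maxbase & mask;		/* keep the size-encoding bits */

	if (!size)				/* nothing writable: no BAR */
		return 0;

	/* lowest writable bit is the decode granularity; extent = size - 1 */
	size = (size & ~(size - 1)) - 1;

	/* base == maxbase is only plausible if the BAR already held all 1s */
	if (base == maxbase && ((base | size) & mask) != mask)
		return 0;

	return size;
}

int main(void)
{
	/* example: a 1 MiB memory BAR at 0xfe000000 reads back as 0xfff00000 */
	uint64_t extent = bar_extent(0xfe000000, 0xfff00000, ~0xfULL);

	printf("BAR covers %#llx bytes, end offset %#llx\n",
	       (unsigned long long)(extent + 1), (unsigned long long)extent);
	return 0;
}

A zero return from this decode is exactly the "invalid size" case the patch now reports with dev_err() instead of failing silently.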