[PATCH 3/7] PCI/ASPM: Return U32_MAX instead of bit magic construct

Posted by Ilpo Järvinen 2 years, 4 months ago
Instead of returning the somewhat obscure -1U, make the code's intent of
returning the maximum representable value more obvious by returning
U32_MAX.

Signed-off-by: Ilpo Järvinen <ilpo.jarvinen@linux.intel.com>
---
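Note (not part of the commit message): a minimal standalone userspace C
sketch, assuming a U32_MAX definition mirroring include/linux/limits.h
and a simplified copy of one of the touched helpers, showing that -1U
and U32_MAX are the same u32 value, so the change is purely about
readability:

	#include <stdint.h>
	#include <stdio.h>

	typedef uint32_t u32;
	#define U32_MAX	((u32)~0U)	/* as in include/linux/limits.h */

	/* old style: unsigned -1 wraps around to 0xffffffff */
	static u32 calc_l1_acceptable_old(u32 encoding)
	{
		if (encoding == 0x7)
			return -1U;
		return 1000 << encoding;
	}

	/* new style: the maximum representable value, stated explicitly */
	static u32 calc_l1_acceptable_new(u32 encoding)
	{
		if (encoding == 0x7)
			return U32_MAX;
		return 1000 << encoding;
	}

	int main(void)
	{
		/* both print 4294967295, and the comparison prints 1 */
		printf("%u %u %d\n", calc_l1_acceptable_old(0x7),
		       calc_l1_acceptable_new(0x7),
		       calc_l1_acceptable_old(0x7) == calc_l1_acceptable_new(0x7));
		return 0;
	}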
 drivers/pci/pcie/aspm.c | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/drivers/pci/pcie/aspm.c b/drivers/pci/pcie/aspm.c
index 06f175d8dee5..4cd11ab27233 100644
--- a/drivers/pci/pcie/aspm.c
+++ b/drivers/pci/pcie/aspm.c
@@ -9,6 +9,7 @@
 
 #include <linux/bitfield.h>
 #include <linux/kernel.h>
+#include <linux/limits.h>
 #include <linux/math.h>
 #include <linux/module.h>
 #include <linux/moduleparam.h>
@@ -279,7 +280,7 @@ static u32 calc_l0s_latency(u32 lnkcap)
 static u32 calc_l0s_acceptable(u32 encoding)
 {
 	if (encoding == 0x7)
-		return -1U;
+		return U32_MAX;
 	return (64 << encoding);
 }
 
@@ -297,7 +298,7 @@ static u32 calc_l1_latency(u32 lnkcap)
 static u32 calc_l1_acceptable(u32 encoding)
 {
 	if (encoding == 0x7)
-		return -1U;
+		return U32_MAX;
 	return (1000 << encoding);
 }
 
-- 
2.30.2