Diffstat (limited to 'py')
-rw-r--r--  py/mpconfig.h | 1 -
-rw-r--r--  py/runtime.c  | 8 +++++---
2 files changed, 5 insertions, 4 deletions
diff --git a/py/mpconfig.h b/py/mpconfig.h
index d9a30cd30..3c1ed28d8 100644
--- a/py/mpconfig.h
+++ b/py/mpconfig.h
@@ -1538,7 +1538,6 @@ typedef double mp_float_t;
#ifndef BITS_PER_BYTE
#define BITS_PER_BYTE (8)
#endif
-#define BITS_PER_WORD (BITS_PER_BYTE * BYTES_PER_WORD)
// mp_int_t value with most significant bit set
#define WORD_MSBIT_HIGH (((mp_uint_t)1) << (BYTES_PER_WORD * 8 - 1))
diff --git a/py/runtime.c b/py/runtime.c
index c12271f4e..2e2fa1d17 100644
--- a/py/runtime.c
+++ b/py/runtime.c
@@ -387,7 +387,9 @@ mp_obj_t mp_binary_op(mp_binary_op_t op, mp_obj_t lhs, mp_obj_t rhs) {
if (rhs_val < 0) {
// negative shift not allowed
mp_raise_ValueError(MP_ERROR_TEXT("negative shift count"));
- } else if (rhs_val >= (mp_int_t)BITS_PER_WORD || lhs_val > (MP_SMALL_INT_MAX >> rhs_val) || lhs_val < (MP_SMALL_INT_MIN >> rhs_val)) {
+ } else if (rhs_val >= (mp_int_t)(sizeof(lhs_val) * BITS_PER_BYTE)
+ || lhs_val > (MP_SMALL_INT_MAX >> rhs_val)
+ || lhs_val < (MP_SMALL_INT_MIN >> rhs_val)) {
// left-shift will overflow, so use higher precision integer
lhs = mp_obj_new_int_from_ll(lhs_val);
goto generic_binary_op;
@@ -404,10 +406,10 @@ mp_obj_t mp_binary_op(mp_binary_op_t op, mp_obj_t lhs, mp_obj_t rhs) {
mp_raise_ValueError(MP_ERROR_TEXT("negative shift count"));
} else {
// standard precision is enough for right-shift
- if (rhs_val >= (mp_int_t)BITS_PER_WORD) {
+ if (rhs_val >= (mp_int_t)(sizeof(lhs_val) * BITS_PER_BYTE)) {
// Shifting by large amounts is undefined behavior
// in C and is CPU-dependent; propagate sign bit.
- rhs_val = BITS_PER_WORD - 1;
+ rhs_val = sizeof(lhs_val) * BITS_PER_BYTE - 1;
}
lhs_val >>= rhs_val;
}
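
For reference, a minimal standalone C sketch of the pattern this patch switches to: the shift-width bound is derived from the operand's own type with sizeof(lhs_val) * BITS_PER_BYTE instead of a separate BITS_PER_WORD macro, so the left-shift overflow test and the right-shift clamp stay correct for whatever width mp_int_t happens to be. The SMALL_INT_MAX/SMALL_INT_MIN values, the intptr_t operand type, and the helper names below are illustrative stand-ins, not MicroPython API.

/* Standalone sketch (not MicroPython source) of the guard pattern used
 * in this patch: the operand width comes from sizeof on the operand's
 * own type rather than a separate BITS_PER_WORD macro. */
#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

#define BITS_PER_BYTE (8)

/* Assumed stand-ins for the MicroPython small-int range. */
#define SMALL_INT_MAX (INTPTR_MAX >> 1)
#define SMALL_INT_MIN (INTPTR_MIN >> 1)

/* True if lhs << rhs stays within the small-int range and the shift
 * count is below the operand width (a count >= width is UB in C). */
static bool small_int_lshift_fits(intptr_t lhs, intptr_t rhs) {
    if (rhs >= (intptr_t)(sizeof(lhs) * BITS_PER_BYTE)) {
        return false;
    }
    return lhs <= (SMALL_INT_MAX >> rhs) && lhs >= (SMALL_INT_MIN >> rhs);
}

/* Right shift with the same clamp the patch applies: counts at or above
 * the operand width are reduced to width - 1 so the sign bit propagates. */
static intptr_t small_int_rshift(intptr_t lhs, intptr_t rhs) {
    if (rhs >= (intptr_t)(sizeof(lhs) * BITS_PER_BYTE)) {
        rhs = sizeof(lhs) * BITS_PER_BYTE - 1;
    }
    return lhs >> rhs;
}

int main(void) {
    printf("1 << 62 fits small int: %d\n", small_int_lshift_fits(1, 62));
    printf("-1 >> 1000 clamps to  : %ld\n", (long)small_int_rshift(-1, 1000));
    return 0;
}

In the runtime.c hunks above the same expressions appear inline; the false/overflow case falls back to mp_obj_new_int_from_ll and the generic big-int path rather than returning a flag as in this sketch.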