summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorSimo Sorce <ssorce@redhat.com>2009-05-28 09:24:05 -0400
committerSimo Sorce <ssorce@redhat.com>2009-05-28 09:55:19 -0400
commit8370c839486e7205e9131451e2ec04593e427e27 (patch)
tree7f0f2b02e92299ec465ff34c9546c9a631f1a57c
parent4fff9b914556f91f2caeca637fa8e76ceba91aed (diff)
downloadsssd-8370c839486e7205e9131451e2ec04593e427e27.tar.gz
sssd-8370c839486e7205e9131451e2ec04593e427e27.tar.xz
sssd-8370c839486e7205e9131451e2ec04593e427e27.zip
Use PTR_2_INT for alignment calculations
This version is pointer-size agnostic, making this code safe on both 32-bit and 64-bit platforms.
-rw-r--r--server/util/nss_sha512crypt.c9
1 file changed, 5 insertions, 4 deletions
diff --git a/server/util/nss_sha512crypt.c b/server/util/nss_sha512crypt.c
index 47f676d5c..8ba16d4aa 100644
--- a/server/util/nss_sha512crypt.c
+++ b/server/util/nss_sha512crypt.c
@@ -92,6 +92,7 @@ static inline void b64_from_24bit(char **dest, size_t *len, size_t n,
*dest += i;
}
+#define PTR_2_INT(x) ((x) - ((__typeof__ (x)) NULL))
#define ALIGN64 __alignof__(uint64_t)
static int sha512_crypt_r(const char *key,
@@ -142,14 +143,14 @@ static int sha512_crypt_r(const char *key,
salt_len = MIN(strcspn(salt, "$"), SALT_LEN_MAX);
key_len = strlen(key);
- if ((((uint64_t)key) % ALIGN64) != 0) {
+ if ((PTR_2_INT(key) % ALIGN64) != 0) {
tmp = (char *)alloca(key_len + ALIGN64);
- key = copied_key = memcpy(tmp + ALIGN64 - (((uint64_t)tmp) % ALIGN64), key, key_len);
+ key = copied_key = memcpy(tmp + ALIGN64 - PTR_2_INT(tmp) % ALIGN64, key, key_len);
}
- if (((uint64_t)salt) % ALIGN64 != 0) {
+ if (PTR_2_INT(salt) % ALIGN64 != 0) {
tmp = (char *)alloca(salt_len + ALIGN64);
- salt = copied_salt = memcpy(tmp + ALIGN64 - ((uint64_t)tmp) % ALIGN64, salt, salt_len);
+ salt = copied_salt = memcpy(tmp + ALIGN64 - PTR_2_INT(tmp) % ALIGN64, salt, salt_len);
}
if (!nspr_nss_init_done) {