
A question about converting a hex string to ASCII codes

#include <string.h>
#include <stdio.h>
#include <stdlib.h>

int set_hex(char *in, unsigned char *out, int size);

int main(int argc, char* argv[]) {
    char *in = (char *) malloc(sizeof(char) * (100 + 1));
    sprintf(in, "303132333435363738393A3B3C3D3E3F");  // ASCII "0123456789:;<=>?"
    printf("in=%s\n", in);

    unsigned char *out = (unsigned char *) malloc(sizeof(unsigned char) * (100 + 1));
    set_hex(in, out, 100);
    printf("in=%s\n", in);
    printf("out=%s\n", out);

    return 0;
}

int set_hex(char *in, unsigned char *out, int size) {
    int i, n;
    unsigned char j;

    n = strlen(in);

    if (n > (size * 2))
        return 0;

    memset(out, 0, size);

    for (i = 0; i < n; i++) {
        j = (unsigned char) *in;
        *(in++) = '\0';

        if (j == 0)
            break;

        if ((j >= '0') && (j <= '9'))
            j = j - '0';
        else if ((j >= 'A') && (j <= 'F'))
            j = j - 'A' + 10;
        else if ((j >= 'a') && (j <= 'f'))
            j = j - 'a' + 10;
        else
            return 0;

        const int index_ = i / 2;

        if (i & 1)
            out[index_] = out[index_] | j;
        else
            out[index_] = (j << 4);

        printf("i=%2d, index_=%2d, in=%s, out=%s\n", i, index_, in, out);
    }

    return 1;
}

in=303132333435363738393A3B3C3D3E3F
i= 0, index_= 0, in=03132333435363738393A3B3C3D3E3F, out=0
i= 1, index_= 0, in=3132333435363738393A3B3C3D3E3F, out=0
i= 2, index_= 1, in=132333435363738393A3B3C3D3E3F, out=00
i= 3, index_= 1, in=32333435363738393A3B3C3D3E3F, out=01
i= 4, index_= 2, in=2333435363738393A3B3C3D3E3F, out=010
i= 5, index_= 2, in=333435363738393A3B3C3D3E3F, out=012
i= 6, index_= 3, in=33435363738393A3B3C3D3E3F, out=0120
i= 7, index_= 3, in=3435363738393A3B3C3D3E3F, out=0123
i= 8, index_= 4, in=435363738393A3B3C3D3E3F, out=01230
i= 9, index_= 4, in=35363738393A3B3C3D3E3F, out=01234
i=10, index_= 5, in=5363738393A3B3C3D3E3F, out=012340
i=11, index_= 5, in=363738393A3B3C3D3E3F, out=012345
i=12, index_= 6, in=63738393A3B3C3D3E3F, out=0123450
i=13, index_= 6, in=3738393A3B3C3D3E3F, out=0123456
i=14, index_= 7, in=738393A3B3C3D3E3F, out=01234560
i=15, index_= 7, in=38393A3B3C3D3E3F, out=01234567
i=16, index_= 8, in=8393A3B3C3D3E3F, out=012345670
i=17, index_= 8, in=393A3B3C3D3E3F, out=012345678
i=18, index_= 9, in=93A3B3C3D3E3F, out=0123456780
i=19, index_= 9, in=3A3B3C3D3E3F, out=0123456789
i=20, index_=10, in=A3B3C3D3E3F, out=01234567890
i=21, index_=10, in=3B3C3D3E3F, out=0123456789:
i=22, index_=11, in=B3C3D3E3F, out=0123456789:0
i=23, index_=11, in=3C3D3E3F, out=0123456789:;
i=24, index_=12, in=C3D3E3F, out=0123456789:;0
i=25, index_=12, in=3D3E3F, out=0123456789:;<
i=26, index_=13, in=D3E3F, out=0123456789:;?

I have thought about this for a long time and still cannot figure out what these two statements actually mean:

out[index_] = out[index_] | j;
out[index_] = (j << 4);

Thanks, everyone, for helping me clear this up!

The set_hex function comes from openssl/apps/enc.c.


Two hexadecimal digits correspond to one ASCII byte, and one hexadecimal digit corresponds to 4 binary bits.

const int index_ = i / 2;

if (i & 1)
    out[index_] = out[index_] | j;
else
    out[index_] = (j << 4);

When i is even, out[index_] = (j << 4); is executed.
Suppose j currently holds 0000xxxx (binary, with the high 4 bits all 0). Shifting j left by 4 bits turns xxxx into xxxx0000.

When i is odd, out[index_] = out[index_] | j; is executed.
Suppose j now holds 0000yyyy. Bitwise-ORing it with the stored xxxx0000 produces xxxxyyyy.

In this way the two text characters XY are converted into the single binary value xxxxyyyy.
For example, '30' -> binary 00110000 -> decimal 48 -> the ASCII code for the digit '0'.
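
To make the two statements concrete, here is a minimal, self-contained sketch (not the OpenSSL code itself; the helper name hex_digit_value is made up for illustration) that packs one pair of hex characters into a single byte exactly as described above:

#include <stdio.h>

/* Illustrative helper (not part of set_hex): map one hex character to its 4-bit value. */
static unsigned char hex_digit_value(char c) {
    if (c >= '0' && c <= '9') return (unsigned char)(c - '0');
    if (c >= 'A' && c <= 'F') return (unsigned char)(c - 'A' + 10);
    if (c >= 'a' && c <= 'f') return (unsigned char)(c - 'a' + 10);
    return 0xFF;  /* not a hex digit */
}

int main(void) {
    char hi = '3', lo = '0';  /* the text pair "30" */
    unsigned char byte;

    byte = (unsigned char)(hex_digit_value(hi) << 4);  /* even step: 0011 -> 00110000 */
    byte = byte | hex_digit_value(lo);                  /* odd step: 00110000 | 00000000 -> 00110000 */

    printf("\"%c%c\" -> 0x%02X = %d = '%c'\n", hi, lo, byte, byte, byte);
    return 0;
}

Running it prints "30" -> 0x30 = 48 = '0', which matches the first byte produced in the trace above.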
