Decimal To ASCII
This algorithm converts a string of fixed-width, three-digit decimal character codes (e.g. "072105" for 72 and 105) into the corresponding ASCII text.
/*
 * Returns a newly heap-allocated copy of `count` characters of `str`
 * starting at `index`.
 *
 * Returns an empty heap-allocated string when the requested range is not
 * fully inside `str`, and NULL on allocation failure.  The caller owns the
 * returned buffer and must free() it.
 */
char* GetSubString(char* str, int index, int count) {
    int strLen = (int)strlen(str);
    int lastIndex = index + count;
    /* BUG FIX: the original test was `index >= 0 && lastIndex > strLen`,
     * which let NEGATIVE indices fall through to the copy loop below and
     * read out of bounds (undefined behavior).  Reject any range that is
     * not entirely within [0, strLen]. */
    if (index < 0 || count < 0 || lastIndex > strLen) {
        /* Heap-allocate the empty result too, so every non-NULL return
         * can be free()d uniformly (the original returned a string
         * literal here, which callers must not free). */
        char* empty = malloc(1);
        if (empty != NULL) empty[0] = '\0';
        return empty;
    }
    char* subStr = malloc((size_t)count + 1);
    if (subStr == NULL) return NULL;  /* CERT MEM32-C: check allocation */
    for (int i = 0; i < count; i++) {
        subStr[i] = str[index + i];
    }
    subStr[count] = '\0';
    return subStr;
}
/*
 * Concatenates str1 and str2 into a freshly malloc'd, NUL-terminated
 * string.  The caller owns (and must free) the returned buffer.
 */
char* AppendString(const char* str1, const char* str2) {
    size_t len1 = strlen(str1);
    size_t len2 = strlen(str2);
    char* result = malloc(len1 + len2 + 1);
    char* dst = result;
    /* Walk both inputs with pointers instead of indexed loops. */
    for (const char* src = str1; *src != '\0'; src++)
        *dst++ = *src;
    for (const char* src = str2; *src != '\0'; src++)
        *dst++ = *src;
    *dst = '\0';
    return result;
}
/*
 * Wraps a single character in a freshly allocated one-character C string.
 * The caller must free() the result.
 */
char* CharToString(char c) {
    char* out = malloc(2 * sizeof *out);
    out[0] = c;
    out[1] = '\0';
    return out;
}
/*
 * Decodes a string of fixed-width, 3-digit decimal character codes into a
 * newly allocated ASCII string, e.g. "072105" -> "Hi".
 *
 * Any trailing group shorter than 3 digits is ignored.  Returns NULL on
 * allocation failure; the caller owns (and must free) the result.
 *
 * BUG FIX: the original rebuilt the string via AppendString/CharToString/
 * GetSubString on every iteration and never freed any intermediate buffer,
 * leaking O(n) allocations and doing O(n^2) copying; it also returned a
 * string literal for inputs shorter than 3, which callers cannot free.
 * This version writes each decoded character directly into one buffer.
 */
char* DecimalToASCII(char* dec) {
    size_t decLen = strlen(dec);
    size_t count = decLen / 3;      /* one output character per 3-digit group */
    char* ascii = malloc(count + 1);
    if (ascii == NULL) return NULL;
    for (size_t i = 0; i < count; i++) {
        /* Copy one 3-digit group into a small NUL-terminated scratch buffer
         * so atoi sees exactly one code. */
        char group[4];
        group[0] = dec[3 * i];
        group[1] = dec[3 * i + 1];
        group[2] = dec[3 * i + 2];
        group[3] = '\0';
        ascii[i] = (char)atoi(group);
    }
    ascii[count] = '\0';
    return ascii;
}
Example
char* data = "080114111103114097109109105110103032065108103111114105116104109115";
char* value = DecimalToASCII(data);
Output
Programming Algorithms