Below is my current char* to hex string function. I wrote it as an exercise in bit manipulation. It takes ~7ms on an AMD Athlon MP 2800+ to hexify a 10 million byte array. Is there a way to make it faster?
The function as originally posted produced incorrect output even when _hex2asciiU_value was fully specified. The following code works, and on my 2.33 GHz MacBook Pro runs in about 1.9 seconds for 200,000,000 characters.
#include <cstdio>
#include <cstdlib>
#include <ctime>
#include <iostream>
#include <string>

using namespace std;
// Table of 256 precomputed two-digit hex strings ("00".."FF"), filled in main().
static const size_t _h2alen = 256;
static char _hex2asciiU_value[_h2alen][3];
string char_to_hex( const unsigned char* _pArray, unsigned int _len )
{
    string str;
    str.resize(_len * 2); // two hex digits per input byte
    char* pszHex = &str[0];
    const unsigned char* pEnd = _pArray + _len;
    // Each input byte indexes its precomputed two-character entry in the table.
    for( const unsigned char* pChar = _pArray; pChar != pEnd; pChar++, pszHex += 2 ) {
        pszHex[0] = _hex2asciiU_value[*pChar][0];
        pszHex[1] = _hex2asciiU_value[*pChar][1];
    }
    return str;
}
int main() {
    // Build the lookup table: entry i holds the two uppercase hex digits of i.
    for( size_t i = 0; i < _h2alen; i++ ) {
        snprintf(_hex2asciiU_value[i], 3, "%02X", (unsigned)i);
    }

    size_t len = 200000000;
    char* a = new char[len];
    string t1;
    clock_t start;

    srand(time(NULL));

    // Fill the input with random bytes, then time the conversion.
    for( size_t i = 0; i < len; i++ ) a[i] = rand() & 0xFF;

    start = clock();
    t1 = char_to_hex((const unsigned char*)a, len);
    cout << "char_to_hex conversion took ---> "
         << (clock() - start) / (double)CLOCKS_PER_SEC << " seconds\n";

    delete[] a;
}
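For comparison, here is a minimal sketch of the shift-and-mask approach the original exercise was based on; the names digits and to_hex_shift are mine, not from the code above, and it reuses the includes from the listing. Instead of a 256-entry table of strings, it extracts the high and low nibble of each byte and indexes a 16-character digit table:

// Sketch of the nibble shift-and-mask variant (illustrative names).
static const char digits[] = "0123456789ABCDEF";

string to_hex_shift( const unsigned char* p, unsigned int len )
{
    string str;
    str.resize(len * 2);
    char* out = &str[0];
    for( unsigned int i = 0; i < len; i++ ) {
        out[2*i]     = digits[p[i] >> 4];   // high nibble
        out[2*i + 1] = digits[p[i] & 0x0F]; // low nibble
    }
    return str;
}

The trade-off is two table lookups and two bit operations per byte versus the single 256-entry lookup above; which wins in practice depends on cache behavior, so it is worth timing both in the same harness as main().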