If I have an NSArray of NSNumber objects, how do I calculate the standard deviation of the numbers in the array?
Assuming it's safe to treat every NSNumber as a double (so you'll lose some precision if the array contains 64-bit integers at the extreme ends of their range), and that I've remembered the formula correctly, a first implementation could be:
- (NSNumber *)meanOf:(NSArray *)array
{
    double runningTotal = 0.0;

    // Sum every value, treating each NSNumber as a double
    for(NSNumber *number in array)
    {
        runningTotal += [number doubleValue];
    }

    return [NSNumber numberWithDouble:(runningTotal / [array count])];
}

- (NSNumber *)standardDeviationOf:(NSArray *)array
{
    if(![array count]) return nil;

    double mean = [[self meanOf:array] doubleValue];
    double sumOfSquaredDifferences = 0.0;

    // Accumulate the squared difference of each value from the mean
    for(NSNumber *number in array)
    {
        double valueOfNumber = [number doubleValue];
        double difference = valueOfNumber - mean;
        sumOfSquaredDifferences += difference * difference;
    }

    // Population standard deviation: the square root of the mean squared difference
    return [NSNumber numberWithDouble:sqrt(sumOfSquaredDifferences / [array count])];
}