OK team, this is weird. [NSDecimalNumber integerValue] is behaving strangely.
I'm sat at a breakpoint, trying to figure out why some parts of my app are broken.
You can use intValue or unsignedIntValue just fine, but NOT integerValue or unsignedIntegerValue. Here is a unit test demonstrating the issue; it shows the failure is tied to numbers whose mantissa requires more than 64 bits of precision:
//
// NSDecimalNumberBugTests.m
//
// Created by Lane Roathe on 6/1/17.
// For Quicken, Inc.
//
#import <XCTest/XCTest.h>
@interface NSDecimalNumberBugTests : XCTestCase
@end
@implementation NSDecimalNumberBugTests
- (void)setUp {
    [super setUp];
    // Put setup code here. This method is called before the invocation of each test method in the class.
}
- (void)tearDown {
    // Put teardown code here. This method is called after the invocation of each test method in the class.
    [super tearDown];
}
- (void)testBug {
    // Use XCTAssert and related functions to verify your tests produce the correct results.
    NSDecimalNumber* decimalLength;
    NSUInteger interval;

    // Start with a number whose mantissa requires 65+ bits
    // (18446744073709551616 is 2^64)
    decimalLength = [NSDecimalNumber decimalNumberWithString:@"1.8446744073709551616"];

    // This FAILS: interval comes back as zero
    interval = decimalLength.unsignedIntegerValue;
    XCTAssert(interval == 1);

    // This works: interval is 1
    interval = decimalLength.unsignedIntValue;
    XCTAssert(interval == 1);

    // Now test with a number whose mantissa fits in 64 bits
    // (18446744073709551615 is 2^64 - 1)
    // This WORKS: interval is 1
    decimalLength = [NSDecimalNumber decimalNumberWithString:@"1.8446744073709551615"];
    interval = decimalLength.unsignedIntegerValue;
    XCTAssert(interval == 1);
}
@end
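If anyone needs to ship around this in the meantime, here is a minimal sketch of a workaround that leans only on the accessor the test above shows returning the right answer (unsignedIntValue). The category and the qkn_ method name are hypothetical, and the approach inherits unsignedIntValue's 32-bit range, so it only helps for values known to fit in an unsigned int:

#import <Foundation/Foundation.h>

@interface NSDecimalNumber (QKNIntegerWorkaround)
// Hypothetical replacement for the broken unsignedIntegerValue accessor.
- (NSUInteger)qkn_unsignedIntegerValue;
@end

@implementation NSDecimalNumber (QKNIntegerWorkaround)
- (NSUInteger)qkn_unsignedIntegerValue {
    // Route through unsignedIntValue, which the test above shows converting
    // correctly. Caveat: anything above UINT_MAX is truncated, so this is
    // only safe when the value fits in 32 bits.
    return (NSUInteger)self.unsignedIntValue;
}
@end

Call sites would then read decimalLength.qkn_unsignedIntegerValue instead of decimalLength.unsignedIntegerValue until the underlying conversion is fixed.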