// MatrixTest.cpp revision 9ed2ecd3ac9b4601fccee8b7232b49bb2f9fed2e
1 2/* 3 * Copyright 2011 Google Inc. 4 * 5 * Use of this source code is governed by a BSD-style license that can be 6 * found in the LICENSE file. 7 */ 8#include "Test.h" 9#include "SkMath.h" 10#include "SkMatrix.h" 11#include "SkRandom.h" 12 13static bool nearly_equal_scalar(SkScalar a, SkScalar b) { 14 // Note that we get more compounded error for multiple operations when 15 // SK_SCALAR_IS_FIXED. 16#ifdef SK_SCALAR_IS_FLOAT 17 const SkScalar tolerance = SK_Scalar1 / 200000; 18#else 19 const SkScalar tolerance = SK_Scalar1 / 1024; 20#endif 21 22 return SkScalarAbs(a - b) <= tolerance; 23} 24 25static bool nearly_equal(const SkMatrix& a, const SkMatrix& b) { 26 for (int i = 0; i < 9; i++) { 27 if (!nearly_equal_scalar(a[i], b[i])) { 28 printf("not equal %g %g\n", (float)a[i], (float)b[i]); 29 return false; 30 } 31 } 32 return true; 33} 34 35static bool are_equal(skiatest::Reporter* reporter, 36 const SkMatrix& a, 37 const SkMatrix& b) { 38 bool equal = a == b; 39 bool cheapEqual = a.cheapEqualTo(b); 40 if (equal != cheapEqual) { 41#if SK_SCALAR_IS_FLOAT 42 bool foundZeroSignDiff = false; 43 for (int i = 0; i < 9; ++i) { 44 float aVal = a.get(i); 45 float bVal = b.get(i); 46 int aValI = *reinterpret_cast<int*>(&aVal); 47 int bValI = *reinterpret_cast<int*>(&bVal); 48 if (0 == aVal && 0 == bVal && aValI != bValI) { 49 foundZeroSignDiff = true; 50 } else { 51 REPORTER_ASSERT(reporter, aVal == bVal && aValI == aValI); 52 } 53 } 54 REPORTER_ASSERT(reporter, foundZeroSignDiff); 55 } else { 56 bool foundNaN = false; 57 for (int i = 0; i < 9; ++i) { 58 float aVal = a.get(i); 59 float bVal = b.get(i); 60 int aValI = *reinterpret_cast<int*>(&aVal); 61 int bValI = *reinterpret_cast<int*>(&bVal); 62 if (sk_float_isnan(aVal) && aValI == bValI) { 63 foundNaN = true; 64 } else { 65 REPORTER_ASSERT(reporter, aVal == bVal && aValI == bValI); 66 } 67 } 68 REPORTER_ASSERT(reporter, foundNaN); 69 } 70#else 71 REPORTER_ASSERT(reporter, false); 72#endif 73 } 74 return equal; 75} 76 
77static bool is_identity(const SkMatrix& m) { 78 SkMatrix identity; 79 identity.reset(); 80 return nearly_equal(m, identity); 81} 82 83static void test_flatten(skiatest::Reporter* reporter, const SkMatrix& m) { 84 // add 100 in case we have a bug, I don't want to kill my stack in the test 85 char buffer[SkMatrix::kMaxFlattenSize + 100]; 86 uint32_t size1 = m.flatten(NULL); 87 uint32_t size2 = m.flatten(buffer); 88 REPORTER_ASSERT(reporter, size1 == size2); 89 REPORTER_ASSERT(reporter, size1 <= SkMatrix::kMaxFlattenSize); 90 91 SkMatrix m2; 92 uint32_t size3 = m2.unflatten(buffer); 93 REPORTER_ASSERT(reporter, size1 == size2); 94 REPORTER_ASSERT(reporter, are_equal(reporter, m, m2)); 95 96 char buffer2[SkMatrix::kMaxFlattenSize + 100]; 97 size3 = m2.flatten(buffer2); 98 REPORTER_ASSERT(reporter, size1 == size2); 99 REPORTER_ASSERT(reporter, memcmp(buffer, buffer2, size1) == 0); 100} 101 102void test_matrix_max_stretch(skiatest::Reporter* reporter) { 103 SkMatrix identity; 104 identity.reset(); 105 REPORTER_ASSERT(reporter, SK_Scalar1 == identity.getMaxStretch()); 106 107 SkMatrix scale; 108 scale.setScale(SK_Scalar1 * 2, SK_Scalar1 * 4); 109 REPORTER_ASSERT(reporter, SK_Scalar1 * 4 == scale.getMaxStretch()); 110 111 SkMatrix rot90Scale; 112 rot90Scale.setRotate(90 * SK_Scalar1); 113 rot90Scale.postScale(SK_Scalar1 / 4, SK_Scalar1 / 2); 114 REPORTER_ASSERT(reporter, SK_Scalar1 / 2 == rot90Scale.getMaxStretch()); 115 116 SkMatrix rotate; 117 rotate.setRotate(128 * SK_Scalar1); 118 REPORTER_ASSERT(reporter, SkScalarAbs(SK_Scalar1 - rotate.getMaxStretch()) <= SK_ScalarNearlyZero); 119 120 SkMatrix translate; 121 translate.setTranslate(10 * SK_Scalar1, -5 * SK_Scalar1); 122 REPORTER_ASSERT(reporter, SK_Scalar1 == translate.getMaxStretch()); 123 124 SkMatrix perspX; 125 perspX.reset(); 126 perspX.setPerspX(SkScalarToPersp(SK_Scalar1 / 1000)); 127 REPORTER_ASSERT(reporter, -SK_Scalar1 == perspX.getMaxStretch()); 128 129 SkMatrix perspY; 130 perspY.reset(); 131 
perspY.setPerspX(SkScalarToPersp(-SK_Scalar1 / 500)); 132 REPORTER_ASSERT(reporter, -SK_Scalar1 == perspY.getMaxStretch()); 133 134 SkMatrix baseMats[] = {scale, rot90Scale, rotate, 135 translate, perspX, perspY}; 136 SkMatrix mats[2*SK_ARRAY_COUNT(baseMats)]; 137 for (size_t i = 0; i < SK_ARRAY_COUNT(baseMats); ++i) { 138 mats[i] = baseMats[i]; 139 bool invertable = mats[i].invert(&mats[i + SK_ARRAY_COUNT(baseMats)]); 140 REPORTER_ASSERT(reporter, invertable); 141 } 142 SkRandom rand; 143 for (int m = 0; m < 1000; ++m) { 144 SkMatrix mat; 145 mat.reset(); 146 for (int i = 0; i < 4; ++i) { 147 int x = rand.nextU() % SK_ARRAY_COUNT(mats); 148 mat.postConcat(mats[x]); 149 } 150 SkScalar stretch = mat.getMaxStretch(); 151 152 if ((stretch < 0) != mat.hasPerspective()) { 153 stretch = mat.getMaxStretch(); 154 } 155 156 REPORTER_ASSERT(reporter, (stretch < 0) == mat.hasPerspective()); 157 158 if (mat.hasPerspective()) { 159 m -= 1; // try another non-persp matrix 160 continue; 161 } 162 163 // test a bunch of vectors. None should be scaled by more than stretch 164 // (modulo some error) and we should find a vector that is scaled by 165 // almost stretch. 
166 static const SkScalar gStretchTol = (105 * SK_Scalar1) / 100; 167 static const SkScalar gMaxStretchTol = (97 * SK_Scalar1) / 100; 168 SkScalar max = 0; 169 SkVector vectors[1000]; 170 for (size_t i = 0; i < SK_ARRAY_COUNT(vectors); ++i) { 171 vectors[i].fX = rand.nextSScalar1(); 172 vectors[i].fY = rand.nextSScalar1(); 173 if (!vectors[i].normalize()) { 174 i -= 1; 175 continue; 176 } 177 } 178 mat.mapVectors(vectors, SK_ARRAY_COUNT(vectors)); 179 for (size_t i = 0; i < SK_ARRAY_COUNT(vectors); ++i) { 180 SkScalar d = vectors[i].length(); 181 REPORTER_ASSERT(reporter, SkScalarDiv(d, stretch) < gStretchTol); 182 if (max < d) { 183 max = d; 184 } 185 } 186 REPORTER_ASSERT(reporter, SkScalarDiv(max, stretch) >= gMaxStretchTol); 187 } 188} 189 190void TestMatrix(skiatest::Reporter* reporter) { 191 SkMatrix mat, inverse, iden1, iden2; 192 193 mat.reset(); 194 mat.setTranslate(SK_Scalar1, SK_Scalar1); 195 mat.invert(&inverse); 196 iden1.setConcat(mat, inverse); 197 REPORTER_ASSERT(reporter, is_identity(iden1)); 198 199 mat.setScale(SkIntToScalar(2), SkIntToScalar(2)); 200 mat.invert(&inverse); 201 iden1.setConcat(mat, inverse); 202 REPORTER_ASSERT(reporter, is_identity(iden1)); 203 test_flatten(reporter, mat); 204 205 mat.setScale(SK_Scalar1/2, SK_Scalar1/2); 206 mat.invert(&inverse); 207 iden1.setConcat(mat, inverse); 208 REPORTER_ASSERT(reporter, is_identity(iden1)); 209 test_flatten(reporter, mat); 210 211 mat.setScale(SkIntToScalar(3), SkIntToScalar(5), SkIntToScalar(20), 0); 212 mat.postRotate(SkIntToScalar(25)); 213 REPORTER_ASSERT(reporter, mat.invert(NULL)); 214 mat.invert(&inverse); 215 iden1.setConcat(mat, inverse); 216 REPORTER_ASSERT(reporter, is_identity(iden1)); 217 iden2.setConcat(inverse, mat); 218 REPORTER_ASSERT(reporter, is_identity(iden2)); 219 test_flatten(reporter, mat); 220 test_flatten(reporter, iden2); 221 222 // rectStaysRect test 223 { 224 static const struct { 225 SkScalar m00, m01, m10, m11; 226 bool mStaysRect; 227 } 228 
gRectStaysRectSamples[] = { 229 { 0, 0, 0, 0, false }, 230 { 0, 0, 0, SK_Scalar1, false }, 231 { 0, 0, SK_Scalar1, 0, false }, 232 { 0, 0, SK_Scalar1, SK_Scalar1, false }, 233 { 0, SK_Scalar1, 0, 0, false }, 234 { 0, SK_Scalar1, 0, SK_Scalar1, false }, 235 { 0, SK_Scalar1, SK_Scalar1, 0, true }, 236 { 0, SK_Scalar1, SK_Scalar1, SK_Scalar1, false }, 237 { SK_Scalar1, 0, 0, 0, false }, 238 { SK_Scalar1, 0, 0, SK_Scalar1, true }, 239 { SK_Scalar1, 0, SK_Scalar1, 0, false }, 240 { SK_Scalar1, 0, SK_Scalar1, SK_Scalar1, false }, 241 { SK_Scalar1, SK_Scalar1, 0, 0, false }, 242 { SK_Scalar1, SK_Scalar1, 0, SK_Scalar1, false }, 243 { SK_Scalar1, SK_Scalar1, SK_Scalar1, 0, false }, 244 { SK_Scalar1, SK_Scalar1, SK_Scalar1, SK_Scalar1, false } 245 }; 246 247 for (size_t i = 0; i < SK_ARRAY_COUNT(gRectStaysRectSamples); i++) { 248 SkMatrix m; 249 250 m.reset(); 251 m.set(SkMatrix::kMScaleX, gRectStaysRectSamples[i].m00); 252 m.set(SkMatrix::kMSkewX, gRectStaysRectSamples[i].m01); 253 m.set(SkMatrix::kMSkewY, gRectStaysRectSamples[i].m10); 254 m.set(SkMatrix::kMScaleY, gRectStaysRectSamples[i].m11); 255 REPORTER_ASSERT(reporter, 256 m.rectStaysRect() == gRectStaysRectSamples[i].mStaysRect); 257 } 258 } 259 260 mat.reset(); 261 mat.set(SkMatrix::kMScaleX, SkIntToScalar(1)); 262 mat.set(SkMatrix::kMSkewX, SkIntToScalar(2)); 263 mat.set(SkMatrix::kMTransX, SkIntToScalar(3)); 264 mat.set(SkMatrix::kMSkewY, SkIntToScalar(4)); 265 mat.set(SkMatrix::kMScaleY, SkIntToScalar(5)); 266 mat.set(SkMatrix::kMTransY, SkIntToScalar(6)); 267 SkScalar affine[6]; 268 REPORTER_ASSERT(reporter, mat.asAffine(affine)); 269 270 #define affineEqual(e) affine[SkMatrix::kA##e] == mat.get(SkMatrix::kM##e) 271 REPORTER_ASSERT(reporter, affineEqual(ScaleX)); 272 REPORTER_ASSERT(reporter, affineEqual(SkewY)); 273 REPORTER_ASSERT(reporter, affineEqual(SkewX)); 274 REPORTER_ASSERT(reporter, affineEqual(ScaleY)); 275 REPORTER_ASSERT(reporter, affineEqual(TransX)); 276 REPORTER_ASSERT(reporter, 
affineEqual(TransY)); 277 #undef affineEqual 278 279 mat.set(SkMatrix::kMPersp1, SkScalarToPersp(SK_Scalar1 / 2)); 280 REPORTER_ASSERT(reporter, !mat.asAffine(affine)); 281 282 SkMatrix mat2; 283 mat2.reset(); 284 mat.reset(); 285 SkScalar zero = 0; 286 mat.set(SkMatrix::kMSkewX, -zero); 287 REPORTER_ASSERT(reporter, are_equal(reporter, mat, mat2)); 288 289 mat2.reset(); 290 mat.reset(); 291 mat.set(SkMatrix::kMSkewX, SK_ScalarNaN); 292 mat2.set(SkMatrix::kMSkewX, SK_ScalarNaN); 293 // fixed pt doesn't have the property that NaN does not equal itself. 294#ifdef SK_SCALAR_IS_FIXED 295 REPORTER_ASSERT(reporter, are_equal(reporter, mat, mat2)); 296#else 297 REPORTER_ASSERT(reporter, !are_equal(reporter, mat, mat2)); 298#endif 299 300 test_matrix_max_stretch(reporter); 301} 302 303#include "TestClassDef.h" 304DEFINE_TESTCLASS("Matrix", MatrixTestClass, TestMatrix) 305