ノンパラの実験
はじめに
ノンパラメトリックに関する論文やチュートリアルを見ていると「混合正規分布の混合数も推定できちゃうんですよぱねぇ」みたいなこと書いてあったり、その図が載ってたりする。
試してみたいなぁと思ったので、書いて実験してみる。
理解が乏しいのでやり方が間違っている可能性が高く、コーディングがスマートじゃない、あくまで実験用。。。orz
説明
CRP-type samplingでやってみる。
あるいくつかの正規分布に従うデータが与えられるとき、その混合比πとパラメータθを推定する。
P(x|θ) : 正規分布
G0 : パラメータの事前分布(平均の事前分布、分散の事前分布)
θ_k : テーブルkのパラメータ(平均と分散)
α_0 : 集中度パラメータ
π : 混合数と混合比
→G0(のパラメータ)とα_0とデータを与える。
データは、1次元のものを自分で用意して使う。以下は、各正規分布が離れているちょっとずるいデータ。。
N(-50,10)に従うデータを200個、N(0,10)に従うデータを300個、N(50,10)に従うデータを500個。
推定は、Neal2000のAlgorithm8を利用してみる。新しいテーブル数Mは1つにする。(というか、他のAlgorithmの積分の計算がよくわからない、、、)
パラメータの更新は、先日(http://d.hatena.ne.jp/jetbead/20120229/1330535260)の更新式を使う。
出力は、各テーブル(正規分布)に所属するデータをそれぞれ出力するのみ。(離れているデータを使うので、平均・最頻を出さないで、最終的な結果だけで、、、)
コード
#include <iostream>
#include <vector>
#include <set>
#include <algorithm>
#include <cmath>
#include <cstdint>

static const double PI = 3.14159265358979323846264338;

// xorshift128 pseudo-random number generator (Marsaglia, 2003).
// Fix: the state words must be exactly 32 bits for the xorshift128
// recurrence to be valid; the original used unsigned long, which is
// 64-bit on LP64 platforms (a defect the original comment itself noted).
unsigned long xor128(){
    static uint32_t x = 123456789, y = 362436069, z = 521288629, w = 88675123;
    uint32_t t = x ^ (x << 11);
    x = y; y = z; z = w;
    w = (w ^ (w >> 19)) ^ (t ^ (t >> 8));
    return w;
}

// Uniform random number in [0,1).
// Fix: divide by 2^32 instead of the original's
// "xor128() % ULONG_MAX / ULONG_MAX", which was biased and relied on
// ULONG_MAX without ever including <climits>.
double frand(){
    return xor128() / 4294967296.0; // / 2^32, so the result is always < 1
}

// Gamma random variate with the given shape and scale
// (log-based rejection sampler). Returns -1 for a non-positive shape.
double gamma_rand(double shape, double scale){
    double n, b1, b2, c1, c2;
    if(4.0 < shape)       n = 1.0/sqrt(shape);
    else if(0.4 < shape)  n = 1.0/shape + (1.0/shape)*(shape-0.4)/3.6;
    else if(0.0 < shape)  n = 1.0/shape;
    else return -1;

    b1 = shape - 1.0/n;
    b2 = shape + 1.0/n;
    if(0.4 < shape) c1 = b1 * (log(b1)-1.0) / 2.0;
    else            c1 = 0;
    c2 = b2 * (log(b2)-1.0) / 2.0;

    while(true){
        double v1 = frand(), v2 = frand();
        if(v1 <= 0.0 || v2 <= 0.0) continue; // guard log(0) -> -inf
        double w1 = c1 + log(v1);
        double w2 = c2 + log(v2);
        double y = n * (b1*w2 - b2*w1);
        if(y < 0) continue;
        double x = n * (w2 - w1);
        if(log(y) < x) continue;
        return exp(x) * scale;
    }
}

// Inverse-gamma random variate: 1/X where X ~ Gamma(shape, scale).
double invgamma_rand(double shape, double scale){
    return 1.0/gamma_rand(shape, scale);
}

// Normal random variate via the Box-Muller transform.
// Note: sigma2 is the variance, not the standard deviation.
double normal_rand(double mu, double sigma2){
    double sigma = sqrt(sigma2);
    double u1 = frand(), u2 = frand();
    while(u1 <= 0.0) u1 = frand(); // guard log(0) -> -inf
    double z1 = sqrt(-2*log(u1)) * cos(2*PI*u2);
    return mu + sigma*z1;
}

// Draw an index from the (unnormalized) discrete distribution p.
int prob_select(const std::vector<double> &p){
    double Z = 0.0, sum = 0.0;
    double r = frand();
    for(size_t i = 0; i < p.size(); i++) Z += p[i];
    for(size_t i = 0; i + 1 < p.size(); i++){
        sum += p[i]/Z;
        if(r < sum) return static_cast<int>(i);
    }
    return static_cast<int>(p.size()) - 1; // fall through to the last index
}

// One mixture component ("table" in CRP terms): a Gaussian whose
// (mu, sigma2) are resampled from a Normal-Inverse-Gamma posterior.
struct Gaussian {
    double mu, sigma2;                              // current component parameters
    double G0_mean, G0_sigma2, G0_shape, G0_scale;  // base-measure hyperparameters
    // Fix: non-owning pointer to the shared data set. The original stored a
    // full copy of the data in every Gaussian, and a fresh copy was made for
    // every auxiliary table of every customer of every sweep.
    const std::vector<double> *_data;
    std::set<int> data_idx;                         // indices of points seated here

    // Is data point idx seated at this table?
    bool isExist(int idx){ return data_idx.count(idx); }

    // Unseat data point idx from this table.
    void eraseIdx(int idx){ data_idx.erase(idx); }

    // Gibbs update of (mu, sigma2) from the posterior given the points
    // currently seated at this table.
    // Precondition: data_idx is non-empty (empty tables are erased before
    // this is called), otherwise x_bar would divide by zero.
    void paramUpdate(){
        double n_0 = G0_shape * 2.0;
        double S_0 = G0_scale * 2.0 / n_0;
        int n = static_cast<int>(data_idx.size());

        double x_bar = 0.0;
        for(std::set<int>::iterator itr = data_idx.begin(); itr != data_idx.end(); ++itr){
            x_bar += (*_data)[*itr];
        }
        x_bar /= n;

        double m_1  = 1.0 + n;
        double n_1  = n_0 + n;
        double mu_1 = (1.0 * G0_mean + n * x_bar) / (1.0 + n);
        double nS_1 = n_0 * S_0;
        for(std::set<int>::iterator itr = data_idx.begin(); itr != data_idx.end(); ++itr){
            nS_1 += ((*_data)[*itr] - x_bar) * ((*_data)[*itr] - x_bar);
        }
        // NOTE(review): this term uses the *current* mu rather than the prior
        // mean G0_mean; it matches the author's linked update notes, so it is
        // kept as-is — confirm against the derivation before reusing.
        nS_1 += (1.0 * n) / (1.0 + n) * (x_bar - mu) * (x_bar - mu);

        sigma2 = invgamma_rand((n_1+1)/2.0, 2.0/(nS_1 + m_1*(mu - mu_1)*(mu - mu_1)));
        mu = normal_rand(mu_1, sigma2/m_1);
    }

    // Fix: initializer list now follows declaration order (silences -Wreorder).
    Gaussian(std::vector<double> &data, double mu, double sigma2,
             double G0_mean, double G0_sigma2, double G0_shape, double G0_scale):
        mu(mu), sigma2(sigma2),
        G0_mean(G0_mean), G0_sigma2(G0_sigma2),
        G0_shape(G0_shape), G0_scale(G0_scale),
        _data(&data){
        data_idx.clear();
    }
};

// Dirichlet Process Gaussian Mixture Model, fitted with CRP-style
// Gibbs sampling (Neal 2000, Algorithm 8 with M auxiliary tables).
class DPGMM {
    double alpha_0;                                 // concentration parameter
    double G0_mean, G0_sigma2, G0_shape, G0_scale;  // base measure G0
    std::vector<double> _data;                      // observed 1-D data
    std::vector<Gaussian> _mixture;                 // current tables (components)
public:
    DPGMM(double alpha_0, double G0_mean, double G0_sigma2, double G0_shape, double G0_scale):
        alpha_0(alpha_0), G0_mean(G0_mean), G0_sigma2(G0_sigma2),
        G0_shape(G0_shape), G0_scale(G0_scale){
    }

    // Store the data and seat every point at a single initial table whose
    // parameters are drawn from the prior G0.
    void set_data(const std::vector<double> &data){
        _data = data;
        _mixture.clear();
        _mixture.push_back(Gaussian(_data,
                                    normal_rand(G0_mean, G0_sigma2),
                                    invgamma_rand(G0_shape, G0_scale),
                                    G0_mean, G0_sigma2, G0_shape, G0_scale));
        for(int i = 0; i < static_cast<int>(_data.size()); i++){
            _mixture[0].data_idx.insert(i); // everyone starts at the same table
        }
    }

    // Run `iter` sweeps of the Gibbs sampler.
    // burn_in is accepted for interface compatibility but has no effect:
    // output() reports only the final seating, so there is nothing to discard.
    void inference(int iter, int burn_in){
        (void)burn_in;
        int M = 1; // auxiliary tables added per customer (Neal's Algorithm 8)
        for(int T = 0; T < iter; T++){
            //if(T%100==0) std::cout << T << std::endl; // optional progress log

            // Resample the table of every customer (data point).
            for(int i = 0; i < static_cast<int>(_data.size()); i++){
                // Append M fresh tables with parameters drawn from the prior.
                for(int j = 0; j < M; j++){
                    _mixture.push_back(Gaussian(_data,
                                                normal_rand(G0_mean, G0_sigma2),
                                                invgamma_rand(G0_shape, G0_scale),
                                                G0_mean, G0_sigma2, G0_shape, G0_scale));
                }

                // Remove customer i from its current table.
                int table_i = -1;
                for(size_t j = 0; j < _mixture.size(); j++){
                    if(_mixture[j].isExist(i)){ table_i = static_cast<int>(j); break; }
                }
                _mixture[table_i].eraseIdx(i);

                // Seating probability of each table (existing and auxiliary).
                std::vector<double> p;
                for(size_t j = 0; j < _mixture.size(); j++){
                    double mu_j = _mixture[j].mu;
                    double sigma2_j = _mixture[j].sigma2;
                    double x_i = _data[i];
                    double f_x = exp(-(x_i-mu_j)*(x_i-mu_j)/(2*sigma2_j)) / sqrt(2*PI*sigma2_j);
                    if(_mixture[j].data_idx.size() > 0){
                        // Existing table: proportional to its occupancy.
                        double p_d = (_mixture[j].data_idx.size())/(_data.size()-1+alpha_0);
                        p.push_back(p_d * f_x);
                    }else{
                        // Auxiliary (empty) table: proportional to alpha_0/M.
                        double p_d = (alpha_0/M)/(_data.size()-1+alpha_0);
                        p.push_back(p_d * f_x);
                    }
                }
                _mixture[prob_select(p)].data_idx.insert(i); // reseat customer i

                // Drop tables that became empty.
                for(std::vector<Gaussian>::iterator itr = _mixture.begin(); itr != _mixture.end();){
                    if(itr->data_idx.size() == 0) itr = _mixture.erase(itr);
                    else ++itr;
                }
            }

            // Resample the parameters of every (non-empty) table.
            for(size_t i = 0; i < _mixture.size(); i++){
                _mixture[i].paramUpdate();
            }
        }
    }

    // Print, for each table, the index and value of every point seated there.
    // Only the final state is reported.
    void output(){
        for(size_t i = 0; i < _mixture.size(); i++){
            std::cout << "Table[" << i << "]" << std::endl;
            for(std::set<int>::iterator itr = _mixture[i].data_idx.begin(); itr != _mixture[i].data_idx.end(); ++itr){
                std::cout << " " << (*itr) << "\t" << _data[(*itr)] << std::endl;
            }
        }
    }
};

int main(){
    // 1. Build the data set: three well-separated 1-D Gaussians.
    std::vector<double> data;
    for(int i = 0; i < 200; i++) data.push_back(normal_rand(-50, 10));
    for(int i = 0; i < 300; i++) data.push_back(normal_rand(0, 10));
    for(int i = 0; i < 500; i++) data.push_back(normal_rand(50, 10));

    // 2. Fit the model.
    /// Concentration parameter.
    double alpha_0 = 1;
    /// Base measure G_0 hyperparameters for the Gaussian parameters.
    double G0_mean = 0;
    double G0_sigma2 = 1000;
    double G0_shape = 0.01;
    double G0_scale = 0.01;
    DPGMM dpgmm(alpha_0, G0_mean, G0_sigma2, G0_shape, G0_scale);
    dpgmm.set_data(data);
    dpgmm.inference(10000, 3000);

    // 3. Report the final table assignments.
    dpgmm.output();
    return 0;
}
結果
iteration回した最後の結果。それっぽく分かれてくれてる。。。
本当は最後だけじゃなく、それまでの情報(平均値とか最頻indexとか)を使うべきだけど、、、
# データのindex データの値 Table[0] 2 -35.5214 3 -36.3714 4 -45.4479 5 -48.0307 6 -48.7147 7 -47.8853 8 -46.6952 9 -53.3751 10 -52.3716 11 -48.7485 12 -51.7934 13 -44.3183 14 -53.0196 15 -50.5134 16 -53.1481 17 -48.1244 18 -47.787 19 -49.4366 20 -47.6917 21 -47.8654 22 -50.9595 23 -49.2512 24 -51.4467 25 -50.8832 26 -40.1298 27 -49.7144 28 -51.6124 29 -52.6363 30 -43.0921 31 -48.0059 32 -50.2139 33 -47.9583 34 -47.411 35 -50.9434 36 -53.6684 37 -44.6689 38 -51.6912 39 -53.5678 40 -50.0215 41 -48.204 42 -48.2751 43 -51.0128 44 -52.8112 45 -48.3422 46 -43.4643 47 -47.5391 48 -50.4429 49 -50.5254 50 -54.2783 51 -49.3236 52 -50.7506 53 -51.4321 54 -50.5833 55 -52.9856 56 -53.77 57 -46.8769 58 -46.9255 59 -52.4172 60 -50.964 61 -50.5665 62 -52.0168 63 -49.2695 64 -43.7435 65 -50.2724 66 -50.1373 67 -48.2466 68 -53.1438 69 -50.5922 70 -46.4332 71 -51.0936 72 -48.5968 73 -48.8805 74 -49.1299 75 -48.5043 76 -48.0144 77 -49.1753 78 -57.0485 79 -51.0807 80 -47.7196 81 -51.0695 82 -48.8323 83 -50.8741 84 -47.0493 85 -51.5255 86 -56.2889 87 -48.9515 88 -46.2357 89 -47.1358 90 -48.5066 91 -50.1665 92 -44.1657 93 -49.9785 94 -50.5638 95 -47.9809 96 -51.3012 97 -48.2492 98 -54.1664 99 -48.6446 100 -54.9343 101 -53.66 102 -51.2418 103 -46.872 104 -52.0475 105 -50.6886 106 -49.4763 107 -55.9026 108 -54.7954 109 -44.1464 110 -48.1967 111 -46.257 112 -48.2709 113 -49.4791 114 -55.124 115 -50.3997 116 -51.4293 117 -59.3333 118 -56.8759 119 -48.9705 120 -48.9772 121 -46.7002 122 -48.4843 123 -52.0493 124 -49.4655 125 -49.4618 126 -42.5104 127 -46.6317 128 -50.0321 129 -51.2214 130 -56.5567 131 -52.945 132 -51.0209 133 -53.2828 134 -54.099 135 -51.6668 136 -52.5809 137 -50.33 138 -57.718 139 -51.5968 140 -50.1121 141 -53.7579 142 -57.0954 143 -53.8787 144 -45.7346 145 -53.1578 146 -54.0137 147 -50.4715 148 -56.1738 149 -48.3563 150 -49.7892 151 -50.6667 152 -49.8319 153 -49.8551 154 -55.0025 155 -56.2075 156 -54.1293 157 -47.7355 158 -51.6997 159 -54.9316 160 -51.1406 161 -44.481 162 -46.754 163 
-55.4517 164 -48.4092 165 -45.7508 166 -45.7267 167 -48.3592 168 -49.5059 169 -53.379 170 -51.1818 171 -54.5743 172 -49.0745 173 -49.0509 174 -48.8907 175 -48.5935 176 -46.8176 177 -44.4825 178 -49.0936 179 -51.0238 180 -48.2486 181 -51.271 182 -46.9064 183 -50.076 184 -51.1754 185 -47.8776 186 -47.4335 187 -51.4389 188 -45.7688 189 -46.7081 190 -46.3274 191 -44.9674 192 -48.5076 193 -53.2341 194 -56.0897 195 -49.455 196 -49.2397 197 -44.2719 198 -47.3803 199 -51.757 Table[1] 200 -2.92195 201 -4.34472 202 7.9088 203 1.59877 204 -5.84475 205 1.3718 206 -0.811399 207 1.28305 208 -1.96917 209 2.62953 210 -1.51321 211 -1.52876 212 -2.60754 213 -2.7589 214 0.57361 215 -1.13896 216 -1.71683 217 -1.68824 218 1.47031 219 -2.67293 220 0.115342 221 3.10107 222 -0.908284 223 -1.12871 224 -0.837544 225 0.203887 226 1.16277 227 2.16046 228 -0.0544563 229 1.28793 230 8.0388 231 -3.78242 232 1.64791 233 -1.93224 234 -5.31764 235 -0.985437 236 -0.472555 237 0.76844 238 -2.90512 239 2.37592 240 1.42087 241 -1.75345 242 -5.02175 243 3.81046 244 0.369908 245 0.948802 246 0.662487 247 -2.34372 248 2.18163 249 4.46669 250 -0.329907 251 -2.86627 252 0.0536559 253 0.228065 254 1.71825 255 5.49655 256 2.21127 257 -3.9613 258 -3.55955 259 3.63743 260 -0.20251 261 -5.9997 262 5.7217 263 -0.656522 264 -3.95076 265 1.50664 266 -1.11293 267 -2.80075 268 -3.44006 269 -3.09413 270 6.09038 271 0.976819 272 1.77871 273 -2.38342 274 -0.305332 275 0.905309 276 4.18212 277 2.60173 278 -2.02888 279 4.98905 280 6.68565 281 -2.6741 282 4.84576 283 2.68951 284 2.6228 285 -1.91521 286 2.61355 287 -1.01709 288 -0.336861 289 1.54685 290 -2.86563 291 -0.907062 292 -2.11943 293 1.89301 294 -0.536777 295 -8.17238 296 -4.77668 297 6.56567 298 0.26962 299 -1.68811 300 -1.90068 301 0.34508 302 3.95192 303 -0.728926 304 1.56125 305 0.0806611 306 0.245732 307 -2.0148 308 -1.69024 309 -2.7257 310 -0.890567 311 4.1226 312 4.44221 313 -8.04163 314 3.41392 315 -3.2167 316 -1.42271 317 -1.07614 318 0.912057 319 -1.21545 
320 2.0762 321 -0.0225722 322 1.20504 323 -1.29817 324 2.94811 325 -1.51383 326 2.13756 327 3.99463 328 3.12869 329 -0.321061 330 -2.73394 331 -3.4137 332 5.61272 333 -2.16095 334 -4.93361 335 0.525902 336 -1.73752 337 -5.58911 338 -1.7108 339 0.154274 340 1.55955 341 0.409572 342 -3.00971 343 -4.13514 344 1.9507 345 2.71577 346 -4.96309 347 -0.110848 348 -1.42427 349 -3.30063 350 -1.05048 351 1.07738 352 -1.46188 353 0.922515 354 4.33934 355 -2.81494 356 -2.10239 357 2.08945 358 0.477516 359 5.00287 360 -3.55963 361 -2.05141 362 3.57384 363 1.56325 364 3.85689 365 -2.17199 366 2.72356 367 4.61206 368 2.57464 369 0.560422 370 4.97427 371 0.0478507 372 -5.42315 373 0.514088 374 0.717833 375 -0.758897 376 -3.1755 377 -5.72692 378 -0.756768 379 -3.5582 380 -4.90604 381 -6.10331 382 -0.502797 383 3.09598 384 1.98153 385 -2.60599 386 -5.55868 387 -1.9863 388 2.45726 389 -4.01541 390 -0.525973 391 -2.7322 392 -0.206147 393 -0.412995 394 3.15333 395 0.770543 396 1.72107 397 -2.65519 398 2.15872 399 0.729241 400 -0.0905532 401 -0.766385 402 -2.67467 403 -2.89218 404 -1.63615 405 -1.35973 406 2.81353 407 2.56714 408 0.363525 409 -2.73775 410 -2.23986 411 2.38708 412 2.63184 413 1.80615 414 -1.4083 415 2.17974 416 0.0144896 417 0.0092946 418 -5.14309 419 6.99352 420 -2.38861 421 -2.56485 422 -6.94993 423 -2.55867 424 2.43975 425 1.55157 426 -1.73443 427 2.12182 428 -3.76086 429 -0.489185 430 1.27411 431 -3.92487 432 -5.36595 433 1.80887 434 -3.21689 435 -0.658408 436 4.25024 437 -2.37971 438 -0.700849 439 -1.78248 440 4.11031 441 0.657586 442 -4.8263 443 2.03736 444 -0.116081 445 0.658695 446 -5.39686 447 3.66709 448 2.70345 449 -2.82548 450 3.28067 451 2.9049 452 0.200489 453 -0.339768 454 -2.26415 455 -0.859511 456 1.48667 457 -0.922154 458 2.0078 459 -3.4651 460 -0.529343 461 -0.202698 462 1.66668 463 -2.22484 464 -1.43948 465 -0.174596 466 1.37627 467 -2.47439 468 2.74704 469 -4.19825 470 0.475422 471 -0.902795 472 -1.96388 473 -0.842048 474 2.10587 475 1.27579 476 
0.714839 477 1.67816 478 4.14901 479 1.87643 480 2.25583 481 -3.58776 482 2.43742 483 -3.99507 484 1.47081 485 1.87625 486 1.23376 487 -2.04462 488 -0.612967 489 3.00301 490 1.3963 491 -4.98965 492 0.620377 493 -4.25943 494 2.91028 495 -0.401532 496 -1.61124 497 2.67809 498 -4.56348 499 -2.44084 Table[2] 500 47.2083 501 50.1333 502 49.0593 503 53.7273 504 47.6224 505 49.257 506 52.8288 507 52.2066 508 46.5897 509 49.7019 510 45.9157 511 51.1717 512 51.6923 513 50.9892 514 53.3416 515 44.6859 516 49.9599 517 49.4448 518 51.5572 519 57.286 520 46.1973 521 53.1693 522 52.7945 523 49.1164 524 50.6427 525 50.8951 526 46.6377 527 46.7923 528 51.7048 529 48.3918 530 50.0331 531 48.0992 532 51.5465 533 52.9936 534 49.6345 535 46.3751 536 48.6749 537 49.0931 538 50.5678 539 51.719 540 48.6139 541 49.4551 542 51.3585 543 51.7726 544 51.6984 545 50.0278 546 51.0227 547 44.5653 548 44.4812 549 49.4814 550 50.1419 551 48.39 552 49.0482 553 46.9835 554 56.8812 555 52.0208 556 52.2174 557 50.2577 558 56.1442 559 49.1339 560 49.8706 561 47.0276 562 51.5911 563 48.5077 564 53.4353 565 46.4436 566 56.6919 567 48.1306 568 51.5642 569 46.1931 570 52.4438 571 49.3824 572 51.4996 573 48.6297 574 48.2575 575 55.4545 576 49.863 577 53.3934 578 48.6832 579 50.1713 580 41.2899 581 55.051 582 48.3795 583 51.2265 584 53.7981 585 47.3477 586 53.0892 587 51.0884 588 46.7424 589 47.2892 590 51.3276 591 52.846 592 51.8075 593 51.1482 594 47.0198 595 48.6325 596 51.3365 597 47.4598 598 50.1481 599 50.8701 600 48.833 601 52.3115 602 50.2489 603 51.5298 604 48.3827 605 50.7647 606 47.5093 607 49.6233 608 46.8448 609 49.0365 610 51.4585 611 52.4883 612 49.5769 613 51.2774 614 52.7116 615 43.2847 616 52.3179 617 52.643 618 50.3444 619 56.2518 620 49.624 621 44.7577 622 54.6789 623 51.8825 624 44.4774 625 53.2974 626 46.8067 627 50.6386 628 53.064 629 50.1626 630 50.4756 631 49.3542 632 52.253 633 43.6257 634 50.8882 635 50.6808 636 52.8739 637 50.1468 638 47.2162 639 53.8351 640 49.0978 641 43.4824 
642 49.6421 643 50.7634 644 44.3081 645 47.4656 646 48.4338 647 48.6757 648 57.5019 649 50.6206 650 46.8537 651 48.3813 652 51.7982 653 48.424 654 47.9283 655 47.0337 656 48.0934 657 55.6896 658 51.2767 659 49.4945 660 48.7087 661 49.333 662 46.3045 663 49.2549 664 49.2371 665 50.2047 666 49.6595 667 45.2564 668 50.1154 669 48.3832 670 46.3732 671 52.6561 672 52.6019 673 47.3666 674 47.2436 675 48.1765 676 52.4576 677 46.0745 678 51.215 679 49.7578 680 48.5021 681 49.5451 682 54.2666 683 47.0515 684 54.1405 685 49.7892 686 48.8238 687 47.2997 688 53.0064 689 46.0247 690 46.9157 691 53.6858 692 49.7301 693 46.5576 694 52.9799 695 50.7041 696 47.4629 697 50.6511 698 48.1133 699 49.5037 700 53.3731 701 53.5353 702 52.5137 703 51.5942 704 50.0586 705 47.3795 706 49.255 707 49.0336 708 45.1589 709 46.653 710 46.0391 711 51.6132 712 47.142 713 46.0209 714 50.1332 715 47.2314 716 48.2204 717 50.8915 718 49.3718 719 50.5643 720 57.0949 721 53.9162 722 50.0081 723 47.6147 724 49.8864 725 50.7303 726 51.9253 727 49.2549 728 48.3517 729 52.6285 730 50.6274 731 50.4569 732 52.1017 733 52.6353 734 53.4939 735 47.1914 736 46.435 737 53.0841 738 52.3783 739 51.794 740 53.5775 741 47.12 742 52.6386 743 48.9676 744 54.6517 745 51.1574 746 56.0696 747 51.5449 748 50.2992 749 50.3166 750 50.6935 751 49.6394 752 55.1366 753 53.7863 754 56.0628 755 47.6918 756 53.7175 757 45.8798 758 54.4283 759 44.7231 760 50.4676 761 49.3085 762 51.0676 763 41.6511 764 47.3657 765 52.1909 766 49.2877 767 45.1008 768 46.9269 769 51.402 770 53.2275 771 50.0152 772 51.4946 773 51.6075 774 54.9089 775 52.0244 776 46.7912 777 52.2315 778 47.2325 779 49.543 780 48.6524 781 52.4153 782 57.3265 783 51.7749 784 51.2529 785 48.7739 786 45.286 787 50.803 788 43.1362 789 52.3826 790 49.5456 791 51.5356 792 56.1188 793 50.9528 794 52.6905 795 46.11 796 49.708 797 48.8488 798 49.9329 799 49.7791 800 54.4539 801 52.1736 802 48.8191 803 52.657 804 45.2783 805 49.6953 806 50.6557 807 43.0968 808 50.3708 809 46.758 
810 46.0757 811 49.9069 812 46.7772 813 47.993 814 50.7924 815 51.1395 816 46.9231 817 45.6459 818 57.4477 819 48.6586 820 49.6964 821 52.6133 822 46.233 823 51.3046 824 52.3569 825 52.1927 826 49.2518 827 54.9871 828 51.7533 829 49.8286 830 47.9469 831 47.4476 832 49.7752 833 50.3336 834 49.3951 835 51.9032 836 50.1919 837 44.8847 838 53.169 839 52.465 840 48.5866 841 50.1508 842 54.2019 843 51.2948 844 53.8337 845 51.8688 847 51.5235 848 51.0933 849 50.3949 850 48.83 851 49.6694 852 51.8897 853 54.6055 854 48.4679 855 46.5371 856 48.4887 857 49.6518 858 50.8344 859 51.7577 860 52.6648 861 49.59 862 52.0392 863 48.3213 864 56.1552 865 47.8037 866 47.5686 867 52.6098 868 50.7106 869 46.8837 870 48.2928 871 54.3736 872 48.7201 873 47.812 874 49.4399 875 50.5798 876 48.4775 877 50.0702 878 49.4787 879 52.8912 880 45.0419 881 54.1969 882 50.5366 883 48.0548 884 53.0176 885 50.3011 886 51.2065 887 49.026 888 48.6936 889 55.7794 890 45.8426 891 51.2555 892 52.6447 893 50.487 894 52.2281 895 54.8323 896 50.5128 897 53.5178 898 49.4288 899 51.2999 900 50.493 901 45.0754 902 52.88 903 52.8198 904 44.4267 905 50.2931 906 47.0122 907 46.9598 908 47.0083 909 48.7758 910 46.3166 911 45.1931 912 53.9486 913 47.4728 914 48.1687 915 42.162 916 56.9081 917 48.436 918 50.1222 919 55.0411 920 49.3187 921 45.4149 922 53.0721 923 49.8275 924 45.1732 925 45.2809 926 45.6984 927 51.3946 928 50.3734 929 51.4407 930 48.4081 931 55.5197 932 52.9908 933 49.5517 934 44.6276 935 50.1467 936 52.5191 937 47.1686 938 44.0294 939 51.7616 940 48.5467 941 49.3564 942 45.8278 943 50.4393 944 48.3418 945 50.4748 946 50.1085 947 49.3994 948 51.1022 949 49.1833 950 54.722 951 49.7215 952 52.0236 953 48.1955 954 46.7201 955 49.4768 956 49.5986 957 48.2752 958 45.9778 959 46.5852 960 51.9321 961 49.6427 962 46.9841 963 55.7479 964 52.9344 965 53.8702 966 52.2877 967 49.9191 968 47.2089 969 54.3887 970 51.0311 971 52.5679 972 46.8479 973 50.1765 974 55.1074 975 58.2637 976 50.2971 977 42.7912 978 50.6414 
979 50.0517 980 52.4977 981 48.4231 982 47.6988 983 46.7652 984 50.5972 985 52.088 986 55.6902 987 48.426 988 48.8053 989 50.6716 990 46.6228 991 47.3814 992 51.1737 993 48.748 994 50.0627 995 55.2962 996 45.3248 997 59.8706 998 50.0316 999 44.0382 Table[3] 0 -30.9712 1 -31.307 Table[4] 846 58.531