Inference for Bugs model at "/home/kubo/nina/model200805no3/model.bug.txt", fit using WinBUGS, 3 chains, each with 20000 iterations (first 10000 discarded), n.thin = 50 n.sims = 600 iterations saved mean sd 2.5% 25% 50% 75% 97.5% Rhat n.eff log.ion[1] -1.521 0.527 -2.541 -1.884 -1.504 -1.162 -0.480 1.002 560 log.ion[2] 2.992 0.260 2.407 2.845 3.020 3.170 3.431 1.005 300 log.ion[3] 5.234 0.167 4.839 5.139 5.246 5.347 5.521 1.003 410 log.ion[4] 5.776 0.177 5.378 5.667 5.798 5.894 6.071 0.999 600 log.ion[5] 5.821 0.192 5.397 5.709 5.836 5.948 6.149 0.999 600 log.ion[6] 5.847 0.248 5.295 5.691 5.877 6.013 6.310 1.002 600 log.ion[7] 6.118 0.260 5.518 5.970 6.136 6.282 6.583 1.006 410 log.ion[8] -34.517 0.553 -35.651 -34.890 -34.480 -34.150 -33.459 0.999 600 log.ion[9] -12.494 0.343 -13.240 -12.700 -12.450 -12.260 -11.910 1.003 430 log.ion[10] -2.621 0.220 -3.104 -2.735 -2.602 -2.476 -2.235 1.009 230 log.ion[11] 0.594 0.243 0.060 0.450 0.616 0.760 1.039 0.999 600 log.ion[12] 1.059 0.267 0.457 0.885 1.084 1.235 1.529 1.015 600 log.ion[13] 2.782 0.292 2.097 2.622 2.818 2.980 3.285 1.003 470 log.ion[14] 4.642 0.232 4.129 4.511 4.657 4.816 5.057 1.004 600 log.ion[15] -7.648 0.307 -8.345 -7.824 -7.635 -7.427 -7.100 0.999 600 log.ion[16] 0.815 0.044 0.711 0.790 0.817 0.841 0.897 0.999 600 log.ion[17] 2.756 0.095 2.559 2.703 2.759 2.815 2.938 1.006 600 log.ion[18] 3.212 0.149 2.893 3.133 3.226 3.302 3.474 1.006 560 log.ion[19] 3.360 0.149 3.058 3.265 3.374 3.459 3.606 1.008 220 log.ion[20] 3.710 0.158 3.379 3.615 3.723 3.816 4.022 0.999 600 log.ion[21] 4.421 0.504 3.491 4.082 4.410 4.737 5.465 1.003 520 log.ion[22] -9.080 0.287 -9.662 -9.259 -9.058 -8.878 -8.562 1.004 390 log.ion[23] -1.838 0.278 -2.474 -1.998 -1.833 -1.653 -1.325 0.999 600 log.ion[24] 2.504 0.080 2.342 2.454 2.509 2.561 2.637 1.001 600 log.ion[25] 3.063 0.136 2.771 2.992 3.071 3.155 3.301 1.002 600 log.ion[26] 3.106 0.140 2.802 3.032 3.117 3.192 3.357 1.004 380 log.ion[27] 3.560 0.161 3.172 3.472 3.564 3.665 3.856 0.999 600 log.ion[28] 4.057 0.170 3.680 3.962 4.062 4.173 4.363 1.001 600 log.ion[29] -1.478 0.253 -2.011 -1.629 -1.457 -1.312 -1.005 1.007 600 log.ion[30] 3.829 0.122 3.571 3.748 3.842 3.916 4.031 1.001 600 log.ion[31] 5.265 0.158 4.915 5.174 5.276 5.369 5.558 0.999 600 log.ion[32] 5.827 0.170 5.507 5.723 5.843 5.934 6.130 1.003 600 log.ion[33] 5.871 0.186 5.441 5.770 5.882 5.995 6.198 1.001 600 log.ion[34] 6.097 0.213 5.614 5.984 6.106 6.235 6.459 1.006 600 log.ion[35] 6.428 0.209 5.971 6.297 6.442 6.566 6.814 1.005 600 log.ion[36] -34.647 0.327 -35.351 -34.853 -34.610 -34.410 -34.080 0.999 600 log.ion[37] -12.098 0.314 -12.830 -12.262 -12.050 -11.888 -11.590 0.999 600 log.ion[38] -2.365 0.200 -2.838 -2.469 -2.343 -2.236 -2.047 1.011 240 log.ion[39] 0.391 0.271 -0.209 0.236 0.415 0.580 0.887 1.002 600 log.ion[40] 1.040 0.281 0.416 0.857 1.064 1.246 1.506 1.006 600 log.ion[41] 2.816 0.289 2.179 2.649 2.848 3.020 3.297 0.999 600 log.ion[42] 4.083 0.303 3.449 3.887 4.083 4.293 4.648 1.000 600 log.ion[43] -7.574 0.295 -8.128 -7.740 -7.560 -7.378 -7.046 1.002 600 log.ion[44] 0.650 0.054 0.528 0.622 0.656 0.685 0.746 1.003 600 log.ion[45] 3.057 0.066 2.915 3.019 3.062 3.098 3.186 1.002 530 log.ion[46] 3.662 0.092 3.476 3.604 3.663 3.718 3.847 1.000 600 log.ion[47] 3.834 0.092 3.664 3.775 3.834 3.896 4.007 1.009 550 log.ion[48] 4.822 0.058 4.694 4.789 4.827 4.858 4.924 1.000 600 log.ion[49] 5.087 0.072 4.950 5.040 5.088 5.132 5.230 1.006 360 log.ion[50] -9.067 0.289 -9.679 -9.244 -9.039 -8.871 -8.573 1.003 600 log.ion[51] -1.945 
0.294 -2.594 -2.113 -1.926 -1.762 -1.425 1.002 600 log.ion[52] 2.518 0.089 2.344 2.466 2.525 2.574 2.682 1.011 160 log.ion[53] 3.173 0.112 2.941 3.110 3.179 3.250 3.366 1.012 600 log.ion[54] 3.300 0.126 3.012 3.233 3.311 3.387 3.506 1.006 600 log.ion[55] 3.970 0.118 3.701 3.910 3.975 4.043 4.158 1.006 440 log.ion[56] 4.338 0.131 4.068 4.257 4.345 4.424 4.577 1.008 230 alpha -1.390 0.223 -1.810 -1.541 -1.392 -1.245 -0.953 1.010 360 beta1 2.111 0.792 0.483 1.599 2.081 2.642 3.692 0.999 600 beta2[1] -0.140 0.293 -0.770 -0.321 -0.120 0.053 0.393 1.012 250 beta2[2] -0.425 0.291 -0.989 -0.609 -0.400 -0.221 0.059 1.003 600 beta2[3] 0.358 0.303 -0.177 0.149 0.350 0.560 0.977 0.999 600 beta2[4] 0.008 0.268 -0.540 -0.139 0.004 0.168 0.549 1.012 500 beta2[5] 0.061 0.273 -0.493 -0.105 0.062 0.230 0.594 1.002 600 beta2[6] -0.429 0.277 -1.006 -0.607 -0.412 -0.240 0.064 1.012 600 beta2[7] 0.497 0.304 -0.026 0.296 0.472 0.683 1.109 1.003 600 beta2[8] 0.017 0.262 -0.507 -0.137 0.019 0.181 0.543 1.001 600 tau[1] 268.701 122.518 96.087 181.075 240.300 332.500 551.732 1.000 600 tau[2] 5.044 1.397 2.888 4.104 4.786 5.815 8.541 1.002 530 tau[3] 11.234 25.623 1.012 3.259 5.776 10.330 61.812 1.005 290 deviance -146.053 23.435 -190.905 -161.775 -145.550 -130.350 -98.120 0.999 600 For each parameter, n.eff is a crude measure of effective sample size, and Rhat is the potential scale reduction factor (at convergence, Rhat=1). DIC info (using the rule, pD = Dbar-Dhat) pD = 46.6 and DIC = -99.4 DIC is an estimate of expected predictive error (lower deviance is better).
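
For reference, here is a minimal R sketch of the kind of R2WinBUGS call that could produce a summary like the one above. The model-file path, chain settings, and saved parameter names are taken from the output header and table; the data list and initial values are placeholders, since they are not shown in this output.

library(R2WinBUGS)

## Placeholders: the actual data and initial values are not part of the output above.
bugs.data  <- list()   # named list of observations passed to WinBUGS
bugs.inits <- NULL     # NULL lets WinBUGS generate its own initial values

fit <- bugs(
  data = bugs.data,
  inits = bugs.inits,
  parameters.to.save = c("log.ion", "alpha", "beta1", "beta2", "tau"),
  model.file = "/home/kubo/nina/model200805no3/model.bug.txt",
  n.chains = 3,        # 3 chains
  n.iter = 20000,      # 20000 iterations per chain
  n.burnin = 10000,    # first 10000 discarded as burn-in
  n.thin = 50,         # keep every 50th draw: (20000 - 10000) / 50 * 3 = 600 saved samples
  DIC = TRUE           # compute pD and DIC
)
print(fit)             # prints a posterior summary table like the one above

Note that bugs() requires a local WinBUGS installation (see its bugs.directory argument).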