# We computed a nested F test to determine whether the entire set of # variables that collectively made up the possibility of different # slopes for different occupation groups significantly improved # the fit of the regression. # Here's the sum of squares for the more complex model (from SAS): > 1423.2197 [1] 1423.220 # We subtract the sum of squares for the simpler model: > diffss <- 1423.2197 - 1198.68503 > diffss [1] 224.5347 # We create a mean square for the difference by dividing by the # difference in df for the two models: > diffms <- diffss/8 > diffms [1] 28.06683 # We create an F statistic using the error mean square from # the more complex model: > F <- diffms / 19.12461 > F [1] 1.467577 # Finally, we evaluate the F statistic: > 1 - pf(F, 8, 1278) [1] 0.1644083 # Next, we turned to power analysis, first for a simple regression # with R^2 of 0.1. # We calculate the effect size [R^2 / (1 - R^2)]: > f2 <- .1/.9 > f2 [1] 0.1111111 # We consider a wide range of possible sample sizes: > N <- seq(50,200,1) # For each sample size, we compute the noncentrality parameter: > lambda <- N*f2 # We calculate the denominator degrees of freedom for each sample size: > dfd <- N-2 # We compute the critical F for each sample size: > fcrit <- qf( .95, 1, dfd) # Finally, we compute the probability that a random draw from the # noncentral F distribution will exceed the critical F (for each # sample size): > Power <- 1 - pf( fcrit, 1, dfd, lambda) # Here's a table of power for each N. Note that the results agree # with the answers we got from G*Power. 
> cbind(N, dfd, lambda, Power) N dfd lambda Power [1,] 50 48 5.555556 0.6367304 [2,] 51 49 5.666667 0.6456807 [3,] 52 50 5.777778 0.6544613 [4,] 53 51 5.888889 0.6630731 [5,] 54 52 6.000000 0.6715171 [6,] 55 53 6.111111 0.6797947 [7,] 56 54 6.222222 0.6879070 [8,] 57 55 6.333333 0.6958555 [9,] 58 56 6.444444 0.7036416 [10,] 59 57 6.555556 0.7112668 [11,] 60 58 6.666667 0.7187328 [12,] 61 59 6.777778 0.7260413 [13,] 62 60 6.888889 0.7331940 [14,] 63 61 7.000000 0.7401929 [15,] 64 62 7.111111 0.7470396 [16,] 65 63 7.222222 0.7537363 [17,] 66 64 7.333333 0.7602848 [18,] 67 65 7.444444 0.7666871 [19,] 68 66 7.555556 0.7729454 [20,] 69 67 7.666667 0.7790616 [21,] 70 68 7.777778 0.7850379 [22,] 71 69 7.888889 0.7908764 [23,] 72 70 8.000000 0.7965793 [24,] 73 71 8.111111 0.8021487 [25,] 74 72 8.222222 0.8075868 [26,] 75 73 8.333333 0.8128957 [27,] 76 74 8.444444 0.8180777 [28,] 77 75 8.555556 0.8231350 [29,] 78 76 8.666667 0.8280697 [30,] 79 77 8.777778 0.8328841 [31,] 80 78 8.888889 0.8375804 [32,] 81 79 9.000000 0.8421606 [33,] 82 80 9.111111 0.8466271 [34,] 83 81 9.222222 0.8509819 [35,] 84 82 9.333333 0.8552273 [36,] 85 83 9.444444 0.8593654 [37,] 86 84 9.555556 0.8633983 [38,] 87 85 9.666667 0.8673281 [39,] 88 86 9.777778 0.8711570 [40,] 89 87 9.888889 0.8748869 [41,] 90 88 10.000000 0.8785201 [42,] 91 89 10.111111 0.8820584 [43,] 92 90 10.222222 0.8855040 [44,] 93 91 10.333333 0.8888588 [45,] 94 92 10.444444 0.8921248 [46,] 95 93 10.555556 0.8953040 [47,] 96 94 10.666667 0.8983982 [48,] 97 95 10.777778 0.9014094 [49,] 98 96 10.888889 0.9043394 [50,] 99 97 11.000000 0.9071902 [51,] 100 98 11.111111 0.9099634 [52,] 101 99 11.222222 0.9126609 [53,] 102 100 11.333333 0.9152845 [54,] 103 101 11.444444 0.9178359 [55,] 104 102 11.555556 0.9203167 [56,] 105 103 11.666667 0.9227288 [57,] 106 104 11.777778 0.9250737 [58,] 107 105 11.888889 0.9273531 [59,] 108 106 12.000000 0.9295685 [60,] 109 107 12.111111 0.9317215 [61,] 110 108 12.222222 0.9338137 [62,] 111 109 12.333333 
0.9358466 [63,] 112 110 12.444444 0.9378216 [64,] 113 111 12.555556 0.9397403 [65,] 114 112 12.666667 0.9416039 [66,] 115 113 12.777778 0.9434140 [67,] 116 114 12.888889 0.9451719 [68,] 117 115 13.000000 0.9468789 [69,] 118 116 13.111111 0.9485364 [70,] 119 117 13.222222 0.9501456 [71,] 120 118 13.333333 0.9517078 [72,] 121 119 13.444444 0.9532242 [73,] 122 120 13.555556 0.9546961 [74,] 123 121 13.666667 0.9561246 [75,] 124 122 13.777778 0.9575109 [76,] 125 123 13.888889 0.9588561 [77,] 126 124 14.000000 0.9601613 [78,] 127 125 14.111111 0.9614276 [79,] 128 126 14.222222 0.9626560 [80,] 129 127 14.333333 0.9638476 [81,] 130 128 14.444444 0.9650034 [82,] 131 129 14.555556 0.9661243 [83,] 132 130 14.666667 0.9672114 [84,] 133 131 14.777778 0.9682656 [85,] 134 132 14.888889 0.9692877 [86,] 135 133 15.000000 0.9702787 [87,] 136 134 15.111111 0.9712395 [88,] 137 135 15.222222 0.9721708 [89,] 138 136 15.333333 0.9730736 [90,] 139 137 15.444444 0.9739486 [91,] 140 138 15.555556 0.9747967 [92,] 141 139 15.666667 0.9756186 [93,] 142 140 15.777778 0.9764150 [94,] 143 141 15.888889 0.9771867 [95,] 144 142 16.000000 0.9779344 [96,] 145 143 16.111111 0.9786587 [97,] 146 144 16.222222 0.9793605 [98,] 147 145 16.333333 0.9800402 [99,] 148 146 16.444444 0.9806987 [100,] 149 147 16.555556 0.9813364 [101,] 150 148 16.666667 0.9819540 [102,] 151 149 16.777778 0.9825521 [103,] 152 150 16.888889 0.9831313 [104,] 153 151 17.000000 0.9836921 [105,] 154 152 17.111111 0.9842351 [106,] 155 153 17.222222 0.9847609 [107,] 156 154 17.333333 0.9852698 [108,] 157 155 17.444444 0.9857625 [109,] 158 156 17.555556 0.9862394 [110,] 159 157 17.666667 0.9867009 [111,] 160 158 17.777778 0.9871477 [112,] 161 159 17.888889 0.9875801 [113,] 162 160 18.000000 0.9879985 [114,] 163 161 18.111111 0.9884033 [115,] 164 162 18.222222 0.9887951 [116,] 165 163 18.333333 0.9891742 [117,] 166 164 18.444444 0.9895409 [118,] 167 165 18.555556 0.9898957 [119,] 168 166 18.666667 0.9902389 [120,] 169 167 18.777778 
0.9905709 [121,] 170 168 18.888889 0.9908920 [122,] 171 169 19.000000 0.9912026 [123,] 172 170 19.111111 0.9915030 [124,] 173 171 19.222222 0.9917936 [125,] 174 172 19.333333 0.9920745 [126,] 175 173 19.444444 0.9923462 [127,] 176 174 19.555556 0.9926089 [128,] 177 175 19.666667 0.9928629 [129,] 178 176 19.777778 0.9931085 [130,] 179 177 19.888889 0.9933459 [131,] 180 178 20.000000 0.9935754 [132,] 181 179 20.111111 0.9937973 [133,] 182 180 20.222222 0.9940117 [134,] 183 181 20.333333 0.9942190 [135,] 184 182 20.444444 0.9944194 [136,] 185 183 20.555556 0.9946130 [137,] 186 184 20.666667 0.9948002 [138,] 187 185 20.777778 0.9949810 [139,] 188 186 20.888889 0.9951558 [140,] 189 187 21.000000 0.9953247 [141,] 190 188 21.111111 0.9954878 [142,] 191 189 21.222222 0.9956455 [143,] 192 190 21.333333 0.9957978 [144,] 193 191 21.444444 0.9959449 [145,] 194 192 21.555556 0.9960871 [146,] 195 193 21.666667 0.9962244 [147,] 196 194 21.777778 0.9963571 [148,] 197 195 21.888889 0.9964852 [149,] 198 196 22.000000 0.9966089 [150,] 199 197 22.111111 0.9967284 [151,] 200 198 22.222222 0.9968439 # We can also plot the power function: > plot(N, Power,type='l') # Here's what it looks like when we allow smaller sample sizes: > N <- seq(3,100,1) > lambda <- N*f2 > dfd <- N-2 > fcrit <- qf( .95, 1, dfd) > Power <- 1 - pf( fcrit, 1, dfd, lambda) > plot(N, Power,type='l') # Note that it never comes down to zero. If we plot a flat line # at Y=0, it's off the bottom of our plot: > abline(c(0,0)) # Next, we considered a power analysis for an INCREMENT in R^2 of # .10 when we add two predictors to an analysis that already has # three predictors. 
# The effect size is the same as in the last analysis: > f2 [1] 0.1111111 # We consider a sequence of sample sizes: > N <- seq(3,200,1) > lambda <- N*f2 # This time the df is N-6 (= N - number of predictors - 1) > dfd <- N-6 # Oops: we went so low with N that the model is not identified # (because we had more parameters than data points): > fcrit<-qf(.95, 2, dfd) Warning message: In qf(p, df1, df2, lower.tail, log.p) : NaNs produced # We correct that problem: > N <- seq(10,200,1) > lambda <- N*f2 > dfd <- N-6 > fcrit<-qf(.95, 2, dfd) > Power <- 1-pf(fcrit, 2, dfd, lambda) > plot(N,Power) # Here's the table of power as a function of N (which again # agrees with our much easier analysis in G*Power): > cbind(N,Power) N Power [1,] 10 0.09484472 [2,] 11 0.10644293 [3,] 12 0.11782745 [4,] 13 0.12906525 [5,] 14 0.14020777 [6,] 15 0.15129113 [7,] 16 0.16233965 [8,] 17 0.17336914 [9,] 18 0.18438939 [10,] 19 0.19540586 [11,] 20 0.20642097 [12,] 21 0.21743494 [13,] 22 0.22844642 [14,] 23 0.23945294 [15,] 24 0.25045120 [16,] 25 0.26143736 [17,] 26 0.27240716 [18,] 27 0.28335612 [19,] 28 0.29427957 [20,] 29 0.30517278 [21,] 30 0.31603100 [22,] 31 0.32684950 [23,] 32 0.33762362 [24,] 33 0.34834880 [25,] 34 0.35902058 [26,] 35 0.36963462 [27,] 36 0.38018674 [28,] 37 0.39067290 [29,] 38 0.40108922 [30,] 39 0.41143199 [31,] 40 0.42169765 [32,] 41 0.43188283 [33,] 42 0.44198430 [34,] 43 0.45199902 [35,] 44 0.46192411 [36,] 45 0.47175687 [37,] 46 0.48149473 [38,] 47 0.49113531 [39,] 48 0.50067638 [40,] 49 0.51011587 [41,] 50 0.51945186 [42,] 51 0.52868256 [43,] 52 0.53780636 [44,] 53 0.54682177 [45,] 54 0.55572744 [46,] 55 0.56452214 [47,] 56 0.57320479 [48,] 57 0.58177444 [49,] 58 0.59023023 [50,] 59 0.59857144 [51,] 60 0.60679747 [52,] 61 0.61490781 [53,] 62 0.62290206 [54,] 63 0.63077992 [55,] 64 0.63854120 [56,] 65 0.64618579 [57,] 66 0.65371368 [58,] 67 0.66112493 [59,] 68 0.66841969 [60,] 69 0.67559820 [61,] 70 0.68266076 [62,] 71 0.68960774 [63,] 72 0.69643959 [64,] 73 
0.70315682 [65,] 74 0.70976000 [66,] 75 0.71624975 [67,] 76 0.72262676 [68,] 77 0.72889178 [69,] 78 0.73504557 [70,] 79 0.74108898 [71,] 80 0.74702288 [72,] 81 0.75284819 [73,] 82 0.75856586 [74,] 83 0.76417689 [75,] 84 0.76968231 [76,] 85 0.77508316 [77,] 86 0.78038053 [78,] 87 0.78557555 [79,] 88 0.79066933 [80,] 89 0.79566305 [81,] 90 0.80055789 [82,] 91 0.80535504 [83,] 92 0.81005572 [84,] 93 0.81466117 [85,] 94 0.81917262 [86,] 95 0.82359133 [87,] 96 0.82791858 [88,] 97 0.83215563 [89,] 98 0.83630378 [90,] 99 0.84036430 [91,] 100 0.84433850 [92,] 101 0.84822766 [93,] 102 0.85203310 [94,] 103 0.85575610 [95,] 104 0.85939798 [96,] 105 0.86296003 [97,] 106 0.86644354 [98,] 107 0.86984981 [99,] 108 0.87318014 [100,] 109 0.87643580 [101,] 110 0.87961807 [102,] 111 0.88272824 [103,] 112 0.88576756 [104,] 113 0.88873730 [105,] 114 0.89163870 [106,] 115 0.89447301 [107,] 116 0.89724146 [108,] 117 0.89994526 [109,] 118 0.90258563 [110,] 119 0.90516378 [111,] 120 0.90768088 [112,] 121 0.91013812 [113,] 122 0.91253666 [114,] 123 0.91487765 [115,] 124 0.91716223 [116,] 125 0.91939153 [117,] 126 0.92156665 [118,] 127 0.92368871 [119,] 128 0.92575877 [120,] 129 0.92777792 [121,] 130 0.92974721 [122,] 131 0.93166768 [123,] 132 0.93354036 [124,] 133 0.93536626 [125,] 134 0.93714639 [126,] 135 0.93888172 [127,] 136 0.94057322 [128,] 137 0.94222185 [129,] 138 0.94382855 [130,] 139 0.94539424 [131,] 140 0.94691983 [132,] 141 0.94840621 [133,] 142 0.94985427 [134,] 143 0.95126487 [135,] 144 0.95263885 [136,] 145 0.95397706 [137,] 146 0.95528031 [138,] 147 0.95654942 [139,] 148 0.95778516 [140,] 149 0.95898833 [141,] 150 0.96015968 [142,] 151 0.96129995 [143,] 152 0.96240990 [144,] 153 0.96349022 [145,] 154 0.96454164 [146,] 155 0.96556485 [147,] 156 0.96656052 [148,] 157 0.96752932 [149,] 158 0.96847191 [150,] 159 0.96938891 [151,] 160 0.97028097 [152,] 161 0.97114870 [153,] 162 0.97199269 [154,] 163 0.97281353 [155,] 164 0.97361181 [156,] 165 0.97438808 [157,] 166 0.97514290 
[158,] 167 0.97587681 [159,] 168 0.97659033 [160,] 169 0.97728399 [161,] 170 0.97795830 [162,] 171 0.97861374 [163,] 172 0.97925080 [164,] 173 0.97986995 [165,] 174 0.98047166 [166,] 175 0.98105638 [167,] 176 0.98162456 [168,] 177 0.98217661 [169,] 178 0.98271298 [170,] 179 0.98323406 [171,] 180 0.98374026 [172,] 181 0.98423197 [173,] 182 0.98470959 [174,] 183 0.98517348 [175,] 184 0.98562401 [176,] 185 0.98606154 [177,] 186 0.98648641 [178,] 187 0.98689898 [179,] 188 0.98729956 [180,] 189 0.98768849 [181,] 190 0.98806608 [182,] 191 0.98843264 [183,] 192 0.98878848 [184,] 193 0.98913387 [185,] 194 0.98946912 [186,] 195 0.98979451 [187,] 196 0.99011029 [188,] 197 0.99041675 [189,] 198 0.99071413 [190,] 199 0.99100270 [191,] 200 0.99128269 # Finally, we did a power analysis for a single slope in a multiple regression. # In SAS, we calculated the variance of semi-partial X (obtained by regressing # the X of interest on the other predictors, taking the residuals, and computing # their variance). Then we used the formula for converting the slope to the # R^2 metric (see today's Powerpoint): > (-.012)^2 * 3319/20 [1] 0.0238968 # We calculated the effect size, and then finished the analysis in G*Power. > f2 <- (.024)/(1-.024) > f2 [1] 0.02459016 >