First, consider the following code:
setwd("/Users/soeren/Documents/RProjects/HomeworkDiving/JaysSolutionBigModel")
x <- read.csv("http://www.stat.yale.edu/~jay/625/diving/Diving2000.csv", as.is = TRUE)
x <- cbind(x, M = rep(tapply(x$JScore, factor(rep(1:(length(x$Rank)/7), each = 7)),
    mean), each = 7))  # M is the mean judge score over the panel for each dive
x <- cbind(x, id = rep(1:(length(x[, 1])/7), each = 7))  # id is an identifier for each dive
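# Sanity check (a sketch, not evaluated here): the construction of M above assumes
# that every dive has exactly 7 judge scores, so this tabulation should report a
# single panel size of 7.
# table(table(x$id))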
###############################################################
getcontrasts1 <- function(x, Name, verbose = FALSE) {
    # My function assumes x is a data frame containing two columns which should
    # be factors (categorical variables).
    # The result is a portion of a model matrix that provides indicators for the
    # complete set of interactions between the factors, using 'contr.sum' across
    # both the 'rows' and 'columns' of the matrix of coefficients.
    # The challenge is to handle this correctly even when there are missing sets
    # of interactions.
    var1 <- names(x)[1]
    var2 <- names(x)[2]
    levels1 <- levels(x[, 1])
    levels2 <- levels(x[, 2])
    Nlevels1 <- length(levels(x[, 1]))
    Nlevels2 <- length(levels(x[, 2]))
    if (verbose) {
        cat("Variables:", var1, var2, "\n")
        cat("Levels of", var1, ":", levels1, "\n")
        cat("Levels of", var2, ":", levels2, "\n")
    }
    # Step 1. Create a coefficient matrix indicating where the missing
    # interactions are.
    xmis <- table(x[, 1], x[, 2]) == 0
    if (verbose)
        print(xmis)
    # Step 2. Create a coefficient matrix indicating which cells need to
    # involve the contr.sum constraints (instead of introducing a new
    # coefficient): 'c' indicates a column constraint, 'r' indicates a row
    # constraint. The last position can be either a row or column constraint,
    # without loss of generality.
    xcon <- xmis
    for (i in 1:nrow(xmis))
        if (max(which(!xmis[i, ])) != 1)
            xcon[i, max(which(!xmis[i, ]))] <- "r"
    for (i in 1:ncol(xmis))
        xcon[max(which(!xmis[, i])), i] <- "c"
    xcon[xcon != "c" & xcon != "r"] <- ""
    if (verbose)
        print(xcon)
    # Step 3. Get the job done. This is your task, hopefully made easier with
    # the information constructed above.
    ans.pt <- t(xcon)
    ans.pt[t(xmis)] <- "empty"
    beta.size <- sum(c(ans.pt) == "")
    ans <- matrix(0, ncol = beta.size, nrow = Nlevels1 * Nlevels2)
    rownames(ans) <- numeric(Nlevels1 * Nlevels2)
    colnames(ans) <- paste(Name, 1:beta.size, sep = "")
    ans[(c(ans.pt) == ""), ] <- diag(beta.size)
    for (i in 1:Nlevels1) {
        for (j in 1:Nlevels2) {
            rownames(ans)[(i - 1) * Nlevels2 + j] <- paste(Name, levels1[i], levels2[j])
            if (ans.pt[j, i] == "r") {
                # Row constraint: this cell is minus the sum of the preceding
                # cells in the same row.
                ans[(i - 1) * Nlevels2 + j, ] <- 0
                for (b in 1:(j - 1)) {
                    if (j > 1)
                        ans[(i - 1) * Nlevels2 + j, ] <- ans[(i - 1) * Nlevels2 + j, ] -
                            ans[(i - 1) * Nlevels2 + b, ]
                }
            }
            if (ans.pt[j, i] == "c") {
                # Column constraint: this cell is minus the sum of the preceding
                # cells in the same column.
                ans[(i - 1) * Nlevels2 + j, ] <- 0
                for (b in 1:(i - 1)) {
                    if (i > 1)
                        ans[(i - 1) * Nlevels2 + j, ] <- ans[(i - 1) * Nlevels2 + j, ] -
                            ans[(b - 1) * Nlevels2 + j, ]
                }
            }
        }
    }
    # Cells for judge/country combinations that never occur are marked NA.
    for (i in 1:Nlevels1) {
        for (j in 1:Nlevels2) {
            if (ans.pt[j, i] == "empty") {
                ans[(i - 1) * Nlevels2 + j, ] <- NA
            }
        }
    }
    if (verbose)
        print(t(ans.pt))
    if (verbose)
        print(ans)
    return(ans)
}
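# Illustration (a hypothetical toy example, not part of the analysis): two factors
# with one empty cell ("J2" never judges "GER"). With three-by-two cells and one of
# them empty, there is a single free coefficient; the remaining cells are filled in
# by the row/column sum-to-zero constraints and the empty cell is returned as NA.
# toy <- data.frame(Judge = factor(c("J1", "J1", "J2", "J2", "J3", "J3")),
#                   Country = factor(c("GER", "USA", "USA", "USA", "GER", "USA")))
# getcontrasts1(toy, "demo", verbose = TRUE)
# # Returns a 6 x 1 matrix: demo J1 GER = 1, demo J1 USA = -1, demo J2 GER = NA,
# # demo J2 USA = 0, demo J3 GER = -1, demo J3 USA = 1.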
getcontrasts2 <- function(f, Name) {
    f <- factor(f)
    l <- levels(f)
    Nl <- length(l)
    # Standard sum-to-zero coding: the last level is minus the sum of the others.
    ans <- rbind(diag(Nl - 1), -1)
    colnames(ans) <- paste(Name, 1:(Nl - 1), sep = "")
    rownames(ans) <- paste(Name, " ", l, sep = "")
    return(ans)
}
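# Illustration (a hypothetical toy factor, not part of the analysis): getcontrasts2()
# produces ordinary sum-to-zero ("contr.sum") coding, so every coefficient column
# sums to zero and the last level carries -1 in every column.
# getcontrasts2(c("a", "b", "c"), "demo")
# #        demo1 demo2
# # demo a     1     0
# # demo b     0     1
# # demo c    -1    -1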
# Drop these countries so that the design matrix does not become collinear.
x <- x[!(x$Country %in% c("ARG", "ARM", "AZE", "CZE", "GEO", "HKG", "ZIM", "FIN")), ]
x <- cbind(x, bbias = paste("bias", x$Judge, x$Country))  # matches the row names of 'bias' below
biasinput <- data.frame(Judge = factor(x$Judge), Country = factor(x$Country))
bias <- getcontrasts1(biasinput, "bias", verbose = FALSE)
# The p-values for ZAITSEV Oleg can be obtained by rebuilding the contrasts with the
# Judge factor reordered so that he is no longer the last level, e.g.:
# biasinputr <- data.frame(Judge = factor(x$Judge, unique(x$Judge)),
#                          Country = factor(x$Country))
# biasr <- getcontrasts1(biasinputr, "bias", verbose = FALSE)
enth <- getcontrasts2(x$Judge, "enth")
qual <- getcontrasts2(x$id, "qual")
# Build the regression matrix X: an intercept, the judge-by-country preference (bias)
# contrasts, the per-judge systematic tendency (enth) contrasts, and the per-dive
# quality (qual) contrasts, matched to each score by row name.
X <- matrix(nrow = nrow(x), ncol = 1 + ncol(bias) + ncol(enth) + ncol(qual))
X[, 1] <- 1
b <- (ncol(bias) + 1)
X[1:nrow(X), 2:b] <- bias[as.character(x$bbias[1:nrow(X)]), ]
c <- b + ncol(enth)
X[1:nrow(X), (b + 1):c] <- enth[paste("enth", x$Judge[1:nrow(X)]), ]
d <- c + ncol(qual)
X[1:nrow(X), (c + 1):d] <- qual[paste("qual", x$id[1:nrow(X)]), ]
colnames(X) <- c("inter", colnames(bias), colnames(enth), colnames(qual))
write.csv(X, "ModelM.csv")
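# Optional check (a sketch, not evaluated here; qr() on a matrix of this size can be
# slow): with the rare countries dropped above, the design matrix should have full
# column rank, i.e. this comparison should be TRUE.
# qr(X)$rank == ncol(X)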
########################################################################
LM <- lsfit(X, x$JScore, intercept = FALSE)
LM.print <- ls.print(LM, print.it = FALSE)
b <- LM.print$coef[[1]][2:726, 1]  # estimates of the 725 bias coefficients
p <- LM.print$coef[[1]][2:726, 4]  # their p-values
expectation <- bias %*% b
# A p-value is only meaningful for rows of 'bias' that pick out a single fitted
# coefficient (sum of squares equal to 1); all other rows are flagged and set to NA.
pvalue <- -1000 * (apply(bias^2, 1, sum) != 1) + bias %*% p
pvalue[pvalue < 0] <- NA
A <- cbind(expectation, pvalue)
Country.l <- levels(factor(x$Country))
NCountry <- length(Country.l)
Judge.l <- levels(factor(x$Judge))
Judge.lc <- unique(x[, c("Judge", "JCountry")])[order(unique(x$Judge)), ]
NJudge <- length(Judge.l)
A <- cbind(Judge = rep(Judge.l, each = NCountry), JCountry = rep(Judge.lc[,
2], each = NCountry), Country = rep(Country.l, by = NJudge), expectation = A[,
1], pvalue = A[, 2])
rownames(A) <- NULL
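For reference, the same least-squares fit can be obtained with lm(); the sketch below is not evaluated here, and fit.lm is a name introduced only for this illustration.

fit.lm <- lm(x$JScore ~ X - 1)  # '-1' because X already carries an intercept column
all.equal(unname(coef(fit.lm)), unname(LM$coefficients))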
Now look at the p-values of the t-tests that a judge's preference term for divers from his or her own country is zero. Ten of the sixteen judges shown have a p-value below .05, which suggests that they are likely biased towards their own country. The p-value for “ZAITSEV Oleg” does not appear here because he is the last level of the Judge factor, so his terms are fixed by the sum-to-zero constraints; it can be computed by reordering the factor before bias is created (see the commented-out lines above and the sketch following the table below). His p-value is about 0.00006.
A[A[, 2] == A[, 3] & !is.na(pvalue), ]
##       Judge                      JCountry Country expectation            pvalue
##  [1,] "ALT Walter"               "GER"    "GER"   "0.349980377926253"    "0.00014344855095401"
##  [2,] "BARNETT Madeleine"        "AUS"    "AUS"   "0.28269145772221"     "0.000169059286506135"
##  [3,] "BOOTHROYD Sydney"         "GBR"    "GBR"   "0.303652369334303"    "0.00874964583453382"
##  [4,] "BOUSSARD Michel"          "FRA"    "FRA"   "0.176313636452231"    "0.216213165099392"
##  [5,] "BOYS Beverley"            "CAN"    "CAN"   "0.221002676016291"    "0.081261981879491"
##  [6,] "BURK Hans-Peter"          "GER"    "GER"   "0.447664091574213"    "0.00194098333854588"
##  [7,] "CALDERON Felix"           "PUR"    "PUR"   "0.207492250568533"    "0.325589015069774"
##  [8,] "CRUZ Julia"               "ESP"    "ESP"   "0.36329638097234"     "0.00934231859251307"
##  [9,] "GEISSBUHLER Michael"      "SUI"    "SUI"   "1.30157213723131"     "9.86296419227215e-05"
## [10,] "HUBER Peter"              "AUT"    "AUT"   "0.436129879560929"    "0.00898991999538189"
## [11,] "McFARLAND Steve"          "USA"    "USA"   "0.201374560541817"    "0.00467018611040426"
## [12,] "MENA Jesus"               "MEX"    "MEX"   "0.31751071218745"     "0.000257063770333041"
## [13,] "RUIZ-PEDREGUERA Rolando"  "CUB"    "CUB"   "0.310362765140474"    "0.0251078041606519"
## [14,] "SEAMAN Kathy"             "CAN"    "CAN"   "0.17473185388175"     "0.135612662858776"
## [15,] "WANG Facheng"             "CHN"    "CHN"   "-0.0430544546865896"  "0.663293540175683"
## [16,] "XU Yiming"                "CHN"    "CHN"   "0.214764666183366"    "0.0507294078943507"
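As noted above, reordering the Judge factor makes ZAITSEV Oleg's preference terms directly estimable. The sketch below (not evaluated here) expands the commented-out lines in the code above; Xr, LMr, pr and pvaluer are names introduced only for this illustration.

biasinputr <- data.frame(Judge = factor(x$Judge, unique(x$Judge)),  # new level order
                         Country = factor(x$Country))
biasr <- getcontrasts1(biasinputr, "bias")
Xr <- cbind(inter = 1, biasr[as.character(x$bbias), ],
            enth[paste("enth", x$Judge), ], qual[paste("qual", x$id), ])
LMr <- lsfit(Xr, x$JScore, intercept = FALSE)
pr <- ls.print(LMr, print.it = FALSE)$coef[[1]][2:(ncol(biasr) + 1), 4]
# As before, keep only the p-values that belong to plain indicator rows of biasr.
pvaluer <- -1000 * (apply(biasr^2, 1, sum) != 1) + biasr %*% pr
pvaluer[pvaluer < 0] <- NA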
This is a simple linear regression model. The regression matrix above was built under the following assumptions: if b_ij denotes the preference of judge i for divers of country j, then the preferences should satisfy b_1j + … + b_nj = 0 for every country j and b_i1 + … + b_im = 0 for every judge i. Moreover, the systematic tendencies of the judges (the enth terms) should sum to zero, and so should the unobserved dive qualities (the qual terms).
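These constraints can be checked numerically from the contrast matrices themselves. The sketch below (not evaluated here; country and bias0 are helper names introduced only for this check) verifies that every coefficient column of enth and qual sums to zero, and that the bias columns sum to zero within each country once the NA rows for unobserved judge/country combinations are excluded. The per-judge sums can be checked the same way by grouping on the judge part of the row names.

max(abs(colSums(enth)))   # exactly 0 by construction of getcontrasts2()
max(abs(colSums(qual)))   # exactly 0 by construction of getcontrasts2()
country <- sub(".* ", "", rownames(bias))   # last token of each row name = country code
bias0 <- bias
bias0[is.na(bias0)] <- 0                    # unobserved judge/country cells stay out of the sums
max(abs(rowsum(bias0, country)))            # should be numerically 0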
library(Matrix)
## Loading required package: lattice
R <- bdiag(bias, enth, qual)  # all three contrast blocks combined, for inspection
colnames(R) <- c(colnames(bias), colnames(enth), colnames(qual))
rownames(R) <- c(rownames(bias), rownames(enth), rownames(qual))
head(R, 50)
## 50 x 2237 sparse Matrix of class "dgCMatrix"
## [[ suppressing 31 column names 'bias1', 'bias2', 'bias3' ... ]]
##
## bias ALT Walter AUS 1 . . . . . . . . . . . . . . .
## bias ALT Walter AUT . 1 . . . . . . . . . . . . . .
## bias ALT Walter BLR . . 1 . . . . . . . . . . . . .
## bias ALT Walter BRA . . . 1 . . . . . . . . . . . .
## bias ALT Walter CAN . . . . 1 . . . . . . . . . . .
## bias ALT Walter CHN . . . . . 1 . . . . . . . . . .
## bias ALT Walter COL . . . . . . 1 . . . . . . . . .
## bias ALT Walter CUB . . . . . . . 1 . . . . . . . .
## bias ALT Walter ESP . . . . . . . . 1 . . . . . . .
## bias ALT Walter FRA . . . . . . . . . 1 . . . . . .
## bias ALT Walter GBR . . . . . . . . . . 1 . . . . .
## bias ALT Walter GER . . . . . . . . . . . 1 . . . .
## bias ALT Walter GRE . . . . . . . . . . . . 1 . . .
## bias ALT Walter HUN . . . . . . . . . . . . . 1 . .
## bias ALT Walter INA . . . . . . . . . . . . . . 1 .
## bias ALT Walter ITA . . . . . . . . . . . . . . . 1
## bias ALT Walter JPN . . . . . . . . . . . . . . . .
## bias ALT Walter KAZ . . . . . . . . . . . . . . . .
## bias ALT Walter KOR . . . . . . . . . . . . . . . .
## bias ALT Walter MAS . . . . . . . . . . . . . . . .
## bias ALT Walter MEX . . . . . . . . . . . . . . . .
## bias ALT Walter PER . . . . . . . . . . . . . . . .
## bias ALT Walter PHI . . . . . . . . . . . . . . . .
## bias ALT Walter PRK . . . . . . . . . . . . . . . .
## bias ALT Walter PUR . . . . . . . . . . . . . . . .
## bias ALT Walter ROM . . . . . . . . . . . . . . . .
## bias ALT Walter RUS . . . . . . . . . . . . . . . .
## bias ALT Walter SUI . . . . . . . . . . . . . . . .
## bias ALT Walter SWE . . . . . . . . . . . . . . . .
## bias ALT Walter THA . . . . . . . . . . . . . . . .
## bias ALT Walter TPE . . . . . . . . . . . . . . . .
## bias ALT Walter UKR . . . . . . . . . . . . . . . .
## bias ALT Walter USA . . . . . . . . . . . . . . . .
## bias ALT Walter VEN -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1
## bias BARNETT Madeleine AUS . . . . . . . . . . . . . . . .
## bias BARNETT Madeleine AUT . . . . . . . . . . . . . . . .
## bias BARNETT Madeleine BLR . . . . . . . . . . . . . . . .
## bias BARNETT Madeleine BRA . . . . . . . . . . . . . . . .
## bias BARNETT Madeleine CAN . . . . . . . . . . . . . . . .
## bias BARNETT Madeleine CHN . . . . . . . . . . . . . . . .
## bias BARNETT Madeleine COL . . . . . . . . . . . . . . . .
## bias BARNETT Madeleine CUB . . . . . . . . . . . . . . . .
## bias BARNETT Madeleine ESP . . . . . . . . . . . . . . . .
## bias BARNETT Madeleine FRA . . . . . . . . . . . . . . . .
## bias BARNETT Madeleine GBR . . . . . . . . . . . . . . . .
## bias BARNETT Madeleine GER . . . . . . . . . . . . . . . .
## bias BARNETT Madeleine GRE . . . . . . . . . . . . . . . .
## bias BARNETT Madeleine HUN . . . . . . . . . . . . . . . .
## bias BARNETT Madeleine INA . . . . . . . . . . . . . . . .
## bias BARNETT Madeleine ITA . . . . . . . . . . . . . . . .
##
## bias ALT Walter AUS . . . . . . . . . . . . . . .
## bias ALT Walter AUT . . . . . . . . . . . . . . .
## bias ALT Walter BLR . . . . . . . . . . . . . . .
## bias ALT Walter BRA . . . . . . . . . . . . . . .
## bias ALT Walter CAN . . . . . . . . . . . . . . .
## bias ALT Walter CHN . . . . . . . . . . . . . . .
## bias ALT Walter COL . . . . . . . . . . . . . . .
## bias ALT Walter CUB . . . . . . . . . . . . . . .
## bias ALT Walter ESP . . . . . . . . . . . . . . .
## bias ALT Walter FRA . . . . . . . . . . . . . . .
## bias ALT Walter GBR . . . . . . . . . . . . . . .
## bias ALT Walter GER . . . . . . . . . . . . . . .
## bias ALT Walter GRE . . . . . . . . . . . . . . .
## bias ALT Walter HUN . . . . . . . . . . . . . . .
## bias ALT Walter INA . . . . . . . . . . . . . . .
## bias ALT Walter ITA . . . . . . . . . . . . . . .
## bias ALT Walter JPN 1 . . . . . . . . . . . . . .
## bias ALT Walter KAZ . 1 . . . . . . . . . . . . .
## bias ALT Walter KOR . . 1 . . . . . . . . . . . .
## bias ALT Walter MAS . . . 1 . . . . . . . . . . .
## bias ALT Walter MEX . . . . 1 . . . . . . . . . .
## bias ALT Walter PER . . . . . 1 . . . . . . . . .
## bias ALT Walter PHI . . . . . . 1 . . . . . . . .
## bias ALT Walter PRK . . . . . . . 1 . . . . . . .
## bias ALT Walter PUR . . . . . . . . 1 . . . . . .
## bias ALT Walter ROM . . . . . . . . . 1 . . . . .
## bias ALT Walter RUS . . . . . . . . . . 1 . . . .
## bias ALT Walter SUI . . . . . . . . . . . 1 . . .
## bias ALT Walter SWE . . . . . . . . . . . . 1 . .
## bias ALT Walter THA . . . . . . . . . . . . . 1 .
## bias ALT Walter TPE . . . . . . . . . . . . . . 1
## bias ALT Walter UKR . . . . . . . . . . . . . . .
## bias ALT Walter USA . . . . . . . . . . . . . . .
## bias ALT Walter VEN -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1
## bias BARNETT Madeleine AUS . . . . . . . . . . . . . . .
## bias BARNETT Madeleine AUT . . . . . . . . . . . . . . .
## bias BARNETT Madeleine BLR . . . . . . . . . . . . . . .
## bias BARNETT Madeleine BRA . . . . . . . . . . . . . . .
## bias BARNETT Madeleine CAN . . . . . . . . . . . . . . .
## bias BARNETT Madeleine CHN . . . . . . . . . . . . . . .
## bias BARNETT Madeleine COL . . . . . . . . . . . . . . .
## bias BARNETT Madeleine CUB . . . . . . . . . . . . . . .
## bias BARNETT Madeleine ESP . . . . . . . . . . . . . . .
## bias BARNETT Madeleine FRA . . . . . . . . . . . . . . .
## bias BARNETT Madeleine GBR . . . . . . . . . . . . . . .
## bias BARNETT Madeleine GER . . . . . . . . . . . . . . .
## bias BARNETT Madeleine GRE . . . . . . . . . . . . . . .
## bias BARNETT Madeleine HUN . . . . . . . . . . . . . . .
## bias BARNETT Madeleine INA . . . . . . . . . . . . . . .
## bias BARNETT Madeleine ITA . . . . . . . . . . . . . . .
##
## bias ALT Walter AUS ......
## bias ALT Walter AUT ......
## bias ALT Walter BLR ......
## bias ALT Walter BRA ......
## bias ALT Walter CAN ......
## bias ALT Walter CHN ......
## bias ALT Walter COL ......
## bias ALT Walter CUB ......
## bias ALT Walter ESP ......
## bias ALT Walter FRA ......
## bias ALT Walter GBR ......
## bias ALT Walter GER ......
## bias ALT Walter GRE ......
## bias ALT Walter HUN ......
## bias ALT Walter INA ......
## bias ALT Walter ITA ......
## bias ALT Walter JPN ......
## bias ALT Walter KAZ ......
## bias ALT Walter KOR ......
## bias ALT Walter MAS ......
## bias ALT Walter MEX ......
## bias ALT Walter PER ......
## bias ALT Walter PHI ......
## bias ALT Walter PRK ......
## bias ALT Walter PUR ......
## bias ALT Walter ROM ......
## bias ALT Walter RUS ......
## bias ALT Walter SUI ......
## bias ALT Walter SWE ......
## bias ALT Walter THA ......
## bias ALT Walter TPE ......
## bias ALT Walter UKR ......
## bias ALT Walter USA ......
## bias ALT Walter VEN ......
## bias BARNETT Madeleine AUS ......
## bias BARNETT Madeleine AUT ......
## bias BARNETT Madeleine BLR ......
## bias BARNETT Madeleine BRA ......
## bias BARNETT Madeleine CAN ......
## bias BARNETT Madeleine CHN ......
## bias BARNETT Madeleine COL ......
## bias BARNETT Madeleine CUB ......
## bias BARNETT Madeleine ESP ......
## bias BARNETT Madeleine FRA ......
## bias BARNETT Madeleine GBR ......
## bias BARNETT Madeleine GER ......
## bias BARNETT Madeleine GRE ......
## bias BARNETT Madeleine HUN ......
## bias BARNETT Madeleine INA ......
## bias BARNETT Madeleine ITA ......
##
## .....suppressing columns in show(); maybe adjust 'options(max.print= *)'
## ..............................
For reference, here is a random sample of 50 of the fitted coefficients of the model.
LM.print$coef[[1]][sample(1:2000, 50), c(1, 2, 3, 4)]
## Estimate Std.Err t-value Pr(>|t|)
## bias393 0.185417 0.07566 2.45067 1.428e-02
## bias199 -0.028170 0.13766 -0.20463 8.379e-01
## bias496 -0.049928 0.20195 -0.24723 8.047e-01
## qual1025 -1.706208 0.16585 -10.28771 1.131e-24
## qual607 0.794660 0.16982 4.67929 2.925e-06
## bias398 0.076265 0.10157 0.75088 4.527e-01
## bias294 -0.015514 0.22062 -0.07032 9.439e-01
## qual350 1.254351 0.16553 7.57770 3.902e-14
## qual1215 -1.373655 0.16226 -8.46578 2.987e-17
## qual149 0.018386 0.16965 0.10838 9.137e-01
## qual1003 -2.147597 0.17484 -12.28335 2.232e-34
## bias252 0.172593 0.11825 1.45961 1.444e-01
## qual167 -0.311231 0.17327 -1.79618 7.250e-02
## qual870 -0.281294 0.16249 -1.73112 8.347e-02
## qual260 -0.053347 0.18371 -0.29038 7.715e-01
## qual1001 -0.770898 0.16601 -4.64356 3.478e-06
## bias308 -0.043062 0.09356 -0.46025 6.454e-01
## bias356 -0.402355 0.21872 -1.83956 6.587e-02
## bias258 -0.209836 0.12158 -1.72589 8.440e-02
## bias150 0.038772 0.15586 0.24876 8.036e-01
## qual192 0.139056 0.17804 0.78103 4.348e-01
## bias534 -0.013347 0.09115 -0.14644 8.836e-01
## qual988 -2.752567 0.16711 -16.47207 5.343e-60
## bias421 0.069431 0.12190 0.56956 5.690e-01
## qual1228 1.671419 0.16262 10.27826 1.246e-24
## bias607 -0.116900 0.15570 -0.75079 4.528e-01
## qual472 -0.321263 0.16182 -1.98536 4.714e-02
## bias130 0.006292 0.06085 0.10340 9.176e-01
## bias380 0.020102 0.17137 0.11730 9.066e-01
## qual480 0.354564 0.16149 2.19553 2.815e-02
## qual435 3.040273 0.16139 18.83781 1.589e-77
## qual1096 0.657673 0.16601 3.96154 7.510e-05
## qual256 -1.882660 0.17327 -10.86524 2.591e-27
## qual482 0.178737 0.16182 1.10457 2.694e-01
## bias633 0.015468 0.24132 0.06410 9.489e-01
## qual1187 0.910215 0.16321 5.57681 2.528e-08
## qual929 -0.231533 0.16737 -1.38334 1.666e-01
## qual453 1.793763 0.16494 10.87519 2.327e-27
## qual1236 1.557998 0.16312 9.55098 1.663e-21
## bias286 -0.014791 0.10633 -0.13911 8.894e-01
## qual270 -4.608309 0.17842 -25.82843 1.764e-141
## qual984 -3.689207 0.18505 -19.93675 2.144e-86
## bias406 0.556671 0.27638 2.01415 4.403e-02
## qual691 -2.714930 0.18003 -15.08077 1.036e-50
## qual1140 0.989073 0.22296 4.43600 9.284e-06
## bias21 -0.110425 0.09449 -1.16861 2.426e-01
## qual940 0.417745 0.16476 2.53549 1.125e-02
## qual28 0.536963 0.16239 3.30659 9.484e-04
## qual266 0.876493 0.17891 4.89899 9.818e-07
## qual981 -0.268087 0.17116 -1.56627 1.173e-01
A[1:50, ]
## Judge JCountry Country expectation
## [1,] "ALT Walter" "GER" "AUS" "-0.268937117538481"
## [2,] "ALT Walter" "GER" "AUT" "0.514245324411191"
## [3,] "ALT Walter" "GER" "BLR" "-0.418357197383256"
## [4,] "ALT Walter" "GER" "BRA" "-0.286999037563216"
## [5,] "ALT Walter" "GER" "CAN" "-0.172426704155232"
## [6,] "ALT Walter" "GER" "CHN" "-0.0666873651714911"
## [7,] "ALT Walter" "GER" "COL" "-0.208383224674666"
## [8,] "ALT Walter" "GER" "CUB" "0.0502180041315411"
## [9,] "ALT Walter" "GER" "ESP" "-0.112322558575204"
## [10,] "ALT Walter" "GER" "FRA" "0.174120298742416"
## [11,] "ALT Walter" "GER" "GBR" "0.18808097892196"
## [12,] "ALT Walter" "GER" "GER" "0.349980377926253"
## [13,] "ALT Walter" "GER" "GRE" "-0.127047779914508"
## [14,] "ALT Walter" "GER" "HUN" "-0.0114781864391741"
## [15,] "ALT Walter" "GER" "INA" "0.132011350422704"
## [16,] "ALT Walter" "GER" "ITA" "-0.0381893315564368"
## [17,] "ALT Walter" "GER" "JPN" "-0.156629932415374"
## [18,] "ALT Walter" "GER" "KAZ" "0.0493990332978534"
## [19,] "ALT Walter" "GER" "KOR" "0.152038137619261"
## [20,] "ALT Walter" "GER" "MAS" "0.0222616338113731"
## [21,] "ALT Walter" "GER" "MEX" "-0.110424573215492"
## [22,] "ALT Walter" "GER" "PER" "-0.0489835840767723"
## [23,] "ALT Walter" "GER" "PHI" "0.405783014257473"
## [24,] "ALT Walter" "GER" "PRK" "0.288238477349362"
## [25,] "ALT Walter" "GER" "PUR" "-0.0507859991431453"
## [26,] "ALT Walter" "GER" "ROM" "-0.19754750340658"
## [27,] "ALT Walter" "GER" "RUS" "-0.0899351197418434"
## [28,] "ALT Walter" "GER" "SUI" "-0.293158784685748"
## [29,] "ALT Walter" "GER" "SWE" "0.349577081373232"
## [30,] "ALT Walter" "GER" "THA" "0.120275675182481"
## [31,] "ALT Walter" "GER" "TPE" "0.0733845859997812"
## [32,] "ALT Walter" "GER" "UKR" "0.00796390591456093"
## [33,] "ALT Walter" "GER" "USA" "-0.06842692668195"
## [34,] "ALT Walter" "GER" "VEN" "-0.150856953022871"
## [35,] "BARNETT Madeleine" "AUS" "AUS" "0.28269145772221"
## [36,] "BARNETT Madeleine" "AUS" "AUT" "0.103033525680614"
## [37,] "BARNETT Madeleine" "AUS" "BLR" "0.167087081173668"
## [38,] "BARNETT Madeleine" "AUS" "BRA" "0.453051569176342"
## [39,] "BARNETT Madeleine" "AUS" "CAN" "0.15744682332704"
## [40,] "BARNETT Madeleine" "AUS" "CHN" "-0.00112901424887063"
## [41,] "BARNETT Madeleine" "AUS" "COL" "0.150081305781659"
## [42,] "BARNETT Madeleine" "AUS" "CUB" "-0.0256470299271736"
## [43,] "BARNETT Madeleine" "AUS" "ESP" "-0.142505827901596"
## [44,] "BARNETT Madeleine" "AUS" "FRA" "-0.0316419190590101"
## [45,] "BARNETT Madeleine" "AUS" "GBR" "-0.150006893102641"
## [46,] "BARNETT Madeleine" "AUS" "GER" "0.0726536879914104"
## [47,] "BARNETT Madeleine" "AUS" "GRE" "0.063593568352479"
## [48,] "BARNETT Madeleine" "AUS" "HUN" "0.111977329019889"
## [49,] "BARNETT Madeleine" "AUS" "INA" "-0.236624731710702"
## [50,] "BARNETT Madeleine" "AUS" "ITA" "0.00793946955288868"
## pvalue
## [1,] "0.00153899870086314"
## [2,] "0.000345398135236858"
## [3,] "0.0514280277555664"
## [4,] "0.0410840113387974"
## [5,] "0.0889047262289957"
## [6,] "0.417150652943755"
## [7,] "0.320219254513132"
## [8,] "0.687077334976614"
## [9,] "0.350178058814668"
## [10,] "0.168710191411152"
## [11,] "0.0558087331973524"
## [12,] "0.00014344855095401"
## [13,] "0.316550880998467"
## [14,] "0.938371928350902"
## [15,] "0.507450849711376"
## [16,] "0.758861490074198"
## [17,] "0.383441490213437"
## [18,] "0.631550884338204"
## [19,] "0.423613465477633"
## [20,] "0.887445925691801"
## [21,] "0.242596088343764"
## [22,] "0.853318996345444"
## [23,] "0.142439941488172"
## [24,] "0.0127707812228304"
## [25,] "0.768587455152503"
## [26,] "0.215096825358983"
## [27,] "0.274966059052421"
## [28,] "0.289280710882086"
## [29,] "0.0870007665773072"
## [30,] "0.649850263849335"
## [31,] "0.581075062993508"
## [32,] "0.932853027118642"
## [33,] "0.405481605125813"
## [34,] NA
## [35,] "0.000169059286506135"
## [36,] "0.456417529637073"
## [37,] "0.45250031906424"
## [38,] "0.000234507955597029"
## [39,] "0.0570548164970192"
## [40,] "0.987684441419668"
## [41,] "0.336929185352288"
## [42,] "0.798267364364345"
## [43,] "0.196760270883831"
## [44,] "0.771728794226691"
## [45,] "0.0765177985051482"
## [46,] "0.352455555599544"
## [47,] "0.599034970307996"
## [48,] "0.375244734036082"
## [49,] "0.118262298394363"
## [50,] "0.940286135732327"