## probit_model_for_hird_et_al_2015.txt
model {
## Observation model
## Likelihood (Probit response)
for(i in 1:n.samples){for(j in 1:n.otus){
eta[i,j] <- alpha[i] + L[i,j] + inprod(LV[i,],Loading[j,])
Z[i,j] ~ dnorm( eta[i,j], 1 )
y[i,j] ~ dbern( step(Z[i,j]) )
} }
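# Note: dnorm() in JAGS is parameterised by precision, so Z[i,j] has mean eta[i,j] and sd 1.
# Combined with y[i,j] = step(Z[i,j]) this gives Pr(y[i,j] = 1) = Phi(eta[i,j]), i.e. a probit
# regression written via latent-variable (Albert-Chib) data augmentation.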
## Process model
#Linear predictor w/ covariates
for( i in 1:n.samples ) { for ( j in 1:n.otus ) {
L[i,j] <- gamma[j] + beta_stomachContents[j,full.stomachContents[i]] +
beta_elevation[j,full.elevation[i]] + beta_diet[j,Diet[i]] +
beta_location[j,Location[i]] + beta_sex[j,full.sex[i]]
} }
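# Each covariate enters the linear predictor as a categorical index: e.g. full.stomachContents[i]
# picks out the coefficient column that applies to sample i, so every OTU j has its own set of
# level-specific effects around its intercept gamma[j].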
## Prior & corner constraints on the latent factors
for( i in 1:n.samples ) { for( k in 1:n.latent ) { LV[i,k] ~ dnorm(0,1) } } # standard normal priors on the latent variable scores
## Loadings: corner constraints on the upper triangle, since we index Loading[j,k] (OTU by factor) rather than Loading[k,j]
for( j in 1:(n.latent-1) ) { for ( k in (j+1):n.latent ) { Loading[j,k] <- 0 } } # Constraints to 0 on upper diagonal
for( j in 1:n.latent ) { Loading[j,j] ~ dnorm(0,1)I(0,) } # Sign constraints on diagonal elements
for( j in 2:n.latent ) { for( k in 1:(j-1) ) { Loading[j,k] ~ dnorm(0,1) } } # Free lower diagonals
for( j in (n.latent+1):n.otus) { for( k in 1:n.latent) { Loading[j,k] ~ dnorm(0,1) } }
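# The corner constraints above (upper triangle fixed at 0, positive diagonal, free lower triangle
# and remaining rows) are the usual device for identifying a latent-factor loading matrix: without
# them the likelihood is unchanged under rotations and sign switches of the latent variables and loadings.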
## Priors on the rest of the params
## Overdispersion parameter (phi is declared here but not used elsewhere in this model)
for( i in 1:n.otus ) { phi[i] ~ dt(0, pow(2.5,-2),1)I(0,) }
## Hierarchical structure on the rows
for( i in 1:n.samples ) { alpha[i] ~ dnorm(host.mean[hostID[i]],tau.sample) }
for( i in 1:n.hosts ) {
host.mean[i] <- host.eff[i] + host_phylo.eff[i]*scale.phylo
host.eff[i] ~ dnorm(mu.host[i],tau.host)
mu.host[i] ~ dt(0, pow(2.5,-2),1 )
}
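# Each sample-level intercept alpha[i] is drawn around a host-level mean, which is the sum of an
# independent host effect and a phylogenetically correlated effect scaled by scale.phylo.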
tau.sample <- pow(sigma.sample,-2) #precision
sigma.sample ~ dt(0, pow(1,-2),1 )I(0,) #standard deviation
sigma2.sample <- pow(sigma.sample,2) #variance
tau.host <- pow(sigma.host,-2) #precision
sigma.host ~ dt(0, pow(1,-2),1 )I(0,) #standard deviation
sigma2.host <- pow(sigma.host,2) #variance
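# dt(0, pow(s,-2), 1) is a t distribution with 1 degree of freedom, i.e. a Cauchy with scale s;
# the I(0,)-truncated versions above are therefore half-Cauchy priors on the standard deviations,
# and the untruncated versions (e.g. for mu.host and the coefficients below) are Cauchy priors.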
## Phylogenetic effect
scale.phylo ~ dexp(0.1)
## Phylogenetic variance-covariance prior
host_phylo.eff[1:n.hosts] ~ dmnorm(zeroes[1:n.hosts], phylo.prec[,])
phylo.prec[1:n.hosts,1:n.hosts] <- inverse(A[,])
for( i in 1:n.hosts ) { zeroes[i] <- 0 }
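# A[,] is not defined anywhere in the model, so it is expected as data; given the comment above it
# is presumably the hosts' phylogenetic variance-covariance matrix. dmnorm() is parameterised by a
# precision matrix, hence the inverse.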
## Variance partitioning of row effects
#var.host_phylo <- scale.phylo^2
#var.host <- sigma2.host
#var.sample <- sigma2.sample
tot.var.alpha <- sigma2.sample + sigma2.host + scale.phylo^2
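# tot.var.alpha is the total variance of the row effects alpha; the commented-out lines above name
# its three components. scale.phylo^2 is the phylogenetic component provided A has unit diagonal
# (i.e. A is a correlation matrix).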
## Sum-to-zero constraint on the covariate coefficients
for( j in 1:n.otus ) {
gamma[j] ~ dt(0,pow(2.5,-2),1)
for(k in 2:n.stomachContents) { beta_stomachContents[j,k] ~ dt(0,pow(2.5,-2),1) }
beta_stomachContents[j,1] <- -sum(beta_stomachContents[j,2:n.stomachContents])
for(k in 2:n.elevation) { beta_elevation[j,k] ~ dt(0,pow(2.5,-2),1) }
beta_elevation[j,1] <- -sum(beta_elevation[j,2:n.elevation])
for(k in 2:n.diet) { beta_diet[j,k] ~ dt(0,pow(2.5,-2),1) }
beta_diet[j,1] <- -sum(beta_diet[j,2:n.diet])
for(k in 2:n.location) { beta_location[j,k] ~ dt(0,pow(2.5,-2),1) }
beta_location[j,1] <- -sum(beta_location[j,2:n.location])
beta_sex[j,1] ~ dt(0, pow(2.5,-2), 1)
beta_sex[j,2] <- -beta_sex[j,1]
}
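# Fixing the first level of each factor at minus the sum of the remaining levels imposes a
# sum-to-zero constraint within each OTU, so gamma[j] acts as the OTU-level mean and the betas as
# deviations from it.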
## Rebuild the full covariate vectors for stomachContents, sex and elevation, as they contain NAs
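# Pattern used in the three blocks below: observed values are copied into the full vector, while
# each missing entry gets a dbern(0.5) draw plus 1, i.e. it is imputed as level 1 or level 2 with
# equal prior probability.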
## stomachContents
for(i in 1:length(avail.vals.stomachContents)) {
full.stomachContents[avail.vals.stomachContents[i]] <- stomachContents.avail[i]
}
for(i in 1:length(miss.vals.stomachContents)) {
stomachContents.miss[i] ~ dbern(0.5)
full.stomachContents[miss.vals.stomachContents[i]] <- stomachContents.miss[i] + 1
}
## Sex
for(i in 1:length(avail.vals.sex)) {
full.sex[avail.vals.sex[i]] <- sex.avail[i]
}
for(i in 1:length(miss.vals.sex)) {
sex.miss[i] ~ dbern(0.5)
full.sex[miss.vals.sex[i]] <- sex.miss[i] + 1
}
## Elevation
for(i in 1:length(avail.vals.elevation)) {
full.elevation[avail.vals.elevation[i]] <- elevation.avail[i]
}
#There's only 1 NA for elevation, hence no loop needed
elevation.miss ~ dbern(0.5)
full.elevation[miss.vals.elevation] <- elevation.miss + 1
}
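# ------------------------------------------------------------------------------
# Optional illustration, kept in comments so the file above remains a plain JAGS
# model file. A minimal Python/NumPy sketch (the value of eta is purely
# illustrative, not taken from the data set) checking by Monte Carlo that the
# augmentation used in the observation model -- Z ~ Normal(eta, 1), y = step(Z)
# -- reproduces a probit response, Pr(y = 1) = Phi(eta):
#
#   import numpy as np
#   from scipy.stats import norm
#
#   rng = np.random.default_rng(1)
#   eta = 0.7                                  # arbitrary linear-predictor value
#   z = rng.normal(loc=eta, scale=1.0, size=200_000)   # latent variable Z
#   y = (z > 0).astype(int)                    # step(Z): presence/absence
#
#   print(y.mean())                            # Monte Carlo estimate of Pr(y = 1)
#   print(norm.cdf(eta))                       # Phi(eta); the two should agree
# ------------------------------------------------------------------------------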