R code for Tables
jayverhoef committed Nov 19, 2024
1 parent 77dcdd2 commit 2419817
Showing 4 changed files with 160 additions and 14 deletions.
File renamed without changes.
132 changes: 132 additions & 0 deletions tables/Tables_statewide.R
@@ -0,0 +1,132 @@
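# Load the fitted abundance datacube and the raw survey data.
# dTerr and dGlac are assumed (from their names) to be the terrestrial and
# glacial-site survey records; akpvpolys_sf holds the survey-unit polygons.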
setwd(paste0('/mnt/ExtraDrive1/Work/desktop_data/2024_papers/',
'akhs-abundance-trends/akhs-abundance-trends/data/'))
load('akpv_datacube.rda')
setwd(paste0('/mnt/ExtraDrive1/Work/desktop_data/2024_papers/',
'akhs-abundance-trends/akhs-abundance-trends/data-raw/'))
load('dTerr.rda')
load('dGlac.rda')
load('akpvpolys_sf.rda')

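# Combine terrestrial and glacial survey records into one polyid-by-year
# sampling history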
dstk = rbind(dTerr[,c('polyid', 'stockid', 'yr')],
dGlac[,c('polyid', 'stockid', 'yr')])

tabpath = paste0('/mnt/ExtraDrive1/Work/desktop_data/2024_papers/',
'akhs-abundance-trends/akhs-abundance-trends/tables/')

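# Lookup table mapping numeric stock IDs (1-12) to stock names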
stockid_names = data.frame(stockid = 1:12, stocknames =
  c('Aleutians',
    'Pribilofs',
    'Bristol Bay',
    'North Kodiak',
    'South Kodiak',
    'Prince William Sound',
    'Cook Inlet/Shelikof Strait',
    'Glacier Bay/Icy Strait',
    'Lynn Canal/Stephens Passage',
    'Sitka/Chatham Strait',
    'Dixon/Cape Decision',
    'Clarence Strait')
)


#-------------------------------------------------------------------------------
# Tab_sample_sizes
#-------------------------------------------------------------------------------


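# For each stock, tally the stock ID, the number of SSUs (the polygons in
# akpvpolys_sf), the SSUs surveyed at least once, and the SSUs with at
# least one nonzero count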
num_sampled = matrix(NA, nrow = 12, ncol = 4)
for(i in 1:12) {
  num_sampled[i,1] = i
  num_sampled[i,2] = sum(akpvpolys_sf$stockid == i)
  num_sampled[i,3] = length(unique(dTerr[dTerr$stockid == i, 'polyid'])) +
    length(unique(dGlac[dGlac$stockid == i, 'polyid']))
  num_sampled[i,4] = length(unique(
      dTerr[dTerr$stockid == i & dTerr$count > 0, 'polyid'])) +
    length(unique(
      dGlac[dGlac$stockid == i & dGlac$count > 0, 'polyid']))
}
num_sampled

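# Mean and median number of times each surveyed polygon was sampled, by stock
# (the statewide values are printed after the loop)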
stkSSUsamples_meanmedian = NULL
for(i in 1:12) {
  dtmp = dstk[dstk$stockid == i,]
  dtmp[,'polyid'] = as.factor(as.character(dtmp$polyid))
  stkSSUsamples_meanmedian = rbind(stkSSUsamples_meanmedian,
    c(mean(table(dtmp$polyid)), median(table(dtmp$polyid))))
}
c(mean(table(dstk$polyid)), median(table(dstk$polyid)))

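# Append the per-stock sampling frequencies and add a final statewide row
# (NA for the count columns)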
num_sampled = cbind(num_sampled, stkSSUsamples_meanmedian)
num_sampled = rbind(num_sampled, c(rep(NA, times = 4),
  c(mean(table(dstk$polyid)), median(table(dstk$polyid)))))
colnames(num_sampled) = c('stockid', 'n_SSU', 'n_sampled_onceplus', 'n_nonzero',
  'times_sampled_mean', 'times_sampled_median')

write.table(num_sampled, file = paste0(tabpath, 'num_sampled.csv'), quote = FALSE,
sep = ',', row.names = FALSE)


#-------------------------------------------------------------------------------
# Tab_statewide_and_stocks
#-------------------------------------------------------------------------------

# State-wide Abundance
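# akpv_datacube is treated as a list of 1000 abundance samples, each a
# sites-by-years matrix; summing over sites gives one statewide trajectory
# per sample, and column 28 is taken here as the most recent year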
pop = matrix(NA, nrow = 1000, ncol = ncol(akpv_datacube[[1]]))
for(i in 1:1000) pop[i,] = apply(akpv_datacube[[i]], 2, sum)
est = apply(pop, 2, mean)
se = sqrt(apply(pop, 2, var))
bot = apply(pop, 2, quantile, prob = .025)
top = apply(pop, 2, quantile, prob = .975)
cv = se/est
current_est_table = data.frame(stock = 'statewide', est = est[28],
  se = se[28], bot = bot[28], top = top[28], cv = cv[28])
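# Repeat by stock, keeping only the rows of each sample whose 'stockid'
# attribute matches the current stock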
for(j in 1:12) {
  stock_id = j
  pop = matrix(NA, nrow = 1000, ncol = ncol(akpv_datacube[[1]]))
  for(i in 1:1000) pop[i,] = apply(akpv_datacube[[i]][
    attr(akpv_datacube[[i]], 'stockid') == stock_id, ], 2, sum)
  est = apply(pop, 2, mean)
  se = sqrt(apply(pop, 2, var))
  bot = apply(pop, 2, quantile, prob = .025)
  top = apply(pop, 2, quantile, prob = .975)
  cv = se/est
  current_est_table = rbind(current_est_table,
    data.frame(stock = stock_id, est = est[28],
      se = se[28], bot = bot[28], top = top[28], cv = cv[28]))
}



#-------------------------------------------------------------------------------
# Probability of decreasing
#-------------------------------------------------------------------------------

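# For each stock and each of the 1000 abundance samples, fit a log-linear
# trend to every 8-year window of the summed trajectory and convert the slope
# to a percent annual change; pDecrease is the proportion of samples with a
# negative trend in window 21, which (assuming 28 survey years) is the most
# recent 8-year period, matching the "current" estimate at column 28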
pDecrease = rep(NA, times = 12)
for(j in 1:12) {
  stock_id = j
  pop = matrix(NA, nrow = 1000, ncol = ncol(akpv_datacube[[1]]))
  for(i in 1:1000) pop[i,] = apply(akpv_datacube[[i]][
    attr(akpv_datacube[[i]], 'stockid') == stock_id, ], 2, sum)
  maxi = ncol(akpv_datacube[[1]])
  trendlen = 8
  propTrendMat = NULL
  for(i in 1:(maxi - trendlen + 1))
    propTrendMat = cbind(propTrendMat,
      100*(exp(apply(pop, 1, function(v){coef(lm(I(log(y)) ~ x,
        data.frame(x = 1:8, y = v[i:(i + trendlen - 1)])))[2]})) - 1))
  pDecrease[j] = sum(propTrendMat[,21] < 0)/1000
}

current_est_table = cbind(current_est_table, c(NA, pDecrease))
colnames(current_est_table)[7] = 'pDecrease'

# ----------- Table of Results

write.csv(current_est_table, file = paste0(tabpath,'current_est_table.csv'),
quote = FALSE, row.names = FALSE)




28 changes: 14 additions & 14 deletions tables/current_est_table.csv
@@ -1,14 +1,14 @@
-stock,est,se,bot,top,cv
-statewide,215309.286,4708.48428227603,207737.8,226397.175,0.0218684682381792
-1,6212.395,260.466446485425,5719.975,6727.075,0.041926897192697
-2,136.715,24.7893179673279,97,195,0.181321127654814
-3,36421.376,2221.27909760423,32297.2,40813.275,0.0609883354655307
-4,8233.63,1225.83826035107,6216.9,10633.2,0.148881873529788
-5,19490.352,2117.52203475281,16162.775,24764.675,0.10864462759589
-6,38220.322,2094.65798984235,34220.55,42144.575,0.054804823199615
-7,32004.969,2344.28142112256,28530.6,38614.275,0.0732474204590719
-8,6029.958,868.133128826084,4419.95,7746.85,0.143970012531776
-9,11567.612,839.968946620818,10130.85,13464.35,0.0726138589901544
-10,12015.079,864.131422802617,10501.95,13919.225,0.0719205777009553
-11,21390.831,1999.64156108334,17638.7,25609.75,0.0934812472261287
-12,23586.047,2368.0871389137,20429.925,29412.7,0.100402035954295
+stock,est,se,bot,top,cv,pDecrease
+statewide,215309.286,4708.48428227603,207737.8,226397.175,0.0218684682381792,NA
+1,6212.395,260.466446485425,5719.975,6727.075,0.041926897192697,0.6
+2,136.715,24.7893179673279,97,195,0.181321127654814,0.645
+3,36421.376,2221.27909760423,32297.2,40813.275,0.0609883354655307,0.421
+4,8233.63,1225.83826035107,6216.9,10633.2,0.148881873529788,0.269
+5,19490.352,2117.52203475281,16162.775,24764.675,0.10864462759589,0.208
+6,38220.322,2094.65798984235,34220.55,42144.575,0.054804823199615,0.993
+7,32004.969,2344.28142112256,28530.6,38614.275,0.0732474204590719,0.41
+8,6029.958,868.133128826084,4419.95,7746.85,0.143970012531776,0.408
+9,11567.612,839.968946620818,10130.85,13464.35,0.0726138589901544,0.626
+10,12015.079,864.131422802617,10501.95,13919.225,0.0719205777009553,0.777
+11,21390.831,1999.64156108334,17638.7,25609.75,0.0934812472261287,0.64
+12,23586.047,2368.0871389137,20429.925,29412.7,0.100402035954295,0.851
14 changes: 14 additions & 0 deletions tables/num_sampled.csv
@@ -0,0 +1,14 @@
stockid,n_SSU,n_sampled_onceplus,n_nonzero,times_sampled_mean,times_sampled_median
1,151,148,139,7.5,7
2,6,4,3,3.5,2
3,108,69,46,17.3478260869565,11
4,55,55,31,9.85454545454546,6
5,50,49,29,33.8775510204082,11
6,223,216,145,17.3194444444444,7
7,254,253,166,10.1383399209486,8
8,64,63,49,30.2063492063492,18
9,73,73,54,13.2739726027397,13
10,87,87,71,19.2873563218391,15
11,82,79,70,8.83544303797468,9
12,158,157,121,16.0573248407643,9
NA,NA,NA,NA,14.8427773343974,9
