Central Valley Enhanced Acoustic Tagging Project

Hatchery-origin winter-run Chinook salmon

2017-2018 Season (PROVISIONAL DATA)



1. Project Status


The study is complete and all tags are no longer active. All times are in Pacific Standard Time.

See tagging details below:

Release_time         Number_fish_released  Release_location  Release_rkm  Mean_length (mm)  Mean_weight (g)
2018-03-01 17:50:00  361                   Bonnyview_Rel     540.258      111.9             16.5
2018-03-13 18:00:00  239                   Bonnyview_Rel     540.258      97.5              10.7
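
For reference, a minimal sketch of how a release summary like the one above could be assembled in R is shown below. It assumes that the tag list read in Section 2 (tagcodes, from qry_HexCodes.txt) also carries per-fish Length (mm), Weight (g), Rel_loc, and Rel_rkm fields; those column names are assumptions for illustration, not confirmed fields of the project database.

# Sketch only: Length, Weight, Rel_loc, and Rel_rkm are assumed (hypothetical) columns.
counts <- aggregate(list(Number_fish_released = tagcodes$TagID_Hex),
                    by = list(Release_time = tagcodes$RelDT), FUN = length)
means <- aggregate(list(Mean_length = tagcodes$Length, Mean_weight = tagcodes$Weight),
                   by = list(Release_time = tagcodes$RelDT,
                             Release_location = tagcodes$Rel_loc,
                             Release_rkm = tagcodes$Rel_rkm),
                   FUN = mean, na.rm = TRUE)
release_summary <- merge(counts, means)
release_summary[order(release_summary$Release_time), ]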


2. Real-time Fish Detections

Sacramento real-time receivers were deployed on 2018-02-01; data are current as of 2025-04-22 08:00:00. All times are in Pacific Standard Time.

# Set the working directory to the real-time data products folder on the user's Desktop
setwd(file.path(Sys.getenv("USERPROFILE"), "Desktop", "Real-time data massaging", "products"))

library(cder)      # CDEC queries for river flow data
library(reshape2)  # dcast() for reshaping daily tag counts

# Read the study detection file and convert detection times to PST
detects_study <- read.csv("C:/Users/field/Desktop/Real-time data massaging/products/Study_detection_files/detects_Winter_H_2018.csv", stringsAsFactors = F)
detects_study$DateTime_PST <- as.POSIXct(detects_study$DateTime_PST, format = "%Y-%m-%d %H:%M:%S", tz = "Etc/GMT+8")

# Read tag metadata and convert release times to PST
tagcodes <- read.csv("qry_HexCodes.txt", stringsAsFactors = F)
tagcodes$RelDT <- as.POSIXct(tagcodes$RelDT, format = "%m/%d/%Y %I:%M:%S %p", tz = "Etc/GMT+8")

# Keep only detections at Tower Bridge and find each tag's first arrival there
detects_study <- detects_study[detects_study$general_location == "TowerBridge",]
detects_study <- merge(detects_study, aggregate(list(first_detect = detects_study$DateTime_PST), by = list(TagCode = detects_study$TagCode), FUN = min))

detects_study$Day <- as.Date(detects_study$first_detect, tz = "Etc/GMT+8")

# Attach release date, study ID, and tag life from the tag metadata
detects_study <- merge(detects_study, tagcodes[, c("TagID_Hex", "RelDT", "StudyID", "tag_life")], by.x = "TagCode", by.y = "TagID_Hex")

# Plot window: first release date through today or tag-life expiration, whichever comes first
starttime <- as.Date(min(detects_study$RelDT), tz = "Etc/GMT+8")
endtime <- min(as.Date(Sys.time()), max(as.Date(detects_study$RelDT) + detects_study$tag_life))
# Query hourly flow (sensor 20) at Colusa Bridge (COL) from CDEC and average it by day
wlk_flow <- cdec_query("COL", "20", "H", starttime, endtime + 1)
wlk_flow$datetime <- as.Date(wlk_flow$DateTime)
wlk_flow_day <- aggregate(list(parameter_value = wlk_flow$Value), by = list(Day = wlk_flow$datetime), FUN = mean, na.rm = T)

# One row per day in the plotting window
daterange <- data.frame(Day = seq.Date(from = starttime, to = endtime, by = "day"))

# Release groups for this study, including any with no detections yet
rels <- unique(tagcodes[tagcodes$StudyID == unique(detects_study$StudyID), "RelDT"])
rel_num <- length(rels)
rels_no_detects <- as.character(rels[!(rels %in% unique(detects_study$RelDT))])

# Count unique tags arriving per day for each release group, then cast to wide format (one column per release group)
tagcount <- aggregate(list(unique_tags = detects_study$TagCode), by = list(Day = detects_study$Day, RelDT = detects_study$RelDT), FUN = function(x) {length(unique(x))})
tagcount1 <- reshape2::dcast(tagcount, Day ~ RelDT, value.var = "unique_tags")

daterange1 <- merge(daterange, tagcount1, all.x = T)

# Add empty columns for release groups with no detections so they still appear in the legend
if (length(rels_no_detects) > 0) {
  for (i in rels_no_detects) {
    daterange1 <- cbind(daterange1, x = NA)
    names(daterange1)[names(daterange1) == 'x'] <- paste(i)
  }
}

# Append daily mean flow; days become row names for the barplot
daterange2 <- merge(daterange1, wlk_flow_day, by = "Day", all.x = T)

rownames(daterange2) <- daterange2$Day
daterange2$Day <- NULL

# Daily arrivals by release group (all columns except the final flow column)
counts <- daterange2[, 1:(ncol(daterange2) - 1), drop = FALSE]

par(mar = c(6, 5, 2, 5) + 0.1)

# First call with plot = FALSE only returns the bar positions, used below for axis placement
barp <- barplot(t(counts), plot = FALSE, beside = T)
barplot(t(counts), beside = T, col = rainbow(rel_num),
        xlab = "", ylab = "Number of fish arrivals per day",
        ylim = c(0, max(counts, na.rm = T) * 1.2),
        las = 2, xlim = c(0, max(barp) + 1), cex.lab = 1.5, yaxt = "n", xaxt = "n")
legend(x = 'topleft', legend = colnames(counts), fill = rainbow(rel_num), horiz = T, title = "Release Group")

# Axis ticks: at most 5 y-axis breaks; label every other day when there are many days
ybreaks <- if (max(counts, na.rm = T) < 4) {max(counts, na.rm = T)} else {5}
xbreaks <- if (ncol(barp) > 10) {seq(1, ncol(barp), 2)} else {1:ncol(barp)}
barpmeans <- colMeans(barp)
axis(1, at = barpmeans[xbreaks], labels = rownames(daterange2[xbreaks,]), las = 2)
axis(2, at = pretty(0:max(counts, na.rm = T), ybreaks))

# Overlay daily mean flow at Colusa Bridge on a secondary (right-hand) axis
par(new = T)
plot(x = barpmeans, y = daterange2$parameter_value, yaxt = "n", xaxt = "n", ylab = "", xlab = "",
     col = "blue", type = "l", lwd = 2, xlim = c(0, max(barp) + 1),
     ylim = c(min(daterange2$parameter_value, na.rm = T), max(daterange2$parameter_value, na.rm = T) * 1.1))
axis(side = 4)
mtext("Flow (cfs) at Colusa Bridge", side = 4, line = 3, cex = 1.5, col = "blue")
2.1 Detections at Tower Bridge (downtown Sacramento) versus Sacramento River flows at Colusa Bridge




3. Survival Probability



3.1 Minimum survival to Tower Bridge (using CJS survival model)

Release Group        Survival (%)  SE   95% lower C.I.  95% upper C.I.  Detection efficiency (%)
ALL                  18.4          1.8  15.1            22.2            64.4
2018-03-01 17:50:00  19.1          2.2  15.1            23.9            NA
2018-03-13 18:00:00  17.3          3.0  12.2            23.9            NA
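
The estimates above come from a Cormack-Jolly-Seber (CJS) model; the project's own model code is not shown in this report. Below is a minimal sketch of that kind of fit using the marked package (an assumption; any CJS-capable package such as RMark would work), treating release, TowerBridge, and the downstream I80-50_Br array as three occasions so that survival to Tower Bridge and its detection efficiency are separable.

library(marked)  # assumption: not necessarily the package used for the reported estimates

# Re-read the unfiltered detection file so that detections at both downstream arrays are available
all_detects <- read.csv("C:/Users/field/Desktop/Real-time data massaging/products/Study_detection_files/detects_Winter_H_2018.csv", stringsAsFactors = F)

# Tags released in this study (tagcodes and detects_study come from the Section 2 code)
tags <- tagcodes[tagcodes$StudyID == unique(detects_study$StudyID), ]

# Capture histories over three occasions: release, TowerBridge, I80-50_Br
tb  <- unique(all_detects$TagCode[all_detects$general_location == "TowerBridge"])
i80 <- unique(all_detects$TagCode[all_detects$general_location == "I80-50_Br"])
ch_df <- data.frame(ch = paste0("1",
                                as.integer(tags$TagID_Hex %in% tb),
                                as.integer(tags$TagID_Hex %in% i80)),
                    stringsAsFactors = F)

# Time-varying CJS: Phi for the release-to-TowerBridge reach and p at TowerBridge are estimable;
# the final Phi and p are confounded, as usual for the last occasion.
fit <- crm(ch_df, model = "CJS",
           model.parameters = list(Phi = list(formula = ~time), p = list(formula = ~time)),
           hessian = TRUE)
predict(fit)  # real-scale survival (Phi) and detection probability (p) estimates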



4. Detection Statistics



4.1 Detections for all release groups combined

general_location   First_arrival        Mean_arrival         Fish_count  Percent_arrived  rkm
TowerBridge        2018-03-08 01:12:56  2018-03-16 21:21:57  71          11.83            172.000
I80-50_Br          2018-03-07 17:58:24  2018-03-17 00:11:12  73          12.17            170.748
Georgiana_Slough1  2018-03-24 19:50:52  2018-03-29 08:25:08  16          2.67             119.208
Sac_BlwGeorgiana   2018-03-22 17:51:34  2018-03-31 20:14:42  26          4.33             119.058
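
For reference, a minimal sketch of how the arrival statistics in these tables could be assembled with base R is shown below. It is not the report's production code; the rkm column comes from receiver metadata and is omitted here, and tagcodes and detects_study are the objects created in the Section 2 code.

# Sketch only: per-location arrival summaries from the raw detection file
all_detects <- read.csv("C:/Users/field/Desktop/Real-time data massaging/products/Study_detection_files/detects_Winter_H_2018.csv", stringsAsFactors = F)
all_detects$DateTime_PST <- as.POSIXct(all_detects$DateTime_PST, format = "%Y-%m-%d %H:%M:%S", tz = "Etc/GMT+8")

# First detection of each tag at each receiver location
first_hits <- aggregate(list(first_detect = all_detects$DateTime_PST),
                        by = list(general_location = all_detects$general_location,
                                  TagCode = all_detects$TagCode),
                        FUN = min)

# Total tags released in this study, used as the denominator for Percent_arrived
n_released <- sum(tagcodes$StudyID == unique(detects_study$StudyID))

detect_stats <- do.call(rbind, lapply(split(first_hits, first_hits$general_location), function(x) {
  data.frame(general_location = x$general_location[1],
             First_arrival    = min(x$first_detect),
             Mean_arrival     = mean(x$first_detect),
             Fish_count       = length(unique(x$TagCode)),
             Percent_arrived  = round(100 * length(unique(x$TagCode)) / n_released, 2))
}))
detect_stats
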
4.2 Detections for 2018-03-01 17:50:00 release group

general_location  First_arrival        Mean_arrival         Fish_count  Percent_arrived  rkm
TowerBridge       2018-03-08 01:12:56  2018-03-14 18:23:01  47          13.02            172.000
I80-50_Br         2018-03-07 17:58:24  2018-03-15 07:40:52  55          15.24            170.748
Sac_BlwGeorgiana  2018-03-22 17:51:34  2018-03-31 19:26:52  6           1.66             119.058

4.3 Detections for 2018-03-13 18:00:00 release group

general_location   First_arrival        Mean_arrival         Fish_count  Percent_arrived  rkm
TowerBridge        2018-03-19 17:33:49  2018-03-27 16:47:45  24          10.04            172.000
I80-50_Br          2018-03-18 04:46:45  2018-03-25 14:40:00  18          7.53             170.748
Georgiana_Slough1  2018-03-24 19:50:52  2018-03-29 08:25:08  16          6.69             119.208
Sac_BlwGeorgiana   2018-03-22 22:36:55  2018-03-31 20:47:59  20          8.37             119.058