Central Valley Enhanced Acoustic Tagging Project




American River hatchery-origin fall-run Chinook salmon

2017-2018 Season (PROVISIONAL DATA)


1. Project Status

The study is complete and all tags are no longer active. All times are reported in Pacific Standard Time.

See tagging details below:
Release_time         Number_fish_released  Release_location  Release_rkm  Mean_length  Mean_weight
2018-05-10 14:00:00  150                   AR_Sunrise_Ramp   206.04       87.2         7.8
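
The release summary above is built from the tag metadata file (qry_HexCodes.txt), which is read in as tagcodes in the code below. A minimal sketch of how such a summary could be assembled is shown here; the Length and Weight column names are assumptions for illustration and may not match the actual fields in the file.

# Sketch only (not the report's code): summarize releases from the tag metadata.
tagcodes <- read.csv("qry_HexCodes.txt", stringsAsFactors = FALSE)
tagcodes$RelDT <- as.POSIXct(tagcodes$RelDT, format = "%m/%d/%Y %I:%M:%S %p",
                             tz = "Etc/GMT+8")  # fixed-offset PST, no daylight saving

release_summary <- do.call(rbind, lapply(split(tagcodes, tagcodes$RelDT), function(grp) {
  data.frame(Release_time         = grp$RelDT[1],
             Number_fish_released = length(unique(grp$TagID_Hex)),
             Mean_length          = mean(grp$Length, na.rm = TRUE),  # assumed column name
             Mean_weight          = mean(grp$Weight, na.rm = TRUE))  # assumed column name
}))
release_summary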


2. Real-time Fish Detections

# Set the working directory to the real-time products folder on the Desktop
setwd(file.path(Sys.getenv("USERPROFILE"), "Desktop", "Real-time data massaging", "products"))

library(cder)
library(reshape2)

# Study detections and tag metadata; all timestamps use fixed-offset Pacific
# Standard Time ("Etc/GMT+8", i.e. UTC-8 with no daylight-saving shift)
detects_study <- read.csv("C:/Users/field/Desktop/Real-time data massaging/products/Study_detection_files/detects_Nimbus-Fall-2018.csv", stringsAsFactors = F)
detects_study$DateTime_PST <- as.POSIXct(detects_study$DateTime_PST, format = "%Y-%m-%d %H:%M:%S", tz = "Etc/GMT+8")

tagcodes <- read.csv("qry_HexCodes.txt", stringsAsFactors = F)
tagcodes$RelDT <- as.POSIXct(tagcodes$RelDT, format = "%m/%d/%Y %I:%M:%S %p", tz = "Etc/GMT+8")

# Keep only detections at TowerBridge and compute each tag's first detection time there
detects_study <- detects_study[detects_study$general_location == "TowerBridge",]
detects_study <- merge(detects_study, aggregate(list(first_detect = detects_study$DateTime_PST), by = list(TagCode = detects_study$TagCode), FUN = min))

detects_study$Day <- as.Date(detects_study$first_detect, "Etc/GMT+8")

# Attach release date/time, study ID, and tag life from the tag metadata
detects_study <- merge(detects_study, tagcodes[,c("TagID_Hex", "RelDT", "StudyID", "tag_life")], by.x = "TagCode", by.y = "TagID_Hex")

# Plot window: first release date through today, capped at the end of tag life
starttime <- as.Date(min(detects_study$RelDT), "Etc/GMT+8")
endtime <- min(as.Date(c(Sys.time())), max(as.Date(detects_study$RelDT) + detects_study$tag_life))

# Hourly flow (CDEC sensor 20) at Colusa Bridge (COL), averaged to daily means
wlk_flow <- cdec_query("COL", "20", "H", starttime, endtime + 1)
wlk_flow$datetime <- as.Date(wlk_flow$DateTime)
wlk_flow_day <- aggregate(list(parameter_value = wlk_flow$Value), by = list(Day = wlk_flow$datetime), FUN = mean, na.rm = T)

daterange <- data.frame(Day = seq.Date(from = starttime, to = endtime, by = "day"))

# Release groups for this study, including any with no detections yet
rels <- unique(tagcodes[tagcodes$StudyID == unique(detects_study$StudyID), "RelDT"])
rel_num <- length(rels)
rels_no_detects <- as.character(rels[!(rels %in% unique(detects_study$RelDT))])

# Unique tags arriving per day for each release group, spread to one column per group
tagcount <- aggregate(list(unique_tags = detects_study$TagCode), by = list(Day = detects_study$Day, RelDT = detects_study$RelDT), FUN = function(x){length(unique(x))})
tagcount1 <- reshape2::dcast(tagcount, Day ~ RelDT)
                  
daterange1 <- merge(daterange, tagcount1, all.x=T)

# Add an all-NA column for any release group with no detections yet
if(length(rels_no_detects)>0){
  for(i in rels_no_detects){
    daterange1 <- cbind(daterange1, x=NA)
    names(daterange1)[names(daterange1) == 'x'] <- paste(i)
  }
}

daterange2 <- merge(daterange1, wlk_flow_day, by = "Day", all.x = T)

rownames(daterange2) <- daterange2$Day
daterange2$Day <- NULL

# Bar plot of daily arrivals by release group (all columns except the last, which holds flow)
par(mar=c(6, 5, 2, 5) + 0.1)
barp <- barplot(t(daterange2[,1:(ncol(daterange2)-1)]), plot = FALSE, beside = T)
barplot(t(daterange2[,1:(ncol(daterange2)-1)]), beside = T, col=rainbow(rel_num), 
        xlab = "", ylab = "Number of fish arrivals per day", 
        ylim = c(0,max(daterange2[,1:(ncol(daterange2)-1)], na.rm = T)*1.2), 
        las = 2, xlim=c(0,max(barp)+1), cex.lab = 1.5, yaxt = "n", xaxt = "n")#, 
        #legend.text = colnames(daterange2[,1:(ncol(daterange2)-1)]),
        #args.legend = list(x ='topright', bty='n', inset=c(-0.2,0)), title = "Release Group")
legend(x ='topleft', legend = colnames(daterange2)[1:(ncol(daterange2)-1)], fill= rainbow(rel_num), horiz = T, title = "Release Group")
ybreaks <- if(max(daterange2[,1:(ncol(daterange2)-1)], na.rm = T) < 4) {max(daterange2[,1:(ncol(daterange2)-1)], na.rm = T)} else {5}
xbreaks <- if(ncol(barp) > 10) {seq(1, ncol(barp), 2)} else {1:ncol(barp)}
barpmeans <- colMeans(barp)
axis(1, at = barpmeans[xbreaks], labels = rownames(daterange2)[xbreaks], las = 2)
axis(2, at = pretty(0:max(daterange2[,1:(ncol(daterange2)-1)], na.rm = T), ybreaks))

# Overlay daily mean flow at Colusa Bridge on a secondary (right-hand) axis
par(new=T)

plot(x = barpmeans, daterange2$parameter_value, yaxt = "n", xaxt = "n", ylab = "", xlab = "", col = "blue", type = "l", lwd=2, xlim=c(0,max(barp)+1), ylim = c(min(daterange2$parameter_value, na.rm = T), max(daterange2$parameter_value, na.rm=T)*1.1))
axis(side = 4)
mtext("Flow (cfs) at Colusa Bridge", side=4, line=3, cex=1.5, col="blue")
2.1 Detections at Tower Bridge (downtown Sacramento) versus Sacramento River flows at Colusa Bridge


# Set the working directory to the real-time products folder on the Desktop
setwd(file.path(Sys.getenv("USERPROFILE"), "Desktop", "Real-time data massaging", "products"))

#library(CDECRetrieve)
library(reshape2)

# Reload detections and tag metadata (timestamps in fixed-offset PST)
detects_study <- read.csv("C:/Users/field/Desktop/Real-time data massaging/products/Study_detection_files/detects_Nimbus-Fall-2018.csv", stringsAsFactors = F)

detects_study$DateTime_PST <- as.POSIXct(detects_study$DateTime_PST, format = "%Y-%m-%d %H:%M:%S", tz = "Etc/GMT+8")

tagcodes <- read.csv("qry_HexCodes.txt", stringsAsFactors = F)
tagcodes$RelDT <- as.POSIXct(tagcodes$RelDT, format = "%m/%d/%Y %I:%M:%S %p", tz = "Etc/GMT+8")

# Keep only detections at the Benicia Bridge east and west spans
detects_study <- detects_study[detects_study$general_location %in% c("Benicia_west", "Benicia_east"),]

if (nrow(detects_study)>0) {
  detects_study <- merge(detects_study,aggregate(list(first_detect = detects_study$DateTime_PST), by = list(TagCode= detects_study$TagCode), FUN = min))
  
  detects_study$Day <- as.Date(detects_study$first_detect, "Etc/GMT+8")
  
  detects_study <- merge(detects_study, tagcodes[,c("TagID_Hex", "RelDT", "StudyID")], by.x = "TagCode", by.y = "TagID_Hex")
  
  starttime <- as.Date(min(detects_study$RelDT), "Etc/GMT+8")
  # endtime carries over from the Tower Bridge chunk above (today, capped at end of tag life);
  # the flow query is not needed for this plot, so it remains commented out
  #endtime <- as.Date(c(Sys.time()))#, max(detects_study$first_detect)+60*60*24)))
  #wlk_flow <- cdec_query("COL", "20", "H", starttime, endtime+1)
  #wlk_flow$datetime <- as.Date(wlk_flow$datetime)
  #wlk_flow_day <- aggregate(list(parameter_value = wlk_flow$parameter_value), by = list(Day = wlk_flow$datetime), FUN = mean, na.rm = T)
  
  daterange <- data.frame(Day = seq.Date(from = starttime, to = endtime, by = "day"))
  
  rels <- unique(tagcodes[tagcodes$StudyID == unique(detects_study$StudyID), "RelDT"])
  rel_num <- length(rels)
  rels_no_detects <- as.character(rels[!(rels %in% unique(detects_study$RelDT))])
  
  tagcount <- aggregate(list(unique_tags = detects_study$TagCode), by = list(Day = detects_study$Day, RelDT = detects_study$RelDT ), FUN = function(x){length(unique(x))})
  tagcount1 <- reshape2::dcast(tagcount, Day ~ RelDT)
                    
  daterange1 <- merge(daterange, tagcount1, all.x=T)
  
  if(length(rels_no_detects)>0){
    for(i in rels_no_detects){
      daterange1 <- cbind(daterange1, x=NA)
      names(daterange1)[names(daterange1) == 'x'] <- paste(i)
    }
  }
  
  #daterange2 <- merge(daterange1, wlk_flow_day, by = "Day", all.x = T)
  daterange2 <- daterange1
  
  rownames(daterange2) <- daterange2$Day
  daterange2$Day <- NULL
  
  # Bar plot of daily arrivals at Benicia by release group (no flow overlay here)
  par(mar=c(6, 5, 2, 5) + 0.1)
  barp <- barplot(t(daterange2[,1:ncol(daterange2)]), plot = FALSE, beside = T)
  barplot(t(daterange2[,1:ncol(daterange2)]), beside = T, col=rainbow(rel_num), 
          xlab = "", ylab = "Number of fish arrivals per day", 
          ylim = c(0,max(daterange2[,1:ncol(daterange2)], na.rm = T)*1.2), 
          las = 2, xlim=c(0,max(barp)+1), cex.lab = 1.5, yaxt = "n", xaxt = "n")#, 
          #legend.text = colnames(daterange2[,1:ncol(daterange2)-1]),
          #args.legend = list(x ='topright', bty='n', inset=c(-0.2,0)), title = "Release Group")
  legend(x ='topleft', legend = colnames(daterange2)[1:ncol(daterange2)], fill= rainbow(rel_num), horiz = T, title = "Release Group")
  ybreaks <- if(max(daterange2[,1:ncol(daterange2)], na.rm = T) < 4) {max(daterange2[,1:ncol(daterange2)], na.rm = T)} else {5}
  xbreaks <- if(ncol(barp) > 10) {seq(1, ncol(barp), 2)} else {1:ncol(barp)}
  barpmeans <- colMeans(barp)
  axis(1, at = barpmeans[xbreaks], labels = rownames(daterange2)[xbreaks], las = 2)
  axis(2, at = pretty(0:max(daterange2[,1:ncol(daterange2)], na.rm = T), ybreaks))
  box()

#par(new=T)

#plot(x = barpmeans, daterange2$parameter_value, yaxt = "n", xaxt = "n", ylab = "", xlab = "", col = "blue", type = "l", lwd=2, xlim=c(0,max(barp)+1), ylim = c(min(daterange2$parameter_value, na.rm = T), max(daterange2$parameter_value, na.rm=T)*1.1))#, ylab = "Returning adults", xlab= "Outmigration year", yaxt="n", col="red", pch=20)
#axis(side = 4)#, labels = c(2000:2016), at = c(2000:2016))
#mtext("Flow (cfs) at Colusa Bridge", side=4, line=3, cex=1.5, col="blue")

}else{
  print("No detections at Benicia yet")
}
2.2 Detections at Benicia Bridge



3. Survival Probability


3.1 Minimum survival to Tower Bridge (using CJS survival model)
Release Group        Survival (%)  SE   95% lower C.I.  95% upper C.I.  Detection efficiency (%)
ALL                  68.1          3.8  60.2            75.1            98.8
2018-05-10 14:00:00  68.1          3.8  60.2            75.1            98.8


3.2 Minimum survival to Benicia Bridge East Span (using CJS survival model)
Release Group        Survival (%)  SE   95% lower C.I.  95% upper C.I.  Detection efficiency (%)
ALL                  2             1.1  0.6             6               100
2018-05-10 14:00:00  2             1.1  0.6             6               100
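
The estimates in Sections 3.1 and 3.2 come from Cormack-Jolly-Seber (CJS) mark-recapture models fit to the tags' detection histories (release, Tower Bridge, and downstream receivers). The report's own model code is not shown here; the sketch below is a minimal, hypothetical example using the RMark interface to Program MARK (which must be installed separately), with toy capture histories standing in for the real ones.

# Minimal CJS sketch with RMark; 'ch' is a toy capture-history data frame where,
# e.g., "110" = released, detected at the first site, not detected downstream.
library(RMark)

ch <- data.frame(ch = c("111", "110", "100", "110", "100"), stringsAsFactors = FALSE)

cjs_proc <- process.data(ch, model = "CJS")
cjs_ddl  <- make.design.data(cjs_proc)

# Reach-specific survival (Phi) and detection probability (p)
cjs_fit <- mark(cjs_proc, cjs_ddl,
                model.parameters = list(Phi = list(formula = ~ time),
                                        p   = list(formula = ~ time)),
                output = FALSE, silent = TRUE)

cjs_fit$results$real  # survival and detection estimates with SEs and 95% CIs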



4. Detection Statistics


4.1 Detections for all release groups combined
general_location   First_arrival        Mean_arrival         Fish_count  Percent_arrived  rkm
TowerBridge        2018-05-11 07:45:51  2018-05-12 14:23:16  101         67.33            172.000
I80-50_Br          2018-05-11 01:34:33  2018-05-12 21:11:36  85          56.67            170.748
Georgiana_Slough1  2018-05-14 01:54:43  2018-05-14 13:36:01  3           2.00             119.208
Sac_BlwGeorgiana   2018-05-12 19:11:53  2018-05-14 08:16:36  9           6.00             119.058
Sac_BlwGeorgiana2  2018-05-12 19:22:12  2018-05-14 05:01:45  10          6.67             118.398
Benicia_east       2018-05-16 10:12:34  2018-05-17 18:06:39  3           2.00             52.240
Benicia_west       2018-05-16 10:29:56  2018-05-18 09:06:14  2           1.33             52.040

4.2 Detections for 2018-05-10 14:00:00 release group
general_location   First_arrival        Mean_arrival         Fish_count  Percent_arrived  rkm
TowerBridge        2018-05-11 07:45:51  2018-05-12 14:54:01  101         67.33            172.000
I80-50_Br          2018-05-11 01:34:33  2018-05-12 19:36:28  85          56.67            170.748
Georgiana_Slough1  2018-05-14 01:54:43  2018-05-14 13:36:01  3           2.00             119.208
Sac_BlwGeorgiana   2018-05-12 19:11:53  2018-05-14 10:53:53  9           6.00             119.058
Sac_BlwGeorgiana2  2018-05-12 19:22:12  2018-05-14 06:54:15  10          6.67             118.398
Benicia_east       2018-05-16 10:12:34  2018-05-17 18:06:39  3           2.00             52.240
Benicia_west       2018-05-16 10:29:56  2018-05-18 09:06:14  2           1.33             52.040
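
The arrival statistics in this section can be derived from the same detection file used above, before any location filtering. A minimal sketch under that assumption is given below; Mean_arrival averages each tag's first detection at a site, Percent_arrived divides the tag count by the 150 fish released, and the rkm column would come from receiver metadata that is not part of this excerpt.

# Sketch only (not the report's code): per-location arrival statistics.
detects_all <- read.csv("C:/Users/field/Desktop/Real-time data massaging/products/Study_detection_files/detects_Nimbus-Fall-2018.csv", stringsAsFactors = FALSE)
detects_all$DateTime_PST <- as.POSIXct(detects_all$DateTime_PST,
                                       format = "%Y-%m-%d %H:%M:%S", tz = "Etc/GMT+8")

# First detection of each tag at each receiver location
firsts <- aggregate(list(first_detect = detects_all$DateTime_PST),
                    by = list(general_location = detects_all$general_location,
                              TagCode = detects_all$TagCode),
                    FUN = min)

n_released <- 150  # fish released on 2018-05-10 (Section 1)

detect_stats <- do.call(rbind, lapply(split(firsts, firsts$general_location), function(loc) {
  data.frame(general_location = loc$general_location[1],
             First_arrival    = min(loc$first_detect),
             Mean_arrival     = mean(loc$first_detect),
             Fish_count       = length(unique(loc$TagCode)),
             Percent_arrived  = round(100 * length(unique(loc$TagCode)) / n_released, 2))
}))
detect_stats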