try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
library(knitr)
library(kableExtra)
library(lubridate)
library(data.table)
library(ggplot2)
library(RMark)
library(scales)
library(viridis)
library(forcats)
library(reshape2)
library(png)
library(dataRetrieval)
library(rerddap)
library(plotly)
library(dplyr)
study <- "SacRiverSpringJPE_2024"
detects_study <- fread("study_detections_archive_2024.csv", stringsAsFactors = F,
colClasses = c(DateTime_PST = "character", RelDT = "character")) %>%
filter(Study_ID == study) %>%
mutate(DateTime_PST = as.POSIXct(DateTime_PST, format = "%Y-%m-%d %H:%M:%S", tz="Etc/GMT+8"),
release_time = as.POSIXct(RelDT, format = "%Y-%m-%d %H:%M:%S", tz="Etc/GMT+8")) %>%
rename(., weight=Weight, length=Length, release_rkm=Rel_rkm, release_location=Rel_loc, river_km=rkm)
detects_study2 <- fread("study_detections_archive_2024.csv", stringsAsFactors = F,
colClasses = c(DateTime_PST = "character", RelDT = "character")) %>%
filter(Study_ID == "Spring_Pulse_2024") %>%
mutate(DateTime_PST = as.POSIXct(DateTime_PST, format = "%Y-%m-%d %H:%M:%S", tz="Etc/GMT+8"),
release_time = as.POSIXct(RelDT, format = "%Y-%m-%d %H:%M:%S", tz="Etc/GMT+8")) %>%
rename(., weight=Weight, length=Length, release_rkm=Rel_rkm, release_location=Rel_loc, river_km=rkm)
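## A minimal sanity check (an editorial sketch, not part of the original workflow): as.POSIXct
## returns NA silently on a format or timezone mismatch, so verify that every release time parsed.
# stopifnot(!any(is.na(detects_study$release_time)), !any(is.na(detects_study2$release_time)))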
latest <- read.csv("latest_download.csv", stringsAsFactors = F)$x
##################################################################################################################
#### TO RUN THE FOLLOWING CODE CHUNKS FROM HERE ON DOWN USING R ERDDAP, UN-COMMENT THESE NEXT 9 LINES OF CODE ####
##################################################################################################################
# cache_delete_all()
# query = paste('&','Study_ID','="', study, '"', sep = '')
# datafile=URLencode(paste("https://oceanview.pfeg.noaa.gov/erddap/tabledap/","FEDcalFishTrack",".csv?",query,sep = ""))
# options(url.method = "libcurl", download.file.method = "libcurl", timeout = 180)
# detects_study <- data.frame(read.csv(datafile,row.names = NULL, stringsAsFactors = F))
# detects_study <- detects_study[-1,]
# detects_study$DateTime_PST <- as.POSIXct(detects_study$local_time, format = "%Y-%m-%d %H:%M:%S", "Etc/GMT+8")
# detects_study$release_time <- as.POSIXct(detects_study$release_time, format = "%Y-%m-%d %H:%M:%S", "Etc/GMT+8")
# detects_study$river_km <- as.numeric(detects_study$river_km)
##################################################################################################################
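## A sketch of the same query via the rerddap package loaded above (assumes the returned
## columns need the same POSIXct/numeric conversions as the manual URL approach):
# detects_study <- rerddap::tabledap("FEDcalFishTrack",
#                                    paste0('Study_ID="', study, '"'),
#                                    url = "https://oceanview.pfeg.noaa.gov/erddap/")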
Study is complete; no tags remain active as of 2024-07-20. All times are in Pacific Standard Time.
if (nrow(detects_study) == 0){
cat("Study has not yet begun ")
} else {
if (min(detects_study$release_time) > Sys.time()){
cat("Study has not yet begun, below data is a placeholder: ")
}
if (min(detects_study$release_time) < Sys.time()){
cat(paste("Study began on ", min(detects_study$release_time), ", see tagging details below:", sep = ""))
}
######################################
#### RELEASE GROUPS ASSIGNED HERE ####
######################################
detects_study <- detects_study[detects_study$release_location =="RBDD_Rel" & detects_study$release_time > as.POSIXct("2024-04-08 12:00:00"),]
detects_study <- rbind(detects_study,detects_study2)
detects_study[detects_study$release_time > as.POSIXct("2024-04-08 12:00:00") & detects_study$release_time < as.POSIXct("2024-04-20 12:00:00"), "Release"] <- "1.Before pulse 1 - CDFW"
detects_study[detects_study$release_time > as.POSIXct("2024-04-20 12:00:00") & detects_study$release_time < as.POSIXct("2024-04-26 12:00:00"), "Release"] <- "2.During pulse 1 - CDFW"
detects_study[detects_study$release_time > as.POSIXct("2024-04-28 12:00:00") & detects_study$release_time < as.POSIXct("2024-05-05 12:00:00"), "Release"] <- "3.After pulse 1 - CDFW"
detects_study[detects_study$release_time > as.POSIXct("2024-05-05 12:00:00") & detects_study$release_time < as.POSIXct("2024-05-10 12:00:00"), "Release"] <- "4.During pulse 2 - UCSC"
detects_study[detects_study$release_time > as.POSIXct("2024-05-10 12:00:00") & detects_study$release_time < as.POSIXct("2024-05-16 12:00:00"), "Release"] <- "5.After pulse 2 - UCSC"
detects_study[detects_study$release_time > as.POSIXct("2024-05-19 12:00:00") & detects_study$release_time < as.POSIXct("2024-05-23 12:00:00"), "Release"] <- "6.During pulse 3 - UCSC"
detects_study[detects_study$release_time > as.POSIXct("2024-05-23 12:00:00") & detects_study$release_time < as.POSIXct("2024-05-31 12:00:00"), "Release"] <- "7.After pulse 3 - UCSC"
#detects_study$Release <- factor(detects_study$Release, levels = c("Before pulse 1 - CDFW",
# "During pulse 1 - CDFW",
# "After pulse 1 - CDFW",
# "During pulse 2 - UCSC"))
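## The interval assignments above can be written more compactly (a sketch using
## dplyr::case_when with the same break points copied from the assignments above;
## release times falling in the gaps between windows remain NA, as above):
# detects_study <- detects_study %>%
#   mutate(Release = case_when(
#     release_time > as.POSIXct("2024-04-08 12:00:00") & release_time < as.POSIXct("2024-04-20 12:00:00") ~ "1.Before pulse 1 - CDFW",
#     release_time > as.POSIXct("2024-04-20 12:00:00") & release_time < as.POSIXct("2024-04-26 12:00:00") ~ "2.During pulse 1 - CDFW",
#     release_time > as.POSIXct("2024-04-28 12:00:00") & release_time < as.POSIXct("2024-05-05 12:00:00") ~ "3.After pulse 1 - CDFW",
#     release_time > as.POSIXct("2024-05-05 12:00:00") & release_time < as.POSIXct("2024-05-10 12:00:00") ~ "4.During pulse 2 - UCSC",
#     release_time > as.POSIXct("2024-05-10 12:00:00") & release_time < as.POSIXct("2024-05-16 12:00:00") ~ "5.After pulse 2 - UCSC",
#     release_time > as.POSIXct("2024-05-19 12:00:00") & release_time < as.POSIXct("2024-05-23 12:00:00") ~ "6.During pulse 3 - UCSC",
#     release_time > as.POSIXct("2024-05-23 12:00:00") & release_time < as.POSIXct("2024-05-31 12:00:00") ~ "7.After pulse 3 - UCSC"))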
study_tagcodes <- as.data.frame(unique(detects_study[,c("TagCode", "release_time", "weight", "length", "release_rkm",
"release_location", "Release","Rel_latitude","Rel_longitude")]))
release_stats <- study_tagcodes %>%
group_by(Release) %>%
summarise(First_release_time = min(release_time),
Last_release_time = max(release_time),
Number_fish_released = length(unique(TagCode)),
Release_location = head(release_location, 1),
Release_rkm = head(release_rkm,1),
Mean_length = mean(length, na.rm=T),
Mean_weight = mean(weight, na.rm=T),
Release_lat = head(Rel_latitude,1),
Release_lon = head(Rel_longitude,1)) %>%
mutate(Mean_length = round(Mean_length, 1),
Mean_weight = round(Mean_weight, 1),
First_release_time = format(First_release_time, tz = "Etc/GMT+8"),
Last_release_time = format(Last_release_time, tz = "Etc/GMT+8")) %>%
arrange(Release)
release_stats_all <- study_tagcodes %>%
summarise(First_release_time = min(release_time),
Last_release_time = max(release_time),
Number_fish_released = length(unique(TagCode)),
Release_location = NA,
Release_rkm = mean(release_rkm,na.rm = T),
Mean_length = mean(length, na.rm=T),
Mean_weight = mean(weight, na.rm=T),
Release_lat = head(Rel_latitude,1),
Release_lon = head(Rel_longitude,1)) %>%
mutate(Mean_length = round(Mean_length, 1),
Mean_weight = round(Mean_weight, 1),
Release_rkm = round(Release_rkm,1),
First_release_time = format(First_release_time, tz = "Etc/GMT+8"),
Last_release_time = format(Last_release_time, tz = "Etc/GMT+8"))
release_stats <- rbind(release_stats, data.frame(Release = "ALL", release_stats_all))
kable(release_stats[,!names(release_stats)%in% c("Release_lon","Release_lat","release_location")], format = "html", row.names = F) %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"), full_width = F, position = "left")
}
Study began on 2024-04-05 17:30:00, see tagging details below:
Release | First_release_time | Last_release_time | Number_fish_released | Release_location | Release_rkm | Mean_length | Mean_weight |
---|---|---|---|---|---|---|---|
1.Before pulse 1 - CDFW | 2024-04-18 11:00:00 | 2024-04-18 11:00:00 | 201 | RBDD_Rel | 461.579 | 81.4 | 6.0 |
2.During pulse 1 - CDFW | 2024-04-23 12:00:00 | 2024-04-24 10:10:00 | 200 | RBDD_Rel | 461.579 | 82.2 | 6.3 |
3.After pulse 1 - CDFW | 2024-04-30 12:00:00 | 2024-05-01 09:00:00 | 200 | RBDD_Rel | 461.579 | 91.7 | 8.8 |
4.During pulse 2 - UCSC | 2024-05-07 08:15:00 | 2024-05-08 08:30:00 | 224 | RBDD_Rel | 461.579 | 97.0 | 11.5 |
5.After pulse 2 - UCSC | 2024-05-14 08:30:00 | 2024-05-15 08:30:00 | 235 | RBDD_Rel | 461.579 | 102.5 | 13.6 |
6.During pulse 3 - UCSC | 2024-05-21 08:35:00 | 2024-05-22 08:30:00 | 199 | RBDD_Rel | 461.579 | 102.4 | 13.4 |
7.After pulse 3 - UCSC | 2024-05-29 09:06:00 | 2024-05-30 08:37:00 | 116 | RBDD_Rel | 461.579 | 105.2 | 14.4 |
ALL | 2024-04-18 11:00:00 | 2024-05-30 08:37:00 | 1375 | NA | 461.600 | 94.2 | 10.4 |
library(leaflet)
library(maps)
library(htmlwidgets)
library(leaflet.extras)
library(dplyr)
library(dbplyr)
library(DBI)
library(odbc)
library(data.table)
# Create connection with cloud database
con <- dbConnect(odbc(),
Driver = "SQL Server",
Server = "calfishtrack-server.database.windows.net",
Database = "realtime_detections",
UID = "realtime_user",
PWD = "Pass@123",
Port = 1433)
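# A quick connection sanity check (a sketch; "realtime_locs" is the table queried below):
# stopifnot("realtime_locs" %in% DBI::dbListTables(con))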
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
## THIS CODE CHUNK WILL NOT WORK IF USING ONLY ERDDAP DATA, REQUIRES ACCESS TO LOCAL FILES
if (nrow(detects_study[is.na(detects_study$DateTime_PST)==F,]) == 0){
cat("No detections yet")
# Use dbplyr to load realtime_locs and qryHexCodes sql table
gen_locs <- tbl(con, "realtime_locs") %>% collect()
# gen_locs <- read.csv("realtime_locs.csv", stringsAsFactors = F) %>% filter(is.na(stop))
leaflet(data = gen_locs[is.na(gen_locs$stop),]) %>%
# setView(-72.14600, 43.82977, zoom = 8) %>%
addProviderTiles("Esri.WorldStreetMap", group = "Map") %>%
addProviderTiles("Esri.WorldImagery", group = "Satellite") %>%
addProviderTiles("Esri.WorldShadedRelief", group = "Relief") %>%
# Marker data are from the sites data frame. We need the ~ symbols
# to indicate the columns of the data frame.
addMarkers(~longitude, ~latitude, label = ~general_location, group = "Receiver Sites", popup = ~location) %>%
# addAwesomeMarkers(~lon_dd, ~lat_dd, label = ~locality, group = "Sites", icon=icons) %>%
addScaleBar(position = "bottomleft") %>%
addLayersControl(
baseGroups = c("Street Map", "Satellite", "Relief"),
overlayGroups = c("Receiver Sites"),
options = layersControlOptions(collapsed = FALSE)) %>%
addSearchFeatures(targetGroups = c("Receiver Sites"))
} else {
# Use dbplyr to load realtime_locs and qryHexCodes sql table
gen_locs <- tbl(con, "realtime_locs") %>% collect()
# gen_locs <- read.csv("realtime_locs.csv", stringsAsFactors = F)
endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d")),
max(as.Date(detects_study$release_time)+(as.numeric(detects_study$tag_life)*1.5)))
beacon_by_day <- fread("beacon_by_day.csv", stringsAsFactors = F) %>%
mutate(day = as.Date(day)) %>%
# Subset to only look at data for the correct beacon for that day
filter(TagCode == beacon) %>%
# Only keep beacon by day for days since fish were released
filter(day >= as.Date(min(study_tagcodes$release_time)) & day <= endtime) %>%
dplyr::left_join(., gen_locs[,c("location", "general_location","rkm")], by = "location")
arrivals_per_day <- detects_study %>%
group_by(general_location, TagCode) %>%
summarise(DateTime_PST = min(DateTime_PST, na.rm = T)) %>%
arrange(TagCode, general_location) %>%
mutate(day = as.Date(DateTime_PST, "%Y-%m-%d", tz = "Etc/GMT+8")) %>%
group_by(day, general_location) %>%
summarise(New_arrivals = length(TagCode)) %>%
na.omit() %>%
mutate(day = as.Date(day)) %>%
dplyr::left_join(unique(beacon_by_day[,c("general_location", "day", "rkm")]), .,
by = c("general_location", "day")) %>%
arrange(general_location, day) %>%
mutate(day = as.factor(day)) %>%
filter(general_location != "Bench_test") %>% # Remove bench test
filter(!(is.na(general_location))) # Remove NA locations
## Remove sites that were not operational the whole time
#### FOR THE SEASONAL SURVIVAL PAGE, KEEP ALL SITES SINCE PEOPLE WANT TO SEE DETECTIONS OF LATER FISH AT NEWLY
#### DEPLOYED SPOTS
gen_locs_days_in_oper <- arrivals_per_day %>%
group_by(general_location) %>%
summarise(days_in_oper = length(day))
#gen_locs_days_in_oper <- gen_locs_days_in_oper[gen_locs_days_in_oper$days_in_oper ==
# max(gen_locs_days_in_oper$days_in_oper),]
arrivals_per_day_in_oper <- arrivals_per_day %>%
filter(general_location %in% gen_locs_days_in_oper$general_location)
fish_per_site <- arrivals_per_day_in_oper %>%
group_by(general_location) %>%
summarise(fish_count = sum(New_arrivals, na.rm=T))
gen_locs_mean_coords <- gen_locs %>%
filter(is.na(stop) & general_location %in% fish_per_site$general_location) %>%
group_by(general_location) %>%
summarise(latitude = mean(latitude), # estimate mean lat and lons for each genloc
longitude = mean(longitude))
fish_per_site <- merge(fish_per_site, gen_locs_mean_coords)
release_stats_agg <- aggregate(cbind(Release_lon, Release_lat) ~ Release_location, data = release_stats[release_stats$Release != "ALL",], FUN = mean)
release_stats_agg <- merge(release_stats_agg, aggregate(Number_fish_released ~ Release_location, data = release_stats[release_stats$Release != "ALL",], FUN = sum))
if(!is.na(release_stats$Release_lat[1])){
leaflet(data = fish_per_site) %>%
addProviderTiles("Esri.WorldStreetMap", group = "Map") %>%
addProviderTiles("Esri.WorldImagery", group = "Satellite") %>%
addProviderTiles("Esri.WorldShadedRelief", group = "Relief") %>%
# Marker data are from the sites data frame. We need the ~ symbols
# to indicate the columns of the data frame.
addPulseMarkers(data = fish_per_site[seq(from = 1, to = nrow(fish_per_site), by = 2),], lng = ~longitude, lat = ~latitude, label = ~fish_count,
labelOptions = labelOptions(noHide = T, direction = "left", textsize = "15px"), group = "Receiver Sites",
popup = ~general_location, icon = makePulseIcon(heartbeat = 1.3)) %>%
addPulseMarkers(data = fish_per_site[seq(from = 2, to = nrow(fish_per_site), by = 2),], lng = ~longitude, lat = ~latitude, label = ~fish_count,
labelOptions = labelOptions(noHide = T, direction = "right", textsize = "15px"), group = "Receiver Sites",
popup = ~general_location, icon = makePulseIcon(heartbeat = 1.3)) %>%
addCircleMarkers(data = release_stats_agg, ~Release_lon, ~Release_lat, label = ~Number_fish_released, stroke = F, color = "blue", fillOpacity = 1,
group = "Release Sites", popup = ~Release_location, labelOptions = labelOptions(noHide = T, textsize = "15px")) %>%
addScaleBar(position = "bottomleft") %>%
addLegend("bottomright", labels = c("Receivers", "Release locations"), colors = c("red","blue")) %>%
addLayersControl(baseGroups = c("Street Map", "Satellite", "Relief"), options = layersControlOptions(collapsed = FALSE))
} else {
leaflet(data = fish_per_site) %>%
addProviderTiles("Esri.WorldStreetMap", group = "Map") %>%
addProviderTiles("Esri.WorldImagery", group = "Satellite") %>%
addProviderTiles("Esri.WorldShadedRelief", group = "Relief") %>%
# Marker data are from the sites data frame. We need the ~ symbols
# to indicate the columns of the data frame.
addPulseMarkers(lng = fish_per_site$longitude, lat = fish_per_site$latitude, label = ~fish_count,
labelOptions = labelOptions(noHide = T, textsize = "15px"), group = "Receiver Sites",
popup = ~general_location, icon = makePulseIcon(heartbeat = 1.3)) %>%
addScaleBar(position = "bottomleft") %>%
addLayersControl(baseGroups = c("Street Map", "Satellite", "Relief"),
options = layersControlOptions(collapsed = FALSE))
}
}
2.1 Map of unique fish detections at operational realtime detection locations
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
library(cder)
if (nrow(detects_study[is.na(detects_study$DateTime_PST)==F,]) > 0){
detects_study <- detects_study[order(detects_study$TagCode, detects_study$DateTime_PST),]
## Record the previous general location for each detection so consecutive detections at the same site can be grouped.
detects_study$prev_genloc <- shift(detects_study$general_location, fill = NA, type = "lag")
#detects_study$prev_genloc <- shift(detects_study$General_Location, fill = NA, type = "lag")
## Set prev_genloc to NA when the previous row belongs to a different tag code or general location
detects_study[which(detects_study$TagCode != shift(detects_study$TagCode, fill = NA, type = "lag")), "prev_genloc"] <- NA
detects_study[which(detects_study$general_location != detects_study$prev_genloc), "prev_genloc"] <- NA
detects_study$mov_score <- 0
detects_study[is.na(detects_study$prev_genloc), "mov_score"] <- 1
detects_study$mov_counter <- cumsum(detects_study$mov_score)
detects_summary <- aggregate(list(first_detect = detects_study$DateTime_PST),
                             by = list(TagCode = detects_study$TagCode, release_time = detects_study$release_time,
                                       mov_counter = detects_study$mov_counter, general_location = detects_study$general_location,
                                       river_km = detects_study$river_km, release_rkm = detects_study$release_rkm), min)
detects_summary <- detects_summary[is.na(detects_summary$first_detect) == F,]
releases <- aggregate(list(first_detect = detects_study$release_time),
                      by = list(TagCode = detects_study$TagCode, release_time = detects_study$release_time,
                                release_location = detects_study$release_location, release_rkm = detects_study$release_rkm), min)
releases$river_km <- releases$release_rkm
releases$mov_counter <- NA
releases$general_location <- paste(releases$release_location,"_RELEASE")
releases$release_location <- NULL
detects_summary <- rbindlist(list(detects_summary, releases), use.names = T)
detects_summary <- detects_summary[order(detects_summary$TagCode, detects_summary$first_detect),]
starttime <- as.Date(min(detects_study$release_time), "Etc/GMT+8")
## Endtime should be either now, or end of predicted tag life, whichever comes first
endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d"))+1, max(as.Date(detects_study$release_time)+(as.numeric(detects_study$tag_life))))
#par(mar=c(6, 5, 2, 5) + 0.1)
MLW <- data.frame()
CLW <- data.frame()
TIS <- data.frame()
FRE <- data.frame()
## Download weir overtopping data only if the release location is upstream of at least Fremont Weir
if(max(detects_study$release_rkm) > 209.5){
MLW <- try(cdec_query(stations = c("MLW"), sensors = 20, durations = "H", start.date = starttime, end.date = endtime))
CLW <- try(cdec_query(stations = c("CLW"), sensors = 20, durations = "H", start.date = starttime, end.date = endtime))
TIS <- try(cdec_query(stations = c("TIS"), sensors = 20, durations = "H", start.date = starttime, end.date = endtime))
FRE <- try(cdec_query(stations = c("FRE"), sensors = 20, durations = "H", start.date = starttime, end.date = endtime))
}
if(inherits(MLW, "try-error")|
inherits(CLW, "try-error")|
inherits(TIS, "try-error")|
inherits(FRE, "try-error")|
nrow(MLW)==0|
nrow(CLW)==0|
nrow(TIS)==0|
nrow(FRE)==0){
plot_ly(detects_summary, x = ~first_detect, y = ~river_km, color = ~TagCode, width = 900, height = 600, dynamicTicks = TRUE, connectgaps = TRUE, mode = "lines+markers", type = "scatter",hoverinfo = "text",
text = ~paste("</br> TagCode: ", TagCode,
"</br> Arrival: ", first_detect,
"</br> Location: ", general_location)) %>%
layout(showlegend = T,
xaxis = list(title = "<b> Date <b>", mirror=T,ticks="outside",showline=T, range=c(starttime,endtime)),
yaxis = list(title = "<b> Kilometers upstream of the Golden Gate <b>", mirror=T,ticks="outside",showline=T, range=c(max(detects_study$release_rkm, na.rm = T)+10, min(gen_locs[is.na(gen_locs$stop),"rkm"], na.rm = T)-10)),
legend = list(title=list(text='<b> Tag Code </b>')),
margin=list(l = 50,r = 100,b = 50,t = 50))
}else{
MLW[is.na(MLW$Value)==F, "rkm"] <- 326.3
CLW[is.na(CLW$Value)==F, "rkm"] <- 310.2
TIS[is.na(TIS$Value)==F, "rkm"] <- 267.7
FRE[is.na(FRE$Value)==F, "rkm"]<- 209.5
MLW[1,"rkm"] <- 1000 ## this ensures it shows up on legend even when weirs arent overtopping
CLW[1,"rkm"] <- 1000 ## this ensures it shows up on legend even when weirs arent overtopping
TIS[1,"rkm"] <- 1000 ## this ensures it shows up on legend even when weirs arent overtopping
FRE[1,"rkm"] <- 1000 ## this ensures it shows up on legend even when weirs arent overtopping
plot_ly(width = 900, height = 600) %>%
add_trace(x=~MLW$DateTime, y=~MLW$rkm, type = "scatter", mode = "markers", name = "Moulton spill", marker=list(symbol="square-open", color="red")) %>%
add_trace(x=~CLW$DateTime, y=~CLW$rkm, type = "scatter", mode = "markers", name = "Colusa spill", marker=list(symbol="square-open", color="blue")) %>%
add_trace(x=~TIS$DateTime, y=~TIS$rkm, type = "scatter", mode = "markers", name = "Tisdale spill", marker=list(symbol="square-open", color="orange")) %>%
add_trace(x=~FRE$DateTime, y=~FRE$rkm, type = "scatter", mode = "markers", name = "Fremont spill", marker=list(symbol="square-open", color="green")) %>%
add_trace(data=detects_summary, x = ~first_detect, y = ~river_km, color = ~TagCode, dynamicTicks = TRUE, connectgaps = TRUE, mode = "lines+markers", type = "scatter",hoverinfo = "text",
text = ~paste("</br> TagCode: ", detects_summary$TagCode,"</br> Arrival: ", detects_summary$first_detect,"</br> Location: ", detects_summary$general_location)) %>%
layout(showlegend = T, xaxis = list(title = "<b> Date <b>", mirror=T,ticks="outside",showline=T, range=c(starttime,endtime)),
yaxis = list(title = "<b> Kilometers upstream of the Golden Gate <b>", mirror=T,ticks="outside",showline=T, range=c(min(gen_locs[is.na(gen_locs$stop),"rkm"], na.rm = T)-10, max(detects_study$release_rkm, na.rm = T)+10)),legend = list(title=list(text="<b> Weir Spill and Tag Codes </b>")),
#legend2 = list(title=list(text="<b> Tag Code </b>")),
margin=list(l = 50,r = 100,b = 50,t = 50))
}
}else{
plot(1:2, type = "n", xlab = "",xaxt = "n", yaxt = "n", ylab = "Kilometers upstream of the Golden Gate")
text(1.5,1.5, labels = "NO DETECTIONS YET", cex = 2)
}
2.2 Waterfall Detection Plot
library(tidyr)
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
detects_3 <- detects_study %>% filter(general_location == "Blw_Salt_RT")
if(nrow(detects_3) == 0){
plot(1:2, type = "n", xlab = "",xaxt = "n", yaxt = "n", ylab = "Number of fish arrivals per day")
text(1.5,1.5, labels = "NO DETECTIONS YET", cex = 2)
} else {
detects_3 <- detects_3 %>%
dplyr::left_join(., detects_3 %>%
group_by(TagCode) %>%
summarise(first_detect = min(DateTime_PST))) %>%
mutate(Day = as.Date(first_detect, tz = "Etc/GMT+8"))
starttime <- as.Date(min(detects_3$release_time), "Etc/GMT+8")
# Endtime should be either now, or end of predicted tag life, whichever comes first
endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d")),
max(as.Date(detects_study$release_time)+(as.numeric(detects_study$tag_life))))
daterange <- data.frame(Day = seq.Date(from = starttime, to = endtime, by = "day"))
rels <- unique(study_tagcodes$Release)
rel_num <- length(rels)
rels_no_detects <- as.character(rels[!(rels %in% unique(detects_3$Release))])
tagcount1 <- detects_3 %>%
group_by(Day, Release) %>%
summarise(unique_tags = length(unique(TagCode))) %>%
spread(Release, unique_tags)
daterange1 <- merge(daterange, tagcount1, all.x=T)
daterange1[is.na(daterange1)] <- 0
if(length(rels_no_detects)>0){
for(i in rels_no_detects){
daterange1 <- cbind(daterange1, x=NA)
names(daterange1)[names(daterange1) == "x"] <- paste(i)
}
}
# Download flow data
flow_day <- readNWISuv(siteNumbers = "11377100", parameterCd="00060", startDate = starttime,
endDate = endtime+1) %>%
mutate(Day = as.Date(format(dateTime, "%Y-%m-%d"))) %>%
group_by(Day) %>%
summarise(parameter_value = mean(X_00060_00000))
## Reorder columns alphabetically so release-group coloring in the barplots is consistent
daterange2 <- daterange1[,order(colnames(daterange1))] %>%
dplyr::left_join(., flow_day, by = "Day")
rownames(daterange2) <- daterange2$Day
daterange2$Date <- daterange2$Day
daterange2$Day <- NULL
daterange2_flow <- daterange2 %>% select(Date, parameter_value)
daterange3 <- melt(daterange2[,!(names(daterange2) %in% c("parameter_value"))],
id.vars = "Date", variable.name = ".")
daterange3$. <- factor(daterange3$., levels = sort(unique(daterange3$.), decreasing = T))
par(mar=c(6, 5, 2, 5) + 0.1)
ay <- list(
overlaying = "y",
nticks = 5,
color = "#947FFF",
side = "right",
title = "Flow (cfs) at Bend Bridge",
automargin = TRUE
)
plot_ly(daterange3, width = 900, height = 600, dynamicTicks = TRUE) %>%
add_bars(x = ~Date, y = ~value, color = ~.) %>%
add_annotations(text="Release (click on legend<br> items to isolate)", xref="paper", yref="paper",
x=0.01, xanchor="left",
y=1.02, yanchor="bottom", # Same y as legend below
legendtitle=TRUE, showarrow=FALSE ) %>%
add_lines(x=~daterange2_flow$Date,
y=~daterange2_flow$parameter_value,
line = list(color = alpha("#947FFF", alpha = 0.5)), yaxis="y2", showlegend=FALSE,
inherit=FALSE) %>%
layout(yaxis2 = ay,showlegend = T,
barmode = "stack",
xaxis = list(title = "Date", mirror=T,ticks="outside",showline=T),
yaxis = list(title = "Number of fish arrivals per day", mirror=T,ticks="outside",showline=T),
legend = list(orientation = "h",x = 0.2, y = 1.01, yanchor="bottom", xanchor="left"),
margin=list(l = 50,r = 100,b = 50,t = 50))
}
2.3 Detections at Salt Creek versus Sacramento River flows at Bend Bridge for duration of tag life
library(tidyr)
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
detects_4 <- detects_study %>% filter(general_location == "MeridianBr")
if(nrow(detects_4) == 0){
plot(1:2, type = "n", xlab = "",xaxt = "n", yaxt = "n", ylab = "Number of fish arrivals per day")
text(1.5,1.5, labels = "NO DETECTIONS YET", cex = 2)
} else {
detects_4 <- detects_4 %>%
dplyr::left_join(., detects_4 %>%
group_by(TagCode) %>%
summarise(first_detect = min(DateTime_PST))) %>%
mutate(Day = as.Date(first_detect, tz = "Etc/GMT+8"))
starttime <- as.Date(min(detects_4$release_time), "Etc/GMT+8")
# Endtime should be either now, or end of predicted tag life, whichever comes first
endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d")),
max(as.Date(detects_study$release_time)+(as.numeric(detects_study$tag_life))))
daterange <- data.frame(Day = seq.Date(from = starttime, to = endtime, by = "day"))
rels <- unique(study_tagcodes$Release)
rel_num <- length(rels)
rels_no_detects <- as.character(rels[!(rels %in% unique(detects_4$Release))])
tagcount1 <- detects_4 %>%
group_by(Day, Release) %>%
summarise(unique_tags = length(unique(TagCode))) %>%
spread(Release, unique_tags)
daterange1 <- merge(daterange, tagcount1, all.x=T)
daterange1[is.na(daterange1)] <- 0
if(length(rels_no_detects)>0){
for(i in rels_no_detects){
daterange1 <- cbind(daterange1, x=NA)
names(daterange1)[names(daterange1) == "x"] <- paste(i)
}
}
# Download flow data
flow_day <- readNWISuv(siteNumbers = "11390500", parameterCd="00060", startDate = starttime,
endDate = endtime+1) %>%
mutate(Day = as.Date(format(dateTime, "%Y-%m-%d"))) %>%
group_by(Day) %>%
summarise(parameter_value = mean(X_00060_00000))
## Reorder columns alphabetically so release-group coloring in the barplots is consistent
daterange2 <- daterange1[,order(colnames(daterange1))] %>%
dplyr::left_join(., flow_day, by = "Day")
rownames(daterange2) <- daterange2$Day
daterange2$Date <- daterange2$Day
daterange2$Day <- NULL
daterange2_flow <- daterange2 %>% select(Date, parameter_value)
daterange3 <- melt(daterange2[,!(names(daterange2) %in% c("parameter_value"))],
id.vars = "Date", variable.name = ".")
daterange3$. <- factor(daterange3$., levels = sort(unique(daterange3$.), decreasing = T))
par(mar=c(6, 5, 2, 5) + 0.1)
ay <- list(
overlaying = "y",
nticks = 5,
color = "#947FFF",
side = "right",
title = "Flow (cfs) at Wilkins Slough",
automargin = TRUE
)
plot_ly(daterange3, width = 900, height = 600, dynamicTicks = TRUE) %>%
add_bars(x = ~Date, y = ~value, color = ~.) %>%
add_annotations(text="Release (click on legend<br> items to isolate)", xref="paper", yref="paper",
x=0.01, xanchor="left",
y=1.02, yanchor="bottom", # Same y as legend below
legendtitle=TRUE, showarrow=FALSE ) %>%
add_lines(x=~daterange2_flow$Date,
y=~daterange2_flow$parameter_value,
line = list(color = alpha("#947FFF", alpha = 0.5)), yaxis="y2", showlegend=FALSE,
inherit=FALSE) %>%
layout(yaxis2 = ay,showlegend = T,
barmode = "stack",
xaxis = list(title = "Date", mirror=T,ticks="outside",showline=T),
yaxis = list(title = "Number of fish arrivals per day", mirror=T,ticks="outside",showline=T),
legend = list(orientation = "h",x = 0.2, y = 1.01, yanchor="bottom", xanchor="left"),
margin=list(l = 50,r = 100,b = 50,t = 50))
}
2.4 Detections at Meridian Bridge versus Sacramento River flows at Wilkins Slough for duration of tag life
library(tidyr)
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
detects_5 <- detects_study %>% filter(general_location == "TowerBridge")
if(nrow(detects_5) == 0){
plot(1:2, type = "n", xlab = "",xaxt = "n", yaxt = "n", ylab = "Number of fish arrivals per day")
text(1.5,1.5, labels = "NO DETECTIONS YET", cex = 2)
} else {
detects_5 <- detects_5 %>%
dplyr::left_join(., detects_5 %>%
group_by(TagCode) %>%
summarise(first_detect = min(DateTime_PST))) %>%
mutate(Day = as.Date(first_detect, tz = "Etc/GMT+8"))
starttime <- as.Date(min(detects_5$release_time), "Etc/GMT+8")
# Endtime should be either now, or end of predicted tag life, whichever comes first
endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d")),
max(as.Date(detects_study$release_time)+(as.numeric(detects_study$tag_life))))
daterange <- data.frame(Day = seq.Date(from = starttime, to = endtime, by = "day"))
rels <- unique(study_tagcodes$Release)
rel_num <- length(rels)
rels_no_detects <- as.character(rels[!(rels %in% unique(detects_5$Release))])
tagcount1 <- detects_5 %>%
group_by(Day, Release) %>%
summarise(unique_tags = length(unique(TagCode))) %>%
spread(Release, unique_tags)
daterange1 <- merge(daterange, tagcount1, all.x=T)
daterange1[is.na(daterange1)] <- 0
if(length(rels_no_detects)>0){
for(i in rels_no_detects){
daterange1 <- cbind(daterange1, x=NA)
names(daterange1)[names(daterange1) == "x"] <- paste(i)
}
}
# Download flow data
flow_day <- readNWISuv(siteNumbers = "11425500", parameterCd="00060", startDate = starttime,
endDate = endtime+1) %>%
mutate(Day = as.Date(format(dateTime, "%Y-%m-%d"))) %>%
group_by(Day) %>%
summarise(parameter_value = mean(X_00060_00000))
## Reorder columns alphabetically so release-group coloring in the barplots is consistent
daterange2 <- daterange1[,order(colnames(daterange1))] %>%
dplyr::left_join(., flow_day, by = "Day")
rownames(daterange2) <- daterange2$Day
daterange2$Date <- daterange2$Day
daterange2$Day <- NULL
daterange2_flow <- daterange2 %>% select(Date, parameter_value)
daterange3 <- melt(daterange2[,!(names(daterange2) %in% c("parameter_value"))],
id.vars = "Date", variable.name = ".")
daterange3$. <- factor(daterange3$., levels = sort(unique(daterange3$.), decreasing = T))
par(mar=c(6, 5, 2, 5) + 0.1)
ay <- list(
overlaying = "y",
nticks = 5,
color = "#947FFF",
side = "right",
title = "Flow (cfs) at Verona",
automargin = TRUE
)
plot_ly(daterange3, width = 900, height = 600, dynamicTicks = TRUE) %>%
add_bars(x = ~Date, y = ~value, color = ~.) %>%
add_annotations(text="Release (click on legend<br> items to isolate)", xref="paper", yref="paper",
x=0.01, xanchor="left",
y=1.02, yanchor="bottom", # Same y as legend below
legendtitle=TRUE, showarrow=FALSE ) %>%
add_lines(x=~daterange2_flow$Date,
y=~daterange2_flow$parameter_value,
line = list(color = alpha("#947FFF", alpha = 0.5)), yaxis="y2", showlegend=FALSE,
inherit=FALSE) %>%
layout(yaxis2 = ay,showlegend = T,
barmode = "stack",
xaxis = list(title = "Date", mirror=T,ticks="outside",showline=T),
yaxis = list(title = "Number of fish arrivals per day", mirror=T,ticks="outside",showline=T),
legend = list(orientation = "h",x = 0.2, y = 1.01, yanchor="bottom", xanchor="left"),
margin=list(l = 50,r = 100,b = 50,t = 50))
}
2.5 Detections at Tower Bridge (downtown Sacramento) versus Sacramento River flows at Verona for duration of tag life
library(tidyr)
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
detects_6 <- detects_study %>% filter(general_location == "Benicia_west" | general_location == "Benicia_east")
if(nrow(detects_6) == 0){
plot(1:2, type = "n", xlab = "",xaxt = "n", yaxt = "n", ylab = "Number of fish arrivals per day")
text(1.5,1.5, labels = "NO DETECTIONS YET", cex = 2)
} else {
detects_6 <- detects_6 %>%
dplyr::left_join(., detects_6 %>%
group_by(TagCode) %>%
summarise(first_detect = min(DateTime_PST))) %>%
mutate(Day = as.Date(first_detect, tz = "Etc/GMT+8"))
starttime <- as.Date(min(detects_6$release_time), "Etc/GMT+8")
# Endtime should be either now, or end of predicted tag life, whichever comes first
endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d")),
max(as.Date(detects_study$release_time)+(as.numeric(detects_study$tag_life))))
daterange <- data.frame(Day = seq.Date(from = starttime, to = endtime, by = "day"))
rels <- unique(study_tagcodes$Release)
rel_num <- length(rels)
rels_no_detects <- as.character(rels[!(rels %in% unique(detects_6$Release))])
tagcount1 <- detects_6 %>%
group_by(Day, Release) %>%
summarise(unique_tags = length(unique(TagCode))) %>%
spread(Release, unique_tags)
daterange1 <- merge(daterange, tagcount1, all.x=T)
daterange1[is.na(daterange1)] <- 0
if(length(rels_no_detects)>0){
for(i in rels_no_detects){
daterange1 <- cbind(daterange1, x=NA)
names(daterange1)[names(daterange1) == "x"] <- paste(i)
}
}
## Reorder columns alphabetically so release-group coloring in the barplots is consistent
daterange1 <- daterange1[,order(colnames(daterange1))]
daterange2 <- daterange1
rownames(daterange2) <- daterange2$Day
daterange2$Day <- NULL
par(mar=c(6, 5, 2, 5) + 0.1)
daterange2$Date <- as.Date(row.names(daterange2))
daterange3 <- melt(daterange2, id.vars = "Date", variable.name = ".")
daterange3$. <- factor(daterange3$., levels = sort(unique(daterange3$.), decreasing = T))
plot_ly(daterange3, width = 900, height = 600, dynamicTicks = TRUE) %>%
add_bars(x = ~Date, y = ~value, color = ~.) %>%
add_annotations(text="Release (click on legend<br> items to isolate)", xref="paper", yref="paper",
x=0.01, xanchor="left",
y=1.02, yanchor="bottom", # Same y as legend below
legendtitle=TRUE, showarrow=FALSE ) %>%
layout(showlegend = T,
barmode = "stack",
xaxis = list(title = "Date", mirror=T,ticks="outside",showline=T),
yaxis = list(title = "Number of fish arrivals per day", mirror=T,ticks="outside",showline=T),
legend = list(orientation = "h",x = 0.2, y = 1.01, yanchor="bottom", xanchor="left"),
margin=list(l = 50,r = 100,b = 50,t = 50))
}
2.6 Detections at Benicia Bridge for duration of tag life
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
detects_tower <- detects_study %>% filter(general_location == "TowerBridge")
if(nrow(detects_tower) == 0){
WR.surv <- data.frame("Release"=NA, "Survival (%)"="NO DETECTIONS YET", "SE"=NA, "95% lower C.I."=NA,
"95% upper C.I."=NA, "Detection efficiency (%)"=NA)
colnames(WR.surv) <- c("Release", "Survival (%)", "SE", "95% lower C.I.",
"95% upper C.I.", "Detection efficiency (%)")
print(kable(WR.surv, row.names = F, "html", caption = "3.1 Minimum survival to Tower Bridge (using CJS
survival model). If Yolo Bypass Weirs are overtopping during migration, fish may have taken
that route, and therefore this is a minimum estimate of survival") %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"),
full_width = F, position = "left"))
} else {
study_count <- nrow(study_tagcodes)
# Only do survival to Sac for now
surv <- detects_study %>% filter(river_km > 168 & river_km < 175)
surv_w_salt <- detects_study %>% filter((river_km > 168 & river_km < 175) | river_km > 456)
# calculate mean and SD travel time
travel <- aggregate(list(first_detect = surv$DateTime_PST), by = list(Release = surv$Release, TagCode = surv$TagCode, RelDT = surv$RelDT), min)
travel$days <- as.numeric(difftime(travel$first_detect, travel$RelDT, units = "days"))
travel_final <- aggregate(list(mean_travel_time = travel$days), by = list(Release = travel$Release), mean)
travel_final <- merge(travel_final, aggregate(list(sd_travel_time = travel$days), by = list(Release = travel$Release), sd))
travel_final <- merge(travel_final, aggregate(list(n = travel$days), by = list(Release = travel$Release), length))
travel_final <- rbind(travel_final, data.frame(Release = "ALL", mean_travel_time = mean(travel$days), sd_travel_time = sd(travel$days),n = nrow(travel)))
# Create inp for survival estimation
inp <- as.data.frame(reshape2::dcast(surv_w_salt, TagCode ~ river_km, fun.aggregate = length))
# Sort columns by river km in descending order
gen_loc_sites <- ncol(inp)-1 # Count number of genlocs
if(gen_loc_sites < 3){
WR.surv <- data.frame("Release"=NA, "Survival (%)"="NOT ENOUGH DETECTIONS", "SE"=NA, "95% lower C.I."=NA,
"95% upper C.I."=NA, "Detection efficiency (%)"=NA)
colnames(WR.surv) <- c("Release", "Survival (%)", "SE", "95% lower C.I.", "95% upper C.I.",
"Detection efficiency (%)")
print(kable(WR.surv, row.names = F, "html", caption = "3.1 Minimum survival to Tower Bridge (using CJS
survival model). If Yolo Bypass Weirs are overtopping during migration, fish may
have taken that route, and therefore this is a minimum estimate of survival") %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"),
full_width = F,position = "left"))
} else {
inp <- inp[,c(1,order(names(inp[,2:(gen_loc_sites+1)]), decreasing = T)+1)] %>%
dplyr::left_join(study_tagcodes, ., by = "TagCode")
inp2 <- inp[,(ncol(inp)-gen_loc_sites+1):ncol(inp)] %>%
replace(is.na(.), 0) %>%
replace(., . > 0, 1)
inp <- cbind(inp, inp2)
groups <- as.character(sort(unique(inp$Release)))
surv_w_salt$Release <- factor(surv_w_salt$Release, levels = groups)
inp[,groups] <- 0
for (i in groups) {
inp[as.character(inp$Release) == i, i] <- 1
}
inp$inp_final <- paste("1",apply(inp2, 1, paste, collapse=""),sep="")
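## inp_final is a standard MARK capture history: a leading "1" for the release occasion
## followed by one 0/1 per downstream site, ordered upstream to downstream. For example,
## with 4 sites, "11010" (illustrative values) is a fish detected at the 1st and 3rd sites only.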
if(length(groups) > 1){
# Make sure the factor levels put a release with detections first; if the first release
# in factor order has zero detections, the model goes haywire
inp.df <- data.frame(ch = as.character(inp$inp_final), freq = 1,
rel = factor(inp$Release, levels = names(sort(table(surv_w_salt$Release),decreasing = T))),
stringsAsFactors = F)
WR.process <- process.data(inp.df, model="CJS", begin.time=1, groups = "rel")
WR.ddl <- make.design.data(WR.process)
WR.mark.all <- mark(WR.process, WR.ddl,
model.parameters=list(Phi=list(formula=~time),p=list(formula=~time)),
silent = T, output = F)
WR.mark.rel <- mark(WR.process, WR.ddl,
model.parameters=list(Phi=list(formula=~time*rel),p=list(formula=~time)),
silent = T, output = F)
WR.surv <- round(WR.mark.all$results$real[2,c("estimate", "se", "lcl", "ucl")] * 100,1)
WR.surv <- rbind(WR.surv, round(WR.mark.rel$results$real[seq(from=2,to=length(groups)*3,by = 3),
c("estimate", "se", "lcl", "ucl")] * 100,1))
WR.surv$Detection_efficiency <- NA
WR.surv[1,"Detection_efficiency"] <- round(WR.mark.all$results$real[gen_loc_sites+2,"estimate"] * 100,1)
WR.surv <- cbind(c("ALL", names(sort(table(surv_w_salt$Release),decreasing = T))), WR.surv)
}
if(length(intersect(colnames(inp),groups)) < 2){
inp$inp_final <- paste("1",apply(inp2, 1, paste, collapse=""), " ", 1,sep = "")
write.table(inp$inp_final,"WRinp.inp",row.names = F, col.names = F, quote = F)
WRinp <- convert.inp("WRinp.inp")
WR.process <- process.data(WRinp, model="CJS", begin.time=1)
WR.ddl <- make.design.data(WR.process)
WR.mark.all <- mark(WR.process, WR.ddl,
model.parameters=list(Phi=list(formula=~time),p=list(formula=~time)),
silent = T, output = F)
WR.mark.rel <- mark(WR.process, WR.ddl,
model.parameters=list(Phi=list(formula=~time),p=list(formula=~time)),
silent = T, output = F)
WR.surv <- round(WR.mark.all$results$real[1,c("estimate", "se", "lcl", "ucl")] * 100,1)
WR.surv <- rbind(WR.surv, round(WR.mark.rel$results$real[seq(from=1,to=length(groups)*2,by = 2),
c("estimate", "se", "lcl", "ucl")] * 100,1))
WR.surv$Detection_efficiency <- NA
WR.surv[1,"Detection_efficiency"] <- round(WR.mark.all$results$real[gen_loc_sites+1,"estimate"] * 100,1)
WR.surv <- cbind(c("ALL", groups), WR.surv)
}
colnames(WR.surv)[1] <- "Release"
WR.surv <- merge(WR.surv, travel_final, by = "Release", all.x = T)
WR.surv$mean_travel_time <- round(WR.surv$mean_travel_time,1)
WR.surv$sd_travel_time <- round(WR.surv$sd_travel_time,1)
colnames(WR.surv) <- c("Release", "Survival (%)", "SE", "95% lower C.I.",
"95% upper C.I.", "Detection efficiency (%)", "Mean time to Tower (days)", "SD of time to Tower (days)","Count")
WR.surv <- WR.surv %>% arrange(., Release)
print(kable(WR.surv, row.names = F, "html", caption = "3.1 Minimum survival to Tower Bridge (using CJS
survival model), and travel time. If Yolo Bypass Weirs are overtopping during migration, fish may have taken
that route, and therefore this is a minimum estimate of survival") %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"),
full_width = F, position = "left"))
}
}
Release | Survival (%) | SE | 95% lower C.I. | 95% upper C.I. | Detection efficiency (%) | Mean time to Tower (days) | SD of time to Tower (days) | Count |
---|---|---|---|---|---|---|---|---|
1.Before pulse 1 - CDFW | 48.3 | 3.9 | 40.7 | 56.0 | NA | 5.7 | 3.3 | 89 |
2.During pulse 1 - CDFW | 62.5 | 4.5 | 53.4 | 70.7 | NA | 5.8 | 3.5 | 87 |
3.After pulse 1 - CDFW | 9.9 | 2.2 | 6.4 | 15.1 | NA | 7.1 | 5.2 | 19 |
4.During pulse 2 - UCSC | 42.0 | 7.5 | 28.5 | 56.9 | NA | 3.8 | 1.4 | 20 |
5.After pulse 2 - UCSC | 6.2 | 2.3 | 3.0 | 12.5 | NA | 5.2 | 1.0 | 7 |
6.During pulse 3 - UCSC | 26.4 | 3.2 | 20.6 | 33.1 | NA | 3.3 | 0.4 | 51 |
7.After pulse 3 - UCSC | 4.3 | 1.9 | 1.8 | 9.9 | NA | 5.6 | 1.5 | 5 |
ALL | 23.5 | 1.5 | 20.7 | 26.5 | 77.2 | 5.2 | 3.2 | 278 |
# Download flow data
flow_day <- readNWISuv(siteNumbers = "11390500", parameterCd="00060", startDate = starttime,
endDate = endtime+1)
surv <- detects_study %>% filter(general_location == "MeridianBr")
# calculate mean and SD travel time
travel <- aggregate(list(first_detect = surv$DateTime_PST), by = list(Release = surv$Release, TagCode = surv$TagCode, RelDT = surv$RelDT), min)
arrival_median <- aggregate(list(`Median arrival` = travel$first_detect), by = list(Release = travel$Release), median)
arrival_10perc <- aggregate(list(arrival_10perc = travel$first_detect), by = list(Release = travel$Release), quantile,0.1)
arrival_90perc <- aggregate(list(arrival_90perc = travel$first_detect), by = list(Release = travel$Release), quantile,0.9)
WR.surv <- merge(WR.surv, arrival_median)
WR.surv <- merge(WR.surv, arrival_10perc)
WR.surv <- merge(WR.surv, arrival_90perc)
WR.surv$Median.arrivalminus1sd <- WR.surv$Median.arrival - (WR.surv$`SD of time to Tower (days)`*60*60*24)
WR.surv$Median.arrivalplus1sd <- WR.surv$Median.arrival + (WR.surv$`SD of time to Tower (days)`*60*60*24)
WR.surv <- WR.surv[is.na(WR.surv$`Mean time to Tower (days)`)==F,]
coeff <- max(flow_day$X_00060_00000)/140
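## coeff maps flow onto the 0-100 survival axis: the plotted flow is (flow - min)/coeff,
## and sec_axis() below applies the inverse transform (. * coeff + min flow) to label the
## right-hand axis in cfs.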
ggplot() +
geom_path(data = flow_day, aes(x = dateTime, y = (X_00060_00000-min(flow_day$X_00060_00000))/coeff), color = "blue", linewidth = 1.3) +
geom_point(data = WR.surv, aes(y=`Survival (%)`, x=`Median.arrival`), size = 4) +
#geom_text(data = WR.surv, aes(y=`Survival (%)`, x=`Median.arrival`, label = Release), size = 4)+
xlab("Median arrival to Meridian Bridge (w/ 10-90th percentile)")+
geom_errorbar(data = WR.surv,aes(ymin = `95% lower C.I.`, ymax = `95% upper C.I.`,y=`Survival (%)`, x=`Median.arrival`), width = 1.5, linewidth =1.2) +
geom_errorbarh(data = WR.surv,aes(xmin =arrival_10perc,xmax = arrival_90perc,y=`Survival (%)`), linewidth =1.2, height = 0) +
scale_y_continuous(name = "% Survival to Sacramento (w/ 95% confidence intervals)",limits = c(0,100),sec.axis = sec_axis(~.*coeff + min(flow_day$X_00060_00000), name="Flow at Wilkins Slough (cfs)")) +
theme_bw() +
theme(axis.title.y.right = element_text(color ="blue"),axis.text=element_text(size=12),
axis.title=element_text(size=14,face="bold"))
3.2 Survival per release group vs. flow at Wilkins Slough
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
route_results_possible <- FALSE
if(nrow(detects_study[is.na(detects_study$DateTime_PST)==F,]) == 0){
results_short <- data.frame("Measure"=NA, "Estimate"="NO DETECTIONS YET", "SE"=NA, "95% lower C.I."=NA,
"95% upper C.I."=NA)
colnames(results_short) <- c("Measure", "Estimate", "SE", "95% lower C.I.", "95% upper C.I.")
print(kable(results_short, row.names = F, "html", caption = "3.3 Reach-specific survival and probability
of entering Georgiana Slough") %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"),
full_width = F, position = "left"))
} else {
# Only do survival to Georg split for now
test2 <- detects_study %>%
filter(general_location %in% c("TowerBridge", "I80-50_Br","SacRiverWalnutGrove_2", "Sac_BlwGeorgiana",
"Sac_BlwGeorgiana2", "Georg_Sl_1", "Georgiana_Slough2"))
# We can only do a multi-state model if there is at least one detection in each route
if(nrow(test2[test2$general_location %in% c("Sac_BlwGeorgiana", "Sac_BlwGeorgiana2"),]) == 0 |
nrow(test2[test2$general_location %in% c("Georg_Sl_1", "Georgiana_Slough2"),]) == 0){
results_short <- data.frame("Measure"=NA, "Estimate"="NOT ENOUGH DETECTIONS", "SE"=NA, "95% lower C.I."=NA,
"95% upper C.I."=NA)
colnames(results_short) <- c("Measure", "Estimate", "SE", "95% lower C.I.", "95% upper C.I.")
print(kable(results_short, row.names = F, "html", caption = "3.3 Reach-specific survival and probability of
entering Georgiana Slough") %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"),
full_width = F, position = "left"))
} else {
# Make tagcode character
study_tagcodes$TagCode <- as.character(study_tagcodes$TagCode)
# Make a crosstab query with frequencies for all tag/location combination
test2$general_location <- factor(test2$general_location,
levels = c("TowerBridge", "I80-50_Br","SacRiverWalnutGrove_2", "Sac_BlwGeorgiana",
"Sac_BlwGeorgiana2", "Georg_Sl_1", "Georgiana_Slough2"))
test2$TagCode <- factor(test2$TagCode, levels = study_tagcodes$TagCode)
mytable <- table(test2$TagCode, test2$general_location) # A will be rows, B will be columns
# Convert all nonzero detection counts to state "A". To impose a minimum cutoff of 2
# detections, you could first zero out cells equal to 1 before this step
mytable[mytable>0] <- "A"
# Order the columns by river km
mytable2 <- mytable[, c("TowerBridge", "I80-50_Br","SacRiverWalnutGrove_2", "Sac_BlwGeorgiana", "Sac_BlwGeorgiana2",
"Georg_Sl_1", "Georgiana_Slough2")]
# Now sort the crosstab rows alphabetically
mytable2 <- mytable2[order(row.names(mytable2)),]
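# Relabel by route: Sacramento mainstem sites keep state "A" (the next two lines are
# no-ops kept for symmetry), while Georgiana Slough sites become state "B"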
mytable2[which(mytable2[, "Sac_BlwGeorgiana"]=="A"), "Sac_BlwGeorgiana"] <- "A"
mytable2[which(mytable2[, "Sac_BlwGeorgiana2"]=="A"), "Sac_BlwGeorgiana2"] <- "A"
mytable2[which(mytable2[, "Georg_Sl_1"]=="A"), "Georg_Sl_1"] <- "B"
mytable2[which(mytable2[, "Georgiana_Slough2"]=="A"), "Georgiana_Slough2"] <- "B"
# Now order the study_tagcodes table the same way
study_tagcodes <- study_tagcodes[order(study_tagcodes$TagCode),]
# Paste together (concatenate) the data from each column of the crosstab into one string per row, add to tagging_meta.
# For this step, make sure both are sorted by FishID
study_tagcodes$inp_part1 <- apply(mytable2[,1:3],1,paste,collapse="")
study_tagcodes$inp_partA <- apply(mytable2[,4:5],1,paste,collapse="")
study_tagcodes$inp_partB <- apply(mytable2[,6:7],1,paste,collapse="")
# find last detection at each genloc
departure <- aggregate(list(depart = test2$DateTime_PST), by = list(TagCode = test2$TagCode, last_location = test2$general_location), FUN = max)
# subset for just juncture locations
departure <- departure[departure$last_location %in% c("Sac_BlwGeorgiana", "Sac_BlwGeorgiana2", "Georg_Sl_1", "Georgiana_Slough2"),]
# Find genloc of last known detection per tag
last_depart <- aggregate(list(depart = departure$depart), by = list(TagCode = departure$TagCode), FUN = max)
last_depart1 <- merge(last_depart, departure)
study_tagcodes <- merge(study_tagcodes, last_depart1[,c("TagCode", "last_location")], by = "TagCode", all.x = T)
# Assume the Sacramento is the default pathway; fish detected in neither route get "00" in the inp, so the assumption does not affect them
study_tagcodes$inp_final <- paste("A",study_tagcodes$inp_part1, study_tagcodes$inp_partA," 1 ;", sep = "")
# now put in exceptions...fish that were seen in georgiana last
study_tagcodes[study_tagcodes$last_location %in% c("Georg_Sl_1", "Georgiana_Slough2"), "inp_final"] <-
paste("A",
study_tagcodes[study_tagcodes$last_location %in% c("Georg_Sl_1", "Georgiana_Slough2"), "inp_part1"],
study_tagcodes[study_tagcodes$last_location %in% c("Georg_Sl_1", "Georgiana_Slough2"), "inp_partB"],
" 1 ;",
sep = "")
# The multistate model can only be run if the last-visit analysis assigned at least one fish to each route (both an "A" and a "B" must appear in the capture histories)
if(any(grepl(pattern = "A", study_tagcodes$inp_final)==T) & any(grepl(pattern = "B", study_tagcodes$inp_final)==T)){
write.table(study_tagcodes$inp_final,"WRinp_multistate.inp",row.names = F, col.names = F, quote = F)
WRinp <- convert.inp("WRinp_multistate.inp")
dp <- process.data(WRinp, model="Multistrata")
ddl <- make.design.data(dp)
#### p ####
# State B cannot be detected at occasions 2-4 (Tower, I80-50, Walnut Grove), and the second Georgiana line is currently missing, so fix p to 0 there (including occasion 6) and to 1 at the first Georgiana line (occasion 5)
ddl$p$fix = NA
ddl$p$fix[ddl$p$stratum == "B" & ddl$p$time %in% c(2,3,4,6)] = 0
ddl$p$fix[ddl$p$stratum == "B" & ddl$p$time %in% c(5)] = 1
#### Psi ####
# Only one transition is allowed: from A to B over the interval from occasion 4 to 5 (the Georgiana junction)
ddl$Psi$fix = 0
# Free the A-to-B transition at time 4 only
ddl$Psi$fix[ddl$Psi$stratum == "A"&
ddl$Psi$tostratum == "B" &
ddl$Psi$time == 4] = NA
#### Phi a.k.a. S ####
ddl$S$fix = NA
# Fix survival in route B to 1 for reaches 1-5; route B only begins after the junction, and reach 5 lies between the two Georgiana lines
ddl$S$fix[ddl$S$stratum == "B" & ddl$S$time %in% c(1,2,3,4,5)] = 1
# For route A, fixing it to 1 for 5 (between two blw_georg lines)
ddl$S$fix[ddl$S$stratum == "A" & ddl$S$time == 5] = 1
# We use -1 at beginning of formula to remove intercept. This is because different routes probably should not share the same intercept
p.timexstratum = list(formula=~-1+stratum:time)
Psi.stratumxtime = list(formula=~-1+stratum:time)
S.stratumxtime = list(formula=~-1+stratum:time)
# Run model a first time
S.timexstratum.p.timexstratum.Psi.timexstratum = mark(dp, ddl,
model.parameters = list(S = S.stratumxtime,p = p.timexstratum,Psi = Psi.stratumxtime),
realvcv = T, silent = T, output = F)
# Identify any non-fixed parameter estimates at the 0/1 boundary, which would likely have bad SE estimates.
profile.intervals <- which(S.timexstratum.p.timexstratum.Psi.timexstratum$results$real$estimate %in% c(0,1) &
!S.timexstratum.p.timexstratum.Psi.timexstratum$results$real$fixed == "Fixed")
# Rerun model using profile interval estimation for the tricky parameters
S.timexstratum.p.timexstratum.Psi.timexstratum = mark(dp, ddl,
model.parameters=list(S=S.stratumxtime,p= p.timexstratum,Psi=Psi.stratumxtime),
realvcv = T, profile.int = profile.intervals, silent = T, output = F)
results <- S.timexstratum.p.timexstratum.Psi.timexstratum$results$real
results_short <- results[rownames(results) %in% c("S sA g1 c1 a0 o1 t1",
"S sA g1 c1 a1 o2 t2",
"S sA g1 c1 a2 o3 t3",
"S sA g1 c1 a3 o4 t4",
"p sA g1 c1 a1 o1 t2",
"p sA g1 c1 a2 o2 t3",
"p sA g1 c1 a3 o3 t4",
"p sA g1 c1 a4 o4 t5",
"p sB g1 c1 a4 o4 t5",
"Psi sA toB g1 c1 a3 o4 t4"),]
results_short <- round(results_short[,c("estimate", "se", "lcl", "ucl")] * 100,1)
# Now find estimate and CIs for AtoA route at junction
Psilist = get.real(S.timexstratum.p.timexstratum.Psi.timexstratum,"Psi",vcv=TRUE)
Psivalues = Psilist$estimates
routes <- TransitionMatrix(Psivalues[Psivalues$time==4 & Psivalues$cohort==1,],vcv.real=Psilist$vcv.real)
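# routes$TransitionMat is a stratum-by-stratum matrix: ["A","A"] is the probability of
# staying in the Sacramento route and ["A","B"] the probability of entering Georgiana
# Slough; both are reused in the junction figure legend below (section 3.4)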
results_short$Measure <- c("Survival from Release to TowerBridge (minimum estimate since fish may have taken Yolo
Bypass)", "Survival from TowerBridge to I80-50_Br", "% arrived from I80-50_Br to Walnut Grove (not survival because
fish may have taken Sutter/Steamboat sloughs)","Survival from Walnut Grove to Georgiana Slough confluence","Detection probability at TowerBridge",
"Detection probability at I80-50_Br", "Detection probability at Walnut Grove", "Detection probability at Blw_Georgiana",
#"Detection probability at Georgiana Slough (fixed at 1)",
"Routing probability into Georgiana Slough (Conditional on fish arriving to junction)")
results_short <- results_short[,c("Measure", "estimate", "se", "lcl", "ucl")]
colnames(results_short) <- c("Measure", "Estimate", "SE", "95% lower C.I.", "95% upper C.I.")
print(kable(results_short, row.names = F, "html", caption = "3.3 Reach-specific survival and probability of entering Georgiana Slough") %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"), full_width = F, position = "left"))
route_results_possible <- TRUE
} else {
results_short <- data.frame("Measure"=NA, "Estimate"="NOT ENOUGH DETECTIONS YET", "SE"=NA, "95% lower C.I."=NA, "95% upper C.I."=NA)
colnames(results_short) <- c("Measure", "Estimate", "SE", "95% lower C.I.", "95% upper C.I.")
print(kable(results_short, row.names = F, "html", caption = "3.3 Reach-specific survival and probability of entering Georgiana Slough") %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"), full_width = F, position = "left"))
}
}
}
Measure | Estimate | SE | 95% lower C.I. | 95% upper C.I. |
---|---|---|---|---|
Survival from Release to TowerBridge (minimum estimate since fish may have taken Yolo Bypass) | 21.1 | 1.1 | 19.0 | 23.4 |
Survival from TowerBridge to I80-50_Br | 98.9 | 1.7 | 81.7 | 99.9 |
% arrived from I80-50_Br to Walnut Grove (not survival because fish may have taken Sutter/Steamboat sloughs) | 64.5 | 3.0 | 58.5 | 70.1 |
Survival from Walnut Grove to Georgiana Slough confluence | 92.4 | 1.9 | 87.6 | 95.5 |
Detection probability at TowerBridge | 75.5 | 2.6 | 70.0 | 80.2 |
Detection probability at I80-50_Br | 90.3 | 2.2 | 85.1 | 93.8 |
Detection probability at Walnut Grove | 100.0 | 0.0 | 98.9 | 100.0 |
Detection probability at Blw_Georgiana | 98.1 | 1.1 | 94.1 | 99.4 |
Routing probability into Georgiana Slough (Conditional on fish arriving to junction) | 9.9 | 2.3 | 6.3 | 15.4 |
# If you do not have access to local files, uncomment and run next lines of code
#download.file("https://raw.githubusercontent.com/CalFishTrack/real-time/master/data/georg.png",destfile = "georg.png", quiet = T, mode = "wb")
georg <- readPNG("georg.png")
par(mar = c(2,0,0,0))
# Set up the plot area
plot(1:2, type="n", xlab="", ylab="", xaxt = "n", yaxt = "n")
# Get the plot information so the image will fill the plot box, and draw it
lim <- par()
rasterImage(georg, lim$usr[1], lim$usr[3], lim$usr[2], lim$usr[4])
if(nrow(detects_study[is.na(detects_study$DateTime_PST) == F,]) == 0){
legend(x = 1.55,y = 1.6,legend = "No detections yet",col = "white", box.col = "light gray", bg = "light gray")
legend(x = 1.55,y = 1.45,legend = "No detections yet",col = "white", box.col = "light gray", bg = "light gray")
} else if (route_results_possible == F){
legend(x = 1.55,y = 1.6,legend = "Too few detections",col = "white", box.col = "light gray", bg = "light gray")
legend(x = 1.55,y = 1.45,legend = "Too few detections",col = "white", box.col = "light gray", bg = "light gray")
} else {
legend(x = 1.55,y = 1.6,legend = paste(round(routes$TransitionMat["A","A"],3)*100,
"% (", round(routes$lcl.TransitionMat["A","A"],3)*100, "-",
round(routes$ucl.TransitionMat["A","A"],3)*100,")", sep =""),
col = "white", box.col = "light gray", bg = "light gray")
legend(1.55,1.45, legend = paste(round(routes$TransitionMat["A","B"],3)*100,
"% (", round(routes$lcl.TransitionMat["A","B"],3)*100, "-",
round(routes$ucl.TransitionMat["A","B"],3)*100,")", sep =""),
box.col = "light gray", bg = "light gray")
}
mtext(text = "3.4 Routing Probabilities at Georgiana Slough Junction (with 95% C.I.s)", cex = 1.3, side = 1, line = 0.2, adj = 0)
if (nrow(detects_study[is.na(detects_study$DateTime_PST)==F,]) == 0){
plot(1:2, type = "n",xaxt = "n", yaxt = "n",
xlab = "Range of days study fish were present at Georgiana Sl Junction",
ylab = "Routing probability into Georgiana Slough at the junction")
text(1.5,1.5, labels = "NO DETECTIONS YET", cex = 2)
} else if(route_results_possible == F){
plot(1:2, type = "n",xaxt = "n", yaxt = "n",
xlab = "Range of days study fish were present at Georgiana Sl Junction",
ylab = "Routing probability into Georgiana Slough at the junction")
text(1.5,1.5, labels = "TOO FEW DETECTIONS", cex = 2)
} else {
library(repmis)
trytest <- try(source_data("https://code.usgs.gov/crrl_qfes/Enhanced_Acoustic_Telemetry_Project/raw/master/EAT_data_2023.Rdata?raw=True"))
if (inherits(trytest, "try-error")){
plot(1:2, type = "n",xaxt = "n", yaxt = "n",
xlab = "Range of days study fish were present at Georgiana Sl Junction",
ylab = "Routing probability into Georgiana Slough at the junction")
text(1.5,1.5, labels = "ERROR DOWNLOADING STARS", cex = 2)
} else {
detects_5 <- detects_study %>% filter(general_location == "SacRiverWalnutGrove_2")
detects_5 <- detects_5 %>%
dplyr::left_join(., detects_5 %>%
group_by(TagCode) %>%
summarise(first_detect = min(DateTime_PST))) %>%
mutate(Day = as.Date(first_detect, tz = "Etc/GMT+8"))
tagcount5 <- detects_5 %>%
group_by(Day) %>%
summarise(unique_tags = length(unique(TagCode)))
tagcount5$density <- tagcount5$unique_tags / sum(tagcount5$unique_tags)
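# Normalize daily first-arrival counts to a density (sums to 1) so the
# histogram shares a scale with the STARS routing-probability ribbon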
test2 <- as.data.frame(test2)
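# test2 and psi_GeoCond are presumably loaded into the workspace by the
# source_data() call above (STARS/Enhanced Acoustic Telemetry workspace)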
# first, find min and max arrivals at georg for a study
min_georg <- as.Date(format(min(test2[test2$general_location %in% c("Sac_BlwGeorgiana", "Sac_BlwGeorgiana2","Georg_Sl_1",
"Georgiana_Slough2"),"DateTime_PST"]), "%Y-%m-%d"))
max_georg <- as.Date(format(max(test2[test2$general_location %in% c("Sac_BlwGeorgiana", "Sac_BlwGeorgiana2","Georg_Sl_1",
"Georgiana_Slough2"),"DateTime_PST"]), "%Y-%m-%d"))
psi_study <- psi_GeoCond[psi_GeoCond$Date <= max_georg & psi_GeoCond$Date >=min_georg-1,]
ggplot()+
geom_bar(data = tagcount5, aes(x = Day, y = density), alpha = 0.4, stat = "identity", fill = "red") +
geom_ribbon(data = psi_study, aes(x=Date, ymin = psi_geo.10, ymax = psi_geo.90), alpha = 0.3) +
geom_line(data = psi_study, aes(x=Date, y=psi_geo.50), size = 1.2) +
geom_point(aes(x = median(rep(tagcount5$Day, tagcount5$unique_tags)), y = tail(results_short$Estimate,1)/100), size = 4)+
geom_errorbar(width = 0.2, size = 1.2, aes(ymin = tail(results_short$`95% lower C.I.`,1)/100, ymax = tail(results_short$`95% upper C.I.`,1)/100, x = median(rep(tagcount5$Day, tagcount5$unique_tags))))+
xlim(c(min_georg, max_georg))+
xlab("Range of days study fish were present at Georgiana Sl Junction") +
geom_hline(yintercept=0, linetype="dashed") +
geom_segment(aes(x=median(rep(tagcount5$Day, tagcount5$unique_tags)),y =-0.01,xend=median(rep(tagcount5$Day, tagcount5$unique_tags)),yend =0.01)) +
geom_text(aes(x=median(rep(tagcount5$Day, tagcount5$unique_tags)),y =-0.02, label = "median"), color = "red")+
scale_y_continuous(name = "Routing probability into Georgiana Slough at the junction",limits = c(-0.02,1), sec.axis = dup_axis(name="Histogram of arrivals to junction")) +
theme_bw()+
theme(axis.text.y.right = element_text(color = "red"), axis.title.y.right = element_text(color = "red"))
}
}
3.5 STARS prediction (ribbon) vs. empirical (point) estimate of Routing Probability at Georgiana Slough Junction
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
try(Delta <- read.csv("Delta_surv.csv", stringsAsFactors = F))
if(nrow(detects_study[is.na(detects_study$DateTime_PST) == F,]) == 0){
WR.surv1 <- data.frame("Measure"=NA, "Estimate"="NO DETECTIONS YET", "SE"=NA, "95% lower C.I."=NA, "95% upper C.I."=NA)
colnames(WR.surv1) <- c("Measure", "Estimate", "SE", "95% lower C.I.", "95% upper C.I.")
print(kable(WR.surv1, row.names = F, "html", caption = "3.6 Minimum through-Delta survival: City of Sacramento to Benicia (using CJS survival model)") %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"), full_width = F, position = "left"))
} else {
test4 <- detects_study[detects_study$general_location %in% c("TowerBridge", "I80-50_Br", "Benicia_west", "Benicia_east"),]
if(nrow(test4[test4$general_location =="Benicia_west",]) == 0 | nrow(test4[test4$general_location =="Benicia_east",]) == 0){
WR.surv1 <- data.frame("Measure"=NA, "Estimate"="NOT ENOUGH DETECTIONS", "SE"=NA, "95% lower C.I."=NA, "95% upper C.I."=NA)
colnames(WR.surv1) <- c("Measure", "Estimate", "SE", "95% lower C.I.", "95% upper C.I.")
print(kable(WR.surv1, row.names = F, "html", caption = "3.6 Minimum through-Delta survival: City of Sacramento to Benicia (using CJS survival model)") %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"), full_width = F, position = "left"))
} else {
# calculate mean and SD travel time
sac <- test4[test4$general_location %in% c("TowerBridge", "I80-50_Br"),]
ben <- test4[test4$general_location %in% c("Benicia_west", "Benicia_east"),]
travel_sac <- aggregate(list(first_detect_sac = sac$DateTime_PST), by = list(Release = sac$Release, TagCode = sac$TagCode), min)
travel_ben <- aggregate(list(first_detect_ben = ben$DateTime_PST), by = list(Release = ben$Release, TagCode = ben$TagCode), min)
travel <- merge(travel_sac, travel_ben, by = c("Release","TagCode"))
travel$days <- as.numeric(difftime(travel$first_detect_ben, travel$first_detect_sac, units = "days"))
if(nrow(travel)>0){
travel_final <- aggregate(list(mean_travel_time = travel$days), by = list(Release = travel$Release), mean)
travel_final <- merge(travel_final, aggregate(list(sd_travel_time = travel$days), by = list(Release = travel$Release), sd))
travel_final <- merge(travel_final, aggregate(list(n = travel$days), by = list(Release = travel$Release), length))
travel_final <- rbind(travel_final, data.frame(Release = "ALL", mean_travel_time = mean(travel$days), sd_travel_time = sd(travel$days), n = nrow(travel)))
}else{
travel_final <- data.frame(Release = c(unique(detects_study$Release),"ALL"), mean_travel_time = NA, sd_travel_time = NA, n = NA)
}
inp <- as.data.frame(reshape2::dcast(test4, TagCode ~ general_location, fun.aggregate = length))
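# Detection matrix: one row per TagCode, one column per general_location (cell = detection count)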
# add together detections at Tower and I80 to ensure good detection entering Delta
if("I80-50_Br" %in% colnames(inp) & "TowerBridge" %in% colnames(inp)){
inp$`I80-50_Br` <- inp$`I80-50_Br` + inp$TowerBridge
} else if("TowerBridge" %in% colnames(inp)){
inp$`I80-50_Br` <- inp$TowerBridge
}
# Sort columns by river km in descending order; this also drops TowerBridge, which is no longer needed
inp <- inp[,c("TagCode","I80-50_Br", "Benicia_east", "Benicia_west")]
# Count number of genlocs
gen_loc_sites <- ncol(inp)-1
inp <- inp[,c(1,order(names(inp[,2:(gen_loc_sites+1)]), decreasing = T)+1)]
inp <- merge(study_tagcodes, inp, by = "TagCode", all.x = T)
inp2 <- inp[,(ncol(inp)-gen_loc_sites+1):ncol(inp)]
inp2[is.na(inp2)] <- 0
inp2[inp2 > 0] <- 1
inp <- cbind(inp, inp2)
groups <- as.character(sort(unique(inp$Release)))
groups_w_detects <- names(table(detects_study[which(detects_study$river_km < 53),"Release"]))
inp[,groups] <- 0
for(i in groups){
inp[as.character(inp$Release) == i, i] <- 1
}
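# Build MARK encounter histories: a leading "1" marks the release occasion,
# then one 0/1 digit per site in the column order set above
# (e.g., "1100" = detected entering the Delta but never at Benicia)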
inp$inp_final <- paste("1",apply(inp2, 1, paste, collapse=""),sep="")
if(length(groups) > 1){
# make sure factor levels put a release with detections first; if the first release in factor order has zero detections, the model goes haywire
inp.df <- data.frame(ch = as.character(inp$inp_final), freq = 1, rel = inp$Release, stringsAsFactors = F)
WR.process <- process.data(inp.df, model="CJS", begin.time=1)
WR.ddl <- make.design.data(WR.process)
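# Phi(~time), p(~time): reach-specific survival and detection probability,
# pooled across release groups; this model supplies the "ALL" estimate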
WR.mark.all <- mark(WR.process, WR.ddl, model.parameters=list(Phi=list(formula=~time),p=list(formula=~time)),
silent = T, output = F)
inp.df <- inp.df[inp.df$rel %in% groups_w_detects,]
inp.df$rel <- factor(inp.df$rel, levels = groups_w_detects)
if(length(groups_w_detects) > 1){
WR.process <- process.data(inp.df, model="CJS", begin.time=1, groups = "rel")
WR.ddl <- make.design.data(WR.process)
WR.mark.rel <- mark(WR.process, WR.ddl, model.parameters=list(Phi=list(formula=~time*rel),p=list(formula=~time)),
silent = T, output = F)
} else {
WR.process <- process.data(inp.df, model="CJS", begin.time=1)
WR.ddl <- make.design.data(WR.process)
WR.mark.rel <- mark(WR.process, WR.ddl, model.parameters=list(Phi=list(formula=~time),p=list(formula=~time)),
silent = T, output = F)
}
WR.surv <- cbind(Release = "ALL",round(WR.mark.all$results$real[2,c("estimate", "se", "lcl", "ucl")] * 100,1))
WR.surv.rel <- cbind(Release = groups_w_detects,
round(WR.mark.rel$results$real[seq(from=2,to=length(groups_w_detects)*3,by = 3),
c("estimate", "se", "lcl", "ucl")] * 100,1))
WR.surv.rel <- merge(WR.surv.rel, data.frame(Release = groups), all.y = T)
WR.surv.rel[is.na(WR.surv.rel$estimate),"estimate"] <- 0
WR.surv <- rbind(WR.surv, WR.surv.rel)
} else {
inp.df <- data.frame(ch = as.character(inp$inp_final), freq = 1, stringsAsFactors = F)
WR.process <- process.data(inp.df, model="CJS", begin.time=1)
WR.ddl <- make.design.data(WR.process)
WR.mark.all <- mark(WR.process, WR.ddl, model.parameters=list(Phi=list(formula=~time),p=list(formula=~time)),
silent = T, output = F)
WR.surv <- cbind(Release = c("ALL", groups),round(WR.mark.all$results$real[2,c("estimate", "se", "lcl", "ucl")] * 100,1))
}
WR.surv1 <- WR.surv
colnames(WR.surv1)[1] <- "Release"
WR.surv1 <- merge(WR.surv1, travel_final, by = "Release", all.x = T)
WR.surv1$mean_travel_time <- round(WR.surv1$mean_travel_time,1)
WR.surv1$sd_travel_time <- round(WR.surv1$sd_travel_time,1)
colnames(WR.surv1) <- c("Release", "Survival (%)", "SE", "95% lower C.I.",
"95% upper C.I.", "Mean Delta passage (days)", "SD of Delta Passage (days)","Count")
#colnames(WR.surv1) <- c("Release Group", "Survival (%)", "SE", "95% lower C.I.", "95% upper C.I.")
print(kable(WR.surv1, row.names = F, "html", caption = "3.6 Minimum through-Delta survival, and travel time: City of Sacramento to Benicia (using CJS survival model)") %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"), full_width = F, position = "left"))
if(exists("Delta")==T & is.numeric(WR.surv1[1,2])){
reltimes <- aggregate(list(RelDT = study_tagcodes$release_time), by = list(Release = study_tagcodes$Release), FUN = mean)
reltimes <- rbind(reltimes, data.frame(Release = "ALL", RelDT = mean(study_tagcodes$release_time)))
# Assign whether the results are tentative or final
quality <- "tentative"
if(endtime < as.Date(format(Sys.time(), "%Y-%m-%d"))){
quality <- "final"
}
WR.surv <- merge(WR.surv, reltimes, by = "Release", all.x = T)
WR.surv$RelDT <- as.POSIXct(WR.surv$RelDT, origin = "1970-01-01")
Delta$RelDT <- as.POSIXct(Delta$RelDT)
# remove old benicia record for this studyID
Delta <- Delta[!Delta$StudyID %in% "Spring_Pulse_2024",]
Delta <- rbind(Delta, data.frame(WR.surv, StudyID = "Spring_Pulse_2024", data_quality = quality))
write.csv(Delta, "Delta_surv.csv", row.names = F, quote = F)
}
}
}
3.6 Minimum through-Delta survival, and travel time: City of Sacramento to Benicia (using CJS survival model)
Release | Survival (%) | SE | 95% lower C.I. | 95% upper C.I. | Mean Delta passage (days) | SD of Delta Passage (days) | Count |
---|---|---|---|---|---|---|---|
1.Before pulse 1 - CDFW | 24.7 | 4.6 | 16.9 | 34.7 | 4.1 | 1.2 | 22 |
2.During pulse 1 - CDFW | 24.1 | 4.6 | 16.3 | 34.2 | 3.6 | 0.7 | 21 |
3.After pulse 1 - CDFW | 26.3 | 10.1 | 11.4 | 49.8 | 3.4 | 1.2 | 5 |
4.During pulse 2 - UCSC | 25.0 | 9.7 | 10.8 | 47.8 | 3.1 | 0.1 | 5 |
5.After pulse 2 - UCSC | 14.3 | 13.2 | 2.0 | 58.1 | 2.9 | NA | 1 |
6.During pulse 3 - UCSC | 15.7 | 5.1 | 8.0 | 28.4 | 3.0 | 0.6 | 8 |
7.After pulse 3 - UCSC | 20.0 | 17.9 | 2.7 | 69.1 | 3.2 | NA | 1 |
ALL | 22.7 | 2.5 | 18.1 | 28.0 | 3.6 | 1.0 | 63 |
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
try(benicia <- read.csv("benicia_surv.csv", stringsAsFactors = F))
detects_benicia <- detects_study[detects_study$general_location %in% c("Benicia_west", "Benicia_east"),]
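# endtime: the earlier of today and the expected end of tag life for the latest release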
endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d")), max(as.Date(detects_study$release_time)+(as.numeric(detects_study$tag_life))))
if(nrow(detects_benicia) == 0){
if(as.numeric(difftime(Sys.time(), min(detects_study$RelDT), units = "days"))>30){
WR.surv <- data.frame("Release"="ALL", "estimate"=0, "se"=NA, "lcl"=NA, "ucl"=NA, "Detection_efficiency"=NA)
} else {
WR.surv <- data.frame("Release"=NA, "estimate"="NO DETECTIONS YET", "se"=NA, "lcl"=NA, "ucl"=NA, "Detection_efficiency"=NA)
}
WR.surv1 <- WR.surv
colnames(WR.surv1) <- c("Release Group", "Survival (%)", "SE", "95% lower C.I.", "95% upper C.I.", "Detection efficiency (%)")
print(kable(WR.surv1, row.names = F, "html", caption = "3.7 Minimum survival to Benicia Bridge East Span (using CJS survival model)") %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"), full_width = F, position = "left"))
} else if(length(table(detects_benicia$general_location)) == 1){
if(as.numeric(difftime(Sys.time(), min(detects_study$RelDT), units = "days"))>30){
WR.surv <- data.frame("Release"="ALL", "estimate"=round(length(unique(detects_benicia$TagCode))/length(unique(detects_study$TagCode))*100,1),
"se"=NA, "lcl"=NA, "ucl"=NA, "Detection_efficiency"=NA)
} else {
WR.surv <- data.frame("Release" = NA, "estimate" = "NOT ENOUGH DETECTIONS", "se" = NA, "lcl" = NA, "ucl" = NA, "Detection_efficiency" = NA)
}
WR.surv1 <- WR.surv
colnames(WR.surv1) <- c("Release Group", "Survival (%)", "SE", "95% lower C.I.", "95% upper C.I.", "Detection efficiency (%)")
print(kable(WR.surv1, row.names = F, "html", caption = "3.7 Minimum survival to Benicia Bridge East Span (using CJS survival model)") %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"), full_width = F, position = "left"))
} else {
# Only do survival to Benicia here
test3 <- detects_study[which(detects_study$river_km < 53),]
test3_w_salt <- detects_study[which(detects_study$river_km < 53 | detects_study$river_km > 456),]
# calculate mean and SD travel time
travel <- aggregate(list(first_detect = test3$DateTime_PST), by = list(Release = test3$Release, TagCode = test3$TagCode, RelDT = test3$RelDT), min)
travel$days <- as.numeric(difftime(travel$first_detect, travel$RelDT, units = "days"))
travel_final <- aggregate(list(mean_travel_time = travel$days), by = list(Release = travel$Release), mean)
travel_final <- merge(travel_final, aggregate(list(sd_travel_time = travel$days), by = list(Release = travel$Release), sd))
travel_final <- merge(travel_final, aggregate(list(n = travel$days), by = list(Release = travel$Release), length))
travel_final <- rbind(travel_final, data.frame(Release = "ALL", mean_travel_time = mean(travel$days), sd_travel_time = sd(travel$days), n = nrow(travel)))
# Create inp for survival estimation
inp <- as.data.frame(reshape2::dcast(test3_w_salt, TagCode ~ river_km, fun.aggregate = length))
# Sort columns by river km in descending order
# Count number of genlocs
gen_loc_sites <- ncol(inp)-1
inp <- inp[,c(1,order(as.numeric(names(inp[,2:(gen_loc_sites+1)])), decreasing = T)+1)]
inp <- merge(study_tagcodes, inp, by = "TagCode", all.x = T)
inp2 <- inp[,(ncol(inp)-gen_loc_sites+1):ncol(inp)]
inp2[is.na(inp2)] <- 0
inp2[inp2 > 0] <- 1
inp <- cbind(inp, inp2)
groups <- as.character(sort(unique(inp$Release)))
groups_w_detects <- names(table(test3_w_salt$Release))
inp[,groups] <- 0
for(i in groups){
inp[as.character(inp$Release) == i, i] <- 1
}
inp$inp_final <- paste("1",apply(inp2, 1, paste, collapse=""),sep="")
if(length(groups) > 1){
# make sure factor levels put a release with detections first; if the first release in factor order has zero detections, the model goes haywire
inp.df <- data.frame(ch = as.character(inp$inp_final), freq = 1, rel = inp$Release, stringsAsFactors = F)
WR.process <- process.data(inp.df, model="CJS", begin.time=1)
WR.ddl <- make.design.data(WR.process)
WR.mark.all <- mark(WR.process, WR.ddl, model.parameters=list(Phi=list(formula=~time),p=list(formula=~time)), silent = T, output = F)
inp.df <- inp.df[inp.df$rel %in% groups_w_detects,]
inp.df$rel <- factor(inp.df$rel, levels = groups_w_detects)
if(length(groups_w_detects) > 1){
WR.process <- process.data(inp.df, model="CJS", begin.time=1, groups = "rel")
WR.ddl <- make.design.data(WR.process)
WR.mark.rel <- mark(WR.process, WR.ddl, model.parameters=list(Phi=list(formula=~time*rel),p=list(formula=~time)), silent = T, output = F)
} else {
WR.process <- process.data(inp.df, model="CJS", begin.time=1)
WR.ddl <- make.design.data(WR.process)
WR.mark.rel <- mark(WR.process, WR.ddl, model.parameters=list(Phi=list(formula=~time),p=list(formula=~time)), silent = T, output = F)
}
WR.surv <- cbind(Release = "ALL",round(WR.mark.all$results$real[2,c("estimate", "se", "lcl", "ucl")] * 100,1))
WR.surv.rel <- cbind(Release = groups_w_detects, round(WR.mark.rel$results$real[seq(from=2,to=length(groups_w_detects)*3,by = 3),
c("estimate", "se", "lcl", "ucl")] * 100,1))
WR.surv.rel <- merge(WR.surv.rel, data.frame(Release = groups), all.y = T)
WR.surv.rel[is.na(WR.surv.rel$estimate),"estimate"] <- 0
WR.surv <- rbind(WR.surv, WR.surv.rel)
} else {
inp.df <- data.frame(ch = as.character(inp$inp_final), freq = 1, stringsAsFactors = F)
WR.process <- process.data(inp.df, model="CJS", begin.time=1)
WR.ddl <- make.design.data(WR.process)
WR.mark.all <- mark(WR.process, WR.ddl, model.parameters=list(Phi=list(formula=~time),p=list(formula=~time)), silent = T, output = F)
WR.surv <- cbind(Release = c("ALL", groups),round(WR.mark.all$results$real[1,c("estimate", "se", "lcl", "ucl")] * 100,1))
}
WR.surv$Detection_efficiency <- NA
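# p for the Benicia east line (row gen_loc_sites+2 of the real-parameter table),
# estimable because the west line sits just downstream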
WR.surv[1,"Detection_efficiency"] <- round(WR.mark.all$results$real[gen_loc_sites+2,"estimate"] * 100,1)
WR.surv1 <- WR.surv
colnames(WR.surv1)[1] <- "Release"
WR.surv1 <- merge(WR.surv1, travel_final, by = "Release", all.x = T)
WR.surv1$mean_travel_time <- round(WR.surv1$mean_travel_time,1)
WR.surv1$sd_travel_time <- round(WR.surv1$sd_travel_time,1)
colnames(WR.surv1) <- c("Release", "Survival (%)", "SE", "95% lower C.I.",
"95% upper C.I.", "Detection efficiency (%)", "Mean time to Benicia (days)", "SD of time to Benicia (days)", "Count")
#colnames(WR.surv1) <- c("Release Group", "Survival (%)", "SE", "95% lower C.I.", "95% upper C.I.", "Detection efficiency (%)")
print(kable(WR.surv1, row.names = F, "html", caption = "3.7 Minimum survival to Benicia Bridge East Span (using CJS survival model), and travel time") %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"), full_width = F, position = "left"))
}
3.7 Minimum survival to Benicia Bridge East Span (using CJS survival model), and travel time
Release | Survival (%) | SE | 95% lower C.I. | 95% upper C.I. | Detection efficiency (%) | Mean time to Benicia (days) | SD of time to Benicia (days) | Count |
---|---|---|---|---|---|---|---|---|
1.Before pulse 1 - CDFW | 12.7 | 2.6 | 8.5 | 18.6 | NA | 10.1 | 4.4 | 22 |
2.During pulse 1 - CDFW | 20.6 | 4.1 | 13.7 | 29.7 | NA | 9.6 | 4.8 | 21 |
3.After pulse 1 - CDFW | 2.5 | 1.1 | 1.0 | 5.9 | NA | 11.1 | 3.5 | 5 |
4.During pulse 2 - UCSC | 10.7 | 4.6 | 4.5 | 23.4 | NA | 7.7 | 2.6 | 5 |
5.After pulse 2 - UCSC | 0.9 | 0.9 | 0.1 | 6.3 | NA | 7.0 | NA | 1 |
6.During pulse 3 - UCSC | 4.0 | 1.4 | 2.0 | 7.8 | NA | 6.3 | 0.6 | 8 |
7.After pulse 3 - UCSC | 0.9 | 0.9 | 0.1 | 5.9 | NA | 10.5 | NA | 1 |
ALL | 4.7 | 0.7 | 3.4 | 6.3 | 100 | 9.3 | 4.2 | 63 |
if(exists("benicia")==T & is.numeric(WR.surv1[1,2])){
# Find mean release time per release group, and ALL
reltimes <- aggregate(list(RelDT = study_tagcodes$release_time), by = list(Release = study_tagcodes$Release), FUN = mean)
reltimes <- rbind(reltimes, data.frame(Release = "ALL", RelDT = mean(study_tagcodes$release_time)))
# Assign whether the results are tentative or final
quality <- "tentative"
if(endtime < as.Date(format(Sys.time(), "%Y-%m-%d"))){
quality <- "final"
}
WR.surv <- merge(WR.surv, reltimes, by = "Release", all.x = T)
WR.surv$RelDT <- as.POSIXct(WR.surv$RelDT, origin = "1970-01-01")
benicia$RelDT <- as.POSIXct(benicia$RelDT)
# remove old benicia record for this studyID
benicia <- benicia[!benicia$StudyID == "Spring_Pulse_2024",]
benicia <- rbind(benicia, data.frame(WR.surv, StudyID = "Spring_Pulse_2024", data_quality = quality))
write.csv(benicia, "benicia_surv.csv", row.names = F, quote = F)
}
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
if(nrow(detects_study[is.na(detects_study$DateTime_PST)==F,]) == 0){
"No detections yet"
} else {
arrivals <- detects_study %>%
group_by(general_location, TagCode) %>%
summarise(DateTime_PST = min(DateTime_PST)) %>%
arrange(TagCode)
tag_stats <- arrivals %>%
group_by(general_location) %>%
summarise(First_arrival = min(DateTime_PST),
Mean_arrival = mean(DateTime_PST),
Last_arrival = max(DateTime_PST),
Fish_count = length(unique(TagCode))) %>%
mutate(Percent_arrived = round(Fish_count/nrow(study_tagcodes) * 100,2)) %>%
dplyr::left_join(., unique(detects_study[,c("general_location", "river_km")])) %>%
arrange(desc(river_km)) %>%
mutate(First_arrival = format(First_arrival, tz = "Etc/GMT+8"),
Mean_arrival = format(Mean_arrival, tz = "Etc/GMT+8"),
Last_arrival = format(Last_arrival, tz = "Etc/GMT+8")) %>%
na.omit()
print(kable(tag_stats, row.names = F,
caption = "4.1 Detections for all releases combined",
"html") %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"), full_width = F, position = "left"))
count <- 0
for(j in sort(unique(study_tagcodes$Release))){
if(nrow(detects_study[detects_study$Release == j,]) > 0){
count <- count + 1
arrivals1 <- detects_study %>%
filter(Release == j) %>%
group_by(general_location, TagCode) %>%
summarise(DateTime_PST = min(DateTime_PST)) %>%
arrange(TagCode)
rel_count <- nrow(study_tagcodes[study_tagcodes$Release == j,])
tag_stats1 <- arrivals1 %>%
group_by(general_location) %>%
summarise(First_arrival = min(DateTime_PST),
Mean_arrival = mean(DateTime_PST),
Last_arrival = max(DateTime_PST),
Fish_count = length(unique(TagCode))) %>%
mutate(Percent_arrived = round(Fish_count/rel_count * 100,2)) %>%
dplyr::left_join(., unique(detects_study[,c("general_location", "river_km")])) %>%
arrange(desc(river_km)) %>%
mutate(First_arrival = format(First_arrival, tz = "Etc/GMT+8"),
Mean_arrival = format(Mean_arrival, tz = "Etc/GMT+8"),
Last_arrival = format(Last_arrival, tz = "Etc/GMT+8")) %>%
na.omit()
final_stats <- kable(tag_stats1, row.names = F,
caption = paste("4.2.", count, " Detections for ", j, " release groups", sep = ""),
"html")
print(kable_styling(final_stats, bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"), full_width = F, position = "left"))
} else {
cat("\n\n\\pagebreak\n")
print(paste("No detections for",j,"release group yet", sep=" "), quote = F)
cat("\n\n\\pagebreak\n")
}
}
}
4.1 Detections for all releases combined
general_location | First_arrival | Mean_arrival | Last_arrival | Fish_count | Percent_arrived | river_km |
---|---|---|---|---|---|---|
Blw_Salt_RT | 2024-04-18 11:55:59 | 2024-05-08 09:39:44 | 2024-06-03 17:35:11 | 838 | 60.95 | 457.000 |
MeridianBr | 2024-04-20 01:48:21 | 2024-05-04 05:25:55 | 2024-06-01 18:07:26 | 346 | 25.16 | 290.848 |
TowerBridge | 2024-04-21 15:19:17 | 2024-05-07 09:02:09 | 2024-06-05 16:38:45 | 219 | 15.93 | 172.000 |
I80-50_Br | 2024-04-21 12:24:26 | 2024-05-05 02:56:10 | 2024-06-05 17:02:13 | 259 | 18.84 | 170.748 |
SacRiverWalnutGrove_2 | 2024-04-22 09:59:01 | 2024-05-03 15:50:32 | 2024-06-11 00:54:20 | 185 | 13.45 | 120.300 |
Georg_Sl_1 | 2024-04-22 13:39:09 | 2024-04-30 07:47:08 | 2024-05-26 12:39:01 | 17 | 1.24 | 119.600 |
Sac_BlwGeorgiana | 2024-04-22 10:25:30 | 2024-05-02 06:15:41 | 2024-06-06 12:14:19 | 151 | 10.98 | 119.058 |
Sac_BlwGeorgiana2 | 2024-04-22 10:41:11 | 2024-05-02 03:58:47 | 2024-06-06 12:25:20 | 154 | 11.20 | 118.398 |
Benicia_east | 2024-04-24 09:04:12 | 2024-05-07 11:50:42 | 2024-06-08 20:46:43 | 63 | 4.58 | 52.240 |
Benicia_west | 2024-04-24 09:10:35 | 2024-05-08 13:55:50 | 2024-06-08 20:58:07 | 49 | 3.56 | 52.040 |
4.2.1 Detections for 1.Before pulse 1 - CDFW release group
general_location | First_arrival | Mean_arrival | Last_arrival | Fish_count | Percent_arrived | river_km |
---|---|---|---|---|---|---|
Blw_Salt_RT | 2024-04-18 11:55:59 | 2024-04-18 18:24:18 | 2024-05-01 21:53:40 | 147 | 73.13 | 457.000 |
MeridianBr | 2024-04-20 01:48:21 | 2024-04-22 22:05:58 | 2024-05-07 21:26:27 | 97 | 48.26 | 290.848 |
TowerBridge | 2024-04-21 15:19:17 | 2024-04-24 03:36:07 | 2024-05-03 02:22:47 | 61 | 30.35 | 172.000 |
I80-50_Br | 2024-04-21 12:24:26 | 2024-04-24 07:32:46 | 2024-05-09 13:20:01 | 84 | 41.79 | 170.748 |
SacRiverWalnutGrove_2 | 2024-04-22 09:59:01 | 2024-04-24 19:23:41 | 2024-05-10 07:24:34 | 72 | 35.82 | 120.300 |
Georg_Sl_1 | 2024-04-22 13:39:09 | 2024-04-24 11:36:08 | 2024-05-02 21:56:34 | 10 | 4.98 | 119.600 |
Sac_BlwGeorgiana | 2024-04-22 10:25:30 | 2024-04-24 21:36:47 | 2024-05-10 07:58:04 | 61 | 30.35 | 119.058 |
Sac_BlwGeorgiana2 | 2024-04-22 10:41:11 | 2024-04-24 21:29:55 | 2024-05-10 08:08:25 | 62 | 30.85 | 118.398 |
Benicia_east | 2024-04-24 09:04:12 | 2024-04-28 12:33:23 | 2024-05-12 12:48:12 | 22 | 10.95 | 52.240 |
Benicia_west | 2024-04-24 09:10:35 | 2024-04-29 10:53:40 | 2024-05-12 12:54:31 | 16 | 7.96 | 52.040 |
4.2.2 Detections for 2.During pulse 1 - CDFW release group
general_location | First_arrival | Mean_arrival | Last_arrival | Fish_count | Percent_arrived | river_km |
---|---|---|---|---|---|---|
Blw_Salt_RT | 2024-04-23 13:39:44 | 2024-04-24 23:33:39 | 2024-05-13 21:44:38 | 82 | 41.0 | 457.000 |
MeridianBr | 2024-04-25 01:38:13 | 2024-04-28 09:55:45 | 2024-05-10 00:09:19 | 103 | 51.5 | 290.848 |
TowerBridge | 2024-04-26 10:42:27 | 2024-04-30 01:20:17 | 2024-05-11 14:43:54 | 59 | 29.5 | 172.000 |
I80-50_Br | 2024-04-26 11:07:08 | 2024-04-29 18:31:09 | 2024-05-11 15:09:40 | 81 | 40.5 | 170.748 |
SacRiverWalnutGrove_2 | 2024-04-27 10:44:36 | 2024-04-30 10:54:59 | 2024-05-10 17:34:17 | 60 | 30.0 | 120.300 |
Georg_Sl_1 | 2024-04-28 04:14:49 | 2024-04-30 23:16:09 | 2024-05-03 23:25:08 | 4 | 2.0 | 119.600 |
Sac_BlwGeorgiana | 2024-04-27 11:11:24 | 2024-04-30 08:48:22 | 2024-05-10 17:55:00 | 52 | 26.0 | 119.058 |
Sac_BlwGeorgiana2 | 2024-04-27 11:21:20 | 2024-04-30 07:11:25 | 2024-05-10 18:08:51 | 54 | 27.0 | 118.398 |
Benicia_east | 2024-04-29 09:49:24 | 2024-05-03 17:29:07 | 2024-05-16 12:52:58 | 21 | 10.5 | 52.240 |
Benicia_west | 2024-04-29 09:53:06 | 2024-05-03 14:23:43 | 2024-05-16 12:58:20 | 18 | 9.0 | 52.040 |
4.2.3 Detections for 3.After pulse 1 - CDFW release group
general_location | First_arrival | Mean_arrival | Last_arrival | Fish_count | Percent_arrived | river_km |
---|---|---|---|---|---|---|
Blw_Salt_RT | 2024-04-30 15:10:26 | 2024-05-01 06:00:11 | 2024-05-07 14:05:58 | 189 | 94.5 | 457.000 |
MeridianBr | 2024-05-02 16:34:49 | 2024-05-04 07:39:05 | 2024-05-11 22:15:38 | 42 | 21.0 | 290.848 |
TowerBridge | 2024-05-04 07:36:14 | 2024-05-08 08:31:39 | 2024-05-26 07:32:04 | 17 | 8.5 | 172.000 |
I80-50_Br | 2024-05-04 08:07:19 | 2024-05-07 03:01:43 | 2024-05-15 04:39:37 | 16 | 8.0 | 170.748 |
SacRiverWalnutGrove_2 | 2024-05-05 01:59:37 | 2024-05-07 10:27:56 | 2024-05-14 03:41:46 | 12 | 6.0 | 120.300 |
Georg_Sl_1 | 2024-05-05 02:58:11 | 2024-05-05 02:58:11 | 2024-05-05 02:58:11 | 1 | 0.5 | 119.600 |
Sac_BlwGeorgiana | 2024-05-05 19:54:00 | 2024-05-07 18:23:55 | 2024-05-14 04:09:41 | 11 | 5.5 | 119.058 |
Sac_BlwGeorgiana2 | 2024-05-05 20:09:39 | 2024-05-07 18:41:51 | 2024-05-14 04:25:26 | 11 | 5.5 | 118.398 |
Benicia_east | 2024-05-08 07:54:36 | 2024-05-11 21:54:34 | 2024-05-17 02:18:20 | 5 | 2.5 | 52.240 |
Benicia_west | 2024-05-08 13:42:22 | 2024-05-12 20:02:07 | 2024-05-17 02:21:52 | 2 | 1.0 | 52.040 |
4.2.4 Detections for 4.During pulse 2 - UCSC release group
general_location | First_arrival | Mean_arrival | Last_arrival | Fish_count | Percent_arrived | river_km |
---|---|---|---|---|---|---|
Blw_Salt_RT | 2024-05-07 09:24:23 | 2024-05-07 20:25:58 | 2024-05-08 10:15:00 | 40 | 17.86 | 457.000 |
MeridianBr | 2024-05-08 23:57:06 | 2024-05-10 04:47:14 | 2024-05-16 18:33:05 | 22 | 9.82 | 290.848 |
TowerBridge | 2024-05-10 09:20:55 | 2024-05-11 14:31:55 | 2024-05-17 14:15:49 | 19 | 8.48 | 172.000 |
I80-50_Br | 2024-05-10 09:38:44 | 2024-05-11 14:27:44 | 2024-05-17 14:41:15 | 20 | 8.93 | 170.748 |
SacRiverWalnutGrove_2 | 2024-05-10 23:57:11 | 2024-05-12 12:47:48 | 2024-05-18 12:34:55 | 12 | 5.36 | 120.300 |
Sac_BlwGeorgiana | 2024-05-11 00:21:09 | 2024-05-12 13:13:22 | 2024-05-18 12:54:46 | 12 | 5.36 | 119.058 |
Sac_BlwGeorgiana2 | 2024-05-11 00:36:09 | 2024-05-12 13:31:50 | 2024-05-18 13:35:26 | 12 | 5.36 | 118.398 |
Benicia_east | 2024-05-13 11:29:04 | 2024-05-15 11:28:49 | 2024-05-20 16:06:46 | 5 | 2.23 | 52.240 |
Benicia_west | 2024-05-13 18:59:29 | 2024-05-16 07:45:21 | 2024-05-20 16:12:28 | 3 | 1.34 | 52.040 |
4.2.5 Detections for 5.After pulse 2 - UCSC release group
general_location | First_arrival | Mean_arrival | Last_arrival | Fish_count | Percent_arrived | river_km |
---|---|---|---|---|---|---|
Blw_Salt_RT | 2024-05-14 09:43:30 | 2024-05-15 02:38:32 | 2024-05-15 14:14:34 | 95 | 40.43 | 457.000 |
MeridianBr | 2024-05-16 12:19:54 | 2024-05-17 10:57:50 | 2024-05-18 00:17:15 | 13 | 5.53 | 290.848 |
TowerBridge | 2024-05-19 10:26:55 | 2024-05-20 08:42:16 | 2024-05-21 13:46:47 | 7 | 2.98 | 172.000 |
I80-50_Br | 2024-05-19 11:09:54 | 2024-05-20 09:10:25 | 2024-05-21 14:12:59 | 7 | 2.98 | 170.748 |
SacRiverWalnutGrove_2 | 2024-05-20 06:49:42 | 2024-05-20 16:46:30 | 2024-05-21 11:42:22 | 3 | 1.28 | 120.300 |
Sac_BlwGeorgiana | 2024-05-20 08:11:26 | 2024-05-20 17:41:14 | 2024-05-21 11:58:19 | 3 | 1.28 | 119.058 |
Sac_BlwGeorgiana2 | 2024-05-20 08:20:16 | 2024-05-20 17:51:34 | 2024-05-21 12:08:13 | 3 | 1.28 | 118.398 |
Benicia_east | 2024-05-22 08:51:48 | 2024-05-22 08:51:48 | 2024-05-22 08:51:48 | 1 | 0.43 | 52.240 |
Benicia_west | 2024-05-22 09:01:00 | 2024-05-22 09:01:00 | 2024-05-22 09:01:00 | 1 | 0.43 | 52.040 |
4.2.6 Detections for 6.During pulse 3 - UCSC release group
general_location | First_arrival | Mean_arrival | Last_arrival | Fish_count | Percent_arrived | river_km |
---|---|---|---|---|---|---|
Blw_Salt_RT | 2024-05-21 09:29:02 | 2024-05-21 22:35:57 | 2024-06-03 17:35:11 | 175 | 87.94 | 457.000 |
MeridianBr | 2024-05-22 22:51:10 | 2024-05-23 17:12:30 | 2024-05-24 21:05:59 | 64 | 32.16 | 290.848 |
TowerBridge | 2024-05-24 08:41:33 | 2024-05-25 06:14:42 | 2024-05-27 02:38:55 | 51 | 25.63 | 172.000 |
I80-50_Br | 2024-05-24 09:04:02 | 2024-05-25 04:19:28 | 2024-05-26 09:36:03 | 46 | 23.12 | 170.748 |
SacRiverWalnutGrove_2 | 2024-05-25 01:04:39 | 2024-05-27 01:51:18 | 2024-06-11 00:54:20 | 24 | 12.06 | 120.300 |
Georg_Sl_1 | 2024-05-25 03:38:08 | 2024-05-25 20:08:34 | 2024-05-26 12:39:01 | 2 | 1.01 | 119.600 |
Sac_BlwGeorgiana | 2024-05-25 14:44:07 | 2024-05-26 07:17:59 | 2024-05-26 22:15:27 | 10 | 5.03 | 119.058 |
Sac_BlwGeorgiana2 | 2024-05-25 14:54:29 | 2024-05-26 07:32:42 | 2024-05-26 22:38:02 | 10 | 5.03 | 118.398 |
Benicia_east | 2024-05-27 10:26:07 | 2024-05-28 09:16:39 | 2024-05-29 09:09:29 | 8 | 4.02 | 52.240 |
Benicia_west | 2024-05-27 10:29:32 | 2024-05-28 10:28:49 | 2024-05-29 09:11:37 | 8 | 4.02 | 52.040 |
4.2.7 Detections for 7.After pulse 3 - UCSC release group
general_location | First_arrival | Mean_arrival | Last_arrival | Fish_count | Percent_arrived | river_km |
---|---|---|---|---|---|---|
Blw_Salt_RT | 2024-05-29 09:53:22 | 2024-05-29 19:19:25 | 2024-05-30 20:24:50 | 110 | 94.83 | 457.000 |
MeridianBr | 2024-05-31 23:03:40 | 2024-06-01 08:30:19 | 2024-06-01 18:07:26 | 5 | 4.31 | 290.848 |
TowerBridge | 2024-06-02 19:36:36 | 2024-06-04 03:08:23 | 2024-06-05 16:38:45 | 5 | 4.31 | 172.000 |
I80-50_Br | 2024-06-02 19:58:38 | 2024-06-04 03:32:08 | 2024-06-05 17:02:13 | 5 | 4.31 | 170.748 |
SacRiverWalnutGrove_2 | 2024-06-03 13:27:50 | 2024-06-05 00:42:08 | 2024-06-06 11:56:27 | 2 | 1.72 | 120.300 |
Sac_BlwGeorgiana | 2024-06-03 13:49:18 | 2024-06-05 01:01:48 | 2024-06-06 12:14:19 | 2 | 1.72 | 119.058 |
Sac_BlwGeorgiana2 | 2024-06-03 14:12:07 | 2024-06-05 01:18:43 | 2024-06-06 12:25:20 | 2 | 1.72 | 118.398 |
Benicia_east | 2024-06-08 20:46:43 | 2024-06-08 20:46:43 | 2024-06-08 20:46:43 | 1 | 0.86 | 52.240 |
Benicia_west | 2024-06-08 20:58:07 | 2024-06-08 20:58:07 | 2024-06-08 20:58:07 | 1 | 0.86 | 52.040 |
library(dplyr)
library(dbplyr)
library(DBI)
library(odbc)
library(data.table)
# Create connection with cloud database
con <- dbConnect(odbc(),
Driver = "SQL Server",
Server = "calfishtrack-server.database.windows.net",
Database = "realtime_detections",
UID = "realtime_user",
PWD = "Pass@123",
Port = 1433)
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
# THIS CODE CHUNK WILL NOT WORK IF USING ONLY ERDDAP DATA, REQUIRES ACCESS TO LOCAL FILES
if(nrow(detects_study[is.na(detects_study$DateTime_PST)==F,]) == 0){
"No detections yet"
} else {
arrivals <- detects_study %>%
group_by(general_location, TagCode) %>%
summarise(DateTime_PST = min(DateTime_PST)) %>%
mutate(day = as.Date(DateTime_PST, tz = "Etc/GMT+8"))
# Use dbplyr to load realtime_locs and qryHexCodes sql table
gen_locs <- tbl(con, "realtime_locs") %>% collect()
# gen_locs <- read.csv("realtime_locs.csv", stringsAsFactors = F)
beacon_by_day <- fread("beacon_by_day.csv", stringsAsFactors = F) %>%
mutate(day = as.Date(day)) %>%
filter(TagCode == beacon) %>% # Now subset to only look at data for the correct beacon for that day
filter(day >= as.Date(min(study_tagcodes$release_time)) &
day <= endtime) %>% # Now only keep beacon by day for days since fish were released
dplyr::left_join(., gen_locs[,c("location", "general_location","rkm")], by = "location")
arrivals_per_day <- arrivals %>%
group_by(day, general_location) %>%
summarise(New_arrivals = length(TagCode)) %>%
arrange(general_location) %>% na.omit() %>%
mutate(day = as.Date(day)) %>%
dplyr::left_join(unique(beacon_by_day[,c("general_location", "day", "rkm")]),
., by = c("general_location", "day")) %>%
arrange(general_location, day) %>%
mutate(day = factor(day)) %>%
filter(general_location != "Bench_test") %>% # Remove bench test and other NA locations
filter(!(is.na(general_location))) %>%
arrange(desc(rkm)) %>% # Change order of data to plot decreasing river_km
mutate(general_location = factor(general_location, unique(general_location)))
endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d")),
max(as.Date(detects_study$release_time)+(as.numeric(detects_study$tag_life)*1.5)))
crosstab <- xtabs(formula = arrivals_per_day$New_arrivals ~ arrivals_per_day$day + arrivals_per_day$general_location,
addNA =T)
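# Rendered-table legend: blank = receiver operational but no new arrivals
# (NA sums become ""); NA = no operational receiver that day (true zeros become NA)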
crosstab[is.na(crosstab)] <- ""
crosstab[crosstab==0] <- NA
crosstab <- as.data.frame.matrix(crosstab)
kable(crosstab, align = "c", caption = "4.3 Fish arrivals per day (\"NA\" means receivers were non-operational)") %>%
kable_styling(c("striped", "condensed"), font_size = 11, full_width = F, position = "left", fixed_thead = TRUE) %>%
column_spec(column = 1:ncol(crosstab),width_min = "50px",border_left = T, border_right = T) %>%
column_spec(1, bold = T, width_min = "75px")%>%
scroll_box(height = "700px")
}
4.3 Fish arrivals per day ("NA" means receivers were non-operational)
Day | Blw_Salt_RT | MeridianBr | Stan_Valley_Oak | TowerBridge | I80-50_Br | MiddleRiver | Clifton_Court_US_Radial_Gates | Holland_Cut_Quimby | CVP_Tank | CVP_Trash_Rack_1 | Clifton_Court_Intake_Canal | Old_River_Quimby | SacRiverWalnutGrove_2 | Georg_Sl_1 | Sac_BlwGeorgiana | Sac_BlwGeorgiana2 | Benicia_east | Benicia_west |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2024-04-18 | 132 | |||||||||||||||||
2024-04-19 | 10 | |||||||||||||||||
2024-04-20 | 2 | 39 | ||||||||||||||||
2024-04-21 | 1 | 20 | 12 | 15 | ||||||||||||||
2024-04-22 | 6 | 23 | 32 | 26 | 4 | 22 | 22 | |||||||||||
2024-04-23 | 43 | 6 | 4 | 6 | 22 | 4 | 17 | 18 | ||||||||||
2024-04-24 | 28 | 5 | 6 | 9 | 3 | 3 | 3 | 2 | 1 | |||||||||
2024-04-25 | 1 | 33 | 1 | 1 | 6 | 6 | 6 | 4 | 3 | |||||||||
2024-04-26 | 2 | 28 | 15 | 18 | 2 | 2 | 2 | 6 | 3 | |||||||||
2024-04-27 | 2 | 14 | 16 | 32 | 13 | 1 | 11 | 12 | 3 | 2 | ||||||||
2024-04-28 | 1 | 6 | 10 | 11 | 27 | 2 | 24 | 25 | ||||||||||
2024-04-29 | 1 | 9 | 3 | 5 | 5 | 5 | 4 | 4 | 4 | |||||||||
2024-04-30 | 90 | 12 | 6 | 7 | 5 | 4 | 5 | 7 | 6 | |||||||||
2024-05-01 | 97 | 6 | 11 | 10 | 4 | 4 | 4 | 4 | 3 | |||||||||
2024-05-02 | 2 | 10 | 3 | 6 | 5 | 1 | 4 | 4 | 3 | 3 | ||||||||
2024-05-03 | 2 | 17 | 2 | 3 | 6 | 2 | 3 | 3 | ||||||||||
2024-05-04 | 12 | 5 | 6 | 1 | 2 | 1 | 1 | 1 | ||||||||||
2024-05-05 | 5 | 6 | 6 | 3 | 1 | 2 | 3 | 3 | 3 | |||||||||
2024-05-06 | 1 | 7 | 2 | 1 | 5 | 4 | 4 | 1 | 1 | |||||||||
2024-05-07 | 23 | 4 | 3 | 4 | 2 | 3 | 3 | 1 | 1 | |||||||||
2024-05-08 | 19 | 2 | 3 | 2 | 2 | 2 | 2 | 1 | 1 | |||||||||
2024-05-09 | 15 | 1 | 2 | 4 | 2 | 2 | ||||||||||||
2024-05-10 | 5 | 8 | 9 | 3 | 3 | 3 | 1 | |||||||||||
2024-05-11 | 1 | 9 | 10 | 6 | 7 | 7 | 2 | 1 | ||||||||||
2024-05-12 | 2 | 2 | 3 | 3 | 3 | 3 | 1 | |||||||||||
2024-05-13 | 1 | 2 | 2 | 1 | 1 | |||||||||||||
2024-05-14 | 28 | 2 | 2 | 2 | 3 | 1 | ||||||||||||
2024-05-15 | 67 | 1 | 1 | 1 | ||||||||||||||
2024-05-16 | 3 | 1 | 1 | |||||||||||||||
2024-05-17 | 10 | 1 | 1 | 1 | 1 | |||||||||||||
2024-05-18 | 1 | 1 | 1 | 1 | ||||||||||||||
2024-05-19 | 2 | 2 | ||||||||||||||||
2024-05-20 | 4 | 4 | 2 | 2 | 2 | 1 | 1 | |||||||||||
2024-05-21 | 94 | 1 | 1 | 1 | 1 | 1 | ||||||||||||
2024-05-22 | 80 | 8 | 1 | 1 | ||||||||||||||
2024-05-23 | 33 | |||||||||||||||||
2024-05-24 | 23 | 18 | 18 | |||||||||||||||
2024-05-25 | 27 | 25 | 6 | 1 | 3 | 2 | ||||||||||||
2024-05-26 | 6 | 3 | 14 | 1 | 7 | 8 | ||||||||||||
2024-05-27 | 1 | 1 | 3 | 3 | ||||||||||||||
2024-05-28 | 1 | 3 | 3 | |||||||||||||||
2024-05-29 | 71 | 1 | 2 | 2 | ||||||||||||||
2024-05-30 | 39 | |||||||||||||||||
2024-05-31 | 1 | |||||||||||||||||
2024-06-01 | 4 | |||||||||||||||||
2024-06-02 | 1 | 1 | ||||||||||||||||
2024-06-03 | 1 | 2 | 2 | 1 | 1 | 1 | ||||||||||||
2024-06-04 | ||||||||||||||||||
2024-06-05 | 2 | 2 | ||||||||||||||||
2024-06-06 | 1 | 1 | 1 | |||||||||||||||
2024-06-07 | ||||||||||||||||||
2024-06-08 | 1 | 1 | ||||||||||||||||
2024-06-09 | ||||||||||||||||||
2024-06-10 | ||||||||||||||||||
2024-06-11 | 1 | |||||||||||||||||
2024-06-12 | ||||||||||||||||||
2024-06-13 | ||||||||||||||||||
2024-06-14 | ||||||||||||||||||
2024-06-15 | ||||||||||||||||||
2024-06-16 | ||||||||||||||||||
2024-06-17 | ||||||||||||||||||
2024-06-18 | ||||||||||||||||||
2024-06-19 | ||||||||||||||||||
2024-06-20 | ||||||||||||||||||
2024-06-21 | ||||||||||||||||||
2024-06-22 | ||||||||||||||||||
2024-06-23 |
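# Clear the workspace and delete the temporary MARK input/output files written by RMark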
rm(list = ls())
cleanup(ask = F)
For questions or comments, please contact cyril.michel@noaa.gov