## Change working directory to the "products" folder on the user's Desktop
## (Windows path built from USERPROFILE). Wrapped in try() so the script keeps
## running when the path does not exist (e.g. knitting on a different machine).
## NOTE(review): setwd() in scripts is fragile; prefer project-relative paths.
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
library(knitr)
library(kableExtra)
library(lubridate)
library(data.table)
library(dplyr)
library(ggplot2)
library(RMark)
library(scales)
library(viridis)
library(forcats)
library(reshape2)
library(png)
library(dataRetrieval)
library(rerddap)
library(plotly)
##################################################################################################################
#### ASSIGN STUDY ID IN THIS NEXT LINE OF CODE ####
study <- "Thermal_Thresholds_2022"
##################################################################################################################
## Load the archived detection file. Timestamp columns are read as character so
## they can be parsed below with an explicit format and time zone.
detects_study <- fread("study_detections_archive_2023.csv", stringsAsFactors = FALSE, colClasses = c(DateTime_PST = "character", RelDT = "character"))
## Keep only rows belonging to the study assigned above
detects_study <- as.data.frame(detects_study[detects_study$Study_ID == study,])
## All times are Pacific Standard Time year-round: Etc/GMT+8 == UTC-8 (no DST)
detects_study$DateTime_PST <- as.POSIXct(detects_study$DateTime_PST, format = "%Y-%m-%d %H:%M:%S", tz="Etc/GMT+8")
detects_study$release_time <- as.POSIXct(detects_study$RelDT, format = "%Y-%m-%d %H:%M:%S", tz="Etc/GMT+8")
## Normalize column names to the lower-case names used by the rest of the report
colnames(detects_study)[which(colnames(detects_study) == "Weight")] <- "weight"
colnames(detects_study)[which(colnames(detects_study) == "Length")] <- "length"
colnames(detects_study)[which(colnames(detects_study) == "Rel_rkm")] <- "release_rkm"
colnames(detects_study)[which(colnames(detects_study) == "Rel_loc")] <- "release_location"
colnames(detects_study)[which(colnames(detects_study) == "rkm")] <- "river_km"
## Timestamp of the most recent data download (written by the download script)
latest <- read.csv("latest_download.csv", stringsAsFactors = FALSE)$x
##################################################################################################################
#### TO RUN THE FOLLOWING CODE CHUNKS FROM HERE ON DOWN USING R ERDDAP, UN-COMMENT THESE NEXT 9 LINES OF CODE ####
##################################################################################################################
# cache_delete_all()
# query=paste('&',"Study_ID",'="',study,'"',sep = '')
# datafile=URLencode(paste("https://oceanview.pfeg.noaa.gov/erddap/tabledap/","FEDcalFishTrack",".csv?",query,sep = ''))
# options(url.method = "libcurl", download.file.method = "libcurl", timeout = 180)
# detects_study <- data.frame(read.csv(datafile,row.names = NULL, stringsAsFactors = F))
# detects_study <- detects_study[-1,]
# detects_study$DateTime_PST <- as.POSIXct(detects_study$local_time, format = "%Y-%m-%d %H:%M:%S", "Etc/GMT+8")
# detects_study$release_time <- as.POSIXct(detects_study$release_time, format = "%Y-%m-%d %H:%M:%S", "Etc/GMT+8")
# detects_study$river_km <- as.numeric(detects_study$river_km)
## Print study status and render a per-release-group summary table of tagging
## statistics. Requires dplyr (group_by/summarise/mutate/arrange) to be attached
## (it is loaded in the top library block).
if (nrow(detects_study) == 0){
  cat("Study has not yet begun\n ")
}else{
  if (min(detects_study$release_time) > Sys.time()){
    cat("Study has not yet begun, below data is a placeholder:\n ")
  }
  if (min(detects_study$release_time) < Sys.time()){
    cat(paste("Study began on ", min(detects_study$release_time), ", see tagging details below:\n ", sep = ""))
  }
  ########################################################################
  #### ASSIGN RELEASE GROUPS HERE ####
  #######################################################################
  detects_study$Release <- "ALL"
  #detects_study[detects_study$release_time > as.POSIXct("2021-01-30 14:31:00"), "Release"] <- "Release 3"
  #######################################################################
  ## One row per tagged fish with its release metadata
  study_tagcodes <- as.data.frame(unique(detects_study[,c("TagCode", "release_time", "weight", "length", "release_rkm",
                                                          "release_location", "Release","Rel_latitude","Rel_longitude")]))
  ## Summarize release statistics per release group. head(x, 1) picks the first
  ## (assumed constant within a group) location/rkm/lat/lon value.
  release_stats <- study_tagcodes %>%
    group_by(Release) %>%
    summarise(First_release_time = min(release_time),
              Last_release_time = max(release_time),
              Number_fish_released = length(unique(TagCode)),
              Release_location = head(release_location, 1),
              Release_rkm = head(release_rkm,1),
              Mean_length = mean(length, na.rm = TRUE),
              Mean_weight = mean(weight, na.rm = TRUE),
              Release_lat = head(Rel_latitude,1),
              Release_lon = head(Rel_longitude,1)) %>%
    mutate(Mean_length = round(Mean_length, 1),
           Mean_weight = round(Mean_weight, 1),
           ## format() renders the times in PST as character; sorting the
           ## resulting "%Y-%m-%d %H:%M:%S" strings is still chronological
           First_release_time = format(First_release_time, tz = "Etc/GMT+8"),
           Last_release_time = format(Last_release_time, tz = "Etc/GMT+8")) %>%
    arrange(First_release_time)
  ## Render the table, hiding lat/lon columns (they are only used for mapping)
  kable(release_stats[,!names(release_stats)%in% c("Release_lon","Release_lat")], format = "html", row.names = FALSE) %>%
    kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"), full_width = FALSE, position = "left")
}
Study began on 2022-08-03 06:58:40, see tagging details below:
| Release | First_release_time | Last_release_time | Number_fish_released | Release_location | Release_rkm | Mean_length | Mean_weight |
|---|---|---|---|---|---|---|---|
| ALL | 2022-08-03 06:58:40 | 2022-09-08 13:38:55 | 96 | Gulf of Farallones | 0 | 758.3 | NaN |
Study is complete, all tags are no longer active as of 2023-03-04. All times in Pacific Standard Time.
library(leaflet)
library(maps)
library(htmlwidgets)
library(leaflet.extras)
library(dplyr)
library(dbplyr)
library(DBI)
library(odbc)
library(data.table)
# Create connection with cloud database
# NOTE(review): read-only credentials are hard-coded in plain text in a file
# that is rendered/shared. Move UID/PWD to environment variables or a config
# file outside version control, and rotate this password.
con <- dbConnect(odbc(),
Driver = "SQL Server",
Server = "calfishtrack-server.database.windows.net",
Database = "realtime_detections",
UID = "readonlylogin",
PWD = "Pass0123",
Port = 1433)
## Leaflet map of realtime receiver sites. Before any detections, shows all
## currently-deployed receivers; afterwards, shows the count of unique fish
## detected at each site plus release locations (when coordinates exist).
## Requires the open DBI connection `con` and local "beacon_by_day.csv".
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
## THIS CODE CHUNK WILL NOT WORK IF USING ONLY ERDDAP DATA, REQUIRES ACCESS TO LOCAL FILES
if (nrow(detects_study[!is.na(detects_study$DateTime_PST),]) == 0){
  cat("No detections yet")
  # Use dbplyr to load realtime_locs and qryHexCodes sql table
  gen_locs <- tbl(con, "realtime_locs") %>% collect()
  dbDisconnect(con)# %>% filter(is.na(stop))
  ## Plot every currently-deployed receiver (stop is NA = still in operation)
  leaflet(data = gen_locs[is.na(gen_locs$stop),]) %>%
    # setView(-72.14600, 43.82977, zoom = 8) %>%
    ## FIX: tile group must match the "Street Map" entry in baseGroups below
    ## (was group = "Map", which broke the base-layer toggle)
    addProviderTiles("Esri.WorldStreetMap", group = "Street Map") %>%
    addProviderTiles("Esri.WorldImagery", group = "Satellite") %>%
    addProviderTiles("Esri.WorldShadedRelief", group = "Relief") %>%
    # Marker data are from the sites data frame. We need the ~ symbols
    # to indicate the columns of the data frame.
    addMarkers(~longitude, ~latitude, label = ~general_location, group = "Receiver Sites", popup = ~location) %>%
    # addAwesomeMarkers(~lon_dd, ~lat_dd, label = ~locality, group = "Sites", icon=icons) %>%
    addScaleBar(position = "bottomleft") %>%
    addLayersControl(
      baseGroups = c("Street Map", "Satellite", "Relief"),
      overlayGroups = c("Receiver Sites"),
      options = layersControlOptions(collapsed = FALSE)) %>%
    addSearchFeatures(targetGroups = c("Receiver Sites"))
} else {
  # Use dbplyr to load realtime_locs and qryHexCodes sql table
  gen_locs <- tbl(con, "realtime_locs") %>% collect()
  dbDisconnect(con)#
  ## Stop tallying arrivals at 1.5x predicted tag life (or today, if sooner)
  endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d")),
                 max(as.Date(detects_study$release_time)+(as.numeric(detects_study$tag_life)*1.5)))
  beacon_by_day <- fread("beacon_by_day.csv", stringsAsFactors = FALSE) %>%
    mutate(day = as.Date(day)) %>%
    # Subset to only look at data for the correct beacon for that day
    filter(TagCode == beacon) %>%
    # Only keep beacon by day for days since fish were released
    filter(day >= as.Date(min(study_tagcodes$release_time)) & day <= endtime) %>%
    dplyr::left_join(., gen_locs[,c("location", "general_location","rkm")], by = "location")
  ## First arrival of each fish at each general location, rolled up per day.
  ## Left-joining onto beacon_by_day keeps site-days that saw no fish.
  arrivals_per_day <- detects_study %>%
    group_by(general_location, TagCode) %>%
    summarise(DateTime_PST = min(DateTime_PST, na.rm = TRUE)) %>%
    arrange(TagCode, general_location) %>%
    mutate(day = as.Date(DateTime_PST, "%Y-%m-%d", tz = "Etc/GMT+8")) %>%
    group_by(day, general_location) %>%
    summarise(New_arrivals = length(TagCode)) %>%
    na.omit() %>%
    mutate(day = as.Date(day)) %>%
    dplyr::left_join(unique(beacon_by_day[,c("general_location", "day", "rkm")]), .,
                     by = c("general_location", "day")) %>%
    arrange(general_location, day) %>%
    mutate(day = as.factor(day)) %>%
    filter(general_location != "Bench_test") %>% # Remove bench test
    filter(!(is.na(general_location))) # Remove NA locations
  ## Remove sites that were not operational the whole time
  #### FOR THE SEASONAL SURVIVAL PAGE, KEEP ALL SITES SINCE PEOPLE WANT TO SEE DETECTIONS OF LATER FISH AT NEWLY
  #### DEPLOYED SPOTS
  gen_locs_days_in_oper <- arrivals_per_day %>%
    group_by(general_location) %>%
    summarise(days_in_oper = length(day))
  #gen_locs_days_in_oper <- gen_locs_days_in_oper[gen_locs_days_in_oper$days_in_oper ==
  #                                                 max(gen_locs_days_in_oper$days_in_oper),]
  arrivals_per_day_in_oper <- arrivals_per_day %>%
    filter(general_location %in% gen_locs_days_in_oper$general_location)
  ## Total unique fish seen per site over the whole period
  fish_per_site <- arrivals_per_day_in_oper %>%
    group_by(general_location) %>%
    summarise(fish_count = sum(New_arrivals, na.rm = TRUE))
  gen_locs_mean_coords <- gen_locs %>%
    filter(is.na(stop) & general_location %in% fish_per_site$general_location) %>%
    group_by(general_location) %>%
    summarise(latitude = mean(latitude), # estimate mean lat and lons for each genloc
              longitude = mean(longitude),
              rkm = mean(rkm))
  fish_per_site <- merge(fish_per_site, gen_locs_mean_coords)
  fish_per_site <- fish_per_site[order(fish_per_site$rkm),]
  if(!is.na(release_stats$Release_lat[1])){
    ## Release coordinates available: plot receivers plus release sites.
    ## Label direction alternates left/right on every other site so adjacent
    ## labels along the river do not overlap.
    leaflet(data = fish_per_site) %>%
      addProviderTiles("Esri.WorldStreetMap", group = "Street Map") %>%
      addProviderTiles("Esri.WorldImagery", group = "Satellite") %>%
      addProviderTiles("Esri.WorldShadedRelief", group = "Relief") %>%
      # Marker data are from the sites data frame. We need the ~ symbols
      # to indicate the columns of the data frame.
      addPulseMarkers(data = fish_per_site[seq(from = 1, to = nrow(fish_per_site), by = 2),], lng = ~longitude, lat = ~latitude, label = ~fish_count,
                      labelOptions = labelOptions(noHide = TRUE, direction = "left", textsize = "15px"), group = "Receiver Sites",
                      popup = ~general_location, icon = makePulseIcon(heartbeat = 1.3)) %>%
      addPulseMarkers(data = fish_per_site[seq(from = 2, to = nrow(fish_per_site), by = 2),], lng = ~longitude, lat = ~latitude, label = ~fish_count,
                      labelOptions = labelOptions(noHide = TRUE, direction = "right", textsize = "15px"), group = "Receiver Sites",
                      popup = ~general_location, icon = makePulseIcon(heartbeat = 1.3)) %>%
      addCircleMarkers(data = release_stats, ~Release_lon, ~Release_lat, label = ~Number_fish_released, stroke = FALSE, color = "blue", fillOpacity = 1,
                       group = "Release Sites", popup = ~Release_location, labelOptions = labelOptions(noHide = TRUE, textsize = "15px")) %>%
      addScaleBar(position = "bottomleft") %>%
      addLegend("bottomright", labels = c("Receivers", "Release locations"), colors = c("red","blue")) %>%
      addLayersControl(baseGroups = c("Street Map", "Satellite", "Relief"), options = layersControlOptions(collapsed = FALSE))
  } else {
    ## No release coordinates: plot receiver sites only
    leaflet(data = fish_per_site) %>%
      addProviderTiles("Esri.NatGeoWorldMap", group = "Street Map") %>%
      addProviderTiles("Esri.WorldImagery", group = "Satellite") %>%
      addProviderTiles("Esri.WorldShadedRelief", group = "Relief") %>%
      # Marker data are from the sites data frame. We need the ~ symbols
      # to indicate the columns of the data frame.
      addPulseMarkers(lng = fish_per_site$longitude, lat = fish_per_site$latitude, label = ~fish_count,
                      labelOptions = labelOptions(noHide = TRUE, textsize = "15px"), group = "Receiver Sites",
                      popup = ~general_location, icon = makePulseIcon(heartbeat = 1.3)) %>%
      addScaleBar(position = "bottomleft") %>%
      addLayersControl(baseGroups = c("Street Map", "Satellite", "Relief"),
                       options = layersControlOptions(collapsed = FALSE))
  }
}
2.1 Map of unique fish detections at operational realtime detection locations
## Waterfall detection plot: for each tagged fish, plot river km vs. time for
## the first detection of each distinct "movement" (a run of consecutive
## detections at one general location), one plotly trace per tag.
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
if (nrow(detects_study[!is.na(detects_study$DateTime_PST),]) > 0){
  detects_study <- detects_study[order(detects_study$TagCode, detects_study$DateTime_PST),]
  ## Track the previous detection's general location for each detection
  detects_study$prev_genloc <- shift(detects_study$general_location, fill = NA, type = "lag")
  #detects_study$prev_genloc <- shift(detects_study$General_Location, fill = NA, type = "lag")
  ## Blank prev_genloc at tag boundaries and at location changes, so an NA
  ## marks the start of a new movement
  detects_study[which(detects_study$TagCode != shift(detects_study$TagCode, fill = NA, type = "lag")), "prev_genloc"] <- NA
  detects_study[which(detects_study$general_location != detects_study$prev_genloc), "prev_genloc"] <- NA
  ## mov_score == 1 at each movement start; cumsum gives each movement a unique id
  detects_study$mov_score <- 0
  detects_study[is.na(detects_study$prev_genloc), "mov_score"] <- 1
  detects_study$mov_counter <- cumsum(detects_study$mov_score)
  ## Earliest detection within each movement
  detects_summary <- aggregate(list(first_detect = detects_study$DateTime_PST), by = list(TagCode = detects_study$TagCode, length = detects_study$length, release_time = detects_study$release_time, mov_counter = detects_study$mov_counter ,general_location = detects_study$general_location, river_km = detects_study$river_km, release_rkm = detects_study$release_rkm), min)
  detects_summary <- detects_summary[!is.na(detects_summary$first_detect),]
  ## Add one "release" row per fish so each trace starts at the release site
  releases <- aggregate(list(first_detect = detects_summary$release_time), by = list(TagCode = detects_summary$TagCode, length = detects_summary$length, release_time = detects_summary$release_time, release_rkm = detects_summary$release_rkm), min)
  releases$river_km <- releases$release_rkm
  releases$mov_counter <- NA
  releases$general_location <- NA
  detects_summary <- rbindlist(list(detects_summary, releases), use.names = TRUE)
  detects_summary <- detects_summary[order(detects_summary$TagCode, detects_summary$first_detect),]
  starttime <- as.Date(min(detects_study$release_time), "Etc/GMT+8")
  ## Endtime should be either now, or end of predicted tag life, whichever comes first
  endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d"))+1, max(as.Date(detects_study$release_time)+(as.numeric(detects_study$tag_life))))
  #par(mar=c(6, 5, 2, 5) + 0.1)
  #plot_ly(detects_summary, width = 900, height = 600, dynamicTicks = TRUE) %>%
  plot_ly(detects_summary, x = ~first_detect, y = ~river_km, color = ~TagCode, width = 900, height = 600, dynamicTicks = TRUE, connectgaps = TRUE, mode = "lines+markers", type = "scatter") %>%
    layout(showlegend = TRUE, title = paste("Total # of individual fish detected =",length(unique(detects_summary$TagCode))),
           xaxis = list(title = "<b> Date <b>", mirror=TRUE,ticks="outside",showline=TRUE, range=c(starttime,endtime)),
           ## FIX: "Rel_rkm" was renamed to "release_rkm" at import, so
           ## detects_study$Rel_rkm was NULL and max(NULL) = -Inf broke the y range
           yaxis = list(title = "<b> Kilometers upstream of the Golden Gate <b>", mirror=TRUE,ticks="outside",showline=TRUE, range=c(max(detects_study$release_rkm)+10, min(gen_locs[is.na(gen_locs$stop),"rkm"])-10)),
           legend = list(title=list(text='<b> Tag Code </b>')),
           margin=list(l = 50,r = 100,b = 50,t = 50)
    )
}else{
  plot(1:2, type = "n", xlab = "",xaxt = "n", yaxt = "n", ylab = "Kilometers upstream of the Golden Gate")
  text(1.5,1.5, labels = "NO DETECTIONS YET", cex = 2)
}
2.2 Waterfall Detection Plot
_______________________________________________________________________________________________________
## Stacked daily bar plot of new fish arrivals at the Benicia Bridge receivers
## (east + west), split by release group, for the duration of tag life.
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
detects_benicia <- detects_study[detects_study$general_location %in% c("Benicia_west", "Benicia_east"),]
if (nrow(detects_benicia)>0) {
  ## Keep only each fish's first detection at Benicia
  detects_benicia <- merge(detects_benicia,aggregate(list(first_detect = detects_benicia$DateTime_PST), by = list(TagCode= detects_benicia$TagCode), FUN = min))
  detects_benicia$Day <- as.Date(detects_benicia$first_detect, "Etc/GMT+8")
  starttime <- as.Date(min(detects_benicia$release_time), "Etc/GMT+8")
  ## Endtime should be either now or end of predicted tag life, whichever comes first
  endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d")), max(as.Date(detects_study$release_time)+(as.numeric(detects_study$tag_life))))
  #wlk_flow <- cdec_query("COL", "20", "H", starttime, endtime+1)
  #wlk_flow$datetime <- as.Date(wlk_flow$datetime)
  #wlk_flow_day <- aggregate(list(parameter_value = wlk_flow$parameter_value), by = list(Day = wlk_flow$datetime), FUN = mean, na.rm = T)
  ## One row per day across the whole period so zero-arrival days appear
  daterange <- data.frame(Day = seq.Date(from = starttime, to = endtime, by = "day"))
  rels <- unique(study_tagcodes$Release)
  rel_num <- length(rels)
  rels_no_detects <- as.character(rels[!(rels %in% unique(detects_benicia$Release))])
  ## Unique tags per day per release group, cast wide (one column per group)
  tagcount <- aggregate(list(unique_tags = detects_benicia$TagCode), by = list(Day = detects_benicia$Day, Release = detects_benicia$Release ), FUN = function(x){length(unique(x))})
  tagcount1 <- reshape2::dcast(tagcount, Day ~ Release)
  daterange1 <- merge(daterange, tagcount1, all.x = TRUE)
  daterange1[is.na(daterange1)] <- 0
  ## Release groups never seen here get an all-NA column so they still show in the legend
  if(length(rels_no_detects)>0){
    for(i in rels_no_detects){
      daterange1 <- cbind(daterange1, x=NA)
      names(daterange1)[names(daterange1) == 'x'] <- paste(i)
    }
  }
  ## reorder columns in alphabetical order so the coloring in barplots is consistent
  daterange1 <- daterange1[,order(colnames(daterange1))]
  #daterange2 <- merge(daterange1, wlk_flow_day, by = "Day", all.x = T)
  daterange2 <- daterange1
  rownames(daterange2) <- daterange2$Day
  daterange2$Day <- NULL
  par(mar=c(6, 5, 2, 5) + 0.1)
  # barp <- barplot(t(daterange2[,1:ncol(daterange2)]), plot = FALSE, beside = T)
  # barplot(t(daterange2[,1:ncol(daterange2)]), beside = T, col=brewer.pal(n = rel_num, name = "Dark2"),
  #         xlab = "", ylab = "Number of fish arrivals per day",
  #         ylim = c(0,max(daterange2[,1:ncol(daterange2)], na.rm = T)*1.2),
  #         las = 2, xlim=c(0,max(barp)+1), cex.lab = 1.5, yaxt = "n", xaxt = "n", border = NA)#,
  #         #legend.text = colnames(daterange2[,1:ncol(daterange2)-1]),
  #         #args.legend = list(x ='topright', bty='n', inset=c(-0.2,0)), title = "Release Group")
  # legend(x ='topleft', legend = colnames(daterange2)[1:ncol(daterange2)], fill= brewer.pal(n = rel_num, name = "Set1"), horiz = T, title = "Release")
  # ybreaks <- if(max(daterange2[,1:ncol(daterange2)], na.rm = T) < 4) {max(daterange2[,1:ncol(daterange2)], na.rm = T)} else {5}
  # xbreaks <- if(ncol(barp) > 10) {seq(1, ncol(barp), 2)} else {1:ncol(barp)}
  # barpmeans <- colMeans(barp)
  # axis(1, at = barpmeans[xbreaks], labels = rownames(daterange2)[xbreaks], las = 2)
  # axis(2, at = pretty(0:max(daterange2[,1:ncol(daterange2)], na.rm = T), ybreaks))
  # box()
  daterange2$Date <- as.Date(row.names(daterange2))
  ## FIX: removed stray trailing comma that passed an empty argument to melt()
  daterange3 <- melt(daterange2, id.vars = "Date", variable.name = ".")
  ## Reverse factor levels so legend order matches the stacking order
  daterange3$. <- factor(daterange3$., levels = sort(unique(daterange3$.), decreasing = TRUE))
  plot_ly(daterange3, width = 900, height = 600, dynamicTicks = TRUE) %>%
    add_bars(x = ~Date, y = ~value, color = ~.) %>%
    add_annotations( text="Release (click on legend items to isolate)", xref="paper", yref="paper",
                     x=0.01, xanchor="left",
                     y=1.056, yanchor="top", # Same y as legend below
                     legendtitle=TRUE, showarrow=FALSE ) %>%
    #add_lines(x=~daterange2_flow$Date, y=~daterange2_flow$parameter_value, line = list(color = alpha("#947FFF", alpha = 0.5)), yaxis="y2", showlegend=FALSE, inherit=FALSE) %>%
    layout(showlegend = TRUE,
           barmode = "stack",
           xaxis = list(title = "Date", mirror=TRUE,ticks='outside',showline=TRUE), yaxis = list(title = "Number of fish arrivals per day", mirror=TRUE,ticks='outside',showline=TRUE),
           legend = list(orientation = "h",x = 0.34, y = 1.066),
           margin=list(l = 50,r = 100,b = 50,t = 50)
    )
}else{
  plot(1:2, type = "n", xlab = "",xaxt = "n", yaxt = "n", ylab = "Number of fish arrivals per day")
  text(1.5,1.5, labels = "NO DETECTIONS YET", cex = 2)
}
2.3 Detections at Benicia Bridge for duration of tag life
_______________________________________________________________________________________________________
## Stacked daily bars of new fish arrivals at Tower Bridge (downtown Sacramento)
## by release group, overlaid with Sacramento River discharge at Wilkins Slough
## (USGS gage 11390500) on a secondary y axis.
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
detects_tower <- detects_study[detects_study$general_location == "TowerBridge",]
#wlk_flow <- read.csv("wlk.csv")
if (nrow(detects_tower) == 0){
plot(1:2, type = "n", xlab = "",xaxt = "n", yaxt = "n", ylab = "Number of fish arrivals per day")
text(1.5,1.5, labels = "NO DETECTIONS YET", cex = 2)
} else {
## Keep only each fish's first detection at this site
detects_tower <- merge(detects_tower,aggregate(list(first_detect = detects_tower$DateTime_PST), by = list(TagCode= detects_tower$TagCode), FUN = min))
detects_tower$Day <- as.Date(detects_tower$first_detect, "Etc/GMT+8")
starttime <- as.Date(min(detects_tower$release_time), "Etc/GMT+8")
## Endtime should be either now, or end of predicted tag life, whichever comes first
endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d")), max(as.Date(detects_study$release_time)+(as.numeric(detects_study$tag_life))))
## download wilkins slough flow data
## (parameterCd 00060 = discharge in cfs; reduced to a daily mean below)
wlk_flow <- readNWISuv(siteNumbers = "11390500", parameterCd="00060", startDate = starttime, endDate = endtime+1)
wlk_flow$datetime <- as.Date(format(wlk_flow$dateTime, "%Y-%m-%d"))
wlk_flow_day <- aggregate(list(parameter_value = wlk_flow$X_00060_00000),
by = list(Day = wlk_flow$datetime),
FUN = mean, na.rm = T)
## One row per day across the whole period so zero-arrival days appear
daterange <- data.frame(Day = seq.Date(from = starttime, to = endtime, by = "day"))
#rels <- unique(study_tagcodes[study_tagcodes$StudyID == unique(detects_tower$StudyID), "Release"])
rels <- unique(study_tagcodes$Release)
rel_num <- length(rels)
rels_no_detects <- as.character(rels[!(rels %in% unique(detects_tower$Release))])
## Unique tags per day per release group, cast wide (one column per group)
tagcount <- aggregate(list(unique_tags = detects_tower$TagCode), by = list(Day = detects_tower$Day, Release = detects_tower$Release ), FUN = function(x){length(unique(x))})
tagcount1 <- reshape2::dcast(tagcount, Day ~ Release)
daterange1 <- merge(daterange, tagcount1, all.x=T)
daterange1[is.na(daterange1)] <- 0
## Release groups never seen here get an all-NA column so they still show in the legend
if(length(rels_no_detects)>0){
for(i in rels_no_detects){
daterange1 <- cbind(daterange1, x=NA)
names(daterange1)[names(daterange1) == 'x'] <- paste(i)
}
}
## reorder columns in alphabetical order so its coloring in barplots is consistent
daterange1 <- daterange1[,order(colnames(daterange1))]
daterange2 <- merge(daterange1, wlk_flow_day, by = "Day", all.x = T)
rownames(daterange2) <- daterange2$Day
daterange2$Day <- NULL
par(mar=c(6, 5, 2, 5) + 0.1)
# barp <- barplot(t(daterange2[,1:ncol(daterange2)-1]), plot = FALSE, beside = T)
# barplot(t(daterange2[,1:ncol(daterange2)-1]), beside = T, col=brewer.pal(n = rel_num, name = "Set1"),
#         xlab = "", ylab = "Number of fish arrivals per day",
#         ylim = c(0,max(daterange2[,1:ncol(daterange2)-1], na.rm = T)*1.2),
#         las = 2, xlim=c(0,max(barp)+1), cex.lab = 1.5, yaxt = "n", xaxt ="n", border = NA)#,
#         #border=NA
#         #legend.text = colnames(daterange2[,1:ncol(daterange2)-1]),
#         #args.legend = list(x ='topright', bty='n', inset=c(-0.2,0)), title = "Release Group")
# legend(x ='topleft', legend = colnames(daterange2)[1:ncol(daterange2)-1], fill= brewer.pal(n = rel_num, name = "Set1"), horiz = T, title = "Release")
# ybreaks <- if(max(daterange2[,1:ncol(daterange2)-1], na.rm = T) < 4) {max(daterange2[,1:ncol(daterange2)-1], na.rm = T)} else {5}
# xbreaks <- if(ncol(barp) > 10) {seq(1, ncol(barp), 2)} else {1:ncol(barp)}
# barpmeans <- colMeans(barp)
# axis(1, at = barpmeans[xbreaks], labels = rownames(daterange2[xbreaks,]), las = 2)
# axis(2, at = pretty(0:max(daterange2[,1:ncol(daterange2)-1], na.rm = T), ybreaks))
#
# par(new=T)
#
# plot(x = barpmeans, daterange2$parameter_value, yaxt = "n", xaxt = "n", ylab = "", xlab = "", col = "lightslateblue", type = "l", lwd=1.5, xlim=c(0,max(barp)+1), ylim = c(min(daterange2$parameter_value, na.rm = T), max(daterange2$parameter_value, na.rm=T)*1.1))#, ylab = "Returning adults", xlab= "Outmigration year", yaxt="n", col="red", pch=20)
# axis(side = 4)#, labels = c(2000:2016), at = c(2000:2016))
# mtext("Flow (cfs) at Wilkins Slough", side=4, line=3, cex=1.5, col="lightslateblue")
daterange2$Date <- as.Date(row.names(daterange2))
daterange2_flow <- daterange2[,c("Date", "parameter_value")]
daterange3 <- melt(daterange2[,!(names(daterange2) %in% c("parameter_value"))], id.vars = "Date", variable.name = ".")
## Secondary (right) axis definition for the flow trace
ay <- list(
overlaying = "y",
nticks = 5,
color = "#947FFF",
side = "right",
title = "Flow (cfs) at Wilkins Slough",
automargin = TRUE
)
# p <- ggplot(data = daterange3, aes(x = Date, y = value, color = ., fill = .)) +
#   geom_bar(stat='identity') +
#   ylab("Number of fish arrivals per day") +
#   #xlim(c(as.Date("2021-02-01"), as.Date("2021-02-05"))) +
#   #geom_line(data= daterange2_flow, aes(x = Date, y = parameter_value/500), color = alpha("#947FFF", alpha = 0.5))+
#   #scale_x_date(date_breaks = "5 days") +
#   #scale_y_continuous(name = "Number of fish arrivals per day",
#   #                   # Add a second axis and specify its features
#   #                   sec.axis = sec_axis(~.*500, name="Second Axis")) +
#   theme_bw() +
#   theme(panel.border = element_rect(colour = "black", fill=NA))
## Reverse factor levels so legend order matches the stacking order
daterange3$. <- factor(daterange3$., levels = sort(unique(daterange3$.), decreasing = T))
plot_ly(daterange3, width = 900, height = 600, dynamicTicks = TRUE) %>%
add_bars(x = ~Date, y = ~value, color = ~.) %>%
add_annotations( text="Release (click on legend items to isolate)", xref="paper", yref="paper",
x=0.01, xanchor="left",
y=1.056, yanchor="top", # Same y as legend below
legendtitle=TRUE, showarrow=FALSE ) %>%
add_lines(x=~daterange2_flow$Date, y=~daterange2_flow$parameter_value, line = list(color = alpha("#947FFF", alpha = 0.5)), yaxis="y2", showlegend=FALSE, inherit=FALSE) %>%
layout(yaxis2 = ay,showlegend = T,
barmode = "stack",
xaxis = list(title = "Date", mirror=T,ticks='outside',showline=T), yaxis = list(title = "Number of fish arrivals per day", mirror=T,ticks='outside',showline=T),
legend = list(orientation = "h",x = 0.34, y = 1.066),
margin=list(l = 50,r = 100,b = 50,t = 50)
)
}
2.4 Detections at Tower Bridge (downtown Sacramento) versus Sacramento River flows at Wilkins Slough for duration of tag life
_______________________________________________________________________________________________________
## Stacked daily bars of new fish arrivals at Meridian Bridge by release group,
## overlaid with Sacramento River discharge at Bend Bridge (USGS gage 11377100)
## on a secondary y axis. Same pattern as the Tower Bridge chunk above.
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
detects_butte <- detects_study[detects_study$general_location == "MeridianBr",]
if (nrow(detects_butte) == 0){
plot(1:2, type = "n", xlab = "",xaxt = "n", yaxt = "n", ylab = "Number of fish arrivals per day")
text(1.5,1.5, labels = "NO DETECTIONS YET", cex = 2)
} else {
## Keep only each fish's first detection at this site
detects_butte <- merge(detects_butte,aggregate(list(first_detect = detects_butte$DateTime_PST), by = list(TagCode= detects_butte$TagCode), FUN = min))
detects_butte$Day <- as.Date(detects_butte$first_detect, "Etc/GMT+8")
starttime <- as.Date(min(detects_butte$release_time), "Etc/GMT+8")
## Endtime should be either now, or end of predicted tag life, whichever comes first
#endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d")), max(as.Date(detects_butte$release_time)+60), max(as.Date(detects_butte$release_time)+(as.numeric(detects_butte$tag_life)*1.5)))
endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d")), max(as.Date(detects_study$release_time)+(as.numeric(detects_study$tag_life))))
#BTC_flow <- cdec_query("BTC", "20", "H", starttime, endtime+1)
## download bend bridge flow data
## (parameterCd 00060 = discharge in cfs; reduced to a daily mean below)
BTC_flow <- readNWISuv(siteNumbers = "11377100", parameterCd="00060", startDate = starttime, endDate = endtime+1)
BTC_flow$datetime <- as.Date(format(BTC_flow$dateTime, "%Y-%m-%d"))
BTC_flow_day <- aggregate(list(parameter_value = BTC_flow$X_00060_00000),
by = list(Day = BTC_flow$datetime),
FUN = mean, na.rm = T)
## One row per day across the whole period so zero-arrival days appear
daterange <- data.frame(Day = seq.Date(from = starttime, to = endtime, by = "day"))
#rels <- unique(study_tagcodes[study_tagcodes$StudyID == unique(detects_butte$StudyID), "Release"])
rels <- unique(study_tagcodes$Release)
rel_num <- length(rels)
rels_no_detects <- as.character(rels[!(rels %in% unique(detects_butte$Release))])
## Unique tags per day per release group, cast wide (one column per group)
tagcount <- aggregate(list(unique_tags = detects_butte$TagCode), by = list(Day = detects_butte$Day, Release = detects_butte$Release), FUN = function(x){length(unique(x))})
tagcount1 <- reshape2::dcast(tagcount, Day ~ Release)
daterange1 <- merge(daterange, tagcount1, all.x=T)
daterange1[is.na(daterange1)] <- 0
## Release groups never seen here get an all-NA column so they still show in the legend
if(length(rels_no_detects)>0){
for(i in rels_no_detects){
daterange1 <- cbind(daterange1, x=NA)
names(daterange1)[names(daterange1) == 'x'] <- paste(i)
}
}
## reorder columns in alphabetical order so its coloring in barplots is consistent
daterange1 <- daterange1[,order(colnames(daterange1))]
daterange2 <- merge(daterange1, BTC_flow_day, by = "Day", all.x = T)
rownames(daterange2) <- daterange2$Day
daterange2$Day <- NULL
par(mar=c(6, 5, 2, 5) + 0.1)
# barp <- barplot(t(daterange2[,1:ncol(daterange2)-1]), plot = FALSE, beside = T)
# barplot(t(daterange2[,1:ncol(daterange2)-1]), beside = T, col=brewer.pal(n = rel_num, name = "Set1"),
#         xlab = "", ylab = "Number of fish arrivals per day",
#         ylim = c(0,max(daterange2[,1:ncol(daterange2)-1], na.rm = T)*1.2),
#         las = 2, xlim=c(0,max(barp)+1), cex.lab = 1.5, yaxt = "n", xaxt ="n", border = NA)#,
#         #border=NA
#         #legend.text = colnames(daterange2[,1:ncol(daterange2)-1]),
#         #args.legend = list(x ='topright', bty='n', inset=c(-0.2,0)), title = "Release Group")
# legend(x ='topleft', legend = colnames(daterange2)[1:ncol(daterange2)-1], fill= brewer.pal(n = rel_num, name = "Set1"), horiz = T, title = "Release")
# ybreaks <- if(max(daterange2[,1:ncol(daterange2)-1], na.rm = T) < 4) {max(daterange2[,1:ncol(daterange2)-1], na.rm = T)} else {5}
# xbreaks <- if(ncol(barp) > 10) {seq(1, ncol(barp), 2)} else {1:ncol(barp)}
# barpmeans <- colMeans(barp)
# axis(1, at = barpmeans[xbreaks], labels = rownames(daterange2[xbreaks,]), las = 2)
# axis(2, at = pretty(0:max(daterange2[,1:ncol(daterange2)-1], na.rm = T), ybreaks))
#
# par(new=T)
#
# plot(x = barpmeans, daterange2$parameter_value, yaxt = "n", xaxt = "n", ylab = "", xlab = "", col = "lightslateblue", type = "l", lwd=1.5, xlim=c(0,max(barp)+1), ylim = c(min(daterange2$parameter_value, na.rm = T), max(daterange2$parameter_value, na.rm=T)*1.1))#, ylab = "Returning adults", xlab= "Outmigration year", yaxt="n", col="red", pch=20)
# axis(side = 4)#, labels = c(2000:2016), at = c(2000:2016))
# mtext("Flow (cfs) at Bend Bridge", side=4, line=3, cex=1.5, col="lightslateblue")
daterange2$Date <- as.Date(row.names(daterange2))
daterange2_flow <- daterange2[,c("Date", "parameter_value")]
daterange3 <- melt(daterange2[,!(names(daterange2) %in% c("parameter_value"))], id.vars = "Date", variable.name = ".")
## Secondary (right) axis definition for the flow trace
ay <- list(
overlaying = "y",
nticks = 5,
color = "#947FFF",
side = "right",
title = "Flow (cfs) at Bend Bridge",
automargin = TRUE
)
# p <- ggplot(data = daterange3, aes(x = Date, y = value, color = ., fill = .)) +
#   geom_bar(stat='identity') +
#   ylab("Number of fish arrivals per day") +
#   #xlim(c(as.Date("2021-02-01"), as.Date("2021-02-05"))) +
#   #geom_line(data= daterange2_flow, aes(x = Date, y = parameter_value/500), color = alpha("#947FFF", alpha = 0.5))+
#   #scale_x_date(date_breaks = "5 days") +
#   #scale_y_continuous(name = "Number of fish arrivals per day",
#   #                   # Add a second axis and specify its features
#   #                   sec.axis = sec_axis(~.*500, name="Second Axis")) +
#   theme_bw() +
#   theme(panel.border = element_rect(colour = "black", fill=NA))
## Reverse factor levels so legend order matches the stacking order
daterange3$. <- factor(daterange3$., levels = sort(unique(daterange3$.), decreasing = T))
plot_ly(daterange3, width = 900, height = 600, dynamicTicks = TRUE) %>%
add_bars(x = ~Date, y = ~value, color = ~.) %>%
add_annotations( text="Release (click on legend items to isolate)", xref="paper", yref="paper",
x=0.01, xanchor="left",
y=1.056, yanchor="top", # Same y as legend below
legendtitle=TRUE, showarrow=FALSE ) %>%
add_lines(x=~daterange2_flow$Date, y=~daterange2_flow$parameter_value, line = list(color = alpha("#947FFF", alpha = 0.5)), yaxis="y2", showlegend=FALSE, inherit=FALSE) %>%
layout(yaxis2 = ay,showlegend = T,
barmode = "stack",
xaxis = list(title = "Date", mirror=T,ticks='outside',showline=T), yaxis = list(title = "Number of fish arrivals per day", mirror=T,ticks='outside',showline=T),
legend = list(orientation = "h",x = 0.34, y = 1.066),
margin=list(l = 50,r = 100,b = 50,t = 50)
)
}
2.5 Detections at Meridian Bridge versus Sacramento River flows at Bend Bridge for duration of tag life
_______________________________________________________________________________________________________
## Section chunk: daily fish arrivals at the receiver below Salt Creek ("Blw_Salt_RT"),
## plotted as stacked bars by release group against mean daily Sacramento River flow
## at Bend Bridge (USGS gage 11377100) on a secondary y-axis.
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
## Keep only detections from the Salt Creek real-time receiver
detects_saltcrk <- detects_study[detects_study$general_location == "Blw_Salt_RT",]
if (nrow(detects_saltcrk) == 0){
## No detections yet: draw a placeholder figure instead of the bar/flow plot
plot(1:2, type = "n", xlab = "",xaxt = "n", yaxt = "n", ylab = "Number of fish arrivals per day")
text(1.5,1.5, labels = "NO DETECTIONS YET", cex = 2)
} else {
## Attach each tag's first detection time at this receiver (arrival time)
detects_saltcrk <- merge(detects_saltcrk,aggregate(list(first_detect = detects_saltcrk$DateTime_PST), by = list(TagCode= detects_saltcrk$TagCode), FUN = min))
## Arrival day in PST (Etc/GMT+8)
detects_saltcrk$Day <- as.Date(detects_saltcrk$first_detect, "Etc/GMT+8")
## Plot window starts at the earliest release among detected fish
starttime <- as.Date(min(detects_saltcrk$release_time), "Etc/GMT+8")
## Endtime should be either now, or end of predicted tag life, whichever comes first
endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d")), max(as.Date(detects_study$release_time)+(as.numeric(detects_study$tag_life))))
#BTC_flow <- cdec_query("BTC", "20", "H", starttime, endtime+1)
## download bend bridge flow data
## Instantaneous discharge (parameter 00060, cfs) from USGS NWIS; +1 day so the
## final plot day has complete data after the daily averaging below
BTC_flow <- readNWISuv(siteNumbers = "11377100", parameterCd="00060", startDate = starttime, endDate = endtime+1)
BTC_flow$datetime <- as.Date(format(BTC_flow$dateTime, "%Y-%m-%d"))
## Collapse sub-daily gage readings to a daily mean flow
BTC_flow_day <- aggregate(list(parameter_value = BTC_flow$X_00060_00000),
by = list(Day = BTC_flow$datetime),
FUN = mean, na.rm = T)
## One row per calendar day so days with zero arrivals still appear in the plot
daterange <- data.frame(Day = seq.Date(from = starttime, to = endtime, by = "day"))
#rels <- unique(study_tagcodes[study_tagcodes$StudyID == unique(detects_butte$StudyID), "Release"])
## study_tagcodes is created earlier in the document (not shown in this chunk)
rels <- unique(study_tagcodes$Release)
rel_num <- length(rels)
## Release groups with no detections here still need (empty) columns for a
## consistent legend/coloring across sections
rels_no_detects <- as.character(rels[!(rels %in% unique(detects_saltcrk$Release))])
## Unique tags arriving per day per release group
tagcount <- aggregate(list(unique_tags = detects_saltcrk$TagCode), by = list(Day = detects_saltcrk$Day, Release = detects_saltcrk$Release), FUN = function(x){length(unique(x))})
## Wide format: one column per release group
tagcount1 <- reshape2::dcast(tagcount, Day ~ Release)
daterange1 <- merge(daterange, tagcount1, all.x=T)
## Days with no arrivals become 0 rather than NA
daterange1[is.na(daterange1)] <- 0
if(length(rels_no_detects)>0){
for(i in rels_no_detects){
daterange1 <- cbind(daterange1, x=NA)
names(daterange1)[names(daterange1) == 'x'] <- paste(i)
}
}
## reorder columns in alphabetical order so its coloring in barplots is consistent
daterange1 <- daterange1[,order(colnames(daterange1))]
## Join daily mean flow onto the arrival counts
daterange2 <- merge(daterange1, BTC_flow_day, by = "Day", all.x = T)
daterange2$Day <- NULL
par(mar=c(6, 5, 2, 5) + 0.1)
## Legacy base-graphics version of this figure, kept for reference:
# barp <- barplot(t(daterange2[,1:ncol(daterange2)-1]), plot = FALSE, beside = T)
# barplot(t(daterange2[,1:ncol(daterange2)-1]), beside = T, col=brewer.pal(n = rel_num, name = "Set1"),
# xlab = "", ylab = "Number of fish arrivals per day",
# ylim = c(0,max(daterange2[,1:ncol(daterange2)-1], na.rm = T)*1.2),
# las = 2, xlim=c(0,max(barp)+1), cex.lab = 1.5, yaxt = "n", xaxt ="n", border = NA)#,
# #border=NA
# #legend.text = colnames(daterange2[,1:ncol(daterange2)-1]),
# #args.legend = list(x ='topright', bty='n', inset=c(-0.2,0)), title = "Release Group")
# legend(x ='topleft', legend = colnames(daterange2)[1:ncol(daterange2)-1], fill= brewer.pal(n = rel_num, name = "Set1"), horiz = T, title = "Release")
# ybreaks <- if(max(daterange2[,1:ncol(daterange2)-1], na.rm = T) < 4) {max(daterange2[,1:ncol(daterange2)-1], na.rm = T)} else {5}
# xbreaks <- if(ncol(barp) > 10) {seq(1, ncol(barp), 2)} else {1:ncol(barp)}
# barpmeans <- colMeans(barp)
# axis(1, at = barpmeans[xbreaks], labels = rownames(daterange2[xbreaks,]), las = 2)
# axis(2, at = pretty(0:max(daterange2[,1:ncol(daterange2)-1], na.rm = T), ybreaks))
#
# par(new=T)
#
# plot(x = barpmeans, daterange2$parameter_value, yaxt = "n", xaxt = "n", ylab = "", xlab = "", col = "lightslateblue", type = "l", lwd=1.5, xlim=c(0,max(barp)+1), ylim = c(min(daterange2$parameter_value, na.rm = T), max(daterange2$parameter_value, na.rm=T)*1.1))#, ylab = "Returning adults", xlab= "Outmigration year", yaxt="n", col="red", pch=20)
# axis(side = 4)#, labels = c(2000:2016), at = c(2000:2016))
# mtext("Flow (cfs) at Bend Bridge", side=4, line=3, cex=1.5, col="lightslateblue")
## Recover Date from rownames for plotting, then split flow from arrival counts
daterange2$Date <- as.Date(row.names(daterange2))
daterange2_flow <- daterange2[,c("Date", "parameter_value")]
## Long format for stacked bars; "." holds the release-group label
daterange3 <- melt(daterange2[,!(names(daterange2) %in% c("parameter_value"))], id.vars = "Date", variable.name = ".")
## Secondary (right-hand) y-axis definition for the flow trace
ay <- list(
overlaying = "y",
nticks = 5,
color = "#947FFF",
side = "right",
title = "Flow (cfs) at Bend Bridge",
automargin = TRUE
)
## Legacy ggplot version, kept for reference:
# p <- ggplot(data = daterange3, aes(x = Date, y = value, color = ., fill = .)) +
# geom_bar(stat='identity') +
# ylab("Number of fish arrivals per day") +
# #xlim(c(as.Date("2021-02-01"), as.Date("2021-02-05"))) +
# #geom_line(data= daterange2_flow, aes(x = Date, y = parameter_value/500), color = alpha("#947FFF", alpha = 0.5))+
# #scale_x_date(date_breaks = "5 days") +
# #scale_y_continuous(name = "Number of fish arrivals per day",
# # Add a second axis and specify its features
# # sec.axis = sec_axis(~.*500, name="Second Axis")) +
# theme_bw() +
# theme(panel.border = element_rect(colour = "black", fill=NA))
## Reverse factor order so legend/stack ordering matches other report sections
daterange3$. <- factor(daterange3$., levels = sort(unique(daterange3$.), decreasing = T))
## Interactive figure: stacked arrival bars (y) + flow line (y2)
plot_ly(daterange3, width = 900, height = 600, dynamicTicks = TRUE) %>%
add_bars(x = ~Date, y = ~value, color = ~.) %>%
## Fake legend title via a paper-anchored annotation placed level with the legend
add_annotations( text="Release (click on legend items to isolate)", xref="paper", yref="paper",
x=0.01, xanchor="left",
y=1.056, yanchor="top", # Same y as legend below
legendtitle=TRUE, showarrow=FALSE ) %>%
add_lines(x=~daterange2_flow$Date, y=~daterange2_flow$parameter_value, line = list(color = alpha("#947FFF", alpha = 0.5)), yaxis="y2", showlegend=FALSE, inherit=FALSE) %>%
layout(yaxis2 = ay,showlegend = T,
barmode = "stack",
xaxis = list(title = "Date", mirror=T,ticks='outside',showline=T), yaxis = list(title = "Number of fish arrivals per day", mirror=T,ticks='outside',showline=T),
legend = list(orientation = "h",x = 0.34, y = 1.066),
margin=list(l = 50,r = 100,b = 50,t = 50)
)
}
## Section 4: arrival-statistics tables. For each receiver location, summarize the
## first/mean/last arrival time, number of unique fish, and percent of released fish
## detected -- first for all releases combined (4.1), then per release group (4.2).
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
if (nrow(detects_study[is.na(detects_study$DateTime_PST)==F,]) == 0){
"No detections yet"
} else {
## One row per (location, tag): the tag's first arrival at that location
arrivals <- aggregate(list(DateTime_PST = detects_study$DateTime_PST), by = list(general_location = detects_study$general_location, TagCode = detects_study$TagCode), FUN = min)
## Earliest arrival at each location
tag_stats <- aggregate(list(First_arrival = arrivals$DateTime_PST),
by= list(general_location = arrivals$general_location),
FUN = min)
## Mean arrival time at each location
tag_stats <- merge(tag_stats,
aggregate(list(Mean_arrival = arrivals$DateTime_PST),
by= list(general_location = arrivals$general_location),
FUN = mean),
by = c("general_location"))
## Latest arrival at each location
tag_stats <- merge(tag_stats,
aggregate(list(Last_arrival = arrivals$DateTime_PST),
by= list(general_location = arrivals$general_location),
FUN = max),
by = c("general_location"))
## Count of unique fish detected at each location
tag_stats <- merge(tag_stats,
aggregate(list(Fish_count = arrivals$TagCode),
by= list(general_location = arrivals$general_location),
FUN = function(x) {length(unique(x))}),
by = c("general_location"))
## Percent of the full study release reaching each location
## (study_tagcodes is created earlier in the document, one row per released fish)
tag_stats$Percent_arrived <- round(tag_stats$Fish_count/nrow(study_tagcodes) * 100,2)
tag_stats <- merge(tag_stats, unique(detects_study[,c("general_location", "river_km")]))
## Order upstream-to-downstream (decreasing river km)
tag_stats <- tag_stats[order(tag_stats$river_km, decreasing = T),]
## Render timestamps in PST for display
tag_stats[,c("First_arrival", "Mean_arrival", "Last_arrival")] <- format(tag_stats[,c("First_arrival", "Mean_arrival", "Last_arrival")], tz = "Etc/GMT+8")
tag_stats <- tag_stats[is.na(tag_stats$First_arrival)==F,]
print(kable(tag_stats, row.names = F,
caption = "4.1 Detections for all releases combined",
"html") %>%
kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"), full_width = F, position = "left"))
## Repeat the same summary for each release group separately
for (j in sort(unique(study_tagcodes$Release))) {
if(nrow(detects_study[detects_study$Release == j,]) > 0 ) {
temp <- detects_study[detects_study$Release == j,]
## First arrival per (location, tag) within this release group
arrivals1 <- aggregate(list(DateTime_PST = temp$DateTime_PST), by = list(general_location = temp$general_location, TagCode = temp$TagCode), FUN = min)
## Denominator: number of fish released in this group
rel_count <- nrow(study_tagcodes[study_tagcodes$Release == j,])
tag_stats1 <- aggregate(list(First_arrival = arrivals1$DateTime_PST),
by= list(general_location = arrivals1$general_location),
FUN = min)
tag_stats1 <- merge(tag_stats1,
aggregate(list(Mean_arrival = arrivals1$DateTime_PST),
by= list(general_location = arrivals1$general_location),
FUN = mean),
by = c("general_location"))
tag_stats1 <- merge(tag_stats1,
aggregate(list(Last_arrival = arrivals1$DateTime_PST),
by= list(general_location = arrivals1$general_location),
FUN = max),
by = c("general_location"))
tag_stats1 <- merge(tag_stats1,
aggregate(list(Fish_count = arrivals1$TagCode),
by= list(general_location = arrivals1$general_location),
FUN = function(x) {length(unique(x))}),
by = c("general_location"))
tag_stats1$Percent_arrived <- round(tag_stats1$Fish_count/rel_count * 100,2)
tag_stats1 <- merge(tag_stats1, unique(detects_study[,c("general_location", "river_km")]))
tag_stats1 <- tag_stats1[order(tag_stats1$river_km, decreasing = T),]
tag_stats1[,c("First_arrival", "Mean_arrival", "Last_arrival")] <- format(tag_stats1[,c("First_arrival", "Mean_arrival", "Last_arrival")], tz = "Etc/GMT+8")
tag_stats1 <- tag_stats1[is.na(tag_stats1$First_arrival)==F,]
final_stats <- kable(tag_stats1, row.names = F,
caption = paste("4.2 Detections for",j,"release groups", sep = " "),
"html")
print(kable_styling(final_stats, bootstrap_options = c("striped", "hover", "condensed", "responsive", "bordered"), full_width = F, position = "left"))
} else {
## No fish from this release group detected anywhere yet
cat("\n\n\\pagebreak\n")
print(paste("No detections for",j,"release group yet", sep=" "), quote = F)
cat("\n\n\\pagebreak\n")
}
}
}
| general_location | First_arrival | Mean_arrival | Last_arrival | Fish_count | Percent_arrived | river_km |
|---|---|---|---|---|---|---|
| MeridianBr | 2022-09-21 05:43:27 | 2022-09-21 05:43:27 | 2022-09-21 05:43:27 | 1 | 1.04 | 290.848 |
| TowerBridge | 2022-09-16 01:27:12 | 2022-10-01 08:45:33 | 2022-10-31 18:56:45 | 9 | 9.38 | 172.000 |
| I80-50_Br | 2022-09-16 01:07:38 | 2022-09-23 12:49:30 | 2022-10-03 04:41:44 | 6 | 6.25 | 170.748 |
| Holland_Cut_Quimby | 2022-10-03 07:52:13 | 2022-10-03 07:52:13 | 2022-10-03 07:52:13 | 1 | 1.04 | 145.000 |
| Old_River_Quimby | 2022-10-03 12:35:02 | 2022-10-03 12:35:02 | 2022-10-03 12:35:02 | 1 | 1.04 | 141.000 |
| Sac_BlwGeorgiana | 2022-09-20 08:24:03 | 2022-10-18 09:27:18 | 2022-11-25 16:38:46 | 4 | 4.17 | 119.058 |
| Sac_BlwGeorgiana2 | 2022-09-20 08:31:09 | 2022-10-15 10:32:13 | 2022-11-25 17:00:35 | 6 | 6.25 | 118.398 |
| Benicia_east | 2022-09-03 05:27:21 | 2022-09-21 20:34:55 | 2022-11-10 00:43:30 | 25 | 26.04 | 52.240 |
| Benicia_west | 2022-09-04 12:13:00 | 2022-09-22 21:05:36 | 2022-11-10 00:26:40 | 24 | 25.00 | 52.040 |
| general_location | First_arrival | Mean_arrival | Last_arrival | Fish_count | Percent_arrived | river_km |
|---|---|---|---|---|---|---|
| MeridianBr | 2022-09-21 05:43:27 | 2022-09-21 05:43:27 | 2022-09-21 05:43:27 | 1 | 1.04 | 290.848 |
| TowerBridge | 2022-09-16 01:27:12 | 2022-10-01 08:45:33 | 2022-10-31 18:56:45 | 9 | 9.38 | 172.000 |
| I80-50_Br | 2022-09-16 01:07:38 | 2022-09-23 12:49:30 | 2022-10-03 04:41:44 | 6 | 6.25 | 170.748 |
| Holland_Cut_Quimby | 2022-10-03 07:52:13 | 2022-10-03 07:52:13 | 2022-10-03 07:52:13 | 1 | 1.04 | 145.000 |
| Old_River_Quimby | 2022-10-03 12:35:02 | 2022-10-03 12:35:02 | 2022-10-03 12:35:02 | 1 | 1.04 | 141.000 |
| Sac_BlwGeorgiana | 2022-09-20 08:24:03 | 2022-10-18 09:27:18 | 2022-11-25 16:38:46 | 4 | 4.17 | 119.058 |
| Sac_BlwGeorgiana2 | 2022-09-20 08:31:09 | 2022-10-15 10:32:13 | 2022-11-25 17:00:35 | 6 | 6.25 | 118.398 |
| Benicia_east | 2022-09-03 05:27:21 | 2022-09-21 20:34:55 | 2022-11-10 00:43:30 | 25 | 26.04 | 52.240 |
| Benicia_west | 2022-09-04 12:13:00 | 2022-09-22 21:05:36 | 2022-11-10 00:26:40 | 24 | 25.00 | 52.040 |
## Day-by-location crosstab of new arrivals, restricted to days each receiver's
## beacon confirmed it was operating. Needs local files (beacon_by_day.csv,
## realtime_locs.csv), so it cannot run from ERDDAP data alone.
try(setwd(paste(file.path(Sys.getenv("USERPROFILE"),"Desktop",fsep="\\"), "\\Real-time data massaging\\products", sep = "")))
## THIS CODE CHUNK WILL NOT WORK IF USING ONLY ERDDAP DATA, REQUIRES ACCESS TO LOCAL FILES
if (nrow(detects_study[is.na(detects_study$DateTime_PST)==F,]) == 0){
"No detections yet"
} else {
## First arrival per (location, tag)
arrivals <- aggregate(list(DateTime_PST = detects_study$DateTime_PST), by = list(general_location = detects_study$general_location, TagCode = detects_study$TagCode), FUN = min)
## Beacon-tag detection log: which receivers were heard on which days
beacon_by_day <- fread("beacon_by_day.csv", stringsAsFactors = F)
beacon_by_day$day <- as.Date(beacon_by_day$day)
gen_locs <- read.csv("realtime_locs.csv", stringsAsFactors = F)
## Arrival day in PST
arrivals$day <- as.Date(format(arrivals$DateTime_PST, "%Y-%m-%d", tz = "Etc/GMT+8"))
## Count of newly arriving fish per day per location
arrivals_per_day <- aggregate(list(New_arrivals = arrivals$TagCode), by = list(day = arrivals$day, general_location = arrivals$general_location), length)
arrivals_per_day$day <- as.Date(arrivals_per_day$day)
## Now subset to only look at data for the correct beacon for that day
beacon_by_day <- as.data.frame(beacon_by_day[which(beacon_by_day$TagCode == beacon_by_day$beacon),])
## Table extends to now or 1.5x predicted tag life past release, whichever is sooner
## (NOTE: other sections use 1.0x tag life -- presumably a deliberate buffer here)
endtime <- min(as.Date(format(Sys.time(), "%Y-%m-%d")), max(as.Date(detects_study$release_time)+(as.numeric(detects_study$tag_life)*1.5)))
## Now only keep beacon by day for days since fish were released
beacon_by_day <- beacon_by_day[beacon_by_day$day >= as.Date(min(study_tagcodes$release_time)) & beacon_by_day$day <= endtime,]
## Map receiver locations to general locations and river km
beacon_by_day <- merge(beacon_by_day, gen_locs[,c("location", "general_location","rkm")], by = "location", all.x = T)
## Left join: every (location, operating day) gets a row; days a receiver was
## active but saw no new fish get New_arrivals = NA here
arrivals_per_day <- merge(unique(beacon_by_day[,c("general_location", "day", "rkm")]), arrivals_per_day, all.x = T, by = c("general_location", "day"))
arrivals_per_day$day <- factor(arrivals_per_day$day)
## Remove bench test and other NA locations
arrivals_per_day <- arrivals_per_day[!arrivals_per_day$general_location == "Bench_test",]
arrivals_per_day <- arrivals_per_day[is.na(arrivals_per_day$general_location) == F,]
## Change order of data to plot decreasing river_km
arrivals_per_day <- arrivals_per_day[order(arrivals_per_day$rkm, decreasing = T),]
arrivals_per_day$general_location <- factor(arrivals_per_day$general_location, unique(arrivals_per_day$general_location))
#
# ggplot(data=arrivals_per_day, aes(x=general_location, y=fct_rev(as_factor(day)))) +
# geom_tile(fill = "lightgray", color = "black") +
# geom_text(aes(label=New_arrivals)) +
# labs(x="General Location", y = "Date") +
# theme(panel.background = element_blank(), axis.text.x = element_text(angle = 90, hjust = 1))
crosstab <- xtabs(formula = arrivals_per_day$New_arrivals ~ arrivals_per_day$day + arrivals_per_day$general_location, addNA =T)
## Display convention (order matters): NA sums (receiver active, zero arrivals)
## are shown blank; true-zero cells (no receiver data for that day/location,
## filled with 0 by xtabs) are shown as "NA" in the rendered table.
crosstab[is.na(crosstab)] <- ""
crosstab[crosstab==0] <- NA
crosstab <- as.data.frame.matrix(crosstab)
#colnames(crosstab) <- c("Butte Br", "Tower Br", "I8050 Br", "Old River", "Middle River", "CVP Tanks", "Georg Slough1", "Sac_Blw Georg1", "Georg Slough2", "Sac_Blw Georg2", "Benicia East", "Benicia West")
## Scrollable HTML table, one column per location, one row per day
kable(crosstab, align = "c") %>%
kable_styling(c("striped", "condensed"), font_size = 11, full_width = F, position = "left") %>%
#row_spec(0, angle = -45) %>%
column_spec(column = 1:ncol(crosstab),width_min = "50px",border_left = T, border_right = T) %>%
column_spec(1, bold = T, width_min = "75px")%>%
scroll_box(height = "700px")
}
| Blw_Salt_RT | ButteBrRT | MeridianBr | TowerBridge | I80-50_Br | Old River | MiddleRiver | Clifton_Court_US_Radial_Gates | Holland_Cut_Quimby | CVP_Tank | CVP_Trash_Rack_1 | Clifton_Court_Intake_Canal | Old_River_Quimby | Sac_BlwGeorgiana | Sac_BlwGeorgiana2 | Benicia_east | Benicia_west | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 2022-08-03 | NA | ||||||||||||||||
| 2022-08-04 | NA | ||||||||||||||||
| 2022-08-05 | NA | ||||||||||||||||
| 2022-08-06 | NA | ||||||||||||||||
| 2022-08-07 | NA | ||||||||||||||||
| 2022-08-08 | NA | ||||||||||||||||
| 2022-08-09 | NA | ||||||||||||||||
| 2022-08-10 | NA | ||||||||||||||||
| 2022-08-11 | NA | ||||||||||||||||
| 2022-08-12 | NA | ||||||||||||||||
| 2022-08-13 | NA | ||||||||||||||||
| 2022-08-14 | NA | ||||||||||||||||
| 2022-08-15 | NA | ||||||||||||||||
| 2022-08-16 | NA | ||||||||||||||||
| 2022-08-17 | NA | ||||||||||||||||
| 2022-08-18 | NA | ||||||||||||||||
| 2022-08-19 | NA | ||||||||||||||||
| 2022-08-20 | NA | ||||||||||||||||
| 2022-08-21 | NA | ||||||||||||||||
| 2022-08-22 | NA | ||||||||||||||||
| 2022-08-23 | NA | ||||||||||||||||
| 2022-08-24 | NA | ||||||||||||||||
| 2022-08-25 | NA | ||||||||||||||||
| 2022-08-26 | NA | ||||||||||||||||
| 2022-08-27 | NA | ||||||||||||||||
| 2022-08-28 | NA | ||||||||||||||||
| 2022-08-29 | NA | ||||||||||||||||
| 2022-08-30 | NA | ||||||||||||||||
| 2022-08-31 | NA | ||||||||||||||||
| 2022-09-01 | NA | ||||||||||||||||
| 2022-09-02 | NA | ||||||||||||||||
| 2022-09-03 | NA | 1 | |||||||||||||||
| 2022-09-04 | NA | 1 | |||||||||||||||
| 2022-09-05 | NA | ||||||||||||||||
| 2022-09-06 | NA | ||||||||||||||||
| 2022-09-07 | NA | ||||||||||||||||
| 2022-09-08 | NA | 1 | |||||||||||||||
| 2022-09-09 | NA | 2 | 2 | ||||||||||||||
| 2022-09-10 | NA | 1 | 1 | ||||||||||||||
| 2022-09-11 | NA | 2 | 2 | ||||||||||||||
| 2022-09-12 | NA | 1 | |||||||||||||||
| 2022-09-13 | NA | 1 | 1 | ||||||||||||||
| 2022-09-14 | NA | 1 | 1 | ||||||||||||||
| 2022-09-15 | NA | 2 | 3 | ||||||||||||||
| 2022-09-16 | NA | 1 | 1 | ||||||||||||||
| 2022-09-17 | NA | NA | 2 | 1 | |||||||||||||
| 2022-09-18 | NA | 1 | 1 | NA | 1 | 1 | |||||||||||
| 2022-09-19 | NA | NA | |||||||||||||||
| 2022-09-20 | NA | 1 | 1 | NA | 1 | 1 | 1 | ||||||||||
| 2022-09-21 | NA | 1 | NA | 1 | 3 | ||||||||||||
| 2022-09-22 | NA | NA | |||||||||||||||
| 2022-09-23 | NA | NA | 1 | 1 | |||||||||||||
| 2022-09-24 | NA | NA | |||||||||||||||
| 2022-09-25 | NA | NA | |||||||||||||||
| 2022-09-26 | NA | 2 | 2 | NA | 1 | 1 | |||||||||||
| 2022-09-27 | NA | NA | 1 | ||||||||||||||
| 2022-09-28 | NA | NA | 1 | ||||||||||||||
| 2022-09-29 | NA | NA | 1 | 1 | |||||||||||||
| 2022-09-30 | NA | NA | 1 | 1 | |||||||||||||
| 2022-10-01 | NA | 1 | NA | ||||||||||||||
| 2022-10-02 | NA | NA | 1 | ||||||||||||||
| 2022-10-03 | NA | 1 | 1 | NA | 1 | 1 | |||||||||||
| 2022-10-04 | NA | NA | |||||||||||||||
| 2022-10-05 | NA | NA | 2 | 2 | |||||||||||||
| 2022-10-06 | NA | NA | |||||||||||||||
| 2022-10-07 | NA | NA | |||||||||||||||
| 2022-10-08 | NA | NA | |||||||||||||||
| 2022-10-09 | NA | NA | |||||||||||||||
| 2022-10-10 | NA | NA | |||||||||||||||
| 2022-10-11 | NA | NA | |||||||||||||||
| 2022-10-12 | NA | NA | |||||||||||||||
| 2022-10-13 | NA | NA | |||||||||||||||
| 2022-10-14 | NA | NA | |||||||||||||||
| 2022-10-15 | NA | NA | |||||||||||||||
| 2022-10-16 | NA | NA | 1 | ||||||||||||||
| 2022-10-17 | NA | 1 | NA | ||||||||||||||
| 2022-10-18 | NA | NA | |||||||||||||||
| 2022-10-19 | NA | NA | |||||||||||||||
| 2022-10-20 | NA | NA | |||||||||||||||
| 2022-10-21 | NA | NA | |||||||||||||||
| 2022-10-22 | NA | NA | |||||||||||||||
| 2022-10-23 | NA | NA | |||||||||||||||
| 2022-10-24 | NA | NA | |||||||||||||||
| 2022-10-25 | NA | NA | 1 | 1 | |||||||||||||
| 2022-10-26 | NA | NA | |||||||||||||||
| 2022-10-27 | NA | NA | NA | ||||||||||||||
| 2022-10-28 | NA | NA | NA | ||||||||||||||
| 2022-10-29 | NA | NA | NA | ||||||||||||||
| 2022-10-30 | NA | NA | NA | ||||||||||||||
| 2022-10-31 | NA | NA | 1 | NA | |||||||||||||
| 2022-11-01 | NA | NA | NA | ||||||||||||||
| 2022-11-02 | NA | NA | NA | ||||||||||||||
| 2022-11-03 | NA | NA | NA | ||||||||||||||
| 2022-11-04 | NA | NA | NA | ||||||||||||||
| 2022-11-05 | NA | NA | NA | 1 | 1 | ||||||||||||
| 2022-11-06 | NA | NA | NA | ||||||||||||||
| 2022-11-07 | NA | NA | NA | ||||||||||||||
| 2022-11-08 | NA | NA | NA | ||||||||||||||
| 2022-11-09 | NA | NA | NA | ||||||||||||||
| 2022-11-10 | NA | NA | NA | 1 | 1 | ||||||||||||
| 2022-11-11 | NA | NA | NA | ||||||||||||||
| 2022-11-12 | NA | NA | NA | ||||||||||||||
| 2022-11-13 | NA | NA | NA | ||||||||||||||
| 2022-11-14 | NA | NA | NA | ||||||||||||||
| 2022-11-15 | NA | NA | NA | ||||||||||||||
| 2022-11-16 | NA | NA | NA | ||||||||||||||
| 2022-11-17 | NA | NA | NA | ||||||||||||||
| 2022-11-18 | NA | NA | NA | ||||||||||||||
| 2022-11-19 | NA | NA | NA | ||||||||||||||
| 2022-11-20 | NA | NA | NA | ||||||||||||||
| 2022-11-21 | NA | NA | NA | ||||||||||||||
| 2022-11-22 | NA | NA | NA | ||||||||||||||
| 2022-11-23 | NA | NA | NA | ||||||||||||||
| 2022-11-24 | NA | NA | NA | ||||||||||||||
| 2022-11-25 | NA | NA | NA | 1 | 1 | ||||||||||||
| 2022-11-26 | NA | NA | NA | ||||||||||||||
| 2022-11-27 | NA | NA | NA | ||||||||||||||
| 2022-11-28 | NA | NA | NA | ||||||||||||||
| 2022-11-29 | NA | NA | NA | ||||||||||||||
| 2022-11-30 | NA | NA | NA | NA | |||||||||||||
| 2022-12-01 | NA | NA | NA | NA | |||||||||||||
| 2022-12-02 | NA | NA | NA | NA | |||||||||||||
| 2022-12-03 | NA | NA | NA | NA | |||||||||||||
| 2022-12-04 | NA | NA | NA | NA | |||||||||||||
| 2022-12-05 | NA | NA | NA | NA | |||||||||||||
| 2022-12-06 | NA | NA | NA | NA | |||||||||||||
| 2022-12-07 | NA | NA | NA | ||||||||||||||
| 2022-12-08 | NA | NA | NA | ||||||||||||||
| 2022-12-09 | NA | NA | NA | ||||||||||||||
| 2022-12-10 | NA | NA | NA | ||||||||||||||
| 2022-12-11 | NA | NA | NA | ||||||||||||||
| 2022-12-12 | NA | NA | NA | ||||||||||||||
| 2022-12-13 | NA | NA | NA | ||||||||||||||
| 2022-12-14 | NA | NA | NA | ||||||||||||||
| 2022-12-15 | NA | NA | NA | ||||||||||||||
| 2022-12-16 | NA | NA | NA | ||||||||||||||
| 2022-12-17 | NA | NA | NA | ||||||||||||||
| 2022-12-18 | NA | NA | NA | ||||||||||||||
| 2022-12-19 | NA | NA | NA | ||||||||||||||
| 2022-12-20 | NA | NA | NA | ||||||||||||||
| 2022-12-21 | NA | NA | NA | ||||||||||||||
| 2022-12-22 | NA | NA | NA | ||||||||||||||
| 2022-12-23 | NA | NA | NA | ||||||||||||||
| 2022-12-24 | NA | NA | NA | ||||||||||||||
| 2022-12-25 | NA | NA | NA | ||||||||||||||
| 2022-12-26 | NA | NA | NA | ||||||||||||||
| 2022-12-27 | NA | NA | NA | ||||||||||||||
| 2022-12-28 | NA | NA | NA | ||||||||||||||
| 2022-12-29 | NA | NA | NA | ||||||||||||||
| 2022-12-30 | NA | NA | NA | ||||||||||||||
| 2022-12-31 | NA | NA | NA | ||||||||||||||
| 2023-01-01 | NA | NA | NA | ||||||||||||||
| 2023-01-02 | NA | NA | NA | ||||||||||||||
| 2023-01-03 | NA | NA | NA | ||||||||||||||
| 2023-01-04 | NA | NA | NA | ||||||||||||||
| 2023-01-05 | NA | NA | NA | ||||||||||||||
| 2023-01-06 | NA | NA | NA | ||||||||||||||
| 2023-01-07 | NA | NA | NA | ||||||||||||||
| 2023-01-08 | NA | NA | NA | ||||||||||||||
| 2023-01-09 | NA | NA | NA | ||||||||||||||
| 2023-01-10 | NA | NA | NA | ||||||||||||||
| 2023-01-11 | NA | NA | NA | ||||||||||||||
| 2023-01-12 | NA | NA | NA | ||||||||||||||
| 2023-01-13 | NA | NA | NA | ||||||||||||||
| 2023-01-14 | NA | NA | NA | ||||||||||||||
| 2023-01-15 | NA | NA | NA | ||||||||||||||
| 2023-01-16 | NA | NA | NA | ||||||||||||||
| 2023-01-17 | NA | NA | NA | ||||||||||||||
| 2023-01-18 | NA | NA | NA | ||||||||||||||
| 2023-01-19 | NA | NA | NA | ||||||||||||||
| 2023-01-20 | NA | NA | NA | ||||||||||||||
| 2023-01-21 | NA | NA | NA | ||||||||||||||
| 2023-01-22 | NA | NA | NA | ||||||||||||||
| 2023-01-23 | NA | NA | NA | ||||||||||||||
| 2023-01-24 | NA | NA | NA | ||||||||||||||
| 2023-01-25 | NA | NA | NA | ||||||||||||||
| 2023-01-26 | NA | NA | NA | ||||||||||||||
| 2023-01-27 | NA | NA | NA | ||||||||||||||
| 2023-01-28 | NA | NA | NA | ||||||||||||||
| 2023-01-29 | NA | NA | NA | ||||||||||||||
| 2023-01-30 | NA | NA | NA | ||||||||||||||
| 2023-01-31 | NA | NA | NA | ||||||||||||||
| 2023-02-01 | NA | NA | NA | ||||||||||||||
| 2023-02-02 | NA | NA | NA | ||||||||||||||
| 2023-02-03 | NA | NA | NA | ||||||||||||||
| 2023-02-04 | NA | NA | NA | ||||||||||||||
| 2023-02-05 | NA | NA | |||||||||||||||
| 2023-02-06 | NA | NA | |||||||||||||||
| 2023-02-07 | NA | NA | |||||||||||||||
| 2023-02-08 | NA | NA | |||||||||||||||
| 2023-02-09 | NA | NA | |||||||||||||||
| 2023-02-10 | NA | NA | |||||||||||||||
| 2023-02-11 | NA | NA | |||||||||||||||
| 2023-02-12 | NA | NA | |||||||||||||||
| 2023-02-13 | NA | NA | |||||||||||||||
| 2023-02-14 | NA | NA | |||||||||||||||
| 2023-02-15 | NA | NA | |||||||||||||||
| 2023-02-16 | NA | NA | |||||||||||||||
| 2023-02-17 | NA | NA | |||||||||||||||
| 2023-02-18 | NA | NA | |||||||||||||||
| 2023-02-19 | NA | NA | |||||||||||||||
| 2023-02-20 | NA | NA | |||||||||||||||
| 2023-02-21 | NA | NA | |||||||||||||||
| 2023-02-22 | NA | NA | |||||||||||||||
| 2023-02-23 | NA | NA | |||||||||||||||
| 2023-02-24 | NA | NA | |||||||||||||||
| 2023-02-25 | NA | NA | |||||||||||||||
| 2023-02-26 | NA | NA | |||||||||||||||
| 2023-02-27 | NA | NA | |||||||||||||||
| 2023-02-28 | NA | NA | |||||||||||||||
| 2023-03-01 | NA | NA | |||||||||||||||
| 2023-03-02 | NA | NA | |||||||||||||||
| 2023-03-03 | NA | NA | |||||||||||||||
| 2023-03-04 | NA | NA |
## Housekeeping chunk: clear the report's workspace, then remove the temporary
## MARK input/output files that RMark wrote to the working directory during the
## survival-model runs earlier in the document.
rm(list = ls())
## RMark::cleanup() deletes orphaned mark*.inp/.out/.res/.vcv files without
## prompting. Use TRUE/FALSE, never the reassignable shorthand T/F.
cleanup(ask = FALSE)
For questions or comments, please contact cyril.michel@noaa.gov