library(tidyverse)
library(magrittr)
# Directory that holds the extracted per-COMID discharge CSV files.
head_dir <- "D:/Miller_etal_2018_discharge_data/02_extracted_csv"
# NHDPlus catchment identifier(s) of interest; add values for more sites.
COMID_ls <- data.frame(COMID = c(817165))
# Every file under head_dir, as full paths so they can be read directly.
Q_file_ls <- list.files(path = head_dir, full.names = TRUE, recursive = TRUE)
# Grab the file names that match the list of COMIDs for each site from the
# join tables. Each COMID is converted to character and matched with
# fixed = TRUE so it is treated as a literal substring, not as a regular
# expression (grep() would otherwise coerce the numeric pattern to a regex).
matching_Q_file_names <-
  Q_file_ls[unlist(lapply(X = as.character(COMID_ls$COMID),
                          FUN = function(id) {
                            grep(pattern = id, x = Q_file_ls, fixed = TRUE)
                          }))]
# If the COMID list has duplicates (or two COMIDs hit the same file),
# unique() drops the repeats so you're not downloading and extracting
# the same file multiple times.
matching_Q_file_names_nodup <- unique(matching_Q_file_names)
# Import the csv file for each COMID and stack them into one data frame.
dat <- matching_Q_file_names_nodup %>%
  lapply(read_csv) %>%
  bind_rows()
View(dat)
# DATA FILTERING: keep years 1990-2015 and months May-September; convert
# discharge units (cfs -> cms) and flag rows that fail the skew test for
# the confidence interval (ci_test == 1 when MEAN is outside P10..P90).
(dat_filter <- dat %>%
  filter(between(Year, 1990, 2015),
         between(Month, 5, 9)) %>%
  mutate(ci_test = if_else(P10 < MEAN & MEAN < P90, 0, 1),
         cmsQ = Estimated.Q * 0.0283168,  # 1 cfs = 0.0283168 cms
         cmsQkm2 = MEAN * 0.0283168) %>%  # 1 cms = 35.314666212661 cfs
  # USE THESE INSTEAD WHEN 'ci_test' CONTAINS 1S (median-based columns):
  # cmsQ = P50_Q * 0.0283168,  # 1 cfs = 0.0283168 cms
  # cmsQkm2 = P50 * 0.0283168) %>%  # 1 cms = 35.314666212661 cfs
  select(COMID, Year, Month, cmsQ, cmsQkm2, ci_test))
# Check the ci_test data to see if any 1s exist in the summary: a nonzero
# max or mean means some rows failed the P10 < MEAN < P90 interval test.
summary(dat_filter$ci_test)
# Write the filtered table into the data directory. file.path() supplies the
# path separator that paste0() omitted (which fused the output name onto the
# directory name and wrote into the parent folder), and `file =` replaces
# write_csv()'s deprecated `path =` argument.
write_csv(dat_filter,
          file = file.path(head_dir, "OUTPUT_CSV_SUMMARY_XX817165X_FILE.csv"))
