#This script was developed by Gengxin (Michael) Ou on 2/18/2015
#It requires the packages dataRetrieval, hydroGOF and ggplot2.
#The purpose of this script is to plot the SWAT results and compare them with USGS data.
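#One-time setup (assumed here, not part of the original workflow): install any of
#the required packages that are still missing before loading them.
pkgs <- c("dataRetrieval", "hydroGOF", "ggplot2")
newpkgs <- setdiff(pkgs, rownames(installed.packages()))
if (length(newpkgs) > 0) install.packages(newpkgs)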
#load the packages
library("dataRetrieval")
library("ggplot2")
library("hydroGOF")
beginDate <- as.Date("2006-01-01")
endDate <- as.Date("2008-12-31")
##################################### USGS Streamflow ####################################
#processing streamflow data
obsites <- c("07325840","07325850")
runoff.obs<-readNWISdv(obsites, "00060","2006-01-01", "2008-12-31")
runoff.site<-readNWISsite(obsites)
#convert from cfs to cms (1 ft = 0.3048 m)
runoff.obs$X_00060_00003 <- runoff.obs$X_00060_00003 * 0.3048^3
head(runoff.obs)
str(runoff.obs)
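#Optional: dataRetrieval also provides renameNWISColumns(), which replaces the raw
#value-column names (e.g. X_00060_00003) with friendlier ones such as Flow. It is
#only previewed here because the rest of this script keeps the raw names.
head(renameNWISColumns(runoff.obs))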
##################################### SWAT Streamflow ####################################
setwd("D:\\@Workspace\\SWAT_Tutorial\\Map\\SWAT\\Scenarios\\Default\\TxtInOut")
rchs <- c(3,10)
#rch
rchwidth<-c(-5,5,-27,rep(12, 4))
swatrch <- read.fwf(file="output.rch", widths = rchwidth, header = FALSE, skip = 9)
#columns kept from output.rch: RCH, FLOW_INcms, FLOW_OUTcms, EVAPcms, TLOSScms
names(swatrch) <- c("rch", "flowin", "flowout", "evap", "tloss")
nrch <- max(swatrch$rch)
ndays <- nrow(swatrch)/nrch
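#Sanity check (optional; assumes output.rch covers exactly beginDate..endDate with
#no warm-up years skipped): the recovered day count should match the period length.
stopifnot(ndays == length(seq.Date(beginDate, endDate, by = "days")))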
swatrch$date <- rep(seq.Date(beginDate, endDate, by = "days"), each = nrch)
head(swatrch)
swatrch <- subset(swatrch, rch %in% rchs)
runoff <- data.frame(Flow = c(swatrch$flowout, runoff.obs$X_00060_00003),
                     Site = c(rep(obsites, ndays), runoff.obs$site_no),
                     Date = c(swatrch$date, runoff.obs$Date),
                     Model = rep(c("Simulated", "Observed"), each = ndays*2))
p.q0 <- ggplot(data = runoff, aes(x = Date, y = Flow, color = Model)) +
  geom_line(aes(linetype = Model)) + facet_grid(Site ~ .) +
  scale_y_continuous("Discharge (cms)")
ggsave(p.q0, file="p.q0.png", width=8, height=6,dpi=600)
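#Goodness-of-fit sketch using hydroGOF (loaded above but otherwise unused). It
#assumes reach 3 pairs with gage 07325840, mirroring the ordering of rchs and
#obsites above; adjust the pairing if your reach/gage mapping differs.
sim <- subset(swatrch, rch == rchs[1])[, c("date", "flowout")]
obs <- subset(runoff.obs, site_no == obsites[1])[, c("Date", "X_00060_00003")]
names(obs) <- c("date", "obsflow")
qpair <- merge(sim, obs, by = "date")
NSE(sim = qpair$flowout, obs = qpair$obsflow)   #Nash-Sutcliffe efficiency
gof(sim = qpair$flowout, obs = qpair$obsflow)   #full table of fit statistics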
#This script was developed by Gengxin (Michael) Ou on 2/18/2015
#It requires the packages dataRetrieval and ggplot2.
#The purpose of this script is to pre-process the USGS streamflow data and the ARS and Mesonet weather data.
#The ARS and Mesonet weather data need to be downloaded from their websites:
# http://ars.mesonet.org/webrequest/
# http://www.mesonet.org/index.php/weather/daily_data_retrieval
#load the packages
library("dataRetrieval")
library("ggplot2")
##################################### ARS Micronet ####################################
#read and combine the ars data
# STID - station ID -- required
# DM - day of the month (CST) -- required
# RAINt - total rainfall (mm); See Notes
# SRADt - total solar radiation (MJ/m^2)
# RELH(a/x/n) - (ave/max/min) relative humidity at 1.5 m (%)
# TAIR(a/x/n) - (ave/max/min) air temperature at 1.5 m (deg C)
# TS05(a/x/n) - (ave/max/min) soil temperature at 05 cm depth (deg C)
# TS10(a/x/n) - (ave/max/min) soil temperature at 10 cm depth (deg C)
# TS15(a/x/n) - (ave/max/min) soil temperature at 15 cm depth (deg C)
# TS25(a/x/n) - (ave/max/min) soil temperature at 25 cm depth (deg C)
# TS30(a/x/n) - (ave/max/min) soil temperature at 30 cm depth (deg C)
# TS45(a/x/n) - (ave/max/min) soil temperature at 45 cm depth (deg C)
# VW05(a/x/n) - (ave/max/min) volumetric water content at 5 cm depth (water fraction by volume)
# VW25(a/x/n) - (ave/max/min) volumetric water content at 25 cm depth (water fraction by volume)
# VW45(a/x/n) - (ave/max/min) volumetric water content at 45 cm depth (water fraction by volume)
# SKIN(a/x/n) - (ave/max/min) skin temperature (deg C) [available at a select number of Little Washita sites only]
# g - good (passed QC tests - extremely low probability of error)
# S - suspect (QC tests indicate some suspicion - low probability of error)
# W - warning (several QC test failures - high probability of error)
# F - failure (known error)
# M - missing (data not received)
# I - ignore
# N - not installed
# U - unknown
beginDate <- as.Date("2006-01-01")
endDate <- as.Date("2008-12-31")
sites<-c("F102","F106","F107","F110","F112")
mon <- seq.Date(beginDate, endDate, by = "months")
#set your working directory
setwd("D:\\!Cloud\\OneDrive\\!@postdoc\\SWAT_Model_Tutorial\\ars_data")
n <- 0
for (site in sites){
  for (file in paste0(format(mon, format="%Y%m"), site, ".txt")) {
    if (file.exists(file)){
      ars <- read.table(file, skip = 4, header = TRUE)
      #build the full date from the yyyymm part of the file name and the day-of-month column
      ars$DM <- as.Date(paste0(substr(file, 1, 6), ars$DM), format = "%Y%m%d")
      n <- n + 1
      if (n > 1) {
        #the monthly files do not always share the same columns, so pad both
        #data frames with NA columns before stacking them
        if (length(colnames(arsall)) != length(colnames(ars))){
          colName <- union(colnames(arsall), colnames(ars))
          #for the new file
          newCol <- setdiff(colName, colnames(ars))
          ars[newCol] <- NA
          #for the combined data so far
          newCol <- setdiff(colName, colnames(arsall))
          arsall[newCol] <- NA
        }
        arsall <- rbind(arsall, ars)
      } else {
        arsall <- ars
      }
    }
  }
}
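#Optional sanity check (assumption: every station should cover each day of the
#simulation period): count records and missing days per station before writing
#the SWAT input files below.
alldays <- seq.Date(beginDate, endDate, by = "days")
table(arsall$STID)                                   #records per station
sapply(split(arsall$DM, arsall$STID),
       function(d) sum(!alldays %in% d))             #missing days per station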
setwd("D:\\!Cloud\\OneDrive\\!@postdoc\\SWAT_Model_Tutorial")
arsall <-read.csv("arsall.csv")
head(arsall)
write.csv(arsall, "arsall.csv")
rh <- data.frame(Date = rep(arsall$DM, 3),
                 Site = rep(arsall$STID, 3),
                 RH = c(arsall$RELHa, arsall$RELHn, arsall$RELHx),
                 Legend = rep(c("Average", "Minimum", "Maximum"), each = nrow(arsall)))
p.rh <- ggplot(data = subset(rh, RH > -90), aes(x = Date, y = RH, color = Legend)) +
  geom_line() +
  facet_grid(Site ~ .) +
  scale_y_continuous("Relative Humidity, %")
ggsave(p.rh, file="p.rh.png", width=8, height=8,dpi=600)
vw <- data.frame(Date = rep(arsall$DM, 3),
                 Site = rep(arsall$STID, 3),
                 Moisture = c(arsall$VW05a, arsall$VW25a, arsall$VW45a),
                 Depth = rep(c("05 cm", "25 cm", "45 cm"), each = nrow(arsall)))
head(vw)
p.vw <- ggplot(data = subset(vw, Moisture > -90), aes(x = Date, y = Moisture, color = Depth)) +
  geom_line() + facet_grid(Site ~ .) +
  scale_y_continuous("Soil Moisture (water fraction by volume)")
ggsave(p.vw, file="p.vw.png", width=8, height=8,dpi=600)
TAIR <- data.frame(Date = rep(arsall$DM, 2),
                   Site = rep(arsall$STID, 2),
                   Temperature = c(arsall$TAIRn, arsall$TAIRx),
                   Legend = rep(c("Minimum", "Maximum"), each = nrow(arsall)))
p.tair <- ggplot(data = subset(TAIR, Temperature > -90), aes(x = Date, y = Temperature, color = Legend)) +
  geom_line() + facet_grid(Site ~ .) +
  scale_y_continuous("Temperature (deg C)")
ggsave(p.tair, file="p.tair.png", width=8, height=8,dpi=600)
#A -99.0 value is used to fill in skipped daily data and to fill in measured
#climate records so that all records have the same starting and ending date.
for (j in 1:ncol(arsall)){
  if (is.numeric(arsall[,j])){
    #use which() so the NA-padded columns added above do not break the assignment,
    #then flag both sentinel values and missing entries as -99
    arsall[which(arsall[,j] <= -99), j] <- -99
    arsall[is.na(arsall[,j]), j] <- -99
  }
}
#Convert average relative humidity from percent to fraction (the SWAT table expects a fraction)
arsall$RELHa[arsall$RELHa > -99] <- arsall$RELHa[arsall$RELHa > -99] / 100
arssite <- read.csv("geoars.csv", header = FALSE, skip = 34)
names(arssite) <- c("STNM", "STID", "NAME", "CITY", "RANG", "CDIR", "CNTY","LAT", "LON", "ELEV","CDIV","CLAS", "SUBC", "DATC", "DATD")
head(arssite)
arssite <- arssite[toupper(arssite$STID) %in% toupper(sites),]
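#The gage tables below pair arssite rows with the sites vector by position, so
#(as an extra safeguard, not in the original script) reorder the metadata rows
#to match the order of sites.
arssite <- arssite[match(toupper(sites), toupper(arssite$STID)), ]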
### Precipitation Gage Location Table
#Field Name Field Format Definition
#ID Integer Gage identification number (not used by interface)
#NAME String max 8 chars Corresponding table name string
#LAT Floating point Latitude in decimal degrees
#LONG Floating point Longitude in decimal degrees
#ELEVATION integer Elevation of rain gage (m)
nsite = length(sites)
for (w in c("t", "s", "r", "p")){
  write.table(data.frame(ID = 1:nsite,
                         NAME = paste0(w, sites),
                         LAT = arssite$LAT,
                         LONG = arssite$LON,
                         ELEVATION = arssite$ELEV),
              paste0(w, "gage.txt"),
              col.names = TRUE,
              row.names = FALSE,
              quote = FALSE,
              sep = ",")
}
### Daily Precipitation Data Table
#First yyyymmdd string Starting day of precipitation
#All other lines Floating point, free format, Amount of precipitation (mm)
for (site in sites){
  file <- paste0("p", site, ".txt")
  write(format(beginDate, "%Y%m%d"), file)
  write(arsall$RAINt[arsall$STID == toupper(site)], file, append = TRUE, sep = "\n")
}
### Temperature Gage Location Table
#Field Name Field Format Definition
#ID Integer Gage identification number (not used by interface)
#NAME String max 8 chars Corresponding table name string
#LAT Floating point Latitude in decimal degrees
#LONG Floating point Longitude in decimal degrees
#ELEVATION integer Elevation of temperature gage (m)
### Temperature Data Table
#Line Field format Definition
#First yyyymmdd string Starting day of data
#All other lines floating point, floating point: string numbers use comma to separate values Daily maximum, daily minimum temperature (C)
for (site in sites){
  file <- paste0("t", site, ".txt")
  write(format(beginDate, "%Y%m%d"), file)
  write.table(arsall[arsall$STID == toupper(site), c("TAIRx", "TAIRn")],
              file,
              append = TRUE,
              row.names = FALSE,
              col.names = FALSE,
              sep = ",")
}
### Solar Radiation, Wind Speed, or Relative Humidity Gage Location Table
#Field Name Field Format Definition
#ID Integer Gage identification number (not used by interface)
#NAME String max 8 chars Corresponding table name string
#LAT Floating point Latitude in decimal degrees
#LONG Floating point Longitude in decimal degrees
#ELEVATION integer Elevation of solar/wind/RH gage (m)
### Solar Radiation Data Table
#Line Field format Definition
#First yyyymmdd string Starting day of data
#All other lines floating point string number Daily solar radiation (MJ/m2/day)
for (site in sites){
  file <- paste0("s", site, ".txt")
  write(format(beginDate, "%Y%m%d"), file)
  write.table(arsall[arsall$STID == toupper(site), c("SRADt")],
              file,
              append = TRUE,
              row.names = FALSE,
              col.names = FALSE,
              sep = ",")
}
### Relative Humidity Data Table
#Line Field format Definition
#First yyyymmdd string Starting day of data
#All other lines floating point string number Daily relative humidity (fraction)
for (site in sites){
  file <- paste0("r", site, ".txt")
  write(format(beginDate, "%Y%m%d"), file)
  write.table(arsall[arsall$STID == toupper(site), c("RELHa")],
              file,
              append = TRUE,
              row.names = FALSE,
              col.names = FALSE,
              sep = ",")
}
##################################### Mesonet ####################################
### Wind Speed Data Table
#Line Field format Definition
#First yyyymmdd string Starting day of data
#All other lines floating point string number Daily average wind speed (m/s)
#Mesonet geoinfo record for the HINT (Hinton) station, Caddo County (the full record
#also carries soil water-retention and texture attributes not used here):
#  STNM 45, STID HINT, NLAT 35.48439, ELON -98.48151, ELEV 493 m
write.table(data.frame(ID = 1,
                       NAME = "wHINT",
                       LAT = 35.48439,
                       LONG = -98.48151,
                       ELEVATION = 493),
            "wgage.txt",
            col.names = TRUE,
            row.names = FALSE,
            quote = FALSE,
            sep = ",")
meso <- read.csv("mesonet_hint.csv")
head(meso)
w99 <- meso$WSPD > -99
meso$WSPD[w99] = meso$WSPD[w99]*0.4470389 #convert from MPH to MPS
meso$WSPD[! w99] = -99
file <- "wHINT.txt"
write(format(beginDate, "%Y%m%d"), file)
write.table(meso$WSPD,
            file,
            append = TRUE,
            row.names = FALSE,
            col.names = FALSE,
            sep = ",")
##################################### USGS Streamflow ####################################
#processing streamflow data
obsites <- c("07325840","07325850")
runoff.obs<-readNWISdv(obsites, "00060","2006-01-01", "2008-12-31")
runoff.site<-readNWISsite(obsites)
#convert from cfs to cms (1 ft = 0.3048 m)
runoff.obs$X_00060_00003 <- runoff.obs$X_00060_00003 * 0.3048^3
p.q <- ggplot(data = runoff.obs, aes(x = Date, y = X_00060_00003)) +
  geom_line() + facet_grid(site_no ~ .) +
  scale_y_continuous("Observed discharge (cms)")
ggsave(p.q, file="p.q.png", width=8, height=6,dpi=600)
i <- 0
for (site in obsites){
  i <- i + 1
  roff <- subset(runoff.obs, site_no == site)[c("Date", "X_00060_00003")]
  #prefix each date with Q<gage index> so every record gets a unique label
  roff$Date <- paste0("Q", i, format(roff$Date, "%Y%m%d"))
  write.table(roff[c("Date", "X_00060_00003")],
              paste0("runoff", site, ".txt"),
              append = FALSE,
              row.names = TRUE,
              col.names = TRUE,
              sep = "\t",
              quote = FALSE)
}