Introduction

Today, we will work with daily water temperature and air temperature data observed for 31 rivers in Spain. The goal of this tutorial is to identify the best model for predicting the maximum water temperature given the maximum air temperature. In the preview below, W represents the daily maximum water temperature, A represents the daily maximum air temperature, and L identifies the river (location). The dataset contains almost a full year of daily observations for each of the 31 rivers.
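
If you are following along, a minimal setup sketch might look like the code below; the packages tidyverse and modelr are used in the chunks that follow, and the file name "water_temperature.csv" is only a placeholder for wherever you saved the data.

library(tidyverse)   # ggplot2, dplyr, readr, tibble
library(modelr)      # add_predictions(), add_residuals()
DATA = read_csv("water_temperature.csv")   # hypothetical file name
head(DATA)                                 # produces the preview below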

## # A tibble: 6 × 8
##   JULIAN_DAY  YEAR     L     W     A  TIME MONTH   DAY
##        <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl>
## 1          1  2003   103  14.2  21.2     1     1     1
## 2          2  2003   103  14.4  16.8     2     1     2
## 3          3  2003   103  14.4  15.4     3     1     3
## 4          4  2003   103  10.9  10.8     4     1     4
## 5          5  2003   103  10.8  11.7     5     1     5
## 6          6  2003   103  10.7  12.4     6     1     6

Part 1: Examining the Relationship

Chunk 1: Overall Relationship

Please modify the code chunk to visualize the relationship between A and W with a scatterplot (set alpha=0.3) and a smoothed line.

ggplot(data=DATA) +
  geom_point(aes(x=A,y=W),alpha=0.3)+
  geom_smooth(aes(x=A,y=W)) +
  theme_minimal()
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

Chunk 2: Location-Specific Relationship

WAPLOT.func=function(Location){
  # Filter to a single river (L == Location), then plot W against A
  # with a scatterplot and a smoothed trend line.
  DATA %>% filter(L == Location) %>%
    ggplot()+
    geom_point(aes(x=A,y=W),alpha=0.3)+
    geom_smooth(aes(x=A,y=W)) +
    theme_minimal()
}

WAPLOT.func(103)
## `geom_smooth()` using method = 'loess' and formula 'y ~ x'

WAPLOT.func(105)
## `geom_smooth()` using method = 'loess' and formula 'y ~ x'

WAPLOT.func(918)
## `geom_smooth()` using method = 'loess' and formula 'y ~ x'

Chunk 3: Split Data into Train and Test Sets

set.seed(216)
# Randomly hold out 3 of the 31 locations for testing.
TEST.LOCATIONS=sample(x=unique(DATA$L),size=3,replace=F)

TRAIN = anti_join(DATA,tibble(L=TEST.LOCATIONS),by="L")  # rows from the 28 remaining locations
TEST = semi_join(DATA,tibble(L=TEST.LOCATIONS),by="L")   # rows from the 3 held-out locations
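
As a quick sanity check (not part of the original chunk), you can confirm which locations were held out and how the rows were divided between the two splits.

TEST.LOCATIONS                # the 3 held-out location codes
c(nrow(TRAIN), nrow(TEST))    # row counts for each split
n_distinct(TRAIN$L)           # should be 28 of the 31 locations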

Chunk 4: Plots of Relationship for Train and Test Data

WAPLOT2.func=function(DATA){
  # Same scatterplot-plus-smooth as above, applied to whichever data set is passed in.
  ggplot(data=DATA)+
    geom_point(aes(x=A,y=W),alpha=0.3)+
    geom_smooth(aes(x=A,y=W)) +
    theme_minimal()
}

WAPLOT2.func(TRAIN)
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

WAPLOT2.func(TEST)
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

Part 2: Linear Regression Model

Chunk 1: Fitting Linear Model to Train Data

linmod=lm(W~A,data=TRAIN)
summary(linmod)
## 
## Call:
## lm(formula = W ~ A, data = TRAIN)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -12.1495  -2.1024  -0.1857   1.8851  16.8637 
## 
## Coefficients:
##             Estimate Std. Error t value            Pr(>|t|)    
## (Intercept) 3.394990   0.087161   38.95 <0.0000000000000002 ***
## A           0.649422   0.004101  158.36 <0.0000000000000002 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 3.075 on 8866 degrees of freedom
##   (1371 observations deleted due to missingness)
## Multiple R-squared:  0.7388, Adjusted R-squared:  0.7388 
## F-statistic: 2.508e+04 on 1 and 8866 DF,  p-value: < 0.00000000000000022
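
For intuition, the fitted line is roughly W = 3.39 + 0.65*A, so a day with a maximum air temperature of 20 degrees Celsius gives a predicted maximum water temperature of about 16.4 degrees. The short sketch below reproduces that prediction by hand and with predict(); the value 20 is just an arbitrary example.

coef(linmod)                               # intercept ~ 3.39, slope ~ 0.65
3.394990 + 0.649422*20                     # manual prediction: about 16.4
predict(linmod, newdata=tibble(A=20))      # same prediction via predict()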

Chunk 2: Getting Predictions from Linear Model

TRAIN2 = TRAIN %>% add_predictions(linmod,var="linpred")
TEST2 = TEST %>% add_predictions(linmod,var="linpred")
TruevsPred = function(test_data, train_data, col_name){
  # Label each row by its split, stack the two data sets, and plot the
  # predictions in column col_name against the true W; the 45-degree line
  # marks perfect prediction.
  SPLIT = c(rep("TEST",dim(test_data)[1]),rep("TRAIN",dim(train_data)[1]))
  cbind(rbind(test_data,train_data),SPLIT) %>% 
    ggplot()+geom_point(aes(x=W,y=get(col_name),
                color=factor(SPLIT,levels=c("TRAIN","TEST"))),alpha=0.2) + 
    theme_minimal() + geom_abline(intercept=0,slope=1) +
    guides(color=guide_legend(title="Dataset")) + xlab("True Value") +
    ylab("Predicted Value Under MODEL")  
}
TruevsPred(TEST2, TRAIN2, 'linpred')

Chunk 3: Getting Residuals from Linear Model

TRAIN3 = TRAIN2 %>% add_residuals(linmod,var="linres")
TEST3 = TEST2 %>% add_residuals(linmod,var="linres")
MSEandMAE = function(test_data, col_name){
  # Compute the mean absolute error and mean squared error of the
  # residuals stored in column col_name, ignoring missing values.
  residual = unlist(test_data[col_name])
  MAE = mean(abs(residual),na.rm=T)
  MSE = mean((residual)^2,na.rm=T)
  result = matrix(c(MAE,MSE),1,2)
  colnames(result) = c("MAE","MSE")
  rownames(result) = c("Loss")
  return(result)
}
MSEandMAE(TEST3,'linres')
##           MAE      MSE
## Loss 2.750323 11.23319

Part 3: Polynomial Regression Model

Chunk 1: Fitting Polynomial Regression Models

poly2mod=lm(W~A+I(A^2),data=TRAIN)
poly3mod=lm(W~A+I(A^2)+I(A^3),data=TRAIN)
poly4mod=lm(W~A+I(A^2)+I(A^3)+I(A^4),data=TRAIN)
anova(linmod,poly2mod,poly3mod,poly4mod,test="Chisq")
## Analysis of Variance Table
## 
## Model 1: W ~ A
## Model 2: W ~ A + I(A^2)
## Model 3: W ~ A + I(A^2) + I(A^3)
## Model 4: W ~ A + I(A^2) + I(A^3) + I(A^4)
##   Res.Df   RSS Df Sum of Sq              Pr(>Chi)    
## 1   8866 83855                                       
## 2   8865 83553  1    302.01         0.00000001284 ***
## 3   8864 82840  1    713.09 < 0.00000000000000022 ***
## 4   8863 82730  1    109.48             0.0006152 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Chunk 2: Getting Predictions from Polynomial Models

TRAIN4 = TRAIN3 %>% 
  add_predictions(poly2mod,var="poly2pred") %>%
  add_predictions(poly3mod,var="poly3pred") %>%
  add_predictions(poly4mod,var="poly4pred")
  
TEST4 = TEST3 %>% 
  add_predictions(poly2mod,var="poly2pred") %>%
  add_predictions(poly3mod,var="poly3pred") %>%
  add_predictions(poly4mod,var="poly4pred")
TruevsPred(TEST4, TRAIN4, 'poly2pred') 

TruevsPred(TEST4, TRAIN4, 'poly3pred') 

TruevsPred(TEST4, TRAIN4, 'poly4pred') 

Chunk 3: Getting Residuals from Polynomial Models

TRAIN5 = TRAIN4 %>% 
  add_residuals(poly2mod,var="poly2res") %>%
  add_residuals(poly3mod,var="poly3res") %>%
  add_residuals(poly4mod,var="poly4res")

TEST5 = TEST4 %>% 
  add_residuals(poly2mod,var="poly2res") %>%
  add_residuals(poly3mod,var="poly3res") %>%
  add_residuals(poly4mod,var="poly4res")
MSEandMAE(TEST5,'poly2res')
##           MAE      MSE
## Loss 2.732399 11.18813
MSEandMAE(TEST5,'poly3res')
##           MAE     MSE
## Loss 2.706833 11.0815
MSEandMAE(TEST5,'poly4res')
##           MAE      MSE
## Loss 2.715366 11.14699

Intermission:

The function save.image() in R can be used to save all objects in the global environment. This is very helpful when you want to build on your results without rerunning all of the previous R code. The exported file name should end with the extension .Rdata. These files can be extremely large, depending on how much memory your R session used. The function load() can be used to import a previously saved workspace.

For more information on the .Rdata file type, see https://fileinfo.com/extension/rdata.

save.image("Tutorial.Rdata")
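
In a later session, a minimal sketch for restoring the saved workspace looks like this; ls() simply lists the objects that were brought back.

load("Tutorial.Rdata")
ls()    # e.g. DATA, TRAIN5, TEST5, linmod, poly2mod, poly3mod, poly4mod, ...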