Skip to content

Commit

Permalink
finished 2b run models, ready to run
Browse files Browse the repository at this point in the history
  • Loading branch information
brianstock committed Oct 4, 2017
1 parent ed92e84 commit ae6a278
Show file tree
Hide file tree
Showing 6 changed files with 735 additions and 20 deletions.
22 changes: 18 additions & 4 deletions 2a_process_survey.Rmd
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ dat$TOTAL <- HAUL$vertebrate_weight_kg
dat$TOTAL[which(is.na(dat$TOTAL))] <- 0 # replace NA with 0
```

```{r}
```{r message=FALSE}
# Add catch of each species by haul (takes a couple min)
library(dplyr)
dat$YEYE <- dat$DBRK <- dat$PHLB <- 0
Expand All @@ -94,14 +94,21 @@ for(i in 1:n.hauls){
}
# Sort hauls chronologically by haul date
dat <- dat[order(dat$DATE),]
# Add DAY covariate: days elapsed since Jan 1 of the haul's year.
# NOTE(review): this is 0-based (Jan 1 -> 0), i.e. "day of year" minus 1;
# harmless here because DAY is mean-centered before model fitting.
dat$DAY <- as.numeric(dat$DATE - as.Date(paste0(dat$YEAR,"-01-01")))
# Add binomial (presence/absence) catch columns: 1 if the haul caught any
# of the species, 0 otherwise. PHLB = Pacific halibut; DBRK and YEYE are
# presumably darkblotched and yelloweye rockfish -- confirm against the
# species-matching loop above. which() skips NAs, so NA catches stay 0.
dat$DBRK_01 <- dat$PHLB_01 <- dat$YEYE_01 <- 0
dat$DBRK_01[which(dat$DBRK>0)] <- 1
dat$PHLB_01[which(dat$PHLB>0)] <- 1
dat$YEYE_01[which(dat$YEYE>0)] <- 1
```

### Add SST covariate
- `SST`: daily sea surface temperature anomalies (in degC)

Download daily sea surface temperature anomalies (.nc files) for 2003-2013 from: https://www.esrl.noaa.gov/psd/data/gridded/data.noaa.oisst.v2.highres.html

```{r eval=TRUE, echo=TRUE, results='hide'}
```{r eval=TRUE, echo=TRUE, message=FALSE, results='hide'}
# function to get SST daily anomaly at the DATE/LON/LAT for each haul
# uses bilinear interpolation from nearest gridpoints
library(ncdf4)
Expand Down Expand Up @@ -164,7 +171,7 @@ dat <- dat[-which(is.na(dat$SST)),]

*Note:* The Rockfish Conservation Area (RCA) boundaries have changed by month, year, latitude, and depth. We have prepared `rca_boundaries.csv` using historical RCA boundaries. For more details, see the [RCA webpage](http://www.westcoast.fisheries.noaa.gov/fisheries/management/groundfish_closures/rockfish_areas.html).

```{r}
```{r message=FALSE}
library(tidyr)
# Get historical RCA boundary limits
Expand Down Expand Up @@ -217,12 +224,19 @@ dat$logDEPTH <- log(dat$DEPTH)
# Center/de-mean each covariate
dat$sst <- dat$SST
# Center a numeric vector by subtracting its arithmetic mean.
# Returns a vector of the same length with mean zero.
demean <- function(vec) {
  vec - mean(vec)
}
dat[,c("logDEPTH","sst")] <- apply(dat[,c("logDEPTH","sst")],2,demean)
dat[,c("DAY","logDEPTH","sst")] <- apply(dat[,c("DAY","logDEPTH","sst")],2,demean)
# Create squared (quadratic) covariates from the centered versions,
# so models can capture dome-shaped responses to SST and depth
dat$sst2 <- dat$sst^2
dat$logDEPTH2 <- dat$logDEPTH^2
# Turn categorical variables into factors for model fitting:
# YEAR, the 0/1 presence-absence columns, and the in-RCA indicator
dat$YEAR <- as.factor(dat$YEAR)
dat$DBRK_01 <- as.factor(dat$DBRK_01)
dat$PHLB_01 <- as.factor(dat$PHLB_01)
dat$YEYE_01 <- as.factor(dat$YEYE_01)
dat$inRCA <- as.factor(dat$inRCA)
# Data are ready to fit.
# NOTE(review): absolute, machine-specific path -- will fail on other
# machines; consider a relative path or here::here() equivalent.
save(dat, file="/home/brian/Documents/Bycatch/WCGOP/data/wcann_processed.RData")
```
30 changes: 18 additions & 12 deletions 2a_process_survey.html
Original file line number Diff line number Diff line change
Expand Up @@ -134,16 +134,8 @@ <h3>Combine <code>HAUL</code> and <code>CATCH</code></h3>
dat$TOTAL &lt;-<span class="st"> </span>HAUL$vertebrate_weight_kg
dat$TOTAL[<span class="kw">which</span>(<span class="kw">is.na</span>(dat$TOTAL))] &lt;-<span class="st"> </span><span class="dv">0</span> <span class="co"># replace NA with 0</span></code></pre></div>
<div class="sourceCode"><pre class="sourceCode r"><code class="sourceCode r"><span class="co"># Add catch of each species by haul (takes a couple min)</span>
<span class="kw">library</span>(dplyr)</code></pre></div>
<pre><code>##
## Attaching package: 'dplyr'</code></pre>
<pre><code>## The following objects are masked from 'package:stats':
##
## filter, lag</code></pre>
<pre><code>## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union</code></pre>
<div class="sourceCode"><pre class="sourceCode r"><code class="sourceCode r">dat$YEYE &lt;-<span class="st"> </span>dat$DBRK &lt;-<span class="st"> </span>dat$PHLB &lt;-<span class="st"> </span><span class="dv">0</span>
<span class="kw">library</span>(dplyr)
dat$YEYE &lt;-<span class="st"> </span>dat$DBRK &lt;-<span class="st"> </span>dat$PHLB &lt;-<span class="st"> </span><span class="dv">0</span>
for(i in <span class="dv">1</span>:n.hauls){
<span class="co"># get all species caught in the ith haul</span>
cur_haul &lt;-<span class="st"> </span><span class="kw">filter</span>(CATCH, trawl_id==dat$HAUL_ID[i])
Expand All @@ -152,7 +144,14 @@ <h3>Combine <code>HAUL</code> and <code>CATCH</code></h3>
if(<span class="st">&quot;Pacific halibut&quot;</span> %in%<span class="st"> </span>cur_haul$common_name) dat$PHLB[i] &lt;-<span class="st"> </span><span class="kw">as.numeric</span>(dplyr::<span class="kw">filter</span>(cur_haul,common_name==<span class="st">&quot;Pacific halibut&quot;</span>) %&gt;%<span class="st"> </span>dplyr::<span class="kw">select</span>(total_catch_wt_kg))
}
<span class="co"># Order by date</span>
dat &lt;-<span class="st"> </span>dat[<span class="kw">order</span>(dat$DATE),]</code></pre></div>
dat &lt;-<span class="st"> </span>dat[<span class="kw">order</span>(dat$DATE),]
<span class="co"># Add DAY covariate: day of the year</span>
dat$DAY &lt;-<span class="st"> </span><span class="kw">as.numeric</span>(dat$DATE -<span class="st"> </span><span class="kw">as.Date</span>(<span class="kw">paste0</span>(dat$YEAR,<span class="st">&quot;-01-01&quot;</span>)))
<span class="co"># Add binomial catch columns</span>
dat$DBRK_01 &lt;-<span class="st"> </span>dat$PHLB_01 &lt;-<span class="st"> </span>dat$YEYE_01 &lt;-<span class="st"> </span><span class="dv">0</span>
dat$DBRK_01[<span class="kw">which</span>(dat$DBRK&gt;<span class="dv">0</span>)] &lt;-<span class="st"> </span><span class="dv">1</span>
dat$PHLB_01[<span class="kw">which</span>(dat$PHLB&gt;<span class="dv">0</span>)] &lt;-<span class="st"> </span><span class="dv">1</span>
dat$YEYE_01[<span class="kw">which</span>(dat$YEYE&gt;<span class="dv">0</span>)] &lt;-<span class="st"> </span><span class="dv">1</span></code></pre></div>
</div>
<div id="add-sst-covariate" class="section level3">
<h3>Add SST covariate</h3>
Expand Down Expand Up @@ -273,12 +272,19 @@ <h3>Standardize covariates</h3>
<span class="co"># Center/de-mean each covariate</span>
dat$sst &lt;-<span class="st"> </span>dat$SST
demean &lt;-<span class="st"> </span>function(vec){ <span class="kw">return</span>(vec -<span class="st"> </span><span class="kw">mean</span>(vec))}
dat[,<span class="kw">c</span>(<span class="st">&quot;logDEPTH&quot;</span>,<span class="st">&quot;sst&quot;</span>)] &lt;-<span class="st"> </span><span class="kw">apply</span>(dat[,<span class="kw">c</span>(<span class="st">&quot;logDEPTH&quot;</span>,<span class="st">&quot;sst&quot;</span>)],<span class="dv">2</span>,demean)
dat[,<span class="kw">c</span>(<span class="st">&quot;DAY&quot;</span>,<span class="st">&quot;logDEPTH&quot;</span>,<span class="st">&quot;sst&quot;</span>)] &lt;-<span class="st"> </span><span class="kw">apply</span>(dat[,<span class="kw">c</span>(<span class="st">&quot;DAY&quot;</span>,<span class="st">&quot;logDEPTH&quot;</span>,<span class="st">&quot;sst&quot;</span>)],<span class="dv">2</span>,demean)

<span class="co"># Create squared covariates</span>
dat$sst2 &lt;-<span class="st"> </span>dat$sst^<span class="dv">2</span>
dat$logDEPTH2 &lt;-<span class="st"> </span>dat$logDEPTH^<span class="dv">2</span>

<span class="co"># Turn categorical variables into factors</span>
dat$YEAR &lt;-<span class="st"> </span><span class="kw">as.factor</span>(dat$YEAR)
dat$DBRK_01 &lt;-<span class="st"> </span><span class="kw">as.factor</span>(dat$DBRK_01)
dat$PHLB_01 &lt;-<span class="st"> </span><span class="kw">as.factor</span>(dat$PHLB_01)
dat$YEYE_01 &lt;-<span class="st"> </span><span class="kw">as.factor</span>(dat$YEYE_01)
dat$inRCA &lt;-<span class="st"> </span><span class="kw">as.factor</span>(dat$inRCA)

<span class="co"># Data are ready to fit</span>
<span class="kw">save</span>(dat, <span class="dt">file=</span><span class="st">&quot;/home/brian/Documents/Bycatch/WCGOP/data/wcann_processed.RData&quot;</span>)</code></pre></div>
</div>
Expand Down
Loading

0 comments on commit ae6a278

Please sign in to comment.