From 49f7429854419c6bc069b106ac6a7a9a0ea4e349 Mon Sep 17 00:00:00 2001 From: Quarto GHA Workflow Runner Date: Fri, 15 Nov 2024 23:48:33 +0000 Subject: [PATCH] Built site for gh-pages --- .nojekyll | 2 +- index.html | 57 +- listings.json | 1 + .../image.jpg | Bin 0 -> 1963 bytes .../index.html | 1068 ++++++++++++++++- search.json | 9 +- sitemap.xml | 24 +- 7 files changed, 1139 insertions(+), 22 deletions(-) create mode 100644 posts/2024-11-15-bcdata-ortho-historic/image.jpg diff --git a/.nojekyll b/.nojekyll index d6ae5d3..13587a0 100644 --- a/.nojekyll +++ b/.nojekyll @@ -1 +1 @@ -b17a214d \ No newline at end of file +dfa6f492 \ No newline at end of file diff --git a/index.html b/index.html index f8cc8e5..920fa41 100644 --- a/index.html +++ b/index.html @@ -192,7 +192,7 @@

new graphiti

+
Categories
All (9)
COG (1)
CORS (1)
R (3)
assets (1)
aws (3)
bcdata (1)
bibtex (1)
citations (1)
drought (1)
fwapg (1)
land cover (1)
leafem (1)
leaflet (1)
names (1)
news (1)
paws (3)
planetary computer (1)
precipitation (1)
processx (1)
r (4)
rayshader (1)
remote sensing (1)
s3 (3)
s3sf (1)
@@ -206,7 +206,46 @@
Categories
-
+
+
+

+

+

+
+ + +
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+

diff --git a/listings.json b/listings.json index c15c51c..845595f 100644 --- a/listings.json +++ b/listings.json @@ -2,6 +2,7 @@ { "listing": "/index.html", "items": [ + "/posts/2024-11-15-bcdata-ortho-historic/index.html", "/posts/2024-09-21-aws-cors-cog-leaflet/index.html", "/posts/2024-06-30-land-cover/index.html", "/posts/2024-06-19-precipitation/index.html", diff --git a/posts/2024-11-15-bcdata-ortho-historic/image.jpg b/posts/2024-11-15-bcdata-ortho-historic/image.jpg new file mode 100644 index 0000000000000000000000000000000000000000..34bee30b3a943db1696a9b84323d5da17d9df852 GIT binary patch literal 1963 zcma)7SvZ@C0*z{8uU74fGNKEyewJwrZBdk1B27qwPVGUIv6NamEp;_&FcpeYyCy=l zei}>7P>R^9B1%YA(WuzNAmq9)_xtYKJrC!c?>kTDyqt7bXFDlzWpNM)B;{alg8+eq zs7F3pRQQPdbDTGiB+JzaX)6#2j%FK%Q=qMy4Y;JB2 z0D$)`KR^G#F$NQZ^C~Vb1%X5s78cgGcq1btEEcP^wY9p2%;j|%z0)fXJY_1~j7IBM0e97jg9;J`k1C$t=2f#yMr__K&fuI%g3?%gM_)h55tJTeU z*<%{8@Bky)+hUQUHvczn^$1qLAQiBaf$7r>zrJlc_V^OV5e|NoqvAQbzajs7{0oxf z6=>dljGhpHJBFtnABz;LnU`^;L;f3lL_i7u(APj_Pxq<{U&w_e>fm_Qt(?0m%$xb| z*R{wqMJ`|a+DakZ#-a}qXPe^7B^q6IOaj+BjpTRutkljAmAY+GjfH)4XWARE7x(wx zF;&oa-w=#;cK7mX*V`4+1K^f6Cq7kdWtnA}p~o35_uDZnqGkTjJw&2EN_NEv{WW}2 zhiuB0_RP&rEbMYy$DC`C3`WmXxQ7LA1T_cEV*ASQvsZTXy#Hl#_GIpJb9Ih>O?6^; z*ZZF?n~|gz65frmv2XF642i znZlZWyH+pdx{c+zzlJRL=}q0n2T-}gv33jYmzlI>qXQC+?o#cek92^B!a8qQPY{n5RM3(x~){Ye^n+_d~x<0Zi+7~W;>Zl zBLSGxUd4!eqQr`eB7HZvyk-fBEg1?K00o zPJM?y!$|dHq%s!hJu}nTE7J>$BdaY{m8?EBa1RD1WV2EZyPEJ3F6IQa^^|~!fNPiO4J2RK!L1fV=hVshoFZ?lMGcw8CmuvZumGI zeBF%+v(ZW`m?_>L-(I^)sbxpA%M$8~zSEJvR_JOP^`vbs-AuWxNV-(F4z-e^E;BA1 zGs)|56MwC1V6k9YfnvLElu=BK`HS^irC*!3+Ki3KvnB1t^++!{u`?WuHn8F`es+ad zBhf^eJn{N-gHNRe3?;^*4pFL5QGTI;WUJxk{msY8UZwJRgC=^?k>UfRG;{b|VAUe4 zr;QX*g{mjursU^4q!_<(2i5fZ7~W9Kt9Ua~#60fyvR1X3)uJg$#|66{8`lujyTOC> zn>$I=Sd`R=Ig&bOO|o4ja~Dq74&~ZWUuz7+A6q;{9lIP0HTgo-e|A1;NL@gUW6I(C zy6Y$BeIo08l@mU_8rb%ti<}_g-7E?ZZ_Zjj(Qx76E1!w57b1hzTdq;L5Wu{eG4sj! z(AVB|`U3*0EWXqvcu+NRW<@-3U@zewaKW(trlSdhIuiR42xW+mb$c$<G$JJt=YbD(FQvc*MVpJRSI zScl6y&ztOyhq=Ma6IB?OiYQnW{?FsEgQ)GR?2$_Y*m{!vAb*(yD7(o@$l*3y3cle{ zj+LnchxVCR4@zaz&}X|Q55?yjc0U-H*M+wuB5-xtmRZ9unNP53gV`gk)z@Qq%LP;n z>)t@wz(xaeO6~gOq-{Hi_Z~)8Vf(Gqgf)-1 zB-fA*Hj{hU9T&gRmPr=w=UmT)3j_OdVDN2(&Ov{YXES7A*W$-%uY(R$;$V=43xasw zjdxG$Dn>gQT9zr+hVl*koUIS+zDTH_rWIvwmP~4zrt|zZwb4JERasvIU3}V1oAFED zEji&JX~MH@?SICZ28QYW&FJnoFFt>yjZ6EvTcRLw4g1}!hq57mu<0%LBcXiqwAk3k zrBwp>RoUL$m9>YQZ^Icdadh>z8GHKFEo(_;q-X)sDk;|Zz7!rY4U{Ow%0^NRNrt#{ nEwI6WYESg=(Et9B!1qvyJLksBQpn%`PvT(fY*Tmb)`Ncoa^J=P literal 0 HcmV?d00001 diff --git a/posts/2024-11-15-bcdata-ortho-historic/index.html b/posts/2024-11-15-bcdata-ortho-historic/index.html index aeb9400..10b9ecb 100644 --- a/posts/2024-11-15-bcdata-ortho-historic/index.html +++ b/posts/2024-11-15-bcdata-ortho-historic/index.html @@ -1,2 +1,1068 @@ - \ No newline at end of file + + + + + + + + + + +Getting details of historic orthophoto imagery with R – nevermind + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+ +
+
+
+

Getting details of historic orthophoto imagery with R

+
+
fwapg
+
r
+
bcdata
+
+
+
+ + +
+ +
+
Author
+
+

al

+
+
+ +
+
Published
+
+

November 15, 2024

+
+
+ +
+
Modified
+
+

November 15, 2024

+
+
+ +
+ + +
+ + + + +
+ + + + + +

We need some historic orthophoto imagery so that we can have a look at historic watershed conditions compared to current. First thing we’re gonna do is generate an area of interest for which we want all of the map sheet IDs.

+
+
+Code +
suppressMessages(library(tidyverse))
+library(ggplot2)
+library(bcdata)
+library(fwapgr)
+suppressMessages(library(sf))
+# library(leaflet)
+# library(leafem)
+
+
+
+
+Code +
path_post <- fs::path(
+  here::here(),
+  "posts",
+  params$post_name
+)
+
+
+
+
+Code +
staticimports::import(
+  dir = fs::path(
+    path_post,
+    "scripts"
+  ),
+  outfile = fs::path(
+    path_post,
+    "scripts",
+    "staticimports",
+    ext = "R"
+  )
+)
+
+
+
+
+Code +
source(
+  fs::path(
+    path_post,
+    "scripts",
+    "staticimports",
+    ext = "R"
+  )
+)
+
+
+
+
+Code +
# lets build a custom watershed just for upstream of the confluence of Neexdzii Kwa and Wetzin Kwa
+# blueline key
+blk <- 360873822
+# downstream route measure
+drm <- 166030.4
+
+aoi <- fwapgr::fwa_watershed_at_measure(blue_line_key = blk, 
+                                        downstream_route_measure = drm) |> 
+  sf::st_transform(4326)
+
+#get the bounding box of our aoi
+# aoi_bb <- sf::st_bbox(aoi)
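+
+# how big is this watershed? a quick hedged check (assumes the units
+# package is installed) - reports the AOI area in square kilometres
+sf::st_area(aoi) |> units::set_units(km^2)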
+
+
+
+
+Code +
# grab all the railways
+l_rail <- rfp::rfp_bcd_get_data(
+    bcdata_record_id = "whse_basemapping.gba_railway_tracks_sp"
+) |> 
+  sf::st_transform(4326) |> 
+  janitor::clean_names() 
+
+
+# streams in the bulkley and then filter to just keep the big ones
+l_streams <- rfp::rfp_bcd_get_data(
+  bcdata_record_id = "whse_basemapping.fwa_stream_networks_sp",
+  col_filter = "watershed_group_code",
+  col_filter_value = "BULK",
+  # grab a smaller object by including fewer columns
+  col_extract = c("linear_feature_id", "stream_order", "gnis_name", "downstream_route_measure", "blue_line_key", "length_metre")
+) |> 
+  sf::st_transform(4326) |> 
+  janitor::clean_names() |> 
+  dplyr::filter(stream_order > 4)
+
+# historic orthophotos
+# WHSE_IMAGERY_AND_BASE_MAPS.AIMG_HIST_INDEX_MAPS_POLY
+#https://catalogue.data.gov.bc.ca/dataset/airborne-imagery-historical-index-map-points
+l_imagery <- rfp::rfp_bcd_get_data(
+  bcdata_record_id = "WHSE_IMAGERY_AND_BASE_MAPS.AIMG_ORTHOPHOTO_TILES_POLY") |> 
+  sf::st_transform(4326) |> 
+  janitor::clean_names()
+
+l_imagery_hist <- rfp::rfp_bcd_get_data(
+  bcdata_record_id = "WHSE_IMAGERY_AND_BASE_MAPS.AIMG_HIST_INDEX_MAPS_POINT") |> 
+  sf::st_transform(4326) |> 
+  janitor::clean_names()
+
+l_imagery_grid <- rfp::rfp_bcd_get_data(
+  bcdata_record_id = "WHSE_BASEMAPPING.NTS_50K_GRID") |> 
+  sf::st_transform(4326) |> 
+  janitor::clean_names()
+
+
+
+
+Code +
# get a list of the objects in our env that start with l_
+ls <- ls()[stringr::str_starts(ls(), "l_")] 
+
+layers_to_trim <- tibble::lst(
+  !!!mget(ls)
+)
+
+
+# Function to validate and repair geometries
+validate_geometries <- function(layer) {
+  layer <- sf::st_make_valid(layer)
+  layer <- layer[sf::st_is_valid(layer), ]
+  return(layer)
+}
+
+# Apply validation to the AOI and layers
+aoi <- validate_geometries(aoi)
+layers_to_trim <- purrr::map(layers_to_trim, validate_geometries)
+
+# clip them  with purrr and sf
+layers_trimmed <- purrr::map(
+  layers_to_trim,
+  ~ sf::st_intersection(.x, aoi)
+) 
+
+
+

OK, so it looks like we cannot get historical air photo information from the layers downloaded from the BC Data Catalogue because the majority of the photos are not georeferenced. It does however seem that if we go through the online interface, we can use an NTS 1:50,000 map sheet to export a CSV file that does contain information about individual air photo locations. Our study area is huge, so to begin with let’s just have a look at the area from Macquarie Creek up to about the location of Talley. For this area we have just two 1:50,000 map sheets. We can visualize which map sheets those are by downloading that map sheet layer from the BC Data Catalogue and overlaying it on our general study area.
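
As a quick check (a sketch - the exact columns depend on the current catalogue schema), we can peek at what the downloaded historic index layer actually contains:

+
+Code +
+# column names only - note there is no per-photo location info here,
+# hence the CSV export workaround described above
+names(layers_trimmed$l_imagery_hist)
+
+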

+
+
+Code +
map <- ggplot() +
+  # tidyterra::geom_spatraster(
+  #   data = as.factor(land_cover_raster_resampled),
+  #   use_coltab = TRUE,
+  #   maxcell = Inf
+  # ) +
+  geom_sf(
+      data = aoi,
+      fill = "transparent",
+      color = "black",
+      linewidth = .5
+  ) +
+  geom_sf(
+    data = layers_trimmed$l_streams,
+    color = "blue",
+    size = 1
+  ) +
+  geom_sf(
+    data = layers_trimmed$l_rail,
+    color = "black",
+    size = 1
+  ) +
+  # geom_sf(
+  #   data = layers_trimmed$l_imagery_hist,
+  #   color = "red",
+  #   size = 1
+  # ) +
+  geom_sf(
+    data = layers_trimmed$l_imagery_grid,
+    alpha = 0.25,
+  ) +
+  geom_sf_text(
+    data = layers_trimmed$l_imagery_grid,
+    aes(label = map_tile),
+    size = 3  # Adjust size of the text labels as needed
+  )
+  # ggdark::dark_theme_void()
+
+
+map
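+
+# rather than reading the sheet labels off the map, we can also list the
+# NTS 1:50,000 tiles that intersect our area of interest directly
+# (a sketch using the map_tile field plotted above)
+layers_trimmed$l_imagery_grid |>
+  sf::st_drop_geometry() |>
+  dplyr::distinct(map_tile)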
+
+
+
+
+

+
+
+
+
+

So it looks like we have map sheets 093L09 as well as 093L08. Next, we’re gonna go into the provincial WIMSI service, manually select our NTS grids, state the date range we would like to search, and export. We chose the following options:

+
    +
  • Years from: 1963 to 1980
+

It looks like this.

+
+
+Code +
knitr::include_graphics(fs::path(
+  path_post,
+  "fig/Screenshot 2024-11-15 at 2.03.42 PM",
+  ext = "png"
+  )
+)
+
+
+
+
+

+
+
+
+
+

Let’s read in what we have, turn it into a spatial object, trim it to the overall study area, and plot.

+
+
+Code +
# list csvs
+
+ls <- fs::dir_ls(
+  fs::path(
+    path_post,
+    "data"),
+  glob = "*.csv"
+)
+
+photos_raw <- ls |> 
+  purrr::map_df(
+    readr::read_csv
+  ) |> 
+  sf::st_as_sf(
+    coords = c("Longitude", "Latitude"), crs = 4326
+  ) |> 
+  janitor::clean_names() |> 
+  dplyr::mutate(photo_date = lubridate::mdy(photo_date)) 
+
+photos_aoi <- sf::st_intersection(photos_raw, aoi)
+
+
+
+
+Code +
map +
+  geom_sf(
+    data = photos_aoi,
+    alpha = 0.25,
+  ) 
+
+
+
+
+

+
+
+
+
+Code +
  # geom_sf_text(
+  #   data = photos_aoi,
+  #   aes(
+  #     label = 
+  #       paste(
+  #         # roll_frame_series,
+  #         frame,
+  #         sep = "-"
+  #       )
+  #   ),
+  #   size = 2  # Adjust size of the text labels as needed
+  # )
+
+
+
+
+Code +
buffer <- 3000
+drm <- 263795
+
+
+

Ok - so let’s say we only want the ones that are within 3000 m of either side of the Bulkley River mainstem.

+

Let’s grab just the mainstem segments up to the top end of Bulkley Lake, merge them, buffer the stream, and clip our airphoto list by that buffer.

+

We can use the downstream_route_measure of the stream layer to exclude areas upstream of Bulkley Lake (also known as Taman Creek). It looks like this in QGIS:

+
+
+Code +
knitr::include_graphics(fs::path(
+  path_post,
+  "fig/Screenshot 2024-11-15 at 2.41.57 PM",
+  ext = "png"
+  )
+)
+
+
+
+
+

+
+
+
+
+
+
+Code +
aoi_refined_raw <- layers_trimmed$l_streams |> 
+  dplyr::filter(gnis_name == "Bulkley River" & downstream_route_measure < drm) |> 
+  # dplyr::arrange(downstream_route_measure) |>
+  # calculate when we get to length_m by adding up the length_metre field and filtering out everything up to length_m
+  # dplyr::filter(cumsum(length_metre) <= length_m) |>
+  sf::st_union()
+
+
+aoi_refined_buffered <- sf::st_buffer(
+  # we need to run st_sf because st_union returns a bare sfc geometry, not an sf data frame
+  sf::st_sf(
+    aoi_refined_raw
+  ), 
+  buffer, endCapStyle = "FLAT"
+) 
+
+photos_aoi_refined <- sf::st_intersection(photos_raw, aoi_refined_buffered)
+
+
+

Ok, let’s plot again.

+
+
+Code +
map +
+  geom_sf(
+    data = photos_aoi_refined,
+    alpha = 0.25,
+  ) 
+
+
+
+
+

+
+
+
+
+

Now let’s filter by date to see what we can get for the earliest photos possible. What is our range:

+
+
+Code +
range(photos_aoi_refined$photo_date)
+
+
+
[1] "1968-05-09" "1980-09-07"
+
+
+

What do we have for photos before 1970?
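
A quick tally of photos per year helps pick a cutoff (a sketch built from the photo_date field parsed earlier):

+
+Code +
+photos_aoi_refined |>
+  sf::st_drop_geometry() |>
+  dplyr::count(year = lubridate::year(photo_date))
+
+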

+
+
+Code +
map +
+  geom_sf(
+    data = photos_aoi_refined |> dplyr::filter(photo_date <= "1969-12-31"),
+    alpha = 0.25,
+  ) 
+
+
+
+
+

+
+
+
+
+

Looks like decent coverage, so let’s go with that.

+
+
+Code +
photos <- photos_aoi_refined |> 
+  dplyr::filter(photo_date <= "1969-12-31")
+
+photos |> 
+  my_dt_table(cols_freeze_left = 2)
+
+
+
+ +
+
+

There are 104 photos. Let’s burn out a CSV that can be used to find them.
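
As a quick sanity check on that count (a sketch):

+
+Code +
+# number of photos retained after the date filter - should match the 104 noted above
+nrow(photos)
+
+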

+
+
+Code +
path_output <- fs::path(
+      path_post,
+      "exports",
+      paste(
+        "airphotos",
+        paste(range(photos$photo_date), collapse = "_"),
+        sep = "_"
+      ),
+      ext = "csv"
+    )
+
+photos |> 
+  readr::write_csv(
+    path_output, na = ""
+  )
+
+
+

We can view and download this file here.
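
One way to build that link (a minimal sketch - assumes the exported CSV is served alongside the page):

+
+Code +
+# relative path from the project root to the exported CSV, handy for a link
+fs::path_rel(path_output, start = here::here())
+
+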

+
+
+Code +
  # note: `url` is not defined in this post - it is assumed to point at a
+  # Cloud Optimized GeoTIFF for leafem to render
+  leaflet::leaflet() |>
+    leaflet::addTiles() |>
+    leafem:::addCOG(
+      url = url
+      , group = "COG"
+      , resolution = 512
+      , autozoom = TRUE
+    )
+
+
+ + + +
+ +
+ + + + + \ No newline at end of file diff --git a/search.json b/search.json index b53a290..b1b26b6 100644 --- a/search.json +++ b/search.json @@ -95,7 +95,14 @@ "href": "index.html", "title": "new graphiti", "section": "", - "text": "CORS for serving of COGs of UAV imagery on AWS with R\n\n\n\n\n\n\naws\n\n\ns3\n\n\nr\n\n\npaws\n\n\ns3sf\n\n\nleaflet\n\n\nleafem\n\n\nCOG\n\n\nCORS\n\n\n\n\n\n\n\n\n\nSep 21, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nMapping Land Cover with R\n\n\n\n\n\n\nland cover\n\n\nR\n\n\nplanetary computer\n\n\nremote sensing\n\n\n\n\n\n\n\n\n\nJul 1, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nMapping and Plotting Precipitation with R\n\n\n\n\n\n\nprecipitation\n\n\nR\n\n\ndrought\n\n\nrayshader\n\n\n\n\n\n\n\n\n\nJun 19, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nCleaning up bib files flexibly with Zotero and R\n\n\n\n\n\n\nnews\n\n\nbibtex\n\n\nR\n\n\ncitations\n\n\n\n\n\n\n\n\n\nMay 27, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nSetting aws bucket permissions with R\n\n\n\n\n\n\naws\n\n\ns3\n\n\nr\n\n\npaws\n\n\n\n\n\n\n\n\n\nMay 24, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nSyncing files to aws with R\n\n\n\n\n\n\naws\n\n\ns3\n\n\nr\n\n\npaws\n\n\nprocessx\n\n\n\n\n\n\n\n\n\nMay 23, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nLogos and Equipment List Somewhere Accessible\n\n\n\n\n\n\nassets\n\n\n\n\n\n\n\n\n\nMay 7, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nLower snake_case vs Everything_Else\n\n\n\n\n\n\nnames\n\n\n\n\n\n\n\n\n\nMay 7, 2024\n\n\nal\n\n\n\n\n\n\nNo matching items" + "text": "Getting details of historic orthophoto imagery with R\n\n\n\n\n\n\nfwapg\n\n\nr\n\n\nbcdata\n\n\n\n\n\n\n\n\n\nNov 15, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nCORS for serving of COGs of UAV imagery on AWS with R\n\n\n\n\n\n\naws\n\n\ns3\n\n\nr\n\n\npaws\n\n\ns3sf\n\n\nleaflet\n\n\nleafem\n\n\nCOG\n\n\nCORS\n\n\n\n\n\n\n\n\n\nSep 21, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nMapping Land Cover with R\n\n\n\n\n\n\nland cover\n\n\nR\n\n\nplanetary computer\n\n\nremote sensing\n\n\n\n\n\n\n\n\n\nJul 1, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nMapping and Plotting Precipitation with R\n\n\n\n\n\n\nprecipitation\n\n\nR\n\n\ndrought\n\n\nrayshader\n\n\n\n\n\n\n\n\n\nJun 19, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nCleaning up bib files flexibly with Zotero and R\n\n\n\n\n\n\nnews\n\n\nbibtex\n\n\nR\n\n\ncitations\n\n\n\n\n\n\n\n\n\nMay 27, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nSetting aws bucket permissions with R\n\n\n\n\n\n\naws\n\n\ns3\n\n\nr\n\n\npaws\n\n\n\n\n\n\n\n\n\nMay 24, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nSyncing files to aws with R\n\n\n\n\n\n\naws\n\n\ns3\n\n\nr\n\n\npaws\n\n\nprocessx\n\n\n\n\n\n\n\n\n\nMay 23, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nLogos and Equipment List Somewhere Accessible\n\n\n\n\n\n\nassets\n\n\n\n\n\n\n\n\n\nMay 7, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nLower snake_case vs Everything_Else\n\n\n\n\n\n\nnames\n\n\n\n\n\n\n\n\n\nMay 7, 2024\n\n\nal\n\n\n\n\n\n\nNo matching items" + }, + { + "objectID": "posts/2024-11-15-bcdata-ortho-historic/index.html", + "href": "posts/2024-11-15-bcdata-ortho-historic/index.html", + "title": "Getting details of historic orthophoto imagery with R", + "section": "", + "text": "We need some historic ortho photo imagery so that we can have a look at historic watershed conditions compared to current. 
First thing we’re gonna do is generate a area of interest for which we want all of the map sheets ideas for.\n\n\nCode\nsuppressMessages(library(tidyverse))\nlibrary(ggplot2)\nlibrary(bcdata)\nlibrary(fwapgr)\nsuppressMessages(library(sf))\n# library(leaflet)\n# library(leafem)```\n\n\n\n\nCode\npath_post <- fs::path(\n here::here(),\n \"posts\",\n params$post_name\n)\n\n\n\n\nCode\nstaticimports::import(\n dir = fs::path(\n path_post,\n \"scripts\"\n ),\n outfile = fs::path(\n path_post,\n \"scripts\",\n \"staticimports\",\n ext = \"R\"\n )\n)\n\n\n\n\nCode\nsource(\n fs::path(\n path_post,\n \"scripts\",\n \"staticimports\",\n ext = \"R\"\n )\n)\n\n\n\n\nCode\n# lets build a custom watersehed just for upstream of the confluence of Neexdzii Kwa and Wetzin Kwa\n# blueline key\nblk <- 360873822\n# downstream route measure\ndrm <- 166030.4\n\naoi <- fwapgr::fwa_watershed_at_measure(blue_line_key = blk, \n downstream_route_measure = drm) |> \n sf::st_transform(4326)\n\n#get the bounding box of our aoi\n# aoi_bb <- sf::st_bbox(aoi)\n\n\n\n\nCode\n# grab all the railways\nl_rail <- rfp::rfp_bcd_get_data(\n bcdata_record_id = \"whse_basemapping.gba_railway_tracks_sp\"\n) |> \n sf::st_transform(4326) |> \n janitor::clean_names() \n\n\n# streams in the bulkley and then filter to just keep the big ones\nl_streams <- rfp::rfp_bcd_get_data(\n bcdata_record_id = \"whse_basemapping.fwa_stream_networks_sp\",\n col_filter = \"watershed_group_code\",\n col_filter_value = \"BULK\",\n # grab a smaller object by including less columns\n col_extract = c(\"linear_feature_id\", \"stream_order\", \"gnis_name\", \"downstream_route_measure\", \"blue_line_key\", \"length_metre\")\n) |> \n sf::st_transform(4326) |> \n janitor::clean_names() |> \n dplyr::filter(stream_order > 4)\n\n# historic orthophotos\n# WHSE_IMAGERY_AND_BASE_MAPS.AIMG_HIST_INDEX_MAPS_POLY\n#https://catalogue.data.gov.bc.ca/dataset/airborne-imagery-historical-index-map-points\nl_imagery <- rfp::rfp_bcd_get_data(\n bcdata_record_id = \"WHSE_IMAGERY_AND_BASE_MAPS.AIMG_ORTHOPHOTO_TILES_POLY\") |> \n sf::st_transform(4326) |> \n janitor::clean_names()\n\nl_imagery_hist <- rfp::rfp_bcd_get_data(\n bcdata_record_id = \"WHSE_IMAGERY_AND_BASE_MAPS.AIMG_HIST_INDEX_MAPS_POINT\") |> \n sf::st_transform(4326) |> \n janitor::clean_names()\n\nl_imagery_grid <- rfp::rfp_bcd_get_data(\n bcdata_record_id = \"WHSE_BASEMAPPING.NTS_50K_GRID\") |> \n sf::st_transform(4326) |> \n janitor::clean_names()\n\n\n\n\nCode\n# get a list of the objects in our env that start with l_\nls <- ls()[stringr::str_starts(ls(), \"l_\")] \n\nlayers_to_trim <- tibble::lst(\n !!!mget(ls)\n)\n\n\n# Function to validate and repair geometries\nvalidate_geometries <- function(layer) {\n layer <- sf::st_make_valid(layer)\n layer <- layer[sf::st_is_valid(layer), ]\n return(layer)\n}\n\n# Apply validation to the AOI and layers\naoi <- validate_geometries(aoi)\nlayers_to_trim <- purrr::map(layers_to_trim, validate_geometries)\n\n# clip them with purrr and sf\nlayers_trimmed <- purrr::map(\n layers_to_trim,\n ~ sf::st_intersection(.x, aoi)\n) \n\n\nOK, so it looks like we cannot get historical air photo information from layers downloaded from BC data catalogue because the majority of the photos are not referenced. It does however seem that if we go through the online interface, we can use a NTS 1 to 50,000 map sheet to export a CSV file that does contain information about individual air photo locations. 
Our study area is huge so to begin with let’s just have a look at the area from Macquarie Creek up to a boat the location of Talley. For this area we just have 21 to 50,000 map sheets. We can visualize which map she’s those are by downloading that map sheet layer from BC data catalogue overlay it on our general study area.\n\n\nCode\nmap <- ggplot() +\n # tidyterra::geom_spatraster(\n # data = as.factor(land_cover_raster_resampled),\n # use_coltab = TRUE,\n # maxcell = Inf\n # ) +\n geom_sf(\n data = aoi,\n fill = \"transparent\",\n color = \"black\",\n linewidth = .5\n ) +\n geom_sf(\n data = layers_trimmed$l_streams,\n color = \"blue\",\n size = 1\n ) +\n geom_sf(\n data = layers_trimmed$l_rail,\n color = \"black\",\n size = 1\n ) +\n # geom_sf(\n # data = layers_trimmed$l_imagery_hist,\n # color = \"red\",\n # size = 1\n # ) +\n geom_sf(\n data = layers_trimmed$l_imagery_grid,\n alpha = 0.25,\n ) +\n geom_sf_text(\n data = layers_trimmed$l_imagery_grid,\n aes(label = map_tile),\n size = 3 # Adjust size of the text labels as needed\n )\n # ggdark::dark_theme_void()\n\n\nmap\n\n\n\n\n\n\n\n\n\nSo looks like we have map sheet 093L09 as well as 0 93L08. Next, we’re gonna go into the provincial WIMSI9 service and manually select our NTS grids, state the date range we would like to search and export. We chose the following options:\n\nYears from: 1963 to 1980\n\nIt looks like this.\n\n\nCode\nknitr::include_graphics(fs::path(\n path_post,\n \"fig/Screenshot 2024-11-15 at 2.03.42 PM\",\n ext = \"png\"\n )\n)\n\n\n\n\n\n\n\n\n\nLet’s read in what we have, turn into spatial object, trim to overall study area and plot\n\n\nCode\n# list csvs\n\nls <- fs::dir_ls(\n fs::path(\n path_post,\n \"data\"),\n glob = \"*.csv\"\n)\n\nphotos_raw <- ls |> \n purrr::map_df(\n readr::read_csv\n ) |> \n sf::st_as_sf(\n coords = c(\"Longitude\", \"Latitude\"), crs = 4326\n ) |> \n janitor::clean_names() |> \n dplyr::mutate(photo_date = lubridate::mdy(photo_date)) \n\nphotos_aoi <- sf::st_intersection(photos_raw, aoi)\n\n\n\n\nCode\nmap +\n geom_sf(\n data = photos_aoi,\n alpha = 0.25,\n ) \n\n\n\n\n\n\n\n\n\nCode\n # geom_sf_text(\n # data = photos_aoi,\n # aes(\n # label = \n # paste(\n # # roll_frame_series,\n # frame,\n # sep = \"-\"\n # )\n # ),\n # size = 2 # Adjust size of the text labels as needed\n # )\n\n\n\n\nCode\nbuffer <- 3000\ndrm <- 263795\n\n\nOk - so let’s say we only want the ones that are within say 3000 of either side of the Bulkley River mainstem.\nLet’s grab just the mainstem segments up to the top end of Bulkley Lake, merge, buffer the stream and clip our airphoto list by that.\nWe can use the downstream_route_measure of the stream layer to exclude areas upstream of Bulkley Lake (also known as Taman Creek). 
It looks like this in QGIS\n\n\nCode\nknitr::include_graphics(fs::path(\n path_post,\n \"fig/Screenshot 2024-11-15 at 2.41.57 PM\",\n ext = \"png\"\n )\n)\n\n\n\n\n\n\n\n\n\n\n\nCode\naoi_refined_raw <- layers_trimmed$l_streams |> \n dplyr::filter(gnis_name == \"Bulkley River\" & downstream_route_measure < drm) |> \n # dplyr::arrange(downstream_route_measure) |>\n # calculate when we get to length_m by adding up the length_metre field and filtering out everything up to length_m\n # dplyr::filter(cumsum(length_metre) <= length_m) |>\n sf::st_union()\n\n\naoi_refined_buffered <- sf::st_buffer(\n # we need to run st_sf or we get a sp object in a list...\n sf::st_sf(\n aoi_refined_raw\n ), \n buffer, endCapStyle = \"FLAT\"\n) \n\nphotos_aoi_refined <- sf::st_intersection(photos_raw, aoi_refined_buffered)\n\n\nok lets plot again\n\n\nCode\nmap +\n geom_sf(\n data = photos_aoi_refined,\n alpha = 0.25,\n ) \n\n\n\n\n\n\n\n\n\nNow lets filter by date to see what we can get for the earliest photos possible. What is our range:\n\n\nCode\nrange(photos_aoi_refined$photo_date)\n\n\n[1] \"1968-05-09\" \"1980-09-07\"\n\n\nWhat do we have for photos before 1970\n\n\nCode\nmap +\n geom_sf(\n data = photos_aoi_refined |> dplyr::filter(photo_date <= \"1969-12-31\"),\n alpha = 0.25,\n ) \n\n\n\n\n\n\n\n\n\nLooks like decent coverage so lets go with that.\n\n\nCode\nphotos <- photos_aoi_refined |> \n dplyr::filter(photo_date <= \"1969-12-31\")\n\nphotos |> \n my_dt_table(cols_freeze_left = 2)\n\n\n\n\n\n\nThere are 104 photos. Let’s burn out a csv that can be used to find them.\n\n\nCode\npath_output <- fs::path(\n path_post,\n \"exports\",\n paste(\n \"airphotos\",\n paste(range(photos$photo_date), collapse = \"_\"),\n sep = \"_\"\n ),\n ext = \"csv\"\n )\n\nphotos |> \n readr::write_csv(\n path_output, na =\"\"\n )\n\n\nWe can view and download this file here\n\n\nCode\n leaflet::leaflet() |>\n leaflet::addTiles() |>\n leafem:::addCOG(\n url = url\n , group = \"COG\"\n , resolution = 512\n , autozoom = TRUE\n )" }, { "objectID": "posts/2024-05-27-references-bib-succinct/index.html", diff --git a/sitemap.xml b/sitemap.xml index b5cb33b..5f3c009 100644 --- a/sitemap.xml +++ b/sitemap.xml @@ -2,42 +2,46 @@ https://NewGraphEnvironment.github.io/new_graphiti/posts/aws-storage-permissions/index.html - 2024-11-15T23:30:47.866Z + 2024-11-15T23:48:04.767Z https://NewGraphEnvironment.github.io/new_graphiti/posts/2024-06-30-land-cover/index.html - 2024-11-15T23:30:47.846Z + 2024-11-15T23:48:04.747Z https://NewGraphEnvironment.github.io/new_graphiti/posts/logos-equipment/index.html - 2024-11-15T23:30:47.866Z + 2024-11-15T23:48:04.767Z https://NewGraphEnvironment.github.io/new_graphiti/posts/2024-09-21-aws-cors-cog-leaflet/index.html - 2024-11-15T23:30:47.858Z + 2024-11-15T23:48:04.759Z https://NewGraphEnvironment.github.io/new_graphiti/posts/aws-storage-processx/index.html - 2024-11-15T23:30:47.866Z + 2024-11-15T23:48:04.767Z https://NewGraphEnvironment.github.io/new_graphiti/about.html - 2024-11-15T23:30:47.802Z + 2024-11-15T23:48:04.703Z https://NewGraphEnvironment.github.io/new_graphiti/index.html - 2024-11-15T23:30:47.818Z + 2024-11-15T23:48:04.719Z + + + https://NewGraphEnvironment.github.io/new_graphiti/posts/2024-11-15-bcdata-ortho-historic/index.html + 2024-11-15T23:48:04.767Z https://NewGraphEnvironment.github.io/new_graphiti/posts/2024-05-27-references-bib-succinct/index.html - 2024-11-15T23:30:47.830Z + 2024-11-15T23:48:04.727Z https://NewGraphEnvironment.github.io/new_graphiti/posts/snakecase/index.html - 
2024-11-15T23:30:47.866Z + 2024-11-15T23:48:04.767Z https://NewGraphEnvironment.github.io/new_graphiti/posts/2024-06-19-precipitation/index.html - 2024-11-15T23:30:47.834Z + 2024-11-15T23:48:04.731Z