diff --git a/.nojekyll b/.nojekyll
index d6ae5d3..13587a0 100644
--- a/.nojekyll
+++ b/.nojekyll
@@ -1 +1 @@
-b17a214d
\ No newline at end of file
+dfa6f492
\ No newline at end of file
diff --git a/index.html b/index.html
index f8cc8e5..920fa41 100644
--- a/index.html
+++ b/index.html
@@ -192,7 +192,7 @@
# let's build a custom watershed just for the area upstream of the confluence of Neexdzii Kwa and Wetzin Kwa
+# blueline key
+blk <- 360873822
+# downstream route measure
+drm <- 166030.4
+
+aoi <- fwapgr::fwa_watershed_at_measure(blue_line_key = blk,
+                                        downstream_route_measure = drm) |>
+  sf::st_transform(4326)
+
+# get the bounding box of our aoi
+# aoi_bb <- sf::st_bbox(aoi)
+
+
+
+
+Code
+
# grab all the railways
+l_rail <- rfp::rfp_bcd_get_data(
+  bcdata_record_id = "whse_basemapping.gba_railway_tracks_sp"
+) |>
+  sf::st_transform(4326) |>
+  janitor::clean_names()
+
+
+# streams in the bulkley and then filter to just keep the big ones
+l_streams <- rfp::rfp_bcd_get_data(
+  bcdata_record_id = "whse_basemapping.fwa_stream_networks_sp",
+  col_filter = "watershed_group_code",
+  col_filter_value = "BULK",
+  # grab a smaller object by including fewer columns
+  col_extract = c("linear_feature_id", "stream_order", "gnis_name", "downstream_route_measure", "blue_line_key", "length_metre")
+) |>
+  sf::st_transform(4326) |>
+  janitor::clean_names() |>
+  dplyr::filter(stream_order > 4)
+
+# historic orthophotos
+# WHSE_IMAGERY_AND_BASE_MAPS.AIMG_HIST_INDEX_MAPS_POLY
+# https://catalogue.data.gov.bc.ca/dataset/airborne-imagery-historical-index-map-points
+l_imagery <- rfp::rfp_bcd_get_data(
+  bcdata_record_id = "WHSE_IMAGERY_AND_BASE_MAPS.AIMG_ORTHOPHOTO_TILES_POLY") |>
+  sf::st_transform(4326) |>
+  janitor::clean_names()
+
+l_imagery_hist <- rfp::rfp_bcd_get_data(
+  bcdata_record_id = "WHSE_IMAGERY_AND_BASE_MAPS.AIMG_HIST_INDEX_MAPS_POINT") |>
+  sf::st_transform(4326) |>
+  janitor::clean_names()
+
+l_imagery_grid <- rfp::rfp_bcd_get_data(
+  bcdata_record_id = "WHSE_BASEMAPPING.NTS_50K_GRID") |>
+  sf::st_transform(4326) |>
+  janitor::clean_names()
+
+
+
+
+Code
+
# get a list of the objects in our env that start with l_
+ls <- ls()[stringr::str_starts(ls(), "l_")]
+
+# mget() returns a named list of those objects; !!! splices them into
+# tibble::lst() so the list keeps the object names
+layers_to_trim <- tibble::lst(
+  !!!mget(ls)
+)
+
+
+# Function to validate and repair geometries
+validate_geometries <- function(layer) {
+  layer <- sf::st_make_valid(layer)
+  layer <- layer[sf::st_is_valid(layer), ]
+  return(layer)
+}
+
+# Apply validation to the AOI and layers
+aoi <- validate_geometries(aoi)
+layers_to_trim <- purrr::map(layers_to_trim, validate_geometries)
+
+# clip them with purrr and sf
+layers_trimmed <- purrr::map(
+  layers_to_trim,
+  ~ sf::st_intersection(.x, aoi)
+)
+
+
+
OK, so it looks like we cannot get historical air photo information from layers downloaded from the BC Data Catalogue because the majority of the photos are not georeferenced. It does however seem that if we go through the online interface, we can use an NTS 1:50,000 map sheet to export a CSV file that does contain information about individual air photo locations. Our study area is huge, so to begin with let’s just have a look at the area from Macquarie Creek up to about the location of Talley. For this area we have just two 1:50,000 map sheets. We can visualize which map sheets those are by downloading the map sheet layer from the BC Data Catalogue and overlaying it on our general study area.
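
A minimal sketch of that overlay, reusing the AOI and trimmed layers from above (this assumes the NTS grid layer carries a map_tile label column after janitor::clean_names()):


Code

map <- ggplot() +
  geom_sf(
    data = aoi,
    fill = "transparent",
    color = "black",
    linewidth = .5
  ) +
  geom_sf(
    data = layers_trimmed$l_streams,
    color = "blue"
  ) +
  geom_sf(
    data = layers_trimmed$l_rail,
    color = "black"
  ) +
  geom_sf(
    data = layers_trimmed$l_imagery_grid,
    alpha = 0.25
  ) +
  geom_sf_text(
    data = layers_trimmed$l_imagery_grid,
    aes(label = map_tile),
    size = 3 # adjust label size as needed
  )

map
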
So it looks like we have map sheet 093L09 as well as 093L08. Next, we’re going to go into the provincial WIMSI service and manually select our NTS grids, state the date range we would like to search, and export. We chose the following options:
Years from: 1963 to 1980
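
Once the export is done we can read the CSVs back in, turn them into a spatial object and trim to the study area - roughly like this (a sketch assuming the exports were saved to the post's data/ directory with Longitude/Latitude columns and a month-day-year photo_date column):


Code

# list the exported csvs and stack them into one sf points object
ls <- fs::dir_ls(
  fs::path(
    path_post,
    "data"
  ),
  glob = "*.csv"
)

photos_raw <- ls |>
  purrr::map_df(
    readr::read_csv
  ) |>
  sf::st_as_sf(
    coords = c("Longitude", "Latitude"), crs = 4326
  ) |>
  janitor::clean_names() |>
  dplyr::mutate(photo_date = lubridate::mdy(photo_date))

# clip the photo points to the area of interest and plot them
photos_aoi <- sf::st_intersection(photos_raw, aoi)

map +
  geom_sf(
    data = photos_aoi,
    alpha = 0.25
  )

The commented-out labelling layer below can be tacked onto that plot to print the frame number beside each photo point.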
# geom_sf_text(
+# data = photos_aoi,
+# aes(
+# label =
+# paste(
+# # roll_frame_series,
+# frame,
+# sep = "-"
+# )
+# ),
+# size = 2 # Adjust size of the text labels as needed
+# )
+
+
+
+
+Code
+
buffer <- 3000
+drm <- 263795
+
+
+
OK - so let’s say we only want the ones that are within say 3,000 m of either side of the Bulkley River mainstem.
+
Let’s grab just the mainstem segments up to the top end of Bulkley Lake, merge them, buffer the stream and clip our airphoto list by that.
+
We can use the downstream_route_measure of the stream layer to exclude areas upstream of Bulkley Lake (also known as Taman Creek). It looks like this in QGIS
aoi_refined_raw <- layers_trimmed$l_streams |>
+  dplyr::filter(gnis_name == "Bulkley River" & downstream_route_measure < drm) |>
+  # dplyr::arrange(downstream_route_measure) |>
+  # calculate when we get to length_m by adding up the length_metre field and filtering out everything up to length_m
+  # dplyr::filter(cumsum(length_metre) <= length_m) |>
+  sf::st_union()
+
+
+aoi_refined_buffered <- sf::st_buffer(
+  # we need to run st_sf or we get a bare sfc geometry in a list...
+  sf::st_sf(
+    aoi_refined_raw
+  ),
+  buffer, endCapStyle = "FLAT"
+)
+
+photos_aoi_refined <- sf::st_intersection(photos_raw, aoi_refined_buffered)
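
From here we can plot again, check the date range of what is left, and keep just the earliest coverage before burning out a csv of the result (a sketch following the choices above; the pre-1970 cutoff is one reasonable option):


Code

# what date range are we working with?
range(photos_aoi_refined$photo_date)

# keep only the pre-1970 photos
photos <- photos_aoi_refined |>
  dplyr::filter(photo_date <= "1969-12-31")

# burn out a csv that can be used to find the photos
path_output <- fs::path(
  path_post,
  "exports",
  paste(
    "airphotos",
    paste(range(photos$photo_date), collapse = "_"),
    sep = "_"
  ),
  ext = "csv"
)

photos |>
  readr::write_csv(path_output, na = "")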
+
+
+
+
+
\ No newline at end of file
diff --git a/search.json b/search.json
index b53a290..b1b26b6 100644
--- a/search.json
+++ b/search.json
@@ -95,7 +95,14 @@
"href": "index.html",
"title": "new graphiti",
"section": "",
- "text": "CORS for serving of COGs of UAV imagery on AWS with R\n\n\n\n\n\n\naws\n\n\ns3\n\n\nr\n\n\npaws\n\n\ns3sf\n\n\nleaflet\n\n\nleafem\n\n\nCOG\n\n\nCORS\n\n\n\n\n\n\n\n\n\nSep 21, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nMapping Land Cover with R\n\n\n\n\n\n\nland cover\n\n\nR\n\n\nplanetary computer\n\n\nremote sensing\n\n\n\n\n\n\n\n\n\nJul 1, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nMapping and Plotting Precipitation with R\n\n\n\n\n\n\nprecipitation\n\n\nR\n\n\ndrought\n\n\nrayshader\n\n\n\n\n\n\n\n\n\nJun 19, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nCleaning up bib files flexibly with Zotero and R\n\n\n\n\n\n\nnews\n\n\nbibtex\n\n\nR\n\n\ncitations\n\n\n\n\n\n\n\n\n\nMay 27, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nSetting aws bucket permissions with R\n\n\n\n\n\n\naws\n\n\ns3\n\n\nr\n\n\npaws\n\n\n\n\n\n\n\n\n\nMay 24, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nSyncing files to aws with R\n\n\n\n\n\n\naws\n\n\ns3\n\n\nr\n\n\npaws\n\n\nprocessx\n\n\n\n\n\n\n\n\n\nMay 23, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nLogos and Equipment List Somewhere Accessible\n\n\n\n\n\n\nassets\n\n\n\n\n\n\n\n\n\nMay 7, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nLower snake_case vs Everything_Else\n\n\n\n\n\n\nnames\n\n\n\n\n\n\n\n\n\nMay 7, 2024\n\n\nal\n\n\n\n\n\n\nNo matching items"
+ "text": "Getting details of historic orthophoto imagery with R\n\n\n\n\n\n\nfwapg\n\n\nr\n\n\nbcdata\n\n\n\n\n\n\n\n\n\nNov 15, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nCORS for serving of COGs of UAV imagery on AWS with R\n\n\n\n\n\n\naws\n\n\ns3\n\n\nr\n\n\npaws\n\n\ns3sf\n\n\nleaflet\n\n\nleafem\n\n\nCOG\n\n\nCORS\n\n\n\n\n\n\n\n\n\nSep 21, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nMapping Land Cover with R\n\n\n\n\n\n\nland cover\n\n\nR\n\n\nplanetary computer\n\n\nremote sensing\n\n\n\n\n\n\n\n\n\nJul 1, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nMapping and Plotting Precipitation with R\n\n\n\n\n\n\nprecipitation\n\n\nR\n\n\ndrought\n\n\nrayshader\n\n\n\n\n\n\n\n\n\nJun 19, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nCleaning up bib files flexibly with Zotero and R\n\n\n\n\n\n\nnews\n\n\nbibtex\n\n\nR\n\n\ncitations\n\n\n\n\n\n\n\n\n\nMay 27, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nSetting aws bucket permissions with R\n\n\n\n\n\n\naws\n\n\ns3\n\n\nr\n\n\npaws\n\n\n\n\n\n\n\n\n\nMay 24, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nSyncing files to aws with R\n\n\n\n\n\n\naws\n\n\ns3\n\n\nr\n\n\npaws\n\n\nprocessx\n\n\n\n\n\n\n\n\n\nMay 23, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nLogos and Equipment List Somewhere Accessible\n\n\n\n\n\n\nassets\n\n\n\n\n\n\n\n\n\nMay 7, 2024\n\n\nal\n\n\n\n\n\n\n\n\n\n\n\n\nLower snake_case vs Everything_Else\n\n\n\n\n\n\nnames\n\n\n\n\n\n\n\n\n\nMay 7, 2024\n\n\nal\n\n\n\n\n\n\nNo matching items"
+ },
+ {
+ "objectID": "posts/2024-11-15-bcdata-ortho-historic/index.html",
+ "href": "posts/2024-11-15-bcdata-ortho-historic/index.html",
+ "title": "Getting details of historic orthophoto imagery with R",
+ "section": "",
+ "text": "We need some historic ortho photo imagery so that we can have a look at historic watershed conditions compared to current. First thing we’re gonna do is generate a area of interest for which we want all of the map sheets ideas for.\n\n\nCode\nsuppressMessages(library(tidyverse))\nlibrary(ggplot2)\nlibrary(bcdata)\nlibrary(fwapgr)\nsuppressMessages(library(sf))\n# library(leaflet)\n# library(leafem)```\n\n\n\n\nCode\npath_post <- fs::path(\n here::here(),\n \"posts\",\n params$post_name\n)\n\n\n\n\nCode\nstaticimports::import(\n dir = fs::path(\n path_post,\n \"scripts\"\n ),\n outfile = fs::path(\n path_post,\n \"scripts\",\n \"staticimports\",\n ext = \"R\"\n )\n)\n\n\n\n\nCode\nsource(\n fs::path(\n path_post,\n \"scripts\",\n \"staticimports\",\n ext = \"R\"\n )\n)\n\n\n\n\nCode\n# lets build a custom watersehed just for upstream of the confluence of Neexdzii Kwa and Wetzin Kwa\n# blueline key\nblk <- 360873822\n# downstream route measure\ndrm <- 166030.4\n\naoi <- fwapgr::fwa_watershed_at_measure(blue_line_key = blk, \n downstream_route_measure = drm) |> \n sf::st_transform(4326)\n\n#get the bounding box of our aoi\n# aoi_bb <- sf::st_bbox(aoi)\n\n\n\n\nCode\n# grab all the railways\nl_rail <- rfp::rfp_bcd_get_data(\n bcdata_record_id = \"whse_basemapping.gba_railway_tracks_sp\"\n) |> \n sf::st_transform(4326) |> \n janitor::clean_names() \n\n\n# streams in the bulkley and then filter to just keep the big ones\nl_streams <- rfp::rfp_bcd_get_data(\n bcdata_record_id = \"whse_basemapping.fwa_stream_networks_sp\",\n col_filter = \"watershed_group_code\",\n col_filter_value = \"BULK\",\n # grab a smaller object by including less columns\n col_extract = c(\"linear_feature_id\", \"stream_order\", \"gnis_name\", \"downstream_route_measure\", \"blue_line_key\", \"length_metre\")\n) |> \n sf::st_transform(4326) |> \n janitor::clean_names() |> \n dplyr::filter(stream_order > 4)\n\n# historic orthophotos\n# WHSE_IMAGERY_AND_BASE_MAPS.AIMG_HIST_INDEX_MAPS_POLY\n#https://catalogue.data.gov.bc.ca/dataset/airborne-imagery-historical-index-map-points\nl_imagery <- rfp::rfp_bcd_get_data(\n bcdata_record_id = \"WHSE_IMAGERY_AND_BASE_MAPS.AIMG_ORTHOPHOTO_TILES_POLY\") |> \n sf::st_transform(4326) |> \n janitor::clean_names()\n\nl_imagery_hist <- rfp::rfp_bcd_get_data(\n bcdata_record_id = \"WHSE_IMAGERY_AND_BASE_MAPS.AIMG_HIST_INDEX_MAPS_POINT\") |> \n sf::st_transform(4326) |> \n janitor::clean_names()\n\nl_imagery_grid <- rfp::rfp_bcd_get_data(\n bcdata_record_id = \"WHSE_BASEMAPPING.NTS_50K_GRID\") |> \n sf::st_transform(4326) |> \n janitor::clean_names()\n\n\n\n\nCode\n# get a list of the objects in our env that start with l_\nls <- ls()[stringr::str_starts(ls(), \"l_\")] \n\nlayers_to_trim <- tibble::lst(\n !!!mget(ls)\n)\n\n\n# Function to validate and repair geometries\nvalidate_geometries <- function(layer) {\n layer <- sf::st_make_valid(layer)\n layer <- layer[sf::st_is_valid(layer), ]\n return(layer)\n}\n\n# Apply validation to the AOI and layers\naoi <- validate_geometries(aoi)\nlayers_to_trim <- purrr::map(layers_to_trim, validate_geometries)\n\n# clip them with purrr and sf\nlayers_trimmed <- purrr::map(\n layers_to_trim,\n ~ sf::st_intersection(.x, aoi)\n) \n\n\nOK, so it looks like we cannot get historical air photo information from layers downloaded from BC data catalogue because the majority of the photos are not referenced. 
It does however seem that if we go through the online interface, we can use a NTS 1 to 50,000 map sheet to export a CSV file that does contain information about individual air photo locations. Our study area is huge so to begin with let’s just have a look at the area from Macquarie Creek up to a boat the location of Talley. For this area we just have 21 to 50,000 map sheets. We can visualize which map she’s those are by downloading that map sheet layer from BC data catalogue overlay it on our general study area.\n\n\nCode\nmap <- ggplot() +\n # tidyterra::geom_spatraster(\n # data = as.factor(land_cover_raster_resampled),\n # use_coltab = TRUE,\n # maxcell = Inf\n # ) +\n geom_sf(\n data = aoi,\n fill = \"transparent\",\n color = \"black\",\n linewidth = .5\n ) +\n geom_sf(\n data = layers_trimmed$l_streams,\n color = \"blue\",\n size = 1\n ) +\n geom_sf(\n data = layers_trimmed$l_rail,\n color = \"black\",\n size = 1\n ) +\n # geom_sf(\n # data = layers_trimmed$l_imagery_hist,\n # color = \"red\",\n # size = 1\n # ) +\n geom_sf(\n data = layers_trimmed$l_imagery_grid,\n alpha = 0.25,\n ) +\n geom_sf_text(\n data = layers_trimmed$l_imagery_grid,\n aes(label = map_tile),\n size = 3 # Adjust size of the text labels as needed\n )\n # ggdark::dark_theme_void()\n\n\nmap\n\n\n\n\n\n\n\n\n\nSo looks like we have map sheet 093L09 as well as 0 93L08. Next, we’re gonna go into the provincial WIMSI9 service and manually select our NTS grids, state the date range we would like to search and export. We chose the following options:\n\nYears from: 1963 to 1980\n\nIt looks like this.\n\n\nCode\nknitr::include_graphics(fs::path(\n path_post,\n \"fig/Screenshot 2024-11-15 at 2.03.42 PM\",\n ext = \"png\"\n )\n)\n\n\n\n\n\n\n\n\n\nLet’s read in what we have, turn into spatial object, trim to overall study area and plot\n\n\nCode\n# list csvs\n\nls <- fs::dir_ls(\n fs::path(\n path_post,\n \"data\"),\n glob = \"*.csv\"\n)\n\nphotos_raw <- ls |> \n purrr::map_df(\n readr::read_csv\n ) |> \n sf::st_as_sf(\n coords = c(\"Longitude\", \"Latitude\"), crs = 4326\n ) |> \n janitor::clean_names() |> \n dplyr::mutate(photo_date = lubridate::mdy(photo_date)) \n\nphotos_aoi <- sf::st_intersection(photos_raw, aoi)\n\n\n\n\nCode\nmap +\n geom_sf(\n data = photos_aoi,\n alpha = 0.25,\n ) \n\n\n\n\n\n\n\n\n\nCode\n # geom_sf_text(\n # data = photos_aoi,\n # aes(\n # label = \n # paste(\n # # roll_frame_series,\n # frame,\n # sep = \"-\"\n # )\n # ),\n # size = 2 # Adjust size of the text labels as needed\n # )\n\n\n\n\nCode\nbuffer <- 3000\ndrm <- 263795\n\n\nOk - so let’s say we only want the ones that are within say 3000 of either side of the Bulkley River mainstem.\nLet’s grab just the mainstem segments up to the top end of Bulkley Lake, merge, buffer the stream and clip our airphoto list by that.\nWe can use the downstream_route_measure of the stream layer to exclude areas upstream of Bulkley Lake (also known as Taman Creek). 
It looks like this in QGIS\n\n\nCode\nknitr::include_graphics(fs::path(\n path_post,\n \"fig/Screenshot 2024-11-15 at 2.41.57 PM\",\n ext = \"png\"\n )\n)\n\n\n\n\n\n\n\n\n\n\n\nCode\naoi_refined_raw <- layers_trimmed$l_streams |> \n dplyr::filter(gnis_name == \"Bulkley River\" & downstream_route_measure < drm) |> \n # dplyr::arrange(downstream_route_measure) |>\n # calculate when we get to length_m by adding up the length_metre field and filtering out everything up to length_m\n # dplyr::filter(cumsum(length_metre) <= length_m) |>\n sf::st_union()\n\n\naoi_refined_buffered <- sf::st_buffer(\n # we need to run st_sf or we get a sp object in a list...\n sf::st_sf(\n aoi_refined_raw\n ), \n buffer, endCapStyle = \"FLAT\"\n) \n\nphotos_aoi_refined <- sf::st_intersection(photos_raw, aoi_refined_buffered)\n\n\nok lets plot again\n\n\nCode\nmap +\n geom_sf(\n data = photos_aoi_refined,\n alpha = 0.25,\n ) \n\n\n\n\n\n\n\n\n\nNow lets filter by date to see what we can get for the earliest photos possible. What is our range:\n\n\nCode\nrange(photos_aoi_refined$photo_date)\n\n\n[1] \"1968-05-09\" \"1980-09-07\"\n\n\nWhat do we have for photos before 1970\n\n\nCode\nmap +\n geom_sf(\n data = photos_aoi_refined |> dplyr::filter(photo_date <= \"1969-12-31\"),\n alpha = 0.25,\n ) \n\n\n\n\n\n\n\n\n\nLooks like decent coverage so lets go with that.\n\n\nCode\nphotos <- photos_aoi_refined |> \n dplyr::filter(photo_date <= \"1969-12-31\")\n\nphotos |> \n my_dt_table(cols_freeze_left = 2)\n\n\n\n\n\n\nThere are 104 photos. Let’s burn out a csv that can be used to find them.\n\n\nCode\npath_output <- fs::path(\n path_post,\n \"exports\",\n paste(\n \"airphotos\",\n paste(range(photos$photo_date), collapse = \"_\"),\n sep = \"_\"\n ),\n ext = \"csv\"\n )\n\nphotos |> \n readr::write_csv(\n path_output, na =\"\"\n )\n\n\nWe can view and download this file here\n\n\nCode\n leaflet::leaflet() |>\n leaflet::addTiles() |>\n leafem:::addCOG(\n url = url\n , group = \"COG\"\n , resolution = 512\n , autozoom = TRUE\n )"
},
{
"objectID": "posts/2024-05-27-references-bib-succinct/index.html",
diff --git a/sitemap.xml b/sitemap.xml
index b5cb33b..5f3c009 100644
--- a/sitemap.xml
+++ b/sitemap.xml
@@ -2,42 +2,46 @@
   <url>
     <loc>https://NewGraphEnvironment.github.io/new_graphiti/posts/aws-storage-permissions/index.html</loc>
-    <lastmod>2024-11-15T23:30:47.866Z</lastmod>
+    <lastmod>2024-11-15T23:48:04.767Z</lastmod>
   </url>
   <url>
     <loc>https://NewGraphEnvironment.github.io/new_graphiti/posts/2024-06-30-land-cover/index.html</loc>
-    <lastmod>2024-11-15T23:30:47.846Z</lastmod>
+    <lastmod>2024-11-15T23:48:04.747Z</lastmod>
   </url>
   <url>
     <loc>https://NewGraphEnvironment.github.io/new_graphiti/posts/logos-equipment/index.html</loc>
-    <lastmod>2024-11-15T23:30:47.866Z</lastmod>
+    <lastmod>2024-11-15T23:48:04.767Z</lastmod>
   </url>
   <url>
     <loc>https://NewGraphEnvironment.github.io/new_graphiti/posts/2024-09-21-aws-cors-cog-leaflet/index.html</loc>
-    <lastmod>2024-11-15T23:30:47.858Z</lastmod>
+    <lastmod>2024-11-15T23:48:04.759Z</lastmod>
   </url>
   <url>
     <loc>https://NewGraphEnvironment.github.io/new_graphiti/posts/aws-storage-processx/index.html</loc>
-    <lastmod>2024-11-15T23:30:47.866Z</lastmod>
+    <lastmod>2024-11-15T23:48:04.767Z</lastmod>
   </url>
   <url>
     <loc>https://NewGraphEnvironment.github.io/new_graphiti/about.html</loc>
-    <lastmod>2024-11-15T23:30:47.802Z</lastmod>
+    <lastmod>2024-11-15T23:48:04.703Z</lastmod>
   </url>
   <url>
     <loc>https://NewGraphEnvironment.github.io/new_graphiti/index.html</loc>
-    <lastmod>2024-11-15T23:30:47.818Z</lastmod>
+    <lastmod>2024-11-15T23:48:04.719Z</lastmod>
   </url>
+  <url>
+    <loc>https://NewGraphEnvironment.github.io/new_graphiti/posts/2024-11-15-bcdata-ortho-historic/index.html</loc>
+    <lastmod>2024-11-15T23:48:04.767Z</lastmod>
+  </url>
   <url>
     <loc>https://NewGraphEnvironment.github.io/new_graphiti/posts/2024-05-27-references-bib-succinct/index.html</loc>
-    <lastmod>2024-11-15T23:30:47.830Z</lastmod>
+    <lastmod>2024-11-15T23:48:04.727Z</lastmod>
   </url>
   <url>
     <loc>https://NewGraphEnvironment.github.io/new_graphiti/posts/snakecase/index.html</loc>
-    <lastmod>2024-11-15T23:30:47.866Z</lastmod>
+    <lastmod>2024-11-15T23:48:04.767Z</lastmod>
   </url>
   <url>
     <loc>https://NewGraphEnvironment.github.io/new_graphiti/posts/2024-06-19-precipitation/index.html</loc>
-    <lastmod>2024-11-15T23:30:47.834Z</lastmod>
+    <lastmod>2024-11-15T23:48:04.731Z</lastmod>