diff --git a/.nojekyll b/.nojekyll
index 113fe68..e960c6d 100644
--- a/.nojekyll
+++ b/.nojekyll
@@ -1 +1 @@
-81c98961
\ No newline at end of file
+4865b3b9
\ No newline at end of file
diff --git a/index.html b/index.html
index 1068ce7..9d2cd09 100644
--- a/index.html
+++ b/index.html
@@ -206,7 +206,7 @@
Categories

Following download, we run some cleanup to ensure the geometry of our spatial files is “valid”, trim them to our area of interest, and burn them locally so that every time we rerun iterations of this memo we don’t need to repeat the download process, which takes longer than we want to wait.
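The lngs_geom_validate() and lburn_sf() helpers referenced throughout are defined in the post setup, which is not shown in this diff. A minimal sketch of what they are assumed to do - repair geometries and write a layer to GeoJSON in the post data directory - follows; the bodies are illustrative assumptions, not the actual definitions.

Code
# illustrative sketch only - the real helpers are defined in the post setup
lngs_geom_validate <- function(layer) {
  # repair any invalid geometries so downstream sf operations don't choke
  sf::st_make_valid(layer)
}

lburn_sf <- function(layer, layer_name) {
  # "burn" a layer to GeoJSON in the post's data directory so reruns can
  # skip the download (path_post is assumed to come from the memo setup)
  sf::st_write(
    layer,
    fs::path(path_post, "data", layer_name, ext = "geojson"),
    delete_dsn = TRUE,  # overwrite any previous burn
    quiet = TRUE
  )
}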

@@ -407,41 +406,69 @@

Download Spatial Data Layers

+
+
+Code +
+# let's use the NTS mapsheet grid to query the photo centroids and avoid a massive file download
+col_value <- layers_trimmed$l_imagery_grid |> 
+  dplyr::pull(map_tile) 
+
+  
+
+l_photo_centroids <- rfp::rfp_bcd_get_data(
+  bcdata_record_id = "WHSE_IMAGERY_AND_BASE_MAPS.AIMG_PHOTO_CENTROIDS_SP",
+  col_filter = "nts_tile",
+  col_filter_value = col_value) |> 
+  sf::st_transform(4326) |> 
+  janitor::clean_names()
+
+# apply geometry validation to the photo centroids
+l_photo_centroids <- lngs_geom_validate(l_photo_centroids)
+
+# clip to the aoi - could also use layers_trimmed$aoi
+l_photo_centroids <- sf::st_intersection(l_photo_centroids, aoi)
+
+
+lburn_sf(l_photo_centroids, "l_photo_centroids")
+
+

Next we read the layers back in. The download step is now skipped unless we turn it on again by changing the update_gis param in our memo yaml header to TRUE.
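Since the yaml header itself is not shown in this diff, here is a sketch of how the update_gis toggle is assumed to be wired - a Quarto param gating the download-and-burn chunks.

Code
# in the memo yaml header (assumed):
# params:
#   update_gis: false

# the download-and-burn code is then guarded so it only runs on demand
if (params$update_gis) {
  # rfp::rfp_bcd_get_data() downloads and lburn_sf() burns happen here
}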

Code -
# now we read in all the sf layers that are local so it is really quick
-layers_to_load <- fs::dir_ls(
-  fs::path(
-    path_post,
-    "data"),
-  glob = "*.geojson"
-)
-
-layers_trimmed <- layers_to_load |>
-  purrr::map(
-    ~ sf::st_read(
-      .x, quiet = TRUE)
-  ) |> 
-  purrr::set_names(
-    nm =tools::file_path_sans_ext(
-      basename(
-        names(
-          layers_to_load
-        )
-      )
-    )
-  )
+
# now we read in all the sf layers that are local so it is really quick
+layers_to_load <- fs::dir_ls(
+  fs::path(
+    path_post,
+    "data"),
+  glob = "*.geojson"
+)
+
+layers_trimmed <- layers_to_load |>
+  purrr::map(
+    ~ sf::st_read(
+      .x, quiet = TRUE)
+  ) |> 
+  purrr::set_names(
+    nm = tools::file_path_sans_ext(
+      basename(
+        names(
+          layers_to_load
+        )
+      )
+    )
+  )
-

OK, it seems we cannot get machine-readable historical air photo information from layers downloaded from the BC Data Catalogue - perhaps because the majority of the photos are not georeferenced? What we see in the map under the table below (red dot on map) is one point which contains 8 records, including links to pdfs and kmls which are basically a georeferenced drawing of where the imagery overlaps. As far as I can tell, if we wanted to use these kmls or pdfs to select orthoimagery we would need to manually eyeball where the photo polygons overlap the areas we want imagery for and write down the photo identifiers by hand. Maybe I am missing something but it sure seems that way. This is what the information in the Historic Imagery Points layer looks like.

+

OK, it seems we cannot get machine-readable historical air photo information from the Historic Imagery Points and Historic Imagery Polygons layers downloaded from the BC Data Catalogue - perhaps because the majority of the photos are not georeferenced? What we see in the map under the table below (red dot on map) is one point which contains 8 records, including links to pdfs and kmls which are basically a georeferenced drawing of where the imagery overlaps. As far as I can tell, if we wanted to use these kmls or pdfs to select orthoimagery we would need to manually eyeball where the photo polygons overlap the areas we want imagery for and write down the photo identifiers by hand. Maybe I am missing something but it sure seems that way.

Code -
layers_trimmed$l_imagery_hist |> 
-  sf::st_drop_geometry() |> 
-  knitr::kable()
+
# This is what the information in the [Historic Imagery Points](https://catalogue.data.gov.bc.ca/dataset/airborne-imagery-historical-index-map-points) layer looks like.
+
+layers_trimmed$l_imagery_hist |> 
+  sf::st_drop_geometry() |> 
+  knitr::kable()
@@ -625,45 +652,45 @@

Download Spatial Data Layers
Code -
map <- ggplot() +
-  geom_sf(
-      data = layers_trimmed$aoi,
-      fill = "transparent",
-      color = "black",
-      linewidth = .5
-  ) +
-  geom_sf(
-    data = layers_trimmed$l_streams,
-    color = "blue",
-    size = 1
-  ) +
-  geom_sf(
-    data = layers_trimmed$l_rail,
-    color = "black",
-    size = 1
-  ) +
-  # geom_sf(
-  #   data = layers_trimmed$l_imagery,
-  #   alpha = 0,
-  # ) +
-  geom_sf(
-    data = layers_trimmed$l_imagery_hist,
-    color = "red",
-    size = 2
-  ) +
-  geom_sf(
-    data = layers_trimmed$l_imagery_grid,
-    alpha = 0.25,
-  ) +
-  geom_sf_text(
-    data = layers_trimmed$l_imagery_grid,
-    aes(label = map_tile),
-    size = 3  # Adjust size of the text labels as needed
-  )
-  # ggdark::dark_theme_void()
-
-
-map
+
map <- ggplot() +
+  geom_sf(
+      data = layers_trimmed$aoi,
+      fill = "transparent",
+      color = "black",
+      linewidth = .5
+  ) +
+  geom_sf(
+    data = layers_trimmed$l_streams,
+    color = "blue",
+    size = 1
+  ) +
+  geom_sf(
+    data = layers_trimmed$l_rail,
+    color = "black",
+    size = 1
+  ) +
+  # geom_sf(
+  #   data = layers_trimmed$l_imagery,
+  #   alpha = 0,
+  # ) +
+  geom_sf(
+    data = layers_trimmed$l_imagery_hist,
+    color = "red",
+    size = 2
+  ) +
+  geom_sf(
+    data = layers_trimmed$l_imagery_grid,
+    alpha = 0.25,
+  ) +
+  geom_sf_text(
+    data = layers_trimmed$l_imagery_grid,
+    aes(label = map_tile),
+    size = 3  # Adjust size of the text labels as needed
+  )
+  # ggdark::dark_theme_void()
+
+
+map
@@ -673,73 +700,72 @@

Download Spatial Data Layers

-

It does, however, seem that if we go through the online interface, we can use an NTS 1:50,000 map sheet to export a CSV file that does contain information about individual air photo locations (centre coordinates?). Our study area is huge, so to begin with we can look at the area from somewhere around the confluence of McQuarie Creek and the Neexdzii Kwah up to, say, the upstream end of Bulkley Lake. For this area we just have two 1:50,000 map sheets (093L09 and 093L08).

- -
-

Download Photo Information

-

Next, we go into the provincial WIMSI service and, for each NTS 1:50,000 grid, state the date range we would like to search and export. Thinking we may be able to hit the API directly with a query in the future, which would eliminate the painful point-and-click portion of this workflow. For now though, we are doing this piece manually and chose the following options:

-
  • Years from: 1950 to 2024
  • Media: All
  • Only show georeferenced images: No
-

Running these options exports a csv for each query. We saved them in this repo with their original sane names airphotos.{mapsheet}.csv.

+

It does, however, seem that the Air Photo Centroids are georeferenced.

Code -
knitr::include_graphics(fs::path(
-  path_post,
-  "fig",
-  "Screenshot1",
-  ext = "png"
-  )
-)
+
# if we go through the online interface, we can use an NTS 1:50,000 map sheet to export a CSV file that does contain information about individual air photo locations (centre coordinates?). Our study area is huge, so to begin with we can look at the area from somewhere around the confluence of McQuarie Creek and the Neexdzii Kwah up to, say, the upstream end of Bulkley Lake. For this area we just have two 1:50,000 map sheets (`093L09` and `093L08`). 
-
-
-
-

-
-
+
+
+Code +
# ## Download Photo Information
+
+# Next, we go into the provincial [WIMSI](https://www2.gov.bc.ca/gov/content/data/geographic-data-services/digital-imagery/air-photos) service and, for each NTS 1:50,000 grid, state the date range we would like to search and export. Thinking we may be able to hit the API directly with a query in the future, which would eliminate the painful point-and-click portion of this workflow.  For now though, we are doing this piece manually and chose the following options:
+# 
+#   - `Years from`: 1950 to 2024
+#   - `Media`: All
+#   - `Only show georeferenced images`: No
+# 
+# Running these options exports a csv for each query.  We saved them in this repo with their original sane names `airphotos.{mapsheet}.csv`.
+
+knitr::include_graphics(fs::path(
+  path_post,
+  "fig",
+  "Screenshot1",
+  ext = "png"
+  )
+)
+
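On the idea of hitting the API directly rather than pointing and clicking - a sketch of what that could look like with the provincially maintained bcdata package (which rfp wraps), using the same record and map tiles named in this post. The NTS_TILE column name is an assumption about the warehouse schema.

Code
library(bcdata)
library(dplyr)

# query the photo centroid record server-side by NTS tile, then collect as sf
photo_centroids <- bcdc_query_geodata(
  "WHSE_IMAGERY_AND_BASE_MAPS.AIMG_PHOTO_CENTROIDS_SP"
) |>
  filter(NTS_TILE %in% c("093L09", "093L08")) |>
  collect()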
-

At this point we have downloaded two csvs (one for each NTS 1:50,000 mapsheet, of course) with information about the airphotos, including UTM coordinates that we will assume for now are the photo centres. In our next steps we read in what we have, turn it into a spatial object, trim it to the overall study area and plot.

Code -
# list csvs
-ls <- fs::dir_ls(
-  fs::path(
-    path_post,
-    "data"),
-  glob = "*.csv"
-)
-
-photos_raw <- ls |> 
-  purrr::map_df(
-    readr::read_csv
-  ) |> 
-  sf::st_as_sf(
-    coords = c("Longitude", "Latitude"), crs = 4326
-  ) |> 
-  janitor::clean_names() |> 
-  dplyr::mutate(photo_date = lubridate::mdy(photo_date)) 
-
-
-photos_aoi <- sf::st_intersection(
-  photos_raw, 
-  layers_trimmed$aoi |> st_make_valid()
-  )
+
# At this point we have downloaded two csvs (one for each NTS 1:50,000 mapsheet, of course) with information about the airphotos, including UTM coordinates that we will assume for now are the photo centres. In our next steps we read in what we have, turn it into a spatial object, trim it to the overall study area and plot.
+# list csvs
+ls <- fs::dir_ls(
+  fs::path(
+    path_post,
+    "data"),
+  glob = "*.csv"
+)
+
+photos_raw <- ls |> 
+  purrr::map_df(
+    readr::read_csv
+  ) |> 
+  sf::st_as_sf(
+    coords = c("Longitude", "Latitude"), crs = 4326
+  ) |> 
+  janitor::clean_names() |> 
+  dplyr::mutate(photo_date = lubridate::mdy(photo_date)) 
+
+
+photos_aoi <- sf::st_intersection(
+  photos_raw, 
+  layers_trimmed$aoi |> sf::st_make_valid()
+  )
Code -
map +
-  geom_sf(
-    data = photos_aoi,
-    alpha = 0.25,
-  ) 
+
map +
+  geom_sf(
+    data = layers_trimmed$l_photo_centroids,
+    alpha = 0.25
+  ) 
@@ -750,27 +776,27 @@

Download Photo Information

Code -
  # geom_sf_text(
-  #   data = photos_aoi,
-  #   aes(
-  #     label = 
-  #       paste(
-  #         # roll_frame_series,
-  #         frame,
-  #         sep = "-"
-  #       )
-  #   ),
-  #   size = 2  # Adjust size of the text labels as needed
-  # )
+
  # geom_sf_text(
+  #   data = photos_aoi,
+  #   aes(
+  #     label = 
+  #       paste(
+  #         # roll_frame_series,
+  #         frame,
+  #         sep = "-"
+  #       )
+  #   ),
+  #   size = 2  # Adjust size of the text labels as needed
+  # )
-

That is a lot of photos! 3957 photos to be exact!!!

+

That is a lot of photos! 7910 photos to be exact!!!
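The count quoted above is presumably just the number of centroid rows remaining after the clip - a quick check, assuming the layer name used earlier:

Code
nrow(layers_trimmed$l_photo_centroids)
#> [1] 7910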

Code -
q_buffer <- 1000
-q_drm_main <- 263795
-q_drm_other <- 5000
+
q_buffer <- 500
+q_drm_main <- 263795
+q_drm_other <- 5000
@@ -778,67 +804,68 @@

Download Photo Information

Clip Photo Information with Streams Buffers

Here are our query parameters to narrow down the area, within our selected mapsheets, for which we want to find photos:

  - • Buffer: 1000m - size of buffer used on either side of the selected stream lines
  + • Buffer: 500m - size of buffer used on either side of the selected stream lines
    • Stream segments:
      • Bulkley River (gnis_name in the stream layer)
      • Maxan Creek
  +   • Buck Creek
      • stream segments which begin within 5000m of the mainstem
Code -
# We use the `downstream_route_measure` of the stream layer to exclude areas upstream of Bulkley Lake (also known as Taman Creek).  We find it in QGIS by highlighting the stream layer and clicking on our segment of interest while we have the information tool selected - the resulting pop-up looks like this in QGIS.
-knitr::include_graphics(fs::path(
-  path_post,
-  "fig",
-  "Screenshot2",
-  ext = "png"
-  )
-)
+
# We use the `downstream_route_measure` of the stream layer to exclude areas upstream of Bulkley Lake (also known as Taman Creek).  We find it in QGIS by highlighting the stream layer and clicking on our segment of interest while we have the information tool selected - the resulting pop-up looks like this in QGIS.
+knitr::include_graphics(fs::path(
+  path_post,
+  "fig",
+  "Screenshot2",
+  ext = "png"
+  )
+)
Code -
aoi_refined_raw <- layers_trimmed$l_streams |> 
-  # removed  & downstream_route_measure < q_drm_main for bulkley as doestn't cahnge 1960s query and increases beyond just by 5 photos
-  dplyr::filter(gnis_name == "Bulkley River"|
-                  gnis_name != "Bulkley River" & downstream_route_measure < q_drm_other |
-                  gnis_name == "Maxan Creek") |> 
-  # dplyr::arrange(downstream_route_measure) |>
-  # calculate when we get to length_m by adding up the length_metre field and filtering out everything up to length_m
-  # dplyr::filter(cumsum(length_metre) <= length_m) |>
-  sf::st_union() |> 
-  # we need to run st_sf or we get a sp object in a list...
-  sf::st_sf()
-  
-aoi_refined_buffered <- sf::st_buffer(
-  aoi_refined_raw,
-  q_buffer, endCapStyle = "FLAT"
-) 
-
-photos_aoi_refined <- sf::st_intersection(
-  photos_raw, 
-  aoi_refined_buffered
-  )
+
aoi_refined_raw <- layers_trimmed$l_streams |> 
+  # removed `& downstream_route_measure < q_drm_main` for the Bulkley as it doesn't change the 1960s query and only adds 5 photos beyond it
+  dplyr::filter(gnis_name == "Bulkley River"|
+                  gnis_name != "Bulkley River" & downstream_route_measure < q_drm_other |
+                  gnis_name %in% c("Maxan Creek", "Buck Creek")) |> 
+  # dplyr::arrange(downstream_route_measure) |>
+  # calculate when we get to length_m by adding up the length_metre field and filtering out everything up to length_m
+  # dplyr::filter(cumsum(length_metre) <= length_m) |>
+  sf::st_union() |> 
+  # we need to run st_sf or we get a bare geometry set rather than an sf data frame
+  sf::st_sf()
+  
+aoi_refined_buffered <- sf::st_buffer(
+  aoi_refined_raw,
+  q_buffer, endCapStyle = "FLAT"
+) 
+
+photos_aoi_refined <- sf::st_intersection(
+  layers_trimmed$l_photo_centroids, 
+  aoi_refined_buffered
+  )
-

Let’s plot again and include our buffered areas around the first 5000m of streams (area in red) along with the location of the photo points that land within that area. Looks like this gives us 711 photos.

+

Let’s plot again and include our buffered areas around the first 5000m of streams (area in red) along with the location of the photo points that land within that area. Looks like this gives us 968 photos.

Code -
map +
-  geom_sf(
-    data = aoi_refined_buffered,
-    color = "red",
-    alpha= 0
-  ) +
-  geom_sf(
-    data = photos_aoi_refined,
-    alpha = 0.25,
-  ) 
+
map +
+  geom_sf(
+    data = aoi_refined_buffered,
+    color = "red",
+    alpha= 0
+  ) +
+  geom_sf(
+    data = photos_aoi_refined,
+    alpha = 0.25,
+  ) 
@@ -855,45 +882,45 @@

Filter Photos by Date
Code -
range(photos_aoi_refined$photo_date)
+
range(photos_aoi_refined$photo_date)
-
[1] "1968-05-09" "2019-09-18"
+
[1] "1963-08-07" "2019-09-18"
Code -
q_date_end1 <- "1969-12-31"
-q_date_start1 <- "1970-01-01"
-q_date_end2 <- "1975-12-31"
+
q_date_end1 <- "1969-12-31"
+q_date_start1 <- "1970-01-01"
+q_date_end2 <- "1975-12-31"

There is lots going on here so we will map a comparison of a couple of options. First we will look at all photos available from before 1970 (red dots on map below). We will also see what photos we have between 1970-01-01 and 1975-12-31 inclusive (blue dots on map below).

Code -
photos1 <- photos_aoi_refined |> 
-  dplyr::filter(
-      photo_date <= q_date_end1
-  )
-
-photos2 <- photos_aoi_refined |> 
-  dplyr::filter(
-    photo_date >= q_date_start1 &
-      photo_date <= q_date_end2
-  )
+
photos1 <- photos_aoi_refined |> 
+  dplyr::filter(
+      photo_date <= q_date_end1
+  )
+
+photos2 <- photos_aoi_refined |> 
+  dplyr::filter(
+    photo_date >= q_date_start1 &
+      photo_date <= q_date_end2
+  )
Code -
map +
-  geom_sf(
-    data = photos1,
-    color = "red",
-    alpha = 0.25,
-  ) 
+
map +
+  geom_sf(
+    data = photos1,
+    color = "red",
+    alpha = 0.25,
+  ) 
@@ -904,12 +931,12 @@

Filter Photos by Date

Code -
map + 
-  geom_sf(
-    data = photos2,
-    color = "blue",
-    alpha = 0.25,
-  ) 
+
map + 
+  geom_sf(
+    data = photos2,
+    color = "blue",
+    alpha = 0.25,
+  ) 
@@ -919,18 +946,18 @@

Filter Photos by Date

-

Well, we have some tough decisions to make, as we seem to have better coverage in the Maxan Creek drainage in the 70s (91 photos) but good coverage of the mainstem Neexdzii Kwah in the 60s (72 photos)…

+

Well, we have some tough decisions to make, as we seem to have better coverage in the Maxan Creek drainage in the 70s (144 photos) but good coverage of the mainstem Neexdzii Kwah in the 60s (83 photos)…


-

The table below can be used to filter photos from any time (provided their locations fall within the mapsheet areas we downloaded info for) and export the result to a csv or excel file. This could be useful for narrowing down our search not only to timescales but also to specific mapsheets, with options given for both NTS 1:50,000 as well as BCGS 1:20,000 grid mapsheets.

+

The table below can be used to filter photos from any time (provided their locations fall within the watershed area we clipped our downloaded points to) and export the result to a csv or excel file. This could be useful for narrowing down our search not only to timescales but also to specific mapsheets, with options given for both NTS 1:50,000 as well as BCGS 1:20,000 grid mapsheets.

Code -
photos_aoi_refined |> 
-  my_dt_table(cols_freeze_left = 2)
+
photos_aoi_refined |> 
+  my_dt_table(cols_freeze_left = 2)
-
- +
+
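my_dt_table() is a custom helper defined outside this post. A minimal sketch, assuming it wraps DT::datatable with column filters, csv/excel export buttons and frozen left columns as described above:

Code
# illustrative sketch only - the real my_dt_table() is defined elsewhere
my_dt_table <- function(dat, cols_freeze_left = 1) {
  dat |>
    sf::st_drop_geometry() |>
    DT::datatable(
      filter = "top",  # per-column filters for date, mapsheet, etc.
      extensions = c("Buttons", "FixedColumns"),
      options = list(
        dom = "Bfrtip",
        buttons = c("csv", "excel"),
        scrollX = TRUE,
        fixedColumns = list(leftColumns = cols_freeze_left)
      )
    )
}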
@@ -945,61 +972,60 @@

Export csv files with photo information
Code -
lfile_name_photos <- function(dat = NULL){
-  fs::path(
-      path_post,
-      "exports",
-      paste(
-        "airphotos",
-        paste(range(dat$photo_date), collapse = "_"),
-        sep = "_"
-      ),
-      ext = "csv"
-    )
-}
-
-# path_output <- fs::path(
-#       path_post,
-#       "exports",
-#       paste(
-#         "airphotos",
-#         paste(range(photos$photo_date), collapse = "_"),
-#         sep = "_"
-#       ),
-#       ext = "csv"
-#     )
-
-photos1 |> 
-  readr::write_csv(
-    lfile_name_photos(photos1), na =""
-  )
-
-photos2 |> 
-  readr::write_csv(
-    lfile_name_photos(photos2), na =""
-  )
-
-lpath_link <- function(dat = NULL){
-  paste0(
-    "https://github.com/NewGraphEnvironment/new_graphiti/tree/main/posts/2024-11-15-bcdata-ortho-historic/exports/",
-    basename(
-      lfile_name_photos(dat)
-    )
-  )
-}
+
lfile_name_photos <- function(dat = NULL){
+  fs::path(
+      path_post,
+      "exports",
+      paste(
+        "airphotos",
+        paste(range(dat$photo_date), collapse = "_"),
+        sep = "_"
+      ),
+      ext = "csv"
+    )
+}
+
+# path_output <- fs::path(
+#       path_post,
+#       "exports",
+#       paste(
+#         "airphotos",
+#         paste(range(photos$photo_date), collapse = "_"),
+#         sep = "_"
+#       ),
+#       ext = "csv"
+#     )
+
+photos1 |> 
+  readr::write_csv(
+    lfile_name_photos(photos1), na = ""
+  )
+
+photos2 |> 
+  readr::write_csv(
+    lfile_name_photos(photos2), na = ""
+  )
+
+lpath_link <- function(dat = NULL){
+  paste0(
+    "https://github.com/NewGraphEnvironment/new_graphiti/tree/main/posts/2024-11-15-bcdata-ortho-historic/exports/",
+    basename(
+      lfile_name_photos(dat)
+    )
+  )
+}
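A sketch of how these helpers might be strung together to produce the download links referenced below - the cat() output would be inlined into the prose (assumed usage):

Code
# assumed usage - markdown links to the exported csvs on GitHub
cat(
  paste0(
    "[", basename(lfile_name_photos(photos1)), "](", lpath_link(photos1), ")"
  ),
  sep = "\n"
)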

We can view and download exported csv files here

Pick a Horse

-

We are absolutely privileged to potentially have the assistance of Mike Price to help us obtain this imagery from the UBC archives. Sooo… - unless he has a preference that 100% can override our recommendation - we would think that he would probably prefer clear direction from us. Soooo - to start - thinking the best plan is to go with the earliest imagery possible and scan in the “before 1969-12-31” timeframe. The name of the csv with that information is airphotos_1968-05-09_1968-07-31.csv and the file can be found along with the other exports at the link above or individually here.

+

We are absolutely privileged to potentially have the assistance of Mike Price to help us obtain this imagery from the UBC archives. Sooo… - unless he has a preference that 100% can override our recommendation - we would think that he would probably prefer clear direction from us. Soooo - to start - thinking the best plan is to go with the imagery in the 1971-06-05 to 1975-09-18 timeframe, since the coverage seems much better than the earlier images… The name of the csv with that information is airphotos_1971-06-05_1975-09-18.csv and the file can be found along with the other exports at the link above or individually here.

Outstanding Issues

  - • Why does the date range of the photos we can get IDs for through the WIMSI online portal not overlap the date range listed in the BC Data Catalogue Historic Imagery Points layer (1950 - 1963) vs our earliest record from the portal at 1968-05-09??!!
  - • Is it crazy to export all mapsheets for the area and add Buck Creek to our study area (easy enough to do) or is this a reasonable start?
  + • Why does the date range of the photos we can get IDs for through the WIMSI online portal not overlap the date range listed in the BC Data Catalogue Historic Imagery Points layer (1950 - 1963) vs our earliest record from the portal at 1963-08-07??!!
diff --git a/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map2-1.png b/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map2-1.png
index b61afce..229a01b 100644
Binary files a/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map2-1.png and b/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map2-1.png differ
diff --git a/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map3-1.png b/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map3-1.png
index 1848f73..c236a1c 100644
Binary files a/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map3-1.png and b/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map3-1.png differ
diff --git a/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map4-1.png b/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map4-1.png
index 4499206..9a691d3 100644
Binary files a/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map4-1.png and b/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map4-1.png differ
diff --git a/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map4-2.png b/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map4-2.png
index 9eaaa2b..0790640 100644
Binary files a/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map4-2.png and b/posts/2024-11-15-bcdata-ortho-historic/index_files/figure-html/map4-2.png differ