diff --git a/.nojekyll b/.nojekyll
index 113fe68..e960c6d 100644
--- a/.nojekyll
+++ b/.nojekyll
@@ -1 +1 @@
-81c98961
\ No newline at end of file
+4865b3b9
\ No newline at end of file
diff --git a/index.html b/index.html
index 1068ce7..9d2cd09 100644
--- a/index.html
+++ b/index.html
@@ -206,7 +206,7 @@
Following the download we run some cleanup to ensure the geometry of our spatial files is “valid”, trim them to our area of interest, and burn them locally so that every time we rerun iterations of this memo we don’t need to wait for the download process, which takes longer than we would like.
@@ -407,41 +406,69 @@
+# lets use the nts mapsheet to query the photo centroids to avoid a massive file download
+col_value <- layers_trimmed$l_imagery_grid |>
+  dplyr::pull(map_tile)
+
+l_photo_centroids <- rfp::rfp_bcd_get_data(
+  bcdata_record_id = "WHSE_IMAGERY_AND_BASE_MAPS.AIMG_PHOTO_CENTROIDS_SP",
+  col_filter = "nts_tile",
+  col_filter_value = col_value) |>
+  sf::st_transform(4326) |>
+  janitor::clean_names()
+
+# Apply validation to the AOI and layers
+l_photo_centroids <- lngs_geom_validate(l_photo_centroids)
+
+# clip to aoi - can use layers_trimmed$aoi
+l_photo_centroids <- sf::st_intersection(l_photo_centroids, aoi)
+
+lburn_sf(l_photo_centroids, "l_photo_centroids")
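The helpers lngs_geom_validate() and lburn_sf() used above are defined earlier in the memo and not shown in this hunk. Based on the prose (validate geometry, then burn locally as geojson so reruns skip the download), here is a minimal sketch of what they plausibly do; the internals are assumptions, not the memo's actual definitions:

# sketch only - assumed behavior, not the memo's actual definitions
lngs_geom_validate <- function(layer) {
  # repair invalid geometries so downstream sf::st_intersection() calls don't error
  sf::st_make_valid(layer)
}

lburn_sf <- function(layer, layer_name) {
  # "burn" the layer to a local geojson so future reruns read from disk
  sf::st_write(
    layer,
    fs::path(path_post, "data", paste0(layer_name, ".geojson")),
    delete_dsn = TRUE,
    quiet = TRUE
  )
}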
Next, we read the layers back in. The download step is now skipped unless we turn it on again by changing the update_gis param in our memo yaml header to TRUE.
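For context, here is a minimal sketch of how such a param can gate the download step in a Quarto memo; the param name update_gis comes from the prose above, while the chunk wiring is an assumption:

# yaml header of the memo (sketch):
# ---
# params:
#   update_gis: FALSE
# ---

# the download-and-burn code only runs when the param is flipped to TRUE
if (params$update_gis) {
  # ... rfp::rfp_bcd_get_data() downloads, lngs_geom_validate(), lburn_sf() ...
}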
-# now we read in all the sf layers that are local so it is really quick
-layers_to_load <- fs::dir_ls(
-  fs::path(
-    path_post, "data"),
-  glob = "*.geojson"
-  )
-
-layers_trimmed <- layers_to_load |>
-  purrr::map(
-    ~ sf::st_read(
-      .x, quiet = TRUE)
-  ) |>
-  purrr::set_names(
-    nm = tools::file_path_sans_ext(
-      basename(
-        names(
-          layers_to_load
-        )
-      )
-    )
-  )
+# now we read in all the sf layers that are local so it is really quick
+layers_to_load <- fs::dir_ls(
+  fs::path(
+    path_post, "data"),
+  glob = "*.geojson"
+  )
+
+layers_trimmed <- layers_to_load |>
+  purrr::map(
+    ~ sf::st_read(
+      .x, quiet = TRUE)
+  ) |>
+  purrr::set_names(
+    nm = tools::file_path_sans_ext(
+      basename(
+        names(
+          layers_to_load
+        )
+      )
+    )
+  )
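As a quick check that the read-back worked, the result is a named list of sf objects keyed by filename; the layer name below is assumed from the burn step earlier:

# list names come from the geojson filenames with the extension stripped
names(layers_trimmed)

# peek at one layer, e.g. the photo centroids burned above
layers_trimmed$l_photo_centroids |>
  dplyr::glimpse()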
-OK, seems we cannot get machine readable historical air photo information from layers downloaded from BC data catalogue - perhpas because the majority of the photos are not georeferenced? What we see in the map under the table below (red dot on map) is one point which contains 8 records including links to pdfs and kmls which are basically a georeferenced drawing of where the imagery overlaps. From as far as I can tell - if we wanted to try to use these kmls or pdfs to select orthoimagery we would need to manually eyeball where the photo polygons overlap where we want to see imagery for and manually write down identifiers for photo by hand. Maybe I am missing something but it sure seems that way. This what the information in the Historic Imagery Points layer looks like.
+OK, it seems we cannot get machine-readable historical air photo information from the Historic Imagery Points and Historic Imagery Polygons layers downloaded from the BC Data Catalogue, perhaps because the majority of the photos are not georeferenced? What we see in the map under the table below (red dot on the map) is one point containing 8 records, including links to pdfs and kmls which are basically georeferenced drawings of where the imagery overlaps. As far as I can tell, if we wanted to use these kmls or pdfs to select orthoimagery we would need to manually eyeball where the photo polygons overlap the area we want imagery for and write down the photo identifiers by hand. Maybe I am missing something, but it sure seems that way.
-layers_trimmed$l_imagery_hist |>
-  sf::st_drop_geometry() |>
-  knitr::kable()
+This is what the information in the [Historic Imagery Points](https://catalogue.data.gov.bc.ca/dataset/airborne-imagery-historical-index-map-points) layer looks like.
+
+layers_trimmed$l_imagery_hist |>
+  sf::st_drop_geometry() |>
+  knitr::kable()