Tags: r, tidyverse, r-sf, snowfall

Create line density per polygon using tidyverse + sf in R


I have a GIS question that has been stumping me for some time now. The end goal is to extract the density of lines per pixel/voxel/polygon using the tidyverse/sf packages, i.e., for each polygon, the total length of the lines that intersect it divided by the polygon's area. As of now I have a function that works when I execute it line by line, but not when I call it as a function. The ultimate goal is to pass this function to sfLapply from the snowfall package so it can run in parallel. Any help getting this to work as a function would be greatly appreciated! The data involved can be found here:

https://www.dropbox.com/s/zg9o2b5x4wizafo/hexagons.gpkg?dl=0
https://www.dropbox.com/s/x2gxx36pjkutxzm/railroad_lines.gpkg?dl=0

The function I created, which again works line by line but not as a function, is as follows:

length_in_poly <- function(fishnet, spatial_lines) {
  require(sf)
  require(tidyverse)
  require(magrittr)

  # Rebuild sf objects from the list inputs
  fishnet <- st_as_sf(do.call(rbind, fishnet))
  spatial_lines <- st_as_sf(do.call(rbind, spatial_lines))

  fish_length <- list()

  # Clip the lines to each fishnet cell and record the total clipped length
  for (i in 1:nrow(fishnet)) {
    split_lines <- spatial_lines %>%
      st_cast(., "MULTILINESTRING", group_or_split = FALSE) %>%
      st_intersection(., fishnet[i, ]) %>%
      mutate(lineid = row_number())

    fish_length[[i]] <- split_lines %>%
      mutate(length = sum(st_length(.)))
  }

  # Sum the lengths per hexagon id
  fish_length <- do.call(rbind, fish_length) %>%
    group_by(hexid4k) %>%
    summarize(length = sum(length))

  # Join back onto the fishnet and compute density = length / cell area
  fishnet <- fishnet %>%
    st_join(., fish_length, join = st_intersects) %>%
    mutate(hexid4k = hexid4k.x,
           length = ifelse(is.na(length), 0, length),
           pixel_area = as.numeric(st_area(geom)),
           density = length / pixel_area)

  fishnet
}

To prep the data:

library(sf)
library(tidyverse)
library(snowfall)

input_hexagons <- st_read("hexagons.gpkg")
input_rail_lines <- st_read("railroad_lines.gpkg")

Using some code from here:

# Build a tibble-like object without the overhead of as_tibble() validation
faster_as_tibble <- function(x) {
  structure(x, class = c("tbl_df", "tbl", "data.frame", "sfc"),
            row.names = as.character(seq_along(x[[1]])))
}

# Split a data frame by row indices rather than copying whole sf objects
split_fast_tibble <- function(x, f, drop = FALSE, ...) {
  lapply(split(x = seq_len(nrow(x)), f = f, ...),
         function(ind) faster_as_tibble(lapply(x, "[", ind)))
}

Create a state-wise list:

sub_hexnet <- split_fast_tibble(input_hexagons, input_hexagons$STUSPS) %>%
    lapply(st_as_sf)
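
Just to confirm the structure: sub_hexnet should be a named list of sf objects, one element per state (an illustrative check; the names depend on which states are present in hexagons.gpkg):

length(sub_hexnet)       # number of states
head(names(sub_hexnet))  # state abbreviations used as list names
class(sub_hexnet[[1]])   # each element should be an sf data frame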

Finally, to run it as a single-core process:

test <- lapply(as.list(sub_hexnet),
               FUN = length_in_poly,
               spatial_lines = input_rail_lines)

Or, in the perfect world, a multi-core process:

sfInit(parallel = TRUE, cpus = parallel::detectCores())
sfExport(list = c("sub_hexnet", "input_rail_lines"))

extractions <- sfLapply(sub_hexnet,
                        fun = length_in_poly,
                        spatial_lines = input_rail_lines)
sfStop()

Thanks in advance for any help - I am completely stumped!


Solution

  • After messing around with this for a while, I finally figured out a solution.

    The key helper functions used:

    load_data <- function(url, dir, layer, outname) {
      file <- paste0(dir, "/", layer, ".shp")

      # Download and unzip the shapefile only if it is not already on disk
      if (!file.exists(file)) {
        download.file(url, destfile = paste0(dir, ".zip"))
        unzip(paste0(dir, ".zip"), exdir = dir)
        unlink(paste0(dir, ".zip"))
      }

      sf::st_read(dsn = dir, layer = layer)
    }
    
    
    get_density <- function(x, grids, lines) {

      require(tidyverse)
      require(lubridate)
      require(sf)

      # The single hexagon with this id
      sub_grids <- grids %>%
        dplyr::filter(hexid4k == x)

      # Lines tagged with this hexagon id, clipped to the hexagon itself
      single_lines_hexid <- lines %>%
        dplyr::filter(hexid4k == x) %>%
        sf::st_intersection(., sub_grids) %>%
        dplyr::select(hexid4k, STUSPS) %>%
        dplyr::mutate(length_line = st_length(.),
                      length_line = ifelse(is.na(length_line), 0, length_line))

      # Total line length, hexagon area, and density = length / area
      sub_grids <- sub_grids %>%
        sf::st_join(., single_lines_hexid, join = st_intersects) %>%
        dplyr::mutate(hexid4k = hexid4k.x) %>%
        dplyr::group_by(hexid4k) %>%
        dplyr::summarize(length_line = sum(length_line)) %>%
        dplyr::mutate(pixel_area = as.numeric(st_area(geom)),
                      density = length_line / pixel_area) %>%
        dplyr::select(hexid4k, length_line, density, pixel_area)

      return(sub_grids)
    }
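
    For a single hexagon id the function can be run on its own, which is handy for testing; an illustrative call, assuming the hexnet_4k and transmission_lines_hex objects built in the prep step below (the id 1 is just an example value):

    # Illustrative test call; hexnet_4k and transmission_lines_hex are created below
    one_hex <- get_density(x = 1,
                           grids = hexnet_4k,
                           lines = transmission_lines_hex)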
    

    Prep the input data:

    # p4string_ea: an equal-area CRS string, assumed to be defined earlier in the project
    usa_shp <- load_data(url = "https://www2.census.gov/geo/tiger/GENZ2016/shp/cb_2016_us_state_20m.zip",
                         dir = 'data',
                         layer = "cb_2016_us_state_20m",
                         outname = "usa") %>%
      sf::st_transform(p4string_ea) %>%
      dplyr::filter(!STUSPS %in% c("HI", "AK", "PR"))
    usa_shp$STUSPS <- droplevels(usa_shp$STUSPS)
    
    # spsample() and HexPoints2SpatialPolygons() come from the sp package
    library(sp)

    hex_points <- spsample(as(usa_shp, 'Spatial'), type = "hexagonal", cellsize = 4000)
    hex_grid <- HexPoints2SpatialPolygons(hex_points, dx = 4000)
    hexnet_4k <- st_as_sf(hex_grid) %>%
      mutate(hexid4k = row_number()) %>%
      st_intersection(., st_union(usa_shp)) %>%
      st_join(., usa_shp, join = st_intersects) %>%
      dplyr::select(hexid4k, STUSPS)
    
    transmission_lines_hex <- load_data(url = "https://hifld-dhs-gii.opendata.arcgis.com/datasets/75af06441c994aaf8e36208b7cd44014_0.zip",
                                        dir = 'data',
                                        layer = 'Electric_Power_Transmission_Lines',
                                        outname = 'tl') %>%
      dplyr::select(LINEARID, STUSPS) %>%
      st_join(., hexnet_4k, join = st_intersects) %>%
      mutate(STUSPS = STUSPS.x) %>%
      dplyr::select(LINEARID, hexid4k, STUSPS)
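
    One thing worth checking before the intersections is that both layers end up in the same projection, since st_join() and st_intersection() expect matching CRSs; a quick check (not part of the original workflow):

    # Both layers must share a CRS for st_join()/st_intersection()
    st_crs(hexnet_4k) == st_crs(transmission_lines_hex)
    # If FALSE, reproject one of them, e.g.:
    # transmission_lines_hex <- st_transform(transmission_lines_hex, st_crs(hexnet_4k))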
    

    The resulting parallel process is as follows:

    hexnet_list <- hexnet_4k %>%
      split(., .$STUSPS)

    # num_cores is assumed to be set earlier, e.g. parallel::detectCores()
    sfInit(parallel = TRUE, cpus = num_cores)
    sfExport('transmission_lines_hex')
    sfSource('src/functions/helper_functions.R')

    transmission_lines_density <- sfLapply(hexnet_list,
                                           function(input_list) {
                                             require(tidyverse)
                                             require(magrittr)
                                             require(lubridate)
                                             require(sf)

                                             sub_grid <- dplyr::bind_cols(input_list)
                                             unique_ids <- unique(sub_grid$hexid4k)
                                             state_name <- unique(sub_grid$STUSPS)[1]

                                             print(paste0('Working on ', state_name))
                                             # Density for every hexagon in this state
                                             got_density <- lapply(unique_ids,
                                                                   FUN = get_density,
                                                                   grids = sub_grid,
                                                                   lines = transmission_lines_hex)
                                             print(paste0('Finishing ', state_name))

                                             return(got_density)
                                           })
    sfStop()
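
    Because each element returned above is itself a list of one-row-per-hexagon sf objects, the results can then be collapsed into a single sf object; a minimal sketch, assuming the structure produced by get_density():

    # Flatten each state's list of hexagons, then bind all states together
    state_tables <- lapply(transmission_lines_density,
                           function(state_results) do.call(rbind, state_results))
    transmission_lines_density_sf <- do.call(rbind, state_tables)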
    

    I hope some of this is useful to you and, as always, suggestions on optimization are welcome.