Date: 2024-06-19
R version: 3.5.0
*Corresponding author: matthew.malishev [at] gmail.com
This document can be found at https://github.com/darwinanddavis/UsefulCode
Accessing sf, sp, geos, and geojson objects, e.g. attributes, lower levels of their structure, latlon and MULTIPOLYGON layers, etc.
Read and convert json to df
# opt 1
require(jsonlite)
data <- fromJSON("data.json", simplifyDataFrame = T, flatten = T) %>% as.data.frame()
# opt 2
file <- "my_geo_data.json" %>% readr::read_lines
# opt 3
file <- "my_geo_data.json" %>% read_sfConvert sf to geojson
require(geojsonio)
sf %>% geojsonio::geojson_list()
Save R obj as geojson
# save R obj as geojson
sfjson %>% geojson_write(file = here::here("data", "new.json"))
# opt 2
sf %>% st_write("sf.geojson")
# opt 3 convert sf to json string and save to dir
sf %>% geojsonio::geojson_list() %>% rjson::toJSON(method = "C") %>% readr::write_lines("sf.json")
Loop through sf and convert and save as geojson
require(geojsonio)
for (i in sf$name) {
sf %>% filter(name %in% i) %>% geojson_list() %>%
geojson_write(here::here("data", paste0("newfile", i, ".json")))
}
Read in json and convert to df
require(jsonlite)
read_json("test.json", simplifyVector = T)
Write json/geojson to dir
# write json methods working but only points
pacman::p_load(jsonlite, geojsonio)
sf %>% st_write("data.shp") # write shp
sf %>% st_write("data.json") # write json
sf %>% st_write("data.geojson", driver = "geojson") # write geojson
sf %>% st_write("data.json", driver = "geojson", delete_dsn = T) # overwrite file
sf %>% geojsonio::geojson_write(file = "data.json")
sf %>% rjson::toJSON(indent = 2)
sf %>% geojsonio::geojson_list() %>% rjson::toJSON(method = "C") %>% readr::write_lines("sf.json")
sf %>% geojsonio::file_to_geojson(output = "data.geojson")
# convert sf to json
sf %>% jsonlite::toJSON() %>% jsonlite::prettify()
# convert list to geojson
ls %>% rjson::toJSON()
ls %>% jsonify::to_json(unbox = T) %>% jsonify::pretty_json()
Check available drivers for writing shp, json, geojson, kml
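The rgdal route below still works, but sf lists the same GDAL drivers; a quick sketch (dplyr assumed loaded):
require(sf)
st_drivers() %>% filter(grepl("GeoJSON|KML|Shapefile", name)) # vector drivers available to st_write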
require(rgdal)
ogrDrivers() %>% View
Get bbox coords
require(sf)
methods(class = "sf")
obj$geometry %>% attr(which = "bbox")
Get first element or range of sf
obj$geometry
obj$geometry[1] %>% unlist %>% .[1] # first element
obj %>% st_coordinates() %>% .[1:3] # coord range
Get polygons (and range) in geometry layer
obj %>% st_geometry() %>% .[1:10] # polygon range
Remove or keep/select specific sf types from sf obj
obj %>% st_collection_extract("POINT") # get just points
obj %>% st_geometry %>% st_collection_extract("POINT") # get geometry of just points
Get boundary/outlines
# good for plotting US state border lines over other sf files
obj %>% st_geometry() %>% st_boundary %>% ggplot() + geom_sf()
# boundary of bbox
mp %>% st_bbox() %>% st_as_sfc() %>% ggplot() + geom_sf()
Break up multipolygon into separate polygons/pull specific polygon, e.g. main France polygon and rm islands
french_guiana <- ne_countries("large",type = "countries",returnclass = "sf") %>%
filter(name %in% c("France")) %>% # add french guiana
st_cast("POLYGON") %>% slice(1) # get just french guiana Get areas of individual polygons within multipolygon
dd %>% filter(name == "Netherlands") %>% st_cast("POLYGON") %>% st_geometry %>% st_area()Filter sf geometry only
dat %>% filter(st_geometry_type(.) == "LINESTRING")Convert matrix directly to sf
require(sfheaders)
m %>% sf_point()
m %>% sf_multipolygon()
m %>% sf_multilinestring()
m %>% sf_linestring()
# convert list of matrices/arrays into sf
require(purrr)
require(magrittr)
require(rlist)
ls %>%
map(matrix, ncol = 2) %>%
map(magrittr::set_colnames, c("lon","lat")) %>% # rename cols
compact() %>% # drop empty lists
map(sfheaders::sf_multipolygon, multipolygon_id = NULL) %>% # convert to sf
rlist::list.rbind() # bind sfs
Return Z range for sf obj
sf %>% st_z_range()
Drop zm components
sf %>% st_zm()
Read in .shp file
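rgdal is retired, so a minimal sf sketch first (the file name "X.shp" is a placeholder); the rgdal version follows below:
require(sf)
shp_sf <- st_read("X.shp") # reads geometry + attribute table into an sf data frame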
require(rgdal) # \ rgdal
shp <- readOGR(".shp", layer = "X") # layer = name of geometry element
Get and transform projection
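The sf equivalents (assuming an sf object named sf_obj); the sp/rgdal route follows below:
require(sf)
st_crs(sf_obj) # get projection
sf_obj %>% st_transform("+proj=longlat +datum=WGS84") # transform projection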
require(rgdal) # \ rgdal
proj4string(shp) # get projection
spTransform(shp, CRS("+proj=longlat +datum=WGS84")) # transform projection
Base map from OSM
# osm
gg <- get_map(location, source = "osm", color = "bw")
ggmap(gg, extent = "device", darken = c(0.9, "white")) + geom_point(data = df, aes(x, y))
Crop out sf objects
sf %>% st_crop(dd %>% filter(name == "Victoria"))
sf %>% st_intersection(dd %>% filter(name == "Victoria"))
Crop plot region to country border
ggplot() + borders(regions = "Germany") + geom_tile()
Mask/crop raster data
Crop raster/tiff to bbox
obj <- pathm # sf object
ras <- "raster.tiff" %>% raster # load raster
e <- as(obj %>% extent(), "SpatialPolygons") # get bbox/extent as Sp
crs(e) <- "+proj=longlat +datum=WGS84 +no_defs" # add prj
r <- crop(ras, e) # crop raster to this bbox
r <- r %>% rasterToPoints() %>% as.data.frame # convert to df for plotting
colnames(r) <- c("x", "y", "z")
Get inverse mask (hole within sf obj)
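Besides raster::mask below, a pure-sf sketch for cutting a hole, assuming sf1 and sf2 are overlapping polygon sf objects:
require(sf)
st_difference(sf1, sf2) # sf1 with the area covered by sf2 removed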
require(raster)
mask(sf1, sf2, inverse = T)
Crop multipolygon boundary to surround sf obj boundary and combine into one polygon
dd %>% st_intersection(boundary_osm %>% st_boundary()) %>% st_cast("POLYGON")
# option 2
sf %>% st_union %>% st_cast("POLYGON") %>% st_boundary()
Shrink sf obj (reverse buffer)
# subtract centroid from geometry, then halve and reposition centroid
(sf$geometry - sf$geometry %>% st_centroid)/2 + sf$geometry %>% st_centroid()
Displace sf obj
# displace x by 0 and y by 500
sf_disp <- sf %>% st_geometry() + c(0, 500)
# plot using aes
ggplot() + geom_sf(aes(geometry = sf_disp))
Crop sf based on area
sf %>% filter(st_area(.) %>% as.numeric() == sf %>% st_area() %>% as.numeric() %>% max()) # keep only the largest polygon
# filter out polygons based on area
quant <- 0.3
sf %>% filter(st_area(.) > st_area(.) %>% quantile(quant)) # keep polygons larger than the 30th percentile of polygon areas
Make custom bbox
require(ggmap)
lon <- 144.93
lat <- -37.79
ggmap::make_bbox(lon, lat, f = 0.5)
Crop satellite map to bbox
# see blood gold for example
cropx <- c(10.5,23.4)
cropy <- c(-1.5,44.0)
ggplot() +
ggmap(basemap) + # sat map from google
scale_x_continuous(limits = cropx, expand = c(0,0)) +
scale_y_continuous(limits = cropy,expand = c(0,0))
Create concave polygon hull of multipoints
require(concaveman)
sf %>% summarise() %>% concaveman::concaveman(concavity = 1)
Crop raster
require(raster)
r = raster(vals = rnorm(400), nrows = 20, ncols = 20, ext = extent(c(0, 20, 0, 20)))
p = Polygon(matrix(c(5, 5, 15, 12, 7, 16, 3, 10), ncol = 2, byrow = T))
p = SpatialPolygons(list(Polygons(list(p), "p")))
r2 = mask(r, p)
plot(r2)
Get any city centroid, add buffer, then crop
require(raster)
require(maps)
data(world.cities)
buff <- 2
country <- "Australia"
city <- "Melbourne"
# get centroid
bb <- world.cities %>% filter(country.etc == country & name == city) %>% select(long, lat) %>% unlist %>%
st_point() %>% st_buffer(buff)
# crop with polygon
sf %>% st_crop(bb)
# crop with bbox
sf %>% st_crop(bb %>% st_bbox() %>% st_as_sfc())
Crop one side of sf object
# https://spencerschien.info/post/spatial_buffer/
sf_use_s2(FALSE) # set first to enable geos
sf %>% st_buffer(100, singleSide = T)
Databases - maps, world data, natural earth, raster, DEM
# country polygons and natural earth data - rivers/lakes, urban areas, etc
require(rnaturalearth)
# hires country polygons
devtools::install_github("ropensci/rnaturalearthhires")
require(rnaturalearthhires)
Get elevation raster around a point (elevatr)
require(elevatr)
pts <- c(-140.5, 35.6)
pts_buff <- pts %>% st_point() %>% st_sfc(crs = 4326) %>% st_buffer(1 * 10^4) # create buffer around centroid
get_elev_raster(pts_buff, z = 12)
Remove empty geometries
# rm empty geos for ''spsample': error in evaluating the argument 'x' in selecting a method for
# function 'addAttrToGeom': empty geometries are not supported by sp classes: conversion failed'
sf %>% filter(!st_is_empty(.))
sf %>% filter(is.na(st_dimension(.)) == F) # opt 2 to rm empty geos
Align sf objects with Google map satellite base map
addy <- "Alice Springs, Northern Territory"
extent <- geocode(addy, output = "more") %>% pull(address)
maptype <- "satellite"
zoom <- 4
color <- "bw"
darken <- 0.3
# get google map
prj <- 3857 # set proj to match google map prj
base_map <- get_map(extent, maptype = maptype, color = color, zoom = zoom)
# function to fix the bbox to be in EPSG:3857
ggmap_bbox <- function(map) {
if (!inherits(map, "ggmap"))
stop("map must be a ggmap object")
# Extract the bounding box (in lat/lon) from the ggmap to a numeric vector, and set the names to what
# sf::st_bbox expects:
map_bbox <- setNames(unlist(attr(map, "bb")), c("ymin", "xmin", "ymax", "xmax"))
# Convert the bbox to an sf polygon, transform it to 3857, and convert back to a bbox (convoluted,
# but it works)
bbox_3857 <- st_bbox(st_transform(st_as_sfc(st_bbox(map_bbox, crs = 4326)), prj))
# Overwrite the bbox of the ggmap object with the transformed coordinates
attr(map, "bb")$ll.lat <- bbox_3857["ymin"]
attr(map, "bb")$ll.lon <- bbox_3857["xmin"]
attr(map, "bb")$ur.lat <- bbox_3857["ymax"]
attr(map, "bb")$ur.lon <- bbox_3857["xmax"]
map
}
map <- ggmap_bbox(base_map) # transform proj of sat map
# plot
map %>% ggmap(extent = "device", legend = "bottom", darken = c(0.5, "#FFFFFF"))
Plot sf data on Google maps basemap
prj <- 3857 # set proj to match google map prj
map %>% ggmap(extent = "device", legend = "bottom", darken = c(0.5, "#FFFFFF")) + geom_sf(data = sf %>%
st_transform(prj), size = 0.05, inherit.aes = F) # include inherit.aes = F to show data
Read in gdb data (.gdb/.gdbtable)
fh <- "polygons.gdbtable" # 'polygons.gdb'
fhl <- fh %>% st_layers() %>% .[[1]] # get layer name
sf <- fh %>% st_read(layer = fhl) # read in data using layer
Read .gpx files (GPS track software/apps)
# option 1
gpx <- st_read("user.gpx", "track_points")
pathm <- gpx %>%
group_by(user_ID) %>% # get grouping variable
dplyr::summarize(do_union = F) %>%
st_cast("LINESTRING")
# option 2
pacman::p_load(XML,lubridate)
fd <- "/Volumes/Matt_timemachine/maptracks/2021/sep/sam/gpx/"
fh <- "day182.gpx"
gpx1 <- paste0(fd,fh) %>% # parse gpx file
htmlTreeParse(error = function(...) {}, useInternalNodes = T)
elev <- as.numeric(xpathSApply(gpx1, path = "//trkpt/ele", xmlValue))
times <- xpathSApply(gpx1, path = "//trkpt/time", xmlValue)
coord <- xpathSApply(gpx1, path = "//trkpt", xmlAttrs)
city_df <- tibble(lat = coord["lat",] %>% as.numeric(),
lon = coord["lon",] %>% as.numeric(),
elev = elev,
time = times %>% ymd_hms()
)
# elev profile
ggplot(data = city_df) +
geom_line(aes(time,elev)) +
scale_x_datetime(date_breaks = "6 hours"
# date_minor_breaks = "1 hour", # optional
# date_labels = "%M%D" # full month and year
)
# convert gpx coords to linestring
pathgpx <- city_df %>%
dplyr::select(lat,lon) %>%
as.matrix() %>% # convert points to linestring
st_linestring(dim = "XY") %>%
st_sfc(crs = 4326) %>% # convert sfg object to geometry
st_transform(crs = 4326) # then convert projection
Parse gpx data within kml
First manually convert file from '.kml' to '.gpx'
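If the manual conversion is awkward, a rough sf-only sketch reads the kml directly (file path and layer contents are assumptions):
require(sf)
kml <- st_read("data/full.kml") # read kml track(s) as sf
kml %>% st_zm() %>% st_coordinates() %>% head() # lon/lat matrix of the track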
require(XML)
require(dplyr)
require(tidyr)
coord_id <- "//coord" # tag where lat/lon/elev data are located in file
gpx2 <- "data/full.gpx" %>%
htmlTreeParse(error = function(...) {}, useInternalNodes = T)
pathgpx <- xpathSApply(gpx2, path = coord_id, xmlValue) %>% # read coords (lon,lat,elev)
as.data.frame() %>%
tidyr::separate(col = ".",into = c("lon", "lat", "elev"), sep = " ", remove = T) %>% # separate char into individual values
mutate_all(as.numeric)
# timestamp
time_id <- "//when" # may vary with data
gpx2 <- "data/full.gpx" %>%
xpathSApply(gpx2, path = time_id, xmlValue) # get timestamp library("ggspatial")
ggplot(data = world) +
geom_sf() +
annotation_scale(location = "bl", width_hint = 0.5) +
annotation_north_arrow(location = "bl", which_north = "true",
pad_x = unit(0.75, "in"), pad_y = unit(0.5, "in"),
style = north_arrow_fancy_orienteering) +
coord_sf(xlim = c(-102.15, -74.12), ylim = c(7.65, 33.97))
# scale bar
p + ggspatial::annotation_scale(
location = "bl",
bar_cols = c(bg,fg),
line_width = 0.5,
pad_x = unit(1.5,"cm"),
pad_y = unit(1.5,"cm"),
text_col = fg, line_col = fg,
style = "ticks", # "bar"
width_hint = 0.1 # set scale to 10% of map
)
# note: ggspatial may warn 'Scale on map varies by more than 10%, scale bar may be inaccurate'
# accessing coords in sf (complex)
# shp <- here::here('worldmaps','data','day14.shp') %>% readOGR()
# d <- shp@data # df vars
# bb <- shp@bbox # bbox
# ply <- shp@polygons # polygons
# plyl <- ply %>% purrr::map(ggplot2::fortify) # melt polygon class
# names(plyl) <- d$VARNAME_3 # get district name
# ggplot() + geom_polygon(data = ply_df[[600]], aes(long, lat))
# ply[[1]]@Polygons[[1]]@coords # works
# ply[[1]] %>% attr(which='Polygons') %>% purrr::map('coords') # works
#list of proj
# https://proj.org/operations/projections/patterson.html
# https://spatialreference.org/ref/?search=albers&srtext=Search
pacman::p_load(mapdata, rnaturalearth)
###
d <- ne_countries(scale = "large",
type = "countries",
country = "Japan",
returnclass = "sf")
bb <- d$geometry %>% st_bbox()
# get current proj
d %>% st_crs() # /sf
d %>% crs() # /raster
# transform proj options
crsn <- 3995 # 3995 3033
plat <- 50
plon <- 0
prj <- paste0("+lat_0=",plat," +lon_0=",plon," +init=epsg:",crsn) # opt1
prj <- "+init=epsg:4326" # opt2
# transforming for diff data types
ras@crs@projargs <- prj # raster
city_df %>% st_as_sf(coords = c("lon","lat"), crs = 4326) %>% # convert city_df to sf. NB need to set initial crs to one compatible with latlon coords, e.g. 4326
sf::st_transform(crs = prj) # now transform proj
dl %>% st_transform(prj) # shp
dt <- d %>% st_transform("+proj=utm +zone=19 +ellps=GRS80 +datum=NAD83")
dt <- d %>% st_transform(crsn)
# quartz()
p <- ggplot(data = d) +
geom_sf() +
coord_sf(crs = prj) # option 1
coord_sf(crs = st_crs(crsn)) # option 2
# proj in string format
prj <- projection(pathm) # get proj in string format
# site2 <- elevatr::get_elev_raster(city_df,
# z = zoom,
# prj = prj,
# expand = 3) %>% # get elev raster
# rasterToPoints() %>% # convert to df
# as.data.frame
# coerce projection
sf %>% st_set_crs(prj) %>% st_transform(prj2) # or use prj again to coerce uncooperative projections
Get projection info
# find proj
crs_codes = rgdal::make_EPSG() # get all EPSG codes and info
crs_codes %>% filter(code == 27700) %>% pull # pull EPSG name and code
crs_codes[crs_codes$note %>% str_which("Austra"), "code"]
Read in kml/kmz files
require(sf)
path <- "/data.kml" %>% sf::st_read() # single layer kml
path <- "/data.kml" %>% sf::st_layers() # multi layer kml
# for kmz, change .kmz to .zip, then unzip and read as kml
Save sf as kml/kmz
# opt 1
st_write(sf, "test.kml", driver = "kml", delete_dsn = T)
# opt 2
require(rgdal)
writeOGR(sf, dsn = "sf.kml", layer = "polygonWGS", driver = "KML")
Read and plot lidar data
library(rlas)
# 3d plotting of lidar data
devtools::install_github("AckerDWM/gg3D")
library(gg3D)
Convert raster to sf
library(stars)
library(sf)
tifpath <- system.file("tif/L7_ETMs.tif", package = "stars")
tif <- read_stars(tifpath)
sf <- st_as_sf(tif)
Convert sp/sf/spdf data to data frame (fortify)
require(rworldmap)
require(broom)
d <- getMap(resolution = "high") # get world data
d_fort <- tidy(d, region = "NAME") # fortify
Convert sf geometry to df
require(sfheaders)
sf %>% sf_to_df()
Convert points to linestring
inter_df <- rbind(df1,df2,df3) %>%
as.matrix() %>%
st_linestring(dim = "XY") %>% # convert points to single linestring
st_sfc(crs = 4326) # convert sfg object to geometry
# option 2
pathm <- city_df %>%
dplyr::select(lat,lon) %>%
as.matrix() %>% # convert points to linestring
st_linestring(dim = "XY") %>%
st_sfc(crs = 4326) # convert sfg object to geometry
Convert each row in df to linestring
inter_df <- rbind(df1, df2, df3) %>% st_as_sf(coords = c("lon", "lat"), crs = 4326) %>% st_geometry() %>%
st_cast("LINESTRING")Replace coordinates in sf obj
sf_df <- sf %>% st_coordinates %>% data_frame # convert to df
sf_df[1:3, "Y"] <- c(10.1, 10.2, 10.3) # replace with new coords
sf <- sf_df[, c("X", "Y")] %>% as.matrix() %>% st_linestring(dim = c("X", "Y")) # convert back to sfFlip XY coordinates
sf %>% st_coordinates() %>% .[, c(2, 1)]Convert list of sf obj to sfc
do.call(rbind, my_list)Convert coordinates to text (for simple copy paste)
bbc <- dd %>% filter(name %in% "Netherlands")
bbc %>% st_centroid() %>% st_geometry() %>% st_as_text()
Read in raster, change projection, and convert to matrix/df
require(raster)
ras <- "ras.tif" %>% raster()
prj <- ras %>% projection() # get proj
bb <- ras %>% extent() # get bbox
require(raster)
ras %>% raster() %>% crop(bb) %>% rasterToPoints() %>% as.data.frame
require(rayshader)
ras %>% raster %>% crop(bb) %>% raster_to_matrix()
require(terra)
ras %>% rast() %>% crop(bb)
# read in .adf files
require(rayshader)
"data.adf" %>% raster %>% raster_to_matrix %>% data.frameConvert raster to various data stuctures
require(raster)
alt = getData("alt", country = "CHE")
slope = terrain(alt, opt = "slope")
aspect = terrain(alt, opt = "aspect")
hill = hillShade(slope, aspect, 40, 270)
# raster to df
df <- alt %>% rasterToPoints() %>% as.data.frame() # terrain
df <- aspect %>% rasterToPoints() %>% as.data.frame() # aspect
df <- hill %>% rasterToPoints() %>% as.data.frame() # hillshade
# raster to sf
sf <- hill %>% rasterToPoints() %>% as.data.frame %>% st_as_sf(coords = c("x", "y"), crs = 4326)
bb <- sf %>% .[sf %>% nrow/2, ] %>% st_buffer(5 * 10^4) # circle crop based on middle centroid
bb <- sf %>% .[sf %>% nrow/2, ] %>% st_buffer(5 * 10^4) %>% st_bbox() # bbox crop
sf <- sf %>% st_crop(bb)
# raster to spdf
spp <- hill %>% as("SpatialPixelsDataFrame") %>% data.frame # ras to spdf to df
# plot
ggplot() + geom_raster(data = df, aes(x, y, fill = layer)) # plot df
geom_sf(data = sf, aes(fill = layer, col = layer)) # plot sf
geom_sf(data = spp, aes(fill = layer, col = layer)) # plot spdf
Elevation profiles
pacman::p_load(topoDistance,elevatr)
# elevation raster (country tiles)
elevp <- getData('alt', country='FRA', mask = T)
elevp$FRA_msk_alt@crs # crs
elevp$FRA_msk_alt@extent # bbox
# elevation raster
elevp2 <- elevatr::get_elev_raster(city_df[,c("lon","lat")], # get elev raster
z = 3,
prj <- "+proj=longlat +datum=WGS84 +no_defs"
# expand = 3
) %>%
rasterToPoints() %>% as.data.frame
elevp2sf <- elevp2 %>% st_as_sf(coords = c("x","y"), crs = 4326) # df to sf
# elevation points (usa only)
elevp3 <- elevatr::get_elev_point(df,
prj <- "+init=epsg:4326")
elevp3_fort <- tidy(elevp3$elevation, region = "elevation") # fortify
# convert raster to diff classes
elevdf <- elevp %>% rasterToPoints() %>% as.data.frame # raster to df
elevdf <- elevp %>% rasterToPoints() %>% as.data.frame %>%
st_as_sf(coords = c("x","y"), crs = 4326) # raster to sp
# convert lines to elev profile
pts <- df[,c("lon","lat")] %>% as.matrix() %>% SpatialPoints() # df to points
elevpath <- topoDist(elevp,pts)
topoProfile(elevp,sl,pts = 100, type = "base") # generic elev profile plot
# plot
qplot(data = elevp2[1:10000,], x, layer, geom = "line")
Summarise raster layers
cellStats(raster, stat = "min", na.rm = T) # summarise raster layers
Read .tiff
require(raster)
dem1 <- "srtm_20_05.tif" %>% raster()Decrease/increase raster resolution
ras %>% aggregate(fact = 3) # lower res by factor of 3
ras %>% disaggregate(fact = 3, method = "bilinear") # increase res by factor of 3
require(terra)
ras %>% disagg(3)
ras %>% aggregate(3)
ras %>% aggregate(c(2, 3), fun = sum) # aggregate by diff factor for rows and cols plus change method
Match geometry/proj of raster from different sources
require(terra)
ras %>% res() # get resolution of raster
ras %>% resample(ras2) # resample ras to match the geometry/resolution of ras2
# change projection
ras %>% project("+proj=utm +zone=32")
Change resolution of raster
require(raster)
res(ras)
xres(ras)
yres(ras)
res(ras) <- 1/120
res(ras) <- c(1/120, 1/60) # change res of x and y
Get zscale/elevation of raster
require(geoviz)
ras %>% raster_zscale()
Shaded contour maps (Tanaka maps)
# https://github.com/riatelab/tanaka/ https://rgeomatic.hypotheses.org/1536
library(tanaka)
library(terra)
ras <- rast(system.file("tif/elev.tif", package = "tanaka"))
tanaka(ras, breaks = seq(80, 400, 20), legend.pos = "topright", legend.title = "Elevation\n(meters)")
Convert sf to spatiallines obj
lns <- df[, c("lon", "lat")] %>% as.matrix() %>% Line() # df to lines
pts <- df[, c("lon", "lat")] %>% as.matrix() %>% SpatialPoints() # df to points
# sf to spatiallines
sl <- sf$geometry %>% .[1] %>% st_zm() %>% as_Spatial() # opt 1
sl <- sf$geometry %>% .[1] %>% st_zm() %>% as("Spatial") # opt 2Convert spatial class to sf
sp %>% st_as_sf()Hi-res globe map
pacman::p_load(rnaturalearth,dplyr,sf)
inset_globe <- ne_countries("medium", type = "sovereignty", returnclass = "sf") # %>%
# filter(!continent %in% c("Africa","Europe"))
inset_countries <- ne_countries("medium", country = c("United States of America","Australia"), type = "sovereignty", returnclass = "sf")
crsn <- 2163
plat <- 5
plon <- 180
inset_prj <- paste0("+lat_0=",plat," +lon_0=",plon," +init=epsg:",crsn) # set centered projection
inset_globe <- inset_globe %>% st_transform(inset_prj)
inset_countries <- inset_countries %>% st_transform(inset_prj)
inset_flight <- inter_df1 %>% st_transform(inset_prj) %>% st_as_sf() # convert into sf
gp <- ggplot() +
geom_sf(data = inset_globe, fill = adjustcolor(fg,0.7), col = NA, size = 0.1) +
geom_sf(data = inset_countries, fill = adjustcolor(flight_col,0.7), col = NA, size = 0.1) +
geom_my_sf(inset_flight,flight_col,0.5,3) + # melb to la flight
coord_sf(crs = inset_prj) +
# theme(plot.background = element_rect(fill = "transparent")) +
theme_nothing()
gp
Getting gc flight path from sf obj
library(geosphere)
require(sf)
get_inter_func <- function(sc1, sc2){
gcIntermediate(
sites_flight[sites_flight$city %in% sc1,] %>% st_geometry() %>% unlist,
sites_flight[sites_flight$city %in% sc2,] %>% st_geometry() %>% unlist,
n = 100, addStartEnd = T, breakAtDateLine = F) %>%
as.data.frame()
}
get_inter_func("city1", "city2") %>% # gc points between two cities (placeholder names)
as.matrix() %>% # convert points to linestring
st_linestring(dim = "XY") %>%
st_sfc(crs = 4326) # convert sfg object to geometry
Combine multiple sf/sfc geometries into one
multipath <- "kml" %>% st_layers %>% .[[1]] # get layer ids
pathm <- c()
for (pid in multipath) {
path <- kmldata %>% st_read(layer = multipath[pid], fid_column_name = "IDS")
pathm <- rbind(pathm, path)
}
# combine separated geometries into one geometry
patht <- st_combine(pathm)
# dissolve separate geometries into one
sf %>% st_union()
# join two different sf objs and combine into one geometry
# opt 1
dplyr::bind_rows(list(sf1, sf2)) %>% st_union()
# opt 2
st_sfc(sf1, sf2) %>% st_combine()
Join/merge df vars with sf obj (ideal for heatmap/choropleth)
countries1 <- df$country_name %>% unique # identify country names
mp <- ne_countries(scale = "medium", returnclass = "sf") %>%
filter(name %in% countries1) %>%
rename("country_name" = name) %>% # match var names between df and sp obj
left_join(df, by = "country_name") # join by matched var name
Merge/dissolve overlapping polygon/sf boundaries into one
dd <- rnaturalearth::ne_states(countries, returnclass = "sf")
dd2 <- ne_countries("large", "sovereignty", returnclass = "sf")
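rgeos (used in the gUnion call below) is retired; an sf-only sketch of the same dissolve using the objects above:
c(st_geometry(dd), st_geometry(dd2[dd, ])) %>% st_union() %>% plot()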
require(rgeos)
plot(gUnion(dd, dd2[dd, ]))
Snap sf objects based on proximity
library(units)
library(sfnetworks)
sf <- sf %>%
st_snap(.,., tolerance = 10000) %>% # coerces the snapping using a big tolerance value
as_sfnetwork() # convert to network obj
autoplot(sf)
Validate sfs (useful when combining sfs and connecting lines makes weird shapes)
sf %>% st_is_valid()
sf[!st_is_valid(sf), ] %>% st_make_valid()
Find intersecting geo points within sf objects
require(osmdata)
require(sf)
get_osm_feature <- function(loc,key,value,geo){
oo <- opq(loc) %>% # find location
add_osm_feature(key = key, value = value) %>% # source key-value pair
osmdata_sf() %>% # get sf obj
unname_osmdata_sf() # combat sf error
oo[[geo]] # %>% # pull geometry
}
# location, key, value
location1 <- "melbourne australia"
polyg <- get_osm_feature(location1, "leisure","park|natu","osm_polygons") # get osm features
# check if point intersects with polygon
pts_inter = c(144.7695,-38.2724) %>%
st_point() %>% # convert into sf point
st_sfc(crs = 4326) %>% # set prj
st_as_sf() # convert into sf
polyg <- polyg %>% st_transform(4326) # match proj to above sf coords
polyg_inter <- st_intersection(polyg,pts_inter) # find intersecting point within polygon
# plot intersecting polygon and point
st_intersection(pts_inter,polyg) %>% st_geometry()
polyg %>% filter(osm_id == "234385754")
ggplot() +
geom_sf(data = polyg %>% filter(osm_id == polyg_inter$osm_id)) + # plot polygon w intersecting point
geom_sf(data = pts_inter, col = "red") + # add intersecting point
geom_sf_text(data = pts_inter, label = "Intersecting\npoint") +
theme_bw()
Fixing intersecting sf objs (self-intersection/non-noded intersection error)
# 2023
sf_use_s2(FALSE) # run at start of session to avoid 'Error in overlapping polygons' error
require(sf)
require(lwgeom)
# various options
sf %>% st_make_valid # validate obj
sf %>% st_buffer(0) %>% st_intersection(polys) # add 0 buffer
sf %>% st_snap(polys, tolerance = 0.09) # snap to polygon edge
sf %>% lwgeom::st_snap_to_grid(10) # snap to grid
sf[5:1] %>% st_intersection() # try reversing order of obj
sf[sf %>% st_is_valid, ] # index only valid objs
Get intersecting underlying polygons using point data
# pull underlying polygon data where point data intersects/overlays
sf_polygon %>% st_join(sf_point, join = st_intersects)
# option 2
sf_polygon %>%
st_filter(sf_dataframe,
.predicate = st_intersects
# join = st_overlaps
)
# option 3 - check other predicate options and switch point/poly order
sf_polygon[st_intersects(sf_point,sf_polygon,sparse = T, prepared = T) %>% unlist,] %>%
st_join(sf_point, join = st_intersects, largest = T)
Merge sequential/overlapping sf polygons
# merge first 20 polygons into one polygon
sf[1:20, ] %>% st_union
# merge first 20 polygons into one multilinestring
sf[1:20, ] %>% st_boundary %>% st_line_merge
Read in multiple files and combine into one sfc
fhs <- dir %>% list.files(full.names = T)
prj <- 4326
slist <- NULL # empty container to collect sfs
for (f in fhs) {
s <- f %>% read_sf()
slist <- rbind(slist, s)
}
slist <- slist %>% mutate(id = LETTERS[1:length(fhs)])
Other predicate funcs for sf pairs
st_intersects()
st_disjoint()
st_touches()
st_crosses()
st_within()
st_contains()
st_contains_properly()
st_overlaps()
st_equals()
st_covers()
st_covered_by()
st_equals_exact()
st_is_within_distance()
Create interactive HTML knitted from Rmd
{rdeck}
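A minimal sketch of the idea; the rdeck layer/argument names here are assumptions from memory, so check the package docs:
# in an Rmd chunk knitted to html_document
library(rdeck)
rdeck() %>% # interactive deck.gl canvas rendered as an HTML widget
add_polygon_layer(data = sf_obj, get_fill_color = "#6A51A3") # sf_obj is a placeholder polygon sf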
Change spatial polygons into hexagon tiles
dd <- rnaturalearth::ne_states(c("New Zealand"), returnclass = "sf")
# opt 1
# convert sf files using st_make_grid (with x = st_as_sf(sf_obj))
st_make_grid(st_as_sf(dd),
square=FALSE,
cellsize = 1,
n = 200,
# crs = prj,
flat_topped = T)[st_as_sf(dd)]
# opt 2
# change sf to sp
df_hex <- df %>% st_as_sf(coords = c("lon","lat"), crs = 4163) # need to keep regular prj
hex_points <- df_hex %>% as_Spatial() %>% spsample(type = "hexagonal", cellsize = 0.3) # set cell size according to data density
hex_polygons <- HexPoints2SpatialPolygons(hex_points) %>%
st_as_sf(crs = st_crs(df_hex)) # %>% st_intersection(., df_hex)
hex_polygons$fill <- lengths(st_intersects(hex_polygons, df_hex)) # create fill col based on data length
# now project data
bb <- maps::world.cities %>% filter(country.etc == "Australia", name == "Brisbane") # %>% st_as_sf(coords = c("long","lat"), crs = prj) %>% st_buffer(1.2*10^6) %>% st_bbox
crsn <- 2163
plat <- bb[,"lat"]
plon <- bb[,"long"]
prj <- paste0("+lat_0=",plat," +lon_0=",plon," +init=epsg:",crsn) # opt1
dd <- ne_countries("large",returnclass = "sf") %>% st_transform(prj)
hex_polygons <- hex_polygons %>% st_transform(prj)
bbox <- bb %>% st_as_sf(coords = c("long","lat")) %>% st_buffer(5*10^6) %>% st_bbox
# plot
hex_polygons <- hex_polygons %>% filter(fill != 0) # rm all 0's (optional)
colpal <- sequential_hcl(hex_polygons$fill %>% n_distinct() %>% log + 1, "Purples") %>% rev # use log + 1 scale to improve col depth
ggplot() +
geom_sf(data = hex_polygons, aes(fill = fill %>% log + 1, col = fill %>% log + 1),size = 0.05) +
scale_fill_gradientn(name = "Activity", colours = colpal, aesthetics = c("col"), na.value = "transparent"
# , guide = "none"
)
Add buffer decay border around hexes
# using above hex_polygons sf data
ddd <- hex_polygons %>% filter(fill != 0)
hex_buff <- 5*10^4 # create buffer decay
hexbuff_outer <- ddd %>% st_combine %>% st_boundary() %>% st_buffer(hex_buff,endCapStyle = "SQUARE",joinStyle = "MITRE")
hexbuff_inner <- ddd %>% st_combine %>% st_boundary() %>% st_buffer(hex_buff/2,endCapStyle = "SQUARE",joinStyle = "MITRE")
colpal <- sequential_hcl(ddd$fill %>% n_distinct() %>% log + 1, "Purples") %>% rev # colpal for hex with fill = 0
ggplot() +
# hex with buffer decay
geom_sf(data = hexbuff_outer, col = NA, fill = adjustcolor(colpal[1] %>% darken(0.1),opac/2), size = 0.2) +
geom_sf(data = hexbuff_inner, col = NA, fill = adjustcolor(colpal[1] %>% darken(0.2),opac/2), size = 0.2) +
geom_sf(data = ddd , aes(fill = fill %>% log + 1, col = fill %>% log + 1),size = 0.1) +
scale_fill_gradientn(name = "Activity", colours = colpal, aesthetics = c("col"), na.value = "transparent"
# , guide = "none"
)
Turn sf into hex for mapping
library(h3jsr)
df_hex <- sf %>%
select(var1) %>%
mutate(hex=point_to_h3(geometry, res=8)) %>%
st_drop_geometry() %>%
count(hex) %>% # get count for var1 hex
h3_to_polygon(simple=FALSE) # turn into geometry
# add hex to leaflet
require(leaflet)
require(leafgl) # for addGlPolygons
leaflet(elementId = 'HexCounts') %>%
addProviderTiles(dependencyLocation = here::here()) %>%
addGlPolygons(data = df_hex, opacity = 1, fillColor = 'n')
Add sequential colpal to flight paths/geometry/sf
inter_df %>% mutate(colpal = path_col %>% rep(7) %>% lighten(seq(0.1, 0.5, 0.1)))
Add labels to sf obj
ggplot() +
geom_sf_label(data = state_names, aes(label = state), family = city_label_font, fill = NA, color = "red", size = 5, label.size = 0, nudge_x = 0.5, nudge_y = 0.5)
Hillshade from DEM
require(raster)
dem1 <- getData(country = c("USA"), level = 1) # download data
dem1 <- getData("SRTM", lat = 37.83, lon = -83.68) %>% aggregate(fact = 3) # get srtm and reduce res
dem1 <- "srtm_20_05.tif" %>% raster() # or read from file
angle_zen <- 40
angle_dir <- 270
quant <- 0.3 # quantile to filter elev data
dems <- dem1 %>% terrain(opt = "slope")
demr <- dem1 %>% terrain(opt = "aspect")
demh <- hillShade(dems, demr, angle_zen, angle_dir)
# convert to df
dem_df <- rasterToPoints(demh) %>% as.data.frame()
colnames(dem_df) <- c("lon", "lat", "hill")
# take only a band of hillshade values
dem_df <- dem_df %>% filter(hill > hill %>% quantile(quant), hill < hill %>% quantile(quant + 0.1)) # keep values between the 30th and 40th percentiles
# colpal
colpal <- c(border_col, fg, "#FFFFFF")
cc <- colpal %>% length # number of colour hues
ggplot() + geom_raster(data = dem_df, aes(lon, lat, fill = hill), show.legend = F)
New osm data method
# https://www.urbandemographics.org/post/mapping-walking-time-osm-r5r/
osmextract::oe_download(provider = "openstreetmap_fr", file_url = osmextract::oe_match("Rio De Janeiro")[[1]],
download_directory = here::here("data"), force_download = T)
Access levels within sf variable (e.g. Name) using attributes
sf$Name %>% attr(which = "levels")
Fix intersecting layers and wrapping error (spherical geometry error)
sf_use_s2(FALSE)
Combine and simplify sf polygons
pacman::p_load(maps, sf, rmapshaper)
bind_rows(list(
"a" = sf1,
"b" = sf2
)) %>% st_as_sf %>% st_combine()
# filter out dust
quant <- 0.1
bind_rows(list(
sf1,
sf2
)) %>%
filter(st_area(.) > st_area(.) %>% quantile(quant))
osm_list_quant %>% nrow
bind_rows(list(
sf1,
sf2
)) %>% st_simplify()
# set smoothness of polygons [0,1] (lower = less smooth)
keep <- 0.2
bind_rows(list(
sf1,
sf2
)) %>%
ms_simplify(keep = keep)
# set smoothness of polygons [0,1] (lower = less smooth)
keep <- 0.2
bind_rows(list(
sf1,
sf2
)) %>%
as('Spatial') %>% # convert to spoly then reconvert to sp
rmapshaper::ms_simplify(keep = keep) %>%
st_as_sf()
# combine tolerance and area quantile
keep <- 0.2
quant <- 0.1
osm_list_ms_simp3 <- bind_rows(list(
sf1,
sf2
)) %>%
as('Spatial') %>%
rmapshaper::ms_simplify(keep = keep) %>%
st_as_sf() %>% filter(st_area(.) > st_area(.) %>% quantile(quant))
Nearest neighbour connections between points (nngeo)
library(tidyverse)
library(sf)
library(nngeo)
airports <- read_csv("https://gist.githubusercontent.com/fletchjeff/b60f46ca3c1aa51e9aea9fd86d0ab433/raw/81ee446939110daf8b5c3a79425ccf1100070578/airports.csv") %>%
mutate(row_num = row_number()) %>% st_as_sf(coords = c("Longitude", "Latitude"))
# African airports
airports_africa <- airports %>% filter(str_detect(`Tz database time zone`, "Africa|Indian/Antananarivo"))
# Nearest neighbours
nn <- st_nn(airports_africa, airports_africa, k = 10, progress = F)
airports_connexions <- st_connect(airports_africa, airports_africa, ids = nn, progress = F)
# plot
airports_connexions %>% ggplot() + geom_sf(size = 0.75) + theme_minimal() + theme(plot.title = element_text(family = "Gotham Black",
size = rel(4)))