
After reading the tutorial, one may have noticed that writing certain pipelines can be tedious, especially normalizing a dataset, which always involves the same code:

tri <- triangulate(filter = keep_ground())
trans <- transform_with(tri)
norm <- tri + trans
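
Embedded in a complete pipeline, this boilerplate looks like the following sketch, where f and o stand for hypothetical input and output paths:

pipeline <- reader(f) + norm + write_las(o)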

This vignette presents a list of pipeline factories, i.e., functions that generate reusable pieces of pipelines. It also defines some convenience functions that execute a complete pipeline, so that common tasks do not have to be written out each time. The lists are not comprehensive and will be updated as good tools come to mind. Some of these are already installed natively in the package.

Pipeline factories

Normalize

This pipeline is already installed in the package.
normalize = function(extrabytes = FALSE)
{
  # Triangulate the ground points
  tri = triangulate(filter = keep_ground())
  pipeline = tri
  
  if (extrabytes)
  {
    # Store the height above ground in an extra attribute named HAG
    extra = add_extrabytes("int", "HAG", "Height Above Ground")
    trans = transform_with(tri, store_in_attribute = "HAG")
    pipeline = pipeline + extra + trans
  }
  else
  {
    # Replace Z with the height above ground
    trans = transform_with(tri)
    pipeline = pipeline + trans
  }
  
  return(pipeline)
}

It can be used this way:

pipeline = reader(f) + normalize() + write_las(o)
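
With extrabytes = TRUE, the normalized elevations are stored in an extra attribute named HAG instead of overwriting Z; the call is otherwise identical:

pipeline = reader(f) + normalize(extrabytes = TRUE) + write_las(o)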

Classify ground

With CSF

ground_csf = function(smooth = FALSE, threshold = 0.5, resolution = 0.5, rigidness = 1L, iterations = 500L, step = 0.65)
{
  csf = function(data, smooth, threshold, resolution, rigidness, iterations, step)
  {
    # RCSF::CSF returns the indices of the points classified as ground
    id = RCSF::CSF(data, smooth, threshold, resolution, rigidness, iterations, step)
    class = integer(nrow(data))
    class[id] = 2L
    data$Classification <- class
    return(data)
  }
  
  classify = callback(csf, expose = "xyz", smooth = smooth, threshold = threshold, resolution = resolution, rigidness = rigidness, iterations = iterations, step = step)
  return(classify)
}
pipeline = reader(f) + ground_csf() + write_las(o)

With MCC

ground_mcc = function(s = 1.5, t = 0.3)
{
  mcc = function(data, s, t)
  {
    # RMCC::MCC returns the indices of the points classified as ground
    id = RMCC::MCC(data, s, t)
    class = integer(nrow(data))
    class[id] = 2L
    data$Classification <- class
    return(data)
  }
  
  classify = callback(mcc, expose = "xyz", s = s, t = t)
  return(classify)
}
pipeline = reader(f) + ground_mcc() + write_las(o)

These pipelines use callback(), which exposes the point cloud as a data.frame. One of the reasons lasR is more memory-efficient and faster than lidR is precisely that it does not expose the point cloud as a data.frame, so these pipelines are not very different from the classify_ground() function in lidR. The advantage of lasR here is the ability to pipe these stages with others.
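
To make the mechanism explicit, here is the same pattern reduced to its skeleton: callback() hands the exposed attributes to a plain R function as a data.frame, and the modified data.frame is injected back into the point cloud. The classification rule below is purely illustrative:

classify_low_points = function(data)
{
  class = integer(nrow(data))  # 0 = created, never classified
  class[data$Z < 1] = 2L       # illustrative rule: points below 1 m become ground
  data$Classification <- class
  return(data)
}
stage = callback(classify_low_points, expose = "xyz")
pipeline = reader(f) + stage + write_las(o)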

Canopy Height Model

These two pipelines are natively installed in the package under the name chm().
chm_p2r = function(res, filter = "", ofile = tempfile(fileext = ".tif"))
{
  # Point-to-raster: keep the highest point in each pixel
  return(rasterize(res, "max", filter = filter, ofile = ofile))
}
chm_tin = function(res, ofile = tempfile(fileext = ".tif"))
{
  # Triangulation: interpolate a TIN built from the first returns
  tin = triangulate(filter = keep_first())
  chm = rasterize(res, tin, ofile = ofile)
  return(tin + chm)
}
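
Either factory can then be piped like any other stage; a sketch with a hypothetical 1 m resolution:

pipeline_p2r = reader(f) + chm_p2r(1, filter = keep_first())
pipeline_tin = reader(f) + chm_tin(1)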

Digital Terrain Model

This one is also natively installed in the package.

add_class can be used to treat additional classes as ground, such as 9 for water.

dtm = function(res, ofile = tempfile(fileext = ".tif"), add_class = NULL)
{
  filter = keep_ground()
  if (!is.null(add_class)) filter = filter + keep_class(add_class)
  tin = triangulate(filter = filter)
  rast = rasterize(res, tin, ofile = ofile)
  return(tin + rast)
}
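
A usage sketch at a hypothetical 1 m resolution, treating water (class 9) as ground:

pipeline = reader(f) + dtm(1, add_class = 9)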

Useful functions

Read LAS

read_las = function(files, select = "xyzi", filter = "")
{
  # The callback simply returns the exposed data.frame unmodified
  load = function(data) { return(data) }
  read = reader_las(filter = filter)
  call = callback(load, expose = select, no_las_update = TRUE)
  return(exec(read + call, on = files))
}
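
A usage sketch, assuming files points to one or more LAS/LAZ files:

las = read_las(files)                 # data.frame with coordinates and intensity
las = read_las(files, select = "xyz") # coordinates only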

Buffer tiles

buffer_tiles = function(files, buffer, ofiles = paste0(tempdir(), "/*_buffered.las"))
{
  read = reader_las()
  write = write_las(ofiles, keep_buffer = TRUE)
  return(exec(read+write, on = files, buffer = buffer))
}
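
A usage sketch with a hypothetical 30 m buffer; the paths of the buffered files should be returned via the write_las() stage:

buffered = buffer_tiles(files, buffer = 30)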

Clip circle

This function writes LAS files, or returns data.frames if ofiles = "". It supports sf objects as input.

clip_circle = function(files, geometry, radius, ofiles = paste0(tempdir(), "/*_clipped.las"))
{
  if (sf::st_geometry_type(geometry, FALSE) != "POINT") 
    stop("Expected POINT geometry type")

  coordinates <- sf::st_coordinates(geometry)
  xcenter <- coordinates[,1]
  ycenter <- coordinates[,2]
  
  read = reader_las(xc = xcenter, yc = ycenter, r = radius)
  
  if (length(ofiles) == 1L && ofiles == "")
    stage = callback(function(data) { return(data) }, expose = "*", no_las_update = TRUE)
  else
    stage = write_las(ofiles)
  
  ans = exec(read+stage, on = files)
  return(ans)
}
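
A usage sketch, assuming centers is an sf object of POINT geometries built here from hypothetical coordinates, with an 11.28 m radius (400 m² plots):

centers = sf::st_as_sf(data.frame(x = c(885100, 885200), y = c(629300, 629600)), coords = c("x", "y"))
las = clip_circle(files, centers, radius = 11.28, ofiles = "") # returns data.frames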

CRS

Returns the CRS as an sf crs object. The cost of applying hulls() is virtually zero.

crs = function(files)
{
  pipeline = reader_las() + hulls()
  ans = exec(pipeline, on = files)
  return(sf::st_crs(ans))
}

Inventory metrics

This function uses an sf object to provide the plot centers and offers the option to normalize on-the-fly. It returns the same sf object with extra attributes appended.

inventory_metrics = function(files, geometry, radius, fun, normalize = FALSE)
{
  if (sf::st_geometry_type(geometry, FALSE) != "POINT") 
    stop("Expected POINT geometry type")

  coordinates <- sf::st_coordinates(geometry)
  xcenter <- coordinates[,1]
  ycenter <- coordinates[,2]
  
  pipeline <- reader_las(xc = xcenter, yc = ycenter, r = radius)
  
  if (normalize)
  {
    tri <- triangulate(filter = keep_ground())
    trans <- transform_with(tri)
    pipeline <- pipeline + tri + trans
  }

  pipeline <- pipeline + callback(fun, expose = "*")
  ans <- exec(pipeline, on = files)
  ans <- lapply(ans, as.data.frame)
  ans <- do.call(rbind, ans)
  return(cbind(geometry, ans))
}
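
A usage sketch with a hypothetical metrics function returning one statistic per column, reusing the centers object from the clip example:

metrics = function(data) { list(zmax = max(data$Z), zmean = mean(data$Z)) }
plots = inventory_metrics(files, centers, radius = 11.28, fun = metrics, normalize = TRUE)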

Virtual point cloud

build_vpc = function(files, ofile)
{
  read = reader_las()
  write = write_vpc(ofile)
  exec(read+write, on = files)
}
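
A usage sketch writing a hypothetical index file:

build_vpc(files, "dataset.vpc")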