diff --git a/R/ISSR800.R b/R/ISSR800.R
index ed487c55..aeb924cd 100644
--- a/R/ISSR800.R
+++ b/R/ISSR800.R
@@ -1,36 +1,37 @@
-
-#' @title ISSR-800 Web Coverage Service (WCS)
+#' ISSR-800 Web Coverage Service (WCS)
 #'
-#' @author D.E. Beaudette and A.G. Brown
+#' Intermediate-scale gridded (800m) soil property and interpretation maps from
+#' aggregated SSURGO and STATSGO data. These maps were developed by
+#' USDA-NRCS-SPSD staff in collaboration with UCD-LAWR. Originally developed for
+#' educational use and [interactive thematic maps](https://casoilresource.lawr.ucdavis.edu/soil-properties/), these data are a suitable alternative to gridded STATSGO-derived thematic soil maps. The full size grids can be [downloaded here](https://casoilresource.lawr.ucdavis.edu/soil-properties/download.php).
 #'
-#' @description Intermediate-scale gridded (800m) soil property and interpretation maps from aggregated SSURGO and STATSGO data. These maps were developed by USDA-NRCS-SPSD staff in collaboration with UCD-LAWR. Originally for educational use and \href{https://casoilresource.lawr.ucdavis.edu/soil-properties/}{interactive thematic maps}, these data are a suitable alternative to gridded STATSGO-derived thematic soil maps. The full size grids can be \href{https://casoilresource.lawr.ucdavis.edu/soil-properties/download.php}{downloaded here}.
+#' \code{aoi} should be specified as either a \code{Spatial*}, \code{sf},
+#' \code{sfc} or \code{bbox} object or a \code{list} containing:
 #'
+#' \describe{
+#'   \item{\code{aoi}}{bounding-box specified as (xmin, ymin, xmax, ymax),
+#'   e.g. c(-114.16, 47.65, -114.08, 47.68)}
+#'   \item{\code{crs}}{coordinate reference system of BBOX, e.g.
+#'   '+init=epsg:4326'}
+#' }
 #'
+#' The WCS query is parameterized using \code{raster::extent} derived from the
+#' above AOI specification, after conversion to the native CRS (EPSG:6350) of
+#' the ISSR-800 grids.
 #'
-#' @param aoi area of interest (AOI) defined using a \code{Spatial*}, a \code{sf}, \code{sfc} or \code{bbox} object or a \code{list}, see details
 #'
+#' Variables available from this WCS can be queried using \code{WCS_details(wcs
+#' = 'ISSR800')}.
 #'
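+#' A minimal query sketch, assuming internet access; 'paws' (plant-available
+#' water storage) is one example grid name, as listed by
+#' \code{WCS_details(wcs = 'ISSR800')}:
+#'
+#' \preformatted{
+#' # AOI as a list: bounding-box (xmin, ymin, xmax, ymax) + CRS
+#' a <- list(aoi = c(-114.16, 47.65, -114.08, 47.68), crs = '+init=epsg:4326')
+#' x <- ISSR800.wcs(aoi = a, var = 'paws')
+#' }
+#'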
+#' @param aoi area of interest (AOI) defined using a \code{Spatial*}, a
+#' \code{sf}, \code{sfc} or \code{bbox} object or a \code{list}, see details
 #' @param var ISSR-800 grid name, see details
-#'
-#' @param res grid resolution, units of meters. The native resolution of ISSR-800 grids (this WCS) is 800m.
-#'
-#' @param quiet logical, passed to \code{download.file} to enable / suppress URL and progress bar for download.
-#'
-#' @details \code{aoi} should be specified as either a \code{Spatial*}, \code{sf}, \code{sfc} or \code{bbox} object or a \code{list} containing:
-#'
-#' \describe{
-#' \item{\code{aoi}}{bounding-box specified as (xmin, ymin, xmax, ymax) e.g. c(-114.16, 47.65, -114.08, 47.68)}
-#' \item{\code{crs}}{coordinate reference system of BBOX, e.g. '+init=epsg:4326'}
-#' }
-#'
-#' The WCS query is parameterized using \code{raster::extent} derived from the above AOI specification, after conversion to the native CRS (EPSG:6350) of the ISSR-800 grids.
-#'
-#' Variables available from this WCS can be queried using \code{WCS_details(wcs = 'ISSR800')}.
-#'
-#' @note There are still some issues to be resolved related to the encoding of NA Variables with a natural zero (e.g. SAR) have 0 set to NA.
-#'
-#' @return \code{raster} object containing indexed map unit keys and associated raster attribute table
-#'
-#' @export
+#' @param res grid resolution, units of meters. The native resolution of
+#' ISSR-800 grids (this WCS) is 800m.
+#' @param quiet logical, passed to \code{download.file} to enable / suppress
+#' URL and progress bar for download.
+#' @return \code{raster} object containing indexed map unit keys and associated
+#' raster attribute table
+#' @note There are still some issues to be resolved related to the encoding of
+#' NA. Variables with a natural zero (e.g. SAR) have 0 set to NA.
+#' @author D.E. Beaudette and A.G. Brown
+#' @export ISSR800.wcs
 ISSR800.wcs <- function(aoi, var, res = 800, quiet = FALSE) {
 
   if(!requireNamespace('rgdal', quietly=TRUE))
diff --git a/R/KSSL_VG_model.R b/R/KSSL_VG_model.R
index cb015803..84cfb876 100644
--- a/R/KSSL_VG_model.R
+++ b/R/KSSL_VG_model.R
@@ -1,4 +1,3 @@
-
 # define van Genuchten model as a function
 # this is tailored to the parameters stored in our KSSL data
 # https://en.wikipedia.org/wiki/Water_retention_curve
@@ -55,13 +54,43 @@
 #' \href{https://en.wikipedia.org/wiki/Water_retention_curve}{water retention curve estimation}
 #'
 # 'van Genuchten, M.Th. (1980). "A closed-form equation for predicting the hydraulic conductivity of unsaturated soils". Soil Science Society of America Journal. 44 (5): 892-898.
+
+
+#' Develop a Water Retention Curve from KSSL Data
 #'
+#' Water retention curve modeling via van Genuchten model and KSSL data.
 #'
+#' This function was developed to work with measured or estimated parameters of the [van Genuchten model](https://en.wikipedia.org/wiki/Water_retention_curve), as generated by the [ROSETTA model](https://www.ars.usda.gov/pacific-west-area/riverside-ca/agricultural-water-efficiency-and-salinity-research-unit/docs/model/rosetta-model/).
 #'
-#' @export
-#'
+#' As such, \code{VG_params} should have the following format and
+#' conventions:
+#' \describe{
+#'   \item{theta_r}{residual water content, values should be in the range of 0-1}
+#'   \item{theta_s}{saturated water content, values should be in the range of 0-1}
+#'   \item{alpha}{related to the inverse of the air entry suction, function
+#'   expects log10-transformed values with units of 1/cm}
+#'   \item{npar}{index of pore size distribution (dimensionless), function
+#'   expects log10-transformed values}
+#' }
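+#'
+#' For example, the fitted results can be used to estimate volumetric water
+#' content at "field capacity" (phi = 33 kPa); a minimal sketch, using the
+#' example parameters \code{d} from the examples below:
+#'
+#' \preformatted{
+#' vg <- KSSL_VG_model(VG_params = d)
+#' # volumetric water content at 33 kPa
+#' vg$VG_function(33)
+#' }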
+#'
+#' @param VG_params \code{data.frame} or \code{list} object with the parameters
+#' of the van Genuchten model, see details
+#' @param phi_min lower limit for water potential in kPa
+#' @param phi_max upper limit for water potential in kPa
+#' @param pts number of points to include in estimated water retention curve
+#' @return A list with the following components:
+#' \describe{
+#'   \item{VG_curve}{estimated water retention curve: paired estimates of water
+#'   potential (phi) and water content (theta)}
+#'   \item{VG_function}{spline function for converting water potential (phi,
+#'   units of kPa) to estimated volumetric water content (theta, fraction in
+#'   the range of 0-1)}
+#'   \item{VG_inverse_function}{spline function for converting volumetric water
+#'   content (theta, fraction in the range of 0-1) to estimated water
+#'   potential (phi, units of kPa)}
+#' }
+#'
+#' @note A practical example is given in the [fetchSCAN tutorial](http://ncss-tech.github.io/AQP/soilDB/fetchSCAN-demo.html).
+#'
+#' @author D.E. Beaudette
+#' @references [water retention curve estimation](https://en.wikipedia.org/wiki/Water_retention_curve)
 #' @examples
 #'
+#'
 #' # basic example
 #' d <- data.frame(
 #'   theta_r = 0.0337216,
 #'   theta_s = 0.4864061,
 #'   alpha = -1.581517,
 #'   npar = 0.1227247
 #' )
@@ -74,6 +103,8 @@
 #'
 #' str(vg)
 #'
+#'
+#' @export KSSL_VG_model
 KSSL_VG_model <- function(VG_params, phi_min=10^-6, phi_max=10^8, pts=100) {
 
   # sanity check: no NA allowed
diff --git a/R/OSDquery.R b/R/OSDquery.R
index f9bba04c..4467eb48 100644
--- a/R/OSDquery.R
+++ b/R/OSDquery.R
@@ -1,13 +1,28 @@
-
 # https://www.postgresql.org/docs/9.5/static/textsearch-controls.html
 
 # these are all parameters expected by the SoilWeb OSD Fulltext search
-
-
-#' @title Full text searching of the USDA-NRCS Official Series Descriptions
+#' Full text searching of the USDA-NRCS Official Series Descriptions
+#'
+#' This is an example of how chunks of text parsed from OSD records can be made searchable with [PostgreSQL full-text indexing](https://www.postgresql.org/docs/9.5/textsearch.html). This query system uses [special syntax](https://www.postgresql.org/docs/9.5/datatype-textsearch.html).
+#'
+#' Each search field (except for the "brief narrative" and MLRA) corresponds with a section header in an OSD. The results may not include every OSD due to formatting errors and typos. Results are scored based on the number of times search terms match words in associated sections. This is the R API corresponding to the [SoilWeb PostgreSQL OSD full-text search API](https://casoilresource.lawr.ucdavis.edu/osd-search/).
+#'
+#' See \url{https://casoilresource.lawr.ucdavis.edu/osd-search/}
+#' for more information.
+#'
+#' - family level taxa are derived from SC database, not parsed OSD records
+#' - MLRA are derived via spatial intersection (SSURGO x MLRA polygons)
+#' - MLRA-filtering is only possible for series used in the current SSURGO snapshot (component name)
+#' - logical AND: \code{&}
+#' - logical OR: \code{|}
+#' - wildcard, e.g. rhy-something \verb{rhy:*}
+#' - search terms with spaces need doubled single quotes: \verb{''san joaquin''}
+#' - combine search terms into a single expression: \verb{(grano:* | granite)}
+#'
+#' Related documentation can be found in the following tutorials:
+#'
+#' - [Soil Series Query Functions](http://ncss-tech.github.io/AQP/soilDB/soil-series-query-functions.html)
+#' - [Competing Soil Series](https://ncss-tech.github.io/AQP/soilDB/competing-series.html)
+#' - [Siblings](https://ncss-tech.github.io/AQP/soilDB/siblings.html)
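+#'
+#' For example, a query combining the syntax described above (a sketch,
+#' assuming internet access; argument values are illustrative):
+#'
+#' \preformatted{
+#' # series with granitic rocks named in the geographic setting section
+#' x <- OSDquery(geog_location = '(grano:* | granite)')
+#' }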
 #'
-#' @description This is a rough example of how chunks of text parsed from OSD records can be made search-able with the \href{https://www.postgresql.org/docs/9.5/textsearch.html}{PostgreSQL fulltext indexing} and query system (\href{https://www.postgresql.org/docs/9.5/datatype-textsearch.html}{syntax details}). Each search field (except for the "brief narrative" and MLRA) corresponds with a section header in an OSD. The results may not include every OSD due to formatting errors and typos. Results are scored based on the number of times search terms match words in associated sections. This is the R API corresponding to \href{https://casoilresource.lawr.ucdavis.edu/osd-search/}{this webpage}.
-#'
 #' @param mlra a comma-delimited list of MLRA to search ('17,18,22A')
 #' @param taxonomic_class search family level classification
 #' @param typical_pedon search typical pedon section
@@ -17,43 +32,16 @@
 #' @param competing_series search competing series section
 #' @param geog_location search geographic setting section
 #' @param geog_assoc_soils search geographically associated soils section
-#'
-#' @details
-#' See \href{https://casoilresource.lawr.ucdavis.edu/osd-search/}{this webpage} for more information.
-#'
-#' * family level taxa are derived from SC database, not parsed OSD records
-#' * MLRA are derived via spatial intersection (SSURGO x MLRA polygons)
-#' * MLRA-filtering is only possible for series used in the current SSURGO snapshot (component name)
-#' * logical AND: `&`
-#' * logical OR: `|`
-#' * wildcard, e.g. rhy-something `rhy:*`
-#' * search terms with spaces need doubled single quotes: `''san joaquin''`
-#' * combine search terms into a single expression: `(grano:* | granite)`
-#'
-#' Related documentation can be found in the following tutorials
-#'
-#' * \href{http://ncss-tech.github.io/AQP/soilDB/soil-series-query-functions.html}{overview of all soil series query functions}
-#'
-#' * \href{https://ncss-tech.github.io/AQP/soilDB/competing-series.html}{competing soil series}
-#'
-#' * \href{https://ncss-tech.github.io/AQP/soilDB/siblings.html}{siblings}
-#'
-#' @references \url{https://www.nrcs.usda.gov/wps/portal/nrcs/detailfull/soils/home/?cid=nrcs142p2_053587}
-#'
-#' @author D.E. Beaudette
-#'
+#' @return a \code{data.frame} object containing soil series names that match
+#' patterns supplied as arguments.
 #' @note SoilWeb maintains a snapshot of the Official Series Description data.
-#'
+#' @author D.E. Beaudette
 #' @seealso \code{\link{fetchOSD}, \link{siblings}, \link{fetchOSD}}
-#'
+#' @references
+#' \url{https://www.nrcs.usda.gov/wps/portal/nrcs/detailfull/soils/home/?cid=nrcs142p2_053587}
 #' @keywords manip
-#'
-#' @return a \code{data.frame} object containing soil series names that match patterns supplied as arguments.
-#' @export
-#'
 #' @examples
 #'
-#'
 #' \donttest{
 #' if(requireNamespace("curl") &
 #'     curl::has_internet() &
@@ -71,6 +59,8 @@
 #' }
 #' }
 #'
+#'
+#' @export OSDquery
 OSDquery <- function(mlra='', taxonomic_class='', typical_pedon='', brief_narrative='', ric='', use_and_veg='', competing_series='', geog_location='', geog_assoc_soils='') {
 
   # check for required packages
diff --git a/R/ROSETTA.R b/R/ROSETTA.R
index 37ed55c1..c27b13c0 100644
--- a/R/ROSETTA.R
+++ b/R/ROSETTA.R
@@ -1,4 +1,3 @@
-
 # handle a single ROSETTA API request
 # x.chunk: single set of data to be processed, a data.frame
 # vars: column names of those soil propertie passed to API
@@ -76,7 +75,7 @@
 #' @title ROSETTA Model API
 #'
-#' @description A simple interface to the \href{https://www.ars.usda.gov/pacific-west-area/riverside-ca/agricultural-water-efficiency-and-salinity-research-unit/docs/model/rosetta-model/}{ROSETTA model} for predicting hydraulic parameters from soil properties. The ROSETTA API was developed by Dr. Todd Skaggs (USDA-ARS) and links to the work of Zhang and Schaap, (2017). See the \href{http://ncss-tech.github.io/AQP/soilDB/ROSETTA-API.html}{related tutorial} for additional examples.
+#' @description A simple interface to the [ROSETTA model](https://www.ars.usda.gov/pacific-west-area/riverside-ca/agricultural-water-efficiency-and-salinity-research-unit/docs/model/rosetta-model/) for predicting hydraulic parameters from soil properties. The ROSETTA API was developed by Dr. Todd Skaggs (USDA-ARS) and links to the work of Zhang and Schaap (2017). See the [ROSETTA API tutorial](http://ncss-tech.github.io/AQP/soilDB/ROSETTA-API.html) for additional examples.
 #'
 #' @author D.E. Beaudette, Todd Skaggs (ARS), Richard Reid
 #'
@@ -162,6 +161,139 @@
 # * best model (0) is always used, API no longer accepts `model` as a parameter
 # * versions 1,2,3 supported
+
+
+#' ROSETTA Model API
+#'
+#' A simple interface to the [ROSETTA model](https://www.ars.usda.gov/pacific-west-area/riverside-ca/agricultural-water-efficiency-and-salinity-research-unit/docs/model/rosetta-model/) for predicting hydraulic parameters from soil properties. The ROSETTA
+#' API was developed by Dr. Todd Skaggs (USDA-ARS) and links to the work of
+#' Zhang and Schaap (2017). See the [ROSETTA API tutorial](http://ncss-tech.github.io/AQP/soilDB/ROSETTA-API.html) for additional examples.
+#'
+#' Soil properties supplied in \code{x} must be described, in order, via the
+#' \code{vars} argument. The API does not use the names but column ordering
+#' must follow: sand, silt, clay, bulk density, volumetric water content at
+#' 33 kPa (1/3 bar), and volumetric water content at 1500 kPa (15 bar).
+#'
+#' The ROSETTA model relies on a minimum of 3 soil properties, with increasing
+#' (expected) accuracy as additional properties are included:
+#' \itemize{
+#'   \item required: \code{sand}, \code{silt}, \code{clay}: USDA soil texture
+#'   separates (percentages) that sum to 100\%
+#'   \item optional: \verb{bulk density} (any moisture basis): mass per volume
+#'   after accounting for >2mm fragments, units of gm/cm3
+#'   \item optional: \verb{volumetric water content at 33 kPa}: roughly "field
+#'   capacity" for most soils, units of cm^3/cm^3
+#'   \item optional: \verb{volumetric water content at 1500 kPa}: roughly
+#'   "permanent wilting point" for most plants, units of cm^3/cm^3
+#' }
+#'
+#' Column names not specified in \code{vars} are retained in the output.
+#'
+#' Three versions of the ROSETTA model are available, selected using \code{v =
+#' 1}, \code{v = 2}, or \code{v = 3}.
+#' \itemize{
+#'   \item \strong{version 1}: Schaap, M.G., F.J. Leij, and M.Th. van
+#'   Genuchten. 2001. ROSETTA: a computer program for estimating soil hydraulic
+#'   parameters with hierarchical pedotransfer functions. Journal of Hydrology
+#'   251(3-4): 163-176. \doi{10.1016/S0022-1694(01)00466-8}
+#'   \item \strong{version 2}: Schaap, M.G., A. Nemes, and M.T. van Genuchten.
+#'   2004. Comparison of Models for Indirect Estimation of Water Retention and
+#'   Available Water in Surface Soils. Vadose Zone Journal 3(4): 1455-1463.
+#'   \doi{10.2136/vzj2004.1455}
+#'   \item \strong{version 3}: Zhang, Y., and M.G. Schaap. 2017. Weighted
+#'   recalibration of the Rosetta pedotransfer model with improved estimates of
+#'   hydraulic parameter distributions and summary statistics (Rosetta3).
+#'   Journal of Hydrology 547: 39-53. \doi{10.1016/j.jhydrol.2017.01.004}
+#' }
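+#'
+#' A minimal sketch, assuming internet access to the ROSETTA API; column
+#' names are arbitrary, only their order matters:
+#'
+#' \preformatted{
+#' x <- data.frame(sand = 33, silt = 33, clay = 34, bd = 1.35)
+#' r <- ROSETTA(x, vars = c('sand', 'silt', 'clay', 'bd'))
+#' }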
+#'
+#' @param x a \code{data.frame} of required soil properties, may contain other
+#' columns, see details
+#' @param vars character vector of column names in \code{x} containing relevant
+#' soil property values, see details
+#' @param v ROSETTA model version number: '1', '2', or '3', see details and
+#' references.
+#' @param chunkSize number of records per API call
+#' @param conf configuration passed to \code{httr::POST()} such as
+#' \code{verbose()}.
+#' @return a \code{data.frame} object with the following columns:
+#' \itemize{
+#'   \item \code{...}: pre-existing columns from \code{x}
+#'   \item \code{theta_r}: residual volumetric water content (cm^3/cm^3)
+#'   \item \code{theta_s}: saturated volumetric water content (cm^3/cm^3)
+#'   \item \code{alpha}: related to the inverse of the air entry suction,
+#'   log10-transformed values with units of 1/cm
+#'   \item \code{npar}: index of pore size distribution (dimensionless),
+#'   log10-transformed values
+#'   \item \code{ksat}: saturated hydraulic conductivity, log10-transformed
+#'   values with units of cm/day
+#'   \item \code{.rosetta.model}: best-available model selection (-1 signifies
+#'   that prediction was not possible due to missing values in \code{x})
+#'   \item \code{.rosetta.version}: ROSETTA algorithm version, selected via
+#'   function argument \code{v}
+#' }
+#' @note Input data should not contain column names that will conflict with
+#' the ROSETTA API results: \code{theta_r}, \code{theta_s}, \code{alpha},
+#' \code{npar}, \code{ksat}.
+#' @author D.E. Beaudette, Todd Skaggs (ARS), Richard Reid
+#' @references Consider using the interactive version, with copy/paste
+#' functionality at: \url{https://www.handbook60.org/rosetta}.
+#'
+#' Rosetta Model Home Page:
+#' \url{https://www.ars.usda.gov/pacific-west-area/riverside-ca/agricultural-water-efficiency-and-salinity-research-unit/docs/model/rosetta-model/}.
+#'
+#' Python ROSETTA model: \url{http://www.u.arizona.edu/~ygzhang/download.html}.
+#'
+#' Yonggen Zhang, Marcel G. Schaap. 2017. Weighted recalibration of the Rosetta
+#' pedotransfer model with improved estimates of hydraulic parameter
+#' distributions and summary statistics (Rosetta3). Journal of Hydrology. 547:
+#' 39-53. \doi{10.1016/j.jhydrol.2017.01.004}.
+#'
+#' Kosugi, K. 1999. General model for unsaturated hydraulic conductivity for
+#' soils with lognormal pore-size distribution. Soil Sci. Soc. Am. J.
+#' 63:270-277.
+#'
+#' Mualem, Y. 1976. A new model predicting the hydraulic conductivity of
+#' unsaturated porous media. Water Resour. Res. 12:513-522.
+#'
+#' Schaap, M.G. and W. Bouten. 1996. Modeling water retention curves of sandy
+#' soils using neural networks. Water Resour. Res. 32:3033-3040.
+#'
+#' Schaap, M.G., Leij F.J. and van Genuchten M.Th. 1998. Neural network
+#' analysis for hierarchical prediction of soil water retention and saturated
+#' hydraulic conductivity. Soil Sci. Soc. Am. J. 62:847-855.
+#'
+#' Schaap, M.G., and F.J. Leij, 1998. Database Related Accuracy and Uncertainty
+#' of Pedotransfer Functions, Soil Science 163:765-779.
+#'
+#' Schaap, M.G., F.J. Leij and M. Th. van Genuchten. 1999. A bootstrap-neural
+#' network approach to predict soil hydraulic parameters. In: van Genuchten,
+#' M.Th., F.J. Leij, and L. Wu (eds), Proc. Int. Workshop, Characterization and
+#' Measurements of the Hydraulic Properties of Unsaturated Porous Media, pp
+#' 1237-1250, University of California, Riverside, CA.
+#'
+#' Schaap, M.G., F.J. Leij, 1999, Improved prediction of unsaturated hydraulic
+#' conductivity with the Mualem-van Genuchten model, Submitted to Soil Sci.
+#' Soc. Am. J.
+#'
+#' van Genuchten, M.Th. 1980. A closed-form equation for predicting the
+#' hydraulic conductivity of unsaturated soils. Soil Sci. Soc. Am. J.
+#' 44:892-898.
+#'
+#' Schaap, M.G., F.J. Leij, and M.Th. van Genuchten. 2001. ROSETTA: a computer
+#' program for estimating soil hydraulic parameters with hierarchical
+#' pedotransfer functions. Journal of Hydrology 251(3-4): 163-176.
+#' \doi{10.1016/S0022-1694(01)00466-8}.
+#'
+#' Schaap, M.G., A. Nemes, and M.T. van Genuchten. 2004. Comparison of Models
+#' for Indirect Estimation of Water Retention and Available Water in Surface
+#' Soils. Vadose Zone Journal 3(4): 1455-1463. \doi{10.2136/vzj2004.1455}.
+#'
+#' Zhang, Y., and M.G. Schaap. 2017. Weighted recalibration of the Rosetta
+#' pedotransfer model with improved estimates of hydraulic parameter
+#' distributions and summary statistics (Rosetta3). Journal of Hydrology 547:
+#' 39-53. \doi{10.1016/j.jhydrol.2017.01.004}.
+#' @export ROSETTA
 ROSETTA <- function(x, vars, v = c('1', '2', '3'), chunkSize = 10000, conf = NULL) {
 
   # check for required packages
diff --git a/R/SDA-spatial.R b/R/SDA-spatial.R
index a4fa51f0..b825e1c8 100644
--- a/R/SDA-spatial.R
+++ b/R/SDA-spatial.R
@@ -1,4 +1,3 @@
-
 ## chunked queries for large number of records:
 # https://github.com/ncss-tech/soilDB/issues/71
@@ -16,22 +15,25 @@
 ## TODO: geometry collections are not allowed in sp objects..
 ## TODO: consider moving to sf
 
-#' @title Post-process WKT returned from SDA.
+
+
+#' Post-process WKT returned from SDA.
+#'
+#' This is a helper function, commonly used with \code{SDA_query} to convert
+#' the WKT (well-known text) representation of geometry into an sp-class object.
 #'
-#' @description This is a helper function, commonly used with \code{SDA_query} to extract WKT (well-known text) representation of geometry to an sp-class object.
+#' The SDA website can be found at \url{https://sdmdataaccess.nrcs.usda.gov}.
+#' See the [SDA Tutorial](http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html) for detailed examples.
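+#'
+#' A sketch of typical use, assuming internet access; the map unit key and
+#' geometry column alias are illustrative:
+#'
+#' \preformatted{
+#' q <- "SELECT mukey, mupolygongeo.STAsText() AS geom
+#'       FROM mupolygon WHERE mukey = '462065'"
+#' res <- SDA_query(q)
+#' s <- processSDA_WKT(res)
+#' }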
 #'
-#' @param d \code{data.frame} returned by \code{SDA_query}, containing WKT representation of geometry
+#' @param d \code{data.frame} returned by \code{SDA_query}, containing WKT
+#' representation of geometry
 #' @param g name of column in \code{d} containing WKT geometry
 #' @param p4s PROJ4 CRS definition, typically GCS WGS84
-#'
-#' @details The SDA website can be found at \url{https://sdmdataaccess.nrcs.usda.gov}. See the \href{http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html}{SDA Tutorial} for detailed examples.
-#'
-#' @note This function requires the `httr`, `jsonlite`, `XML`, and `rgeos` packages.
-#'
-#' @author D.E. Beaudette
-#'
 #' @return A \code{Spatial*} object.
-#'
+#' @note This function requires the \code{httr}, \code{jsonlite}, \code{XML},
+#' and \code{rgeos} packages.
+#' @author D.E. Beaudette
+#' @export processSDA_WKT
 processSDA_WKT <- function(d, g='geom', p4s='+proj=longlat +datum=WGS84') {
 
   # iterate over features (rows) and convert into list of SPDF
   p <- list()
@@ -158,36 +160,53 @@ FROM geom_data;
 
 # 10-20x speed improvement over SDA_query_features
 
-#' @title SDA Spatial Query
+
+
+#' SDA Spatial Query
 #'
-#' @description Query SDA (SSURGO / STATSGO) records via spatial intersection with supplied geometries. Input can be SpatialPoints, SpatialLines, or SpatialPolygons objects with a valid CRS. Map unit keys, overlapping polygons, or the spatial intersection of \code{geom} + SSURGO / STATSGO polygons can be returned. See details.
+#' Query SDA (SSURGO / STATSGO) records via spatial intersection with supplied
+#' geometries. Input can be SpatialPoints, SpatialLines, or SpatialPolygons
+#' objects with a valid CRS. Map unit keys, overlapping polygons, or the
+#' spatial intersection of \code{geom} + SSURGO / STATSGO polygons can be
+#' returned. See details.
 #'
+#' Queries for map unit keys are more efficient than queries for overlapping
+#' or intersecting (the least efficient) features. \code{geom} is
+#' converted to GCS / WGS84 as needed. Map unit keys are always returned when
+#' using \code{what = "geom"}.
+#'
+#' There is a 100,000 record limit and 32Mb JSON serializer limit, per query.
+#'
+#' SSURGO (detailed soil survey, typically 1:24,000 scale) and STATSGO
+#' (generalized soil survey, 1:250,000 scale) data are stored together within
+#' SDA. This means that queries that don't specify an area symbol may result in
+#' a mixture of SSURGO and STATSGO records. See the examples below and the
+#' [SDA Tutorial](http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html)
+#' for details.
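+#'
+#' A minimal sketch, assuming internet access and the \code{sp} package;
+#' \code{p} is a hypothetical SpatialPolygons object in WGS84:
+#'
+#' \preformatted{
+#' # fast: map unit keys + names overlapping p
+#' res <- SDA_spatialQuery(p, what = 'mukey')
+#'
+#' # slower: intersection of map unit polygons with p
+#' polys <- SDA_spatialQuery(p, what = 'geom', geomIntersection = TRUE)
+#' }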
+#'
+#' @aliases SDA_spatialQuery SDA_make_spatial_query SDA_query_features
+#' @param geom a Spatial* object, with valid CRS. May contain multiple
+#' features.
+#' @param what a character vector specifying what to return. 'mukey':
+#' \code{data.frame} with intersecting map unit keys and names; 'geom':
+#' overlapping or intersecting map unit polygons
+#' @param geomIntersection logical; \code{FALSE}: overlapping map unit polygons
+#' returned, \code{TRUE}: intersection of \code{geom} + map unit polygons is
+#' returned.
+#' @param db a character vector identifying the Soil Geographic Databases
+#' ('SSURGO' or 'STATSGO') to query. Option \var{STATSGO} currently works only
+#' in combination with \code{what = "geom"}.
+#' @return A \code{data.frame} if \code{what = 'mukey'}, otherwise
+#' \code{SpatialPolygonsDataFrame} object.
+#' @note Row-order is not preserved across features in \code{geom} and returned
+#' object. Use \code{sp::over()} or similar functionality to extract from
+#' results. Polygon area in acres is computed server-side when \code{what =
+#' 'geom'} and \code{geomIntersection = TRUE}.
+#' @author D.E. Beaudette, A.G. Brown, D.R. Schlaepfer
 #' @seealso \code{\link{SDA_query}}
 #' @keywords manip
-#'
-#' @aliases SDA_make_spatial_query SDA_query_features
-#'
-#' @note Row-order is not preserved across features in \code{geom} and returned object. Use \code{sp::over()} or similar functionality to extract from results. Polygon area in acres is computed server-side when \code{what = 'geom'} and \code{geomIntersection = TRUE}.
-#'
-#'
-#' @details Queries for map unit keys are always more efficient vs. queries for overlapping or intersecting (i.e. least efficient) features. \code{geom} is converted to GCS / WGS84 as needed. Map unit keys are always returned when using \code{what = "geom"}.
-#'
-#' There is a 100,000 record limit and 32Mb JSON serializer limit, per query.
-#'
-#' SSURGO (detailed soil survey, typically 1:24,000 scale) and STATSGO (generalized soil survey, 1:250,000 scale) data are stored together within SDA. This means that queries that don't specify an area symbol may result in a mixture of SSURGO and STATSGO records. See the examples below and the \href{http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html}{SDA Tutorial} for details.
-#'
-#'
 #' @examples
+#'
 #' \donttest{
 #' if(requireNamespace("curl") &
 #'     curl::has_internet() &
@@ -324,6 +343,8 @@ FROM geom_data;
 #' }
 #' }
 #'
+#'
+#' @export SDA_spatialQuery
 SDA_spatialQuery <- function(geom, what='mukey', geomIntersection=FALSE,
                              db = c("SSURGO", "STATSGO")) {
diff --git a/R/SDA_query.R b/R/SDA_query.R
index 5f14f3c6..924cbef3 100644
--- a/R/SDA_query.R
+++ b/R/SDA_query.R
@@ -1,19 +1,22 @@
 #' Generate chunk labels for splitting data
-#'
+#'
+#' Generate chunk labels for splitting data
+#'
+#'
 #' @param ids vector of IDs
 #' @param size chunk (group) size
-#'
 #' @return A numeric vector
-#' @export makeChunks
-#'
 #' @examples
 #'
+#'
 #' # split the lowercase alphabet into 2 chunks
-#'
+#'
 #' aggregate(letters,
 #'           by = list(makeChunks(letters, size=13)),
 #'           FUN = paste0, collapse=",")
 #'
+#'
+#' @export makeChunks
 makeChunks <- function(ids, size=100) {
   n <- length(ids)
   chunk.id <- seq(from=1, to=floor(n / size)+1)
@@ -22,19 +25,23 @@ makeChunks <- function(ids, size=100) {
   return(chunk.ids)
 }
 
-#' @title Format vector of values into a string suitable for an SQL `IN` statement.
+
+
+#' Format vector of values into a string suitable for an SQL \code{IN}
+#' statement.
 #'
-#' @description Concatenate a vector to SQL \code{IN}-compatible syntax: \code{letters[1:3]} becomes \code{('a','b','c')}. Values in \code{x} are first passed through \code{unique()}.
+#' Concatenate a vector to SQL \code{IN}-compatible syntax: \code{letters[1:3]}
+#' becomes \code{('a','b','c')}. Values in \code{x} are first passed through
+#' \code{unique()}.
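+#'
+#' For example, \code{format_SQL_in_statement(letters[1:3])} returns the
+#' single string \code{"('a','b','c')"}, ready for use in a T-SQL \code{IN}
+#' clause:
+#'
+#' \preformatted{
+#' x <- format_SQL_in_statement(c('CA630', 'CA649'))
+#' paste0("SELECT * FROM legend WHERE areasymbol IN ", x)
+#' }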
 #'
-#' @note Only \code{character} output is supported.
 #'
 #' @param x A character vector.
-#'
-#' @return A character vector (unit length) containing concatenated group syntax for use in SQL \code{IN}, with unique value found in \code{x}.
-#' @export format_SQL_in_statement
-#'
+#' @return A character vector (unit length) containing concatenated group
+#' syntax for use in SQL \code{IN}, with unique values found in \code{x}.
+#' @note Only \code{character} output is supported.
 #' @examples
 #'
+#'
 #' \donttest{
 #'
 #' library(aqp)
@@ -60,7 +67,7 @@ makeChunks <- function(ids, size=100) {
 #'
 #'  # normalize mapunit/component level attributes to site-level for plot
 #'  site(res) <- ~ muname + mukey + compname + comppct_r + taxclname
-#'
+#'
 #'  # make a nice label
 #'  res$labelname <- sprintf("%s (%s%s)", res$compname, res$comppct_r, "%")
 #'
@@ -71,9 +78,11 @@ makeChunks <- function(ids, size=100) {
 #'  par(mar=c(0,0,0,0))
 #'  groupedProfilePlot(res, groups = "mukey", color = "hzname", cex.names=0.8,
 #'                     id.style = "side", label = "labelname")
-#'}
+#' }
 #'
 #'
+#'
+#' @export format_SQL_in_statement
 format_SQL_in_statement <- function(x) {
 
   # there is no reason to preserve duplicates
   # and, plenty safe to perform a second time, in case this was done outside of the function call
@@ -83,25 +92,37 @@ format_SQL_in_statement <- function(x) {
   return(i)
 }
 
-
-#
 #' Soil Data Access Query
-#'
-#' @param q A valid T-SQL query surrounded by double quotes
 #'
-#' @description Submit a query to the Soil Data Access (SDA) REST/JSON web-service and return the results as a data.frame. There is a 100,000 record limit and 32Mb JSON serializer limit, per query. Queries should contain a WHERE statement or JOIN condition to limit the number of rows affected / returned. Consider wrapping calls to \code{SDA_query} in a function that can iterate over logical chunks (e.g. areasymbol, mukey, cokey, etc.). The function \code{makeChunks} can help with such iteration.
+#' Submit a query to the Soil Data Access (SDA) REST/JSON web-service and
+#' return the results as a data.frame. There is a 100,000 record limit and 32Mb
+#' JSON serializer limit, per query. Queries should contain a WHERE statement
+#' or JOIN condition to limit the number of rows affected / returned. Consider
+#' wrapping calls to \code{SDA_query} in a function that can iterate over
+#' logical chunks (e.g. areasymbol, mukey, cokey, etc.). The function
+#' \code{makeChunks} can help with such iteration.
 #'
-#' @details The SDA website can be found at \url{https://sdmdataaccess.nrcs.usda.gov} and query examples can be found at \url{https://sdmdataaccess.nrcs.usda.gov/QueryHelp.aspx}. A library of query examples can be found at \url{https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=SDA-SQL_Library_Home}.
+#' The SDA website can be found at \url{https://sdmdataaccess.nrcs.usda.gov}
+#' and query examples can be found at
+#' \url{https://sdmdataaccess.nrcs.usda.gov/QueryHelp.aspx}. A library of query
+#' examples can be found at
+#' \url{https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=SDA-SQL_Library_Home}.
 #'
-#' SSURGO (detailed soil survey) and STATSGO (generalized soil survey) data are stored together within SDA. This means that queries that don't specify an area symbol may result in a mixture of SSURGO and STATSGO records. See the examples below and the \href{http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html}{SDA Tutorial} for details.
+#' SSURGO (detailed soil survey) and STATSGO (generalized soil survey) data are
+#' stored together within SDA. This means that queries that don't specify an
+#' area symbol may result in a mixture of SSURGO and STATSGO records. See the
+#' examples below and the [SDA Tutorial](http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html)
+#' for details.
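+#'
+#' A minimal sketch of a chunk-safe query, assuming internet access; the map
+#' unit keys are illustrative:
+#'
+#' \preformatted{
+#' q <- paste0("SELECT mukey, muname FROM mapunit WHERE mukey IN ",
+#'             format_SQL_in_statement(c('462065', '462066')))
+#' res <- SDA_query(q)
+#' }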
#' -#' @note This function requires the `httr`, `jsonlite`, and `XML` packages +#' @param q A valid T-SQL query surrounded by double quotes #' @return a data.frame result (\code{NULL} if empty, try-error on error) -#' @export +#' @note This function requires the \code{httr}, \code{jsonlite}, and +#' \code{XML} packages #' @author D.E. Beaudette #' @seealso \code{\link{mapunit_geom_by_ll_bbox}} #' @keywords manip #' @examples +#' #' \donttest{ #' if(requireNamespace("curl") & #' curl::has_internet()) { @@ -161,7 +182,8 @@ format_SQL_in_statement <- function(x) { #' } #' } #' } - +#' +#' @export SDA_query SDA_query <- function(q) { # check for required packages diff --git a/R/SSURGO_spatial_query.R b/R/SSURGO_spatial_query.R index bf2dbe53..72d0a638 100644 --- a/R/SSURGO_spatial_query.R +++ b/R/SSURGO_spatial_query.R @@ -1,5 +1,43 @@ - # currently only queries SoilWeb for mapunit-level data + + +#' Get SSURGO Data via Spatial Query +#' +#' Get SSURGO Data via Spatial Query to SoilWeb +#' +#' Data are currently available from SoilWeb. These data are a snapshot of the +#' "official" data. The snapshot date is encoded in the "soilweb_last_update" +#' column in the function return value. Planned updates to this function will +#' include a switch to determine the data source: "official" data via USDA-NRCS +#' servers, or a "snapshot" via SoilWeb. +#' +#' @param bbox a bounding box in WGS84 geographic coordinates, see examples +#' @param coords a coordinate pair in WGS84 geographic coordinates, see +#' examples +#' @param what data to query, currently ignored +#' @param source the data source, currently ignored +#' @return The data returned from this function will depend on the query style. +#' See examples below. +#' @note This function should be considered experimental; arguments, results, +#' and side-effects could change at any time. SDA now supports spatial queries, +#' consider using \code{\link{SDA_query_features}} instead. +#' @author D.E. Beaudette +#' @keywords manip +#' @examples +#' +#' \donttest{ +#' if(requireNamespace("curl") & +#' curl::has_internet()) { +#' +#' # query by bbox +#' SoilWeb_spatial_query(bbox=c(-122.05, 37, -122, 37.05)) +#' +#' # query by coordinate pair +#' SoilWeb_spatial_query(coords=c(-121, 38)) +#' } +#' } +#' +#' @export SoilWeb_spatial_query SoilWeb_spatial_query <- function(bbox=NULL, coords=NULL, what='mapunit', source='soilweb') { # check for required packages diff --git a/R/STR.R b/R/STR.R index 5644ed21..96534181 100644 --- a/R/STR.R +++ b/R/STR.R @@ -1,5 +1,30 @@ - # + + +#' Graphical Description of US Soil Taxonomy Soil Temperature Regimes +#' +#' Graphical Description of US Soil Taxonomy Soil Temperature Regimes +#' +#' [Soil Temperature Regime Evaluation Tutorial](http://ncss-tech.github.io/AQP/soilDB/STR-eval.html) +#' +#' @param mast single value or vector of mean annual soil temperature (deg C) +#' @param msst single value or vector of mean summer soil temperature (deg C) +#' @param mwst single value of mean winter soil temperature (deg C) +#' @param permafrost logical: permafrost presence / absence +#' @param pt.cex symbol size +#' @param leg.cex legend size +#' @author D.E. Beaudette +#' @seealso \code{\link{estimateSTR}} +#' @references Soil Survey Staff. 2015. Illustrated guide to soil taxonomy. +#' U.S. Department of Agriculture, Natural Resources Conservation Service, +#' National Soil Survey Center, Lincoln, Nebraska. 
+#' @keywords hplot
+#' @examples
+#'
+#' par(mar=c(4,1,0,1))
+#' STRplot(mast = 0:25, msst = 10, mwst = 1)
+#'
+#' @export STRplot
 STRplot <- function(mast, msst, mwst, permafrost=FALSE, pt.cex=2.75, leg.cex=0.85) {
 
   # make a row of rectangles with colors based on STR
@@ -126,6 +151,37 @@ STRplot <- function(mast, msst, mwst, permafrost=FALSE, pt.cex=2.75, leg.cex=0.8
 
 # vectors of MAST, summer mean, winter mean all in Deg C
+
+
+#' Estimate Soil Temperature Regime
+#'
+#' Estimate soil temperature regime (STR) based on mean annual soil temperature
+#' (MAST), mean summer soil temperature (MSST), mean winter soil temperature
+#' (MWST), presence of O horizons, saturated conditions, and presence of
+#' permafrost. Several assumptions are made when O horizon presence or
+#' saturation status is undefined.
+#'
+#' [Soil Temperature Regime Evaluation Tutorial](http://ncss-tech.github.io/AQP/soilDB/STR-eval.html)
+#'
+#' @param mast vector of mean annual soil temperature (deg C)
+#' @param mean.summer vector of mean summer soil temperature (deg C)
+#' @param mean.winter vector of mean winter soil temperature (deg C)
+#' @param O.hz logical vector of O horizon presence / absence
+#' @param saturated logical vector of seasonal saturation
+#' @param permafrost logical vector of permafrost presence / absence
+#' @return Vector of soil temperature regimes.
+#' @author D.E. Beaudette
+#' @seealso \code{\link{STRplot}}
+#' @references Soil Survey Staff. 2015. Illustrated guide to soil taxonomy.
+#' U.S. Department of Agriculture, Natural Resources Conservation Service,
+#' National Soil Survey Center, Lincoln, Nebraska.
+#' @keywords manip
+#' @examples
+#'
+#' # simple example
+#' estimateSTR(mast=17, mean.summer = 22, mean.winter = 12)
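+#'
+#' # all arguments are vectorized over sites; a sketch with two
+#' # hypothetical sites (warm vs. cold conditions)
+#' estimateSTR(mast = c(12, 2), mean.summer = c(16, 5), mean.winter = c(8, -1))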
+#'
+#'
+#' @export estimateSTR
 estimateSTR <- function(mast, mean.summer, mean.winter, O.hz=NA, saturated=NA, permafrost=FALSE) {
 
   # check to make sure that the lengths of vectors are the same
diff --git a/R/WCS-utils.R b/R/WCS-utils.R
index 6e478424..96fbf7a6 100644
--- a/R/WCS-utils.R
+++ b/R/WCS-utils.R
@@ -1,17 +1,17 @@
-
-
-#' @title Web Coverage Services Details
+#' Web Coverage Services Details
+#'
+#' List variables or databases provided by soilDB web coverage service (WCS)
+#' abstraction. These lists will be expanded in future versions.
+#'
 #'
-#' @description List variables or databases provided by soilDB web coverage service (WCS) abstraction. These lists will be expanded in future versions.
-#'
 #' @param wcs a WCS label ('mukey' or 'ISSR800')
-#'
 #' @return a \code{data.frame}
-#' @export
-#'
 #' @examples
 #'
+#'
 #' WCS_details(wcs = 'ISSR800')
+#'
+#' @export WCS_details
 WCS_details <- function(wcs = c('mukey', 'ISSR800')) {
 
   # select a WCS
diff --git a/R/createStaticNASIS.R b/R/createStaticNASIS.R
index 50152deb..8d340682 100644
--- a/R/createStaticNASIS.R
+++ b/R/createStaticNASIS.R
@@ -1,10 +1,13 @@
 #' Method for "dumping" contents of an entire NASIS table
-#'
+#'
+#' Method for "dumping" contents of an entire NASIS table
+#'
+#'
 #' @param table_name Character name of table.
-#' @param static_path Optional: path to SQLite database containing NASIS table structure; Default: \code{NULL}
-#'
+#' @param static_path Optional: path to SQLite database containing NASIS table
+#' structure; Default: \code{NULL}
 #' @return A data.frame or other result of \code{DBI::dbGetQuery}
-#'
+#' @export .dump_NASIS_table
 .dump_NASIS_table <- function(table_name, static_path = NULL) {
 
   # connect to NASIS, identify columns
   con <- dbConnectNASIS(static_path)
@@ -29,26 +32,36 @@
   return(dbQueryNASIS(con, q))
 }
 
-#' Create a memory or file-based instance of NASIS database (for selected tables)
-#'
-#' @param tables Character vector of target tables. Default: \code{NULL} is all tables meeting the following criteria.
-#' @param SS Logical. Include "selected set" tables (ending with suffix \code{"_View1"}). Default: \code{FALSE}
-#' @param systables Logical. Include "system" tables (starting with prefix \code{"system"}). Default: \code{FALSE}
-#' @param static_path Optional: path to SQLite database containing NASIS table structure; Default: \code{NULL}
-#' @param output_path Optional: path to new/existing SQLite database to write tables to. Default: \code{NULL} returns table results as named list.
-#' @return A named list of results from calling \code{dbQueryNASIS} for all columns in each NASIS table.
-#' @export
+
+
+#' Create a memory or file-based instance of NASIS database (for selected
+#' tables)
+#'
+#' Create a memory or file-based instance of NASIS database (for selected
+#' tables)
 #'
-#' @importFrom odbc dbListTables
-#' @importFrom RSQLite dbListTables RSQLite
-#' @importFrom DBI dbConnect dbDisconnect dbWriteTable
 #'
+#' @param tables Character vector of target tables. Default: \code{NULL} is all
+#' tables meeting the criteria set by the \code{SS} and \code{systables}
+#' arguments.
+#' @param SS Logical. Include "selected set" tables (ending with suffix
+#' \code{"_View1"}). Default: \code{FALSE}
+#' @param systables Logical. Include "system" tables (starting with prefix
+#' \code{"system"}). Default: \code{FALSE}
+#' @param static_path Optional: path to SQLite database containing NASIS table
+#' structure; Default: \code{NULL}
+#' @param output_path Optional: path to new/existing SQLite database to write
+#' tables to. Default: \code{NULL} returns table results as named list.
+#' @return A named list of results from calling \code{dbQueryNASIS} for all
+#' columns in each NASIS table.
+#' @examples
+#'
+#'
+#' \dontrun{
+#' str(createStaticNASIS(tables = c("calculation","formtext")))
+#' }
+#'
+#'
+#' @export createStaticNASIS
 createStaticNASIS <- function(tables = NULL, SS = FALSE, systables = FALSE,
                               static_path = NULL, output_path = NULL) {
 
   # can make static DB from another static DB, or default is local NASIS install (static_path=NULL)
diff --git a/R/dbQueryNASIS.R b/R/dbQueryNASIS.R
index bfe0d7e0..4689af69 100644
--- a/R/dbQueryNASIS.R
+++ b/R/dbQueryNASIS.R
@@ -1,14 +1,15 @@
 #' Send queries to a NASIS DBIConnection
-#'
-#' @param conn A \code{DBIConnection} object, as returned by \code{DBI::dbConnect()}.
+#'
+#' Send queries to a NASIS DBIConnection
+#'
+#'
+#' @param conn A \code{DBIConnection} object, as returned by
+#' \code{DBI::dbConnect()}.
 #' @param q A statement to execute using \code{DBI::dbGetQuery}
 #' @param close Close connection after query? Default: \code{TRUE}
 #' @param ... Additional arguments to \code{DBI::dbGetQuery}
-#'
 #' @return Result of \code{DBI::dbGetQuery}
-#' @export
-#'
-#' @importFrom DBI dbGetQuery, dbDisconnect
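+#' @examples
+#'
+#' \dontrun{
+#' # minimal sketch: assumes a local NASIS installation;
+#' # the table and column names are illustrative
+#' con <- dbConnectNASIS()
+#' d <- dbQueryNASIS(con, "SELECT siteiid, usiteid FROM site_View1")
+#' }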
+#' @export dbQueryNASIS
 dbQueryNASIS <- function(conn, q, close = TRUE, ...)
 {
 
   if (inherits(conn, 'try-error'))
@@ -29,14 +30,18 @@ dbQueryNASIS <- function(conn, q, close = TRUE, ...) {
   return(dd)
 }
 
+
+
+#' Create a connection to a local NASIS database
+#'
 #' Create a connection to a local NASIS database
-#'
-#' @param static_path Optional: path to SQLite database containing NASIS table structure; Default: \code{NULL}
-#'
-#' @return A \code{DBIConnection} object, as returned by \code{DBI::dbConnect()}.
 #'
-#' @export
-#'
+#'
+#' @param static_path Optional: path to SQLite database containing NASIS table
+#' structure; Default: \code{NULL}
+#' @return A \code{DBIConnection} object, as returned by
+#' \code{DBI::dbConnect()}.
+#' @export dbConnectNASIS
 dbConnectNASIS <- function(static_path = NULL) {
 
   # TODO: NASIS sqlite snapshot connection via DBI/RSQLite
diff --git a/R/estimateColorMixture.R b/R/estimateColorMixture.R
index 1950765a..1d9e3502 100644
--- a/R/estimateColorMixture.R
+++ b/R/estimateColorMixture.R
@@ -3,19 +3,24 @@
 
 ## all colors are mixed, should be applied to groups of related colors
 
-#' @title Estimate color mixtures using weighted average of CIELAB color coordinates
-#'
-#' @note See \code{\link[aqp]{mixMunsell}} for a more realistic (but slower) simulation of subtractive mixing of pigments.
+
+
+#' Estimate color mixtures using weighted average of CIELAB color coordinates
 #'
-#' @author D.E. Beaudette
-#'
-#' @param x data.frame, typically from NASIS containing at least CIE LAB ('L', 'A', 'B') and some kind of weight
-#' @param wt fractional weights, usually area of hz face
-#' @param backTransform logical, should the mixed sRGB representation of soil color be transformed to closest Munsell chips? This is performed by aqp::rgb2Munsell default: \code{FALSE}
-#'
+#' Estimate color mixtures using weighted average of CIELAB color coordinates
+#'
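+#' A sketch with hypothetical data: two colors, weighted 60/40:
+#'
+#' \preformatted{
+#' x <- data.frame(L = c(30, 60), A = c(10, 5), B = c(20, 15),
+#'                 pct = c(60, 40))
+#' estimateColorMixture(x, wt = 'pct', backTransform = TRUE)
+#' }
+#'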
+#' @param x data.frame, typically from NASIS containing at least CIE LAB ('L',
+#' 'A', 'B') and some kind of weight
+#' @param wt fractional weights, usually area of hz face
+#' @param backTransform logical, should the mixed sRGB representation of soil
+#' color be transformed to closest Munsell chips? This is performed by
+#' \code{aqp::rgb2Munsell}; default: \code{FALSE}
+#' @return A data.frame containing estimated color mixture
+#' @note See \code{\link[aqp]{mixMunsell}} for a more realistic (but slower)
+#' simulation of subtractive mixing of pigments.
+#' @author D.E. Beaudette
 #' @export estimateColorMixture
-#'
 estimateColorMixture <- function(x, wt='pct', backTransform=FALSE) {
 
   ## TODO: account for backtransform == TRUE, different return structure
diff --git a/R/fetchHenry.R b/R/fetchHenry.R
index a9ce8e3a..45004cde 100644
--- a/R/fetchHenry.R
+++ b/R/fetchHenry.R
@@ -169,6 +169,74 @@ month2season <- function(x) {
 
 # this loads and packages the data into a list of objects
+
+
+#' Download Data from the Henry Mount Soil Temperature and Water Database
+#'
+#' This function is a front-end to the REST query functionality of the Henry
+#' Mount Soil Temperature and Water Database.
+#'
+#' Filling missing days with NA is useful for computing an index of how
+#' complete the data are, and for estimating (mostly) unbiased MAST and
+#' seasonal mean soil temperatures. Summaries are computed by first averaging
+#' over Julian day, then averaging over all days of the year (MAST) or just
+#' those days that occur within "summer" or "winter". This approach makes it
+#' possible to estimate summaries in the presence of missing data. The quality
+#' of summaries should be weighted by the number of "functional years" (number
+#' of years with non-missing data after combining data by Julian day) and
+#' "complete years" (number of years of data with >= 365 days of non-missing
+#' data).
+#'
+#' @aliases fetchHenry month2season summarizeSoilTemperature
+#' @param what type of data to return: 'sensors': sensor metadata only |
+#' 'soiltemp': sensor metadata + soil temperature data | 'soilVWC': sensor
+#' metadata + soil moisture data | 'airtemp': sensor metadata + air temperature
+#' data | 'waterlevel': sensor metadata + water level data | 'all': sensor
+#' metadata + all sensor data
+#' @param usersiteid (optional) filter results using a NASIS user site ID
+#' @param project (optional) filter results using a project ID
+#' @param sso (optional) filter results using a soil survey office code
+#' @param gran data granularity: "day", "week", "month", "year"; returned data
+#' are averages
+#' @param start.date (optional) starting date filter
+#' @param stop.date (optional) ending date filter
+#' @param pad.missing.days should missing data ("day" granularity) be filled
+#' with NA? see details
+#' @param soiltemp.summaries should soil temperature ("day" granularity only)
+#' be summarized? see details
+#' @return a list containing:
+#' \item{sensors}{a \code{SpatialPointsDataFrame} object containing site-level information}
+#' \item{soiltemp}{a \code{data.frame} object containing soil temperature timeseries data}
+#' \item{soilVWC}{a \code{data.frame} object containing soil moisture timeseries data}
+#' \item{airtemp}{a \code{data.frame} object containing air temperature timeseries data}
+#' \item{waterlevel}{a \code{data.frame} object containing water level timeseries data}
+#' @note This function and the back-end database are very much a work in
+#' progress.
+#' @author D.E. Beaudette
+#' @seealso \code{\link{fetchSCAN}}
+#' @keywords manip
+#' @examples
+#'
+#' \donttest{
+#' if(requireNamespace("curl") &
+#'     curl::has_internet() &
+#'     require(lattice)) {
+#'
+#'   # get CA630 data as daily averages
+#'   x <- fetchHenry(project='CA630', gran = 'day')
+#'
+#'   # inspect data gaps
+#'   levelplot(factor(!is.na(sensor_value)) ~ doy * factor(year) | name,
+#'             data=x$soiltemp, col.regions=c('grey', 'RoyalBlue'), cuts=1,
+#'             colorkey=FALSE, as.table=TRUE, scales=list(alternating=3),
+#'             par.strip.text=list(cex=0.75), strip=strip.custom(bg='yellow'),
+#'             xlab='Julian Day', ylab='Year')
+#'
+#' }
+#' }
+#'
+#' @export fetchHenry
 fetchHenry <- function(what='all', usersiteid=NULL, project=NULL, sso=NULL, gran='day', start.date=NULL, stop.date=NULL, pad.missing.days=TRUE, soiltemp.summaries=TRUE) {
 
   # check for required packages
diff --git a/R/fetchKSSL.R b/R/fetchKSSL.R
index 1db8322a..dbe251c9 100644
--- a/R/fetchKSSL.R
+++ b/R/fetchKSSL.R
@@ -1,4 +1,3 @@
-
 # create a valid URL filter for SoilWeb API
 # arguments are NA by default
 .buildFilter <- function(series, bbox, mlra, pedlabsampnum, pedon_id, pedon_key) {
@@ -158,6 +157,113 @@
 
 # fully vectorized in all arguments except BBOX
+
+
+#' Fetch KSSL Data
+#'
+#' Download soil characterization and morphologic data via BBOX, MLRA, or soil
+#' series name query, from the KSSL database.
+#'
+#' This is an experimental interface to a subset of the most commonly used
+#' data from a snapshot of KSSL (lab characterization) and NASIS (morphologic)
+#' data.
+#'
+#' Series-queries are case insensitive. Series name is based on the "correlated
+#' as" field (from KSSL snapshot) when present. The "sampled as"
+#' classification was promoted to "correlated as" if the "correlated as"
+#' classification was missing.
The "sampled as" +#' classification was promoted to "correlated as" if the "correlated as" +#' classification was missing. +#' +#' When \code{returnMorphologicData} is TRUE, the resulting object is a list. +#' The standard output from \code{fetchKSSL} (\code{SoilProfileCollection} +#' object) is stored in the named element "SPC". The additional elements are +#' basic morphologic data: soil color, rock fragment volume, pores, structure, +#' and redoximorphic features. There is a 1:many relationship between the +#' horizon data in "SPC" and the additional dataframes in \code{morph}. See +#' examples for ideas on how to "flatten" these tables. +#' +#' When \code{returnGeochemicalData} is TRUE, the resulting object is a list. +#' The standard output from \code{fetchKSSL} (\code{SoilProfileCollection} +#' object) is stored in the named element "SPC". The additional elements are +#' geochemical and mineralogy analysis tables, specifically: +#' geochemical/elemental analyses "geochem", optical mineralogy "optical", and +#' X-ray diffraction / thermal "xrd_thermal". \code{returnGeochemicalData} will +#' include additional dataframes \code{geochem}, \code{optical}, and +#' \code{xrd_thermal} in list result. +#' +#' Setting \code{simplifyColors=TRUE} will automatically flatten the soil color +#' data and join to horizon level attributes. +#' +#' Function arguments (\code{series}, \code{mlra}, etc.) are fully vectorized +#' except for \code{bbox}. +#' +#' @param series vector of soil series names, case insensitive +#' @param bbox a single bounding box in WGS84 geographic coordinates e.g. +#' \code{c(-120, 37, -122, 38)} +#' @param mlra vector of MLRA IDs, e.g. "18" or "22A" +#' @param pedlabsampnum vector of KSSL pedon lab sample number +#' @param pedon_id vector of user pedon ID +#' @param pedon_key vector of KSSL internal pedon ID +#' @param returnMorphologicData logical, optionally request basic morphologic +#' data, see details section +#' @param returnGeochemicalData logical, optionally request geochemical, +#' optical and XRD/thermal data, see details section +#' @param simplifyColors logical, simplify colors (from morphologic data) and +#' join with horizon data +#' @param progress logical, optionally give progress when iterating over +#' multiple requests +#' @return a \code{SoilProfileCollection} object when +#' \code{returnMorphologicData} is FALSE, otherwise a list. +#' @note SoilWeb maintains a snapshot of these KSSL and NASIS data. The SoilWeb +#' snapshot was developed using methods described here: +#' \url{https://github.com/dylanbeaudette/process-kssl-snapshot}. Please use +#' the link below for the live data. +#' @author D.E. Beaudette and A.G. 
+#'
+#' When \code{returnGeochemicalData} is TRUE, the resulting object is a list.
+#' The standard output from \code{fetchKSSL} (\code{SoilProfileCollection}
+#' object) is stored in the named element "SPC". The additional elements are
+#' geochemical and mineralogy analysis tables, specifically:
+#' geochemical/elemental analyses "geochem", optical mineralogy "optical", and
+#' X-ray diffraction / thermal "xrd_thermal". \code{returnGeochemicalData} will
+#' include additional dataframes \code{geochem}, \code{optical}, and
+#' \code{xrd_thermal} in the list result.
+#'
+#' Setting \code{simplifyColors=TRUE} will automatically flatten the soil color
+#' data and join to horizon level attributes.
+#'
+#' Function arguments (\code{series}, \code{mlra}, etc.) are fully vectorized
+#' except for \code{bbox}.
+#'
+#' @param series vector of soil series names, case insensitive
+#' @param bbox a single bounding box in WGS84 geographic coordinates e.g.
+#' \code{c(-120, 37, -122, 38)}
+#' @param mlra vector of MLRA IDs, e.g. "18" or "22A"
+#' @param pedlabsampnum vector of KSSL pedon lab sample number
+#' @param pedon_id vector of user pedon ID
+#' @param pedon_key vector of KSSL internal pedon ID
+#' @param returnMorphologicData logical, optionally request basic morphologic
+#' data, see details section
+#' @param returnGeochemicalData logical, optionally request geochemical,
+#' optical and XRD/thermal data, see details section
+#' @param simplifyColors logical, simplify colors (from morphologic data) and
+#' join with horizon data
+#' @param progress logical, optionally give progress when iterating over
+#' multiple requests
+#' @return a \code{SoilProfileCollection} object when
+#' \code{returnMorphologicData} is FALSE, otherwise a list.
+#' @note SoilWeb maintains a snapshot of these KSSL and NASIS data. The SoilWeb
+#' snapshot was developed using methods described here:
+#' \url{https://github.com/dylanbeaudette/process-kssl-snapshot}. Please use
+#' the link below for the live data.
+#' @author D.E. Beaudette and A.G. Brown
+#' @seealso \code{\link{fetchOSD}}
+#' @references \url{http://ncsslabdatamart.sc.egov.usda.gov/}
+#' @keywords utilities
+#' @examples
+#'
+#' \donttest{
+#' if(requireNamespace("curl") &
+#'     curl::has_internet()) {
+#'
+#' library(aqp)
+#' library(plyr)
+#' library(reshape2)
+#'
+#' # search by series name
+#' s <- fetchKSSL(series='auburn')
+#'
+#' # search by bounding-box
+#' # s <- fetchKSSL(bbox=c(-120, 37, -122, 38))
+#'
+#' # how many pedons
+#' length(s)
+#'
+#' # plot
+#' plotSPC(s, name='hzn_desgn', max.depth=150)
+#'
+#' ##
+#' ## morphologic data
+#' ##
+#'
+#' # get lab and morphologic data
+#' s <- fetchKSSL(series='auburn', returnMorphologicData = TRUE)
+#'
+#' # extract SPC
+#' pedons <- s$SPC
+#'
+#' ## automatically simplify color data
+#' s <- fetchKSSL(series='auburn', returnMorphologicData = TRUE, simplifyColors=TRUE)
+#'
+#' # check
+#' par(mar=c(0,0,0,0))
+#' plot(pedons, color='moist_soil_color', print.id=FALSE)
+#'
+#' }
+#' }
+#'
+#' @export fetchKSSL
 fetchKSSL <- function(series=NA, bbox=NA, mlra=NA, pedlabsampnum=NA, pedon_id=NA, pedon_key=NA, returnMorphologicData=FALSE, returnGeochemicalData=FALSE, simplifyColors=FALSE, progress=TRUE) {
 
   if(!requireNamespace('jsonlite', quietly=TRUE))
diff --git a/R/fetchNASIS.R b/R/fetchNASIS.R
index 134d3b96..5ec03aa5 100644
--- a/R/fetchNASIS.R
+++ b/R/fetchNASIS.R
@@ -1,34 +1,84 @@
 # convenient interface to local NASIS data
 # from: pedons | components | lab | ???
 # ... : arguments passed on to helper functions
+
+
 #' Fetch commonly used site/pedon/horizon or component data from NASIS.
-#' @description Fetch commonly used site/pedon/horizon data or component from NASIS, returned as a SoilProfileCollection object.
-#' @param from determines what objects should fetched? ('pedons' | 'components' | 'pedon_report')
-#' @param url string specifying the url for the NASIS pedon_report (default: NULL)
-#' @param SS fetch data from the currently loaded selected set in NASIS or from the entire local database (default: TRUE)
-#' @param rmHzErrors should pedons with horizon depth errors be removed from the results? (default: TRUE)
-#' @param nullFragsAreZero should fragment volumes of NULL be interpreted as 0? (default: TRUE), see details
-#' @param soilColorState which colors should be used to generate the convenience field 'soil_color'? ('moist' | 'dry')
-#' @param lab should the phlabresults child table be fetched with site/pedon/horizon data (default: FALSE)
-#' @param fill (fetchNASIS(from='components') only: include component records without horizon data in result? (default: FALSE)
-#' @param stringsAsFactors logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have been set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)
-#' @param static_path Optional: path to local SQLite database containing NASIS table structure; default: NULL
-#'
-#' @details This function imports data from NASIS into R as a \code{SoilProfileCollection} object. It "flattens" NASIS pedon and component tables, including their child tables, into several more easily manageable data frames. Primarily these functions access the local NASIS database using an ODBC connection. However using the \code{fetchNASIS()} argument \code{from = "pedon_report"}, data can be read from the NASIS Report 'fetchNASIS', as either a txt file or url. The primary purpose of \code{fetchNASIS(from = "pedon_report")} is to facilitate importing datasets larger than 8000+ pedons/components.
The primary purpose of \code{fetchNASIS(from = "pedon_report")} is to facilitate importing datasets larger than 8000+ pedons/components.
 #'
-#' The value of \code{nullFragsAreZero} will have a significant impact on the rock fragment fractions returned by fetchNASIS. Set \code{nullFragsAreZero = FALSE} in those cases where there are many data-gaps and \code{NULL} rock fragment values should be interpreted as \code{NULL}. Set \code{nullFragsAreZero = TRUE} in those cases where \code{NULL} rock fragment values should be interpreted as 0.
+#' Fetch commonly used site/pedon/horizon or component data from NASIS,
+#' returned as a SoilProfileCollection object.
+#'
+#' This function imports data from NASIS into R as a
+#' \code{SoilProfileCollection} object. It "flattens" NASIS pedon and component
+#' tables, including their child tables, into several more easily manageable
+#' data frames. Primarily these functions access the local NASIS database using
+#' an ODBC connection. However, using the \code{fetchNASIS()} argument
+#' \code{from = "pedon_report"}, data can be read from the NASIS Report
+#' 'fetchNASIS', as either a text file or URL. The primary purpose of
+#' \code{fetchNASIS(from = "pedon_report")} is to facilitate importing datasets
+#' of more than 8,000 pedons/components.
+#'
+#' The value of \code{nullFragsAreZero} will have a significant impact on the
+#' rock fragment fractions returned by fetchNASIS. Set \code{nullFragsAreZero =
+#' FALSE} in those cases where there are many data-gaps and \code{NULL} rock
+#' fragment values should be interpreted as \code{NULL}. Set
+#' \code{nullFragsAreZero = TRUE} in those cases where \code{NULL} rock
+#' fragment values should be interpreted as 0.
 #'
-#' This function attempts to do most of the boilerplate work when extracting site/pedon/horizon or component data from a local NASIS database. Pedons that are missing horizon data, or have errors in their horizonation are excluded from the returned object, however, their IDs are printed on the console. Pedons with combination horizons (e.g. B/C) are erroneously marked as errors due to the way in which they are stored in NASIS as two overlapping horizon records.
+#' This function attempts to do most of the boilerplate work when extracting
+#' site/pedon/horizon or component data from a local NASIS database. Pedons
+#' that are missing horizon data, or have errors in their horizonation, are
+#' excluded from the returned object; however, their IDs are printed on the
+#' console. Pedons with combination horizons (e.g. B/C) are erroneously marked
+#' as errors due to the way in which they are stored in NASIS as two
+#' overlapping horizon records.
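+#'
+#' A minimal sketch of typical usage (assuming a working local NASIS
+#' connection; object names are illustrative and the calls are not run here):
+#'
+#' ```
+#' # site/pedon/horizon data from the current selected set,
+#' # treating NULL rock fragment volumes as 0
+#' p <- fetchNASIS(from = 'pedons', SS = TRUE, nullFragsAreZero = TRUE)
+#'
+#' # component data from the entire local database,
+#' # keeping components without horizon data
+#' co <- fetchNASIS(from = 'components', SS = FALSE, fill = TRUE)
+#' ```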
#'
-#' - \href{http://ncss-tech.github.io/AQP/soilDB/fetchNASIS-mini-tutorial.html}{fetchNASIS Pedons Tutorial}
-#'
-#' - \href{http://ncss-tech.github.io/AQP/soilDB/NASIS-component-data.html}{fetchNASIS Components Tutorial}
+#' Tutorials:
 #'
+#' - [fetchNASIS Pedons Tutorial](http://ncss-tech.github.io/AQP/soilDB/fetchNASIS-mini-tutorial.html)
+#' - [fetchNASIS Components Tutorial](http://ncss-tech.github.io/AQP/soilDB/NASIS-component-data.html)
+#'
+#' @aliases fetchNASIS get_phorizon_from_NASIS_db
+#' get_component_copm_data_from_NASIS_db
+#' get_component_horizon_data_from_NASIS_db
+#' get_component_correlation_data_from_NASIS_db
+#' get_component_cogeomorph_data_from_NASIS_db
+#' get_component_esd_data_from_NASIS_db
+#' get_component_otherveg_data_from_NASIS_db get_copedon_from_NASIS_db
+#' get_mapunit_from_NASIS
+#' get_projectmapunit_from_NASIS get_component_diaghz_from_NASIS_db
+#' get_mutext_from_NASIS_db get_phfmp_from_NASIS_db get_RMF_from_NASIS_db
+#' get_concentrations_from_NASIS_db fetchVegdata get_vegplot_from_NASIS_db
+#' get_vegplot_location_from_NASIS_db get_vegplot_species_from_NASIS_db
+#' get_vegplot_textnote_from_NASIS_db get_vegplot_transect_from_NASIS_db
+#' get_vegplot_transpecies_from_NASIS_db
+#' get_vegplot_tree_si_details_from_NASIS_db
+#' get_vegplot_tree_si_summary_from_NASIS_db get_vegplot_trhi_from_NASIS_db
+#' get_legend_from_NASIS get_lmuaoverlap_from_NASIS
+#' @param from determines what objects should be fetched ('pedons' | 'components' | 'pedon_report')
+#' @param url string specifying the URL for the NASIS pedon_report (default:
+#' `NULL`)
+#' @param SS fetch data from the currently loaded selected set in NASIS or from
+#' the entire local database (default: `TRUE`)
+#' @param rmHzErrors should pedons with horizon depth errors be removed from
+#' the results? (default: `TRUE`)
+#' @param nullFragsAreZero should fragment volumes of `NULL` be interpreted as `0`?
+#' (default: `TRUE`), see details
+#' @param soilColorState which colors should be used to generate the
+#' convenience field `soil_color`? (`'moist'` or `'dry'`)
+#' @param lab should the `phlabresults` child table be fetched with
+#' site/pedon/horizon data? (default: `FALSE`)
+#' @param fill (`fetchNASIS(from='components')` only) include component records
+#' without horizon data in result? (default: `FALSE`)
+#' @param stringsAsFactors logical: should character vectors be converted to
+#' factors? This argument is passed to the `uncode()` function. It does not
+#' convert those vectors that have been set outside of `uncode()` (i.e. hard
+#' coded).
+#' @param static_path Optional: path to local SQLite database containing NASIS
+#' table structure; default: `NULL`
 #' @return A SoilProfileCollection object
 #' @author D. E. Beaudette, J. M. Skovlin, S.M. Roecker, A.G. 
Brown
-#' @aliases get_phorizon_from_NASIS_db get_component_copm_data_from_NASIS_db get_component_horizon_data_from_NASIS_db get_component_correlation_data_from_NASIS_db get_component_copm_data_from_NASIS_db get_component_cogeomorph_data_from_NASIS_db get_component_esd_data_from_NASIS_db get_component_otherveg_data_from_NASIS_db get_copedon_from_NASIS_db get_legend_from_NASISget_lmuaoverlap_from_NASIS get_mapunit_from_NASIS get_projectmapunit_from_NASIS get_component_diaghz_from_NASIS_db get_mutext_from_NASIS_db get_phfmp_from_NASIS_db get_RMF_from_NASIS_db get_concentrations_from_NASIS_db fetchVegdata get_vegplot_from_NASIS_db get_vegplot_location_from_NASIS_db get_vegplot_species_from_NASIS_db get_vegplot_textnote_from_NASIS_db get_vegplot_transect_from_NASIS_db get_vegplot_transpecies_from_NASIS_db get_vegplot_tree_si_details_from_NASIS_db get_vegplot_tree_si_summary_from_NASIS_db get_vegplot_trhi_from_NASIS_db get_legend_from_NASIS get_lmuaoverlap_from_NASIS
-#' @export
-#'
+#' @export fetchNASIS
 fetchNASIS <- function(from='pedons',
                        url = NULL,
                        SS = TRUE,
diff --git a/R/fetchNASISLabData.R b/R/fetchNASISLabData.R
index e1c5540f..f662dc2a 100644
--- a/R/fetchNASISLabData.R
+++ b/R/fetchNASISLabData.R
@@ -1,4 +1,27 @@
 # convenience function for loading most commonly used information from local NASIS database
+
+
+#' Fetch laboratory pedon/horizon data from a local NASIS database.
+#'
+#' Fetch KSSL laboratory pedon/horizon layer data from a local NASIS database,
+#' return as a SoilProfileCollection object.
+#'
+#' This function currently works only on Windows, and requires a 'nasis_local'
+#' ODBC connection.
+#'
+#' @param SS fetch data from the currently loaded selected set in NASIS or from
+#' the entire local database (default: TRUE)
+#' @return a SoilProfileCollection class object
+#' @note This function attempts to do most of the boilerplate work when
+#' extracting KSSL laboratory site/horizon data from a local NASIS database.
+#' Lab pedons that have errors in their horizonation are excluded from the
+#' returned object; however, their IDs are printed on the console. See
+#' \code{\link{getHzErrorsNASIS}} for a simple approach to identifying pedons
+#' with problematic horizonation.
+#' @author J.M. Skovlin and D.E. 
Beaudette
+#' @seealso \code{\link{get_labpedon_data_from_NASIS_db}}
+#' @keywords manip
+#' @export fetchNASISLabData
 fetchNASISLabData <- function(SS = TRUE) {
 
   # test connection
diff --git a/R/fetchNASISWebReport.R b/R/fetchNASISWebReport.R
new file mode 100644
index 00000000..587c8a94
--- /dev/null
+++ b/R/fetchNASISWebReport.R
@@ -0,0 +1,449 @@
+
+#' Extract component tables from NASIS Web Reports
+#'
+#' @aliases fetchNASISWebReport get_project_from_NASISWebReport
+#' get_progress_from_NASISWebReport get_project_correlation_from_NASISWebReport
+#' get_legend_from_NASISWebReport get_mapunit_from_NASISWebReport
+#' get_projectmapunit_from_NASISWebReport
+#' get_projectmapunit2_from_NASISWebReport get_component_from_NASISWebReport
+#' get_chorizon_from_NASISWebReport get_cosoilmoist_from_NASISWebReport
+#' get_sitesoilmoist_from_NASISWebReport get_lmuaoverlap_from_NASISWebReport
+#'
+#' @param projectname text string vector of project names to be inserted into a
+#' SQL WHERE clause (default: `NA`)
+#' @param mlraoffice text string value identifying the MLRA Regional Soil
+#' Survey Office group name inserted into a SQL WHERE clause (default: `NA`)
+#' @param mlrassoarea text string value identifying the MLRA Soil Survey Office
+#' area symbol inserted into a SQL WHERE clause (default: `NA`)
+#' @param fiscalyear text string value identifying the fiscal year inserted
+#' into a SQL WHERE clause (default: `NA`)
+#' @param projecttypename text string value identifying the project type name
+#' inserted into a SQL WHERE clause (default: `NA`)
+#' @param areasymbol text string value identifying the area symbol (e.g.
+#' `IN001` or `IN%`) inserted into a SQL WHERE clause (default: `NA`)
+#' @param fill should component records without horizon data be kept in the
+#' result? (default: `FALSE`, i.e. they are removed)
+#' @param rmHzErrors should pedons with horizonation errors be removed from the
+#' results? (default: `FALSE`)
+#' @param stringsAsFactors logical: should character vectors be converted to
+#' factors? This argument is passed to the `uncode()` function. It does not
+#' convert those vectors that have been set outside of `uncode()` (i.e. hard
+#' coded). The 'factory-fresh' default is TRUE, but this can be changed by
+#' setting `options(stringsAsFactors = FALSE)`
+#' @param droplevels logical: indicating whether to drop unused levels in
+#' classifying factors. This is useful when a class has a large number of
+#' unused levels, which can waste space in tables and figures.
+#' @return A data.frame or list with the results.
+#' @author Stephen Roecker
+#' @keywords manip
+#' @examples
+#'
+#' \donttest{
+#'
+#' if (requireNamespace("curl") &
+#'     curl::has_internet() &
+#'     require("aqp") &
+#'     require("ggplot2") &
+#'     require("gridExtra")
+#'   ) {
+#'   # query soil components by projectname
+#'   test = fetchNASISWebReport(
+#'     "EVAL - MLRA 111A - Ross silt loam, 0 to 2 percent slopes, frequently flooded"
+#'   )
+#'   test = test$spc
+#'
+#'   # profile plot
+#'   plot(test)
+#'
+#'   # convert the data for depth plot
+#'   clay_slice = horizons(slice(test, 0:200 ~ claytotal_l + claytotal_r + claytotal_h))
+#'   names(clay_slice) <- gsub("claytotal_", "", names(clay_slice))
+#'
+#'   om_slice = horizons(slice(test, 0:200 ~ om_l + om_r + om_h))
+#'   names(om_slice) = gsub("om_", "", names(om_slice))
+#'
+#'   test2 = rbind(data.frame(clay_slice, var = "clay"),
+#'                 data.frame(om_slice, var = "om")
+#'   )
+#'
+#'   h = merge(test2, site(test)[c("dmuiid", "coiid", "compname", "comppct_r")],
+#'             by = "coiid",
+#'             all.x = TRUE
+#'   )
+#'
+#'   # depth plot of clay content by soil component
+#'   gg_comp <- function(x) {
+#'     ggplot(x) +
+#'       geom_line(aes(y = r, x = hzdept_r)) +
+#'       geom_ribbon(aes(ymin = l, ymax = h, x = hzdept_r), alpha = 0.2) +
+#'       xlim(200, 0) +
+#'       xlab("depth (cm)") +
+#'       facet_grid(var ~ dmuiid + paste(compname, comppct_r)) +
+#'       coord_flip()
+#'   }
+#'   g1 <- gg_comp(subset(h, var == "clay"))
+#'   g2 <- gg_comp(subset(h, var == "om"))
+#'
+#'   grid.arrange(g1, g2)
+#'
+#'
+#'   # query cosoilmoist (e.g. water table data) by project name
+#'   # NA depths are interpreted as (???) with impute=TRUE argument
+#'   x <- get_cosoilmoist_from_NASISWebReport(
+#'     "EVAL - MLRA 111A - Ross silt loam, 0 to 2 percent slopes, frequently flooded"
+#'   )
+#'
+#'   ggplot(x, aes(x = as.integer(month), y = dept_r, lty = status)) +
+#'     geom_rect(aes(xmin = as.integer(month), xmax = as.integer(month) + 1,
+#'                   ymin = 0, ymax = max(x$depb_r),
+#'                   fill = flodfreqcl)) +
+#'     geom_line(cex = 1) +
+#'     geom_point() +
+#'     geom_ribbon(aes(ymin = dept_l, ymax = dept_h), alpha = 0.2) +
+#'     ylim(max(x$depb_r), 0) +
+#'     xlab("month") + ylab("depth (cm)") +
+#'     scale_x_continuous(breaks = 1:12, labels = month.abb, name="Month") +
+#'     facet_wrap(~ paste0(compname, ' (', comppct_r , ')')) +
+#'     ggtitle(paste0(x$nationalmusym[1],
+#'                    ': Water Table Levels from Component Soil Moisture Month Data'))
+#'
+#'
+#' }
+#'
+#' }
+#'
+#' @export fetchNASISWebReport
+fetchNASISWebReport <- function(projectname, rmHzErrors = FALSE, fill = FALSE,
+                                stringsAsFactors = default.stringsAsFactors()
+) {
+
+  # load data in pieces
+  f.mapunit   <- get_projectmapunit_from_NASISWebReport(projectname, stringsAsFactors = stringsAsFactors)
+  f.component <- get_component_from_NASISWebReport(projectname, stringsAsFactors = stringsAsFactors)
+  f.chorizon  <- get_chorizon_from_NASISWebReport(projectname, fill, stringsAsFactors = stringsAsFactors)
+
+  # return NULL if one of the required pieces is missing
+  if(is.null(f.mapunit) | is.null(f.component) | is.null(f.chorizon)) {
+    message("One or more inputs for fetchNASISWebReport (mapunit, component, or horizon) is NULL, returning NULL.")
+    return(NULL)
+  }
+
+
+  # optionally test for bad horizonation... flag, and remove
+  if (rmHzErrors) {
+    f.chorizon.test <- plyr::ddply(f.chorizon, 'coiid', function(d) {
+      res <- aqp::hzDepthTests(top=d[['hzdept_r']], bottom=d[['hzdepb_r']])
+      return(data.frame(hz_logic_pass=all(!res)))
+    })
+
+    # which are the good (valid) ones? 
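+    # (hz_logic_pass is TRUE only when none of the aqp::hzDepthTests checks are flagged)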
+ good.ids <- as.character(f.chorizon.test$coiid[which(f.chorizon.test$hz_logic_pass)]) + bad.ids <- as.character(f.chorizon.test$coiid[which(! f.chorizon.test$hz_logic_pass)]) + + # keep the good ones + f.chorizon <- f.chorizon[which(f.chorizon$coiid %in% good.ids), ] + + # keep track of those components with horizonation errors + if(length(bad.ids) > 0) + assign('component.hz.problems', value=bad.ids, envir=soilDB.env) + } + + # upgrade to SoilProfilecollection + depths(f.chorizon) <- coiid ~ hzdept_r + hzdepb_r + + + ## TODO: this will fail in the presence of duplicates + ## TODO: make this error more informative + # add site data to object + site(f.chorizon) <- f.component # left-join via coiid + + # set NASIS-specific horizon identifier + hzidname(f.chorizon) <- 'chiid' + + # print any messages on possible data quality problems: + if (exists('component.hz.problems', envir=soilDB.env)) + message("-> QC: horizon errors detected, use `get('component.hz.problems', envir=soilDB.env)` for related cokey values") + + # done, return SPC + return(list(spc = f.chorizon, mapunit = f.mapunit)) + +} + +#' @rdname fetchNASISWebReport +get_component_from_NASISWebReport <- function(projectname, stringsAsFactors = default.stringsAsFactors()) { + + url <- "https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_component_from_NASISWebReport" + + d.component <- lapply(projectname, function(x) { + message("getting project '", x, "' from NasisReportsWebSite \n", sep = "") + args = list(p_projectname = x) + d = tryCatch(parseWebReport(url, args), + error = function(e) { + message(e) + return(NULL) + }) + }) + + d.component <- do.call("rbind", d.component) + + if(is.null(d.component)) + return(NULL) + + # set factor levels according to metadata domains + d.component <- uncode(d.component, db = "LIMS", stringsAsFactors = stringsAsFactors) + + # prep + d.component <- .cogmd_prep(d.component, db = "LIMS") + + + # return data.frame + return(d.component) + +} + + +#' @rdname fetchNASISWebReport +get_chorizon_from_NASISWebReport <- function(projectname, fill = FALSE, stringsAsFactors = default.stringsAsFactors()) { + + url <- "https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_chorizon_from_NASISWebReport" + + d.chorizon <- lapply(projectname, function(x) { + args = list(p_projectname = x) + d = parseWebReport(url, args) + }) + d.chorizon <- do.call("rbind", d.chorizon) + + ## TODO: might be nice to abstract this into a new function + # hacks to make R CMD check --as-cran happy: + metadata <- NULL + # load local copy of metadata + load(system.file("data/metadata.rda", package="soilDB")[1]) + + # transform variables and metadata + if (!all(is.na(d.chorizon$chiid))) { + d.chorizon <- within(d.chorizon, { + texture = tolower(texture) + if (stringsAsFactors == TRUE) { + texcl = factor(texcl, + levels = metadata[metadata$ColumnPhysicalName == "texcl", "ChoiceValue"], + labels = metadata[metadata$ColumnPhysicalName == "texcl", "ChoiceName"] + ) + } + }) + } + + # fill + if (fill == FALSE) { + d.chorizon <- d.chorizon[!is.na(d.chorizon$chiid), ] + } + + # return data.frame + return(d.chorizon) + +} + + + +#' @rdname fetchNASISWebReport +get_legend_from_NASISWebReport <- function(mlraoffice, areasymbol, droplevels = TRUE, stringsAsFactors = default.stringsAsFactors()) { + + url <- "https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_legend_from_NASISWebReport" + + args <- list(p_mlraoffice = mlraoffice, p_areasymbol = areasymbol) + + 
d.legend <- parseWebReport(url, args) + + + # set factor levels according to metadata domains + # data is coming back uncoded from LIMS so db is set to "SDA" + d.legend <- uncode(d.legend, + db = "SDA", + droplevels = droplevels, + stringsAsFactors = stringsAsFactors + ) + + # date + d.legend$cordate <- as.Date(d.legend$cordate) + + # return data.frame + return(d.legend) + +} + + + +#' @rdname fetchNASISWebReport +get_lmuaoverlap_from_NASISWebReport <- function(areasymbol, droplevels = TRUE, stringsAsFactors = default.stringsAsFactors()) { + url <- "https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_lmuaoverlap_from_NASISWebReport" + + d <- lapply(areasymbol, function(x) { + message("getting legend for '", x, "' from NasisReportsWebSite \n", sep = "") + args = list(p_areasymbol = x) + d = parseWebReport(url, args) + }) + d <- do.call("rbind", d) + + + # set factor levels according to metadata domains + # data is coming back uncoded from LIMS so db is set to "SDA" + d <- uncode(d, + db = "SDA", + droplevels = droplevels, + stringsAsFactors = stringsAsFactors + ) + + # return data.frame + return(d) + +} + + + +#' @rdname fetchNASISWebReport +get_mapunit_from_NASISWebReport <- function(areasymbol, droplevels = TRUE, stringsAsFactors = default.stringsAsFactors()) { + url <- "https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_mapunit_from_NASISWebReport" + + d.mapunit <- lapply(areasymbol, function(x) { + message("getting map units for '", x, "' from NasisReportsWebSite \n", sep = "") + args = list(p_areasymbol = x) + d = parseWebReport(url, args) + }) + d.mapunit <- do.call("rbind", d.mapunit) + + d.mapunit$musym = as.character(d.mapunit$musym) + + # set factor levels according to metadata domains + # data is coming back uncoded from LIMS so db is set to "SDA" + d.mapunit <- uncode(d.mapunit, + db = "SDA", + droplevels = droplevels, + stringsAsFactors = stringsAsFactors + ) + + # return data.frame + return(d.mapunit) + +} + + +#' @rdname fetchNASISWebReport +get_projectmapunit_from_NASISWebReport <- function(projectname, stringsAsFactors = default.stringsAsFactors()) { + + url <-"https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_projectmapunit_from_NASISWebReport" + + + d.mapunit <- lapply(projectname, function(x) { + args = list(p_projectname = x) + d = parseWebReport(url, args) + }) + d.mapunit <- do.call("rbind", d.mapunit) + + d.mapunit$musym = as.character(d.mapunit$musym) + + # set factor levels according to metadata domains + d.mapunit <- uncode(d.mapunit, db = "LIMS", stringsAsFactors = stringsAsFactors) + + # return data.frame + return(d.mapunit) + +} + + +#' @rdname fetchNASISWebReport +get_projectmapunit2_from_NASISWebReport <- function(mlrassoarea, fiscalyear, projectname, stringsAsFactors = default.stringsAsFactors()) { + + url <-"https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_projectmapunit2_from_NASISWebReport" + + args = list(p_mlrassoarea = mlrassoarea, p_fy = fiscalyear, p_projectname = projectname) + d.mapunit = parseWebReport(url, args) + + d.mapunit$musym = as.character(d.mapunit$musym) + + # set factor levels according to metadata domains + # data is coming back uncoded from LIMS so db is set to "SDA" + d.mapunit <- uncode(d.mapunit, db = "SDA", stringsAsFactors = stringsAsFactors) + + # return data.frame + return(d.mapunit) + +} + +#' @rdname fetchNASISWebReport +get_project_from_NASISWebReport <- function(mlrassoarea, fiscalyear) { + + url 
<-"https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_project_from_NASISWebReport" + + args <- list(p_mlrassoarea = mlrassoarea, p_fy = fiscalyear) + + d.project <- parseWebReport(url, args) + + # prep + idx <- unlist(lapply(names(d.project), function(x) grepl("date_", x))) + if (any(idx)) { + d.project[idx] <- lapply(d.project[idx], function(x) as.Date(x, format = "%Y/%m/%d")) + } + + # return data.frame + return(d.project) + +} + + +#' @rdname fetchNASISWebReport +get_progress_from_NASISWebReport <- function(mlrassoarea, fiscalyear, projecttypename) { + + url <-"https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_progress_from_NASISWebReport" + + args <- list(p_mlrassoarea = mlrassoarea, p_fy = fiscalyear, p_projecttypename = projecttypename) + + d.progress <- parseWebReport(url, args) + + # return data.frame + return(d.progress) + +} + + +#' @rdname fetchNASISWebReport +get_project_correlation_from_NASISWebReport <- function(mlrassoarea, fiscalyear, projectname) { + + # nasty hack to trick R CMD check + musym <- NULL + new_musym <- NULL + + url <-"https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_project_correlation_from_NASISWebReport" + + args <- list(p_mlrassoarea = mlrassoarea, p_fy = fiscalyear, p_projectname = projectname) + + d.rcor <- parseWebReport(url, args) + + # compute musym_orig for additional lmapunits, necessary to catch changes to the original musym, due to a constraint on the lmapunit table that prevents duplicate musym for additional mapunits + if (! is.null(d.rcor)) { + + d.rcor <- within(d.rcor, { + n = nchar(musym) + begin_1 = substr(musym, 2, n) + end_1 = substr(musym, 1, n - 1) + end_4 = substr(musym, 1, n - 4) + + idx = musym != new_musym & !is.na(new_musym) + orig_musym = ifelse(idx & musym != begin_1 & (new_musym == begin_1 | substr(musym, 1, 1) %in% c("x", "z")), begin_1, musym) + # Joe recommended using |\\+${1}, but appears to be legit in some cases + orig_musym = ifelse(idx & musym != end_1 & new_musym == end_1 , end_1 , orig_musym) + orig_musym = ifelse(idx & musym != end_4 & new_musym == end_4 , end_4 , orig_musym) + }) + } + + d.rcor[c("n", "begin_1", "end_1", "end_4", "idx")] <- NULL + + # return data.frame + return(d.rcor) + +} + + + + diff --git a/R/fetchNOAA.R b/R/fetchNOAA.R index 3d8b9511..f53e3722 100644 --- a/R/fetchNOAA.R +++ b/R/fetchNOAA.R @@ -5,28 +5,36 @@ # TODO: will provide a variety of aggregation and batch application options # -- given Spatial* object, SoilProfileCollection, Coordinates + + #' Query the NOAA API to get station data near a given latitude and longitude -#' -#' @description Query the NOAA API to get station data (limit 1000 records) near a point. Default extent is plus or minus 0.5 degrees (bounding box) (with \code{bbox = 1}) around the specified point \[lat, lng]. -#' -#' In order to use this function, you must obtain an API token from this website: https://www.ncdc.noaa.gov/cdo-web/token -#' +#' +#' Query the NOAA API to get station data (limit 1000 records) near a point. +#' Default extent is plus or minus 0.5 degrees (bounding box) (with \code{bbox +#' = 1}) around the specified point \[lat, lng]. 
+#'
+#' In order to use this function, you must obtain an API token from this
+#' website: https://www.ncdc.noaa.gov/cdo-web/token
+#'
+#'
 #' @param lat Latitude
 #' @param lng Longitude
 #' @param apitoken API key token for NOAA NCDC web service
-#' @param bbox Optional: Dimension of the bounding box centered at \code{lat}, \code{lng}.
-#'
-#' @return data.frame containing station information for all stations within a bounding box around \code{lat}, \code{lng}.
-#' @export
-#'
+#' @param bbox Optional: Dimension of the bounding box centered at \code{lat},
+#' \code{lng}.
+#' @return data.frame containing station information for all stations within a
+#' bounding box around \code{lat}, \code{lng}.
 #' @examples
-#'
+#'
+#'
 #' ## in order to use this function, you must obtain an API token from this website:
 #' ## https://www.ncdc.noaa.gov/cdo-web/token
-#'
+#'
 #' # stations <- get_NOAA_stations_nearXY(lat = 37, lng = -120,
 #' #                                      apitoken = "yourtokenhere")
-#'
+#'
+#'
+#' @export get_NOAA_stations_nearXY
 get_NOAA_stations_nearXY <- function(lat, lng, apitoken, bbox = 1) {
 
   if(!requireNamespace("httr"))
@@ -98,32 +106,44 @@ get_NOAA_stations_nearXY <- function(lat, lng, apitoken, bbox = 1) {
   return(d$results)
 }
 
-#' Get Global Historical Climatology Network Daily (GHCND) data from NOAA API for given datatype(s), station IDs and years.
-#'
-#' @description Obtain daily climatic summary data for a set of station IDs, years, and datatypes.
-#'
-#' Note that typically results from the NOAA API are limited to 1000 records. However, by "chunking" up data into individual station*year*datatypeid combinations, record results generally do not exceed 365 records for daily summaries.
-#'
-#' In order to use this function, you must obtain an API token from this website: https://www.ncdc.noaa.gov/cdo-web/token
-#'
+
+
+#' Get Global Historical Climatology Network Daily (GHCND) data from NOAA API
+#' for given datatype(s), station IDs and years.
+#'
+#' Obtain daily climatic summary data for a set of station IDs, years, and
+#' datatypes.
+#'
+#' Note that typically results from the NOAA API are limited to 1000 records.
+#' However, by "chunking" up data into individual station * year * datatypeid
+#' combinations, record results generally do not exceed 365 records for daily
+#' summaries.
+#'
+#' In order to use this function, you must obtain an API token from this
+#' website: https://www.ncdc.noaa.gov/cdo-web/token
+#'
+#'
 #' @param stations Station ID (e.g. \code{GHCND:USC00388786})
 #' @param years One or more years (e.g. 
2017:2020)
-#' @param datatypeids One or more NOAA GHCND data type IDs (e.g \code{c("PRCP","SNOW")})
-#' @param apitoken API key token for NOAA NCDC web services (https://www.ncdc.noaa.gov/cdo-web/token)
-#'
-#' @return A data.frame containing the GHCND data requested (limit 1000 records)
-#' @export get_NOAA_GHCND
-#'
+#' @param datatypeids One or more NOAA GHCND data type IDs (e.g.
+#' \code{c("PRCP","SNOW")})
+#' @param apitoken API key token for NOAA NCDC web services
+#' (https://www.ncdc.noaa.gov/cdo-web/token)
+#' @return A data.frame containing the GHCND data requested (limit 1000
+#' records)
 #' @examples
-#'
+#'
+#'
 #'
 #' ## in order to use this function, you must obtain an API token from this website:
 #' ## https://www.ncdc.noaa.gov/cdo-web/token
-#'
+#'
 #' # get_NOAA_GHCND(c("GHCND:USC00388786", "GHCND:USC00388787"),
 #' #                years = 2017:2020,
 #' #                datatypeids = c("PRCP","SNOW"),
 #' #                apitoken = "yourtokenhere")
-#'
+#'
+#'
+#' @export get_NOAA_GHCND
 get_NOAA_GHCND <- function(stations, years, datatypeids, apitoken) {
   do.call('rbind',
           lapply(stations, function(s)
            do.call('rbind',
                    lapply(years, function(y)
diff --git a/R/fetchOSD.R b/R/fetchOSD.R
index 05f36c2d..6e7fe5c9 100644
--- a/R/fetchOSD.R
+++ b/R/fetchOSD.R
@@ -1,4 +1,3 @@
-
 ## tabulte the number of records within each geomorphic table
 ## there could be some cases where there are no records, resulting in FALSE
 # x: object returned by fetchOSD
@@ -46,68 +45,80 @@
 # 2018-10-11: updated to new API, URL subject to change
 # fetch basic OSD, SC, and SoilWeb summaries from new API
+
+
 #' Fetch Official Series Descriptions and summaries from SoilWeb API
 #'
-#' @description This function fetches a variety of data associated with named soil series, extracted from the USDA-NRCS Official Series Description text files and detailed soil survey (SSURGO). These data are periodically updated and made available via SoilWeb.
+#' This function fetches a variety of data associated with named soil series,
+#' extracted from the USDA-NRCS Official Series Description text files and
+#' detailed soil survey (SSURGO). These data are periodically updated and made
+#' available via SoilWeb.
 #'
-#' @param soils a character vector of named soil series; case-insensitive
-#' @param colorState color state for horizon soil color visualization: "moist" or "dry"
-#' @param extended if \code{TRUE} additional soil series summary data are returned, see details
+#' - [Overview of all soil series query functions](https://ncss-tech.github.io/AQP/soilDB/soil-series-query-functions.html)
+#' - [Competing Soil Series](https://ncss-tech.github.io/AQP/soilDB/competing-series.html)
+#' - [Siblings](https://ncss-tech.github.io/AQP/soilDB/siblings.html)
 #'
-#' @details {
-#' \itemize{
-#' \item{\href{https://ncss-tech.github.io/AQP/soilDB/soil-series-query-functions.html}{overview of all soil series query functions}}
-#'
-#' \item{\href{https://ncss-tech.github.io/AQP/soilDB/competing-series.html}{competing soil series}}
-#'
-#' \item{\href{https://ncss-tech.github.io/AQP/soilDB/siblings.html}{siblings}}
-#' }
+#' The standard set of "site" and "horizon" data are returned as a
+#' \code{SoilProfileCollection} object (\code{extended=FALSE}). The "extended"
+#' suite of summary data can be requested by setting \code{extended=TRUE}. 
The
+#' resulting object will be a \code{list} with the following elements:
 #'
+#' \describe{ \item{SPC}{\code{SoilProfileCollection} containing standard
+#' "site" and "horizon" data} \item{competing}{competing soil series from the
+#' SC database snapshot} \item{geog_assoc_soils}{geographically associated
+#' soils, extracted from named section in the OSD} \item{geomcomp}{empirical
+#' probabilities for geomorphic component, derived from the current SSURGO
+#' snapshot} \item{hillpos}{empirical probabilities for hillslope position,
+#' derived from the current SSURGO snapshot} \item{mtnpos}{empirical
+#' probabilities for mountain slope position, derived from the current SSURGO
+#' snapshot} \item{terrace}{empirical probabilities for river terrace position,
+#' derived from the current SSURGO snapshot} \item{flats}{empirical
+#' probabilities for flat landscapes, derived from the current SSURGO snapshot}
+#' \item{pmkind}{empirical probabilities for parent material kind, derived from
+#' the current SSURGO snapshot} \item{pmorigin}{empirical probabilities for
+#' parent material origin, derived from the current SSURGO snapshot}
+#' \item{mlra}{empirical MLRA membership values, derived from the current
+#' SSURGO snapshot} \item{climate}{experimental climate summaries from PRISM
+#' stack} \item{metadata}{metadata associated with SoilWeb cached summaries} }
 #'
+#' When using \code{extended=TRUE}, there are a couple of scenarios in which
+#' series morphology contained in \code{SPC} do not fully match records in the
+#' associated series summaries (e.g. \code{competing}).
 #'
 #' \describe{
 #'
+#' \item{1. A query for soil series that exist entirely outside of CONUS (e.g.
+#' PALAU).}{ - Climate summaries are empty \code{data.frames} because these
+#' summaries are currently generated from PRISM. 
We are working on a solution.}
 #'
-#' When using `extended=TRUE`, there are a couple of scenarios in which series morphology contained in `SPC` do not fully match records in the associated series summaries (e.g. `competing`).
-#'
-#' \describe{
+#' \item{2. A query for data within CONUS, but OSD morphology missing due to
+#' parsing error (e.g. formatting, typos).}{ - Extended summaries are present
+#' but morphology missing from \code{SPC}. A warning is issued.}
 #'
-#' \item{1. A query for soil series that exist entirely outside of CONUS (e.g. PALAU).}{ - Climate summaries are empty \code{data.frames} because these summaries are currently generated from PRISM. We are working on a solution.}
-#'
-#' \item{2. A query for data within CONUS, but OSD morphology missing due to parsing error (e.g. formatting, typos).}{ - Extended summaries are present but morphology missing from `SPC`. A warning is issued.}
-#'
-#' \item{3. A query for multiple soil series, with one more more listed as "inactive" (e.g. BREADSPRINGS).}{ - Extended summaries are present but morphology missing from `SPC`. A warning is issued.}
-#'
-#' }
+#' \item{3. A query for multiple soil series, with one or more listed as
+#' "inactive" (e.g. BREADSPRINGS).}{ - Extended summaries are present but
+#' morphology missing from \code{SPC}. A warning is issued.}
 #'
-#' These last two cases are problematic for analysis that makes use of morphology and extended data, such as outlined in this tutorial on \href{https://ncss-tech.github.io/AQP/soilDB/competing-series.html}{competing soil series}.
-#'
-#'}
-#'
-#' @return a \code{SoilProfileCollection} object containing basic soil morphology and taxonomic information.
+#' }
 #'
-#' @references USDA-NRCS OSD search tools: \url{https://www.nrcs.usda.gov/wps/portal/nrcs/detailfull/soils/home/?cid=nrcs142p2_053587}
+#' These last two cases are problematic for analysis that makes use of
+#' morphology and extended data, such as outlined in this tutorial on
+#' [competing soil series](https://ncss-tech.github.io/AQP/soilDB/competing-series.html).
 #'
+#' @param soils a character vector of named soil series; case-insensitive
+#' @param colorState color state for horizon soil color visualization: "moist"
+#' or "dry"
+#' @param extended if \code{TRUE} additional soil series summary data are
+#' returned, see details
+#' @return a \code{SoilProfileCollection} object containing basic soil
+#' morphology and taxonomic information.
 #' @author D.E. Beaudette
 #' @seealso \link{OSDquery}, \link{siblings}
-#' @export
-#'
+#' @references USDA-NRCS OSD search tools:
+#' \url{https://www.nrcs.usda.gov/wps/portal/nrcs/detailfull/soils/home/?cid=nrcs142p2_053587}
+#' @keywords manip
 #' @examples
+#'
 #' \donttest{
 #' if(requireNamespace("curl") &
 #'     curl::has_internet()) {
@@ -139,7 +150,8 @@
 #' }
 #' }
 #' }
-#' @keywords manip
+#'
+#' @export fetchOSD
 fetchOSD <- function(soils, colorState='moist', extended=FALSE) {
 
   # sanity check
diff --git a/R/fetchPedonPC.R b/R/fetchPedonPC.R
index d86dc370..f84d7bfd 100644
--- a/R/fetchPedonPC.R
+++ b/R/fetchPedonPC.R
@@ -1,85 +1,109 @@
-# horizon checking may be too strict
-
-fetchPedonPC <- function(dsn) {
-
-  # not in parity with NASIS functions
-  warning("Loading data from PedonPC will return slightly different data structures than fetchNASIS().", call. 
= FALSE) - - # load data in pieces - site_data <- get_site_data_from_pedon_db(dsn) - hz_data <- get_hz_data_from_pedon_db(dsn) - color_data <- get_colors_from_pedon_db(dsn) - extended_data <- get_extended_data_from_pedon_db(dsn) - - # join pieces - # horizon + hz color: all horizons - h <- join(hz_data, color_data, by='phiid', type='left') - - # convert colors... in the presence of missing color data - h$soil_color <- NA - idx <- complete.cases(h$m_r) - h$soil_color[idx] <- with(h[idx, ], rgb(m_r, m_g, m_b)) # moist colors - - # replace horizons with hz + fragment summary - h <- join(h, extended_data$frag_summary, by='phiid', type='left') - - # fix some common problems - # replace missing lower boundaries - message('replacing missing lower horizon boundaries ...') - missing.lower.depth.idx <- which(!is.na(h$hzdept) & is.na(h$hzdepb)) - h$hzdepb[missing.lower.depth.idx] <- h$hzdept[missing.lower.depth.idx] + 1 - - # test for bad horizonation... flag, and remove - cat('finding horizonation errors ...\n') - h.test <- ddply(h, 'peiid', function(d) { - res <- aqp::hzDepthTests(top=d[['hzdept']], bottom=d[['hzdepb']]) - return(data.frame(hz_logic_pass=all(!res))) - }) - - # which are the good (valid) ones? - good.pedon.ids <- as.character(h.test$peiid[which(h.test$hz_logic_pass)]) - bad.pedon.ids <- as.character(h.test$pedon_id[which(!h.test$hz_logic_pass)]) - - # keep the good ones - h <- h[which(h$peiid %in% good.pedon.ids), ] - - # upgrade to SoilProfilecollection - depths(h) <- peiid ~ hzdept + hzdepb - - ## TODO: this is slow - # move pedon_id into @site, this will be used to join full table of site data - site(h) <- ~ pedon_id - - ## TODO: this will fail in the presence of duplicates - # add site data to object - site_data$pedon_id <- NULL # remove 'pedon_id' column from site_data - site(h) <- site_data # left-join via peiid - - # load diagnostic horizons into @diagnostic - diagnostic_hz(h) <- extended_data$diagnostic - - # add diagnostic boolean data into @site - site(h) <- extended_data$diagHzBoolean - - ### TODO: consider moving this into the extended data function ### - # load best-guess optimal records from taxhistory - # method is added to the new field called 'selection_method' - best.tax.data <- ddply(extended_data$taxhistory, 'peiid', .pickBestTaxHistory) - site(h) <- best.tax.data - - # join-in landform string - lf <- ddply(extended_data$geomorph, 'peiid', .formatLandformString, name.sep='|') - if(nrow(lf) > 0) - site(h) <- lf - - # set PedonPC/NASIS-specific horizon identifier - hzidname(h) <- 'phiid' - - # 7. save and mention bad pedons - assign('bad.pedon.ids', value=bad.pedon.ids, envir=soilDB.env) - if(length(bad.pedon.ids) > 0) - message("horizon errors detected, use `get('bad.pedon.ids', envir=soilDB.env)` for a list of pedon IDs") - - # done - return(h) -} +# horizon checking may be too strict + + + +#' Fetch commonly used site/horizon data from a PedonPC v.5 database. +#' +#' Fetch commonly used site/horizon data from a version 5.x PedonPC database, +#' return as a SoilProfileCollection object. +#' +#' This function currently works only on Windows. +#' +#' @aliases fetchPedonPC getHzErrorsPedonPC +#' @param dsn The path to a PedonPC version 5.x database +#' @return a SoilProfileCollection class object +#' @note This function attempts to do most of the boilerplate work when +#' extracting site/horizon data from a PedonPC or local NASIS database. 
Pedons
+#' that have errors in their horizonation are excluded from the returned
+#' object; however, their IDs are printed on the console. See
+#' \code{\link{getHzErrorsPedonPC}} for a simple approach to identifying pedons
+#' with problematic horizonation. Records from the 'taxhistory' table are
+#' selected based on 1) most recent record, or 2) record with the least amount
+#' of missing data.
+#' @author D. E. Beaudette and J. M. Skovlin
+#' @seealso \code{\link{get_hz_data_from_pedon_db}}
+#' @keywords manip
+#' @export fetchPedonPC
+fetchPedonPC <- function(dsn) {
+
+  # not in parity with NASIS functions
+  warning("Loading data from PedonPC will return slightly different data structures than fetchNASIS().", call. = FALSE)
+
+  # load data in pieces
+  site_data <- get_site_data_from_pedon_db(dsn)
+  hz_data <- get_hz_data_from_pedon_db(dsn)
+  color_data <- get_colors_from_pedon_db(dsn)
+  extended_data <- get_extended_data_from_pedon_db(dsn)
+
+  # join pieces
+  # horizon + hz color: all horizons
+  h <- join(hz_data, color_data, by='phiid', type='left')
+
+  # convert colors... in the presence of missing color data
+  h$soil_color <- NA
+  idx <- complete.cases(h$m_r)
+  h$soil_color[idx] <- with(h[idx, ], rgb(m_r, m_g, m_b)) # moist colors
+
+  # replace horizons with hz + fragment summary
+  h <- join(h, extended_data$frag_summary, by='phiid', type='left')
+
+  # fix some common problems
+  # replace missing lower boundaries
+  message('replacing missing lower horizon boundaries ...')
+  missing.lower.depth.idx <- which(!is.na(h$hzdept) & is.na(h$hzdepb))
+  h$hzdepb[missing.lower.depth.idx] <- h$hzdept[missing.lower.depth.idx] + 1
+
+  # test for bad horizonation... flag, and remove
+  cat('finding horizonation errors ...\n')
+  h.test <- ddply(h, 'peiid', function(d) {
+    res <- aqp::hzDepthTests(top=d[['hzdept']], bottom=d[['hzdepb']])
+    return(data.frame(hz_logic_pass=all(!res)))
+  })
+
+  # which are the good (valid) ones?
+  good.pedon.ids <- as.character(h.test$peiid[which(h.test$hz_logic_pass)])
+  bad.pedon.ids <- as.character(h.test$peiid[which(!h.test$hz_logic_pass)])
+
+  # keep the good ones
+  h <- h[which(h$peiid %in% good.pedon.ids), ]
+
+  # upgrade to SoilProfileCollection
+  depths(h) <- peiid ~ hzdept + hzdepb
+
+  ## TODO: this is slow
+  # move pedon_id into @site, this will be used to join full table of site data
+  site(h) <- ~ pedon_id
+
+  ## TODO: this will fail in the presence of duplicates
+  # add site data to object
+  site_data$pedon_id <- NULL # remove 'pedon_id' column from site_data
+  site(h) <- site_data # left-join via peiid
+
+  # load diagnostic horizons into @diagnostic
+  diagnostic_hz(h) <- extended_data$diagnostic
+
+  # add diagnostic boolean data into @site
+  site(h) <- extended_data$diagHzBoolean
+
+  ### TODO: consider moving this into the extended data function ###
+  # load best-guess optimal records from taxhistory
+  # method is added to the new field called 'selection_method'
+  best.tax.data <- ddply(extended_data$taxhistory, 'peiid', .pickBestTaxHistory)
+  site(h) <- best.tax.data
+
+  # join-in landform string
+  lf <- ddply(extended_data$geomorph, 'peiid', .formatLandformString, name.sep='|')
+  if(nrow(lf) > 0)
+    site(h) <- lf
+
+  # set PedonPC/NASIS-specific horizon identifier
+  hzidname(h) <- 'phiid'
+
+  # 7. 
save and mention bad pedons + assign('bad.pedon.ids', value=bad.pedon.ids, envir=soilDB.env) + if(length(bad.pedon.ids) > 0) + message("horizon errors detected, use `get('bad.pedon.ids', envir=soilDB.env)` for a list of pedon IDs") + + # done + return(h) +} diff --git a/R/fetchRaCA.R b/R/fetchRaCA.R index 1bc6191c..8a74ba43 100644 --- a/R/fetchRaCA.R +++ b/R/fetchRaCA.R @@ -1,33 +1,51 @@ ## TODO: need to implement some kind of constraints on geographic queries ## NOTE: each VNIR spectra record is 6.6kb of data (compressed, gzip, level 5) + + #' Get Rapid Carbon Assessment (RaCA) data -#' @description Get Rapid Carbon Assessment (RaCA) data via state, geographic bounding-box, RaCA site ID, or series query from the SoilWeb API. +#' +#' Get Rapid Carbon Assessment (RaCA) data via state, geographic bounding-box, +#' RaCA site ID, or series query from the SoilWeb API. +#' +#' The VNIR spectra associated with RaCA data are quite large (each +#' gzip-compressed VNIR spectra record is about 6.6kb), so requests for these +#' data are disabled by default. Note that VNIR spectra can only be queried by +#' soil series or geographic BBOX. +#' #' @param series a soil series name; case-insensitive -#' @param bbox a bounding box in WGS84 geographic coordinates e.g. \code{c(-120, 37, -122, 38)}, constrained to a 5-degree block +#' @param bbox a bounding box in WGS84 geographic coordinates e.g. +#' \code{c(-120, 37, -122, 38)}, constrained to a 5-degree block #' @param state a two-letter US state abbreviation; case-insensitive #' @param rcasiteid a RaCA site id (e.g. 'C1609C01') -#' @param get.vnir logical, should associated VNIR spectra be downloaded? (see details) -#' @details The VNIR spectra associated with RaCA data are quite large (each gzip-compressed VNIR spectra record is about 6.6kb), so requests for these data are disabled by default. Note that VNIR spectra can only be queried by soil series or geographic BBOX. -#' @return { -#' \describe{ -#' \item{\code{pedons}:}{a \code{SoilProfileCollection} object containing site/pedon/horizon data} -#' \item{\code{trees}:}{a \code{data.frame} object containing tree DBH and height} -#' \item{\code{veg}:}{a \code{data.frame} object containing plant species} -#' \item{\code{stock}:}{a \code{data.frame} object containing carbon quantities (stocks) at standardized depths} -#' \item{\code{sample}:}{a \code{data.frame} object containing sample-level bulk density and soil organic carbon values} -#' \item{\code{spectra}:}{a numeric \code{matrix} containing VNIR reflectance spectra from 350--2500 nm} -#' } -#' } +#' @param get.vnir logical, should associated VNIR spectra be downloaded? 
(see
+#' details)
+#' @return
+#'
+#' \describe{
+#' \item{pedons}{a \code{SoilProfileCollection} object containing
+#' site/pedon/horizon data}
+#' \item{trees}{a \code{data.frame} object containing tree DBH and height}
+#' \item{veg}{a \code{data.frame} object containing plant species}
+#' \item{stock}{a \code{data.frame} object containing carbon quantities
+#' (stocks) at standardized depths}
+#' \item{sample}{a \code{data.frame} object containing sample-level bulk
+#' density and soil organic carbon values}
+#' \item{spectra}{a numeric \code{matrix} containing VNIR reflectance
+#' spectra from 350--2500 nm}
+#' }
+#' @author D.E. Beaudette, USDA-NRCS staff
-#' @references {
-#' \url{https://www.nrcs.usda.gov/wps/portal/nrcs/detail/soils/survey/?cid=nrcs142p2_054164}
-#' \href{https://r-forge.r-project.org/scm/viewvc.php/*checkout*/docs/soilDB/RaCA-demo.html?root=aqp}{fetchRaCA() Tutorial}
-#' }
 #' @seealso \code{\link{fetchOSD}}
-#' @export
+#' @references
+#'
+#' \url{https://www.nrcs.usda.gov/wps/portal/nrcs/detail/soils/survey/?cid=nrcs142p2_054164}
+#'
+#' [fetchRaCA() Tutorial](https://r-forge.r-project.org/scm/viewvc.php/*checkout*/docs/soilDB/RaCA-demo.html?root=aqp)
 #' @keywords utilities
 #' @examples
+#'
 #' \donttest{
 #' if(requireNamespace("curl") &
 #'     curl::has_internet()) {
@@ -55,6 +73,8 @@
 #' }
 #' }
 #' }
+#'
+#' @export fetchRaCA
 fetchRaCA <- function(series=NULL, bbox=NULL, state=NULL, rcasiteid=NULL, get.vnir=FALSE) {
 
   # important: change the default behavior of data.frame
diff --git a/R/fetchSCAN.R b/R/fetchSCAN.R
index 3cde8119..f296c0df 100644
--- a/R/fetchSCAN.R
+++ b/R/fetchSCAN.R
@@ -22,8 +22,6 @@
 ### "STO.I-1:-2", "STO.I-1:-4", "STO.I-1:-8", "STO.I-1:-20", "STO.I-1:-40",
 ### "STO.I-2:-2", "STO.I-2:-4", "STO.I-2:-8", "STO.I-2:-20", "STO.I-2:-40"
-
-
 ##
 ## ideas:
 ##  https://github.com/gunnarleffler/getSnotel
@@ -36,7 +34,6 @@
 ## https://wcc.sc.egov.usda.gov/nwcc/sitenotes?sitenum=462
 ##
-
 ## TODO: this crashes on 32bit R / libraries
 # helper function for getting a single table of SCAN metadata
 # site.code: a single SCAN site code
@@ -80,9 +77,6 @@
   return(m)
 }
-
-
-
 # iterate over a vector of SCAN site codes, returning basic metadata
 # site.code: vector of SCAN site codes
 SCAN_sensor_metadata <- function(site.code) {
@@ -101,8 +95,6 @@ SCAN_sensor_metadata <- function(site.code) {
   return(res)
 }
-
-
 ## https://github.com/ncss-tech/soilDB/issues/61
 # site.code: vector of SCAN site codes
 SCAN_site_metadata <- function(site.code) {
@@ -119,12 +111,47 @@ SCAN_site_metadata <- function(site.code) {
   return(res)
 }
-
-
 # site.code: vector of site codes
 # year: vector of years
 # report: single report type
 # req: for backwards compatibility
+
+
+#' Fetch SCAN Data
+#'
+#' Query soil/climate data from USDA-NRCS SCAN Stations (experimental)
+#'
+#' These functions require the `httr` and `rvest` libraries. 
+#' +#' @aliases fetchSCAN SCAN_sensor_metadata SCAN_site_metadata +#' @param site.code a vector of site codes +#' @param year a vector of years +#' @param report report name, single value only +#' @param req list of SCAN request parameters, for backwards-compatibility only +#' @return a \code{data.frame} object +#' @note \code{SCAN_sensor_metadata()} is known to crash on 32bit R / libraries (Windows). +#' @author D.E. Beaudette +#' @references https://www.wcc.nrcs.usda.gov/index.html +#' @keywords manip +#' @examples +#' +#' \donttest{ +#' if(requireNamespace("curl") & +#' curl::has_internet()) { +#' +#' # get data: new interface +#' x <- fetchSCAN(site.code=c(356, 2072), year=c(2015, 2016)) +#' str(x) +#' +#' # get sensor metadata +#' m <- SCAN_sensor_metadata(site.code=c(356, 2072)) +#' +#' # get site metadata +#' m <- SCAN_site_metadata(site.code=c(356, 2072)) +#' } +#' } +#' +#' @export fetchSCAN fetchSCAN <- function(site.code, year, report='SCAN', req=NULL) { ## backwards compatibility: diff --git a/R/fetchSDA_spatial.R b/R/fetchSDA_spatial.R index 307796ca..18ef3979 100644 --- a/R/fetchSDA_spatial.R +++ b/R/fetchSDA_spatial.R @@ -1,57 +1,83 @@ -#' @title Query Soil Data Access and Return Spatial Data -#' -#' @description This is a high-level "fetch" method to facilitate spatial queries to Soil Data Access (SDA) based on mapunit key (\code{mukey}) and national mapunit symbol (\code{nationalmusym}) for \code{mupolygon} (SSURGO) or \code{gsmmupolygon} (STATSGO) geometry OR legend key (\code{lkey}) and area symbols (\code{areasymbol}) for \code{sapolygon} (Soil Survey Area; SSA) geometry). -#' -#' A Soil Data Access spatial query is made returning geometry and key identifying information about the mapunit or area of interest. Additional columns from the mapunit or legend table can be included using \code{add.fields} argument. -#' -#' This function automatically "chunks" the input vector (using \code{soilDB::makeChunks}) of mapunit identifiers to minimize the likelihood of exceeding the SDA data request size. The number of chunks varies with the \code{chunk.size} setting and the length of your input vector. If you are working with many mapunits and/or large extents, you may need to decrease this number in order to have more chunks. -#' -#' Querying regions with complex mapping may require smaller \code{chunk.size}. Numerically adjacent IDs in the input vector may share common qualities (say, all from same soil survey area or region) which could cause specific chunks to perform "poorly" (slow or error) no matter what the chunk size is. Shuffling the order of the inputs using \code{sample} may help to eliminate problems related to this, depending on how you obtained your set of MUKEY/nationalmusym to query. One could feasibly use \code{muacres} as a heuristic to adjust for total acreage within chunks. -#' -#' @param x A vector of MUKEYs / national mapunit symbols (for mupolygon geometry); OR legend keys (LKEY) / area symbols (for sapolygon geometry) -#' -#' @param by.col Column name containing mapunit identifier \code{"mukey"}, \code{"nmusym"}, or \code{"areasymbol"} for \code{geom.src} \code{sapolygon}; default is inferred from \code{is.numeric(x) == TRUE} for \code{mukey} or \code{lkey} and (\code{nationalmusym} or \code{areasymbol} otherwise. -#' -#' @param method geometry result type: \code{"feature"} returns polygons, \code{"bbox"} returns the bounding box of each polygon, and \code{"point"} returns a single point within each polygon. 
-#'
+#' Query Soil Data Access and Return Spatial Data
+#'
+#' This is a high-level "fetch" method to facilitate spatial queries to Soil
+#' Data Access (SDA) based on mapunit key (\code{mukey}) and national mapunit
+#' symbol (\code{nationalmusym}) for \code{mupolygon} (SSURGO) or
+#' \code{gsmmupolygon} (STATSGO) geometry OR legend key (\code{lkey}) and area
+#' symbols (\code{areasymbol}) for \code{sapolygon} (Soil Survey Area; SSA)
+#' geometry.
+#'
+#' A Soil Data Access spatial query is made returning geometry and key
+#' identifying information about the mapunit or area of interest. Additional
+#' columns from the mapunit or legend table can be included using the
+#' \code{add.fields} argument.
+#'
+#' This function automatically "chunks" the input vector (using
+#' \code{soilDB::makeChunks}) of mapunit identifiers to minimize the likelihood
+#' of exceeding the SDA data request size. The number of chunks varies with the
+#' \code{chunk.size} setting and the length of your input vector. If you are
+#' working with many mapunits and/or large extents, you may need to decrease
+#' this number in order to have more chunks.
+#'
+#' Querying regions with complex mapping may require smaller \code{chunk.size}.
+#' Numerically adjacent IDs in the input vector may share common qualities
+#' (say, all from the same soil survey area or region) which could cause specific
+#' chunks to perform "poorly" (slow or error) no matter what the chunk size is.
+#' Shuffling the order of the inputs using \code{sample} may help to eliminate
+#' problems related to this, depending on how you obtained your set of
+#' MUKEY/nationalmusym to query. One could feasibly use \code{muacres} as a
+#' heuristic to adjust for total acreage within chunks.
+#'
+#' Note that STATSGO data are fetched using \code{CLIPAREASYMBOL = 'US'} to
+#' avoid duplicating state and national subsets of the geometry.
+#'
+#' @param x A vector of MUKEYs / national mapunit symbols (for mupolygon
+#' geometry); OR legend keys (LKEY) / area symbols (for sapolygon geometry)
+#' @param by.col Column name containing mapunit identifier \code{"mukey"},
+#' \code{"nmusym"}, or \code{"areasymbol"} for \code{geom.src}
+#' \code{sapolygon}; default is inferred from \code{is.numeric(x) == TRUE} for
+#' \code{mukey} or \code{lkey} and \code{nationalmusym} or \code{areasymbol}
+#' otherwise.
+#' @param method geometry result type: \code{"feature"} returns polygons,
+#' \code{"bbox"} returns the bounding box of each polygon, and \code{"point"}
+#' returns a single point within each polygon.
 #' @param geom.src Either \code{mupolygon} or \code{sapolygon}
-#'
-#' @param db Default: SSURGO. When \code{geom.src} is \code{mupolygon}, use STATSGO polygon geometry instead of SSURGO by setting \code{db = "STATSGO"}
-#'
-#' @param add.fields Column names from \code{mapunit} or \code{legend} table to add to result. Must specify parent table name as the prefix \code{mapunit} before column name e.g. \code{mapunit.muname}.
-#'
-#' @param chunk.size How many queries should spatial request be divided into? Necessary for large results. Default: 10
-#'
+#' @param db Default: SSURGO. When \code{geom.src} is \code{mupolygon}, use
+#' STATSGO polygon geometry instead of SSURGO by setting \code{db = "STATSGO"}
+#' @param add.fields Column names from \code{mapunit} or \code{legend} table to
+#' add to result. Must specify parent table name as the prefix \code{mapunit}
+#' before column name e.g. \code{mapunit.muname}. 
+#' @param chunk.size How many queries should spatial request be divided into? +#' Necessary for large results. Default: 10 #' @param verbose Print messages? -#' -#' @return A Spatial*DataFrame corresponding to SDA spatial data for all symbols requested. Default result contains geometry with attribute table containing unique feature ID, symbol and area symbol plus additional fields in result specified with `add.fields`. -#' -#' @details Note that STATSGO data are fetched using \code{CLIPAREASYMBOL = 'US'} to avoid duplicating state and national subsets of the geometry. -#' +#' @return A Spatial*DataFrame corresponding to SDA spatial data for all +#' symbols requested. Default result contains geometry with attribute table +#' containing unique feature ID, symbol and area symbol plus additional fields +#' in result specified with \code{add.fields}. #' @author Andrew G. Brown -#' #' @examples +#' #' \donttest{ #' if(requireNamespace("curl") & #' curl::has_internet()) { -#' +#' #' # get spatial data for a single mukey #' single.mukey <- fetchSDA_spatial(x = "2924882") -#' +#' #' # demonstrate fetching full extent (multi-mukey) of national musym #' full.extent.nmusym <- fetchSDA_spatial(x = "2x8l5", by = "nmusym") -#' +#' #' # compare extent of nmusym to single mukey within it #' if(require(sp)) { #' plot(full.extent.nmusym, col = "RED",border=0) #' plot(single.mukey, add = TRUE, col = "BLUE", border=0) #' } -#' +#' #' # demo adding a field (`muname`) to attribute table of result #' head(fetchSDA_spatial(x = "2x8l5", by="nmusym", add.fields="muname")) #' } #' } -#' @rdname fetchSDA_spatial +#' #' @export fetchSDA_spatial fetchSDA_spatial <- function(x, by.col = "mukey", diff --git a/R/fetchSoilGrids.R b/R/fetchSoilGrids.R index 3d609422..295f105c 100644 --- a/R/fetchSoilGrids.R +++ b/R/fetchSoilGrids.R @@ -1,19 +1,24 @@ #' Fetch SoilGrids 250m properties information from point locations -#' -#' This function obtains SoilGrids properties information (250m raster resolution) given a \code{data.frame} containing site IDs, latitudes and longitudes. #' -#' The depth intervals returned are: \code{"0-5cm", "5-15cm", "15-30cm", "30-60cm", "60-100cm", "100-200cm"} and the properties returned are \code{"bdod", "cec", "cfvo", "clay", "nitrogen", "phh2o", "sand", "silt", "soc"} -- each with 5th, 50th, 95th, mean and uncertainty values. Point data requests are made through \code{properties/query} endpoint of the SoilGrids v2.0 REST API: https://rest.soilgrids.org/soilgrids/v2.0/docs -#' -#' @param locations A \code{data.frame} containing 3 columns referring to site ID, latitude and longitude. +#' This function obtains SoilGrids properties information (250m raster +#' resolution) given a \code{data.frame} containing site IDs, latitudes and +#' longitudes. #' -#' @param loc.names Optional: Column names referring to site ID, latitude and longitude. Default: \code{c("id","lat","lon")} -#' -#' @return A SoilProfileCollection -#' @export fetchSoilGrids +#' The depth intervals returned are: \code{"0-5cm", "5-15cm", "15-30cm", +#' "30-60cm", "60-100cm", "100-200cm"} and the properties returned are +#' \code{"bdod", "cec", "cfvo", "clay", "nitrogen", "phh2o", "sand", "silt", +#' "soc"} -- each with 5th, 50th, 95th, mean and uncertainty values. Point data +#' requests are made through \code{properties/query} endpoint of the SoilGrids +#' v2.0 REST API: https://rest.soilgrids.org/soilgrids/v2.0/docs #' +#' @param locations A \code{data.frame} containing 3 columns referring to site +#' ID, latitude and longitude. 
+#' @param loc.names Optional: Column names referring to site ID, latitude and +#' longitude. Default: \code{c("id","lat","lon")} +#' @return A SoilProfileCollection #' @author Andrew G. Brown -#' #' @examples +#' #' \donttest{ #' if(requireNamespace("curl") & #' curl::has_internet()) { @@ -24,12 +29,14 @@ #' lat = c(37.9, 38.1), #' lon = c(-120.3, -121.5), #' stringsAsFactors = FALSE) -#' +#' #' x <- fetchSoilGrids(your.points) #' #' plotSPC(x, name = NA, color = "socQ50") #' } #' } +#' +#' @export fetchSoilGrids fetchSoilGrids <- function(locations, loc.names = c("id","lat","lon")) { if (is.null(loc.names)) diff --git a/R/filter_KSSL.R b/R/filter_KSSL.R index 31d0dcdb..b517dc21 100644 --- a/R/filter_KSSL.R +++ b/R/filter_KSSL.R @@ -1,15 +1,24 @@ # filter_geochem # -#' @title Filter KSSL Geochemical Table -#' @description A function to subset KSSL "geochem" / elemental analysis result table to obtain rows/columns based on: column name, preparation code, major / trace element method. + + +#' Filter KSSL Geochemical Table +#' +#' A function to subset the KSSL "geochem" / elemental analysis result table to +#' obtain rows/columns based on: column name, preparation code, major / trace +#' element method. +#' +#' #' @param geochem geochemical data, as returned by fetchKSSL #' @param columns Column name(s) to include in result #' @param prep_code Character vector of prep code(s) to include in result. -#' @param major_element_method Character vector of major element method(s) to include in result. -#' @param trace_element_method Character vector of trace element method(s) to include in result. -#' @return A data.frame, subsetted according to the constraints specified in arguments. +#' @param major_element_method Character vector of major element method(s) to +#' include in result. +#' @param trace_element_method Character vector of trace element method(s) to +#' include in result. +#' @return A data.frame, subsetted according to the constraints specified in +#' arguments. #' @author Andrew G. Brown. 
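+#' @examples
+#'
+#' # hypothetical sketch (not run): the series name, prep and method codes below
+#' # are placeholders; inspect your own geochem table for valid values
+#' # x <- fetchKSSL(series = "auburn", returnGeochemicalData = TRUE)
+#' # g <- filter_geochem(x$geochem, prep_code = "S",
+#' #                     major_element_method = "4H1b")
+#' # table(g$prep_code)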
-#' @rdname filter_geochem #' @export filter_geochem filter_geochem <- function(geochem, columns=NULL, diff --git a/R/getHzErrorsNASIS.R b/R/getHzErrorsNASIS.R index 5bd0fa5b..96b76579 100644 --- a/R/getHzErrorsNASIS.R +++ b/R/getHzErrorsNASIS.R @@ -1,11 +1,17 @@ #' Check pedon horizon table for logic errors -#' -#' @param strict how strict should horizon boundaries be checked for consistency: TRUE=more | FALSE=less -#' @param SS fetch data from the currently loaded selected set in NASIS or from the entire local database (default: TRUE) -#' @param static_path Optional: path to local SQLite database containing NASIS table structure; default: NULL -#' @return A data.frame containing problematic records with columns: 'peiid','pedon_id','hzdept','hzdepb','hzname' -#' @export -#' +#' +#' Check pedon horizon table for logic errors +#' +#' +#' @param strict how strict should horizon boundaries be checked for +#' consistency: TRUE=more | FALSE=less +#' @param SS fetch data from the currently loaded selected set in NASIS or from +#' the entire local database (default: TRUE) +#' @param static_path Optional: path to local SQLite database containing NASIS +#' table structure; default: NULL +#' @return A data.frame containing problematic records with columns: +#' 'peiid','pedon_id','hzdept','hzdepb','hzname' +#' @export getHzErrorsNASIS getHzErrorsNASIS <- function(strict = TRUE, SS = TRUE, static_path = NULL) { if (!local_NASIS_defined(static_path)) diff --git a/R/get_colors_from_NASIS_db.R b/R/get_colors_from_NASIS_db.R index b2dea5a3..48d3f13a 100644 --- a/R/get_colors_from_NASIS_db.R +++ b/R/get_colors_from_NASIS_db.R @@ -1,7 +1,26 @@ ## 2013-01-08: now much faster since we only mix/clean data with > 1 color / horizon # results can be referenced via phiid (horizon-level ID) -get_colors_from_NASIS_db <- function(SS=TRUE, static_path = NULL) { + + +#' Extract Soil Color Data from a local NASIS Database +#' +#' Get, format, mix, and return color data from a NASIS database. +#' +#' This function currently works only on Windows. +#' +#' @param SS fetch data from Selected Set in NASIS or from the entire local +#' database (default: `TRUE`) +#' @param static_path Optional: path to local SQLite database containing NASIS +#' table structure; default: `NULL` +#' @return A data.frame with the results. +#' @author Jay M. Skovlin and Dylan E. Beaudette +#' @seealso \code{\link{simplifyColorData}}, +#' \code{\link{get_hz_data_from_NASIS_db}}, +#' \code{\link{get_site_data_from_NASIS_db}} +#' @keywords manip +#' @export get_colors_from_NASIS_db +get_colors_from_NASIS_db <- function(SS = TRUE, static_path = NULL) { # unique-ness enforced via peiid (pedon-level) and phiid (horizon-level) q <- "SELECT peiid, phiid, colormoistst, colorpct as pct, colorhue, colorvalue, colorchroma diff --git a/R/get_colors_from_pedon_db.R b/R/get_colors_from_pedon_db.R index a00ed84d..0ac751f3 100644 --- a/R/get_colors_from_pedon_db.R +++ b/R/get_colors_from_pedon_db.R @@ -1,5 +1,20 @@ # 2013-01-08: now much faster since we only mix/clean data with > 1 color / horizon + + +#' Extract Soil Color Data from a PedonPC Database +#' +#' Get, format, mix, and return color data from a PedonPC database. +#' +#' This function currently works only on Windows. +#' +#' @param dsn The path to a 'pedon.mdb' database. +#' @return A data.frame with the results. +#' @author Dylan E. Beaudette and Jay M. 
Skovlin +#' @seealso \code{\link{get_hz_data_from_pedon_db}}, +#' \code{\link{get_site_data_from_pedon_db}} +#' @keywords manip +#' @export get_colors_from_pedon_db get_colors_from_pedon_db <- function(dsn) { # must have RODBC installed if(!requireNamespace('RODBC')) diff --git a/R/get_component_data_from_NASIS_db.R b/R/get_component_data_from_NASIS_db.R index 2120c02b..91569d70 100644 --- a/R/get_component_data_from_NASIS_db.R +++ b/R/get_component_data_from_NASIS_db.R @@ -96,6 +96,43 @@ get_mutext_from_NASIS_db <- function(SS = TRUE, fixLineEndings = TRUE, static_pa ## just the component records, nothing above or below + + +#' Extract component data from a local NASIS Database +#' +#' This function currently works only on Windows. +#' +#' @aliases get_component_data_from_NASIS_db get_component_restrictions_from_NASIS_db +#' +#' @param SS fetch data from the currently loaded selected set in NASIS or from +#' the entire local database (default: `TRUE`) +#' +#' @param stringsAsFactors logical: should character vectors be converted to +#' factors? This argument is passed to the `uncode()` function. It does not +#' convert those vectors that have been set outside of `uncode()` (i.e. hard coded). +#' The 'factory-fresh' default is TRUE, but this can be changed by setting +#' options(`stringsAsFactors = FALSE`) +#' +#' @param static_path Optional: path to local SQLite database containing NASIS +#' table structure; default: `NULL` +#' +#' @return A list with the results. +#' @author Dylan E. Beaudette, Stephen Roecker, and Jay M. Skovlin +#' @seealso \code{\link{fetchNASIS}} +#' @keywords manip +#' @examples +#' +#' \donttest{ +#' if(local_NASIS_defined()) { +#' # query component data +#' fc <- try(get_component_data_from_NASIS_db()) +#' +#' # show structure of component data returned +#' str(fc) +#' } +#' } +#' +#' @export get_component_data_from_NASIS_db get_component_data_from_NASIS_db <- function(SS=TRUE, stringsAsFactors = default.stringsAsFactors(), static_path= NULL) { # must have RODBC installed if(!requireNamespace('RODBC')) @@ -582,6 +619,42 @@ get_component_otherveg_data_from_NASIS_db <- function(SS=TRUE, static_path = NUL return(d) } + + +#' Extract component month data from a local NASIS Database +#' +#' Extract component month data from a local NASIS Database. +#' +#' This function currently works only on Windows. +#' +#' @param SS get data from the currently loaded Selected Set in NASIS or from +#' the entire local database (default: TRUE) +#' @param fill should missing "month" rows in the comonth table be filled with +#' NA? (default: FALSE) +#' @param static_path Optional: path to local SQLite database containing NASIS +#' table structure; default: `NULL` +#' @param stringsAsFactors logical: should character vectors be converted to +#' factors? This argument is passed to the uncode() function. It does not +#' convert those vectors that have been set outside of uncode() (i.e. hard coded). +#' The 'factory-fresh' default is TRUE, but this can be changed by setting +#' options(stringsAsFactors = FALSE) +#' @return A list with the results. 
+#' @author Stephen Roecker +#' @seealso \code{\link{fetchNASIS}} +#' @keywords manip +#' @examples +#' +#' \donttest{ +#' if(local_NASIS_defined()) { +#' # query component month data +#' cm <- try(get_comonth_from_NASIS_db()) +#' +#' # show structure of component month data +#' str(cm) +#' } +#' } +#' +#' @export get_comonth_from_NASIS_db get_comonth_from_NASIS_db <- function(SS = TRUE, fill = FALSE, stringsAsFactors = default.stringsAsFactors(), static_path = NULL) { # must have RODBC installed if(!requireNamespace('RODBC')) diff --git a/R/get_component_from_GDB.R b/R/get_component_from_GDB.R index f2198e25..c40522d5 100644 --- a/R/get_component_from_GDB.R +++ b/R/get_component_from_GDB.R @@ -1,4 +1,3 @@ - get_component_from_GDB <- function(dsn = "gNATSGO_CONUS.gdb", WHERE = NULL, childs = FALSE, droplevels = TRUE, stringsAsFactors = TRUE) { # check @@ -396,6 +395,65 @@ get_mapunit_from_GDB <- function(dsn = "gNATSGO_CONUS.gdb", WHERE = NULL, drople } + + +#' Load and Flatten Data from SSURGO file geodatabases +#' +#' Functions to load and flatten commonly used tables from SSURGO file +#' geodatabases, and create soil profile collection objects (SPC). +#' +#' These functions return data from SSURGO file geodatabases with the use of a +#' simple text string that is formatted as an SQL WHERE clause (e.g. \code{WHERE = +#' "areasymbol = 'IN001'"}). Any columns within the target table can be +#' specified (except for fetchGDB() currently, which only targets the legend +#' with the WHERE clause). +#' +#' @aliases fetchGDB get_legend_from_GDB get_mapunit_from_GDB +#' get_component_from_GDB +#' @param dsn data source name (interpretation varies by driver - for some +#' drivers, dsn is a file name, but may also be a folder, or contain the name +#' and access credentials of a database); in case of GeoJSON, dsn may be the +#' character string holding the geojson data. It can also be an open database +#' connection. +#' @param WHERE text string formatted as an SQL WHERE clause (default: NULL) +#' @param childs logical; if FALSE parent material and geomorphic child tables +#' are not flattened and appended +#' @param droplevels logical: indicating whether to drop unused levels in +#' classifying factors. This is useful when a class has a large number of unused +#' levels, which can waste space in tables and figures. +#' @param stringsAsFactors logical: should character vectors be converted to +#' factors? This argument is passed to the uncode() function. It does not +#' convert those vectors that have been set outside of uncode() (i.e. hard coded). +#' The 'factory-fresh' default is TRUE, but this can be changed by setting +#' options(stringsAsFactors = FALSE) +#' @return A \code{data.frame} or \code{SoilProfileCollection} object. 
+#' @author Stephen Roecker +#' @keywords manip +#' @examples +#' +#' \donttest{ +#' +#' ## replace `dsn` with path to your own geodatabase (SSURGO OR gNATSGO) +#' ## +#' ## +#' ## download CONUS gNATSGO from here: +#' ## https://www.nrcs.usda.gov/wps/portal/nrcs/detail/soils/survey/geo/?cid=nrcseprd1464625 +#' ## +#' ## +#' # dsn <- "D:/geodata/soils/gNATSGO_CONUS.gdb" +#' +#' # le <- get_legend_from_GDB(dsn = dsn, WHERE = "areasymbol LIKE '%'") +#' +#' # mu <- get_mapunit_from_GDB(dsn = dsn, WHERE = "muname LIKE 'Miami%'") +#' +#' # co <- get_component_from_GDB(dsn, WHERE = "compname = 'Miami' +#' # AND majcompflag = 'Yes'", childs = FALSE) +#' +#' # f_in_GDB <- fetchGDB(WHERE = "areasymbol LIKE 'IN%'") +#' +#' } +#' +#' @export fetchGDB fetchGDB <- function(dsn = "gNATSGO_CONUS.gdb", WHERE = NULL, childs = TRUE, diff --git a/R/get_component_from_LIMS.R b/R/get_component_from_LIMS.R index 7caa1bdc..e69de29b 100644 --- a/R/get_component_from_LIMS.R +++ b/R/get_component_from_LIMS.R @@ -1,320 +0,0 @@ -get_component_from_NASISWebReport <- function(projectname, stringsAsFactors = default.stringsAsFactors()) { - - url <- "https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_component_from_NASISWebReport" - - d.component <- lapply(projectname, function(x) { - message("getting project '", x, "' from NasisReportsWebSite \n", sep = "") - args = list(p_projectname = x) - d = tryCatch(parseWebReport(url, args), - error = function(e) { - message(e) - return(NULL) - }) - }) - - d.component <- do.call("rbind", d.component) - - if(is.null(d.component)) - return(NULL) - - # set factor levels according to metadata domains - d.component <- uncode(d.component, db = "LIMS", stringsAsFactors = stringsAsFactors) - - # prep - d.component <- .cogmd_prep(d.component, db = "LIMS") - - - # return data.frame - return(d.component) - -} - - -get_chorizon_from_NASISWebReport <- function(projectname, fill = FALSE, stringsAsFactors = default.stringsAsFactors()) { - - url <- "https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_chorizon_from_NASISWebReport" - - d.chorizon <- lapply(projectname, function(x) { - args = list(p_projectname = x) - d = parseWebReport(url, args) - }) - d.chorizon <- do.call("rbind", d.chorizon) - - ## TODO: might be nice to abstract this into a new function - # hacks to make R CMD check --as-cran happy: - metadata <- NULL - # load local copy of metadata - load(system.file("data/metadata.rda", package="soilDB")[1]) - - # transform variables and metadata - if (!all(is.na(d.chorizon$chiid))) { - d.chorizon <- within(d.chorizon, { - texture = tolower(texture) - if (stringsAsFactors == TRUE) { - texcl = factor(texcl, - levels = metadata[metadata$ColumnPhysicalName == "texcl", "ChoiceValue"], - labels = metadata[metadata$ColumnPhysicalName == "texcl", "ChoiceName"] - ) - } - }) - } - - # fill - if (fill == FALSE) { - d.chorizon <- d.chorizon[!is.na(d.chorizon$chiid), ] - } - - # return data.frame - return(d.chorizon) - -} - - - -get_legend_from_NASISWebReport <- function(mlraoffice, areasymbol, droplevels = TRUE, stringsAsFactors = default.stringsAsFactors()) { - - url <- "https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_legend_from_NASISWebReport" - - args <- list(p_mlraoffice = mlraoffice, p_areasymbol = areasymbol) - - d.legend <- parseWebReport(url, args) - - - # set factor levels according to metadata domains - # data is coming back uncoded from LIMS so db is set to "SDA" - d.legend <- uncode(d.legend, 
- db = "SDA", - droplevels = droplevels, - stringsAsFactors = stringsAsFactors - ) - - # date - d.legend$cordate <- as.Date(d.legend$cordate) - - # return data.frame - return(d.legend) - -} - - - -get_lmuaoverlap_from_NASISWebReport <- function(areasymbol, droplevels = TRUE, stringsAsFactors = default.stringsAsFactors()) { - url <- "https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_lmuaoverlap_from_NASISWebReport" - - d <- lapply(areasymbol, function(x) { - message("getting legend for '", x, "' from NasisReportsWebSite \n", sep = "") - args = list(p_areasymbol = x) - d = parseWebReport(url, args) - }) - d <- do.call("rbind", d) - - - # set factor levels according to metadata domains - # data is coming back uncoded from LIMS so db is set to "SDA" - d <- uncode(d, - db = "SDA", - droplevels = droplevels, - stringsAsFactors = stringsAsFactors - ) - - # return data.frame - return(d) - -} - - - -get_mapunit_from_NASISWebReport <- function(areasymbol, droplevels = TRUE, stringsAsFactors = default.stringsAsFactors()) { - url <- "https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_mapunit_from_NASISWebReport" - - d.mapunit <- lapply(areasymbol, function(x) { - message("getting map units for '", x, "' from NasisReportsWebSite \n", sep = "") - args = list(p_areasymbol = x) - d = parseWebReport(url, args) - }) - d.mapunit <- do.call("rbind", d.mapunit) - - d.mapunit$musym = as.character(d.mapunit$musym) - - # set factor levels according to metadata domains - # data is coming back uncoded from LIMS so db is set to "SDA" - d.mapunit <- uncode(d.mapunit, - db = "SDA", - droplevels = droplevels, - stringsAsFactors = stringsAsFactors - ) - - # return data.frame - return(d.mapunit) - -} - - -get_projectmapunit_from_NASISWebReport <- function(projectname, stringsAsFactors = default.stringsAsFactors()) { - - url <-"https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_projectmapunit_from_NASISWebReport" - - - d.mapunit <- lapply(projectname, function(x) { - args = list(p_projectname = x) - d = parseWebReport(url, args) - }) - d.mapunit <- do.call("rbind", d.mapunit) - - d.mapunit$musym = as.character(d.mapunit$musym) - - # set factor levels according to metadata domains - d.mapunit <- uncode(d.mapunit, db = "LIMS", stringsAsFactors = stringsAsFactors) - - # return data.frame - return(d.mapunit) - -} - - -get_projectmapunit2_from_NASISWebReport <- function(mlrassoarea, fiscalyear, projectname, stringsAsFactors = default.stringsAsFactors()) { - - url <-"https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_projectmapunit2_from_NASISWebReport" - - args = list(p_mlrassoarea = mlrassoarea, p_fy = fiscalyear, p_projectname = projectname) - d.mapunit = parseWebReport(url, args) - - d.mapunit$musym = as.character(d.mapunit$musym) - - # set factor levels according to metadata domains - # data is coming back uncoded from LIMS so db is set to "SDA" - d.mapunit <- uncode(d.mapunit, db = "SDA", stringsAsFactors = stringsAsFactors) - - # return data.frame - return(d.mapunit) - -} - - -get_project_from_NASISWebReport <- function(mlrassoarea, fiscalyear) { - - url <-"https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_project_from_NASISWebReport" - - args <- list(p_mlrassoarea = mlrassoarea, p_fy = fiscalyear) - - d.project <- parseWebReport(url, args) - - # prep - idx <- unlist(lapply(names(d.project), function(x) grepl("date_", x))) - if (any(idx)) { - d.project[idx] <- 
lapply(d.project[idx], function(x) as.Date(x, format = "%Y/%m/%d")) - } - - # return data.frame - return(d.project) - -} - - -get_progress_from_NASISWebReport <- function(mlrassoarea, fiscalyear, projecttypename) { - - url <-"https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_progress_from_NASISWebReport" - - args <- list(p_mlrassoarea = mlrassoarea, p_fy = fiscalyear, p_projecttypename = projecttypename) - - d.progress <- parseWebReport(url, args) - - # return data.frame - return(d.progress) - -} - - -get_project_correlation_from_NASISWebReport <- function(mlrassoarea, fiscalyear, projectname) { - - # nasty hack to trick R CMD check - musym <- NULL - new_musym <- NULL - - url <-"https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_project_correlation_from_NASISWebReport" - - args <- list(p_mlrassoarea = mlrassoarea, p_fy = fiscalyear, p_projectname = projectname) - - d.rcor <- parseWebReport(url, args) - - # compute musym_orig for additional lmapunits, necessary to catch changes to the original musym, due to a constraint on the lmapunit table that prevents duplicate musym for additional mapunits - if (! is.null(d.rcor)) { - - d.rcor <- within(d.rcor, { - n = nchar(musym) - begin_1 = substr(musym, 2, n) - end_1 = substr(musym, 1, n - 1) - end_4 = substr(musym, 1, n - 4) - - idx = musym != new_musym & !is.na(new_musym) - orig_musym = ifelse(idx & musym != begin_1 & (new_musym == begin_1 | substr(musym, 1, 1) %in% c("x", "z")), begin_1, musym) - # Joe recommended using |\\+${1}, but appears to be legit in some cases - orig_musym = ifelse(idx & musym != end_1 & new_musym == end_1 , end_1 , orig_musym) - orig_musym = ifelse(idx & musym != end_4 & new_musym == end_4 , end_4 , orig_musym) - }) - } - - d.rcor[c("n", "begin_1", "end_1", "end_4", "idx")] <- NULL - - # return data.frame - return(d.rcor) - -} - - -fetchNASISWebReport <- function(projectname, rmHzErrors = FALSE, fill = FALSE, - stringsAsFactors = default.stringsAsFactors() -) { - - # load data in pieces - f.mapunit <- get_projectmapunit_from_NASISWebReport(projectname, stringsAsFactors = stringsAsFactors) - f.component <- get_component_from_NASISWebReport(projectname, stringsAsFactors = stringsAsFactors) - f.chorizon <- get_chorizon_from_NASISWebReport(projectname, fill, stringsAsFactors = stringsAsFactors) - - # return NULL if one of the required pieces is missing - if(is.null(f.mapunit) | is.null(f.component) | is.null(f.chorizon)) { - message("One or more inputs for fetchNASISWebReport (mapunit, component, or horizon) is NULL, returning NULL.") - return(NULL) - } - - - # optionally test for bad horizonation... flag, and remove - if (rmHzErrors) { - f.chorizon.test <- plyr::ddply(f.chorizon, 'coiid', function(d) { - res <- aqp::hzDepthTests(top=d[['hzdept_r']], bottom=d[['hzdepb_r']]) - return(data.frame(hz_logic_pass=all(!res))) - }) - - # which are the good (valid) ones? - good.ids <- as.character(f.chorizon.test$coiid[which(f.chorizon.test$hz_logic_pass)]) - bad.ids <- as.character(f.chorizon.test$coiid[which(! 
f.chorizon.test$hz_logic_pass)]) - - # keep the good ones - f.chorizon <- f.chorizon[which(f.chorizon$coiid %in% good.ids), ] - - # keep track of those components with horizonation errors - if(length(bad.ids) > 0) - assign('component.hz.problems', value=bad.ids, envir=soilDB.env) - } - - # upgrade to SoilProfilecollection - depths(f.chorizon) <- coiid ~ hzdept_r + hzdepb_r - - - ## TODO: this will fail in the presence of duplicates - ## TODO: make this error more informative - # add site data to object - site(f.chorizon) <- f.component # left-join via coiid - - # set NASIS-specific horizon identifier - hzidname(f.chorizon) <- 'chiid' - - # print any messages on possible data quality problems: - if (exists('component.hz.problems', envir=soilDB.env)) - message("-> QC: horizon errors detected, use `get('component.hz.problems', envir=soilDB.env)` for related cokey values") - - # done, return SPC - return(list(spc = f.chorizon, mapunit = f.mapunit)) - } - diff --git a/R/get_component_from_SDA.R b/R/get_component_from_SDA.R index f86b1538..44935a3b 100644 --- a/R/get_component_from_SDA.R +++ b/R/get_component_from_SDA.R @@ -705,6 +705,174 @@ get_chorizon_from_SDA <- function(WHERE = NULL, duplicates = FALSE, } + + +#' Download and Flatten Data from Soil Data Access +#' +#' Functions to download and flatten commonly used tables from Soil Data +#' Access, and create soil profile collection objects (SPC). +#' +#' These functions return data from Soil Data Access with the use of a simple +#' text string that is formatted as an SQL WHERE clause (e.g. \code{WHERE = +#' "areasymbol = 'IN001'"}). All functions are SQL queries that wrap around +#' \code{SDA_query()} and format the data for analysis. +#' +#' Beware that SDA includes data for both SSURGO and STATSGO2. The +#' \code{areasymbol} for STATSGO2 is \code{US}. For just SSURGO, include +#' \code{WHERE = "areasymbol != 'US'"}. +#' +#' If the duplicates argument is set to TRUE, duplicate components are +#' returned. This is not necessary with data returned from NASIS, which has one +#' unique national map unit. SDA can contain duplicate national map units, one +#' for each legend in which a map unit occurs. +#' +#' The value of \code{nullFragsAreZero} will have a significant impact on the +#' rock fragment fractions returned by \code{fetchSDA}. Set +#' \code{nullFragsAreZero = FALSE} in those cases where there are many +#' data-gaps and NULL rock fragment values should be interpreted as NULLs. Set +#' \code{nullFragsAreZero = TRUE} in those cases where NULL rock fragment +#' values should be interpreted as 0. +#' +#' @aliases fetchSDA get_legend_from_SDA get_lmuaoverlap_from_SDA +#' get_mapunit_from_SDA get_component_from_SDA get_chorizon_from_SDA +#' get_cosoilmoist_from_SDA get_cointerp_from_SDA +#' @param WHERE text string formatted as an SQL WHERE clause (default: NULL) +#' @param duplicates logical; if TRUE a record is returned for each unique +#' mukey (may be many per nationalmusym) +#' @param childs logical; if FALSE parent material and geomorphic child tables +#' are not flattened and appended +#' @param nullFragsAreZero should fragment volumes of NULL be interpreted as 0? +#' (default: TRUE), see details +#' @param rmHzErrors should pedons with horizonation errors be removed from the +#' results? (default: FALSE) +#' @param droplevels logical: indicating whether to drop unused levels in +#' classifying factors. This is useful when a class has a large number of unused +#' levels, which can waste space in tables and figures. 
+#' @param stringsAsFactors logical: should character vectors be converted to +#' factors? This argument is passed to the uncode() function. It does not +#' convert those vectors that have been set outside of uncode() (i.e. hard coded). +#' The 'factory-fresh' default is TRUE, but this can be changed by setting +#' options(stringsAsFactors = FALSE) +#' @return A data.frame or SoilProfileCollection object. +#' @author Stephen Roecker +#' @seealso \link{SDA_query} +#' @keywords manip +#' @examples +#' +#' \donttest{ +#' +#' +#' if (requireNamespace("curl") & +#' curl::has_internet() & +#' require(aqp) & +#' require("ggplot2") & +#' require("gridExtra") & +#' require("viridis") +#' ) { +#' +#' # query soil components by areasymbol and musym +#' test = fetchSDA(WHERE = "areasymbol = 'IN005' AND musym = 'MnpB2'") +#' +#' +#' # profile plot +#' plot(test) +#' +#' +#' # convert the data for depth plot +#' clay_slice = horizons(slice(test, 0:200 ~ claytotal_l + claytotal_r + claytotal_h)) +#' names(clay_slice) <- gsub("claytotal_", "", names(clay_slice)) +#' +#' om_slice = horizons(slice(test, 0:200 ~ om_l + om_r + om_h)) +#' names(om_slice) = gsub("om_", "", names(om_slice)) +#' +#' test2 = rbind(data.frame(clay_slice, var = "clay"), +#' data.frame(om_slice, var = "om") +#' ) +#' +#' h = merge(test2, site(test)[c("nationalmusym", "cokey", "compname", "comppct_r")], +#' by = "cokey", +#' all.x = TRUE +#' ) +#' +#' # depth plot of clay content by soil component +#' gg_comp <- function(x) { +#' ggplot(x) + +#' geom_line(aes(y = r, x = hzdept_r)) + +#' geom_ribbon(aes(ymin = l, ymax = h, x = hzdept_r), alpha = 0.2) + +#' xlim(200, 0) + +#' xlab("depth (cm)") + +#' facet_grid(var ~ nationalmusym + paste(compname, comppct_r)) + +#' coord_flip() +#' } +#' g1 <- gg_comp(subset(h, var == "clay")) +#' g2 <- gg_comp(subset(h, var == "om")) +#' +#' grid.arrange(g1, g2) +#' +#' +#' # query cosoilmoist (e.g. water table data) by mukey +#' x <- get_cosoilmoist_from_SDA(WHERE = "mukey = '1395352'") +#' +#' ggplot(x, aes(x = as.integer(month), y = dept_r, lty = status)) + +#' geom_rect(aes(xmin = as.integer(month), xmax = as.integer(month) + 1, +#' ymin = 0, ymax = max(x$depb_r), +#' fill = flodfreqcl)) + +#' geom_line(cex = 1) + +#' geom_point() + +#' geom_ribbon(aes(ymin = dept_l, ymax = dept_h), alpha = 0.2) + +#' ylim(max(x$depb_r), 0) + +#' xlab("month") + ylab("depth (cm)") + +#' scale_x_continuous(breaks = 1:12, labels = month.abb, name="Month") + +#' facet_wrap(~ paste0(compname, ' (', comppct_r , ')')) + +#' ggtitle(paste0(x$nationalmusym[1], +#' ': Water Table Levels from Component Soil Moisture Month Data')) +#' +#' +#' +#' # query all Miami major components +#' s <- get_component_from_SDA(WHERE = "compname = 'Miami' \n +#' AND majcompflag = 'Yes' AND areasymbol != 'US'") +#' +#' +#' # landform vs 3-D morphometry +#' test <- { +#' subset(s, ! is.na(landform) | ! is.na(geompos)) ->.; +#' split(., .$drainagecl, drop = TRUE) ->.; +#' lapply(., function(x) { +#' test = data.frame() +#' test = as.data.frame(table(x$landform, x$geompos)) +#' test$compname = x$compname[1] +#' test$drainagecl = x$drainagecl[1] +#' names(test)[1:2] <- c("landform", "geompos") +#' return(test) +#' }) ->.; +#' do.call("rbind", .) 
->.; +#' .[.$Freq > 0, ] ->.; +#' within(., { +#' landform = reorder(factor(landform), Freq, max) +#' geompos = reorder(factor(geompos), Freq, max) +#' geompos = factor(geompos, levels = rev(levels(geompos))) +#' }) ->.; +#' } +#' test$Freq2 <- cut(test$Freq, +#' breaks = c(0, 5, 10, 25, 50, 100, 150), +#' labels = c("<5", "5-10", "10-25", "25-50", "50-100", "100-150") +#' ) +#' ggplot(test, aes(x = geompos, y = landform, fill = Freq2)) + +#' geom_tile(alpha = 0.5) + facet_wrap(~ paste0(compname, "\n", drainagecl)) + +#' scale_fill_viridis(discrete = TRUE) + +#' theme(aspect.ratio = 1, axis.text.x = element_text(angle = 45, hjust = 1, vjust = 1)) + +#' ggtitle("Landform vs 3-D Morphometry for Miami Major Components on SDA") +#' +#' +#' } +#' +#' +#' +#' } +#' +#' @export fetchSDA fetchSDA <- function(WHERE = NULL, duplicates = FALSE, childs = TRUE, nullFragsAreZero = TRUE, rmHzErrors = FALSE, diff --git a/R/get_cosoilmoist_from_NASIS.R b/R/get_cosoilmoist_from_NASIS.R index 127b471c..22d142cb 100644 --- a/R/get_cosoilmoist_from_NASIS.R +++ b/R/get_cosoilmoist_from_NASIS.R @@ -1,4 +1,50 @@ -get_cosoilmoist_from_NASIS <- function(SS = TRUE, impute = TRUE, stringsAsFactors = default.stringsAsFactors(), static_path = NULL) { +#' Read and Flatten the Component Soil Moisture Tables +#' +#' Read and flatten the component soil moisture month tables from a local NASIS +#' Database. +#' +#' The component soil moisture tables within NASIS house monthly data on +#' flooding, ponding, and soil moisture status. The soil moisture status is +#' used to specify the water table depth for components (e.g. \code{status == +#' "Moist"}). +#' +#' @param SS fetch data from the currently loaded selected set in NASIS or from +#' the entire local database (default: `TRUE`) +#' @param impute replace missing (i.e. `NULL`) values with `"Not_Populated"` for +#' categorical data, the "RV" for numeric data, or `201` cm if the "RV" is also +#' `NULL` (default: `TRUE`) +#' @param stringsAsFactors logical: should character vectors be converted to +#' factors? This argument is passed to the `uncode()` function. It does not +#' convert those vectors that have been set outside of `uncode()` (i.e. hard coded). +#' The 'factory-fresh' default is TRUE, but this can be changed by setting +#' options(`stringsAsFactors = FALSE`) +#' @param static_path Optional: path to local SQLite database containing NASIS +#' table structure; default: `NULL` +#' @return A data.frame. +#' @note This function currently works only on Windows. +#' @author S.M. Roecker +#' @seealso \link{fetchNASIS}, \link{get_cosoilmoist_from_NASISWebReport}, +#' \link{get_cosoilmoist_from_SDA}, \code{get_comonth_from_SDA} +#' @keywords manip +#' @examples +#' +#' \donttest{ +#' if(local_NASIS_defined()) { +#' # load cosoilmoist (e.g. 
water table data) +#' test <- try(get_cosoilmoist_from_NASIS()) +#' +#' # inspect +#' if(!inherits(test, 'try-error')) { +#' head(test) +#' } +#' } +#' } +#' @export get_cosoilmoist_from_NASIS +get_cosoilmoist_from_NASIS <- function(SS = TRUE, + impute = TRUE, + stringsAsFactors = default.stringsAsFactors(), + static_path = NULL) { + q.cosoilmoist <- "SELECT dmuiidref AS dmuiid, coiid, compname, comppct_r, drainagecl, month, flodfreqcl, floddurcl, pondfreqcl, ponddurcl, cosoilmoistiid, soimoistdept_l, soimoistdept_r, soimoistdept_h, soimoistdepb_l, soimoistdepb_r, soimoistdepb_h, soimoiststat diff --git a/R/get_extended_data_from_NASIS_db.R b/R/get_extended_data_from_NASIS_db.R index ad1e38fd..b3b75972 100644 --- a/R/get_extended_data_from_NASIS_db.R +++ b/R/get_extended_data_from_NASIS_db.R @@ -4,7 +4,49 @@ ## TODO_JS: incorporated the use of uncode() into all except the fragment queries, which I think are best left as they are. -get_extended_data_from_NASIS_db <- function(SS=TRUE, nullFragsAreZero=TRUE, stringsAsFactors = default.stringsAsFactors(), static_path = NULL) { + + +#' Extract accessory tables and summaries from a local NASIS Database +#' +#' @param SS get data from the currently loaded Selected Set in NASIS or from +#' the entire local database (default: `TRUE`) +#' +#' @param nullFragsAreZero should fragment volumes of NULL be interpreted as 0? +#' (default: TRUE), see details +#' +#' @param stringsAsFactors logical: should character vectors be converted to +#' factors? This argument is passed to the `uncode()` function. It does not +#' convert those vectors that have been set outside of `uncode()` (i.e. hard +#' coded). +#' +#' @param static_path Optional: path to local SQLite database containing NASIS +#' table structure; default: `NULL` +#' +#' @return A list with the results. +#' @author Jay M. Skovlin and Dylan E. Beaudette +#' @seealso \code{\link{get_hz_data_from_NASIS_db}}, +#' \code{\link{get_site_data_from_NASIS_db}} +#' @keywords manip +#' @examples +#' +#' \donttest{ +#' +#' if(local_NASIS_defined()) { +#' # query extended data +#' e <- try(get_extended_data_from_NASIS_db()) +#' +#' # show contents of extended data +#' str(e) +#' } +#' +#' } +#' +#' @export get_extended_data_from_NASIS_db +get_extended_data_from_NASIS_db <- function(SS = TRUE, + nullFragsAreZero = TRUE, + stringsAsFactors = default.stringsAsFactors(), + static_path = NULL) { + # photo links from PedonPC stored as sitetext notes q.photolink <- "SELECT so.siteiidref AS siteiid, sot.recdate, sot.textcat, CAST(sot.textentry AS ntext) AS imagepath diff --git a/R/get_extended_data_from_pedon_db.R b/R/get_extended_data_from_pedon_db.R index 5a7abe61..cac8ec5e 100644 --- a/R/get_extended_data_from_pedon_db.R +++ b/R/get_extended_data_from_pedon_db.R @@ -1,6 +1,21 @@ # TODO: does not have parity with extended data function pulling from NASIS # missing queries for veg, ecosite, rf.data, surf.rf.summary, photolink, sitepm, structure + + +#' Extract accessory tables and summaries from a local pedonPC Database +#' +#' Extract accessory tables and summaries from a local pedonPC Database. +#' +#' This function currently works only on Windows. +#' +#' @param dsn The path to a 'pedon.mdb' database. +#' @return A list with the results. +#' @author Jay M. Skovlin and Dylan E. 
Beaudette +#' @seealso \code{\link{get_hz_data_from_pedon_db}}, +#' \code{\link{get_site_data_from_pedon_db}} +#' @keywords manip +#' @export get_extended_data_from_pedon_db get_extended_data_from_pedon_db <- function(dsn) { # must have RODBC installed if(!requireNamespace('RODBC')) diff --git a/R/get_hz_data_from_NASIS_db.R b/R/get_hz_data_from_NASIS_db.R index bbcb83a3..73db2883 100644 --- a/R/get_hz_data_from_NASIS_db.R +++ b/R/get_hz_data_from_NASIS_db.R @@ -1,7 +1,34 @@ ## TODO: when multiple textures have been defined, only the first one is returned (alphabetical ?) # -get_hz_data_from_NASIS_db <- function(SS=TRUE, stringsAsFactors = default.stringsAsFactors(), static_path = NULL) { + +#' Extract Horizon Data from a local NASIS Database +#' +#' Get horizon-level data from a local NASIS database. +#' +#' @param SS fetch data from Selected Set in NASIS or from the entire local database (default: `TRUE`) +#' +#' @param stringsAsFactors logical: should character vectors be converted to +#' factors? This argument is passed to the `uncode()` function. It does not +#' convert those vectors that have been set outside of `uncode()` (i.e. hard +#' coded). +#' +#' @param static_path Optional: path to local SQLite database containing NASIS +#' table structure; default: `NULL` +#' +#' @return A data.frame. +#' +#' @note `NULL` total rock fragment values are assumed to represent an _absence_ of rock fragments, and set to 0. +#' +#' @author Jay M. Skovlin and Dylan E. Beaudette +#' +#' @seealso \code{\link{get_colors_from_NASIS_db}}, \code{\link{get_site_data_from_NASIS_db}} +#' @keywords manip +#' @export get_hz_data_from_NASIS_db +get_hz_data_from_NASIS_db <- function(SS = TRUE, + stringsAsFactors = default.stringsAsFactors(), + static_path = NULL) { + q <- "SELECT peiid, phiid, upedonid as pedon_id, hzname, dspcomplayerid as genhz, hzdept, hzdepb, bounddistinct, boundtopo, diff --git a/R/get_hz_data_from_pedon_db.R b/R/get_hz_data_from_pedon_db.R index 221ab651..683eb59b 100644 --- a/R/get_hz_data_from_pedon_db.R +++ b/R/get_hz_data_from_pedon_db.R @@ -6,6 +6,23 @@ # - multiple textures defined for a single horizon-- currently texture is not returned, see NASIS version for 50% fix # - multiple lab sample numbers in phsample + + +#' Extract Horizon Data from a PedonPC Database +#' +#' Get horizon-level data from a PedonPC database. +#' +#' This function currently works only on Windows. +#' +#' @param dsn The path to a 'pedon.mdb' database. +#' @return A data.frame. +#' @note NULL total rock fragment values are assumed to represent an _absence_ +#' of rock fragments, and set to 0. +#' @author Dylan E. Beaudette and Jay M. Skovlin +#' @seealso \code{\link{get_colors_from_pedon_db}}, +#' \code{\link{get_site_data_from_pedon_db}} +#' @keywords manip +#' @export get_hz_data_from_pedon_db get_hz_data_from_pedon_db <- function(dsn) { # must have RODBC installed if(!requireNamespace('RODBC')) diff --git a/R/get_lablayer_data_from_NASIS_db.R b/R/get_lablayer_data_from_NASIS_db.R index 4d546347..e5bc5e6d 100644 --- a/R/get_lablayer_data_from_NASIS_db.R +++ b/R/get_lablayer_data_from_NASIS_db.R @@ -1,3 +1,21 @@ +#' Extract lab pedon layer data from a local NASIS Database +#' +#' Get lab pedon layer-level (horizon-level) data from a local NASIS database. 
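+#' @examples
+#'
+#' \donttest{
+#' # minimal sketch, assuming a local NASIS connection with lab data loaded
+#' if(local_NASIS_defined()) {
+#' ll <- try(get_lablayer_data_from_NASIS_db())
+#'
+#' # inspect
+#' if(!inherits(ll, 'try-error')) {
+#' str(ll)
+#' }
+#' }
+#' }
+#'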
+#' +#' @param SS fetch data from the currently loaded selected set in NASIS or from +#' the entire local database (default: `TRUE`) +#' +#' @param static_path Optional: path to local SQLite database containing NASIS +#' table structure; default: `NULL` +#' +#' @return A data.frame. +#' @note This function queries KSSL laboratory site/horizon data from the lab +#' layer data table in a local NASIS database. +#' +#' @author Jay M. Skovlin and Dylan E. Beaudette +#' @seealso \code{\link{get_labpedon_data_from_NASIS_db}} +#' @keywords manip +#' @export get_lablayer_data_from_NASIS_db get_lablayer_data_from_NASIS_db <- function(SS = TRUE, static_path = NULL) { # hacks to make R CMD check --as-cran happy: diff --git a/R/get_labpedon_data_from_NASIS_db.R b/R/get_labpedon_data_from_NASIS_db.R index fa3aadf4..81c27a1c 100644 --- a/R/get_labpedon_data_from_NASIS_db.R +++ b/R/get_labpedon_data_from_NASIS_db.R @@ -1,3 +1,25 @@ +#' Extract lab pedon data from a local NASIS Database +#' +#' Get lab pedon-level data from a local NASIS database. +#' +#' This function currently works only on Windows, and requires a 'nasis_local' +#' ODBC connection. +#' +#' @param SS fetch data from the currently loaded selected set in NASIS or from +#' the entire local database (default: TRUE) +#' +#' @param static_path Optional: path to local SQLite database containing NASIS +#' table structure; default: `NULL` +#' +#' @return A data.frame. +#' +#' @note This function queries KSSL laboratory site/horizon data from the lab +#' pedon data table in a local NASIS database. +#' +#' @author Jay M. Skovlin and Dylan E. Beaudette +#' @seealso \code{\link{get_lablayer_data_from_NASIS_db}} +#' @keywords manip +#' @export get_labpedon_data_from_NASIS_db get_labpedon_data_from_NASIS_db <- function(SS = TRUE, static_path = NULL) { q.ncsslabpedon <- "SELECT peiidref AS peiid, upedonid, descname, taxonname, taxclname, ncsspedonlabdata_View_1.pedlabsampnum, psctopdepth, pscbotdepth, noncarbclaywtavg, claytotwtavg, le0to100, wf0175wtavgpsc, volfractgt2wtavg, cec7clayratiowtavg, labdatasheeturl, ncsspedonlabdataiid AS labpeiid diff --git a/R/get_site_data_from_NASIS_db.R b/R/get_site_data_from_NASIS_db.R index a0604564..33686ee0 100644 --- a/R/get_site_data_from_NASIS_db.R +++ b/R/get_site_data_from_NASIS_db.R @@ -18,8 +18,35 @@ ## TODO: bug within RODBC - converts site_id == 056E916010 to an exponent -get_site_data_from_NASIS_db <- function(SS=TRUE, stringsAsFactors = default.stringsAsFactors(), static_path = NULL) { - +#' Extract Site Data from a local NASIS Database +#' +#' Get site-level data from a local NASIS database. +#' +#' When multiple "site bedrock" entries are present, only the shallowest is +#' returned by this function. +#' +#' @param SS fetch data from Selected Set in NASIS or from the entire local +#' database (default: `TRUE`) +#' +#' @param stringsAsFactors logical: should character vectors be converted to +#' factors? This argument is passed to the `uncode()` function. It does not +#' convert those vectors that have been set outside of `uncode()` (i.e. hard +#' coded). +#' +#' @param static_path Optional: path to local SQLite database containing NASIS +#' table structure; default: `NULL` +#' +#' @return A data.frame +#' +#' @author Jay M. Skovlin and Dylan E. 
Beaudette +#' @seealso \code{\link{get_hz_data_from_NASIS_db}} +#' @keywords manip +#' +#' @export get_site_data_from_NASIS_db +get_site_data_from_NASIS_db <- function(SS = TRUE, + stringsAsFactors = default.stringsAsFactors(), + static_path = NULL) { + q <- "SELECT siteiid as siteiid, peiid, CAST(usiteid AS varchar(60)) as site_id, CAST(upedonid AS varchar(60)) as pedon_id, obsdate as obs_date, utmzone, utmeasting, utmnorthing, -(longdegrees + CASE WHEN longminutes IS NULL THEN 0.0 ELSE longminutes / 60.0 END + CASE WHEN longseconds IS NULL THEN 0.0 ELSE longseconds / 60.0 / 60.0 END) as x, latdegrees + CASE WHEN latminutes IS NULL THEN 0.0 ELSE latminutes / 60.0 END + CASE WHEN latseconds IS NULL THEN 0.0 ELSE latseconds / 60.0 / 60.0 END as y, horizdatnm, longstddecimaldegrees as x_std, latstddecimaldegrees as y_std, gpspositionalerror, descname as describer, pedonpurpose, pedontype, pedlabsampnum, labdatadescflag, diff --git a/R/get_site_data_from_pedon_db.R b/R/get_site_data_from_pedon_db.R index 6fa012e7..b082539f 100644 --- a/R/get_site_data_from_pedon_db.R +++ b/R/get_site_data_from_pedon_db.R @@ -5,6 +5,21 @@ # siteiidref key removed from pedon table - use the pedon.siteobsiidref through the siteobs table (siteobs.siteobsiid) as the new linkage + + +#' Extract Site Data from a PedonPC Database +#' +#' Get site-level data from a PedonPC database. +#' +#' +#' @param dsn The path to a 'pedon.mdb' database. +#' @return A data.frame. +#' @note This function currently works only on Windows. +#' @author Dylan E. Beaudette and Jay M. Skovlin +#' @seealso \code{\link{get_hz_data_from_pedon_db}}, +#' \code{\link{get_veg_from_AK_Site}} +#' @keywords manip +#' @export get_site_data_from_pedon_db get_site_data_from_pedon_db <- function(dsn) { # must have RODBC installed diff --git a/R/get_soilseries_from_NASIS.R b/R/get_soilseries_from_NASIS.R index 0c03a789..0b4636d4 100644 --- a/R/get_soilseries_from_NASIS.R +++ b/R/get_soilseries_from_NASIS.R @@ -1,4 +1,27 @@ -get_soilseries_from_NASIS <- function(stringsAsFactors = default.stringsAsFactors(), static_path = NULL) { +#' Get records from the Soil Classification (SC) database +#' +#' These functions return records from the Soil Classification database, either +#' from the local NASIS database (all series) or via web report (named series +#' only). +#' +#' @aliases get_soilseries_from_NASIS get_soilseries_from_NASISWebReport +#' +#' @param stringsAsFactors logical: should character vectors be converted to +#' factors? This argument is passed to the `uncode()` function. It does not +#' convert those vectors that have been set outside of `uncode()` (i.e. hard coded). +#' +#' @param static_path Optional: path to local SQLite database containing NASIS +#' table structure; default: `NULL` +#' +#' @return A \code{data.frame} +#' +#' @author Stephen Roecker +#' +#' @keywords manip +#' +#' @export get_soilseries_from_NASIS
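+#' @examples
+#'
+#' \donttest{
+#' # minimal sketch, assuming a local NASIS connection (or SQLite snapshot)
+#' if(local_NASIS_defined()) {
+#' ss <- try(get_soilseries_from_NASIS())
+#'
+#' # inspect
+#' if(!inherits(ss, 'try-error')) {
+#' head(ss)
+#' }
+#' }
+#' }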
+get_soilseries_from_NASIS <- function(stringsAsFactors = default.stringsAsFactors(), + static_path = NULL) { q.soilseries <- " SELECT soilseriesname, soilseriesstatus, benchmarksoilflag, statsgoflag, mlraoffice, areasymbol, areatypename, taxclname, taxorder, taxsuborder, taxgrtgroup, taxsubgrp, taxpartsize, taxpartsizemod, taxceactcl, taxreaction, taxtempcl, originyear, establishedyear, soiltaxclasslastupdated, soilseriesiid @@ -35,8 +58,6 @@ get_soilseries_from_NASIS <- function(stringsAsFactor return(d.soilseries) } - - get_soilseries_from_NASISWebReport <- function(soils, stringsAsFactors = default.stringsAsFactors()) { url <- "https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=get_soilseries_from_NASISWebReport" diff --git a/R/get_text_notes_from_NASIS_db.R b/R/get_text_notes_from_NASIS_db.R index 8b607355..7dcd3ebd 100644 --- a/R/get_text_notes_from_NASIS_db.R +++ b/R/get_text_notes_from_NASIS_db.R @@ -1,3 +1,36 @@ +#' Extract text note data from a local NASIS Database +#' +#' @param SS get data from the currently loaded Selected Set in NASIS or from +#' the entire local database (default: `TRUE`) +#' +#' @param fixLineEndings convert line endings from `\\r\\n` to `\\n` +#' +#' @param static_path Optional: path to local SQLite database containing NASIS +#' table structure; default: `NULL` +#' +#' @return A `list` with the results. +#' @author Dylan E. Beaudette and Jay M. Skovlin +#' @seealso \code{\link{get_hz_data_from_pedon_db}}, +#' \code{\link{get_site_data_from_pedon_db}} +#' @keywords manip +#' @examples +#' +#' \donttest{ +#' if(local_NASIS_defined()) { +#' # query text note data +#' t <- try(get_text_notes_from_NASIS_db()) +#' +#' # show contents of text note data; includes: siteobs, site, pedon, and horizon-level text notes +#' str(t) +#' +#' # view text categories for site text notes +#' if(!inherits(t, 'try-error')) { +#' table(t$site_text$textcat) +#' } +#' } +#' } +#' +#' @export get_text_notes_from_NASIS_db get_text_notes_from_NASIS_db <- function(SS=TRUE, fixLineEndings=TRUE, static_path = NULL) { # petext diff --git a/R/get_veg_data_from_NASIS_db.R b/R/get_veg_data_from_NASIS_db.R index 2dde8426..2e26f046 100644 --- a/R/get_veg_data_from_NASIS_db.R +++ b/R/get_veg_data_from_NASIS_db.R @@ -1,5 +1,35 @@ ## TODO: merge with other vegplot functions + + +#' Extract veg data from a local NASIS Database +#' +#' Extract veg data from a local NASIS Database. +#' +#' This function currently works only on Windows. +#' +#' @param SS get data from the currently loaded Selected Set in NASIS or from +#' the entire local database (default: `TRUE`) +#' +#' @param static_path Optional: path to local SQLite database containing NASIS +#' table structure; default: `NULL` +#' +#' @return A list with the results. +#' @author Jay M. Skovlin and Dylan E. 
Beaudette +#' @keywords manip +#' @examples +#' +#' \donttest{ +#' if(local_NASIS_defined()) { +#' # query veg data +#' v <- try(get_veg_data_from_NASIS_db()) +#' +#' # show contents of veg data returned +#' str(v) +#' } +#' } +#' +#' @export get_veg_data_from_NASIS_db get_veg_data_from_NASIS_db <- function(SS=TRUE, static_path = NULL) { # warning to use NASIS query to load related vegplot data for this to work diff --git a/R/get_veg_from_AK_Site.R b/R/get_veg_from_AK_Site.R index 9dc3d244..d1fde50f 100644 --- a/R/get_veg_from_AK_Site.R +++ b/R/get_veg_from_AK_Site.R @@ -1,5 +1,19 @@ - # gets all veg records per site ID + + +#' Retrieve Vegetation Data from an AK Site Database +#' +#' Retrieve Vegetation Data from an AK Site Database +#' +#' +#' @param dsn file path to the AK Site access database +#' @return A data.frame with vegetation data in long format, linked to site ID. +#' @note This function currently works only on Windows. +#' @author Dylan E. Beaudette +#' @seealso \code{\link{get_hz_data_from_pedon_db}}, +#' \code{\link{get_site_data_from_pedon_db}} +#' @keywords manip +#' @export get_veg_from_AK_Site get_veg_from_AK_Site <- function(dsn) { # must have RODBC installed if(!requireNamespace('RODBC')) diff --git a/R/get_veg_from_MT_veg_db.R b/R/get_veg_from_MT_veg_db.R index dbf4c2e2..cfc42251 100644 --- a/R/get_veg_from_MT_veg_db.R +++ b/R/get_veg_from_MT_veg_db.R @@ -1,3 +1,17 @@ +#' Extract Site and Plot-level Data from a Montana RangeDB database +#' +#' Get Site and Plot-level data from a Montana RangeDB database. +#' +#' This function currently works only on Windows. +#' +#' @param dsn The name of the Montana RangeDB front-end database connection +#' (see details). +#' @return A data.frame. +#' @author Jay M. Skovlin +#' @seealso \code{\link{get_veg_species_from_MT_veg_db}}, +#' \code{\link{get_veg_other_from_MT_veg_db}} +#' @keywords manip +#' @export get_veg_from_MT_veg_db get_veg_from_MT_veg_db <- function(dsn) { # must have RODBC installed if(!requireNamespace('RODBC')) diff --git a/R/get_veg_from_NPS_PLOTS_db.R b/R/get_veg_from_NPS_PLOTS_db.R index 4343e191..82bf0b06 100644 --- a/R/get_veg_from_NPS_PLOTS_db.R +++ b/R/get_veg_from_NPS_PLOTS_db.R @@ -2,6 +2,22 @@ # add as get_veg_from_NPS_PLOTS_db() to soilDB package # Jay Skovlin, 12/4/2013 # dsn <- "H:/GNP_vegetation_data_MR/GlacierNP_vegdata/PLOTS_v32_BE.accdb" + + +#' Retrieve Vegetation Data from an NPS PLOTS Database +#' +#' Used to extract species, stratum, and cover vegetation data from a backend +#' NPS PLOTS Database. Currently works for any Microsoft Access database with +#' an .mdb file format. +#' +#' +#' @param dsn file path to the NPS PLOTS access database on your system. +#' @return A data.frame with vegetation data in a long format with linkage to +#' NRCS soil pedon data via the site_id key field. +#' @note This function currently only works on Windows. +#' @author Jay M. Skovlin +#' @keywords manip +#' @export get_veg_from_NPS_PLOTS_db get_veg_from_NPS_PLOTS_db <- function(dsn) { # must have RODBC installed if(!requireNamespace('RODBC')) diff --git a/R/get_veg_other_from_MT_veg_db.R b/R/get_veg_other_from_MT_veg_db.R index 52877f19..740a2cc0 100644 --- a/R/get_veg_other_from_MT_veg_db.R +++ b/R/get_veg_other_from_MT_veg_db.R @@ -1,3 +1,17 @@ +#' Extract cover composition data from a Montana RangeDB database +#' +#' Get cover composition data from a Montana RangeDB database. +#' +#' This function currently works only on Windows. 
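+#' @examples
+#'
+#' # hypothetical usage (not run); 'dsn' below is a placeholder ODBC connection name
+#' # v.other <- get_veg_other_from_MT_veg_db(dsn = "MTVegDB")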
+#' +#' @param dsn The name of the Montana RangeDB front-end database connection +#' (see details). +#' @return A data.frame. +#' @author Jay M. Skovlin +#' @seealso \code{\link{get_veg_from_MT_veg_db}}, +#' \code{\link{get_veg_species_from_MT_veg_db}} +#' @keywords manip +#' @export get_veg_other_from_MT_veg_db get_veg_other_from_MT_veg_db <- function(dsn) { # must have RODBC installed diff --git a/R/get_veg_species_from_MT_veg_db.R b/R/get_veg_species_from_MT_veg_db.R index 87671c12..495eba79 100644 --- a/R/get_veg_species_from_MT_veg_db.R +++ b/R/get_veg_species_from_MT_veg_db.R @@ -1,3 +1,17 @@ +#' Extract species-level Data from a Montana RangeDB database +#' +#' Get species-level data from a Montana RangeDB database. +#' +#' This function currently works only on Windows. +#' +#' @param dsn The name of the Montana RangeDB front-end database connection +#' (see details). +#' @return A data.frame. +#' @author Jay M. Skovlin +#' @seealso \code{\link{get_veg_from_MT_veg_db}}, +#' \code{\link{get_veg_other_from_MT_veg_db}} +#' @keywords manip +#' @export get_veg_species_from_MT_veg_db get_veg_species_from_MT_veg_db <- function(dsn) { # must have RODBC installed if(!requireNamespace('RODBC')) diff --git a/R/mapunit_geom_by_ll_bbox.R b/R/mapunit_geom_by_ll_bbox.R index 4bbfcd36..c6221a32 100644 --- a/R/mapunit_geom_by_ll_bbox.R +++ b/R/mapunit_geom_by_ll_bbox.R @@ -1,28 +1,35 @@ - - # 2011-06-22 # It appears that SDA does not actually return the spatial intersection of map unit polygons and bounding box. Rather, just those polygons that overlap the bbox. + + #' Fetch Map Unit Geometry from SDA #' -#' @description Fetch map unit geometry from the SDA website by WGS84 bounding box. There is a limit on the amount of data returned as serialized JSON (~32Mb) and a total record limit of 100,000. +#' Fetch map unit geometry from the SDA website by WGS84 bounding box. There is +#' a limit on the amount of data returned as serialized JSON (~32Mb) and a +#' total record limit of 100,000. #' -#' @param bbox a bounding box in WGS coordinates -#' @param source the source database, currently limited to soil data access (SDA) -#' @details The SDA website can be found at \url{https://sdmdataaccess.nrcs.usda.gov}. See examples for bounding box formatting. -#' @return A SpatialPolygonsDataFrame of map unit polygons, in WGS84 (long,lat) coordinates. -#' @note SDA does not return the spatial intersection of map unit polygons and bounding box. Rather, just those polygons that are completely within the bounding box / overlap with the bbox. This function requires the 'rgdal' package. +#' The SDA website can be found at \url{https://sdmdataaccess.nrcs.usda.gov}. +#' See examples for bounding box formatting. #' +#' @param bbox a bounding box in WGS84 coordinates +#' @param source the source database, currently limited to soil data access +#' (SDA) +#' @return A SpatialPolygonsDataFrame of map unit polygons, in WGS84 (long,lat) +#' coordinates. +#' @note SDA does not return the spatial intersection of map unit polygons and +#' bounding box. Rather, just those polygons that are completely within the +#' bounding box / overlap with the bbox. This function requires the 'rgdal' +#' package. #' @author Dylan E. 
Beaudette -#' @export -#' #' @examples -#'## fetch map unit geometry from a bounding-box: -#'# -#'# +------------- (-120.41, 38.70) -#'# | | -#'# | | -#'# (-120.54, 38.61) --------------+ -#'# +#' +#' ## fetch map unit geometry from a bounding-box: +#' # +#' # +------------- (-120.41, 38.70) +#' # | | +#' # | | +#' # (-120.54, 38.61) --------------+ +#' # #' \donttest{ #' if(requireNamespace("curl") & #' curl::has_internet() & @@ -52,7 +59,9 @@ #' message('could not download XML result from SDA') #' } #' } -#'} +#' } +#' +#' @export mapunit_geom_by_ll_bbox mapunit_geom_by_ll_bbox <- function(bbox, source='sda') { # must have rgdal installed diff --git a/R/mix_and_clean_colors.R b/R/mix_and_clean_colors.R index c7f9c14d..99dbda3c 100644 --- a/R/mix_and_clean_colors.R +++ b/R/mix_and_clean_colors.R @@ -5,6 +5,18 @@ # x: data.frame, typically from NASIS containing at least 'r', 'g', 'b' colors {0,1} and some kind of weight # wt: fractional weights, usually area of hz face +#' Mix and Clean Colors +#' +#' Deprecated: only used in PedonPC functionality; use `estimateColorMixture` instead +#' +#' @param x a \code{data.frame} object containing sRGB coordinates ('r', 'g', +#' 'b') in the range {0, 1}, along with a column of weights +#' @param wt character, name of the column containing fractional weights, +#' usually the area of each horizon face +#' @param backTransform logical, should the mixed sRGB representation of soil +#' color be transformed to closest Munsell chips? This is performed by +#' \code{aqp::rgb2munsell()} +#' +#' @return A data.frame containing mixed colors +#' @export mix_and_clean_colors <- function(x, wt='pct', backTransform=FALSE) { ## TODO finish this diff --git a/R/mukey-WCS.R b/R/mukey-WCS.R index f7df4150..bacf1e4a 100644 --- a/R/mukey-WCS.R +++ b/R/mukey-WCS.R @@ -1,36 +1,36 @@ - - -#' @title gNATSGO / gSSURGO Map Unit Key Web Coverage Service (WCS) +#' gNATSGO / gSSURGO Map Unit Key Web Coverage Service (WCS) #' -#' @description Download chunks of the gNATSGO or gSSURGO map unit key grid via bounding-box from the SoilWeb WCS. +#' Download chunks of the gNATSGO or gSSURGO map unit key grid via bounding-box +#' from the SoilWeb WCS. #' -#' @author D.E. Beaudette and A.G. Brown -#' -#' @param aoi area of interest (AOI) defined using a \code{Spatial*}, a \code{sf}, \code{sfc} or \code{bbox} object or a \code{list}, see details -#' -#' @param db name of the gridded map unit key grid to access, should be either 'gnatsgo' or 'gssurgo' -#' -#' @param res grid resolution, units of meters. The native resolution of gNATSGO and gSSURGO (this WCS) is 30m. -#' -#' @param quiet logical, passed to \code{download.file} to enable / suppress URL and progress bar for download. -#' -#' @note The gNATSGO grid includes raster soil survey map unit keys which are not in SDA. -#' -#' @details \code{aoi} should be specified as either a \code{Spatial*}, \code{sf}, \code{sfc} or \code{bbox} object or a \code{list} containing: -#' -#' \describe{ -#' \item{\code{aoi}}{bounding-box specified as (xmin, ymin, xmax, ymax) e.g. c(-114.16, 47.65, -114.08, 47.68)} -#' \item{\code{crs}}{coordinate reference system of BBOX, e.g. '+init=epsg:4326'} -#' } -#' -#' The WCS query is parameterized using \code{raster::extent} derived from the above AOI specification, after conversion to the native CRS (EPSG:6350) of the gNATSGO / gSSURGO grid. +#' \code{aoi} should be specified as either a \code{Spatial*}, \code{sf}, +#' \code{sfc} or \code{bbox} object or a \code{list} containing: #' -#' Databases available from this WCS can be queried using \code{WCS_details(wcs = 'mukey')}. 
+#' \describe{ \item{list("aoi")}{bounding-box specified as (xmin, ymin, xmax, +#' ymax) e.g. c(-114.16, 47.65, -114.08, 47.68)} \item{list("crs")}{coordinate +#' reference system of BBOX, e.g. '+init=epsg:4326'} } #' -#' @return \code{raster} object containing indexed map unit keys and associated raster attribute table -#' -#' @export -#' +#' The WCS query is parameterized using \code{raster::extent} derived from the +#' above AOI specification, after conversion to the native CRS (EPSG:6350) of +#' the gNATSGO / gSSURGO grid. +#' +#' Databases available from this WCS can be queried using \code{WCS_details(wcs +#' = 'mukey')}. +#' +#' @param aoi area of interest (AOI) defined using a \code{Spatial*}, a +#' \code{sf}, \code{sfc} or \code{bbox} object or a \code{list}, see details +#' @param db name of the gridded map unit key grid to access, should be either +#' 'gnatsgo' or 'gssurgo' +#' @param res grid resolution, units of meters. The native resolution of +#' gNATSGO and gSSURGO (this WCS) is 30m. +#' @param quiet logical, passed to \code{download.file} to enable / suppress +#' URL and progress bar for download. +#' @return \code{raster} object containing indexed map unit keys and associated +#' raster attribute table +#' @note The gNATSGO grid includes raster soil survey map unit keys which are +#' not in SDA. +#' @author D.E. Beaudette and A.G. Brown +#' @export mukey.wcs mukey.wcs <- function(aoi, db = c('gnatsgo', 'gssurgo'), res = 30, quiet = FALSE) { if(!requireNamespace('rgdal', quietly=TRUE)) diff --git a/R/openNASISchannel.R b/R/openNASISchannel.R index ce459cb4..719f0a34 100644 --- a/R/openNASISchannel.R +++ b/R/openNASISchannel.R @@ -29,22 +29,26 @@ return(channel) } -#' Check for presence of `nasis_local` ODBC data source -#' -#' @param static_path Optional: path to local SQLite database containing NASIS table structure; default: NULL -#' + + +#' Check for presence of \code{nasis_local} ODBC data source +#' +#' Check for presence of \code{nasis_local} ODBC data source +#' +#' +#' @param static_path Optional: path to local SQLite database containing NASIS +#' table structure; default: NULL #' @return logical -#' @export local_NASIS_defined -#' #' @examples -#' +#' +#' #' if(local_NASIS_defined()) { #' # use fetchNASIS or some other lower-level fetch function #' } else { #' message('could not find `nasis_local` ODBC data source') #' } -#' @importFrom odbc odbcListDataSources -#' @importFrom RSQLite dbCanConnect SQLite +#' +#' @export local_NASIS_defined local_NASIS_defined <- function(static_path = NULL) { if (is.null(static_path)) { if ('nasis_local' %in% odbc::odbcListDataSources()$name) { diff --git a/R/parseWebReport.R b/R/parseWebReport.R index 8856224f..f47e925c 100644 --- a/R/parseWebReport.R +++ b/R/parseWebReport.R @@ -1,4 +1,3 @@ - ## parallel requests? # https://cran.r-project.org/web/packages/curl/vignettes/intro.html#async_requests @@ -17,6 +16,32 @@ # examples: # url = 'https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=WEB-PROJECT_MUKEY_BY_GOAL_YEAR' # args = list(msso='2-MIN', fy='2018', asym='%', proj='0') + + +#' Parse contents of a web report, based on supplied arguments. +#' +#' Parse contents of a web report, based on supplied arguments. +#' +#' Report argument names can be inferred by inspection of the HTML source +#' associated with any given web report. +#' +#' @param url Base URL to a LIMS/NASIS web report. +#' @param args List of named arguments to send to report, see details. 
+#' @param index Integer index specifying the table to return, or NULL for a
+#' list of tables
+#' @return A \code{data.frame} object in the case of a single integer passed to
+#' \code{index}, a \code{list} object in the case of an integer vector or NULL
+#' passed to \code{index}.
+#' @note Most web reports are for internal use only.
+#' @author D.E. Beaudette and S.M. Roecker
+#' @keywords IO
+#' @examples
+#'
+#' \donttest{
+#' # pending
+#' }
+#'
+#' @export parseWebReport
 parseWebReport <- function(url, args, index=1) {
 
   # sanity check: package requirements
diff --git a/R/seriesExtent.R b/R/seriesExtent.R
index 61fb66fc..27eb379a 100644
--- a/R/seriesExtent.R
+++ b/R/seriesExtent.R
@@ -1,4 +1,3 @@
-
 ## this isn't going to work anymore, unless you have a GM API key
 # # get the series extent from SEE pre-cached GeoJSON data and plot on Google Maps
 # seriesExtentAsGmap <- function(s, timeout=60, exp=1.25) {
@@ -28,27 +27,30 @@
 #   sp::plot(x.M, col=rgb(1, 0, 0, alpha=0.5), add=TRUE)
 # }
 
-
-
-
-
-#' @title Retrieve Soil Series Extent Maps from SoilWeb
+#' Retrieve Soil Series Extent Maps from SoilWeb
 #'
-#' @description This function downloads a generalized representations of a soil series extent from SoilWeb, derived from the current SSURGO snapshot. Data can be returned as vector outlines (\code{SpatialPolygonsDataFrame} object) or gridded representation of area proportion falling within 800m cells (\code{raster} object). Gridded series extent data are only available in CONUS. Vector representations are returned with a GCS/WGS84 coordinate reference system and raster representations are returned with an Albers Equal Area / NAD83 coordinate reference system.
+#' This function downloads a generalized representation of a soil series
+#' extent from SoilWeb, derived from the current SSURGO snapshot. Data can be
+#' returned as vector outlines (\code{SpatialPolygonsDataFrame} object) or
+#' gridded representation of area proportion falling within 800m cells
+#' (\code{raster} object). Gridded series extent data are only available in
+#' CONUS. Vector representations are returned with a GCS/WGS84 coordinate
+#' reference system and raster representations are returned with an Albers
+#' Equal Area / NAD83 coordinate reference system.
 #'
-#' @param s a soil series name, case-insensitive
-#'
-#' @param type series extent representation, \code{vector} results in a \code{SpatialPolygonsDataFrame} object and \code{raster} results in a \code{raster} object
 #'
+#' @param s a soil series name, case-insensitive
+#' @param type series extent representation, \code{vector} results in a
+#' \code{SpatialPolygonsDataFrame} object and \code{raster} results in a
+#' \code{raster} object
 #' @param timeout time that we are willing to wait for a response, in seconds
-#'
-#' @references \url{https://casoilresource.lawr.ucdavis.edu/see/}
-#'
+#' @note This function requires the \code{rgdal} package. Warning messages
+#' about the proj4 CRS specification may be printed depending on your version
+#' of \code{rgdal}. This should be resolved soon.
 #' @author D.E. Beaudette
-#'
-#' @note This function requires the \code{rgdal} package. Warning messages about the proj4 CRS specification may be printed depending on your version of \code{rgdal}. This should be resolved soon. 
-#'
+#' @references \url{https://casoilresource.lawr.ucdavis.edu/see/}
 #' @examples
+#'
 #'
 #' \donttest{
 #' if(requireNamespace("curl") &
@@ -83,7 +85,8 @@
 #' }
 #' }
 #'
-
+#'
+#' @export seriesExtent
 seriesExtent <- function(s, type = c('vector', 'raster'), timeout = 60) {
   if(!requireNamespace('rgdal', quietly=TRUE))
     stop('please install the `rgdal` package', call.=FALSE)
diff --git a/R/siblings.R b/R/siblings.R
index dc8e4733..d8ec347f 100644
--- a/R/siblings.R
+++ b/R/siblings.R
@@ -1,9 +1,57 @@
-
 # 2018-11-14
 ## TODO: launder series names, all upper case?
 
 # return information on soil series that co-occur with `s`
 # component.data: should the component names, kind, percent, etc. be returned as well?
 # cousins: return siblings of siblings (cousins)?
+
+
+#' Lookup Siblings and Cousins for a Given Soil Series
+#'
+#' Lookup siblings and cousins for a given soil series, from the current fiscal
+#' year SSURGO snapshot via SoilWeb.
+#'
+#' The siblings of any given soil series are defined as those soil series
+#' (major and minor component) that share a parent map unit with the named
+#' series (as a major component). Cousins are siblings of siblings. Data are
+#' sourced from SoilWeb, which maintains a copy of the current SSURGO snapshot.
+#'
+#' @param s character vector, the name of a single soil series,
+#' case-insensitive.
+#' @param only.major logical, should only siblings that are major components
+#' be returned?
+#' @param component.data logical, should component data for siblings (and
+#' optionally cousins) be returned?
+#' @param cousins logical, should siblings-of-siblings (cousins) be returned?
+#' @return \describe{ \item{sib}{\code{data.frame} containing siblings, major
+#' component flag, and number of co-occurrences}
+#' \item{sib.data}{\code{data.frame} containing sibling component data}
+#' \item{cousins}{\code{data.frame} containing cousins, major component flag,
+#' and number of co-occurrences} \item{cousin.data}{\code{data.frame}
+#' containing cousin component data} }
+#' @author D.E. Beaudette
+#' @seealso \link{OSDquery}, \link{siblings}, \link{fetchOSD}
+#' @references
+#' - [Soil Series Query Functions](http://ncss-tech.github.io/AQP/soilDB/soil-series-query-functions.html)
+#' - [Soil "Siblings" Tutorial](http://ncss-tech.github.io/AQP/soilDB/siblings.html)
+#' @keywords manip
+#' @examples
+#'
+#' \donttest{
+#' if(requireNamespace("curl") &
+#'   curl::has_internet()) {
+#'
+#'   # basic usage
+#'   x <- siblings('zook')
+#'   x$sib
+#'
+#'   # restrict to siblings that are major components
+#'   # e.g. the most likely siblings
+#'   x <- siblings('zook', only.major = TRUE)
+#'   x$sib
+#' }
+#' }
+#'
+#' @export siblings
 siblings <- function(s, only.major=FALSE, component.data=FALSE, cousins=FALSE) {
 
   # helper functions
diff --git a/R/simplfyFragmentData.R b/R/simplfyFragmentData.R
index 10e8ba63..4c3f5d42 100644
--- a/R/simplfyFragmentData.R
+++ b/R/simplfyFragmentData.R
@@ -1,4 +1,3 @@
-
 ## TODO: generalize, export, and make sieve sizes into an argument
 
 # latest NSSH part 618
@@ -128,6 +127,36 @@
 # rf: un-coded contents of the phfrags table
 # id.var: id column name
 # nullFragsAreZero: convert NA to 0?
+
+
+#' Simplify Coarse Fraction Data
+#'
+#' Simplify multiple coarse fraction (>2mm) records by horizon.
+#'
+#' This function is mainly intended for the processing of NASIS pedon/horizon
+#' data which contains multiple coarse fragment descriptions per horizon. 
+#' \code{simplifyFragmentData} will "sieve out" coarse fragments into the USDA
+#' classes, split into hard and para- fragments.
+#'
+#' The \code{simplifyFragmentData} function can be applied to data sources
+#' other than NASIS by careful use of the \code{id.var} argument. However,
+#' \code{rf} must contain coarse fragment volumes in the column "fragvol",
+#' fragment size (mm) in columns "fragsize_l", "fragsize_r", "fragsize_h", and
+#' fragment cementation class in "fraghard".
+#'
+#' Examples:
+#' - [KSSL data](http://ncss-tech.github.io/AQP/soilDB/KSSL-demo.html)
+#'
+#' @aliases simplifyFragmentData simplfyFragmentData simplifyArtifactData
+#' @param rf a \code{data.frame} object, typically returned from NASIS, see
+#' details
+#' @param id.var character vector with the name of the column containing an ID
+#' that is unique among all horizons in \code{rf}
+#' @param nullFragsAreZero should fragment volumes of NULL be interpreted as 0?
+#' (default: TRUE), see details
+#' @author D.E. Beaudette
+#' @keywords manip
+#' @export simplifyFragmentData
 simplifyFragmentData <- function(rf, id.var, nullFragsAreZero=TRUE) {
 
   # nasty hack to trick R CMD check
diff --git a/R/simplifyColorData.R b/R/simplifyColorData.R
index 84cc1153..0603212e 100644
--- a/R/simplifyColorData.R
+++ b/R/simplifyColorData.R
@@ -9,6 +9,51 @@
 # This function is heavily biased towared NASIS-specific data structures and assumptions
 # d: data.frame with color data from horizon-color table: expects "colorhue", "colorvalue", "colorchroma"
 # id.var: name of the column with unique horizon IDs
+
+
+#' Simplify Color Data by ID
+#'
+#' Simplify multiple Munsell color observations associated with each horizon.
+#'
+#' This function is mainly intended for the processing of NASIS pedon/horizon
+#' data which may or may not contain multiple colors per horizon/moisture
+#' status combination. \code{simplifyColorData} will "mix" multiple colors
+#' associated with horizons in \code{d}, according to IDs specified by
+#' \code{id.var}, using "weights" (area percentages) specified by the \code{wt}
+#' argument to \code{mix_and_clean_colors}.
+#'
+#' Note that this function doesn't actually simulate the mixture of pigments on
+#' a surface; rather, "mixing" is approximated via weighted average in the
+#' CIELAB colorspace.
+#'
+#' The \code{simplifyColorData} function can be applied to data sources other
+#' than NASIS by careful use of the \code{id.var} and \code{wt} arguments.
+#' However, \code{d} must contain Munsell colors split into columns named
+#' "colorhue", "colorvalue", and "colorchroma". In addition, the moisture state
+#' ("Dry" or "Moist") must be specified in a column named "colormoistst".
+#'
+#' The \code{mix_and_clean_colors} function can be applied to arbitrary data
+#' sources as long as \code{x} contains sRGB coordinates in columns named "r",
+#' "g", and "b". This function should be applied to chunks of rows within which
+#' color mixtures make sense. 
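+#'
+#' A minimal sketch of typical usage (the input \code{data.frame} here is
+#' hypothetical; column names follow the NASIS conventions described above):
+#'
+#' \preformatted{
+#' d <- data.frame(
+#'   phiid = c(1, 1, 2),
+#'   colorhue = c('10YR', '7.5YR', '10YR'),
+#'   colorvalue = c(3, 4, 5),
+#'   colorchroma = c(3, 4, 6),
+#'   colorpct = c(60, 40, 100),
+#'   colormoistst = 'Moist',
+#'   stringsAsFactors = FALSE
+#' )
+#'
+#' # mix colors within each horizon, weighted by area percentage
+#' x <- simplifyColorData(d, id.var = 'phiid', wt = 'colorpct')
+#' }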
+#'
+#' Examples:
+#' - [KSSL data](http://ncss-tech.github.io/AQP/soilDB/KSSL-demo.html)
+#' - [soil color mixing tutorial](http://ncss-tech.github.io/AQP/soilDB/mixing-soil-color-data.html)
+#'
+#' @param d a \code{data.frame} object, typically returned from NASIS, see
+#' details
+#' @param id.var character vector with the name of the column containing an ID
+#' that is unique among all horizons in \code{d}
+#' @param wt a character vector with the name of the column containing color
+#' weights for mixing
+#' @param bt logical, should the mixed sRGB representation of soil color be
+#' transformed to closest Munsell chips? This is performed by
+#' \code{aqp::rgb2Munsell}
+#' @author D.E. Beaudette
+#' @keywords manip
+#' @export simplifyColorData
 simplifyColorData <- function(d, id.var='phiid', wt='colorpct', bt=FALSE) {
 
   # sanity check: must contain 1 row
@@ -91,7 +136,7 @@ simplifyColorData <- function(d, id.var='phiid', wt='colorpct', bt=FALSE) {
   # note: split will re-order IDs
   mc <- split(moist.colors[moist.mix.idx, mix.vars], f = moist.colors[[id.var]][moist.mix.idx])
 
-  # final vesion
+  # final version
   mixed.moist <- lapply(mc, estimateColorMixture, wt = wt, backTransform = bt)
 
   # flatten and copy id.var from rownames
diff --git a/R/soilDB-package.R b/R/soilDB-package.R
new file mode 100644
index 00000000..ea557ecb
--- /dev/null
+++ b/R/soilDB-package.R
@@ -0,0 +1,121 @@
+#' Soil Database Interface
+#'
+#' This package provides methods for extracting soils information from local
+#' PedonPC and AK Site databases (MS Access format), local NASIS databases (MS
+#' SQL Server), and the SDA web service. Currently USDA-NCSS data sources are
+#' supported; however, there are plans to develop interfaces to outside systems
+#' such as the Global Soil Mapping project.
+#'
+#' @name soilDB-package
+#' @aliases soilDB.env soilDB-package soilDB
+#' @docType package
+#' @author J.M. Skovlin, D.E. Beaudette, S.M. Roecker, A.G. Brown
+#' @seealso \code{\link{fetchPedonPC}, \link{fetchNASIS}, \link{SDA_query}, \link{loafercreek}}
+#' @keywords package
+NULL
+
+#' Example \code{SoilProfileCollection} Objects Returned by \code{fetchNASIS}.
+#'
+#' Several examples of soil profile collections returned by
+#' \code{fetchNASIS(from='pedons')} as \code{SoilProfileCollection} objects.
+#'
+#'
+#' @name loafercreek
+#' @aliases loafercreek gopheridge mineralKing
+#' @docType data
+#' @keywords datasets
+#' @examples
+#'
+#' \donttest{
+#' if(require("aqp")) {
+#' # load example dataset
+#' data("gopheridge")
+#'
+#' # what kind of object is this?
+#' class(gopheridge)
+#'
+#' # how many profiles? 
+#' length(gopheridge)
+#'
+#' # there are 60 profiles, this calls for a split plot
+#' par(mar=c(0,0,0,0), mfrow=c(2,1))
+#'
+#' # plot soil colors
+#' plot(gopheridge[1:30, ], name='hzname', color='soil_color')
+#' plot(gopheridge[31:60, ], name='hzname', color='soil_color')
+#'
+#' # need a larger top margin for legend
+#' par(mar=c(0,0,4,0), mfrow=c(2,1))
+#' # generate colors based on clay content
+#' plot(gopheridge[1:30, ], name='hzname', color='clay')
+#' plot(gopheridge[31:60, ], name='hzname', color='clay')
+#'
+#' # single row and no labels
+#' par(mar=c(0,0,0,0), mfrow=c(1,1))
+#' # plot soils sorted by depth to contact
+#' plot(gopheridge, name='', print.id=FALSE, plot.order=order(gopheridge$bedrckdepth))
+#'
+#' # plot first 10 profiles
+#' plot(gopheridge[1:10, ], name='hzname', color='soil_color', label='pedon_id', id.style='side')
+#'
+#' # add rock fragment data to plot:
+#' addVolumeFraction(gopheridge[1:10, ], colname='total_frags_pct')
+#'
+#' # add diagnostic horizons
+#' addDiagnosticBracket(gopheridge[1:10, ], kind='argillic horizon', col='red', offset=-0.4)
+#'
+#' ## loafercreek
+#' data("loafercreek")
+#' # plot first 10 profiles
+#' plot(loafercreek[1:10, ], name='hzname', color='soil_color', label='pedon_id', id.style='side')
+#'
+#' # add rock fragment data to plot:
+#' addVolumeFraction(loafercreek[1:10, ], colname='total_frags_pct')
+#'
+#' # add diagnostic horizons
+#' addDiagnosticBracket(loafercreek[1:10, ], kind='argillic horizon', col='red', offset=-0.4)
+#' }
+#' }
+#'
+NULL
+
+#' SCAN and SNOTEL Station Metadata
+#'
+#' SCAN and SNOTEL station metadata, a work in progress.
+#'
+#' These data have been compiled from several sources and represent a
+#' progressive effort to organize SCAN/SNOTEL station metadata. Therefore, some
+#' records may be missing or incorrect. Details on this effort can be found at
+#' the associated GitHub issue page:
+#' \url{https://github.com/ncss-tech/soilDB/issues/61}.
+#'
+#' @name SCAN_SNOTEL_metadata
+#' @aliases SCAN_SNOTEL_metadata state_FIPS_codes
+#' @docType data
+#' @format A data frame with 1092 observations on the following 12 variables.
+#' \describe{ \item{\code{Name}}{station name} \item{\code{Site}}{station ID}
+#' \item{\code{State}}{state} \item{\code{Network}}{sensor network: SCAN /
+#' SNOTEL} \item{\code{County}}{county} \item{\code{Elevation_ft}}{station
+#' elevation in feet} \item{\code{Latitude}}{latitude of station}
+#' \item{\code{Longitude}}{longitude of station} \item{\code{HUC}}{associated
+#' watershed} \item{\code{climstanm}}{climate station name (TODO: remove this
+#' column)} \item{\code{upedonid}}{associated user pedon ID}
+#' \item{\code{pedlabsampnum}}{associated lab sample ID} }
+#' @keywords datasets
+NULL
+
+#' Timeline of US Published Soil Surveys
+#'
+#' This dataset contains the year each US Soil Survey was published.
+#'
+#' These data were web scraped from the NRCS Soils Website. The scraping
+#' procedure and an example plot are included in the examples section below.
+#'
+#' @name us_ss_timeline
+#' @docType data
+#' @format A data frame with 5209 observations on the following 5 variables. 
+#' \describe{ \item{list("ssa")}{Soil Survey name, a character vector} +#' \item{list("year")}{year of publication, a numeric vector} +#' \item{list("pdf")}{does a pdf exists, a logical vector} +#' \item{list("state")}{State abbreviation, a character vector} } +#' @source https://www.nrcs.usda.gov/wps/portal/nrcs/soilsurvey/soils/survey/state/ +#' @keywords datasets +NULL diff --git a/R/taxaExtent.R b/R/taxaExtent.R index de610a15..97ab681b 100644 --- a/R/taxaExtent.R +++ b/R/taxaExtent.R @@ -1,21 +1,22 @@ - -#' @title Retrieve Soil Taxonomy Membership Grids -#' -#' @description This function downloads a generalized representation of the geographic extent of any single taxa from the top 4 tiers of Soil Taxonomy. Data are provided by SoilWeb, ultimately sourced from from the current SSURGO snapshot. Data are returned as \code{raster} objects representing area proportion falling within 800m cells. Data are only available in CONUS and returned using an Albers Equal Area / NAD83 coordinate reference system. +#' Retrieve Soil Taxonomy Membership Grids #' -#' @param x single taxa name, case-insensitive +#' This function downloads a generalized representation of the geographic +#' extent of any single taxa from the top 4 tiers of Soil Taxonomy. Data are +#' provided by SoilWeb, ultimately sourced from from the current SSURGO +#' snapshot. Data are returned as \code{raster} objects representing area +#' proportion falling within 800m cells. Data are only available in CONUS and +#' returned using an Albers Equal Area / NAD83 coordinate reference system. #' -#' @param level the taxonomic level within the top 4 tiers of Soil Taxonomy, one of \code{c('order', 'suborder', 'greatgroup', 'subgroup')} #' +#' @param x single taxa name, case-insensitive +#' @param level the taxonomic level within the top 4 tiers of Soil Taxonomy, +#' one of \code{c('order', 'suborder', 'greatgroup', 'subgroup')} #' @param timeout time that we are willing to wait for a response, in seconds -#' #' @return a \code{raster} object -#' -#' @author D.E. Beaudette -#' #' @note This is a work in progress. +#' @author D.E. Beaudette +#' @examples #' -#' @examples #' \donttest{ #' #' if(requireNamespace("curl") & @@ -65,6 +66,8 @@ #' #' } #' +#' +#' @export taxaExtent taxaExtent <- function(x, level = c('order', 'suborder', 'greatgroup', 'subgroup'), timeout = 60) { level <- match.arg(level) diff --git a/R/uncode.R b/R/uncode.R index 5f88cae5..e6f6306c 100644 --- a/R/uncode.R +++ b/R/uncode.R @@ -1,3 +1,75 @@ +#' Convert coded values returned from NASIS and SDA queries to factors +#' +#' These functions convert the coded values returned from NASIS or SDA to +#' factors (e.g. 1 = Alfisols) using the metadata tables from NASIS. For SDA +#' the metadata is pulled from a static snapshot in the soilDB package +#' (/data/metadata.rda). +#' +#' These functions convert the coded values returned from NASIS into their +#' plain text representation. It duplicates the functionality of the CODELABEL +#' function found in NASIS. This function is primarily intended to be used +#' internally by other soilDB R functions, in order to minimizes the need to +#' manually convert values. +#' +#' The function works by iterating through the column names in a data frame and +#' looking up whether they match any of the ColumnPhysicalNames found in the +#' metadata domain tables. If matches are found then the columns coded values +#' are converted to their corresponding factor levels. 
Therefore it is not
+#' advisable to reuse column names from NASIS unless the contents match the
+#' range of values and format found in NASIS. Otherwise uncode() will convert
+#' their values to NA.
+#'
+#' When data is being imported from NASIS, the metadata tables are sourced
+#' directly from NASIS. When data is being imported from SDA or the NASIS Web
+#' Reports, the metadata is pulled from a static snapshot in the soilDB
+#' package.
+#'
+#' Beware the default is to return the values as factors rather than strings.
+#' While strings are generally preferable, factors make plotting more
+#' convenient. Generally the factor level ordering returned by uncode() follows
+#' the natural ordering of categories that would be expected (e.g. sand,
+#' silt, clay).
+#'
+#' @aliases metadata uncode code
+#'
+#' @param df data.frame
+#'
+#' @param invert logical, convert the code labels back to their coded values
+#' (default: `FALSE`)
+#'
+#' @param db label specifying the soil database the data is coming from, which
+#' indicates whether or not to query metadata from local NASIS database
+#' ("NASIS") or use soilDB-local snapshot ("LIMS" or "SDA")
+#'
+#' @param droplevels logical: indicating whether to drop unused levels in
+#' classifying factors. This is useful when a class has a large number of
+#' unused classes, which can waste space in tables and figures.
+#'
+#' @param stringsAsFactors logical: should character vectors be converted to
+#' factors?
+#'
+#' @param static_path Optional: path to local SQLite database containing NASIS
+#' table structure; default: `NULL`
+#'
+#' @return A data frame with the results.
+#' @author Stephen Roecker
+#' @keywords manip
+#' @examples
+#'
+#' \donttest{
+#' if(requireNamespace("curl") &
+#'   curl::has_internet() &
+#'   require(aqp)) {
+#'   # query component by nationalmusym
+#'   comp <- fetchSDA(WHERE = "nationalmusym = '2vzcp'")
+#'   s <- site(comp)
+#'
+#'   # use SDA uncoding domain via db argument
+#'   s <- uncode(s, db="SDA")
+#'   levels(s$taxorder)
+#' }
+#' }
+#'
+#' @export uncode
 uncode <- function(df,
                    invert = FALSE,
                    db = "NASIS",
diff --git a/R/waterDayYear.R b/R/waterDayYear.R
index 9e632be8..08332800 100644
--- a/R/waterDayYear.R
+++ b/R/waterDayYear.R
@@ -1,9 +1,32 @@
-
 ## TODO: leap years? 365 vs 366 total days
 
 # compute water year and day
 # d: anythihng the can be safely converted it POSIXlt
 # end: MM-DD notation for end of water year
+
+
+#' Compute Water Day and Year
+#'
+#' Compute "water" day and year, based on the end of the typical or legal dry
+#' season. This is September 30 in California.
+#'
+#' This function doesn't know about leap-years. Probably worth checking.
+#'
+#' @param d anything that can be safely converted to \code{POSIXlt}
+#' @param end "MM-DD" notation for end of water year
+#' @return A \code{data.frame} object with the following \item{wy}{the "water
+#' year"} \item{wd}{the "water day"}
+#' @author D.E. 
Beaudette
+#' @references Ideas borrowed from:
+#' \url{https://github.com/USGS-R/dataRetrieval/issues/246} and
+#' \url{https://stackoverflow.com/questions/48123049/create-day-index-based-on-water-year}
+#' @keywords manip
+#' @examples
+#'
+#' # try it
+#' waterDayYear('2019-01-01')
+#'
+#' @export waterDayYear
 waterDayYear <- function(d, end="09-30") {
 
   # convert to water year, using Sept
   # ideas from: https://github.com/USGS-R/dataRetrieval/issues/246
diff --git a/man/ISSR800.wcs.Rd b/man/ISSR800.wcs.Rd
index 772a0bd6..138e1689 100644
--- a/man/ISSR800.wcs.Rd
+++ b/man/ISSR800.wcs.Rd
@@ -7,34 +7,45 @@
 ISSR800.wcs(aoi, var, res = 800, quiet = FALSE)
 }
 \arguments{
-\item{aoi}{area of interest (AOI) defined using a \code{Spatial*}, a \code{sf}, \code{sfc} or \code{bbox} object or a \code{list}, see details}
+\item{aoi}{area of interest (AOI) defined using a \code{Spatial*}, a
+\code{sf}, \code{sfc} or \code{bbox} object or a \code{list}, see details}
 
 \item{var}{ISSR-800 grid name, see details}
 
-\item{res}{grid resolution, units of meters. The native resolution of ISSR-800 grids (this WCS) is 800m.}
+\item{res}{grid resolution, units of meters. The native resolution of
+ISSR-800 grids (this WCS) is 800m.}
 
-\item{quiet}{logical, passed to \code{download.file} to enable / suppress URL and progress bar for download.}
+\item{quiet}{logical, passed to \code{download.file} to enable / suppress
+URL and progress bar for download.}
 }
 \value{
-\code{raster} object containing indexed map unit keys and associated raster attribute table
+\code{raster} object containing indexed map unit keys and associated
+raster attribute table
}
 \description{
-Intermediate-scale gridded (800m) soil property and interpretation maps from aggregated SSURGO and STATSGO data. These maps were developed by USDA-NRCS-SPSD staff in collaboration with UCD-LAWR. Originally for educational use and \href{https://casoilresource.lawr.ucdavis.edu/soil-properties/}{interactive thematic maps}, these data are a suitable alternative to gridded STATSGO-derived thematic soil maps. The full size grids can be \href{https://casoilresource.lawr.ucdavis.edu/soil-properties/download.php}{downloaded here}.
+Intermediate-scale gridded (800m) soil property and interpretation maps from
+aggregated SSURGO and STATSGO data. These maps were developed by
+USDA-NRCS-SPSD staff in collaboration with UCD-LAWR. Originally for
+educational use and \href{https://casoilresource.lawr.ucdavis.edu/soil-properties/}{interactive thematic maps}, these data are a suitable alternative to gridded STATSGO-derived thematic soil maps. The full size grids can be \href{https://casoilresource.lawr.ucdavis.edu/soil-properties/download.php}{downloaded here}.
 }
 \details{
-\code{aoi} should be specified as either a \code{Spatial*}, \code{sf}, \code{sfc} or \code{bbox} object or a \code{list} containing:
+\code{aoi} should be specified as either a \code{Spatial*}, \code{sf},
+\code{sfc} or \code{bbox} object or a \code{list} containing:
 
-\describe{
-\item{\code{aoi}}{bounding-box specified as (xmin, ymin, xmax, ymax) e.g. c(-114.16, 47.65, -114.08, 47.68)}
-\item{\code{crs}}{coordinate reference system of BBOX, e.g. '+init=epsg:4326'}
-}
+\describe{ \item{\code{aoi}}{bounding-box specified as (xmin, ymin, xmax,
+ymax) e.g. c(-114.16, 47.65, -114.08, 47.68)} \item{\code{crs}}{coordinate
+reference system of BBOX, e.g. 
'+init=epsg:4326'} }
 
-The WCS query is parameterized using \code{raster::extent} derived from the above AOI specification, after conversion to the native CRS (EPSG:6350) of the ISSR-800 grids.
+The WCS query is parameterized using \code{raster::extent} derived from the
+above AOI specification, after conversion to the native CRS (EPSG:6350) of
+the ISSR-800 grids.
 
-Variables available from this WCS can be queried using \code{WCS_details(wcs = 'ISSR800')}.
+Variables available from this WCS can be queried using \code{WCS_details(wcs
+= 'ISSR800')}.
 }
 \note{
-There are still some issues to be resolved related to the encoding of NA Variables with a natural zero (e.g. SAR) have 0 set to NA.
+There are still some issues to be resolved related to the encoding of
+NA. Variables with a natural zero (e.g. SAR) have 0 set to NA.
 }
 \author{
 D.E. Beaudette and A.G. Brown
diff --git a/man/KSSL_VG_model.Rd b/man/KSSL_VG_model.Rd
index 017007fe..c23506a8 100644
--- a/man/KSSL_VG_model.Rd
+++ b/man/KSSL_VG_model.Rd
@@ -7,7 +7,8 @@
 KSSL_VG_model(VG_params, phi_min = 10^-6, phi_max = 10^8, pts = 100)
 }
 \arguments{
-\item{VG_params}{\code{data.frame} or \code{list} object with the parameters of the van Genuchten model, see details}
+\item{VG_params}{\code{data.frame} or \code{list} object with the parameters
+of the van Genuchten model, see details}
 
 \item{phi_min}{lower limit for water potential in kPa}
 
@@ -22,6 +23,15 @@
 A list with the following components:
 
 \item{VG_function}{spline function for converting water potential (phi, units of kPa) to estimated volumetric water content (theta, units of percent, range: \{0, 1\})}
 \item{VG_inverse_function}{spline function for converting volumetric water content (theta, units of percent, range: \{0, 1\}) to estimated water potential (phi, units of kPa)} }
 }
 \description{
 Water retention curve modeling via van Genuchten model and KSSL data.
@@ -36,10 +46,13 @@ This function was developed to work with measured or estimated parameters of the
 }
 \note{
 A practical example is given in the \href{http://ncss-tech.github.io/AQP/soilDB/fetchSCAN-demo.html}{fetchSCAN tutorial}.
 }
 \examples{
+
 # basic example
 d <- data.frame(
   theta_r = 0.0337216,
@@ -52,10 +65,29 @@
 vg <- KSSL_VG_model(d)
 
 str(vg)
+
 }
 \references{
 \href{https://en.wikipedia.org/wiki/Water_retention_curve}{water retention curve estimation}
 }
 \author{
 D.E. Beaudette
 }
diff --git a/man/OSDquery.Rd b/man/OSDquery.Rd
index ab000356..8fafbf54 100644
--- a/man/OSDquery.Rd
+++ b/man/OSDquery.Rd
@@ -36,29 +36,30 @@ OSDquery(
 \item{geog_assoc_soils}{search geographically associated soils section}
 }
 \value{
-a \code{data.frame} object containing soil series names that match patterns supplied as arguments.
+a \code{data.frame} object containing soil series names that match
+patterns supplied as arguments.
 }
 \description{
-This is a rough example of how chunks of text parsed from OSD records can be made search-able with the \href{https://www.postgresql.org/docs/9.5/textsearch.html}{PostgreSQL fulltext indexing} and query system (\href{https://www.postgresql.org/docs/9.5/datatype-textsearch.html}{syntax details}). Each search field (except for the "brief narrative" and MLRA) corresponds with a section header in an OSD. The results may not include every OSD due to formatting errors and typos. Results are scored based on the number of times search terms match words in associated sections. This is the R API corresponding to \href{https://casoilresource.lawr.ucdavis.edu/osd-search/}{this webpage}.
+This is an example of how chunks of text parsed from OSD records can be made search-able with \href{https://www.postgresql.org/docs/9.5/textsearch.html}{PostgreSQL full-text indexing}. This query system utilizes \href{https://www.postgresql.org/docs/9.5/datatype-textsearch.html}{special syntax}.
 }
 \details{
-See \href{https://casoilresource.lawr.ucdavis.edu/osd-search/}{this webpage} for more information.
-\itemize{
-\item family level taxa are derived from SC database, not parsed OSD records
-\item MLRA are derived via spatial intersection (SSURGO x MLRA polygons)
-\item MLRA-filtering is only possible for series used in the current SSURGO snapshot (component name)
-\item logical AND: \code{&}
-\item logical OR: \code{|}
-\item wildcard, e.g. rhy-something \verb{rhy:*}
-\item search terms with spaces need doubled single quotes: \verb{''san joaquin''}
-\item combine search terms into a single expression: \verb{(grano:* | granite)}
-}
+Each search field (except for the "brief narrative" and MLRA) corresponds with a section header in an OSD. The results may not include every OSD due to formatting errors and typos. Results are scored based on the number of times search terms match words in associated sections. This is the R API corresponding to the \href{https://casoilresource.lawr.ucdavis.edu/osd-search/}{SoilWeb PostgreSQL OSD full-text search API}.
+
+See \url{https://casoilresource.lawr.ucdavis.edu/osd-search/}
+for more information. 
\itemize{ \item family level taxa are derived from SC
+database, not parsed OSD records \item MLRA are derived via spatial
+intersection (SSURGO x MLRA polygons) \item MLRA-filtering is only possible
+for series used in the current SSURGO snapshot (component name) \item
+logical AND: \code{&} \item logical OR: \code{|} \item wildcard, e.g.
+rhy-something \verb{rhy:*} \item search terms with spaces need doubled
+single quotes: \verb{''san joaquin''} \item combine search terms into a
+single expression: \verb{(grano:* | granite)} }
 
-Related documentation can be found in the following tutorials
+Related documentation can be found in the following tutorials:
 \itemize{
-\item \href{http://ncss-tech.github.io/AQP/soilDB/soil-series-query-functions.html}{overview of all soil series query functions}
-\item \href{https://ncss-tech.github.io/AQP/soilDB/competing-series.html}{competing soil series}
-\item \href{https://ncss-tech.github.io/AQP/soilDB/siblings.html}{siblings}
+\item \href{http://ncss-tech.github.io/AQP/soilDB/soil-series-query-functions.html}{Soil Series Query Functions}
+\item \href{https://ncss-tech.github.io/AQP/soilDB/competing-series.html}{Competing Soil Series}
+\item \href{https://ncss-tech.github.io/AQP/soilDB/siblings.html}{Siblings}
 }
 }
 \note{
@@ -66,7 +67,6 @@ SoilWeb maintains a snapshot of the Official Series Description data.
 }
 \examples{
-
 \donttest{
 if(requireNamespace("curl") &
     curl::has_internet() &
@@ -84,6 +84,7 @@ if(requireNamespace("curl") &
 
 }
 }
+
 }
 \references{
 \url{https://www.nrcs.usda.gov/wps/portal/nrcs/detailfull/soils/home/?cid=nrcs142p2_053587}
diff --git a/man/ROSETTA.Rd b/man/ROSETTA.Rd
index b56cf4fd..621bb228 100644
--- a/man/ROSETTA.Rd
+++ b/man/ROSETTA.Rd
@@ -7,15 +7,19 @@
 ROSETTA(x, vars, v = c("1", "2", "3"), chunkSize = 10000, conf = NULL)
 }
 \arguments{
-\item{x}{a \code{data.frame} of required soil properties, may contain other columns, see details}
+\item{x}{a \code{data.frame} of required soil properties, may contain other
+columns, see details}
 
-\item{vars}{character vector of column names in \code{x} containing relevant soil property values, see details}
+\item{vars}{character vector of column names in \code{x} containing relevant
+soil property values, see details}
 
-\item{v}{ROSETTA model version number: '1', '2', or '3', see details and references.}
+\item{v}{ROSETTA model version number: '1', '2', or '3', see details and
+references.}
 
 \item{chunkSize}{number of records per API call}
 
-\item{conf}{configuration passed to \code{httr::POST()} such as \code{verbose()}.}
+\item{conf}{configuration passed to \code{httr::POST()} such as
+\code{verbose()}.}
 }
 \value{
 a \code{data.frame} object with the following columns:
@@ -29,9 +33,22 @@ a \code{data.frame} object with the following columns:
 \item \code{.rosetta.model}: best-available model selection (-1 signifies that prediction was not possible due to missing values in \code{x})
 \item \code{.rosetta.version}: ROSETTA algorithm version, selected via function argument \code{v}
 }
 }
 \description{
-A simple interface to the \href{https://www.ars.usda.gov/pacific-west-area/riverside-ca/agricultural-water-efficiency-and-salinity-research-unit/docs/model/rosetta-model/}{ROSETTA model} for predicting hydraulic parameters from soil properties. The ROSETTA API was developed by Dr. Todd Skaggs (USDA-ARS) and links to the work of Zhang and Schaap, (2017). See the \href{http://ncss-tech.github.io/AQP/soilDB/ROSETTA-API.html}{related tutorial} for additional examples.
+A simple interface to the \href{https://www.ars.usda.gov/pacific-west-area/riverside-ca/agricultural-water-efficiency-and-salinity-research-unit/docs/model/rosetta-model/}{ROSETTA model} for predicting hydraulic parameters from soil properties. The ROSETTA API was developed by Dr. Todd Skaggs (USDA-ARS) and links to the work of Zhang and Schaap, (2017). See the \href{http://ncss-tech.github.io/AQP/soilDB/ROSETTA-API.html}{ROSETTA API tutorial} for additional examples.
 }
 \details{
 Soil properties supplied in \code{x} must be described, in order, via \code{vars} argument. The API does not use the names but column ordering must follow: sand, silt, clay, bulk density, volumetric water content at 33kPa (1/3 bar), and volumetric water content at 1500 kPa (15 bar).
@@ -55,6 +72,10 @@ Three versions of the ROSETTA model are available, selected using \code{v = 1},
 }
 \note{
 Input data should not contain columns names that will conflict with the ROSETTA API results: \code{theta_r}, \code{theta_s}, \code{alpha}, \code{npar}, \code{ksat}.
 }
 \references{
 Consider using the interactive version, with copy/paste functionality at: \url{https://www.handbook60.org/rosetta}.
@@ -86,7 +107,115 @@ Schaap, M.G., F.J. Leij, and M.Th. van Genuchten. 2001. ROSETTA: a computer prog
 Schaap, M.G., A. Nemes, and M.T. van Genuchten. 2004. Comparison of Models for Indirect Estimation of Water Retention and Available Water in Surface Soils. Vadose Zone Journal 3(4): 1455-1463. doi: \doi{10.2136/vzj2004.1455}.
 
 Zhang, Y., and M.G. Schaap. 2017. Weighted recalibration of the Rosetta pedotransfer model with improved estimates of hydraulic parameter distributions and summary statistics (Rosetta3). Journal of Hydrology 547: 39-53. doi: \doi{10.1016/j.jhydrol.2017.01.004}.
 }
 \author{
 D.E. Beaudette, Todd Skaggs (ARS), Richard Reid
 }
diff --git a/man/SCAN_SNOTEL_metadata.Rd b/man/SCAN_SNOTEL_metadata.Rd
index bd2c1b89..eb276445 100644
--- a/man/SCAN_SNOTEL_metadata.Rd
+++ b/man/SCAN_SNOTEL_metadata.Rd
@@ -1,34 +1,27 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/soilDB-package.R
+\docType{data}
 \name{SCAN_SNOTEL_metadata}
 \alias{SCAN_SNOTEL_metadata}
 \alias{state_FIPS_codes}
-
-\docType{data}
-
 \title{SCAN and SNOTEL Station Metadata}
-
-\description{SCAN and SNOTEL station metadata, a work in progress.}
-
-\usage{data("SCAN_SNOTEL_metadata")}
-
 \format{
-  A data frame with 1092 observations on the following 12 variables.
-  \describe{
-    \item{\code{Name}}{station name}
-    \item{\code{Site}}{station ID}
-    \item{\code{State}}{state}
-    \item{\code{Network}}{sensor network: SCAN / SNOTEL}
-    \item{\code{County}}{county}
-    \item{\code{Elevation_ft}}{station elevation in feet}
-    \item{\code{Latitude}}{latitude of station}
-    \item{\code{Longitude}}{longitude of station}
-    \item{\code{HUC}}{associated watershed}
-    \item{\code{climstanm}}{climate station name (TODO: remove this column)}
-    \item{\code{upedonid}}{associated user pedon ID}
-    \item{\code{pedlabsampnum}}{associated lab sample ID}
-  }
+A data frame with 1092 observations on the following 12 variables. 
+\describe{ \item{list("Name")}{station name} \item{list("Site")}{station ID} +\item{list("State")}{state} \item{list("Network")}{sensor network: SCAN / +SNOTEL} \item{list("County")}{county} \item{list("Elevation_ft")}{station +elevation in feet} \item{list("Latitude")}{latitude of station} +\item{list("Longitude")}{longitude of station} \item{list("HUC")}{associated +watershed} \item{list("climstanm")}{climate station name (TODO: remove this +column)} \item{list("upedonid")}{associated user pedon ID} +\item{list("pedlabsampnum")}{associated lab sample ID} } +} +\description{ +SCAN and SNOTEL station metadata, a work in progress. +} +\details{ +These data have been compiled from several sources and represent a +progressive effort to organize SCAN/SNOTEL station metadata. Therefore, some +records may be missing or incorrect. } - -\details{These data have been compiled from several sources and represent a progressive effort to organize SCAN/SNOTEL station metadata. Therefore, some records may be missing or incorrect. Details on this effort can be found at the associated GitHub issue page: \url{https://github.com/ncss-tech/soilDB/issues/61}.} - - \keyword{datasets} diff --git a/man/SDA_query.Rd b/man/SDA_query.Rd index 7fd99c15..a6b9616b 100644 --- a/man/SDA_query.Rd +++ b/man/SDA_query.Rd @@ -13,17 +13,33 @@ SDA_query(q) a data.frame result (\code{NULL} if empty, try-error on error) } \description{ -Submit a query to the Soil Data Access (SDA) REST/JSON web-service and return the results as a data.frame. There is a 100,000 record limit and 32Mb JSON serializer limit, per query. Queries should contain a WHERE statement or JOIN condition to limit the number of rows affected / returned. Consider wrapping calls to \code{SDA_query} in a function that can iterate over logical chunks (e.g. areasymbol, mukey, cokey, etc.). The function \code{makeChunks} can help with such iteration. +Submit a query to the Soil Data Access (SDA) REST/JSON web-service and +return the results as a data.frame. There is a 100,000 record limit and 32Mb +JSON serializer limit, per query. Queries should contain a WHERE statement +or JOIN condition to limit the number of rows affected / returned. Consider +wrapping calls to \code{SDA_query} in a function that can iterate over +logical chunks (e.g. areasymbol, mukey, cokey, etc.). The function +\code{makeChunks} can help with such iteration. } \details{ -The SDA website can be found at \url{https://sdmdataaccess.nrcs.usda.gov} and query examples can be found at \url{https://sdmdataaccess.nrcs.usda.gov/QueryHelp.aspx}. A library of query examples can be found at \url{https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=SDA-SQL_Library_Home}. +The SDA website can be found at \url{https://sdmdataaccess.nrcs.usda.gov} +and query examples can be found at +\url{https://sdmdataaccess.nrcs.usda.gov/QueryHelp.aspx}. A library of query +examples can be found at +\url{https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=SDA-SQL_Library_Home}. -SSURGO (detailed soil survey) and STATSGO (generalized soil survey) data are stored together within SDA. This means that queries that don't specify an area symbol may result in a mixture of SSURGO and STATSGO records. See the examples below and the \href{http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html}{SDA Tutorial} for details. +SSURGO (detailed soil survey) and STATSGO (generalized soil survey) data are +stored together within SDA. 
This means that queries that don't specify an +area symbol may result in a mixture of SSURGO and STATSGO records. See the +examples below and the \href{http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html}{SDA Tutorial} +for details. } \note{ -This function requires the \code{httr}, \code{jsonlite}, and \code{XML} packages +This function requires the \code{httr}, \code{jsonlite}, and +\code{XML} packages } \examples{ + \donttest{ if(requireNamespace("curl") & curl::has_internet()) { @@ -83,6 +99,7 @@ if(requireNamespace("curl") & } } } + } \seealso{ \code{\link{mapunit_geom_by_ll_bbox}} diff --git a/man/SDA_spatialQuery.Rd b/man/SDA_spatialQuery.Rd index 42b32f72..144e4f29 100644 --- a/man/SDA_spatialQuery.Rd +++ b/man/SDA_spatialQuery.Rd @@ -14,33 +14,55 @@ SDA_spatialQuery( ) } \arguments{ -\item{geom}{a Spatial* object, with valid CRS. May contain multiple features.} +\item{geom}{a Spatial* object, with valid CRS. May contain multiple +features.} -\item{what}{a character vector specifying what to return. 'mukey': \code{data.frame} with intersecting map unit keys and names, \code{geom} overlapping or intersecting map unit polygons} +\item{what}{a character vector specifying what to return. 'mukey': +\code{data.frame} with intersecting map unit keys and names, \code{geom} +overlapping or intersecting map unit polygons} -\item{geomIntersection}{logical; \code{FALSE}: overlapping map unit polygons returned, \code{TRUE}: intersection of \code{geom} + map unit polygons is returned.} +\item{geomIntersection}{logical; \code{FALSE}: overlapping map unit polygons +returned, \code{TRUE}: intersection of \code{geom} + map unit polygons is +returned.} \item{db}{a character vector identifying the Soil Geographic Databases -('SSURGO' or 'STATSGO') to query. Option \var{STATSGO} currently works -only in combination with \code{what = "geom"}.} +('SSURGO' or 'STATSGO') to query. Option \var{STATSGO} currently works only +in combination with \code{what = "geom"}.} } \value{ -A \code{data.frame} if \code{what = 'mukey'}, otherwise \code{SpatialPolygonsDataFrame} object. +A \code{data.frame} if \code{what = 'mukey'}, otherwise +\code{SpatialPolygonsDataFrame} object. } \description{ -Query SDA (SSURGO / STATSGO) records via spatial intersection with supplied geometries. Input can be SpatialPoints, SpatialLines, or SpatialPolygons objects with a valid CRS. Map unit keys, overlapping polygons, or the spatial intersection of \code{geom} + SSURGO / STATSGO polygons can be returned. See details. +Query SDA (SSURGO / STATSGO) records via spatial intersection with supplied +geometries. Input can be SpatialPoints, SpatialLines, or SpatialPolygons +objects with a valid CRS. Map unit keys, overlapping polygons, or the +spatial intersection of \code{geom} + SSURGO / STATSGO polygons can be +returned. See details. } \details{ -Queries for map unit keys are always more efficient vs. queries for overlapping or intersecting (i.e. least efficient) features. \code{geom} is converted to GCS / WGS84 as needed. Map unit keys are always returned when using \code{what = "geom"}. +Queries for map unit keys are always more efficient vs. queries for +overlapping or intersecting (i.e. least efficient) features. \code{geom} is +converted to GCS / WGS84 as needed. Map unit keys are always returned when +using \code{what = "geom"}. There is a 100,000 record limit and 32Mb JSON serializer limit, per query. 
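+
+A minimal sketch of a map unit key query (the point location here is
+hypothetical):
+
+\preformatted{
+library(sp)
+
+# a single point with a valid CRS (WGS84)
+p <- SpatialPoints(cbind(-121, 38),
+                   proj4string = CRS('+proj=longlat +datum=WGS84'))
+
+# intersecting map unit keys and names
+res <- SDA_spatialQuery(p, what = 'mukey')
+}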
-SSURGO (detailed soil survey, typically 1:24,000 scale) and STATSGO (generalized soil survey, 1:250,000 scale) data are stored together within SDA. This means that queries that don't specify an area symbol may result in a mixture of SSURGO and STATSGO records. See the examples below and the \href{http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html}{SDA Tutorial} for details. +SSURGO (detailed soil survey, typically 1:24,000 scale) and STATSGO +(generalized soil survey, 1:250,000 scale) data are stored together within +SDA. This means that queries that don't specify an area symbol may result in +a mixture of SSURGO and STATSGO records. See the examples below and the +\href{http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html}{SDA Tutorial} +for details. } \note{ -Row-order is not preserved across features in \code{geom} and returned object. Use \code{sp::over()} or similar functionality to extract from results. Polygon area in acres is computed server-side when \code{what = 'geom'} and \code{geomIntersection = TRUE}. +Row-order is not preserved across features in \code{geom} and returned +object. Use \code{sp::over()} or similar functionality to extract from +results. Polygon area in acres is computed server-side when \code{what = +'geom'} and \code{geomIntersection = TRUE}. } \examples{ + \donttest{ if(requireNamespace("curl") & curl::has_internet() & @@ -177,6 +199,7 @@ mtext( } } + } \seealso{ \code{\link{SDA_query}} diff --git a/man/STRplot.Rd b/man/STRplot.Rd index 61995c12..3eb26b81 100644 --- a/man/STRplot.Rd +++ b/man/STRplot.Rd @@ -1,41 +1,45 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/STR.R \name{STRplot} \alias{STRplot} - \title{Graphical Description of US Soil Taxonomy Soil Temperature Regimes} -\description{Graphical Description of US Soil Taxonomy Soil Temperature Regimes} - \usage{ STRplot(mast, msst, mwst, permafrost = FALSE, pt.cex = 2.75, leg.cex = 0.85) } - \arguments{ - \item{mast}{single value or vector of mean annual soil temperature (deg C)} - \item{msst}{single value or vector of mean summer soil temperature (deg C)} - \item{mwst}{single value of mean winter soil temperature (deg C)} - \item{permafrost}{logical: permafrost presence / absence} - \item{pt.cex}{symbol size} - \item{leg.cex}{legend size} -} +\item{mast}{single value or vector of mean annual soil temperature (deg C)} -\details{ -\href{http://ncss-tech.github.io/AQP/soilDB/STR-eval.html}{Related tutorial}. -} +\item{msst}{single value or vector of mean summer soil temperature (deg C)} -\references{ -Soil Survey Staff. 2015. Illustrated guide to soil taxonomy. U.S. Department of Agriculture, Natural Resources Conservation Service, National Soil Survey Center, Lincoln, Nebraska. -} +\item{mwst}{single value of mean winter soil temperature (deg C)} -\author{D.E. Beaudette} +\item{permafrost}{logical: permafrost presence / absence} +\item{pt.cex}{symbol size} -\seealso{ -\code{\link{estimateSTR}} +\item{leg.cex}{legend size} +} +\description{ +Graphical Description of US Soil Taxonomy Soil Temperature Regimes +} +\details{ +\href{http://ncss-tech.github.io/AQP/soilDB/STR-eval.html}{Soil Temperature Regime Evaluation Tutorial} } - \examples{ + par(mar=c(4,1,0,1)) STRplot(mast = 0:25, msst = 10, mwst = 1) -} - -\keyword{ hplot }% use one of RShowDoc("KEYWORDS") +} +\references{ +Soil Survey Staff. 2015. Illustrated guide to soil taxonomy. +U.S. Department of Agriculture, Natural Resources Conservation Service, +National Soil Survey Center, Lincoln, Nebraska. 
+} +\seealso{ +\code{\link{estimateSTR}} +} +\author{ +D.E. Beaudette +} +\keyword{hplot} diff --git a/man/SoilWeb_spatial_query.Rd b/man/SoilWeb_spatial_query.Rd new file mode 100644 index 00000000..cabc8cbd --- /dev/null +++ b/man/SoilWeb_spatial_query.Rd @@ -0,0 +1,61 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/SSURGO_spatial_query.R +\name{SoilWeb_spatial_query} +\alias{SoilWeb_spatial_query} +\title{Get SSURGO Data via Spatial Query} +\usage{ +SoilWeb_spatial_query( + bbox = NULL, + coords = NULL, + what = "mapunit", + source = "soilweb" +) +} +\arguments{ +\item{bbox}{a bounding box in WGS84 geographic coordinates, see examples} + +\item{coords}{a coordinate pair in WGS84 geographic coordinates, see +examples} + +\item{what}{data to query, currently ignored} + +\item{source}{the data source, currently ignored} +} +\value{ +The data returned from this function will depend on the query style. +See examples below. +} +\description{ +Get SSURGO Data via Spatial Query to SoilWeb +} +\details{ +Data are currently available from SoilWeb. These data are a snapshot of the +"official" data. The snapshot date is encoded in the "soilweb_last_update" +column in the function return value. Planned updates to this function will +include a switch to determine the data source: "official" data via USDA-NRCS +servers, or a "snapshot" via SoilWeb. +} +\note{ +This function should be considered experimental; arguments, results, +and side-effects could change at any time. SDA now supports spatial queries, +consider using \code{\link{SDA_query_features}} instead. +} +\examples{ + +\donttest{ +if(requireNamespace("curl") & + curl::has_internet()) { + + # query by bbox + SoilWeb_spatial_query(bbox=c(-122.05, 37, -122, 37.05)) + + # query by coordinate pair + SoilWeb_spatial_query(coords=c(-121, 38)) +} +} + +} +\author{ +D.E. Beaudette +} +\keyword{manip} diff --git a/man/WCS_details.Rd b/man/WCS_details.Rd index 225a7ccf..09355c0a 100644 --- a/man/WCS_details.Rd +++ b/man/WCS_details.Rd @@ -13,9 +13,12 @@ WCS_details(wcs = c("mukey", "ISSR800")) a \code{data.frame} } \description{ -List variables or databases provided by soilDB web coverage service (WCS) abstraction. These lists will be expanded in future versions. +List variables or databases provided by soilDB web coverage service (WCS) +abstraction. These lists will be expanded in future versions. } \examples{ + WCS_details(wcs = 'ISSR800') + } diff --git a/man/createStaticNASIS.Rd b/man/createStaticNASIS.Rd index f1c95933..062fa108 100644 --- a/man/createStaticNASIS.Rd +++ b/man/createStaticNASIS.Rd @@ -2,7 +2,8 @@ % Please edit documentation in R/createStaticNASIS.R \name{createStaticNASIS} \alias{createStaticNASIS} -\title{Create a memory or file-based instance of NASIS database (for selected tables)} +\title{Create a memory or file-based instance of NASIS database (for selected +tables)} \usage{ createStaticNASIS( tables = NULL, @@ -13,26 +14,35 @@ createStaticNASIS( ) } \arguments{ -\item{tables}{Character vector of target tables. Default: \code{NULL} is all tables meeting the following criteria.} +\item{tables}{Character vector of target tables. Default: \code{NULL} is all +tables meeting the following criteria.} -\item{SS}{Logical. Include "selected set" tables (ending with suffix \code{"_View1"}). Default: \code{FALSE}} +\item{SS}{Logical. Include "selected set" tables (ending with suffix +\code{"_View1"}). Default: \code{FALSE}} -\item{systables}{Logical. 
Include "system" tables (starting with prefix \code{"system"}). Default: \code{FALSE}} +\item{systables}{Logical. Include "system" tables (starting with prefix +\code{"system"}). Default: \code{FALSE}} -\item{static_path}{Optional: path to SQLite database containing NASIS table structure; Default: \code{NULL}} +\item{static_path}{Optional: path to SQLite database containing NASIS table +structure; Default: \code{NULL}} -\item{output_path}{Optional: path to new/existing SQLite database to write tables to. Default: \code{NULL} returns table results as named list.} +\item{output_path}{Optional: path to new/existing SQLite database to write +tables to. Default: \code{NULL} returns table results as named list.} } \value{ -A named list of results from calling \code{dbQueryNASIS} for all columns in each NASIS table. +A named list of results from calling \code{dbQueryNASIS} for all +columns in each NASIS table. } \description{ -Create a memory or file-based instance of NASIS database (for selected tables) +Create a memory or file-based instance of NASIS database (for selected +tables) } \examples{ + \dontrun{ str(createStaticNASIS(tables = c("calculation","formtext"))) } + } diff --git a/man/dbConnectNASIS.Rd b/man/dbConnectNASIS.Rd index 5697bbca..91026213 100644 --- a/man/dbConnectNASIS.Rd +++ b/man/dbConnectNASIS.Rd @@ -7,10 +7,12 @@ dbConnectNASIS(static_path = NULL) } \arguments{ -\item{static_path}{Optional: path to SQLite database containing NASIS table structure; Default: \code{NULL}} +\item{static_path}{Optional: path to SQLite database containing NASIS table +structure; Default: \code{NULL}} } \value{ -A \code{DBIConnection} object, as returned by \code{DBI::dbConnect()}. +A \code{DBIConnection} object, as returned by +\code{DBI::dbConnect()}. } \description{ Create a connection to a local NASIS database diff --git a/man/dbQueryNASIS.Rd b/man/dbQueryNASIS.Rd index 7965d3ff..75fb76a6 100644 --- a/man/dbQueryNASIS.Rd +++ b/man/dbQueryNASIS.Rd @@ -7,7 +7,8 @@ dbQueryNASIS(conn, q, close = TRUE, ...) } \arguments{ -\item{conn}{A \code{DBIConnection} object, as returned by \code{DBI::dbConnect()}.} +\item{conn}{A \code{DBIConnection} object, as returned by +\code{DBI::dbConnect()}.} \item{q}{A statement to execute using \code{DBI::dbGetQuery}} diff --git a/man/dot-dump_NASIS_table.Rd b/man/dot-dump_NASIS_table.Rd index eac48870..8ab501a5 100644 --- a/man/dot-dump_NASIS_table.Rd +++ b/man/dot-dump_NASIS_table.Rd @@ -9,7 +9,8 @@ \arguments{ \item{table_name}{Character name of table.} -\item{static_path}{Optional: path to SQLite database containing NASIS table structure; Default: \code{NULL}} +\item{static_path}{Optional: path to SQLite database containing NASIS table +structure; Default: \code{NULL}} } \value{ A data.frame or other result of \code{DBI::dbGetQuery} diff --git a/man/estimateColorMixture.Rd b/man/estimateColorMixture.Rd index 4de22f10..40257685 100644 --- a/man/estimateColorMixture.Rd +++ b/man/estimateColorMixture.Rd @@ -7,11 +7,14 @@ estimateColorMixture(x, wt = "pct", backTransform = FALSE) } \arguments{ -\item{x}{data.frame, typically from NASIS containing at least CIE LAB ('L', 'A', 'B') and some kind of weight} +\item{x}{data.frame, typically from NASIS containing at least CIE LAB ('L', +'A', 'B') and some kind of weight} \item{wt}{fractional weights, usually area of hz face} -\item{backTransform}{logical, should the mixed sRGB representation of soil color be transformed to closest Munsell chips? 
This is performed by aqp::rgb2Munsell default: \code{FALSE}} +\item{backTransform}{logical, should the mixed sRGB representation of soil +color be transformed to closest Munsell chips? This is performed by +aqp::rgb2Munsell default: \code{FALSE}} } \value{ A data.frame containing estimated color mixture @@ -20,7 +23,8 @@ A data.frame containing estimated color mixture Estimate color mixtures using weighted average of CIELAB color coordinates } \note{ -See \code{\link[aqp]{mixMunsell}} for a more realistic (but slower) simulation of subtractive mixing of pigments. +See \code{\link[aqp]{mixMunsell}} for a more realistic (but slower) +simulation of subtractive mixing of pigments. } \author{ D.E. Beaudette diff --git a/man/estimateSTR.Rd b/man/estimateSTR.Rd index 1cc735fc..2d043ab0 100644 --- a/man/estimateSTR.Rd +++ b/man/estimateSTR.Rd @@ -1,44 +1,59 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/STR.R \name{estimateSTR} \alias{estimateSTR} - \title{Estimate Soil Temperature Regime} -\description{Estimate soil temperature regime (STR) based on mean annual soil temperature (MAST), mean summer temperature (MSST), mean winter soil temperature (MWST), presence of O horizons, saturated conditions, and presence of permafrost. Several assumptions are made when O horizon or saturation are undefined.} - \usage{ -estimateSTR(mast, mean.summer, mean.winter, O.hz = NA, saturated = NA, permafrost = FALSE) +estimateSTR( + mast, + mean.summer, + mean.winter, + O.hz = NA, + saturated = NA, + permafrost = FALSE +) } - \arguments{ - \item{mast}{vector of mean annual soil temperature (deg C)} - \item{mean.summer}{vector of mean summer soil temperature (deg C)} - \item{mean.winter}{vector of mean winter soil temperature (deg C)} - \item{O.hz}{logical vector of O horizon presence / absence} - \item{saturated}{logical vector of seasonal saturation} - \item{permafrost}{logical vector of permafrost presence / absence} -} +\item{mast}{vector of mean annual soil temperature (deg C)} -\details{ -\href{http://ncss-tech.github.io/AQP/soilDB/STR-eval.html}{Related tutorial}. -} +\item{mean.summer}{vector of mean summer soil temperature (deg C)} -\value{Vector of soil temperature regimes.} +\item{mean.winter}{vector of mean winter soil temperature (deg C)} -\references{ -Soil Survey Staff. 2015. Illustrated guide to soil taxonomy. U.S. Department of Agriculture, Natural Resources Conservation Service, National Soil Survey Center, Lincoln, Nebraska. -} - -\author{D.E. Beaudette} +\item{O.hz}{logical vector of O horizon presence / absence} +\item{saturated}{logical vector of seasonal saturation} -\seealso{ -\code{\link{STRplot}} +\item{permafrost}{logical vector of permafrost presence / absence} +} +\value{ +Vector of soil temperature regimes. +} +\description{ +Estimate soil temperature regime (STR) based on mean annual soil temperature +(MAST), mean summer temperature (MSST), mean winter soil temperature (MWST), +presence of O horizons, saturated conditions, and presence of permafrost. +Several assumptions are made when O horizon or saturation are undefined. +} +\details{ +\href{http://ncss-tech.github.io/AQP/soilDB/STR-eval.html}{Soil Temperature Regime Evaluation Tutorial} } - \examples{ + # simple example estimateSTR(mast=17, mean.summer = 22, mean.winter = 12) -} - -\keyword{ manip }% use one of RShowDoc("KEYWORDS") +} +\references{ +Soil Survey Staff. 2015. Illustrated guide to soil taxonomy. +U.S. 
Department of Agriculture, Natural Resources Conservation Service, +National Soil Survey Center, Lincoln, Nebraska. +} +\seealso{ +\code{\link{STRplot}} +} +\author{ +D.E. Beaudette +} +\keyword{manip} diff --git a/man/fetchGDB.Rd b/man/fetchGDB.Rd index ee031d50..3edf972d 100644 --- a/man/fetchGDB.Rd +++ b/man/fetchGDB.Rd @@ -1,63 +1,58 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_component_from_GDB.R \name{fetchGDB} \alias{fetchGDB} \alias{get_legend_from_GDB} \alias{get_mapunit_from_GDB} \alias{get_component_from_GDB} - \title{Load and Flatten Data from SSURGO file geodatabases} -\description{Functions to load and flatten commonly used tables and from SSURGO file geodatabases, and create soil profile collection objects (SPC).} \usage{ -fetchGDB(dsn = "gNATSGO_CONUS.gdb", - WHERE = NULL, - childs = TRUE, - droplevels = TRUE, - stringsAsFactors = TRUE - ) - - -get_legend_from_GDB(dsn = "gNATSGO_CONUS.gdb", - WHERE = NULL, - droplevels = TRUE, - stringsAsFactors = TRUE, - stats = FALSE - ) - -get_mapunit_from_GDB(dsn = "gNATSGO_CONUS.gdb", - WHERE = NULL, - droplevels = TRUE, - stringsAsFactors = TRUE, - stats = FALSE - ) - -get_component_from_GDB(dsn = "gNATSGO_CONUS.gdb", - WHERE = NULL, - childs = FALSE, - droplevels = TRUE, - stringsAsFactors = TRUE - ) - +fetchGDB( + dsn = "gNATSGO_CONUS.gdb", + WHERE = NULL, + childs = TRUE, + droplevels = TRUE, + stringsAsFactors = TRUE +) } - - \arguments{ - \item{dsn}{data source name (interpretation varies by driver - for some drivers, dsn is a file name, but may also be a folder, or contain the name and access credentials of a database); in case of GeoJSON, dsn may be the character string holding the geojson data. It can also be an open database connection.} - \item{WHERE}{text string formatted as an SQL WHERE clause (default: FALSE)} - \item{childs}{logical; if FALSE parent material and geomorphic child tables are not flattened and appended} - \item{droplevels}{logical: indicating whether to drop unused levels in classifying factors. This is useful when a class has large number of unused classes, which can waste space in tables and figures.} - \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)} - \item{stats}{Return extended summary statistics (for legend or mapunit only)} - } - - -\details{These functions return data from SSURGO file geodatabases with the use of a simple text string that formatted as an SQL WHERE clause (e.g. \code{WHERE = "areasymbol = 'IN001'"}. Any columns within the target table can be specified (except for fetchGDB() currently, which only targets the legend with the WHERE clause). +\item{dsn}{data source name (interpretation varies by driver - for some +drivers, dsn is a file name, but may also be a folder, or contain the name +and access credentials of a database); in case of GeoJSON, dsn may be the +character string holding the geojson data. It can also be an open database +connection.} + +\item{WHERE}{text string formatted as an SQL WHERE clause (default: FALSE)} + +\item{childs}{logical; if FALSE parent material and geomorphic child tables +are not flattened and appended} + +\item{droplevels}{logical: indicating whether to drop unused levels in +classifying factors. 
This is useful when a class has a large number of unused
+classes, which can waste space in tables and figures.}
+
+\item{stringsAsFactors}{logical: should character vectors be converted to
+factors? This argument is passed to the uncode() function. It does not
+convert those vectors that have been set outside of uncode() (i.e. hard
+coded). The 'factory-fresh' default is TRUE, but this can be changed by
+setting options(stringsAsFactors = FALSE)}
+}
+\value{
+A \code{data.frame} or \code{SoilProfileCollection} object.
+}
+\description{
+Functions to load and flatten commonly used tables from SSURGO file
+geodatabases, and create soil profile collection objects (SPC).
+}
+\details{
+These functions return data from SSURGO file geodatabases with the use of a
+simple text string that is formatted as an SQL WHERE clause (e.g. \code{WHERE =
+"areasymbol = 'IN001'"}). Any columns within the target table can be
+specified (except for fetchGDB() currently, which only targets the legend
+with the WHERE clause).
}
-\value{A \code{data.frame} or \code{SoilProfileCollection} object.}
-\author{Stephen Roecker}
-
-
-%% ~Make other sections like Warning with \section{Warning }{....} ~
-
\examples{
+
\donttest{

## replace `dsn` with path to your own geodatabase (SSURGO OR gNATSGO)

@@ -79,5 +74,9 @@ get_component_from_GDB(dsn = "gNATSGO_CONUS.gdb",

# f_in_GDB <- fetchGDB(WHERE = "areasymbol LIKE 'IN\%'")
}
+
+}
+\author{
+Stephen Roecker
}
\keyword{manip}
diff --git a/man/fetchHenry.Rd b/man/fetchHenry.Rd
index a9f680f0..f11a49fe 100644
--- a/man/fetchHenry.Rd
+++ b/man/fetchHenry.Rd
@@ -1,45 +1,80 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/fetchHenry.R
\name{fetchHenry}
\alias{fetchHenry}
\alias{month2season}
\alias{summarizeSoilTemperature}
-
\title{Download Data from the Henry Mount Soil Temperature and Water Database}
-
-\description{This function is a front-end to the REST query functionality of the Henry Mount Soil Temperature and Water Database.}
-
\usage{
-fetchHenry(what='all', usersiteid = NULL, project = NULL, sso = NULL,
-gran = "day", start.date = NULL, stop.date = NULL,
-pad.missing.days = TRUE, soiltemp.summaries = TRUE)
+fetchHenry(
+  what = "all",
+  usersiteid = NULL,
+  project = NULL,
+  sso = NULL,
+  gran = "day",
+  start.date = NULL,
+  stop.date = NULL,
+  pad.missing.days = TRUE,
+  soiltemp.summaries = TRUE
+)
}
-
\arguments{
- \item{what}{type of data to return: 'sensors': sensor metadata only | 'soiltemp': sensor metadata + soil temperature data | 'soilVWC': sensor metadata + soil moisture data | 'airtemp': sensor metadata + air temperature data | 'waterlevel': sensor metadata + water level data |'all': sensor metadata + all sensor data}
- \item{usersiteid}{(optional) filter results using a NASIS user site ID}
- \item{project}{(optional) filter results using a project ID}
- \item{sso}{(optional) filter results using a soil survey office code}
- \item{gran}{data granularity: "day", "week", "month", "year"; returned data are averages}
- \item{start.date}{(optional) starting date filter}
- \item{stop.date}{(optional) ending date filter}
- \item{pad.missing.days}{should missing data ("day" granularity) be filled with NA? see details}
- \item{soiltemp.summaries}{should soil temperature ("day" granularity only) be summarized?
see details}
-}
+\item{what}{type of data to return: 'sensors': sensor metadata only |
+'soiltemp': sensor metadata + soil temperature data | 'soilVWC': sensor
+metadata + soil moisture data | 'airtemp': sensor metadata + air temperature
+data | 'waterlevel': sensor metadata + water level data | 'all': sensor
+metadata + all sensor data}
-\details{Filling missing days with NA is useful for computing and index of how complete the data are, and for estimating (mostly) unbiased MAST and seasonal mean soil temperatures. Summaries are computed by first averaging over Julian day, then averaging over all days of the year (MAST) or just those days that occur within "summer" or "winter". This approach makes it possible to estimate summaries in the presence of missing data. The quality of summaries should be weighted by the number of "functional years" (number of years with non-missing data after combining data by Julian day) and "complete years" (number of years of data with >= 365 days of non-missing data).}
+\item{usersiteid}{(optional) filter results using a NASIS user site ID}
-\value{a list containing:
- \item{sensors}{a \code{SpatialPointsDataFrame} object containing site-level information}
- \item{soiltemp}{a \code{data.frame} object containing soil temperature timeseries data}
- \item{soilVWC}{a \code{data.frame} object containing soil moisture timeseries data}
- \item{airtemp}{a \code{data.frame} object containing air temperature timeseries data}
- \item{waterlevel}{a \code{data.frame} object containing water level timeseries data}
-}
+\item{project}{(optional) filter results using a project ID}
-\author{D.E. Beaudette}
-\note{This function and the back-end database are very much a work in progress.}
+\item{sso}{(optional) filter results using a soil survey office code}
-\seealso{\code{\link{fetchSCAN}}}
+\item{gran}{data granularity: "day", "week", "month", "year"; returned data
+are averages}
+
+\item{start.date}{(optional) starting date filter}
+
+\item{stop.date}{(optional) ending date filter}
+
+\item{pad.missing.days}{should missing data ("day" granularity) be filled
+with NA? see details}
+
+\item{soiltemp.summaries}{should soil temperature ("day" granularity only)
+be summarized? see details}
+}
+\value{
+a list containing: \item{sensors}{a \code{SpatialPointsDataFrame}
+object containing site-level information} \item{soiltemp}{a
+\code{data.frame} object containing soil temperature timeseries data}
+\item{soilVWC}{a \code{data.frame} object containing soil moisture
+timeseries data} \item{airtemp}{a \code{data.frame} object containing air
+temperature timeseries data} \item{waterlevel}{a \code{data.frame} object
+containing water level timeseries data}
+}
+\description{
+This function is a front-end to the REST query functionality of the Henry
+Mount Soil Temperature and Water Database.
+}
+\details{
+Filling missing days with NA is useful for computing an index of how
+complete the data are, and for estimating (mostly) unbiased MAST and
+seasonal mean soil temperatures. Summaries are computed by first averaging
+over Julian day, then averaging over all days of the year (MAST) or just
+those days that occur within "summer" or "winter". This approach makes it
+possible to estimate summaries in the presence of missing data. The quality
+of summaries should be weighted by the number of "functional years" (number
+of years with non-missing data after combining data by Julian day) and
+"complete years" (number of years of data with >= 365 days of non-missing
+data).
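+
+A minimal sketch of how these summaries are typically used (the project
+code below is hypothetical, and a network connection is assumed):
+
+\preformatted{
+x <- fetchHenry(project = 'CA630', gran = 'day')
+# site-level metadata, including functional / complete years
+# when soiltemp.summaries = TRUE (the default)
+head(x$sensors@data)
+}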
+} +\note{ +This function and the back-end database are very much a work in +progress. +} \examples{ + \donttest{ if(requireNamespace("curl") & curl::has_internet() & @@ -57,7 +92,12 @@ if(requireNamespace("curl") & } } -} +} +\seealso{ +\code{\link{fetchSCAN}} +} +\author{ +D.E. Beaudette +} \keyword{manip} - diff --git a/man/fetchKSSL.Rd b/man/fetchKSSL.Rd index 69803095..d0d52a1d 100644 --- a/man/fetchKSSL.Rd +++ b/man/fetchKSSL.Rd @@ -1,50 +1,97 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/fetchKSSL.R \name{fetchKSSL} \alias{fetchKSSL} \title{Fetch KSSL Data} -\description{Download soil characterization and morphologic data via BBOX, MLRA, or soil series name query, from the KSSL database.} +\usage{ +fetchKSSL( + series = NA, + bbox = NA, + mlra = NA, + pedlabsampnum = NA, + pedon_id = NA, + pedon_key = NA, + returnMorphologicData = FALSE, + returnGeochemicalData = FALSE, + simplifyColors = FALSE, + progress = TRUE +) +} +\arguments{ +\item{series}{vector of soil series names, case insensitive} -\usage{fetchKSSL(series=NA, bbox=NA, mlra=NA, pedlabsampnum=NA, -pedon_id=NA, pedon_key=NA, returnMorphologicData=FALSE, returnGeochemicalData=FALSE, -simplifyColors=FALSE, progress=TRUE)} +\item{bbox}{a single bounding box in WGS84 geographic coordinates e.g. +\code{c(-120, 37, -122, 38)}} -\arguments{ - \item{series}{vector of soil series names, case insensitive} - \item{bbox}{a single bounding box in WGS84 geographic coordinates e.g. \code{c(-120, 37, -122, 38)}} - \item{mlra}{vector of MLRA IDs, e.g. "18" or "22A"} - \item{pedlabsampnum}{vector of KSSL pedon lab sample number} - \item{pedon_id}{vector of user pedon ID} - \item{pedon_key}{vector of KSSL internal pedon ID} - \item{returnMorphologicData}{logical, optionally request basic morphologic data, see details section} - \item{returnGeochemicalData}{logical, optionally request geochemical, optical and XRD/thermal data, see details section} - \item{simplifyColors}{logical, simplify colors (from morphologic data) and join with horizon data} - \item{progress}{logical, optionally give progress when iterating over multiple requests} -} +\item{mlra}{vector of MLRA IDs, e.g. "18" or "22A"} +\item{pedlabsampnum}{vector of KSSL pedon lab sample number} -\details{This is an experimental interface to a subset for the most commonly used data from a snapshot of KSSL (lab characterization) and NASIS (morphologic) data. +\item{pedon_id}{vector of user pedon ID} -Series-queries are case insensitive. Series name is based on the "correlated as" field (from KSSL snapshot) when present. The "sampled as" classification was promoted to "correlated as" if the "correlated as" classification was missing. +\item{pedon_key}{vector of KSSL internal pedon ID} -When \code{returnMorphologicData} is TRUE, the resulting object is a list. The standard output from \code{fetchKSSL} (\code{SoilProfileCollection} object) is stored in the named element "SPC". The additional elements are basic morphologic data: soil color, rock fragment volume, pores, structure, and redoximorphic features. There is a 1:many relationship between the horizon data in "SPC" and the additional dataframes in \code{morph}. See examples for ideas on how to "flatten" these tables. +\item{returnMorphologicData}{logical, optionally request basic morphologic +data, see details section} -When \code{returnGeochemicalData} is TRUE, the resulting object is a list. 
The standard output from \code{fetchKSSL} (\code{SoilProfileCollection} object) is stored in the named element "SPC". The additional elements are geochemical and mineralogy analysis tables, specifically: geochemical/elemental analyses "geochem", optical mineralogy "optical", and X-ray diffraction / thermal "xrd_thermal". \code{returnGeochemicalData} will include additional dataframes \code{geochem}, \code{optical}, and \code{xrd_thermal} in list result.
+\item{returnGeochemicalData}{logical, optionally request geochemical,
+optical and XRD/thermal data, see details section}
-Setting \code{simplifyColors=TRUE} will automatically flatten the soil color data and join to horizon level attributes.
+\item{simplifyColors}{logical, simplify colors (from morphologic data) and
+join with horizon data}
-Function arguments (\code{series}, \code{mlra}, etc.) are fully vectorized except for \code{bbox}.
+\item{progress}{logical, optionally give progress when iterating over
+multiple requests}
+}
}
+\value{
+a \code{SoilProfileCollection} object when
+\code{returnMorphologicData} is FALSE, otherwise a list.
}
+\description{
+Download soil characterization and morphologic data via BBOX, MLRA, or soil
+series name query, from the KSSL database.
+}
+\details{
+This is an experimental interface to a subset of the most commonly used
+data from a snapshot of KSSL (lab characterization) and NASIS (morphologic)
+data.
-\value{a \code{SoilProfileCollection} object when \code{returnMorphologicData} is FALSE, otherwise a list.}
+Series-queries are case insensitive. Series name is based on the "correlated
+as" field (from KSSL snapshot) when present. The "sampled as"
+classification was promoted to "correlated as" if the "correlated as"
+classification was missing.
-\author{D.E. Beaudette and A.G. Brown}
-\note{SoilWeb maintains a snapshot of these KSSL and NASIS data. The SoilWeb snapshot was developed using methods described here: \url{https://github.com/dylanbeaudette/process-kssl-snapshot}. Please use the link below for the live data.}
+When \code{returnMorphologicData} is TRUE, the resulting object is a list.
+The standard output from \code{fetchKSSL} (\code{SoilProfileCollection}
+object) is stored in the named element "SPC". The additional elements are
+basic morphologic data: soil color, rock fragment volume, pores, structure,
+and redoximorphic features. There is a 1:many relationship between the
+horizon data in "SPC" and the additional dataframes in \code{morph}. See
+examples for ideas on how to "flatten" these tables.
-\references{
-\url{http://ncsslabdatamart.sc.egov.usda.gov/}
-}
+When \code{returnGeochemicalData} is TRUE, the resulting object is a list.
+The standard output from \code{fetchKSSL} (\code{SoilProfileCollection}
+object) is stored in the named element "SPC". The additional elements are
+geochemical and mineralogy analysis tables, specifically:
+geochemical/elemental analyses "geochem", optical mineralogy "optical", and
+X-ray diffraction / thermal "xrd_thermal". \code{returnGeochemicalData} will
+include additional dataframes \code{geochem}, \code{optical}, and
+\code{xrd_thermal} in the list result.
+
+Setting \code{simplifyColors=TRUE} will automatically flatten the soil color
+data and join to horizon level attributes.
-\seealso{\code{\link{fetchOSD}}}
+Function arguments (\code{series}, \code{mlra}, etc.) are fully vectorized
+except for \code{bbox}.
+}
+\note{
+SoilWeb maintains a snapshot of these KSSL and NASIS data.
The SoilWeb +snapshot was developed using methods described here: +\url{https://github.com/dylanbeaudette/process-kssl-snapshot}. Please use +the link below for the live data. +} \examples{ + \donttest{ if(requireNamespace("curl") & curl::has_internet()) { @@ -84,6 +131,15 @@ if(requireNamespace("curl") & } } -} +} +\references{ +\url{http://ncsslabdatamart.sc.egov.usda.gov/} +} +\seealso{ +\code{\link{fetchOSD}} +} +\author{ +D.E. Beaudette and A.G. Brown +} \keyword{utilities} diff --git a/man/fetchNASIS.Rd b/man/fetchNASIS.Rd index 085f450e..ce590bc1 100644 --- a/man/fetchNASIS.Rd +++ b/man/fetchNASIS.Rd @@ -48,36 +48,69 @@ fetchNASIS( \arguments{ \item{from}{determines what objects should fetched? ('pedons' | 'components' | 'pedon_report')} -\item{url}{string specifying the url for the NASIS pedon_report (default: NULL)} +\item{url}{string specifying the url for the NASIS pedon_report (default: +\code{NULL})} -\item{SS}{fetch data from the currently loaded selected set in NASIS or from the entire local database (default: TRUE)} +\item{SS}{fetch data from the currently loaded selected set in NASIS or from +the entire local database (default: \code{TRUE})} -\item{rmHzErrors}{should pedons with horizon depth errors be removed from the results? (default: TRUE)} +\item{rmHzErrors}{should pedons with horizon depth errors be removed from +the results? (default: \code{TRUE})} -\item{nullFragsAreZero}{should fragment volumes of NULL be interpreted as 0? (default: TRUE), see details} +\item{nullFragsAreZero}{should fragment volumes of \code{NULL} be interpreted as \code{0}? +(default: \code{TRUE}), see details} -\item{soilColorState}{which colors should be used to generate the convenience field 'soil_color'? ('moist' | 'dry')} +\item{soilColorState}{which colors should be used to generate the +convenience field \code{soil_color}? (\code{'moist'} or \code{'dry'})} -\item{lab}{should the phlabresults child table be fetched with site/pedon/horizon data (default: FALSE)} +\item{lab}{should the \code{phlabresults} child table be fetched with +site/pedon/horizon data (default: \code{FALSE})} -\item{fill}{(fetchNASIS(from='components') only: include component records without horizon data in result? (default: FALSE)} +\item{fill}{(\code{fetchNASIS(from='components')} only: include component records +without horizon data in result? (default: \code{FALSE})} -\item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have been set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)} +\item{stringsAsFactors}{logical: should character vectors be converted to +factors? This argument is passed to the \code{uncode()} function. It does not +convert those vectors that have been set outside of \code{uncode()} (i.e. hard +coded).} -\item{static_path}{Optional: path to local SQLite database containing NASIS table structure; default: NULL} +\item{static_path}{Optional: path to local SQLite database containing NASIS +table structure; default: \code{NULL}} } \value{ A SoilProfileCollection object } \description{ -Fetch commonly used site/pedon/horizon data or component from NASIS, returned as a SoilProfileCollection object. +Fetch commonly used site/pedon/horizon data or component from NASIS, +returned as a SoilProfileCollection object. 
} \details{ -This function imports data from NASIS into R as a \code{SoilProfileCollection} object. It "flattens" NASIS pedon and component tables, including their child tables, into several more easily manageable data frames. Primarily these functions access the local NASIS database using an ODBC connection. However using the \code{fetchNASIS()} argument \code{from = "pedon_report"}, data can be read from the NASIS Report 'fetchNASIS', as either a txt file or url. The primary purpose of \code{fetchNASIS(from = "pedon_report")} is to facilitate importing datasets larger than 8000+ pedons/components. +This function imports data from NASIS into R as a +\code{SoilProfileCollection} object. It "flattens" NASIS pedon and component +tables, including their child tables, into several more easily manageable +data frames. Primarily these functions access the local NASIS database using +an ODBC connection. However using the \code{fetchNASIS()} argument +\code{from = "pedon_report"}, data can be read from the NASIS Report +'fetchNASIS', as either a txt file or url. The primary purpose of +\code{fetchNASIS(from = "pedon_report")} is to facilitate importing datasets +larger than 8000+ pedons/components. -The value of \code{nullFragsAreZero} will have a significant impact on the rock fragment fractions returned by fetchNASIS. Set \code{nullFragsAreZero = FALSE} in those cases where there are many data-gaps and \code{NULL} rock fragment values should be interpreted as \code{NULL}. Set \code{nullFragsAreZero = TRUE} in those cases where \code{NULL} rock fragment values should be interpreted as 0. +The value of \code{nullFragsAreZero} will have a significant impact on the +rock fragment fractions returned by fetchNASIS. Set \code{nullFragsAreZero = +FALSE} in those cases where there are many data-gaps and \code{NULL} rock +fragment values should be interpreted as \code{NULL}. Set +\code{nullFragsAreZero = TRUE} in those cases where \code{NULL} rock +fragment values should be interpreted as 0. -This function attempts to do most of the boilerplate work when extracting site/pedon/horizon or component data from a local NASIS database. Pedons that are missing horizon data, or have errors in their horizonation are excluded from the returned object, however, their IDs are printed on the console. Pedons with combination horizons (e.g. B/C) are erroneously marked as errors due to the way in which they are stored in NASIS as two overlapping horizon records. +This function attempts to do most of the boilerplate work when extracting +site/pedon/horizon or component data from a local NASIS database. Pedons +that are missing horizon data, or have errors in their horizonation are +excluded from the returned object, however, their IDs are printed on the +console. Pedons with combination horizons (e.g. B/C) are erroneously marked +as errors due to the way in which they are stored in NASIS as two +overlapping horizon records. 
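+
+A minimal sketch of typical use (assuming a local NASIS database reachable
+through the standard 'nasis_local' ODBC connection):
+
+\preformatted{
+# pedons from the current selected set
+p <- fetchNASIS(from = 'pedons', SS = TRUE, nullFragsAreZero = TRUE)
+
+# number of profiles returned; IDs of pedons with
+# horizonation errors are printed to the console
+length(p)
+}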
+ +Tutorials: \itemize{ \item \href{http://ncss-tech.github.io/AQP/soilDB/fetchNASIS-mini-tutorial.html}{fetchNASIS Pedons Tutorial} \item \href{http://ncss-tech.github.io/AQP/soilDB/NASIS-component-data.html}{fetchNASIS Components Tutorial} diff --git a/man/fetchNASISLabData.Rd b/man/fetchNASISLabData.Rd index 7bbb1d28..78cb00ed 100644 --- a/man/fetchNASISLabData.Rd +++ b/man/fetchNASISLabData.Rd @@ -1,20 +1,38 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/fetchNASISLabData.R \name{fetchNASISLabData} \alias{fetchNASISLabData} - - \title{Fetch lab data used site/horizon data from a PedonPC database.} -\description{Fetch KSSL laboratory pedon/horizon layer data from a local NASIS database, return as a SoilProfileCollection object.} - -\usage{fetchNASISLabData(SS = TRUE)} +\usage{ +fetchNASISLabData(SS = TRUE) +} \arguments{ - \item{SS}{fetch data from the currently loaded selected set in NASIS or from the entire local database (default: TRUE)} -} -\value{a SoilProfileCollection class object} -\details{This function currently works only on Windows, and requires a 'nasis_local' ODBC connection.} -\author{J.M. Skovlin and D.E. Beaudette} -\note{This function attempts to do most of the boilerplate work when extracting KSSL laboratory site/horizon data from a local NASIS database. Lab pedons that have errors in their horizonation are excluded from the returned object, however, their IDs are printed on the console. See \code{\link{getHzErrorsNASIS}} for a simple approach to identifying pedons with problematic horizonation.} - -\seealso{\code{\link{get_labpedon_data_from_NASIS_db}}} - +\item{SS}{fetch data from the currently loaded selected set in NASIS or from +the entire local database (default: TRUE)} +} +\value{ +a SoilProfileCollection class object +} +\description{ +Fetch KSSL laboratory pedon/horizon layer data from a local NASIS database, +return as a SoilProfileCollection object. +} +\details{ +This function currently works only on Windows, and requires a 'nasis_local' +ODBC connection. +} +\note{ +This function attempts to do most of the boilerplate work when +extracting KSSL laboratory site/horizon data from a local NASIS database. +Lab pedons that have errors in their horizonation are excluded from the +returned object, however, their IDs are printed on the console. See +\code{\link{getHzErrorsNASIS}} for a simple approach to identifying pedons +with problematic horizonation. +} +\seealso{ +\code{\link{get_labpedon_data_from_NASIS_db}} +} +\author{ +J.M. Skovlin and D.E. 
Beaudette +} \keyword{manip} - diff --git a/man/fetchNASISWebReport.Rd b/man/fetchNASISWebReport.Rd index 37a5bc19..79071f26 100644 --- a/man/fetchNASISWebReport.Rd +++ b/man/fetchNASISWebReport.Rd @@ -1,3 +1,5 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/fetchNASISWebReport.R \name{fetchNASISWebReport} \alias{fetchNASISWebReport} \alias{get_project_from_NASISWebReport} @@ -12,70 +14,111 @@ \alias{get_cosoilmoist_from_NASISWebReport} \alias{get_sitesoilmoist_from_NASISWebReport} \alias{get_lmuaoverlap_from_NASISWebReport} - -\title{Extract component tables from a the NASIS Web Reports} -\description{Get, format, impute, and return component tables.} +\title{Extract component tables from NASIS Web Reports} \usage{ -fetchNASISWebReport(projectname, rmHzErrors = FALSE, fill = FALSE, - stringsAsFactors = default.stringsAsFactors() - ) -get_progress_from_NASISWebReport(mlrassoarea, fiscalyear, projecttypename) +fetchNASISWebReport( + projectname, + rmHzErrors = FALSE, + fill = FALSE, + stringsAsFactors = default.stringsAsFactors() +) + +get_component_from_NASISWebReport( + projectname, + stringsAsFactors = default.stringsAsFactors() +) + +get_chorizon_from_NASISWebReport( + projectname, + fill = FALSE, + stringsAsFactors = default.stringsAsFactors() +) + +get_legend_from_NASISWebReport( + mlraoffice, + areasymbol, + droplevels = TRUE, + stringsAsFactors = default.stringsAsFactors() +) + +get_lmuaoverlap_from_NASISWebReport( + areasymbol, + droplevels = TRUE, + stringsAsFactors = default.stringsAsFactors() +) + +get_mapunit_from_NASISWebReport( + areasymbol, + droplevels = TRUE, + stringsAsFactors = default.stringsAsFactors() +) + +get_projectmapunit_from_NASISWebReport( + projectname, + stringsAsFactors = default.stringsAsFactors() +) + +get_projectmapunit2_from_NASISWebReport( + mlrassoarea, + fiscalyear, + projectname, + stringsAsFactors = default.stringsAsFactors() +) + get_project_from_NASISWebReport(mlrassoarea, fiscalyear) -get_project_correlation_from_NASISWebReport(mlrassoarea, fiscalyear, projectname) -get_projectmapunit_from_NASISWebReport(projectname, - stringsAsFactors = default.stringsAsFactors() - ) -get_projectmapunit2_from_NASISWebReport(mlrassoarea, fiscalyear, projectname, - stringsAsFactors = default.stringsAsFactors() - ) -get_legend_from_NASISWebReport(mlraoffice, - areasymbol, - droplevels = TRUE, - stringsAsFactors = default.stringsAsFactors() - ) -get_mapunit_from_NASISWebReport(areasymbol, - droplevels = TRUE, - stringsAsFactors = default.stringsAsFactors() - ) -get_component_from_NASISWebReport(projectname, - stringsAsFactors = default.stringsAsFactors() - ) -get_chorizon_from_NASISWebReport(projectname, fill = FALSE, - stringsAsFactors = default.stringsAsFactors() - ) -get_cosoilmoist_from_NASISWebReport(projectname, impute = TRUE, - stringsAsFactors = default.stringsAsFactors() - ) -get_sitesoilmoist_from_NASISWebReport(usiteid) -} +get_progress_from_NASISWebReport(mlrassoarea, fiscalyear, projecttypename) -\arguments{ - \item{projectname}{text string vector of project names to be inserted into a SQL WHERE clause (default: NA)} - \item{mlraoffice}{text string value identifying the MLRA Regional Soil Survey Office group name inserted into a SQL WHERE clause (default: NA)} - \item{mlrassoarea}{text string value identifying the MLRA Soil Survey Office areasymbol symbol inserted into a SQL WHERE clause (default: NA)} - \item{fiscalyear}{text string value identifying the fiscal year inserted into a SQL WHERE clause (default: NA)} - 
\item{projecttypename}{text string value identifying the project type name inserted into a SQL WHERE clause (default: NA)}
- \item{areasymbol}{text string value identifying the area symbol (e.g. "IN001" or "IN\%") inserted into a SQL WHERE clause (default: NA)}
- \item{usiteid}{text string value identifying the user site id inserted into a SQL WHERE clause (default: NA)}
- \item{impute}{replace missing (i.e. NULL) values with "Not_Populated" for categorical data, or the "RV" for numeric data or 201 cm if the "RV" is also NULL (default: TRUE)}
- \item{fill}{should rows with missing component ids be removed NA (FALSE)}
- \item{rmHzErrors}{should pedons with horizonation errors be removed from the results? (default: FALSE)}
- \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have been set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)}
- \item{droplevels}{logical: indicating whether to drop unused levels in classifying factors. This is useful when a class has large number of unused classes, which can waste space in tables and figures.}
+get_project_correlation_from_NASISWebReport(
+  mlrassoarea,
+  fiscalyear,
+  projectname
+)
}
\arguments{
+\item{projectname}{text string vector of project names to be inserted into a
+SQL WHERE clause (default: \code{NA})}
+
+\item{rmHzErrors}{should pedons with horizonation errors be removed from the
+results? (default: \code{FALSE})}
+
+\item{fill}{should rows with missing component ids be removed (default: \code{FALSE})}
+
+\item{stringsAsFactors}{logical: should character vectors be converted to
+factors? This argument is passed to the \code{uncode()} function. It does not
+convert those vectors that have been set outside of \code{uncode()} (i.e. hard
+coded). The 'factory-fresh' default is TRUE, but this can be changed by
+setting \code{options(stringsAsFactors = FALSE)}}
+
+\item{mlraoffice}{text string value identifying the MLRA Regional Soil
+Survey Office group name inserted into a SQL WHERE clause (default: \code{NA})}
+
+\item{areasymbol}{text string value identifying the area symbol (e.g.
+\code{IN001} or \verb{IN\%}) inserted into a SQL WHERE clause (default: \code{NA})}
+
+\item{usiteid}{text string value identifying the user site id inserted into
+a SQL WHERE clause (default: \code{NA})}
+
+\item{impute}{replace missing (i.e. \code{NULL}) values with "Not_Populated" for
+categorical data, or the "RV" for numeric data or 201 cm if the "RV" is also
+\code{NULL} (default: \code{TRUE})}
+
+\item{droplevels}{logical: indicating whether to drop unused levels in
+classifying factors. This is useful when a class has a large number of unused
+classes, which can waste space in tables and figures.}
+
+\item{mlrassoarea}{text string value identifying the MLRA Soil Survey Office
+areasymbol symbol inserted into a SQL WHERE clause (default: \code{NA})}
+
+\item{fiscalyear}{text string value identifying the fiscal year inserted
+into a SQL WHERE clause (default: \code{NA})}
+
+\item{projecttypename}{text string value identifying the project type name
+inserted into a SQL WHERE clause (default: \code{NA})}
+}
+\value{
+A data.frame or list with the results.
+}
+\description{
+Extract component tables from NASIS Web Reports
+}
\examples{
-\donttest{
+\donttest{

if (requireNamespace("curl") &
    curl::has_internet() &
@@ -150,9 +193,10 @@ if (requireNamespace("curl") &

-}
}
-% Add one or more standard keywords, see file 'KEYWORDS' in the
-% R documentation directory.
+}
+\author{
+Stephen Roecker
+}
\keyword{manip}
diff --git a/man/fetchOSD.Rd b/man/fetchOSD.Rd
index b7bebe0c..ab45c3d9 100644
--- a/man/fetchOSD.Rd
+++ b/man/fetchOSD.Rd
@@ -9,61 +9,78 @@ fetchOSD(soils, colorState = "moist", extended = FALSE)
\arguments{
\item{soils}{a character vector of named soil series; case-insensitive}
-\item{colorState}{color state for horizon soil color visualization: "moist" or "dry"}
+\item{colorState}{color state for horizon soil color visualization: "moist"
+or "dry"}
-\item{extended}{if \code{TRUE} additional soil series summary data are returned, see details}
+\item{extended}{if \code{TRUE} additional soil series summary data are
+returned, see details}
}
\value{
-a \code{SoilProfileCollection} object containing basic soil morphology and taxonomic information.
+a \code{SoilProfileCollection} object containing basic soil
+morphology and taxonomic information.
}
\description{
-This function fetches a variety of data associated with named soil series, extracted from the USDA-NRCS Official Series Description text files and detailed soil survey (SSURGO). These data are periodically updated and made available via SoilWeb.
+This function fetches a variety of data associated with named soil series,
+extracted from the USDA-NRCS Official Series Description text files and
+detailed soil survey (SSURGO). These data are periodically updated and made
+available via SoilWeb.
}
\details{
-{ \itemize{
-\item{\href{https://ncss-tech.github.io/AQP/soilDB/soil-series-query-functions.html}{overview of all soil series query functions}}
-
-\item{\href{https://ncss-tech.github.io/AQP/soilDB/competing-series.html}{competing soil series}}
-
-\item{\href{https://ncss-tech.github.io/AQP/soilDB/siblings.html}{siblings}}
+\itemize{
+\item \href{https://ncss-tech.github.io/AQP/soilDB/soil-series-query-functions.html}{Overview of all soil series query functions}
+\item \href{https://ncss-tech.github.io/AQP/soilDB/competing-series.html}{Competing Soil Series}
+\item \href{http://ncss-tech.github.io/AQP/soilDB/siblings.html}{Soil "Siblings" Tutorial}
}
-The standard set of "site" and "horizon" data are returned as a \code{SoilProfileCollection} object (\code{extended=FALSE}. The "extended" suite of summary data can be requested by setting \code{extended=TRUE}. The resulting object will be a \code{list} with the following elements:)
+The standard set of "site" and "horizon" data are returned as a
+\code{SoilProfileCollection} object (\code{extended=FALSE}). The "extended"
+suite of summary data can be requested by setting \code{extended=TRUE}.
The
+resulting object will be a \code{list} with the following elements:
-\describe{
-\item{SPC}{\code{SoilProfileCollection} containing standards "site" and "horizon" data}
-\item{competing}{competing soil series from the SC database snapshot}
-\item{geog_assoc_soils}{geographically associated soils, extracted from named section in the OSD}
-\item{geomcomp}{empirical probabilities for geomorphic component, derived from the current SSURGO snapshot}
-\item{hillpos}{empirical probabilities for hillslope position, derived from the current SSURGO snapshot}
-\item{mtnpos}{empirical probabilities for mountain slope position, derived from the current SSURGO snapshot}
-\item{terrace}{empirical probabilities for river terrace position, derived from the current SSURGO snapshot}
-\item{flats}{empirical probabilities for flat landscapes, derived from the current SSURGO snapshot}
-\item{pmkind}{empirical probabilities for parent material kind, derived from the current SSURGO snapshot}
-\item{pmorigin}{empirical probabilities for parent material origin, derived from the current SSURGO snapshot}
-\item{mlra}{empirical MLRA membership values, derived from the current SSURGO snapshot}
-\item{climate}{experimental climate summaries from PRISM stack}
-\item{metadata}{metadata associated with SoilWeb cached summaries}
-}
+\describe{ \item{SPC}{\code{SoilProfileCollection} containing standard
+"site" and "horizon" data} \item{competing}{competing soil series from the
+SC database snapshot} \item{geog_assoc_soils}{geographically associated
+soils, extracted from named section in the OSD} \item{geomcomp}{empirical
+probabilities for geomorphic component, derived from the current SSURGO
+snapshot} \item{hillpos}{empirical probabilities for hillslope position,
+derived from the current SSURGO snapshot} \item{mtnpos}{empirical
+probabilities for mountain slope position, derived from the current SSURGO
+snapshot} \item{terrace}{empirical probabilities for river terrace position,
+derived from the current SSURGO snapshot} \item{flats}{empirical
+probabilities for flat landscapes, derived from the current SSURGO snapshot}
+\item{pmkind}{empirical probabilities for parent material kind, derived from
+the current SSURGO snapshot} \item{pmorigin}{empirical probabilities for
+parent material origin, derived from the current SSURGO snapshot}
+\item{mlra}{empirical MLRA membership values, derived from the current
+SSURGO snapshot} \item{climate}{experimental climate summaries from PRISM
+stack} \item{metadata}{metadata associated with SoilWeb cached summaries} }
-When using \code{extended=TRUE}, there are a couple of scenarios in which series morphology contained in \code{SPC} do not fully match records in the associated series summaries (e.g. \code{competing}).
+When using \code{extended=TRUE}, there are a couple of scenarios in which
+series morphology contained in \code{SPC} do not fully match records in the
+associated series summaries (e.g. \code{competing}).
\describe{
-\item{1. A query for soil series that exist entirely outside of CONUS (e.g. PALAU).}{ - Climate summaries are empty \code{data.frames} because these summaries are currently generated from PRISM. We are working on a solution.}
+\item{1. A query for soil series that exist entirely outside of CONUS (e.g.
+PALAU).}{ - Climate summaries are empty \code{data.frames} because these
+summaries are currently generated from PRISM. We are working on a solution.}
-\item{2. A query for data within CONUS, but OSD morphology missing due to parsing error (e.g.
formatting, typos).}{ - Extended summaries are present but morphology missing from \code{SPC}. A warning is issued.}
+\item{2. A query for data within CONUS, but OSD morphology missing due to
+parsing error (e.g. formatting, typos).}{ - Extended summaries are present
+but morphology missing from \code{SPC}. A warning is issued.}
-\item{3. A query for multiple soil series, with one more more listed as "inactive" (e.g. BREADSPRINGS).}{ - Extended summaries are present but morphology missing from \code{SPC}. A warning is issued.}
+\item{3. A query for multiple soil series, with one or more listed as
+"inactive" (e.g. BREADSPRINGS).}{ - Extended summaries are present but
+morphology missing from \code{SPC}. A warning is issued.}
}
-These last two cases are problematic for analysis that makes use of morphology and extended data, such as outlined in this tutorial on \href{https://ncss-tech.github.io/AQP/soilDB/competing-series.html}{competing soil series}.
-
-}
+These last two cases are problematic for analysis that makes use of
+morphology and extended data, such as outlined in this tutorial on
+\href{https://ncss-tech.github.io/AQP/soilDB/competing-series.html}{competing soil series}.
}
\examples{
+
\donttest{
if(requireNamespace("curl") &
    curl::has_internet()) {
@@ -95,9 +112,11 @@ if(requireNamespace("curl") &
}
}
}
+
}
\references{
-USDA-NRCS OSD search tools: \url{https://www.nrcs.usda.gov/wps/portal/nrcs/detailfull/soils/home/?cid=nrcs142p2_053587}
+USDA-NRCS OSD search tools:
+\url{https://www.nrcs.usda.gov/wps/portal/nrcs/detailfull/soils/home/?cid=nrcs142p2_053587}
}
\seealso{
\link{OSDquery}, \link{siblings}
diff --git a/man/fetchPedonPC.Rd b/man/fetchPedonPC.Rd
index b768c092..b983e01e 100644
--- a/man/fetchPedonPC.Rd
+++ b/man/fetchPedonPC.Rd
@@ -1,26 +1,39 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/fetchPedonPC.R
\name{fetchPedonPC}
\alias{fetchPedonPC}
\alias{getHzErrorsPedonPC}
-
\title{Fetch commonly used site/horizon data from a PedonPC v.5 database.}
-\description{Fetch commonly used site/horizon data from a version 5.x PedonPC database, return as a SoilProfileCollection object.}
-
\usage{
fetchPedonPC(dsn)
-getHzErrorsPedonPC(dsn, strict=TRUE)
}
-
\arguments{
- \item{dsn}{The path to a PedonPC version 5.x database}
- \item{strict}{should horizonation by strictly enforced? (TRUE)}
-}
-
-\details{This function currently works only on Windows.}
-\value{a SoilProfileCollection class object}
-\author{D. E. Beaudette and J. M. Skovlin}
-\note{This function attempts to do most of the boilerplate work when extracting site/horizon data from a PedonPC or local NASIS database. Pedons that have errors in their horizonation are excluded from the returned object, however, their IDs are printed on the console. See \code{\link{getHzErrorsPedonPC}} for a simple approach to identifying pedons with problematic horizonation. Records from the 'taxhistory' table are selected based on 1) most recent record, or 2) record with the least amount of missing data.}
-
-\seealso{\code{\link{get_hz_data_from_pedon_db}}}
-
+\item{dsn}{The path to a PedonPC version 5.x database}
+}
+\value{
+a SoilProfileCollection class object
+}
+\description{
+Fetch commonly used site/horizon data from a version 5.x PedonPC database,
+return as a SoilProfileCollection object.
+}
+\details{
+This function currently works only on Windows.
+}
+\note{
+This function attempts to do most of the boilerplate work when
+extracting site/horizon data from a PedonPC or local NASIS database.
Pedons
+that have errors in their horizonation are excluded from the returned
+object, however, their IDs are printed on the console. See
+\code{\link{getHzErrorsPedonPC}} for a simple approach to identifying pedons
+with problematic horizonation. Records from the 'taxhistory' table are
+selected based on 1) most recent record, or 2) record with the least amount
+of missing data.
+}
+\seealso{
+\code{\link{get_hz_data_from_pedon_db}}
+}
+\author{
+D. E. Beaudette and J. M. Skovlin
+}
\keyword{manip}
-
diff --git a/man/fetchRaCA.Rd b/man/fetchRaCA.Rd
index cf77281b..d5d953e4 100644
--- a/man/fetchRaCA.Rd
+++ b/man/fetchRaCA.Rd
@@ -15,33 +15,46 @@ fetchRaCA(
\arguments{
\item{series}{a soil series name; case-insensitive}
-\item{bbox}{a bounding box in WGS84 geographic coordinates e.g. \code{c(-120, 37, -122, 38)}, constrained to a 5-degree block}
+\item{bbox}{a bounding box in WGS84 geographic coordinates e.g.
+\code{c(-120, 37, -122, 38)}, constrained to a 5-degree block}
\item{state}{a two-letter US state abbreviation; case-insensitive}
\item{rcasiteid}{a RaCA site id (e.g. 'C1609C01')}
-\item{get.vnir}{logical, should associated VNIR spectra be downloaded? (see details)}
+\item{get.vnir}{logical, should associated VNIR spectra be downloaded? (see
+details)}
}
\value{
-{
-\describe{
-\item{\code{pedons}:}{a \code{SoilProfileCollection} object containing site/pedon/horizon data}
-\item{\code{trees}:}{a \code{data.frame} object containing tree DBH and height}
-\item{\code{veg}:}{a \code{data.frame} object containing plant species}
-\item{\code{stock}:}{a \code{data.frame} object containing carbon quantities (stocks) at standardized depths}
-\item{\code{sample}:}{a \code{data.frame} object containing sample-level bulk density and soil organic carbon values}
-\item{\code{spectra}:}{a numeric \code{matrix} containing VNIR reflectance spectra from 350--2500 nm}
-}
-}
+\describe{ \item{\code{pedons}}{a \code{SoilProfileCollection} object
+containing site/pedon/horizon data} \item{\code{trees}}{a \code{data.frame}
+object containing tree DBH and height} \item{\code{veg}}{a \code{data.frame}
+object containing plant species} \item{\code{stock}}{a \code{data.frame}
+object containing carbon quantities (stocks) at standardized depths}
+\item{\code{sample}}{a \code{data.frame} object containing sample-level bulk
+density and soil organic carbon values} \item{\code{spectra}}{a numeric
+\code{matrix} containing VNIR reflectance spectra from 350--2500 nm} }
}
\description{
-Get Rapid Carbon Assessment (RaCA) data via state, geographic bounding-box, RaCA site ID, or series query from the SoilWeb API.
+Get Rapid Carbon Assessment (RaCA) data via state, geographic bounding-box,
+RaCA site ID, or series query from the SoilWeb API.
}
\details{
-The VNIR spectra associated with RaCA data are quite large (each gzip-compressed VNIR spectra record is about 6.6kb), so requests for these data are disabled by default. Note that VNIR spectra can only be queried by soil series or geographic BBOX.
+The VNIR spectra associated with RaCA data are quite large (each
+gzip-compressed VNIR spectra record is about 6.6kb), so requests for these
+data are disabled by default. Note that VNIR spectra can only be queried by
+soil series or geographic BBOX.
}
\examples{
+
\donttest{
if(requireNamespace("curl") &
@@ -69,12 +82,10 @@ if(requireNamespace("curl") &
}
}
}
+
}
\references{
-{
\url{https://www.nrcs.usda.gov/wps/portal/nrcs/detail/soils/survey/?cid=nrcs142p2_054164}
-\href{https://r-forge.r-project.org/scm/viewvc.php/\emph{checkout}/docs/soilDB/RaCA-demo.html?root=aqp}{fetchRaCA() Tutorial}
-}
}
\seealso{
\code{\link{fetchOSD}}
diff --git a/man/fetchSCAN.Rd b/man/fetchSCAN.Rd
index 4d4169cc..627e72b6 100644
--- a/man/fetchSCAN.Rd
+++ b/man/fetchSCAN.Rd
@@ -1,39 +1,36 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/fetchSCAN.R
\name{fetchSCAN}
\alias{fetchSCAN}
\alias{SCAN_sensor_metadata}
\alias{SCAN_site_metadata}
-
-
\title{Fetch SCAN Data}
-\description{Query soil/climate data from USDA-NRCS SCAN Stations (experimental)}
-
\usage{
-# get SCAN data
-fetchSCAN(site.code, year, report='SCAN', req=NULL)
-
-# get sensor metadata for one or more sites
-SCAN_sensor_metadata(site.code)
-
-# get site metadata for one or more sites
-SCAN_site_metadata(site.code)
+fetchSCAN(site.code, year, report = "SCAN", req = NULL)
}
-
\arguments{
- \item{site.code}{a vector of site codes}
- \item{year}{a vector of years}
- \item{report}{report name, single value only}
- \item{req}{list of SCAN request parameters, for backwards-compatibility only}
-}
+\item{site.code}{a vector of site codes}

-\details{See \href{http://ncss-tech.github.io/AQP/soilDB/fetchSCAN-demo.html}{the fetchSCAN tutorial for details.} These functions require the `httr` and `rvest` libraries.}
+\item{year}{a vector of years}

-\note{\code{SCAN_sensor_metadata()} is known to crash on 32bit R / libraries (Windows).}
-
-\value{a \code{data.frame} object}
-\references{https://www.wcc.nrcs.usda.gov/index.html}
-\author{D.E. Beaudette}
+\item{report}{report name, single value only}

+\item{req}{list of SCAN request parameters, for backwards-compatibility only}
+}
+\value{
+a \code{data.frame} object
+}
+\description{
+Query soil/climate data from USDA-NRCS SCAN Stations (experimental)
+}
+\details{
+See the \href{http://ncss-tech.github.io/AQP/soilDB/fetchSCAN-demo.html}{fetchSCAN tutorial}
+for details. These functions require the \code{httr} and \code{rvest}
+libraries.
+}
+\note{
+\code{SCAN_sensor_metadata()} is known to crash on 32bit R / libraries (Windows).
+}
\examples{
+
\donttest{
if(requireNamespace("curl") &
@@ -49,6 +46,12 @@ if(requireNamespace("curl") &
 m <- SCAN_site_metadata(site.code=c(356, 2072))
}
}
+
+}
+\references{
+https://www.wcc.nrcs.usda.gov/index.html
+}
+\author{
+D.E.
Beaudette
}
\keyword{manip}
-
diff --git a/man/fetchSDA.Rd b/man/fetchSDA.Rd
new file mode 100644
index 00000000..6fc2d642
--- /dev/null
+++ b/man/fetchSDA.Rd
@@ -0,0 +1,200 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/get_component_from_SDA.R
+\name{fetchSDA}
+\alias{fetchSDA}
+\alias{get_legend_from_SDA}
+\alias{get_lmuaoverlap_from_SDA}
+\alias{get_mapunit_from_SDA}
+\alias{get_component_from_SDA}
+\alias{get_chorizon_from_SDA}
+\alias{get_cosoilmoist_from_SDA}
+\alias{get_cointerp_from_SDA}
+\title{Download and Flatten Data from Soil Data Access}
+\usage{
+fetchSDA(
+  WHERE = NULL,
+  duplicates = FALSE,
+  childs = TRUE,
+  nullFragsAreZero = TRUE,
+  rmHzErrors = FALSE,
+  droplevels = TRUE,
+  stringsAsFactors = default.stringsAsFactors()
+)
+}
+\arguments{
+\item{WHERE}{text string formatted as an SQL WHERE clause (default: NULL)}
+
+\item{duplicates}{logical; if TRUE a record is returned for each unique
+mukey (may be many per nationalmusym)}
+
+\item{childs}{logical; if FALSE parent material and geomorphic child tables
+are not flattened and appended}
+
+\item{nullFragsAreZero}{should fragment volumes of NULL be interpreted as 0?
+(default: TRUE), see details}
+
+\item{rmHzErrors}{should pedons with horizonation errors be removed from the
+results? (default: FALSE)}
+
+\item{droplevels}{logical: indicating whether to drop unused levels in
+classifying factors. This is useful when a factor has a large number of
+unused levels, which can waste space in tables and figures.}
+
+\item{stringsAsFactors}{logical: should character vectors be converted to
+factors? This argument is passed to the uncode() function. It does not
+convert those vectors that have been set outside of uncode() (i.e. hard
+coded). The 'factory-fresh' default is TRUE, but this can be changed by
+setting options(stringsAsFactors = FALSE)}
+}
+\value{
+A data.frame or SoilProfileCollection object.
+}
+\description{
+Functions to download and flatten commonly used tables from Soil Data
+Access, and create soil profile collection objects (SPC).
+}
+\details{
+These functions return data from Soil Data Access with the use of a simple
+text string formatted as an SQL WHERE clause (e.g. \code{WHERE =
+"areasymbol = 'IN001'"}). All functions are SQL queries that wrap around
+\code{SDA_query()} and format the data for analysis.
+
+Beware that SDA includes the data for both SSURGO and STATSGO2. The
+\code{areasymbol} for STATSGO2 is \code{US}. For just SSURGO, include
+\code{WHERE = "areasymbol != 'US'"}.
+
+If the duplicates argument is set to TRUE, duplicate components are
+returned. This is not necessary with data returned from NASIS, which has one
+unique national map unit. SDA has duplicate national map units, one for
+each legend it exists in.
+
+The value of \code{nullFragsAreZero} will have a significant impact on the
+rock fragment fractions returned by \code{fetchSDA}. Set
+\code{nullFragsAreZero = FALSE} in those cases where there are many
+data-gaps and NULL rock fragment values should be interpreted as NULLs. Set
+\code{nullFragsAreZero = TRUE} in those cases where NULL rock fragment
+values should be interpreted as 0.
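+
+As a minimal sketch of the WHERE-clause interface (assuming a live network
+connection; the area symbol and map unit symbol below are only for
+illustration, borrowed from the example further down):
+
+\preformatted{
+  # one map unit; keep NULL rock fragment volumes as NA rather than 0
+  # f <- fetchSDA(WHERE = "areasymbol = 'IN005' AND musym = 'MnpB2'",
+  #               nullFragsAreZero = FALSE)
+}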
+}
+\examples{
+
+\donttest{
+
+
+if (requireNamespace("curl") &
+    curl::has_internet() &
+    require(aqp) &
+    require("ggplot2") &
+    require("gridExtra") &
+    require("viridis")
+) {
+
+  # query soil components by areasymbol and musym
+  test = fetchSDA(WHERE = "areasymbol = 'IN005' AND musym = 'MnpB2'")
+
+
+  # profile plot
+  plot(test)
+
+
+  # convert the data for depth plot
+  clay_slice = horizons(slice(test, 0:200 ~ claytotal_l + claytotal_r + claytotal_h))
+  names(clay_slice) <- gsub("claytotal_", "", names(clay_slice))
+
+  om_slice = horizons(slice(test, 0:200 ~ om_l + om_r + om_h))
+  names(om_slice) = gsub("om_", "", names(om_slice))
+
+  test2 = rbind(data.frame(clay_slice, var = "clay"),
+                data.frame(om_slice, var = "om")
+  )
+
+  h = merge(test2, site(test)[c("nationalmusym", "cokey", "compname", "comppct_r")],
+            by = "cokey",
+            all.x = TRUE
+  )
+
+  # depth plot of clay content by soil component
+  gg_comp <- function(x) {
+    ggplot(x) +
+      geom_line(aes(y = r, x = hzdept_r)) +
+      geom_ribbon(aes(ymin = l, ymax = h, x = hzdept_r), alpha = 0.2) +
+      xlim(200, 0) +
+      xlab("depth (cm)") +
+      facet_grid(var ~ nationalmusym + paste(compname, comppct_r)) +
+      coord_flip()
+  }
+  g1 <- gg_comp(subset(h, var == "clay"))
+  g2 <- gg_comp(subset(h, var == "om"))
+
+  grid.arrange(g1, g2)
+
+
+  # query cosoilmoist (e.g. water table data) by mukey
+  x <- get_cosoilmoist_from_SDA(WHERE = "mukey = '1395352'")
+
+  ggplot(x, aes(x = as.integer(month), y = dept_r, lty = status)) +
+    geom_rect(aes(xmin = as.integer(month), xmax = as.integer(month) + 1,
+                  ymin = 0, ymax = max(x$depb_r),
+                  fill = flodfreqcl)) +
+    geom_line(cex = 1) +
+    geom_point() +
+    geom_ribbon(aes(ymin = dept_l, ymax = dept_h), alpha = 0.2) +
+    ylim(max(x$depb_r), 0) +
+    xlab("month") + ylab("depth (cm)") +
+    scale_x_continuous(breaks = 1:12, labels = month.abb, name="Month") +
+    facet_wrap(~ paste0(compname, ' (', comppct_r , ')')) +
+    ggtitle(paste0(x$nationalmusym[1],
+                   ': Water Table Levels from Component Soil Moisture Month Data'))
+
+
+
+  # query all Miami major components
+  s <- get_component_from_SDA(WHERE = "compname = 'Miami' \n
+                              AND majcompflag = 'Yes' AND areasymbol != 'US'")
+
+
+  # landform vs 3-D morphometry
+  test <- {
+    subset(s, ! is.na(landform) | ! is.na(geompos)) ->.;
+    split(., .$drainagecl, drop = TRUE) ->.;
+    lapply(., function(x) {
+      test = as.data.frame(table(x$landform, x$geompos))
+      test$compname = x$compname[1]
+      test$drainagecl = x$drainagecl[1]
+      names(test)[1:2] <- c("landform", "geompos")
+      return(test)
+    }) ->.;
+    do.call("rbind", .) ->.;
+    .[.$Freq > 0, ] ->.;
+    within(., {
+      landform = reorder(factor(landform), Freq, max)
+      geompos = reorder(factor(geompos), Freq, max)
+      geompos = factor(geompos, levels = rev(levels(geompos)))
+    }) ->.;
+  }
+  test$Freq2 <- cut(test$Freq,
+                    breaks = c(0, 5, 10, 25, 50, 100, 150),
+                    labels = c("<5", "5-10", "10-25", "25-50", "50-100", "100-150")
+  )
+  ggplot(test, aes(x = geompos, y = landform, fill = Freq2)) +
+    geom_tile(alpha = 0.5) + facet_wrap(~ paste0(compname, "\n", drainagecl)) +
+    scale_fill_viridis(discrete = TRUE) +
+    theme(aspect.ratio = 1, axis.text.x = element_text(angle = 45, hjust = 1, vjust = 1)) +
+    ggtitle("Landform vs 3-D Morphometry for Miami Major Components on SDA")
+
+
+}
+
+
+
+}
+
+}
+\seealso{
+\link{SDA_query}
+}
+\author{
+Stephen Roecker
+}
+\keyword{manip}
diff --git a/man/fetchSDA_spatial.Rd b/man/fetchSDA_spatial.Rd
index 9e616724..7ac0750b 100644
--- a/man/fetchSDA_spatial.Rd
+++ b/man/fetchSDA_spatial.Rd
@@ -16,38 +16,74 @@ fetchSDA_spatial(
)
}
\arguments{
-\item{x}{A vector of MUKEYs / national mapunit symbols (for mupolygon geometry); OR legend keys (LKEY) / area symbols (for sapolygon geometry)}
+\item{x}{A vector of MUKEYs / national mapunit symbols (for mupolygon
+geometry); OR legend keys (LKEY) / area symbols (for sapolygon geometry)}

-\item{by.col}{Column name containing mapunit identifier \code{"mukey"}, \code{"nmusym"}, or \code{"areasymbol"} for \code{geom.src} \code{sapolygon}; default is inferred from \code{is.numeric(x) == TRUE} for \code{mukey} or \code{lkey} and (\code{nationalmusym} or \code{areasymbol} otherwise.}
+\item{by.col}{Column name containing mapunit identifier \code{"mukey"},
+\code{"nmusym"}, or \code{"areasymbol"} for \code{geom.src}
+\code{sapolygon}; default is inferred from \code{is.numeric(x) == TRUE} for
+\code{mukey} or \code{lkey} and \code{nationalmusym} or \code{areasymbol}
+otherwise.}

-\item{method}{geometry result type: \code{"feature"} returns polygons, \code{"bbox"} returns the bounding box of each polygon, and \code{"point"} returns a single point within each polygon.}
+\item{method}{geometry result type: \code{"feature"} returns polygons,
+\code{"bbox"} returns the bounding box of each polygon, and \code{"point"}
+returns a single point within each polygon.}

\item{geom.src}{Either \code{mupolygon} or \code{sapolygon}}

-\item{db}{Default: SSURGO. When \code{geom.src} is \code{mupolygon}, use STATSGO polygon geometry instead of SSURGO by setting \code{db = "STATSGO"}}
+\item{db}{Default: SSURGO. When \code{geom.src} is \code{mupolygon}, use
+STATSGO polygon geometry instead of SSURGO by setting \code{db = "STATSGO"}}

-\item{add.fields}{Column names from \code{mapunit} or \code{legend} table to add to result. Must specify parent table name as the prefix \code{mapunit} before column name e.g. \code{mapunit.muname}.}
+\item{add.fields}{Column names from \code{mapunit} or \code{legend} table to
+add to result. Must specify parent table name as the prefix \code{mapunit}
+before column name e.g. \code{mapunit.muname}.}

-\item{chunk.size}{How many queries should spatial request be divided into? Necessary for large results. Default: 10}
+\item{chunk.size}{How many queries should the spatial request be divided
+into? Necessary for large results. Default: 10}

\item{verbose}{Print messages?}
}
\value{
-A Spatial*DataFrame corresponding to SDA spatial data for all symbols requested. Default result contains geometry with attribute table containing unique feature ID, symbol and area symbol plus additional fields in result specified with \code{add.fields}.
+A Spatial*DataFrame corresponding to SDA spatial data for all
+symbols requested. Default result contains geometry with attribute table
+containing unique feature ID, symbol and area symbol plus additional fields
+in result specified with \code{add.fields}.
}
\description{
-This is a high-level "fetch" method to facilitate spatial queries to Soil Data Access (SDA) based on mapunit key (\code{mukey}) and national mapunit symbol (\code{nationalmusym}) for \code{mupolygon} (SSURGO) or \code{gsmmupolygon} (STATSGO) geometry OR legend key (\code{lkey}) and area symbols (\code{areasymbol}) for \code{sapolygon} (Soil Survey Area; SSA) geometry).
+This is a high-level "fetch" method to facilitate spatial queries to Soil
+Data Access (SDA) based on mapunit key (\code{mukey}) and national mapunit
+symbol (\code{nationalmusym}) for \code{mupolygon} (SSURGO) or
+\code{gsmmupolygon} (STATSGO) geometry OR legend key (\code{lkey}) and area
+symbols (\code{areasymbol}) for \code{sapolygon} (Soil Survey Area; SSA)
+geometry.
+}
+\details{
+A Soil Data Access spatial query is made returning geometry and key
+identifying information about the mapunit or area of interest. Additional
+columns from the mapunit or legend table can be included using the
+\code{add.fields} argument.

-A Soil Data Access spatial query is made returning geometry and key identifying information about the mapunit or area of interest. Additional columns from the mapunit or legend table can be included using \code{add.fields} argument.
+This function automatically "chunks" the input vector (using
+\code{soilDB::makeChunks}) of mapunit identifiers to minimize the likelihood
+of exceeding the SDA data request size. The number of chunks varies with the
+\code{chunk.size} setting and the length of your input vector. If you are
+working with many mapunits and/or large extents, you may need to decrease
+this number in order to have more chunks.

-This function automatically "chunks" the input vector (using \code{soilDB::makeChunks}) of mapunit identifiers to minimize the likelihood of exceeding the SDA data request size. The number of chunks varies with the \code{chunk.size} setting and the length of your input vector. If you are working with many mapunits and/or large extents, you may need to decrease this number in order to have more chunks.
+Querying regions with complex mapping may require smaller \code{chunk.size}.
+Numerically adjacent IDs in the input vector may share common qualities
+(say, all from same soil survey area or region) which could cause specific
+chunks to perform "poorly" (slow or error) no matter what the chunk size is.
+Shuffling the order of the inputs using \code{sample} may help to eliminate
+problems related to this, depending on how you obtained your set of
+MUKEY/nationalmusym to query. One could feasibly use \code{muacres} as a
+heuristic to adjust for total acreage within chunks.

-Querying regions with complex mapping may require smaller \code{chunk.size}. Numerically adjacent IDs in the input vector may share common qualities (say, all from same soil survey area or region) which could cause specific chunks to perform "poorly" (slow or error) no matter what the chunk size is.
Shuffling the order of the inputs using \code{sample} may help to eliminate problems related to this, depending on how you obtained your set of MUKEY/nationalmusym to query. One could feasibly use \code{muacres} as a heuristic to adjust for total acreage within chunks. -} -\details{ -Note that STATSGO data are fetched using \code{CLIPAREASYMBOL = 'US'} to avoid duplicating state and national subsets of the geometry. +Note that STATSGO data are fetched using \code{CLIPAREASYMBOL = 'US'} to +avoid duplicating state and national subsets of the geometry. } \examples{ + \donttest{ if(requireNamespace("curl") & curl::has_internet()) { @@ -68,6 +104,7 @@ if(requireNamespace("curl") & head(fetchSDA_spatial(x = "2x8l5", by="nmusym", add.fields="muname")) } } + } \author{ Andrew G. Brown diff --git a/man/fetchSoilGrids.Rd b/man/fetchSoilGrids.Rd index 22623381..98c7b983 100644 --- a/man/fetchSoilGrids.Rd +++ b/man/fetchSoilGrids.Rd @@ -7,20 +7,30 @@ fetchSoilGrids(locations, loc.names = c("id", "lat", "lon")) } \arguments{ -\item{locations}{A \code{data.frame} containing 3 columns referring to site ID, latitude and longitude.} +\item{locations}{A \code{data.frame} containing 3 columns referring to site +ID, latitude and longitude.} -\item{loc.names}{Optional: Column names referring to site ID, latitude and longitude. Default: \code{c("id","lat","lon")}} +\item{loc.names}{Optional: Column names referring to site ID, latitude and +longitude. Default: \code{c("id","lat","lon")}} } \value{ A SoilProfileCollection } \description{ -This function obtains SoilGrids properties information (250m raster resolution) given a \code{data.frame} containing site IDs, latitudes and longitudes. +This function obtains SoilGrids properties information (250m raster +resolution) given a \code{data.frame} containing site IDs, latitudes and +longitudes. } \details{ -The depth intervals returned are: \code{"0-5cm", "5-15cm", "15-30cm", "30-60cm", "60-100cm", "100-200cm"} and the properties returned are \code{"bdod", "cec", "cfvo", "clay", "nitrogen", "phh2o", "sand", "silt", "soc"} -- each with 5th, 50th, 95th, mean and uncertainty values. Point data requests are made through \code{properties/query} endpoint of the SoilGrids v2.0 REST API: https://rest.soilgrids.org/soilgrids/v2.0/docs +The depth intervals returned are: \code{"0-5cm", "5-15cm", "15-30cm", +"30-60cm", "60-100cm", "100-200cm"} and the properties returned are +\code{"bdod", "cec", "cfvo", "clay", "nitrogen", "phh2o", "sand", "silt", +"soc"} -- each with 5th, 50th, 95th, mean and uncertainty values. Point data +requests are made through \code{properties/query} endpoint of the SoilGrids +v2.0 REST API: https://rest.soilgrids.org/soilgrids/v2.0/docs } \examples{ + \donttest{ if(requireNamespace("curl") & curl::has_internet()) { @@ -37,6 +47,7 @@ The depth intervals returned are: \code{"0-5cm", "5-15cm", "15-30cm", "30-60cm", plotSPC(x, name = NA, color = "socQ50") } } + } \author{ Andrew G. 
Brown
diff --git a/man/filter_geochem.Rd b/man/filter_geochem.Rd
index ceeb69ed..4c857803 100644
--- a/man/filter_geochem.Rd
+++ b/man/filter_geochem.Rd
@@ -19,15 +19,20 @@ filter_geochem(

\item{prep_code}{Character vector of prep code(s) to include in result.}

-\item{major_element_method}{Character vector of major element method(s) to include in result.}
+\item{major_element_method}{Character vector of major element method(s) to
+include in result.}

-\item{trace_element_method}{Character vector of trace element method(s) to include in result.}
+\item{trace_element_method}{Character vector of trace element method(s) to
+include in result.}
}
\value{
-A data.frame, subsetted according to the constraints specified in arguments.
+A data.frame, subsetted according to the constraints specified in the
+arguments.
}
\description{
-A function to subset KSSL "geochem" / elemental analysis result table to obtain rows/columns based on: column name, preparation code, major / trace element method.
+A function to subset the KSSL "geochem" / elemental analysis result table to
+obtain rows/columns based on: column name, preparation code, major / trace
+element method.
}
\author{
Andrew G. Brown.
diff --git a/man/format_SQL_in_statement.Rd b/man/format_SQL_in_statement.Rd
index 2a6c9568..1b33aeb2 100644
--- a/man/format_SQL_in_statement.Rd
+++ b/man/format_SQL_in_statement.Rd
@@ -2,7 +2,8 @@
% Please edit documentation in R/SDA_query.R
\name{format_SQL_in_statement}
\alias{format_SQL_in_statement}
-\title{Format vector of values into a string suitable for an SQL \code{IN} statement.}
+\title{Format vector of values into a string suitable for an SQL \code{IN}
+statement.}
\usage{
format_SQL_in_statement(x)
}
@@ -10,16 +11,20 @@ format_SQL_in_statement(x)
\item{x}{A character vector.}
}
\value{
-A character vector (unit length) containing concatenated group syntax for use in SQL \code{IN}, with unique value found in \code{x}.
+A character vector (unit length) containing concatenated group
+syntax for use in SQL \code{IN}, with the unique values found in \code{x}.
}
\description{
-Concatenate a vector to SQL \code{IN}-compatible syntax: \code{letters[1:3]} becomes \code{('a','b','c')}. Values in \code{x} are first passed through \code{unique()}.
+Concatenate a vector to SQL \code{IN}-compatible syntax: \code{letters[1:3]}
+becomes \code{('a','b','c')}. Values in \code{x} are first passed through
+\code{unique()}.
}
\note{
Only \code{character} output is supported.
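+
+As a sketch of typical use (the \code{mukey} values below are only for
+illustration):
+
+\preformatted{
+  # duplicates are removed first: result is "('462594','462595')"
+  format_SQL_in_statement(c("462594", "462595", "462594"))
+
+  # paste into a larger statement for use with SDA_query()
+  q <- paste0("SELECT mukey, muname FROM mapunit WHERE mukey IN ",
+              format_SQL_in_statement(c("462594", "462595")))
+}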
}
\examples{
+
\donttest{

library(aqp)
@@ -59,4 +64,5 @@ groupedProfilePlot(res, groups = "mukey", color = "hzname", cex.names=0.8,

}
+
}
diff --git a/man/getHzErrorsNASIS.Rd b/man/getHzErrorsNASIS.Rd
index f8567159..c30ff61e 100644
--- a/man/getHzErrorsNASIS.Rd
+++ b/man/getHzErrorsNASIS.Rd
@@ -7,14 +7,18 @@ getHzErrorsNASIS(strict = TRUE, SS = TRUE, static_path = NULL)
}
\arguments{
-\item{strict}{how strict should horizon boundaries be checked for consistency: TRUE=more | FALSE=less}
+\item{strict}{how strict should horizon boundaries be checked for
+consistency: TRUE=more | FALSE=less}

-\item{SS}{fetch data from the currently loaded selected set in NASIS or from the entire local database (default: TRUE)}
+\item{SS}{fetch data from the currently loaded selected set in NASIS or from
+the entire local database (default: TRUE)}

-\item{static_path}{Optional: path to local SQLite database containing NASIS table structure; default: NULL}
+\item{static_path}{Optional: path to local SQLite database containing NASIS
+table structure; default: NULL}
}
\value{
-A data.frame containing problematic records with columns: 'peiid','pedon_id','hzdept','hzdepb','hzname'
+A data.frame containing problematic records with columns:
+'peiid','pedon_id','hzdept','hzdepb','hzname'
}
\description{
Check pedon horizon table for logic errors
diff --git a/man/get_NOAA_GHCND.Rd b/man/get_NOAA_GHCND.Rd
index 7dc2d228..068c4c3b 100644
--- a/man/get_NOAA_GHCND.Rd
+++ b/man/get_NOAA_GHCND.Rd
@@ -2,7 +2,8 @@
% Please edit documentation in R/fetchNOAA.R
\name{get_NOAA_GHCND}
\alias{get_NOAA_GHCND}
-\title{Get Global Historical Climatology Network Daily (GHCND) data from NOAA API for given datatype(s), station IDs and years.}
+\title{Get Global Historical Climatology Network Daily (GHCND) data from NOAA API
+for given datatype(s), station IDs and years.}
\usage{
get_NOAA_GHCND(stations, years, datatypeids, apitoken)
}
@@ -11,22 +12,32 @@ get_NOAA_GHCND(stations, years, datatypeids, apitoken)

\item{years}{One or more years (e.g. 2017:2020)}

-\item{datatypeids}{One or more NOAA GHCND data type IDs (e.g \code{c("PRCP","SNOW")})}
+\item{datatypeids}{One or more NOAA GHCND data type IDs (e.g.
+\code{c("PRCP","SNOW")})}

-\item{apitoken}{API key token for NOAA NCDC web services (https://www.ncdc.noaa.gov/cdo-web/token)}
+\item{apitoken}{API key token for NOAA NCDC web services
+(https://www.ncdc.noaa.gov/cdo-web/token)}
}
\value{
-A data.frame containing the GHCND data requested (limit 1000 records)
+A data.frame containing the GHCND data requested (limit 1000
+records)
}
\description{
-Obtain daily climatic summary data for a set of station IDs, years, and datatypes.
-
-Note that typically results from the NOAA API are limited to 1000 records. However, by "chunking" up data into individual station\emph{year}datatypeid combinations, record results generally do not exceed 365 records for daily summaries.
-
-In order to use this function, you must obtain an API token from this website: https://www.ncdc.noaa.gov/cdo-web/token
+Obtain daily climatic summary data for a set of station IDs, years, and
+datatypes.
+}
+\details{
+Note that typically results from the NOAA API are limited to 1000 records.
+However, by "chunking" up data into individual station x year x datatypeid
+combinations, record results generally do not exceed 365 records for daily
+summaries.
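+
+As a sketch of the resulting request pattern (the station IDs and token
+below are placeholders, not verified values), 2 stations x 3 years x 2
+datatypes expands to 12 small requests:
+
+\preformatted{
+  # d <- get_NOAA_GHCND(stations = c("GHCND:USC00012345",
+  #                                  "GHCND:USC00054321"),
+  #                     years = 2018:2020,
+  #                     datatypeids = c("PRCP", "SNOW"),
+  #                     apitoken = "yourtokenhere")
+}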
+ +In order to use this function, you must obtain an API token from this +website: https://www.ncdc.noaa.gov/cdo-web/token } \examples{ + #' ## in order to use this function, you must obtain an API token from this website: ## https://www.ncdc.noaa.gov/cdo-web/token @@ -35,4 +46,5 @@ In order to use this function, you must obtain an API token from this website: h # datatypeids = c("PRCP","SNOW"), # apitoken = "yourtokenhere") + } diff --git a/man/get_NOAA_stations_nearXY.Rd b/man/get_NOAA_stations_nearXY.Rd index 489448f7..28289f2b 100644 --- a/man/get_NOAA_stations_nearXY.Rd +++ b/man/get_NOAA_stations_nearXY.Rd @@ -13,22 +13,30 @@ get_NOAA_stations_nearXY(lat, lng, apitoken, bbox = 1) \item{apitoken}{API key token for NOAA NCDC web service} -\item{bbox}{Optional: Dimension of the bounding box centered at \code{lat}, \code{lng}.} +\item{bbox}{Optional: Dimension of the bounding box centered at \code{lat}, +\code{lng}.} } \value{ -data.frame containing station information for all stations within a bounding box around \code{lat}, \code{lng}. +data.frame containing station information for all stations within a +bounding box around \code{lat}, \code{lng}. } \description{ -Query the NOAA API to get station data (limit 1000 records) near a point. Default extent is plus or minus 0.5 degrees (bounding box) (with \code{bbox = 1}) around the specified point [lat, lng]. - -In order to use this function, you must obtain an API token from this website: https://www.ncdc.noaa.gov/cdo-web/token +Query the NOAA API to get station data (limit 1000 records) near a point. +Default extent is plus or minus 0.5 degrees (bounding box) (with \code{bbox += 1}) around the specified point [lat, lng]. +} +\details{ +In order to use this function, you must obtain an API token from this +website: https://www.ncdc.noaa.gov/cdo-web/token } \examples{ + ## in order to use this function, you must obtain an API token from this website: ## https://www.ncdc.noaa.gov/cdo-web/token # stations <- get_NOAA_stations_nearXY(lat = 37, lng = -120, # apitoken = "yourtokenhere") + } diff --git a/man/get_colors_from_NASIS_db.Rd b/man/get_colors_from_NASIS_db.Rd index ec052814..12b790ee 100644 --- a/man/get_colors_from_NASIS_db.Rd +++ b/man/get_colors_from_NASIS_db.Rd @@ -1,24 +1,33 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_colors_from_NASIS_db.R \name{get_colors_from_NASIS_db} \alias{get_colors_from_NASIS_db} - \title{Extract Soil Color Data from a local NASIS Database} -\description{Get, format, mix, and return color data from a NASIS database.} \usage{ -get_colors_from_NASIS_db(SS = TRUE) +get_colors_from_NASIS_db(SS = TRUE, static_path = NULL) } \arguments{ - \item{SS}{fetch data from Selected Set in NASIS or from the entire local database (default: TRUE)} -} -\details{This function currently works only on Windows.} -\value{A data.frame with the results.} -\author{Jay M. Skovlin and Dylan E. Beaudette} - - +\item{SS}{fetch data from Selected Set in NASIS or from the entire local +database (default: \code{TRUE})} +\item{static_path}{Optional: path to local SQLite database containing NASIS +table structure; default: \code{NULL}} +} +\value{ +A data.frame with the results. +} +\description{ +Get, format, mix, and return color data from a NASIS database. +} +\details{ +This function currently works only on Windows. 
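+
+As a minimal sketch (assuming a local NASIS connection with pedon data
+loaded in the selected set; a flattened moist/dry color summary per horizon
+is the expected result):
+
+\preformatted{
+  # if (local_NASIS_defined()) {
+  #   co <- get_colors_from_NASIS_db(SS = TRUE)
+  #   head(co)
+  # }
+}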
+} \seealso{ -\code{\link{simplifyColorData}}, \code{\link{get_hz_data_from_NASIS_db}}, \code{\link{get_site_data_from_NASIS_db}} +\code{\link{simplifyColorData}}, +\code{\link{get_hz_data_from_NASIS_db}}, +\code{\link{get_site_data_from_NASIS_db}} +} +\author{ +Jay M. Skovlin and Dylan E. Beaudette } - -% Add one or more standard keywords, see file 'KEYWORDS' in the -% R documentation directory. \keyword{manip} diff --git a/man/get_colors_from_pedon_db.Rd b/man/get_colors_from_pedon_db.Rd index 99b25a8c..0284fbab 100644 --- a/man/get_colors_from_pedon_db.Rd +++ b/man/get_colors_from_pedon_db.Rd @@ -1,26 +1,28 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_colors_from_pedon_db.R \name{get_colors_from_pedon_db} \alias{get_colors_from_pedon_db} - - \title{Extract Soil Color Data from a PedonPC Database} -\description{Get, format, mix, and return color data from a PedonPC database.} \usage{ get_colors_from_pedon_db(dsn) } -%- maybe also 'usage' for other objects documented here. \arguments{ - \item{dsn}{The path to a 'pedon.mdb' database.} +\item{dsn}{The path to a 'pedon.mdb' database.} +} +\value{ +A data.frame with the results. +} +\description{ +Get, format, mix, and return color data from a PedonPC database. +} +\details{ +This function currently works only on Windows. } -\details{This function currently works only on Windows.} -\value{A data.frame with the results.} -\author{Dylan E. Beaudette and Jay M. Skovlin} - -%% ~Make other sections like Warning with \section{Warning }{....} ~ - \seealso{ -\code{\link{get_hz_data_from_pedon_db}}, \code{\link{get_site_data_from_pedon_db}} +\code{\link{get_hz_data_from_pedon_db}}, +\code{\link{get_site_data_from_pedon_db}} +} +\author{ +Dylan E. Beaudette and Jay M. Skovlin } - -% Add one or more standard keywords, see file 'KEYWORDS' in the -% R documentation directory. \keyword{manip} diff --git a/man/get_comonth_from_NASIS_db.Rd b/man/get_comonth_from_NASIS_db.Rd index 467c943a..e7e0bc9c 100644 --- a/man/get_comonth_from_NASIS_db.Rd +++ b/man/get_comonth_from_NASIS_db.Rd @@ -1,32 +1,43 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_component_data_from_NASIS_db.R \name{get_comonth_from_NASIS_db} \alias{get_comonth_from_NASIS_db} - \title{Extract component month data from a local NASIS Database} -\description{Extract component month data from a local NASIS Database.} - \usage{ -get_comonth_from_NASIS_db(SS = TRUE, fill = FALSE, - stringsAsFactors = default.stringsAsFactors() - ) +get_comonth_from_NASIS_db( + SS = TRUE, + fill = FALSE, + stringsAsFactors = default.stringsAsFactors(), + static_path = NULL +) } - \arguments{ - \item{SS}{get data from the currently loaded Selected Set in NASIS or from the entire local database (default: TRUE)} - \item{fill}{should missing "month" rows in the comonth table be filled with NA (FALSE)} - \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have set outside of uncode() (i.e. hard coded). 
The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)}
-}
-
-\details{This function currently works only on Windows.}
-\value{A list with the results.}
-\author{Stephen Roecker}
+\item{SS}{get data from the currently loaded Selected Set in NASIS or from
+the entire local database (default: TRUE)}

+\item{fill}{should missing "month" rows in the comonth table be filled with
+NA (FALSE)}

+\item{stringsAsFactors}{logical: should character vectors be converted to
+factors? This argument is passed to the uncode() function. It does not
+convert those vectors that have been set outside of uncode() (i.e. hard
+coded). The 'factory-fresh' default is TRUE, but this can be changed by
+setting options(stringsAsFactors = FALSE)}

-\seealso{
-\code{\link{fetchNASIS}}
+\item{static_path}{Optional: path to local SQLite database containing NASIS
+table structure; default: \code{NULL}}
+}
+\value{
+A list with the results.
+}
+\description{
+Extract component month data from a local NASIS Database.
+}
+\details{
+This function currently works only on Windows.
}
-
\examples{
+
\donttest{
if(local_NASIS_defined()) {
 # query text note data
@@ -36,7 +47,12 @@ if(local_NASIS_defined()) {
 str(cm)
}
}
+
+}
+\seealso{
+\code{\link{fetchNASIS}}
+}
+\author{
+Stephen Roecker
}
-% Add one or more standard keywords, see file 'KEYWORDS' in the
-% R documentation directory.
\keyword{manip}
diff --git a/man/get_component_data_from_NASIS_db.Rd b/man/get_component_data_from_NASIS_db.Rd
index e8c55f8e..140c213e 100644
--- a/man/get_component_data_from_NASIS_db.Rd
+++ b/man/get_component_data_from_NASIS_db.Rd
@@ -1,32 +1,37 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/get_component_data_from_NASIS_db.R
\name{get_component_data_from_NASIS_db}
\alias{get_component_data_from_NASIS_db}
\alias{get_component_restrictions_from_NASIS_db}
-
\title{Extract component data from a local NASIS Database}
-\description{Extract component data from a local NASIS Database.}
-
\usage{
-get_component_data_from_NASIS_db(SS = TRUE, stringsAsFactors = default.stringsAsFactors())
-get_component_restrictions_from_NASIS_db(SS = TRUE)
+get_component_data_from_NASIS_db(
+  SS = TRUE,
+  stringsAsFactors = default.stringsAsFactors(),
+  static_path = NULL
+)
}
-
-
\arguments{
- \item{SS}{get data from the currently loaded Selected Set in NASIS or from the entire local database (default: TRUE)}
- \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)}
-}
+\item{SS}{fetch data from the currently loaded selected set in NASIS or from
+the entire local database (default: \code{TRUE})}

-\details{This function currently works only on Windows.}
-\value{A list with the results.}
-\author{Dylan E. Beaudette, Stephen Roecker, and Jay M. Skovlin}
+\item{stringsAsFactors}{logical: should character vectors be converted to
+factors? This argument is passed to the \code{uncode()} function. It does not
+convert those vectors that have been set outside of \code{uncode()} (i.e.
+hard coded).
+The 'factory-fresh' default is TRUE, but this can be changed by setting
+options(\code{stringsAsFactors = FALSE})}

-%% ~Make other sections like Warning with \section{Warning }{....} ~
-
-\seealso{
-\code{\link{fetchNASIS}}
+\item{static_path}{Optional: path to local SQLite database containing NASIS
+table structure; default: \code{NULL}}
+}
+\value{
+A list with the results.
+}
+\description{
+Extract component data from a local NASIS Database.
+}
+\details{
+This function currently works only on Windows.
}
-
\examples{
+
\donttest{
if(local_NASIS_defined()) {
 # query text note data
@@ -36,7 +41,12 @@ if(local_NASIS_defined()) {
 str(fc)
}
}
+
+}
+\seealso{
+\code{\link{fetchNASIS}}
+}
+\author{
+Dylan E. Beaudette, Stephen Roecker, and Jay M. Skovlin
}
-% Add one or more standard keywords, see file 'KEYWORDS' in the
-% R documentation directory.
\keyword{manip}
diff --git a/man/get_cosoilmoist_from_NASIS.Rd b/man/get_cosoilmoist_from_NASIS.Rd
index f7f73c85..9373da66 100644
--- a/man/get_cosoilmoist_from_NASIS.Rd
+++ b/man/get_cosoilmoist_from_NASIS.Rd
@@ -1,27 +1,51 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/get_cosoilmoist_from_NASIS.R
\name{get_cosoilmoist_from_NASIS}
\alias{get_cosoilmoist_from_NASIS}
-
\title{Read and Flatten the Component Soil Moisture Tables}
-\description{Read and flatten the component soil moisture month tables from a local NASIS Database.}
\usage{
-get_cosoilmoist_from_NASIS(impute = TRUE, stringsAsFactors = default.stringsAsFactors())
+get_cosoilmoist_from_NASIS(
+  SS = TRUE,
+  impute = TRUE,
+  stringsAsFactors = default.stringsAsFactors(),
+  static_path = NULL
+)
}
\arguments{
- \item{impute}{replace missing (i.e. NULL) values with "Not_Populated" for categorical data, or the "RV" for numeric data or 201 cm if the "RV" is also NULL (default: TRUE)}
- \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)}
-}
-\value{A data.frame.}
-\author{S.M. Roecker}
-\details{The component soil moisture tables within NASIS house monthly data on flooding, ponding, and soil moisture status. The soil moisture status is used to specify the water table depth for components (e.g. \code{status == "Moist"}).
-}
-\note{This function currently works only on Windows.}
+\item{SS}{fetch data from the currently loaded selected set in NASIS or from
+the entire local database (default: \code{TRUE})}

-\seealso{
-\link{fetchNASIS}, \link{get_cosoilmoist_from_NASISWebReport}, \link{get_cosoilmoist_from_SDA}, \code{get_comonth_from_SDA}
-}
+\item{impute}{replace missing (i.e. \code{NULL}) values with \code{"Not_Populated"} for
+categorical data, or the "RV" for numeric data or \code{201} cm if the "RV" is also
+\code{NULL} (default: \code{TRUE})}

+\item{stringsAsFactors}{logical: should character vectors be converted to
+factors? This argument is passed to the \code{uncode()} function. It does not
+convert those vectors that have been set outside of \code{uncode()} (i.e.
+hard coded). The 'factory-fresh' default is TRUE, but this can be changed by
+setting options(\code{stringsAsFactors = FALSE})}

+\item{static_path}{Optional: path to local SQLite database containing NASIS
+table structure; default: \code{NULL}}
+}
+\value{
+A data.frame.
+}
+\description{
+Read and flatten the component soil moisture month tables from a local NASIS
+Database.
+} +\details{ +The component soil moisture tables within NASIS house monthly data on +flooding, ponding, and soil moisture status. The soil moisture status is +used to specify the water table depth for components (e.g. \code{status == +"Moist"}). +} +\note{ +This function currently works only on Windows. +} \examples{ + \donttest{ if(local_NASIS_defined()) { # load cosoilmoist (e.g. water table data) @@ -32,6 +56,13 @@ if(local_NASIS_defined()) { head(test) } } -}} +} +} +\seealso{ +\link{fetchNASIS}, \link{get_cosoilmoist_from_NASISWebReport}, +\link{get_cosoilmoist_from_SDA}, \code{get_comonth_from_SDA} +} +\author{ +S.M. Roecker +} \keyword{manip} - diff --git a/man/get_extended_data_from_NASIS_db.Rd b/man/get_extended_data_from_NASIS_db.Rd new file mode 100644 index 00000000..40c67a8f --- /dev/null +++ b/man/get_extended_data_from_NASIS_db.Rd @@ -0,0 +1,57 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_extended_data_from_NASIS_db.R +\name{get_extended_data_from_NASIS_db} +\alias{get_extended_data_from_NASIS_db} +\title{Extract accessory tables and summaries from a local NASIS Database} +\usage{ +get_extended_data_from_NASIS_db( + SS = TRUE, + nullFragsAreZero = TRUE, + stringsAsFactors = default.stringsAsFactors(), + static_path = NULL +) +} +\arguments{ +\item{SS}{get data from the currently loaded Selected Set in NASIS or from +the entire local database (default: \code{TRUE})} + +\item{nullFragsAreZero}{should fragment volumes of NULL be interpreted as 0? +(default: TRUE), see details} + +\item{stringsAsFactors}{logical: should character vectors be converted to +factors? This argument is passed to the \code{uncode()} function. It does not +convert those vectors that have been set outside of \code{uncode()} (i.e. hard +coded).} + +\item{static_path}{Optional: path to local SQLite database containing NASIS +table structure; default: \code{NULL}} +} +\value{ +A list with the results. +} +\description{ +Extract accessory tables and summaries from a local NASIS Database +} +\examples{ + +\donttest{ + +if(local_NASIS_defined()) { + # query extended data + e <- try(get_extended_data_from_NASIS_db()) + + # show contents of extended data + str(e) +} + +} + +} +\seealso{ +\code{\link{get_hz_data_from_NASIS_db}}, +\code{\link{get_site_data_from_NASIS_db}} +} +\author{ +Jay M. Skovlin and Dylan E. Beaudette +} +\keyword{manip} diff --git a/man/get_extended_data_from_pedon_db.Rd b/man/get_extended_data_from_pedon_db.Rd index 47e96565..c2e2dda3 100644 --- a/man/get_extended_data_from_pedon_db.Rd +++ b/man/get_extended_data_from_pedon_db.Rd @@ -1,25 +1,28 @@ -\name{get_extended_data_from_pedon_db} -\alias{get_extended_data_from_pedon_db} - -\title{Extract accessory tables and summaries from a local pedonPC Database} -\description{Extract accessory tables and summaries from a local pedonPC Database.} -\usage{ -get_extended_data_from_pedon_db(dsn) -} -%- maybe also 'usage' for other objects documented here. -\arguments{ - \item{dsn}{The path to a 'pedon.mdb' database.} -} -\details{This function currently works only on Windows.} -\value{A list with the results.} -\author{Jay M. Skovlin and Dylan E. Beaudette} - -%% ~Make other sections like Warning with \section{Warning }{....} ~ - -\seealso{ -\code{\link{get_hz_data_from_pedon_db}}, \code{\link{get_site_data_from_pedon_db}} -} - -% Add one or more standard keywords, see file 'KEYWORDS' in the -% R documentation directory. 
-\keyword{manip} +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_extended_data_from_pedon_db.R +\name{get_extended_data_from_pedon_db} +\alias{get_extended_data_from_pedon_db} +\title{Extract accessory tables and summaries from a local pedonPC Database} +\usage{ +get_extended_data_from_pedon_db(dsn) +} +\arguments{ +\item{dsn}{The path to a 'pedon.mdb' database.} +} +\value{ +A list with the results. +} +\description{ +Extract accessory tables and summaries from a local pedonPC Database. +} +\details{ +This function currently works only on Windows. +} +\seealso{ +\code{\link{get_hz_data_from_pedon_db}}, +\code{\link{get_site_data_from_pedon_db}} +} +\author{ +Jay M. Skovlin and Dylan E. Beaudette +} +\keyword{manip} diff --git a/man/get_hz_data_from_NASIS_db.Rd b/man/get_hz_data_from_NASIS_db.Rd index 73431521..a844aec0 100644 --- a/man/get_hz_data_from_NASIS_db.Rd +++ b/man/get_hz_data_from_NASIS_db.Rd @@ -1,27 +1,39 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_hz_data_from_NASIS_db.R \name{get_hz_data_from_NASIS_db} \alias{get_hz_data_from_NASIS_db} - \title{Extract Horizon Data from a local NASIS Database} -\description{Get horizon-level data from a local NASIS database.} \usage{ -get_hz_data_from_NASIS_db(SS = TRUE, stringsAsFactors = default.stringsAsFactors()) +get_hz_data_from_NASIS_db( + SS = TRUE, + stringsAsFactors = default.stringsAsFactors(), + static_path = NULL +) } \arguments{ - \item{SS}{fetch data from Selected Set in NASIS or from the entire local database (default: TRUE)} - \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have been set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)} -} -\details{This function currently works only on Windows.} -\value{A data.frame.} - -\author{Jay M. Skovlin and Dylan E. Beaudette} -\note{NULL total rock fragment values are assumed to represent an _absence_ of rock fragments, and set to 0.} +\item{SS}{fetch data from Selected Set in NASIS or from the entire local database (default: \code{TRUE})} -%% ~Make other sections like Warning with \section{Warning }{....} ~ +\item{stringsAsFactors}{logical: should character vectors be converted to +factors? This argument is passed to the \code{uncode()} function. It does not +convert those vectors that have been set outside of \code{uncode()} (i.e. hard +coded).} +\item{static_path}{Optional: path to local SQLite database containing NASIS +table structure; default: \code{NULL}} +} +\value{ +A data.frame. +} +\description{ +Get horizon-level data from a local NASIS database. +} +\note{ +\code{NULL} total rock fragment values are assumed to represent an \emph{absence} of rock fragments, and set to 0. +} \seealso{ \code{\link{get_hz_data_from_NASIS_db}}, \code{\link{get_site_data_from_NASIS_db}} } - -% Add one or more standard keywords, see file 'KEYWORDS' in the -% R documentation directory. +\author{ +Jay M. Skovlin and Dylan E. 
Beaudette +} \keyword{manip} diff --git a/man/get_hz_data_from_pedon_db.Rd b/man/get_hz_data_from_pedon_db.Rd index 328c0389..7c204d0c 100644 --- a/man/get_hz_data_from_pedon_db.Rd +++ b/man/get_hz_data_from_pedon_db.Rd @@ -1,27 +1,32 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_hz_data_from_pedon_db.R \name{get_hz_data_from_pedon_db} \alias{get_hz_data_from_pedon_db} - \title{Extract Horizon Data from a PedonPC Database} -\description{Get horizon-level data from a PedonPC database.} \usage{ get_hz_data_from_pedon_db(dsn) } -%- maybe also 'usage' for other objects documented here. \arguments{ - \item{dsn}{The path to a 'pedon.mdb' database.} -} -\details{This function currently works only on Windows.} -\value{A data.frame.} - -\author{Dylan E. Beaudette and Jay M. Skovlin} -\note{NULL total rock fragment values are assumed to represent an _absence_ of rock fragments, and set to 0.} - -%% ~Make other sections like Warning with \section{Warning }{....} ~ - +\item{dsn}{The path to a 'pedon.mdb' database.} +} +\value{ +A data.frame. +} +\description{ +Get horizon-level data from a PedonPC database. +} +\details{ +This function currently works only on Windows. +} +\note{ +NULL total rock fragment values are assumed to represent an \emph{absence} +of rock fragments, and set to 0. +} \seealso{ -\code{\link{get_colors_from_pedon_db}}, \code{\link{get_site_data_from_pedon_db}} +\code{\link{get_colors_from_pedon_db}}, +\code{\link{get_site_data_from_pedon_db}} +} +\author{ +Dylan E. Beaudette and Jay M. Skovlin } - -% Add one or more standard keywords, see file 'KEYWORDS' in the -% R documentation directory. \keyword{manip} diff --git a/man/get_lablayer_data_from_NASIS_db.Rd b/man/get_lablayer_data_from_NASIS_db.Rd index 1b8ee4d6..5c423f08 100644 --- a/man/get_lablayer_data_from_NASIS_db.Rd +++ b/man/get_lablayer_data_from_NASIS_db.Rd @@ -1,23 +1,32 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_lablayer_data_from_NASIS_db.R \name{get_lablayer_data_from_NASIS_db} \alias{get_lablayer_data_from_NASIS_db} - \title{Extract lab pedon layer data from a local NASIS Database} -\description{Get lab pedon layer-level(horizon-level) data from a local NASIS database.} -\usage{get_lablayer_data_from_NASIS_db(SS = TRUE)} -\arguments{ - \item{SS}{fetch data from the currently loaded selected set in NASIS or from the entire local database (default: TRUE)} +\usage{ +get_lablayer_data_from_NASIS_db(SS = TRUE, static_path = NULL) } -\value{A data.frame.} -\author{Jay M. Skovlin and Dylan E. Beaudette} -\details{This function currently works only on Windows, and requires a 'nasis_local' ODBC connection.} -\note{This function queries KSSL laboratory site/horizon data from a local NASIS database from the lab layer data table.} +\arguments{ +\item{SS}{fetch data from the currently loaded selected set in NASIS or from +the entire local database (default: \code{TRUE})} +\item{static_path}{Optional: path to local SQLite database containing NASIS +table structure; default: \code{NULL}} +} +\value{ +A data.frame. +} +\description{ +Get lab pedon layer-level (horizon-level) data from a local NASIS database. +} +\note{ +This function queries KSSL laboratory site/horizon data from a local +NASIS database from the lab layer data table. +} \seealso{ -\code{\link{get_labpedon_data_from_NASIS_db}} +\code{\link{get_labpedon_data_from_NASIS_db}} +} +\author{ +Jay M. Skovlin and Dylan E. 
Beaudette } - -% Add one or more standard keywords, see file 'KEYWORDS' in the -% R documentation directory. - \keyword{manip} - diff --git a/man/get_labpedon_data_from_NASIS_db.Rd b/man/get_labpedon_data_from_NASIS_db.Rd index a3ad3471..e2c3f67b 100644 --- a/man/get_labpedon_data_from_NASIS_db.Rd +++ b/man/get_labpedon_data_from_NASIS_db.Rd @@ -1,23 +1,36 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_labpedon_data_from_NASIS_db.R \name{get_labpedon_data_from_NASIS_db} \alias{get_labpedon_data_from_NASIS_db} - \title{Extract lab pedon data from a local NASIS Database} -\description{Get lab pedon-level data from a local NASIS database.} -\usage{get_labpedon_data_from_NASIS_db(SS = TRUE)} -\arguments{ - \item{SS}{fetch data from the currently loaded selected set in NASIS or from the entire local database (default: TRUE)} +\usage{ +get_labpedon_data_from_NASIS_db(SS = TRUE, static_path = NULL) } -\value{A data.frame.} -\author{Jay M. Skovlin and Dylan E. Beaudette} -\details{This function currently works only on Windows, and requires a 'nasis_local' ODBC connection.} -\note{This function queries KSSL laboratory site/horizon data from a local NASIS database from the lab pedon data table.} +\arguments{ +\item{SS}{fetch data from the currently loaded selected set in NASIS or from +the entire local database (default: TRUE)} +\item{static_path}{Optional: path to local SQLite database containing NASIS +table structure; default: \code{NULL}} +} +\value{ +A data.frame. +} +\description{ +Get lab pedon-level data from a local NASIS database. +} +\details{ +This function currently works only on Windows, and requires a 'nasis_local' +ODBC connection. +} +\note{ +This function queries KSSL laboratory site/horizon data from a local +NASIS database from the lab pedon data table. +} \seealso{ -\code{\link{get_lablayer_data_from_NASIS_db}} +\code{\link{get_lablayer_data_from_NASIS_db}} +} +\author{ +Jay M. Skovlin and Dylan E. Beaudette } - -% Add one or more standard keywords, see file 'KEYWORDS' in the -% R documentation directory. - \keyword{manip} - diff --git a/man/get_site_data_from_NASIS_db.Rd b/man/get_site_data_from_NASIS_db.Rd index 42869586..04ebbcf5 100644 --- a/man/get_site_data_from_NASIS_db.Rd +++ b/man/get_site_data_from_NASIS_db.Rd @@ -1,23 +1,41 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_site_data_from_NASIS_db.R \name{get_site_data_from_NASIS_db} \alias{get_site_data_from_NASIS_db} - \title{Extract Site Data from a local NASIS Database} -\description{Get site-level data from a local NASIS database.} -\usage{get_site_data_from_NASIS_db(SS = TRUE, stringsAsFactors = default.stringsAsFactors())} -\arguments{ - \item{SS}{fetch data from Selected Set in NASIS or from the entire local database (default: TRUE)} - \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have been set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)} +\usage{ +get_site_data_from_NASIS_db( + SS = TRUE, + stringsAsFactors = default.stringsAsFactors(), + static_path = NULL +) } -\value{A data.frame.} -\author{Jay M. Skovlin and Dylan E. 
Beaudette} -\details{When multiple "site bedrock" entries are present, only the shallowest is returned by this function.} -\note{This function currently works only on Windows.} +\arguments{ +\item{SS}{fetch data from Selected Set in NASIS or from the entire local +database (default: \code{TRUE})} + +\item{stringsAsFactors}{logical: should character vectors be converted to +factors? This argument is passed to the \code{uncode()} function. It does not +convert those vectors that have been set outside of \code{uncode()} (i.e. hard +coded).} +\item{static_path}{Optional: path to local SQLite database containing NASIS +table structure; default: \code{NULL}} +} +\value{ +A data.frame +} +\description{ +Get site-level data from a local NASIS database. +} +\details{ +When multiple "site bedrock" entries are present, only the shallowest is +returned by this function. +} \seealso{ -\code{\link{get_hz_data_from_NASIS_db}}, +\code{\link{get_hz_data_from_NASIS_db}} +} +\author{ +Jay M. Skovlin and Dylan E. Beaudette } - - - \keyword{manip} - diff --git a/man/get_site_data_from_pedon_db.Rd b/man/get_site_data_from_pedon_db.Rd index d275630f..07a202af 100644 --- a/man/get_site_data_from_pedon_db.Rd +++ b/man/get_site_data_from_pedon_db.Rd @@ -1,28 +1,28 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_site_data_from_pedon_db.R \name{get_site_data_from_pedon_db} \alias{get_site_data_from_pedon_db} - \title{Extract Site Data from a PedonPC Database} -\description{Get site-level data from a PedonPC database.} \usage{ get_site_data_from_pedon_db(dsn) } -%- maybe also 'usage' for other objects documented here. \arguments{ - \item{dsn}{The path to a 'pedon.mdb' database.} +\item{dsn}{The path to a 'pedon.mdb' database.} +} +\value{ +A data.frame. +} +\description{ +Get site-level data from a PedonPC database. +} +\note{ +This function currently works only on Windows. } - -\value{A data.frame.} - -\author{Dylan E. Beaudette and Jay M. Skovlin} -\note{This function currently works only on Windows.} - -%% ~Make other sections like Warning with \section{Warning }{....} ~ - \seealso{ -\code{\link{get_hz_data_from_pedon_db}}, \code{\link{get_veg_from_AK_Site}}, +\code{\link{get_hz_data_from_pedon_db}}, +\code{\link{get_veg_from_AK_Site}}, +} +\author{ +Dylan E. Beaudette and Jay M. Skovlin } - -% Add one or more standard keywords, see file 'KEYWORDS' in the -% R documentation directory. 
\keyword{manip}
-
diff --git a/man/get_soilseries_from_NASIS.Rd b/man/get_soilseries_from_NASIS.Rd
index 14c5f21a..08daf84c 100644
--- a/man/get_soilseries_from_NASIS.Rd
+++ b/man/get_soilseries_from_NASIS.Rd
@@ -1,26 +1,32 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/get_soilseries_from_NASIS.R
\name{get_soilseries_from_NASIS}
\alias{get_soilseries_from_NASIS}
\alias{get_soilseries_from_NASISWebReport}
-
\title{Get records from the Soil Classification (SC) database}
-\description{These functions return records from the Soil Classification database, either from the local NASIS database (all series) or via web report (named series only).}
-
\usage{
-
-get_soilseries_from_NASIS(stringsAsFactors = default.stringsAsFactors())
-get_soilseries_from_NASISWebReport(soils,
-stringsAsFactors = default.stringsAsFactors())
+get_soilseries_from_NASIS(
+  stringsAsFactors = default.stringsAsFactors(),
+  static_path = NULL
+)
}
-
\arguments{
- \item{soils}{character vector of soil series names}
- \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)}
-
-}
-
-
-\value{A \code{data.frame}.}
-
-\author{Stephen Roecker}
+\item{stringsAsFactors}{logical: should character vectors be converted to
+factors? This argument is passed to the \code{uncode()} function. It does not
+convert those vectors that have been set outside of \code{uncode()} (i.e. hard coded).}

+\item{static_path}{Optional: path to local SQLite database containing NASIS
+table structure; default: \code{NULL}}
+}
+\value{
+A \code{data.frame}.
+}
+\description{
+These functions return records from the Soil Classification database, either
+from the local NASIS database (all series) or via web report (named series
+only).
+}
+\author{
+Stephen Roecker
+}
\keyword{manip}
diff --git a/man/get_text_notes_from_NASIS_db.Rd b/man/get_text_notes_from_NASIS_db.Rd
index 67f67309..80def18e 100644
--- a/man/get_text_notes_from_NASIS_db.Rd
+++ b/man/get_text_notes_from_NASIS_db.Rd
@@ -1,27 +1,32 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/get_text_notes_from_NASIS_db.R
\name{get_text_notes_from_NASIS_db}
\alias{get_text_notes_from_NASIS_db}
-
\title{Extract text note data from a local NASIS Database}
-\description{Extract text note data from a local NASIS Database.}
\usage{
-get_text_notes_from_NASIS_db(SS = TRUE, fixLineEndings = TRUE)
+get_text_notes_from_NASIS_db(
+  SS = TRUE,
+  fixLineEndings = TRUE,
+  static_path = NULL
+)
}
-%- maybe also 'usage' for other objects documented here.
\arguments{
- \item{SS}{get data from the currently loaded Selected Set in NASIS or from the entire local database (default: TRUE)}
- \item{fixLineEndings}{convert line endings from "\\r\\n" to "\\n"}
-}
-\details{This function currently works only on Windows.}
-\value{A list with the results.}
-\author{Dylan E. Beaudette and Jay M.
Skovlin}
+\item{SS}{get data from the currently loaded Selected Set in NASIS or from
+the entire local database (default: \code{TRUE})}

-%% ~Make other sections like Warning with \section{Warning }{....} ~
+\item{fixLineEndings}{convert line endings from \verb{\\r\\n} to \verb{\\n}}

-\seealso{
-\code{\link{get_hz_data_from_pedon_db}}, \code{\link{get_site_data_from_pedon_db}}
+\item{static_path}{Optional: path to local SQLite database containing NASIS
+table structure; default: \code{NULL}}
+}
+\value{
+A \code{list} with the results.
+}
+\description{
+Extract text note data from a local NASIS Database.
}
-
\examples{
+
\donttest{
if(local_NASIS_defined()) {
 # query text note data
@@ -36,7 +41,13 @@ if(local_NASIS_defined()) {
 }
}
}
+
+}
+\seealso{
+\code{\link{get_hz_data_from_pedon_db}},
+\code{\link{get_site_data_from_pedon_db}}
+}
+\author{
+Dylan E. Beaudette and Jay M. Skovlin
}
-% Add one or more standard keywords, see file 'KEYWORDS' in the
-% R documentation directory.
\keyword{manip}
diff --git a/man/get_veg_data_from_NASIS_db.Rd b/man/get_veg_data_from_NASIS_db.Rd
index 891bb0e3..e05c408e 100644
--- a/man/get_veg_data_from_NASIS_db.Rd
+++ b/man/get_veg_data_from_NASIS_db.Rd
@@ -1,21 +1,29 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/get_veg_data_from_NASIS_db.R
\name{get_veg_data_from_NASIS_db}
\alias{get_veg_data_from_NASIS_db}
-
\title{Extract veg data from a local NASIS Database}
-\description{Extract veg data from a local NASIS Database.}
\usage{
-get_veg_data_from_NASIS_db(SS = TRUE)
+get_veg_data_from_NASIS_db(SS = TRUE, static_path = NULL)
}
-%- maybe also 'usage' for other objects documented here.
\arguments{
- \item{SS}{get data from the currently loaded Selected Set in NASIS or from the entire local database (default: TRUE)}
-}
-\details{This function currently works only on Windows.}
-\value{A list with the results.}
-\author{Jay M. Skovlin and Dylan E. Beaudette}
-
+\item{SS}{get data from the currently loaded Selected Set in NASIS or from
+the entire local database (default: \code{TRUE})}

+\item{static_path}{Optional: path to local SQLite database containing NASIS
+table structure; default: \code{NULL}}
+}
+\value{
+A list with the results.
+}
+\description{
+Extract veg data from a local NASIS Database.
+}
+\details{
+This function currently works only on Windows.
+}
\examples{
+
\donttest{
if(local_NASIS_defined()) {
 # query text note data
@@ -25,7 +33,9 @@ if(local_NASIS_defined()) {
 str(v)
}
}
+
+}
+\author{
+Jay M. Skovlin and Dylan E. Beaudette
}
-% Add one or more standard keywords, see file 'KEYWORDS' in the
-% R documentation directory.
\keyword{manip}
diff --git a/man/get_veg_from_AK_Site.Rd b/man/get_veg_from_AK_Site.Rd
index 9ac9cd80..438e0d66 100644
--- a/man/get_veg_from_AK_Site.Rd
+++ b/man/get_veg_from_AK_Site.Rd
@@ -1,26 +1,28 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/get_veg_from_AK_Site.R
\name{get_veg_from_AK_Site}
\alias{get_veg_from_AK_Site}
-%- Also NEED an '\alias' for EACH other topic documented here.
\title{Retrieve Vegetation Data from an AK Site Database}
-\description{Retrieve Vegetation Data from an AK Site Database}
\usage{
get_veg_from_AK_Site(dsn)
}
-%- maybe also 'usage' for other objects documented here.
\arguments{
- \item{dsn}{file path the the AK Site access database}
+\item{dsn}{file path to the AK Site access database}
}
+\value{
+A data.frame with vegetation data in long format, linked to site ID.
+} +\description{ +Retrieve Vegetation Data from an AK Site Database +} +\note{ +This function currently works only on Windows. } - -\value{A data.frame with vegetation data in long format, linked to site ID.} -\author{Dylan E. Beaudette} -\note{This function currently works only on Windows.} - -%% ~Make other sections like Warning with \section{Warning }{....} ~ - \seealso{ -\code{\link{get_hz_data_from_pedon_db}}, \code{\link{get_site_data_from_pedon_db}} +\code{\link{get_hz_data_from_pedon_db}}, +\code{\link{get_site_data_from_pedon_db}} +} +\author{ +Dylan E. Beaudette } - -% Add one or more standard keywords, see file 'KEYWORDS' in the -% R documentation directory. \keyword{manip} diff --git a/man/get_veg_from_MT_veg_db.Rd b/man/get_veg_from_MT_veg_db.Rd index 37413c28..9e7169bd 100644 --- a/man/get_veg_from_MT_veg_db.Rd +++ b/man/get_veg_from_MT_veg_db.Rd @@ -1,25 +1,29 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_veg_from_MT_veg_db.R \name{get_veg_from_MT_veg_db} \alias{get_veg_from_MT_veg_db} - \title{Extract Site and Plot-level Data from a Montana RangeDB database} -\description{Get Site and Plot-level data from a Montana RangeDB database.} \usage{ -get_veg_from_MT_veg_db(dsn) +get_veg_from_MT_veg_db(dsn) } -%- maybe also 'usage' for other objects documented here. \arguments{ - \item{dsn}{The name of the Montana RangeDB front-end database connection (see details).} +\item{dsn}{The name of the Montana RangeDB front-end database connection +(see details).} +} +\value{ +A data.frame. +} +\description{ +Get Site and Plot-level data from a Montana RangeDB database. +} +\details{ +This function currently works only on Windows. } -\details{This function currently works only on Windows.} -\value{A data.frame.} -\author{Jay M. Skovlin} - -%% ~Make other sections like Warning with \section{Warning }{....} ~ - \seealso{ -\code{\link{get_veg_species_from_MT_veg_db}}, \code{\link{get_veg_other_from_MT_veg_db}} +\code{\link{get_veg_species_from_MT_veg_db}}, +\code{\link{get_veg_other_from_MT_veg_db}} +} +\author{ +Jay M. Skovlin } - -% Add one or more standard keywords, see file 'KEYWORDS' in the -% R documentation directory. \keyword{manip} diff --git a/man/get_veg_from_NPS_PLOTS_db.Rd b/man/get_veg_from_NPS_PLOTS_db.Rd index 9b61585d..b290c866 100644 --- a/man/get_veg_from_NPS_PLOTS_db.Rd +++ b/man/get_veg_from_NPS_PLOTS_db.Rd @@ -1,20 +1,27 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_veg_from_NPS_PLOTS_db.R \name{get_veg_from_NPS_PLOTS_db} \alias{get_veg_from_NPS_PLOTS_db} - \title{Retrieve Vegetation Data from an NPS PLOTS Database} - -\description{Used to extract species, stratum, and cover vegetation data from a backend NPS PLOTS Database. Currently works for any Microsoft Access database with an .mdb file format.} - -\usage{get_veg_from_NPS_PLOTS_db(dsn)} - +\usage{ +get_veg_from_NPS_PLOTS_db(dsn) +} \arguments{ - \item{dsn}{file path to the NPS PLOTS access database on your system.} +\item{dsn}{file path to the NPS PLOTS access database on your system.} +} +\value{ +A data.frame with vegetation data in a long format with linkage to +NRCS soil pedon data via the site_id key field. +} +\description{ +Used to extract species, stratum, and cover vegetation data from a backend +NPS PLOTS Database. Currently works for any Microsoft Access database with +an .mdb file format. +} +\note{ +This function currently only works on Windows. +} +\author{ +Jay M. 
Skovlin } - -\value{A data.frame with vegetation data in a long format with linkage to NRCS soil pedon data via the site_id key field.} - -\author{Jay M. Skovlin} - -\note{This function currently only works on Windows.} - \keyword{manip} diff --git a/man/get_veg_other_from_MT_veg_db.Rd b/man/get_veg_other_from_MT_veg_db.Rd index 8d2bb515..86639b95 100644 --- a/man/get_veg_other_from_MT_veg_db.Rd +++ b/man/get_veg_other_from_MT_veg_db.Rd @@ -1,25 +1,29 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_veg_other_from_MT_veg_db.R \name{get_veg_other_from_MT_veg_db} \alias{get_veg_other_from_MT_veg_db} - \title{Extract cover composition data from a Montana RangeDB database} -\description{Get cover composition data from a Montana RangeDB database.} \usage{ -get_veg_other_from_MT_veg_db(dsn) +get_veg_other_from_MT_veg_db(dsn) } -%- maybe also 'usage' for other objects documented here. \arguments{ - \item{dsn}{The name of the Montana RangeDB front-end database connection (see details).} +\item{dsn}{The name of the Montana RangeDB front-end database connection +(see details).} +} +\value{ +A data.frame. +} +\description{ +Get cover composition data from a Montana RangeDB database. +} +\details{ +This function currently works only on Windows. } -\details{This function currently works only on Windows.} -\value{A data.frame.} -\author{Jay M. Skovlin} - -%% ~Make other sections like Warning with \section{Warning }{....} ~ - \seealso{ -\code{\link{get_veg_from_MT_veg_db}}, \code{\link{get_veg_species_from_MT_veg_db}} +\code{\link{get_veg_from_MT_veg_db}}, +\code{\link{get_veg_species_from_MT_veg_db}} +} +\author{ +Jay M. Skovlin } - -% Add one or more standard keywords, see file 'KEYWORDS' in the -% R documentation directory. \keyword{manip} diff --git a/man/get_veg_species_from_MT_veg_db.Rd b/man/get_veg_species_from_MT_veg_db.Rd index 2e6c8b2e..02cfdae2 100644 --- a/man/get_veg_species_from_MT_veg_db.Rd +++ b/man/get_veg_species_from_MT_veg_db.Rd @@ -1,25 +1,29 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_veg_species_from_MT_veg_db.R \name{get_veg_species_from_MT_veg_db} \alias{get_veg_species_from_MT_veg_db} - \title{Extract species-level Data from a Montana RangeDB database} -\description{Get species-level data from a Montana RangeDB database.} \usage{ -get_veg_species_from_MT_veg_db(dsn) +get_veg_species_from_MT_veg_db(dsn) } -%- maybe also 'usage' for other objects documented here. \arguments{ - \item{dsn}{The name of the Montana RangeDB front-end database connection (see details).} +\item{dsn}{The name of the Montana RangeDB front-end database connection +(see details).} +} +\value{ +A data.frame. +} +\description{ +Get species-level data from a Montana RangeDB database. +} +\details{ +This function currently works only on Windows. } -\details{This function currently works only on Windows.} -\value{A data.frame.} -\author{Jay M. Skovlin} - -%% ~Make other sections like Warning with \section{Warning }{....} ~ - \seealso{ -\code{\link{get_veg_from_MT_veg_db}}, \code{\link{get_veg_other_from_MT_veg_db}} +\code{\link{get_veg_from_MT_veg_db}}, +\code{\link{get_veg_other_from_MT_veg_db}} +} +\author{ +Jay M. Skovlin } - -% Add one or more standard keywords, see file 'KEYWORDS' in the -% R documentation directory. 
\keyword{manip} diff --git a/man/loafercreek.Rd b/man/loafercreek.Rd index cbad533a..80f7b94b 100644 --- a/man/loafercreek.Rd +++ b/man/loafercreek.Rd @@ -1,73 +1,67 @@ -\name{loafercreek} -\alias{loafercreek} -\alias{gopheridge} -\alias{mineralKing} - - -\docType{data} -\title{Example \code{SoilProfilecollection} Objects Returned by \code{fetchNASIS}.} - -\description{Several examples of soil profile collections returned by \code{fetchNASIS(from='pedons')} as \code{SoilProfileCollection} objects.} - -\usage{ -data(loafercreek) -data(gopheridge) -data(mineralKing) -} - - -\examples{ -\donttest{ -if(require("aqp")) { -# load example dataset - data("gopheridge") - - # what kind of object is this? - class(gopheridge) - - # how many profiles? - length(gopheridge) - - # there are 60 profiles, this calls for a split plot - par(mar=c(0,0,0,0), mfrow=c(2,1)) - - # plot soil colors - plot(gopheridge[1:30, ], name='hzname', color='soil_color') - plot(gopheridge[31:60, ], name='hzname', color='soil_color') - - # need a larger top margin for legend - par(mar=c(0,0,4,0), mfrow=c(2,1)) - # generate colors based on clay content - plot(gopheridge[1:30, ], name='hzname', color='clay') - plot(gopheridge[31:60, ], name='hzname', color='clay') - - # single row and no labels - par(mar=c(0,0,0,0), mfrow=c(1,1)) - # plot soils sorted by depth to contact - plot(gopheridge, name='', print.id=FALSE, plot.order=order(gopheridge$bedrckdepth)) - - # plot first 10 profiles - plot(gopheridge[1:10, ], name='hzname', color='soil_color', label='pedon_id', id.style='side') - - # add rock fragment data to plot: - addVolumeFraction(gopheridge[1:10, ], colname='total_frags_pct') - - # add diagnostic horizons - addDiagnosticBracket(gopheridge[1:10, ], kind='argillic horizon', col='red', offset=-0.4) - - ## loafercreek - data("loafercreek") - # plot first 10 profiles - plot(loafercreek[1:10, ], name='hzname', color='soil_color', label='pedon_id', id.style='side') - - # add rock fragment data to plot: - addVolumeFraction(loafercreek[1:10, ], colname='total_frags_pct') - - # add diagnostic horizons - addDiagnosticBracket(loafercreek[1:10, ], kind='argillic horizon', col='red', offset=-0.4) -} -} -} - - -\keyword{datasets} +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/soilDB-package.R +\docType{data} +\name{loafercreek} +\alias{loafercreek} +\alias{gopheridge} +\alias{mineralKing} +\title{Example \code{SoilProfileCollection} Objects Returned by \code{fetchNASIS}.} +\description{ +Several examples of soil profile collections returned by +\code{fetchNASIS(from='pedons')} as \code{SoilProfileCollection} objects. +} +\examples{ + +\donttest{ +if(require("aqp")) { +# load example dataset + data("gopheridge") + + # what kind of object is this? + class(gopheridge) + + # how many profiles? 
+ length(gopheridge) + + # there are 60 profiles, this calls for a split plot + par(mar=c(0,0,0,0), mfrow=c(2,1)) + + # plot soil colors + plot(gopheridge[1:30, ], name='hzname', color='soil_color') + plot(gopheridge[31:60, ], name='hzname', color='soil_color') + + # need a larger top margin for legend + par(mar=c(0,0,4,0), mfrow=c(2,1)) + # generate colors based on clay content + plot(gopheridge[1:30, ], name='hzname', color='clay') + plot(gopheridge[31:60, ], name='hzname', color='clay') + + # single row and no labels + par(mar=c(0,0,0,0), mfrow=c(1,1)) + # plot soils sorted by depth to contact + plot(gopheridge, name='', print.id=FALSE, plot.order=order(gopheridge$bedrckdepth)) + + # plot first 10 profiles + plot(gopheridge[1:10, ], name='hzname', color='soil_color', label='pedon_id', id.style='side') + + # add rock fragment data to plot: + addVolumeFraction(gopheridge[1:10, ], colname='total_frags_pct') + + # add diagnostic horizons + addDiagnosticBracket(gopheridge[1:10, ], kind='argillic horizon', col='red', offset=-0.4) + + ## loafercreek + data("loafercreek") + # plot first 10 profiles + plot(loafercreek[1:10, ], name='hzname', color='soil_color', label='pedon_id', id.style='side') + + # add rock fragment data to plot: + addVolumeFraction(loafercreek[1:10, ], colname='total_frags_pct') + + # add diagnostic horizons + addDiagnosticBracket(loafercreek[1:10, ], kind='argillic horizon', col='red', offset=-0.4) +} +} + +} +\keyword{datasets} diff --git a/man/local_NASIS_defined.Rd b/man/local_NASIS_defined.Rd index 7c4de80f..3a6875cd 100644 --- a/man/local_NASIS_defined.Rd +++ b/man/local_NASIS_defined.Rd @@ -7,7 +7,8 @@ local_NASIS_defined(static_path = NULL) } \arguments{ -\item{static_path}{Optional: path to local SQLite database containing NASIS table structure; default: NULL} +\item{static_path}{Optional: path to local SQLite database containing NASIS +table structure; default: NULL} } \value{ logical @@ -17,9 +18,11 @@ Check for presence of \code{nasis_local} ODBC data source } \examples{ + if(local_NASIS_defined()) { # use fetchNASIS or some other lower-level fetch function } else { message('could not find `nasis_local` ODBC data source') } + } diff --git a/man/makeChunks.Rd b/man/makeChunks.Rd index e10918d1..a8a9f31c 100644 --- a/man/makeChunks.Rd +++ b/man/makeChunks.Rd @@ -19,10 +19,12 @@ Generate chunk labels for splitting data } \examples{ + # split the lowercase alphabet into 2 chunks aggregate(letters, by = list(makeChunks(letters, size=13)), FUN = paste0, collapse=",") + } diff --git a/man/mapunit_geom_by_ll_bbox.Rd b/man/mapunit_geom_by_ll_bbox.Rd index 207daa6f..ae6cfecf 100644 --- a/man/mapunit_geom_by_ll_bbox.Rd +++ b/man/mapunit_geom_by_ll_bbox.Rd @@ -9,21 +9,30 @@ mapunit_geom_by_ll_bbox(bbox, source = "sda") \arguments{ \item{bbox}{a bounding box in WGS coordinates} -\item{source}{the source database, currently limited to soil data access (SDA)} +\item{source}{the source database, currently limited to soil data access +(SDA)} } \value{ -A SpatialPolygonsDataFrame of map unit polygons, in WGS84 (long,lat) coordinates. +A SpatialPolygonsDataFrame of map unit polygons, in WGS84 (long,lat) +coordinates. } \description{ -Fetch map unit geometry from the SDA website by WGS84 bounding box. There is a limit on the amount of data returned as serialized JSON (~32Mb) and a total record limit of 100,000. +Fetch map unit geometry from the SDA website by WGS84 bounding box. 
There is +a limit on the amount of data returned as serialized JSON (~32Mb) and a +total record limit of 100,000. } \details{ -The SDA website can be found at \url{https://sdmdataaccess.nrcs.usda.gov}. See examples for bounding box formatting. +The SDA website can be found at \url{https://sdmdataaccess.nrcs.usda.gov}. +See examples for bounding box formatting. } \note{ -SDA does not return the spatial intersection of map unit polygons and bounding box. Rather, just those polygons that are completely within the bounding box / overlap with the bbox. This function requires the 'rgdal' package. +SDA does not return the spatial intersection of map unit polygons and +bounding box. Rather, just those polygons that are completely within the +bounding box / overlap with the bbox. This function requires the 'rgdal' +package. } \examples{ + ## fetch map unit geometry from a bounding-box: # # +------------- (-120.41, 38.70) @@ -61,6 +70,7 @@ curl::has_internet() & } } } + } \author{ Dylan E. Beaudette diff --git a/man/mix_and_clean_colors.Rd b/man/mix_and_clean_colors.Rd new file mode 100644 index 00000000..421611a4 --- /dev/null +++ b/man/mix_and_clean_colors.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/mix_and_clean_colors.R +\name{mix_and_clean_colors} +\alias{mix_and_clean_colors} +\title{Mix and Clean Colors} +\usage{ +mix_and_clean_colors(x, wt = "pct", backTransform = FALSE) +} +\arguments{ +\item{x}{a \code{data.frame} object containing sRGB coordinates associated +with a group of colors to mix} + +\item{wt}{fractional weights, usually area of hz face} + +\item{backTransform}{logical, should the mixed sRGB representation of soil +color be transformed to closest Munsell chips? This is performed by +\code{aqp::rgb2Munsell}} +} +\value{ +A data.frame containing mixed colors +} +\description{ +Deprecated: only used in PedonPC functionality; use \code{estimateColorMixture} instead +} diff --git a/man/mukey.wcs.Rd b/man/mukey.wcs.Rd index 0b42915e..bda04d39 100644 --- a/man/mukey.wcs.Rd +++ b/man/mukey.wcs.Rd @@ -7,34 +7,44 @@ mukey.wcs(aoi, db = c("gnatsgo", "gssurgo"), res = 30, quiet = FALSE) } \arguments{ -\item{aoi}{area of interest (AOI) defined using a \code{Spatial*}, a \code{sf}, \code{sfc} or \code{bbox} object or a \code{list}, see details} +\item{aoi}{area of interest (AOI) defined using a \code{Spatial*}, a +\code{sf}, \code{sfc} or \code{bbox} object or a \code{list}, see details} -\item{db}{name of the gridded map unit key grid to access, should be either 'gnatsgo' or 'gssurgo'} +\item{db}{name of the gridded map unit key grid to access, should be either +'gnatsgo' or 'gssurgo'} -\item{res}{grid resolution, units of meters. The native resolution of gNATSGO and gSSURGO (this WCS) is 30m.} +\item{res}{grid resolution, units of meters. The native resolution of +gNATSGO and gSSURGO (this WCS) is 30m.} -\item{quiet}{logical, passed to \code{download.file} to enable / suppress URL and progress bar for download.} +\item{quiet}{logical, passed to \code{download.file} to enable / suppress +URL and progress bar for download.} } \value{ -\code{raster} object containing indexed map unit keys and associated raster attribute table +\code{raster} object containing indexed map unit keys and associated +raster attribute table } \description{ -Download chunks of the gNATSGO or gSSURGO map unit key grid via bounding-box from the SoilWeb WCS. +Download chunks of the gNATSGO or gSSURGO map unit key grid via bounding-box +from the SoilWeb WCS. 
} \details{ -\code{aoi} should be specified as either a \code{Spatial*}, \code{sf}, \code{sfc} or \code{bbox} object or a \code{list} containing: +\code{aoi} should be specified as either a \code{Spatial*}, \code{sf}, +\code{sfc} or \code{bbox} object or a \code{list} containing: -\describe{ -\item{\code{aoi}}{bounding-box specified as (xmin, ymin, xmax, ymax) e.g. c(-114.16, 47.65, -114.08, 47.68)} -\item{\code{crs}}{coordinate reference system of BBOX, e.g. '+init=epsg:4326'} -} +\describe{ \item{\code{aoi}}{bounding-box specified as (xmin, ymin, xmax, +ymax) e.g. c(-114.16, 47.65, -114.08, 47.68)} \item{\code{crs}}{coordinate +reference system of BBOX, e.g. '+init=epsg:4326'} } -The WCS query is parameterized using \code{raster::extent} derived from the above AOI specification, after conversion to the native CRS (EPSG:6350) of the gNATSGO / gSSURGO grid. +The WCS query is parameterized using \code{raster::extent} derived from the +above AOI specification, after conversion to the native CRS (EPSG:6350) of +the gNATSGO / gSSURGO grid. -Databases available from this WCS can be queried using \code{WCS_details(wcs = 'mukey')}. +Databases available from this WCS can be queried using +\code{WCS_details(wcs = 'mukey')}. } \note{ -The gNATSGO grid includes raster soil survey map unit keys which are not in SDA. +The gNATSGO grid includes raster soil survey map unit keys which are +not in SDA. } \author{ D.E. Beaudette and A.G. Brown diff --git a/man/parseWebReport.Rd b/man/parseWebReport.Rd index d25ac569..9ab03a79 100644 --- a/man/parseWebReport.Rd +++ b/man/parseWebReport.Rd @@ -1,30 +1,42 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/parseWebReport.R \name{parseWebReport} \alias{parseWebReport} - \title{Parse contents of a web report, based on supplied arguments.} -\description{Parse contents of a web report, based on supplied arguments.} \usage{ parseWebReport(url, args, index = 1) } - \arguments{ - \item{url}{Base URL to a LIMS/NASIS web report.} - \item{args}{List of named arguments to send to report, see details.} - \item{index}{Integer index specifying the table to return, or, NULL for a list of tables} -} - -\details{Report argument names can be inferred by inspection of the HTML source associated with any given web report.} +\item{url}{Base URL to a LIMS/NASIS web report.} -\value{A \code{data.frame} object in the case of a single integer passed to \code{index}, a \code{list} object in the case of an integer vector or NULL passed to \code{index}.} - -\author{D.E. Beaudette and S.M. Roecker} - -\keyword{ IO } - -\note{Most web reports are for internal use only.} +\item{args}{List of named arguments to send to report, see details.} +\item{index}{Integer index specifying the table to return, or, NULL for a +list of tables} +} +\value{ +A \code{data.frame} object in the case of a single integer passed to +\code{index}, a \code{list} object in the case of an integer vector or NULL +passed to \code{index}. +} +\description{ +Parse contents of a web report, based on supplied arguments. +} +\details{ +Report argument names can be inferred by inspection of the HTML source +associated with any given web report. +} +\note{ +Most web reports are for internal use only. +} \examples{ + \donttest{ # pending } + +} +\author{ +D.E. Beaudette and S.M. 
Roecker } +\keyword{IO} diff --git a/man/processSDA_WKT.Rd b/man/processSDA_WKT.Rd index cf554f42..4bb56dbc 100644 --- a/man/processSDA_WKT.Rd +++ b/man/processSDA_WKT.Rd @@ -7,7 +7,8 @@ processSDA_WKT(d, g = "geom", p4s = "+proj=longlat +datum=WGS84") } \arguments{ -\item{d}{\code{data.frame} returned by \code{SDA_query}, containing WKT representation of geometry} +\item{d}{\code{data.frame} returned by \code{SDA_query}, containing WKT +representation of geometry} \item{g}{name of column in \code{d} containing WKT geometry} @@ -17,13 +18,16 @@ processSDA_WKT(d, g = "geom", p4s = "+proj=longlat +datum=WGS84") A \code{Spatial*} object. } \description{ -This is a helper function, commonly used with \code{SDA_query} to extract WKT (well-known text) representation of geometry to an sp-class object. +This is a helper function, commonly used with \code{SDA_query} to extract +WKT (well-known text) representation of geometry to an sp-class object. } \details{ -The SDA website can be found at \url{https://sdmdataaccess.nrcs.usda.gov}. See the \href{http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html}{SDA Tutorial} for detailed examples. +The SDA website can be found at \url{https://sdmdataaccess.nrcs.usda.gov}. +See the \href{http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html}{SDA Tutorial} for detailed examples. } \note{ -This function requires the \code{httr}, \code{jsonlite}, \code{XML}, and \code{rgeos} packages. +This function requires the \code{httr}, \code{jsonlite}, \code{XML}, +and \code{rgeos} packages. } \author{ D.E. Beaudette diff --git a/man/seriesExtent.Rd b/man/seriesExtent.Rd index 2354a373..ea4bf276 100644 --- a/man/seriesExtent.Rd +++ b/man/seriesExtent.Rd @@ -9,17 +9,29 @@ seriesExtent(s, type = c("vector", "raster"), timeout = 60) } \arguments{ \item{s}{a soil series name, case-insensitive} -\item{type}{series extent representation, \code{vector} results in a \code{SpatialPolygonsDataFrame} object and \code{raster} results in a \code{raster} object} +\item{type}{series extent representation, \code{vector} results in a +\code{SpatialPolygonsDataFrame} object and \code{raster} results in a +\code{raster} object} \item{timeout}{time that we are willing to wait for a response, in seconds} } \description{ -This function downloads a generalized representations of a soil series extent from SoilWeb, derived from the current SSURGO snapshot. Data can be returned as vector outlines (\code{SpatialPolygonsDataFrame} object) or gridded representation of area proportion falling within 800m cells (\code{raster} object). Gridded series extent data are only available in CONUS. Vector representations are returned with a GCS/WGS84 coordinate reference system and raster representations are returned with an Albers Equal Area / NAD83 coordinate reference system. +This function downloads a generalized representation of a soil series +extent from SoilWeb, derived from the current SSURGO snapshot. Data can be +returned as vector outlines (\code{SpatialPolygonsDataFrame} object) or +gridded representation of area proportion falling within 800m cells +(\code{raster} object). Gridded series extent data are only available in +CONUS. Vector representations are returned with a GCS/WGS84 coordinate +reference system and raster representations are returned with an Albers +Equal Area / NAD83 coordinate reference system. } \note{ -This function requires the \code{rgdal} package. Warning messages about the proj4 CRS specification may be printed depending on your version of \code{rgdal}. 
This should be resolved soon. +This function requires the \code{rgdal} package. Warning messages +about the proj4 CRS specification may be printed depending on your version +of \code{rgdal}. This should be resolved soon. } \examples{ + \donttest{ if(requireNamespace("curl") & @@ -54,6 +66,7 @@ if(requireNamespace("curl") & } } + } \references{ \url{https://casoilresource.lawr.ucdavis.edu/see/} diff --git a/man/siblings.Rd b/man/siblings.Rd index 47a62b91..0c36e4e6 100644 --- a/man/siblings.Rd +++ b/man/siblings.Rd @@ -1,45 +1,43 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/siblings.R \name{siblings} \alias{siblings} - \title{Lookup siblings and cousins for a given soil series.} -\description{Lookup siblings and cousins for a given soil series, from the current fiscal year SSURGO snapshot via SoilWeb.} \usage{ -siblings(s, only.major=FALSE, component.data = FALSE, cousins = FALSE) +siblings(s, only.major = FALSE, component.data = FALSE, cousins = FALSE) } - \arguments{ - \item{s}{character vector, the name of a single soil series, case-insensitive.} - \item{only.major}{logical, should only return siblings that are major components} - \item{component.data}{logical, should component data for siblings (and optionally cousins) be returned?} - \item{cousins}{logical, should siblings-of-siblings (cousins) be returned?} -} - -\details{The siblings of any given soil series are defined as those soil series (major and minor component) that share a parent map unit with the named series (as a major component). Cousins are siblings of siblings. Data are sourced from SoilWeb which maintains a copy of the current SSURGO snapshot.} +\item{s}{character vector, the name of a single soil series, +case-insensitive.} -\value{ -\describe{ - \item{sib}{\code{data.frame} containing siblings, major component flag, and number of co-occurrences} - \item{sib.data}{\code{data.frame} containing sibling component data} - \item{cousins}{\code{data.frame} containing cousins, major component flag, and number of co-occurrences} - \item{cousin.data}{\code{data.frame} containing cousin component data} - } -} +\item{only.major}{logical, should only siblings that are major components +be returned?} -\references{ -\href{http://ncss-tech.github.io/AQP/soilDB/soil-series-query-functions.html}{soilDB Soil Series Query Functionality} +\item{component.data}{logical, should component data for siblings (and +optionally cousins) be returned?} -\href{http://ncss-tech.github.io/AQP/soilDB/siblings.html}{Related tutorial.} +\item{cousins}{logical, should siblings-of-siblings (cousins) be returned?} } - -\author{ -D.E. Beaudette +\value{ +\describe{ \item{sib}{\code{data.frame} containing siblings, major +component flag, and number of co-occurrences} +\item{sib.data}{\code{data.frame} containing sibling component data} +\item{cousins}{\code{data.frame} containing cousins, major component flag, +and number of co-occurrences} \item{cousin.data}{\code{data.frame} +containing cousin component data} } } - -\seealso{ -\link{OSDquery}, \link{siblings}, \link{fetchOSD} +\description{ +Lookup siblings and cousins for a given soil series, from the current fiscal +year SSURGO snapshot via SoilWeb. +} +\details{ +The siblings of any given soil series are defined as those soil series +(major and minor component) that share a parent map unit with the named +series (as a major component). Cousins are siblings of siblings. Data are +sourced from SoilWeb which maintains a copy of the current SSURGO snapshot. 
} - \examples{ + \donttest{ if(requireNamespace("curl") & curl::has_internet()) { @@ -54,7 +52,18 @@ if(requireNamespace("curl") & x$sib } } -} - -\keyword{ manip } +} +\references{ +\itemize{ +\item \href{http://ncss-tech.github.io/AQP/soilDB/soil-series-query-functions.html}{Soil Series Query Functions} +\item \href{http://ncss-tech.github.io/AQP/soilDB/siblings.html}{Soil "Siblings" Tutorial} +} +} +\seealso{ +\link{OSDquery}, \link{siblings}, \link{fetchOSD} +} +\author{ +D.E. Beaudette +} +\keyword{manip} diff --git a/man/simplifyColorData.Rd b/man/simplifyColorData.Rd index acdd2b6f..fba5aaf1 100644 --- a/man/simplifyColorData.Rd +++ b/man/simplifyColorData.Rd @@ -1,38 +1,59 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/simplifyColorData.R \name{simplifyColorData} \alias{simplifyColorData} -\alias{mix_and_clean_colors} - \title{Simplify Color Data by ID} -\description{Simplify multiple Munsell color observations associated with each horizon.} \usage{ simplifyColorData(d, id.var = "phiid", wt = "colorpct", bt = FALSE) -mix_and_clean_colors(x, wt='pct', backTransform = FALSE) } - \arguments{ - \item{d}{a \code{data.frame} object, typically returned from NASIS, see details} - \item{id.var}{character vector with the name of the column containing an ID that is unique among all horizons in \code{d}} - \item{x}{a \code{data.frame} object containing sRGB coordinates associated with a group of colors to mix} - \item{wt}{a character vector with the name of the column containing color weights for mixing} - \item{bt}{logical, should the mixed sRGB representation of soil color be transformed to closest Munsell chips? This is performed by \code{aqp::rgb2Munsell}} - \item{backTransform}{logical, should the mixed sRGB representation of soil color be transformed to closest Munsell chips? This is performed by \code{aqp::rgb2Munsell}} -} - -\details{ -This function is mainly intended for the processing of NASIS pedon/horizon data which may or may not contain multiple colors per horizon/moisture status combination. \code{simplifyColorData} will "mix" multiple colors associated with horizons in \code{d}, according to IDs specified by \code{id.var}, using "weights" (area percentages) specified by the \code{wt} argument to \code{mix_and_clean_colors}. - -Note that this function doesn't actually simulate the mixture of pigments on a surface, rather, "mixing" is approximated via weighted average in the CIELAB colorspace. +\item{d}{a \code{data.frame} object, typically returned from NASIS, see +details} -The \code{simplifyColorData} function can be applied to data sources other than NASIS by careful use of the \code{id.var} and \code{wt} arguments. However, \code{d} must contain Munsell colors split into columns named "colorhue", "colorvalue", and "colorchroma". In addition, the moisture state ("Dry" or "Moist") must be specified in a column named "colormoistst". +\item{id.var}{character vector with the name of the column containing an ID +that is unique among all horizons in \code{d}} -The \code{mix_and_clean_colors} function can be applied to arbitrary data sources as long as \code{x} contains sRGB coordinates in columns named "r", "g", and "b". This function should be applied to chunks of rows within which color mixtures make sense. 
+\item{wt}{a character vector with the name of the column containing color +weights for mixing} -There are examples in \href{http://ncss-tech.github.io/AQP/soilDB/KSSL-demo.html}{the KSSL data tutorial} and \href{http://ncss-tech.github.io/AQP/soilDB/mixing-soil-color-data.html}{the soil color mixing tutorial}. +\item{bt}{logical, should the mixed sRGB representation of soil color be +transformed to closest Munsell chips? This is performed by +\code{aqp::rgb2Munsell}} +} +\description{ +Simplify multiple Munsell color observations associated with each horizon. +} +\details{ +This function is mainly intended for the processing of NASIS pedon/horizon +data which may or may not contain multiple colors per horizon/moisture +status combination. \code{simplifyColorData} will "mix" multiple colors +associated with horizons in \code{d}, according to IDs specified by +\code{id.var}, using "weights" (area percentages) specified by the \code{wt} +argument to \code{mix_and_clean_colors}. + +Note that this function doesn't actually simulate the mixture of pigments on +a surface, rather, "mixing" is approximated via weighted average in the +CIELAB colorspace. + +The \code{simplifyColorData} function can be applied to data sources other +than NASIS by careful use of the \code{id.var} and \code{wt} arguments. +However, \code{d} must contain Munsell colors split into columns named +"colorhue", "colorvalue", and "colorchroma". In addition, the moisture state +("Dry" or "Moist") must be specified in a column named "colormoistst". + +The \code{mix_and_clean_colors} function can be applied to arbitrary data +sources as long as \code{x} contains sRGB coordinates in columns named "r", +"g", and "b". This function should be applied to chunks of rows within which +color mixtures make sense. + +Examples: +\itemize{ +\item \href{http://ncss-tech.github.io/AQP/soilDB/KSSL-demo.html}{KSSL data} +\item \href{http://ncss-tech.github.io/AQP/soilDB/mixing-soil-color-data.html}{soil color mixing tutorial} +} +} +\author{ +D.E. Beaudette } - - \keyword{manip} - diff --git a/man/simplifyFragmentData.Rd b/man/simplifyFragmentData.Rd new file mode 100644 index 00000000..8f6f2072 --- /dev/null +++ b/man/simplifyFragmentData.Rd @@ -0,0 +1,44 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/simplfyFragmentData.R +\name{simplifyFragmentData} +\alias{simplifyFragmentData} +\alias{simplfyFragmentData} +\alias{simplifyArtifactData} +\title{Simplify Coarse Fraction Data} +\usage{ +simplifyFragmentData(rf, id.var, nullFragsAreZero = TRUE) +} +\arguments{ +\item{rf}{a \code{data.frame} object, typically returned from NASIS, see +details} + +\item{id.var}{character vector with the name of the column containing an ID +that is unique among all horizons in \code{rf}} + +\item{nullFragsAreZero}{should fragment volumes of NULL be interpreted as 0? +(default: TRUE), see details} +} +\description{ +Simplify multiple coarse fraction (>2mm) records by horizon. +} +\details{ +This function is mainly intended for the processing of NASIS pedon/horizon +data which contains multiple coarse fragment descriptions per horizon. +\code{simplifyFragmentData} will "sieve out" coarse fragments into the USDA +classes, split into hard and para- fragments. + +The \code{simplifyFragmentData} function can be applied to data sources +other than NASIS by careful use of the \code{id.var} argument. 
However, +\code{rf} must contain coarse fragment volumes in the column "fragvol", +fragment size (mm) in columns "fragsize_l", "fragsize_r", "fragsize_h", and +fragment cementation class in "fraghard". + +Examples: +\itemize{ +\item \href{http://ncss-tech.github.io/AQP/soilDB/KSSL-demo.html}{KSSL data} +} +} +\author{ +D.E. Beaudette +} +\keyword{manip} diff --git a/man/soilDB-package.Rd b/man/soilDB-package.Rd index 930f7ae7..810866af 100644 --- a/man/soilDB-package.Rd +++ b/man/soilDB-package.Rd @@ -1,15 +1,22 @@ -\name{soilDB-package} -\alias{soilDB.env} -\alias{soilDB-package} -\alias{soilDB} -\docType{package} -\title{Soil Database Interface} -\description{This package provides methods for extracting soils information from local PedonPC and AK Site databases (MS Access format), local NASIS databases (MS SQL Server), and the SDA webservice. Currently USDA-NCSS data sources are supported, however, there are plans to develop interfaces to outside systems such as the Global Soil Mapping project.} -\details{ -It can be difficult to locate all of the dependencies required for sending/processing SOAP requests, especially on UNIX-like operating systems. Windows binary packages for the dependencies can be found \href{http://www.stats.ox.ac.uk/pub/RWin/bin/windows/contrib/2.15/}{here}. See \code{\link{fetchPedonPC}} for a simple wrapper function that should suffice for typical site/pedon/hz queries. An introduction to the soilDB package can be found \href{https://r-forge.r-project.org/scm/viewvc.php/*checkout*/docs/soilDB/soilDB-Intro.html?root=aqp}{here}. -} -\author{J.M. Skovlin and D.E. Beaudette} -\keyword{package} -\seealso{\code{\link{fetchPedonPC}, \link{fetchNASIS}, \link{SDA_query}, \link{loafercreek}}} - - +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/soilDB-package.R +\docType{package} +\name{soilDB-package} +\alias{soilDB-package} +\alias{soilDB.env} +\alias{soilDB} +\title{Soil Database Interface} +\description{ +This package provides methods for extracting soils information from local +PedonPC and AK Site databases (MS Access format), local NASIS databases (MS +SQL Server), and the SDA web service. Currently USDA-NCSS data sources are +supported; however, there are plans to develop interfaces to outside systems +such as the Global Soil Mapping project. +} +\seealso{ +\code{\link{fetchPedonPC}, \link{fetchNASIS}, \link{SDA_query}, \link{loafercreek}} +} +\author{ +J.M. Skovlin, D.E. Beaudette, S.M. Roecker, A.G. Brown +} +\keyword{package} diff --git a/man/taxaExtent.Rd b/man/taxaExtent.Rd index b95825cd..13176dd9 100644 --- a/man/taxaExtent.Rd +++ b/man/taxaExtent.Rd @@ -13,7 +13,8 @@ taxaExtent( \arguments{ \item{x}{single taxa name, case-insensitive} -\item{level}{the taxonomic level within the top 4 tiers of Soil Taxonomy, one of \code{c('order', 'suborder', 'greatgroup', 'subgroup')}} +\item{level}{the taxonomic level within the top 4 tiers of Soil Taxonomy, +one of \code{c('order', 'suborder', 'greatgroup', 'subgroup')}} \item{timeout}{time that we are willing to wait for a response, in seconds} } @@ -21,12 +22,18 @@ taxaExtent( a \code{raster} object } \description{ -This function downloads a generalized representation of the geographic extent of any single taxa from the top 4 tiers of Soil Taxonomy. Data are provided by SoilWeb, ultimately sourced from from the current SSURGO snapshot. Data are returned as \code{raster} objects representing area proportion falling within 800m cells. 
Data are only available in CONUS and returned using an Albers Equal Area / NAD83 coordinate reference system. +This function downloads a generalized representation of the geographic +extent of any single taxa from the top 4 tiers of Soil Taxonomy. Data are +provided by SoilWeb, ultimately sourced from the current SSURGO +snapshot. Data are returned as \code{raster} objects representing area +proportion falling within 800m cells. Data are only available in CONUS and +returned using an Albers Equal Area / NAD83 coordinate reference system. } \note{ This is a work in progress. } \examples{ + \donttest{ if(requireNamespace("curl") & @@ -76,6 +83,7 @@ if(requireNamespace("curl") & } + } \author{ D.E. Beaudette diff --git a/man/uncode.Rd b/man/uncode.Rd index 43d81272..261aa3e0 100644 --- a/man/uncode.Rd +++ b/man/uncode.Rd @@ -1,39 +1,76 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/uncode.R \name{uncode} -\alias{metadata} \alias{uncode} +\alias{metadata} \alias{code} - \title{Convert coded values returned from NASIS and SDA queries to factors} -\description{These functions convert the coded values returned from NASIS or SDA to factors (e.g. 1 = Alfisols) using the metadata tables from NASIS. For SDA the metadata is pulled from a static snapshot in the soilDB package (/data/metadata.rda).} \usage{ -uncode(df, invert = FALSE, db = "NASIS", - droplevels = FALSE, - stringsAsFactors = default.stringsAsFactors() - ) -code(df, ...) +uncode( + df, + invert = FALSE, + db = "NASIS", + droplevels = FALSE, + stringsAsFactors = default.stringsAsFactors(), + static_path = NULL +) } -%- maybe also 'usage' for other objects documented here. \arguments{ - \item{df}{data.frame} - \item{invert}{converts the code labels back to their coded values (FALSE)} - \item{db}{label specifying the soil database the data is coming from, which indicates whether or not to query metadata from local NASIS database ("NASIS") or use soilDB-local snapshot ("LIMS" or "SDA")} - \item{droplevels}{logical: indicating whether to drop unused levels in classifying factors. This is useful when a class has large number of unused classes, which can waste space in tables and figures.} - \item{stringsAsFactors}{logical: should character vectors be converted to factors? The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)} - \item{\dots}{arguments passed on to \code{uncode}} - } - -\details{These functions convert the coded values returned from NASIS into their plain text representation. It duplicates the functionality of the CODELABEL function found in NASIS. This function is primarily intended to be used internally by other soilDB R functions, in order to minimizes the need to manually convert values. +\item{df}{data.frame} -The function works by iterating through the column names in a data frame and looking up whether they match any of the ColumnPhysicalNames found in the metadata domain tables. If matches are found then the columns coded values are converted to their corresponding factor levels. Therefore it is not advisable to reuse column names from NASIS unless the contents match the range of values and format found in NASIS. Otherwise uncode() will convert their values to NA. +\item{invert}{converts the code labels back to their coded values (\code{FALSE})} -When data is being imported from NASIS, the metadata tables are sourced directly from NASIS. 
When data is being imported from SDA or the NASIS Web Reports, the metadata is pulled from a static snapshot in the soilDB package. +\item{db}{label specifying the soil database the data is coming from, which +indicates whether or not to query metadata from local NASIS database +("NASIS") or use soilDB-local snapshot ("LIMS" or "SDA")} -Beware the default is to return the values as factors rather than strings. While strings are generally preferable, factors make plotting more convenient. Generally the factor level ordering returned by uncode() follows the naturally ordering of categories that would be expected (e.g. sand, silt, clay). +\item{droplevels}{logical: indicating whether to drop unused levels in +classifying factors. This is useful when a class has a large number of unused +classes, which can waste space in tables and figures.} + +\item{stringsAsFactors}{logical: should character vectors be converted to +factors?} + +\item{static_path}{Optional: path to local SQLite database containing NASIS +table structure; default: \code{NULL}} +} +\value{ +A data frame with the results. } +\description{ +These functions convert the coded values returned from NASIS or SDA to +factors (e.g. 1 = Alfisols) using the metadata tables from NASIS. For SDA +the metadata is pulled from a static snapshot in the soilDB package +(/data/metadata.rda). +} +\details{ +These functions convert the coded values returned from NASIS into their +plain text representation. It duplicates the functionality of the CODELABEL +function found in NASIS. This function is primarily intended to be used +internally by other soilDB R functions, in order to minimize the need to +manually convert values. + +The function works by iterating through the column names in a data frame and +looking up whether they match any of the ColumnPhysicalNames found in the +metadata domain tables. If matches are found then the column's coded values +are converted to their corresponding factor levels. Therefore it is not +advisable to reuse column names from NASIS unless the contents match the +range of values and format found in NASIS. Otherwise uncode() will convert +their values to NA. -\value{A data frame with the results.} -\author{Stephen Roecker} +When data is being imported from NASIS, the metadata tables are sourced +directly from NASIS. When data is being imported from SDA or the NASIS Web +Reports, the metadata is pulled from a static snapshot in the soilDB +package. + +Beware the default is to return the values as factors rather than strings. +While strings are generally preferable, factors make plotting more +convenient. Generally the factor level ordering returned by uncode() follows +the natural ordering of categories that would be expected (e.g. sand, +silt, clay). +} \examples{ + \donttest{ if(requireNamespace("curl") & curl::has_internet() & @@ -47,7 +84,9 @@ if(requireNamespace("curl") & levels(s$taxorder) } } + +} +\author{ +Stephen Roecker } -% Add one or more standard keywords, see file 'KEYWORDS' in the -% R documentation directory. 
-\keyword{manip}% use one of RShowDoc("KEYWORDS") +\keyword{manip} diff --git a/man/us_ss_timeline.Rd b/man/us_ss_timeline.Rd index f18b8557..61ae6c2b 100644 --- a/man/us_ss_timeline.Rd +++ b/man/us_ss_timeline.Rd @@ -1,33 +1,24 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/soilDB-package.R +\docType{data} \name{us_ss_timeline} \alias{us_ss_timeline} -\docType{data} - -\title{ -Timeline of US Published Soil Surveys +\title{Timeline of US Published Soil Surveys} +\format{ +A data frame with 5209 observations on the following 5 variables. +\describe{ \item{\code{ssa}}{Soil Survey name, a character vector} +\item{\code{year}}{year of publication, a numeric vector} +\item{\code{pdf}}{does a pdf exist, a logical vector} +\item{\code{state}}{State abbreviation, a character vector} } +} +\source{ +https://www.nrcs.usda.gov/wps/portal/nrcs/soilsurvey/soils/survey/state/ } - \description{ This dataset contains the year in which each US Soil Survey was published. } - -\usage{data("us_ss_timeline")} - -\format{ - A data frame with 5209 observations on the following 5 variables. - \describe{ - \item{\code{ssa}}{Soil Survey name, a character vector} - \item{\code{year}}{year of publication, a numeric vector} - \item{\code{pdf}}{does a pdf exists, a logical vector} - \item{\code{state}}{State abbreviation, a character vector} - } -} - \details{ -This data was web scraped from the NRCS Soils Website. The scraping procedure and a example plot are included in the examples section below. -} - -\source{ -https://www.nrcs.usda.gov/wps/portal/nrcs/soilsurvey/soils/survey/state/ +This data was web scraped from the NRCS Soils Website. The scraping +procedure and an example plot are included in the examples section below. } - \keyword{datasets} diff --git a/man/waterDayYear.Rd b/man/waterDayYear.Rd index f28b3fb1..c087a42d 100644 --- a/man/waterDayYear.Rd +++ b/man/waterDayYear.Rd @@ -1,38 +1,39 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/waterDayYear.R \name{waterDayYear} \alias{waterDayYear} - \title{Compute Water Day and Year} -\description{Compute "water" day and year, based on the end of the typical or legal dry season. This is September 30 in California.} - \usage{ waterDayYear(d, end = "09-30") } - \arguments{ - \item{d}{anything the can be safely converted to \code{PPOSIXlt}} - \item{end}{"MM-DD" notation for end of water year} -} - -\details{This function doesn't know about leap-years. Probably worth checking.} +\item{d}{anything that can be safely converted to \code{POSIXlt}} +\item{end}{"MM-DD" notation for end of water year} +} \value{ -A \code{data.frame} object with the following - \item{wy}{the "water year"} - \item{wd}{the "water day"} +A \code{data.frame} object with the following \item{wy}{the "water +year"} \item{wd}{the "water day"} } -\references{Ideas borrowed from: -\url{https://github.com/USGS-R/dataRetrieval/issues/246} and -\url{https://stackoverflow.com/questions/48123049/create-day-index-based-on-water-year} +\description{ +Compute "water" day and year, based on the end of the typical or legal dry +season. This is September 30 in California. +} +\details{ +This function doesn't know about leap-years. Probably worth checking. } - -\author{D.E. 
Beaudette} - - \examples{ + # try it waterDayYear('2019-01-01') -} - -\keyword{ manip }% use one of RShowDoc("KEYWORDS") +} +\references{ +Ideas borrowed from: +\url{https://github.com/USGS-R/dataRetrieval/issues/246} and +\url{https://stackoverflow.com/questions/48123049/create-day-index-based-on-water-year} +} +\author{ +D.E. Beaudette +} +\keyword{manip} diff --git a/manbak/ISSR800.wcs.Rd b/manbak/ISSR800.wcs.Rd new file mode 100644 index 00000000..772a0bd6 --- /dev/null +++ b/manbak/ISSR800.wcs.Rd @@ -0,0 +1,41 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/ISSR800.R +\name{ISSR800.wcs} +\alias{ISSR800.wcs} +\title{ISSR-800 Web Coverage Service (WCS)} +\usage{ +ISSR800.wcs(aoi, var, res = 800, quiet = FALSE) +} +\arguments{ +\item{aoi}{area of interest (AOI) defined using a \code{Spatial*}, a \code{sf}, \code{sfc} or \code{bbox} object or a \code{list}, see details} + +\item{var}{ISSR-800 grid name, see details} + +\item{res}{grid resolution, units of meters. The native resolution of ISSR-800 grids (this WCS) is 800m.} + +\item{quiet}{logical, passed to \code{download.file} to enable / suppress URL and progress bar for download.} +} +\value{ +\code{raster} object containing indexed map unit keys and associated raster attribute table +} +\description{ +Intermediate-scale gridded (800m) soil property and interpretation maps from aggregated SSURGO and STATSGO data. These maps were developed by USDA-NRCS-SPSD staff in collaboration with UCD-LAWR. Originally for educational use and \href{https://casoilresource.lawr.ucdavis.edu/soil-properties/}{interactive thematic maps}, these data are a suitable alternative to gridded STATSGO-derived thematic soil maps. The full size grids can be \href{https://casoilresource.lawr.ucdavis.edu/soil-properties/download.php}{downloaded here}. +} +\details{ +\code{aoi} should be specified as either a \code{Spatial*}, \code{sf}, \code{sfc} or \code{bbox} object or a \code{list} containing: + +\describe{ +\item{\code{aoi}}{bounding-box specified as (xmin, ymin, xmax, ymax) e.g. c(-114.16, 47.65, -114.08, 47.68)} +\item{\code{crs}}{coordinate reference system of BBOX, e.g. '+init=epsg:4326'} +} + +The WCS query is parameterized using \code{raster::extent} derived from the above AOI specification, after conversion to the native CRS (EPSG:6350) of the ISSR-800 grids. + +Variables available from this WCS can be queried using \code{WCS_details(wcs = 'ISSR800')}. +} +\note{ +There are still some issues to be resolved related to the encoding of NA. Variables with a natural zero (e.g. SAR) have 0 set to NA. +} +\author{ +D.E. Beaudette and A.G. 
Brown +} diff --git a/manbak/KSSL_VG_model.Rd b/manbak/KSSL_VG_model.Rd new file mode 100644 index 00000000..017007fe --- /dev/null +++ b/manbak/KSSL_VG_model.Rd @@ -0,0 +1,61 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/KSSL_VG_model.R +\name{KSSL_VG_model} +\alias{KSSL_VG_model} +\title{Develop a Water Retention Curve from KSSL Data} +\usage{ +KSSL_VG_model(VG_params, phi_min = 10^-6, phi_max = 10^8, pts = 100) +} +\arguments{ +\item{VG_params}{\code{data.frame} or \code{list} object with the parameters of the van Genuchten model, see details} + +\item{phi_min}{lower limit for water potential in kPa} + +\item{phi_max}{upper limit for water potential in kPa} + +\item{pts}{number of points to include in estimated water retention curve} +} +\value{ +A list with the following components: +\describe{ +\item{VG_curve}{estimated water retention curve: paired estimates of water potential (phi) and water content (theta)} +\item{VG_function}{spline function for converting water potential (phi, units of kPa) to estimated volumetric water content (theta, units of percent, range: \{0, 1\})} +\item{VG_inverse_function}{spline function for converting volumetric water content (theta, units of percent, range: \{0, 1\}) to estimated water potential (phi, units of kPa)} +} +} +\description{ +Water retention curve modeling via van Genuchten model and KSSL data. +} +\details{ +This function was developed to work with measured or estimated parameters of the \href{https://en.wikipedia.org/wiki/Water_retention_curve}{van Genuchten model}, as generated by the \href{https://www.ars.usda.gov/pacific-west-area/riverside-ca/agricultural-water-efficiency-and-salinity-research-unit/docs/model/rosetta-model/}{Rosetta model}. As such, \code{VG_params} should have the following format and conventions: +\describe{ +\item{theta_r}{residual water content, values should be in the range of \{0, 1\}} +\item{theta_s}{saturated water content, values should be in the range of \{0, 1\}} +\item{alpha}{related to the inverse of the air entry suction, function expects log10-transformed values with units of cm} +\item{npar}{index of pore size distribution, function expects log10-transformed values with units of 1/cm} +} +} +\note{ +A practical example is given in the \href{http://ncss-tech.github.io/AQP/soilDB/fetchSCAN-demo.html}{fetchSCAN tutorial}. +} +\examples{ + +# basic example +d <- data.frame( + theta_r = 0.0337216, + theta_s = 0.4864061, + alpha = -1.581517, + npar = 0.1227247 +) + +vg <- KSSL_VG_model(d) + +str(vg) + +} +\references{ +\href{https://en.wikipedia.org/wiki/Water_retention_curve}{water retention curve estimation} +} +\author{ +D.E. 
Beaudette +} diff --git a/manbak/OSDquery.Rd b/manbak/OSDquery.Rd new file mode 100644 index 00000000..ab000356 --- /dev/null +++ b/manbak/OSDquery.Rd @@ -0,0 +1,97 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/OSDquery.R +\name{OSDquery} +\alias{OSDquery} +\title{Full text searching of the USDA-NRCS Official Series Descriptions} +\usage{ +OSDquery( + mlra = "", + taxonomic_class = "", + typical_pedon = "", + brief_narrative = "", + ric = "", + use_and_veg = "", + competing_series = "", + geog_location = "", + geog_assoc_soils = "" +) +} +\arguments{ +\item{mlra}{a comma-delimited list of MLRA to search ('17,18,22A')} + +\item{taxonomic_class}{search family level classification} + +\item{typical_pedon}{search typical pedon section} + +\item{brief_narrative}{search brief narrative} + +\item{ric}{search range in characteristics section} + +\item{use_and_veg}{search use and vegetation section} + +\item{competing_series}{search competing series section} + +\item{geog_location}{search geographic setting section} + +\item{geog_assoc_soils}{search geographically associated soils section} +} +\value{ +a \code{data.frame} object containing soil series names that match patterns supplied as arguments. +} +\description{ +This is a rough example of how chunks of text parsed from OSD records can be made search-able with the \href{https://www.postgresql.org/docs/9.5/textsearch.html}{PostgreSQL fulltext indexing} and query system (\href{https://www.postgresql.org/docs/9.5/datatype-textsearch.html}{syntax details}). Each search field (except for the "brief narrative" and MLRA) corresponds with a section header in an OSD. The results may not include every OSD due to formatting errors and typos. Results are scored based on the number of times search terms match words in associated sections. This is the R API corresponding to \href{https://casoilresource.lawr.ucdavis.edu/osd-search/}{this webpage}. +} +\details{ +See \href{https://casoilresource.lawr.ucdavis.edu/osd-search/}{this webpage} for more information. +\itemize{ +\item family level taxa are derived from SC database, not parsed OSD records +\item MLRA are derived via spatial intersection (SSURGO x MLRA polygons) +\item MLRA-filtering is only possible for series used in the current SSURGO snapshot (component name) +\item logical AND: \code{&} +\item logical OR: \code{|} +\item wildcard, e.g. rhy-something \verb{rhy:*} +\item search terms with spaces need doubled single quotes: \verb{''san joaquin''} +\item combine search terms into a single expression: \verb{(grano:* | granite)} +} + +Related documentation can be found in the following tutorials +\itemize{ +\item \href{http://ncss-tech.github.io/AQP/soilDB/soil-series-query-functions.html}{overview of all soil series query functions} +\item \href{https://ncss-tech.github.io/AQP/soilDB/competing-series.html}{competing soil series} +\item \href{https://ncss-tech.github.io/AQP/soilDB/siblings.html}{siblings} +} +} +\note{ +SoilWeb maintains a snapshot of the Official Series Description data. +} +\examples{ + + +\donttest{ +if(requireNamespace("curl") & + curl::has_internet() & + require(aqp)) { + + # find all series that list Pardee as a geographically associated soil. 
+ s <- OSDquery(geog_assoc_soils = 'pardee') + + # get data for these series + x <- fetchOSD(s$series, extended = TRUE, colorState = 'dry') + + # simple figure + par(mar=c(0,0,1,1)) + plot(x$SPC) +} +} + +} +\references{ +\url{https://www.nrcs.usda.gov/wps/portal/nrcs/detailfull/soils/home/?cid=nrcs142p2_053587} +} +\seealso{ +\code{\link{fetchOSD}, \link{siblings}, \link{fetchOSD}} +} +\author{ +D.E. Beaudette +} +\keyword{manip} diff --git a/manbak/ROSETTA.Rd b/manbak/ROSETTA.Rd new file mode 100644 index 00000000..b56cf4fd --- /dev/null +++ b/manbak/ROSETTA.Rd @@ -0,0 +1,92 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/ROSETTA.R +\name{ROSETTA} +\alias{ROSETTA} +\title{ROSETTA Model API} +\usage{ +ROSETTA(x, vars, v = c("1", "2", "3"), chunkSize = 10000, conf = NULL) +} +\arguments{ +\item{x}{a \code{data.frame} of required soil properties, may contain other columns, see details} + +\item{vars}{character vector of column names in \code{x} containing relevant soil property values, see details} + +\item{v}{ROSETTA model version number: '1', '2', or '3', see details and references.} + +\item{chunkSize}{number of records per API call} + +\item{conf}{configuration passed to \code{httr::POST()} such as \code{verbose()}.} +} +\value{ +a \code{data.frame} object with the following columns: +\itemize{ +\item \code{...}: pre-existing columns from \code{x} +\item \code{theta_r}: residual volumetric water content (cm^3/cm^3) +\item \code{theta_s}: saturated volumetric water content (cm^3/cm^3) +\item \code{alpha}: related to the inverse of the air entry suction, log10-transformed values with units of cm +\item \code{npar}: index of pore size distribution, log10-transformed values with units of 1/cm +\item \code{ksat}: saturated hydraulic conductivity, log10-transformed values with units of cm/day +\item \code{.rosetta.model}: best-available model selection (-1 signifies that prediction was not possible due to missing values in \code{x}) +\item \code{.rosetta.version}: ROSETTA algorithm version, selected via function argument \code{v} +} +} +\description{ +A simple interface to the \href{https://www.ars.usda.gov/pacific-west-area/riverside-ca/agricultural-water-efficiency-and-salinity-research-unit/docs/model/rosetta-model/}{ROSETTA model} for predicting hydraulic parameters from soil properties. The ROSETTA API was developed by Dr. Todd Skaggs (USDA-ARS) and links to the work of Zhang and Schaap, (2017). See the \href{http://ncss-tech.github.io/AQP/soilDB/ROSETTA-API.html}{related tutorial} for additional examples. +} +\details{ +Soil properties supplied in \code{x} must be described, in order, via \code{vars} argument. The API does not use the names but column ordering must follow: sand, silt, clay, bulk density, volumetric water content at 33kPa (1/3 bar), and volumetric water content at 1500 kPa (15 bar). 
+ +The ROSETTA model relies on a minimum of 3 soil properties, with increasing (expected) accuracy as additional properties are included: +\itemize{ +\item required, \code{sand}, \code{silt}, \code{clay}: USDA soil texture separates (percentages) that sum to 100\\% +\item optional, \verb{bulk density} (any moisture basis): mass per volume after accounting for >2mm fragments, units of g/cm^3 +\item optional, \verb{volumetric water content at 33 kPa}: roughly "field capacity" for most soils, units of cm^3/cm^3 +\item optional, \verb{volumetric water content at 1500 kPa}: roughly "permanent wilting point" for most plants, units of cm^3/cm^3 +} + +Column names not specified in \code{vars} are retained in the output. + +Three versions of the ROSETTA model are available, selected using \code{v = "1"}, \code{v = "2"}, or \code{v = "3"}. +\itemize{ +\item \strong{version 1}: Schaap, M.G., F.J. Leij, and M.Th. van Genuchten. 2001. ROSETTA: a computer program for estimating soil hydraulic parameters with hierarchical pedotransfer functions. Journal of Hydrology 251(3-4): 163-176. doi: \doi{10.1016/S0022-1694(01)00466-8}. +\item \strong{version 2}: Schaap, M.G., A. Nemes, and M.T. van Genuchten. 2004. Comparison of Models for Indirect Estimation of Water Retention and Available Water in Surface Soils. Vadose Zone Journal 3(4): 1455-1463. doi: \doi{10.2136/vzj2004.1455}. +\item \strong{version 3}: Zhang, Y., and M.G. Schaap. 2017. Weighted recalibration of the Rosetta pedotransfer model with improved estimates of hydraulic parameter distributions and summary statistics (Rosetta3). Journal of Hydrology 547: 39-53. doi: \doi{10.1016/j.jhydrol.2017.01.004} +} +} +\note{ +Input data should not contain column names that will conflict with the ROSETTA API results: \code{theta_r}, \code{theta_s}, \code{alpha}, \code{npar}, \code{ksat}. +} +\references{ +Consider using the interactive version, with copy/paste functionality at: \url{https://www.handbook60.org/rosetta}. + +Rosetta Model Home Page: \url{https://www.ars.usda.gov/pacific-west-area/riverside-ca/agricultural-water-efficiency-and-salinity-research-unit/docs/model/rosetta-model/}. + +Python ROSETTA model: \url{http://www.u.arizona.edu/~ygzhang/download.html}. + +Yonggen Zhang, Marcel G. Schaap. 2017. Weighted recalibration of the Rosetta pedotransfer model with improved estimates of hydraulic parameter distributions and summary statistics (Rosetta3). Journal of Hydrology. 547: 39-53. \doi{10.1016/j.jhydrol.2017.01.004}. + +Kosugi, K. 1999. General model for unsaturated hydraulic conductivity for soils with lognormal pore-size distribution. Soil Sci. Soc. Am. J. 63:270-277. + +Mualem, Y. 1976. A new model predicting the hydraulic conductivity of unsaturated porous media. Water Resour. Res. 12:513-522. + +Schaap, M.G. and W. Bouten. 1996. Modeling water retention curves of sandy soils using neural networks. Water Resour. Res. 32:3033-3040. + +Schaap, M.G., Leij F.J. and van Genuchten M.Th. 1998. Neural network analysis for hierarchical prediction of soil water retention and saturated hydraulic conductivity. Soil Sci. Soc. Am. J. 62:847-855. + +Schaap, M.G., and F.J. Leij, 1998. Database Related Accuracy and Uncertainty of Pedotransfer Functions, Soil Science 163:765-779. + +Schaap, M.G., F.J. Leij and M. Th. van Genuchten. 1999. A bootstrap-neural network approach to predict soil hydraulic parameters. In: van Genuchten, M.Th., F.J. Leij, and L. Wu (eds), Proc. Int.
Workshop, Characterization and Measurements of the Hydraulic Properties of Unsaturated Porous Media, pp 1237-1250, University of California, Riverside, CA. + +Schaap, M.G., F.J. Leij, 1999, Improved prediction of unsaturated hydraulic conductivity with the Mualem-van Genuchten, Submitted to Soil Sci. Soc. Am. J. + +van Genuchten, M.Th. 1980. A closed-form equation for predicting the hydraulic conductivity of unsaturated soils. Soil Sci. Am. J. 44:892-898. + +Schaap, M.G., F.J. Leij, and M.Th. van Genuchten. 2001. ROSETTA: a computer program for estimating soil hydraulic parameters with hierarchical pedotransfer functions. Journal of Hydrology 251(3-4): 163-176. doi: \doi{10.1016/S0022-1694(01)00466-8}. + +Schaap, M.G., A. Nemes, and M.T. van Genuchten. 2004. Comparison of Models for Indirect Estimation of Water Retention and Available Water in Surface Soils. Vadose Zone Journal 3(4): 1455-1463. doi: \doi{10.2136/vzj2004.1455}. + +Zhang, Y., and M.G. Schaap. 2017. Weighted recalibration of the Rosetta pedotransfer model with improved estimates of hydraulic parameter distributions and summary statistics (Rosetta3). Journal of Hydrology 547: 39-53. doi: \doi{10.1016/j.jhydrol.2017.01.004}. +} +\author{ +D.E. Beaudette, Todd Skaggs (ARS), Richard Reid +} diff --git a/manbak/SCAN_SNOTEL_metadata.Rd b/manbak/SCAN_SNOTEL_metadata.Rd new file mode 100644 index 00000000..bd2c1b89 --- /dev/null +++ b/manbak/SCAN_SNOTEL_metadata.Rd @@ -0,0 +1,34 @@ +\name{SCAN_SNOTEL_metadata} +\alias{SCAN_SNOTEL_metadata} +\alias{state_FIPS_codes} + +\docType{data} + +\title{SCAN and SNOTEL Station Metadata} + +\description{SCAN and SNOTEL station metadata, a work in progress.} + +\usage{data("SCAN_SNOTEL_metadata")} + +\format{ + A data frame with 1092 observations on the following 12 variables. + \describe{ + \item{\code{Name}}{station name} + \item{\code{Site}}{station ID} + \item{\code{State}}{state} + \item{\code{Network}}{sensor network: SCAN / SNOTEL} + \item{\code{County}}{county} + \item{\code{Elevation_ft}}{station elevation in feet} + \item{\code{Latitude}}{latitude of station} + \item{\code{Longitude}}{longitude of station} + \item{\code{HUC}}{associated watershed} + \item{\code{climstanm}}{climate station name (TODO: remove this column)} + \item{\code{upedonid}}{associated user pedon ID} + \item{\code{pedlabsampnum}}{associated lab sample ID} + } +} + +\details{These data have been compiled from several sources and represent a progressive effort to organize SCAN/SNOTEL station metadata. Therefore, some records may be missing or incorrect. Details on this effort can be found at the associated GitHub issue page: \url{https://github.com/ncss-tech/soilDB/issues/61}.} + + +\keyword{datasets} diff --git a/manbak/SDA_query.Rd b/manbak/SDA_query.Rd new file mode 100644 index 00000000..7fd99c15 --- /dev/null +++ b/manbak/SDA_query.Rd @@ -0,0 +1,93 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/SDA_query.R +\name{SDA_query} +\alias{SDA_query} +\title{Soil Data Access Query} +\usage{ +SDA_query(q) +} +\arguments{ +\item{q}{A valid T-SQL query surrounded by double quotes} +} +\value{ +a data.frame result (\code{NULL} if empty, try-error on error) +} +\description{ +Submit a query to the Soil Data Access (SDA) REST/JSON web-service and return the results as a data.frame. There is a 100,000 record limit and 32Mb JSON serializer limit, per query. Queries should contain a WHERE statement or JOIN condition to limit the number of rows affected / returned. 
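For example, a minimal self-limiting sketch (network access assumed):

```r
library(soilDB)

# the WHERE clause restricts the result to a single soil survey area
q <- "SELECT areasymbol, saverest FROM sacatalog WHERE areasymbol = 'CA630';"
x <- SDA_query(q)
head(x)
```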
Consider wrapping calls to \code{SDA_query} in a function that can iterate over logical chunks (e.g. areasymbol, mukey, cokey, etc.). The function \code{makeChunks} can help with such iteration. +} +\details{ +The SDA website can be found at \url{https://sdmdataaccess.nrcs.usda.gov} and query examples can be found at \url{https://sdmdataaccess.nrcs.usda.gov/QueryHelp.aspx}. A library of query examples can be found at \url{https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=SDA-SQL_Library_Home}. + +SSURGO (detailed soil survey) and STATSGO (generalized soil survey) data are stored together within SDA. This means that queries that don't specify an area symbol may result in a mixture of SSURGO and STATSGO records. See the examples below and the \href{http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html}{SDA Tutorial} for details. +} +\note{ +This function requires the \code{httr}, \code{jsonlite}, and \code{XML} packages +} +\examples{ +\donttest{ +if(requireNamespace("curl") & + curl::has_internet()) { + + ## get SSURGO export date for all soil survey areas in California + # there is no need to filter STATSGO + # because we are filtering on SSURGO area symbols + q <- "SELECT areasymbol, saverest FROM sacatalog WHERE areasymbol LIKE 'CA\%';" + x <- SDA_query(q) + head(x) + + + ## get SSURGO component data associated with the + ## Amador series / major component only + # this query must explicitly filter out STATSGO data + q <- "SELECT cokey, compname, comppct_r FROM legend + INNER JOIN mapunit mu ON mu.lkey = legend.lkey + INNER JOIN component co ON mu.mukey = co.mukey + WHERE legend.areasymbol != 'US' AND compname = 'Amador';" + + res <- SDA_query(q) + str(res) + + + ## get component-level data for a specific soil survey area (Yolo county, CA) + # there is no need to filter STATSGO because the query contains + # an implicit selection of SSURGO data by areasymbol + q <- "SELECT + component.mukey, cokey, comppct_r, compname, taxclname, + taxorder, taxsuborder, taxgrtgroup, taxsubgrp + FROM legend + INNER JOIN mapunit ON mapunit.lkey = legend.lkey + LEFT OUTER JOIN component ON component.mukey = mapunit.mukey + WHERE legend.areasymbol = 'CA113' ;" + + res <- SDA_query(q) + str(res) + + ## get tabular data based on result from spatial query + # there is no need to filter STATSGO because + # SDA_Get_Mukey_from_intersection_with_WktWgs84() implies SSURGO + # + # requires raster and rgeos packages because raster is suggested + # and rgeos is additional + if(require(raster) & require(rgeos)) { + # text -> bbox -> WKT + # xmin, xmax, ymin, ymax + b <- c(-120.9, -120.8, 37.7, 37.8) + p <- writeWKT(as(extent(b), 'SpatialPolygons')) + q <- paste0("SELECT mukey, cokey, compname, comppct_r FROM component + WHERE mukey IN (SELECT DISTINCT mukey FROM + SDA_Get_Mukey_from_intersection_with_WktWgs84('", p, + "')) ORDER BY mukey, cokey, comppct_r DESC") + + x <- SDA_query(q) + str(x) + } + } +} +} +\seealso{ +\code{\link{mapunit_geom_by_ll_bbox}} +} +\author{ +D.E. 
Beaudette +} +\keyword{manip} diff --git a/manbak/SDA_spatialQuery.Rd b/manbak/SDA_spatialQuery.Rd new file mode 100644 index 00000000..42b32f72 --- /dev/null +++ b/manbak/SDA_spatialQuery.Rd @@ -0,0 +1,187 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/SDA-spatial.R +\name{SDA_spatialQuery} +\alias{SDA_spatialQuery} +\alias{SDA_make_spatial_query} +\alias{SDA_query_features} +\title{SDA Spatial Query} +\usage{ +SDA_spatialQuery( + geom, + what = "mukey", + geomIntersection = FALSE, + db = c("SSURGO", "STATSGO") +) +} +\arguments{ +\item{geom}{a Spatial* object, with valid CRS. May contain multiple features.} +
+\item{what}{a character vector specifying what to return. 'mukey': \code{data.frame} with intersecting map unit keys and names; 'geom': overlapping or intersecting map unit polygons} +
+\item{geomIntersection}{logical; \code{FALSE}: overlapping map unit polygons returned, \code{TRUE}: intersection of \code{geom} + map unit polygons is returned.} +
+\item{db}{a character vector identifying the Soil Geographic Databases +('SSURGO' or 'STATSGO') to query. Option \var{STATSGO} currently works +only in combination with \code{what = "geom"}.} +} +\value{ +A \code{data.frame} if \code{what = 'mukey'}, otherwise \code{SpatialPolygonsDataFrame} object. +} +\description{ +Query SDA (SSURGO / STATSGO) records via spatial intersection with supplied geometries. Input can be SpatialPoints, SpatialLines, or SpatialPolygons objects with a valid CRS. Map unit keys, overlapping polygons, or the spatial intersection of \code{geom} + SSURGO / STATSGO polygons can be returned. See details. +} +\details{ +Queries for map unit keys are always more efficient than queries for overlapping or intersecting (the least efficient) features. \code{geom} is converted to GCS / WGS84 as needed. Map unit keys are always returned when using \code{what = "geom"}. + +There is a 100,000 record limit and 32Mb JSON serializer limit, per query. + +SSURGO (detailed soil survey, typically 1:24,000 scale) and STATSGO (generalized soil survey, 1:250,000 scale) data are stored together within SDA. This means that queries that don't specify an area symbol may result in a mixture of SSURGO and STATSGO records. See the examples below and the \href{http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html}{SDA Tutorial} for details. +} +\note{ +Row-order is not preserved across features in \code{geom} and returned object. Use \code{sp::over()} or similar functionality to extract from results. Polygon area in acres is computed server-side when \code{what = 'geom'} and \code{geomIntersection = TRUE}.
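A minimal sketch of re-linking inputs to results with \code{sp::over()} (coordinates are illustrative; network access assumed):

```r
library(soilDB)
library(sp)

# two example points (WGS84)
p <- SpatialPoints(cbind(x = c(-119.72, -119.75), y = c(36.92, 36.91)),
                   proj4string = CRS('+proj=longlat +datum=WGS84'))

# overlapping map unit polygons, with mukey in the attribute table
res <- SDA_spatialQuery(p, what = 'geom')

# over() recovers the point -> polygon (mukey) linkage,
# since row-order of `res` need not match `p`
mu <- over(p, res)
```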
+} +\examples{ +\donttest{ +if(requireNamespace("curl") & + curl::has_internet() & + requireNamespace("sp") & + requireNamespace("raster") + ) { + +library(aqp) +library(sp) +library(raster) + +## query at a point + +# example point +p <- SpatialPoints(cbind(x = -119.72330, y = 36.92204), + proj4string = CRS('+proj=longlat +datum=WGS84')) + +# query map unit records at this point +res <- SDA_spatialQuery(p, what = 'mukey') + +# convert results into an SQL "IN" statement +# useful when there are multiple intersecting records +mu.is <- format_SQL_in_statement(res$mukey) + +# composite SQL WHERE clause +sql <- sprintf("mukey IN \%s", mu.is) + +# get commonly used map unit / component / chorizon records +# as a SoilProfileCollection object +# confusing but essential: request that results contain `mukey` +# with `duplicates = TRUE` +x <- fetchSDA(sql, duplicates = TRUE) + +# safely set texture class factor levels +# using a copy of the original `texture` column +# (the original column is preserved as-is) +horizons(x)$texture.class <- factor(x$texture, levels = SoilTextureLevels()) + +# graphical depiction of the result +plotSPC(x, color='texture.class', label='compname', + name='hzname', cex.names = 1, width=0.25, + plot.depth.axis=FALSE, hz.depths=TRUE, + name.style='center-center' +) + + + +## query mukey + geometry that intersect with a bounding box + +# define a bounding box: xmin, xmax, ymin, ymax +# +# +-------------------(ymax, xmax) +# | | +# | | +# (ymin, xmin) ----------------+ +b <- c(-119.747629, -119.67935, 36.912019, 36.944987) + +# convert bounding box to WKT +bbox.sp <- as(extent(b), 'SpatialPolygons') +proj4string(bbox.sp) <- '+proj=longlat +datum=WGS84' + +# results contain associated map unit keys (mukey) +# return SSURGO polygons, after intersection with provided BBOX +ssurgo.geom <- SDA_spatialQuery( + bbox.sp, + what = 'geom', + db = 'SSURGO', + geomIntersection = TRUE +) + +# return STATSGO polygons, after intersection with provided BBOX +statsgo.geom <- SDA_spatialQuery( + bbox.sp, + what = 'geom', + db = 'STATSGO', + geomIntersection = TRUE +) + +# inspect results +par(mar = c(0,0,3,1)) +plot(ssurgo.geom, border = 'royalblue') +plot(statsgo.geom, lwd = 2, border = 'firebrick', add = TRUE) +plot(bbox.sp, lwd = 3, add = TRUE) +legend( + x = 'topright', + legend = c('BBOX', 'STATSGO', 'SSURGO'), + lwd = c(3, 2, 1), + col = c('black', 'firebrick', 'royalblue') +) + + +# quick reminder that STATSGO map units often contain many components +# format an SQL IN statement using the first STATSGO mukey +mu.is <- format_SQL_in_statement(statsgo.geom$mukey[1]) + +# composite SQL WHERE clause +sql <- sprintf("mukey IN \%s", mu.is) + +# get commonly used map unit / component / chorizon records +# as a SoilProfileCollection object +x <- fetchSDA(sql) + +# tighter figure margins +par(mar = c(0,0,3,1)) + + +# organize component sketches by national map unit symbol +# color horizons via awc +# adjust legend title +# add alternate label (vertical text) containing component percent +# move horizon names into the profile sketches +# make profiles wider +groupedProfilePlot( + x, + groups = 'nationalmusym', + label = 'compname', + color = 'awc_r', + col.label = 'Available Water Holding Capacity (cm / cm)', + alt.label = 'comppct_r', + name.style = 'center-center', + width = 0.3 +) + + +mtext( + 'STATSGO (1:250,000) map units contain a lot of components!', + side = 1, + adj = 0, + line = -1.5, + at = 0.25, + font = 4 +) + } +} + +} +\seealso{ +\code{\link{SDA_query}} +}
+\author{ +D.E. Beaudette, A.G. Brown, D.R. Schlaepfer +} +\keyword{manip} diff --git a/man/SSURGO_spatial_query.Rd b/manbak/SSURGO_spatial_query.Rd similarity index 100% rename from man/SSURGO_spatial_query.Rd rename to manbak/SSURGO_spatial_query.Rd diff --git a/manbak/STRplot.Rd b/manbak/STRplot.Rd new file mode 100644 index 00000000..61995c12 --- /dev/null +++ b/manbak/STRplot.Rd @@ -0,0 +1,41 @@ +\name{STRplot} +\alias{STRplot} + +\title{Graphical Description of US Soil Taxonomy Soil Temperature Regimes} +\description{Graphical Description of US Soil Taxonomy Soil Temperature Regimes} + +\usage{ +STRplot(mast, msst, mwst, permafrost = FALSE, pt.cex = 2.75, leg.cex = 0.85) +} + +\arguments{ + \item{mast}{single value or vector of mean annual soil temperature (deg C)} + \item{msst}{single value or vector of mean summer soil temperature (deg C)} + \item{mwst}{single value of mean winter soil temperature (deg C)} + \item{permafrost}{logical: permafrost presence / absence} + \item{pt.cex}{symbol size} + \item{leg.cex}{legend size} +} + +\details{ +\href{http://ncss-tech.github.io/AQP/soilDB/STR-eval.html}{Related tutorial}. +} + +\references{ +Soil Survey Staff. 2015. Illustrated guide to soil taxonomy. U.S. Department of Agriculture, Natural Resources Conservation Service, National Soil Survey Center, Lincoln, Nebraska. +} + +\author{D.E. Beaudette} + + +\seealso{ +\code{\link{estimateSTR}} +} + +\examples{ +par(mar=c(4,1,0,1)) +STRplot(mast = 0:25, msst = 10, mwst = 1) +} + +\keyword{ hplot }% use one of RShowDoc("KEYWORDS") + diff --git a/manbak/WCS_details.Rd b/manbak/WCS_details.Rd new file mode 100644 index 00000000..225a7ccf --- /dev/null +++ b/manbak/WCS_details.Rd @@ -0,0 +1,21 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/WCS-utils.R +\name{WCS_details} +\alias{WCS_details} +\title{Web Coverage Services Details} +\usage{ +WCS_details(wcs = c("mukey", "ISSR800")) +} +\arguments{ +\item{wcs}{a WCS label ('mukey' or 'ISSR800')} +} +\value{ +a \code{data.frame} +} +\description{ +List variables or databases provided by soilDB web coverage service (WCS) abstraction. These lists will be expanded in future versions. +} +\examples{ + +WCS_details(wcs = 'ISSR800') +} diff --git a/manbak/createStaticNASIS.Rd b/manbak/createStaticNASIS.Rd new file mode 100644 index 00000000..f1c95933 --- /dev/null +++ b/manbak/createStaticNASIS.Rd @@ -0,0 +1,38 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/createStaticNASIS.R +\name{createStaticNASIS} +\alias{createStaticNASIS} +\title{Create a memory or file-based instance of NASIS database (for selected tables)} +\usage{ +createStaticNASIS( + tables = NULL, + SS = FALSE, + systables = FALSE, + static_path = NULL, + output_path = NULL +) +} +\arguments{ +\item{tables}{Character vector of target tables. Default: \code{NULL} is all tables meeting the following criteria.} + +\item{SS}{Logical. Include "selected set" tables (ending with suffix \code{"_View1"}). Default: \code{FALSE}} + +\item{systables}{Logical. Include "system" tables (starting with prefix \code{"system"}). Default: \code{FALSE}} + +\item{static_path}{Optional: path to SQLite database containing NASIS table structure; Default: \code{NULL}} + +\item{output_path}{Optional: path to new/existing SQLite database to write tables to. Default: \code{NULL} returns table results as named list.} +} +\value{ +A named list of results from calling \code{dbQueryNASIS} for all columns in each NASIS table. 
+} +\description{ +Create a memory or file-based instance of NASIS database (for selected tables) +} +\examples{ + +\dontrun{ + str(createStaticNASIS(tables = c("calculation","formtext"))) +} + +} diff --git a/manbak/dbConnectNASIS.Rd b/manbak/dbConnectNASIS.Rd new file mode 100644 index 00000000..5697bbca --- /dev/null +++ b/manbak/dbConnectNASIS.Rd @@ -0,0 +1,17 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/dbQueryNASIS.R +\name{dbConnectNASIS} +\alias{dbConnectNASIS} +\title{Create a connection to a local NASIS database} +\usage{ +dbConnectNASIS(static_path = NULL) +} +\arguments{ +\item{static_path}{Optional: path to SQLite database containing NASIS table structure; Default: \code{NULL}} +} +\value{ +A \code{DBIConnection} object, as returned by \code{DBI::dbConnect()}. +} +\description{ +Create a connection to a local NASIS database +} diff --git a/manbak/dbQueryNASIS.Rd b/manbak/dbQueryNASIS.Rd new file mode 100644 index 00000000..7965d3ff --- /dev/null +++ b/manbak/dbQueryNASIS.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/dbQueryNASIS.R +\name{dbQueryNASIS} +\alias{dbQueryNASIS} +\title{Send queries to a NASIS DBIConnection} +\usage{ +dbQueryNASIS(conn, q, close = TRUE, ...) +} +\arguments{ +\item{conn}{A \code{DBIConnection} object, as returned by \code{DBI::dbConnect()}.} + +\item{q}{A statement to execute using \code{DBI::dbGetQuery}} + +\item{close}{Close connection after query? Default: \code{TRUE}} + +\item{...}{Additional arguments to \code{DBI::dbGetQuery}} +} +\value{ +Result of \code{DBI::dbGetQuery} +} +\description{ +Send queries to a NASIS DBIConnection +} diff --git a/manbak/dot-dump_NASIS_table.Rd b/manbak/dot-dump_NASIS_table.Rd new file mode 100644 index 00000000..eac48870 --- /dev/null +++ b/manbak/dot-dump_NASIS_table.Rd @@ -0,0 +1,19 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/createStaticNASIS.R +\name{.dump_NASIS_table} +\alias{.dump_NASIS_table} +\title{Method for "dumping" contents of an entire NASIS table} +\usage{ +.dump_NASIS_table(table_name, static_path = NULL) +} +\arguments{ +\item{table_name}{Character name of table.} + +\item{static_path}{Optional: path to SQLite database containing NASIS table structure; Default: \code{NULL}} +} +\value{ +A data.frame or other result of \code{DBI::dbGetQuery} +} +\description{ +Method for "dumping" contents of an entire NASIS table +} diff --git a/manbak/estimateColorMixture.Rd b/manbak/estimateColorMixture.Rd new file mode 100644 index 00000000..4de22f10 --- /dev/null +++ b/manbak/estimateColorMixture.Rd @@ -0,0 +1,27 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/estimateColorMixture.R +\name{estimateColorMixture} +\alias{estimateColorMixture} +\title{Estimate color mixtures using weighted average of CIELAB color coordinates} +\usage{ +estimateColorMixture(x, wt = "pct", backTransform = FALSE) +} +\arguments{ +\item{x}{data.frame, typically from NASIS containing at least CIE LAB ('L', 'A', 'B') and some kind of weight} + +\item{wt}{fractional weights, usually area of hz face} + +\item{backTransform}{logical, should the mixed sRGB representation of soil color be transformed to closest Munsell chips? 
This is performed with \code{aqp::rgb2munsell}. Default: \code{FALSE}} +} +\value{ +A data.frame containing estimated color mixture +} +\description{ +Estimate color mixtures using weighted average of CIELAB color coordinates +} +\note{ +See \code{\link[aqp]{mixMunsell}} for a more realistic (but slower) simulation of subtractive mixing of pigments. +} +\author{ +D.E. Beaudette +} diff --git a/manbak/estimateSTR.Rd b/manbak/estimateSTR.Rd new file mode 100644 index 00000000..1cc735fc --- /dev/null +++ b/manbak/estimateSTR.Rd @@ -0,0 +1,44 @@ +\name{estimateSTR} +\alias{estimateSTR} + +\title{Estimate Soil Temperature Regime} +\description{Estimate soil temperature regime (STR) based on mean annual soil temperature (MAST), mean summer soil temperature (MSST), mean winter soil temperature (MWST), presence of O horizons, saturated conditions, and presence of permafrost. Several assumptions are made when O horizon or saturation are undefined.} + +\usage{ +estimateSTR(mast, mean.summer, mean.winter, O.hz = NA, saturated = NA, permafrost = FALSE) +} + +\arguments{ + \item{mast}{vector of mean annual soil temperature (deg C)} + \item{mean.summer}{vector of mean summer soil temperature (deg C)} + \item{mean.winter}{vector of mean winter soil temperature (deg C)} + \item{O.hz}{logical vector of O horizon presence / absence} + \item{saturated}{logical vector of seasonal saturation} + \item{permafrost}{logical vector of permafrost presence / absence} +} + +\details{ +\href{http://ncss-tech.github.io/AQP/soilDB/STR-eval.html}{Related tutorial}. +} + +\value{Vector of soil temperature regimes.} + +\references{ +Soil Survey Staff. 2015. Illustrated guide to soil taxonomy. U.S. Department of Agriculture, Natural Resources Conservation Service, National Soil Survey Center, Lincoln, Nebraska. +} + +\author{D.E. Beaudette} + + +\seealso{ +\code{\link{STRplot}} +} + +\examples{ +# simple example +estimateSTR(mast=17, mean.summer = 22, mean.winter = 12) + +} + +\keyword{ manip }% use one of RShowDoc("KEYWORDS") + diff --git a/manbak/fetchGDB.Rd b/manbak/fetchGDB.Rd new file mode 100644 index 00000000..ee031d50 --- /dev/null +++ b/manbak/fetchGDB.Rd @@ -0,0 +1,83 @@ +\name{fetchGDB} +\alias{fetchGDB} +\alias{get_legend_from_GDB} +\alias{get_mapunit_from_GDB} +\alias{get_component_from_GDB} + +\title{Load and Flatten Data from SSURGO file geodatabases} +\description{Functions to load and flatten commonly used tables from SSURGO file geodatabases, and create soil profile collection objects (SPC).} +\usage{ +fetchGDB(dsn = "gNATSGO_CONUS.gdb", + WHERE = NULL, + childs = TRUE, + droplevels = TRUE, + stringsAsFactors = TRUE + ) + + +get_legend_from_GDB(dsn = "gNATSGO_CONUS.gdb", + WHERE = NULL, + droplevels = TRUE, + stringsAsFactors = TRUE, + stats = FALSE + ) + +get_mapunit_from_GDB(dsn = "gNATSGO_CONUS.gdb", + WHERE = NULL, + droplevels = TRUE, + stringsAsFactors = TRUE, + stats = FALSE + ) + +get_component_from_GDB(dsn = "gNATSGO_CONUS.gdb", + WHERE = NULL, + childs = FALSE, + droplevels = TRUE, + stringsAsFactors = TRUE + ) + +} + + +\arguments{ + \item{dsn}{data source name (interpretation varies by driver - for some drivers, dsn is a file name, but may also be a folder, or contain the name and access credentials of a database); in case of GeoJSON, dsn may be the character string holding the geojson data.
It can also be an open database connection.} + \item{WHERE}{text string formatted as an SQL WHERE clause (default: NULL)} + \item{childs}{logical; if FALSE parent material and geomorphic child tables are not flattened and appended} + \item{droplevels}{logical: indicating whether to drop unused levels in classifying factors. This is useful when a classifying factor has a large number of unused levels, which can waste space in tables and figures.} + \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have been set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)} + \item{stats}{Return extended summary statistics (for legend or mapunit only)} + } + + +\details{These functions return data from SSURGO file geodatabases with the use of a simple text string that is formatted as an SQL WHERE clause (e.g. \code{WHERE = "areasymbol = 'IN001'"}). Any columns within the target table can be specified (except for fetchGDB() currently, which only targets the legend with the WHERE clause). +} +\value{A \code{data.frame} or \code{SoilProfileCollection} object.} +\author{Stephen Roecker} + + +%% ~Make other sections like Warning with \section{Warning }{....} ~ + +\examples{ +\donttest{ + +## replace `dsn` with path to your own geodatabase (SSURGO OR gNATSGO) +## +## +## download CONUS gNATSGO from here: +## https://www.nrcs.usda.gov/wps/portal/nrcs/detail/soils/survey/geo/?cid=nrcseprd1464625 +## +## +# dsn <- "D:/geodata/soils/gNATSGO_CONUS.gdb" + +# le <- get_legend_from_GDB(dsn = dsn, WHERE = "areasymbol LIKE '\%'") + +# mu <- get_mapunit_from_GDB(dsn = dsn, WHERE = "muname LIKE 'Miami\%'") + +# co <- get_component_from_GDB(dsn, WHERE = "compname = 'Miami' +# AND majcompflag = 'Yes'", childs = FALSE) + +# f_in_GDB <- fetchGDB(WHERE = "areasymbol LIKE 'IN\%'") + +} +} +\keyword{manip} diff --git a/manbak/fetchHenry.Rd b/manbak/fetchHenry.Rd new file mode 100644 index 00000000..a9f680f0 --- /dev/null +++ b/manbak/fetchHenry.Rd @@ -0,0 +1,63 @@ +\name{fetchHenry} +\alias{fetchHenry} +\alias{month2season} +\alias{summarizeSoilTemperature} + +\title{Download Data from the Henry Mount Soil Temperature and Water Database} + +\description{This function is a front-end to the REST query functionality of the Henry Mount Soil Temperature and Water Database.} + +\usage{ +fetchHenry(what='all', usersiteid = NULL, project = NULL, sso = NULL, +gran = "day", start.date = NULL, stop.date = NULL, +pad.missing.days = TRUE, soiltemp.summaries = TRUE) +} + +\arguments{ + \item{what}{type of data to return: 'sensors': sensor metadata only | 'soiltemp': sensor metadata + soil temperature data | 'soilVWC': sensor metadata + soil moisture data | 'airtemp': sensor metadata + air temperature data | 'waterlevel': sensor metadata + water level data |'all': sensor metadata + all sensor data} + \item{usersiteid}{(optional) filter results using a NASIS user site ID} + \item{project}{(optional) filter results using a project ID} + \item{sso}{(optional) filter results using a soil survey office code} + \item{gran}{data granularity: "day", "week", "month", "year"; returned data are averages} + \item{start.date}{(optional) starting date filter} + \item{stop.date}{(optional) ending date filter} + \item{pad.missing.days}{should missing data ("day" granularity) be filled with NA?
see details} + \item{soiltemp.summaries}{should soil temperature ("day" granularity only) be summarized? see details} +} + +\details{Filling missing days with NA is useful for computing an index of how complete the data are, and for estimating (mostly) unbiased MAST and seasonal mean soil temperatures. Summaries are computed by first averaging over Julian day, then averaging over all days of the year (MAST) or just those days that occur within "summer" or "winter". This approach makes it possible to estimate summaries in the presence of missing data. The quality of summaries should be weighted by the number of "functional years" (number of years with non-missing data after combining data by Julian day) and "complete years" (number of years of data with >= 365 days of non-missing data).} + +\value{a list containing: + \item{sensors}{a \code{SpatialPointsDataFrame} object containing site-level information} + \item{soiltemp}{a \code{data.frame} object containing soil temperature timeseries data} + \item{soilVWC}{a \code{data.frame} object containing soil moisture timeseries data} + \item{airtemp}{a \code{data.frame} object containing air temperature timeseries data} + \item{waterlevel}{a \code{data.frame} object containing water level timeseries data} +} + +\author{D.E. Beaudette} +\note{This function and the back-end database are very much a work in progress.} + +\seealso{\code{\link{fetchSCAN}}} +\examples{ +\donttest{ +if(requireNamespace("curl") & + curl::has_internet() & + require(lattice)) { + + # get CA630 data as daily averages + x <- fetchHenry(project='CA630', gran = 'day') + + # inspect data gaps + levelplot(factor(!is.na(sensor_value)) ~ doy * factor(year) | name, + data=x$soiltemp, col.regions=c('grey', 'RoyalBlue'), cuts=1, + colorkey=FALSE, as.table=TRUE, scales=list(alternating=3), + par.strip.text=list(cex=0.75), strip=strip.custom(bg='yellow'), + xlab='Julian Day', ylab='Year') + +} +} +} + +\keyword{manip} + diff --git a/manbak/fetchKSSL.Rd b/manbak/fetchKSSL.Rd new file mode 100644 index 00000000..69803095 --- /dev/null +++ b/manbak/fetchKSSL.Rd @@ -0,0 +1,89 @@ +\name{fetchKSSL} +\alias{fetchKSSL} +\title{Fetch KSSL Data} +\description{Download soil characterization and morphologic data via BBOX, MLRA, or soil series name query, from the KSSL database.} + +\usage{fetchKSSL(series=NA, bbox=NA, mlra=NA, pedlabsampnum=NA, +pedon_id=NA, pedon_key=NA, returnMorphologicData=FALSE, returnGeochemicalData=FALSE, +simplifyColors=FALSE, progress=TRUE)} + +\arguments{ + \item{series}{vector of soil series names, case insensitive} + \item{bbox}{a single bounding box in WGS84 geographic coordinates e.g. \code{c(-120, 37, -122, 38)}} + \item{mlra}{vector of MLRA IDs, e.g. "18" or "22A"} + \item{pedlabsampnum}{vector of KSSL pedon lab sample number} + \item{pedon_id}{vector of user pedon ID} + \item{pedon_key}{vector of KSSL internal pedon ID} + \item{returnMorphologicData}{logical, optionally request basic morphologic data, see details section} + \item{returnGeochemicalData}{logical, optionally request geochemical, optical and XRD/thermal data, see details section} + \item{simplifyColors}{logical, simplify colors (from morphologic data) and join with horizon data} + \item{progress}{logical, optionally give progress when iterating over multiple requests} +} + + +\details{This is an experimental interface to a subset of the most commonly used data from a snapshot of KSSL (lab characterization) and NASIS (morphologic) data. + +Series-queries are case insensitive.
Series name is based on the "correlated as" field (from KSSL snapshot) when present. The "sampled as" classification was promoted to "correlated as" if the "correlated as" classification was missing. + +When \code{returnMorphologicData} is TRUE, the resulting object is a list. The standard output from \code{fetchKSSL} (\code{SoilProfileCollection} object) is stored in the named element "SPC". The additional elements are basic morphologic data: soil color, rock fragment volume, pores, structure, and redoximorphic features. There is a 1:many relationship between the horizon data in "SPC" and the additional dataframes in \code{morph}. See examples for ideas on how to "flatten" these tables. + +When \code{returnGeochemicalData} is TRUE, the resulting object is a list. The standard output from \code{fetchKSSL} (\code{SoilProfileCollection} object) is stored in the named element "SPC". The additional elements are geochemical and mineralogy analysis tables, specifically: geochemical/elemental analyses "geochem", optical mineralogy "optical", and X-ray diffraction / thermal "xrd_thermal". \code{returnGeochemicalData} will include additional dataframes \code{geochem}, \code{optical}, and \code{xrd_thermal} in list result. + +Setting \code{simplifyColors=TRUE} will automatically flatten the soil color data and join to horizon level attributes. + +Function arguments (\code{series}, \code{mlra}, etc.) are fully vectorized except for \code{bbox}. +} + +\value{a \code{SoilProfileCollection} object when \code{returnMorphologicData} is FALSE, otherwise a list.} + +\author{D.E. Beaudette and A.G. Brown} +\note{SoilWeb maintains a snapshot of these KSSL and NASIS data. The SoilWeb snapshot was developed using methods described here: \url{https://github.com/dylanbeaudette/process-kssl-snapshot}. 
Please use the link below for the live data.} + +\references{ +\url{http://ncsslabdatamart.sc.egov.usda.gov/} +} + +\seealso{\code{\link{fetchOSD}}} +\examples{ +\donttest{ +if(requireNamespace("curl") & + curl::has_internet()) { + + library(aqp) + library(plyr) + library(reshape2) + + # search by series name + s <- fetchKSSL(series='auburn') + + # search by bounding-box + # s <- fetchKSSL(bbox=c(-120, 37, -122, 38)) + + # how many pedons + length(s) + + # plot + plotSPC(s, name='hzn_desgn', max.depth=150) + + ## + ## morphologic data + ## + + # get lab and morphologic data + s <- fetchKSSL(series='auburn', returnMorphologicData = TRUE) + + # extract SPC + pedons <- s$SPC + + ## automatically simplify color data + s <- fetchKSSL(series='auburn', returnMorphologicData = TRUE, simplifyColors=TRUE) + + # check + par(mar=c(0,0,0,0)) + plot(pedons, color='moist_soil_color', print.id=FALSE) + +} +} +} + +\keyword{utilities} diff --git a/manbak/fetchNASIS.Rd b/manbak/fetchNASIS.Rd new file mode 100644 index 00000000..085f450e --- /dev/null +++ b/manbak/fetchNASIS.Rd @@ -0,0 +1,88 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/fetchNASIS.R +\name{fetchNASIS} +\alias{fetchNASIS} +\alias{get_phorizon_from_NASIS_db} +\alias{get_component_copm_data_from_NASIS_db} +\alias{get_component_horizon_data_from_NASIS_db} +\alias{get_component_correlation_data_from_NASIS_db} +\alias{get_component_cogeomorph_data_from_NASIS_db} +\alias{get_component_esd_data_from_NASIS_db} +\alias{get_component_otherveg_data_from_NASIS_db} +\alias{get_copedon_from_NASIS_db} +\alias{get_mapunit_from_NASIS} +\alias{get_projectmapunit_from_NASIS} +\alias{get_component_diaghz_from_NASIS_db} +\alias{get_mutext_from_NASIS_db} +\alias{get_phfmp_from_NASIS_db} +\alias{get_RMF_from_NASIS_db} +\alias{get_concentrations_from_NASIS_db} +\alias{fetchVegdata} +\alias{get_vegplot_from_NASIS_db} +\alias{get_vegplot_location_from_NASIS_db} +\alias{get_vegplot_species_from_NASIS_db} +\alias{get_vegplot_textnote_from_NASIS_db} +\alias{get_vegplot_transect_from_NASIS_db} +\alias{get_vegplot_transpecies_from_NASIS_db} +\alias{get_vegplot_tree_si_details_from_NASIS_db} +\alias{get_vegplot_tree_si_summary_from_NASIS_db} +\alias{get_vegplot_trhi_from_NASIS_db} +\alias{get_legend_from_NASIS} +\alias{get_lmuaoverlap_from_NASIS} +\title{Fetch commonly used site/pedon/horizon or component data from NASIS.} +\usage{ +fetchNASIS( + from = "pedons", + url = NULL, + SS = TRUE, + rmHzErrors = TRUE, + nullFragsAreZero = TRUE, + soilColorState = "moist", + lab = FALSE, + fill = FALSE, + stringsAsFactors = default.stringsAsFactors(), + static_path = NULL +) +} +\arguments{ +\item{from}{determines what objects should be fetched? ('pedons' | 'components' | 'pedon_report')} + +\item{url}{string specifying the url for the NASIS pedon_report (default: NULL)} + +\item{SS}{fetch data from the currently loaded selected set in NASIS or from the entire local database (default: TRUE)} + +\item{rmHzErrors}{should pedons with horizon depth errors be removed from the results? (default: TRUE)} + +\item{nullFragsAreZero}{should fragment volumes of NULL be interpreted as 0? (default: TRUE), see details} + +\item{soilColorState}{which colors should be used to generate the convenience field 'soil_color'?
('moist' | 'dry')} + +\item{lab}{should the phlabresults child table be fetched with site/pedon/horizon data? (default: FALSE)} + +\item{fill}{\code{fetchNASIS(from='components')} only: include component records without horizon data in result? (default: FALSE)} + +\item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have been set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)} + +\item{static_path}{Optional: path to local SQLite database containing NASIS table structure; default: NULL} +} +\value{ +A SoilProfileCollection object +} +\description{ +Fetch commonly used site/pedon/horizon or component data from NASIS, returned as a SoilProfileCollection object. +} +\details{ +This function imports data from NASIS into R as a \code{SoilProfileCollection} object. It "flattens" NASIS pedon and component tables, including their child tables, into several more easily manageable data frames. Primarily these functions access the local NASIS database using an ODBC connection. However using the \code{fetchNASIS()} argument \code{from = "pedon_report"}, data can be read from the NASIS Report 'fetchNASIS', as either a txt file or url. The primary purpose of \code{fetchNASIS(from = "pedon_report")} is to facilitate importing datasets larger than 8000+ pedons/components. + +The value of \code{nullFragsAreZero} will have a significant impact on the rock fragment fractions returned by fetchNASIS. Set \code{nullFragsAreZero = FALSE} in those cases where there are many data-gaps and \code{NULL} rock fragment values should be interpreted as \code{NULL}. Set \code{nullFragsAreZero = TRUE} in those cases where \code{NULL} rock fragment values should be interpreted as 0. + +This function attempts to do most of the boilerplate work when extracting site/pedon/horizon or component data from a local NASIS database. Pedons that are missing horizon data, or have errors in their horizonation are excluded from the returned object, however, their IDs are printed on the console. Pedons with combination horizons (e.g. B/C) are erroneously marked as errors due to the way in which they are stored in NASIS as two overlapping horizon records. +\itemize{ +\item \href{http://ncss-tech.github.io/AQP/soilDB/fetchNASIS-mini-tutorial.html}{fetchNASIS Pedons Tutorial} +\item \href{http://ncss-tech.github.io/AQP/soilDB/NASIS-component-data.html}{fetchNASIS Components Tutorial} +} +} +\author{ +D. E. Beaudette, J. M. Skovlin, S.M. Roecker, A.G. Brown +} diff --git a/manbak/fetchNASISLabData.Rd b/manbak/fetchNASISLabData.Rd new file mode 100644 index 00000000..7bbb1d28 --- /dev/null +++ b/manbak/fetchNASISLabData.Rd @@ -0,0 +1,20 @@ +\name{fetchNASISLabData} +\alias{fetchNASISLabData} + + +\title{Fetch KSSL laboratory pedon/horizon data from a local NASIS database.} +\description{Fetch KSSL laboratory pedon/horizon layer data from a local NASIS database, return as a SoilProfileCollection object.} + +\usage{fetchNASISLabData(SS = TRUE)} +\arguments{ + \item{SS}{fetch data from the currently loaded selected set in NASIS or from the entire local database (default: TRUE)} +} +\value{a SoilProfileCollection class object} +\details{This function currently works only on Windows, and requires a 'nasis_local' ODBC connection.} +\author{J.M. Skovlin and D.E.
Beaudette} +\note{This function attempts to do most of the boilerplate work when extracting KSSL laboratory site/horizon data from a local NASIS database. Lab pedons that have errors in their horizonation are excluded from the returned object, however, their IDs are printed on the console. See \code{\link{getHzErrorsNASIS}} for a simple approach to identifying pedons with problematic horizonation.} + +\seealso{\code{\link{get_labpedon_data_from_NASIS_db}}} + +\keyword{manip} + diff --git a/manbak/fetchNASISWebReport.Rd b/manbak/fetchNASISWebReport.Rd new file mode 100644 index 00000000..37a5bc19 --- /dev/null +++ b/manbak/fetchNASISWebReport.Rd @@ -0,0 +1,158 @@ +\name{fetchNASISWebReport} +\alias{fetchNASISWebReport} +\alias{get_project_from_NASISWebReport} +\alias{get_progress_from_NASISWebReport} +\alias{get_project_correlation_from_NASISWebReport} +\alias{get_legend_from_NASISWebReport} +\alias{get_mapunit_from_NASISWebReport} +\alias{get_projectmapunit_from_NASISWebReport} +\alias{get_projectmapunit2_from_NASISWebReport} +\alias{get_component_from_NASISWebReport} +\alias{get_chorizon_from_NASISWebReport} +\alias{get_cosoilmoist_from_NASISWebReport} +\alias{get_sitesoilmoist_from_NASISWebReport} +\alias{get_lmuaoverlap_from_NASISWebReport} + +\title{Extract component tables from the NASIS Web Reports} +\description{Get, format, impute, and return component tables.} +\usage{ +fetchNASISWebReport(projectname, rmHzErrors = FALSE, fill = FALSE, + stringsAsFactors = default.stringsAsFactors() + ) +get_progress_from_NASISWebReport(mlrassoarea, fiscalyear, projecttypename) +get_project_from_NASISWebReport(mlrassoarea, fiscalyear) +get_project_correlation_from_NASISWebReport(mlrassoarea, fiscalyear, projectname) +get_projectmapunit_from_NASISWebReport(projectname, + stringsAsFactors = default.stringsAsFactors() + ) +get_projectmapunit2_from_NASISWebReport(mlrassoarea, fiscalyear, projectname, + stringsAsFactors = default.stringsAsFactors() + ) +get_legend_from_NASISWebReport(mlraoffice, + areasymbol, + droplevels = TRUE, + stringsAsFactors = default.stringsAsFactors() + ) +get_mapunit_from_NASISWebReport(areasymbol, + droplevels = TRUE, + stringsAsFactors = default.stringsAsFactors() + ) +get_component_from_NASISWebReport(projectname, + stringsAsFactors = default.stringsAsFactors() + ) +get_chorizon_from_NASISWebReport(projectname, fill = FALSE, + stringsAsFactors = default.stringsAsFactors() + ) +get_cosoilmoist_from_NASISWebReport(projectname, impute = TRUE, + stringsAsFactors = default.stringsAsFactors() + ) +get_sitesoilmoist_from_NASISWebReport(usiteid) +} + + +\arguments{ + \item{projectname}{text string vector of project names to be inserted into a SQL WHERE clause (default: NA)} + \item{mlraoffice}{text string value identifying the MLRA Regional Soil Survey Office group name inserted into a SQL WHERE clause (default: NA)} + \item{mlrassoarea}{text string value identifying the MLRA Soil Survey Office areasymbol symbol inserted into a SQL WHERE clause (default: NA)} + \item{fiscalyear}{text string value identifying the fiscal year inserted into a SQL WHERE clause (default: NA)} + \item{projecttypename}{text string value identifying the project type name inserted into a SQL WHERE clause (default: NA)} + \item{areasymbol}{text string value identifying the area symbol (e.g. "IN001" or "IN\%") inserted into a SQL WHERE clause (default: NA)} + \item{usiteid}{text string value identifying the user site id inserted into a SQL WHERE clause (default: NA)} + \item{impute}{replace missing (i.e.
NULL) values with "Not_Populated" for categorical data, or the "RV" for numeric data or 201 cm if the "RV" is also NULL (default: TRUE)} + \item{fill}{should rows with missing component ids be removed? (default: FALSE)} + \item{rmHzErrors}{should pedons with horizonation errors be removed from the results? (default: FALSE)} + \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have been set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)} + \item{droplevels}{logical: indicating whether to drop unused levels in classifying factors. This is useful when a classifying factor has a large number of unused levels, which can waste space in tables and figures.} +} + + +\value{A data.frame or list with the results.} +\author{Stephen Roecker} + + +%% ~Make other sections like Warning with \section{Warning }{....} ~ + + +\examples{ +\donttest{ + + +if (requireNamespace("curl") & + curl::has_internet() & + require("aqp") & + require("ggplot2") & + require("gridExtra") +) { + # query soil components by projectname + test = fetchNASISWebReport( + "EVAL - MLRA 111A - Ross silt loam, 0 to 2 percent slopes, frequently flooded" + ) + test = test$spc + + # profile plot + plot(test) + + # convert the data for depth plot + clay_slice = horizons(slice(test, 0:200 ~ claytotal_l + claytotal_r + claytotal_h)) + names(clay_slice) <- gsub("claytotal_", "", names(clay_slice)) + + om_slice = horizons(slice(test, 0:200 ~ om_l + om_r + om_h)) + names(om_slice) = gsub("om_", "", names(om_slice)) + + test2 = rbind(data.frame(clay_slice, var = "clay"), + data.frame(om_slice, var = "om") + ) + + h = merge(test2, site(test)[c("dmuiid", "coiid", "compname", "comppct_r")], + by = "coiid", + all.x = TRUE + ) + + # depth plot of clay content by soil component + gg_comp <- function(x) { + ggplot(x) + + geom_line(aes(y = r, x = hzdept_r)) + + geom_ribbon(aes(ymin = l, ymax = h, x = hzdept_r), alpha = 0.2) + + xlim(200, 0) + + xlab("depth (cm)") + + facet_grid(var ~ dmuiid + paste(compname, comppct_r)) + + coord_flip() + } + g1 <- gg_comp(subset(h, var == "clay")) + g2 <- gg_comp(subset(h, var == "om")) + + grid.arrange(g1, g2) + + + # query cosoilmoist (e.g. water table data) by mukey + # NA depths are interpreted as (???) with impute=TRUE argument + x <- get_cosoilmoist_from_NASISWebReport( + "EVAL - MLRA 111A - Ross silt loam, 0 to 2 percent slopes, frequently flooded" + ) + + ggplot(x, aes(x = as.integer(month), y = dept_r, lty = status)) + + geom_rect(aes(xmin = as.integer(month), xmax = as.integer(month) + 1, + ymin = 0, ymax = max(x$depb_r), + fill = flodfreqcl)) + + geom_line(cex = 1) + + geom_point() + + geom_ribbon(aes(ymin = dept_l, ymax = dept_h), alpha = 0.2) + + ylim(max(x$depb_r), 0) + + xlab("month") + ylab("depth (cm)") + + scale_x_continuous(breaks = 1:12, labels = month.abb, name="Month") + + facet_wrap(~ paste0(compname, ' (', comppct_r , ')')) + + ggtitle(paste0(x$nationalmusym[1], + ': Water Table Levels from Component Soil Moisture Month Data')) + + +} + + + +} +} + +% Add one or more standard keywords, see file 'KEYWORDS' in the +% R documentation directory.
+\keyword{manip} diff --git a/manbak/fetchOSD.Rd b/manbak/fetchOSD.Rd new file mode 100644 index 00000000..b7bebe0c --- /dev/null +++ b/manbak/fetchOSD.Rd @@ -0,0 +1,108 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/fetchOSD.R +\name{fetchOSD} +\alias{fetchOSD} +\title{Fetch Official Series Descriptions and summaries from SoilWeb API} +\usage{ +fetchOSD(soils, colorState = "moist", extended = FALSE) +} +\arguments{ +\item{soils}{a character vector of named soil series; case-insensitive} + +\item{colorState}{color state for horizon soil color visualization: "moist" or "dry"} + +\item{extended}{if \code{TRUE} additional soil series summary data are returned, see details} +} +\value{ +a \code{SoilProfileCollection} object containing basic soil morphology and taxonomic information. +} +\description{ +This function fetches a variety of data associated with named soil series, extracted from the USDA-NRCS Official Series Description text files and detailed soil survey (SSURGO). These data are periodically updated and made available via SoilWeb. +} +\details{ +{ +\itemize{ +\item{\href{https://ncss-tech.github.io/AQP/soilDB/soil-series-query-functions.html}{overview of all soil series query functions}} + +\item{\href{https://ncss-tech.github.io/AQP/soilDB/competing-series.html}{competing soil series}} + +\item{\href{https://ncss-tech.github.io/AQP/soilDB/siblings.html}{siblings}} +} + +The standard set of "site" and "horizon" data is returned as a \code{SoilProfileCollection} object (\code{extended=FALSE}). The "extended" suite of summary data can be requested by setting \code{extended=TRUE}. The resulting object will be a \code{list} with the following elements: + +\describe{ +\item{SPC}{\code{SoilProfileCollection} containing standard "site" and "horizon" data} +\item{competing}{competing soil series from the SC database snapshot} +\item{geog_assoc_soils}{geographically associated soils, extracted from named section in the OSD} +\item{geomcomp}{empirical probabilities for geomorphic component, derived from the current SSURGO snapshot} +\item{hillpos}{empirical probabilities for hillslope position, derived from the current SSURGO snapshot} +\item{mtnpos}{empirical probabilities for mountain slope position, derived from the current SSURGO snapshot} +\item{terrace}{empirical probabilities for river terrace position, derived from the current SSURGO snapshot} +\item{flats}{empirical probabilities for flat landscapes, derived from the current SSURGO snapshot} +\item{pmkind}{empirical probabilities for parent material kind, derived from the current SSURGO snapshot} +\item{pmorigin}{empirical probabilities for parent material origin, derived from the current SSURGO snapshot} +\item{mlra}{empirical MLRA membership values, derived from the current SSURGO snapshot} +\item{climate}{experimental climate summaries from PRISM stack} +\item{metadata}{metadata associated with SoilWeb cached summaries} +} + +When using \code{extended=TRUE}, there are a couple of scenarios in which series morphology contained in \code{SPC} do not fully match records in the associated series summaries (e.g. \code{competing}). + +\describe{ + +\item{1. A query for soil series that exist entirely outside of CONUS (e.g. PALAU).}{ - Climate summaries are empty \code{data.frames} because these summaries are currently generated from PRISM. We are working on a solution.} + +\item{2. A query for data within CONUS, but OSD morphology missing due to parsing error (e.g.
formatting, typos).}{ - Extended summaries are present but morphology missing from \code{SPC}. A warning is issued.} + +\item{3. A query for multiple soil series, with one or more listed as "inactive" (e.g. BREADSPRINGS).}{ - Extended summaries are present but morphology missing from \code{SPC}. A warning is issued.} + +} + +These last two cases are problematic for analysis that makes use of morphology and extended data, such as outlined in this tutorial on \href{https://ncss-tech.github.io/AQP/soilDB/competing-series.html}{competing soil series}. + +} +} +\examples{ +\donttest{ +if(requireNamespace("curl") & + curl::has_internet()) { + + # soils of interest + s.list <- c('musick', 'cecil', 'drummer', 'amador', 'pentz', + 'reiff', 'san joaquin', 'montpellier', 'grangeville', 'pollasky', 'ramona') + + # fetch and convert data into an SPC + s.moist <- fetchOSD(s.list, colorState='moist') + s.dry <- fetchOSD(s.list, colorState='dry') + + # plot profiles + # moist soil colors + if(require("aqp")) { + + par(mar=c(0,0,0,0), mfrow=c(2,1)) + plot(s.moist, name='hzname', + cex.names=0.85, axis.line.offset=-4) + plot(s.dry, name='hzname', + cex.names=0.85, axis.line.offset=-4) + + # extended mode: return a list with SPC + summary tables + x <- fetchOSD(s.list, extended = TRUE, colorState = 'dry') + + par(mar=c(0,0,1,1)) + plot(x$SPC) + str(x, 1) + } +} +} +} +\references{ +USDA-NRCS OSD search tools: \url{https://www.nrcs.usda.gov/wps/portal/nrcs/detailfull/soils/home/?cid=nrcs142p2_053587} +} +\seealso{ +\link{OSDquery}, \link{siblings} +} +\author{ +D.E. Beaudette +} +\keyword{manip} diff --git a/manbak/fetchPedonPC.Rd b/manbak/fetchPedonPC.Rd new file mode 100644 index 00000000..b768c092 --- /dev/null +++ b/manbak/fetchPedonPC.Rd @@ -0,0 +1,26 @@ +\name{fetchPedonPC} +\alias{fetchPedonPC} +\alias{getHzErrorsPedonPC} + +\title{Fetch commonly used site/horizon data from a PedonPC v.5 database.} +\description{Fetch commonly used site/horizon data from a version 5.x PedonPC database, return as a SoilProfileCollection object.} + +\usage{ +fetchPedonPC(dsn) +getHzErrorsPedonPC(dsn, strict=TRUE) +} + +\arguments{ + \item{dsn}{The path to a PedonPC version 5.x database} + \item{strict}{should horizonation be strictly enforced? (TRUE)} +} + +\details{This function currently works only on Windows.} +\value{a SoilProfileCollection class object} +\author{D. E. Beaudette and J. M. Skovlin} +\note{This function attempts to do most of the boilerplate work when extracting site/horizon data from a PedonPC or local NASIS database. Pedons that have errors in their horizonation are excluded from the returned object, however, their IDs are printed on the console. See \code{\link{getHzErrorsPedonPC}} for a simple approach to identifying pedons with problematic horizonation.
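A minimal sketch of that workflow ('C:/data/pedons.accdb' is a hypothetical path; Windows only):

```r
library(soilDB)

dsn <- 'C:/data/pedons.accdb'   # hypothetical PedonPC v5.x database

# screen for problematic horizonation before fetching
bad <- getHzErrorsPedonPC(dsn, strict = TRUE)

# proceed once no pedons are flagged
if (nrow(bad) < 1) {
  p <- fetchPedonPC(dsn)
}
```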
Records from the 'taxhistory' table are selected based on 1) most recent record, or 2) record with the least amount of missing data.} + +\seealso{\code{\link{get_hz_data_from_pedon_db}}} + +\keyword{manip} + diff --git a/manbak/fetchRaCA.Rd b/manbak/fetchRaCA.Rd new file mode 100644 index 00000000..cf77281b --- /dev/null +++ b/manbak/fetchRaCA.Rd @@ -0,0 +1,85 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/fetchRaCA.R +\name{fetchRaCA} +\alias{fetchRaCA} +\title{Get Rapid Carbon Assessment (RaCA) data} +\usage{ +fetchRaCA( + series = NULL, + bbox = NULL, + state = NULL, + rcasiteid = NULL, + get.vnir = FALSE +) +} +\arguments{ +\item{series}{a soil series name; case-insensitive} + +\item{bbox}{a bounding box in WGS84 geographic coordinates e.g. \code{c(-120, 37, -122, 38)}, constrained to a 5-degree block} + +\item{state}{a two-letter US state abbreviation; case-insensitive} + +\item{rcasiteid}{a RaCA site id (e.g. 'C1609C01')} + +\item{get.vnir}{logical, should associated VNIR spectra be downloaded? (see details)} +} +\value{ +{ +\describe{ +\item{\code{pedons}:}{a \code{SoilProfileCollection} object containing site/pedon/horizon data} +\item{\code{trees}:}{a \code{data.frame} object containing tree DBH and height} +\item{\code{veg}:}{a \code{data.frame} object containing plant species} +\item{\code{stock}:}{a \code{data.frame} object containing carbon quantities (stocks) at standardized depths} +\item{\code{sample}:}{a \code{data.frame} object containing sample-level bulk density and soil organic carbon values} +\item{\code{spectra}:}{a numeric \code{matrix} containing VNIR reflectance spectra from 350--2500 nm} +} +} +} +\description{ +Get Rapid Carbon Assessment (RaCA) data via state, geographic bounding-box, RaCA site ID, or series query from the SoilWeb API. +} +\details{ +The VNIR spectra associated with RaCA data are quite large (each gzip-compressed VNIR spectra record is about 6.6kb), so requests for these data are disabled by default. Note that VNIR spectra can only be queried by soil series or geographic BBOX. +} +\examples{ +\donttest{ +if(requireNamespace("curl") & + curl::has_internet()) { + + if(require(aqp)) { + + # search by series name + s <- fetchRaCA(series='auburn') + + # search by bounding-box + # s <- fetchRaCA(bbox=c(-120, 37, -122, 38)) + + # check structure + str(s, 1) + + # extract pedons + p <- s$pedons + + # how many pedons + length(p) + + # plot + par(mar=c(0,0,0,0)) + plot(p, name='hzn_desgn', max.depth=150) + } +} +} +} +\references{ +{ +\url{https://www.nrcs.usda.gov/wps/portal/nrcs/detail/soils/survey/?cid=nrcs142p2_054164} +\href{https://r-forge.r-project.org/scm/viewvc.php/\emph{checkout}/docs/soilDB/RaCA-demo.html?root=aqp}{fetchRaCA() Tutorial} +} +} +\seealso{ +\code{\link{fetchOSD}} +} +\author{ +D.E. 
+\keyword{utilities}
diff --git a/manbak/fetchSCAN.Rd b/manbak/fetchSCAN.Rd
new file mode 100644
index 00000000..4d4169cc
--- /dev/null
+++ b/manbak/fetchSCAN.Rd
@@ -0,0 +1,54 @@
+\name{fetchSCAN}
+\alias{fetchSCAN}
+\alias{SCAN_sensor_metadata}
+\alias{SCAN_site_metadata}
+
+
+\title{Fetch SCAN Data}
+\description{Query soil/climate data from USDA-NRCS SCAN Stations (experimental)}
+
+\usage{
+# get SCAN data
+fetchSCAN(site.code, year, report='SCAN', req=NULL)
+
+# get sensor metadata for one or more sites
+SCAN_sensor_metadata(site.code)
+
+# get site metadata for one or more sites
+SCAN_site_metadata(site.code)
+}
+
+\arguments{
+  \item{site.code}{a vector of site codes}
+  \item{year}{a vector of years}
+  \item{report}{report name, single value only}
+  \item{req}{list of SCAN request parameters, for backwards-compatibility only}
+}
+
+\details{See the \href{http://ncss-tech.github.io/AQP/soilDB/fetchSCAN-demo.html}{fetchSCAN tutorial} for details. These functions require the \code{httr} and \code{rvest} packages.}
+
+\note{\code{SCAN_sensor_metadata()} is known to crash on 32bit R / libraries (Windows).}
+
+\value{a \code{data.frame} object}
+\references{\url{https://www.wcc.nrcs.usda.gov/index.html}}
+\author{D.E. Beaudette}
+
+\examples{
+\donttest{
+if(requireNamespace("curl") &
+   curl::has_internet()) {
+
+  # get data: new interface
+  x <- fetchSCAN(site.code=c(356, 2072), year=c(2015, 2016))
+  str(x)
+
+  # get sensor metadata
+  m <- SCAN_sensor_metadata(site.code=c(356, 2072))
+
+  # get site metadata
+  m <- SCAN_site_metadata(site.code=c(356, 2072))
+}
+}
+}
+\keyword{manip}
diff --git a/man/fetchSDA_component.Rd b/manbak/fetchSDA_component.Rd
similarity index 100%
rename from man/fetchSDA_component.Rd
rename to manbak/fetchSDA_component.Rd
diff --git a/manbak/fetchSDA_spatial.Rd b/manbak/fetchSDA_spatial.Rd
new file mode 100644
index 00000000..9e616724
--- /dev/null
+++ b/manbak/fetchSDA_spatial.Rd
@@ -0,0 +1,74 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/fetchSDA_spatial.R
+\name{fetchSDA_spatial}
+\alias{fetchSDA_spatial}
+\title{Query Soil Data Access and Return Spatial Data}
+\usage{
+fetchSDA_spatial(
+  x,
+  by.col = "mukey",
+  method = "feature",
+  geom.src = "mupolygon",
+  db = "SSURGO",
+  add.fields = NULL,
+  chunk.size = 10,
+  verbose = TRUE
+)
+}
+\arguments{
+\item{x}{A vector of MUKEYs / national mapunit symbols (for mupolygon geometry); OR legend keys (LKEY) / area symbols (for sapolygon geometry)}
+
+\item{by.col}{Column name containing mapunit identifier \code{"mukey"}, \code{"nmusym"}, or \code{"areasymbol"} for \code{geom.src} \code{sapolygon}; default is inferred from \code{is.numeric(x) == TRUE} for \code{mukey} or \code{lkey}, and \code{nationalmusym} or \code{areasymbol} otherwise.}
+
+\item{method}{geometry result type: \code{"feature"} returns polygons, \code{"bbox"} returns the bounding box of each polygon, and \code{"point"} returns a single point within each polygon.}
+
+\item{geom.src}{Either \code{mupolygon} or \code{sapolygon}}
+
+\item{db}{Default: SSURGO. When \code{geom.src} is \code{mupolygon}, use STATSGO polygon geometry instead of SSURGO by setting \code{db = "STATSGO"}}
+
+\item{add.fields}{Column names from \code{mapunit} or \code{legend} table to add to result. Must specify parent table name as the prefix \code{mapunit} before column name e.g. \code{mapunit.muname}.}
+
+\item{chunk.size}{How many identifiers should be grouped into each chunk of the spatial request? Necessary for large results. Default: 10}
+\item{verbose}{Print messages?}
+}
+\value{
+A Spatial*DataFrame corresponding to SDA spatial data for all symbols requested. Default result contains geometry with attribute table containing unique feature ID, symbol and area symbol plus additional fields in result specified with \code{add.fields}.
+}
+\description{
+This is a high-level "fetch" method to facilitate spatial queries to Soil Data Access (SDA) based on mapunit key (\code{mukey}) and national mapunit symbol (\code{nationalmusym}) for \code{mupolygon} (SSURGO) or \code{gsmmupolygon} (STATSGO) geometry OR legend key (\code{lkey}) and area symbols (\code{areasymbol}) for \code{sapolygon} (Soil Survey Area; SSA) geometry.
+
+A Soil Data Access spatial query is made returning geometry and key identifying information about the mapunit or area of interest. Additional columns from the mapunit or legend table can be included using the \code{add.fields} argument.
+
+This function automatically "chunks" the input vector (using \code{soilDB::makeChunks}) of mapunit identifiers to minimize the likelihood of exceeding the SDA data request size. The number of chunks varies with the \code{chunk.size} setting and the length of your input vector. If you are working with many mapunits and/or large extents, you may need to decrease this number in order to have more chunks.
+
+Querying regions with complex mapping may require a smaller \code{chunk.size}. Numerically adjacent IDs in the input vector may share common qualities (say, all from the same soil survey area or region) which could cause specific chunks to perform "poorly" (slow or error) no matter what the chunk size is. Shuffling the order of the inputs using \code{sample} may help to eliminate problems related to this, depending on how you obtained your set of MUKEY/nationalmusym to query. One could feasibly use \code{muacres} as a heuristic to adjust for total acreage within chunks.
+}
+\details{
+Note that STATSGO data are fetched using \code{CLIPAREASYMBOL = 'US'} to avoid duplicating state and national subsets of the geometry.
+}
+\examples{
+\donttest{
+if(requireNamespace("curl") &
+   curl::has_internet()) {
+
+  # get spatial data for a single mukey
+  single.mukey <- fetchSDA_spatial(x = "2924882")
+
+  # demonstrate fetching full extent (multi-mukey) of national musym
+  full.extent.nmusym <- fetchSDA_spatial(x = "2x8l5", by.col = "nmusym")
+
+  # compare extent of nmusym to single mukey within it
+  if(require(sp)) {
+    plot(full.extent.nmusym, col = "RED", border = 0)
+    plot(single.mukey, add = TRUE, col = "BLUE", border = 0)
+  }
+
+  # demo adding a field (`muname`) to attribute table of result
+  head(fetchSDA_spatial(x = "2x8l5", by.col = "nmusym", add.fields = "muname"))
+}
+}
+}
+\author{
+Andrew G. Brown
+}
diff --git a/manbak/fetchSoilGrids.Rd b/manbak/fetchSoilGrids.Rd
new file mode 100644
index 00000000..22623381
--- /dev/null
+++ b/manbak/fetchSoilGrids.Rd
@@ -0,0 +1,43 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/fetchSoilGrids.R
+\name{fetchSoilGrids}
+\alias{fetchSoilGrids}
+\title{Fetch SoilGrids 250m properties information from point locations}
+\usage{
+fetchSoilGrids(locations, loc.names = c("id", "lat", "lon"))
+}
+\arguments{
+\item{locations}{A \code{data.frame} containing 3 columns referring to site ID, latitude and longitude.}
+
+\item{loc.names}{Optional: Column names referring to site ID, latitude and longitude. Default: \code{c("id","lat","lon")}}
+}
+\value{
+A SoilProfileCollection
+}
+\description{
+This function obtains SoilGrids properties information (250m raster resolution) given a \code{data.frame} containing site IDs, latitudes and longitudes.
+}
+\details{
+The depth intervals returned are: \code{"0-5cm", "5-15cm", "15-30cm", "30-60cm", "60-100cm", "100-200cm"} and the properties returned are \code{"bdod", "cec", "cfvo", "clay", "nitrogen", "phh2o", "sand", "silt", "soc"} -- each with 5th, 50th, 95th, mean and uncertainty values. Point data requests are made through the \code{properties/query} endpoint of the SoilGrids v2.0 REST API: \url{https://rest.soilgrids.org/soilgrids/v2.0/docs}
+}
+\examples{
+\donttest{
+if(requireNamespace("curl") &
+   curl::has_internet()) {
+
+  library(aqp)
+
+  your.points <- data.frame(id  = c("A", "B"),
+                            lat = c(37.9, 38.1),
+                            lon = c(-120.3, -121.5),
+                            stringsAsFactors = FALSE)
+
+  x <- fetchSoilGrids(your.points)
+
+  plotSPC(x, name = NA, color = "socQ50")
+}
+}
+}
+\author{
+Andrew G. Brown
+}
diff --git a/manbak/filter_geochem.Rd b/manbak/filter_geochem.Rd
new file mode 100644
index 00000000..ceeb69ed
--- /dev/null
+++ b/manbak/filter_geochem.Rd
@@ -0,0 +1,34 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/filter_KSSL.R
+\name{filter_geochem}
+\alias{filter_geochem}
+\title{Filter KSSL Geochemical Table}
+\usage{
+filter_geochem(
+  geochem,
+  columns = NULL,
+  prep_code = NULL,
+  major_element_method = NULL,
+  trace_element_method = NULL
+)
+}
+\arguments{
+\item{geochem}{geochemical data, as returned by fetchKSSL}
+
+\item{columns}{Column name(s) to include in result}
+
+\item{prep_code}{Character vector of prep code(s) to include in result.}
+
+\item{major_element_method}{Character vector of major element method(s) to include in result.}
+
+\item{trace_element_method}{Character vector of trace element method(s) to include in result.}
+}
+\value{
+A data.frame, subsetted according to the constraints specified in arguments.
+}
+\description{
+A function to subset the KSSL "geochem" / elemental analysis result table to obtain rows/columns based on: column name, preparation code, major / trace element method.
+}
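+% a usage sketch, not run: it requires a live KSSL query, and the prep /
+% method codes shown are hypothetical placeholders -- inspect the geochem
+% table returned by fetchKSSL() for the codes present in your data
+\examples{
+\donttest{
+# kssl <- fetchKSSL(series = 'auburn', returnGeochemicalData = TRUE)
+# filter_geochem(kssl$geochem,
+#                prep_code = 'S',
+#                major_element_method = '4H1b',
+#                trace_element_method = '4H1a')
+}
+}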
+\author{
+Andrew G. Brown.
+}
diff --git a/manbak/format_SQL_in_statement.Rd b/manbak/format_SQL_in_statement.Rd
new file mode 100644
index 00000000..2a6c9568
--- /dev/null
+++ b/manbak/format_SQL_in_statement.Rd
@@ -0,0 +1,62 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/SDA_query.R
+\name{format_SQL_in_statement}
+\alias{format_SQL_in_statement}
+\title{Format vector of values into a string suitable for an SQL \code{IN} statement.}
+\usage{
+format_SQL_in_statement(x)
+}
+\arguments{
+\item{x}{A character vector.}
+}
+\value{
+A character vector (unit length) containing concatenated group syntax for use in SQL \code{IN}, with the unique values found in \code{x}.
+}
+\description{
+Concatenate a vector to SQL \code{IN}-compatible syntax: \code{letters[1:3]} becomes \code{('a','b','c')}. Values in \code{x} are first passed through \code{unique()}.
+}
+\note{
+Only \code{character} output is supported.
+}
+\examples{
+
+\donttest{
+
+library(aqp)
+
+# get some mukeys
+q <- "select top(2) mukey from mapunit;"
+mukeys <- SDA_query(q)
+
+# format for use in an SQL IN statement
+mukey.inst <- format_SQL_in_statement(mukeys$mukey)
+mukey.inst
+
+# make a more specific query: for component+horizon data, just for those mukeys
+q2 <- sprintf("SELECT * FROM mapunit
+               INNER JOIN component ON mapunit.mukey = component.mukey
+               INNER JOIN chorizon ON component.cokey = chorizon.cokey
+               WHERE mapunit.mukey IN \%s;", mukey.inst)
+# do the query
+res <- SDA_query(q2)
+
+# build a SoilProfileCollection from horizon-level records
+depths(res) <- cokey ~ hzdept_r + hzdepb_r
+
+# normalize mapunit/component level attributes to site-level for plot
+site(res) <- ~ muname + mukey + compname + comppct_r + taxclname
+
+# make a nice label
+res$labelname <- sprintf("\%s (\%s\%s)", res$compname, res$comppct_r, "\%")
+
+# major components only
+res <- subset(res, comppct_r >= 85)
+
+# inspect plot of result
+par(mar=c(0,0,0,0))
+groupedProfilePlot(res, groups = "mukey", color = "hzname", cex.names=0.8,
+                   id.style = "side", label = "labelname")
+}
+
+
+}
diff --git a/manbak/getHzErrorsNASIS.Rd b/manbak/getHzErrorsNASIS.Rd
new file mode 100644
index 00000000..f8567159
--- /dev/null
+++ b/manbak/getHzErrorsNASIS.Rd
@@ -0,0 +1,21 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/getHzErrorsNASIS.R
+\name{getHzErrorsNASIS}
+\alias{getHzErrorsNASIS}
+\title{Check pedon horizon table for logic errors}
+\usage{
+getHzErrorsNASIS(strict = TRUE, SS = TRUE, static_path = NULL)
+}
+\arguments{
+\item{strict}{how strict should horizon boundaries be checked for consistency: TRUE=more | FALSE=less}
+
+\item{SS}{fetch data from the currently loaded selected set in NASIS or from the entire local database (default: TRUE)}
+
+\item{static_path}{Optional: path to local SQLite database containing NASIS table structure; default: NULL}
+}
+\value{
+A data.frame containing problematic records with columns: 'peiid','pedon_id','hzdept','hzdepb','hzname'
+}
+\description{
+Check pedon horizon table for logic errors
+}
diff --git a/manbak/get_NOAA_GHCND.Rd b/manbak/get_NOAA_GHCND.Rd
new file mode 100644
index 00000000..7dc2d228
--- /dev/null
+++ b/manbak/get_NOAA_GHCND.Rd
@@ -0,0 +1,38 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/fetchNOAA.R
+\name{get_NOAA_GHCND}
+\alias{get_NOAA_GHCND}
+\title{Get Global Historical Climatology Network Daily (GHCND) data from NOAA API for given datatype(s), station IDs and years.}
+\usage{
+get_NOAA_GHCND(stations, years, datatypeids, apitoken)
+}
+\arguments{
+\item{stations}{Station ID (e.g. \code{GHCND:USC00388786})}
+
+\item{years}{One or more years (e.g. 2017:2020)}
+
+\item{datatypeids}{One or more NOAA GHCND data type IDs (e.g \code{c("PRCP","SNOW")})}
+
+\item{apitoken}{API key token for NOAA NCDC web services (\url{https://www.ncdc.noaa.gov/cdo-web/token})}
+}
+\value{
+A data.frame containing the GHCND data requested (limit 1000 records)
+}
+\description{
+Obtain daily climatic summary data for a set of station IDs, years, and datatypes.
+
+Note that typically results from the NOAA API are limited to 1000 records. However, by "chunking" up data into individual station/year/datatypeid combinations, record results generally do not exceed 365 records for daily summaries.
+
+In order to use this function, you must obtain an API token from this website: \url{https://www.ncdc.noaa.gov/cdo-web/token}
+}
+\examples{
+
+## in order to use this function, you must obtain an API token from this website:
+## https://www.ncdc.noaa.gov/cdo-web/token
+
+# get_NOAA_GHCND(c("GHCND:USC00388786", "GHCND:USC00388787"),
+#                years = 2017:2020,
+#                datatypeids = c("PRCP","SNOW"),
+#                apitoken = "yourtokenhere")
+
+}
diff --git a/manbak/get_NOAA_stations_nearXY.Rd b/manbak/get_NOAA_stations_nearXY.Rd
new file mode 100644
index 00000000..489448f7
--- /dev/null
+++ b/manbak/get_NOAA_stations_nearXY.Rd
@@ -0,0 +1,34 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/fetchNOAA.R
+\name{get_NOAA_stations_nearXY}
+\alias{get_NOAA_stations_nearXY}
+\title{Query the NOAA API to get station data near a given latitude and longitude}
+\usage{
+get_NOAA_stations_nearXY(lat, lng, apitoken, bbox = 1)
+}
+\arguments{
+\item{lat}{Latitude}
+
+\item{lng}{Longitude}
+
+\item{apitoken}{API key token for NOAA NCDC web service}
+
+\item{bbox}{Optional: dimension, in degrees, of the bounding box centered at \code{lat}, \code{lng}.}
+}
+\value{
+data.frame containing station information for all stations within a bounding box around \code{lat}, \code{lng}.
+}
+\description{
+Query the NOAA API to get station data (limit 1000 records) near a point. The default \code{bbox = 1} corresponds to a bounding box of plus or minus 0.5 degrees around the specified point [lat, lng].
+
+In order to use this function, you must obtain an API token from this website: \url{https://www.ncdc.noaa.gov/cdo-web/token}
+}
+\examples{
+
+## in order to use this function, you must obtain an API token from this website:
+## https://www.ncdc.noaa.gov/cdo-web/token
+
+# stations <- get_NOAA_stations_nearXY(lat = 37, lng = -120,
+#                                      apitoken = "yourtokenhere")
+
+}
diff --git a/manbak/get_colors_from_NASIS_db.Rd b/manbak/get_colors_from_NASIS_db.Rd
new file mode 100644
index 00000000..ec052814
--- /dev/null
+++ b/manbak/get_colors_from_NASIS_db.Rd
@@ -0,0 +1,24 @@
+\name{get_colors_from_NASIS_db}
+\alias{get_colors_from_NASIS_db}
+
+\title{Extract Soil Color Data from a local NASIS Database}
+\description{Get, format, mix, and return color data from a NASIS database.}
+\usage{
+get_colors_from_NASIS_db(SS = TRUE)
+}
+\arguments{
+  \item{SS}{fetch data from Selected Set in NASIS or from the entire local database (default: TRUE)}
+}
+\details{This function currently works only on Windows.}
+\value{A data.frame with the results.}
+\author{Jay M. Skovlin and Dylan E. Beaudette}
+
+\seealso{
+\code{\link{simplifyColorData}}, \code{\link{get_hz_data_from_NASIS_db}}, \code{\link{get_site_data_from_NASIS_db}}
+}
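+
+% a minimal usage sketch; requires a local NASIS database via the
+% 'nasis_local' ODBC connection (Windows), with pedon data loaded
+\examples{
+\donttest{
+if(local_NASIS_defined()) {
+  # query color data
+  colors <- try(get_colors_from_NASIS_db())
+
+  # inspect
+  if(!inherits(colors, 'try-error')) {
+    head(colors)
+  }
+}
+}
+}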
+
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
+\keyword{manip}
diff --git a/manbak/get_colors_from_pedon_db.Rd b/manbak/get_colors_from_pedon_db.Rd
new file mode 100644
index 00000000..99b25a8c
--- /dev/null
+++ b/manbak/get_colors_from_pedon_db.Rd
@@ -0,0 +1,26 @@
+\name{get_colors_from_pedon_db}
+\alias{get_colors_from_pedon_db}
+
+
+\title{Extract Soil Color Data from a PedonPC Database}
+\description{Get, format, mix, and return color data from a PedonPC database.}
+\usage{
+get_colors_from_pedon_db(dsn)
+}
+%- maybe also 'usage' for other objects documented here.
+\arguments{
+  \item{dsn}{The path to a 'pedon.mdb' database.}
+}
+\details{This function currently works only on Windows.}
+\value{A data.frame with the results.}
+\author{Dylan E. Beaudette and Jay M. Skovlin}
+
+%% ~Make other sections like Warning with \section{Warning }{....} ~
+
+\seealso{
+\code{\link{get_hz_data_from_pedon_db}}, \code{\link{get_site_data_from_pedon_db}}
+}
+
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
+\keyword{manip}
diff --git a/manbak/get_comonth_from_NASIS_db.Rd b/manbak/get_comonth_from_NASIS_db.Rd
new file mode 100644
index 00000000..467c943a
--- /dev/null
+++ b/manbak/get_comonth_from_NASIS_db.Rd
@@ -0,0 +1,42 @@
+\name{get_comonth_from_NASIS_db}
+\alias{get_comonth_from_NASIS_db}
+
+\title{Extract component month data from a local NASIS Database}
+\description{Extract component month data from a local NASIS Database.}
+
+\usage{
+get_comonth_from_NASIS_db(SS = TRUE, fill = FALSE,
+                          stringsAsFactors = default.stringsAsFactors()
+                          )
+}
+
+\arguments{
+  \item{SS}{get data from the currently loaded Selected Set in NASIS or from the entire local database (default: TRUE)}
+  \item{fill}{should missing "month" rows in the comonth table be filled with NA (FALSE)}
+  \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have been set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)}
+}
+
+\details{This function currently works only on Windows.}
+\value{A list with the results.}
+\author{Stephen Roecker}
+
+\seealso{
+\code{\link{fetchNASIS}}
+}
+
+\examples{
+\donttest{
+if(local_NASIS_defined()) {
+  # query component month data
+  cm <- try(get_comonth_from_NASIS_db())
+
+  # show structure of component month data
+  str(cm)
+}
+}
+}
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
+\keyword{manip}
diff --git a/manbak/get_component_data_from_NASIS_db.Rd b/manbak/get_component_data_from_NASIS_db.Rd
new file mode 100644
index 00000000..e8c55f8e
--- /dev/null
+++ b/manbak/get_component_data_from_NASIS_db.Rd
@@ -0,0 +1,42 @@
+\name{get_component_data_from_NASIS_db}
+\alias{get_component_data_from_NASIS_db}
+\alias{get_component_restrictions_from_NASIS_db}
+
+\title{Extract component data from a local NASIS Database}
+\description{Extract component data from a local NASIS Database.}
+
+\usage{
+get_component_data_from_NASIS_db(SS = TRUE, stringsAsFactors = default.stringsAsFactors())
+get_component_restrictions_from_NASIS_db(SS = TRUE)
+}
+
+
+\arguments{
+  \item{SS}{get data from the currently loaded Selected Set in NASIS or from the entire local database (default: TRUE)}
+  \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have been set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)}
+}
+
+\details{This function currently works only on Windows.}
+\value{A list with the results.}
+\author{Dylan E. Beaudette, Stephen Roecker, and Jay M. Skovlin}
+
+%% ~Make other sections like Warning with \section{Warning }{....} ~
+
+\seealso{
+\code{\link{fetchNASIS}}
+}
+
+\examples{
+\donttest{
+if(local_NASIS_defined()) {
+  # query component data
+  fc <- try(get_component_data_from_NASIS_db())
+
+  # show structure of component data returned
+  str(fc)
+}
+}
+}
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
+\keyword{manip}
diff --git a/manbak/get_cosoilmoist_from_NASIS.Rd b/manbak/get_cosoilmoist_from_NASIS.Rd
new file mode 100644
index 00000000..f7f73c85
--- /dev/null
+++ b/manbak/get_cosoilmoist_from_NASIS.Rd
@@ -0,0 +1,37 @@
+\name{get_cosoilmoist_from_NASIS}
+\alias{get_cosoilmoist_from_NASIS}
+
+\title{Read and Flatten the Component Soil Moisture Tables}
+\description{Read and flatten the component soil moisture month tables from a local NASIS Database.}
+\usage{
+get_cosoilmoist_from_NASIS(impute = TRUE, stringsAsFactors = default.stringsAsFactors())
+}
+\arguments{
+  \item{impute}{replace missing (i.e. NULL) values with "Not_Populated" for categorical data, or the "RV" for numeric data or 201 cm if the "RV" is also NULL (default: TRUE)}
+  \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have been set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)}
+}
+\value{A data.frame.}
+\author{S.M. Roecker}
+\details{The component soil moisture tables within NASIS house monthly data on flooding, ponding, and soil moisture status. The soil moisture status is used to specify the water table depth for components (e.g. \code{status == "Moist"}).
+}
+\note{This function currently works only on Windows.}
+
+\seealso{
+\link{fetchNASIS}, \link{get_cosoilmoist_from_NASISWebReport}, \link{get_cosoilmoist_from_SDA}, \code{get_comonth_from_SDA}
+}
+
+\examples{
+\donttest{
+if(local_NASIS_defined()) {
+  # load cosoilmoist (e.g. water table data)
+  test <- try(get_cosoilmoist_from_NASIS())
+
+  # inspect
+  if(!inherits(test, 'try-error')) {
+    head(test)
+  }
+}
+}}
+\keyword{manip}
diff --git a/man/get_extended_data_from_NASIS.Rd b/manbak/get_extended_data_from_NASIS.Rd
similarity index 100%
rename from man/get_extended_data_from_NASIS.Rd
rename to manbak/get_extended_data_from_NASIS.Rd
diff --git a/manbak/get_extended_data_from_pedon_db.Rd b/manbak/get_extended_data_from_pedon_db.Rd
new file mode 100644
index 00000000..d4c58064
--- /dev/null
+++ b/manbak/get_extended_data_from_pedon_db.Rd
@@ -0,0 +1,25 @@
+\name{get_extended_data_from_pedon_db}
+\alias{get_extended_data_from_pedon_db}
+
+\title{Extract accessory tables and summaries from a local pedonPC Database}
+\description{Extract accessory tables and summaries from a local pedonPC Database.}
+\usage{
+get_extended_data_from_pedon_db(dsn)
+}
+%- maybe also 'usage' for other objects documented here.
+\arguments{
+  \item{dsn}{The path to a 'pedon.mdb' database.}
+}
+\details{This function currently works only on Windows.}
+\value{A list with the results.}
+\author{Jay M. Skovlin and Dylan E. Beaudette}
+
+%% ~Make other sections like Warning with \section{Warning }{....} ~
+
+\seealso{
+\code{\link{get_hz_data_from_pedon_db}}, \code{\link{get_site_data_from_pedon_db}}
+}
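+
+% a minimal usage sketch; the .mdb path below is hypothetical, and a
+% Windows ODBC connection is required
+\examples{
+\dontrun{
+# x <- get_extended_data_from_pedon_db(dsn = 'C:/data/pedon.mdb')
+# str(x, 1)
+}
+}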
+
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
+\keyword{manip}
diff --git a/manbak/get_hz_data_from_NASIS_db.Rd b/manbak/get_hz_data_from_NASIS_db.Rd
new file mode 100644
index 00000000..73431521
--- /dev/null
+++ b/manbak/get_hz_data_from_NASIS_db.Rd
@@ -0,0 +1,27 @@
+\name{get_hz_data_from_NASIS_db}
+\alias{get_hz_data_from_NASIS_db}
+
+\title{Extract Horizon Data from a local NASIS Database}
+\description{Get horizon-level data from a local NASIS database.}
+\usage{
+get_hz_data_from_NASIS_db(SS = TRUE, stringsAsFactors = default.stringsAsFactors())
+}
+\arguments{
+  \item{SS}{fetch data from Selected Set in NASIS or from the entire local database (default: TRUE)}
+  \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have been set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)}
+}
+\details{This function currently works only on Windows.}
+\value{A data.frame.}
+
+\author{Jay M. Skovlin and Dylan E. Beaudette}
+\note{NULL total rock fragment values are assumed to represent an \emph{absence} of rock fragments, and set to 0.}
+
+%% ~Make other sections like Warning with \section{Warning }{....} ~
+
+\seealso{
+\code{\link{get_hz_data_from_pedon_db}}, \code{\link{get_site_data_from_NASIS_db}}
+}
+
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
+\keyword{manip}
diff --git a/manbak/get_hz_data_from_pedon_db.Rd b/manbak/get_hz_data_from_pedon_db.Rd
new file mode 100644
index 00000000..328c0389
--- /dev/null
+++ b/manbak/get_hz_data_from_pedon_db.Rd
@@ -0,0 +1,27 @@
+\name{get_hz_data_from_pedon_db}
+\alias{get_hz_data_from_pedon_db}
+
+\title{Extract Horizon Data from a PedonPC Database}
+\description{Get horizon-level data from a PedonPC database.}
+\usage{
+get_hz_data_from_pedon_db(dsn)
+}
+%- maybe also 'usage' for other objects documented here.
+\arguments{
+  \item{dsn}{The path to a 'pedon.mdb' database.}
+}
+\details{This function currently works only on Windows.}
+\value{A data.frame.}
+
+\author{Dylan E. Beaudette and Jay M. Skovlin}
+\note{NULL total rock fragment values are assumed to represent an \emph{absence} of rock fragments, and set to 0.}
+
+%% ~Make other sections like Warning with \section{Warning }{....} ~
+
+\seealso{
+\code{\link{get_colors_from_pedon_db}}, \code{\link{get_site_data_from_pedon_db}}
+}
+
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
+\keyword{manip}
diff --git a/manbak/get_lablayer_data_from_NASIS_db.Rd b/manbak/get_lablayer_data_from_NASIS_db.Rd
new file mode 100644
index 00000000..1b8ee4d6
--- /dev/null
+++ b/manbak/get_lablayer_data_from_NASIS_db.Rd
@@ -0,0 +1,23 @@
+\name{get_lablayer_data_from_NASIS_db}
+\alias{get_lablayer_data_from_NASIS_db}
+
+\title{Extract lab pedon layer data from a local NASIS Database}
+\description{Get lab pedon layer-level (horizon-level) data from a local NASIS database.}
+\usage{get_lablayer_data_from_NASIS_db(SS = TRUE)}
+\arguments{
+  \item{SS}{fetch data from the currently loaded selected set in NASIS or from the entire local database (default: TRUE)}
+}
+\value{A data.frame.}
+\author{Jay M. Skovlin and Dylan E. Beaudette}
+\details{This function currently works only on Windows, and requires a 'nasis_local' ODBC connection.}
+\note{This function queries KSSL laboratory site/horizon data from the lab layer data table in a local NASIS database.}
+
+\seealso{
+\code{\link{get_labpedon_data_from_NASIS_db}}
+}
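+
+% a minimal usage sketch; requires a local NASIS database via the
+% 'nasis_local' ODBC connection, with lab data loaded in the selected set
+\examples{
+\donttest{
+if(local_NASIS_defined()) {
+  ll <- try(get_lablayer_data_from_NASIS_db())
+
+  if(!inherits(ll, 'try-error')) {
+    str(ll)
+  }
+}
+}
+}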
+
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
+
+\keyword{manip}
+
diff --git a/manbak/get_labpedon_data_from_NASIS_db.Rd b/manbak/get_labpedon_data_from_NASIS_db.Rd
new file mode 100644
index 00000000..a3ad3471
--- /dev/null
+++ b/manbak/get_labpedon_data_from_NASIS_db.Rd
@@ -0,0 +1,23 @@
+\name{get_labpedon_data_from_NASIS_db}
+\alias{get_labpedon_data_from_NASIS_db}
+
+\title{Extract lab pedon data from a local NASIS Database}
+\description{Get lab pedon-level data from a local NASIS database.}
+\usage{get_labpedon_data_from_NASIS_db(SS = TRUE)}
+\arguments{
+  \item{SS}{fetch data from the currently loaded selected set in NASIS or from the entire local database (default: TRUE)}
+}
+\value{A data.frame.}
+\author{Jay M. Skovlin and Dylan E. Beaudette}
+\details{This function currently works only on Windows, and requires a 'nasis_local' ODBC connection.}
+\note{This function queries KSSL laboratory site/horizon data from the lab pedon data table in a local NASIS database.}
+
+\seealso{
+\code{\link{get_lablayer_data_from_NASIS_db}}
+}
+
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
+
+\keyword{manip}
+
diff --git a/manbak/get_site_data_from_NASIS_db.Rd b/manbak/get_site_data_from_NASIS_db.Rd
new file mode 100644
index 00000000..42869586
--- /dev/null
+++ b/manbak/get_site_data_from_NASIS_db.Rd
@@ -0,0 +1,23 @@
+\name{get_site_data_from_NASIS_db}
+\alias{get_site_data_from_NASIS_db}
+
+\title{Extract Site Data from a local NASIS Database}
+\description{Get site-level data from a local NASIS database.}
+\usage{get_site_data_from_NASIS_db(SS = TRUE, stringsAsFactors = default.stringsAsFactors())}
+\arguments{
+  \item{SS}{fetch data from Selected Set in NASIS or from the entire local database (default: TRUE)}
+  \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have been set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)}
+}
+\value{A data.frame.}
+\author{Jay M. Skovlin and Dylan E. Beaudette}
+\details{When multiple "site bedrock" entries are present, only the shallowest is returned by this function.}
+\note{This function currently works only on Windows.}
+
+\seealso{
+\code{\link{get_hz_data_from_NASIS_db}}
+}
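+
+% a minimal usage sketch; requires a local NASIS database via the
+% 'nasis_local' ODBC connection, with site/pedon data in the selected set
+\examples{
+\donttest{
+if(local_NASIS_defined()) {
+  s <- try(get_site_data_from_NASIS_db())
+
+  if(!inherits(s, 'try-error')) {
+    str(s)
+  }
+}
+}
+}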
+
+\keyword{manip}
+
diff --git a/manbak/get_site_data_from_pedon_db.Rd b/manbak/get_site_data_from_pedon_db.Rd
new file mode 100644
index 00000000..d275630f
--- /dev/null
+++ b/manbak/get_site_data_from_pedon_db.Rd
@@ -0,0 +1,28 @@
+\name{get_site_data_from_pedon_db}
+\alias{get_site_data_from_pedon_db}
+
+\title{Extract Site Data from a PedonPC Database}
+\description{Get site-level data from a PedonPC database.}
+\usage{
+get_site_data_from_pedon_db(dsn)
+}
+%- maybe also 'usage' for other objects documented here.
+\arguments{
+  \item{dsn}{The path to a 'pedon.mdb' database.}
+}
+
+\value{A data.frame.}
+
+\author{Dylan E. Beaudette and Jay M. Skovlin}
+\note{This function currently works only on Windows.}
+
+%% ~Make other sections like Warning with \section{Warning }{....} ~
+
+\seealso{
+\code{\link{get_hz_data_from_pedon_db}}, \code{\link{get_veg_from_AK_Site}}
+}
+
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
+\keyword{manip}
+
diff --git a/manbak/get_soilseries_from_NASIS.Rd b/manbak/get_soilseries_from_NASIS.Rd
new file mode 100644
index 00000000..14c5f21a
--- /dev/null
+++ b/manbak/get_soilseries_from_NASIS.Rd
@@ -0,0 +1,26 @@
+\name{get_soilseries_from_NASIS}
+\alias{get_soilseries_from_NASIS}
+\alias{get_soilseries_from_NASISWebReport}
+
+\title{Get records from the Soil Classification (SC) database}
+\description{These functions return records from the Soil Classification database, either from the local NASIS database (all series) or via web report (named series only).}
+
+\usage{
+
+get_soilseries_from_NASIS(stringsAsFactors = default.stringsAsFactors())
+get_soilseries_from_NASISWebReport(soils,
+stringsAsFactors = default.stringsAsFactors())
+}
+
+\arguments{
+  \item{soils}{character vector of soil series names}
+  \item{stringsAsFactors}{logical: should character vectors be converted to factors? This argument is passed to the uncode() function. It does not convert those vectors that have been set outside of uncode() (i.e. hard coded). The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)}
+
+}
+
+
+\value{A \code{data.frame}.}
+
+\author{Stephen Roecker}
+
+\keyword{manip}
diff --git a/manbak/get_text_notes_from_NASIS_db.Rd b/manbak/get_text_notes_from_NASIS_db.Rd
new file mode 100644
index 00000000..67f67309
--- /dev/null
+++ b/manbak/get_text_notes_from_NASIS_db.Rd
@@ -0,0 +1,42 @@
+\name{get_text_notes_from_NASIS_db}
+\alias{get_text_notes_from_NASIS_db}
+
+\title{Extract text note data from a local NASIS Database}
+\description{Extract text note data from a local NASIS Database.}
+\usage{
+get_text_notes_from_NASIS_db(SS = TRUE, fixLineEndings = TRUE)
+}
+%- maybe also 'usage' for other objects documented here.
+\arguments{
+  \item{SS}{get data from the currently loaded Selected Set in NASIS or from the entire local database (default: TRUE)}
+  \item{fixLineEndings}{convert line endings from "\\r\\n" to "\\n"}
+}
+\details{This function currently works only on Windows.}
+\value{A list with the results.}
+\author{Dylan E. Beaudette and Jay M. Skovlin}
+
+%% ~Make other sections like Warning with \section{Warning }{....} ~
+
+\seealso{
+\code{\link{get_hz_data_from_pedon_db}}, \code{\link{get_site_data_from_pedon_db}}
+}
+
+\examples{
+\donttest{
+if(local_NASIS_defined()) {
+  # query text note data
+  t <- try(get_text_notes_from_NASIS_db())
+
+  # show contents of the text note data; includes siteobs, site, pedon,
+  # and horizon level text notes
+  str(t)
+
+  # view text categories for site text notes
+  if(!inherits(t, 'try-error')) {
+    table(t$site_text$textcat)
+  }
+}
+}
+}
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
+\keyword{manip}
diff --git a/manbak/get_veg_data_from_NASIS_db.Rd b/manbak/get_veg_data_from_NASIS_db.Rd
new file mode 100644
index 00000000..891bb0e3
--- /dev/null
+++ b/manbak/get_veg_data_from_NASIS_db.Rd
@@ -0,0 +1,31 @@
+\name{get_veg_data_from_NASIS_db}
+\alias{get_veg_data_from_NASIS_db}
+
+\title{Extract veg data from a local NASIS Database}
+\description{Extract veg data from a local NASIS Database.}
+\usage{
+get_veg_data_from_NASIS_db(SS = TRUE)
+}
+%- maybe also 'usage' for other objects documented here.
+\arguments{
+  \item{SS}{get data from the currently loaded Selected Set in NASIS or from the entire local database (default: TRUE)}
+}
+\details{This function currently works only on Windows.}
+\value{A list with the results.}
+\author{Jay M. Skovlin and Dylan E. Beaudette}
+
+
+\examples{
+\donttest{
+if(local_NASIS_defined()) {
+  # query veg data
+  v <- try(get_veg_data_from_NASIS_db())
+
+  # show contents of the veg data returned
+  str(v)
+}
+}
+}
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
+\keyword{manip}
diff --git a/manbak/get_veg_from_AK_Site.Rd b/manbak/get_veg_from_AK_Site.Rd
new file mode 100644
index 00000000..9ac9cd80
--- /dev/null
+++ b/manbak/get_veg_from_AK_Site.Rd
@@ -0,0 +1,26 @@
+\name{get_veg_from_AK_Site}
+\alias{get_veg_from_AK_Site}
+%- Also NEED an '\alias' for EACH other topic documented here.
+\title{Retrieve Vegetation Data from an AK Site Database}
+\description{Retrieve Vegetation Data from an AK Site Database}
+\usage{
+get_veg_from_AK_Site(dsn)
+}
+%- maybe also 'usage' for other objects documented here.
+\arguments{
+  \item{dsn}{file path to the AK Site Access database}
+}
+
+\value{A data.frame with vegetation data in long format, linked to site ID.}
+\author{Dylan E. Beaudette}
+\note{This function currently works only on Windows.}
+
+%% ~Make other sections like Warning with \section{Warning }{....} ~
+
+\seealso{
+\code{\link{get_hz_data_from_pedon_db}}, \code{\link{get_site_data_from_pedon_db}}
+}
+
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
+\keyword{manip}
diff --git a/manbak/get_veg_from_MT_veg_db.Rd b/manbak/get_veg_from_MT_veg_db.Rd
new file mode 100644
index 00000000..37413c28
--- /dev/null
+++ b/manbak/get_veg_from_MT_veg_db.Rd
@@ -0,0 +1,25 @@
+\name{get_veg_from_MT_veg_db}
+\alias{get_veg_from_MT_veg_db}
+
+\title{Extract Site and Plot-level Data from a Montana RangeDB database}
+\description{Get Site and Plot-level data from a Montana RangeDB database.}
+\usage{
+get_veg_from_MT_veg_db(dsn)
+}
+%- maybe also 'usage' for other objects documented here.
+\arguments{
+  \item{dsn}{The name of the Montana RangeDB front-end database connection (see details).}
+}
+\details{This function currently works only on Windows.}
+\value{A data.frame.}
+\author{Jay M. Skovlin}
+
+%% ~Make other sections like Warning with \section{Warning }{....} ~
+
+\seealso{
+\code{\link{get_veg_species_from_MT_veg_db}}, \code{\link{get_veg_other_from_MT_veg_db}}
+}
+
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
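+
+% a minimal usage sketch; 'MTVegDB' is a hypothetical ODBC connection to
+% a Montana RangeDB front-end database (Windows only)
+\examples{
+\dontrun{
+# v <- get_veg_from_MT_veg_db(dsn = 'MTVegDB')
+# head(v)
+}
+}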
+\keyword{manip} diff --git a/manbak/get_veg_from_NPS_PLOTS_db.Rd b/manbak/get_veg_from_NPS_PLOTS_db.Rd new file mode 100644 index 00000000..9b61585d --- /dev/null +++ b/manbak/get_veg_from_NPS_PLOTS_db.Rd @@ -0,0 +1,20 @@ +\name{get_veg_from_NPS_PLOTS_db} +\alias{get_veg_from_NPS_PLOTS_db} + +\title{Retrieve Vegetation Data from an NPS PLOTS Database} + +\description{Used to extract species, stratum, and cover vegetation data from a backend NPS PLOTS Database. Currently works for any Microsoft Access database with an .mdb file format.} + +\usage{get_veg_from_NPS_PLOTS_db(dsn)} + +\arguments{ + \item{dsn}{file path to the NPS PLOTS access database on your system.} +} + +\value{A data.frame with vegetation data in a long format with linkage to NRCS soil pedon data via the site_id key field.} + +\author{Jay M. Skovlin} + +\note{This function currently only works on Windows.} + +\keyword{manip} diff --git a/manbak/get_veg_other_from_MT_veg_db.Rd b/manbak/get_veg_other_from_MT_veg_db.Rd new file mode 100644 index 00000000..8d2bb515 --- /dev/null +++ b/manbak/get_veg_other_from_MT_veg_db.Rd @@ -0,0 +1,25 @@ +\name{get_veg_other_from_MT_veg_db} +\alias{get_veg_other_from_MT_veg_db} + +\title{Extract cover composition data from a Montana RangeDB database} +\description{Get cover composition data from a Montana RangeDB database.} +\usage{ +get_veg_other_from_MT_veg_db(dsn) +} +%- maybe also 'usage' for other objects documented here. +\arguments{ + \item{dsn}{The name of the Montana RangeDB front-end database connection (see details).} +} +\details{This function currently works only on Windows.} +\value{A data.frame.} +\author{Jay M. Skovlin} + +%% ~Make other sections like Warning with \section{Warning }{....} ~ + +\seealso{ +\code{\link{get_veg_from_MT_veg_db}}, \code{\link{get_veg_species_from_MT_veg_db}} +} + +% Add one or more standard keywords, see file 'KEYWORDS' in the +% R documentation directory. +\keyword{manip} diff --git a/manbak/get_veg_species_from_MT_veg_db.Rd b/manbak/get_veg_species_from_MT_veg_db.Rd new file mode 100644 index 00000000..2e6c8b2e --- /dev/null +++ b/manbak/get_veg_species_from_MT_veg_db.Rd @@ -0,0 +1,25 @@ +\name{get_veg_species_from_MT_veg_db} +\alias{get_veg_species_from_MT_veg_db} + +\title{Extract species-level Data from a Montana RangeDB database} +\description{Get species-level data from a Montana RangeDB database.} +\usage{ +get_veg_species_from_MT_veg_db(dsn) +} +%- maybe also 'usage' for other objects documented here. +\arguments{ + \item{dsn}{The name of the Montana RangeDB front-end database connection (see details).} +} +\details{This function currently works only on Windows.} +\value{A data.frame.} +\author{Jay M. Skovlin} + +%% ~Make other sections like Warning with \section{Warning }{....} ~ + +\seealso{ +\code{\link{get_veg_from_MT_veg_db}}, \code{\link{get_veg_other_from_MT_veg_db}} +} + +% Add one or more standard keywords, see file 'KEYWORDS' in the +% R documentation directory. 
+\keyword{manip}
diff --git a/manbak/loafercreek.Rd b/manbak/loafercreek.Rd
new file mode 100644
index 00000000..0b3f564f
--- /dev/null
+++ b/manbak/loafercreek.Rd
@@ -0,0 +1,73 @@
+\name{loafercreek}
+\alias{loafercreek}
+\alias{gopheridge}
+\alias{mineralKing}
+
+
+\docType{data}
+\title{Example \code{SoilProfileCollection} Objects Returned by \code{fetchNASIS}.}
+
+\description{Several examples of soil profile collections returned by \code{fetchNASIS(from='pedons')} as \code{SoilProfileCollection} objects.}
+
+\usage{
+data(loafercreek)
+data(gopheridge)
+data(mineralKing)
+}
+
+
+\examples{
+\donttest{
+if(require("aqp")) {
+  # load example dataset
+  data("gopheridge")
+
+  # what kind of object is this?
+  class(gopheridge)
+
+  # how many profiles?
+  length(gopheridge)
+
+  # there are 60 profiles, this calls for a split plot
+  par(mar=c(0,0,0,0), mfrow=c(2,1))
+
+  # plot soil colors
+  plot(gopheridge[1:30, ], name='hzname', color='soil_color')
+  plot(gopheridge[31:60, ], name='hzname', color='soil_color')
+
+  # need a larger top margin for legend
+  par(mar=c(0,0,4,0), mfrow=c(2,1))
+  # generate colors based on clay content
+  plot(gopheridge[1:30, ], name='hzname', color='clay')
+  plot(gopheridge[31:60, ], name='hzname', color='clay')
+
+  # single row and no labels
+  par(mar=c(0,0,0,0), mfrow=c(1,1))
+  # plot soils sorted by depth to contact
+  plot(gopheridge, name='', print.id=FALSE, plot.order=order(gopheridge$bedrckdepth))
+
+  # plot first 10 profiles
+  plot(gopheridge[1:10, ], name='hzname', color='soil_color', label='pedon_id', id.style='side')
+
+  # add rock fragment data to plot:
+  addVolumeFraction(gopheridge[1:10, ], colname='total_frags_pct')
+
+  # add diagnostic horizons
+  addDiagnosticBracket(gopheridge[1:10, ], kind='argillic horizon', col='red', offset=-0.4)
+
+  ## loafercreek
+  data("loafercreek")
+  # plot first 10 profiles
+  plot(loafercreek[1:10, ], name='hzname', color='soil_color', label='pedon_id', id.style='side')
+
+  # add rock fragment data to plot:
+  addVolumeFraction(loafercreek[1:10, ], colname='total_frags_pct')
+
+  # add diagnostic horizons
+  addDiagnosticBracket(loafercreek[1:10, ], kind='argillic horizon', col='red', offset=-0.4)
+}
+}
+}
+
+
+\keyword{datasets}
diff --git a/manbak/local_NASIS_defined.Rd b/manbak/local_NASIS_defined.Rd
new file mode 100644
index 00000000..7c4de80f
--- /dev/null
+++ b/manbak/local_NASIS_defined.Rd
@@ -0,0 +1,25 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/openNASISchannel.R
+\name{local_NASIS_defined}
+\alias{local_NASIS_defined}
+\title{Check for presence of \code{nasis_local} ODBC data source}
+\usage{
+local_NASIS_defined(static_path = NULL)
+}
+\arguments{
+\item{static_path}{Optional: path to local SQLite database containing NASIS table structure; default: NULL}
+}
+\value{
+logical
+}
+\description{
+Check for presence of \code{nasis_local} ODBC data source
+}
+\examples{
+
+if(local_NASIS_defined()) {
+  # use fetchNASIS or some other lower-level fetch function
+} else {
+  message('could not find `nasis_local` ODBC data source')
+}
+}
diff --git a/manbak/makeChunks.Rd b/manbak/makeChunks.Rd
new file mode 100644
index 00000000..e10918d1
--- /dev/null
+++ b/manbak/makeChunks.Rd
@@ -0,0 +1,28 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/SDA_query.R
+\name{makeChunks}
+\alias{makeChunks}
+\title{Generate chunk labels for splitting data}
+\usage{
+makeChunks(ids, size = 100)
+}
+\arguments{
+\item{ids}{vector of IDs}
+
+\item{size}{chunk (group) size}
+}
+\value{
+A numeric vector
+}
+\description{
+Generate chunk labels for splitting data
+}
+\examples{
+
+# split the lowercase alphabet into 2 chunks
+
+aggregate(letters,
+          by = list(makeChunks(letters, size=13)),
+          FUN = paste0, collapse=",")
+
+}
diff --git a/manbak/mapunit_geom_by_ll_bbox.Rd b/manbak/mapunit_geom_by_ll_bbox.Rd
new file mode 100644
index 00000000..207daa6f
--- /dev/null
+++ b/manbak/mapunit_geom_by_ll_bbox.Rd
@@ -0,0 +1,67 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/mapunit_geom_by_ll_bbox.R
+\name{mapunit_geom_by_ll_bbox}
+\alias{mapunit_geom_by_ll_bbox}
+\title{Fetch Map Unit Geometry from SDA}
+\usage{
+mapunit_geom_by_ll_bbox(bbox, source = "sda")
+}
+\arguments{
+\item{bbox}{a bounding box in WGS84 coordinates}
+
+\item{source}{the source database, currently limited to soil data access (SDA)}
+}
+\value{
+A SpatialPolygonsDataFrame of map unit polygons, in WGS84 (long,lat) coordinates.
+}
+\description{
+Fetch map unit geometry from the SDA website by WGS84 bounding box. There is a limit on the amount of data returned as serialized JSON (~32Mb) and a total record limit of 100,000.
+}
+\details{
+The SDA website can be found at \url{https://sdmdataaccess.nrcs.usda.gov}. See examples for bounding box formatting.
+}
+\note{
+SDA does not return the spatial intersection of map unit polygons and bounding box. Rather, polygons that overlap the bounding box are returned in their entirety. This function requires the 'rgdal' package.
+}
+\examples{
+## fetch map unit geometry from a bounding-box:
+#
+#         +------------- (-120.41, 38.70)
+#         |             |
+#         |             |
+# (-120.54, 38.61) -----+
+#
+\donttest{
+if(requireNamespace("curl") &
+   curl::has_internet() &
+   require(sp) &
+   require(rgdal)) {
+
+  # basic usage
+  b <- c(-120.54, 38.61, -120.41, 38.70)
+  x <- try(mapunit_geom_by_ll_bbox(b)) # about 20 seconds
+
+  if(!inherits(x, 'try-error')) {
+    # note that the returned geometry is everything overlapping the bbox
+    # and not an intersection... why?
+    plot(x)
+    rect(b[1], b[2], b[3], b[4], border='red', lwd=2)
+
+
+    # get map unit data for matching map unit keys
+    in.statement <- format_SQL_in_statement(unique(x$mukey))
+
+    q <- paste("SELECT mukey, muname FROM mapunit WHERE mukey IN ", in.statement, sep="")
+    res <- SDA_query(q)
+
+    # inspect
+    head(res)
+  } else {
+    message('could not download XML result from SDA')
+  }
+}
+}
+}
+\author{
+Dylan E. Beaudette
+}
diff --git a/manbak/mukey.wcs.Rd b/manbak/mukey.wcs.Rd
new file mode 100644
index 00000000..0b42915e
--- /dev/null
+++ b/manbak/mukey.wcs.Rd
@@ -0,0 +1,41 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/mukey-WCS.R
+\name{mukey.wcs}
+\alias{mukey.wcs}
+\title{gNATSGO / gSSURGO Map Unit Key Web Coverage Service (WCS)}
+\usage{
+mukey.wcs(aoi, db = c("gnatsgo", "gssurgo"), res = 30, quiet = FALSE)
+}
+\arguments{
+\item{aoi}{area of interest (AOI) defined using a \code{Spatial*}, a \code{sf}, \code{sfc} or \code{bbox} object or a \code{list}, see details}
+
+\item{db}{name of the gridded map unit key grid to access, should be either 'gnatsgo' or 'gssurgo'}
+
+\item{res}{grid resolution, units of meters. The native resolution of gNATSGO and gSSURGO (this WCS) is 30m.}
+
+\item{quiet}{logical, passed to \code{download.file} to enable / suppress URL and progress bar for download.}
+}
+\value{
+\code{raster} object containing indexed map unit keys and associated raster attribute table
+}
+\description{
+Download chunks of the gNATSGO or gSSURGO map unit key grid via bounding-box from the SoilWeb WCS.
+}
+\details{
+\code{aoi} should be specified as either a \code{Spatial*}, \code{sf}, \code{sfc} or \code{bbox} object or a \code{list} containing:
+
+\describe{
+\item{\code{aoi}}{bounding-box specified as (xmin, ymin, xmax, ymax) e.g. c(-114.16, 47.65, -114.08, 47.68)}
+\item{\code{crs}}{coordinate reference system of BBOX, e.g. '+init=epsg:4326'}
+}
+
+The WCS query is parameterized using \code{raster::extent} derived from the above AOI specification, after conversion to the native CRS (EPSG:6350) of the gNATSGO / gSSURGO grid.
+
+Databases available from this WCS can be queried using \code{WCS_details(wcs = 'mukey')}.
+}
+\note{
+The gNATSGO grid includes raster soil survey map unit keys which are not in SDA.
+}
+\author{
+D.E. Beaudette and A.G. Brown
+}
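+% a short sketch of the list-style AOI described in the details above; the
+% bounding box is the same example given there
+\examples{
+\donttest{
+if(requireNamespace("curl") &
+   curl::has_internet()) {
+
+  # AOI as a WGS84 bounding box
+  a <- list(aoi = c(-114.16, 47.65, -114.08, 47.68),
+            crs = '+init=epsg:4326')
+
+  # fetch gNATSGO map unit keys at native resolution
+  x <- try(mukey.wcs(aoi = a, db = 'gnatsgo'))
+
+  if(!inherits(x, 'try-error')) {
+    raster::plot(x, legend = FALSE)
+  }
+}
+}
+}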
diff --git a/manbak/parseWebReport.Rd b/manbak/parseWebReport.Rd
new file mode 100644
index 00000000..d25ac569
--- /dev/null
+++ b/manbak/parseWebReport.Rd
@@ -0,0 +1,30 @@
+\name{parseWebReport}
+\alias{parseWebReport}
+
+\title{Parse contents of a web report, based on supplied arguments.}
+\description{Parse contents of a web report, based on supplied arguments.}
+\usage{
+parseWebReport(url, args, index = 1)
+}
+
+\arguments{
+  \item{url}{Base URL to a LIMS/NASIS web report.}
+  \item{args}{List of named arguments to send to report, see details.}
+  \item{index}{Integer index specifying the table to return, or, NULL for a list of tables}
+}
+
+\details{Report argument names can be inferred by inspection of the HTML source associated with any given web report.}
+
+\value{A \code{data.frame} object in the case of a single integer passed to \code{index}, a \code{list} object in the case of an integer vector or NULL passed to \code{index}.}
+
+\author{D.E. Beaudette and S.M. Roecker}
+
+\keyword{ IO }
+
+\note{Most web reports are for internal use only.}
+
+\examples{
+\donttest{
+# pending
+}
+}
diff --git a/manbak/processSDA_WKT.Rd b/manbak/processSDA_WKT.Rd
new file mode 100644
index 00000000..cf554f42
--- /dev/null
+++ b/manbak/processSDA_WKT.Rd
@@ -0,0 +1,30 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/SDA-spatial.R
+\name{processSDA_WKT}
+\alias{processSDA_WKT}
+\title{Post-process WKT returned from SDA.}
+\usage{
+processSDA_WKT(d, g = "geom", p4s = "+proj=longlat +datum=WGS84")
+}
+\arguments{
+\item{d}{\code{data.frame} returned by \code{SDA_query}, containing WKT representation of geometry}
+
+\item{g}{name of column in \code{d} containing WKT geometry}
+
+\item{p4s}{PROJ4 CRS definition, typically GCS WGS84}
+}
+\value{
+A \code{Spatial*} object.
+}
+\description{
+This is a helper function, commonly used with \code{SDA_query} to extract WKT (well-known text) representation of geometry to an sp-class object.
+}
+\details{
+The SDA website can be found at \url{https://sdmdataaccess.nrcs.usda.gov}. See the \href{http://ncss-tech.github.io/AQP/soilDB/SDA-tutorial.html}{SDA Tutorial} for detailed examples.
+}
+\note{
+This function requires the \code{httr}, \code{jsonlite}, \code{XML}, and \code{rgeos} packages.
+}
+\author{
+D.E. Beaudette
+}
diff --git a/manbak/seriesExtent.Rd b/manbak/seriesExtent.Rd
new file mode 100644
index 00000000..2354a373
--- /dev/null
+++ b/manbak/seriesExtent.Rd
@@ -0,0 +1,63 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/seriesExtent.R
+\name{seriesExtent}
+\alias{seriesExtent}
+\title{Retrieve Soil Series Extent Maps from SoilWeb}
+\usage{
+seriesExtent(s, type = c("vector", "raster"), timeout = 60)
+}
+\arguments{
+\item{s}{a soil series name, case-insensitive}
+
+\item{type}{series extent representation, \code{vector} results in a \code{SpatialPolygonsDataFrame} object and \code{raster} results in a \code{raster} object}
+
+\item{timeout}{time that we are willing to wait for a response, in seconds}
+}
+\description{
+This function downloads a generalized representation of a soil series extent from SoilWeb, derived from the current SSURGO snapshot. Data can be returned as vector outlines (\code{SpatialPolygonsDataFrame} object) or as a gridded representation of area proportion falling within 800m cells (\code{raster} object). Gridded series extent data are only available in CONUS. Vector representations are returned with a GCS/WGS84 coordinate reference system and raster representations are returned with an Albers Equal Area / NAD83 coordinate reference system.
+}
+\note{
+This function requires the \code{rgdal} package. Warning messages about the proj4 CRS specification may be printed depending on your version of \code{rgdal}. This should be resolved soon.
+}
+\examples{
+
+\donttest{
+if(requireNamespace("curl") &
+   curl::has_internet()) {
+
+  # required packages
+  library(sp)
+  library(raster)
+  library(rgdal)
+
+  # specify a soil series name
+  s <- 'magnor'
+
+  # return as SpatialPolygonsDataFrame
+  x <- seriesExtent(s, type = 'vector')
+  # return as raster
+  y <- seriesExtent(s, type = 'raster')
+
+  # note that CRS are different
+  proj4string(x)
+  projection(y)
+
+  # transform vector representation to CRS of raster
+  x <- spTransform(x, CRS(projection(y)))
+
+  # graphical comparison
+  par(mar = c(1, 1, 1, 3))
+  plot(y, axes = FALSE)
+  plot(x, add = TRUE)
+
+
+}
+}
+
+}
+\references{
+\url{https://casoilresource.lawr.ucdavis.edu/see/}
+}
+\author{
+D.E. Beaudette
+}
diff --git a/manbak/siblings.Rd b/manbak/siblings.Rd
new file mode 100644
index 00000000..47a62b91
--- /dev/null
+++ b/manbak/siblings.Rd
@@ -0,0 +1,60 @@
+\name{siblings}
+\alias{siblings}
+
+\title{Look up siblings and cousins for a given soil series.}
+\description{Look up siblings and cousins for a given soil series, from the current fiscal year SSURGO snapshot via SoilWeb.}
+\usage{
+siblings(s, only.major=FALSE, component.data = FALSE, cousins = FALSE)
+}
+
+\arguments{
+  \item{s}{character vector, the name of a single soil series, case-insensitive.}
+  \item{only.major}{logical, should only siblings that are major components be returned?}
+  \item{component.data}{logical, should component data for siblings (and optionally cousins) be returned?}
+  \item{cousins}{logical, should siblings-of-siblings (cousins) be returned?}
+}
+
+\details{The siblings of any given soil series are defined as those soil series (major and minor component) that share a parent map unit with the named series (as a major component). Cousins are siblings of siblings.
Data are sourced from SoilWeb which maintains a copy of the current SSURGO snapshot.} + +\value{ +\describe{ + \item{sib}{\code{data.frame} containing siblings, major component flag, and number of co-occurrences} + \item{sib.data}{\code{data.frame} containing sibling component data} + \item{cousins}{\code{data.frame} containing cousins, major component flag, and number of co-occurrences} + \item{cousin.data}{\code{data.frame} containing cousin component data} + } +} + +\references{ +\href{http://ncss-tech.github.io/AQP/soilDB/soil-series-query-functions.html}{soilDB Soil Series Query Functionality} + +\href{http://ncss-tech.github.io/AQP/soilDB/siblings.html}{Related tutorial.} +} + +\author{ +D.E. Beaudette +} + +\seealso{ +\link{OSDquery}, \link{siblings}, \link{fetchOSD} +} + +\examples{ +\donttest{ +if(requireNamespace("curl") & + curl::has_internet()) { + + # basic usage + x <- siblings('zook') + x$sib + + # restrict to siblings that are major components + # e.g. the most likely siblings + x <- siblings('zook', only.major = TRUE) + x$sib +} +} +} + +\keyword{ manip } + diff --git a/man/simplfyFragmentData.Rd b/manbak/simplfyFragmentData.Rd similarity index 100% rename from man/simplfyFragmentData.Rd rename to manbak/simplfyFragmentData.Rd diff --git a/manbak/simplifyColorData.Rd b/manbak/simplifyColorData.Rd new file mode 100644 index 00000000..acdd2b6f --- /dev/null +++ b/manbak/simplifyColorData.Rd @@ -0,0 +1,38 @@ +\name{simplifyColorData} +\alias{simplifyColorData} +\alias{mix_and_clean_colors} + +\title{Simplify Color Data by ID} +\description{Simplify multiple Munsell color observations associated with each horizon.} +\usage{ +simplifyColorData(d, id.var = "phiid", wt = "colorpct", bt = FALSE) +mix_and_clean_colors(x, wt='pct', backTransform = FALSE) +} + +\arguments{ + \item{d}{a \code{data.frame} object, typically returned from NASIS, see details} + \item{id.var}{character vector with the name of the column containing an ID that is unique among all horizons in \code{d}} + \item{x}{a \code{data.frame} object containing sRGB coordinates associated with a group of colors to mix} + \item{wt}{a character vector with the name of the column containing color weights for mixing} + \item{bt}{logical, should the mixed sRGB representation of soil color be transformed to closest Munsell chips? This is performed by \code{aqp::rgb2Munsell}} + \item{backTransform}{logical, should the mixed sRGB representation of soil color be transformed to closest Munsell chips? This is performed by \code{aqp::rgb2Munsell}} +} + +\details{ +This function is mainly intended for the processing of NASIS pedon/horizon data which may or may not contain multiple colors per horizon/moisture status combination. \code{simplifyColorData} will "mix" multiple colors associated with horizons in \code{d}, according to IDs specified by \code{id.var}, using "weights" (area percentages) specified by the \code{wt} argument to \code{mix_and_clean_colors}. + +Note that this function doesn't actually simulate the mixture of pigments on a surface, rather, "mixing" is approximated via weighted average in the CIELAB colorspace. + +The \code{simplifyColorData} function can be applied to data sources other than NASIS by careful use of the \code{id.var} and \code{wt} arguments. However, \code{d} must contain Munsell colors split into columns named "colorhue", "colorvalue", and "colorchroma". In addition, the moisture state ("Dry" or "Moist") must be specified in a column named "colormoistst". 
+ +The \code{mix_and_clean_colors} function can be applied to arbitrary data sources as long as \code{x} contains sRGB coordinates in columns named "r", "g", and "b". This function should be applied to chunks of rows within which color mixtures make sense. + +There are examples in \href{http://ncss-tech.github.io/AQP/soilDB/KSSL-demo.html}{the KSSL data tutorial} and \href{http://ncss-tech.github.io/AQP/soilDB/mixing-soil-color-data.html}{the soil color mixing tutorial}. +} + + +\author{D.E. Beaudette} + + +\keyword{manip} + diff --git a/manbak/soilDB-package.Rd b/manbak/soilDB-package.Rd new file mode 100644 index 00000000..4f2f122d --- /dev/null +++ b/manbak/soilDB-package.Rd @@ -0,0 +1,15 @@ +\name{soilDB-package} +\alias{soilDB.env} +\alias{soilDB-package} +\alias{soilDB} +\docType{package} +\title{Soil Database Interface} +\description{This package provides methods for extracting soils information from local PedonPC and AK Site databases (MS Access format), local NASIS databases (MS SQL Server), and the SDA webservice. Currently USDA-NCSS data sources are supported, however, there are plans to develop interfaces to outside systems such as the Global Soil Mapping project.} +\details{ +It can be difficult to locate all of the dependencies required for sending/processing SOAP requests, especially on UNIX-like operating systems. Windows binary packages for the dependencies can be found \href{http://www.stats.ox.ac.uk/pub/RWin/bin/windows/contrib/2.15/}{here}. See \code{\link{fetchPedonPC}} for a simple wrapper function that should suffice for typical site/pedon/hz queries. An introduction to the soilDB package can be found \href{https://r-forge.r-project.org/scm/viewvc.php/*checkout*/docs/soilDB/soilDB-Intro.html?root=aqp}{here}. +} +\author{J.M. Skovlin and D.E. Beaudette} +\keyword{package} +\seealso{\code{\link{fetchPedonPC}, \link{fetchNASIS}, \link{SDA_query}, \link{loafercreek}}} + + diff --git a/manbak/taxaExtent.Rd b/manbak/taxaExtent.Rd new file mode 100644 index 00000000..b95825cd --- /dev/null +++ b/manbak/taxaExtent.Rd @@ -0,0 +1,82 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/taxaExtent.R +\name{taxaExtent} +\alias{taxaExtent} +\title{Retrieve Soil Taxonomy Membership Grids} +\usage{ +taxaExtent( + x, + level = c("order", "suborder", "greatgroup", "subgroup"), + timeout = 60 +) +} +\arguments{ +\item{x}{single taxa name, case-insensitive} + +\item{level}{the taxonomic level within the top 4 tiers of Soil Taxonomy, one of \code{c('order', 'suborder', 'greatgroup', 'subgroup')}} + +\item{timeout}{time that we are willing to wait for a response, in seconds} +} +\value{ +a \code{raster} object +} +\description{ +This function downloads a generalized representation of the geographic extent of any single taxa from the top 4 tiers of Soil Taxonomy. Data are provided by SoilWeb, ultimately sourced from from the current SSURGO snapshot. Data are returned as \code{raster} objects representing area proportion falling within 800m cells. Data are only available in CONUS and returned using an Albers Equal Area / NAD83 coordinate reference system. +} +\note{ +This is a work in progress. 
+
+}
+\examples{
+\donttest{
+
+if(requireNamespace("curl") &&
+   curl::has_internet()) {
+
+  library(raster)
+
+  # try a couple of different examples
+
+  # soil order
+  taxa <- 'vertisols'
+  x <- taxaExtent(taxa, level = 'order')
+  a <- raster::aggregate(x, fact = 5)
+
+  # suborder
+  taxa <- 'ustalfs'
+  x <- taxaExtent(taxa, level = 'suborder')
+  a <- raster::aggregate(x, fact = 5)
+
+  # greatgroup
+  taxa <- 'haplohumults'
+  x <- taxaExtent(taxa, level = 'greatgroup')
+  a <- raster::aggregate(x, fact = 5)
+
+  # subgroup
+  taxa <- 'Typic Haploxerepts'
+  x <- taxaExtent(taxa, level = 'subgroup')
+  a <- raster::aggregate(x, fact = 5)
+
+  # quick evaluation of the result
+  if(requireNamespace("rasterVis") && requireNamespace('viridis')) {
+    rasterVis::levelplot(a,
+      margin = FALSE, scales = list(draw = FALSE),
+      col.regions = viridis::viridis,
+      main = names(a)
+    )
+  }
+
+  # slippy map
+  if(requireNamespace("mapview")) {
+    mapview::mapview(a, col.regions = viridis::viridis, na.color = NA, use.layer.names = TRUE)
+  }
+
+}
+
+}
+
+}
+\author{
+D.E. Beaudette
+}
diff --git a/manbak/uncode.Rd b/manbak/uncode.Rd
new file mode 100644
index 00000000..43d81272
--- /dev/null
+++ b/manbak/uncode.Rd
@@ -0,0 +1,53 @@
+\name{uncode}
+\alias{metadata}
+\alias{uncode}
+\alias{code}
+
+\title{Convert coded values returned from NASIS and SDA queries to factors}
+\description{These functions convert the coded values returned from NASIS or SDA to factors (e.g. 1 = Alfisols) using the metadata tables from NASIS. For SDA, the metadata are pulled from a static snapshot in the soilDB package (/data/metadata.rda).}
+\usage{
+uncode(df, invert = FALSE, db = "NASIS",
+       droplevels = FALSE,
+       stringsAsFactors = default.stringsAsFactors()
+       )
+code(df, ...)
+}
+%- maybe also 'usage' for other objects documented here.
+\arguments{
+  \item{df}{data.frame}
+  \item{invert}{logical: convert code labels back to their coded values? Default \code{FALSE}}
+  \item{db}{label specifying the soil database the data are coming from, which indicates whether to query metadata from a local NASIS database ("NASIS") or to use the soilDB-local snapshot ("LIMS" or "SDA")}
+  \item{droplevels}{logical: indicates whether to drop unused levels in classifying factors. This is useful when a domain has a large number of unused classes, which can waste space in tables and figures.}
+  \item{stringsAsFactors}{logical: should character vectors be converted to factors? The 'factory-fresh' default is TRUE, but this can be changed by setting options(stringsAsFactors = FALSE)}
+  \item{\dots}{arguments passed on to \code{uncode}}
+  }
+
+\details{These functions convert the coded values returned from NASIS into their plain text representation, duplicating the functionality of the CODELABEL function found in NASIS. They are primarily intended to be used internally by other soilDB R functions, in order to minimize the need to manually convert values.
+
+The function works by iterating through the column names in a data frame and looking up whether they match any of the ColumnPhysicalNames found in the metadata domain tables. If matches are found, then the column's coded values are converted to their corresponding factor levels. Therefore, it is not advisable to reuse column names from NASIS unless the contents match the range of values and format found in NASIS. Otherwise, \code{uncode()} will convert their values to NA.
+
+When data are imported from NASIS, the metadata tables are sourced directly from the local NASIS database. When data are imported from SDA or the NASIS Web Reports, the metadata are pulled from a static snapshot in the soilDB package.
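+
+For example (a sketch only: the coded values shown are illustrative, not actual NASIS domain codes):
+
+\preformatted{
+# the column name 'taxorder' matches a NASIS ColumnPhysicalName,
+# so its coded values are converted to the corresponding factor levels
+d <- data.frame(taxorder = c(2, 5, 2))
+d <- uncode(d, db = 'SDA')
+levels(d$taxorder)
+}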
+
+Beware: the default is to return values as factors rather than strings. While strings are generally preferable, factors make plotting more convenient. Generally, the factor level ordering returned by \code{uncode()} follows the natural ordering of categories that would be expected (e.g. sand, silt, clay).
+}
+
+\value{A data frame with the results.}
+\author{Stephen Roecker}
+\examples{
+\donttest{
+if(requireNamespace("curl") &&
+   curl::has_internet() &&
+   require(aqp)) {
+  # query component by nationalmusym
+  comp <- fetchSDA(WHERE = "nationalmusym = '2vzcp'")
+  s <- site(comp)
+
+  # use SDA uncoding domain via db argument
+  s <- uncode(s, db = "SDA")
+  levels(s$taxorder)
+}
+}
+}
+% Add one or more standard keywords, see file 'KEYWORDS' in the
+% R documentation directory.
+\keyword{manip}% use one of RShowDoc("KEYWORDS")
diff --git a/manbak/us_ss_timeline.Rd b/manbak/us_ss_timeline.Rd
new file mode 100644
index 00000000..f18b8557
--- /dev/null
+++ b/manbak/us_ss_timeline.Rd
@@ -0,0 +1,33 @@
+\name{us_ss_timeline}
+\alias{us_ss_timeline}
+\docType{data}
+
+\title{
+Timeline of US Published Soil Surveys
+}
+
+\description{
+This dataset records the year in which each US Soil Survey was published.
+}
+
+\usage{data("us_ss_timeline")}
+
+\format{
+  A data frame with 5209 observations on the following 4 variables.
+  \describe{
+    \item{\code{ssa}}{Soil Survey Area name, a character vector}
+    \item{\code{year}}{year of publication, a numeric vector}
+    \item{\code{pdf}}{does a PDF exist, a logical vector}
+    \item{\code{state}}{state abbreviation, a character vector}
+  }
+}
+
+\details{
+These data were scraped from the NRCS Soils website. The scraping procedure and an example plot are included in the examples section below.
+}
+
+\source{
+\url{https://www.nrcs.usda.gov/wps/portal/nrcs/soilsurvey/soils/survey/state/}
+}
+
+\keyword{datasets}
diff --git a/manbak/waterDayYear.Rd b/manbak/waterDayYear.Rd
new file mode 100644
index 00000000..f28b3fb1
--- /dev/null
+++ b/manbak/waterDayYear.Rd
@@ -0,0 +1,38 @@
+\name{waterDayYear}
+\alias{waterDayYear}
+
+\title{Compute Water Day and Year}
+\description{Compute "water" day and year, based on the end of the typical or legal dry season. This is September 30 in California.}
+
+\usage{
+waterDayYear(d, end = "09-30")
+}
+
+\arguments{
+  \item{d}{anything that can be safely converted to \code{POSIXlt}}
+  \item{end}{"MM-DD" notation for the end of the water year}
+}
+
+\details{This function does not account for leap years; results near the end of February are probably worth checking.}
+
+\value{
+A \code{data.frame} object with the following components:
+  \item{wy}{the "water year"}
+  \item{wd}{the "water day"}
+}
+
+\references{Ideas borrowed from:
+\url{https://github.com/USGS-R/dataRetrieval/issues/246} and
+\url{https://stackoverflow.com/questions/48123049/create-day-index-based-on-water-year}
+}
+
+\author{D.E. Beaudette}
+
+
+\examples{
+# try it
+waterDayYear('2019-01-01')
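+
+# a sketch of the convention assumed above (not a definitive reference):
+# dates after the end of the water year (default "09-30") should fall
+# into the next water year, labeled by its ending calendar year
+x <- waterDayYear(c('2019-09-30', '2019-10-01'))
+x$wy
+}
+
+\keyword{ manip }% use one of RShowDoc("KEYWORDS")
+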