Calls all other functions in the ContDataQC package.

ContDataQC(
  fun.myData.Operation,
  fun.myData.SiteID,
  fun.myData.Type,
  fun.myData.DateRange.Start,
  fun.myData.DateRange.End,
  fun.myDir.import = getwd(),
  fun.myDir.export = getwd(),
  fun.myConfig = "",
  fun.myFile = "",
  fun.myReport.format = "",
  fun.myReport.Dir = "",
  fun.CreateReport = TRUE,
  fun.AddDeployCol = TRUE
)

Arguments

fun.myData.Operation

Operation to be performed; one of c("GetGageData", "QCRaw", "Aggregate", "SummaryStats").

fun.myData.SiteID

Station/SiteID.

fun.myData.Type

Data type; one of c("Air", "Water", "AW", "Gage", "AWG", "AG", "WG").

fun.myData.DateRange.Start

Start date for requested data. Format = YYYY-MM-DD.

fun.myData.DateRange.End

End date for requested data. Format = YYYY-MM-DD.

fun.myDir.import

Directory for import data. Default is current working directory.

fun.myDir.export

Directory for export data. Default is current working directory.

fun.myConfig

Configuration file to use for this data analysis. The default is always loaded first so only "new" values need to be included. This is the easiest way to control time zones.

fun.myFile

Single file name (or vector of file names) on which to perform the operation. SiteID, Type, and Date Range are not used when file name(s) are provided (see the sketch after this argument list).

fun.myReport.format

Report format (docx or html). The default (docx) is specified in config.R and can be customized there via ContData.env$myReport.Format.

fun.myReport.Dir

Report (Rmd) template folder. Default is the package's rmd folder. Can be customized in config.R via ContData.env$myReport.Dir.

fun.CreateReport

Boolean; whether to create reports. Default = TRUE.

fun.AddDeployCol

Boolean for adding a logger deployment column. Default = TRUE. The column name can be customized in config.R via ContData.env$myName.LoggerDeployment.
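
As a quick orientation, the function can be called either with SiteID, Type, and date-range arguments (files in fun.myDir.import are matched on SiteID, Type, and date range) or with explicit file names via fun.myFile. A minimal sketch, not run here; the site, directories, and file name are placeholders:

# Site/date-range mode
ContDataQC("QCRaw", "MySite", "AW", "2020-01-01", "2020-12-31"
           , fun.myDir.import = "Data1_RAW"
           , fun.myDir.export = "Data2_QC")

# File mode; SiteID, Type, and Date Range are not needed
ContDataQC("QCRaw"
           , fun.myDir.import = "Data1_RAW"
           , fun.myDir.export = "Data2_QC"
           , fun.myFile = "MySite_AW_20200101_20201231.csv")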

Value

Writes a CSV file to the specified export directory with additional columns for calculated statistics.

Details

Below are the default data directories assumed to exist in the working directory. These can be created with code in the example.

./Data0_Original/ = Unmodified data logger files.

./Data1_RAW/ = Data logger files modified for use with the library; modifications address extra rows and file and column names.

./Data2_QC/ = Repository for library output for QCed files.

./Data3_Aggregated/ = Repository for library output for aggregated (or split) files.

./Data4_Stats/ = Repository for library output for statistical summary files.

It is possible to call the Aggregate operation to meld together files from multiple sites (e.g., all sites in a watershed or different depths on a lake). The output file is named after the first input file, with "Append_x" appended, where "x" is the number of files that were aggregated. The purpose is to let users analyze the data from these files in a single file.
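
For example, a minimal sketch of melding QCed files from two sites (the file and directory names here are hypothetical; see the lake example under Examples for a worked version):

myFiles <- c("QC_SiteA_Water_20200101_20201231.csv"
             , "QC_SiteB_Water_20200101_20201231.csv")
ContDataQC("Aggregate"
           , fun.myDir.import = "Data2_QC"
           , fun.myDir.export = "Data3_Aggregated"
           , fun.myFile = myFiles)
# The aggregated output is named after the first input file, as described above.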

Pandoc is needed for docx reports (default). Pandoc comes packaged with RStudio. To install Pandoc on Windows use the `installr` package.

https://CRAN.R-project.org/package=installr

install.packages("installr")
installr::install.pandoc()

The above won't work if you don't have admin rights on your computer. As an alternative, download the msi file for the latest release from https://github.com/jgm/pandoc/releases (you may need to have your IT department install it for you). For help installing via the command window, see http://www.intowindows.com/how-to-run-msi-file-as-administrator-from-command-prompt-in-windows/
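
If you are unsure whether Pandoc is available, you can check from R before requesting a docx report (a small sketch using the rmarkdown package, suggested here as a convenience rather than a ContDataQC requirement); if Pandoc is missing, report creation can be skipped with fun.CreateReport = FALSE.

if (requireNamespace("rmarkdown", quietly = TRUE)) {
  rmarkdown::pandoc_available()  # TRUE if a usable Pandoc is on the PATH
  rmarkdown::pandoc_version()    # version of the Pandoc that was found
}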

Examples

# Examples of each operation

# 00. Set up
# Parameters
Selection.Operation <- c("GetGageData"
                         , "QCRaw"
                         , "Aggregate"
                         , "SummaryStats")
Selection.Type      <- c("Air","Water","AW","Gage","AWG","AG","WG")
Selection.SUB <- c("Data0_Original"
                   , "Data1_RAW"
                   , "Data2_QC"
                   , "Data3_Aggregated"
                   , "Data4_Stats")
(myDir.BASE <- tempdir()) # create and print temp directory for example data
#> [1] "/var/folders/24/8k48jl6d249_n_qfxwsl6xvm0000gn/T//RtmpZn7gXz"

# Create data directories
myDir.create <- file.path(myDir.BASE, Selection.SUB[1])
  ifelse(dir.exists(myDir.create) == FALSE
         , dir.create(myDir.create)
         , "Directory already exists")
#> [1] TRUE
myDir.create <- file.path(myDir.BASE, Selection.SUB[2])
  ifelse(dir.exists(myDir.create) == FALSE
         , dir.create(myDir.create)
         , "Directory already exists")
#> [1] TRUE
myDir.create <- file.path(myDir.BASE, Selection.SUB[3])
  ifelse(dir.exists(myDir.create) == FALSE
         , dir.create(myDir.create)
         , "Directory already exists")
#> [1] TRUE
myDir.create <- file.path(myDir.BASE, Selection.SUB[4])
  ifelse(dir.exists(myDir.create) == FALSE
         , dir.create(myDir.create)
         , "Directory already exists")
#> [1] TRUE
myDir.create <- file.path(myDir.BASE, Selection.SUB[5])
  ifelse(dir.exists(myDir.create) == FALSE
         , dir.create(myDir.create)
         , "Directory already exists")
#> [1] TRUE
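
# The five blocks above can be collapsed into a single loop (an equivalent
# sketch; it creates the same directories but does not print TRUE for each)
for (mySub in Selection.SUB) {
  myDir.create <- file.path(myDir.BASE, mySub)
  if (!dir.exists(myDir.create)) {
    dir.create(myDir.create)
  }
}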

# Save example data (assumes myDir.BASE directory exists)
myData <- data_raw_test2_AW_20130426_20130725
  write.csv(myData, file.path(myDir.BASE
                              , Selection.SUB[2]
                              , "test2_AW_20130426_20130725.csv"))
myData <- data_raw_test2_AW_20130725_20131015
  write.csv(myData, file.path(myDir.BASE
                              , Selection.SUB[2]
                              , "test2_AW_20130725_20131015.csv"))
myData <- data_raw_test2_AW_20140901_20140930
  write.csv(myData, file.path(myDir.BASE
                              , Selection.SUB[2]
                              , "test2_AW_20140901_20140930.csv"))
myData <- data_raw_test4_AW_20160418_20160726
  write.csv(myData, file.path(myDir.BASE
                              , Selection.SUB[2]
                              , "test4_AW_20160418_20160726.csv"))
myFile <- "config.TZ.Central.R"
  file.copy(file.path(path.package("ContDataQC"), "extdata", myFile)
            , file.path(myDir.BASE, Selection.SUB[2], myFile))
#> [1] TRUE

# 01.A. Get Gage Data
myData.Operation       <- "GetGageData" #Selection.Operation[1]
myData.SiteID          <- "01187300" # Hubbard River near West Hartland, CT
myData.Type            <- Selection.Type[4] #"Gage"
myData.DateRange.Start <- "2013-01-01"
myData.DateRange.End   <- "2014-12-31"
myDir.import           <- ""
myDir.export           <- file.path(myDir.BASE, Selection.SUB[2])
ContDataQC(myData.Operation
           , myData.SiteID
           , myData.Type
           , myData.DateRange.Start
           , myData.DateRange.End
           , myDir.import
           , myDir.export)
#> [1] "Total items to process = 1"
#> 
#> [1] "Getting available data; 01187300."
#> 
#>     agency_cd  site_no                           station_nm site_tp_cd
#> 530      USGS 01187300 HUBBARD RIVER NEAR WEST HARTLAND, CT         ST
#> 531      USGS 01187300 HUBBARD RIVER NEAR WEST HARTLAND, CT         ST
#> 532      USGS 01187300 HUBBARD RIVER NEAR WEST HARTLAND, CT         ST
#>     dec_lat_va dec_long_va coord_acy_cd dec_coord_datum_cd alt_va alt_acy_va
#> 530    42.0375   -72.93933            H              NAD83 594.57       0.01
#> 531    42.0375   -72.93933            H              NAD83 594.57       0.01
#> 532    42.0375   -72.93933            H              NAD83 594.57       0.01
#>     alt_datum_cd   huc_cd data_type_cd parm_cd stat_cd ts_id loc_web_ds
#> 530       NGVD29 01080207           uv   00010    <NA> 66869         NA
#> 531       NGVD29 01080207           uv   00060    <NA> 66866         NA
#> 532       NGVD29 01080207           uv   00065    <NA> 66867         NA
#>     medium_grp_cd parm_grp_cd   srs_id access_cd begin_date   end_date count_nu
#> 530           wat        <NA>  1645597         0 2015-11-04 2018-08-08     1008
#> 531           wat        <NA>  1645423         0 1990-10-01 2022-12-22    11770
#> 532           wat        <NA> 17164583         0 2007-10-01 2022-12-22     5561
#> 
#> 
#> [1] "Processing item 1 of 1, COMPLETE, 01187300."
#> 
#> [1] "Task COMPLETE; 0.23 min."

# 01.B. Get Gage Data (central time zone)
myData.Operation       <- "GetGageData" #Selection.Operation[1]
myData.SiteID          <- "07032000" # Mississippi River at Memphis, TN
myData.Type            <- Selection.Type[4] #"Gage"
myData.DateRange.Start <- "2013-01-01"
myData.DateRange.End   <- "2014-12-31"
myDir.import           <- ""
myDir.export           <- file.path(myDir.BASE, Selection.SUB[2])
# include path if not in working directory
myConfig               <- file.path(myDir.BASE, Selection.SUB[2]
                                    , "config.TZ.central.R")
ContDataQC(myData.Operation
           , myData.SiteID
           , myData.Type
           , myData.DateRange.Start
           , myData.DateRange.End
           , myDir.import
           , myDir.export
           , myConfig)
#> [1] "Total items to process = 1"
#> 
#> [1] "Getting available data; 07032000."
#> 
#>     agency_cd  site_no                       station_nm site_tp_cd dec_lat_va
#> 360      USGS 07032000 MISSISSIPPI RIVER AT MEMPHIS, TN         ST   35.12315
#> 361      USGS 07032000 MISSISSIPPI RIVER AT MEMPHIS, TN         ST   35.12315
#>     dec_long_va coord_acy_cd dec_coord_datum_cd alt_va alt_acy_va alt_datum_cd
#> 360   -90.07759            S              NAD83    184         21       NAVD88
#> 361   -90.07759            S              NAD83    184         21       NAVD88
#>       huc_cd data_type_cd parm_cd stat_cd  ts_id loc_web_ds medium_grp_cd
#> 360 08010100           uv   00060    <NA> 131634         NA           wat
#> 361 08010100           uv   00065    <NA> 131635         NA           wat
#>     parm_grp_cd   srs_id access_cd begin_date   end_date count_nu
#> 360        <NA>  1645423         0 2014-10-01 2022-12-22     3004
#> 361        <NA> 17164583         0 2011-05-09 2022-12-22     4245
#> 
#> 
#> [1] "Processing item 1 of 1, COMPLETE, 07032000."
#> 
#> [1] "Task COMPLETE; 0.01 min."

# 02.A. QC Raw Data
myData.Operation       <- "QCRaw" #Selection.Operation[2]
myData.SiteID          <- "test2"
myData.Type            <- Selection.Type[3] #"AW"
myData.DateRange.Start <- "2013-01-01"
myData.DateRange.End   <- "2014-12-31"
myDir.import           <- file.path(myDir.BASE, Selection.SUB[2]) #"Data1_RAW"
myDir.export           <- file.path(myDir.BASE, Selection.SUB[3]) #"Data2_QC"
myReport.format        <- "docx"
ContDataQC(myData.Operation
           , myData.SiteID
           , myData.Type
           , myData.DateRange.Start
           , myData.DateRange.End
           , myDir.import
           , myDir.export
           , fun.myReport.format = myReport.format)
#> [1] "Total files to process = 6"
#> [1] "Processing item 1 of 6, SKIPPED (Non-Match, SiteID), 01187300_Gage_20130101_20150101.csv."
#> [1] "Processing item 2 of 6, SKIPPED (Non-Match, SiteID), 07032000_Gage_20140930_20141231.csv."
#> [1] "Processing item 3 of 6, WORKING (QC Tests and Flags - WaterTemp), test2_AW_20130426_20130725.csv."
#> [1] "Processing item 3 of 6, WORKING (QC Tests and Flags - AirTemp), test2_AW_20130426_20130725.csv."
#> [1] "Processing item 3 of 6, WORKING (QC Tests and Flags - WaterP), test2_AW_20130426_20130725.csv."
#> [1] "Processing item 3 of 6, WORKING (QC Tests and Flags - AirP), test2_AW_20130426_20130725.csv."
#> [1] "Processing item 3 of 6, WORKING (QC Tests and Flags - SensorDepth), test2_AW_20130426_20130725.csv."
#> [1] "Task COMPLETE. QC Report.  Total time = 9.081066 secs."
#> [1] "User defined parameters: SiteID (test2), Data Type (Aw), Date Range (2013-04-26 to 2013-07-25)."
#> [1] "Processing item 3 of 6, COMPLETE, test2_AW_20130426_20130725.csv."
#> [1] "Processing item 4 of 6, WORKING (QC Tests and Flags - WaterTemp), test2_AW_20130725_20131015.csv."
#> [1] "Processing item 4 of 6, WORKING (QC Tests and Flags - AirTemp), test2_AW_20130725_20131015.csv."
#> [1] "Processing item 4 of 6, WORKING (QC Tests and Flags - WaterP), test2_AW_20130725_20131015.csv."
#> [1] "Processing item 4 of 6, WORKING (QC Tests and Flags - AirP), test2_AW_20130725_20131015.csv."
#> [1] "Processing item 4 of 6, WORKING (QC Tests and Flags - SensorDepth), test2_AW_20130725_20131015.csv."
#> [1] "Task COMPLETE. QC Report.  Total time = 7.8039 secs."
#> [1] "User defined parameters: SiteID (test2), Data Type (Aw), Date Range (2013-07-25 to 2013-10-15)."
#> [1] "Processing item 4 of 6, COMPLETE, test2_AW_20130725_20131015.csv."
#> [1] "Processing item 5 of 6, WORKING (QC Tests and Flags - WaterTemp), test2_AW_20140901_20140930.csv."
#> [1] "Processing item 5 of 6, WORKING (QC Tests and Flags - AirTemp), test2_AW_20140901_20140930.csv."
#> [1] "Processing item 5 of 6, WORKING (QC Tests and Flags - WaterP), test2_AW_20140901_20140930.csv."
#> [1] "Processing item 5 of 6, WORKING (QC Tests and Flags - AirP), test2_AW_20140901_20140930.csv."
#> [1] "Processing item 5 of 6, WORKING (QC Tests and Flags - SensorDepth), test2_AW_20140901_20140930.csv."
#> [1] "Task COMPLETE. QC Report.  Total time = 7.146763 secs."
#> [1] "User defined parameters: SiteID (test2), Data Type (Aw), Date Range (2014-09-01 to 2014-09-30)."
#> [1] "Processing item 5 of 6, COMPLETE, test2_AW_20140901_20140930.csv."
#> [1] "Processing item 6 of 6, SKIPPED (Non-Match, SiteID), test4_AW_20160418_20160726.csv."
#> [1] "Task COMPLETE; 0.48 min."

# 02.B. QC Raw Data (offset collection times for air and water sensors)
myData.Operation       <- "QCRaw" #Selection.Operation[2]
myData.SiteID          <- "test4"
myData.Type            <- Selection.Type[3] #"AW"
myData.DateRange.Start <- "2016-04-28"
myData.DateRange.End   <- "2016-07-26"
myDir.import           <- file.path(myDir.BASE, Selection.SUB[2]) #"Data1_RAW"
myDir.export           <- file.path(myDir.BASE, Selection.SUB[3]) #"Data2_QC"
myReport.format        <- "html"
ContDataQC(myData.Operation
           , myData.SiteID
           , myData.Type
           , myData.DateRange.Start
           , myData.DateRange.End
           , myDir.import
           , myDir.export
           , fun.myReport.format = myReport.format)
#> [1] "Total files to process = 6"
#> [1] "Processing item 1 of 6, SKIPPED (Non-Match, SiteID), 01187300_Gage_20130101_20150101.csv."
#> [1] "Processing item 2 of 6, SKIPPED (Non-Match, SiteID), 07032000_Gage_20140930_20141231.csv."
#> [1] "Processing item 3 of 6, SKIPPED (Non-Match, SiteID), test2_AW_20130426_20130725.csv."
#> [1] "Processing item 4 of 6, SKIPPED (Non-Match, SiteID), test2_AW_20130725_20131015.csv."
#> [1] "Processing item 5 of 6, SKIPPED (Non-Match, SiteID), test2_AW_20140901_20140930.csv."
#> [1] "Processing item 6 of 6, WORKING (QC Tests and Flags - WaterTemp), test4_AW_20160418_20160726.csv."
#> [1] "Processing item 6 of 6, WORKING (QC Tests and Flags - AirTemp), test4_AW_20160418_20160726.csv."
#> [1] "Processing item 6 of 6, WORKING (QC Tests and Flags - WaterP), test4_AW_20160418_20160726.csv."
#> [1] "Processing item 6 of 6, WORKING (QC Tests and Flags - AirP), test4_AW_20160418_20160726.csv."
#> [1] "Processing item 6 of 6, WORKING (QC Tests and Flags - SensorDepth), test4_AW_20160418_20160726.csv."
#> [1] "Task COMPLETE. QC Report.  Total time = 13.06512 secs."
#> [1] "User defined parameters: SiteID (test4), Data Type (Aw), Date Range (2016-04-18 to 2016-07-26)."
#> [1] "Processing item 6 of 6, COMPLETE, test4_AW_20160418_20160726.csv."
#> [1] "Task COMPLETE; 0.27 min."

# 03. Aggregate Data
myData.Operation       <- "Aggregate" #Selection.Operation[3]
myData.SiteID          <- "test2"
myData.Type            <- Selection.Type[3] #"AW"
myData.DateRange.Start <- "2013-01-01"
myData.DateRange.End   <- "2014-12-31"
myDir.import           <- file.path(myDir.BASE, Selection.SUB[3]) #"Data2_QC"
myDir.export           <- file.path(myDir.BASE, Selection.SUB[4]) #"Data3_Aggregated"
#Leave off myReport.format and get default (docx).
ContDataQC(myData.Operation
           , myData.SiteID
           , myData.Type
           , myData.DateRange.Start
           , myData.DateRange.End
           , myDir.import
           , myDir.export)
#> [1] "Total files to process = 4"
#> [1] "Task COMPLETE. QC Report.  Total time = 7.931497 secs."
#> [1] "User defined parameters: SiteID (test2), Data Type (Aw), Date Range (2013-01-01 to 2014-12-31)."
#> [1] "Processing item 1 of 4, COMPLETE, QC_test2_Aw_20130426_20130725.csv."
#> [1] "Task COMPLETE. QC Report.  Total time = 10.34814 secs."
#> [1] "User defined parameters: SiteID (test2), Data Type (Aw), Date Range (2013-01-01 to 2014-12-31)."
#> [1] "Processing item 2 of 4, COMPLETE, QC_test2_Aw_20130725_20131015.csv."
#> [1] "Task COMPLETE. QC Report.  Total time = 11.03408 secs."
#> [1] "User defined parameters: SiteID (test2), Data Type (Aw), Date Range (2013-01-01 to 2014-12-31)."
#> [1] "Processing item 3 of 4, COMPLETE, QC_test2_Aw_20140901_20140930.csv."
#> [1] "Processing item 4 of 4, SKIPPED (Non-Match, SiteID), QC_test4_Aw_20160418_20160726.csv."
#> [1] "Task COMPLETE. QC Report.  Total time = 10.71593 secs."
#> [1] "User defined parameters: File Name (test2_Aw_20130101_20141231.csv)."
#> [1] "Processing of 4 of 4 files complete."
#> [1] "Processing of items (n=4) finished.  Total time = 41.93611 secs."
#> [1] "Items COMPLETE = 3."
#> [1] "Items SKIPPPED = 1."
#> [1] "User defined parameters: SiteID (test2), Data Type (Aw), Date Range (2013-01-01 to 2014-12-31)."
#> [1] "No other data type files exist for this SiteID and Date Range.\n    No combining across data types is possible."

# 04. Summary Stats
myData.Operation       <- "SummaryStats" #Selection.Operation[4]
myData.SiteID          <- "test2"
myData.Type            <- Selection.Type[3] #"AW"
myData.DateRange.Start <- "2013-01-01"
myData.DateRange.End   <- "2014-12-31"
myDir.import           <- file.path(myDir.BASE, Selection.SUB[4]) #"Data3_Aggregated"
myDir.export           <- file.path(myDir.BASE, Selection.SUB[5]) #"Data4_Stats"
#Leave off myReport.format and get default (docx).
ContDataQC(myData.Operation
           , myData.SiteID
           , myData.Type
           , myData.DateRange.Start
           , myData.DateRange.End
           , myDir.import
           , myDir.export)
#> [1] "Total items to process = 3:"
#> [1] "Water.Temp.C"    "Air.Temp.C"      "Sensor.Depth.ft"
#> [1] "Processing item 1 of 3; Water.Temp.C"
#> [1] "Processing item 2 of 3; Air.Temp.C"
#> [1] "Processing item 3 of 3; Sensor.Depth.ft"

#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# File Versions
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

# 02.Alt. QC Data
myData.Operation <- "QCRaw" #Selection.Operation[2]
#myFile <- "test2_AW_20130426_20130725.csv"
myFile <- c("test2_AW_20130426_20130725.csv"
           , "test2_AW_20130725_20131015.csv"
           , "test2_AW_20140901_20140930.csv")
myDir.import <- file.path(myDir.BASE, "Data1_RAW")
myDir.export <- file.path(myDir.BASE, "Data2_QC")
myReport.format <- "docx"
ContDataQC(myData.Operation
           , fun.myDir.import = myDir.import
           , fun.myDir.export = myDir.export
           , fun.myFile = myFile
           , fun.myReport.format = myReport.format)
#> [1] "Total files to process = 3"
#> [1] "Processing item 1 of 3, WORKING (QC Tests and Flags - WaterTemp), test2_AW_20130426_20130725.csv."
#> [1] "Processing item 1 of 3, WORKING (QC Tests and Flags - AirTemp), test2_AW_20130426_20130725.csv."
#> [1] "Processing item 1 of 3, WORKING (QC Tests and Flags - WaterP), test2_AW_20130426_20130725.csv."
#> [1] "Processing item 1 of 3, WORKING (QC Tests and Flags - AirP), test2_AW_20130426_20130725.csv."
#> [1] "Processing item 1 of 3, WORKING (QC Tests and Flags - SensorDepth), test2_AW_20130426_20130725.csv."
#> [1] "ERROR; no such file exists.  Cannot create QC Report."
#> [1] "PATH = /var/folders/24/8k48jl6d249_n_qfxwsl6xvm0000gn/T//RtmpZn7gXz/Data2_QC"
#> [1] "FILE = QC_test2_AW_20130426_20130725.csv"
#> Error in fun.Report.File(strFile, fun.myDir.export, fun.myDir.export,     strFile.Out.Prefix, fun.myReport.format, fun.myReport.Dir): Bad file.
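# If reports are not needed (or Pandoc is unavailable), report creation can be
# skipped with the documented fun.CreateReport argument; a sketch of the same
# call without the report step (not run here):
# ContDataQC(myData.Operation
#            , fun.myDir.import = myDir.import
#            , fun.myDir.export = myDir.export
#            , fun.myFile = myFile
#            , fun.CreateReport = FALSE)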

# 03.Alt. Aggregate Data
myData.Operation <- "Aggregate" #Selection.Operation[3]
myFile <- c("QC_test2_Aw_20130426_20130725.csv"
           , "QC_test2_Aw_20130725_20131015.csv"
           , "QC_test2_Aw_20140901_20140930.csv")
myDir.import <- file.path(myDir.BASE, "Data2_QC")
myDir.export <- file.path(myDir.BASE, "Data3_Aggregated")
myReport.format <- "html"
ContDataQC(myData.Operation
           , fun.myDir.import = myDir.import
           , fun.myDir.export = myDir.export
           , fun.myFile = myFile
           , fun.myReport.format = myReport.format)
#> [1] "Total files to process = 3"
#> [1] "Processing item 1 of 3, COMPLETE, QC_test2_Aw_20130426_20130725.csv."
#> [1] "Processing item 2 of 3, COMPLETE, QC_test2_Aw_20130725_20131015.csv."
#> [1] "Processing item 3 of 3, COMPLETE, QC_test2_Aw_20140901_20140930.csv."
#> [1] "Task COMPLETE. QC Report.  Total time = 13.57352 secs."
#> [1] "User defined parameters: File Name (QC_test2_Aw_20130426_20140930.csv)."
#> [1] "Processing of 3 of 3 files complete."
#> [1] "Processing of items (n=3) finished.  Total time = 15.78808 secs."
#> [1] "Items COMPLETE = 3."
#> [1] "Items SKIPPPED = 0."
#> [1] "User defined parameters: File Output(DATA_QC_test2_Aw_20130426_20140930.csv)."

# 04. Alt. Summary Stats
myData.Operation <- "SummaryStats" #Selection.Operation[4]
myFile <- "QC_test2_AW_20130426_20130725.csv"
#myFile <- c("QC_test2_AW_20130426_20130725.csv"
#            , "QC_test2_AW_20130725_20131015.csv"
#            , "QC_test2_AW_20140901_20140930.csv")
myDir.import <- file.path(myDir.BASE, "Data2_QC")
myDir.export <- file.path(myDir.BASE, "Data4_Stats")
#Leave off myReport.format and get default (docx).
ContDataQC(myData.Operation
           , fun.myDir.import = myDir.import
           , fun.myDir.export = myDir.export
           , fun.myFile = myFile)
#> [1] "Total files to process = 1"
#> [1] "ERROR; no such file exits.  Cannot generate summary statistics."
#> [1] "PATH = /var/folders/24/8k48jl6d249_n_qfxwsl6xvm0000gn/T//RtmpZn7gXz/Data2_QC"
#> [1] "FILE = QC_test2_AW_20130426_20130725.csv"
#> Error in fun.Stats.File(fun.myFile, fun.myDir.import, fun.myDir.export,     fun.myReport.format, fun.myReport.Dir): Bad file.

#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Summary Stats from Other Data
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# 05. Gage Data
# Get Gage Data via the dataRetrieval package from USGS 01187300, 2013-2014
#  (~4 seconds)
data.gage <- dataRetrieval::readNWISuv("01187300"
                                       , "00060"
                                       , "2013-01-01"
                                       , "2014-12-31")
head(data.gage)
#>   agency_cd  site_no            dateTime X_00060_00000 X_00060_00000_cd tz_cd
#> 1      USGS 01187300 2013-01-10 05:00:00          22.3                A   UTC
#> 2      USGS 01187300 2013-01-10 05:15:00          22.5                A   UTC
#> 3      USGS 01187300 2013-01-10 05:30:00          22.5                A   UTC
#> 4      USGS 01187300 2013-01-10 05:45:00          22.3                A   UTC
#> 5      USGS 01187300 2013-01-10 06:00:00          22.3                A   UTC
#> 6      USGS 01187300 2013-01-10 06:15:00          22.3                A   UTC
# Rename fields
myNames <- c("Agency"
             , "SiteID"
             , "Date.Time"
             , "Discharge.ft3.s"
             , "Code"
             , "TZ")
names(data.gage) <- myNames
# Add Date and Time
data.gage[,"Date"] <- as.Date(data.gage[,"Date.Time"])
data.gage[,"Time"] <-  strftime(data.gage[,"Date.Time"], format="%H:%M:%S")
# Add "flag" fields that are added by QC function.
Names.Flags <- paste0("Flag.",c("Date.Time", "Discharge.ft3.s"))
data.gage[,Names.Flags] <- "P"
# Save File
myFile <- "01187300_Gage_20130101_20141231.csv"
write.csv(data.gage, file.path(myDir.BASE, myFile), row.names=FALSE)
# Run Stats (File)
myData.Operation <- "SummaryStats"
myDir.import <- myDir.BASE
myDir.export <- myDir.BASE
ContDataQC(myData.Operation
           , fun.myDir.import = myDir.import
           , fun.myDir.export = myDir.export
           , fun.myFile = myFile)
#> [1] "Total files to process = 1"
#> [1] "Total items to process = 1:"
#> [1] "Discharge.ft3.s"
#> [1] "Processing item 4 of 1; Discharge.ft3.s"

#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Lake Data, Aggregate
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(myDir.BASE <- tempdir()) # create and print temp directory for example data
#> [1] "/var/folders/24/8k48jl6d249_n_qfxwsl6xvm0000gn/T//RtmpZn7gXz"
# 06. Lake Data
# Save example data (assumes directory exists)
myFile <- c("QC_Ellis--1.0m_Water_20180524_20180918.csv"
           , "QC_Ellis--3.0m_Water_20180524_20180918.csv")
file.copy(file.path(system.file("extdata", package="ContDataQC"), myFile)
          , file.path(myDir.BASE, "Data2_QC", myFile))
#> [1] TRUE TRUE

# Aggregate Data
myData.Operation <- "Aggregate" #Selection.Operation[3]
myFile           <- myFile
myDir.import     <- file.path(myDir.BASE, "Data2_QC")
myDir.export     <- file.path(myDir.BASE, "Data3_Aggregated")
myReport.format  <- "html"
ContDataQC(myData.Operation
           , fun.myDir.import = myDir.import
           , fun.myDir.export = myDir.export
           , fun.myFile = myFile
           , fun.myReport.format = myReport.format)
#> [1] "Total files to process = 2"
#> [1] "Processing item 1 of 2, COMPLETE, QC_Ellis--1.0m_Water_20180524_20180918.csv."
#> [1] "Processing item 2 of 2, COMPLETE, QC_Ellis--3.0m_Water_20180524_20180918.csv."
#> [1] "Task COMPLETE. QC Report.  Total time = 2.43078 secs."
#> [1] "User defined parameters: File Name (QC_Ellis--1.0m_Water_20180524_20180918.csv)."
#> [1] "Processing of 2 of 2 files complete."
#> [1] "Processing of items (n=2) finished.  Total time = 2.905718 secs."
#> [1] "Items COMPLETE = 2."
#> [1] "Items SKIPPPED = 0."
#> [1] "User defined parameters: File Output(DATA_QC_Ellis--1.0m_Water_20180524_20180918.csv)."