#!/usr/bin/perl -w
#
use File::Copy;
use Time::Local;

BEGIN
  {
  # Prepend the netCDF and HDF5 library directories to LD_LIBRARY_PATH so
  # that the child processes spawned below (wget, wgrib2, HRRRToHDDF.x)
  # can resolve their shared libraries. The original code accidentally
  # inserted the literal string "LD_LIBRARY_PATH" instead of interpolating
  # the existing value; preserve any pre-existing path here instead.
  my @lib_dirs = ("/data/starfs1/libs/netcdf-4.2/lib",
                  "/data/starfs1/libs/hdf5-1.8.7/lib");
  my $existing = $ENV{LD_LIBRARY_PATH};
  $ENV{LD_LIBRARY_PATH} =
    join(":", (defined($existing) && $existing ne "" ? ($existing) : ()), @lib_dirs);
  }


#------------------------------------------------------------------------
# This script captures HRRR analysis data, extracts selected parameters, and writes
# the selected data into a new HRRR Daily Data File.

printf("\nCapturing HRRR Analysis data and creating an HRRR Daily Data File\n\n");


#------------------------------------------------------------------------
# Set the date to process

# Calculate the date of the data to be processed. By default, this
# is yesterday. To override, uncomment the assignment below and set
# $date_to_process to the date in the form YYYYMMDD.

# Anchor the calculation at local noon today, then step back exactly one
# day. Subtracting 86400 directly from time() can land on the wrong
# calendar day near midnight around DST transitions, because a local day
# is not always 86400 seconds long.
($mday, $mon, $year) = (localtime(time()))[3, 4, 5];
$noon_today = timelocal(0, 0, 12, $mday, $mon, $year);

($mday, $mon, $year) = (localtime($noon_today - 86400))[3, 4, 5];
$year += 1900;   # localtime years are offset from 1900
$mon++;          # localtime months are 0-based

$date_to_process = sprintf("%4d%02d%02d", $year, $mon, $mday);
#$date_to_process = 20170521;

printf("Date to be processed:  %d\n\n", $date_to_process);


#------------------------------------------------------------------------
# Set the locations of the various directories

$source_dir = "/data/data065/nprovs/source/data_transfer/hrrr";
$work_dir   = "/data/data065/nprovs/source/data_transfer/hrrr";
$nprovs_dir = "/data/data065/nprovs/data/matchup/DailyFiles_incoming";

# All intermediate grib/netCDF files are created relative to the work
# directory; fail early if we cannot get there, otherwise they would be
# scattered into whatever directory the script was launched from.
chdir $work_dir or die "Cannot chdir to $work_dir: $!";


# Create the name of the HDDF daily file

$hddf_file = sprintf("%s/hrrr.hddf", $nprovs_dir);

# Remove the previous HDDF daily file if it already exists
# (each hour's data is appended to this file by HRRRToHDDF.x)

unlink $hddf_file;



#------------------------------------------------------------------------
# Extract every analysis hour of the day: processHour copies the HRRR
# grib file and appends the extracted data to the daily binary file,
# once for each domain.

foreach my $analysis_hour (0 .. 23)
  {
  processHour($date_to_process, $analysis_hour, "conus", "");
  processHour($date_to_process, $analysis_hour, "alaska", ".ak");
  }



#--------------------------------------------------------------------------------
# Gzip the daily data file and copy it to the orbital archive

# Copy the daily file to the orbital archive and the backup archive

$archive_dir  = "/data/data599/orbital_archive";
$archive_dir2 = "/data/data215/nprovs/data/orbital_archive";    # backup archive (copy currently disabled below)


printf("\n\nCopying the daily file to the archive directory\n\n");

printf("Making a copy of the daily file...\n\n");
$to_file = sprintf("%s/hrrr_%d.hddf", $nprovs_dir, $date_to_process);
copy($hddf_file, $to_file) or warn "Cannot copy $hddf_file to $to_file: $!";

printf("Gzipping the file...\n\n");
# List form of system bypasses the shell; check the exit status so a
# failed gzip (e.g. missing input file) is reported rather than silent.
system("gzip", $to_file) == 0 or warn "gzip $to_file failed (status $?)";

printf("Copying to the main orbital archive...\n");
$gzip_file = sprintf("%s/hrrr_%d.hddf.gz", $nprovs_dir, $date_to_process);
$to_file = sprintf("%s/hrrr/hrrr_%d.hddf.gz", $archive_dir, $date_to_process);
printf("Copying from: %s\n", $gzip_file);
printf("          to: %s\n\n", $to_file);
copy($gzip_file, $to_file) or warn "Cannot copy $gzip_file to $to_file: $!";

#printf("Copying to the backup orbital archive...\n");
#$to_file = sprintf("%s/hrrr/hrrr_%d.hddf.gz", $archive_dir2, $date_to_process);
#printf("Copying from: %s\n", $gzip_file);
#printf("          to: %s\n\n", $to_file);
#copy($gzip_file, $to_file);

unlink $gzip_file;



#--------------------------------------------------------------------------------
# Capture data from the HRRR daily data file and write the data to an ODS file.
# Warn (but do not abort) if the capture script exits with a nonzero status.

system("/data/data065/nprovs/scripts/graphics/run_hrrr_ods_capture") == 0
  or warn "run_hrrr_ods_capture failed (status $?)";


# end of main script
#--------------------------------------------------------------------------------
#--------------------------------------------------------------------------------




# ===========================================================================
# ===========================================================================
# ===========================================================================
# The subroutine processHour extracts data for a specific hour from the
# HRRR grib file and writes the data to a binary file.
#
# Arguments:
#   $date       - date to process, as YYYYMMDD
#   $hour       - analysis hour, 0-23
#   $region     - domain name used in URLs and filenames ("conus" or "alaska")
#   $region_ext - extension inserted into the remote filename ("" or ".ak")
#
# Reads the file-level globals $source_dir and $hddf_file. Each successful
# hour is appended to $hddf_file by HRRRToHDDF.x.

sub processHour
  {
  my ($date, $hour, $region, $region_ext) = @_;

  printf("\n\n==========================================================\n");
  printf("Processing the RAP/HRRR %d %02dZ %s data\n\n", $date, $hour, $region);

  # Use wget to transfer the file

  my $grib_file = sprintf("hrrr_%s_%d_t%02dz.grib2", $region, $date, $hour);

  unlink $grib_file;

  #my $wget = sprintf("wget -q https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.%d/%s/hrrr.t%02dz.wrfprsf00%s.grib2 --no-check-certificate -O %s", $date, $region, $hour, $region_ext, $grib_file);
  my $wget = sprintf("wget -q https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.%d/%s/hrrr.t%02dz.wrfnatf00%s.grib2 --no-check-certificate -O %s", $date, $region, $hour, $region_ext, $grib_file);

  printf("%s\n\n", $wget);

  system($wget) == 0 or warn "wget failed (status $?)";


  # Only continue if a non-empty grib file was transferred.
  # NOTE: "wget -O" creates the output file even when the download fails,
  # so test the size with -s rather than mere existence with -e.

  if (-s $grib_file)
    {

    # Convert the grib file to a netCDF file

    my $netcdf_file = sprintf("hrrr_%s_%d_t%02dz.nc", $region, $date, $hour);

    unlink $netcdf_file;

    my $unpack_to_netcdf = sprintf("/data/starfs1/utils/wgrib2-v2.0.8/wgrib2 %s -nc_table %s/soundings_nc.table -netcdf %s", $grib_file, $source_dir, $netcdf_file);

    system($unpack_to_netcdf) == 0 or warn "wgrib2 conversion failed (status $?)";


    # Run the program to convert the data from the netCDF file to an HDDF daily binary file.
    # HRRRToHDDF.x reads "in.file" and appends to "out.file", so link them
    # to the real netCDF input and the daily HDDF output.

    printf("\nProcessing file: %s\n", $netcdf_file);

    symlink $netcdf_file, "in.file" or warn "Cannot link $netcdf_file to in.file";
    symlink $hddf_file, "out.file" or warn "Cannot link $hddf_file to out.file";

    system($source_dir . "/HRRRToHDDF.x " . $date . " " . $hour) == 0
      or warn "HRRRToHDDF.x failed (status $?)";

    unlink "out.file" or warn "Cannot unlink out.file";
    unlink "in.file" or warn "Cannot unlink in.file";


    # Remove the netCDF and grib files

    unlink $netcdf_file;
    unlink $grib_file;
    }  # if (-s $grib_file...
  else
    {
    printf("The %s data from %d %02dz was not transferred.\n", $region, $date, $hour);
    printf("The data will be skipped.\n\n");
    }
  }

# end of file
