#!/usr/bin/perl -w
#
use File::Copy;
use Time::Local;

BEGIN
  {
  # Append the netCDF and HDF5 library directories to the runtime
  # linker search path.  NOTE: the original code assigned the literal
  # string "LD_LIBRARY_PATH:..." -- it never interpolated the existing
  # $ENV{LD_LIBRARY_PATH} value, so any inherited path was lost and a
  # bogus "LD_LIBRARY_PATH" component was inserted.  Join only defined,
  # non-empty components so an unset variable does not leave a leading
  # ":" (which the loader treats as the current directory).
  $ENV{LD_LIBRARY_PATH} = join(":",
      grep { defined($_) && length($_) }
           $ENV{LD_LIBRARY_PATH},
           "/data/starfs1/libs/netcdf-4.2/lib",
           "/data/starfs1/libs/hdf5-1.8.7/lib");
  }


#------------------------------------------------------------------------
# This script captures HRRR analysis data, extracts selected parameters, and writes
# the selected data into a new HRRR Daily Data File.

printf("\nCapturing HRRR Analysis data and creating an HRRR Daily Data File\n\n");

# Boolean flags.  NOTE(review): these were previously inverted
# ($true was 0, $false was 1); that only worked because every test
# compared symbolically with "== $true".  Conventional values are
# used here, which preserves the outcome of those comparisons.
$true  = 1;
$false = 0;

#$do_ods = $true;
$do_ods = $false;    # set to $true to also generate an ODS file (see generateODS)


#------------------------------------------------------------------------
# Set the date to process (YYYYMMDD), from the command line or a
# hard-coded default.

$date_to_process = $ARGV[0];

if (not defined $date_to_process)
  {
  $date_to_process = 20210719;
  #$date_to_process = 20220117;
  }

printf("Date to be processed:  %d\n\n", $date_to_process);

# Split the date into year / month components.  int() is required
# because Perl's "/" performs floating-point division (the original
# code produced e.g. 2021.0719 for $year).
$year  = int($date_to_process / 10000);
$mmdd  = $date_to_process % 10000;
$month = int($mmdd / 100);


#------------------------------------------------------------------------
# Set the locations of the various directories

$source_dir = "/data/data065/nprovs/source/data_transfer/hrrr";
$work_dir   = "/data/data065/nprovs/source/data_transfer/hrrr/reprocess";
$nprovs_dir = "/data/data065/nprovs/data/matchup/DailyFiles_incoming";

chdir $work_dir;


# Create the name of the HDDF daily file

$hddf_file = sprintf("%s/hrrr_%d.hddf", $nprovs_dir, $date_to_process);

# Remove the previous HDDF daily file if it already exists

unlink $hddf_file;



#------------------------------------------------------------------------
# For every hour of the day, copy the HRRR Grib file and extract its
# data into the daily binary file, once for CONUS and once for Alaska.

foreach my $hour (0 .. 23)
#foreach my $hour (0 .. 1)
  {
  processHour($date_to_process, $hour, "conus", "");
  processHour($date_to_process, $hour, "alaska", ".ak");
  }



#--------------------------------------------------------------------------------
# Gzip the daily data file and copy it to the orbital archive

# Copy the daily file to the orbital archive and the backup archive

$archive_dir  = "/data/data599/orbital_archive";
$archive_dir2 = "/data/data215/nprovs/data/orbital_archive";


printf("\n\nCopying the daily file to the archive directory\n\n");

#printf("Making a copy of the daily file...\n\n");
#$to_file = sprintf("%s/hrrr_%d.hddf", $nprovs_dir, $date_to_process);
#copy($hddf_file, $to_file);

printf("Gzipping the file...\n\n");
# List-form system avoids invoking a shell on the file name, and the
# return value is checked instead of being silently discarded.
system("gzip", $hddf_file) == 0
  or warn "gzip of $hddf_file failed: $?";

printf("Copying to the main orbital archive...\n");
$gzip_file = sprintf("%s/hrrr_%d.hddf.gz", $nprovs_dir, $date_to_process);
$to_file = sprintf("%s/hrrr/hrrr_%d.hddf.gz", $archive_dir, $date_to_process);
printf("Copying from: %s\n", $gzip_file);
printf("          to: %s\n\n", $to_file);
copy($gzip_file, $to_file)
  or warn "Copy of $gzip_file to $to_file failed: $!";

#printf("Copying to the backup orbital archive...\n");
#$to_file = sprintf("%s/hrrr/hrrr_%d.hddf.gz", $archive_dir2, $date_to_process);
#printf("Copying from: %s\n", $gzip_file);
#printf("          to: %s\n\n", $to_file);
#copy($gzip_file, $to_file);

# Restore the uncompressed daily file (it is removed again below).
system("gunzip", $gzip_file) == 0
  or warn "gunzip of $gzip_file failed: $?";


#--------------------------------------------------------------------------------
# Capture data from the HRRR daily data file and write the data to an ODS file

#if ($do_ods == $true)
#  {
#  generateODS();
#  }


#-------------------------------------------------------------------------
# Remove the reprocessed file

unlink $hddf_file;




# end of main script
#--------------------------------------------------------------------------------
#--------------------------------------------------------------------------------
#--------------------------------------------------------------------------------




# ===========================================================================
# ===========================================================================
# ===========================================================================
# The subroutine processHour extracts data for a specific hour from the
# HRRR grib file and writes the data to a binary file.
#
# Arguments:
#   $date       - date being processed (YYYYMMDD)
#   $hour       - hour of the day (0-23)
#   $region     - region name used in file names and the S3 URL
#                 ("conus" or "alaska")
#   $region_ext - extra extension in the remote file name ("" or ".ak")
#
# Relies on the globals $source_dir (helper programs and the nc table)
# and $hddf_file (the daily output file, written via the "out.file" link).

sub processHour
  {
  my ($date, $hour, $region, $region_ext) = @_;

  printf("\n\n==========================================================\n");
  printf("Processing the RAP/HRRR %d %02dZ %s data\n\n", $date, $hour, $region);

  # Use wget to transfer the file.  The working variables below were
  # previously unintended globals; they are now lexically scoped.

  my $grib_file = sprintf("hrrr_%s_%d_t%02dz.grib2", $region, $date, $hour);

  unlink $grib_file;

  #my $wget = sprintf("wget -q https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.%d/%s/hrrr.t%02dz.wrfprsf00%s.grib2 --no-check-certificate -O %s", $date, $region, $hour, $region_ext, $grib_file);
  my $wget = sprintf("wget -q https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.%d/%s/hrrr.t%02dz.wrfnatf00%s.grib2 --no-check-certificate -O %s", $date, $region, $hour, $region_ext, $grib_file);

  printf("%s\n\n", $wget);

  system $wget;


  # Only continue if a grib file was actually transferred.  "wget -O"
  # creates an empty file even when the download fails, so also require
  # a non-empty file (-s), not mere existence.

  if (-e $grib_file && -s $grib_file)
    {

    # Convert the grib file to a netCDF file

    my $netcdf_file = sprintf("hrrr_%s_%d_t%02dz.nc", $region, $date, $hour);

    unlink $netcdf_file;

    my $unpack_to_netcdf = sprintf("/data/starfs1/utils/wgrib2-v2.0.8/wgrib2 %s -nc_table %s/soundings_nc.table -netcdf %s", $grib_file, $source_dir, $netcdf_file);

    system $unpack_to_netcdf;


    # Run the program to convert the data from the netCDF file to an
    # HDDF daily binary file.  HRRRToHDDF.x reads "in.file" and writes
    # to "out.file", so temporary symlinks are created around the call.

    printf("\nProcessing file: %s\n", $netcdf_file);

    symlink $netcdf_file, "in.file" or warn "Cannot link $netcdf_file to in.file";
    symlink $hddf_file, "out.file" or warn "Cannot link $hddf_file to out.file";

    system $source_dir . "/HRRRToHDDF.x " . $date . " " . $hour;

    unlink "out.file" or warn "Cannot unlink out.file";
    unlink "in.file" or warn "Cannot unlink in.file";


    # Remove the netCDF and grib files

    unlink $netcdf_file;
    unlink $grib_file;
    }  # if (-e $grib_file...
  else
    {
    printf("The %s data from %d %02dz was not transferred.\n", $region, $date, $hour);
    printf("The data will be skipped.\n\n");
    }
  }



# ===========================================================================
# ===========================================================================
# ===========================================================================
# The subroutine generateODS creates an ODS file from the newly created
# daily file. It then creates images that are used on the NPROVS Daily
# Coverage web page.
#
# Relies on the globals $hddf_file (the daily input file) and
# $date_to_process (YYYYMMDD, used to build the output file name).

sub generateODS
  {
  my @hours = (0 .. 23);
  #my @hours = (0, 1);

  # Build a space-separated list of the hours for the command line.
  my $hours_to_process = "";

  foreach (@hours)
    {
    $hours_to_process = sprintf("%s%d ", $hours_to_process, $_);
    }

  # =============================================================================
  # Create a temporary directory into which the pieces of the file 
  # will be written

  my $ods_source_dir = "/data/data065/nprovs/source/graphics/ods_capture/hrrr";
  my $ods_work_dir   = "/data/data065/nprovs/source/data_transfer/hrrr/reprocess";
  my $ods_xml_dir    = "/data/data065/nprovs/source/graphics/ods_capture/hrrr";
  my $ods_dir        = "/data/data599/ods/hrrr";

  # These were previously unintended globals; now lexically scoped.
  my $file_name = "hrrr_" . $date_to_process . ".ods";
  my $ods_file = $ods_dir . "/" . $file_name;

  chdir $ods_work_dir;
  mkdir $ods_work_dir . "/ods";

  # =============================================================================
  # Create the ODS file.  createODSFromHRRR.x reads "in.file" and
  # "parm.file"/"parmdefs.dir" and writes its pieces into "out.dir".

  symlink $hddf_file, "in.file" or warn "Cannot link to in.file";
  symlink $ods_work_dir . "/ods", "out.dir" or warn "Cannot link to out.dir";
  symlink $ods_source_dir . "/hrrr.xml", "parm.file" or warn "Cannot link to parm.file";
  symlink $ods_source_dir . "/parm_defs", "parmdefs.dir" or warn "Cannot link to parmdefs.dir";

  system $ods_source_dir . "/createODSFromHRRR.x \"RAP/HRRR\" " . $date_to_process . " " . $hours_to_process;

  unlink "in.file" or warn "Cannot unlink in.file";
  unlink "out.dir" or warn "Cannot unlink out.dir";
  unlink "parm.file" or warn "Cannot unlink parm.file";
  unlink "parmdefs.dir" or warn "Cannot unlink parmdefs.dir";

  # Copy the filtering_options.xml file to the ods directory.  Use
  # File::Copy (already loaded) instead of shelling out to cp, and
  # report a failure instead of ignoring it.
  copy($ods_source_dir . "/filtering_options.xml", "ods/filtering_options.xml")
    or warn "Cannot copy filtering_options.xml: $!";

  # =============================================================================
  # Zip everything and then delete the temporary directory and everything in it

  unlink $ods_file;
  system "zip -r " . $ods_file . " ods";

  printf("\nRemoving the ODS directory\n");
  system "rm -r " . $ods_work_dir . "/ods";

  # =============================================================================
  # For each hour, create images for the daily monitoring web page
  # (image generation is currently disabled).

  foreach (@hours)
    {
    printf("Process Hour ODS:  %d\n", $_);

#    $input_file   = sprintf("file:%s", $ods_file);
#    $parm_file    = sprintf("parmfile:/data/data065/nprovs/source/graphics/imagemaker/defaults/hrrr_%02dz.xml", $_);
#    $output_file  = sprintf("output:/net/www/www/smcd/opdb/nprovs/images/coverage/hrrr_%02dz_%d.png", $_, $date_to_process);
#    $thumb_file   = sprintf("thumbnail:/net/www/www/smcd/opdb/nprovs/images/coverage/hrrr_%02dz_%d_thumb.png", $_, $date_to_process);
#    $thumb_width  = sprintf("thumbwidth:100");
#    $thumb_height = sprintf("thumbheight:75");
#    $verbose      = sprintf("verbose:no");
#    $file_info    = sprintf("file_info:no");

#    $exec = sprintf("/usr/bin/java -Djava.awt.headless=true -jar /data/data065/nprovs/source/graphics/imagemaker/ImageMaker.jar %s %s %s %s %s %s %s %s", $input_file, $parm_file, $output_file, $thumb_file, $thumb_width, $thumb_height, $verbose, $file_info);

#    system $exec;
    }



  # =============================================================================
  # Copy the ODS file to the FTP site (currently disabled)

#  $ftp_file = $ftp_dir . "/" . $file_name;

#  printf("Copying from: %s\n", $ods_file);
#  printf("          to: %s\n\n", $ftp_file);

#  copy($ods_file, $ftp_file);
  }

# end of file
