#!/usr/bin/perl -w
#
use File::Copy;
use Time::Local;

BEGIN
  {
  # Prepend the netCDF/HDF5 library directories to the dynamic loader path.
  # Preserve any pre-existing LD_LIBRARY_PATH value (the old code spliced in
  # the literal string "LD_LIBRARY_PATH" instead of the variable's contents,
  # discarding whatever was already set).
  my $libs = "/data/starfs1/libs/netcdf-4.2/lib:/data/starfs1/libs/hdf5-1.8.7/lib";
  $ENV{LD_LIBRARY_PATH} = defined $ENV{LD_LIBRARY_PATH} && length $ENV{LD_LIBRARY_PATH}
                        ? "$ENV{LD_LIBRARY_PATH}:$libs"
                        : $libs;
  }


#------------------------------------------------------------------------
# This script captures RAP/HRRR analysis data, extracts selected parameters,
# and writes the selected data into a new HRRR Daily Data File.

printf("\nCapturing RAP/HRRR Analysis data and creating an HRRR Daily Data File\n\n");

##$do_ods = "true";
#$do_ods = "false";


#------------------------------------------------------------------------
# Set the date to process

#$date_to_process = $ARGV[0];

#if (not defined $date_to_process)
#  {
#  $date_to_process = 20211010;
#  }


#------------------------------------------------------------------------
# Set the date to process. By default this will be yesterday. That can be
# overridden by setting $date_to_process to a specific value.

# Compute "yesterday" as an epoch time: now minus one day in seconds.
$data_day = time();
$data_day -= (1 * 86400);

# Only the year/month/day fields are needed to build the date stamp;
# the remaining localtime() fields are ignored.
($sec, $min, $hour, $mday, $mon, $year) = localtime($data_day);
$year += 1900;   # localtime() reports years since 1900
$mon++;          # localtime() months are 0-based

$date_to_process = sprintf("%4d%02d%02d", $year, $mon, $mday);
#$date_to_process = 20190731;



printf("Date to be processed:  %d\n\n", $date_to_process);

# Split the YYYYMMDD value back into year and month components. Use int()
# so the divisions yield whole numbers instead of floating-point results
# (e.g. 2021.1010 for $year previously).
$year  = int($date_to_process / 10000);
$mmdd  = $date_to_process % 10000;
$month = int($mmdd / 100);


#------------------------------------------------------------------------
# Set the locations of the various directories

# $source_dir holds the HRRRToHDDF.x executable; $work_dir is where the
# grib/netCDF intermediates and the in.file/out.file symlinks are created;
# $nprovs_dir receives the finished daily file.
$source_dir = "/data/data065/nprovs/source/data_transfer/hrrr";
$work_dir   = "/data/data065/nprovs/source/data_transfer/hrrr";
$nprovs_dir = "/data/data065/nprovs/data/matchup/DailyFiles_incoming";

# Subsequent file operations use relative paths, so surface a failed chdir
# rather than silently working in the wrong directory.
chdir $work_dir or warn "Cannot chdir to $work_dir: $!";



#------------------------------------------------------------------------
# Create the name of the HDDF daily file. The name is fixed ("hrrr.hddf");
# the date-stamped copy is made later, after all hours are processed.
# (A redundant date argument that sprintf ignored has been removed -- it
# triggered a "Redundant argument" warning under -w.)

$hddf_file = sprintf("%s/hrrr.hddf", $nprovs_dir);

# Remove any HDDF daily file left over from a previous run

unlink $hddf_file;




#------------------------------------------------------------------------
# Call the subroutine processHour which will copy the HRRR Grib file
# and extract the data to a binary file

printf("\nBeginning\n");
system "date";

# Extract data for every hour of the day (00Z-23Z), covering both the
# CONUS and Alaska domains. (Switch the range to 0, 6, 12, 18 to process
# synoptic hours only.)
foreach my $hour_utc (0 .. 23)
  {
  processHour($date_to_process, $hour_utc, "conus",  "",    -135, -60, 55, 20);
  processHour($date_to_process, $hour_utc, "alaska", ".ak",  155, -115, 80, 40);
  }

printf("\nFinished\n");
system "date";



#--------------------------------------------------------------------------------
# Gzip the daily data file and copy it to the main orbital archive and to
# the backup archive.

$archive_dir  = "/data/data599/orbital_archive";
$archive_dir2 = "/data/data215/nprovs/data/orbital_archive";


printf("\n\nCopying the daily file to the archive directory\n\n");

# Make a date-stamped copy so the incoming "hrrr.hddf" file is left in place.
printf("Making a copy of the daily file...\n\n");
$to_file = sprintf("%s/hrrr_%d.hddf", $nprovs_dir, $date_to_process);
copy($hddf_file, $to_file) or warn "Copy $hddf_file -> $to_file failed: $!";

printf("Gzipping the file...\n\n");
# List form avoids passing the path through the shell.
system("gzip", $to_file) == 0 or warn "gzip $to_file failed: $?";

printf("Copying to the main orbital archive...\n");
$gzip_file = sprintf("%s/hrrr_%d.hddf.gz", $nprovs_dir, $date_to_process);
$to_file = sprintf("%s/hrrr/hrrr_%d.hddf.gz", $archive_dir, $date_to_process);
printf("Copying from: %s\n", $gzip_file);
printf("          to: %s\n\n", $to_file);
copy($gzip_file, $to_file) or warn "Copy $gzip_file -> $to_file failed: $!";

printf("Copying to the backup orbital archive...\n");
$to_file = sprintf("%s/hrrr/hrrr_%d.hddf.gz", $archive_dir2, $date_to_process);
printf("Copying from: %s\n", $gzip_file);
printf("          to: %s\n\n", $to_file);
copy($gzip_file, $to_file) or warn "Copy $gzip_file -> $to_file failed: $!";

# Remove the local gzip now that it has been archived
unlink $gzip_file;


#--------------------------------------------------------------------------------
# Remove older files from the archive directories

#my @tempfiles = glob $archive_dir . "/*.hddf.gz";

#foreach $file (@tempfiles)
#  {
#  if (-M $file > 30)
#    {
#    unlink $file;
#    }
#  }

#my @tempfiles2 = glob $archive_dir2 . "/*.hddf.gz";

#foreach $file (@tempfiles2)
#  {
#  if (-M $file > 30)
#    {
#    unlink $file;
#    }
#  }



#--------------------------------------------------------------------------------
# (Disabled) Capture data from the newly created daily data file and write
# the data to ODS files. Note: generateODS below is currently a no-op.

###if ($do_ods == "true")
##  generateODS();


#-------------------------------------------------------------------------
# Remove the reprocessed file

##unlink $ecmwf_daily_file;


# end of main script
#--------------------------------------------------------------------------------
#--------------------------------------------------------------------------------
#--------------------------------------------------------------------------------




# ===========================================================================
# ===========================================================================
# ===========================================================================
# The subroutine processHour downloads the RAP/HRRR grib file for a specific
# hour and region, converts it to netCDF, and appends the extracted data to
# the HDDF daily binary file.

# Download one hour of RAP/HRRR analysis data for one region, convert it to
# netCDF, and append the selected parameters to the HDDF daily file.
#
# Arguments:
#   $date       - YYYYMMDD day to fetch
#   $hour       - 0-23 UTC analysis hour
#   $region     - "conus" or "alaska" (S3 bucket subdirectory)
#   $region_ext - file-name suffix used in the bucket ("" or ".ak")
#   $left_lon, $right_lon, $top_lat, $bottom_lat - bounding box passed to
#                 HRRRToHDDF.x
#
# Reads the globals $hddf_file and $source_dir set by the main script.
sub processHour
  {
  my ($date, $hour, $region, $region_ext, $left_lon, $right_lon, $top_lat, $bottom_lat) = @_;

  printf("\n\n==========================================================\n");
  printf("Processing the RAP/HRRR %d %02dZ %s data\n\n", $date, $hour, $region);

  # Use wget to transfer the grib file from the NOAA HRRR S3 bucket

  my $grib_file = sprintf("hrrr_%s_%d_t%02dz.grib2", $region, $date, $hour);

  unlink $grib_file;

  my $wget = sprintf("wget -q https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.%d/%s/hrrr.t%02dz.wrfprsf00%s.grib2 --no-check-certificate -O %s", $date, $region, $hour, $region_ext, $grib_file);

  printf("%s\n\n", $wget);

  system $wget;


  # Only continue if a grib file was actually transferred. "wget -O" creates
  # the output file even when the download fails, so test for a non-empty
  # file (-s) rather than mere existence (-e).

  if (-s $grib_file)
    {

    # Convert the grib file to a netCDF file

    my $netcdf_file = sprintf("hrrr_%s_%d_t%02dz.nc", $region, $date, $hour);

    unlink $netcdf_file;

    my $unpack_to_netcdf = sprintf("/data/starfs1/utils/wgrib2-v2.0.8/wgrib2 %s -nc_table soundings_nc.table -netcdf %s", $grib_file, $netcdf_file);

    system $unpack_to_netcdf;


    # Run the program to convert the data from the netCDF file to an HDDF
    # daily binary file. HRRRToHDDF.x reads "in.file" and appends to
    # "out.file", so link them to the real paths first.

    printf("\nProcessing file: %s\n", $netcdf_file);

    symlink $netcdf_file, "in.file" or warn "Cannot link $netcdf_file to in.file";
    symlink $hddf_file, "out.file" or warn "Cannot link $hddf_file to out.file";

    system $source_dir . "/HRRRToHDDF.x " . $date . " " . $hour . " " . $left_lon . " " . $right_lon . " " . $top_lat . " " . $bottom_lat;

    unlink "out.file" or warn "Cannot unlink out.file";
    unlink "in.file" or warn "Cannot unlink in.file";


    # Remove the netCDF and grib files

    unlink $netcdf_file;
    unlink $grib_file;
    }  # if (-s $grib_file...
  else
    {
    # Remove the zero-length file a failed transfer may have left behind
    unlink $grib_file;

    printf("The %s data from %d %02dz was not transferred.\n", $region, $date, $hour);
    printf("The data will be skipped.\n\n");
    }
  }




# ===========================================================================
# ===========================================================================
# ===========================================================================
# The subroutine generateODS creates an ODS file from the newly created
# daily file. It then creates images that are used on the NPROVS Daily
# Coverage web page.

sub generateODS
  {
  # NOTE(review): this subroutine is currently a NO-OP -- its entire body is
  # commented out. The commented code below is retained as a template for
  # building an ODS file and coverage images; it still references the older
  # ECMWF paths and variables (e.g. $ecmwf_daily_file, createODSFromECMWF.x)
  # and would need to be adapted for HRRR before being re-enabled.

#  printf("do ods\n");

#  my $hours_to_process = "0 6 12 18";
#  #my $hours_to_process = "12";

  # =============================================================================
  # Create a temporary directory into which the pieces of the file 
  # will be written

#  my $ods_source_dir = "/data/data065/nprovs/source/graphics/ods_capture/ecmwf_o3";
#  my $ods_dir = "/data/data065/nprovs/data/ods/ecmwf";
#  my $ftp_dir = "/net/www/aftp/pub/smcd/opdb/nprovs/ods";

#  $file_name = "ecmwf_" . $date_to_process . ".ods";
#  $ods_file = $ods_dir . "/" . $file_name;


#  chdir $ods_source_dir;
#  mkdir $ods_source_dir . "/ods";

  # =============================================================================
  # Create an ODS file for the ECMWF

#  symlink $ecmwf_daily_file, "in.file" or warn "Cannot link to in.file";
#  symlink $ods_source_dir . "/ods", "out.dir" or warn "Cannot link to out.dir";
#  symlink $ods_source_dir . "/ecmwf_o3.xml", "parm.file" or warn "Cannot link to parm.file";
#  symlink $ods_source_dir . "/parm_defs", "parmdefs.dir" or warn "Cannot link to parmdefs.dir";

#  system $ods_source_dir . "/createODSFromECMWF.x \"ECMWF Analysis\" " . $date_to_process . " " . $hours_to_process;

#  unlink "in.file" or warn "Cannot unlink in.file";
#  unlink "out.dir" or warn "Cannot unlink out.dir";
#  unlink "parm.file" or warn "Cannot unlink parm.file";
#  unlink "parmdefs.dir" or warn "Cannot unlink parmdefs.dir";

  # Copy the filter_options.xml file to the ods directory

#  #system "cp filtering_options.xml ods/filtering_options.xml";

  # Zip everything and then delete the temporary directory and everything in it

#  system "zip -r " . $ods_file . " ods";
#  system "rm -r " . $ods_source_dir . "/ods";

  # =============================================================================
  # Create an image from the ODS file and send the image to the STAR web site
  # for routine monitoring

  # 00Z
#  $input_file   = sprintf("file:%s", $ods_file);
#  $parm_file    = sprintf("parmfile:/data/data065/nprovs/source/graphics/imagemaker/defaults/ecmwf_00z.xml");
#  $output_file  = sprintf("output:/net/www/www/smcd/opdb/nprovs/images/coverage/ecmwf_00z_%d.png", $date_to_process);
#  $thumb_file   = sprintf("thumbnail:/net/www/www/smcd/opdb/nprovs/images/coverage/ecmwf_00z_%d_thumb.png", $date_to_process);
#  $thumb_width  = sprintf("thumbwidth:100");
#  $thumb_height = sprintf("thumbheight:75");
#  $verbose      = sprintf("verbose:no");
#  $file_info    = sprintf("file_info:no");

#  $exec = sprintf("/usr/bin/java -Djava.awt.headless=true -jar /data/data065/nprovs/source/graphics/imagemaker/ImageMaker.jar %s %s %s %s %s %s %s %s", $input_file, $parm_file, $output_file, $thumb_file, $thumb_width, $thumb_height, $verbose, $file_info);

#  system $exec;


  # 06Z
#  $input_file   = sprintf("file:%s", $ods_file);
#  $parm_file    = sprintf("parmfile:/data/data065/nprovs/source/graphics/imagemaker/defaults/ecmwf_06z.xml");
#  $output_file  = sprintf("output:/net/www/www/smcd/opdb/nprovs/images/coverage/ecmwf_06z_%d.png", $date_to_process);
#  $thumb_file   = sprintf("thumbnail:/net/www/www/smcd/opdb/nprovs/images/coverage/ecmwf_06z_%d_thumb.png", $date_to_process);
#  $thumb_width  = sprintf("thumbwidth:100");
#  $thumb_height = sprintf("thumbheight:75");
#  $verbose      = sprintf("verbose:no");
#  $file_info    = sprintf("file_info:no");

#  $exec = sprintf("/usr/bin/java -Djava.awt.headless=true -jar /data/data065/nprovs/source/graphics/imagemaker/ImageMaker.jar %s %s %s %s %s %s %s %s", $input_file, $parm_file, $output_file, $thumb_file, $thumb_width, $thumb_height, $verbose, $file_info);

#  system $exec;


  # 12Z
#  $input_file   = sprintf("file:%s", $ods_file);
#  $parm_file    = sprintf("parmfile:/data/data065/nprovs/source/graphics/imagemaker/defaults/ecmwf_12z.xml");
#  $output_file  = sprintf("output:/net/www/www/smcd/opdb/nprovs/images/coverage/ecmwf_12z_%d.png", $date_to_process);
#  $thumb_file   = sprintf("thumbnail:/net/www/www/smcd/opdb/nprovs/images/coverage/ecmwf_12z_%d_thumb.png", $date_to_process);
#  $thumb_width  = sprintf("thumbwidth:100");
#  $thumb_height = sprintf("thumbheight:75");
#  $verbose      = sprintf("verbose:no");
#  $file_info    = sprintf("file_info:no");

#  $exec = sprintf("/usr/bin/java -Djava.awt.headless=true -jar /data/data065/nprovs/source/graphics/imagemaker/ImageMaker.jar %s %s %s %s %s %s %s %s", $input_file, $parm_file, $output_file, $thumb_file, $thumb_width, $thumb_height, $verbose, $file_info);

#  system $exec;


  # 18Z
#  $input_file   = sprintf("file:%s", $ods_file);
#  $parm_file    = sprintf("parmfile:/data/data065/nprovs/source/graphics/imagemaker/defaults/ecmwf_18z.xml");
#  $output_file  = sprintf("output:/net/www/www/smcd/opdb/nprovs/images/coverage/ecmwf_18z_%d.png", $date_to_process);
#  $thumb_file   = sprintf("thumbnail:/net/www/www/smcd/opdb/nprovs/images/coverage/ecmwf_18z_%d_thumb.png", $date_to_process);
#  $thumb_width  = sprintf("thumbwidth:100");
#  $thumb_height = sprintf("thumbheight:75");
#  $verbose      = sprintf("verbose:no");
#  $file_info    = sprintf("file_info:no");

#  $exec = sprintf("/usr/bin/java -Djava.awt.headless=true -jar /data/data065/nprovs/source/graphics/imagemaker/ImageMaker.jar %s %s %s %s %s %s %s %s", $input_file, $parm_file, $output_file, $thumb_file, $thumb_width, $thumb_height, $verbose, $file_info);

#  system $exec;


  # =============================================================================
  # Copy the ODS file to the FTP site

#  $ftp_file = $ftp_dir . "/" . $file_name;

#  printf("Copying from: %s\n", $ods_file);
#  printf("          to: %s\n\n", $ftp_file);

#  copy($ods_file, $ftp_file);
  }

# end of file
