#!/usr/bin/perl -w
#
use File::Copy;
use Time::Local;
use Net::FTP;

#------------------------------------------------------------------------
# This script captures GFS analysis data, extracts selected parameters, and writes
# the selected data into a new GFS Daily Data File.

# Announce the run on stdout (no format specifiers, so plain print suffices)
print "\nCapturing GFS Analysis data and creating a GFS Daily Data File\n\n";


#-------------------------------------------------------------------------
# Configuration: the date to be processed, the locations of the input
# data, and the locations of the output.

# Set $year, $mon and $mday to process a specific day.  Set all three
# to 0 (the commented-out lines below) to process yesterday's data,
# which is the normal operational mode.

# ***********************
# NOTE(review): a specific date (2021-09-18) is currently hard-coded,
# so every run reprocesses that day instead of yesterday.  If this was
# a one-off reprocessing, restore the "$year = 0" settings below.
# ***********************
$year = 2021;
$mon  = 9;
$mday = 18;
#$year = 0;
#$mon  = 0;
#$mday = 0;
# ***********************
# ***********************
# ***********************


# Incoming directory where the new GFS Daily Data File is written
$nprovs_dir   = "/data/data065/nprovs/data/matchup/DailyFiles_incoming";
# NOTE(review): $archive_dir and $archive_dir2 are reassigned to different
# paths near the bottom of the script before they are used for the archive
# copies, so the values below appear to be dead configuration -- confirm.
#$archive_dir2  = "/data/data215/nprovs/data/orbital_archive/gfs_v2";
$archive_dir2  = "/data/data599/orbital_archive/gfs_v2";
$archive_dir = "/data/data065/nprovs/data/matchup/DailyFiles_incoming/archive/gfs_v2";

# Location of the extraction/conversion executables, and the working
# directory the script chdirs into (fort.NN links are created there)
$source_dir = "/data/data065/nprovs/source/data_transfer/gfs";
$work_dir = "/data/data065/nprovs/source/data_transfer/gfs";

# ODS (observation data set) tool location, output directory, and the
# public FTP directory the finished .ods file is copied to
$ods_source_dir = "/data/data065/nprovs/source/graphics/ods_capture/gfs_v2";
$ods_dir = "/data/data065/nprovs/data/ods/gfs_v2";
$ftp_dir = "/net/www/aftp/pub/smcd/opdb/nprovs/ods";

# Space-separated synoptic hours passed on the createODSFromGFS.x command line
my $hours_to_process = "0 6 12 18";


chdir $work_dir or warn "Cannot chdir to $work_dir: $!";


#-------------------------------------------------------------------------
# Determine the date to process.  If $year, $mon and $mday were all set
# above, that specific date is used; otherwise the date defaults to
# yesterday, taken from the system clock.

# NOTE(review): this flag previously carried the opposite of its name
# (it was set to 1 on the *yesterday* path).  It is not referenced
# anywhere later in this file, so the value is corrected here to match
# the name.
$process_specific_date = 1;

if (($year == 0) || ($mon == 0) || ($mday == 0))
  {
  # No explicit date configured: fall back to yesterday at 12:00
  $data_day = time();
  $data_day -= 86400;                # one day, in seconds

  ($sec, $min, $hour, $mday, $mon, $year, $wday, $yday, $isdst) = localtime($data_day);
  $year += 1900;                     # localtime() returns years since 1900
  $mon++;                            # localtime() months are 0-based
  $sec   = 0;
  $min   = 0;
  $hour  = 12;
  $wday  = 0;
  $yday  = 0;
  $isdst = 0;

  $process_specific_date = 0;
  }

# Pack the date as YYYYMMDD, then split it into the year and MMDD pieces
# expected on stdin by the Fortran executables further down.

$date_to_process = ($year * 10000) + ($mon * 100) + $mday;

$year  = int($date_to_process / 10000);   # int(): Perl "/" is floating-point
$mmdd  = $date_to_process % 10000;

printf("Date to be processed:  %d\n\n", $date_to_process);


#------------------------------------------------------------------------
# Call the subroutine processHour for each synoptic hour; it copies the
# GFS GRIB files and extracts the selected data to a binary file.

processHour($date_to_process, "00");
processHour($date_to_process, "06");
processHour($date_to_process, "12");
processHour($date_to_process, "18");


#------------------------------------------------------------------------
# Run the program that copies the per-hour binary data into a single
# GFS Daily Data File.

# Remove the previous GFS Daily Data File if it exists

$gfs_daily_file = $nprovs_dir . "/gfs_v2_" . $date_to_process . ".gadf";

if (-e $gfs_daily_file)
  {
  unlink $gfs_daily_file or warn "Cannot remove $gfs_daily_file: $!";
  }


# Remove the previous log file (it may not exist; that is not an error)

unlink $work_dir . "/gfsToBinary.log";


# Run the program.  The Fortran executable reads and writes through
# fort.NN links in the current directory: fort.11 is the daily file to
# create, and fort.20 - fort.23 are the 00z/06z/12z/18z binary files
# produced by processHour().

symlink $gfs_daily_file, "fort.11" or warn "Cannot link to fort.11";

my @synoptic_hours = ("00", "06", "12", "18");

my $fort_unit = 20;
foreach my $hh (@synoptic_hours)
  {
  symlink $work_dir . "/gfsToBinary_output." . $hh . "z.bin", "fort." . $fort_unit
    or warn "Cannot link to fort." . $fort_unit;
  $fort_unit++;
  }

# The executable reads the year and the MMDD date from standard input

$exec = sprintf("%s/gfsBinaryToDailyFile.x <<EOD > gfsBinaryToDailyFile.log\n%04d\n%04d\nEOD", $source_dir, $year, $mmdd);

system $exec;

# Remove the fort.NN links

unlink "fort.11";
foreach my $unit (20 .. 23)
  {
  unlink "fort." . $unit;
  }

# Remove the per-hour binary files

foreach my $hh (@synoptic_hours)
  {
  unlink $work_dir . "/gfsToBinary_output." . $hh . "z.bin";
  }


# =============================================================================
# Create an ODS file (a zipped data archive) from the GFS Daily Data File.
# createODSFromGFS.x reads its inputs through fixed link names in the
# current directory: in.file, out.dir, parm.file and parmdefs.dir.

$ods_file = sprintf("%s/gfs_v2_%d.ods", $ods_dir, $date_to_process);

symlink $gfs_daily_file, "in.file" or warn "Cannot link to in.file";
symlink $ods_source_dir . "/ods", "out.dir" or warn "Cannot link to out.dir";
symlink $ods_source_dir . "/gfs.xml", "parm.file" or warn "Cannot link to parm.file";
symlink $ods_source_dir . "/parm_defs", "parmdefs.dir" or warn "Cannot link to parmdefs.dir";

# $hours_to_process is the space-separated hour list ("0 6 12 18")
system $ods_source_dir . "/createODSFromGFS.x \"GFS (v2) Analysis\" " . $date_to_process . " " . $hours_to_process;

unlink "in.file" or warn "Cannot unlink in.file";
unlink "out.dir" or warn "Cannot unlink out.dir";
unlink "parm.file" or warn "Cannot unlink parm.file";
unlink "parmdefs.dir" or warn "Cannot unlink parmdefs.dir";

# Copy the filter_options.xml file to the ods directory

#system "cp filtering_options.xml ods/filtering_options.xml";

# Zip everything and then delete the temporary directory and everything in it.
# NOTE(review): zip is given the relative name "ods" while the cleanup removes
# "$ods_source_dir/ods"; this only lines up if the current directory contains
# an "ods" entry (e.g. one created by the executable) -- confirm against
# createODSFromGFS.x.

system "zip -r " . $ods_file . " ods";
system "rm -r " . $ods_source_dir . "/ods";



# =============================================================================
# Create an image from the ODS file for each synoptic hour and place the
# images on the STAR web site for routine monitoring.  The four hours
# differed only in the parameter file and the output file names, so they
# are generated in a single loop.

foreach my $hh ("00", "06", "12", "18")
  {
  # Each argument to ImageMaker.jar is a "keyword:value" token
  my $input_file   = sprintf("file:%s", $ods_file);
  my $parm_file    = sprintf("parmfile:/data/data065/nprovs/source/graphics/imagemaker/defaults/gfs_%sz.xml", $hh);
  my $output_file  = sprintf("output:/net/www/www/smcd/opdb/nprovs/images/coverage/gfs_v2_%sz_%d.png", $hh, $date_to_process);
  my $thumb_file   = sprintf("thumbnail:/net/www/www/smcd/opdb/nprovs/images/coverage/gfs_v2_%sz_%d_thumb.png", $hh, $date_to_process);
  my $thumb_width  = "thumbwidth:100";
  my $thumb_height = "thumbheight:75";
  my $verbose      = "verbose:no";
  my $file_info    = "file_info:no";

  my $exec = sprintf("/usr/bin/java -Djava.awt.headless=true -jar /data/data065/nprovs/source/graphics/imagemaker/ImageMaker.jar %s %s %s %s %s %s %s %s", $input_file, $parm_file, $output_file, $thumb_file, $thumb_width, $thumb_height, $verbose, $file_info);

  system $exec;
  }




# =============================================================================
# Copy the ODS file to the FTP site

$ftp_file = $ftp_dir . "/gfs_v2_" . $date_to_process . ".ods";

printf("Copying from: %s\n", $ods_file);
printf("          to: %s\n\n", $ftp_file);

# File::Copy::copy returns false on failure; report it rather than
# failing silently
copy($ods_file, $ftp_file) or warn "Cannot copy $ods_file to $ftp_file: $!";



# =============================================================================
# Search the ftp directory and remove any ODS file that was created
# more than 5 days ago (-M is the file's age in days)

my @tempftpfiles = glob $ftp_dir . "/gfs*.ods";

foreach $file (@tempftpfiles)
  {
  if (-M $file > 4)
    {
    unlink $file or warn "Cannot remove $file: $!";
    }
  }



# =============================================================================
# Prune the ods directory: delete any gfs*.ods file that was created
# more than 10 days ago (modification age, via -M, greater than 9 days)

my @stale_ods_files = grep { -M $_ > 9 } glob($ods_dir . "/gfs*.ods");

unlink $_ for @stale_ods_files;



#--------------------------------------------------------------------------------
# Gzip the daily data file and copy it to the orbital archives

# NOTE(review): these assignments clobber the $archive_dir / $archive_dir2
# values configured at the top of the script (and swap which tree is the
# "main" vs. the "backup" archive).  Behavior is preserved here, but the
# two configuration points should be consolidated.
$archive_dir  = "/data/data215/nprovs/data/orbital_archive";
$archive_dir2 = "/data/data065/nprovs/data/matchup/DailyFiles_incoming/archive";


printf("\nCopying the daily files to the archive directory\n\n");

# gzip replaces the .gadf file in place with a .gz file
printf("Gzipping the file...\n\n");
$gzip_file = sprintf("gfs_v2_%d.gadf", $date_to_process) . ".gz";
system "gzip " . $gfs_daily_file;

printf("Copying to the main orbital archive...\n");
$from_file = $nprovs_dir . "/" . $gzip_file;
$to_file = $archive_dir . "/gfs_v2/" . $gzip_file;
printf("Copying from: %s\n", $from_file);
printf("          to: %s\n\n", $to_file);
# File::Copy::copy returns false on failure; report it rather than
# failing silently
copy($from_file, $to_file) or warn "Cannot copy $from_file to $to_file: $!";

printf("Copying to the backup orbital archive...\n");
$to_file = $archive_dir2 . "/gfs_v2/" . $gzip_file;
printf("Copying from: %s\n", $from_file);
printf("          to: %s\n\n", $to_file);
copy($from_file, $to_file) or warn "Cannot copy $from_file to $to_file: $!";


# end of main script
#--------------------------------------------------------------------------------
#--------------------------------------------------------------------------------
#--------------------------------------------------------------------------------




# ===========================================================================
# ===========================================================================
# ===========================================================================
# The subroutine processHour transfers the two GFS GRIB files for one
# synoptic hour from the NCEP FTP server, runs WGRIB to extract the
# selected fields, and converts the WGRIB output into the binary format
# consumed later by gfsBinaryToDailyFile.x.
#
# Arguments:
#   $date - the date being processed, as YYYYMMDD
#   $hour - the synoptic hour as a two-digit string ("00", "06", "12", "18")
#
# Reads the globals $work_dir, $source_dir, $year and $mmdd.  On success
# it leaves $work_dir/gfsToBinary_output.<HH>z.bin behind; on any FTP
# failure the hour is skipped with a message instead of aborting the run.

sub processHour
  {
  my ($date, $hour) = @_;

  printf("\n\n==========================================================\n");
  printf("Processing the GFS %s %sZ data\n\n", $date, $hour);

  # Set up the FTP directory and file names.  $hour is a numeric-looking
  # string, so %02d formats it back to its two-digit form.

  my $ftp_dir        = sprintf("/pub/data/nccf/com/gfs/prod/gfs.%s/%02d/atmos", $date, $hour);
  my $ftp_file1      = sprintf("%s/gfs.t%02dz.pgrb2.0p25.f000", $ftp_dir, $hour);
  my $ftp_file2      = sprintf("%s/gfs.t%02dz.pgrb2b.0p25.f000", $ftp_dir, $hour);
  my $from_ftp_file1 = sprintf("%s/from_ftp_file_1_%02dz", $work_dir, $hour);
  my $from_ftp_file2 = sprintf("%s/from_ftp_file_2_%02dz", $work_dir, $hour);

  # Copy the GFS data via anonymous FTP

  my $host = "ftpprd.ncep.noaa.gov";

  my $ftp = Net::FTP->new($host) or die "Cannot open the ftp server\n";
  $ftp->login() or warn "Cannot log into the ftp server\n";

  $ftp->binary();
  $ftp->cwd($ftp_dir);

  printf("\nFTPing the first %02dz file\n", $hour);
  printf("   From: %s\n", $ftp_file1);
  printf("   To:   %s\n\n", $from_ftp_file1);

  # The remote names are absolute paths, so the cwd() above is not
  # strictly required for the transfers to succeed.

  $ftp->get($ftp_file1, $from_ftp_file1) or warn "Could not transfer the file: " . $ftp->code() . ": " . $ftp->message();

  printf("FTPing the second %02dz file\n", $hour);
  printf("   From: %s\n", $ftp_file2);
  printf("   To:   %s\n\n", $from_ftp_file2);

  $ftp->get($ftp_file2, $from_ftp_file2) or warn "Could not transfer the file: " . $ftp->code() . ": " . $ftp->message();

  $ftp->quit();


  # If both files were transferred, then begin processing them

  if ((-e $from_ftp_file1) && (-e $from_ftp_file2))
    {

    # Name of the temporary WGRIB output file (lexical: these working
    # variables were previously unintended globals)

    my $temp_binary_file = sprintf("%s/gfs_wgrib_output.%02dz.bin", $work_dir, $hour);

    # Run the unpack_from_grib script to extract the grib data into a binary format

    printf("Running WGRIB to extract data from the GFS file\n\n");

    system $source_dir . "/unpack_from_grib " . $from_ftp_file1 . " " . $from_ftp_file2 . " " . $temp_binary_file;

    # Run the program to read the data from the binary files.  It reads
    # fort.11 and writes fort.20, with the year, MMDD and hour on stdin.

    printf("\nConverting the WGRIB output to a binary file\n");

    my $binary_file = sprintf("%s/gfsToBinary_output.%02dz.bin", $work_dir, $hour);

    symlink $temp_binary_file, "fort.11" or warn "Cannot link to fort.11";
    symlink $binary_file, "fort.20" or warn "Cannot link to fort.20";

    my $exec = sprintf("%s/gfsToBinary.x <<EOD > gfs.bin.%s.log\n%04d\n%04d\n%s\nEOD", $source_dir, $hour, $year, $mmdd, $hour);

    system $exec;

    # Remove the fort.NN links.  They were created relative to the
    # current directory, so remove them the same way (the previous code
    # removed "$work_dir/fort.NN", which only matched because of the
    # chdir at the top of the script).

    unlink "fort.11";
    unlink "fort.20";

    # Delete the temporary file that was generated by the WGRIB step

    unlink $temp_binary_file;

    # Delete the files that were transferred from the FTP server

    unlink($from_ftp_file1);
    unlink($from_ftp_file2);
    }
  else
    {
    printf("The files for %02dz could not be processed.\n", $hour);
    printf("The hour will be skipped.\n");
    }
  }

# end of file
