#!/usr/bin/perl -w
#
use File::Copy;
use Time::Local;


# Kick off the M1 and A11 capture scripts before pulling any files.

my $input_script_dir = "/data/data065/nprovs/scripts/data_input";

system "$input_script_dir/run_atovs_m1";
system "$input_script_dir/run_atovs_a11";

#------------------------------------------------------------------------
# Copy and process each selected ATOVS file via transferFile().
# Arguments per entry: remote file base, satellite suffix, remote
# directory suffix, archive subdirectory, archive file prefix.
#
# Retired transfers, kept for reference:
#   transferFile("rrodf1000", "m1",  "m1",   "atovs_m1",   "rrodf1000");
#   transferFile("rrodf1000", "a11", "oa11", "atovs_oa11", "rrodf");

for my $xfer (
    [ "rrodf1_1000", "d19_1", "d19", "atovs_d19_1", "rrodf" ],
    [ "rrodf1_1000", "m1x_1", "m1x", "atovs_m1x_1", "rrodf" ],
    [ "rrodf2_1000", "d19_2", "d19", "atovs_d19_2", "rrodf" ],
    [ "rrodf2_1000", "m1x_2", "m1x", "atovs_m1x_2", "rrodf" ],
  )
  {
  transferFile(@{$xfer});
  }



# ===========================================================================
# ===========================================================================
# ===========================================================================
# The subroutine transferFile downloads the selected ATOVS file, byte-swaps
# it for Linux, extracts one day of data, and runs the ODS capture script

sub transferFile
  {
  # Download an ATOVS RR-ODF file from SATEPSANONE, byte-swap it so it
  # can be read on Linux, extract yesterday's data into a one-day file
  # that replaces the multi-day file, and run the ODS capture script
  # for the satellite.
  #
  # Arguments:
  #   $file      - base name of the remote file to fetch
  #   $sat       - local satellite suffix; also selects the ODS script
  #   $sat2      - remote directory / file-name suffix on the server
  #   $arch_dir  - archive subdirectory (currently unused: the archive
  #                copy code at the bottom is commented out)
  #   $arch_name - archive file-name prefix (currently unused, as above)
  my ($file, $sat, $sat2, $arch_dir, $arch_name) = @_;

  printf("\n\n==========================================================\n");
  printf("Transferring the ATOVS file %s.%s\n\n", $file, $sat);

  # Set up the file names.  NOTE: the local names use a fixed
  # "rrodf1000" prefix regardless of $file; only the download URL
  # uses $file.

  my $daily_dir     = "/data/data065/nprovs/data/matchup/DailyFiles_incoming";

  my $byteswap_file = sprintf("%s/rrodf1000.%s.byteswap", $daily_dir, $sat);
  my $big_file      = sprintf("%s/rrodf1000.%s", $daily_dir, $sat);
  my $small_file    = sprintf("%s/rrodf1000_oneday.%s", $daily_dir, $sat);

  # Transfer the file from SATEPSANONE

  chdir $daily_dir or warn "Could not chdir to the daily incoming directory: ".$daily_dir;

  # List-form system avoids passing the command through a shell.
  my $url = sprintf("http://satepsanone.nesdis.noaa.gov/pub/atovsdev/%s/%s.%s",
                    $sat2, $file, $sat2);

  system("wget", "-4", "--wait=5", "-O", $byteswap_file, $url) == 0
    or warn "wget failed for ".$url;

  # Switch into a temporary directory

  my $temp_dir = sprintf("%s/raw_data/atovs/%s", $daily_dir, $sat);

  chdir $temp_dir or warn "Could not chdir to the raw data directory: ".$temp_dir;

  # Byte-swap the words in the UNIX files so they can be read properly
  # in a Linux environment.  The swap program reads fort.20 and writes
  # fort.21, so link the real files to those names around the run.

  if (-e $byteswap_file)
    {
    printf("\nByte swapping the file...\n");

    unlink $big_file;    # may not exist yet; failure is harmless

    symlink $byteswap_file, "fort.20" or warn "Cannot link to fort.20";
    symlink $big_file, "fort.21" or warn "Cannot link to fort.21";

    system("/data/smcd/nprovs/source/byteswap/swaprrodf1000.x") == 0
      or warn "Byte-swap program failed for ".$byteswap_file;

    unlink "fort.20" or warn "Cannot unlink fort.20";
    unlink "fort.21" or warn "Cannot unlink fort.21";

    # Builtin chmod instead of forking "chmod 664 ...".
    chmod 0664, $big_file or warn "Could not chmod ".$big_file;

    unlink $byteswap_file or warn "Could not delete the byte swap file";
    }


  # Determine the date of the data to process: yesterday, local time.
  # NOTE(review): subtracting 86400 epoch seconds can land on the same
  # calendar day when run in the last local hour of a day following a
  # DST spring-forward -- TODO confirm whether that matters here.

  my $data_day = time() - (1 * 86400);

  my ($mday, $mon, $year) = (localtime($data_day))[3, 4, 5];
  $year += 1900;   # localtime years are offset from 1900
  $mon++;          # localtime months are 0-based

  my $day_of_data = ($year * 10000) + ($mon * 100) + $mday;   # YYYYMMDD


  # Extract the day of data from the larger file into a single day file.
  # The extractor reads in.file and writes out.file.

  chdir $temp_dir or warn "Could not chdir to the temp raw data directory: ".$temp_dir;

  unlink $small_file;    # stale output from a previous run, if any

  symlink $big_file, "in.file" or warn "Cannot link to in.file";
  symlink $small_file, "out.file" or warn "Cannot link to out.file";

  system("/data/data065/nprovs/source/data_transfer/atovs/ExtractDayFromRRODF1000.x",
         $day_of_data) == 0
    or warn "ExtractDayFromRRODF1000.x failed for day ".$day_of_data;

  unlink "in.file" or warn "Cannot unlink in.file";
  unlink "out.file" or warn "Cannot unlink out.file";


  # Rename the big file to a backup file name and rename the small file
  # to the original big file name.  File::Copy::move replaces the old
  # shelled-out "mv" commands and reports failure.

  move($big_file, $big_file . ".bkp")
    or warn "Could not back up ".$big_file;

  move($small_file, $big_file)
    or warn "Could not install the one-day file as ".$big_file;


  # =============================================================================
  # Run the ODS capture script

  my $ods_exec = sprintf("/data/data065/nprovs/scripts/graphics/run_atovs_%s_ods_capture", $sat);

  system $ods_exec;


  #===================================================================
  # Copy the daily file to the orbital archive and the backup archive
  # (disabled; retained for reference -- this is where $arch_dir and
  # $arch_name would be used)

  #$nprovs_dir   = "/data/data065/nprovs/data/matchup/DailyFiles_incoming";
  #$archive_dir2 = "/data/data215/nprovs/data/orbital_archive";
  #$archive_dir  = "/data/data065/nprovs/data/matchup/DailyFiles_incoming/archive";

  #printf("Copying the daily files to the archive directory\n\n");

  # Get yesterday's date
  #$data_day = time();
  #$data_day -= 86400;

  #($sec, $min, $hour, $mday, $mon, $year, $wday, $yday, $isdst) = localtime($data_day);
  #$year += 1900;
  #$mon++;

  # Copy and gzip the file to the first archive

  #$from_file = sprintf("%s/rrodf1000.%s", $nprovs_dir, $sat);
  #$to_file   = sprintf("%s/%s/atovs_%s_%d%02d%02d.%s", $archive_dir, $arch_dir, $arch_name, $year, $mon, $mday, $sat);

  #printf("Copying from: %s\n", $from_file);
  #printf("          to: %s\n\n", $to_file);

  #copy($from_file, $to_file);

  #$gzip_exec = "gzip " . $to_file;

  #system $gzip_exec;

  # Copy the gzipped file to the backup archive
  #$from_file = $to_file . ".gz";
  #$to_file   = sprintf("%s/%s/atovs_%s_%d%02d%02d.%s.gz", $archive_dir2, $arch_dir, $arch_name, $year, $mon, $mday, $sat);

  #printf("Copying from: %s\n", $from_file);
  #printf("          to: %s\n\n", $to_file);

  #copy($from_file, $to_file);
  }

# end of file
