#!/usr/bin/perl

#
# This program is scheduled to run on pg4 by cron but can also be run from the command line.
#

#
# allband = 0xF1
# antenna = 0xF0 merge
#           0xF1 allant
#
# Base start time on the first usc contact of the day in the contacts file for the day
# (20061029_contacts.txt appears not to have any usc passes - that's because it was a
# Sunday...)
# 20061101_contacts.txt msp1 is first real, so maybe look for first real contact time
# and use that as the start time. Also sundays do not have any real time contacts...
#

use Time::Local;
use Date::Calc qw(Delta_Days);
use Env qw(EGSE_SW_LIB);	# makes the EGSE_SW_LIB environment variable available as $EGSE_SW_LIB

# Instead of die-ing would be better to report the error in a file somewhere
# and then exit. This applies to all instances of die of course. Todo.

# Load the shared pipeline library files. These presumably supply the helper
# subs used below (today, other_date, parse_orl_file, log_msg, stage_title,
# perform, pipeline_die, pipeline_exit) and the directory globals
# ($pipeline_log, $merge_dir, $darts_mission, ...) -- defined there, not here.
do "$EGSE_SW_LIB/date_calc.pl"     || die "Can't do date_calc.pl: $!\n";
do "$EGSE_SW_LIB/pipeline_defs.pl" || die "Can't do pipeline_defs.pl: $!\n";
do "$EGSE_SW_LIB/pipeline.pl"      || die "Can't do pipeline.pl: $!\n";

# Run-mode switches. The "1 - 1" idiom evaluates to 0 (off) while keeping a
# visible "1" on the line for quick hand-editing to switch the flag on.
$TESTING = 1 - 1;
$FORCE_REFORMAT = 1 - 1;
$RESCUE_MOD = 1;	# when set, damaged archives go to nursery/ instead of incomplete/
$USE_NEW_RESCUE = 1;
$SKIP_HK = 1 - 1;	# when set, the HK-coverage precheck is skipped

# These key words appear before the date/time fields:
# fetch 	just gets md files
# fits_only 	reformats files into fits but doesn't copy to darts
# just_fits 	reformats fits files without fetching first
# special	
# no_soda	produces all the files but does not copy the fits files to DARTS
# recover_test	doesn't fetch new data; requires fetched data to already be present. Tests the recovery chain.
# louisa        don't use new rescue, no_soda
# old_plan	don't do the sequence id wrap around. Sets $OLD_SEQ_FLAG to 1
# old_decomp	don't use the new decompression program which uses the fix tables
# ignore_satrot	don't bother testing sat rot availability
$SPECIAL = "";

# mcrw 20190827
# NOTE(review): declared but never used in this part of the file -- presumably
# used further down or intended for the rescue code; confirm before removing.
my %translations;
my %rescue_translations;

############################################### -1 - INIT

# Open the master pipeline log if we can.
# $pipeline_log is presumably defined by pipeline_defs.pl -- not visible here.
open(MLOG, ">> $pipeline_log/pipeline_log.txt") || die "Can't open $pipeline_log/pipeline_log.txt for writing: $!";
print MLOG scalar(localtime), " (JST) daily_merge_mission1 started ";

# The start time will either be passed in (interactive mode)
# or will be generated from cron using todays date minus some days

$state = "cron"; # Default to cron job

$ant='usc34';	# Not needed for merge as sdtp will substitute merge antenna
$band=3;	# Merge telemetry
$mode='merge';	# Merge telemetry

# Set default values for start date, end date and start time to empty strings. If in
# interactive state these are not set then bail out.

$sdate = "";
$edate = "";
$stime = "";

# Command-line flag state; each one short-circuits or redirects a later stage
# via the goto-based flow control further down.
my $cron = 0;
my $pending = 0;	# set by -p: run was re-scheduled via the tracking file
my $JUST_FITS_FLAG = 0;
my $RECOVER_TEST_FLAG = 0;
my $FETCH_FLAG = 0;
my $FITS_ONLY_FLAG = 0;
my $NO_SODA_FLAG = 0;
my $OLD_SEQ_FLAG = 0;
my $OLD_DECOMP_FLAG = 0;
my $IGNORE_SATROT = 0;

# If there is anything on the command line then this is interactive mode
# But check first for -c. This option says the program has been run by cron from update file.
#
# Expected argument layout: zero or more keyword flags, then
#   sdate edate stime [etime]   (dates as YYYYMMDD, times as HHMM).
if($#ARGV != -1) {
    @command_line = @ARGV;
    $state = "interactive";
#    if($#ARGV == 4) {
    # More than four args means leading keyword flags precede the date/time fields.
    if($#ARGV > 3) {
	while($#ARGV > 3) {
	    my $flag = shift;	# = [fetch|fits_only|...]
#	    $SPECIAL = shift;
	    $cron              = 1 if $flag eq "-c";
	    $cron = $pending   = 1 if $flag eq "-p";
	    $TESTING           = 1 if $flag eq "TEST";
	    $JUST_FITS_FLAG    = 1 if $flag eq "just_fits";
	    $RECOVER_TEST_FLAG = 1 if $flag eq "recover_test";
	    $FETCH_FLAG        = 1 if $flag eq "fetch";
	    $FITS_ONLY_FLAG    = 1 if $flag eq "fits_only";
	    $NO_SODA_FLAG      = 1 if $flag eq "no_soda";
	    $OLD_SEQ_FLAG      = 1 if $flag eq "old_plan";
	    $OLD_DECOMP_FLAG   = 1 if $flag eq "old_decomp";
	    $IGNORE_SATROT     = 1 if $flag eq "ignore_satrot";
###	    $SPECIAL           = "special" if (($flag eq "special") or ($flag ne "-c"));
	    $SPECIAL           = "special" if ($flag eq "special");
	    if($flag eq "louisa") {
		$NO_SODA_FLAG = 1;
		$SPECIAL = "no_soda";
		$USE_NEW_RESCUE = 0;
#		print "louisa: SPECIAL = $SPECIAL, USE_NEW_RESCUE = $USE_NEW_RESCUE\n";
	    }
	}
    }
    #
    # Get any command line arguments (must be in this order)
    #
    if(@ARGV) { $sdate  = shift; }
    if(@ARGV) { $edate  = shift; }
    if(@ARGV) { $stime 	= shift; }
    if(@ARGV) { $etime 	= shift; }
    #
    # Simple check for arguments. Could test actual times but sdtp will catch errors
    #
    pipeline_die("mission", 10, "No start date") if $sdate eq "";
    pipeline_die("mission", 11, "No end date")   if $edate eq "";
    pipeline_die("mission", 12, "No start time") if $stime eq "";
    $etime = $stime     if $etime eq "";
    #
    # All set, open the local log, update the master log and off we go
    #
###    $pipeline_mission_log = "$pipeline_log/pipeline_mission_log_$sdate" . ".txt";

    $pipeline_mission_log = "$logs_directory/pipeline_mission/pipeline_mission_log_$sdate" . ".txt";

    #
    # Debug
    print "pipeline_mission_log = $pipeline_mission_log\n";

    open(LOG, ">> $pipeline_mission_log") || pipeline_die("mission", 20, "Can't open $pipeline_mission_log for writing: $!");

    ###my $kick_string = ($cron) ? "by cron from tracking file" : "manually";
    # $pending (not $cron) distinguishes a tracking-file re-run from a manual one.
    my $kick_string = ($pending) ? "by cron from tracking file" : "manually";

#    print LOG "Started (manually) ", scalar(localtime), " (JST)\n";
    print LOG "\nStarted (" . $kick_string . ") ", scalar(localtime), " (JST) : daily_merge_mission1 @command_line\n";
###    print MLOG "manually: daily_merge_mission1 $sdate $edate $stime $etime\n";
#    print MLOG "manually: daily_merge_mission1 @command_line\n";
    print MLOG $kick_string . ": daily_merge_mission1 @command_line\n";
    print MLOG "\tLogging to $pipeline_mission_log\n";

    print "Started " . $kick_string . ": daily_merge_mission1 @command_line\n";
    print "Logging to $pipeline_mission_log\n";

    # Split YYYYMMDD strings into year/month/day pieces.
    $syear  = substr($sdate, 0, 4);
    $smonth = substr($sdate, 4, 2);
    $sday   = substr($sdate, 6, 2);

    $eyear  = substr($edate, 0, 4);
    $emonth = substr($edate, 4, 2);
    $eday   = substr($edate, 6, 2);

    @today = today();	# @today[0..6] = seconds,minutes,hours,day,month,year,day_of_week (0 = sunday)
    # Un-zero-padded copies; used later by the Delta_Days give-up test.
    $syear1  = sprintf "%04u", $syear;	# Get starting year
    $smonth1 = sprintf "%u", $smonth;	# Get starting month
    $sday1   = sprintf "%u", $sday;	# Get starting day
    
}
else {
    $cron = 1;

    #
    # Started by cron. Calculate start date, end date and start time
    #
    print MLOG "by cron\n";

    #
    # Started by cron. Calculate start date, end date and start time
    #
    @today = today();	# @today[0..6] = seconds,minutes,hours,day,month,year,day_of_week (0 = sunday)

    #
    # If today is sunday then don't bother to do anything
    #
    if($today[6] == 0) {
	print MLOG "\tSunday - not running\n";
	pipeline_exit("mission", 0);
    }

    # mcrw 20140121 Changed when weekly plans started.
#    $start_day_offset = -14;
###    $start_day_offset = -7;
    $start_day_offset = -9; # Attempt to re-synch 
    $end_day_offset   = $start_day_offset + 1;
	
    #
    # If today is saturday then do saturday until monday morning
    #
    if($today[6] == 6) {	# Saturday == 6
        ++$end_day_offset;	# $end_day_offset should now be 5
    }

    #
    # Save defaults for start and end dates in case we can't get/parse orl files
    #
    @default_start_day = other_date(@today, $start_day_offset);		# Go back 7 days
    @default_end_day   = other_date(@today, $end_day_offset);		# Go back 5 or 6 days

    #
    # Extract strings from the dates
    #
    $default_syear  = sprintf "%04u", $default_start_day[5];	# Get starting year
    $default_smonth = sprintf "%02u", $default_start_day[4];	# Get starting month
    $default_sday   = sprintf "%02u", $default_start_day[3];	# Get starting day

    $default_eyear  = sprintf "%04u", $default_end_day[5];	# Get ending year
    $default_emonth = sprintf "%02u", $default_end_day[4];	# Get ending month
    $default_eday   = sprintf "%02u", $default_end_day[3];	# Get ending day

    #
    # Form start date string
    #
    $default_sdate = "$default_syear$default_smonth$default_sday";
    $default_edate = "$default_eyear$default_emonth$default_eday";

    #
    # Now try to parse start day orl file. If orl file not found for today, try tomorrow and lastly 3 days hence.
    # If all that fails then use today 10:00
    #
    $start_time_from_orl = 0;
#    print MLOG "\tLooking for start time in orl file for $default_sdate \n";
##    foreach $attempt (0 .. 2) {
    @start_day = other_date(@today, $start_day_offset);		# Go back 7 days

    #
    # Extract strings from the dates
    #
    $syear  = sprintf "%04u", $start_day[5];	# Get starting year
    $smonth = sprintf "%02u", $start_day[4];	# Get starting month
    $sday   = sprintf "%02u", $start_day[3];	# Get starting day
    
    #
    # Form start date string
    #
    $sdate = "$syear$smonth$sday";
    
    #
    # Parse orl file
    #
    # parse_orl_file returns either an HHMM time string or an error message.
    $stime_str = parse_orl_file($sdate, *MLOG);
    if($stime_str =~ /^\d{4}/) {
	$start_time_from_orl = 1;
	print MLOG "\tStart orl for $sdate: found\n";
    }
    else {
###	print MLOG "\tStart orl for $sdate: " . $stime_str . "\n";
	print MLOG "\tError trying to read start orl file for $sdate: " . $stime_str . "\n";
	pipeline_exit("mission", 10);
    }

#    if(($stime_str ne "not found") && ($stime_str ne "parse error")) {
#	$start_time_from_orl = 1;
###	    last;
#    }
#    elsif($stime_str eq "not found") {
#	print MLOG "\tStart orl file for $sdate not found. Quitting.\n";
#	pipeline_exit("mission", 10);
#    }
#    elsif($stime_str eq "parse error") {
#	print MLOG "\tStart orl file parse error. Quitting.\n";
#	pipeline_exit("mission", 20);
#    }
###    ++$start_day_offset;
##?        ++$end_day_offset;
#
###    }

    print "start_day_offset = $start_day_offset\n";

    #
    # Now try to parse end day orl file. If orl file not found for today, try tomorrow and lastly 3 days hence.
    # If all that fails then use today+1 10:00
    #
    $end_time_from_orl = 0;
    # Normal range is 0 .. 5
    foreach $attempt (0 .. 8) {	# mcrw 20110404 extend to 6 days. 20140120 mcrw extend to 7 days at the start of weekly plans. 20140207 mcrw extend to 8 days.
	# NOTE(review): this recomputes end_day_offset from start_day_offset,
	# discarding the Saturday "++$end_day_offset" adjustment made above --
	# confirm that is intended.
	$end_day_offset = $start_day_offset + 1;
print "end_day_offset = $end_day_offset\n";
	@end_day   = other_date(@today, $end_day_offset);		# Go back 5 or 6 days

	#
	# Extract strings from the dates
	#
	$eyear  = sprintf "%04u", $end_day[5];	# Get ending year
	$emonth = sprintf "%02u", $end_day[4];	# Get ending month
	$eday   = sprintf "%02u", $end_day[3];	# Get ending day
#	print MLOG "\tLooking for end time in orl file for $eyear$emonth$eday\n";

	#
	# Form end date strings
	#
	$edate = "$eyear$emonth$eday";
    
	#
	# Parse orl file
	#
	$etime_str = parse_orl_file($edate, *MLOG);

	if($etime_str =~ /^\d{4}/) {
	    $end_time_from_orl = 1;
	    print MLOG "\tEnd orl for $edate: found\n";
	    last;
	}
	else {
	    print MLOG "\tEnd orl for $edate: " . $etime_str . "\n";
#	    pipeline_exit("mission", 10);
	}

#	if(($etime_str ne "not found") && ($etime_str ne "parse error")) {
#	    $end_time_from_orl = 1;
#	    last;
#	}
	# Try the next day's orl file on the following pass.
	$start_day_offset += 1;

    }

    # NOTE(review): by this point $start_time_from_orl is always 1 -- the parse
    # failure branch above calls pipeline_exit -- so the three fallback blocks
    # below appear to be dead code kept for safety; confirm before removing.
    if(! $start_time_from_orl) {
	print MLOG "\tError parsing start orl file - using default (1000) for start time\n";
	$stime_str = "1000";
    }
#    else {
#	print MLOG "\tStart time from orl file - $stime_str\n";
#    }

    if(! $end_time_from_orl) {
	print MLOG "\tError parsing stop orl file or file not found - using default (1000) for end time\n";
	$etime_str = "1000";
    }
#    else {
#	print MLOG "\tStop time from orl file - $etime_str\n";
#    }

    if(! $start_time_from_orl) {
	print MLOG "\tNo start orl file found start date is $sdate, bailing\n";
	pipeline_exit("mission", 30);
    }

    if((! $start_time_from_orl) || (! $end_time_from_orl)) {
	#
	# Extract strings from the dates
	#
	$syear  = sprintf "%04u", $default_start_day[5];	# Get starting year
	$smonth = sprintf "%02u", $default_start_day[4];	# Get starting month
	$sday   = sprintf "%02u", $default_start_day[3];	# Get starting day

	$syear1  = sprintf "%04u", $default_start_day[5];	# Get starting year
	$smonth1 = sprintf "%u", $default_start_day[4];	# Get starting month
	$sday1   = sprintf "%u", $default_start_day[3];	# Get starting day

	$eyear  = sprintf "%04u", $default_end_day[5];	# Get ending year
	$emonth = sprintf "%02u", $default_end_day[4];	# Get ending month
	$eday   = sprintf "%02u", $default_end_day[3];	# Get ending day

	#
	# Form start date string
	#
	$sdate = "$syear$smonth$sday";
	$edate = "$eyear$emonth$eday";

	$stime_str = "1000";
	$etime_str = "1000";
    }
    
    #
    # Open the local log if we can
    #
###    $pipeline_mission_log = "$pipeline_log/pipeline_mission_log_$sdate" . ".txt";

    $pipeline_mission_log = "$logs_directory/pipeline_mission/pipeline_mission_log_$sdate" . ".txt";

#    open(LOG, ">> $pipeline_mission_log") || die "Can't open $pipeline_mission_log for writing: $!";
    open(LOG, ">> $pipeline_mission_log") || pipeline_die("mission", 30, "Can't open $pipeline_mission_log for writing: $!");

    print LOG "Started by cron on ", scalar(localtime), " (JST)\n";
    print LOG "\tStart time $stime_str on $sdate\n";
    print LOG "\tEnd   time $etime_str on $edate\n";

    #
    # Addition when using parse_orl
    #
    $stime = $stime_str;
    $etime = $etime_str;

    #
    # Update master log and off we go
    #
    print MLOG "\tdaily_merge_mission1 $sdate $edate $stime $etime\n";
    print MLOG "\tLogging to $pipeline_mission_log\n";
}

# Split the start date into constituent parts
$year  = substr $sdate, 0, 4;
$month = substr $sdate, 4, 2;
$day   = substr $sdate, 6, 2;

# Date string used to name fetch/decompression log files below.
$date_string = "$year$month$day";

my $stage_reached = 0;

# Any of these flags bypasses the HK-coverage precheck (stage -2).
if (($TESTING) || ($FORCE_REFORMAT) || ($SPECIAL ne "") || ($SKIP_HK)) {
    log_msg(*LOG, $state, "Skipping outstanding test as one of TESTING, FORCE_REFORMAT, SPECIAL, SKIP_HK flags is set ($TESTING, $FORCE_REFORMAT, $SPECIAL, $SKIP_HK)");
    goto skip_outstanding;
}
#goto skip_outstanding if $TESTING;
#goto skip_outstanding if $FORCE_REFORMAT;
#goto skip_outstanding if $SPECIAL;
#goto skip_outstanding if $SKIP_HK;


############################################### -2 - CHECK FOR HK COVERAGE

stage_title(*LOG, $state, "-2. Check for hk coverage");

# Check that the hk for the plan time period has been reformatted already.
my $status_outstanding = 0;
my %status;
# What happens if this file is empty? %status is empty...
# Each line of status_pending.txt is "start_date end_date"; an entry keyed on
# either boundary of our plan means HK reformatting is still outstanding.
if(open(FOO, "< $HOME/track/status_pending.txt")) {
    while(<FOO>) {
	chomp;
	($status_start, $status_end) = split;
	$status{$status_start} = $status_end;
    }
    close FOO;
#    foreach $foo (sort keys %status) {
#	print "status{$foo} = $status{$foo}\n";
#    }
    $status_outstanding = (defined($status{$sdate})) || (defined($status{$edate}));
}
# Also outstanding if the latest reformatted HK date is older than our end date
# (string comparison works because dates are fixed-width YYYYMMDD).
if(open(FOO, "< $HOME/track/latest_status_reformat.txt")) {
    $latest_status_reformat = <FOO>;
    chomp $latest_status_reformat;
    close FOO;
    $status_outstanding ||= ($edate gt $latest_status_reformat);
#    print "sdate, line = $sdate, $latest_status_reformat\n";
}
# HK not ready: reschedule this plan via the tracking file and quit.
if($status_outstanding) {
    log_msg(*LOG, $state, "Reformat period not completely covered by HK ($sdate, $edate)");
    print MLOG "\tStatus data not complete for reformat period - quitting.\n";
    # Does the current date already exist in the controller file?
    # Not really necessary as the kick program uses a hash...
    $exists = `grep '$sdate $edate' ~/track/mission_pending.txt`;

    # Write out start/end date to controlling file if not
    `echo -p $sdate $edate $stime $etime >> ~/track/mission_pending.txt` if $exists eq "";
    perform(*LOG, $state, "/bin/cp ~/track/mission_pending.txt $HOME/data/staging/logs/");
    pipeline_exit("mission", 100);
}
log_msg(*LOG, $state, "HK coverage good");

 skip_outstanding:
#goto reformat_fits if $SPECIAL eq "just_fits";
# If only the fits reformat is wanted, skip fetch/decompress entirely.
# (Cleaned up: the postfix "if $JUST_FITS_FLAG" on the goto was redundant
# inside a block already guarded by the same flag, and the closing brace
# indentation was misleading.)
if ($JUST_FITS_FLAG) {
    log_msg(*LOG, $state, "JUST_FITS_FLAG set, skipping to reformat_fits");
    goto reformat_fits;
}
#goto reformat_fits if $JUST_FITS_FLAG;

############################################### -1 - CHECK FOR SATROT CSV FILES

stage_title(*LOG, $state, "-1. Check for satrot csv files");

#log_msg(*LOG, $state, "Turned off for debugging, not checking for full sat rot coverage");
#goto skip_satrot;

# Require one satellite-rotation csv file per day of the plan period; if any
# is missing, reschedule the plan (with ignore_satrot) and quit.
if ($IGNORE_SATROT) {
    log_msg(*LOG, $state, "Ignore satrot flag set, not checking for full sat rot coverage");
}
else {
    my $start_days = Date::Calc::Date_to_Days($syear, $smonth, $sday);
    my $end_days = Date::Calc::Date_to_Days($eyear, $emonth, $eday);
    log_msg(*LOG, $state, "Satrot: end_days = $end_days, start_days = $start_days, (0 .. " . ($end_days - $start_days) . ")");
    my $ok = 1;
    foreach $diff (0 .. ($end_days - $start_days)) {
	my ($nyear, $nm, $nd) = Date::Calc::Add_Delta_Days($syear,$smonth,$sday, $diff);
	my $nmonth = sprintf "%02u", $nm;
	my $nday = sprintf "%02u", $nd;
	log_msg(*LOG, $state, "Checking diff ($diff) : $nyear, $nmonth, $nday ($sat_rot_csv_dir/$nyear$nmonth$nday.csv)");
	next if -e "$sat_rot_csv_dir/$nyear$nmonth$nday.csv";
	
	log_msg(*LOG, $state, "$sat_rot_csv_dir/$nyear$nmonth$nday.csv missing");
	$ok = 0;
    }
    if ($ok == 0) {
	log_msg(*LOG, $state, "Sat rot file(s) missing between $sdate, $edate. Rescheduling");
#####	$exists = `grep '$sdate $edate' ~/track/mission_pending.txt`;
	my $pending_file = "$HOME" . "/track/mission_pending.txt";
	#open(FH, ">> $HOME/track/mission_pending.txt") || pipeline_exit("mission", 300);
	#open(FH, ">> $pending_file") || pipeline_exit("mission", 300);
	if (open(FH, ">> $pending_file")) {
	    log_msg(*LOG, $state, "Opened pending file");
###	    if(Delta_Days($syear1, $smonth1, $sday1, $today[5], $today[4], $today[3] ) < 3) {
	    # The "retry for up to 3 days" branch is deliberately disabled:
	    # every reschedule now goes straight to the ignore_satrot form.
	    if(0) { # temporary fix for sat rot coverage hang caused by above line
		# Write out start/end date to controlling file if not
		#perform(*LOG, $state, "echo -p $sdate $edate $stime $etime >> ~/track/mission_pending.txt") if $exists eq "";
		#`echo -p $sdate $edate $stime $etime >> $HOME/track/mission_pending.txt` if ($exists eq "");
		print FH "-p $sdate $edate $stime $etime\n";
		log_msg(*LOG, $state, "Scheduling reformat: -p $sdate $edate $stime $etime");
		print MLOG "\tScheduling reformat of plan for $sdate $edate $stime $etime\n";
	    }
	    else {
		#perform(*LOG, $state, "echo -p ignore_satrot $sdate $edate $stime $etime >> ~/track/mission_pending.txt") if $exists eq "";
		#`echo -p ignore_satrot $sdate $edate $stime $etime >> $HOME/track/mission_pending.txt` if ($exists eq "");
		#my $junk = `echo -p ignore_satrot $sdate $edate $stime $etime >> ~/track/mission_pending.txt`;
		print FH "-p ignore_satrot $sdate $edate $stime $etime\n";
		log_msg(*LOG, $state, "Scheduling reformat: -p ignore_satrot $sdate $edate $stime $etime");
		print MLOG "\t*** Scheduling reformat of plan for $sdate $edate $stime $etime to ignore sat_rot check ***\n";
	    }
	    close FH;
	}
	else {
	    log_msg(*LOG, $state, "Unable to write to pending file: $!\n");
	    print MLOG "\tUnable to write to pending file: $!\n";
	}
	perform(*LOG, $state, "/bin/cp $HOME/track/mission_pending.txt $HOME/data/staging/logs/");
	log_msg(*LOG, $state, "Sat rot data not complete for reformat period - quitting.\n");
	print MLOG "\tSat rot data not complete for reformat period - quitting.\n";
	pipeline_exit("mission", 200);
    }
    log_msg(*LOG, $state, "Sat rot coverage good");
}

skip_satrot:

goto the_exit if $SPECIAL eq "special";

############################################### 0 - CLEAR OLD DATA

# recover_test mode re-uses previously fetched data, so keep everything.
#goto skip_fetch if $SPECIAL eq "recover_test";
goto skip_fetch if $RECOVER_TEST_FLAG;

stage_title(*LOG, $state, "0. Removing old data in $merge_dir");

# Remove old ccsds packets
perform(*LOG, $state, "cd $merge_dir && /bin/rm -f eis_md_*");
perform(*LOG, $state, "cd $merge_dir && /bin/rm -f eis_sts*");
perform(*LOG, $state, "cd $merge_dir && /bin/rm -f eis_dmp*");
perform(*LOG, $state, "cd $HOME/tmp/join && /bin/rm -f eis_md_*");

# Temp
perform(*LOG, $state, "cd $temp_fits && /bin/rm -f eis_l0*");

# Remove old decompressed ccsds packets
perform(*LOG, $state, "cd $merge_dir/decompressed && /bin/rm -f eis_md_2*");
perform(*LOG, $state, "/bin/rm $pipeline_log" . "/md_fetch*");
perform(*LOG, $state, "/bin/rm -f $merge_dir/*.txt");
perform(*LOG, $state, "unset noclobber");

# Move any files in the rescue directory out of the way.
# BUG FIX: the loop previously iterated over @files, which is never populated,
# so the listing captured in @rfiles was ignored and rescue files were never
# moved. Each `ls` line also carries a trailing newline which must be chomped
# before it is interpolated into the shell command.
@rfiles = `ls $merge_dir/rescue`;
foreach $file (@rfiles) {
    chomp $file;
    next if $file eq "";	# skip any blank line from ls
    perform(*LOG, $state, "/bin/mv $merge_dir/rescue/$file $merge_dir/orphans");
}


############################################### 1 - FETCH DATA

stage_title(*LOG, $state, "1. Fetching data");
###$md_fetch_log = "$pipeline_log" . "/md_fetch_" . "$date_string" . ".txt";
#perform(*LOG, $state, "$HOME/bin/sdtp $mode $ant band=$band sdate=$sdate edate=$edate stime=$stime etime=$etime >& $md_fetch_log");
# sdtp fetches the merge telemetry for the plan period into $merge_dir.
perform(*LOG, $state, "$HOME/bin/sdtp $mode $ant band=$band sdate=$sdate edate=$edate stime=$stime etime=$etime");

# Remove status files
perform(*LOG, $state, "/bin/rm $merge_dir/eis_sts*");


############################################### 2 - MISSION DATA CHECK

stage_title(*LOG, $state, "2. Mission data check");

@rec_files = `ls $merge_dir/ | grep eis_md`;
# Nothing fetched: reschedule for up to 14 days after the plan start, then give up.
if(! @rec_files) {
    log_msg(*LOG, $state, "No ccsds mission data files");
    # Does the current date already exist in the controller file?
    $exists = `grep '$sdate $edate' ~/track/mission_pending.txt`;

    if(Delta_Days($syear1,$smonth1,$sday1,$today[5],$today[4],$today[3]) < 14) {
	# Write out start/end date to controlling file if not
	`echo -p $sdate $edate $stime $etime >> ~/track/mission_pending.txt` if $exists eq "";
	print MLOG "\tScheduling reformat of plan for $sdate $edate $stime $etime\n";
    }
    else {
	print MLOG "\t*** Giving up with plan for $sdate $edate $stime $etime - no ccsds packets ***\n";
    }
    #system("/bin/cp ~/track/mission_pending.txt $HOME/data/staging/logs/");
    perform(*LOG, $state, "/bin/cp ~/track/mission_pending.txt $HOME/data/staging/logs/");
    print MLOG "\tNo ccsds packets available\n";
    pipeline_exit("mission", 1);
}

$log_msg_str = "Number of files: " . @rec_files;
log_msg(*LOG, $state, $log_msg_str);

#
# Write out all files received
#
if(open(FOO, ">$merge_dir/received_files.txt")) {
    print FOO @rec_files;
    close FOO;
}
else {
    log_msg(*LOG, $state, "Can't open $merge_dir/received_files.txt: $!");
}

# Do the ccsds check before joining
perform(*LOG, $state, "$HOME/bin/pipeline_md_hdr_dump_pre_join $merge_dir > $merge_dir/md_hdr_dump_pre_join.txt");
perform(*LOG, $state, "$HOME/bin/pipeline_ccsds_pre_split_check.pl $merge_dir > $merge_dir/ccsds_hdr_pre_join.txt");

perform(*LOG, $state, "$HOME/bin/ccsds_merge_md_split_check $merge_dir > $merge_dir/md_split_check.txt");

###print "EDIT $merge_dir/md_split_check.txt then press any key!!!!\n";
###my $key_pressed = getc;

# Join split archives according to the split-check report; output goes to the join dir.
perform(*LOG, $state, "$HOME/bin/md_join_merge.py $merge_dir/md_split_check.txt");

# mcrw 20130902 - put in absolute path temporarily
#print "x","$HOME","x\n";

#@joined_files = `/bin/ls -1 $HOME/tmp/join/ | grep eis_md`;
# NOTE(review): hard-coded absolute path presumably equals $HOME/tmp/join --
# confirm before changing back.
@joined_files = `ls /nasA_solar1/home/sbukeis/tmp/join/ | grep eis_md`;

$log_msg_str = "Number of joined files: " . @joined_files;
log_msg(*LOG, $state, $log_msg_str);

#
# Write out all the joined files
#
if(open(FOO, ">$merge_dir/joined_files.txt")) {
    print FOO @joined_files;
    close FOO;
}
else {
    log_msg(*LOG, $state, "Can't open $merge_dir/joined_files.txt: $!");
}

# The join step should always produce output; an empty join dir is fatal.
if(!@joined_files) {
    log_msg(*LOG, $state, "No files in the join directory - quitting");
    pipeline_exit("mission", -1000);
}

#
# Remove the files currently in the merge directory with the joined ones
#
perform(*LOG, $state, "/bin/rm $merge_dir/eis_md_*");
# mcrw 20130904 - changed path
#perform(*LOG, $state, "/bin/mv $HOME/tmp/join/* $merge_dir");
perform(*LOG, $state, "/bin/mv /nasA_solar1/home/sbukeis/tmp/join/* $merge_dir");

# Do the mission data header dump, which also updates the engineering.txt file
perform(*LOG, $state, "$HOME/bin/pipeline_md_hdr_dump $merge_dir > $merge_dir/md_hdr_check.txt");
#perform(*LOG, $state, "$HOME/bin/pipeline_md_hdr_dump $HOME/tmp/join > $merge_dir/md_hdr_check.txt");

# Do the ccsds check which detects missing packets, updating missing_packets.txt
perform(*LOG, $state, "$HOME/bin/pipeline_ccsds_check $merge_dir > $merge_dir/ccsds_hdr_check.txt");
#perform(*LOG, $state, "$HOME/bin/pipeline_ccsds_check $HOME/tmp/join > $merge_dir/ccsds_hdr_check.txt");

###goto end_join_test;

# Move the files to DARTS
perform(*LOG, $state, "/bin/mv $merge_dir/md_hdr_check.txt         $darts_mission/$year/$month/packet_check/md_hdrs.$year$month$day.html");
perform(*LOG, $state, "/bin/mv $merge_dir/md_hdr_dump_pre_join.txt $darts_mission/$year/$month/packet_check/md_hdrs_pre_join.$year$month$day.html");
perform(*LOG, $state, "/bin/mv $merge_dir/ccsds_hdr_check.txt      $darts_mission/$year/$month/packet_check/ccsds_hdrs.$year$month$day.txt");
perform(*LOG, $state, "/bin/mv $merge_dir/ccsds_hdr_pre_join.txt   $darts_mission/$year/$month/packet_check/ccsds_hdrs_pre_join.$year$month$day.txt");

# Copy the shutter log to the flat staging area, then move the original into
# the per-year archive (so both locations end up with a dated copy).
perform(*LOG, $state, "/bin/cp $shutter_log/shutter.txt      $staging/logs/shutter/shutter_$year$month$day.txt");
perform(*LOG, $state, "/bin/mv $shutter_log/shutter.txt      $staging/logs/shutter/$year/shutter_$year$month$day.txt");
perform(*LOG, $state, "/bin/mv $merge_dir/received_files.txt $darts_mission/$year/$month/packet_check/received.$year$month$day.txt");
perform(*LOG, $state, "/bin/mv $merge_dir/md_split_check.txt $darts_mission/$year/$month/packet_check/md_split_check.$year$month$day.txt");
perform(*LOG, $state, "/bin/mv $merge_dir/joined_files.txt   $darts_mission/$year/$month/packet_check/joined_files.$year$month$day.txt");

#
# Check which files are not compressed. Check to safeguard correct operation of the new rescue c code.
#
@non_compressed_files = ();
if(! open(FOO, "< $darts_mission/$year/$month/packet_check/md_hdrs.$year$month$day.html")) {
    log_msg(*LOG, $state, "Can't open the md_hdrs file for compression setting check");
}
else {
    while(<FOO>) {
	next if /^$/;
	if(/nasA/) {
	    chomp;
	    # Lines mentioning the nasA path name a new file; remember its name
	    # for the data lines that follow.
	    # NOTE(review): assumes the filename is exactly the last 26
	    # characters of the line -- verify against md_hdr_dump output format.
	    $filename = substr($_, -26, 26);
	    next;
	}
	next if /Type/;
	next if /headers/;
	@stuff = split;
	# Column 17 is read as a hex flag; 0 is taken to mean "not compressed".
	$comp = sprintf("%u", hex($stuff[17]));
	if(/ccsds/) {
	    push(@non_compressed_files, $filename) if $comp == 0;
	}
    }
    close FOO;
}


#
# For now, if there are any archives with missing packets move them to the incomplete directory.
# Must handle these more sensibly in the future.
#
$number_missing = 0;
if(-e "$merge_dir/missing_packets.txt") {
    log_msg(*LOG, $state, "Missing packet file exists");
    open(BAD, "< $merge_dir/missing_packets.txt");
    while(<BAD>) {
	next if /^Missing/;
	last if /^$/;
	chomp;
	# TEMP!!!!
#	print STDERR "[$_]\n";

###	log_msg(*LOG, $state, "Moving $merge_dir/$_ to $merge_dir/incomplete/");
	if($RESCUE_MOD) {
	    perform(*LOG, $state, "/bin/mv $merge_dir/$_ $merge_dir/nursery/");
	}
	else {
	    perform(*LOG, $state, "/bin/mv $merge_dir/$_ $merge_dir/incomplete/");
	}
	++$number_missing;
    }
    close BAD;
    perform(*LOG, $state, "/bin/mv $merge_dir/missing_packets.txt $darts_mission/$year/$month/packet_check/missing_packets.$year$month$day.txt");
###    log_msg(*LOG, $state, "Number of missing archives: $number_missing");
}
else {
    log_msg(*LOG, $state, "No missing packet file");
}

#
# Check for and record those files which are continuations of previous files.
# The decompression stage will move these files aside.
#
if(! -e "$merge_dir/headless_packets.txt") {
    log_msg(*LOG, $state, "No headless packets file");
}
else {
    log_msg(*LOG, $state, "Headless packets file exists");
    # Destination depends on rescue mode: nursery/ feeds the new rescue chain,
    # incomplete/ is the old behaviour. (open left unchecked, as elsewhere.)
    my $dest_bin = $RESCUE_MOD ? "nursery" : "incomplete";
    open(my $headless_fh, "< $merge_dir/headless_packets.txt");
    while(my $line = <$headless_fh>) {
	chomp $line;
	next if $line =~ /^Headless/;	# skip the header line
	last if $line eq "";		# blank line terminates the filename list
	perform(*LOG, $state, "/bin/mv $merge_dir/$line $merge_dir/$dest_bin/") if -e "$merge_dir/$line";
    }
    close $headless_fh;
    # Archive the report itself on DARTS.
    perform(*LOG, $state, "/bin/mv $merge_dir/headless_packets.txt $darts_mission/$year/$month/packet_check/headless_packets.$year$month$day.txt");
}
#
#
# Check for and record those files which are not complete.
# The decompression stage will move these files aside.
#
if(-e "$merge_dir/incomplete_packets.txt") {
    log_msg(*LOG, $state, "Incomplete packets file exists");
    # NOTE: open is unchecked; an unreadable file silently skips the loop.
    open(BAD, "< $merge_dir/incomplete_packets.txt");
    while(<BAD>) {
	next if /^Incomplete/;	# skip the header line
	last if /^$/;		# blank line terminates the filename list
	chomp;
###	log_msg(*LOG, $state, "Moving $merge_dir/$_ to $merge_dir/incomplete/");
	# nursery/ feeds the new rescue chain; incomplete/ is the old behaviour.
	if($RESCUE_MOD) {
	    perform(*LOG, $state, "/bin/mv $merge_dir/$_ $merge_dir/nursery/") if(-e "$merge_dir/$_");
	}
	else {
	    perform(*LOG, $state, "/bin/mv $merge_dir/$_ $merge_dir/incomplete/") if(-e "$merge_dir/$_");
	}
###	++$number_missing;
    }
    close BAD;
    perform(*LOG, $state, "/bin/mv $merge_dir/incomplete_packets.txt $darts_mission/$year/$month/packet_check/incomplete_packets.$year$month$day.txt");
}
else {
    log_msg(*LOG, $state, "No incomplete packets file");
}

# Archive the sequence-count reports on DARTS.
perform(*LOG, $state, "/bin/mv $merge_dir/sequence_counts.txt           $darts_mission/$year/$month/packet_check/sequence_counts_$year$month$day.txt");
perform(*LOG, $state, "/bin/mv $merge_dir/sequence_counts_pre_check.txt $darts_mission/$year/$month/packet_check/sequence_counts_pre_check_$year$month$day.txt");

 end_join_test:

# mcrw 20230127
#
# Check for the existence of a list containing the ccsds files which should not be reformatted as they crash the reformatter...
#
log_msg(*LOG, $state, "Checking for bad input ccsds files");
if (open(FD, "< $HOME/track/do_not_reformat.txt")) {
    # Quarantine each listed file, then reset the list to empty for next run.
    while (<FD>) {
	chomp;
	perform(*LOG, $state, "/bin/mv $merge_dir/$_ $merge_dir/hooligans/");
    }
    close FD;
    perform(*LOG, $state, "/usr/bin/unlink $HOME/track/do_not_reformat.txt");
    perform(*LOG, $state, "/usr/bin/touch $HOME/track/do_not_reformat.txt");
}
#
# end new addition
#

# Get a count of number of mission data files here
#@pre_decompression = `ls $merge_dir/eis_md*`;
# mcrw 20130802
@pre_decompression = `ls $merge_dir/ | grep eis_md`;
#@pre_decompression = `ls /nasA_solar1/work/eis/localdata/sdtp/merge/ | grep eis_md`;

$number_pre_decompression = @pre_decompression;
log_msg(*LOG, $state, "Number of compressed files: " . scalar(@pre_decompression));

pipeline_exit("mission", 0) if $FORCE_REFORMAT;
#goto the_exit if $SPECIAL eq 'fetch';
goto the_exit if $FETCH_FLAG;


############################################### 3 - MISSION DATA DECOMPRESS
#
# Build a small IDL batch script (md_decomp.pro) and run it under ssw_idl to
# decompress the fetched mission-data files in $merge_dir.  Two variants:
# the legacy decompressor (OLD_DECOMP_FLAG, i.e. the "old_decomp" keyword)
# and the default "fix1" decompressor which uses the fix tables.

stage_title(*LOG, $state, "3. Starting mission data decompress");

log_msg(*LOG, $state, "(IDL) eis_md_decomp_fix1_script,'$merge_dir', /merge");

$local_idl = "/nasA_solar1/home/sbukeis/work/idl";
#$tmp_idl = "/nasA_solar1/home/sbukeis/tmp";
#$tmp_idl = "$HOME/tmp";

my $output;

# The heredocs interpolate $local_idl and $merge_dir, so the generated .pro
# file contains concrete paths at the time of the run.
if ($OLD_DECOMP_FLAG) {
$output = <<EOF;
!quiet=1
.comp $local_idl/eis_ccsds_interface__define
.comp $local_idl/mdppmdctrecover
.comp $local_idl/eis_md_decomp
.comp $local_idl/eis_md_decomp_script
eis_md_decomp_script,'$merge_dir', /merge

;.comp $local_idl/eis_md_decomp_fix1
;.comp $local_idl/eis_md_decomp_fix1_script
;eis_md_decomp_fix1_script, '$merge_dir', /merge
!quiet=1
;/ This slash keeps emacs syntax colouring happy
EOF
}
else {
$output = <<EOF;
!quiet=1
.comp $local_idl/eis_ccsds_interface__define
;.comp $local_idl/eis_md_decomp
;.comp $local_idl/eis_md_decomp_script
;eis_md_decomp_script,'$merge_dir', /merge
.comp $local_idl/mdppmdctrecover
;;;.comp $local_idl/mdppmdctrecover_merge
;.comp $local_idl/eis_md_decomp_fix
;.comp $local_idl/eis_md_decomp_fix_script
;eis_md_decomp_fix_script,'$merge_dir', /merge
.comp $local_idl/eis_md_decomp_fix1
.comp $local_idl/eis_md_decomp_fix1_script
eis_md_decomp_fix1_script, '$merge_dir', /merge
!quiet=1
;/ This slash keeps emacs syntax colouring happy
EOF
}
# mcrw 20130903 - write to fixed path
# NOTE(review): this open is unchecked - a failure would silently run IDL on
# a stale or empty md_decomp.pro.  Consider logging on failure.
open(MD_DECOMP, "> $temp_idl/md_decomp.pro");
#open(MD_DECOMP, "> /nasA_solar1/home/sbukeis/work/localdata/pipeline/idl/md_decomp.pro");
print MD_DECOMP $output;
close MD_DECOMP;

log_msg(*LOG, $state, "IDL file:\n$output");

# Start the mission data decompress; stdout+stderr of the IDL run go to a
# per-day log file (>& is csh-style redirection - perform presumably runs
# via a shell that accepts it; TODO confirm).
$decompression_log = "$pipeline_log" . "/md_decompress_" . "$date_string" . ".txt";
#system("/san_darts/solar/ssw/gen/setup/ssw_idl < $temp_idl/md_decomp.pro > /dev/null");
#perform(*LOG, $state, "$HOME/ssw_idl < $temp_idl/md_decomp.pro >& $decompression_log");
perform(*LOG, $state, "$HOME/ssw_idl < $temp_idl/md_decomp.pro >& $decompression_log");

#############die;

# Copy the decompression record and logs to DARTS
perform(*LOG, $state, "/bin/cp $HOME/work/localdata/log/decompression/merge_decomp_record.txt $darts_staging/logs/decompression/");

#??
#log_msg(*LOG, $state, "/bin/mv $pipeline_log/md_decompress_$date_string.txt $darts_staging/logs/decompression/");
#system("/bin/mv $pipeline_log/md_decompress_$date_string.txt $darts_staging/logs/decompression/");

# Produce a slimmed-down copy of the (verbose) IDL decompression log in the
# DARTS staging area, plus an alternative awk-slimmed version.
$decompress_log = "$darts_staging" . "/logs/decompression/md_decompress_" . "$date_string" . ".txt";
###perform(*LOG, $state, "$HOME/bin/slim.pl < $decompression_log > $darts_staging/logs/decompression/md_decompress_$date_string.txt");
perform(*LOG, $state, "$HOME/bin/slim.pl < $decompression_log > $decompress_log");

$alt_decompress_log = "$darts_staging" . "/logs/decompression/md_decompress_" . "$date_string" . "_alt.txt";
perform(*LOG, $state, "cat $decompress_log | awk -f '$HOME/bin/slim.awk' > $alt_decompress_log");
#perform(*LOG, $state, "cat $decompress_log | $HOME/bin/slim.awk > $alt_decompress_log");

# mcrw 20190808 - temp, just to get fuller log
# Remove the raw IDL log; only the slimmed versions are kept.
perform(*LOG, $state, "/bin/rm $decompression_log");
#perform(*LOG, $state, "/bin/cp $decompression_log $darts_staging" . "/logs/decompression/");

# mcrw 20190130 (20190806 - comment out)
# Remove the decompress_log too - just use the alt version from now
perform(*LOG, $state, "/bin/rm $decompress_log");

### REMOVE md_decompress_$date_$string file here

# Get a count of number of mission data files here
# mcrw 20130802
# NOTE(review): this uses a hard-coded absolute path instead of the
# commented-out $merge_dir form above - verify they are the same directory.
#@post_decompression = `ls $merge_dir/decompressed/ | grep eis_md`;
@post_decompression = `ls /nasA_solar1/home/sbukeis/work/localdata/sdtp/merge/decompressed/ | grep eis_md`;

$number_post_decompression = @post_decompression;
log_msg(*LOG, $state, "Number of de-compressed files: " . scalar(@post_decompression));

# If anything decompressed, dump the md headers into a per-day HTML report
# under the DARTS mission packet_check area and gzip it.
if($number_post_decompression != 0) {
    perform(*LOG, $state, "$HOME/bin/pipeline_md_hdr_dump $merge_dir/decompressed > $merge_dir/md_hdr_check.txt");
#    perform(*LOG, $state, "$HOME/bin/pipeline_ccsds_check $merge_dir/decompressed > $merge_dir/ccsds_hdr_check.txt");

    perform(*LOG, $state, "/bin/mv $merge_dir/md_hdr_check.txt $darts_mission/$year/$month/packet_check/decompressed_md_hdrs.$year$month$day.html");
#    perform(*LOG, $state, "/bin/mv $merge_dir/ccsds_hdr_check.txt $darts_mission/$year/$month/packet_check/decompressed_ccsds_hdrs.$year$month$day.txt");

    perform(*LOG, $state, "gzip -f $darts_mission/$year/$month/packet_check/decompressed_md_hdrs.$year$month$day.html");
#    perform(*LOG, $state, "gzip -f $darts_mission/$year/$month/packet_check/decompressed_ccsds_hdrs.$year$month$day.txt");
}

reformat_fits:

############################################### 4 - MISSION DATA REFORMAT
#
# Build a second IDL batch script (md_fits.pro) which runs eis_fits_script
# to reformat the decompressed ccsds files into level-0 FITS files in
# $temp_fits, writing the ccsds->fits name translation to $translate_log.

stage_title(*LOG, $state, "4. Starting mission data reformat");

log_msg(*LOG, $state, "(IDL) fits_script, '$merge_dir/decompressed/', '$temp_fits/'");

#??
#perform(*LOG, $state, "/bin/chmod 644 /nasA_solar1/home/sbukeis/eisco/planning_db/timeline_db/eis_science_db.dbf");

$translate_log_name = "md_translation" . "_$date_string" . ".txt";
$translate_log = "$pipeline_log/" . $translate_log_name;

# mcrw 20190809 added code to able older studies be re-reformatted (avoid sequence over-flow bug)
# Currently always empty; the /noalter_seq alternative is kept in the comment.
my $old_seq_str = ""; #($OLD_SEQ_FLAG == 1) ? ", /noalter_seq" : "";

$output = <<EOF;
!quiet=0
;;.comp $local_idl/merge/db_read_sciencestudyraster_entry

;.comp $local_idl/eis_read_orbit_events_reformatter	; no longer needed?
;;.comp $local_idl/merge/eis_hdr__define
;;.comp $local_idl/merge/eis_data__readfits

;.comp $local_idl/merge/eis_plan__define			; add local to SSW

;;.comp $local_idl/merge/eis_cal__define			; no differences
;;.comp $local_idl/merge/eis_data__define			; no differences
;.comp $local_idl/merge/eis_data__readccsds		; add local to SSW
;;.comp $local_idl/merge/eis_fits_calib			; no meaningful differences
;;.comp $local_idl/merge/eis_fits_coord			; no meaningful differences
;.comp $local_idl/merge/eis_fits_dataid			; add local to SSW

;;.comp $local_idl/merge/eis_spcd__define			; no meaningful differences
;.comp $local_idl/merge/eis_fits_script			; add local to SSW
;.comp $local_idl/merge/eis_fits_obstime			; add local to SSW
;.comp $local_idl/merge/eis_mkfits			; add local to SSW
;!quiet=1
eis_fits_script,'$merge_dir/decompressed/', '$temp_fits/',logfile='$translate_log' $old_seq_str
;eis_fits_script,'$merge_dir/decompressed/', '$temp_fits/',logfile='$translate_log', /noplan
;eis_fits_script,'$merge_dir/decompressed/', '$temp_fits/',logfile='$translate_log', /nospcd
;' ; This keeps emacs syntax colouring happy
EOF

# NOTE(review): unchecked open, same caveat as md_decomp.pro above.
open(MD_FITS, "> $temp_idl/md_fits.pro");
print MD_FITS $output;
close MD_FITS;

log_msg(*LOG, $state, "IDL file:\n$output");

$reformat_log = "$pipeline_log/md_reformat" . "_$date_string" . ".txt";
#$reformat_log = "/dev/null";

# Changed cp to mv

perform(*LOG, $state, "$HOME/ssw_idl < $temp_idl/md_fits.pro >& $reformat_log");
#perform(*LOG, $state, "$HOME/ssw_idl < $temp_idl/md_fits.pro");

# Archive the reformat and translation logs per-year in the staging area.
perform(*LOG, $state, "/bin/cp $reformat_log $darts_staging/logs/md_reformat/$year/");
perform(*LOG, $state, "/bin/cp $translate_log $darts_staging/logs/md_translation/$year/");

# Get a count of number of mission data files here
@post_reformat = `ls $temp_fits/ | grep eis_l0`;
log_msg(*LOG, $state, "Number of reformatted files: " . scalar(@post_reformat));

# TODO: Compare the number of output files with the number of decompressed files. If not the same
# then restart the reformat after separating out the done from the pending. Assume the first pending
# file caused the reformat to crash and move that file to a nursery somewhere.

# "fits_only" / "just_fits" keywords stop here - fits produced but nothing
# copied to DARTS.
#goto the_exit if $SPECIAL eq "fits_only";
#goto the_exit if $SPECIAL eq "just_fits";
goto the_exit if $FITS_ONLY_FLAG;
goto the_exit if $JUST_FITS_FLAG;


############################################### 4A - RESCUE DAMAGED FILES
skip_fetch:

# Attempt to recover damaged mission-data files via the rescue shell
# scripts, then re-count files so the summary stage sees the combined
# (original + rescued) totals.
goto skip_rescue unless $RESCUE_MOD;

stage_title(*LOG, $state, "5. Rescuing damaged files");

# Copy decompressed files to decompressed1
#perform(*LOG, $state, "/bin/cp $merge_dir/decompressed/eis_md* $merge_dir/merge1/decompressed1/");


# perform rescue.sh -m log_file

# NOTE(review): @non_compressed_files is presumably populated earlier in the
# file (not visible here); the body of this if is currently all commented out.
if(@non_compressed_files) {
    # If there are non compressed files then send them to the old rescue method
    # perform(*LOG, $state, "/bin/mv $merge_dir/decompressed/eis_md* $merge_dir/merge1/decompressed1/");
}

# Seed the rescue directory with anything that landed in the nursery.
perform(*LOG, $state, "/bin/cp $merge_dir/nursery/eis_md* $merge_dir/rescue/") if $RESCUE_MOD;

# Choose the rescue program: the new check-table based rescue_chk_tbl.sh
# (default, or when SPECIAL is "special"), otherwise the old rescue.sh.
# The -m argument is the log-file suffix "<sdate>_<eday>".
if($USE_NEW_RESCUE) {
    perform(*LOG, $state, "$HOME/bin/rescue_chk_tbl.sh -m " . "$sdate" . "_" . "$eday");
###    perform(*LOG, $state, "$HOME/bin/rescue_chk_tbl.sh -m " . "$sdate");
}
elsif($SPECIAL eq "special") {
    perform(*LOG, $state, "$HOME/bin/rescue_chk_tbl.sh -m " . "$sdate" . "_" . "$eday");
###    perform(*LOG, $state, "$HOME/bin/rescue_chk_tbl.sh -m " . "$sdate");
}
else {
    perform(*LOG, $state, "$HOME/bin/rescue.sh -m " . "$sdate" . "_" . "$eday");
###    perform(*LOG, $state, "$HOME/bin/rescue.sh -m " . "$sdate");
}

# Perform ccsds packet check to see how the rescue performed.
#@rescued_files = `ls $merge_dir/ | grep eis_md`;
#if(! @rescued_files) {
#    log_msg(*LOG, $state, "No rescued ccsds mission data files");
#}
#else {
#
#    log_msg(*LOG, $state, "Perform packet check on rescued ccsds mission data files");
#    perform(*LOG, $state, "$HOME/bin/pipeline_ccsds_check $merge_dir > $merge_dir/ccsds_hdr_check.txt");
#    perform(*LOG, $state, "/bin/mv $merge_dir/ccsds_hdr_check.txt $darts_mission/$year/$month/packet_check/rescued_ccsds_hdrs.$year$month$day.txt");
#
#}

# Copy decompressed files from decompressed1 to decompressed
#perform(*LOG, $state, "/bin/cp $merge_dir/merge1/decompressed1/eis_md* $merge_dir/decompressed/");

# Get count of decompressed files again
#@total_decompressed_files = `ls $merge_dir/decompressed/eis_md*`;

# add log files to this program's log 

# Get the number of fits files after rescue
# Accumulate rescued-file counts on top of the pre-rescue totals.
@pre_decompression = `ls $merge_dir/ | grep eis_md`;
$number_pre_decompression += scalar(@pre_decompression);
@post_decompression = `ls $merge_dir/decompressed/ | grep eis_md`;
$number_post_decompression += scalar(@post_decompression);
@post_reformat = `ls $temp_fits/ | grep eis_l0`;

#if($number_post_decompression != 0) {
#    perform(*LOG, $state, "$HOME/bin/pipeline_md_hdr_dump $merge_dir/decompressed > $merge_dir/md_hdr_check.txt");
#    perform(*LOG, $state, "$HOME/bin/pipeline_ccsds_check $merge_dir/decompressed > $merge_dir/ccsds_hdr_check.txt");
#
#    perform(*LOG, $state, "/bin/mv $merge_dir/md_hdr_check.txt $darts_mission/$year/$month/packet_check/rescued_decompressed_md_hdrs.$year$month$day.html");
#    perform(*LOG, $state, "/bin/mv $merge_dir/ccsds_hdr_check.txt $darts_mission/$year/$month/packet_check/rescued_decompressed_ccsds_hdrs.$year$month$day.txt");

# NOTE(review): the perform calls that produced these two files are commented
# out above, so these gzips likely act on files from a previous mechanism (or
# fail harmlessly) - confirm whether they are still needed.
perform(*LOG, $state, "gzip -f $darts_mission/$year/$month/packet_check/rescued_decompressed_md_hdrs.$year$month$day.html");
perform(*LOG, $state, "gzip -f $darts_mission/$year/$month/packet_check/rescued_decompressed_ccsds_hdrs.$year$month$day.txt");


#
# Write out all the fits file names
#
if(open(FOO, ">$merge_dir/fits_files.txt")) {
    print FOO @post_reformat;
    close FOO;
}
else {
#    &$pipe_log("Can't open $merge_dir/received_files.txt: $!");
    log_msg(*LOG, $state, "Can't open $merge_dir/fits_files.txt: $!");
}
perform(*LOG, $state, "/bin/mv $merge_dir/fits_files.txt $darts_mission/$year/$month/packet_check/fits_files.$year$month$day.txt");


############################################### 5 - UPDATE MD TRANSLATION DBASE

stage_title(*LOG, $state, "6. Update md translation database");

# eis_mkfits writes all the ccsds packet filenames and the corresponding fits filenames processed
# to a text file. Open the text file and transfer the information to a perl database.
#
# Two translation logs are merged into the dbm: the main reformat log
# ($translate_log) and the rescue-chain log.  Both loops below are
# identical; entries also go into the in-memory %translations (used by the
# engineering-file stage) and %temp hashes.

$rescue_translation_log = "$darts_staging/logs/md_translation/rescue/md_translation_rescue_" . "$sdate" . "_" . "$eday" . ".txt";
if(dbmopen(%trans, "$pipeline_log/mission_name_dbase", 0644)) {
    log_msg(*LOG, $state, "$pipeline_log/mission_name_dbase opened ok");
    if(open(REF_LOG, "< $translate_log")) {
	log_msg(*LOG, $state, "Opened $translate_log ok");
	while(<REF_LOG>) {
	    @files = split;		# Split ccsds filename and fits filename
	    $ccsds = $files[0];
	    $fits = $files[1];
	    $ccsds =~ s/ //g;		# Remove any white space from names
	    $fits =~ s/ //g;
	    chomp $fits;		# Remove newline if any
	    $trans{$ccsds} = $fits;
# mcrw 20190827
	    $translations{$ccsds} = $fits;
	    log_msg(*LOG, $state, "Updating mission_name_database: $ccsds => $fits");
	    $temp{$ccsds} = $fits;
	}
	close(REF_LOG);
	log_msg(*LOG, $state, "Closed $translate_log");
    }
    else {
	log_msg(*LOG, $state, "Can't open $translate_log: $!");
    }

    # Same again for the rescue translation log (may legitimately be absent
    # when nothing needed rescuing - the else branch just logs it).
    if(open(REF_LOG, "< $rescue_translation_log")) {
	log_msg(*LOG, $state, "Opened $rescue_translation_log ok");
	while(<REF_LOG>) {
	    @files = split;		# Split ccsds filename and fits filename
	    $ccsds = $files[0];
	    $fits = $files[1];
	    $ccsds =~ s/ //g;		# Remove any white space from names
	    $fits =~ s/ //g;
	    chomp $fits;		# Remove newline if any
	    $trans{$ccsds} = $fits;
# mcrw 20190827
	    $translations{$ccsds} = $fits;
	    log_msg(*LOG, $state, "Updating mission_name_database: $ccsds => $fits");
	    $temp{$ccsds} = $fits;
	}
	close(REF_LOG);
	log_msg(*LOG, $state, "Closed $rescue_translation_log");
    }
    else {
	log_msg(*LOG, $state, "Can't open $rescue_translation_log: $!");
    }

    log_msg(*LOG, $state, "Closing mission_name_dbase");
    if(dbmclose(%trans)) {
	log_msg(*LOG, $state, "Closed mission_name_dbase ok");
    }
    else {
	log_msg(*LOG, $state, "Closing mission_name_dbase failed: $!");
    }
}
else {
    log_msg(*LOG, $state, "Cannot open mission_name_dbase: $!");
}

# Copy the database to the staging area even if it was not updated
perform(*LOG, $state, "/bin/cp $pipeline_log/mission_name_dbase $darts_staging/logs/");

# Remove translate_log, reformat_log?

###goto skip1;


############################################### 6A - UPDATE SAT ROT
#
# Run the IDL satrot routine over the new fits files.  (Section numbers in
# these banners drifted from the stage_title numbers long ago.)

stage_title(*LOG, $state, "6A. Updating SAT ROT");

open(SR, "> $temp_idl/sat_rot.pro");
#print SR "satrot, /current\n";
#print SR ".comp $HOME/src/IDL/satrot/satrot\n";
print SR "satrot, '$temp_fits', /current\n";
close SR;
#log_msg(*LOG, $state, "$temp_idl/sat_rot.pro : satrot, $temp_fits, /current");
perform(*LOG, $state, "$HOME/ssw_idl < $temp_idl/sat_rot.pro");		# ~/work/localdata/pipeline/idl


############################################### 5A1 - GENERATE FITS HEADER DUMP

stage_title(*LOG, $state, "7. Generating fits header dump");

#log_msg(*LOG, $state, "Skipping as of 20200514 - fits_study_metric.pl needs updating");
#goto skip_fits_header_dump;

# 20200519 mcrw Added eis before glob in next line - was hanging trying to process the join directory
# When did the join directory appear there? I didn't put it there... Might be the python join program...
perform(*LOG, $state, "$HOME/bin/fits_study_metric.pl $temp_fits/eis* > $pipeline_log/fits_dump_$year$month$day.txt");
# Changed cp to mv
perform(*LOG, $state, "/bin/mv $pipeline_log/fits_dump_$year$month$day.txt $HOME/data/staging/packet_summary/$year");

skip_fits_header_dump:
# mcrw 20130918 - needs fixing
# 20200519 mcrw Added eis before glob in next line - was hanging trying to process the join directory
# Full header dump is appended (>>), gzipped, then moved to staging.
perform(*LOG, $state, "$HOME/bin/fits_header_dump $temp_fits/eis* >> $pipeline_log/fits_headers_$year$month$day.txt");
perform(*LOG, $state, "gzip -f $pipeline_log/fits_headers_$year$month$day.txt");
perform(*LOG, $state, "/bin/mv $pipeline_log/fits_headers_$year$month$day.txt.gz $HOME/data/staging/fits_headers/$year");


############################################### 5B - GET BLANK PIXEL DATA
#
# Run the IDL data_loss routine over the new fits files, appending results
# to a per-period data-loss log in the DARTS staging area.

stage_title(*LOG, $state, "8. Generating blank pixel data");

$data_loss_log = "$darts_staging/logs/data_loss/" . "$date_string" . "_data_loss.txt";

# As the IDL appends to the log file remove it before running the idl
perform(*LOG, $state, "if [ -e $data_loss_log ] ; then /bin/rm $data_loss_log; fi");

log_msg(*LOG, $state, "(IDL) data_loss, '$temp_fits',log_file='$data_loss_log'");

$output = <<EOF;
.comp $local_idl/data_loss
data_loss,'$temp_fits',log_file='$data_loss_log'
EOF

# NOTE(review): unchecked open; also uses $HOME/tmp rather than $temp_idl
# like the other generated .pro files - confirm that is intentional.
open(DL, "> $HOME/tmp/dl.pro");
print DL $output;
close DL;

log_msg(*LOG, $state, "IDL file:\n$output");
perform(*LOG, $state, "$HOME/ssw_idl < $HOME/tmp/dl.pro");
perform(*LOG, $state, "/bin/rm $HOME/tmp/dl.pro");


############################################### 5C - DUMP RESCUE DECOMPRESSION CHECK TABLE FILES
#
# Dump the rescue check tables to a text file, then archive the gzipped
# tables themselves under a per-period directory in the staging logs.

stage_title(*LOG, $state, "9. Generating check table dump");

$chk_table_dir = "$merge_dir/chk";
$chk_table_dump_dir = "$darts_staging/logs/check_tables";
$chk_table_dump_file = $chk_table_dump_dir . "/dumps/chk_" . $sdate . "_" . $eday . ".txt";
$chk_table_dump_store = $chk_table_dump_dir . "/tables/chk_" . $sdate . "_" . $eday;

log_msg(*LOG, $state, "(IDL) dump_all_chk_files,log_file='$chk_table_dump_file'");

$output = <<EOF;
.comp $local_idl/chk_file_dump
.comp $local_idl/dump_all_chk_files
dump_all_chk_files,LOG_FILE='$chk_table_dump_file'
EOF
    
open(IDL, "> $HOME/tmp/chk.pro");
print IDL $output;
close IDL;

log_msg(*LOG, $state, "IDL file:\n$output");
perform(*LOG, $state, "$HOME/ssw_idl < $HOME/tmp/chk.pro");
perform(*LOG, $state, "/bin/rm $HOME/tmp/chk.pro");
# Gzip in place, move into the per-period store, then gzip anything the
# first pass missed (the second gzip also re-runs over already-gzipped files
# harmlessly because of -f).
perform(*LOG, $state, "gzip -f $chk_table_dir/eis_md*");
perform(*LOG, $state, "/bin/mkdir $chk_table_dump_store");
perform(*LOG, $state, "/bin/mv $chk_table_dir/eis_md* $chk_table_dump_store");
perform(*LOG, $state, "gzip -f $chk_table_dump_store/*");


############################################### 6 - COMPRESS

# Now gzip everything in the temporary fits directory
# before moving to the DARTS directories

stage_title(*LOG, $state, "10. Gzip temporary fits files");

perform(*LOG, $state, "cd $temp_fits && gzip -f *");

# Skip the soda/DARTS copy stages when testing, when "no_soda" was given, or
# for a "special" run (special files are staged separately below).
if (($TESTING) || ($NO_SODA_FLAG) || ($SPECIAL eq "special")) {
    log_msg(*LOG, $state,"Skipping soda update because one of the flags TESTING, NO_SODA_FLAG, SPECIAL is set ($TESTING, $NO_SODA_FLAG, $SPECIAL)");
    goto skip_soda_update;
}

# "recover_test" runs stop entirely once the recovery chain has been tested.
if ($RECOVER_TEST_FLAG) {
    log_msg(*LOG, $state,"Exiting as the RECOVER_TEST_FLAG is set");
    goto the_exit;
}

#goto skip_soda_update if $TESTING;
##goto skip_soda_update if (($SPECIAL eq "no_soda") or ($SPECIAL eq "special"));
##goto the_exit if $SPECIAL eq "recover_test";
#goto skip_soda_update if (($NO_SODA_FLAG) or ($SPECIAL eq "special"));
#goto the_exit if $RECOVER_TEST_FLAG;

############################################### 7 - COPY ENGINEERING FILES TO CALIBRATION AREA

stage_title(*LOG, $state, "11. Move engineering files to calibration area");

# If the engineering.txt file exists then there is engineering data to be dealt with.
# Use the ccsds packet filename from the engineering.txt file to look up in the translation
# log to get the fits filename and use the engineering study type information to copy the
# fits file to the calibration directory.

#goto old_copy_method;

# mcrw 20190827 - simplify this
# Uses the in-memory %translations hash built in the translation-dbase stage
# (instead of re-opening the dbm as the old method below does).
if(-e "$merge_dir/engineering.txt") {
    log_msg(*LOG, $state,"Engineering summary exists ($merge_dir/engineering.txt)");
    if(open(ENG_LOG, "< $merge_dir/engineering.txt")) {
	log_msg(*LOG, $state,"Opened engineering summary OK");
	while(<ENG_LOG>) {
	    last if $_ =~ /^$/;			# Finish on a blank line
	    chomp;
	    next if $_ =~ /^Engineering/;
	    ($path, $type) = split(/:/, $_);
	    log_msg(*LOG, $state, "(path,type) = ($path, $type)");
	    @path = split(/\//, $path);		# Split up filename path
	    $ccsds_file = $path[-1];		# Get the ccsds filename
	    $fits_file = $translations{$ccsds_file};
	    if($fits_file) {
#	    $eng_dbase{$fits_file} = $type if $eng_dbase;	# Update engineering database
		$destination = eng_path($type);		# Get where to copy the fits file and copy it
		$source = $temp_fits . "/"  . $fits_file . ".gz";
		if($destination ne "") {
		    $dest1 = "$darts_mission" . $destination;
		    $dest2 = "$soda_darts_mission" . $destination;
		    log_msg(*LOG, $state, "Moving $source to $dest1 and $dest2");
		    # mcrw 25/07/07 - Move instead of copy so engineering files don't appear in science area
		    # mcrw 19/09/08 - Copy them now, not move
		    perform(*LOG, $state, "/bin/cp $source $dest1");
		    perform(*LOG, $state, "/bin/cp $source $dest2");
		}
		else {
		    log_msg(*LOG, $state, "No destination found for $fits_file ($type)");
		}
	    }
	    else {
		log_msg(*LOG, $state, "No translation found for $ccsds_file");
	    }	
	}
	close(ENG_LOG);
	log_msg(*LOG, $state,"Closed engineering summary OK");
    }
    else {
	log_msg(*LOG, $state, "Could not open engineering.txt file: $!");
    }
    # Archive the consumed engineering summary under packet_check.
    perform(*LOG, $state, "/bin/mv $merge_dir/engineering.txt $darts_mission/$year/$month/packet_check/engineering.$year$month$day.txt");
}
else {
    log_msg(*LOG, $state, "No engineering summary file");
}

# Always bypass the legacy copy method below.
goto skip_old_method;

old_copy_method:

#
# Legacy engineering-file copy path.  Dead in the normal flow (the
# "goto skip_old_method" above always jumps over it); kept for reference.
# It re-opens the dbm translation database to map each ccsds filename from
# engineering.txt to its fits filename, then copies the gzipped fits file to
# the calibration destination derived from the engineering study type.
#

$eng_dbase = 0;
if(-e "$merge_dir/engineering.txt") {
    log_msg(*LOG, $state,"Engineering summary exists");
    # $eng_dbase (scalar) flags whether the %eng_dbase dbm opened successfully.
    $eng_dbase = 1 if(dbmopen(%eng_dbase, "$pipeline_log/engineering_name_dbase", 0644));
    if(dbmopen(%etrans, "$pipeline_log/mission_name_dbase", undef)) {	# undef mode: open existing, don't create
	if(open(ENG_LOG, "< $merge_dir/engineering.txt")) {
	    $line = <ENG_LOG>;				# Read title
	    while($line = <ENG_LOG>) {			# Read ccsds packet filename
		last if $line =~ /^$/;			# Finish on a blank line
		chop $line;				# Remove \n
		($path, $type) = split(/:/, $line);	# Get filename and type of engineering study
		log_msg(*LOG, $state, "(path,type) = ($path, $type)");
		@path = split(/\//, $path);		# Split up filename path
		$ccsds_file = $path[-1];		# Get the ccsds filename

		$fits_file = $etrans{$ccsds_file};	# Get fits filename from ccsds filename
		$fits_file = $temp{$ccsds_file};	# %temp (incl. rescued entries) takes precedence
#		$ok_fits = $etrans{$ccsds_file};	# Get fits filename from ccsds filename
#		$rescued_fits = $temp{$ccsds_file};	# Get fits filename from ccsds filename

#		$fits_file = $ok_fits if $ok_fits;
#		$fits_file = $rescued_fits if $rescued_fits;

		# Must set source directory accordingly (if ok or rescued)

		if($fits_file) {
		    $eng_dbase{$fits_file} = $type if $eng_dbase;	# Update engineering database
		    $destination = eng_path($type);		# Get where to copy the fits file and copy it
		    $source = $temp_fits . "/"  . $fits_file . ".gz";
		    if($destination ne "") {
			$dest1 = "$darts_mission" . $destination;
			$dest2 = "$soda_darts_mission" . $destination;
			log_msg(*LOG, $state, "Moving $source to $dest1 and $dest2");
			# mcrw 25/07/07 - Move instead of copy so engineering files don't appear in science area
			# mcrw 19/09/08 - Copy them now, not move
			perform(*LOG, $state, "/bin/cp $source $dest1");
			perform(*LOG, $state, "/bin/cp $source $dest2");
		    }
		    else {
			log_msg(*LOG, $state, "No destination found for $fits_file ($type)");
		    }
		}
		else {
		    log_msg(*LOG, $state, "No translation found for $ccsds_file");
		}
	    }
	    close(ENG_LOG);
	}
	dbmclose(%etrans);
    }
    # Bug fix: was "if($eng_dabse)" (typo), an always-undef variable, so the
    # engineering dbm was never explicitly closed after a successful open.
    dbmclose(%eng_dbase) if($eng_dbase);

    perform(*LOG, $state, "/bin/mv $merge_dir/engineering.txt $darts_mission/$year/$month/packet_check/engineering.$year$month$day.txt");
    #
    # Generate cal study summary file from fits here?
    #  ./fits_cal_study.pl > ~/work/localdata/log/mission/cal_studies_list.txt
    #
}
else {
    log_msg(*LOG, $state, "No engineering summary file");
}

skip_old_method:

# Copy the engineering database to the staging area even if it was not updated
#log_msg(*LOG, $state, "/bin/cp $pipeline_log/engineering_name_dbase $darts_staging/logs/");

perform(*LOG, $state, "/bin/cp $pipeline_log/engineering_name_dbase $darts_staging/logs/");


############################################### 8 - COPY MISSION FITS TO DARTS
#
# Copy each gzipped level-0 fits file into the soda DARTS tree, routing by
# the date embedded in the filename (not the pipeline run date).

stage_title(*LOG, $state, "12. Moving mission data files to DARTS");

#
# Filenames:
#	eis_l0_yyyymmdd_hhmmss.fits
#

@files = `ls $temp_fits`;
foreach $file (@files) {
    next if !($file =~ /l0/);
    chomp $file;
    # Match destination directory with filename
    # Fixed-offset substr relies on the eis_l0_yyyymmdd_hhmmss naming above.
    $darts_year  = substr $file,  7, 4;
    $darts_month = substr $file, 11, 2;
    $darts_day   = substr $file, 13, 2;

###    log_msg(*LOG, $state,  "/bin/cp $temp_fits/$file $darts_mission/$darts_year/$darts_month/$darts_day/");

#    system("/bin/cp $temp_fits/$file $darts_mission/$darts_year/$darts_month/$darts_day/");
#    system("/bin/cp $temp_fits/$file $soda_darts_mission/$darts_year/$darts_month/$darts_day/");

    perform(*LOG, $state, "/bin/cp $temp_fits/$file $soda_darts_mission/$darts_year/$darts_month/$darts_day/");
}

skip_soda_update:
###skip1:

    # NOTE(review): $file here is whatever the foreach above left behind (its
    # last element), and is never set at all when the soda update was skipped
    # via goto - this "special" copy looks broken; confirm intent.
    perform(*LOG, $state, "/bin/cp $temp_fits/$file $darts_staging/mission/special") if $SPECIAL eq "special";


############################################### 9 - CLEAN UP
#
# Remove per-run temporaries: generated IDL scripts, the merge record, and
# (when rescue ran) the nursery/rescue holding directories.

stage_title(*LOG, $state, "13. Cleaning up");

# Remove temporary idl programs
perform(*LOG, $state, "/bin/rm $temp_idl/*pro");

# Remove temporary fits files
#print LOG "\t/bin/rm $temp_fits/*\n";
###system("cd $temp_fits && /bin/rm eis_l0*");
###perform(*LOG, $state, "cd $temp_fits && /bin/rm eis_l0*");

# Remove any temporary files from decompression
#print LOG "\t/bin/rm $eis_local/tmp/eis_md*\n";
###system("/bin/rm $eis_local/tmp/eis_md*");
###perform(*LOG, $state, "/bin/rm $eis_local/tmp/eis_md*");

# mcrw 20130918 - needs fixing
perform(*LOG, $state, "mv $HOME/tmp/merge_record.txt $HOME/data/staging");
perform(*LOG, $state, "gzip -f $HOME/data/staging/merge_record.txt");

# Assume any files left in the nursery have been rescued...
perform(*LOG, $state, "/bin/rm $merge_dir/nursery/*") if $RESCUE_MOD;
perform(*LOG, $state, "/bin/rm $merge_dir/rescue/*") if $RESCUE_MOD;

# mcrw 20200727
perform(*LOG, $state, "/bin/rm $HOME/work/localdata/log/dr/*");

############################################### 10 - REMOVE QUICKLOOK MISSION DATA
#
# The reformatted data supersedes quicklook, so delete the quicklook
# directories (DARTS plus the two sdtp trees) for every day in the
# processed period [start date .. end date], inclusive.

goto skip_quicklook if $TESTING;

stage_title(*LOG, $state, "14. Removing quicklook mission data for $year$month$day");

# Date::Calc day numbers give the period length; Add_Delta_Days walks each
# day, re-zero-padding month/day for path construction.
$start_days = Date::Calc::Date_to_Days($syear, $smonth, $sday);
$end_days = Date::Calc::Date_to_Days($eyear, $emonth, $eday);
#foreach $diff (-1 .. $end_days - $start_days - 1) {
foreach $diff (0 .. ($end_days - $start_days)) {
    ($nyear, $nm, $nd) = Date::Calc::Add_Delta_Days($syear,$smonth,$sday, $diff);
    $nmonth = sprintf "%02u", $nm;
    $nday = sprintf "%02u", $nd;
    print "Doing $diff -> $nyear, $nmonth, $nday\n";

    # Remove directory in DARTS if it exists
    if(-e "$darts_mission_ql/$nyear/$nmonth/$nday") {
	perform(*LOG, $state, "/bin/rm -rf $darts_mission_ql/$nyear/$nmonth/$nday");
    }
    else {
	log_msg(*LOG, $state, "Directory $darts_mission_ql/$nyear/$nmonth/$nday does not exist");
    }

    # Remove md directory in sdtp if it exists
    if(-e "$ql_mission/$nyear$nmonth$nday") {
	perform(*LOG, $state, "/bin/rm -rf $ql_mission/$nyear$nmonth$nday");
    }
    else {
	log_msg(*LOG, $state, "Directory $ql_mission/$nyear$nmonth$nday does not exist");
    }

    # Remove md fits directory in sdtp if it exists
    if(-e "$ql_mission_fits/$nyear$nmonth$nday") {
	perform(*LOG, $state, "/bin/rm -rf $ql_mission_fits/$nyear$nmonth$nday");
    }
    else {
	log_msg(*LOG, $state, "Directory $ql_mission_fits/$nyear$nmonth$nday does not exist");
    }
}

## Remove directory in DARTS if it exists
#if(-e "$darts_mission_ql/$year/$month/$day") {
#    perform(*LOG, $state, "/bin/rm -rf $darts_mission_ql/$year/$month/$day");
#}
#else {
#    log_msg(*LOG, $state, "Directory $darts_mission_ql/$year/$month/$day does not exist");
#}
#
## Remove md directory in sdtp if it exists
#if(-e "$ql_mission/$year$month$day") {
#    perform(*LOG, $state, "/bin/rm -rf $ql_mission/$year$month$day");
#}
#else {
#    log_msg(*LOG, $state, "Directory $ql_mission/$year$month$day does not exist");
#}
#
## Remove md fits directory in sdtp if it exists
#if(-e "$ql_mission_fits/$year$month$day") {
#    perform(*LOG, $state, "/bin/rm -rf $ql_mission_fits/$year$month$day");
#}
#else {
#    log_msg(*LOG, $state, "Directory $ql_mission_fits/$year$month$day does not exist");
#}

skip_quicklook:


############################################### 11 - Generate summary files

stage_title(*LOG, $state, "15. Generating summary files");

$ccsds_missing_file = "$darts_mission/$year/$month/packet_check/ccsds_hdrs.$year$month$day.txt";
# Look for the eis_plan file first in the flat eis_plan directory, then in
# the per-year subdirectory.
$plan_file = "$staging/eis_plan/eis_plan_$syear$smonth$sday.txt";
log_msg(*LOG, $state, "Looking for eis_plan file at $plan_file");
# This will be set to 1 if the file exists, 0 otherwise
# (scalar-context stat returns true on success, false otherwise).
$eis_plan_found = stat $plan_file;
if (! $eis_plan_found) {
    log_msg(*LOG, $state, "Plan file not found");
    $plan_file = "$staging/eis_plan/$year/eis_plan_$syear$smonth$sday.txt";
    log_msg(*LOG, $state, "Looking for eis_plan file at $plan_file");
    $eis_plan_found = stat $plan_file;
}
#goto skip_summary_header if ! $continue_summary;
$continue_summary = $eis_plan_found;

# With a plan file, count the rasters it expects and write the per-raster
# estimate to ~/tmp/rasters_for_plan.txt for the loop below.
if($eis_plan_found) {
    log_msg(*LOG, $state, "Found plan file");
    $expected_number_of_rasters = `rasters_for_plan.sh $plan_file`;
    chomp $expected_number_of_rasters;
    perform(*LOG, $state, "estimate_rasters1.pl $plan_file > ~/tmp/rasters_for_plan.txt");
}
else {
    log_msg(*LOG, $state, "Plan file not found");
    $expected_number_of_rasters = 0;
}

# NOTE(review): $plan_index is set but not used in this loop, and the loop
# overwrites ($sd,$st,$ed,$et) each iteration so only the last non-TL line
# survives - presumably the plan's overall end time; confirm.
$plan_index = 0;
if(open PLN, "< $HOME/tmp/rasters_for_plan.txt") {
    while(<PLN>) {
	chomp;
	next if /^TL/;
	($sd, $st, $ed, $et) = split;
    }
    close PLN;
}

# Output format: filename	packets_per_exposure	number_of_exposures	timeline_id	reps_req	reps_done	RID	MID	Fx	Fy	ExpPerPos	data_volume	acronym
perform(*LOG, $state, "exposure_count.pl < $darts_mission/$year/$month/packet_check/md_hdrs.$year$month$day.html > $HOME/tmp/packets_per_exposure.txt");

# Output format: filename	packets_received	number_missing		number_of_gaps	date
perform(*LOG, $state, "ccsds_missing_summary1.pl $year$month$day $ccsds_missing_file > ~/tmp/missing.txt");
# Column totals from the missing-packet summary; each backtick result is a
# single number followed by a newline, so chomp each before later use.
$total_ccsds_packets         = `awk '{ tot += \$2 } END { print tot }' ~/tmp/missing.txt`;
$total_ccsds_packets_missing = `awk '{ tot += \$3 } END { print tot }' ~/tmp/missing.txt`;
$total_gaps                  = `awk '{ tot += \$4 } END { print tot }' ~/tmp/missing.txt`;
$total_files                 = `grep eis_md ~/tmp/missing.txt | wc -l`;
chomp $total_files;
chomp $total_ccsds_packets;
# Bug fix: was "chomp $totsl_ccsds_packets_missing" (typo), which chomped an
# unrelated always-undef variable and left the trailing newline on the real
# total used in the summary report below.
chomp $total_ccsds_packets_missing;
chomp $total_gaps;

#
# put in exposures_missing_packets.pl output here, send to fold_reports.pl
#
# Output: filename	hdrs
# Build per-file reports (exposures missing packets, sequence-count
# continuity) and fold them, together with the translation logs and - when
# rescue ran - the rescue translation and data-loss logs, into the combined
# reformat summary.
perform(*LOG, $state, "exposures_missing_packets.pl < $ccsds_missing_file > $HOME/tmp/exp_missing.txt");
perform(*LOG, $state, "sequence_count_continuity_check.pl < $darts_mission/$year/$month/packet_check/sequence_counts_$year$month$day.txt > $HOME/tmp/seq_cont.txt");
if($RESCUE_MOD) {
#    $rescue_translation_log = "$darts_staging/logs/md_translation/rescue/md_translation_rescue_" . "$sdate" . "_" . "$eday" . ".txt";
    perform(*LOG, $state, "fold_reports1.pl $HOME/tmp/missing.txt $HOME/tmp/packets_per_exposure.txt $translate_log $HOME/tmp/exp_missing.txt $HOME/tmp/reformat_summary.txt $HOME/tmp/seq_cont.txt $rescue_translation_log $data_loss_log");
}
else {
    perform(*LOG, $state, "fold_reports.pl $HOME/tmp/missing.txt $HOME/tmp/packets_per_exposure.txt $translate_log $HOME/tmp/exp_missing.txt $HOME/tmp/reformat_summary.txt $HOME/tmp/seq_cont.txt");
}

#??
#perform(*LOG, $state, "/bin/mv ~/tmp/missing.txt $staging/packet_summary/summary_$syear$smonth$sday.txt");

# Get total number of exposures expected and got
# Expected = sum over plan entries of exposures * ExpPerPos; -1 sentinel
# avoids a divide-by-zero style 100% loss when no plan data was available.
$total_exps_expected = `awk '{ tot += \$3 * \$11 } END { print tot }' $HOME/tmp/packets_per_exposure.txt`;
$total_exps_expected = -1 if $total_exps_expected == 0;

$total_exps_got = `awk '/^Filename/ { next } { tot += \$8 } END { print tot }' $HOME/tmp/reformat_summary.txt`;
chomp $total_exps_got;

#if(open SUMMARY, "$staging/packet_summary/summary_$syear$smonth$sday.txt") {
# Build the plan-summary header in a temp file; it is concatenated with the
# reformat summary further down. Open failure is silently skipped, matching
# the original best-effort behaviour.
if(open SUMMARY, "> $HOME/tmp/ref_sum.txt") {
#    print SUMMARY "Summary for the plan from $sd $st to $ed $et\n";
    print SUMMARY "Summary for the plan from $sdate $stime to $edate $etime\n";
    print SUMMARY "\n";
    my $fits_num = scalar(@post_reformat);	# number of fits files produced

    # Only the "Rasters Planned" line depends on whether an EIS plan file was
    # available ($continue_summary); all other statistics are printed the same
    # way in both cases, so they are emitted once below instead of twice.
    if($continue_summary) {
	print SUMMARY "Rasters Planned          : ", sprintf("%6u", $expected_number_of_rasters), "\n";
    }
    else {
	print SUMMARY "\nNo EIS plan file was available. A complete summary cannot be generated.\n\n";
	print SUMMARY "Rasters Planned          : ????\n";
    }
    print SUMMARY "Rasters (files) Received : ", sprintf("%6u", $total_files), "\n";
    print SUMMARY "Number Of Damaged Files  : ", sprintf("%6u", $number_missing), "\n";
    print SUMMARY "File Damage Rate         : ", sprintf("%6.2f", ($total_files == 0) ? 0.0 : ($number_missing / $total_files) * 100.0), "%\n";
    print SUMMARY "Files Pre Decompression  : ", sprintf("%6u", $number_pre_decompression), "\n";
    print SUMMARY "Files Post Decompression : ", sprintf("%6u", $number_post_decompression), "\n";
    print SUMMARY "Number Of Fits Files     : ", sprintf("%6u", $fits_num), "\n";
    print SUMMARY "Expected Exposures       : ", sprintf("%6u", $total_exps_expected), "\n";
    print SUMMARY "Actual Exposures         : ", sprintf("%6u", $total_exps_got), "\n";
    print SUMMARY "Exposure Loss            : ", sprintf("%6.2f", ($total_exps_expected == 0) ? 0.0 : 100.0 - ($total_exps_got / $total_exps_expected) * 100.0), "%\n";
    print SUMMARY "Total Ccsds Packets      : ", sprintf("%6u", $total_ccsds_packets), "\n";
    print SUMMARY "Ccsds Packets Missing    : ", sprintf("%6u", $total_ccsds_packets_missing), "\n";
    print SUMMARY "Ccsds Packets Loss Rate  : ", sprintf("%6.2f", ($total_ccsds_packets == 0) ? 0.0 : ($total_ccsds_packets_missing / $total_ccsds_packets) * 100.0), "%\n";
    print SUMMARY "Number Of Gaps           : ", sprintf("%6u", $total_gaps), "\n\n";
    close SUMMARY;
}

# Publish the combined summary (plan header + per-file reformat report).
# In TEST mode the output gets a _TEST suffix so real summaries are not clobbered.
if($TESTING) {
    perform(*LOG, $state, "cat $HOME/tmp/ref_sum.txt $HOME/tmp/reformat_summary.txt > $staging/reformat_summary/summary_$syear$smonth$sday" . "_TEST.txt");
}
else {
    perform(*LOG, $state, "cat $HOME/tmp/ref_sum.txt $HOME/tmp/reformat_summary.txt > $staging/reformat_summary/summary_$syear$smonth$sday.txt");
    # Second copy goes into the per-year subdirectory.
    perform(*LOG, $state, "cat $HOME/tmp/ref_sum.txt $HOME/tmp/reformat_summary.txt > $staging/reformat_summary/$syear/summary_$syear$smonth$sday.txt");
}

perform(*LOG, $state, "/bin/rm $HOME/tmp/reformat_summary.txt");

###perform(*LOG, $state, "/bin/rm $HOME/tmp/ref_sum.txt $HOME/tmp/reformat_summary.txt $HOME/tmp/packets_per_exposure.txt $HOME/tmp/missing.txt $HOME/tmp/exp_missing.txt $HOME/tmp/rasters_for_plan.txt");

# Record the time range just processed so the next run knows where to resume;
# the cumulative file keeps a history when running under cron.
perform(*LOG, $state, "echo $sdate $edate $stime $etime > $HOME/track/latest_mission_reformat.txt") unless $TESTING;
perform(*LOG, $state, "echo $sdate $edate $stime $etime >> $HOME/track/cumulative_latest_mission_reformat.txt") if $cron;


############################################### 12 - EXIT

# Final stage: archive the rescue-run logs, remove temporaries, record the
# run totals in the master pipeline log, and exit via pipeline_exit.
the_exit:
stage_title(*LOG, $state, "16. Exiting");

#&$perform("/bin/rm $reformat_log");
#&$perform("/bin/rm $translate_log");

###perform(*LOG, $state, "/bin/rm $pipeline_log/md_decompress_$date_string.txt");
###perform(*LOG, $state, "/bin/rm $md_fetch_log");
###perform(*LOG, $state, "/bin/rm $reformat_log");
###perform(*LOG, $state, "/bin/rm $translate_log");

# Move rescue-mode logs from the working log directory to their DARTS
# staging archive locations, then delete the per-run temporaries.
perform(*LOG, $state, "/bin/mv $pipeline_log/pipeline_mission_rescue_log_rescue* $darts_staging/logs/pipeline_mission/rescue/");
perform(*LOG, $state, "/bin/mv $pipeline_log/md_translation_rescue_*_* $darts_staging/logs/md_translation/rescue/");
perform(*LOG, $state, "/bin/mv $pipeline_log/md_reformat_rescue_*_* $darts_staging/logs/md_reformat/rescue/");
perform(*LOG, $state, "/bin/mv $pipeline_log/fits_dump_rescue_*.txt $darts_staging/packet_summary/");
perform(*LOG, $state, "/bin/rm $translate_log");
perform(*LOG, $state, "/bin/rm $pipeline_log/md_decompress_rescue_*_*");

###perform(*LOG, $state, "study_log.csh $syear $smonth");

perform(*LOG, $state, "set noclobber");

# Write the run totals to the master pipeline log (opened at startup).
#print MLOG "\tReformatted " . scalar(@post_reformat) . " files from " . scalar(@post_decompression) . " decompressed (" . scalar(@pre_decompression) . " compressed, " . @rec_files . " received)\n";
print MLOG "\tReformatted " . scalar(@post_reformat) . " files from $number_post_decompression decompressed ($number_pre_decompression compressed, " . @rec_files . " received)\n";

# Publish the tracking files alongside the staged logs.
#system("/bin/cp ~/track/mission_pending.txt $HOME/data/staging/logs/");
#system("/bin/cp ~/track/latest_mission_reformat.txt $HOME/data/staging/logs/");
#system("/bin/cp ~/track/cumulative_latest_mission_reformat.txt $HOME/data/staging/logs/");
perform(*LOG, $state, "/bin/cp ~/track/mission_pending.txt $HOME/data/staging/logs/");
perform(*LOG, $state, "/bin/cp ~/track/latest_mission_reformat.txt $HOME/data/staging/logs/");
perform(*LOG, $state, "/bin/cp ~/track/cumulative_latest_mission_reformat.txt $HOME/data/staging/logs/");
pipeline_exit("mission", 0);


############################################### subs

sub parse_contact_file {
    my $date = shift;	# Date string (YYYYMMDD) used to locate the contacts file
    my $fh   = shift;	# Log filehandle for error messages
    my $line;
    my $src_file;
    my $file;

    # Locate the contact file for this date under $pass_list_dir (defined in
    # pipeline_defs). Returns "" on any failure, after logging to $fh.
    $src_file = "$date" . "_contacts.txt";
    $file = `/bin/ls $pass_list_dir/$src_file`;
    if($file eq "") {
	print $fh "\tCan't find contact file for $date\n";
	return "";
    }
    chomp $file;
#    print $fh "\tUsing contact file $file for $date\n";
    $line = "";

    # Keep the LAST real time contact of the day. (Re-enabling the
    # commented-out 'last' below would instead stop at the first one.)
    if(open(PASS, "< $file")) {
    	while(<PASS>) {
	    if(/real/) {
		chomp;
		$line = $_;
###		last;
	    }
    	}
	close(PASS);	# was missing: the filehandle leaked on every call
    }
    else {
    	print $fh "\tCan't open contacts file for $date ($!)\n";
    	return "";
    }
    if($line eq "") {
        print $fh "\tNo start time found in $file\n";
        return "";
    }
    return $line;
}

##### Change this to look at op_period files
sub parse_orl_file {
    my $date = shift;	# Date of contact (YYYYMMDD), used to glob the orl file
    my $fh   = shift;	# Log file filehandle (currently unused; kept for callers)

    my $line;
    my $src_file;
    my $file;

    # Parse orl file to get start time. $orl_dir is defined in pipeline_defs.
    # On failure returns a human-readable error string instead of an hhmm time;
    # callers evidently distinguish by content, so error text matters.
    $src_file = "EIS_" . "$date" . "????.orl";
    $file = `/bin/ls $orl_dir/$src_file`;
    if($file eq "") {
	return "Orl file not found";
    }
    chomp $file;
    $line = "";
    my $found = 0;
    if(open(ORL, "< $file")) {
	while(<ORL>) {
	    chomp;

	    # Changed mcrw 20111124 to use the start time after the obstbl dump:
	    # the plan start is the THIRD "START" line in the file.
	    if(/^START/) {
		++$found;
		if($found == 3) {
		    $line = $_;
		    last;
		}
	    }
	}

	close(ORL);
    }
    else {
    	return "$file failed to open";
    }

    if($line eq "") {
        return "parse error in $file (found = $found)";
    }
    #
    # $line should be like: START:2007/02/13.10:00:00 { SEQ:EIS_LOAD_OBSTBL_REAL ;}
    # Return hhmm portion (fixed character positions 17-18 and 20-21).
    #
    # Lexically scoped now; previously $hh/$mm leaked as package globals.
    my $hh = substr $line, 17, 2;
    my $mm = substr $line, 20, 2;

    # Validate: 00-23 hours, 00-59 minutes. Anchors are equivalent here since
    # both strings are exactly two characters long.
    return "parse error for hour in $file"   unless $hh =~ m/^(?:[0-1][0-9]|2[0-3])$/;
    return "parse error for minute in $file" unless $mm =~ m/^[0-5][0-9]$/;	# was "$ile" (typo): message lost the filename

    return "$hh$mm";
}
