#!/usr/local/bin/perl

#
# rescue_reformat.pl 200803 (for example)
#
# NOTE(review): no "use strict; use warnings;" -- this script depends on
# package globals created by the do'd pipeline library files below, so
# strictures cannot be enabled without a wider refactor.

use Time::Local;
use Date::Calc;
# HOME added to the Env import list: $HOME is interpolated on the next
# line, and Env is the only thing that can bring it into scope this early
# (pipeline_defs.pl is not loaded until further down).
use Env qw(EGSE_SW_LIB HOME);

$rescue_fits = "$HOME/work/localdata/pipeline/rescue_fits";

# Instead of die-ing would be better to report the error in a file somewhere
# and then exit. This applies to all instances of die of course. Todo.

#do "$EGSE_SW_LIB/date_calc.pl"     || die "Can't do date_calc.pl: $!\n";
do "$EGSE_SW_LIB/pipeline_defs.pl" || die "Can't do pipeline_defs.pl: $!\n";
do "$EGSE_SW_LIB/pipeline.pl"      || die "Can't do pipeline.pl: $!\n";

# Processing date (yyyymmdd string) taken from the command line.
$sdate = $ARGV[0];

# Master pipeline log, append mode.
# Bug fix: the die message previously interpolated the undefined variable
# $pipeline_log_rescue, printing a garbled path like "<dir>/.txt".
open(MLOG, ">> $pipeline_log/pipeline_log_rescue.txt") || die "Can't open $pipeline_log/pipeline_log_rescue.txt for writing: $!";
print MLOG scalar(localtime), " (JST) rescue_reformat started ";

#$state = "interactive";

# Per-run mission log, append mode.
# Bug fix: the die message previously interpolated the undefined variable
# $pipeline_mission_log_rescue_ and so omitted the path entirely.
open(LOG, ">> $pipeline_log/pipeline_mission_log_rescue_$sdate") || die "Can't open $pipeline_log/pipeline_mission_log_rescue_$sdate for writing: $!";

# init_pipeline (from pipeline.pl) returns three code refs used throughout:
# a logger, a shell-command runner, and a stage-title printer.
($pipeline_logger, $pipeline_perform, $pipeline_stage_title) = init_pipeline(*LOG, "interactive");
&$pipeline_stage_title("Started (manually)");
# NOTE(review): logs "rescue_reformat1.pl" but the file header says
# rescue_reformat.pl -- confirm which name is current before changing.
&$pipeline_logger("rescue_reformat1.pl $sdate");

#$pipeline_mission_log = "$pipeline_log/pipeline_mission_log_$sdate";
#open(LOG, ">> $pipeline_mission_log") || die "Can't open $pipeline_mission_log for writing: $!";
#print LOG "Started ", scalar(localtime), " (JST)\n";

$date_string = $sdate;


############################################### 10 - MISSION DATA REFORMAT

&$pipeline_stage_title("Starting mission data reformat");

# Names of the translation log produced by the IDL reformatter: one copy
# lives under $pipeline_log, keyed by the run date.
###$date_string  = "$year$month$day";
$translate_log_name = "md_translation_rescue_${date_string}.txt";
$translate_log = "$pipeline_log/$translate_log_name";

# Count the decompressed mission-data files awaiting reformatting.
@post_decompression = `ls $merge_dir/decompressed/ | grep eis_md`;
$number_post_decompression = scalar(@post_decompression);

# IDL batch script driving the reformatter.  Pipeline paths are
# interpolated into the heredoc; the commented ;.comp lines are retained
# as alternatives used during testing.
$output = <<EOF;
.comp $local_idl/eis_read_orbit_events_reformatter
    ;.comp $local_idl/test/eis_spcd__define
.comp $local_idl/merge/eis_spcd__define
.comp $local_idl/merge/eis_mkfits
    ;.comp $local_idl/test/eis_hdr__define
    ;.comp $local_idl/test/eis_data__readccsds
    ;.comp $local_idl/merge/eis_data__readfits
    ;.comp $local_idl/test/eis_mkfits
    ;.comp $local_idl/test/eis_fits_script
!quiet=1
eis_fits_script,'$merge_dir/decompressed/', '$rescue_fits/',logfile='$translate_log', /rescued
    ;eis_fits_script,'$merge_dir/decompressed/', '$rescue_fits/',logfile='$translate_log', /nospcd
EOF

# Write the IDL script to the temporary IDL area.
# Bug fix: the open (and close) were previously unchecked, so a bad
# $temp_idl silently produced a missing/empty md_fits.pro and the IDL
# run below would do nothing.
open(MD_FITS, "> $temp_idl/md_fits.pro") || die "Can't open $temp_idl/md_fits.pro for writing: $!";
print MD_FITS $output;
close(MD_FITS) || die "Can't close $temp_idl/md_fits.pro: $!";

&$pipeline_logger("IDL file:\n$output");

# Per-run reformatter log; IDL stdout/stderr is appended here each pass.
$reformat_log = "$pipeline_log/md_reformat_rescue" . "_$date_string" . ".txt";

# Retry loop: the IDL reformatter stops at the first file it cannot
# handle.  Each pass reformats what it can; the offending source file is
# then quarantined and the run restarted until every decompressed file
# has been accounted for (reformatted or failed).
$total_reformat_count = 0;
$more_files = 1;
while($more_files) {
    # Reformat files
    &$pipeline_perform("/nasA_solar1/home/sbukeis/ssw_idl < $temp_idl/md_fits.pro >> $reformat_log 2>&1");

    # Get a count of number of mission data files here
    @reformat_count = `ls $rescue_fits/ | grep eis_l0`;
    $reformat_count = @reformat_count;
    $total_reformat_count += $reformat_count;
    # Every input produced a fits file: the whole batch is done.
    last if $total_reformat_count == $number_post_decompression;
    
    # Reformatter failed on a file.
    # Remove decompressed files which have been reformatted.
    # Move existing fits to temp_fits directory
    # Move failed source file back to incomplete directory.
    # Start reformat again
    if($reformat_count > 0) {
	while($reformat_count--) {
#foreach $count (0 .. ($reformat_count - 1)) {
	    # @post_decompression is consumed in ls order, assumed to match
	    # the order the reformatter processed the files -- TODO confirm.
	    $file = shift @post_decompression;
	    chomp $file;
	    &$pipeline_perform("/bin/rm $merge_dir/decompressed/$file");
	}
    }
    # The next entry is the file the reformatter choked on: return it to
    # the incomplete area and drop its decompressed copy.
    $file = shift @post_decompression;
    chomp $file;
    &$pipeline_perform("/bin/mv $merge_dir/rescue/$file $merge_dir/incomplete/");
    &$pipeline_perform("/bin/rm $merge_dir/decompressed/$file");
    # Park this pass's fits output so the per-pass count above restarts
    # from zero on the next iteration.
    &$pipeline_perform("/bin/mv $rescue_fits/* $temp_fits");
    &$pipeline_perform("echo >> $reformat_log");
    &$pipeline_perform("echo $file failed. Restarting. >> $reformat_log");
    &$pipeline_perform("echo >> $reformat_log");
    # The failed file counts as processed, otherwise the loop never ends.
    ++$total_reformat_count;
    
    $more_files = ($total_reformat_count < $number_post_decompression);
}

# Sweep the final pass's fits output into the staging fits area and
# archive both logs under the DARTS staging tree.
&$pipeline_perform("/bin/mv $rescue_fits/* $temp_fits");
&$pipeline_perform("/bin/cp $reformat_log $darts_staging/logs/md_reformat/rescue/");
&$pipeline_perform("/bin/cp $translate_log $darts_staging/logs/md_translation/rescue/");

# Get a count of number of mission data files here
@post_reformat = `ls $temp_fits/eis_l0*`;
&$pipeline_logger("Number of reformatted files: " . scalar(@post_reformat));


############################################### 5 - UPDATE MD TRANSLATION DBASE

&$pipeline_stage_title("Update md translation database");

# eis_mkfits writes all the ccsds packet filenames and the corresponding fits filenames processed
# to a text file. Open the text file and transfer the information to a perl database.

if(dbmopen(%trans, "$pipeline_log/rescue_mission_name_dbase", 0644)) {
    &$pipeline_logger("rescue_mission_name_dbase opened");
    if(open(REF_LOG, "< $darts_staging/logs/md_translation/rescue/$translate_log_name")) {
        &$pipeline_logger("Opened $translate_log_name ok");
        # Each line of the translation log: "<ccsds filename> <fits filename>".
        while(<REF_LOG>) {
            @files = split;
            $ccsds = $files[0];
            $fits = $files[1];
            # Defensive cleanup; split on whitespace already strips
            # spaces and the trailing newline, so these are no-ops.
            $ccsds =~ s/ //g;
            $fits =~ s/ //g;
            chomp $fits;
            &$pipeline_logger("Updating mission_name_database: $ccsds => $fits");
            $trans{$ccsds} = $fits;
        }
        close(REF_LOG);
    }
    else {
        # Bug fix: this message previously reported $reformat_log, but the
        # file that failed to open is the staging-area translation log.
        &$pipeline_logger("Can't open $darts_staging/logs/md_translation/rescue/$translate_log_name: $!");
    }
    &$pipeline_logger("Closing rescue_mission_name_dbase");
    if(dbmclose(%trans)) {
        &$pipeline_logger("Closed rescue_mission_name_dbase ok");
    }
    else {
        &$pipeline_logger("Closing rescue_mission_name_dbase failed: $!");
    }
}
else {
    &$pipeline_logger("Cannot open rescue_mission_name_dbase");
}

# Copy the database to the staging area even if it was not updated
&$pipeline_perform("/bin/cp $pipeline_log/rescue_mission_name_dbase $darts_staging/logs/");


############################################### 5A - GENERATE FITS HEADER DUMP

&$pipeline_stage_title("Generating fits header dump");

&$pipeline_perform("$HOME/bin/fits_study_metric.pl $temp_fits/* > $pipeline_log/fits_dump_$sdate.txt");
# Bug fix: the dump is written as fits_dump_$sdate.txt above, but the copy
# previously referred to fits_dump_$year$month$day.txt -- $year/$month/$day
# are never assigned in this script (presumably leftovers from the
# non-rescue pipeline), so the copy could not find the file just created.
&$pipeline_perform("/bin/cp $pipeline_log/fits_dump_$sdate.txt $HOME/data/staging/packet_summary");


############################################### 6 - COMPRESS

# Now gzip everything in the temporary fits directory
# before moving to the DARTS directories

&$pipeline_stage_title("Gzip temporary fits files");

&$pipeline_perform("gzip -f $temp_fits/*");


############################################### 7 - COPY ENGINEERING FILES TO CALIBRATION AREA

# NOTE(review): this entire engineering-data stage is disabled by the goto
# below; control resumes at the "skip" label.  The code is retained for
# reference, with the $eng_dabse typo fixed (see dbmclose below) so the
# stage would behave correctly if it is ever re-enabled.
goto skip;

$current_time = scalar(localtime);
print STDOUT $current_time, " (JST) - Copy engineering files to calibration area\n" if $state eq "interactive";
print LOG "\n", $current_time, " (JST) - Copy engineering files to calibration area\n";

# If the engineering.txt file exists then there is engineering data to be dealt with.
# Use the ccsds packet filename from the engineering.txt file to look up in the translation
# log to get the fits filename and use the engineering study type information to copy the
# fits file to the calibration directory.

# Scalar $eng_dbase flags whether the dbm hash %eng_dbase opened ok
# (distinct variables despite the shared name).
$eng_dbase = 0;
if(-e "$merge_dir/engineering.txt") {
    print STDOUT "Engineering summary exists\n" if $state eq "interactive";
    print LOG "Engineering summary exists\n";
    $eng_dbase = 1 if(dbmopen(%eng_dbase, "$pipeline_log/engineering_name_dbase", 0644));
    if(dbmopen(%trans, "$pipeline_log/mission_name_dbase", undef)) {
        if(open(ENG_LOG, "< $merge_dir/engineering.txt")) {
            $line = <ENG_LOG>;          # Read title
            while($line = <ENG_LOG>) {  # Read ccsds packet filename
                last if $line =~ /^$/;  # Finish on a blank line
                chop $line;             # Remove \n
                ($path, $type) = split(/:/, $line);     # Get filename and type of engineering study
                print LOG "(path,type) = ($path, $type)\n";
                @path = split(/\//, $path);     # Split up filename path
                $ccsds_file = $path[-1];
                $fits_file = $trans{$ccsds_file};       # Get fits filename from ccsds filename
                if($fits_file) {
                    $eng_dbase{$fits_file} = $type if $eng_dbase;       # Update engineering database
                    $destination = eng_path($type);     # Get where to copy the fits file and copy it
                    $source = $temp_fits . "/"  . $fits_file . ".gz";
                    if($destination ne "") {
                        $dest1 = "$darts_mission" . $destination;
                        $dest2 = "$soda_darts_mission" . $destination;
                        print STDOUT "Copying $source to $dest1\n" if $state eq "interactive";
                        print LOG  "\tCopying $source to $dest1 and $dest2\n";
                        system("/bin/cp $source $dest1");
                        system("/bin/cp $source $dest2");
                    }
                }
                else {
                    print STDOUT "No translation found for $ccsds_file\n" if $state eq "interactive";
                    print LOG "No translation found for $ccsds_file\n";
                }
            }
            close(ENG_LOG);
        }
        dbmclose(%trans);
    }
    # Bug fix: was "if($eng_dabse)" (typo) -- always false, so the
    # engineering dbm file was never closed.
    dbmclose(%eng_dbase) if($eng_dbase);
    # NOTE(review): $year/$month/$day are not set anywhere in this script
    # -- TODO derive them from $sdate if this stage is re-enabled.
    system("/bin/mv $merge_dir/engineering.txt $HOME/data/mission/$year/$month/engineering.$year$month$day.txt");
}
else {
    print LOG "No engineering summary file\n";
}
# Copy the engineering database to the staging area even if it was not updated
#print STDOUT "/bin/cp $pipeline_log/engineering_name_dbase $darts_staging/logs/\n" if $state eq "interactive";
#print LOG  "\t/bin/cp $pipeline_log/engineering_name_dbase $darts_staging/logs/\n";
#system("/bin/cp $pipeline_log/engineering_name_dbase $darts_staging/logs/");

skip:

############################################### 8 - COPY MISSION FITS TO DARTS

&$pipeline_stage_title("Moving mission data fits files to DARTS");

#
# Filenames:
#eis_l0_yyyymmdd_hhmmss.fits
#
# The date embedded at a fixed offset in each filename selects the
# year/month/day destination subdirectory under the SODA DARTS tree.
@files = `ls $temp_fits`;
foreach $file (@files) {
    next unless $file =~ /l0/;
    chomp $file;
    # Slice the date fields straight out of the fixed-format name.
    $darts_year  = substr($file,  7, 4);
    $darts_month = substr($file, 11, 2);
    $darts_day   = substr($file, 13, 2);
    $soda_md_dir = "$soda_darts_mission/$darts_year/$darts_month/$darts_day";
    &$pipeline_perform("/bin/cp $temp_fits/$file $soda_md_dir");
}

# Generate the local study descriptions file
#perform(*LOG, $state, "fits_study_metric.csh $temp_fits >  $HOME/tmp/studies.txt");
#perform(*LOG, $state, "sed -e '/SSW/,/Type/d' $HOME/tmp/studies.txt > $HOME/tmp/rescue_studies.html");
#perform(*LOG, $state, "/bin/rm $HOME/tmp/studies.txt");


############################################### 15 - CLEAN UP

&$pipeline_stage_title("Cleaning up");

# Remove the generated IDL batch scripts.  The staged fits files are left
# in place -- the /bin/rm of $temp_fits is intentionally disabled.
&$pipeline_perform("/bin/rm $temp_idl/*pro");
#&$pipeline_perform("/bin/rm $temp_fits/*");

# pipeline_exit (from pipeline.pl) records a status of 0 for the "rescue"
# pipeline and terminates the script.
pipeline_exit("rescue", 0);

