import os
import subprocess
import sys
import traceback

import numpy as np
import pyfits

from gavo import base

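# Root of the directory tree scanned for raw *.fit spectra.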
rootDir = 'data'


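# Collect every *.fit file under rootDir that is not itself a
# normalization product and does not yet have a *_norm.fit sibling.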
fileList = []
for root, subFolders, files in os.walk(rootDir):
	for name in files:
		base_name, ext = os.path.splitext(name)
		if (ext == '.fit' and not name.endswith('_norm.fit')
				and "%s_norm.fit" % base_name not in files):
			fileList.append(os.path.join(root, name))

cnt = 0
error = 0
for filename in fileList:
	print ("Processing new source: %d" % cnt)
	print ("Errors thus far: %d" % error)
	cnt+=1
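	# The walk above already filtered on a *_norm.fit sibling, but the
	# product may have appeared since the file list was built.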
	if os.path.exists("%s_norm.fit" % os.path.splitext(filename)[0]):
		print("Skipping file: %s (already done)." % filename)
		continue
	sys.stderr.write("INFO: Normalizing new file: %s\n" % filename)
	try:
		hdulist = pyfits.open(filename)
	except Exception:
		sys.stderr.write("ERROR: File %s has a broken header ---> normalization skipped.\n" % filename)
		error += 1
		continue

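	# The input is a 1-D image: a flux vector whose wavelength grid is
	# encoded in the WCS keywords CRVAL1/CDELT1/NAXIS1.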
	header_orig = hdulist[0].header

	flux_arr = hdulist[0].data

	wave_min = header_orig["CRVAL1"]
	wave_step = header_orig["CDELT1"]
	wave_size = header_orig["NAXIS1"]
	# Build the grid from an integer index; np.arange with a float step
	# and endpoint can yield one element too many through rounding.
	wave_arr = wave_min + wave_step * np.arange(wave_size)


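	# Build a binary-table HDU whose columns carry IVOA Spectral Data
	# Model annotation (TUCDn/TUTYPn).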
	c1 = pyfits.Column(name='WAVE', format='D', unit='Angstrom')
	c2 = pyfits.Column(name='FLUX', format='D', unit='erg/cm^2/s/Angstrom')

	tbhdu = pyfits.new_table([c1, c2], nrows= wave_size)
	tbhdu.header.update('TUCD1', 'em.wl')
	tbhdu.header.update('TUTYP1', 'spec:Data.SpectralAxis.Value')
	tbhdu.header.update('TUCD2', 'phot.fluxDens;em.wl')
	tbhdu.header.update('TUTYP2', 'spec:Data.FluxAxis.Value')
	tbhdu.header.update('DATALEN', wave_size, 'Spectral axis length')

	# Fill the wavelength/flux table; whole-column assignment replaces a
	# Python-level loop over the rows.
	tbhdu.data.field('WAVE')[:] = wave_arr
	tbhdu.data.field('FLUX')[:] = flux_arr

	primary = pyfits.PrimaryHDU()
	hdulist = pyfits.HDUList([primary, tbhdu])
	try:
		hdulist.writeto('tmpfile', output_verify='warn', clobber=True)
	except Exception:
		sys.stderr.write("ERROR: Could not write tmpfile ---> normalization skipped.\n")
		error += 1
		continue
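	# Restore the original image header on the primary HDU for the final
	# product; the tmpfile handed to fitfits was written with a bare one.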
	primary.header = header_orig
	hdulist.update_extend()

	# Normalize: fit the continuum with the external fitfits binary and
	# divide it out of the flux column below.
	executable = os.path.join(base.getConfig("rootDir"), "bin", "fitfits")
	if not os.path.exists(executable):
		base.ui.notifyError("External binary was not found at its usual place %s" % executable)
		raise RuntimeError("Spectrum cannot be automatically normalized")

	# communicate() waits for the child and drains its stdout in one go;
	# wait() followed by read() can deadlock once the pipe buffer fills.
	output = subprocess.Popen([executable, 'tmpfile'], stdout=subprocess.PIPE, universal_newlines=True)
	coefs_str = output.communicate()[0].split(",")[1:]
	os.unlink('tmpfile')

	# Parse all coefficients at once so that a failure skips the whole
	# file; a continue inside a per-coefficient loop would not do that.
	try:
		coefs = [float(coef.strip()) for coef in coefs_str]
	except ValueError:
		sys.stderr.write("ERROR: Returned value from fitfits was not a float ---> normalization skipped.\n")
		error += 1
		continue
	if not coefs:
		sys.stderr.write("ERROR: Spectrum %s could not be normalized because fitfits failed on it ---> skipped.\n" % filename)
		error += 1
		continue

	# Reverse so the highest-order coefficient comes first, as the Horner
	# evaluation in poly() expects; list() materializes the iterator,
	# which poly() consumes once per table row.
	coefs = list(reversed(coefs))

	def poly(l):
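		# Horner evaluation of the continuum polynomial at wavelength l,
		# with coefs ordered highest power first.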
		res = 0
		for coef in coefs:
			res *= l
			res += coef
		return res

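	# Divide each flux value by the fitted continuum.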
	for row in tbhdu.data:
		row[1] /= poly(row[0])

	try:
		hdulist.writeto("%s_norm.fit" % os.path.splitext(filename)[0], output_verify='warn')
	except Exception:
		traceback.print_exc()
		sys.stderr.write("ERROR: Could not write normalized file for %s ---> skipped.\n" % filename)
		error += 1