"""
A custom grammar for lightmeter uploads.

It selects an input grammar based on the extension of the file.
Then it computes time averages and standard deviations of the
counts found.  This means it basically does the rowmaker's job.

Well, this was the path of least resistance.
"""

import datetime
import math

from gavo import base
from gavo import stc
from gavo.rscdef import rmkfuncs
from gavo.grammars.customgrammar import CustomRowIterator


def makeDataPack(grammar):
	"""returns a mapping from file extension to the grammar that parses
	files with that extension.

	The mapping is filled from properties on the grammar defined in the
	RD: each property name is an extension, each value the id of a
	grammar within the same RD.
	"""
	return {
		extension: grammar.rd.getById(grammarRef)
		for extension, grammarRef in grammar.properties.items()}


class RowIterator(CustomRowIterator):
	"""A row iterator aggregating raw lightmeter readings.

	It picks a slave grammar based on the source file's extension (via
	the grammar's dataPack), applies the per-station calibration from
	sourceRow, and yields one raw row per integration interval with the
	mean flux, its sample standard deviation, and the number of values
	that went into it.
	"""

	# Throw away lines with statuses not in this set
	okStatuses = set(["1", "ok"])
	# Length of integration interval (except at the end)
	shippingInterval = datetime.timedelta(minutes=9, seconds=59)

	def parseRowLegacy(self, row):
		"""returns (timestamp, count, temp) for a row from the ad-hoc
		text and csv files, or (None, None, None) for rejected lines.
		"""
		# status may not be present for garbled lines; the key used
		# differs between the formats ("status" vs. "status1")
		if not (row.get("status") in self.okStatuses
				or row.get("status1") in self.okStatuses):
			return None, None, None
		time = rmkfuncs.parseTime(row["rawtime"], "%H:%M:%S")
		# dates come either ISO-formatted or in day.month.year order
		if "-" in row["rawdate"]:
			timestamp = rmkfuncs.parseDate(row["rawdate"], "%Y-%m-%d")+time
		else:
			timestamp = rmkfuncs.parseDate(row["rawdate"], "%d.%m.%Y")+time
		# some files use decimal commas
		count = float(row["count"].replace(",", "."))
		temp = float(row["temp"].replace(",", "."))
		# temperatures above 255 apparently encode negative values with
		# an offset of 512 -- TODO confirm against the device docs
		if temp>255:
			temp = temp-512
		return timestamp, count, temp

	def parseRowSkyglow(self, row):
		"""returns (timestamp, count, temp) for a row from an IYA
		lightmeter skyglow file (or what I believe it to be).
		"""
		temp = float(row["temperature"])
		# same negative-temperature encoding as in the legacy files
		if temp>255:
			temp = temp-512
		return (rmkfuncs.parseTimestamp(row["epUTC"]),
			float(row["counts"]), temp)

	def parseRow(self, row):
		"""dispatches a slave-grammar row to the proper parser based on
		the keys present in it.
		"""
		if "epUTC" in row:
			return self.parseRowSkyglow(row)
		else:
			return self.parseRowLegacy(row)

	def buildValues(self, startTime, endTime, values):
		"""returns a raw row from the data accumulated.

		startTime/endTime delimit the integration interval; the row's
		epoch is the JD of the interval's midpoint (or of startTime if
		endTime is None).  values must be non-empty.
		"""
		if endTime is None:
			pertinentTime = startTime
		else:
			pertinentTime = startTime+(endTime-startTime)/2
		if not values:
			raise ValueError("Empty values not supported")
		elif len(values)==1:
			# no spread estimate possible from a single value
			return {"epoch": stc.dateTimeToJdn(pertinentTime),
				"flux": values[0],
				"s_flux": None,
				"nVals": 1,
			}
		else:
			mean = sum(values)/float(len(values))
			# sample standard deviation; note the (n-1) divisor belongs
			# inside the square root (it previously was applied outside,
			# yielding bogus s_flux values)
			stddev = math.sqrt(
				sum((v-mean)**2 for v in values)/(len(values)-1))
			return {"epoch": stc.dateTimeToJdn(pertinentTime),
				"flux": mean,
				"s_flux": stddev,
				"nVals": len(values),
			}

	def _iterRows(self):
		# pick the slave grammar by the longest-ignored, first-matching
		# extension from the data pack
		for ext in self.grammar.dataPack:
			if self.sourceToken.endswith(ext):
				break
		else:
			raise base.SourceParseError(
				"No grammar for files like %s"%self.sourceToken)
		self.slaveGrammar = self.grammar.dataPack[ext]

		# per-station calibration and clock correction from the DB row
		row = self.sourceRow
		a, b, c, d = row["calibA"], row["calibB"], row["calibC"], row["calibD"]
		tCorr = None
		if row["timeCorrection"]:
			tCorr = datetime.timedelta(seconds=row["timeCorrection"])
		startTime, curVals = None, []
		self.slaveIterator = self.slaveGrammar.parse(self.sourceToken)

		for row in self.slaveIterator:
			timestamp, count, temp = self.parseRow(row)
			if timestamp is None or temp is None:
				# rejected (garbled) line
				continue

			if tCorr is not None:
				timestamp = timestamp+tCorr
			# calibration polynomial turning raw counts into fluxes
			curVals.append(c*(b*(a*math.exp(count*(1+d*temp)/a)-1)+count))

			if startTime is None:
				startTime = timestamp
			if timestamp-startTime>=self.shippingInterval:
				if curVals:
					yield self.buildValues(startTime, timestamp, curVals)
				curVals, startTime = [], None

		# flush the (shorter) trailing interval, if any
		if curVals:
			yield self.buildValues(startTime, timestamp, curVals)
		del self.slaveIterator

	def getLocator(self):
		"""delegates to the slave iterator's locator while parsing.
		"""
		if hasattr(self, "slaveIterator"):
			return self.slaveIterator.getLocator()
		else:
			return "Not currently parsing"

if __name__=="__main__":
	# Ad-hoc test harness: parse one known upload and page through the
	# resulting rows one keypress at a time.
	from gavo.user import logui
	from gavo import api
	logui.LoggingUI(base.ui)

	class StandinGrammar:
		# mimics just the attributes makeDataPack and RowIterator need
		rd = api.getRD("lightmeter/q")
		properties = {
			"csv.gz": "csvGrammar",
			"txt.gz": "txtGrammar",
			"skyglow.gz": "skyglowGrammar",
		}

	grammar = StandinGrammar()
	grammar.dataPack = makeDataPack(grammar)

	rowIter = RowIterator(grammar,
		"/data/gavo/inputs/lightmeter/uploads/DE_HEIDELBERG_2/20151002_071520_DE_HEIDELBERG_2.skyglow.gz",
		{"calibA": 1, "calibB": 0, "calibC": 0, "calibD": 0,
			"timeCorrection": 0})
	for parsedRow in rowIter._iterRows():
		print(parsedRow)
		input()
