<resource schema="lightmeter">
	<meta name="creationDate">2009-04-21T13:19:00</meta>
	<meta name="schema-rank">1000</meta>
	<meta name="description">
We give continuous night and day light measurements at all natural
outdoor light levels by a network of low-cost lightmeters. Developed to
start simple, global continuous high cadence monitoring of night sky
brightness and artificial night sky brightening (light pollution) in
2009. The lightmeter network is a project of the Thüringer
Landessternwarte, Tautenburg, Germany and the Kuffner-Sternwarte society
at the Kuffner-Observatory, Vienna, Austria.

It started as part of the Dark Skies Awareness cornerstone of the
International Year of Astronomy.
	</meta>
	<meta name="_intro" format="rst"><![CDATA[
This form lets you retrieve light pollution measurements by time and
station; the interval of data points is typically 10 minutes, where each
data point is an average over several measurements.

For advanced queries, use ADQL_ possibly via TAP_, on the
`lightmeter.measurements table`_.

.. _ADQL: /adql
.. _TAP: /tap
.. _lightmeter.measurements table: /tableinfo/lightmeter.measurements

	]]></meta>
	<meta name="title">Lightmeter Data</meta>
	<meta name="creator">Wuchterl, G.; The Lightmeter community</meta>

	<meta name="subject">astronomical-site-protection</meta>
	<meta name="subject">night-sky-brightness</meta>

	<meta name="facility">Many; you may obtain a list at
		http://lightmeter.astronomy2009.at</meta>

	<meta name="copyright" format="rst">
		This database of lightmeter measurements is made available under the Open
		Database License: http://opendatacommons.org/licenses/odbl/1.0/.
		
		Any rights
		in individual contents of the database are licensed under the Database
		Contents License: http://opendatacommons.org/licenses/dbcl/1.0/
	</meta>
	<meta name="source" format="rst">http://lightmeter.astronomy2009.at</meta>
	<meta name="instrument">92x92 mm solar-cell in Aluminium frame with closed
		electronics protected for all-weather use.</meta>

  <meta name="coverage.waveband">Optical</meta>

	<property name="uploadDir">uploads</property>

	<!-- Daily ingest cron: at 14:10, spawn a "dachs imp" of the "add" data
	item (defined below) to pull in freshly uploaded measurement files. -->
	<execute id="updaterecs" title="ingest new files" at="14:10">
		<job>
			<code>
				execDef.spawn("dachs --ui stingy"
					" imp --suppress-meta -c lightmeter/q add".split())
			</code>
		</job>
	</execute>

	<!-- Daily cron: regenerate the station coverage image (served as
	weather/static/coverage.png) after the 14:10 ingest has run. -->
	<execute title="make coverage plot" at="14:30">
		<job>
			<code>
				mod = utils.loadPythonModule(rd.getAbsPath("bin/coverageplot"))[0]
				mod.makePlot()
			</code>
		</job>
	</execute>

	<!-- Registry of measuring stations.  dupePolicy="overwrite" lets the
	stationsdata import update an existing station record in place. -->
	<table onDisk="True" id="stations" adql="True" primary="stationId"
			dupePolicy="overwrite">
		<meta name="description">Stations in the lightmeter network</meta>
		<column name="stationId" type="text" tablehead="Station ID"
			description="Identifier of the measuring station, starting with an ISO CC"
			verbLevel="1" required="True" ucd="meta.id;meta.main"/>
		<column name="lat" type="double precision" tablehead="Lat."
			verbLevel="1" unit="deg" ucd="pos.earth.lat"
			description="Latitude of the observing station"/>
		<column name="long" type="double precision" tablehead="Long."
			verbLevel="1" unit="deg" ucd="pos.earth.lon"
			description="Longitude of the observing station"/>
		<column name="height" tablehead="Height" verbLevel="10"
			description="Height above sea level" unit="m"
			ucd="pos.earth.altitude"/>
		<column name="fullname" type="text" tablehead="Name"
			description="Full name of the station" required="True"
			verbLevel="15" ucd="meta.id"/>
		<column name="devtype" type="text" tablehead="Type"
			description="Device type (as in: IYA Lightmeter, SQM, ...)"
			verbLevel="15" ucd="meta.code;instr"/>
		<column name="timeCorrection" type="integer" tablehead="dt"
			description="Seconds to add to this station's reported times to obtain UTC"
			verbLevel="25"><values default="0" nullLiteral="-25"/></column>

		<!-- Per-station calibration parameters; see the "cal" note below for
		the raw-count-to-flux formula.  calibA being NULL marks an
		uncalibrated station (the "add" data item skips its files). -->
		<column name="calibA" tablehead="Cal. a" ucd="instr.calib"
			description="Calibration parameter, a" note="cal"/>
		<column name="calibB" tablehead="Cal. b" ucd="instr.calib"
			description="Calibration parameter, b" note="cal"/>
		<column name="calibC" tablehead="Cal. c" unit="W.m**-2" ucd="instr.calib"
			description="Calibration parameter, c" note="cal"/>
		<column name="calibD" tablehead="Cal. d" unit="K**-1" ucd="instr.calib"
			description="Calibration parameter, d" note="cal"/>

		<meta name="note" tag="cal">
			The calibration parameters are used to convert raw counts to
			actual fluxes using the formula::

			  Phi = c (b (a exp(n (1+dT)/a)-1)+n)

			where n is the raw count, T the instrument temperature and Phi
			is a flux in units of c.
		</meta>
	</table>

	<!-- Per-station upload credentials, maintained together with the
	stations table by the stationsdata import. -->
	<table onDisk="True" id="accesskeys" primary="stationId"
			dupePolicy="overwrite">
		<meta name="description">The access keys for uploads.</meta>
		<!--<foreignKey inTable="stations" source="stationId"/>-->
		<column original="stations.stationId" verbLevel="1"/>
		<column name="accessKey" type="text" verbLevel="40" required="True"/>
	</table>

	<!-- Main science table: one row per time-averaged flux measurement,
	filled by the "add" data item via the fillMeasurements rowmaker. -->
	<table onDisk="True" id="measurements" adql="True">
		<meta name="description">Time-averaged lightmeter measurements</meta>
<!--		<foreignKey inTable="stations" source="stationId"/>-->
		<index columns="epoch"/>
		<index columns="stationId"/>
		<column original="stations.stationId" verbLevel="1"/>
		<column name="epoch" type="double precision" required="True"
			tablehead="JD (UTC)"
			description="JD of measurement, UTC" verbLevel="1" unit="d"
			ucd="time.epoch"/>
		<column name="flux" tablehead="Flux" required="True"
			description="Calibrated flux" verbLevel="1" unit="W.m**-2"
			ucd="phot.flux"/>
		<column name="s_flux" tablehead="Err(Flux)"
			description="Standard deviation of light measurements
				contributing to this average"
			verbLevel="11"
			unit="W.m**-2" displayHint="sf=2" ucd="stat.stdev;phot.flux"/>
		<column name="nVals" type="integer" required="True"
			tablehead="N"
			description="Number of measurements contributing to this value"
			ucd="meta.number;obs"/>
		<!-- "source" holds the srcKey computed in the add data item's
		sourceFields (path of the originating upload file). -->
		<column name="source" tablehead="Source" description=
			"Source file key" type="text" verbLevel="24"
			ucd="meta.id;meta.file"/>
	</table>

	<!-- Bookkeeping of already-ingested upload files; queried by the "add"
	data item's ignoreSources so files are only imported once. -->
	<table onDisk="True" id="filesInDb" primary="path">
		<!--<foreignKey inTable="stations" source="stationId"/>-->
		<column original="stations.stationId" verbLevel="1"/>
		<column name="path" type="text" verbLevel="1"/>
		<column name="processedOn" type="timestamp" verbLevel="11"/>
	</table>

	<!-- View joining measurements with station positions; queried by the
	weather and stationplot services.  NOTE(review): NATURAL JOIN joins on
	all shared column names -- presumably just stationId here; confirm the
	two tables share no other column names if either is extended. -->
	<table id="geocounts" onDisk="True" adql="True">
		<meta name="description">Lightmeter data by date and geographic
			position</meta>
		<column original="measurements.epoch"/>
		<column original="measurements.stationId"/>
		<column original="stations.lat"/>
		<column original="stations.long"/>
		<column original="measurements.flux"/>
		<viewStatement>
			CREATE VIEW \curtable AS (
				SELECT \colNames FROM
					\schema.measurements
					NATURAL JOIN
					\schema.stations)
		</viewStatement>
	</table>

	<!-- Imports station descriptions from key-value files (stations/*.kv)
	into both the stations and accesskeys tables.  The "type" key in the
	.kv files maps to the devtype column; it is optional (None if absent). -->
	<data id="stationsdata" updating="true" auto="false">
		<sources pattern="stations/*.kv"/>
		<keyValueGrammar enc="utf-8"/>
		<make table="stations">
			<rowmaker idmaps="*">
				<map key="devtype">vars.get("type", None)</map>
			</rowmaker>
		</make>
		<make table="accesskeys"/>
	</data>

	<!-- One-shot bootstrap: (re-)creates all tables, then runs makeView
	(declared as a dependent) to re-establish the geocounts view. -->
	<data id="createTables" auto="false" dependents="makeView">
		<make table="stations"/>
		<make table="accesskeys"/>
		<make table="measurements"/>
		<make table="filesInDb"/>
		<make table="geocounts"/>
	</data>

	<!-- Recreates just the geocounts view (see viewStatement above). -->
	<data id="makeView" auto="false">
		<make table="geocounts"/>
	</data>

	<!-- Maps grammar output (plus the srcKey computed in the add data
	item's sourceFields) onto the measurements table columns. -->
	<rowmaker id="fillMeasurements">
		<map dest="stationId">@stationId</map>
		<map dest="epoch">@epoch</map>
		<map dest="flux">@flux</map>
		<map dest="s_flux">@s_flux</map>
		<map dest="nVals">@nVals</map>
		<map dest="source">@srcKey</map>
	</rowmaker>

	<!-- Grammar for semicolon-separated CSV uploads (see the upload
	service docs); referenced by id from the autoselect grammar below.
	recordSep skips comment lines starting with '#'. -->
	<data id="csvdata" auto="False">
		<reGrammar fieldSep=";" names="rawdate,rawtime,temp,unit,count,status1,sth2,status"
			lax="True"
			recordSep="\n(#[^\n]*\n)*"
			id="csvGrammar" gunzip="True" enc="iso-8859-1"/>
	</data>

	<!-- Grammar for whitespace-separated "text" format uploads. -->
	<data id="txtdata" auto="False">
		<reGrammar id="txtGrammar"
			names="rawdate,rawtime,temp,count,status" gunzip="True"/>
	</data>

	<!-- Grammar for the skyglow format, implemented in Python. -->
	<data id="skyglowdata" auto="False">
		<customGrammar id="skyglowGrammar" module="res/skyglowgrammar"/>
	</data>

	<!-- Ingest of uploaded files: picks unprocessed *.gz files from the
	uploads tree, dispatches to csv/txt/skyglow grammars by extension
	(bin/autogrammar), and records each processed file in filesInDb. -->
	<data id="add" updating="True">
		<sources patterns="uploads/*.gz" recurse="True">
			<ignoreSources fromdb="SELECT path FROM lightmeter.filesInDb"/>
		</sources>
		<customGrammar id="autoselectGrammar" module="bin/autogrammar">
			<property name="csv.gz">csvGrammar</property>
			<property name="txt.gz">txtGrammar</property>
			<property name="skyglow.gz">skyglowGrammar</property>
			<sourceFields>
				<code>
					# Station id is the name of the directory containing the file;
					# all-numeric directory names are archive dirs and are skipped.
					stationId = os.path.split(os.path.split(sourceToken)[0])[1]
					if re.match("[0-9]+$", stationId):
						raise base.SkipThis("Archive directory")
					try:
						# Fetch calibration and clock correction for this station;
						# IndexError from [0] means the station is unknown.
						with base.getTableConn() as conn:
							stationPars = list(conn.query(
								"SELECT calibA, calibB, calibC, calibD, timeCorrection"
								" FROM lightmeter.stations"
								" WHERE stationId=%(stationId)s", locals()))[0]
						calibA, calibB, calibC, calibD, timeCorrection = stationPars
					except IndexError:
						raise base.ValidationError("No station data for %s.  Import"
							" stationsdata first."
							%stationId, "stationId", stationId)
					if calibA is None:
						raise base.SkipThis("No calibration for %s."%stationId)
					# srcKey: upload-relative path, stored in measurements.source.
					srcKey = "/".join(utils.getRelativePath(sourceToken,
						base.getConfig("inputsDir"), liberalChars=True).split("/")[2:])
					return locals()
				</code>
			</sourceFields>
		</customGrammar>
		<make table="measurements" rowmaker="fillMeasurements" role="primary"/>
		<make table="filesInDb" rowSource="parameters">
			<rowmaker id="fillFile" idmaps="*">
				<var name="path">\inputRelativePath</var>
				<var name="processedOn">datetime.datetime.now()</var>
			</rowmaker>
		</make>
	</data>

	<service id="upload" customPage="bin/uploadrender" allowed="custom,static">
		<property name="staticData">uploads</property>
		<nullCore/>
		<meta name="title">Light pollution data upload facility</meta>
		<meta name="_longdoc" format="rst"><![CDATA[
			Getting a Station ID
			--------------------

			To upload into GAVO's light pollution database, you need to obtain
			a station identifier; this uniquely describes the combination of
			sensor and location.  If you change either, please get a new station
			id.

			To get a station id, please fill out the following ASCII form and
			send it to ``gavo@ari.uni-heidelberg.de``::

				fullname:
				stationId:
				long:
				lat:
				height:
				type:

			Here,

			fullname
				is a nice, descriptive title for your station ("Lightmeter
				of the roof of stargazers' lair"),
			stationid
				is all uppercase (country)_(city)_(count), e.g.,
				DE_HEIDELBERG_2.  We may need to change
				your suggestion.  Please give
			long and lat
				in decimal degrees, with longitudes west of Greenwich having a
				minus sign.
			height
				should be in meters.
			type
				would currently be one of IYA Lightmeter or SQM-LU; if neither
				fits for you, contact us.

			Here's an example for our Heidelberg station::

				fullname: Heidelberg ARI Altbau, new device
				stationId: DE_HEIDELBERG_2
				long: 8.68813
				lat: 49.417645
				height: 115
				accessKey: Leevae4i
				type: IYA Lightmeter

			Note that we currently do not support mobile stations.  If you have data
			from devices that change their location frequently, please let us know.

			Upload Formats
			--------------

			You can upload both CSV and "text" format. Text format consists of lines
			giving whitespace-separated lines of::

				date time(utc) temperature count status

			Such a file could look like this::

				2009-05-29 00:00:01  11,3  235160 1
				2009-05-29 00:00:02  11,3  235240 1
				2009-05-29 00:00:03  11,3  235320 1
				2009-05-29 00:00:04  11,3  235320 1
				2009-05-29 00:00:05  11,3  235264 1
				2009-05-29 00:00:06  11,3  234864 1
				2009-05-29 00:00:07  11,3  235360 1

			The CSV format has semicolons as separators and must not have headers.
			It should look like this::

				27.04.2009;12:31:14;36,9;°C;1942560;ok;
				27.04.2009;12:31:15;37,0;°C;1947960;ok;
				27.04.2009;12:31:16;37,0;°C;1951800;ok;
				27.04.2009;12:31:17;36,9;°C;1943880;ok;
				27.04.2009;12:31:18;37,0;°C;1947960;ok;
				27.04.2009;12:31:19;36,9;°C;1956960;ok;

			We will accept floating point numbers with both commas and decimal
			points as decimal separators.

			You can gzip your submissions before transfer.

			The receiving software is somewhat naive and infers the content from
			the file name extension. Legal extensions are:

			* .txt -- text format
			* .txt.gz -- gzip compressed text format
			* .csv -- csv format
			* .csv.gz -- gzip compressed csv format
			* .skyglow.gz -- gzip compressed skyglow format

			Make sure you follow these conventions.

			You can also upload ZIP archives of such files. The only legal
			extension here is zip (lower case!).

			There is an upload limit of 20 MB on the data center software, i.e.,
			you cannot upload a single file larger than 20 MB.  If you try,
			your client will probably just say something to the effect of "connection
			reset".  So, try to keep your uploads reasonably small.

			Automatic Uploads
			-----------------

			Of course, manual uploads will become quite tedious. Therefore, we
			provide an automatic upload facility. While you can use anything that
			can do HTTP uploads (the file goes into the inFile key, and you
			must give a __nevow_form__ key with the value upload), we provide
			`a python script`_ that already does everything (including automatic
			transfer compression).  The program is written for python 2.x; unless
			you have a very good reason, you should not use python 3.x just yet,
			and if you have one, please feed back the (minor) patches to make the
			uploader work with 3.x (while keeping it ok for 2.x).

			To use it, put a file named "stationinfo" into the directory from which
			you will upload. It must contain the station id, a blank, and the
			access key.

			Then, in a shell, say (adapt for the location of your python
			interpreter and the script as necessary)::

				python uploadLM.py FILE1 FILE2...

			We expect uploading will be included in some of the readout software.

			You can also just use curl; this would look like this::

				curl http://dc.g-vo.org/lightmeter/q/upload/custom/STATION_ID/ACCESS_KEY \\
				-F __nevow_form__=upload -F inFile=@PATH_TO_FILE

			(of course, you need to adapt everything in ALL_CAPS). In what comes
			back, check the first paragraph of the div element with id
			body.  This should contain something like "File FILENAME uploaded,
			XY bytes".

			.. _a python script: http://dc.g-vo.org/lightmeter/q/upload/static/src/uploadLM.py

			Full Driver Software
			--------------------

			We also provide a full driver software that can run on Raspberry PI
			or similar small computers.  It also offers a web interface that lets
			you see your lightmeter's status
			(`here's <http://carina.ari.uni-heidelberg.de:1082>`_ how this currently
			looks for the lightmeter in Heidelberg).

			You will probably need some help to set this up -- please contact
			gavo@ari.uni-heidelberg.de as necessary.  The necessary code is
			available from
			http://svn.ari.uni-heidelberg.de/svn/gavo/hdinputs/lightmeter/src,
			you want lightmeter.py and, unless you happen to have a local
			installation of python-libusb1, the usb1.py and libusb1.py modules.


		]]></meta>
	</service>

	<!-- Core for the raw-data service: lists one row per (station, source
	file) with the mean epoch, linking each source to its static download. -->
	<fancyQueryCore id="datacore" queriedTable="measurements">
		<condDesc buildFrom="epoch"/>
		<condDesc required="False">
			<inputKey original="stationId">
				<values fromdb="stationId from lightmeter.stations
					order by stationId"/>
			</inputKey>
		</condDesc>

		<query>
			SELECT DISTINCT stationid, source, AVG(epoch) as meanEpoch
			FROM lightmeter.measurements
			%s
			GROUP BY stationId, source
		</query>

		<outputTable>
			<outputField original="stationId" name="stationid"/>
			<outputField original="source">
				<formatter>
					# Render source as a link into the service's static data tree.
					return T.a(href="././static/"+urllib.parse.quote(data))[
						os.path.basename(data)]
				</formatter>
			</outputField>
			<outputField original="epoch" name="meanepoch"
				description="Mean epoch of measurements within this file"
				tablehead="Mean JD (UTC)"/>
		</outputTable>
	</fancyQueryCore>

	<service id="data" core="datacore" allowed="form,static">
		<property name="staticData">uploads</property>
		<meta name="title">Light Pollution Raw Data</meta>
		<meta name="_related" title="Access to reduced data"
			>/lightmeter/q/weather/form</meta>
		<meta name="_intro" format="rst">This service delivers data
		collected by a network of lightmeters (see also the service info).

		Dates are given as ranges of Julian Day numbers, e.g.,
		``2455124 +/- 2`` for two days around 2009-10-19 noon UTC, or
		``2455124.45 .. 2455124.55`` for a centiday around midnight UTC
		2009-10-19.

		This service serves uncalibrated, high-resolution raw measurements
		as delivered by the original instruments.
		See `Light Pollution Weather`_ for a more accessible way to query
		this data, and use ADQL_ possibly via TAP_, on the
		`lightmeter.measurements table`_ for more advanced queries.

		.. _ADQL: /adql
		.. _TAP: /tap
		.. _lightmeter.measurements table: /tableinfo/lightmeter.measurements
		.. _Light Pollution Weather: /lightmeter/q/weather
		</meta>
	</service>

	<!-- Core for the weather service: selects geocounts rows within a
	user-given window around a date/time, capped at 10000 rows. -->
	<dbCore id="weathercore" queriedTable="geocounts" limit="10000"
			sortKey="epoch">
		<condDesc>
			<inputKey name="epoch" tablehead="Date" type="date"
					required="True"/>
			<inputKey name="time" tablehead="Time (UTC)" type="time"
				required="True"/>
			<inputKey name="within" required="True" type="integer"
				tablehead="plus/minus" unit="minutes"
				description="Give measurements within this many minutes of your chosen
				date and time.  The sampling rate is 20 minutes">11</inputKey>
			<phraseMaker>
				<code>
					baseJD = dateTimeToJdn(
						datetime.datetime.combine(inPars["epoch"], inPars["time"]))
					# minutes -> days: *60 gives seconds, /3600 hours, /24 days
					# (net division by 1440)
					dt = int(inPars["within"])*60/24./3600.
					yield "epoch BETWEEN %%(%s)s AND %%(%s)s"%(
						base.getSQLKey("epoch", baseJD-dt, outPars),
						base.getSQLKey("epoch", baseJD+dt, outPars))
				</code>
			</phraseMaker>
		</condDesc>
		<condDesc id="stationCond">
			<inputKey original="stationId" required="False" showItems="6"
					multiplicity="multiple">
				<values
					fromdb="stationId FROM lightmeter.stations ORDER BY stationid"/>
			</inputKey>
		</condDesc>
		<outputTable namePath="geocounts">
			<outputField original="epoch" displayHint="type=humanDate"
				tablehead="Date/Time" description="Mean date and time of the
					observation"/>
			<outputField name="logflux" select="ln(flux)" tablehead="log(Flux)"
				description="Natural logarithm of Flux for convenience"
				verbLevel="20" ucd="phot.flux" displayHint="sf=4"/>
			<outputField original="flux"/>
			<outputField original="stationId"/>
			<outputField original="long"/>
			<outputField original="lat"/>
		</outputTable>
	</dbCore>
	
	<!-- The main published (registry) service of this RD.  The raw-format
	_bottominfo CDATA below is emitted verbatim into the form page; do not
	reformat it -- it carries the coverage image, date-preset buttons and
	the JavaScript that wires them into the query form fields. -->
	<service id="weather" core="weathercore" defaultRenderer="form"
			allowed="form,static">
		<property name="staticData">static</property>
		<publish render="form" sets="local,ivo_managed"/>
		<meta name="title">Light Pollution Weather</meta>
		<meta name="shortName">lm weather</meta>
		<meta name="_plotOptions">{'xselIndex': 0, 'yselIndex':1}</meta>
		<meta name="_related" title="Raw data access">/lightmeter/q/data/form</meta>
		<meta name="_related" title="Simple station plots"
			>/lightmeter/q/stationplot/custom</meta>
		<!-- magic buttons for some fixed dates, cheated in via evil HTML in
		the bottom info -->
		<meta name="_bottominfo" format="raw"><![CDATA[
			<p>The following stations delivered data within the last 400 days (darker
			gray means more data points).</p>

			<img src="/lightmeter/q/weather/static/coverage.png" alt="[Lightmeter
			Coverage]"/>

			<div id="datePresets" style="margin-top:0.5ex;margin-left:110px">
				<button type="button" onclick="setNDaysAgo(1)">Yesterday</button>
				<button type="button" onclick="setNDaysAgo(7)">A Week Ago</button>
				<button type="button" onclick="setNDaysAgo(30)">A Month Ago</button>
				<button type="button" onclick="setNDaysAgo(365)">A Year Ago</button>
			</div>

			<script type="text/javascript">
				function enterDate(date) {
					dateRoot = document.getElementById("genForm-epoch-field");
					inputs = dateRoot.getElementsByTagName('input');
					for (ind in inputs) {
						el = inputs[ind];
						if (el.name=="epoch__day") {
							el.value = date.getUTCDate();
						} else if (el.name=="epoch__month") {
							el.value = date.getUTCMonth()+1;
						} else if (el.name=="epoch__year") {
							el.value = date.getUTCFullYear();
						}
					}

					timeRoot = document.getElementById("genForm-time-field");
					timeRoot.getElementsByTagName('input')[0].value =
						date.getUTCHours()+":"+date.getUTCMinutes();

					pmRoot = document.getElementById("genForm-within-field");
					pmRoot.getElementsByTagName('input')[0].value = 1440;
				}

				function setNDaysAgo(nDays) {
					enterDate(new Date(new Date()-24*3600*1000*nDays));
				}

				function movePresets() {
					panel = document.getElementById("datePresets");
					panel.parentNode.removeChild(panel);
					document.getElementById("genForm-epoch-field").appendChild(panel);
				}

				window.addEventListener("load", movePresets, false);
			</script>
		]]></meta>
	</service>

	<!-- Custom-rendered plotting service (bin/curveplot) over geocounts:
	last-N-days flux curves per station. -->
	<service id="stationplot" customPage="bin/curveplot" allowed="custom">
		<meta name="title">Light Pollution Station Plots</meta>

		<meta name="_bottominfo" format="raw"><![CDATA[
			<p>Coverage for the last 400 days:</p>

			<img src="/lightmeter/q/weather/static/coverage.png" alt="[Lightmeter
			Coverage]"/>
		]]></meta>
		<dbCore queriedTable="geocounts">
			<limit>50000</limit>
			<condDesc>
				<inputKey name="daysPast" type="integer"
					tablehead="Days to plot"
					description="Number of days to look back" required="True"/>
				<phraseMaker>
					<code>
					# Constrain epoch to the last daysPast days (now as JD).
					# NOTE(review): this uses stc.dateTimeToJdn while weathercore
					# calls bare dateTimeToJdn -- presumably both resolve to the
					# same function in the rowmaker namespace; confirm.
					ik = inputKeys[0]
					yield "epoch>%%(%s)s"%base.getSQLKey(ik.name,
						stc.dateTimeToJdn(datetime.datetime.now())-inPars[ik.name],
						outPars)
        	</code>
				</phraseMaker>
			</condDesc>
			<condDesc original="stationCond"/>
			<outputTable>
				<outputField original="epoch"/>
				<outputField original="flux"/>
			</outputTable>
		</dbCore>
	</service>

	<!-- Stateful regression suite exercising the upload service end to end
	against the "test" station; sequential="True" because later tests
	depend on files created/removed by earlier ones. -->
	<regSuite title="Lightmeter uploading" sequential="True">

		<regTest title="Static renderer on upload service yields some sort of
				listing.">
			<url>upload/static/</url>
			<code>
				self.assertHasStrings("Directory listing", "[Directory]")
			</code>
		</regTest>

		<regTest title="Simple text upload works (may fail on state)">
			<url httpMethod="POST" __nevow_form__="upload" _charset_="UTF-8">
				<httpUpload name="inFile" fileName="zw.txt"
					>2009-12-03 18:04:30   1,8  581136 1
					</httpUpload>upload/custom/test/testing</url>
			<code>
				self.assertHasStrings("File zw.txt.gz uploaded, 65 bytes.")
			</code>
		</regTest>

		<!-- Re-uploading the same file name must be refused. -->
		<regTest title="upload/custom/test/testing">
			<url httpMethod="POST" __nevow_form__="upload" _charset_="UTF-8">
				<httpUpload name="inFile" fileName="zw.txt"
					>2010-12-03 18:04:30   1,8  581136 1
					</httpUpload>upload/custom/test/testing</url>
			<code>
				self.assertHasStrings("File zw.txt.gz already exists.",
					"overwrite it, contact")
			</code>
		</regTest>

		<regTest title="Simple deletion works (may fail on state)">
			<url httpMethod="POST" __nevow_form__="upload" _charset_="UTF-8"
					remove="True">
				<httpUpload name="inFile" fileName="zw.txt"
					>DELETED</httpUpload>upload/custom/test/testing</url>
			<code>
				self.assertHasStrings("File zw.txt.gz removed.")
			</code>
		</regTest>

		<regTest title="Deletion of non-existing data fails sensibly
				(may fail on state)">
			<url httpMethod="POST" __nevow_form__="upload" _charset_="UTF-8"
					remove="True">
				<httpUpload name="inFile" fileName="zw.txt.gz"
					>DELETED</httpUpload>upload/custom/test/testing</url>
			<code>
				self.assertHasStrings(
					"'zw.txt.gz' does not exist and thus cannot be deleted.")
			</code>
		</regTest>

		<!-- ZIP handling: res/test.zip contains a.txt and b.txt, which the
		following tests upload, collide with, and clean up again. -->
		<regTest title="Upload of zip file works (may fail on state)">
			<url httpMethod="POST" __nevow_form__="upload" _charset_="UTF-8">
				<httpUpload name="inFile" fileName="test.zip"
					source="res/test.zip"/>upload/custom/test/testing</url>
			<code>
				self.assertHasStrings("File test.zip uploaded, 14 bytes.")
			</code>
		</regTest>

		<regTest title="Overwriting via zip files is rejected (may fail on state)">
			<url httpMethod="POST" __nevow_form__="upload" _charset_="UTF-8">
				<httpUpload name="inFile" fileName="zw.zip"
					source="res/test.zip"/>upload/custom/test/testing</url>
			<code>
				self.assertHasStrings("could not be written: a.txt, b.txt;")
			</code>
		</regTest>

		<regTest title="ZIP file cleanup works I (may fail on state)">
			<url httpMethod="POST" __nevow_form__="upload" _charset_="UTF-8"
					remove="True">
				<httpUpload name="inFile" fileName="a.txt"
					>DELETED</httpUpload>upload/custom/test/testing</url>
			<code>
				self.assertHasStrings("a.txt.gz removed")
			</code>
		</regTest>

		<regTest title="ZIP file cleanup works II (may fail on state)">
			<url httpMethod="POST" __nevow_form__="upload" _charset_="UTF-8"
					remove="True">
				<httpUpload name="inFile" fileName="b.txt"
					>DELETED</httpUpload>upload/custom/test/testing</url>
			<code>
				self.assertHasStrings("b.txt.gz removed")
			</code>
		</regTest>
	</regSuite>

	<!-- Stateless smoke tests: a known weather query result and the 404
	behaviour of the upload renderer for unknown stations. -->
	<regSuite title="lightmeter misc" id="test-misc">
		<regTest title="Light Pollution Weather seems to give reasonable data">
			<url parSet="form" epoch__month="10" epoch__year="2009"
				time="10:00:00" epoch__day="20">weather/form</url>
			<code>
				self.assertHasStrings("log(Flux)", "5.5435")
			</code>
		</regTest>

		<regTest title="Invalid upload URLs give reasonable output.">
			<url>upload/custom/nonex/really</url>
			<code>
				self.assertHasStrings("404", "No such upload facility")
			</code>
		</regTest>
	</regSuite>
</resource>
