<resource schema="dasch" resdir=".">
	<meta name="creationDate">2024-01-25T14:24:17Z</meta>
	<meta name="schema-rank">100</meta>

	<meta name="title">DASCH in the VO</meta>
	<meta name="description">
		This is a re-publication of the metadata from StarGlass, an information
		service operated by Harvard College Observatory (HCO) which primarily
		indexes the holdings of the HCO Astronomical Photographic Glass Plate
		Collection. This collection is the world's largest archive of
		astronomical photographic glass plates, consisting of more than 550,000
		images documenting the entire sky over a century-long time baseline.
		Approximately 430,000 plates in the collection, representing the subset
		of sky images deemed suitable for astrometric and photometric
		calibration, have been digitized at high resolution by the DASCH
		project. StarGlass makes available both photographs and calibrated
		scientific data when available. This resource re-publishes the metadata in
		separate tables for “narrow” and “wide” (extremely wide-field) plates.
		The narrow plates are also published in an obscore table and as a SIAP2
		service.
	</meta>
	<meta name="subject">history-of-astronomy</meta>
	<meta name="subject">astrophotography</meta>

	<meta name="creator">Harvard College Observatory</meta>

	<!-- Instrument names as delivered by StarGlass' telescope field.
	This list was de-duplicated (the original carried one run of repeated
	entries); classification into wide/narrow happens via WIDE_INSTRUMENTS
	in the import data below. -->
	<meta name="instrument">Eastman Aero-Ektar K-24 Lens on a K-19 Barrel Formerly KG (1950)</meta>
	<meta name="instrument">Damons North Red</meta>
	<meta name="instrument">Cooke Short Focus</meta>
	<meta name="instrument">Damons South Blue</meta>
	<meta name="instrument">Damons South Yellow</meta>
	<meta name="instrument">4-inch Ross Lundin</meta>
	<meta name="instrument">24-33 in Jewett Schmidt</meta>
	<meta name="instrument">Cerro Tololo 4 meter</meta>
	<meta name="instrument">4-inch Voightlander Lens</meta>
	<meta name="instrument">8-inch Brashear Lens</meta>
	<meta name="instrument">4-inch Cooke (1-327)</meta>
	<meta name="instrument">Eastman Aero-Ektar K-24 Lens on a K-19 Barrel Formerly KE (1951)</meta>
	<meta name="instrument">3-inch Ross</meta>
	<meta name="instrument">1 in Cook Lens #832 Series renamed from ac-a</meta>
	<meta name="instrument">24-inch Clark Reflector</meta>
	<meta name="instrument">Air Force Camera</meta>
	<meta name="instrument">10-inch Metcalf Triplet</meta>
	<meta name="instrument">13-inch Boyden Refractor (spectrum plates)</meta>
	<meta name="instrument">3 inch Perkin-Zeiss Lens</meta>
	<meta name="instrument">Lowel 40 inch reflector</meta>
	<meta name="instrument">1.5 inch Ross (short focus)</meta>
	<meta name="instrument">8-inch Draper Doublet</meta>
	<meta name="instrument">Logbook Only. Pages without plates.</meta>
	<meta name="instrument">61-inch Wyeth Reflector</meta>
	<meta name="instrument">Roe 6-inch</meta>
	<meta name="instrument">2.6-inch Zeiss-Tessar</meta>
	<meta name="instrument">Damons North Yellow</meta>
	<meta name="instrument">Damons South Red</meta>
	<meta name="instrument">16-inch Boller &amp; Chivens</meta>
	<meta name="instrument">New Cooke Lens</meta>
	<meta name="instrument">5-in Voightlander Transit Photometer</meta>
	<meta name="instrument">11-inch Draper Refractor</meta>
	<meta name="instrument">12-inch Metcalf Doublet</meta>
	<meta name="instrument">8-inch Bache Doublet</meta>
	<meta name="instrument">Patrol cameras</meta>
	<meta name="instrument">3-inch Ross Fecker</meta>
	<meta name="instrument">24-inch Bruce Doublet</meta>
	<meta name="instrument">KE Camera with Installed Rough Focus</meta>
	<meta name="instrument">3-inch Darlot (Series renamed from j)</meta>
	<meta name="instrument">8-inch Ross Lundin</meta>
	<meta name="instrument">32-36 inch BakerSchmidt 10 1/2 inch round Armagh-Dunsink-Harvard</meta>
	<meta name="instrument">K-19 Air Force Camera</meta>
	<meta name="instrument">16-inch Metcalf Doublet (Refigured after 3500)</meta>
	<meta name="instrument">Eastman Aero-Ektar K-24 Lens on a K-19 Barrel</meta>
	<meta name="instrument">3 inch Ross-Tessar Lens</meta>
	<meta name="instrument">7.5-inch Cooke/Clark Refractor at Maria Mitchell Observatory</meta>
	<meta name="instrument">1.5-inch Ross-Xpress</meta>
	<meta name="instrument">2.5-inch Ross Portrait</meta>
	<meta name="instrument">2.5-inch Ross Portrait Lens</meta>
	<meta name="instrument">60 inch Common</meta>
	<meta name="instrument">2.5 inch Voigtlander (Little Bache or "Bachito")</meta>
	<meta name="instrument">3-inch Elmer Ross</meta>
	<meta name="instrument">4-inch Cooke Lens</meta>
	<meta name="instrument">2.8-inch Kodak Aero-Ektar</meta>
	<meta name="instrument">1-inch</meta>
	<meta name="instrument">200 inch Hale Telescope</meta>
	<meta name="instrument">60-inch Rockefeller Reflector</meta>
	<meta name="instrument">2.5 inch Cooke Lens</meta>
	<meta name="instrument">Asiago Observatory 92/67 cm Schmidt</meta>
	<meta name="instrument">Damons North Blue</meta>
	<meta name="instrument">1.5-inch Cooke Lenses</meta>
	<meta name="instrument">Palomar Sky Survey (POSS)</meta>
	<meta name="instrument">1.5 in Cooke "Long Focus"</meta>
	<meta name="instrument">Miscellaneous test plates</meta>
	<meta name="instrument">Misc Basement</meta>
	<meta name="instrument">Various Meteor Cameras</meta>
	<meta name="instrument">13-inch Boyden Refractor</meta>
	<meta name="instrument">Pole Star Recorder</meta>
	<meta name="instrument">YSO Double Astrograph</meta>
	<meta name="facility">Harvard-Smithsonian Center for Astrophysics</meta>

	<meta name="source">2012IAUS..285...29G</meta>
	<meta name="contentLevel">Research</meta>
	<meta name="type">Archive</meta>

	<meta name="coverage.waveband">Optical</meta>

	<STREAM id="plates-content">
		<doc>
			Narrow and wide plates from DASCH have the same schema; this
			stream contains the common elements.
		</doc>
		<mixin have_bandpass_id="False">//siap2#pgs</mixin>
		<meta>
			_associatedDatalinkService.serviceId: dl
			_associatedDatalinkService.idColumn: obs_publisher_did
		</meta>

		<index columns="t_min"/>
		<index columns="t_max"/>
		<index columns="em_min"/>
		<index columns="em_max"/>
		<index columns="obs_publisher_did"/>
		<index columns="s_region" method="GIST"/>
		<FEED source="//scs#spoint-index-def" ra="s_ra" dec="s_dec"/>

		<column name="dasch_id" type="text"
			ucd="meta.id;meta.main"
			tablehead="DASCH id"
			description="Plate identifier in DASCH"
			verbLevel="15"/>
		<column name="plate_comment" type="text"
			ucd="meta.note"
			tablehead="Plate Comment"
			description="Comment associated with plate describing context or content"
			verbLevel="15"/>
		<column name="plate_class" type="text"
			ucd="meta.code"
			tablehead="Class"
			description="Plate class; Values are taken from
				https://library.cfa.harvard.edu/plate-classes"
			verbLevel="25"
			note="pc"/>
		<column name="mosaics" type="jsonb"
			ucd="meta;meta.dataset"
			tablehead="Plates"
			description="Metadata on the FITS files of the scans available; this
				is largely used by GAVO to produce the datalink document.  In
				case you want to use it directly, see
				https://starglass.cfa.harvard.edu/docs/api/mosaics.html"
			verbLevel="30"/>
		<column name="datalinks" type="jsonb" hidden="True"
			tablehead="URLs"
			description="Links to various artefacts.  We only use this
				to figure out the presence of certain artefacts here, since
				the links typically need an API key.  Actual access via
				datalink goes through our getprod service that handles
				the API key juggling."
			verbLevel="31"/>
	</STREAM>

	<table id="narrow_plates" onDisk="True"
			adql="True" primary="dasch_id">
		<mixin
			preview="'\getConfig{web}{serverURL}/dasch/q/getproduct/preview' || dasch_id"
			>//obscore#publishObscoreLike</mixin>
		<meta name="description">This table holds metadata for the
			parts of DASCH counting as targeted observations (plate scale
			below 400 arcsec/mm).  “Patrol” and “Meteor” plates in DASCH
			nomenclature are found in the wide_plates table.
		</meta>

		<FEED source="plates-content"/>
		<publish sets="local,ivo_managed"/>
	</table>

	<table id="wide_plates" onDisk="True"
			adql="True" primary="dasch_id">
		<meta name="description">This table holds metadata for the
			“Patrol” and “Meteor” plates from DASCH, i.e., very wide-field
			observations presumably not useful in global discovery.  These
			data products are therefore not re-published through obscore.
			For the “narrow” plates, see the narrow_plates table.
		</meta>

		<FEED source="plates-content"/>
		<publish sets="ivo_managed"/>
	</table>

	<table id="plates" onDisk="True" adql="True">
		<meta name="description">This table is a union of
			the narrow_plates and wide_plates tables.  For non-historic
			use, you probably get better results looking at narrow_plates.
		</meta>

		<FEED source="plates-content"/>

		<viewStatement>
			CREATE VIEW \qName AS (
				SELECT \colNames FROM
					dasch.narrow_plates
				UNION ALL
				SELECT \colNames FROM
					dasch.wide_plates)
		</viewStatement>
	</table>

	<coverage>
		<updater sourceTable="narrow_plates"/>
	</coverage>

	<data id="import" recreateAfter="make-view">
		<sources pattern="data/harvested/*.json"/>
		<embeddedGrammar notify="True" isDispatching="True">
			<iterator>
				<setup imports="json">
					<code>
						def condget(val, *keys):
							"""walks into the nested structure val along keys,
							indexing with integer keys and .get()-ing with all
							others; returns None as soon as a step comes up
							empty.
							"""
							for key in keys:
								if isinstance(key, int):
									val = val[key]
								else:
									val = val.get(key)
								if val is None:
									break
							return val

						def to_wcs_keys(exposure):
							"""returns a dasch exposure record with proper wcs keys.
							"""
							# keys not in key_map are passed through unchanged.
							key_map = {
								"ctr_dec": "CRVAL2",
								"ctr_ra": "CRVAL1",
								"delta_dec_x": "CD2_1",
								"delta_dec_y": "CD2_2",
								"delta_ra_x": "CD1_1",
								"delta_ra_y": "CD1_2",
								"crpix1": "CRPIX1",
								"crpix2": "CRPIX2",
								"naxis1": "NAXIS1",
								"naxis2": "NAXIS2",
							}
							return dict((key_map.get(k, k), v)
								for k, v in exposure.items())

						# we sort into wide and narrow by instrument name rather
						# than plate scale because we don't have the plate scale
						# for many plates.  The following list is the result of
						# looking at the plate scales of the initial import in May 2024,
						# See blog post from May 2024
						# NOTE(review): the literal string "NULL" below is
						# presumably a stringified missing telescope name from
						# upstream -- confirm against the harvested JSON.
						WIDE_INSTRUMENTS = {
						"Eastman Aero-Ektar K-24 Lens on a K-19 Barrel Formerly KG (1950)",
						"Logbook Only. Pages without plates.",
						"Roe 6-inch",
						"1.5 inch Ross (short focus)",
						"Patrol cameras",
						"1.5-inch Ross-Xpress",
						"2.8-inch Kodak Aero-Ektar",
						"KE Camera with Installed Rough Focus",
						"Eastman Aero-Ektar K-24 Lens on a K-19 Barrel",
						"Eastman Aero-Ektar K-24 Lens on a K-19 Barrel Formerly KE (1951)",
						"3 inch Perkin-Zeiss Lens",
						"3 inch Ross-Tessar Lens",
						"2.6-inch Zeiss-Tessar",
						"Air Force Camera",
						"K-19 Air Force Camera",
						"1.5 in Cooke \"Long Focus\"",
						"1 in Cook Lens #832 Series renamed from ac-a",
						"1-inch",
						"1.5-inch Cooke Lenses",
						"2.5 inch Cooke Lens",
						"2.5-inch Ross Portrait Lens",
						"Damons South Yellow",
						"Damons South Red",
						"Damons North Red",
						"Damons North Blue",
						"Damons North Yellow",
						"New Cooke Lens",
						"Damons South Blue",
						"2.5 inch Voigtlander (Little Bache or \"Bachito\")",
						"NULL",
						"3-inch Ross Fecker",
						"3-inch Ross",
						"3-inch Elmer Ross",
						"4-inch Ross Lundin",
						}
						
						# these DASCH ids need to be investigated; their metadata
						# records look really awful
						completely_broken = {'ai43984', 'ai43986'}
					</code>
				</setup>
				<code>
				# This is a dispatching grammar: each yield is a
				# (role, record) pair; roles "wide"/"narrow" feed the two
				# tables, "products" feeds the products machinery.
				with open(self.sourceToken, encoding="utf-8") as f:
					result = json.load(f)
				if result["num_errors"]!=0:
					base.ui.notifyWarning(
						f"{result['num_errors']} errors in {self.sourceToken}")

				for plateid, platedata in result["results"].items():
					if plateid in completely_broken:
						continue
					metadata = platedata["data"]
					if metadata is None:
						# The CSV apparently contains a few non-existing plates.
						# Skip these.
						continue

					record = {"dasch_id": plateid,
						"status": platedata["status"],
						"instrument_name": metadata.get("telescope"),
						"plate_comment": metadata.get("plate_comment"),
						"comment_astronomers": metadata.get("comment_astronomers"),
						"plate_class": metadata.get("class"),
						"timestamp": condget(metadata, "catalog_exposures", 0, "datetime"),
						"facility_name": condget(metadata, "location", "name"),
						"mosaics": metadata.get("mosaics"),
						"NAXIS1": None,
						"NAXIS2": None,
						"NAXIS": 2,
					}

					# Only take over a WCS when there is exactly one exposure;
					# multi-exposure plates are left uncalibrated here.
					wcs = metadata.get("exposures")
					if wcs and len(wcs)==1:
						record.update(to_wcs_keys(wcs[0]))
						record.update({
							"CTYPE1": 'RA---TAN-SIP',
							"CTYPE2": 'DEC--TAN-SIP',
							"LONPOLE": 180.,
							"CUNIT1": "deg", "CUNIT2": "deg"})

					# Strip the query part of the upstream links; it carries
					# (expiring) authentication material we must not keep.
					datalinks = {}
					for key in ["jacket_images", "plate_images",
							"jacket_thumbnail", "plate_thumbnail"]:
						datalinks[key] = [r["url"].split("?")[0]
							for r in metadata[key]]
					record["datalinks"] = datalinks
					record["accref"] = "dasch/q/"+record["dasch_id"]

					if record["instrument_name"] in WIDE_INSTRUMENTS:
						record["isWide"] = True
						yield ("wide", record)
					else:
						record["isWide"] = False
						yield ("narrow", record)

					yield ("products", record)
				</code>
			</iterator>

			<rowfilter procDef="//products#define">
				<setup>
					<code>
						baseURL = rd.getById("dl").getURL("dlmeta")
					</code>
				</setup>

				<bind key="table">"changes-per-row"</bind>
				<bind key="accref">@accref</bind>
				<bind key="path">baseURL+"?ID="+urllib.parse.quote_plus(
					getStandardPubDID(@accref))</bind>
				<bind key="mime">'application/x-votable+xml;content=datalink'</bind>
				<bind key="fsize">10000</bind>
			</rowfilter>

			<rowfilter name="switch_table">
				<!-- table is an early parameter in //products#define, but we
				need to set it row by row.  So, we fix things after the fact.
				Note that the schema is dasch (the collection), not dachs
				(the software); the original had the dachs typo here, which
				would have pointed the products table at nonexisting tables. -->
				<code>
					if @isWide:
						@prodtblTable = "dasch.wide_plates"
					else:
						@prodtblTable = "dasch.narrow_plates"
					yield row
				</code>
			</rowfilter>
		</embeddedGrammar>

		<rowmaker id="make-plates">
			<idmaps>dasch_id,
				plate_comment, plate_class, datalinks, mosaics</idmaps>

			<var key="t_exptime" nullExcs="KeyError,TypeError">@exposure*60</var>

			<apply name="compute_times"><code>
				# Records without a timestamp cannot fill any of the time
				# columns; drop them entirely (the original had unreachable
				# fallback code after the raise, removed here).
				if not @timestamp:
					base.ui.notifyInfo("No timestamp on DASCH record: {}".
						format(@dasch_id))
					raise IgnoreThisRow("no timestamp")

				# centre the exposure interval on the catalogued datetime;
				# exptime is in seconds, MJD wants days.
				exptime = @t_exptime or 0
				@centre = dateTimeToMJD(parseISODT(@timestamp))
				@t_min = @centre-exptime/86400./2
				@t_max = @centre+exptime/86400./2
			</code></apply>

			<apply procDef="//siap2#computePGS">
				<code>
					result["s_xel1"] = result["s_xel2"] = None
						
					try:
						addWCS(vars, result)
					except Exception as msg:
						nullOutWCS(result)
						if vars.get("NAXIS1") is not None:
							base.ui.notifyWarning(
								f"Broken WCS ignored on {vars['dasch_id']} ({msg})")
				</code>
			</apply>

			<apply procDef="//siap2#setMeta" name="setmeta">
				<bind name="bandpassId">"Photographic emulsion; details pending"</bind>
				<bind name="calib_level">2 if result["s_region"] else 1</bind>
				<bind name="dateObs">@centre</bind>
				<bind name="instrument_name">@instrument_name</bind>
				<bind name="obs_collection">'DASCH'</bind>
				<bind name="obs_title">(@instrument_name+" observation on "
					+mjdToDateTime(@centre).isoformat())</bind>
				<bind name="t_exptime">@t_exptime</bind>
				<bind name="t_min">@t_min</bind>
				<bind name="t_max">@t_max</bind>
			</apply>
		</rowmaker>

		<make table="narrow_plates" rowmaker="make-plates" role="narrow"/>
		<make table="wide_plates" rowmaker="make-plates" role="wide"/>
	</data>

	<data id="make-view" auto="False">
		<make table="plates"/>
	</data>

	<service id="dl" allowed="dlmeta,dlget">
		<meta name="title">DASCH VO Datalink Service</meta>
		<meta name="description">This service generates authorised links
			to access DASCH FITS files.  To avoid unnecessarily large downloads,
			the #this link is a downsampled image.  To retrieve full-resolution
			FITS-es, look for rows with semantics="#coderived" and local
			semantics "fullscale".

			The dlget service is a SODA emulation; however, the cutouts have a
			fixed size.
		</meta>
		<datalinkCore>
			<descriptorGenerator procDef="//soda#fromStandardPubDID">
				<bind name="accrefPrefix">"dasch/q/"</bind>
				<bind name="contentQualifier">"image"</bind>
				<setup>
					<code>
						def addExtras(descriptor):
							descriptor.suppressAutoLinks = True
							with base.getTableConn() as conn:
								descriptor.extMeta = next(conn.queryToDicts(
									"SELECT * FROM dasch.plates"
									" WHERE obs_publisher_did = %(did)s",
									{"did": descriptor.pubDID}))

							# the original plan has been to use mosaics to infer
							# what is actually there.  But the links are not
							# useful because they can only be used with an API
							# key.  So byBinF is unused for now and we make
							# our own links.
							descriptor.byBinF = {}
							for m in descriptor.extMeta["mosaics"]:
								descriptor.byBinF.setdefault(m["bin_factor"], []
									).append((m["mosaic_num"], m["bin_factor"], m["flags"]))
							
							descriptor.datalinks = descriptor.extMeta["datalinks"]
							descriptor.prodURL = rd.getById("getprod").getURL("qp")
							descriptor.plateid = descriptor.extMeta["dasch_id"]
					</code>
				</setup>
			</descriptorGenerator>

			<metaMaker semantics="#this">
				<code>
					# Rough size estimate from the pixel counts; the factor 16
					# is presumably bits per pixel -- TODO confirm (content
					# lengths are nominally bytes).  Fall back to a generous
					# default when NAXISn are unknown (s_xeln is None).
					try:
						estimatedSize = descriptor.extMeta["s_xel1"
							]*descriptor.extMeta["s_xel2"]*16
					except TypeError:
						estimatedSize = 100000000

					yield descriptor.makeLink(
						descriptor.prodURL+"/fits-16/"+descriptor.plateid,
						contentType="application/fits",
						description="Plate scan downsampled by a factor of 16",
						contentLength=estimatedSize//16**2,
						localSemantics="downsampled-16")

					yield descriptor.makeLink(
						descriptor.prodURL+"/fits-01/"+descriptor.plateid,
						contentType="application/fits",
						description="Full-scale plate scan",
						contentLength=estimatedSize,
						semantics="#coderived",
						localSemantics="fullscale")
				</code>
			</metaMaker>

			<!-- ancillary links; we only emit them when the corresponding
			upstream artefacts were seen during harvesting. -->
			<metaMaker semantics="#preview">
				<code>
					if "plate_images" in descriptor.datalinks:
						yield descriptor.makeLink(
							descriptor.prodURL+"/preview/"+descriptor.plateid,
							contentType="image/jpeg",
							description="Thumbnail for the plate scan")
				</code>
			</metaMaker>

			<metaMaker semantics="#preview-image">
				<code>
					if "plate_images" in descriptor.datalinks:
						yield descriptor.makeLink(
							descriptor.prodURL+"/jpeg/"+descriptor.plateid,
							contentType="image/jpeg",
							description="A lo-fi rendition of the plate scan")
				</code>
			</metaMaker>

			<metaMaker semantics="#documentation">
				<code>
					if "jacket_images" in descriptor.datalinks:
						yield descriptor.makeLink(
							descriptor.prodURL+"/jacket/"+descriptor.plateid,
							contentType="image/jpeg",
							description="A photo of the plate jacket")
				</code>
			</metaMaker>

			<metaMaker procDef="//soda#fits_makeSODAPOS"/>
			<dataFunction>
				<code>
					# leave a data attribute so the machinery is happy.  The
					# actual upstream request will be in do-remote-cutout
					descriptor.data = []
				</code>
			</dataFunction>
			<dataFunction procDef="//soda#fits_translateSODAPOS"/>

			<dataFunction id="do-remote-cutout">
				<setup imports="requests, gzip, base64"/>
				<code>
					# Regrettably, the SODA cutout can come in all kinds of crappy
					# ways.
					if args.get("CIRCLE"):
						pos = args["CIRCLE"].center
					elif args.get("POLYGON"):
						pos = pgsphere.SPoint(*args["POLYGON"].getCenter())
					elif args.get("RA"):
						# we'd have to be a lot more careful here, what with stitching
						# lines, simple centres, and so on.  But let's see if anyone
						# even uses this.
						pos = pgsphere.SPoint.fromDegrees(
							sum(args["RA"])/2,
							sum(args["DEC"])/2)
					else:
						raise ValidationError("Cannot find a position I recognise",
							colName="POS")

					ra, dec = pos.asDALI()
					payload = {
						"center_ra_deg": ra,
						"center_dec_deg": dec,
						"plate_id": descriptor.extMeta["dasch_id"],
						"solution_number": 0,
					}
					resp = requests.post("https://api.starglass.cfa.harvard.edu/public/dasch/dr7/cutout",
						json=payload)

					resp.raise_for_status()
					descriptor.data = gzip.decompress(base64.b64decode(resp.text))
				</code>
			</dataFunction>
			<dataFormatter><code>
				return ("application/fits", descriptor.data)
			</code></dataFormatter>
		</datalinkCore>
	</service>

	<service id="getprod" allowed="qp">
		<meta name="title">DASCH Product Access</meta>
		<meta name="description">Yield pre-authenticated links to DASCH
			data products.  Successful calls will redirect to the
			artefact in question.</meta>

		<property name="queryField">productpath</property>

		<pythonCore>
			<inputTable>
				<inputKey name="productpath" type="text"
					description="The product path as kind/plate_id, where
						kind is one of fits, jacket, jpeg, preview"/>
			</inputTable>
			<coreProc>
				<setup imports="requests,gavo.svcs">
					<code>
						apiTemplates = {
							"fits-01": "/plates/p/{plateid}/mosaic?bin_factor=01",
							"fits-16": "/plates/p/{plateid}/mosaic?bin_factor=16",
							"jacket": "/plates/p/{plateid}/download?image_type=jacket",
							"jpeg": "/plates/p/{plateid}/download?image_type=plate",
							"preview": "/plates/p/{plateid}/download?image_type=plate"
								"&amp;thumbnail_ratio=16",
						}
						apiURL = "https://api.starglass.cfa.harvard.edu/full"
						with open(rd.getAbsPath("dasch-api-key")) as f:
							apiKey = f.read().strip()
					</code>
				</setup>
				<code><![CDATA[
					path = inputTable.args["productpath"]
					mat = re.match("(?P<kind>[^/]+)/(?P<plateid>[^/]+)$", path)
					if not mat:
						raise svcs.UnknownURI(
							f"Invalid productpath '{path}'.",
							hint="Good paths have the form kind/plateid")
					
					kind, plateid = mat.groups()
					if kind not in apiTemplates:
						raise svcs.UnknownURI(
							f"Unknown product kind '{kind}'",
							hint=f"Known kinds are {apiTemplates.keys()}")

					apiCall = apiTemplates[kind].format(**locals())
					resp = requests.get(
						apiURL+apiCall,
						headers={"x-api-key": apiKey},
						allow_redirects=False)
					
					# aw god; sometimes this a JSON file with a link, sometimes
					# it's a redirect.  Ah well.
					try:
						if resp.status_code==200:
							newURL = resp.json()["presigned_link"]
						else:
							newURL = resp.headers["location"]
					except Exception as msg:
						raise base.ui.logOldExc(
							svcs.UnknownURI(
								"DASCH presign failed -- does this plate exist?",
								hint=f"The local symptom was {msg}"))
						
					raise svcs.Found(newURL)
				]]></code>
			</coreProc>
		</pythonCore>
	</service>

	<service id="im" allowed="siap2.xml">
		<meta name="shortName">DASCH</meta>
		<meta name="title">DASCH SIAP2</meta>
		<meta name="description">SIAP2 service over the DASCH (Harvard) plates.
			Please note that a substantial number of plates has no calibration
			and hence will not be found using SIAP2 queries with positional
			constraints.  Also, this service only returns “narrow” plates,
			which here excludes plates from instruments with a field of view above
			200 square degrees.
		</meta>
		<meta>
			sia.type: Pointed
			testQuery.pos.ra: 10
			testQuery.pos.dec: 10
			testQuery.size.ra: 1
			testQuery.size.dec: 1
		</meta>

		<publish render="siap2.xml" sets="ivo_managed"/>

		<dbCore queriedTable="narrow_plates">
			<FEED source="//siap2#parameters"/>
		</dbCore>
	</service>

	<regSuite title="DASCH regression">
		<regTest title="DASCH data present">
			<url parSet="TAP" QUERY="
				SELECT * from dasch.plates where dasch_id in ('a01299', 'a01419')
				">/tap/sync</url>
			<code>
				recs = dict((r["dasch_id"], r) for r in self.getVOTableRows())
				# a01299 is not solved
				rec = recs["a01299"]
				self.assertEqual(rec["s_ra"], None)
				self.assertEqual(rec["facility_name"],
					"Harvard-Smithsonian Center for Astrophysics")
				self.assertEqual(rec["instrument_name"], "24-inch Bruce Doublet")
				self.assertAlmostEqual(rec["t_min"], 13247.0419999999)
				# a01419 is solved
				rec = recs["a01419"]
				self.assertAlmostEqual(rec["s_ra"], 107.87146626170235)
				self.assertAlmostEqual(rec["s_fov"], 8.602721773801411, 6)
				self.assertAlmostEqual(rec["t_max"], 13316.066861111029)
				self.assertAlmostEqual(rec["s_pixel_scale"], 0.6547661030197105)
				self.assertAlmostEqual(rec["t_exptime"], 840.0)
				# plate_comment is a string; exact comparison is what we mean
				# (assertAlmostEqual only worked via its equality shortcut)
				self.assertEqual(rec["plate_comment"], "M.E.H.")
			</code>
		</regTest>

		<regTest title="DASCH SIA2 works">
			<url POS="CIRCLE 107.87 67.22 0.1"
				TIME="13316.05 13316.06">/dasch/q/im/siap2.xml</url>
			<code>
				rec = self.getFirstVOTableRow()
				self.assertEqual(
					rec['obs_title'],
					'24-inch Bruce Doublet observation on 1895-05-03T01:29:16.799993')
				self.assertEqual(rec['s_xel1'], 31627)
			</code>
		</regTest>

		<regTest title="DASCH datalink seems to work">
			<url ID="ivo://org.gavo.dc/~?dasch/q/a01299">/dasch/q/dl/dlmeta</url>
			<code>
				bySemantics = self.datalinkBySemantics()
				self.assertEqual(
					set(bySemantics), {
						'#this', '#preview-image', '#preview',
						'#documentation', '#coderived', "#proc"})

				self.assertEqual(
					self.getUnique(bySemantics["#coderived"])["local_semantics"],
					"fullscale")

				self.assertEqual(
					self.getUnique(bySemantics["#documentation"])["access_url"],
					EqualingRE(".*/dasch/q/getprod/qp/jacket/a01299"))

				self.assertEqual(
					self.getUnique(bySemantics["#preview"])["description"],
					"Thumbnail for the plate scan")

				self.assertEqual(
					self.getUnique(bySemantics["#this"])["content_length"],
					390625) # this is wrong because I can't see NAXISn here.
				
			</code>
		</regTest>

		<regTest title="getprod bad path yields meaningful error">
			<url>getprod/qp/junk</url>
			<code>
				self.assertHTTPStatus(404)
				self.assertHasStrings("Invalid productpath 'junk'.")
			</code>
		</regTest>

		<regTest title="getprod bad kind yields meaningful error">
			<url>getprod/qp/junk/a01299</url>
			<code>
				self.assertHTTPStatus(404)
				self.assertHasStrings("'fits-01', 'fits-16',") # from the hint
			</code>
		</regTest>

		<regTest title="getprod fits redirects to pre-authenticated"
				tags="bigserver">
			<url>getprod/qp/fits-16/a01299</url>
			<setup imports="gavo.rscdef.regtest"/>
			<code>
				self.assertHTTPStatus(302)
				nextURL = regtest.getHeaderValue(self.headers, "location")
				self.assertTrue(nextURL.startswith("https://dasch-prod-user.s3.us-east-1.amazonaws.com/plates/a01299/a01299_mosaic_00_16r270ww.fit.fz"))
			</code>
		</regTest>

		<regTest title="DASCH SIAP returns some data">
			<url POS="CIRCLE 107.9 67.2 0.5" TIME="13316.05 13316.06"
				>im/siap2.xml</url>
			<code>
				row = self.getFirstVOTableRow()
				self.assertEqual(
					row["access_format"],
					"application/x-votable+xml;content=datalink")
				self.assertAlmostEqual(
					row["t_max"],
					13316.066861111029)
				self.assertAlmostEqual(
					row["s_pixel_scale"],
					0.6547661030197105)
			</code>
		</regTest>

		<regTest title="Narrow plates table is filled">
			<url parSet="TAP" QUERY="SELECT t_exptime, s_pixel_scale, t_max
				FROM dasch.narrow_plates
				WHERE dasch_id='a00194'">/tap/sync</url>
			<code>
				row = self.getFirstVOTableRow()
				self.assertEqual(row["t_exptime"], 960.0)
				self.assertAlmostEqual(row["t_max"], 12854.16455555554)
				self.assertAlmostEqual(row["s_pixel_scale"], 0.6553118760334123)
			</code>
		</regTest>

		<regTest title="Wide plates table is filled">
			<url parSet="TAP" QUERY="SELECT plate_comment, s_xel1, t_min
				FROM dasch.wide_plates WHERE dasch_id='fa12689'">/tap/sync</url>
			<code>
				row = self.getFirstVOTableRow()
				self.assertEqual(row["plate_comment"], "emulsion severely yellowed")
				self.assertAlmostEqual(row["t_min"], 34092.35763888899)
				# the next assertion ought to become false when upstream has a
				# better way of including NAXISn; None is compared exactly
				# (assertAlmostEqual on None only worked via its equality
				# shortcut)
				self.assertEqual(row["s_xel1"], None)
			</code>
		</regTest>

		<regTest title="cutout service seems to work" tags="bigserver">
			<url POS="CIRCLE 129.7 19.8 0.1"
				ID="ivo://org.gavo.dc/~?dasch/q/a00194">dl/dlget</url>
			<code>
				self.assertHasStrings(
					"CRVAL1  =                129.7",
					"SIMPLE  =                    T")
			</code>
		</regTest>

	</regSuite>
</resource>
