001 /*
002 * $Id: SuomiNPPDataSource.java,v 1.4 2012/03/29 20:06:42 tommyj Exp $
003 *
004 * This file is part of McIDAS-V
005 *
006 * Copyright 2007-2012
007 * Space Science and Engineering Center (SSEC)
008 * University of Wisconsin - Madison
009 * 1225 W. Dayton Street, Madison, WI 53706, USA
010 * https://www.ssec.wisc.edu/mcidas
011 *
012 * All Rights Reserved
013 *
014 * McIDAS-V is built on Unidata's IDV and SSEC's VisAD libraries, and
015 * some McIDAS-V source code is based on IDV and VisAD source code.
016 *
017 * McIDAS-V is free software; you can redistribute it and/or modify
018 * it under the terms of the GNU Lesser Public License as published by
019 * the Free Software Foundation; either version 3 of the License, or
020 * (at your option) any later version.
021 *
022 * McIDAS-V is distributed in the hope that it will be useful,
023 * but WITHOUT ANY WARRANTY; without even the implied warranty of
024 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
025 * GNU Lesser Public License for more details.
026 *
027 * You should have received a copy of the GNU Lesser Public License
028 * along with this program. If not, see http://www.gnu.org/licenses.
029 */
030
031 package edu.wisc.ssec.mcidasv.data.hydra;
032
033 import edu.wisc.ssec.mcidasv.data.HydraDataSource;
034 import edu.wisc.ssec.mcidasv.data.PreviewSelection;
035
036 import java.io.ByteArrayInputStream;
037 import java.io.File;
038 import java.io.FilenameFilter;
039
040 import java.rmi.RemoteException;
041
042 import java.text.SimpleDateFormat;
043
044 import java.util.ArrayList;
045 import java.util.Date;
046 import java.util.Enumeration;
047 import java.util.HashMap;
048 import java.util.Hashtable;
049 import java.util.Iterator;
050 import java.util.List;
051 import java.util.SimpleTimeZone;
052 import java.util.StringTokenizer;
053 import java.util.TimeZone;
054 import java.util.TreeSet;
055
056 import org.jdom.Namespace;
057 import org.jdom.output.XMLOutputter;
058
059 import org.slf4j.Logger;
060 import org.slf4j.LoggerFactory;
061
062 import ucar.ma2.ArrayFloat;
063 import ucar.ma2.DataType;
064 import ucar.nc2.Dimension;
065 import ucar.nc2.Group;
066 import ucar.nc2.NetcdfFile;
067 import ucar.nc2.Variable;
068
069 import ucar.unidata.data.DataCategory;
070 import ucar.unidata.data.DataChoice;
071 import ucar.unidata.data.DataSelection;
072 import ucar.unidata.data.DataSelectionComponent;
073 import ucar.unidata.data.DataSourceDescriptor;
074 import ucar.unidata.data.DirectDataChoice;
075 import ucar.unidata.data.GeoLocationInfo;
076 import ucar.unidata.data.GeoSelection;
077
078 import ucar.unidata.util.Misc;
079
080 import visad.Data;
081 import visad.FlatField;
082 import visad.VisADException;
083
084 import visad.util.Util;
085
086 /**
087 * A data source for NPOESS Preparatory Project (Suomi NPP) data
088 * This will probably move, but we are placing it here for now
089 * since we are leveraging some existing code used for HYDRA.
090 */
091
092 public class SuomiNPPDataSource extends HydraDataSource {
093
094 private static final Logger logger = LoggerFactory.getLogger(SuomiNPPDataSource.class);
095
096 /** Sources file */
097 protected String filename;
098
099 protected MultiDimensionReader nppAggReader;
100
101 protected MultiDimensionAdapter[] adapters = null;
102
103 private ArrayList<MultiSpectralData> multiSpectralData = new ArrayList<MultiSpectralData>();
104 private HashMap<String, MultiSpectralData> msdMap = new HashMap<String, MultiSpectralData>();
105
106 private static final String DATA_DESCRIPTION = "Suomi NPP Data";
107
108 // instrument related variables and flags
109 ucar.nc2.Attribute instrumentName = null;
110 private String productName = null;
111
112 // for now, we are only handling CrIS variables that match this filter and SCAN dimensions
113 private String crisFilter = "ES_Real";
114
115 // for now, we are only handling OMPS variables that match this filter and SCAN dimensions
116 private String ompsFilter = "Radiance";
117
118 private HashMap defaultSubset;
119 public TrackAdapter track_adapter;
120
121 private List categories;
122 private boolean hasChannelSelect = false;
123 private boolean hasImagePreview = true;
124 private boolean isCombinedProduct = false;
125 private boolean nameHasBeenSet = false;
126
127 private FlatField previewImage = null;
128
129 private static int[] YSCAN_POSSIBILITIES = {
130 48, 96, 512, 768, 771, 771, 1536, 1541, 2304, 2313, 180, 60, 60, 60, 5, 15
131 };
132 private static int[] XSCAN_POSSIBILITIES = {
133 254, 508, 2133, 3200, 4121, 4421, 6400, 8241, 4064, 4121, 96, 30, 30, 30, 5, 105
134 };
135 private static int[] ZSCAN_POSSIBILITIES = {
136 -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 22, 163, 437, 717, 200, 260
137 };
138 private int inTrackDimensionLength = -1;
139
140 // need our own separator char since it's always Unix-style in the Suomi NPP files
141 private static final String SEPARATOR_CHAR = "/";
142
143 // date formatter for converting Suomi NPP day/time to something we can use
144 SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddhhmmss.SSS");
145
146 // date formatter for how we want to show granule day/time on display
147 SimpleDateFormat sdfOut = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");
148
149 /**
150 * Zero-argument constructor for construction via unpersistence.
151 */
152
153 public SuomiNPPDataSource() {}
154
155 /**
156 * Construct a new Suomi NPP HDF5 data source.
157 * @param descriptor descriptor for this <code>DataSource</code>
158 * @param fileName name of the hdf file to read
159 * @param properties hashtable of properties
160 *
161 * @throws VisADException problem creating data
162 */
163
164 public SuomiNPPDataSource(DataSourceDescriptor descriptor,
165 String fileName, Hashtable properties)
166 throws VisADException {
167 this(descriptor, Misc.newList(fileName), properties);
168 logger.debug("SuomiNPPDataSource called, single file selected: " + fileName);
169 }
170
171 /**
172 * Construct a new Suomi NPP HDF5 data source.
173 * @param descriptor descriptor for this <code>DataSource</code>
174 * @param sources List of filenames
175 * @param properties hashtable of properties
176 *
177 * @throws VisADException problem creating data
178 */
179
180 public SuomiNPPDataSource(DataSourceDescriptor descriptor,
181 List<String> newSources, Hashtable properties)
182 throws VisADException {
183 super(descriptor, newSources, DATA_DESCRIPTION, properties);
184 logger.debug("SuomiNPPDataSource constructor called, file count: " + sources.size());
185
186 filename = (String) sources.get(0);
187 setDescription("Suomi NPP");
188
189 for (Object o : sources) {
190 logger.debug("Suomi NPP source file: " + (String) o);
191 }
192
193 setup();
194 }
195
196 public void setup() throws VisADException {
197
198 // looking to populate 3 things - path to lat, path to lon, path to relevant products
199 String pathToLat = null;
200 String pathToLon = null;
201 TreeSet<String> pathToProducts = new TreeSet<String>();
202
203 // flag to indicate data is 3-dimensions (X, Y, channel or band)
204 boolean is3D = false;
205
206 // check source filenames to see if this is a combined product
207 // XXX TJJ - looking for "underscore" is NOT GUARANTEED TO WORK! FIXME
208 String prodStr = filename.substring(
209 filename.lastIndexOf(File.separatorChar) + 1,
210 filename.lastIndexOf(File.separatorChar) + 1 + filename.indexOf("_"));
211 StringTokenizer st = new StringTokenizer(prodStr, "-");
212 logger.debug("check for embedded GEO, tokenizing: " + prodStr);
213 while (st.hasMoreTokens()) {
214 String singleProd = st.nextToken();
215 logger.debug("Next token: " + singleProd);
216 for (int i = 0; i < JPSSUtilities.geoProductIDs.length; i++) {
217 if (singleProd.equals(JPSSUtilities.geoProductIDs[i])) {
218 logger.debug("Setting isCombinedProduct true, Found embedded GEO: " + singleProd);
219 isCombinedProduct = true;
220 break;
221 }
222 }
223 }
224
// various metadata we'll need to gather on a per-product basis
226 ArrayList<String> unsignedFlags = new ArrayList<String>();
227 ArrayList<String> unpackFlags = new ArrayList<String>();
228
229 // time for each product in milliseconds since epoch
230 ArrayList<Long> productTimes = new ArrayList<Long>();
231
232 // geo product IDs for each granule
233 ArrayList<String> geoProductIDs = new ArrayList<String>();
234
235 // aggregations will use sets of NetCDFFile readers
236 ArrayList<NetCDFFile> ncdfal = new ArrayList<NetCDFFile>();
237
238 // we should be able to find an XML Product Profile for each data/product type
239 SuomiNPPProductProfile nppPP = null;
240
241 sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
242
243 try {
244
245 nppPP = new SuomiNPPProductProfile();
246
247 // for each source file provided get the nominal time
248 for (int fileCount = 0; fileCount < sources.size(); fileCount++) {
249 // need to open the main NetCDF file to determine the geolocation product
250 NetcdfFile ncfile = null;
251 String fileAbsPath = null;
252 try {
253 fileAbsPath = (String) sources.get(fileCount);
254 logger.debug("Trying to open file: " + fileAbsPath);
255 ncfile = NetcdfFile.open(fileAbsPath);
256 if (! isCombinedProduct) {
257 ucar.nc2.Attribute a = ncfile
258 .findGlobalAttribute("N_GEO_Ref");
259 logger.debug("Value of GEO global attribute: "
260 + a.getStringValue());
261 String tmpGeoProductID = a.getStringValue();
262 geoProductIDs.add(tmpGeoProductID);
263 }
264 Group rg = ncfile.getRootGroup();
265
266 logger.debug("Root group name: " + rg.getName());
267 List<Group> gl = rg.getGroups();
268 if (gl != null) {
269 for (Group g : gl) {
270 logger.debug("Group name: " + g.getName());
271 // when we find the Data_Products group, go down another group level and pull out
272 // what we will use for nominal day and time (for now anyway).
273 // XXX TJJ fileCount check is so we don't count the GEO file in time array!
274 if (g.getName().contains("Data_Products") && (fileCount != sources.size())) {
275 boolean foundDateTime = false;
276 List<Group> dpg = g.getGroups();
277
278 // cycle through once looking for XML Product Profiles
279 for (Group subG : dpg) {
280
281 // determine the instrument name (VIIRS, ATMS, CrIS, OMPS)
282 instrumentName = subG.findAttribute("Instrument_Short_Name");
283
284 // This is also where we find the attribute which tells us which
285 // XML Product Profile to use!
286 ucar.nc2.Attribute axpp = subG.findAttribute("N_Collection_Short_Name");
287 if (axpp != null) {
288 String baseName = axpp.getStringValue();
289 productName = baseName;
290 String productProfileFileName = nppPP.getProfileFileName(baseName);
291 logger.debug("Found profile: " + productProfileFileName);
292 if (productProfileFileName == null) {
293 throw new Exception("XML Product Profile not found in catalog");
294 }
295 try {
296 nppPP.addMetaDataFromFile(productProfileFileName);
297 } catch (Exception nppppe) {
298 logger.error("Error parsing XML Product Profile: " + productProfileFileName);
299 throw new Exception("XML Product Profile Error");
300 }
301 }
302 }
303
304 // 2nd pass through sub-group to extract date/time for aggregation
305 for (Group subG : dpg) {
306 List<Variable> vl = subG.getVariables();
307 for (Variable v : vl) {
308 ucar.nc2.Attribute aDate = v.findAttribute("AggregateBeginningDate");
309 ucar.nc2.Attribute aTime = v.findAttribute("AggregateBeginningTime");
310 // did we find the attributes we are looking for?
311 if ((aDate != null) && (aTime != null)) {
312 String sDate = aDate.getStringValue();
313 String sTime = aTime.getStringValue();
314 logger.debug("For day/time, using: " + sDate + sTime.substring(0, sTime.indexOf('Z') - 3));
315 Date d = sdf.parse(sDate + sTime.substring(0, sTime.indexOf('Z') - 3));
316 productTimes.add(new Long(d.getTime()));
317 logger.debug("ms since epoch: " + d.getTime());
318 foundDateTime = true;
319 // set time for display to day/time of 1st granule examined
320 if (! nameHasBeenSet) {
321 sdfOut.setTimeZone(new SimpleTimeZone(0, "UTC"));
322 setName(instrumentName.getStringValue() + " " + sdfOut.format(d));
323 nameHasBeenSet = true;
324 }
325 break;
326 }
327 }
328 if (foundDateTime) break;
329 }
330 if (! foundDateTime) {
331 throw new VisADException("No date time found in Suomi NPP granule");
332 }
333 }
334 }
335 }
336 } catch (Exception e) {
337 logger.debug("Exception during processing of file: " + fileAbsPath);
338 throw (e);
339 } finally {
340 ncfile.close();
341 }
342 }
343
344 for (Long l : productTimes) {
345 logger.debug("Product time: " + l);
346 }
347
348 // build each union aggregation element
349 for (int elementNum = 0; elementNum < sources.size(); elementNum++) {
350 String s = (String) sources.get(elementNum);
351
352 // build an XML (NCML actually) representation of the union aggregation of these two files
353 Namespace ns = Namespace.getNamespace("http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2");
354 org.jdom.Element root = new org.jdom.Element("netcdf", ns);
355 org.jdom.Document document = new org.jdom.Document(root);
356
357 org.jdom.Element agg = new org.jdom.Element("aggregation", ns);
358 agg.setAttribute("type", "union");
359
360 org.jdom.Element fData = new org.jdom.Element("netcdf", ns);
361 fData.setAttribute("location", s);
362 agg.addContent(fData);
363
364 if (! isCombinedProduct) {
365 org.jdom.Element fGeo = new org.jdom.Element("netcdf", ns);
366
367 String geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1);
368 String fileNameRelative = s.substring(s.lastIndexOf(File.separatorChar) + 1);
369 // check if we have the whole file name or just the prefix
370 String geoProductID = geoProductIDs.get(elementNum);
371 if (geoProductID.endsWith("h5")) {
372 geoFilename += geoProductID;
373 } else {
374 geoFilename += geoProductID;
375 geoFilename += s.substring(s.lastIndexOf(File.separatorChar) + 6);
376 }
377 // XXX TJJ - temporary check to swap for terrain corrected geo if needed.
378 // This is until we learn the formal logic for which geo to look for/use
379 File tmpGeo = new File(geoFilename);
380 if (! tmpGeo.exists()) {
381 // this one looks for GMTCO instead of GMODO
382 String geoFileRelative = geoFilename.substring(geoFilename.lastIndexOf(File.separatorChar) + 1);
383 if (fileNameRelative.startsWith("SVM")) {
384 geoFileRelative = geoFileRelative.replace("OD", "TC");
385 }
386 if (fileNameRelative.startsWith("SVI")) {
387 geoFileRelative = geoFileRelative.replace("MG", "TC");
388 }
389
390 // now we make a file filter, and see if a matching geo file is present
391 File fList = new File(geoFilename.substring(0, geoFilename.lastIndexOf(File.separatorChar) + 1)); // current directory
392
393 FilenameFilter geoFilter = new FilenameFilter() {
394 public boolean accept(File dir, String name) {
395 if ((name.startsWith("G")) && (name.endsWith(".h5"))) {
396 return true;
397 } else {
398 return false;
399 }
400 }
401 };
402
403 File[] files = fList.listFiles(geoFilter);
404 for (File file : files) {
405 if (file.isDirectory()) {
406 continue;
407 }
408 // get the file name for convenience
409 String fName = file.getName();
410 // is it one of the geo types we are looking for?
411 if (fName.substring(0, 5).equals(geoFileRelative.substring(0, 5))) {
412 int geoStartIdx = geoFileRelative.indexOf("_d");
413 int prdStartIdx = fName.indexOf("_d");
414 String s1 = geoFileRelative.substring(geoStartIdx, geoStartIdx + 35);
415 String s2 = fName.substring(prdStartIdx, prdStartIdx + 35);
416 if (s1.equals(s2)) {
417 geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1) + fName;
418 break;
419 }
420 }
421 }
422 }
423 logger.debug("Cobbled together GEO file name: " + geoFilename);
424 fGeo.setAttribute("location", geoFilename);
425 agg.addContent(fGeo);
426 }
427
428 root.addContent(agg);
429 XMLOutputter xmlOut = new XMLOutputter();
430 String ncmlStr = xmlOut.outputString(document);
431 ByteArrayInputStream is = new ByteArrayInputStream(ncmlStr.getBytes());
432 MultiDimensionReader netCDFReader = new NetCDFFile(is);
433
434 // let's try and look through the NetCDF reader and see what we can learn...
435 NetcdfFile ncdff = ((NetCDFFile) netCDFReader).getNetCDFFile();
436
437 Group rg = ncdff.getRootGroup();
438
439 List<Group> gl = rg.getGroups();
440 if (gl != null) {
441 for (Group g : gl) {
442 logger.debug("Group name: " + g.getName());
443 // XXX just temporary - we are looking through All_Data, finding displayable data
444 if (g.getName().contains("All_Data")) {
445 List<Group> adg = g.getGroups();
446 // again, iterate through
447 for (Group subG : adg) {
448 logger.debug("Sub group name: " + subG.getName());
449 String subName = subG.getName();
450 if (subName.contains("-GEO")) {
451 // this is the geolocation data
452 List<Variable> vl = subG.getVariables();
453 for (Variable v : vl) {
454 if (v.getFullName().endsWith("Latitude")) {
455 pathToLat = v.getFullName();
456 logger.debug("Lat/Lon Variable: " + v.getFullName());
457 }
458 if (v.getFullName().endsWith("Longitude")) {
459 pathToLon = v.getFullName();
460 logger.debug("Lat/Lon Variable: " + v.getFullName());
461 }
462 }
463 } else {
464 // this is the product data
465 List<Variable> vl = subG.getVariables();
466 for (Variable v : vl) {
467 boolean useThis = false;
468 String vName = v.getFullName();
469 logger.debug("Variable: " + vName);
470 String varShortName = vName.substring(vName.lastIndexOf(SEPARATOR_CHAR) + 1);
471
472 // skip Quality Flags for now.
473 // XXX TJJ - should we show these? if so, note they sometimes
474 // have different dimensions than the main variables. For ex,
475 // on high res bands QFs are 768 x 3200 while vars are 1536 x 6400
476 if (varShortName.startsWith("QF")) {
477 continue;
478 }
479
480 // for CrIS instrument, only taking real calibrated values for now
481 logger.debug("INSTRUMENT NAME: " + instrumentName);
482 if (instrumentName.getStringValue().equals("CrIS")) {
483 if (! varShortName.startsWith(crisFilter)) {
484 logger.debug("Skipping variable: " + varShortName);
485 continue;
486 }
487 }
488
489 // for OMPS, only Radiance for now...
490 if (instrumentName.getStringValue().contains("OMPS")) {
491 if (! varShortName.startsWith(ompsFilter)) {
492 logger.debug("Skipping variable: " + varShortName);
493 continue;
494 }
495 }
496
497 DataType dt = v.getDataType();
498 if ((dt.getSize() != 4) && (dt.getSize() != 2) && (dt.getSize() != 1)) {
499 logger.debug("Skipping data of size: " + dt.getSize());
500 continue;
501 }
502 List al = v.getAttributes();
503
504 List<Dimension> dl = v.getDimensions();
505 if (dl.size() > 4) {
506 logger.debug("Skipping data of dimension: " + dl.size());
507 continue;
508 }
509
510 // for now, skip any 3D VIIRS data
511 if (instrumentName.getStringValue().equals("VIIRS")) {
512 if (dl.size() == 3) {
513 logger.debug("Skipping VIIRS 3D data for now...");
514 continue;
515 }
516 }
517
518 boolean xScanOk = false;
519 boolean yScanOk = false;
520 boolean zScanOk = false;
521 for (Dimension d : dl) {
522 // in order to consider this a displayable product, make sure
523 // both scan direction dimensions are present and look like a granule
524 for (int xIdx = 0; xIdx < XSCAN_POSSIBILITIES.length; xIdx++) {
525 if (d.getLength() == XSCAN_POSSIBILITIES[xIdx]) {
526 xScanOk = true;
527 break;
528 }
529 }
530 for (int yIdx = 0; yIdx < YSCAN_POSSIBILITIES.length; yIdx++) {
531 if (d.getLength() == YSCAN_POSSIBILITIES[yIdx]) {
532 yScanOk = true;
533 inTrackDimensionLength = YSCAN_POSSIBILITIES[yIdx];
534 break;
535 }
536 }
537 for (int zIdx = 0; zIdx < ZSCAN_POSSIBILITIES.length; zIdx++) {
538 if (d.getLength() == ZSCAN_POSSIBILITIES[zIdx]) {
539 zScanOk = true;
540 break;
541 }
542 }
543 }
544
545 if (xScanOk && yScanOk) {
546 useThis = true;
547 }
548
549 if (zScanOk) {
550 is3D = true;
551 hasChannelSelect = true;
552 logger.debug("Handling 3D data source!");
553 }
554
555 if (useThis) {
556 // loop through the variable list again, looking for a corresponding "Factors"
557 float scaleVal = 1f;
558 float offsetVal = 0f;
559 boolean unpackFlag = false;
560
561 // if the granule has an entry for this variable name
562 // get the data, data1 = scale, data2 = offset
563 // create and poke attributes with this data
564 // endif
565
566 String factorsVarName = nppPP.getScaleFactorName(varShortName);
567 logger.debug("Mapping: " + varShortName + " to: " + factorsVarName);
568 if (factorsVarName != null) {
569 for (Variable fV : vl) {
570 if (fV.getShortName().equals(factorsVarName)) {
571 logger.debug("Pulling scale and offset values from variable: " + fV.getShortName());
572 ucar.ma2.Array a = fV.read();
573 float[] so = (float[]) a.copyTo1DJavaArray();
574 //ucar.ma2.Index i = a.getIndex();
575 //scaleVal = a.getFloat(i);
576 scaleVal = so[0];
577 logger.debug("Scale value: " + scaleVal);
578 //i.incr();
579 //offsetVal = a.getFloat(i);
580 offsetVal = so[1];
581 logger.debug("Offset value: " + offsetVal);
582 unpackFlag = true;
583 break;
584 }
585 }
586 }
587
588 // poke in scale/offset attributes for now
589
590 ucar.nc2.Attribute a1 = new ucar.nc2.Attribute("scale_factor", scaleVal);
591 v.addAttribute(a1);
592 ucar.nc2.Attribute a2 = new ucar.nc2.Attribute("add_offset", offsetVal);
593 v.addAttribute(a2);
594
595 // add valid range and fill value attributes here
596 // try to fill in valid range
597 if (nppPP.hasNameAndMetaData(varShortName)) {
598 String rangeMin = nppPP.getRangeMin(varShortName);
599 String rangeMax = nppPP.getRangeMax(varShortName);
600 logger.debug("range min: " + rangeMin);
601 logger.debug("range max: " + rangeMax);
602 // only store range attribute if VALID range found
603 if ((rangeMin != null) && (rangeMax != null)) {
604 int [] shapeArr = new int [] { 2 };
605 ArrayFloat af = new ArrayFloat(shapeArr);
606 try {
607 af.setFloat(0, Float.parseFloat(rangeMin));
608 } catch (NumberFormatException nfe) {
609 af.setFloat(0, new Float(Integer.MIN_VALUE));
610 }
611 try {
612 af.setFloat(1, Float.parseFloat(rangeMax));
613 } catch (NumberFormatException nfe) {
614 af.setFloat(1, new Float(Integer.MAX_VALUE));
615 }
616 ucar.nc2.Attribute rangeAtt = new ucar.nc2.Attribute("valid_range", af);
617 v.addAttribute(rangeAtt);
618 }
619
620 // check for and load fill values too...
621
622 // we need to check two places, first, the XML product profile
623 ArrayList<Float> fval = nppPP.getFillValues(varShortName);
624
625 // 2nd, does the variable already have one defined?
626 // if there was already a fill value associated with this variable, make
627 // sure we bring that along for the ride too...
628 ucar.nc2.Attribute aFill = v.findAttribute("_FillValue");
629
630 // determine size of our fill value array
631 int fvArraySize = 0;
632 if (aFill != null) fvArraySize++;
633 if (! fval.isEmpty()) fvArraySize += fval.size();
634 int [] fillShape = new int [] { fvArraySize };
635
636 // allocate the array
637 ArrayFloat afFill = new ArrayFloat(fillShape);
638
639 // and FINALLY, fill it!
640 if (! fval.isEmpty()) {
641 for (int fillIdx = 0; fillIdx < fval.size(); fillIdx++) {
642 afFill.setFloat(fillIdx, fval.get(fillIdx));
643 logger.debug("Adding fill value (from XML): " + fval.get(fillIdx));
644 }
645 }
646
647 if (aFill != null) {
648 Number n = aFill.getNumericValue();
649 // is the data unsigned?
650 ucar.nc2.Attribute aUnsigned = v.findAttribute("_Unsigned");
651 float fillValAsFloat = Float.NaN;
652 if (aUnsigned != null) {
653 if (aUnsigned.getStringValue().equals("true")) {
654 DataType fvdt = aFill.getDataType();
655 logger.debug("Data String: " + aFill.toString());
656 logger.debug("DataType primitive type: " + fvdt.getPrimitiveClassType());
657 // signed byte that needs conversion?
658 if (fvdt.getPrimitiveClassType() == byte.class) {
659 fillValAsFloat = (float) Util.unsignedByteToInt(n.byteValue());
660 }
661 else if (fvdt.getPrimitiveClassType() == short.class) {
662 fillValAsFloat = (float) Util.unsignedShortToInt(n.shortValue());
663 } else {
664 fillValAsFloat = n.floatValue();
665 }
666 }
667 }
668 afFill.setFloat(fvArraySize - 1, fillValAsFloat);
669 logger.debug("Adding fill value (from variable): " + fillValAsFloat);
670 }
671 ucar.nc2.Attribute fillAtt = new ucar.nc2.Attribute("_FillValue", afFill);
672 v.addAttribute(fillAtt);
673 }
674
675 ucar.nc2.Attribute aUnsigned = v.findAttribute("_Unsigned");
676 if (aUnsigned != null) {
677 logger.debug("_Unsigned attribute value: " + aUnsigned.getStringValue());
678 unsignedFlags.add(aUnsigned.getStringValue());
679 } else {
680 unsignedFlags.add("false");
681 }
682
683 if (unpackFlag) {
684 unpackFlags.add("true");
685 } else {
686 unpackFlags.add("false");
687 }
688
689 logger.debug("Adding product: " + v.getShortName());
690 pathToProducts.add(v.getFullName());
691
692 }
693 }
694 }
695 }
696 }
697 }
698 }
699
700 ncdfal.add((NetCDFFile) netCDFReader);
701 }
702
703 } catch (Exception e) {
704 logger.error("cannot create NetCDF reader for files selected");
705 if (e.getMessage() != null && e.getMessage().equals("XML Product Profile Error")) {
706 throw new VisADException("Unable to extract metadata from required XML Product Profile");
707 }
708 }
709
710 // initialize the aggregation reader object
711 try {
712 nppAggReader = new GranuleAggregation(ncdfal, inTrackDimensionLength, "Track", "XTrack");
713 } catch (Exception e) {
714 throw new VisADException("Unable to initialize aggregation reader");
715 }
716
717 // make sure we found valid data
718 if (pathToProducts.size() == 0) {
719 throw new VisADException("No data found in files selected");
720 }
721
722 logger.debug("Number of adapters needed: " + pathToProducts.size());
723 adapters = new MultiDimensionAdapter[pathToProducts.size()];
724 Hashtable<String, String[]> properties = new Hashtable<String, String[]>();
725
726 Iterator<String> iterator = pathToProducts.iterator();
727 int pIdx = 0;
728 while (iterator.hasNext()) {
729 String pStr = (String) iterator.next();
730 logger.debug("Working on adapter number " + (pIdx + 1));
731 HashMap<String, Object> swathTable = SwathAdapter.getEmptyMetadataTable();
732 HashMap<String, Object> spectTable = SpectrumAdapter.getEmptyMetadataTable();
733 swathTable.put("array_name", pStr);
734 swathTable.put("lon_array_name", pathToLon);
735 swathTable.put("lat_array_name", pathToLat);
736 swathTable.put("XTrack", "XTrack");
737 swathTable.put("Track", "Track");
738 swathTable.put("geo_Track", "Track");
739 swathTable.put("geo_XTrack", "XTrack");
740 swathTable.put("product_name", productName);
741
742 // array_name common to spectrum table
743 spectTable.put("array_name", pStr);
744 spectTable.put("product_name", productName);
745 logger.debug("Product Name: " + productName);
746
747 if (is3D) {
748
749 // 3D data is either ATMS, OMPS, or CrIS
750 if ((instrumentName.getName() != null) && (instrumentName.getStringValue().equals("ATMS"))) {
751 //hasChannelSelect = true;
752 spectTable.put(SpectrumAdapter.channelIndex_name, "Channel");
753 swathTable.put(SpectrumAdapter.channelIndex_name, "Channel");
754
755 swathTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
756 swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
757 swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
758 spectTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
759 spectTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
760 spectTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
761
762 spectTable.put(SpectrumAdapter.channelType, "wavelength");
763 spectTable.put(SpectrumAdapter.channels_name, "Channel");
764 spectTable.put(SpectrumAdapter.x_dim_name, "XTrack");
765 spectTable.put(SpectrumAdapter.y_dim_name, "Track");
766
767 int numChannels = JPSSUtilities.ATMSChannelCenterFrequencies.length;
768 float[] bandArray = new float[numChannels];
769 String[] bandNames = new String[numChannels];
770 for (int bIdx = 0; bIdx < numChannels; bIdx++) {
771 bandArray[bIdx] = JPSSUtilities.ATMSChannelCenterFrequencies[bIdx];
772 bandNames[bIdx] = "Channel " + (bIdx + 1);
773 }
774 spectTable.put(SpectrumAdapter.channelValues, bandArray);
775 spectTable.put(SpectrumAdapter.bandNames, bandNames);
776
777 } else {
778 if (instrumentName.getStringValue().equals("CrIS")) {
779
780 swathTable.put("XTrack", "dim1");
781 swathTable.put("Track", "dim0");
782 swathTable.put("geo_XTrack", "dim1");
783 swathTable.put("geo_Track", "dim0");
784 swathTable.put("product_name", "CrIS_SDR");
785 swathTable.put(SpectrumAdapter.channelIndex_name, "dim3");
786 swathTable.put(SpectrumAdapter.FOVindex_name, "dim2");
787
788 spectTable.put(SpectrumAdapter.channelIndex_name, "dim3");
789 spectTable.put(SpectrumAdapter.FOVindex_name, "dim2");
790 spectTable.put(SpectrumAdapter.x_dim_name, "dim1");
791 spectTable.put(SpectrumAdapter.y_dim_name, "dim0");
792
793 } else if (instrumentName.getStringValue().contains("OMPS")) {
794
795 spectTable.put(SpectrumAdapter.channelIndex_name, "Channel");
796 swathTable.put(SpectrumAdapter.channelIndex_name, "Channel");
797
798 swathTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
799 swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
800 swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
801 spectTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
802 spectTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
803 spectTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
804
805 spectTable.put(SpectrumAdapter.channelType, "wavelength");
806 spectTable.put(SpectrumAdapter.channels_name, "Channel");
807 spectTable.put(SpectrumAdapter.x_dim_name, "XTrack");
808 spectTable.put(SpectrumAdapter.y_dim_name, "Track");
809
810 int numChannels = 200;
811 if (instrumentName.getStringValue().equals("OMPS-TC")) {
812 numChannels = 260;
813 }
814 float[] bandArray = new float[numChannels];
815 String[] bandNames = new String[numChannels];
816 for (int bIdx = 0; bIdx < numChannels; bIdx++) {
817 bandArray[bIdx] = bIdx;
818 bandNames[bIdx] = "Channel " + (bIdx + 1);
819 }
820 spectTable.put(SpectrumAdapter.channelValues, bandArray);
821 spectTable.put(SpectrumAdapter.bandNames, bandNames);
822
823 } else {
824 // sorry, if we can't id the instrument, we can't display the data!
825 throw new VisADException("Unable to determine instrument name");
826 }
827 }
828
829 } else {
830 swathTable.put("array_dimension_names", new String[] {"Track", "XTrack"});
831 swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
832 swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
833 }
834
835 swathTable.put("scale_name", "scale_factor");
836 swathTable.put("offset_name", "add_offset");
837 swathTable.put("fill_value_name", "_FillValue");
838 swathTable.put("range_name", pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1));
839 spectTable.put("range_name", pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1));
840
841 // set the valid range hash if data is available
842 if (nppPP != null) {
843 if (nppPP.getRangeMin(pStr.substring(pStr.lastIndexOf(SEPARATOR_CHAR) + 1)) != null) {
844 swathTable.put("valid_range", "valid_range");
845 }
846 }
847
848 String unsignedAttributeStr = unsignedFlags.get(pIdx);
849 if (unsignedAttributeStr.equals("true")) {
850 swathTable.put("unsigned", unsignedAttributeStr);
851 }
852
853 String unpackFlagStr = unpackFlags.get(pIdx);
854 if (unpackFlagStr.equals("true")) {
855 swathTable.put("unpack", "true");
856 }
857
858 // For Suomi NPP data, do valid range check AFTER applying scale/offset
859 swathTable.put("range_check_after_scaling", "true");
860
861 // pass in a GranuleAggregation reader...
862 if (is3D) {
863 if (instrumentName.getStringValue().equals("ATMS")) {
864 adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable);
865 SpectrumAdapter sa = new SpectrumAdapter(nppAggReader, spectTable);
866 DataCategory.createCategory("MultiSpectral");
867 categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE");
868 MultiSpectralData msd = new MultiSpectralData((SwathAdapter) adapters[pIdx], sa,
869 "BrightnessTemperature", "BrightnessTemperature", "SuomiNPP", "ATMS");
870 msd.setInitialWavenumber(JPSSUtilities.ATMSChannelCenterFrequencies[0]);
871 multiSpectralData.add(msd);
872 }
873 if (instrumentName.getStringValue().equals("CrIS")) {
874 adapters[pIdx] = new CrIS_SDR_SwathAdapter(nppAggReader, swathTable);
875 CrIS_SDR_Spectrum csa = new CrIS_SDR_Spectrum(nppAggReader, spectTable);
876 DataCategory.createCategory("MultiSpectral");
877 categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE");
878 MultiSpectralData msd = new MultiSpectralData((CrIS_SDR_SwathAdapter) adapters[pIdx],
879 csa);
880 msd.setInitialWavenumber(csa.getInitialWavenumber());
881 multiSpectralData.add(msd);
882 }
883 if (instrumentName.getStringValue().contains("OMPS")) {
884 adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable);
885 SpectrumAdapter sa = new SpectrumAdapter(nppAggReader, spectTable);
886 DataCategory.createCategory("MultiSpectral");
887 categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE");
888 MultiSpectralData msd = new MultiSpectralData((SwathAdapter) adapters[pIdx], sa,
889 "RadianceEarth", "RadianceEarth", "SuomiNPP", "OMPS");
890 msd.setInitialWavenumber(0);
891 multiSpectralData.add(msd);
892 }
893 if (pIdx == 0) {
894 defaultSubset = multiSpectralData.get(pIdx).getDefaultSubset();
895 try {
896 previewImage = multiSpectralData.get(pIdx).getImage(defaultSubset);
897 } catch (Exception e) {
898 e.printStackTrace();
899 }
900 }
901
902 } else {
903 adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable);
904 if (pIdx == 0) {
905 defaultSubset = adapters[pIdx].getDefaultSubset();
906 }
907 categories = DataCategory.parseCategories("IMAGE");
908 }
909 pIdx++;
910 }
911
912 setProperties(properties);
913 }
914
915 public void initAfterUnpersistence() {
916 try {
917 setup();
918 } catch (Exception e) {
919 }
920 }
921
922 /**
923 * Make and insert the <code>DataChoice</code>-s for this
924 * <code>DataSource</code>.
925 */
926
927 public void doMakeDataChoices() {
928
929 // special loop for CrIS, ATMS, and OMPS data
930 if (multiSpectralData.size() > 0) {
931 for (int k = 0; k < multiSpectralData.size(); k++) {
932 MultiSpectralData adapter = multiSpectralData.get(k);
933 DataChoice choice = null;
934 try {
935 choice = doMakeDataChoice(k, adapter);
936 msdMap.put(choice.getName(), adapter);
937 addDataChoice(choice);
938 } catch (Exception e) {
939 e.printStackTrace();
940 }
941 }
942 return;
943 }
944 // all other data (VIIRS and 2D EDRs)
945 if (adapters != null) {
946 for (int idx = 0; idx < adapters.length; idx++) {
947 DataChoice choice = null;
948 try {
949 choice = doMakeDataChoice(idx, adapters[idx].getArrayName());
950 }
951 catch (Exception e) {
952 e.printStackTrace();
953 logger.error("doMakeDataChoice failed");
954 }
955
956 if (choice != null) {
957 addDataChoice(choice);
958 }
959 }
960 }
961 }
962
963 private DataChoice doMakeDataChoice(int idx, String var) throws Exception {
964 String name = var;
965 DataSelection dataSel = new MultiDimensionSubset(defaultSubset);
966 Hashtable subset = new Hashtable();
967 subset.put(new MultiDimensionSubset(), dataSel);
968 DirectDataChoice ddc = new DirectDataChoice(this, idx, name, name, categories, subset);
969 return ddc;
970 }
971
972 private DataChoice doMakeDataChoice(int idx, MultiSpectralData adapter) throws Exception {
973 String name = adapter.getName();
974 DataSelection dataSel = new MultiDimensionSubset(defaultSubset);
975 Hashtable subset = new Hashtable();
976 subset.put(MultiDimensionSubset.key, dataSel);
977 subset.put(MultiSpectralDataSource.paramKey, adapter.getParameter());
978 DirectDataChoice ddc = new DirectDataChoice(this, new Integer(idx), name, name, categories, subset);
979 ddc.setProperties(subset);
980 return ddc;
981 }
982
983 /**
984 * Check to see if this <code>SuomiNPPDataSource</code> is equal to the object
985 * in question.
986 * @param o object in question
987 * @return true if they are the same or equivalent objects
988 */
989
990 public boolean equals(Object o) {
991 if ( !(o instanceof SuomiNPPDataSource)) {
992 return false;
993 }
994 return (this == (SuomiNPPDataSource) o);
995 }
996
/**
 * Get the default (first) multispectral data object.
 * NOTE(review): throws IndexOutOfBoundsException when this source holds no
 * multispectral data — presumably only called for ATMS/CrIS/OMPS sources;
 * confirm against callers.
 */
public MultiSpectralData getMultiSpectralData() {
    return multiSpectralData.get(0);
}
1000
/**
 * Get the multispectral data registered for a given data choice.
 *
 * @param choice the data choice whose name keys the lookup
 * @return the matching MultiSpectralData, or null if none was registered
 */
public MultiSpectralData getMultiSpectralData(DataChoice choice) {
    return msdMap.get(choice.getName());
}
1004
/**
 * Get the dataset name (backed by the filename field).
 */
public String getDatasetName() {
    return filename;
}
1008
/**
 * Set the dataset name (stored in the filename field).
 *
 * @param name new dataset name
 */
public void setDatasetName(String name) {
    filename = name;
}
1012
1013 public HashMap getSubsetFromLonLatRect(MultiDimensionSubset select, GeoSelection geoSelection) {
1014 GeoLocationInfo ginfo = geoSelection.getBoundingBox();
1015 return adapters[0].getSubsetFromLonLatRect(select.getSubset(), ginfo.getMinLat(), ginfo.getMaxLat(),
1016 ginfo.getMinLon(), ginfo.getMaxLon());
1017 }
1018
/**
 * Entry point for data requests; delegates to {@link #getDataInner}.
 * Declared synchronized, so concurrent calls on this source are serialized.
 */
public synchronized Data getData(DataChoice dataChoice, DataCategory category,
DataSelection dataSelection, Hashtable requestProperties)
throws VisADException, RemoteException {
return this.getDataInner(dataChoice, category, dataSelection, requestProperties);
}
1024
1025
1026 protected Data getDataInner(DataChoice dataChoice, DataCategory category,
1027 DataSelection dataSelection, Hashtable requestProperties)
1028 throws VisADException, RemoteException {
1029
1030 //- this hack keeps the HydraImageProbe from doing a getData()
1031 //- TODO: need to use categories?
1032 if (requestProperties != null) {
1033 if ((requestProperties.toString()).equals("{prop.requester=MultiSpectral}")) {
1034 return null;
1035 }
1036 }
1037
1038 GeoLocationInfo ginfo = null;
1039 GeoSelection geoSelection = null;
1040
1041 if ((dataSelection != null) && (dataSelection.getGeoSelection() != null)) {
1042 geoSelection = (dataSelection.getGeoSelection().getBoundingBox() != null) ? dataSelection.getGeoSelection() :
1043 dataChoice.getDataSelection().getGeoSelection();
1044 }
1045
1046 if (geoSelection != null) {
1047 ginfo = geoSelection.getBoundingBox();
1048 }
1049
1050 Data data = null;
1051 if (adapters == null) {
1052 return data;
1053 }
1054
1055 MultiDimensionAdapter adapter = null;
1056
1057 // pick the adapter with the same index as the current data choice
1058 int aIdx = 0;
1059 List<DataChoice> dcl = getDataChoices();
1060 for (DataChoice dc : dcl) {
1061 if (dc.equals(dataChoice)) {
1062 aIdx = dcl.indexOf(dc);
1063 break;
1064 }
1065 }
1066
1067 logger.debug("Found dataChoice index: " + aIdx);
1068 adapter = adapters[aIdx];
1069
1070 try {
1071 HashMap subset = null;
1072 if (ginfo != null) {
1073 logger.debug("getting subset from lat-lon rect...");
1074 subset = adapter.getSubsetFromLonLatRect(ginfo.getMinLat(), ginfo.getMaxLat(),
1075 ginfo.getMinLon(), ginfo.getMaxLon(),
1076 geoSelection.getXStride(),
1077 geoSelection.getYStride(),
1078 geoSelection.getZStride());
1079 }
1080 else {
1081
1082 MultiDimensionSubset select = null;
1083 Hashtable table = dataChoice.getProperties();
1084 Enumeration keys = table.keys();
1085 while (keys.hasMoreElements()) {
1086 Object key = keys.nextElement();
1087 logger.debug("Key: " + key.toString());
1088 if (key instanceof MultiDimensionSubset) {
1089 select = (MultiDimensionSubset) table.get(key);
1090 }
1091 }
1092 subset = select.getSubset();
1093 logger.debug("Subset size: " + subset.size());
1094
1095 if (dataSelection != null) {
1096 Hashtable props = dataSelection.getProperties();
1097 if (props != null) {
1098 if (props.containsKey(SpectrumAdapter.channelIndex_name)) {
1099 logger.debug("Props contains channel index key...");
1100 double[] coords = (double[]) subset.get(SpectrumAdapter.channelIndex_name);
1101 int idx = ((Integer) props.get(SpectrumAdapter.channelIndex_name)).intValue();
1102 coords[0] = (double)idx;
1103 coords[1] = (double)idx;
1104 coords[2] = (double)1;
1105 }
1106 }
1107 }
1108 }
1109
1110 if (subset != null) {
1111 data = adapter.getData(subset);
1112 data = applyProperties(data, requestProperties, subset, aIdx);
1113 }
1114 } catch (Exception e) {
1115 e.printStackTrace();
1116 logger.error("getData exception e=" + e);
1117 }
1118 return data;
1119 }
1120
1121 protected Data applyProperties(Data data, Hashtable requestProperties, HashMap subset, int adapterIndex)
1122 throws VisADException, RemoteException {
1123 Data new_data = data;
1124
1125 if (requestProperties == null) {
1126 new_data = data;
1127 return new_data;
1128 }
1129
1130 return new_data;
1131 }
1132
1133 protected void initDataSelectionComponents(
1134 List<DataSelectionComponent> components,
1135 final DataChoice dataChoice) {
1136
1137 if (System.getProperty("os.name").equals("Mac OS X") && hasImagePreview && hasChannelSelect) {
1138 try {
1139 components.add(new ImageChannelSelection(new PreviewSelection(dataChoice, previewImage, null), new ChannelSelection(dataChoice)));
1140 } catch (Exception e) {
1141 e.printStackTrace();
1142 }
1143 }
1144 else {
1145 if (hasImagePreview) {
1146 try {
1147 FlatField image = (FlatField) dataChoice.getData(null);
1148 components.add(new PreviewSelection(dataChoice, image, null));
1149 } catch (Exception e) {
1150 logger.error("Can't make PreviewSelection: "+e);
1151 e.printStackTrace();
1152 }
1153 }
1154 if (hasChannelSelect) {
1155 try {
1156 components.add(new ChannelSelection(dataChoice));
1157 }
1158 catch (Exception e) {
1159 e.printStackTrace();
1160 }
1161 }
1162 }
1163
1164 }
1165
1166 }