/*
 * This file is part of McIDAS-V
 *
 * Copyright 2007-2013
 * Space Science and Engineering Center (SSEC)
 * University of Wisconsin - Madison
 * 1225 W. Dayton Street, Madison, WI 53706, USA
 * https://www.ssec.wisc.edu/mcidas
 *
 * All Rights Reserved
 *
 * McIDAS-V is built on Unidata's IDV and SSEC's VisAD libraries, and
 * some McIDAS-V source code is based on IDV and VisAD source code.
 *
 * McIDAS-V is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * (at your option) any later version.
 *
 * McIDAS-V is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser Public License for more details.
 *
 * You should have received a copy of the GNU Lesser Public License
 * along with this program. If not, see http://www.gnu.org/licenses.
 */

package edu.wisc.ssec.mcidasv.data.hydra;

import edu.wisc.ssec.mcidasv.McIDASV;
import edu.wisc.ssec.mcidasv.PersistenceManager;
import edu.wisc.ssec.mcidasv.data.HydraDataSource;
import edu.wisc.ssec.mcidasv.data.PreviewSelection;
import edu.wisc.ssec.mcidasv.data.QualityFlag;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FilenameFilter;

import java.rmi.RemoteException;

import java.text.SimpleDateFormat;

import java.util.ArrayList;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.SimpleTimeZone;
import java.util.StringTokenizer;

import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.output.XMLOutputter;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ucar.ma2.ArrayFloat;
import ucar.ma2.DataType;

import ucar.nc2.Attribute;
import ucar.nc2.Dimension;
import ucar.nc2.Group;
import ucar.nc2.NetcdfFile;
import ucar.nc2.Variable;
import ucar.nc2.dataset.VariableDS;

import ucar.unidata.data.DataCategory;
import ucar.unidata.data.DataChoice;
import ucar.unidata.data.DataSelection;
import ucar.unidata.data.DataSelectionComponent;
import ucar.unidata.data.DataSourceDescriptor;
import ucar.unidata.data.DirectDataChoice;
import ucar.unidata.data.GeoLocationInfo;
import ucar.unidata.data.GeoSelection;
import ucar.unidata.idv.IdvPersistenceManager;

import ucar.unidata.util.Misc;

import visad.Data;
import visad.FlatField;
import visad.VisADException;

import visad.util.Util;

/**
 * A data source for NPOESS Preparatory Project (Suomi NPP) data.
 * This will probably move, but we are placing it here for now
 * since we are leveraging some existing code used for HYDRA.
 */
public class SuomiNPPDataSource extends HydraDataSource {

    private static final Logger logger = LoggerFactory.getLogger(SuomiNPPDataSource.class);

    /** Sources file */
    protected String filename;

    // for loading bundles, store granule lists and geo lists here
    protected List<String> oldSources = new ArrayList<String>();
    protected List<String> geoSources = new ArrayList<String>();

    protected MultiDimensionReader nppAggReader;

    protected MultiDimensionAdapter[] adapters = null;

    private ArrayList<MultiSpectralData> msd_CrIS = new ArrayList<MultiSpectralData>();
    private ArrayList<MultiSpectralData> multiSpectralData = new ArrayList<MultiSpectralData>();
    private HashMap<String, MultiSpectralData> msdMap = new HashMap<String, MultiSpectralData>();
    private HashMap<String, QualityFlag> qfMap = new HashMap<String, QualityFlag>();

    private static final String DATA_DESCRIPTION = "Suomi NPP Data";

    // instrument related variables and flags
    Attribute instrumentName = null;
    private String productName = null;

    // product related variables and flags
    boolean isEDR = false;

    // for now, we are only handling CrIS variables that match this filter and SCAN dimensions
    private String crisFilter = "ES_Real";

    // for now, we are only handling OMPS variables that match this filter and SCAN dimensions
    private String ompsFilter = "Radiance";

    private HashMap defaultSubset;
    public TrackAdapter track_adapter;

    private List categories;
    private boolean hasChannelSelect = false;
    private boolean hasImagePreview = true;
    private boolean isCombinedProduct = false;
    private boolean nameHasBeenSet = false;

    private FlatField previewImage = null;

    // need our own separator char since it's always Unix-style in the Suomi NPP files
    private static final String SEPARATOR_CHAR = "/";

    // date formatter for converting Suomi NPP day/time to something we can use
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss.SSS");

    // date formatter for how we want to show granule day/time on display
    SimpleDateFormat sdfOut = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");
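
    // A worked example of how these two formatters cooperate (the attribute
    // values shown are hypothetical, but follow the shape the parsing code
    // in setup() expects):
    //
    //   AggregateBeginningDate = "20130214"
    //   AggregateBeginningTime = "120134.567890Z"
    //
    //   String sTime = "120134.567890Z";
    //   // trim to millisecond precision by dropping the last 3 fractional
    //   // digits and the trailing 'Z' (mirrors the substring call in setup())
    //   String trimmed = sTime.substring(0, sTime.indexOf('Z') - 3);  // "120134.567"
    //   Date d = sdf.parse("20130214" + trimmed);                     // 2013-02-14 12:01:34.567 GMT
    //   sdfOut.format(d);                                             // "2013-02-14 12:01:34 GMT"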

    /**
     * Zero-argument constructor for construction via unpersistence.
     */
    public SuomiNPPDataSource() {
    }

    public SuomiNPPDataSource(String fileName) throws VisADException {
        this(null, Misc.newList(fileName), null);
        logger.warn("filename-only constructor call...");
    }

    /**
     * Construct a new Suomi NPP HDF5 data source.
     * @param descriptor descriptor for this <code>DataSource</code>
     * @param fileName name of the HDF file to read
     * @param properties hashtable of properties
     *
     * @throws VisADException problem creating data
     */
    public SuomiNPPDataSource(DataSourceDescriptor descriptor,
                              String fileName, Hashtable properties)
            throws VisADException {
        this(descriptor, Misc.newList(fileName), properties);
        logger.debug("SuomiNPPDataSource called, single file selected: " + fileName);
    }

    /**
     * Construct a new Suomi NPP HDF5 data source.
     * @param descriptor descriptor for this <code>DataSource</code>
     * @param newSources List of filenames
     * @param properties hashtable of properties
     *
     * @throws VisADException problem creating data
     */
    public SuomiNPPDataSource(DataSourceDescriptor descriptor,
                              List<String> newSources, Hashtable properties)
            throws VisADException {
        super(descriptor, newSources, DATA_DESCRIPTION, properties);
        logger.debug("SuomiNPPDataSource constructor called, file count: " + sources.size());

        filename = (String) sources.get(0);
        setDescription("Suomi NPP");

        for (Object o : sources) {
            logger.debug("Suomi NPP source file: " + (String) o);
        }

        setup();
    }
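
    // A minimal usage sketch; the granule name and descriptor are hypothetical
    // (in McIDAS-V the data chooser normally supplies both):
    //
    //   List<String> granules = Misc.newList(
    //       "/data/viirs/SVM15_npp_d20130214_t1201345_e1203001_b06791_c20130214180931_noaa_ops.h5");
    //   SuomiNPPDataSource ds = new SuomiNPPDataSource(descriptor, granules, new Hashtable());
    //   // the constructor calls setup(), which builds one adapter per usable product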

    public void setup() throws VisADException {

        // store filenames for possible bundle unpersistence
        for (Object o : sources) {
            oldSources.add((String) o);
        }

        // time zone for product labels
        SimpleTimeZone stz = new SimpleTimeZone(0, "GMT");
        sdf.setTimeZone(stz);
        sdfOut.setTimeZone(stz);

        // looking to populate 3 things - path to lat, path to lon, path to relevant products
        String pathToLat = null;
        String pathToLon = null;
        LinkedHashSet<String> pathToProducts = new LinkedHashSet<String>();

        // flag to indicate the data is 3-dimensional (X, Y, channel or band)
        boolean is3D = false;

        // check source filenames to see if this is a combined product. everything
        // from the last file separator to the first underscore should be product info
        int lastSeparator = filename.lastIndexOf(File.separatorChar);
        int firstUnderscore = filename.indexOf("_", lastSeparator + 1);
        String prodStr = filename.substring(lastSeparator + 1, firstUnderscore);
        StringTokenizer st = new StringTokenizer(prodStr, "-");
        logger.debug("SNPPDS check for embedded GEO, tokenizing: " + prodStr);
        while (st.hasMoreTokens()) {
            String singleProd = st.nextToken();
            logger.debug("Next token: " + singleProd);
            for (int i = 0; i < JPSSUtilities.geoProductIDs.length; i++) {
                if (singleProd.equals(JPSSUtilities.geoProductIDs[i])) {
                    logger.debug("Setting isCombinedProduct true, found embedded GEO: " + singleProd);
                    isCombinedProduct = true;
                    break;
                }
            }
        }
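
        // For example (hypothetical granule name): a combined product such as
        //   "SVI01-GIMGO_npp_d20130214_t1201345_e1203001_b06791_c20130214180931_noaa_ops.h5"
        // yields prodStr = "SVI01-GIMGO", which tokenizes to "SVI01" and "GIMGO";
        // if "GIMGO" appears in JPSSUtilities.geoProductIDs, the geolocation is
        // embedded and no separate GEO file will be sought below.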

        // various metadata we'll need to gather on a per-product basis
        LinkedHashMap<String, String> unsignedFlags = new LinkedHashMap<String, String>();
        LinkedHashMap<String, String> unpackFlags = new LinkedHashMap<String, String>();

        // geo product IDs for each granule
        LinkedHashSet<String> geoProductIDs = new LinkedHashSet<String>();

        // aggregations will use sets of NetCDFFile readers
        ArrayList<NetCDFFile> ncdfal = new ArrayList<NetCDFFile>();

        // we should be able to find an XML Product Profile for each data/product type
        SuomiNPPProductProfile nppPP = null;

        try {

            nppPP = new SuomiNPPProductProfile();

            // for each source file provided, get the nominal time
            for (int fileCount = 0; fileCount < sources.size(); fileCount++) {
                // need to open the main NetCDF file to determine the geolocation product
                NetcdfFile ncfile = null;
                String fileAbsPath = null;
                try {
                    fileAbsPath = (String) sources.get(fileCount);
                    logger.debug("Trying to open file: " + fileAbsPath);
                    ncfile = NetcdfFile.open(fileAbsPath);
                    if (! isCombinedProduct) {
                        Attribute a = ncfile.findGlobalAttribute("N_GEO_Ref");
                        logger.debug("Value of GEO global attribute: "
                            + a.getStringValue());
                        String tmpGeoProductID = a.getStringValue();
                        geoProductIDs.add(tmpGeoProductID);
                    }
                    Group rg = ncfile.getRootGroup();

                    List<Group> gl = rg.getGroups();
                    if (gl != null) {
                        for (Group g : gl) {
                            logger.debug("Group name: " + g.getFullName());
                            // when we find the Data_Products group, go down another group level and pull out
                            // what we will use for nominal day and time (for now anyway).
                            // XXX TJJ the fileCount check was meant to keep the GEO file out of the time
                            // array, but note it is always true, since fileCount never reaches sources.size()
                            if (g.getFullName().contains("Data_Products") && (fileCount != sources.size())) {
                                boolean foundDateTime = false;
                                List<Group> dpg = g.getGroups();

                                // cycle through once looking for XML Product Profiles
                                for (Group subG : dpg) {

                                    String subName = subG.getFullName();
                                    // use the actual product, not geolocation, to id the XML Product Profile
                                    if (! subName.contains("-GEO")) {
                                        // determine the instrument name (VIIRS, ATMS, CrIS, OMPS)
                                        instrumentName = subG.findAttribute("Instrument_Short_Name");

                                        // note any EDR products; we will need to check for and remove
                                        // fill scans later
                                        Attribute adtt = subG.findAttribute("N_Dataset_Type_Tag");
                                        if (adtt != null) {
                                            String baseName = adtt.getStringValue();
                                            if ((baseName != null) && (baseName.equals("EDR"))) {
                                                isEDR = true;
                                            }
                                        }

                                        // This is also where we find the attribute which tells us which
                                        // XML Product Profile to use!
                                        Attribute axpp = subG.findAttribute("N_Collection_Short_Name");
                                        if (axpp != null) {
                                            String baseName = axpp.getStringValue();
                                            productName = baseName;
                                            String productProfileFileName = nppPP.getProfileFileName(baseName);
                                            logger.debug("Found profile: " + productProfileFileName);
                                            if (productProfileFileName == null) {
                                                throw new Exception("XML Product Profile not found in catalog");
                                            }
                                            try {
                                                nppPP.addMetaDataFromFile(productProfileFileName);
                                            } catch (Exception nppppe) {
                                                logger.error("Error parsing XML Product Profile: " + productProfileFileName);
                                                throw new Exception("XML Product Profile Error");
                                            }
                                        }
                                    }
                                }

                                // 2nd pass through the sub-groups to extract date/time for aggregation
                                for (Group subG : dpg) {
                                    List<Variable> vl = subG.getVariables();
                                    for (Variable v : vl) {
                                        Attribute aDate = v.findAttribute("AggregateBeginningDate");
                                        Attribute aTime = v.findAttribute("AggregateBeginningTime");
                                        // did we find the attributes we are looking for?
                                        if ((aDate != null) && (aTime != null)) {
                                            String sDate = aDate.getStringValue();
                                            String sTime = aTime.getStringValue();
                                            logger.debug("For day/time, using: " + sDate + sTime.substring(0, sTime.indexOf('Z') - 3));
                                            Date d = sdf.parse(sDate + sTime.substring(0, sTime.indexOf('Z') - 3));
                                            foundDateTime = true;
                                            // set time for display to day/time of 1st granule examined
                                            if (! nameHasBeenSet) {
                                                setName(instrumentName.getStringValue() + " " + sdfOut.format(d)
                                                    + ", " + sources.size() + " Granule");
                                                nameHasBeenSet = true;
                                            }
                                            break;
                                        }
                                    }
                                    if (foundDateTime) break;
                                }
                                if (! foundDateTime) {
                                    throw new VisADException("No date time found in Suomi NPP granule");
                                }
                            }
                        }
                    }
                } catch (Exception e) {
                    logger.debug("Exception during processing of file: " + fileAbsPath);
                    throw e;
                } finally {
                    // guard against the open() itself having failed
                    if (ncfile != null) {
                        ncfile.close();
                    }
                }
            }

            // build each union aggregation element
            Iterator<String> iterator = geoProductIDs.iterator();
            for (int elementNum = 0; elementNum < sources.size(); elementNum++) {

                String s = (String) sources.get(elementNum);

                // build an XML (NcML actually) representation of the union aggregation of these two files
                Namespace ns = Namespace.getNamespace("http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2");
                Element root = new Element("netcdf", ns);
                Document document = new Document(root);

                Element agg = new Element("aggregation", ns);
                agg.setAttribute("type", "union");

                Element fData = new Element("netcdf", ns);
                fData.setAttribute("location", s);
                agg.addContent(fData);

                if (! isCombinedProduct) {
                    Element fGeo = new Element("netcdf", ns);

                    String geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1);
                    // check if we have the whole file name or just the prefix
                    String geoProductID = iterator.next();
                    if (geoProductID.endsWith("h5")) {
                        geoFilename += geoProductID;
                    } else {
                        geoFilename += geoProductID;
                        geoFilename += s.substring(s.lastIndexOf(File.separatorChar) + 6);
                    }
                    // make sure the file specified by the N_GEO_Ref global attribute is really there
                    File tmpGeo = new File(geoFilename);
                    if (! tmpGeo.exists()) {
                        // the file supposedly named exactly by the global attribute is not there,
                        // so we need to check for similar geo files with different creation dates
                        String geoFileRelative = geoFilename.substring(geoFilename.lastIndexOf(File.separatorChar) + 1);
                        // also check for a Terrain Corrected version of the geo file
                        String geoTerrainCorrected = geoFileRelative;
                        geoTerrainCorrected = geoTerrainCorrected.replace("OD", "TC");
                        geoTerrainCorrected = geoTerrainCorrected.replace("MG", "TC");
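
                        // For example (hypothetical names, following the usual VIIRS
                        // collection IDs): if the data granule is
                        //   "SVM04_npp_d20130214_t1201345_....h5"
                        // and N_GEO_Ref is "GMODO", the prefix swap above maps the
                        // ellipsoid geo name "GMODO..." to its terrain-corrected
                        // counterpart "GMTCO...", since "GMODO".replace("OD", "TC")
                        // yields "GMTCO" (and likewise "GIMGO" -> "GITCO" via the
                        // "MG" -> "TC" replacement).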

                        // now we make a file filter, and see if a matching geo file is present
                        File fList = new File(geoFilename.substring(0, geoFilename.lastIndexOf(File.separatorChar) + 1)); // current directory

                        FilenameFilter geoFilter = new FilenameFilter() {
                            public boolean accept(File dir, String name) {
                                return name.matches(JPSSUtilities.SUOMI_GEO_REGEX);
                            }
                        };

                        File[] files = fList.listFiles(geoFilter);
                        for (File file : files) {
                            if (file.isDirectory()) {
                                continue;
                            }
                            // get the file name for convenience
                            String fName = file.getName();
                            // is it one of the standard Ellipsoid geo types we are looking for?
                            if (fName.substring(0, 5).equals(geoFileRelative.substring(0, 5))) {
                                int geoStartIdx = geoFileRelative.indexOf("_d");
                                int prdStartIdx = fName.indexOf("_d");
                                String s1 = geoFileRelative.substring(geoStartIdx, geoStartIdx + 35);
                                String s2 = fName.substring(prdStartIdx, prdStartIdx + 35);
                                if (s1.equals(s2)) {
                                    geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1) + fName;
                                    break;
                                }
                            }
                            // same check, but for the Terrain Corrected version
                            if (fName.substring(0, 5).equals(geoTerrainCorrected.substring(0, 5))) {
                                int geoStartIdx = geoTerrainCorrected.indexOf("_d");
                                int prdStartIdx = fName.indexOf("_d");
                                String s1 = geoTerrainCorrected.substring(geoStartIdx, geoStartIdx + 35);
                                String s2 = fName.substring(prdStartIdx, prdStartIdx + 35);
                                if (s1.equals(s2)) {
                                    geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1) + fName;
                                    break;
                                }
                            }
                        }
                    }
                    logger.debug("Cobbled together GEO file name: " + geoFilename);
                    fGeo.setAttribute("location", geoFilename);
                    // add this to the list used if we create a zipped bundle
                    geoSources.add(geoFilename);
                    agg.addContent(fGeo);
                }

                root.addContent(agg);
                XMLOutputter xmlOut = new XMLOutputter();
                String ncmlStr = xmlOut.outputString(document);
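
                // The string built above is a standard NcML union aggregation; for a
                // single (hypothetical) granule/geo pair it looks roughly like:
                //
                //   <netcdf xmlns="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2">
                //     <aggregation type="union">
                //       <netcdf location="/data/SVM04_npp_d20130214_t1201345_....h5"/>
                //       <netcdf location="/data/GMTCO_npp_d20130214_t1201345_....h5"/>
                //     </aggregation>
                //   </netcdf>
                //
                // so the data and geolocation variables appear to downstream readers
                // as one logical NetCDF dataset.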
                ByteArrayInputStream is = new ByteArrayInputStream(ncmlStr.getBytes());
                MultiDimensionReader netCDFReader = new NetCDFFile(is);

                // let's try and look through the NetCDF reader and see what we can learn...
                NetcdfFile ncdff = ((NetCDFFile) netCDFReader).getNetCDFFile();

                Group rg = ncdff.getRootGroup();
                // this is a list filled with unpacked qflag products, if any
                ArrayList<VariableDS> qfProds = new ArrayList<VariableDS>();

                List<Group> gl = rg.getGroups();
                if (gl != null) {
                    for (Group g : gl) {
                        logger.debug("Group name: " + g.getFullName());
                        // XXX just temporary - we are looking through All_Data, finding displayable data
                        if (g.getFullName().contains("All_Data")) {
                            List<Group> adg = g.getGroups();
                            int xDim = -1;
                            int yDim = -1;

                            // two sub-iterations, the first one to find geolocation and product dimensions
                            for (Group subG : adg) {
                                logger.debug("Sub group name: " + subG.getFullName());
                                String subName = subG.getFullName();
                                if (subName.contains("-GEO")) {
                                    // this is the geolocation data
                                    List<Variable> vl = subG.getVariables();
                                    for (Variable v : vl) {
                                        if (v.getFullName().endsWith(SEPARATOR_CHAR + "Latitude")) {
                                            pathToLat = v.getFullName();
                                            logger.debug("Lat/Lon Variable: " + v.getFullName());
                                            // get the dimensions of the lat variable
                                            Dimension dAlongTrack = v.getDimension(0);
                                            yDim = dAlongTrack.getLength();
                                            logger.debug("Lat along track dim: " + dAlongTrack.getLength());
                                            Dimension dAcrossTrack = v.getDimension(1);
                                            xDim = dAcrossTrack.getLength();
                                            logger.debug("Lat across track dim: " + dAcrossTrack.getLength());
                                        }
                                        if (v.getFullName().endsWith(SEPARATOR_CHAR + "Longitude")) {
                                            // we got dimensions from lat, don't need 'em twice, but need the path
                                            pathToLon = v.getFullName();
                                        }
                                    }
                                }
                            }

                            // second, to identify displayable products
                            for (Group subG : adg) {
                                logger.debug("Sub group name: " + subG.getFullName());
                                // this is the product data
                                List<Variable> vl = subG.getVariables();
                                for (Variable v : vl) {
                                    boolean useThis = false;
                                    String vName = v.getFullName();
                                    logger.debug("Variable: " + vName);
                                    String varShortName = vName.substring(vName.lastIndexOf(SEPARATOR_CHAR) + 1);

                                    // Special code to handle quality flags. We throw out anything
                                    // that does not match the bounds of the geolocation data

                                    if (varShortName.startsWith("QF")) {

                                        logger.debug("Handling Quality Flag: " + varShortName);

                                        // this check is done later for ALL variables, but we need
                                        // it early here to weed out those quality flags that are
                                        // simply a small set of data with no granule geo bounds
                                        boolean xScanOk = false;
                                        boolean yScanOk = false;
                                        List<Dimension> dl = v.getDimensions();

                                        // toss out > 2D Quality Flags
                                        if (dl.size() > 2) {
                                            logger.debug("SKIPPING QF, > 2D: " + varShortName);
                                            continue;
                                        }

                                        for (Dimension d : dl) {
                                            // in order to consider this a displayable product, make sure
                                            // both scan direction dimensions are present and look like a granule
                                            if (d.getLength() == xDim) {
                                                xScanOk = true;
                                            }
                                            if (d.getLength() == yDim) {
                                                yScanOk = true;
                                            }
                                        }

                                        if (! (xScanOk && yScanOk)) {
                                            logger.debug("SKIPPING QF, does not match geo bounds: " + varShortName);
                                            continue;
                                        }

                                        ArrayList<QualityFlag> qfal = nppPP.getQualityFlags(varShortName);
                                        if (qfal != null) {
                                            for (QualityFlag qf : qfal) {
                                                qf.setPackedName(vName);
                                                // make a copy of the qflag variable
                                                // NOTE: by using a VariableDS here, the original
                                                // variable is used for the I/O, this matters!
                                                VariableDS vqf = new VariableDS(subG, v, false);
                                                // prefix with the QF number to help guarantee uniqueness across groups.
                                                // this will cover most cases, but there could still be duplicate names
                                                // within a single QF; that is handled when fetching XMLPP metadata
                                                vqf.setShortName(
                                                    varShortName.substring(0, 3) + "_" + qf.getName()
                                                );
                                                logger.debug("New var full name: " + vqf.getFullName());
                                                qfProds.add(vqf);
                                                qfMap.put(vqf.getFullName(), qf);
                                            }
                                        }
                                    }
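
                                    // To illustrate the renaming above with hypothetical names:
                                    // a packed variable "QF1_VIIRSM04SDR" carrying a flag whose
                                    // XML Product Profile name is "SDRQuality" is exposed as a
                                    // new variable "QF1_SDRQuality"; the qfMap entry then lets
                                    // the aggregation reader pull just that flag's portion out
                                    // of the packed byte array.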

                                    // for the CrIS instrument, only taking real calibrated values for now
                                    if (instrumentName.getStringValue().equals("CrIS")) {
                                        if (! varShortName.startsWith(crisFilter)) {
                                            logger.debug("Skipping variable: " + varShortName);
                                            continue;
                                        }
                                    }

                                    // for OMPS, only Radiance for now...
                                    if (instrumentName.getStringValue().contains("OMPS")) {
                                        if (! varShortName.startsWith(ompsFilter)) {
                                            logger.debug("Skipping variable: " + varShortName);
                                            continue;
                                        }
                                    }

                                    DataType dt = v.getDataType();
                                    if ((dt.getSize() != 4) && (dt.getSize() != 2) && (dt.getSize() != 1)) {
                                        logger.debug("Skipping data of size: " + dt.getSize());
                                        continue;
                                    }

                                    List<Dimension> dl = v.getDimensions();
                                    if (dl.size() > 4) {
                                        logger.debug("Skipping data of dimension: " + dl.size());
                                        continue;
                                    }

                                    // for now, skip any 3D VIIRS data
                                    if (instrumentName.getStringValue().equals("VIIRS")) {
                                        if (dl.size() == 3) {
                                            logger.debug("Skipping VIIRS 3D data for now...");
                                            continue;
                                        }
                                    }

                                    boolean xScanOk = false;
                                    boolean yScanOk = false;
                                    for (Dimension d : dl) {
                                        // in order to consider this a displayable product, make sure
                                        // both scan direction dimensions are present and look like a granule
                                        if (d.getLength() == xDim) {
                                            xScanOk = true;
                                        }
                                        if (d.getLength() == yDim) {
                                            yScanOk = true;
                                        }
                                    }

                                    if (xScanOk && yScanOk) {
                                        logger.debug("Will probably use this variable, a few more checks...");
                                        useThis = true;
                                    }

                                    // new way to validate ATMS variables
                                    if (instrumentName.getStringValue().equals("ATMS")) {
                                        boolean isDisplayableATMS = false;
                                        // check the variable dimensions; if the channel count is not present, ditch it
                                        for (Dimension d : dl) {
                                            if (d.getLength() == JPSSUtilities.ATMSChannelCenterFrequencies.length) {
                                                isDisplayableATMS = true;
                                                logger.debug("This variable appears to be displayable ATMS");
                                                break;
                                            }
                                        }
                                        if (! isDisplayableATMS) {
                                            useThis = false;
                                        }
                                    }

                                    // sensor data with a channel dimension
                                    if (useThis) {
                                        if ((instrumentName.getStringValue().equals("CrIS")) ||
                                            (instrumentName.getStringValue().equals("ATMS")) ||
                                            (instrumentName.getStringValue().contains("OMPS"))) {
                                            is3D = true;
                                            hasChannelSelect = true;
                                            logger.debug("Handling 3-D data source...");
                                        }
                                    }

                                    if (useThis) {
                                        // loop through the variable list again, looking for a corresponding "Factors"
                                        float scaleVal = 1f;
                                        float offsetVal = 0f;
                                        boolean unpackFlag = false;

                                        // if the granule has an entry for this variable name
                                        //   get the data, data1 = scale, data2 = offset
                                        //   create and poke attributes with this data
                                        // endif

                                        String factorsVarName = nppPP.getScaleFactorName(varShortName);
                                        logger.debug("Mapping: " + varShortName + " to: " + factorsVarName);
                                        if (factorsVarName != null) {
                                            for (Variable fV : vl) {
                                                if (fV.getShortName().equals(factorsVarName)) {
                                                    logger.debug("Pulling scale and offset values from variable: " + fV.getShortName());
                                                    ucar.ma2.Array a = fV.read();
                                                    float[] so = (float[]) a.copyTo1DJavaArray();
                                                    scaleVal = so[0];
                                                    logger.debug("Scale value: " + scaleVal);
                                                    offsetVal = so[1];
                                                    logger.debug("Offset value: " + offsetVal);
                                                    unpackFlag = true;
                                                    break;
                                                }
                                            }
                                        }

                                        // poke in scale/offset attributes for now

                                        Attribute a1 = new Attribute("scale_factor", scaleVal);
                                        v.addAttribute(a1);
                                        Attribute a2 = new Attribute("add_offset", offsetVal);
                                        v.addAttribute(a2);
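
                                        // with these attributes in place, downstream code (see the
                                        // "unpack" setting further below) can recover physical values
                                        // using the usual CF-style linear unpacking; schematically:
                                        //
                                        //   float physical = packedValue * scaleVal + offsetVal;
                                        //
                                        // (assumption: the Factors variable stores the scale first
                                        // and the offset second, as the indexing above implies)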

                                        // add valid range and fill value attributes here
                                        // try to fill in the valid range
                                        if (nppPP.hasNameAndMetaData(varShortName)) {
                                            String rangeMin = nppPP.getRangeMin(varShortName);
                                            String rangeMax = nppPP.getRangeMax(varShortName);
                                            logger.debug("range min: " + rangeMin);
                                            logger.debug("range max: " + rangeMax);
                                            // only store the range attribute if a VALID range was found
                                            if ((rangeMin != null) && (rangeMax != null)) {
                                                int[] shapeArr = new int[] { 2 };
                                                ArrayFloat af = new ArrayFloat(shapeArr);
                                                try {
                                                    af.setFloat(0, Float.parseFloat(rangeMin));
                                                } catch (NumberFormatException nfe) {
                                                    af.setFloat(0, (float) Integer.MIN_VALUE);
                                                }
                                                try {
                                                    af.setFloat(1, Float.parseFloat(rangeMax));
                                                } catch (NumberFormatException nfe) {
                                                    af.setFloat(1, (float) Integer.MAX_VALUE);
                                                }
                                                Attribute rangeAtt = new Attribute("valid_range", af);
                                                v.addAttribute(rangeAtt);
                                            }

                                            // check for and load fill values too...

                                            // we need to check two places: first, the XML product profile
                                            ArrayList<Float> fval = nppPP.getFillValues(varShortName);

                                            // second, does the variable already have one defined?
                                            // if there was already a fill value associated with this variable, make
                                            // sure we bring that along for the ride too...
                                            Attribute aFill = v.findAttribute("_FillValue");

                                            // determine the size of our fill value array
                                            int fvArraySize = 0;
                                            if (aFill != null) fvArraySize++;
                                            if (! fval.isEmpty()) fvArraySize += fval.size();
                                            int[] fillShape = new int[] { fvArraySize };

                                            // allocate the array
                                            ArrayFloat afFill = new ArrayFloat(fillShape);

                                            // and FINALLY, fill it!
                                            if (! fval.isEmpty()) {
                                                for (int fillIdx = 0; fillIdx < fval.size(); fillIdx++) {
                                                    afFill.setFloat(fillIdx, fval.get(fillIdx));
                                                    logger.debug("Adding fill value (from XML): " + fval.get(fillIdx));
                                                }
                                            }

                                            if (aFill != null) {
                                                Number n = aFill.getNumericValue();
                                                // is the data unsigned?
                                                Attribute aUnsigned = v.findAttribute("_Unsigned");
                                                // default to the attribute's own numeric value; unsigned
                                                // byte/short types need conversion below
                                                float fillValAsFloat = n.floatValue();
                                                if (aUnsigned != null) {
                                                    if (aUnsigned.getStringValue().equals("true")) {
                                                        DataType fvdt = aFill.getDataType();
                                                        logger.debug("Data String: " + aFill.toString());
                                                        logger.debug("DataType primitive type: " + fvdt.getPrimitiveClassType());
                                                        // signed byte that needs conversion?
                                                        if (fvdt.getPrimitiveClassType() == byte.class) {
                                                            fillValAsFloat = (float) Util.unsignedByteToInt(n.byteValue());
                                                        }
                                                        else if (fvdt.getPrimitiveClassType() == short.class) {
                                                            fillValAsFloat = (float) Util.unsignedShortToInt(n.shortValue());
                                                        }
                                                    }
                                                }
                                                afFill.setFloat(fvArraySize - 1, fillValAsFloat);
                                                logger.debug("Adding fill value (from variable): " + fillValAsFloat);
                                            }
                                            Attribute fillAtt = new Attribute("_FillValue", afFill);
                                            v.addAttribute(fillAtt);
                                        }

                                        Attribute aUnsigned = v.findAttribute("_Unsigned");
                                        if (aUnsigned != null) {
                                            logger.debug("_Unsigned attribute value: " + aUnsigned.getStringValue());
                                            unsignedFlags.put(v.getFullName(), aUnsigned.getStringValue());
                                        } else {
                                            unsignedFlags.put(v.getFullName(), "false");
                                        }

                                        if (unpackFlag) {
                                            unpackFlags.put(v.getFullName(), "true");
                                        } else {
                                            unpackFlags.put(v.getFullName(), "false");
                                        }

                                        logger.debug("Adding product: " + v.getFullName());
                                        pathToProducts.add(v.getFullName());

                                    }
                                }
                            }
                        }
                    }
                }

                // add in any unpacked qflag products
                for (VariableDS qfV : qfProds) {
                    // skip the spares - they are reserved for future use.
                    // String.endsWith is case sensitive, so we must check both cases
                    if (qfV.getFullName().endsWith("Spare") || qfV.getFullName().endsWith("spare")) {
                        continue;
                    }
                    ncdff.addVariable(qfV.getGroup(), qfV);
                    logger.trace("Adding QF product: " + qfV.getFullName());
                    pathToProducts.add(qfV.getFullName());
                    unsignedFlags.put(qfV.getFullName(), "true");
                    unpackFlags.put(qfV.getFullName(), "false");
                }

                ncdfal.add((NetCDFFile) netCDFReader);
            }

        } catch (Exception e) {
            logger.error("cannot create NetCDF reader for files selected");
            if (e.getMessage() != null && e.getMessage().equals("XML Product Profile Error")) {
                throw new VisADException("Unable to extract metadata from required XML Product Profile");
            }
            e.printStackTrace();
        }

        // initialize the aggregation reader object
        try {
            nppAggReader = new GranuleAggregation(ncdfal, pathToProducts, "Track", "XTrack", isEDR);
            ((GranuleAggregation) nppAggReader).setQfMap(qfMap);
        } catch (Exception e) {
            throw new VisADException("Unable to initialize aggregation reader");
        }

        // make sure we found valid data
        if (pathToProducts.isEmpty()) {
            throw new VisADException("No data found in files selected");
        }

        logger.debug("Number of adapters needed: " + pathToProducts.size());
        adapters = new MultiDimensionAdapter[pathToProducts.size()];
        Hashtable<String, String[]> properties = new Hashtable<String, String[]>();

        Iterator<String> iterator = pathToProducts.iterator();
        int pIdx = 0;
        while (iterator.hasNext()) {
            String pStr = iterator.next();
            logger.debug("Working on adapter number " + (pIdx + 1) + ": " + pStr);
            HashMap<String, Object> swathTable = SwathAdapter.getEmptyMetadataTable();
            HashMap<String, Object> spectTable = SpectrumAdapter.getEmptyMetadataTable();
            swathTable.put("array_name", pStr);
            swathTable.put("lon_array_name", pathToLon);
            swathTable.put("lat_array_name", pathToLat);
            swathTable.put("XTrack", "XTrack");
            swathTable.put("Track", "Track");
            swathTable.put("geo_Track", "Track");
            swathTable.put("geo_XTrack", "XTrack");
            swathTable.put("product_name", productName);

            // array_name is common to the spectrum table
            spectTable.put("array_name", pStr);
            spectTable.put("product_name", productName);
            logger.debug("Product Name: " + productName);
            logger.debug("is3D? : " + is3D);
            logger.debug("instrumentName: " + instrumentName.getStringValue());

            if (is3D) {

                // 3D data is either ATMS, OMPS, or CrIS
                if ((instrumentName.getShortName() != null) && (instrumentName.getStringValue().equals("ATMS"))) {

                    spectTable.put(SpectrumAdapter.channelIndex_name, "Channel");
                    swathTable.put(SpectrumAdapter.channelIndex_name, "Channel");

                    swathTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
                    swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                    swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
                    spectTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
                    spectTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                    spectTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});

                    spectTable.put(SpectrumAdapter.channelType, "wavelength");
                    spectTable.put(SpectrumAdapter.channels_name, "Channel");
                    spectTable.put(SpectrumAdapter.x_dim_name, "XTrack");
                    spectTable.put(SpectrumAdapter.y_dim_name, "Track");

                    int numChannels = JPSSUtilities.ATMSChannelCenterFrequencies.length;
                    float[] bandArray = new float[numChannels];
                    String[] bandNames = new String[numChannels];
                    for (int bIdx = 0; bIdx < numChannels; bIdx++) {
                        bandArray[bIdx] = JPSSUtilities.ATMSChannelCenterFrequencies[bIdx];
                        bandNames[bIdx] = "Channel " + (bIdx + 1);
                    }
                    spectTable.put(SpectrumAdapter.channelValues, bandArray);
                    spectTable.put(SpectrumAdapter.bandNames, bandNames);
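
                    // so for ATMS the per-channel "wavelengths" are really the channel
                    // center frequencies taken straight from JPSSUtilities; e.g. (assuming
                    // the usual ATMS channel set) bandArray[0] holds channel 1's center
                    // frequency, and that same value seeds the initial display via
                    // setInitialWavenumber() further below.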

                } else {
                    if (instrumentName.getStringValue().equals("CrIS")) {

                        swathTable.put("XTrack", "dim1");
                        swathTable.put("Track", "dim0");
                        swathTable.put("geo_XTrack", "dim1");
                        swathTable.put("geo_Track", "dim0");
                        swathTable.put("product_name", "CrIS_SDR");
                        swathTable.put(SpectrumAdapter.channelIndex_name, "dim3");
                        swathTable.put(SpectrumAdapter.FOVindex_name, "dim2");

                        spectTable.put(SpectrumAdapter.channelIndex_name, "dim3");
                        spectTable.put(SpectrumAdapter.FOVindex_name, "dim2");
                        spectTable.put(SpectrumAdapter.x_dim_name, "dim1");
                        spectTable.put(SpectrumAdapter.y_dim_name, "dim0");

                    } else if (instrumentName.getStringValue().contains("OMPS")) {

                        spectTable.put(SpectrumAdapter.channelIndex_name, "Channel");
                        swathTable.put(SpectrumAdapter.channelIndex_name, "Channel");

                        swathTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
                        swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                        swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
                        spectTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
                        spectTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                        spectTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});

                        spectTable.put(SpectrumAdapter.channelType, "wavelength");
                        spectTable.put(SpectrumAdapter.channels_name, "Channel");
                        spectTable.put(SpectrumAdapter.x_dim_name, "XTrack");
                        spectTable.put(SpectrumAdapter.y_dim_name, "Track");

                        int numChannels = 200;
                        if (instrumentName.getStringValue().equals("OMPS-TC")) {
                            numChannels = 260;
                        }
                        logger.debug("Setting up OMPS adapter, num channels: " + numChannels);
                        float[] bandArray = new float[numChannels];
                        String[] bandNames = new String[numChannels];
                        for (int bIdx = 0; bIdx < numChannels; bIdx++) {
                            bandArray[bIdx] = bIdx;
                            bandNames[bIdx] = "Channel " + (bIdx + 1);
                        }
                        spectTable.put(SpectrumAdapter.channelValues, bandArray);
                        spectTable.put(SpectrumAdapter.bandNames, bandNames);
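
                        // note the contrast with ATMS above: no wavelength lookup exists
                        // here, so the OMPS channel "values" are just the indices 0..N-1;
                        // this is consistent with setInitialWavenumber(0) further below,
                        // which simply selects the first channel.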

                    } else {
                        // sorry, if we can't id the instrument, we can't display the data!
                        throw new VisADException("Unable to determine instrument name");
                    }
                }

            } else {
                swathTable.put("array_dimension_names", new String[] {"Track", "XTrack"});
                swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
            }

            swathTable.put("scale_name", "scale_factor");
            swathTable.put("offset_name", "add_offset");
            swathTable.put("fill_value_name", "_FillValue");
            swathTable.put("range_name", pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1));
            spectTable.put("range_name", pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1));

            // set the valid range hash if data is available
            if (nppPP != null) {
                if (nppPP.getRangeMin(pStr.substring(pStr.lastIndexOf(SEPARATOR_CHAR) + 1)) != null) {
                    swathTable.put("valid_range", "valid_range");
                }
            }

            String unsignedAttributeStr = unsignedFlags.get(pStr);
            if (unsignedAttributeStr.equals("true")) {
                swathTable.put("unsigned", unsignedAttributeStr);
            }

            String unpackFlagStr = unpackFlags.get(pStr);
            if (unpackFlagStr.equals("true")) {
                swathTable.put("unpack", "true");
            }

            // For Suomi NPP data, do the valid range check AFTER applying scale/offset
            swathTable.put("range_check_after_scaling", "true");

            // pass in a GranuleAggregation reader...
            if (is3D) {
                if (instrumentName.getStringValue().equals("ATMS")) {
                    adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable);
                    SpectrumAdapter sa = new SpectrumAdapter(nppAggReader, spectTable);
                    DataCategory.createCategory("MultiSpectral");
                    categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE");
                    MultiSpectralData msd = new MultiSpectralData((SwathAdapter) adapters[pIdx], sa,
                        "BrightnessTemperature", "BrightnessTemperature", "SuomiNPP", "ATMS");
                    msd.setInitialWavenumber(JPSSUtilities.ATMSChannelCenterFrequencies[0]);
                    multiSpectralData.add(msd);
                }
                if (instrumentName.getStringValue().equals("CrIS")) {
                    adapters[pIdx] = new CrIS_SDR_SwathAdapter(nppAggReader, swathTable);
                    CrIS_SDR_Spectrum csa = new CrIS_SDR_Spectrum(nppAggReader, spectTable);
                    DataCategory.createCategory("MultiSpectral");
                    categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE");
                    MultiSpectralData msd = new CrIS_SDR_MultiSpectralData((CrIS_SDR_SwathAdapter) adapters[pIdx], csa);
                    msd.setInitialWavenumber(csa.getInitialWavenumber());
                    msd_CrIS.add(msd);
                }
                if (instrumentName.getStringValue().contains("OMPS")) {
                    adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable);
                    SpectrumAdapter sa = new SpectrumAdapter(nppAggReader, spectTable);
                    DataCategory.createCategory("MultiSpectral");
                    categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE");
                    MultiSpectralData msd = new MultiSpectralData((SwathAdapter) adapters[pIdx], sa,
                        "RadianceEarth", "RadianceEarth", "SuomiNPP", "OMPS");
                    msd.setInitialWavenumber(0);
                    multiSpectralData.add(msd);
                }
                if (pIdx == 0) {
                    // generate a preview image for ATMS and OMPS
                    if (! instrumentName.getStringValue().equals("CrIS")) {
                        defaultSubset = multiSpectralData.get(pIdx).getDefaultSubset();
                        try {
                            previewImage = multiSpectralData.get(pIdx).getImage(defaultSubset);
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }

            } else {
                adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable);
                if (pIdx == 0) {
                    defaultSubset = adapters[pIdx].getDefaultSubset();
                }
                categories = DataCategory.parseCategories("IMAGE");
            }
            pIdx++;
        }

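        // With the "ES_Real" CrIS filter above, a CrIS SDR granule is expected to
        // produce exactly three spectral adapters (presumably the longwave, midwave,
        // and shortwave bands); when all three are present, they are merged into a
        // single aggregated spectrum. The 902.25 wavenumber used as the initial
        // display channel is hardcoded here, not read from the granule.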
        if (msd_CrIS.size() == 3) {
            try {
                MultiSpectralAggr aggr = new MultiSpectralAggr(msd_CrIS.toArray(new MultiSpectralData[msd_CrIS.size()]));
                aggr.setInitialWavenumber(902.25f);
                multiSpectralData.add(aggr);
                defaultSubset = msd_CrIS.get(0).getDefaultSubset();
                previewImage = msd_CrIS.get(0).getImage(defaultSubset);
            } catch (Exception e) {
                logger.error("Exception aggregating CrIS spectral bands: " + e);
            }
        }

        setProperties(properties);
    }

    public void initAfterUnpersistence() {
        try {
            if (getTmpPaths() != null) {
                // new code for zipped bundles:
                // we want 'sources' to point to wherever the zipped data was unpacked
                sources.clear();
                // following PersistenceManager.fixBulkDataSources, get the temporary data location
                String zidvPath =
                    McIDASV.getStaticMcv().getStateManager().
                    getProperty(IdvPersistenceManager.PROP_ZIDVPATH, "");
                for (Object o : getTmpPaths()) {
                    String tempPath = (String) o;
                    // replace the macro string with the actual path
                    String expandedPath = tempPath.replace(PersistenceManager.MACRO_ZIDVPATH, zidvPath);
                    // we don't want to add nav files to this list!
                    File f = new File(expandedPath);
                    if (! f.getName().matches(JPSSUtilities.SUOMI_GEO_REGEX)) {
                        sources.add(expandedPath);
                    }
                }
            } else {
                // leave in the original unpersistence code - this will get run for unzipped bundles.
                // TODO: do we need to handle the "Save with relative paths" case specially?
                if (! oldSources.isEmpty()) {
                    sources.clear();
                    for (Object o : oldSources) {
                        sources.add((String) o);
                    }
                }
            }
            oldSources.clear();
            setup();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /* (non-Javadoc)
     * @see edu.wisc.ssec.mcidasv.data.HydraDataSource#canSaveDataToLocalDisk()
     */
    @Override
    public boolean canSaveDataToLocalDisk() {
        // At present, Suomi data is always data granules on disk
        return true;
    }

    /* (non-Javadoc)
     * @see ucar.unidata.data.DataSourceImpl#saveDataToLocalDisk(java.lang.String, java.lang.Object, boolean)
     */
    @Override
    protected List saveDataToLocalDisk(String filePrefix, Object loadId,
            boolean changeLinks) throws Exception {
        // need to make a list of all data granule files,
        // PLUS all geolocation granule files, but only if they were accessed separately!
        List<String> fileList = new ArrayList<String>();
        for (Object o : sources) {
            fileList.add((String) o);
        }
        for (String s : geoSources) {
            fileList.add(s);
        }
        return fileList;
    }

    public List<String> getOldSources() {
        return oldSources;
    }

    public void setOldSources(List<String> oldSources) {
        this.oldSources = oldSources;
    }

    /**
     * Make and insert the <code>DataChoice</code>-s for this
     * <code>DataSource</code>.
     */
    public void doMakeDataChoices() {

        // special loop for CrIS, ATMS, and OMPS data
        if (multiSpectralData.size() > 0) {
            for (int k = 0; k < multiSpectralData.size(); k++) {
                MultiSpectralData adapter = multiSpectralData.get(k);
                DataChoice choice = null;
                try {
                    choice = doMakeDataChoice(k, adapter);
                    msdMap.put(choice.getName(), adapter);
                    addDataChoice(choice);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            return;
        }
        // all other data (VIIRS and 2D EDRs)
        if (adapters != null) {
            for (int idx = 0; idx < adapters.length; idx++) {
                DataChoice choice = null;
                try {
                    choice = doMakeDataChoice(idx, adapters[idx].getArrayName());
                } catch (Exception e) {
                    e.printStackTrace();
                    logger.error("doMakeDataChoice failed");
                }

                if (choice != null) {
                    addDataChoice(choice);
                }
            }
        }
    }

    private DataChoice doMakeDataChoice(int idx, String var) throws Exception {
        String name = var;
        DataSelection dataSel = new MultiDimensionSubset(defaultSubset);
        Hashtable subset = new Hashtable();
        subset.put(new MultiDimensionSubset(), dataSel);
        DirectDataChoice ddc = new DirectDataChoice(this, idx, name, name, categories, subset);
        return ddc;
    }

    private DataChoice doMakeDataChoice(int idx, MultiSpectralData adapter) throws Exception {
        String name = adapter.getName();
        DataSelection dataSel = new MultiDimensionSubset(defaultSubset);
        Hashtable subset = new Hashtable();
        subset.put(MultiDimensionSubset.key, dataSel);
        subset.put(MultiSpectralDataSource.paramKey, adapter.getParameter());
        DirectDataChoice ddc = new DirectDataChoice(this, Integer.valueOf(idx), name, name, categories, subset);
        ddc.setProperties(subset);
        return ddc;
    }

    /**
     * Check to see if this <code>SuomiNPPDataSource</code> is equal to the object
     * in question.
     * @param o object in question
     * @return true if they are the same or equivalent objects
     */
    public boolean equals(Object o) {
        if (! (o instanceof SuomiNPPDataSource)) {
            return false;
        }
        return (this == (SuomiNPPDataSource) o);
    }

    public MultiSpectralData getMultiSpectralData() {
        return multiSpectralData.get(0);
    }

    public MultiSpectralData getMultiSpectralData(DataChoice choice) {
        return msdMap.get(choice.getName());
    }

    public String getDatasetName() {
        return filename;
    }

    /**
     * @return the qfMap
     */
    public HashMap<String, QualityFlag> getQfMap() {
        return qfMap;
    }

    public void setDatasetName(String name) {
        filename = name;
    }

    public HashMap getSubsetFromLonLatRect(MultiDimensionSubset select, GeoSelection geoSelection) {
        GeoLocationInfo ginfo = geoSelection.getBoundingBox();
        return adapters[0].getSubsetFromLonLatRect(select.getSubset(), ginfo.getMinLat(), ginfo.getMaxLat(),
            ginfo.getMinLon(), ginfo.getMaxLon());
    }

    public synchronized Data getData(DataChoice dataChoice, DataCategory category,
            DataSelection dataSelection, Hashtable requestProperties)
            throws VisADException, RemoteException {
        return this.getDataInner(dataChoice, category, dataSelection, requestProperties);
    }

    protected Data getDataInner(DataChoice dataChoice, DataCategory category,
            DataSelection dataSelection, Hashtable requestProperties)
            throws VisADException, RemoteException {

        //- this hack keeps the HydraImageProbe from doing a getData()
        //- TODO: need to use categories?
        if (requestProperties != null) {
            if ((requestProperties.toString()).equals("{prop.requester=MultiSpectral}")) {
                return null;
            }
        }

        GeoLocationInfo ginfo = null;
        GeoSelection geoSelection = null;

        if ((dataSelection != null) && (dataSelection.getGeoSelection() != null)) {
            geoSelection = (dataSelection.getGeoSelection().getBoundingBox() != null)
                ? dataSelection.getGeoSelection()
                : dataChoice.getDataSelection().getGeoSelection();
        }

        if (geoSelection != null) {
            ginfo = geoSelection.getBoundingBox();
        }

        Data data = null;
        if (adapters == null) {
            return data;
        }

        MultiDimensionAdapter adapter = null;

        // pick the adapter with the same index as the current data choice
        int aIdx = 0;
        List<DataChoice> dcl = getDataChoices();
        for (DataChoice dc : dcl) {
            if (dc.getName().equals(dataChoice.getName())) {
                aIdx = dcl.indexOf(dc);
                break;
            }
        }

        adapter = adapters[aIdx];

        try {
            HashMap subset = null;
            if (ginfo != null) {
                logger.debug("getting subset from lat-lon rect...");
                subset = adapter.getSubsetFromLonLatRect(ginfo.getMinLat(), ginfo.getMaxLat(),
                    ginfo.getMinLon(), ginfo.getMaxLon(),
                    geoSelection.getXStride(),
                    geoSelection.getYStride(),
                    geoSelection.getZStride());
            } else {

                MultiDimensionSubset select = null;
                Hashtable table = dataChoice.getProperties();
                Enumeration keys = table.keys();
                while (keys.hasMoreElements()) {
                    Object key = keys.nextElement();
                    logger.debug("Key: " + key.toString());
                    if (key instanceof MultiDimensionSubset) {
                        select = (MultiDimensionSubset) table.get(key);
                    }
                }
                subset = select.getSubset();
                logger.debug("Subset size: " + subset.size());

                if (dataSelection != null) {
                    Hashtable props = dataSelection.getProperties();
                    if (props != null) {
                        if (props.containsKey(SpectrumAdapter.channelIndex_name)) {
                            logger.debug("Props contains channel index key...");
                            double[] coords = (double[]) subset.get(SpectrumAdapter.channelIndex_name);
                            int idx = ((Integer) props.get(SpectrumAdapter.channelIndex_name)).intValue();
                            coords[0] = (double) idx;
                            coords[1] = (double) idx;
                            coords[2] = (double) 1;
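                            // i.e., restrict the channel axis to the single selected
                            // channel; the subset coordinate arrays appear to follow
                            // a {first, last, stride} convention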
                        }
                    }
                }
            }

            if (subset != null) {
                data = adapter.getData(subset);
                data = applyProperties(data, requestProperties, subset, aIdx);
            }
        } catch (Exception e) {
            e.printStackTrace();
            logger.error("getData exception e=" + e);
        }
        return data;
    }

    protected Data applyProperties(Data data, Hashtable requestProperties, HashMap subset, int adapterIndex)
            throws VisADException, RemoteException {
        Data new_data = data;

        if (requestProperties == null) {
            new_data = data;
            return new_data;
        }

        return new_data;
    }

    protected void initDataSelectionComponents(
            List<DataSelectionComponent> components,
            final DataChoice dataChoice) {

        if (System.getProperty("os.name").equals("Mac OS X") && hasImagePreview && hasChannelSelect) {
            try {
                if (hasImagePreview) {
                    components.add(new ImageChannelSelection(new PreviewSelection(dataChoice, previewImage, null), new ChannelSelection(dataChoice)));
                }
                if (hasChannelSelect) {
                    components.add(new ChannelSelection(dataChoice));
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        } else {
            if (hasImagePreview) {
                try {
                    FlatField image = (FlatField) dataChoice.getData(null);
                    components.add(new PreviewSelection(dataChoice, image, null));
                } catch (Exception e) {
                    logger.error("Can't make PreviewSelection: " + e);
                    e.printStackTrace();
                }
            }
            if (hasChannelSelect) {
                try {
                    components.add(new ChannelSelection(dataChoice));
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }

    }

}