/*
 * This file is part of McIDAS-V
 *
 * Copyright 2007-2015
 * Space Science and Engineering Center (SSEC)
 * University of Wisconsin - Madison
 * 1225 W. Dayton Street, Madison, WI 53706, USA
 * https://www.ssec.wisc.edu/mcidas
 *
 * All Rights Reserved
 *
 * McIDAS-V is built on Unidata's IDV and SSEC's VisAD libraries, and
 * some McIDAS-V source code is based on IDV and VisAD source code.
 *
 * McIDAS-V is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * (at your option) any later version.
 *
 * McIDAS-V is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser Public License for more details.
 *
 * You should have received a copy of the GNU Lesser Public License
 * along with this program. If not, see http://www.gnu.org/licenses.
 */

package edu.wisc.ssec.mcidasv.data.hydra;

import edu.wisc.ssec.mcidasv.Constants;
import edu.wisc.ssec.mcidasv.McIDASV;
import edu.wisc.ssec.mcidasv.PersistenceManager;
import edu.wisc.ssec.mcidasv.data.HydraDataSource;
import edu.wisc.ssec.mcidasv.data.PreviewSelection;
import edu.wisc.ssec.mcidasv.data.QualityFlag;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FilenameFilter;
import java.rmi.RemoteException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SimpleTimeZone;
import java.util.StringTokenizer;

import javax.swing.JCheckBox;
import javax.swing.JOptionPane;

import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.output.XMLOutputter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ucar.ma2.ArrayFloat;
import ucar.ma2.DataType;
import ucar.nc2.Attribute;
import ucar.nc2.Dimension;
import ucar.nc2.Group;
import ucar.nc2.NetcdfFile;
import ucar.nc2.Variable;
import ucar.nc2.dataset.VariableDS;
import ucar.unidata.data.DataCategory;
import ucar.unidata.data.DataChoice;
import ucar.unidata.data.DataSelection;
import ucar.unidata.data.DataSelectionComponent;
import ucar.unidata.data.DataSourceDescriptor;
import ucar.unidata.data.DirectDataChoice;
import ucar.unidata.data.GeoLocationInfo;
import ucar.unidata.data.GeoSelection;
import ucar.unidata.data.grid.GridUtil;
import ucar.unidata.idv.IdvPersistenceManager;
import ucar.unidata.util.Misc;
import visad.Data;
import visad.DateTime;
import visad.DerivedUnit;
import visad.FieldImpl;
import visad.FlatField;
import visad.FunctionType;
import visad.RealType;
import visad.SampledSet;
import visad.Unit;
import visad.VisADException;
import visad.data.units.NoSuchUnitException;
import visad.data.units.ParseException;
import visad.data.units.Parser;
import visad.util.Util;

/**
 * A data source for NPOESS Preparatory Project (Suomi NPP) data.
 * This will probably move, but we are placing it here for now
 * since we are leveraging some existing code used for HYDRA.
 */
public class SuomiNPPDataSource extends HydraDataSource {

    private static final Logger logger = LoggerFactory.getLogger(SuomiNPPDataSource.class);

    /** Source file name */
    protected String filename;

    // for loading bundles, store granule lists and geo lists here
    protected List<String> oldSources = new ArrayList<String>();
    protected List<String> geoSources = new ArrayList<String>();

    // integrity map for grouping sets/aggregations of selected products
    Map<String, List<String>> filenameMap = null;

    protected MultiDimensionReader nppAggReader;

    protected MultiDimensionAdapter[] adapters = null;

    private ArrayList<MultiSpectralData> msd_CrIS = new ArrayList<MultiSpectralData>();
    private ArrayList<MultiSpectralData> multiSpectralData = new ArrayList<MultiSpectralData>();
    private HashMap<String, MultiSpectralData> msdMap = new HashMap<String, MultiSpectralData>();
    private HashMap<String, QualityFlag> qfMap = new HashMap<String, QualityFlag>();

    private static final String DATA_DESCRIPTION = "Suomi NPP Data";

    // instrument related variables and flags
    Attribute instrumentName = null;
    private String productName = null;

    // product related variables and flags
    boolean isEDR = false;
    String whichEDR = "";

    // for now, we are only handling CrIS variables that match this filter and SCAN dimensions
    private String crisFilter = "ES_Real";

    // for now, we are only handling OMPS variables that match this filter and SCAN dimensions
    private String ompsFilter = "Radiance";

    private HashMap defaultSubset;
    public TrackAdapter track_adapter;

    private List categories;
    private boolean isCombinedProduct = false;
    private boolean nameHasBeenSet = false;

    // need our own separator char since it's always Unix-style in the Suomi NPP files
    private static final String SEPARATOR_CHAR = "/";

    // date formatter for converting Suomi NPP day/time to something we can use
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss.SSS");

    // date formatter for how we want to show granule day/time on display
    SimpleDateFormat sdfOut = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");

    // MJH keep track of date to add time dim to FieldImpl
    Date theDate;

    /**
     * Zero-argument constructor for construction via unpersistence.
     */
    public SuomiNPPDataSource() {
    }

    public SuomiNPPDataSource(String fileName) throws VisADException {
        this(null, Misc.newList(fileName), null);
        logger.debug("filename only constructor call..");
    }

    /**
     * Construct a new Suomi NPP HDF5 data source.
     *
     * @param descriptor descriptor for this {@code DataSource}
     * @param fileName name of the HDF file to read
     * @param properties hashtable of properties
     *
     * @throws VisADException problem creating data
     */
    public SuomiNPPDataSource(DataSourceDescriptor descriptor,
                              String fileName, Hashtable properties)
            throws VisADException {
        this(descriptor, Misc.newList(fileName), properties);
        logger.debug("SuomiNPPDataSource called, single file selected: " + fileName);
    }

    /**
     * Construct a new Suomi NPP HDF5 data source.
     *
     * @param descriptor Descriptor for this {@code DataSource}.
     * @param newSources List of filenames.
     * @param properties Hashtable of properties.
     *
     * @throws VisADException problem creating data
     */
    public SuomiNPPDataSource(DataSourceDescriptor descriptor,
                              List<String> newSources, Hashtable properties)
            throws VisADException {
        super(descriptor, newSources, DATA_DESCRIPTION, properties);
        logger.debug("SuomiNPPDataSource constructor called, file count: " + sources.size());

        filename = (String) sources.get(0);
        setDescription("Suomi NPP");

        // build the filename map - matches each product to the set of files for that product
        filenameMap = new HashMap<String, List<String>>();

        // Pass 1, populate the list of products selected
        for (Object o : sources) {
            String filename = (String) o;
            // first five characters of any product go together
            int lastSeparator = filename.lastIndexOf(File.separatorChar);
            int firstUnderscore = filename.indexOf("_", lastSeparator + 1);
            String prodStr = filename.substring(lastSeparator + 1, firstUnderscore);
            if (! filenameMap.containsKey(prodStr)) {
                List<String> l = new ArrayList<String>();
                filenameMap.put(prodStr, l);
            }
        }

        // Pass 2, create a list of files for each product in this data source
        for (Object o : sources) {
            String filename = (String) o;
            // first five characters of any product go together
            int lastSeparator = filename.lastIndexOf(File.separatorChar);
            int firstUnderscore = filename.indexOf("_", lastSeparator + 1);
            String prodStr = filename.substring(lastSeparator + 1, firstUnderscore);
            List l = (List) filenameMap.get(prodStr);
            l.add(filename);
            filenameMap.put(prodStr, l);
        }

        versionCheck();
        setup();
        initQfTranslations();
    }
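
    // For illustration: given a selection of granules following the usual Suomi NPP
    // naming convention (names below are hypothetical examples), e.g.
    //   SVM15_npp_d20120218_t0121044_e0122286_b01577_c..._noaa_ops.h5
    //   SVM15_npp_d20120218_t0122298_e0124162_b01577_c..._noaa_ops.h5
    //   SVI04_npp_d20120218_t0121044_e0122286_b01577_c..._noaa_ops.h5
    // the two passes above would yield
    //   filenameMap = { "SVM15" -> [first, second], "SVI04" -> [third] }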

    // alert user about possible VIIRS plugin compatibility issues
    private void versionCheck() {
        boolean pluginDialog = getIdv().getStore().get(Constants.PREF_VIIRS_PLUGIN, false);
        // don't create a dialog though if we are running in background/offscreen mode
        boolean offScreen = getIdv().getArgsManager().getIsOffScreen();
        if (! offScreen) {
            if (! pluginDialog) {
                String msg = "There has been an update to the VIIRS Formulas plugin.\n" +
                        "If you use the plugin, you will need to uninstall the currently installed\n" +
                        "version of the plugin, and install the plugin called \"VIIRS Formulas\".";
                JCheckBox jcbPlugin = new JCheckBox("Do not show this message again");
                Object[] params = { msg, jcbPlugin };
                JOptionPane.showMessageDialog(null, params, "Plugin Compatibility Notice", JOptionPane.INFORMATION_MESSAGE);
                boolean dontShow = jcbPlugin.isSelected();
                getIdv().getStore().put(Constants.PREF_VIIRS_PLUGIN, dontShow);
            }
        } else {
            logger.warn("Make sure your VIIRS plugin is current, there was an update with McV 1.5");
        }
    }
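
    /**
     * Examine each granule in the data source: locate geolocation, harvest
     * per-product metadata from the XML Product Profiles, build NCML union
     * aggregations of the data and geolocation files, and set up the swath
     * and spectrum adapters used to serve the data.
     *
     * @throws VisADException if aggregation fails or no usable data is found
     */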
    public void setup() throws VisADException {

        // store filenames for possible bundle unpersistence
        for (Object o : sources) {
            oldSources.add((String) o);
        }

        // time zone for product labels
        SimpleTimeZone stz = new SimpleTimeZone(0, "GMT");
        sdf.setTimeZone(stz);
        sdfOut.setTimeZone(stz);

        // looking to populate 3 things - path to lat, path to lon, path to relevant products
        String pathToLat = null;
        String pathToLon = null;
        LinkedHashSet<String> pathToProducts = new LinkedHashSet<String>();

        // flag to indicate data is 3-dimensional (X, Y, channel or band)
        boolean is3D = false;

        // check source filenames to see if this is a combined product. everything
        // from the last file separator to the first underscore should be product info
        int lastSeparator = filename.lastIndexOf(File.separatorChar);
        int firstUnderscore = filename.indexOf("_", lastSeparator + 1);
        String prodStr = filename.substring(lastSeparator + 1, firstUnderscore);
        StringTokenizer st = new StringTokenizer(prodStr, "-");
        logger.debug("SNPPDS check for embedded GEO, tokenizing: " + prodStr);
        while (st.hasMoreTokens()) {
            String singleProd = st.nextToken();
            for (int i = 0; i < JPSSUtilities.geoProductIDs.length; i++) {
                if (singleProd.equals(JPSSUtilities.geoProductIDs[i])) {
                    logger.debug("Setting isCombinedProduct true, found embedded GEO: " + singleProd);
                    isCombinedProduct = true;
                    break;
                }
            }
        }

        // various metadata we'll need to gather on a per-product basis
        LinkedHashMap<String, String> unsignedFlags = new LinkedHashMap<String, String>();
        LinkedHashMap<String, String> unpackFlags = new LinkedHashMap<String, String>();

        // geo product IDs for each granule
        LinkedHashSet<String> geoProductIDs = new LinkedHashSet<String>();

        // aggregations will use sets of NetCDFFile readers
        ArrayList<NetCDFFile> ncdfal = new ArrayList<NetCDFFile>();

        // we should be able to find an XML Product Profile for each data/product type
        SuomiNPPProductProfile nppPP = null;
        // and also Profile metadata for geolocation variables
        boolean haveGeoMetaData = false;

        // number of source granules which make up the data source
        int granuleCount = 1;

        try {

            nppPP = new SuomiNPPProductProfile();

            // for each source file provided, find the appropriate geolocation,
            // get the nominal time and various other granule-level metadata
            Iterator keyIterator = filenameMap.keySet().iterator();
            while (keyIterator.hasNext()) {
                String keyStr = (String) keyIterator.next();
                List fileNames = (List) filenameMap.get(keyStr);
                granuleCount = fileNames.size();
                setProperty(Constants.PROP_GRANULE_COUNT, granuleCount + " Granule");
                for (int fileCount = 0; fileCount < granuleCount; fileCount++) {
                    // need to open the main NetCDF file to determine the geolocation product
                    NetcdfFile ncfile = null;
                    String fileAbsPath = null;
                    try {
                        fileAbsPath = (String) fileNames.get(fileCount);
                        logger.debug("Trying to open file: " + fileAbsPath);
                        ncfile = NetcdfFile.open(fileAbsPath);
                        if (! isCombinedProduct) {
                            Attribute a = ncfile.findGlobalAttribute("N_GEO_Ref");
                            logger.debug("Value of GEO global attribute: " + a.getStringValue());
                            String tmpGeoProductID = a.getStringValue();
                            geoProductIDs.add(tmpGeoProductID);
                        }
                        Group rg = ncfile.getRootGroup();

                        List<Group> gl = rg.getGroups();
                        if (gl != null) {
                            for (Group g : gl) {
                                logger.trace("Group name: " + g.getFullName());
                                // when we find the Data_Products group, go down another group level and pull out
                                // what we will use for nominal day and time (for now anyway).
                                // XXX TJJ fileCount check is so we don't count the GEO file in time array!
                                if (g.getFullName().contains("Data_Products") && (fileCount != fileNames.size())) {
                                    boolean foundDateTime = false;
                                    List<Group> dpg = g.getGroups();

                                    // cycle through once looking for XML Product Profiles
                                    for (Group subG : dpg) {

                                        String subName = subG.getFullName();
                                        // use the actual product, not geolocation, to id the XML Product Profile
                                        if (! subName.contains("-GEO")) {
                                            // determine the instrument name (VIIRS, ATMS, CrIS, OMPS)
                                            instrumentName = subG.findAttribute("Instrument_Short_Name");

                                            // note any EDR products, will need to check for and remove
                                            // fill scans later
                                            Attribute adtt = subG.findAttribute("N_Dataset_Type_Tag");
                                            if (adtt != null) {
                                                String baseName = adtt.getStringValue();
                                                if ((baseName != null) && (baseName.equals("EDR"))) {
                                                    isEDR = true;
                                                    // have to loop through the sub group variables to determine band
                                                    List<Variable> tmpVar = subG.getVariables();
                                                    for (Variable v : tmpVar) {
                                                        // if the Imagery EDR attribute for band is specified, save it
                                                        Attribute mBand = v.findAttribute("Band_ID");
                                                        if (mBand != null) {
                                                            whichEDR = mBand.getStringValue();
                                                        }
                                                    }
                                                }
                                            }

                                            // This is also where we find the attribute which tells us which
                                            // XML Product Profile to use!
                                            Attribute axpp = subG.findAttribute("N_Collection_Short_Name");
                                            if (axpp != null) {
                                                String baseName = axpp.getStringValue();
                                                productName = baseName;
                                                String productProfileFileName = nppPP.getProfileFileName(baseName);
                                                logger.trace("Found profile: " + productProfileFileName);
                                                if (productProfileFileName == null) {
                                                    throw new Exception("XML Product Profile not found in catalog");
                                                }
                                                try {
                                                    nppPP.addMetaDataFromFile(productProfileFileName);
                                                } catch (Exception nppppe) {
                                                    logger.error("Error parsing XML Product Profile: " + productProfileFileName);
                                                    throw new Exception("XML Product Profile Error");
                                                }
                                            }
                                        }
                                    }

                                    // 2nd pass through sub-group to extract date/time for aggregation
                                    for (Group subG : dpg) {
                                        List<Variable> vl = subG.getVariables();
                                        for (Variable v : vl) {
                                            Attribute aDate = v.findAttribute("AggregateBeginningDate");
                                            Attribute aTime = v.findAttribute("AggregateBeginningTime");
                                            // did we find the attributes we are looking for?
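                                            // For illustration (attribute values hypothetical): aDate/aTime
                                            // look like AggregateBeginningDate = "20120218" and
                                            // AggregateBeginningTime = "012104.400000Z". The substring math
                                            // below drops the trailing 'Z' and all but three fractional-second
                                            // digits, so the concatenated string matches the
                                            // "yyyyMMddHHmmss.SSS" pattern used by sdf.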
                                            if ((aDate != null) && (aTime != null)) {
                                                String sDate = aDate.getStringValue();
                                                String sTime = aTime.getStringValue();
                                                logger.trace("For day/time, using: " + sDate + sTime.substring(0, sTime.indexOf('Z') - 3));
                                                Date d = sdf.parse(sDate + sTime.substring(0, sTime.indexOf('Z') - 3));
                                                theDate = d;
                                                foundDateTime = true;
                                                // set time for display to day/time of 1st granule examined
                                                if (! nameHasBeenSet) {
                                                    setName(instrumentName.getStringValue() + " " + sdfOut.format(d));
                                                    nameHasBeenSet = true;
                                                }
                                                break;
                                            }
                                        }
                                        if (foundDateTime) break;
                                    }
                                    if (! foundDateTime) {
                                        throw new VisADException("No date time found in Suomi NPP granule");
                                    }
                                }
                            }
                        }
                    } catch (Exception e) {
                        logger.warn("Exception during processing of file: " + fileAbsPath);
                        throw (e);
                    } finally {
                        ncfile.close();
                    }
                }

            }

            // build each union aggregation element
            Iterator<String> iterator = geoProductIDs.iterator();
            for (int elementNum = 0; elementNum < granuleCount; elementNum++) {

                String s = null;

                // build an XML (NCML actually) representation of the union aggregation of these two files
                Namespace ns = Namespace.getNamespace("http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2");
                Element root = new Element("netcdf", ns);
                Document document = new Document(root);

                Element agg = new Element("aggregation", ns);
                agg.setAttribute("type", "union");
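
                // For illustration (locations hypothetical), the finished document handed to
                // NetCDFFile below is a small NCML union along these lines:
                //   <netcdf xmlns="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2">
                //     <aggregation type="union">
                //       <netcdf location="/data/SVM15_npp_d20120218_t0121044_..._noaa_ops.h5"/>
                //       <netcdf location="/data/GMTCO_npp_d20120218_t0121044_..._noaa_ops.h5"/>
                //     </aggregation>
                //   </netcdf>
                // one data granule per selected product, plus the geolocation granule when it
                // is not embedded in a combined product.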

                // TJJ - Loop over filename map, could be several products that need to be aggregated
                Set set = filenameMap.keySet();
                Iterator mapIter = set.iterator();
                while (mapIter.hasNext()) {
                    String key = (String) mapIter.next();
                    List l = (List) filenameMap.get(key);
                    Element fData = new Element("netcdf", ns);
                    fData.setAttribute("location", (String) l.get(elementNum));
                    agg.addContent(fData);
                    s = (String) l.get(elementNum);
                }

                if (! isCombinedProduct) {
                    Element fGeo = new Element("netcdf", ns);

                    String geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1);
                    // check if we have the whole file name or just the prefix
                    String geoProductID = iterator.next();
                    if (geoProductID.endsWith("h5")) {
                        geoFilename += geoProductID;
                    } else {
                        geoFilename += geoProductID;
                        geoFilename += s.substring(s.lastIndexOf(File.separatorChar) + 6);
                    }
                    // make sure the file specified by the N_GEO_Ref global attribute is really there
                    File tmpGeo = new File(geoFilename);
                    if (! tmpGeo.exists()) {
                        // Ok, the expected file defined (supposedly) exactly by a global att is not there...
                        // We need to check for similar geo files with different creation dates
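                        // For illustration: the ellipsoid-to-terrain-corrected swap below turns a
                        // prefix like "GMODO" (VIIRS M-band ellipsoid geo) into "GMTCO" (terrain
                        // corrected), and "GIMGO" into "GITCO". The 35-character comparison further
                        // down matches the _dYYYYMMDD_tHHMMSSS_eHHMMSSS_bNNNNN date/time/orbit
                        // portion of the two names while ignoring the creation-time field.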
                        String geoFileRelative = geoFilename.substring(geoFilename.lastIndexOf(File.separatorChar) + 1);
                        // also check for the Terrain Corrected version of the geo
                        String geoTerrainCorrected = geoFileRelative;
                        geoTerrainCorrected = geoTerrainCorrected.replace("OD", "TC");
                        geoTerrainCorrected = geoTerrainCorrected.replace("MG", "TC");

                        // now we make a file filter, and see if a matching geo file is present
                        File fList = new File(geoFilename.substring(0, geoFilename.lastIndexOf(File.separatorChar) + 1)); // current directory

                        FilenameFilter geoFilter = new FilenameFilter() {
                            public boolean accept(File dir, String name) {
                                return name.matches(JPSSUtilities.SUOMI_GEO_REGEX);
                            }
                        };

                        File[] files = fList.listFiles(geoFilter);
                        for (File file : files) {
                            if (file.isDirectory()) {
                                continue;
                            }
                            // get the file name for convenience
                            String fName = file.getName();
                            // is it one of the standard Ellipsoid geo types we are looking for?
                            if (fName.substring(0, 5).equals(geoFileRelative.substring(0, 5))) {
                                int geoStartIdx = geoFileRelative.indexOf("_d");
                                int prdStartIdx = fName.indexOf("_d");
                                String s1 = geoFileRelative.substring(geoStartIdx, geoStartIdx + 35);
                                String s2 = fName.substring(prdStartIdx, prdStartIdx + 35);
                                if (s1.equals(s2)) {
                                    geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1) + fName;
                                    break;
                                }
                            }
                            // same check, but for the Terrain Corrected version
                            if (fName.substring(0, 5).equals(geoTerrainCorrected.substring(0, 5))) {
                                int geoStartIdx = geoTerrainCorrected.indexOf("_d");
                                int prdStartIdx = fName.indexOf("_d");
                                String s1 = geoTerrainCorrected.substring(geoStartIdx, geoStartIdx + 35);
                                String s2 = fName.substring(prdStartIdx, prdStartIdx + 35);
                                if (s1.equals(s2)) {
                                    geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1) + fName;
                                    break;
                                }
                            }
                        }
                    }
                    logger.debug("Cobbled together GEO file name: " + geoFilename);
                    fGeo.setAttribute("location", geoFilename);
                    // add this to the list used if we create a zipped bundle
                    geoSources.add(geoFilename);
                    agg.addContent(fGeo);
                }

                root.addContent(agg);
                XMLOutputter xmlOut = new XMLOutputter();
                String ncmlStr = xmlOut.outputString(document);
                ByteArrayInputStream is = new ByteArrayInputStream(ncmlStr.getBytes());
                MultiDimensionReader netCDFReader = new NetCDFFile(is);

                // let's try and look through the NetCDF reader and see what we can learn...
                NetcdfFile ncdff = ((NetCDFFile) netCDFReader).getNetCDFFile();

                Group rg = ncdff.getRootGroup();
                // this is a list filled with unpacked qflag products, if any
                ArrayList<VariableDS> qfProds = new ArrayList<VariableDS>();

                List<Group> gl = rg.getGroups();
                if (gl != null) {
                    for (Group g : gl) {
                        logger.debug("Group name: " + g.getFullName());
                        // XXX just temporary - we are looking through All_Data, finding displayable data
                        if (g.getFullName().contains("All_Data")) {
                            List<Group> adg = g.getGroups();
                            int xDim = -1;
                            int yDim = -1;

                            // two sub-iterations, first one to find geolocation and product dimensions
                            for (Group subG : adg) {
                                logger.debug("Sub group name: " + subG.getFullName());
                                String subName = subG.getFullName();
                                if (subName.contains("-GEO")) {
                                    // this is the geolocation data
                                    String geoBaseName = subG.getShortName();
                                    geoBaseName = geoBaseName.substring(0, geoBaseName.indexOf('_'));
                                    if (! haveGeoMetaData) {
                                        String geoProfileFileName = nppPP.getProfileFileName(geoBaseName);
                                        // also add metadata from the geolocation profile
                                        nppPP.addMetaDataFromFile(geoProfileFileName);
                                        haveGeoMetaData = true;
                                    }
                                    List<Variable> vl = subG.getVariables();
                                    for (Variable v : vl) {
                                        if (v.getFullName().endsWith(SEPARATOR_CHAR + "Latitude")) {
                                            pathToLat = v.getFullName();
                                            logger.debug("Ellipsoid Lat/Lon Variable: " + v.getFullName());
                                            // get the dimensions of the lat variable
                                            Dimension dAlongTrack = v.getDimension(0);
                                            yDim = dAlongTrack.getLength();
                                            Dimension dAcrossTrack = v.getDimension(1);
                                            xDim = dAcrossTrack.getLength();
                                            logger.debug("Lat across track dim: " + dAcrossTrack.getLength());
                                        }
                                        if (v.getFullName().endsWith(SEPARATOR_CHAR + "Longitude")) {
                                            // we got dimensions from lat, don't need 'em twice, but need the path
                                            pathToLon = v.getFullName();
                                        }
                                    }
                                    // one more pass in case there is terrain-corrected Lat/Lon
                                    for (Variable v : vl) {
                                        if (v.getFullName().endsWith(SEPARATOR_CHAR + "Latitude_TC")) {
                                            pathToLat = v.getFullName();
                                            logger.debug("Switched Lat/Lon Variable to TC: " + v.getFullName());
                                            // get the dimensions of the lat variable
                                            Dimension dAlongTrack = v.getDimension(0);
                                            yDim = dAlongTrack.getLength();
                                            Dimension dAcrossTrack = v.getDimension(1);
                                            xDim = dAcrossTrack.getLength();
                                            logger.debug("Lat across track dim: " + dAcrossTrack.getLength());
                                        }
                                        if (v.getFullName().endsWith(SEPARATOR_CHAR + "Longitude_TC")) {
                                            // we got dimensions from lat, don't need 'em twice, but need the path
                                            pathToLon = v.getFullName();
                                        }
                                    }
                                }
                            }

                            // second to identify displayable products
                            for (Group subG : adg) {
                                // this is the product data
                                List<Variable> vl = subG.getVariables();
                                for (Variable v : vl) {
                                    boolean useThis = false;
                                    String vName = v.getFullName();
                                    logger.trace("Variable: " + vName);
                                    String varShortName = vName.substring(vName.lastIndexOf(SEPARATOR_CHAR) + 1);

                                    // Special code to handle quality flags. We throw out anything
                                    // that does not match the bounds of the geolocation data
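                                    // For illustration (flag name hypothetical): a packed byte variable
                                    // such as "QF1_VIIRSMBANDSDR" can hold several bit fields; each field
                                    // described by the XML Product Profile is re-exposed below as its own
                                    // variable named like "QF1_<flagName>" and tracked in qfMap.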
                                    if (varShortName.startsWith("QF")) {

                                        logger.trace("Handling Quality Flag: " + varShortName);

                                        // this check is done later for ALL variables, but we need
                                        // it early here to weed out those quality flags that are
                                        // simply a small set of data w/no granule geo bounds
                                        boolean xScanOk = false;
                                        boolean yScanOk = false;
                                        List<Dimension> dl = v.getDimensions();

                                        // toss out > 2D Quality Flags
                                        if (dl.size() > 2) {
                                            logger.trace("SKIPPING QF, > 2D: " + varShortName);
                                            continue;
                                        }

                                        for (Dimension d : dl) {
                                            // in order to consider this a displayable product, make sure
                                            // both scan direction dimensions are present and look like a granule
                                            if (d.getLength() == xDim) {
                                                xScanOk = true;
                                            }
                                            if (d.getLength() == yDim) {
                                                yScanOk = true;
                                            }
                                        }

                                        if (! (xScanOk && yScanOk)) {
                                            logger.trace("SKIPPING QF, does not match geo bounds: " + varShortName);
                                            continue;
                                        }

                                        ArrayList<QualityFlag> qfal = nppPP.getQualityFlags(varShortName);
                                        if (qfal != null) {
                                            for (QualityFlag qf : qfal) {
                                                qf.setPackedName(vName);
                                                // make a copy of the qflag variable
                                                // NOTE: by using a VariableDS here, the original
                                                // variable is used for the I/O, this matters!
                                                VariableDS vqf = new VariableDS(subG, v, false);
                                                // prefix with the QF num to help guarantee uniqueness across groups.
                                                // this will cover most cases, but there could still be dupe names
                                                // within a single QF. This is handled when fetching XMLPP metadata
                                                vqf.setShortName(
                                                        varShortName.substring(0, 3) + "_" + qf.getName()
                                                );
                                                logger.debug("New QF var full name: " + vqf.getFullName());
                                                qfProds.add(vqf);
                                                qfMap.put(vqf.getFullName(), qf);
                                            }
                                        }
                                    }

                                    // for the CrIS instrument, first find dimensions of the var matching
                                    // the CrIS filter, then throw out all variables which don't match
                                    // those dimensions
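                                    // Note: crisFilter is "ES_Real", which matches the real-valued
                                    // Earth-scene spectrum variables in CrIS SDRs (e.g. ES_RealLW,
                                    // ES_RealMW, ES_RealSW for the three spectral bands).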
                                    if (instrumentName.getStringValue().equals("CrIS")) {
                                        if (! vName.contains("GEO")) {
                                            if (! varShortName.startsWith(crisFilter)) {
                                                logger.trace("Skipping variable: " + varShortName);
                                                continue;
                                            }
                                        } else {
                                            // these variables are all GEO-related
                                            // if they match lat/lon bounds, keep them
                                            List<Dimension> dl = v.getDimensions();
                                            if (dl.size() == 3) {
                                                boolean isDisplayableCrIS = true;
                                                for (Dimension d : dl) {
                                                    if ((d.getLength() != xDim) && (d.getLength() != yDim) && (d.getLength() != 9)) {
                                                        isDisplayableCrIS = false;
                                                    }
                                                }
                                                if (! isDisplayableCrIS) {
                                                    continue;
                                                }
                                            }
                                        }
                                    }

                                    // for OMPS, only Radiance for now...
                                    if (instrumentName.getStringValue().contains("OMPS")) {
                                        if (! varShortName.startsWith(ompsFilter)) {
                                            logger.trace("Skipping OMPS variable: " + varShortName);
                                            continue;
                                        }
                                    }

                                    DataType dt = v.getDataType();
                                    if ((dt.getSize() != 4) && (dt.getSize() != 2) && (dt.getSize() != 1)) {
                                        continue;
                                    }

                                    List<Dimension> dl = v.getDimensions();
                                    if (dl.size() > 4) {
                                        continue;
                                    }

                                    // for now, skip any 3D VIIRS data
                                    if (instrumentName.getStringValue().equals("VIIRS")) {
                                        if (dl.size() == 3) {
                                            continue;
                                        }
                                    }

                                    boolean xScanOk = false;
                                    boolean yScanOk = false;
                                    for (Dimension d : dl) {
                                        // in order to consider this a displayable product, make sure
                                        // both scan direction dimensions are present and look like a granule
                                        if (d.getLength() == xDim) {
                                            xScanOk = true;
                                        }
                                        if (d.getLength() == yDim) {
                                            yScanOk = true;
                                        }
                                    }

                                    if (xScanOk && yScanOk) {
                                        useThis = true;
                                    }

                                    // For ATMS, the only 3-D variable we pass through is BrightnessTemperature.
                                    // Dimensions for BT are (lon, lat, channel)
                                    if (instrumentName.getStringValue().equals("ATMS")) {
                                        if (dl.size() == 3) {
                                            boolean isDisplayableATMS = false;
                                            for (Dimension d : dl) {
                                                if (d.getLength() == JPSSUtilities.ATMSChannelCenterFrequencies.length) {
                                                    isDisplayableATMS = true;
                                                    logger.trace("This variable has a dimension matching the number of ATMS channels");
                                                    break;
                                                }
                                            }
                                            if (! isDisplayableATMS) useThis = false;
                                        }
                                    }

                                    // sensor data with a channel dimension
                                    if (useThis) {
                                        if ((instrumentName.getStringValue().equals("CrIS")) ||
                                                (instrumentName.getStringValue().equals("ATMS")) ||
                                                (instrumentName.getStringValue().contains("OMPS"))) {
                                            is3D = true;
                                            logger.debug("Handling 3-D data source...");
                                        }
                                    }

                                    if (useThis) {
                                        // loop through the variable list again, looking for a corresponding "Factors"
                                        float scaleVal = 1f;
                                        float offsetVal = 0f;
                                        boolean unpackFlag = false;

                                        // if the granule has an entry for this variable name:
                                        //   read the data, data1 = scale, data2 = offset,
                                        //   then create and poke attributes with this data
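                                        // For illustration (names follow the usual JPSS convention): a packed
                                        // product variable such as "Radiance" is typically paired with a float
                                        // array "RadianceFactors" whose first two elements are [scale, offset];
                                        // getScaleFactorName() does that name mapping via the XML Product Profile.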
                                        String factorsVarName = nppPP.getScaleFactorName(varShortName);
                                        logger.debug("Mapping: " + varShortName + " to: " + factorsVarName);
                                        if (factorsVarName != null) {
                                            for (Variable fV : vl) {
                                                if (fV.getShortName().equals(factorsVarName)) {
                                                    logger.trace("Pulling scale and offset values from variable: " + fV.getShortName());
                                                    ucar.ma2.Array a = fV.read();
                                                    float[] so = (float[]) a.copyTo1DJavaArray();
                                                    scaleVal = so[0];
                                                    offsetVal = so[1];
                                                    logger.trace("Scale value: " + scaleVal + ", Offset value: " + offsetVal);
                                                    unpackFlag = true;
                                                    break;
                                                }
                                            }
                                        }

                                        // poke in scale/offset attributes for now

                                        Attribute a1 = new Attribute("scale_factor", scaleVal);
                                        v.addAttribute(a1);
                                        Attribute a2 = new Attribute("add_offset", offsetVal);
                                        v.addAttribute(a2);

                                        // add valid range and fill value attributes here
                                        // try to fill in the valid range
                                        if (nppPP.hasNameAndMetaData(varShortName)) {
                                            String rangeMin = nppPP.getRangeMin(varShortName);
                                            String rangeMax = nppPP.getRangeMax(varShortName);
                                            logger.trace("range min: " + rangeMin + ", range max: " + rangeMax);
                                            // only store the range attribute if a VALID range was found
                                            if ((rangeMin != null) && (rangeMax != null)) {
                                                int[] shapeArr = new int[] { 2 };
                                                ArrayFloat af = new ArrayFloat(shapeArr);
                                                try {
                                                    af.setFloat(0, Float.parseFloat(rangeMin));
                                                } catch (NumberFormatException nfe) {
                                                    af.setFloat(0, (float) Integer.MIN_VALUE);
                                                }
                                                try {
                                                    af.setFloat(1, Float.parseFloat(rangeMax));
                                                } catch (NumberFormatException nfe) {
                                                    af.setFloat(1, (float) Integer.MAX_VALUE);
                                                }
                                                Attribute rangeAtt = new Attribute("valid_range", af);
                                                v.addAttribute(rangeAtt);
                                            }

                                            // check for and load fill values too...

                                            // we need to check two places: first, the XML product profile
                                            ArrayList<Float> fval = nppPP.getFillValues(varShortName);

                                            // 2nd, does the variable already have one defined?
                                            // if there was already a fill value associated with this variable, make
                                            // sure we bring that along for the ride too...
                                            Attribute aFill = v.findAttribute("_FillValue");

                                            // determine the size of our fill value array
                                            int fvArraySize = 0;
                                            if (aFill != null) fvArraySize++;
                                            if (! fval.isEmpty()) fvArraySize += fval.size();
                                            int[] fillShape = new int[] { fvArraySize };

                                            // allocate the array
                                            ArrayFloat afFill = new ArrayFloat(fillShape);

                                            // and FINALLY, fill it!
                                            if (! fval.isEmpty()) {
                                                for (int fillIdx = 0; fillIdx < fval.size(); fillIdx++) {
                                                    afFill.setFloat(fillIdx, fval.get(fillIdx));
                                                    logger.trace("Adding fill value (from XML): " + fval.get(fillIdx));
                                                }
                                            }

                                            if (aFill != null) {
                                                Number n = aFill.getNumericValue();
                                                // is the data unsigned?
                                                Attribute aUnsigned = v.findAttribute("_Unsigned");
                                                float fillValAsFloat = Float.NaN;
                                                if (aUnsigned != null) {
                                                    if (aUnsigned.getStringValue().equals("true")) {
                                                        DataType fvdt = aFill.getDataType();
                                                        logger.trace("Data String: " + aFill.toString());
                                                        logger.trace("DataType primitive type: " + fvdt.getPrimitiveClassType());
                                                        // signed byte that needs conversion?
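                                                        // For example, a byte fill value of 0xFF reads back as -1
                                                        // through Java's signed byte, but means 255 in unsigned
                                                        // data, so it is widened to an int before the float cast.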
                                                        if (fvdt.getPrimitiveClassType() == byte.class) {
                                                            fillValAsFloat = (float) Util.unsignedByteToInt(n.byteValue());
                                                        } else if (fvdt.getPrimitiveClassType() == short.class) {
                                                            fillValAsFloat = (float) Util.unsignedShortToInt(n.shortValue());
                                                        } else {
                                                            fillValAsFloat = n.floatValue();
                                                        }
                                                    }
                                                }
                                                afFill.setFloat(fvArraySize - 1, fillValAsFloat);
                                                logger.trace("Adding fill value (from variable): " + fillValAsFloat);
                                            }
                                            Attribute fillAtt = new Attribute("_FillValue", afFill);
                                            v.addAttribute(fillAtt);
                                        }

                                        Attribute aUnsigned = v.findAttribute("_Unsigned");
                                        if (aUnsigned != null) {
                                            unsignedFlags.put(v.getFullName(), aUnsigned.getStringValue());
                                        } else {
                                            unsignedFlags.put(v.getFullName(), "false");
                                        }

                                        if (unpackFlag) {
                                            unpackFlags.put(v.getFullName(), "true");
                                        } else {
                                            unpackFlags.put(v.getFullName(), "false");
                                        }

                                        logger.debug("Adding product: " + v.getFullName());
                                        pathToProducts.add(v.getFullName());

                                    }
                                }
                            }
                        }
                    }
                }

                // add in any unpacked qflag products
                for (VariableDS qfV : qfProds) {
                    // skip the spares - they are reserved for future use
                    if (qfV.getFullName().endsWith("Spare")) {
                        continue;
                    }
                    // String.endsWith is case sensitive so gotta check both cases
                    if (qfV.getFullName().endsWith("spare")) {
                        continue;
                    }
                    ncdff.addVariable(qfV.getGroup(), qfV);
                    logger.trace("Adding QF product: " + qfV.getFullName());
                    pathToProducts.add(qfV.getFullName());
                    unsignedFlags.put(qfV.getFullName(), "true");
                    unpackFlags.put(qfV.getFullName(), "false");
                }

                ncdfal.add((NetCDFFile) netCDFReader);
            }

        } catch (Exception e) {
            logger.error("cannot create NetCDF reader for files selected");
            if (e.getMessage() != null && e.getMessage().equals("XML Product Profile Error")) {
                throw new VisADException("Unable to extract metadata from required XML Product Profile");
            }
            e.printStackTrace();
        }

        // initialize the aggregation reader object
        try {
            nppAggReader = new GranuleAggregation(ncdfal, pathToProducts, "Track", "XTrack", isEDR);
            ((GranuleAggregation) nppAggReader).setQfMap(qfMap);
        } catch (Exception e) {
            throw new VisADException("Unable to initialize aggregation reader");
        }

        // make sure we found valid data
        if (pathToProducts.size() == 0) {
            throw new VisADException("No data found in files selected");
        }

        logger.debug("Number of adapters needed: " + pathToProducts.size());
        adapters = new MultiDimensionAdapter[pathToProducts.size()];
        Hashtable<String, String[]> properties = new Hashtable<String, String[]>();

        Iterator<String> iterator = pathToProducts.iterator();
        int pIdx = 0;
        boolean adapterCreated = false;
        while (iterator.hasNext()) {
            String pStr = (String) iterator.next();
            logger.debug("Working on adapter number " + (pIdx + 1) + ": " + pStr);
            HashMap<String, Object> swathTable = SwathAdapter.getEmptyMetadataTable();
            HashMap<String, Object> spectTable = SpectrumAdapter.getEmptyMetadataTable();
            swathTable.put("array_name", pStr);
            swathTable.put("lon_array_name", pathToLon);
            swathTable.put("lat_array_name", pathToLat);
            swathTable.put("XTrack", "XTrack");
            swathTable.put("Track", "Track");
            swathTable.put("geo_Track", "Track");
            swathTable.put("geo_XTrack", "XTrack");
            swathTable.put("product_name", productName);

            // array_name common to the spectrum table
            spectTable.put("array_name", pStr);
            spectTable.put("product_name", productName);

            if (is3D) {

                // 3D data is either ATMS, OMPS, or CrIS
                if ((instrumentName.getShortName() != null) && (instrumentName.getStringValue().equals("ATMS"))) {

                    spectTable.put(SpectrumAdapter.channelIndex_name, "Channel");
                    swathTable.put(SpectrumAdapter.channelIndex_name, "Channel");

                    swathTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
                    swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                    swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
                    spectTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
                    spectTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                    spectTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});

                    spectTable.put(SpectrumAdapter.channelType, "wavelength");
                    spectTable.put(SpectrumAdapter.channels_name, "Channel");
                    spectTable.put(SpectrumAdapter.x_dim_name, "XTrack");
                    spectTable.put(SpectrumAdapter.y_dim_name, "Track");
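
                    // Note: ATMS is a 22-channel microwave sounder; ATMSChannelCenterFrequencies
                    // is expected to hold each channel's center frequency (GHz), used here as
                    // the spectral axis values.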
                    int numChannels = JPSSUtilities.ATMSChannelCenterFrequencies.length;
                    float[] bandArray = new float[numChannels];
                    String[] bandNames = new String[numChannels];
                    for (int bIdx = 0; bIdx < numChannels; bIdx++) {
                        bandArray[bIdx] = JPSSUtilities.ATMSChannelCenterFrequencies[bIdx];
                        bandNames[bIdx] = "Channel " + (bIdx + 1);
                    }
                    spectTable.put(SpectrumAdapter.channelValues, bandArray);
                    spectTable.put(SpectrumAdapter.bandNames, bandNames);

                } else {
                    if (instrumentName.getStringValue().equals("CrIS")) {

                        swathTable.put("XTrack", "dim1");
                        swathTable.put("Track", "dim0");
                        swathTable.put("geo_XTrack", "dim1");
                        swathTable.put("geo_Track", "dim0");
                        swathTable.put("product_name", "CrIS_SDR");
                        swathTable.put(SpectrumAdapter.channelIndex_name, "dim3");
                        swathTable.put(SpectrumAdapter.FOVindex_name, "dim2");

                        spectTable.put(SpectrumAdapter.channelIndex_name, "dim3");
                        spectTable.put(SpectrumAdapter.FOVindex_name, "dim2");
                        spectTable.put(SpectrumAdapter.x_dim_name, "dim1");
                        spectTable.put(SpectrumAdapter.y_dim_name, "dim0");

                    } else if (instrumentName.getStringValue().contains("OMPS")) {

                        spectTable.put(SpectrumAdapter.channelIndex_name, "Channel");
                        swathTable.put(SpectrumAdapter.channelIndex_name, "Channel");

                        swathTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
                        swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                        swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
                        spectTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
                        spectTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                        spectTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});

                        spectTable.put(SpectrumAdapter.channelType, "wavelength");
                        spectTable.put(SpectrumAdapter.channels_name, "Channel");
                        spectTable.put(SpectrumAdapter.x_dim_name, "XTrack");
                        spectTable.put(SpectrumAdapter.y_dim_name, "Track");

                        int numChannels = 200;
                        if (instrumentName.getStringValue().equals("OMPS-TC")) {
                            numChannels = 260;
                        }
                        logger.debug("Setting up OMPS adapter, num channels: " + numChannels);
                        float[] bandArray = new float[numChannels];
                        String[] bandNames = new String[numChannels];
                        for (int bIdx = 0; bIdx < numChannels; bIdx++) {
                            bandArray[bIdx] = bIdx;
                            bandNames[bIdx] = "Channel " + (bIdx + 1);
                        }
                        spectTable.put(SpectrumAdapter.channelValues, bandArray);
                        spectTable.put(SpectrumAdapter.bandNames, bandNames);

                    } else {
                        // sorry, if we can't id the instrument, we can't display the data!
                        throw new VisADException("Unable to determine instrument name");
                    }
                }

            } else {
                swathTable.put("array_dimension_names", new String[] {"Track", "XTrack"});
                swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
            }

            swathTable.put("scale_name", "scale_factor");
            swathTable.put("offset_name", "add_offset");
            swathTable.put("fill_value_name", "_FillValue");
            swathTable.put("range_name", pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1));
            spectTable.put("range_name", pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1));

            // set the valid range hash if data is available
            if (nppPP != null) {
                if (nppPP.getRangeMin(pStr.substring(pStr.lastIndexOf(SEPARATOR_CHAR) + 1)) != null) {
                    swathTable.put("valid_range", "valid_range");
                }
            }

            String unsignedAttributeStr = unsignedFlags.get(pStr);
            if (unsignedAttributeStr.equals("true")) {
                swathTable.put("unsigned", unsignedAttributeStr);
            }

            String unpackFlagStr = unpackFlags.get(pStr);
            if (unpackFlagStr.equals("true")) {
                swathTable.put("unpack", "true");
            }

            // For Suomi NPP data, do the valid range check AFTER applying scale/offset
            swathTable.put("range_check_after_scaling", "true");

            // pass in a GranuleAggregation reader...
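            // In the 3-D sounder/spectrometer cases below, each product gets a SwathAdapter
            // for the spatial view plus a SpectrumAdapter over the channel dimension, and the
            // pair is wrapped in a MultiSpectralData so displays can slice by channel.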
            if (is3D) {
                if (instrumentName.getStringValue().equals("ATMS")) {
                    adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable);
                    adapterCreated = true;
                    SpectrumAdapter sa = new SpectrumAdapter(nppAggReader, spectTable);
                    DataCategory.createCategory("MultiSpectral");
                    categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE");
                    MultiSpectralData msd = new MultiSpectralData((SwathAdapter) adapters[pIdx], sa,
                            "BrightnessTemperature", "BrightnessTemperature", "SuomiNPP", "ATMS");
                    msd.setInitialWavenumber(JPSSUtilities.ATMSChannelCenterFrequencies[0]);
                    multiSpectralData.add(msd);
                }
                if (instrumentName.getStringValue().equals("CrIS")) {
                    if (pStr.contains(crisFilter)) {
                        adapters[pIdx] = new CrIS_SDR_SwathAdapter(nppAggReader, swathTable);
                        adapterCreated = true;
                        CrIS_SDR_Spectrum csa = new CrIS_SDR_Spectrum(nppAggReader, spectTable);
                        DataCategory.createCategory("MultiSpectral");
                        categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE");
                        MultiSpectralData msd = new CrIS_SDR_MultiSpectralData((CrIS_SDR_SwathAdapter) adapters[pIdx], csa);
                        msd.setInitialWavenumber(csa.getInitialWavenumber());
                        msd_CrIS.add(msd);
                    }
                }
                if (instrumentName.getStringValue().contains("OMPS")) {
                    adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable);
                    adapterCreated = true;
                    SpectrumAdapter sa = new SpectrumAdapter(nppAggReader, spectTable);
                    DataCategory.createCategory("MultiSpectral");
                    categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE");
                    MultiSpectralData msd = new MultiSpectralData((SwathAdapter) adapters[pIdx], sa,
                            "RadianceEarth", "RadianceEarth", "SuomiNPP", "OMPS");
                    msd.setInitialWavenumber(0);
                    multiSpectralData.add(msd);
                }
                if (pIdx == 0) {
                    // generate the default subset for ATMS and OMPS
                    if (! instrumentName.getStringValue().equals("CrIS")) {
                        defaultSubset = multiSpectralData.get(pIdx).getDefaultSubset();
                    }
                }

            } else {
                String varName = pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1);
                String varShortName = pStr.substring(pStr.lastIndexOf(SEPARATOR_CHAR) + 1);
                String units = nppPP.getUnits(varShortName);
                if (units == null) units = "Unknown";
                Unit u = null;
                try {
                    u = Parser.parse(units);
                } catch (NoSuchUnitException e) {
                    u = new DerivedUnit(units);
                    logger.debug("Unknown units: " + units);
                } catch (ParseException e) {
                    u = new DerivedUnit(units);
                    logger.debug("Unparseable units: " + units);
                }
                // associate this variable with these units, if not done already
                RealType.getRealType(varName, u);
                adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable);
                adapterCreated = true;
                if (pIdx == 0) {
                    defaultSubset = adapters[pIdx].getDefaultSubset();
                }
                categories = DataCategory.parseCategories("IMAGE");
            }
            // only increment the count if we created an adapter; some products are skipped
            if (adapterCreated) pIdx++;
            adapterCreated = false;
        }
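
        // The CrIS longwave, midwave, and shortwave band MultiSpectralData objects are
        // merged onto a single spectral axis below; 902.25f is a wavenumber (cm^-1, in
        // the longwave band) used as the initial display channel.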
        if (msd_CrIS.size() > 0) {
            try {
                MultiSpectralAggr aggr = new MultiSpectralAggr(msd_CrIS.toArray(new MultiSpectralData[msd_CrIS.size()]));
                aggr.setInitialWavenumber(902.25f);
                multiSpectralData.add(aggr);
                defaultSubset = ((MultiSpectralData) msd_CrIS.get(0)).getDefaultSubset();
            } catch (Exception e) {
                logger.error("Exception: ", e);
            }
        }

        // Merge with pre-set properties
        Hashtable tmpHt = getProperties();
        tmpHt.putAll(properties);
        setProperties(tmpHt);
    }

    public void initAfterUnpersistence() {
        try {
            String zidvPath =
                    McIDASV.getStaticMcv().getStateManager().
                            getProperty(IdvPersistenceManager.PROP_ZIDVPATH, "");
            if (getTmpPaths() != null) {
                // New code for zipped bundles -
                // we want 'sources' to point to wherever the zipped data was unpacked.
                sources.clear();
                // following PersistenceManager.fixBulkDataSources, get the temporary data location
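                // For illustration (paths hypothetical): a stored tmp path whose leading macro
                // token (PersistenceManager.MACRO_ZIDVPATH) stands in for the bundle directory,
                // e.g. "<macro>/SVM15_npp_..._noaa_ops.h5", is expanded below to the actual
                // unpack location, such as "/tmp/mcv_bundle/SVM15_npp_..._noaa_ops.h5".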
                for (Object o : getTmpPaths()) {
                    String tempPath = (String) o;
                    // replace the macro string with the actual path
                    String expandedPath = tempPath.replace(PersistenceManager.MACRO_ZIDVPATH, zidvPath);
                    // we don't want to add nav files to this list!
                    File f = new File(expandedPath);
                    if (! f.getName().matches(JPSSUtilities.SUOMI_GEO_REGEX)) {
                        sources.add(expandedPath);
                    }
                }

                // mjh fix absolute paths in filenameMap
                logger.debug("original filenameMap: {}", filenameMap);
                Iterator keyIterator = filenameMap.keySet().iterator();
                while (keyIterator.hasNext()) {
                    String keyStr = (String) keyIterator.next();
                    List<String> fileNames = (List<String>) filenameMap.get(keyStr);
                    for (int i = 0; i < fileNames.size(); i++) {
                        String name = fileNames.get(i);
                        int lastSeparator = name.lastIndexOf(File.separatorChar);
                        String sub = name.substring(0, lastSeparator);
                        name = name.replace(sub, zidvPath);
                        fileNames.set(i, name);
                    }
                }
                logger.debug("filenameMap with zidvPath: {}", filenameMap);
            } else {
                // leave in original unpersistence code - this will get run for unzipped bundles.
                // TODO: do we need to handle the "Save with relative paths" case specially?
                if (! oldSources.isEmpty()) {
                    sources.clear();
                    for (Object o : oldSources) {
                        sources.add((String) o);
                    }
                }
            }
            oldSources.clear();
            setup();
        } catch (Exception e) {
            logger.error("Exception: ", e);
        }
    }

    /* (non-Javadoc)
     * @see edu.wisc.ssec.mcidasv.data.HydraDataSource#canSaveDataToLocalDisk()
     */
    @Override
    public boolean canSaveDataToLocalDisk() {
        // At present, Suomi data is always data granules on disk
        return true;
    }

    /* (non-Javadoc)
     * @see ucar.unidata.data.DataSourceImpl#saveDataToLocalDisk(java.lang.String, java.lang.Object, boolean)
     */
    @Override
    protected List saveDataToLocalDisk(String filePrefix, Object loadId,
                                       boolean changeLinks) throws Exception {
        // need to make a list of all data granule files,
        // PLUS all geolocation granule files, but only if accessed separately!
        List<String> fileList = new ArrayList<String>();
        for (Object o : sources) {
            fileList.add((String) o);
        }
        for (String s : geoSources) {
            fileList.add(s);
        }
        return fileList;
    }

    public List<String> getOldSources() {
        return oldSources;
    }

    public void setOldSources(List<String> oldSources) {
        this.oldSources = oldSources;
    }

    public Map<String, List<String>> getFilenameMap() {
        return filenameMap;
    }

    public void setFilenameMap(Map<String, List<String>> filenameMap) {
        this.filenameMap = filenameMap;
    }

    /**
     * Make and insert the {@link DataChoice DataChoices} for this
     * {@code DataSource}.
     */
    public void doMakeDataChoices() {

        // special loop for CrIS, ATMS, and OMPS data
        if (multiSpectralData.size() > 0) {
            for (int k = 0; k < multiSpectralData.size(); k++) {
                MultiSpectralData adapter = multiSpectralData.get(k);
                DataChoice choice = null;
                try {
                    choice = doMakeDataChoice(k, adapter);
                    choice.setObjectProperty(Constants.PROP_GRANULE_COUNT,
                            getProperty(Constants.PROP_GRANULE_COUNT, "1 Granule"));
                    msdMap.put(choice.getName(), adapter);
                    addDataChoice(choice);
                } catch (Exception e) {
                    logger.error("Exception: ", e);
                }
            }
            return;
        }

        // all other data (VIIRS and 2D EDRs)
        if (adapters != null) {
            for (int idx = 0; idx < adapters.length; idx++) {
                DataChoice choice = null;
                try {
                    choice = doMakeDataChoice(idx, adapters[idx].getArrayName());
                    choice.setObjectProperty(Constants.PROP_GRANULE_COUNT,
                            getProperty(Constants.PROP_GRANULE_COUNT, "1 Granule"));
                } catch (Exception e) {
                    e.printStackTrace();
                    logger.error("doMakeDataChoice failed");
                }

                if (choice != null) {
                    addDataChoice(choice);
                }
            }
        }
    }
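
    /**
     * Create a {@link DataChoice} for the adapter at the given index, resolving
     * the "BrightnessTemperatureOrReflectance" naming quirk for Imagery EDRs.
     *
     * @param idx adapter index for this choice
     * @param var variable (array) name backing the choice
     *
     * @return the new data choice
     *
     * @throws Exception if choice creation fails
     */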
    private DataChoice doMakeDataChoice(int idx, String var) throws Exception {
        String name = var;
        DataSelection dataSel = new MultiDimensionSubset(defaultSubset);
        Hashtable subset = new Hashtable();
        subset.put(new MultiDimensionSubset(), dataSel);
        // TJJ Hack check for the uber-odd case where the data type varies for the same variable.
        // If it's M12 - M16, it's a BrightnessTemperature, otherwise Reflectance
        if (name.endsWith("BrightnessTemperatureOrReflectance")) {
            name = name.substring(0, name.length() - "BrightnessTemperatureOrReflectance".length());
            if (whichEDR.matches("M12|M13|M14|M15|M16")) {
                name = name + "BrightnessTemperature";
            } else {
                name = name + "Reflectance";
            }
        }
        DirectDataChoice ddc = new DirectDataChoice(this, idx, name, name, categories, subset);
        return ddc;
    }

    private DataChoice doMakeDataChoice(int idx, MultiSpectralData adapter) throws Exception {
        String name = adapter.getName();
        DataSelection dataSel = new MultiDimensionSubset(defaultSubset);
        Hashtable subset = new Hashtable();
        subset.put(MultiDimensionSubset.key, dataSel);
        subset.put(MultiSpectralDataSource.paramKey, adapter.getParameter());
        // TJJ Hack check for the uber-odd case where the data type varies for the same variable.
        // If it's M12 - M16, it's a BrightnessTemperature, otherwise Reflectance
        if (name.endsWith("BrightnessTemperatureOrReflectance")) {
            name = name.substring(0, name.length() - "BrightnessTemperatureOrReflectance".length());
            if (whichEDR.matches("M12|M13|M14|M15|M16")) {
                name = name + "BrightnessTemperature";
            } else {
                name = name + "Reflectance";
            }
        }
        DirectDataChoice ddc = new DirectDataChoice(this, Integer.valueOf(idx), name, name, categories, subset);
        ddc.setProperties(subset);
        return ddc;
    }

    /**
     * Check to see if this {@code SuomiNPPDataSource} is equal to the object
     * in question.
     *
     * @param o object in question
     *
     * @return true if they are the same or equivalent objects
     */
    public boolean equals(Object o) {
        if (! (o instanceof SuomiNPPDataSource)) {
            return false;
        }
        return (this == (SuomiNPPDataSource) o);
    }

    public MultiSpectralData getMultiSpectralData() {
        return multiSpectralData.get(0);
    }

    public MultiSpectralData getMultiSpectralData(DataChoice choice) {
        return msdMap.get(choice.getName());
    }

    public String getDatasetName() {
        return filename;
    }

    /**
     * @return the qfMap
     */
    public HashMap<String, QualityFlag> getQfMap() {
        return qfMap;
    }

    public void setDatasetName(String name) {
        filename = name;
    }

    public HashMap getSubsetFromLonLatRect(MultiDimensionSubset select, GeoSelection geoSelection) {
        GeoLocationInfo ginfo = geoSelection.getBoundingBox();
        return adapters[0].getSubsetFromLonLatRect(select.getSubset(), ginfo.getMinLat(), ginfo.getMaxLat(),
                ginfo.getMinLon(), ginfo.getMaxLon());
    }

    public synchronized Data getData(DataChoice dataChoice, DataCategory category,
                                     DataSelection dataSelection, Hashtable requestProperties)
            throws VisADException, RemoteException {
        return this.getDataInner(dataChoice, category, dataSelection, requestProperties);
    }
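
    /**
     * Do the real work of fetching data for a choice: resolve the matching
     * adapter, apply any geographic/stride subsetting, and wrap the result
     * in a time-indexed {@link FieldImpl}.
     *
     * @param dataChoice choice being loaded
     * @param category ignored here
     * @param dataSelection selection holding any geo subset and channel properties
     * @param requestProperties request properties from the caller
     *
     * @return the data, or null if it could not be read
     *
     * @throws VisADException problem creating the VisAD data object
     * @throws RemoteException problem with a remote VisAD object
     */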
    protected Data getDataInner(DataChoice dataChoice, DataCategory category,
                                DataSelection dataSelection, Hashtable requestProperties)
            throws VisADException, RemoteException {

        // this hack keeps the HydraImageProbe from doing a getData()
        // TODO: need to use categories?
        if (requestProperties != null) {
            if ((requestProperties.toString()).equals("{prop.requester=MultiSpectral}")) {
                return null;
            }
        }

        GeoLocationInfo ginfo = null;
        GeoSelection geoSelection = null;

        if ((dataSelection != null) && (dataSelection.getGeoSelection() != null)) {
            geoSelection = (dataSelection.getGeoSelection().getBoundingBox() != null)
                    ? dataSelection.getGeoSelection()
                    : dataChoice.getDataSelection().getGeoSelection();
        }

        if (geoSelection != null) {
            ginfo = geoSelection.getBoundingBox();
        }

        Data data = null;
        if (adapters == null) {
            return data;
        }

        MultiDimensionAdapter adapter = null;

        // pick the adapter with the same index as the current data choice
        int aIdx = 0;
        List<DataChoice> dcl = getDataChoices();
        for (DataChoice dc : dcl) {
            if (dc.getName().equals(dataChoice.getName())) {
                aIdx = dcl.indexOf(dc);
                break;
            }
        }

        adapter = adapters[aIdx];

        try {
            HashMap subset = null;
            if (ginfo != null) {
                subset = adapter.getSubsetFromLonLatRect(ginfo.getMinLat(), ginfo.getMaxLat(),
                        ginfo.getMinLon(), ginfo.getMaxLon(),
                        geoSelection.getXStride(),
                        geoSelection.getYStride(),
                        geoSelection.getZStride());
            } else {

                MultiDimensionSubset select = null;
                Hashtable table = dataChoice.getProperties();
                Enumeration keys = table.keys();
                while (keys.hasMoreElements()) {
                    Object key = keys.nextElement();
                    logger.debug("Key: " + key.toString());
                    if (key instanceof MultiDimensionSubset) {
                        select = (MultiDimensionSubset) table.get(key);
                    }
                }
                subset = select.getSubset();
                logger.debug("Subset size: " + subset.size());

                if (dataSelection != null) {
                    Hashtable props = dataSelection.getProperties();
                    if (props != null) {
                        if (props.containsKey(SpectrumAdapter.channelIndex_name)) {
                            logger.debug("Props contains channel index key...");
                            double[] coords = (double[]) subset.get(SpectrumAdapter.channelIndex_name);
                            int idx = ((Integer) props.get(SpectrumAdapter.channelIndex_name)).intValue();
                            coords[0] = (double) idx;
                            coords[1] = (double) idx;
                            coords[2] = (double) 1;
                        }
                    }
                }
            }

            if (subset != null) {
                data = adapter.getData(subset);
                data = applyProperties(data, requestProperties, subset, aIdx);
            }
        } catch (Exception e) {
            logger.error("getData Exception: ", e);
        }

        // inq1429: return a FieldImpl with a time dimension
        if (data != null) {
            List dateTimes = new ArrayList();
            dateTimes.add(new DateTime(theDate));
            SampledSet timeSet = (SampledSet) ucar.visad.Util.makeTimeSet(dateTimes);
            FunctionType ftype = new FunctionType(RealType.Time, data.getType());
            FieldImpl fi = new FieldImpl(ftype, timeSet);
            fi.setSample(0, data);
            data = fi;
        }
        return data;
    }

    protected Data applyProperties(Data data, Hashtable requestProperties, HashMap subset, int adapterIndex)
            throws VisADException, RemoteException {
        Data new_data = data;

        if (requestProperties == null) {
            new_data = data;
            return new_data;
        }

        return new_data;
    }
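
    /**
     * Build the preview/selection panel shown in the Field Selector for a
     * choice, using the first time step of the data as the preview image.
     *
     * @param components list to which the new {@link DataSelectionComponent} is added
     * @param dataChoice choice the preview is built for
     */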
    protected void initDataSelectionComponents(
            List<DataSelectionComponent> components,
            final DataChoice dataChoice) {

        try {
            // inq1429: need to handle FieldImpl here
            FieldImpl thing = (FieldImpl) dataChoice.getData(null);
            FlatField image;
            if (GridUtil.isTimeSequence(thing)) {
                image = (FlatField) thing.getSample(0);
            } else {
                image = (FlatField) thing;
            }
            if (image != null) {
                PreviewSelection ps = new PreviewSelection(dataChoice, image, null);
                // region subsetting is not yet implemented for CrIS data
                if (instrumentName.getStringValue().equals("CrIS")) {
                    ps.enableSubsetting(false);
                }
                components.add(ps);
            }
        } catch (Exception e) {
            logger.error("Can't make PreviewSelection: ", e);
        }
    }

    /**
     * Add Integer->String translations to IDV's "translations" resource,
     * so they will be made available to the data probe of Image Displays.
     */
    public void initQfTranslations() {

        Map<String, Map<Integer, String>> translations =
                getIdv().getResourceManager().getTranslationsHashtable();

        for (String qfKey : qfMap.keySet()) {
            // This string needs to match up with the data choice name:
            String qfKeySubstr = qfKey.replace("All_Data/", "");
            // check if we've already added a map for this QF
            if (! translations.containsKey(qfKeySubstr)) {
                Map<String, String> hm = qfMap.get(qfKey).getHm();
                Map<Integer, String> newMap =
                        new HashMap<Integer, String>(hm.size());
                for (String dataValueKey : hm.keySet()) {
                    // convert Map<String, String> to Map<Integer, String>
                    Integer intKey = Integer.parseInt(dataValueKey);
                    newMap.put(intKey, hm.get(dataValueKey));
                }
                translations.put(qfKeySubstr, newMap);
            }
        }
    }

}