/*
 * This file is part of McIDAS-V
 *
 * Copyright 2007-2018
 * Space Science and Engineering Center (SSEC)
 * University of Wisconsin - Madison
 * 1225 W. Dayton Street, Madison, WI 53706, USA
 * https://www.ssec.wisc.edu/mcidas
 *
 * All Rights Reserved
 *
 * McIDAS-V is built on Unidata's IDV and SSEC's VisAD libraries, and
 * some McIDAS-V source code is based on IDV and VisAD source code.
 *
 * McIDAS-V is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * (at your option) any later version.
 *
 * McIDAS-V is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser Public License for more details.
 *
 * You should have received a copy of the GNU Lesser Public License
 * along with this program. If not, see http://www.gnu.org/licenses.
 */

package edu.wisc.ssec.mcidasv.data.hydra;

import edu.wisc.ssec.mcidasv.Constants;
import edu.wisc.ssec.mcidasv.McIDASV;
import edu.wisc.ssec.mcidasv.PersistenceManager;
import edu.wisc.ssec.mcidasv.data.HydraDataSource;
import edu.wisc.ssec.mcidasv.data.PreviewSelection;
import edu.wisc.ssec.mcidasv.data.QualityFlag;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FilenameFilter;
import java.rmi.RemoteException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SimpleTimeZone;
import java.util.StringTokenizer;

import javax.swing.JCheckBox;
import javax.swing.JOptionPane;

import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.output.XMLOutputter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ucar.ma2.Array;
import ucar.ma2.ArrayFloat;
import ucar.ma2.DataType;
import ucar.nc2.Attribute;
import ucar.nc2.Dimension;
import ucar.nc2.Group;
import ucar.nc2.NetcdfFile;
import ucar.nc2.Variable;
import ucar.nc2.dataset.VariableDS;
import ucar.unidata.data.DataCategory;
import ucar.unidata.data.DataChoice;
import ucar.unidata.data.DataSelection;
import ucar.unidata.data.DataSelectionComponent;
import ucar.unidata.data.DataSourceDescriptor;
import ucar.unidata.data.DirectDataChoice;
import ucar.unidata.data.GeoLocationInfo;
import ucar.unidata.data.GeoSelection;
import ucar.unidata.data.grid.GridUtil;
import ucar.unidata.idv.IdvPersistenceManager;
import ucar.unidata.util.Misc;

import visad.Data;
import visad.DateTime;
import visad.DerivedUnit;
import visad.FieldImpl;
import visad.FlatField;
import visad.FunctionType;
import visad.RealType;
import visad.SampledSet;
import visad.Unit;
import visad.VisADException;
import visad.data.units.NoSuchUnitException;
import visad.data.units.ParseException;
import visad.data.units.Parser;
import visad.util.Util;
/**
 * A data source for NPOESS Preparatory Project (Suomi NPP) data
 * and JPSS data (JPSS-1 is officially NOAA-20).
 *
 * This should probably move, but we are placing it here for now
 * since we are leveraging some existing code used for HYDRA.
 */

public class SuomiNPPDataSource extends HydraDataSource {

    private static final Logger logger = LoggerFactory.getLogger(SuomiNPPDataSource.class);

    /** Sources file */
    protected String filename;

    // for loading bundles, store granule lists and geo lists here
    protected List<String> oldSources = new ArrayList<>();
    protected List<String> geoSources = new ArrayList<>();

    // integrity map for grouping sets/aggregations of selected products
    Map<String, List<String>> filenameMap = null;

    protected MultiDimensionReader nppAggReader;

    protected MultiDimensionAdapter[] adapters = null;

    private List<MultiSpectralData> msd_CrIS = new ArrayList<>();
    private List<MultiSpectralData> multiSpectralData = new ArrayList<>();
    private Map<String, MultiSpectralData> msdMap = new HashMap<>();
    private Map<String, QualityFlag> qfMap = new HashMap<>();
    private Map<String, float[]> lutMap = new HashMap<>();

    private static final String DATA_DESCRIPTION = "JPSS Data";

    // instrument related variables and flags
    Attribute instrumentName = null;
    private String productName = null;

    // product related variables and flags
    String whichEDR = "";

    // for now, we are only handling CrIS variables that match this filter and SCAN dimensions
    private String crisFilter = "ES_Real";

    private Map<String, double[]> defaultSubset;
    public TrackAdapter track_adapter;

    private List<DataCategory> categories;
    private boolean isCombinedProduct = false;
    private boolean nameHasBeenSet = false;

    private boolean isNOAA;

    // need our own separator char since it's always Unix-style in the Suomi NPP files
    private static final String SEPARATOR_CHAR = "/";

    // date formatter for NASA L1B data, ex 2016-02-07T00:06:00.000Z
    SimpleDateFormat sdfNASA = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");

    // LUTs for NASA L1B data
    float[] m12LUT = null;
    float[] m13LUT = null;
    float[] m14LUT = null;
    float[] m15LUT = null;
    float[] m16LUT = null;
    float[] i04LUT = null;
    float[] i05LUT = null;

    // Map to match NASA variables to units (XML Product Profiles used for NOAA)
    Map<String, String> unitsNASA = new HashMap<>();

    // date formatter for converting Suomi NPP day/time to something we can use
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss.SSS");

    // date formatter for how we want to show granule day/time on display
    SimpleDateFormat sdfOut = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");

    // MJH keep track of date to add time dim to FieldImpl
    Date theDate;

    /**
     * Zero-argument constructor for construction via unpersistence.
     */
    public SuomiNPPDataSource() {
    }

    public SuomiNPPDataSource(String fileName) throws VisADException {
        this(null, Misc.newList(fileName), null);
        logger.debug("filename only constructor call..");
    }

    /**
     * Construct a new Suomi NPP HDF5 data source.
     * @param descriptor descriptor for this {@code DataSource}
     * @param fileName name of the hdf file to read
     * @param properties hashtable of properties
     *
     * @throws VisADException problem creating data
     */
    public SuomiNPPDataSource(DataSourceDescriptor descriptor,
                              String fileName, Hashtable properties)
            throws VisADException {
        this(descriptor, Misc.newList(fileName), properties);
        logger.debug("SuomiNPPDataSource called, single file selected: " + fileName);
    }

    /**
     * Construct a new Suomi NPP HDF5 data source.
     *
     * @param descriptor Descriptor for this {@code DataSource}.
     * @param newSources List of filenames.
     * @param properties Hashtable of properties.
     *
     * @throws VisADException problem creating data
     */
    public SuomiNPPDataSource(DataSourceDescriptor descriptor,
                              List<String> newSources, Hashtable properties)
            throws VisADException {
        super(descriptor, newSources, DATA_DESCRIPTION, properties);
        logger.debug("SuomiNPPDataSource constructor called, file count: " + sources.size());

        filename = (String) sources.get(0);
        setDescription(DATA_DESCRIPTION);

        // NASA data is UTC, pre-set time zone
        SimpleTimeZone stz = new SimpleTimeZone(0, "UTC");
        sdfNASA.setTimeZone(stz);

        // build the filename map - matches each product to the set of files for that product
        filenameMap = new HashMap<>();

        // Pass 1, populate the list of products selected
        for (Object o : sources) {
            String filename = (String) o;
            // everything up to the first underscore is the product identifier
            int lastSeparator = filename.lastIndexOf(File.separatorChar);
            int firstUnderscore = filename.indexOf("_", lastSeparator + 1);
            String prodStr = filename.substring(lastSeparator + 1, firstUnderscore);
            if (! filenameMap.containsKey(prodStr)) {
                List<String> l = new ArrayList<String>();
                filenameMap.put(prodStr, l);
            }
        }

        // Pass 2, create a list of files for each product in this data source
        for (Object o : sources) {
            String filename = (String) o;
            int lastSeparator = filename.lastIndexOf(File.separatorChar);
            int firstUnderscore = filename.indexOf("_", lastSeparator + 1);
            String prodStr = filename.substring(lastSeparator + 1, firstUnderscore);
            List<String> l = filenameMap.get(prodStr);
            l.add(filename);
            filenameMap.put(prodStr, l);
        }
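
        // Illustrative example (file names hypothetical): two granules such as
        //   SVM15_npp_d20120229_t0849107_..._ops.h5
        //   SVM15_npp_d20120229_t0850349_..._ops.h5
        // both key under "SVM15", so filenameMap groups every granule of a
        // product together for later aggregation.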

        versionCheck();
        setup();
        initQfTranslations();
    }

    // alert user about possible VIIRS plugin compatibility issues
    private void versionCheck() {
        boolean pluginDialog = getIdv().getStore().get(Constants.PREF_VIIRS_PLUGIN, false);
        // don't create a dialog though if we are running in background/offscreen mode
        boolean offScreen = getIdv().getArgsManager().getIsOffScreen();
        if (! offScreen) {
            if (! pluginDialog) {
                String msg = "There has been an update to the VIIRS Formulas plugin.\n" +
                    "If you use the plugin, you will need to uninstall the currently installed\n" +
                    "version of the plugin, and install the plugin called \"VIIRS Formulas\".";
                JCheckBox jcbPlugin = new JCheckBox("Do not show this message again");
                Object[] params = { msg, jcbPlugin };
                JOptionPane.showMessageDialog(null, params, "Plugin Compatibility Notice", JOptionPane.OK_OPTION);
                boolean dontShow = jcbPlugin.isSelected();
                getIdv().getStore().put(Constants.PREF_VIIRS_PLUGIN, dontShow);
            }
        } else {
            logger.warn("Make sure your VIIRS plugin is current, there was an update with McV 1.5");
        }
    }

    public void setup() throws VisADException {

        // which format, NASA or NOAA?
        isNOAA = false;

        // store filenames for possible bundle unpersistence
        for (Object o : sources) {
            oldSources.add((String) o);
        }

        // time zone for product labels
        SimpleTimeZone stz = new SimpleTimeZone(0, "GMT");
        sdf.setTimeZone(stz);
        sdfOut.setTimeZone(stz);

        // looking to populate 3 things - path to lat, path to lon, path to relevant products
        String pathToLat = null;
        String pathToLon = null;
        Set<String> pathToProducts = new LinkedHashSet<>();
        Map<String, String> prodToDesc = new HashMap<>();

        // flag to differentiate VIIRS from one of the other Suomi sensors
        boolean isVIIRS = true;

        // check source filenames to see if this is a combined product. everything
        // from last file separator to first underscore should be product info
        int lastSeparator = filename.lastIndexOf(File.separatorChar);
        int firstUnderscore = filename.indexOf("_", lastSeparator + 1);
        String prodStr = filename.substring(lastSeparator + 1, firstUnderscore);
        // only do this check for NOAA data
        if (filename.endsWith(".h5")) {
            isNOAA = true;
            StringTokenizer st = new StringTokenizer(prodStr, "-");
            logger.debug("SNPPDS check for embedded GEO, tokenizing: " + prodStr);
            while (st.hasMoreTokens()) {
                String singleProd = st.nextToken();
                for (int i = 0; i < JPSSUtilities.geoProductIDs.length; i++) {
                    if (singleProd.equals(JPSSUtilities.geoProductIDs[i])) {
                        logger.debug("Setting isCombinedProduct true, Found embedded GEO: " + singleProd);
                        isCombinedProduct = true;
                        break;
                    }
                }
            }
        }

        // various metadata we'll need to gather on a per-product basis
        Map<String, String> unsignedFlags = new LinkedHashMap<>();
        Map<String, String> unpackFlags = new LinkedHashMap<>();

        // geo product IDs for each granule
        Set<String> geoProductIDs = new LinkedHashSet<>();

        // aggregations will use sets of NetCDFFile readers
        List<NetCDFFile> ncdfal = new ArrayList<>();

        // we should be able to find an XML Product Profile for each data/product type
        SuomiNPPProductProfile nppPP = null;
        // and also Profile metadata for geolocation variables
        boolean haveGeoMetaData = false;

        // number of source granules which make up the data source
        int granuleCount = 1;

        try {

            nppPP = new SuomiNPPProductProfile();

            // for each source file provided, find the appropriate geolocation,
            // get the nominal time and various other granule-level metadata
            Iterator keyIterator = filenameMap.keySet().iterator();
            while (keyIterator.hasNext()) {
                String keyStr = (String) keyIterator.next();
                List fileNames = (List) filenameMap.get(keyStr);
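                // Note (assumption implied by the logic below): every product key is
                // expected to map to the same number of granules, so the count taken
                // from the last key examined drives the aggregation loop later on.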
                granuleCount = fileNames.size();
                setProperty(Constants.PROP_GRANULE_COUNT, granuleCount + " Granule");
                for (int fileCount = 0; fileCount < granuleCount; fileCount++) {
                    // need to open the main NetCDF file to determine the geolocation product
                    NetcdfFile ncfile = null;
                    String fileAbsPath = null;
                    try {
                        fileAbsPath = (String) fileNames.get(fileCount);
                        logger.debug("Trying to open file: " + fileAbsPath);
                        ncfile = NetcdfFile.open(fileAbsPath);
                        if (! isCombinedProduct) {
                            if (isNOAA) {
                                Attribute a = ncfile.findGlobalAttribute("N_GEO_Ref");
                                logger.debug("Value of GEO global attribute: " + a.getStringValue());
                                String tmpGeoProductID = a.getStringValue();
                                geoProductIDs.add(tmpGeoProductID);
                            } else {
                                geoProductIDs.add(keyStr.replace("L1B", "GEO"));
                            }
                        }
                        Group rg = ncfile.getRootGroup();

                        List<Group> gl = rg.getGroups();
                        if (gl != null) {
                            for (Group g : gl) {
                                logger.trace("Group name: " + g.getFullName());
                                if (isNOAA) {
                                    // when we find the Data_Products group, go down another group level and pull out
                                    // what we will use for nominal day and time (for now anyway).
                                    // XXX TJJ fileCount check is so we don't count the GEO file in time array!
                                    if (g.getFullName().contains("Data_Products")
                                            && (fileCount != fileNames.size())) {
                                        List<Group> dpg = g.getGroups();

                                        // cycle through once looking for XML Product Profiles
                                        for (Group subG : dpg) {

                                            String subName = subG.getFullName();
                                            // use actual product, not geolocation, to id XML Product Profile
                                            if (! subName.contains("-GEO")) {
                                                // determine the instrument name (VIIRS, ATMS, CrIS, OMPS)
                                                instrumentName = subG.findAttribute("Instrument_Short_Name");

                                                // note any EDR products, will need to check for and remove
                                                // fill scans later
                                                Attribute adtt = subG.findAttribute("N_Dataset_Type_Tag");
                                                if (adtt != null) {
                                                    String baseName = adtt.getStringValue();
                                                    if ((baseName != null) && (baseName.equals("EDR"))) {
                                                        // have to loop through sub group variables to determine band
                                                        List<Variable> tmpVar = subG.getVariables();
                                                        for (Variable v : tmpVar) {
                                                            // if Imagery EDR attribute for band is specified, save it
                                                            Attribute mBand = v.findAttribute("Band_ID");
                                                            if (mBand != null) {
                                                                whichEDR = mBand.getStringValue();
                                                            }
                                                        }
                                                    }
                                                }

                                                // This is also where we find the attribute which tells us which
                                                // XML Product Profile to use!
                                                Attribute axpp = subG.findAttribute("N_Collection_Short_Name");
                                                if (axpp != null) {
                                                    String baseName = axpp.getStringValue();
                                                    productName = baseName;
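
                                                    // Illustrative example (collection name hypothetical): an
                                                    // N_Collection_Short_Name such as "VIIRS-M16-SDR" is the key
                                                    // used to look up that product's XML Product Profile below.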

                                                    // TJJ Apr 2018
                                                    // Hack so we can look at CrIS Full Spectrum, until we can
                                                    // track down existence of an official Product Profile for it.
                                                    // http://mcidas.ssec.wisc.edu/inquiry-v/?inquiry=2634
                                                    // The regular SDR profile lets us visualize it.
                                                    if (productName.equals("CrIS-FS-SDR")) productName = "CrIS-SDR";

                                                    String productProfileFileName =
                                                        nppPP.getProfileFileName(productName);
                                                    logger.trace("Found profile: " + productProfileFileName);
                                                    if (productProfileFileName == null) {
                                                        throw new Exception("XML Product Profile not found in catalog");
                                                    }
                                                    try {
                                                        nppPP.addMetaDataFromFile(productProfileFileName);
                                                    } catch (Exception nppppe) {
                                                        logger.error("Error parsing XML Product Profile: "
                                                                + productProfileFileName);
                                                        throw new Exception("XML Product Profile Error", nppppe);
                                                    }
                                                }
                                            }
                                        }

                                        // 2nd pass through sub-group to extract date/time for aggregation
                                        for (Group subG : dpg) {
                                            List<Variable> vl = subG.getVariables();
                                            for (Variable v : vl) {
                                                Attribute aDate = v.findAttribute("AggregateBeginningDate");
                                                Attribute aTime = v.findAttribute("AggregateBeginningTime");
                                                // did we find the attributes we are looking for?
                                                if ((aDate != null) && (aTime != null)) {
                                                    // set time for display to day/time of 1st granule examined
                                                    if (! nameHasBeenSet) {
                                                        String sDate = aDate.getStringValue();
                                                        String sTime = aTime.getStringValue();
                                                        logger.debug("For day/time, using: " + sDate
                                                                + sTime.substring(0, sTime.indexOf('Z') - 3));
                                                        Date d = sdf.parse(sDate
                                                                + sTime.substring(0, sTime.indexOf('Z') - 3));
                                                        theDate = d;
                                                        setName(instrumentName.getStringValue() + " "
                                                                + sdfOut.format(d));
                                                        nameHasBeenSet = true;
                                                    }
                                                    break;
                                                }
                                            }
                                        }
                                        if (! nameHasBeenSet) {
                                            throw new VisADException("No date time found in Suomi NPP granule");
                                        }
                                    }
                                } else {
                                    // NASA data - date/time from global attribute
                                    // set time for display to day/time of 1st granule examined
                                    Attribute timeStartNASA = ncfile.findGlobalAttribute("time_coverage_start");
                                    Date d = sdfNASA.parse(timeStartNASA.getStringValue());
                                    theDate = d;
                                    if (! nameHasBeenSet) {
                                        instrumentName = ncfile.findGlobalAttribute("instrument");
                                        setName(instrumentName.getStringValue() + " " + sdfOut.format(d));
                                        nameHasBeenSet = true;
                                    }
                                }
                            }
                        }
                    } catch (Exception e) {
                        logger.warn("Exception during processing of file: " + fileAbsPath);
                        throw e;
                    } finally {
                        ncfile.close();
                    }
                }

            }

            // build each union aggregation element
            Iterator<String> iterator = geoProductIDs.iterator();
            for (int elementNum = 0; elementNum < granuleCount; elementNum++) {

                String s = null;

                // build an XML (NCML actually) representation of the union aggregation of these two files
                Namespace ns = Namespace.getNamespace("http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2");
                Element root = new Element("netcdf", ns);
                Document document = new Document(root);

                Element agg = new Element("aggregation", ns);
                agg.setAttribute("type", "union");

                // TJJ - Loop over filename map, could be several products that need to be aggregated
                Set set = filenameMap.keySet();
                Iterator mapIter = set.iterator();
                while (mapIter.hasNext()) {
                    String key = (String) mapIter.next();
                    List l = (List) filenameMap.get(key);
                    Element fData = new Element("netcdf", ns);
                    fData.setAttribute("location", (String) l.get(elementNum));
                    agg.addContent(fData);
                    s = (String) l.get(elementNum);
                }

                String geoFilename = null;
                Element fGeo = new Element("netcdf", ns);
                if (! isCombinedProduct) {

                    if (isNOAA) {
                        geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1);
                        // check if we have the whole file name or just the prefix
                        String geoProductID = iterator.next();
                        if (geoProductID.endsWith("h5")) {
                            geoFilename += geoProductID;
                        } else {
                            geoFilename += geoProductID;
                            geoFilename += s.substring(s.lastIndexOf(File.separatorChar) + 6);
                        }
                        // Be sure the file specified by the N_GEO_Ref global attribute really is there.
                        File tmpGeo = new File(geoFilename);
                        if (! tmpGeo.exists()) {
                            // Ok, the expected file defined (supposedly) exactly by a global att is not there...
                            // We need to check for similar geo files with different creation dates
                            String geoFileRelative = geoFilename
                                    .substring(geoFilename.lastIndexOf(File.separatorChar) + 1);
                            // also check for Terrain Corrected version of geo
                            String geoTerrainCorrected = geoFileRelative;
                            geoTerrainCorrected = geoTerrainCorrected.replace("OD", "TC");
                            geoTerrainCorrected = geoTerrainCorrected.replace("MG", "TC");

                            // now we make a file filter, and see if a matching geo file is present
                            File fList = new File(
                                geoFilename.substring(0, geoFilename.lastIndexOf(File.separatorChar) + 1)); // current directory

                            FilenameFilter geoFilter = new FilenameFilter() {
                                public boolean accept(File dir, String name) {
                                    return name.matches(JPSSUtilities.SUOMI_GEO_REGEX_NOAA);
                                }
                            };

                            File[] files = fList.listFiles(geoFilter);
                            for (File file : files) {
                                if (file.isDirectory()) {
                                    continue;
                                }
                                // get the file name for convenience
                                String fName = file.getName();
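
                                // Illustrative example (product IDs shown are typical VIIRS geo types):
                                // if the expected ellipsoid geo granule was "GMODO_npp_d2012...", the
                                // "OD" -> "TC" swap above means a terrain-corrected "GMTCO_npp_d2012..."
                                // file can also satisfy the prefix checks below.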
                                // is it one of the standard Ellipsoid geo types we are looking for?
                                if (fName.substring(0, 5).equals(geoFileRelative.substring(0, 5))) {
                                    int geoStartIdx = geoFileRelative.indexOf("_d");
                                    int prdStartIdx = fName.indexOf("_d");
                                    String s1 = geoFileRelative.substring(geoStartIdx,
                                        geoStartIdx + JPSSUtilities.NOAA_CREATION_DATE_INDEX);
                                    String s2 = fName.substring(prdStartIdx,
                                        prdStartIdx + JPSSUtilities.NOAA_CREATION_DATE_INDEX);
                                    if (s1.equals(s2)) {
                                        geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1) + fName;
                                        break;
                                    }
                                }
                                // same check, but for Terrain Corrected version
                                if (fName.substring(0, 5).equals(geoTerrainCorrected.substring(0, 5))) {
                                    int geoStartIdx = geoTerrainCorrected.indexOf("_d");
                                    int prdStartIdx = fName.indexOf("_d");
                                    String s1 = geoTerrainCorrected.substring(geoStartIdx,
                                        geoStartIdx + JPSSUtilities.NOAA_CREATION_DATE_INDEX);
                                    String s2 = fName.substring(prdStartIdx,
                                        prdStartIdx + JPSSUtilities.NOAA_CREATION_DATE_INDEX);
                                    if (s1.equals(s2)) {
                                        geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1) + fName;
                                        break;
                                    }
                                }
                            }
                        }
                    } else {
                        // NASA format
                        geoFilename = JPSSUtilities.replaceLast(s, "L1B", "GEO");
                        // get list of files in current directory
                        File fList =
                            new File(geoFilename.substring(0, geoFilename.lastIndexOf(File.separatorChar) + 1));
                        // make a NASA style file filter, and see if a matching geo file is present
                        FilenameFilter geoFilter = new FilenameFilter() {
                            public boolean accept(File dir, String name) {
                                return name.matches(JPSSUtilities.SUOMI_GEO_REGEX_NASA);
                            }
                        };
                        File[] files = fList.listFiles(geoFilter);
                        for (File file : files) {
                            if (file.isDirectory()) {
                                continue;
                            }
                            // get the file name for convenience
                            String fName = file.getName();
                            String tmpStr = geoFilename.substring(s.lastIndexOf(File.separatorChar) + 1,
                                s.lastIndexOf(File.separatorChar) + (JPSSUtilities.NASA_CREATION_DATE_INDEX + 1));
                            if (fName.substring(0, JPSSUtilities.NASA_CREATION_DATE_INDEX)
                                    .equals(tmpStr.substring(0, JPSSUtilities.NASA_CREATION_DATE_INDEX))) {
                                geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1) + fName;
                                break;
                            }
                        }
                    }
                    logger.debug("Determined GEO file name should be: " + geoFilename);
                    fGeo.setAttribute("location", geoFilename);
                    // add this to list used if we create a zipped bundle
                    geoSources.add(geoFilename);
                    agg.addContent(fGeo);
                }

                root.addContent(agg);
                XMLOutputter xmlOut = new XMLOutputter();
                String ncmlStr = xmlOut.outputString(document);
                ByteArrayInputStream is = new ByteArrayInputStream(ncmlStr.getBytes());
                MultiDimensionReader netCDFReader = new NetCDFFile(is);
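
                // For reference, the generated NCML takes roughly this shape
                // (locations hypothetical):
                // <netcdf xmlns="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2">
                //   <aggregation type="union">
                //     <netcdf location="/data/SVM15_npp_d20120229_...h5"/>
                //     <netcdf location="/data/GMTCO_npp_d20120229_...h5"/>
                //   </aggregation>
                // </netcdf>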

                // let's try and look through the NetCDF reader and see what we can learn...
                NetcdfFile ncdff = ((NetCDFFile) netCDFReader).getNetCDFFile();

                Group rg = ncdff.getRootGroup();
                // this is a list filled with unpacked qflag products, if any
                ArrayList<VariableDS> qfProds = new ArrayList<VariableDS>();

                // this is a list filled with pseudo Brightness Temp variables converted from Radiance
                ArrayList<VariableDS> btProds = new ArrayList<VariableDS>();

                List<Group> gl = rg.getGroups();
                if (gl != null) {
                    int xDimNASA = -1;
                    int yDimNASA = -1;
                    // Make a first pass to determine the shape of the geolocation data
                    for (Group g : gl) {
                        if (g.getFullName().contains("geolocation_data")) {
                            List<Variable> vl = g.getVariables();
                            for (Variable v : vl) {
                                if (v.getShortName().equals("latitude")) {
                                    // XXX TJJ Nov 2015
                                    // Hack because fill value in attribute does not match
                                    // what I am seeing in the data.
                                    Attribute fillAtt = new Attribute("_FillValue", -999.0);
                                    v.addAttribute(fillAtt);
                                    pathToLat = v.getFullName();
                                    pathToProducts.add(v.getFullName());
                                    prodToDesc.put(v.getFullName(), v.getDescription());
                                    xDimNASA = v.getDimension(0).getLength();
                                    yDimNASA = v.getDimension(1).getLength();
                                }
                                if (v.getShortName().equals("longitude")) {
                                    // XXX TJJ Nov 2015
                                    // Hack because fill value in attribute does not match
                                    // what I am seeing in the data.
                                    Attribute fillAtt = new Attribute("_FillValue", -999.0);
                                    v.addAttribute(fillAtt);
                                    pathToLon = v.getFullName();
                                    pathToProducts.add(v.getFullName());
                                    prodToDesc.put(v.getFullName(), v.getDescription());
                                }
                            }
                        }
                    }
                    for (Group g : gl) {
                        logger.debug("Group name: " + g.getFullName());
                        // NASA only - looking through observation_data and geolocation_data
                        if (g.getFullName().contains("observation_data")) {
                            List<Variable> vl = g.getVariables();
                            for (Variable v : vl) {
                                // keep any data which matches geolocation dimensions
                                if (v.getDimension(0).getLength() == xDimNASA &&
                                    v.getDimension(1).getLength() == yDimNASA) {
                                    logger.debug("Adding product: " + v.getFullName());
                                    pathToProducts.add(v.getFullName());
                                    prodToDesc.put(v.getFullName(), v.getDescription());
                                    Attribute aUnsigned = v.findAttribute("_Unsigned");
                                    if (aUnsigned != null) {
                                        unsignedFlags.put(v.getFullName(), aUnsigned.getStringValue());
                                    } else {
                                        unsignedFlags.put(v.getFullName(), "false");
                                    }

                                    // store units in a map for later
                                    Attribute unitAtt = v.findAttribute("units");
                                    if (unitAtt != null) {
                                        unitsNASA.put(v.getShortName(), unitAtt.getStringValue());
                                    } else {
                                        unitsNASA.put(v.getShortName(), "Unknown");
                                    }

                                    // TJJ Nov 2018 - SIPS V2+ mods
                                    // Regridding with bow-tie interpolation wasn't working since there are
                                    // now multiple fill value categories and we need to look specifically
                                    // for the bowtie deletion flag

                                    Attribute longNameAtt = v.findAttribute("long_name");
                                    String longName = "empty";
                                    if (longNameAtt != null) longName = longNameAtt.getStringValue();
                                    if (longName.contains("reflectance") || longName.contains("radiance")) {

                                        Attribute flagMeanings = v.findAttribute(JPSSUtilities.SIPS_FLAG_MEANINGS_ATTRIBUTE);
                                        // If this is not null, we must be v2.0.0 or higher
                                        if (flagMeanings != null) {
                                            String meanings = flagMeanings.getStringValue();
                                            // Tokenize meanings string, multiple flags defined there
                                            StringTokenizer st = new StringTokenizer(meanings);
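
                                            // Illustrative example (attribute contents hypothetical): a
                                            // flag_meanings string like "missing bowtie_deleted saturated"
                                            // pairs by position with the flag_values array, so the token
                                            // index found below selects the bow-tie fill value.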
                                            int bowtieIdx = -1;
                                            boolean foundBowTieAttribute = false;
                                            String tokStr = null;
                                            while (st.hasMoreTokens()) {
                                                tokStr = st.nextToken();
                                                bowtieIdx++;
                                                if (tokStr.equals(JPSSUtilities.SIPS_BOWTIE_DELETED_FLAG)) {
                                                    foundBowTieAttribute = true;
                                                    break;
                                                }
                                            }

                                            if (foundBowTieAttribute) {
                                                Attribute flagValues = v.findAttribute(JPSSUtilities.SIPS_FLAG_VALUES_ATTRIBUTE);
                                                Array flagValsArr = flagValues.getValues();
                                                int bowTieVal = flagValsArr.getInt(bowtieIdx);
                                                Attribute a1 = new Attribute("_FillValue", bowTieVal);
                                                v.addAttribute(a1);
                                            }
                                        }

                                    }

                                    // TJJ Feb 2016 - Create BT variables where applicable
                                    if ((v.getShortName().matches("M12|M13|M14|M15|M16")) ||
                                        (v.getShortName().matches("I04|I05"))) {

                                        // Get the LUT variable, load into primitive array
                                        Variable lut = g.findVariable(v.getShortName() + "_brightness_temperature_lut");
                                        int[] lutShape = lut.getShape();
                                        logger.debug("Handling NASA LUT Variable, LUT size: " + lutShape[0]);

                                        // pull out valid min, max - these will be used for our new VariableDS
                                        Attribute aVMin = lut.findAttribute("valid_min");
                                        Attribute aVMax = lut.findAttribute("valid_max");
                                        Attribute fillAtt = lut.findAttribute("_FillValue");
                                        logger.debug("valid_min from LUT: " + aVMin.getNumericValue());
                                        logger.debug("valid_max from LUT: " + aVMax.getNumericValue());

                                        // A little hacky, but at this point the class is such a mess
                                        // that what's a little more, right? Load M12-M16, I4-I5 LUTs

                                        if (v.getShortName().matches("M12")) {
                                            m12LUT = new float[lutShape[0]];
                                            ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read();
                                            for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) {
                                                m12LUT[lutIdx] = lutArray.get(lutIdx);
                                            }
                                        }

                                        if (v.getShortName().matches("M13")) {
                                            m13LUT = new float[lutShape[0]];
                                            ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read();
                                            for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) {
                                                m13LUT[lutIdx] = lutArray.get(lutIdx);
                                            }
                                        }

                                        if (v.getShortName().matches("M14")) {
                                            m14LUT = new float[lutShape[0]];
                                            ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read();
                                            for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) {
                                                m14LUT[lutIdx] = lutArray.get(lutIdx);
                                            }
                                        }

                                        if (v.getShortName().matches("M15")) {
                                            m15LUT = new float[lutShape[0]];
                                            ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read();
                                            for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) {
                                                m15LUT[lutIdx] = lutArray.get(lutIdx);
                                            }
                                        }

                                        if (v.getShortName().matches("M16")) {
                                            m16LUT = new float[lutShape[0]];
                                            ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read();
                                            for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) {
                                                m16LUT[lutIdx] = lutArray.get(lutIdx);
                                            }
                                        }

                                        if (v.getShortName().matches("I04")) {
                                            i04LUT = new float[lutShape[0]];
                                            ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read();
                                            for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) {
                                                i04LUT[lutIdx] = lutArray.get(lutIdx);
                                            }
                                        }

                                        if (v.getShortName().matches("I05")) {
                                            i05LUT = new float[lutShape[0]];
                                            ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read();
                                            for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) {
                                                i05LUT[lutIdx] = lutArray.get(lutIdx);
                                            }
                                        }
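
                                        // Note (assumption): the *_BT pseudo-variables created below carry
                                        // no data of their own; the LUTs loaded above are handed to the
                                        // aggregation reader via setLUTMap() later in setup(), which indexes
                                        // them with the packed radiance values to yield brightness temperature.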
                                        // Create a pseudo-variable, fill using LUT
                                        // make a copy of the source variable
                                        // NOTE: by using a VariableDS here, the original
                                        // variable is used for the I/O, this matters!
                                        VariableDS vBT = new VariableDS(g, v, false);

                                        // Name is orig name plus suffix
                                        vBT.setShortName(v.getShortName() + "_BT");

                                        vBT.addAttribute(fillAtt);
                                        vBT.addAttribute(aVMin);
                                        vBT.addAttribute(aVMax);

                                        if (v.getShortName().matches("M12")) {
                                            lutMap.put(vBT.getFullName(), m12LUT);
                                        }
                                        if (v.getShortName().matches("M13")) {
                                            lutMap.put(vBT.getFullName(), m13LUT);
                                        }
                                        if (v.getShortName().matches("M14")) {
                                            lutMap.put(vBT.getFullName(), m14LUT);
                                        }
                                        if (v.getShortName().matches("M15")) {
                                            lutMap.put(vBT.getFullName(), m15LUT);
                                        }
                                        if (v.getShortName().matches("M16")) {
                                            lutMap.put(vBT.getFullName(), m16LUT);
                                        }
                                        if (v.getShortName().matches("I04")) {
                                            lutMap.put(vBT.getFullName(), i04LUT);
                                        }
                                        if (v.getShortName().matches("I05")) {
                                            lutMap.put(vBT.getFullName(), i05LUT);
                                        }
                                        pathToProducts.add(vBT.getFullName());
                                        String newName = vBT.getDescription().replace("radiance", "brightness temperature");
                                        prodToDesc.put(vBT.getFullName(), newName);
                                        btProds.add(vBT);
                                    }
                                }
                            }
                        }
                        if (g.getFullName().contains("geolocation_data")) {
                            List<Variable> vl = g.getVariables();
                            for (Variable v : vl) {
                                // keep any data which matches geolocation dimensions
                                if (v.getDimension(0).getLength() == xDimNASA &&
                                    v.getDimension(1).getLength() == yDimNASA) {
                                    // except we already found Lat and Lon, skip those
                                    if ((v.getShortName().equals("latitude")) ||
                                        (v.getShortName().equals("longitude"))) continue;
                                    logger.debug("Adding product: " + v.getFullName());
                                    pathToProducts.add(v.getFullName());
                                    prodToDesc.put(v.getFullName(), v.getDescription());
                                }
                            }
                        }

                        // NOAA only - we are looking through All_Data, finding displayable data
                        if (g.getFullName().contains("All_Data")) {
                            List<Group> adg = g.getGroups();
                            int xDim = -1;
                            int yDim = -1;

                            // two sub-iterations, first one to find geolocation and product dimensions
                            for (Group subG : adg) {
                                logger.debug("Sub group name: " + subG.getFullName());
                                String subName = subG.getFullName();
                                if (subName.contains("-GEO")) {
                                    // this is the geolocation data
                                    String geoBaseName = subG.getShortName();
                                    geoBaseName = geoBaseName.substring(0, geoBaseName.indexOf('_'));
                                    if (! haveGeoMetaData) {
                                        String geoProfileFileName = nppPP.getProfileFileName(geoBaseName);
                                        // also add meta data from geolocation profile
                                        nppPP.addMetaDataFromFile(geoProfileFileName);
                                        haveGeoMetaData = true;
                                    }
                                    List<Variable> vl = subG.getVariables();
                                    for (Variable v : vl) {
                                        if (v.getFullName().endsWith(SEPARATOR_CHAR + "Latitude")) {
                                            pathToLat = v.getFullName();
                                            logger.debug("Ellipsoid Lat/Lon Variable: " + v.getFullName());
                                            // get the dimensions of the lat variable
                                            Dimension dAlongTrack = v.getDimension(0);
                                            yDim = dAlongTrack.getLength();
                                            Dimension dAcrossTrack = v.getDimension(1);
                                            xDim = dAcrossTrack.getLength();
                                            logger.debug("Lat across track dim: " + dAcrossTrack.getLength());
                                        }
                                        if (v.getFullName().endsWith(SEPARATOR_CHAR + "Longitude")) {
                                            // we got dimensions from lat, don't need 'em twice, but need path
                                            pathToLon = v.getFullName();
                                        }
                                    }
                                    // one more pass in case there is terrain-corrected Lat/Lon
                                    for (Variable v : vl) {
                                        if (v.getFullName().endsWith(SEPARATOR_CHAR + "Latitude_TC")) {
                                            pathToLat = v.getFullName();
                                            logger.debug("Switched Lat/Lon Variable to TC: " + v.getFullName());
                                            // get the dimensions of the lat variable
                                            Dimension dAlongTrack = v.getDimension(0);
                                            yDim = dAlongTrack.getLength();
                                            Dimension dAcrossTrack = v.getDimension(1);
                                            xDim = dAcrossTrack.getLength();
                                            logger.debug("Lat across track dim: " + dAcrossTrack.getLength());
                                        }
                                        if (v.getFullName().endsWith(SEPARATOR_CHAR + "Longitude_TC")) {
                                            // we got dimensions from lat, don't need 'em twice, but need path
                                            pathToLon = v.getFullName();
                                        }
                                    }
                                }
                            }

                            // second, to identify displayable products
                            for (Group subG : adg) {
                                // this is the product data
                                List<Variable> vl = subG.getVariables();
                                for (Variable v : vl) {
                                    boolean useThis = false;
                                    String vName = v.getFullName();
                                    logger.trace("Variable: " + vName);
                                    String varShortName = vName.substring(vName.lastIndexOf(SEPARATOR_CHAR) + 1);

                                    // Special code to handle quality flags. We throw out anything
                                    // that does not match bounds of the geolocation data

                                    if (varShortName.startsWith("QF")) {

                                        logger.trace("Handling Quality Flag: " + varShortName);

                                        // this check is done later for ALL variables, but we need
                                        // it early here to weed out those quality flags that are
                                        // simply a small set of data w/no granule geo bounds
                                        boolean xScanOk = false;
                                        boolean yScanOk = false;
                                        List<Dimension> dl = v.getDimensions();

                                        // toss out > 2D Quality Flags
                                        if (dl.size() > 2) {
                                            logger.trace("SKIPPING QF, > 2D: " + varShortName);
                                            continue;
                                        }

                                        for (Dimension d : dl) {
                                            // in order to consider this a displayable product, make sure
                                            // both scan direction dimensions are present and look like a granule
                                            if (d.getLength() == xDim) {
                                                xScanOk = true;
                                            }
                                            if (d.getLength() == yDim) {
                                                yScanOk = true;
                                            }
                                        }

                                        if (! (xScanOk && yScanOk)) {
                                            logger.trace("SKIPPING QF, does not match geo bounds: " + varShortName);
                                            continue;
                                        }

                                        ArrayList<QualityFlag> qfal = nppPP.getQualityFlags(varShortName);
                                        if (qfal != null) {
                                            for (QualityFlag qf : qfal) {
                                                qf.setPackedName(vName);
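
                                                // Naming example (flag name hypothetical): a flag "Scan_Quality"
                                                // packed inside a variable named QF1_VIIRSMBANDSDR is unpacked
                                                // below as "QF1_Scan_Quality" - the 3-char prefix helps keep
                                                // names unique across QF variables.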
                                                // make a copy of the qflag variable
                                                // NOTE: by using a VariableDS here, the original
                                                // variable is used for the I/O, this matters!
                                                VariableDS vqf = new VariableDS(subG, v, false);
                                                // prefix with QF num to help guarantee uniqueness across groups
                                                // this will cover most cases, but could still be dupe names
                                                // within a single QF. This is handled when fetching XMLPP metadata
                                                vqf.setShortName(varShortName.substring(0, 3) + "_" + qf.getName());
                                                logger.debug("New QF var full name: " + vqf.getFullName());
                                                qfProds.add(vqf);
                                                qfMap.put(vqf.getFullName(), qf);
                                            }
                                        }
                                    }

                                    // for CrIS instrument, first find dimensions of var matching
                                    // CrIS filter, then throw out all variables which don't match
                                    // those dimensions

                                    if (instrumentName.getStringValue().equals("CrIS")) {
                                        if (! vName.contains("GEO")) {
                                            if (! varShortName.startsWith(crisFilter)) {
                                                logger.trace("Skipping variable: " + varShortName);
                                                continue;
                                            }
                                        } else {
                                            // these variables are all GEO-related
                                            // if they match lat/lon bounds, keep them
                                            List<Dimension> dl = v.getDimensions();
                                            if (dl.size() == 3) {
                                                boolean isDisplayableCrIS = true;
                                                for (Dimension d : dl) {
                                                    if ((d.getLength() != xDim) && (d.getLength() != yDim) && (d.getLength() != 9)) {
                                                        isDisplayableCrIS = false;
                                                    }
                                                }
                                                if (! isDisplayableCrIS) {
                                                    continue;
                                                }
                                            }
                                        }
                                    }

                                    DataType dt = v.getDataType();
                                    if ((dt.getSize() != 4) && (dt.getSize() != 2) && (dt.getSize() != 1)) {
                                        continue;
                                    }

                                    List<Dimension> dl = v.getDimensions();
                                    if (dl.size() > 4) {
                                        continue;
                                    }

                                    // for now, skip any 3D VIIRS data
                                    if (instrumentName.getStringValue().equals("VIIRS")) {
                                        if (dl.size() == 3) {
                                            continue;
                                        }
                                    }

                                    boolean xScanOk = false;
                                    boolean yScanOk = false;
                                    for (Dimension d : dl) {
                                        // in order to consider this a displayable product, make sure
                                        // both scan direction dimensions are present and look like a granule
                                        if (d.getLength() == xDim) {
                                            xScanOk = true;
                                        }
                                        if (d.getLength() == yDim) {
                                            yScanOk = true;
                                        }
                                    }

                                    if (xScanOk && yScanOk) {
                                        useThis = true;
                                    }

                                    // For ATMS, the only 3-D variable we pass through is BrightnessTemperature.
                                    // Dimensions for BT are (lon, lat, channel)
                                    if (instrumentName.getStringValue().equals("ATMS")) {
                                        if (dl.size() == 3) {
                                            boolean isDisplayableATMS = false;
                                            for (Dimension d : dl) {
                                                if (d.getLength() == JPSSUtilities.ATMSChannelCenterFrequencies.length) {
                                                    isDisplayableATMS = true;
                                                    logger.trace("This variable has a dimension matching num ATMS channels");
                                                    break;
                                                }
                                            }
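                                            // (ATMS has 22 channels, so a dimension whose length matches the
                                            // channel-frequency table flags the BrightnessTemperature array.)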
                                            if (! isDisplayableATMS) useThis = false;
                                        }
                                    }

                                    // sensor data with a channel dimension
                                    if (useThis) {
                                        if ((instrumentName.getStringValue().equals("CrIS")) ||
                                            (instrumentName.getStringValue().equals("ATMS")) ||
                                            (instrumentName.getStringValue().contains("OMPS"))) {
                                            isVIIRS = false;
                                            logger.debug("Handling non-VIIRS data source...");
                                        }
                                    }

                                    if (useThis) {
                                        // loop through the variable list again, looking for a corresponding "Factors"
                                        float scaleVal = 1f;
                                        float offsetVal = 0f;
                                        boolean unpackFlag = false;

                                        // if the granule has an entry for this variable name,
                                        // get the data (data1 = scale, data2 = offset) and
                                        // create and poke attributes with it

                                        String factorsVarName = nppPP.getScaleFactorName(varShortName);
                                        if (factorsVarName != null) {
                                            logger.debug("Mapping: " + varShortName + " to: " + factorsVarName);
                                            for (Variable fV : vl) {
                                                if (fV.getShortName().equals(factorsVarName)) {
                                                    logger.trace("Pulling scale and offset values from variable: " + fV.getShortName());
                                                    ucar.ma2.Array a = fV.read();
                                                    float[] so = (float[]) a.copyTo1DJavaArray();
                                                    scaleVal = so[0];
                                                    offsetVal = so[1];
                                                    logger.trace("Scale value: " + scaleVal + ", Offset value: " + offsetVal);
                                                    unpackFlag = true;
                                                    break;
                                                }
                                            }
                                        }

                                        // poke in scale/offset attributes for now

                                        Attribute a1 = new Attribute("scale_factor", scaleVal);
                                        v.addAttribute(a1);
                                        Attribute a2 = new Attribute("add_offset", offsetVal);
                                        v.addAttribute(a2);

                                        // add valid range and fill value attributes here
                                        // try to fill in valid range
                                        if (nppPP.hasNameAndMetaData(varShortName)) {
                                            String rangeMin = nppPP.getRangeMin(varShortName);
                                            String rangeMax = nppPP.getRangeMax(varShortName);
                                            logger.trace("range min: " + rangeMin + ", range max: " + rangeMax);
                                            // only store range attribute if a valid range was found
                                            if ((rangeMin != null) && (rangeMax != null)) {
                                                int[] shapeArr = new int[] { 2 };
                                                ArrayFloat af = new ArrayFloat(shapeArr);
                                                try {
                                                    af.setFloat(0, Float.parseFloat(rangeMin));
                                                } catch (NumberFormatException nfe) {
                                                    af.setFloat(0, (float) Integer.MIN_VALUE);
                                                }
                                                try {
                                                    af.setFloat(1, Float.parseFloat(rangeMax));
                                                } catch (NumberFormatException nfe) {
                                                    af.setFloat(1, (float) Integer.MAX_VALUE);
                                                }
                                                Attribute rangeAtt = new Attribute("valid_range", af);
                                                v.addAttribute(rangeAtt);
                                            }

                                            // check for and load fill values too...

                                            // we need to check two places: first, the XML product profile
                                            ArrayList<Float> fval = nppPP.getFillValues(varShortName);

                                            // 2nd, does the variable already have one defined?
                                            // if there was already a fill value associated with this variable, make
                                            // sure we bring that along for the ride too...
                                            Attribute aFill = v.findAttribute("_FillValue");

                                            // determine size of our fill value array
                                            int fvArraySize = 0;
                                            if (aFill != null) fvArraySize++;
                                            if (! fval.isEmpty()) fvArraySize += fval.size();
                                            int[] fillShape = new int[] { fvArraySize };

                                            // allocate the array
                                            ArrayFloat afFill = new ArrayFloat(fillShape);
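
                                            // Worked example (values hypothetical): if the XML profile lists
                                            // fills {65533, 65534} and the variable itself defines _FillValue
                                            // 65535, afFill ends up as [65533.0, 65534.0, 65535.0].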
                                            // and FINALLY, fill it!
                                            if (! fval.isEmpty()) {
                                                for (int fillIdx = 0; fillIdx < fval.size(); fillIdx++) {
                                                    afFill.setFloat(fillIdx, fval.get(fillIdx));
                                                    logger.trace("Adding fill value (from XML): " + fval.get(fillIdx));
                                                }
                                            }

                                            if (aFill != null) {
                                                Number n = aFill.getNumericValue();
                                                // is the data unsigned?
                                                Attribute aUnsigned = v.findAttribute("_Unsigned");
                                                float fillValAsFloat = Float.NaN;
                                                if (aUnsigned != null) {
                                                    if (aUnsigned.getStringValue().equals("true")) {
                                                        DataType fvdt = aFill.getDataType();
                                                        logger.trace("Data String: " + aFill.toString());
                                                        logger.trace("DataType primitive type: " + fvdt.getPrimitiveClassType());
                                                        // signed byte that needs conversion?
                                                        if (fvdt.getPrimitiveClassType() == byte.class) {
                                                            fillValAsFloat = (float) Util.unsignedByteToInt(n.byteValue());
                                                        } else if (fvdt.getPrimitiveClassType() == short.class) {
                                                            fillValAsFloat = (float) Util.unsignedShortToInt(n.shortValue());
                                                        } else {
                                                            fillValAsFloat = n.floatValue();
                                                        }
                                                    }
                                                }
                                                afFill.setFloat(fvArraySize - 1, fillValAsFloat);
                                                logger.trace("Adding fill value (from variable): " + fillValAsFloat);
                                            }
                                            Attribute fillAtt = new Attribute("_FillValue", afFill);
                                            v.addAttribute(fillAtt);
                                        }

                                        Attribute aUnsigned = v.findAttribute("_Unsigned");
                                        if (aUnsigned != null) {
                                            unsignedFlags.put(v.getFullName(), aUnsigned.getStringValue());
                                        } else {
                                            unsignedFlags.put(v.getFullName(), "false");
                                        }

                                        if (unpackFlag) {
                                            unpackFlags.put(v.getFullName(), "true");
                                        } else {
                                            unpackFlags.put(v.getFullName(), "false");
                                        }

                                        logger.debug("Adding product: " + v.getFullName());
                                        pathToProducts.add(v.getFullName());
                                        prodToDesc.put(v.getFullName(), v.getDescription());
                                    }
                                }
                            }
                        }
                    }
                }

                // add in any unpacked qflag products
                for (VariableDS qfV : qfProds) {
                    // skip the spares - they are reserved for future use
                    if (qfV.getFullName().endsWith("Spare")) {
                        continue;
                    }
                    // String.endsWith is case sensitive so gotta check both cases
                    if (qfV.getFullName().endsWith("spare")) {
                        continue;
                    }
                    ncdff.addVariable(qfV.getGroup(), qfV);
                    logger.trace("Adding QF product: " + qfV.getFullName());
                    pathToProducts.add(qfV.getFullName());
                    prodToDesc.put(qfV.getFullName(), qfV.getDescription());
                    unsignedFlags.put(qfV.getFullName(), "true");
                    unpackFlags.put(qfV.getFullName(), "false");
                }

                // add in any pseudo BT products from NASA data
                for (Variable vBT : btProds) {
                    logger.trace("Adding BT product: " + vBT.getFullName());
                    ncdff.addVariable(vBT.getGroup(), vBT);
                    unsignedFlags.put(vBT.getFullName(), "true");
                    unpackFlags.put(vBT.getFullName(), "false");
                }

                ncdfal.add((NetCDFFile) netCDFReader);
            }

        } catch (Exception e) {
            logger.error("cannot create NetCDF reader for files selected", e);
            if (e.getMessage() != null && e.getMessage().equals("XML Product Profile Error")) {
                throw new VisADException("Unable to extract metadata from required XML Product Profile", e);
            }
        }

        // TJJ Feb 2018
        // Doing a reorder of variable names here, as per HP's request from
        // http://mcidas.ssec.wisc.edu/inquiry-v/?inquiry=2613

        if (isVIIRS) {
            // Copy the variable Set to a sortable List
            List<String> sortedList = new ArrayList<>(pathToProducts);
            Collections.sort(sortedList, new VIIRSSort());

            // Clear the original data structure which retains insert order
            // (it's a LinkedHashSet)
            pathToProducts.clear();

            // Re-add the variables in corrected order
            for (String s : sortedList) {
                pathToProducts.add(s);
            }
        }

        // initialize the aggregation reader object
        try {
            if (isNOAA) {
                nppAggReader = new GranuleAggregation(ncdfal, pathToProducts, "Track", "XTrack", isVIIRS);
                ((GranuleAggregation) nppAggReader).setQfMap(qfMap);
            } else {
                nppAggReader = new GranuleAggregation(ncdfal, pathToProducts, "number_of_lines", "number_of_pixels", isVIIRS);
                ((GranuleAggregation) nppAggReader).setLUTMap(lutMap);
            }
        } catch (Exception e) {
            throw new VisADException("Unable to initialize aggregation reader", e);
        }

        // make sure we found valid data
        if (pathToProducts.size() == 0) {
            throw new VisADException("No data found in files selected");
        }

        logger.debug("Number of adapters needed: " + pathToProducts.size());
        adapters = new MultiDimensionAdapter[pathToProducts.size()];
        Hashtable<String, String[]> properties = new Hashtable<>();

        Iterator<String> iterator = pathToProducts.iterator();
        int pIdx = 0;
        boolean adapterCreated = false;
        while (iterator.hasNext()) {
            String pStr = iterator.next();
            logger.debug("Working on adapter number " + (pIdx + 1) + ": " + pStr);
            Map<String, Object> swathTable = SwathAdapter.getEmptyMetadataTable();
            Map<String, Object> spectTable = SpectrumAdapter.getEmptyMetadataTable();
            swathTable.put("array_name", pStr);
            swathTable.put("lon_array_name", pathToLon);
            swathTable.put("lat_array_name", pathToLat);
            swathTable.put("XTrack", "XTrack");
            swathTable.put("Track", "Track");
            swathTable.put("geo_Track", "Track");
            swathTable.put("geo_XTrack", "XTrack");
            // TJJ is this even needed? Is product_name used anywhere?
            if (productName == null) productName = pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1);
            swathTable.put("product_name", productName);
            swathTable.put("_mapping", prodToDesc);
            // array_name common to spectrum table
            spectTable.put("array_name", pStr);
            spectTable.put("product_name", productName);
            spectTable.put("_mapping", prodToDesc);

            if (! isVIIRS) {

                // 3D data is either ATMS, OMPS, or CrIS
                if ((instrumentName.getShortName() != null) && (instrumentName.getStringValue().equals("ATMS"))) {

                    spectTable.put(SpectrumAdapter.channelIndex_name, "Channel");
                    swathTable.put(SpectrumAdapter.channelIndex_name, "Channel");

                    swathTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
                    swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                    swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
                    spectTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
                    spectTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                    spectTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});

                    spectTable.put(SpectrumAdapter.channelType, "wavelength");
                    spectTable.put(SpectrumAdapter.channels_name, "Channel");
                    spectTable.put(SpectrumAdapter.x_dim_name, "XTrack");
                    spectTable.put(SpectrumAdapter.y_dim_name, "Track");

                    int numChannels = JPSSUtilities.ATMSChannelCenterFrequencies.length;
                    float[] bandArray = new float[numChannels];
                    String[] bandNames = new String[numChannels];
                    for (int bIdx = 0; bIdx < numChannels; bIdx++) {
                        bandArray[bIdx] = JPSSUtilities.ATMSChannelCenterFrequencies[bIdx];
                        bandNames[bIdx] = "Channel " + (bIdx + 1);
                    }
                    spectTable.put(SpectrumAdapter.channelValues, bandArray);
                    spectTable.put(SpectrumAdapter.bandNames, bandNames);

                } else {
                    if (instrumentName.getStringValue().equals("CrIS")) {

                        swathTable.put("XTrack", "dim1");
                        swathTable.put("Track", "dim0");
                        swathTable.put("geo_XTrack", "dim1");
                        swathTable.put("geo_Track", "dim0");
                        swathTable.put("product_name", "CrIS_SDR");
                        swathTable.put(SpectrumAdapter.channelIndex_name, "dim3");
                        swathTable.put(SpectrumAdapter.FOVindex_name, "dim2");

                        spectTable.put(SpectrumAdapter.channelIndex_name, "dim3");
                        spectTable.put(SpectrumAdapter.FOVindex_name, "dim2");
                        spectTable.put(SpectrumAdapter.x_dim_name, "dim1");
                        spectTable.put(SpectrumAdapter.y_dim_name, "dim0");

                    } else if (instrumentName.getStringValue().contains("OMPS")) {

                        spectTable.put(SpectrumAdapter.channelIndex_name, "Channel");
                        swathTable.put(SpectrumAdapter.channelIndex_name, "Channel");

                        swathTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
                        swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                        swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
                        spectTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
                        spectTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                        spectTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});

                        spectTable.put(SpectrumAdapter.channelType, "wavelength");
                        spectTable.put(SpectrumAdapter.channels_name, "Channel");
                        spectTable.put(SpectrumAdapter.x_dim_name, "XTrack");
                        spectTable.put(SpectrumAdapter.y_dim_name, "Track");

                        int numChannels = 200;
                        if (instrumentName.getStringValue().equals("OMPS-TC")) {
                            numChannels = 260;
                        }
                        logger.debug("Setting up OMPS adapter, num channels: " + numChannels);
                        float[] bandArray = new float[numChannels];
                        String[] bandNames = new String[numChannels];
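                        // Note: OMPS channel values here are simply 0-based indices (no
                        // wavelength table is read), while band names are 1-based labels.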
                        for (int bIdx = 0; bIdx < numChannels; bIdx++) {
                            bandArray[bIdx] = bIdx;
                            bandNames[bIdx] = "Channel " + (bIdx + 1);
                        }
                        spectTable.put(SpectrumAdapter.channelValues, bandArray);
                        spectTable.put(SpectrumAdapter.bandNames, bandNames);

                    } else {
                        // sorry, if we can't id the instrument, we can't display the data!
                        throw new VisADException("Unable to determine instrument name");
                    }
                }

            } else {
                swathTable.put("array_dimension_names", new String[] {"Track", "XTrack"});
                swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
            }

            swathTable.put("scale_name", "scale_factor");
            swathTable.put("offset_name", "add_offset");
            swathTable.put("fill_value_name", "_FillValue");
            swathTable.put("range_name", pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1));
            spectTable.put("range_name", pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1));

            // set the valid range hash if data is available
            if (nppPP != null) {
                if (nppPP.getRangeMin(pStr.substring(pStr.lastIndexOf(SEPARATOR_CHAR) + 1)) != null) {
                    swathTable.put("valid_range", "valid_range");
                }
            }

            String unsignedAttributeStr = unsignedFlags.get(pStr);
            if ((unsignedAttributeStr != null) && (unsignedAttributeStr.equals("true"))) {
                swathTable.put("unsigned", unsignedAttributeStr);
            }

            String unpackFlagStr = unpackFlags.get(pStr);
            if ((unpackFlagStr != null) && (unpackFlagStr.equals("true"))) {
                swathTable.put("unpack", "true");
            }

            // For Suomi NPP data, do valid range check AFTER applying scale/offset
            swathTable.put("range_check_after_scaling", "true");

            // pass in a GranuleAggregation reader...
            if (! isVIIRS) {
                if (instrumentName.getStringValue().equals("ATMS")) {
                    adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable);
                    adapterCreated = true;
                    SpectrumAdapter sa = new SpectrumAdapter(nppAggReader, spectTable);
                    DataCategory.createCategory("MultiSpectral");
                    categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE");
                    MultiSpectralData msd = new MultiSpectralData((SwathAdapter) adapters[pIdx], sa,
                        "BrightnessTemperature", "BrightnessTemperature", "SuomiNPP", "ATMS");
                    msd.setInitialWavenumber(JPSSUtilities.ATMSChannelCenterFrequencies[0]);
                    multiSpectralData.add(msd);
                }
                if (instrumentName.getStringValue().equals("CrIS")) {
                    if (pStr.contains(crisFilter)) {
                        adapters[pIdx] = new CrIS_SDR_SwathAdapter(nppAggReader, swathTable);
                        adapterCreated = true;
                        CrIS_SDR_Spectrum csa = new CrIS_SDR_Spectrum(nppAggReader, spectTable);
                        DataCategory.createCategory("MultiSpectral");
                        categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE");
                        MultiSpectralData msd = new CrIS_SDR_MultiSpectralData((CrIS_SDR_SwathAdapter) adapters[pIdx], csa);
                        msd.setInitialWavenumber(csa.getInitialWavenumber());
                        msd_CrIS.add(msd);
                    }
                }
                if (instrumentName.getStringValue().contains("OMPS")) {
                    adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable);
                    adapterCreated = true;
                    SpectrumAdapter sa = new SpectrumAdapter(nppAggReader, spectTable);
                    DataCategory.createCategory("MultiSpectral");
                    categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE");
                    MultiSpectralData msd = new MultiSpectralData((SwathAdapter) adapters[pIdx], sa,
                        "RadianceEarth", "RadianceEarth", "SuomiNPP", "OMPS");
                    msd.setInitialWavenumber(0);
                    multiSpectralData.add(msd);
                }
                if (pIdx == 0) {
                    // generate default subset for ATMS and OMPS
                    if (! instrumentName.getStringValue().equals("CrIS")) {
                        defaultSubset = multiSpectralData.get(pIdx).getDefaultSubset();
                    }
                }

            } else {
                // setting NOAA-format units
                String varName = pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1);
                String varShortName = pStr.substring(pStr.lastIndexOf(SEPARATOR_CHAR) + 1);
                String units = nppPP.getUnits(varShortName);
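
                // Unit-parsing example (unit string illustrative): a profile entry such
                // as "W m-2 sr-1 um-1" parses into a VisAD Unit; strings VisAD cannot
                // parse fall back to a DerivedUnit in the catch blocks below.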
                Unit u = null;
                try {
                    u = Parser.parse(units);
                } catch (NoSuchUnitException e) {
                    u = new DerivedUnit(units);
                    logger.debug("Unknown units: " + units);
                } catch (ParseException e) {
                    u = new DerivedUnit(units);
                    logger.debug("Unparseable units: " + units);
                }
                // associate this variable with these units, if not done already
                RealType.getRealType(varName, u);
                adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable);
                adapterCreated = true;
                if (pIdx == 0) {
                    defaultSubset = adapters[pIdx].getDefaultSubset();
                }
                categories = DataCategory.parseCategories("IMAGE");
            }
            // only increment the count if we created an adapter; some products are skipped
            if (adapterCreated) pIdx++;
            adapterCreated = false;
        }

        if (msd_CrIS.size() > 0) {
            try {
                MultiSpectralAggr aggr = new MultiSpectralAggr(msd_CrIS.toArray(new MultiSpectralData[msd_CrIS.size()]));
                aggr.setInitialWavenumber(902.25f);
                multiSpectralData.add(aggr);
                defaultSubset = ((MultiSpectralData) msd_CrIS.get(0)).getDefaultSubset();
            } catch (Exception e) {
                logger.error("Exception: ", e);
            }
        }

        // Merge with pre-set properties
        Hashtable tmpHt = getProperties();
        tmpHt.putAll(properties);
        setProperties(tmpHt);
    }

    public void initAfterUnpersistence() {
        try {
            String zidvPath =
                McIDASV.getStaticMcv().getStateManager().
                getProperty(IdvPersistenceManager.PROP_ZIDVPATH, "");
            if (getTmpPaths() != null) {
                // New code for zipped bundles -
                // we want 'sources' to point to wherever the zipped data was unpacked.
                sources.clear();
                // following PersistenceManager.fixBulkDataSources, get temporary data location
                for (Object o : getTmpPaths()) {
                    String tempPath = (String) o;
                    // replace the macro string with the actual path
                    String expandedPath = tempPath.replace(PersistenceManager.MACRO_ZIDVPATH, zidvPath);
                    // we don't want to add nav files to this list!
                    File f = new File(expandedPath);
                    if (!f.getName().matches(JPSSUtilities.SUOMI_GEO_REGEX_NOAA)) {
                        sources.add(expandedPath);
                    }
                }

                // mjh: fix absolute paths in filenameMap
                logger.debug("original filenameMap: {}", filenameMap);
                Iterator<String> keyIterator = filenameMap.keySet().iterator();
                while (keyIterator.hasNext()) {
                    String keyStr = (String) keyIterator.next();
                    List<String> fileNames = (List<String>) filenameMap.get(keyStr);
                    for (int i = 0; i < fileNames.size(); i++) {
                        String name = fileNames.get(i);
                        int lastSeparator = name.lastIndexOf(File.separatorChar);
                        String sub = name.substring(0, lastSeparator);
                        name = name.replace(sub, zidvPath);
                        fileNames.set(i, name);
                    }
                }
                logger.debug("filenameMap with zidvPath: {}", filenameMap);
            } else {
                // leave in original unpersistence code - this will get run for unzipped bundles.
                // TODO: do we need to handle the "Save with relative paths" case specially?
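                // restore the source granule list exactly as it was saved in the bundle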
                if (!oldSources.isEmpty()) {
                    sources.clear();
                    for (Object o : oldSources) {
                        sources.add((String) o);
                    }
                }
            }
            oldSources.clear();
            setup();
        } catch (Exception e) {
            logger.error("Exception: ", e);
        }
    }

    /* (non-Javadoc)
     * @see edu.wisc.ssec.mcidasv.data.HydraDataSource#canSaveDataToLocalDisk()
     */
    @Override
    public boolean canSaveDataToLocalDisk() {
        // At present, Suomi data is always data granules on disk
        return true;
    }

    /* (non-Javadoc)
     * @see ucar.unidata.data.DataSourceImpl#saveDataToLocalDisk(java.lang.String, java.lang.Object, boolean)
     */
    @Override
    protected List saveDataToLocalDisk(String filePrefix, Object loadId,
            boolean changeLinks) throws Exception {
        // need to make a list of all data granule files,
        // PLUS all geolocation granule files, but only if they are accessed separately!
        List<String> fileList = new ArrayList<String>();
        for (Object o : sources) {
            fileList.add((String) o);
        }
        for (String s : geoSources) {
            fileList.add(s);
        }
        return fileList;
    }

    public List<String> getOldSources() {
        return oldSources;
    }

    public void setOldSources(List<String> oldSources) {
        this.oldSources = oldSources;
    }

    public Map<String, List<String>> getFilenameMap() {
        return filenameMap;
    }

    public void setFilenameMap(Map<String, List<String>> filenameMap) {
        this.filenameMap = filenameMap;
    }

    /**
     * Make and insert the {@link DataChoice DataChoices} for this
     * {@code DataSource}.
     */
    public void doMakeDataChoices() {

        // special loop for CrIS, ATMS, and OMPS data
        if (multiSpectralData.size() > 0) {
            for (int k = 0; k < multiSpectralData.size(); k++) {
                MultiSpectralData adapter = multiSpectralData.get(k);
                DataChoice choice = null;
                try {
                    choice = doMakeDataChoice(k, adapter);
                    choice.setObjectProperty(Constants.PROP_GRANULE_COUNT,
                        getProperty(Constants.PROP_GRANULE_COUNT, "1 Granule"));
                    msdMap.put(choice.getName(), adapter);
                    addDataChoice(choice);
                } catch (Exception e) {
                    logger.error("Exception: ", e);
                }
            }
            return;
        }

        // all other data (VIIRS and 2D EDRs)
        if (adapters != null) {
            for (int idx = 0; idx < adapters.length; idx++) {
                DataChoice choice = null;
                try {
                    Map<String, Object> metadata = adapters[idx].getMetadata();
                    String description = null;
                    if (metadata.containsKey("_mapping")) {
                        String arrayName = metadata.get("array_name").toString();
                        Map<String, String> mapping =
                            (Map<String, String>) metadata.get("_mapping");
                        description = mapping.get(arrayName);
                    }
                    choice = doMakeDataChoice(idx, adapters[idx].getArrayName(), description);
                    choice.setObjectProperty(Constants.PROP_GRANULE_COUNT,
                        getProperty(Constants.PROP_GRANULE_COUNT, "1 Granule"));
                } catch (Exception e) {
                    logger.error("doMakeDataChoice failed", e);
                }

                if (choice != null) {
                    addDataChoice(choice);
                }
            }
        }
    }

    private DataChoice doMakeDataChoice(int idx, String var, String description) throws Exception {
        String name = var;
        if (description == null) {
            description = name;
        }
        DataSelection dataSel = new MultiDimensionSubset(defaultSubset);
        Hashtable subset = new Hashtable();
        subset.put(new MultiDimensionSubset(), dataSel);
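        // Note: the table is keyed by a MultiDimensionSubset *instance* here,
        // rather than the shared MultiDimensionSubset.key used in the adapter
        // variant below; getDataInner() recovers the selection by scanning the
        // property table for any key that is an instanceof MultiDimensionSubset.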
        // TJJ hack for the uber-odd case where the data type varies for the same variable:
        // if it's M12 - M16, it's a BrightnessTemperature, otherwise Reflectance
        if (name.endsWith("BrightnessTemperatureOrReflectance")) {
            name = name.substring(0, name.length() - "BrightnessTemperatureOrReflectance".length());
            if (whichEDR.matches("M12|M13|M14|M15|M16")) {
                name = name + "BrightnessTemperature";
            } else {
                name = name + "Reflectance";
            }
        }
        DirectDataChoice ddc = new DirectDataChoice(this, idx, name, description, categories, subset);
        return ddc;
    }

    private DataChoice doMakeDataChoice(int idx, MultiSpectralData adapter) throws Exception {
        String name = adapter.getName();
        DataSelection dataSel = new MultiDimensionSubset(defaultSubset);
        Hashtable subset = new Hashtable();
        subset.put(MultiDimensionSubset.key, dataSel);
        subset.put(MultiSpectralDataSource.paramKey, adapter.getParameter());
        // TJJ hack for the uber-odd case where the data type varies for the same variable:
        // if it's M12 - M16, it's a BrightnessTemperature, otherwise Reflectance
        if (name.endsWith("BrightnessTemperatureOrReflectance")) {
            name = name.substring(0, name.length() - "BrightnessTemperatureOrReflectance".length());
            if (whichEDR.matches("M12|M13|M14|M15|M16")) {
                name = name + "BrightnessTemperature";
            } else {
                name = name + "Reflectance";
            }
        }
        DirectDataChoice ddc = new DirectDataChoice(this, Integer.valueOf(idx), name, name, categories, subset);
        ddc.setProperties(subset);
        return ddc;
    }

    /**
     * Check to see if this {@code SuomiNPPDataSource} is equal to the object
     * in question.
     *
     * @param o object in question
     *
     * @return {@code true} only if they are the same object (reference equality)
     */
    @Override
    public boolean equals(Object o) {
        if (!(o instanceof SuomiNPPDataSource)) {
            return false;
        }
        return this == o;
    }

    public MultiSpectralData getMultiSpectralData() {
        return multiSpectralData.get(0);
    }

    public MultiSpectralData getMultiSpectralData(DataChoice choice) {
        return msdMap.get(choice.getName());
    }

    public String getDatasetName() {
        return filename;
    }

    /**
     * @return the qfMap
     */
    public Map<String, QualityFlag> getQfMap() {
        return qfMap;
    }

    public void setDatasetName(String name) {
        filename = name;
    }

    /**
     * Determine if this data source originated from a
     * {@literal "NOAA file"}.
     *
     * @return {@code true} if the file came from NOAA, {@code false} otherwise.
     */
    public boolean isNOAA() {
        return isNOAA;
    }
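
    /**
     * Build a subset specification for the given lat/lon bounding box by
     * delegating to the first adapter's geolocation logic. Values in the
     * returned map are {first, last, stride} coordinate triples, as used
     * elsewhere in this class.
     *
     * @param select current multidimensional subset
     * @param geoSelection supplies the lat/lon bounding box
     *
     * @return subset mapping covering the bounding box
     */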
    public Map<String, double[]> getSubsetFromLonLatRect(MultiDimensionSubset select, GeoSelection geoSelection) {
        GeoLocationInfo ginfo = geoSelection.getBoundingBox();
        return adapters[0].getSubsetFromLonLatRect(select.getSubset(), ginfo.getMinLat(), ginfo.getMaxLat(),
                ginfo.getMinLon(), ginfo.getMaxLon());
    }

    public synchronized Data getData(DataChoice dataChoice, DataCategory category,
            DataSelection dataSelection, Hashtable requestProperties)
            throws VisADException, RemoteException {
        return this.getDataInner(dataChoice, category, dataSelection, requestProperties);
    }

    protected Data getDataInner(DataChoice dataChoice, DataCategory category,
            DataSelection dataSelection, Hashtable requestProperties)
            throws VisADException, RemoteException {

        // this hack keeps the HydraImageProbe from doing a getData()
        // TODO: need to use categories?
        if (requestProperties != null) {
            if (requestProperties.toString().equals("{prop.requester=MultiSpectral}")) {
                return null;
            }
        }

        GeoLocationInfo ginfo = null;
        GeoSelection geoSelection = null;

        if ((dataSelection != null) && (dataSelection.getGeoSelection() != null)) {
            geoSelection = (dataSelection.getGeoSelection().getBoundingBox() != null)
                ? dataSelection.getGeoSelection()
                : dataChoice.getDataSelection().getGeoSelection();
        }

        if (geoSelection != null) {
            ginfo = geoSelection.getBoundingBox();
        }

        Data data = null;
        if (adapters == null) {
            return data;
        }

        // pick the adapter with the same index as the current data choice
        int aIdx = 0;
        List<DataChoice> dcl = getDataChoices();
        for (DataChoice dc : dcl) {
            if (dc.getName().equals(dataChoice.getName())) {
                aIdx = dcl.indexOf(dc);
                break;
            }
        }

        MultiDimensionAdapter adapter = adapters[aIdx];

        try {
            Map<String, double[]> subset = null;
            if (ginfo != null) {
                subset = adapter.getSubsetFromLonLatRect(ginfo.getMinLat(), ginfo.getMaxLat(),
                        ginfo.getMinLon(), ginfo.getMaxLon(),
                        geoSelection.getXStride(),
                        geoSelection.getYStride(),
                        geoSelection.getZStride());
            } else {
                MultiDimensionSubset select = null;
                Hashtable table = dataChoice.getProperties();
                Enumeration keys = table.keys();
                while (keys.hasMoreElements()) {
                    Object key = keys.nextElement();
                    logger.debug("Key: {}", key);
                    if (key instanceof MultiDimensionSubset) {
                        select = (MultiDimensionSubset) table.get(key);
                    }
                }
                subset = select.getSubset();
                logger.debug("Subset size: {}", subset.size());

                if (dataSelection != null) {
                    Hashtable props = dataSelection.getProperties();
                    if (props != null) {
                        if (props.containsKey(SpectrumAdapter.channelIndex_name)) {
                            logger.debug("Props contains channel index key...");
                            double[] coords = subset.get(SpectrumAdapter.channelIndex_name);
                            int idx = ((Integer) props.get(SpectrumAdapter.channelIndex_name)).intValue();
                            // restrict the channel axis to the single requested index
                            coords[0] = idx;
                            coords[1] = idx;
                            coords[2] = 1;
                        }
                    }
                }
            }

            if (subset != null) {
                data = adapter.getData(subset);
                data = applyProperties(data, requestProperties, subset, aIdx);
            }
        } catch (Exception e) {
            logger.error("getData Exception: ", e);
        }
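
        // Wrap the granule in a one-sample time sequence (time -> data) so
        // downstream display code sees an explicit time dimension.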
        ////////// inq1429 return FieldImpl with time dim /////////////////
        if (data != null) {
            List<DateTime> dateTimes = new ArrayList<DateTime>();
            dateTimes.add(new DateTime(theDate));
            SampledSet timeSet = (SampledSet) ucar.visad.Util.makeTimeSet(dateTimes);
            FunctionType ftype = new FunctionType(RealType.Time, data.getType());
            FieldImpl fi = new FieldImpl(ftype, timeSet);
            fi.setSample(0, data);
            data = fi;
        }
        //////////////////////////////////////////////////////////////////
        return data;
    }

    protected Data applyProperties(Data data, Hashtable requestProperties, Map<String, double[]> subset, int adapterIndex)
            throws VisADException, RemoteException {
        // currently a pass-through hook: requestProperties are accepted but not applied
        return data;
    }

    protected void initDataSelectionComponents(
            List<DataSelectionComponent> components,
            final DataChoice dataChoice) {

        try {
            // inq1429: need to handle FieldImpl here
            FieldImpl thing = (FieldImpl) dataChoice.getData(null);
            FlatField image;
            if (GridUtil.isTimeSequence(thing)) {
                image = (FlatField) thing.getSample(0);
            } else {
                image = (FlatField) thing;
            }
            if (image != null) {
                PreviewSelection ps = new PreviewSelection(dataChoice, image, null);
                // Region subsetting not yet implemented for CrIS data
                if (instrumentName.getStringValue().equals("CrIS")) {
                    ps.enableSubsetting(false);
                }
                components.add(ps);
            }
        } catch (Exception e) {
            logger.error("Can't make PreviewSelection: ", e);
        }
    }

    /**
     * Add {@code Integer->String} translations to IDV's
     * {@literal "translations"} resource, so they will be made available to
     * the data probe of Image Displays.
     */
    public void initQfTranslations() {

        Map<String, Map<Integer, String>> translations =
            getIdv().getResourceManager().getTranslationsHashtable();

        for (String qfKey : qfMap.keySet()) {
            // This string needs to match up with the data choice name:
            String qfKeySubstr = qfKey.replace("All_Data/", "");
            // check if we've already added a map for this QF
            if (!translations.containsKey(qfKeySubstr)) {
                Map<String, String> hm = qfMap.get(qfKey).getHm();
                Map<Integer, String> newMap =
                    new HashMap<Integer, String>(hm.size());
                for (String dataValueKey : hm.keySet()) {
                    // convert Map<String, String> to Map<Integer, String>
                    Integer intKey = Integer.parseInt(dataValueKey);
                    newMap.put(intKey, hm.get(dataValueKey));
                }
                translations.put(qfKeySubstr, newMap);
            }
        }
    }

}