/*
 * $Id: NPPDataSource.java,v 1.28 2011/03/24 16:06:33 davep Exp $
 *
 * This file is part of McIDAS-V
 *
 * Copyright 2007-2011
 * Space Science and Engineering Center (SSEC)
 * University of Wisconsin - Madison
 * 1225 W. Dayton Street, Madison, WI 53706, USA
 * https://www.ssec.wisc.edu/mcidas
 *
 * All Rights Reserved
 *
 * McIDAS-V is built on Unidata's IDV and SSEC's VisAD libraries, and
 * some McIDAS-V source code is based on IDV and VisAD source code.
 *
 * McIDAS-V is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * (at your option) any later version.
 *
 * McIDAS-V is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser Public License for more details.
 *
 * You should have received a copy of the GNU Lesser Public License
 * along with this program. If not, see http://www.gnu.org/licenses.
 */

package edu.wisc.ssec.mcidasv.data.hydra;

import edu.wisc.ssec.mcidasv.data.HydraDataSource;
import edu.wisc.ssec.mcidasv.data.PreviewSelection;

import java.io.ByteArrayInputStream;
import java.io.File;

import java.rmi.RemoteException;

import java.text.SimpleDateFormat;

import java.util.ArrayList;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.StringTokenizer;
import java.util.TimeZone;
import java.util.TreeSet;

import org.jdom.Namespace;
import org.jdom.output.XMLOutputter;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ucar.ma2.ArrayFloat;
import ucar.ma2.DataType;
import ucar.nc2.Dimension;
import ucar.nc2.Group;
import ucar.nc2.NetcdfFile;
import ucar.nc2.Variable;

import ucar.unidata.data.DataCategory;
import ucar.unidata.data.DataChoice;
import ucar.unidata.data.DataSelection;
import ucar.unidata.data.DataSelectionComponent;
import ucar.unidata.data.DataSourceDescriptor;
import ucar.unidata.data.DirectDataChoice;
import ucar.unidata.data.GeoLocationInfo;
import ucar.unidata.data.GeoSelection;

import ucar.unidata.util.Misc;

import visad.Data;
import visad.FlatField;
import visad.GriddedSet;
import visad.VisADException;

import visad.util.Util;

/**
 * A data source for NPOESS Preparatory Project (NPP) data.
 * This will probably move, but we are placing it here for now
 * since we are leveraging some existing code used for HYDRA.
 */

public class NPPDataSource extends HydraDataSource {

    private static final Logger logger = LoggerFactory.getLogger(NPPDataSource.class);

    /** Source file */
    protected String filename;

    protected MultiDimensionReader nppAggReader;

    protected MultiDimensionAdapter[] adapters = null;

    private ArrayList<MultiSpectralData> multiSpectralData = new ArrayList<MultiSpectralData>();
    private HashMap<String, MultiSpectralData> msdMap = new HashMap<String, MultiSpectralData>();

    private static final String DATA_DESCRIPTION = "NPP Data";

    // instrument related variables and flags
    ucar.nc2.Attribute instrumentName = null;
    private String productName = null;

    // for now, we are only handling CrIS variables that match this filter and SCAN dimensions
    private String crisFilter = "ES_Real";

    private HashMap defaultSubset;
    public TrackAdapter track_adapter;

    private List categories;
    private boolean hasChannelSelect = false;
    private boolean hasImagePreview = true;
    private boolean isCombinedProduct = false;

    private PreviewSelection previewSelection = null;
    private FlatField previewImage = null;

    private static int[] YSCAN_POSSIBILITIES = { 96, 512, 768, 1536, 2304, 2313, 12, 4, 4, 4 };
    private static int[] XSCAN_POSSIBILITIES = { 508, 2133, 3200, 6400, 4064, 4121, 96, 30, 30, 30 };
    private static int[] ZSCAN_POSSIBILITIES = { -1, -1, -1, -1, -1, -1, 22, 163, 437, 717 };
    private int inTrackDimensionLength = -1;

    // need our own separator char since it's always Unix-style in the NPP files
    private static final String SEPARATOR_CHAR = "/";

    // date formatter for converting NPP day/time to something we can use
    // (HH, not hh: granule times are on a 24-hour clock)
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss.SSS");

    /**
     * Zero-argument constructor for construction via unpersistence.
     */

    public NPPDataSource() {}

    /**
     * Construct a new NPP HDF data source.
     * @param descriptor descriptor for this <code>DataSource</code>
     * @param fileName name of the HDF file to read
     * @param properties hashtable of properties
     *
     * @throws VisADException problem creating data
     */

    public NPPDataSource(DataSourceDescriptor descriptor,
            String fileName, Hashtable properties)
            throws VisADException {
        this(descriptor, Misc.newList(fileName), properties);
        logger.trace("NPPDataSource called, single file selected: " + fileName);
    }

    /**
     * Construct a new NPP HDF data source.
     * @param descriptor descriptor for this <code>DataSource</code>
     * @param newSources List of filenames
     * @param properties hashtable of properties
     *
     * @throws VisADException problem creating data
     */

    public NPPDataSource(DataSourceDescriptor descriptor,
            List<String> newSources, Hashtable properties)
            throws VisADException {
        super(descriptor, newSources, DATA_DESCRIPTION, properties);
        logger.debug("NPPDataSource constructor called, file count: " + sources.size());

        this.filename = (String) sources.get(0);

        this.setName("NPP");
        this.setDescription("NPP");

        for (Object o : sources) {
            logger.debug("NPP source file: " + (String) o);
        }

        setup();
    }

    public void setup() throws VisADException {

        // looking to populate 3 things - path to lat, path to lon, path to relevant products
        String pathToLat = null;
        String pathToLon = null;
        TreeSet<String> pathToProducts = new TreeSet<String>();

        // flag to indicate data is 3-dimensional (X, Y, channel or band)
        boolean is3D = false;

        // check source filenames to see if this is a combined product
        // XXX TJJ - looking for "underscore" is NOT GUARANTEED TO WORK! FIXME
        String prodStr = filename.substring(
                filename.lastIndexOf(File.separatorChar) + 1,
                filename.lastIndexOf(File.separatorChar) + 1 + filename.indexOf("_"));
        StringTokenizer st = new StringTokenizer(prodStr, "-");
        logger.trace("check for embedded GEO, tokenizing: " + prodStr);
        while (st.hasMoreTokens()) {
            String singleProd = st.nextToken();
            logger.trace("Next token: " + singleProd);
            for (int i = 0; i < JPSSUtilities.geoProductIDs.length; i++) {
                if (singleProd.equals(JPSSUtilities.geoProductIDs[i])) {
                    logger.trace("Setting isCombinedProduct true, found embedded GEO: " + singleProd);
                    isCombinedProduct = true;
                    break;
                }
            }
        }

        // various metadata we'll need to gather on a per-product basis
        ArrayList<String> unsignedFlags = new ArrayList<String>();
        ArrayList<String> unpackFlags = new ArrayList<String>();

        // time for each product in milliseconds since epoch
        ArrayList<Long> productTimes = new ArrayList<Long>();

        // geo product IDs for each granule
        ArrayList<String> geoProductIDs = new ArrayList<String>();

        // aggregations will use sets of NetCDFFile readers
        ArrayList<NetCDFFile> ncdfal = new ArrayList<NetCDFFile>();

        // we should be able to find an XML Product Profile for each data/product type
        NPPProductProfile nppPP = null;

        sdf.setTimeZone(TimeZone.getTimeZone("GMT"));

        try {

            nppPP = new NPPProductProfile();

            // for each source file provided, get the nominal time
            for (int fileCount = 0; fileCount < sources.size(); fileCount++) {
                // need to open the main NetCDF file to determine the geolocation product
                NetcdfFile ncfile = null;
                String fileAbsPath = null;
                try {
                    fileAbsPath = (String) sources.get(fileCount);
                    logger.debug("Trying to open file: " + fileAbsPath);
                    ncfile = NetcdfFile.open(fileAbsPath);
                    if (!isCombinedProduct) {
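                        // Stand-alone (non-combined) product granule: the N_GEO_Ref global
                        // attribute identifies the matching geolocation granule, either as a
                        // complete .h5 file name or as a prefix that maps to a GEO product ID.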
                        ucar.nc2.Attribute a = ncfile.findGlobalAttribute("N_GEO_Ref");
                        logger.debug("Value of GEO global attribute: " + a.getStringValue());
                        String tmpGeoProductID = null;
                        if (a.getStringValue().endsWith("h5")) {
                            tmpGeoProductID = a.getStringValue();
                        } else {
                            tmpGeoProductID = JPSSUtilities.mapGeoRefToProductID(a.getStringValue());
                        }
                        logger.debug("Value of corresponding Product ID: " + tmpGeoProductID);
                        geoProductIDs.add(tmpGeoProductID);
                    }
                    Group rg = ncfile.getRootGroup();

                    logger.debug("Root group name: " + rg.getName());
                    List<Group> gl = rg.getGroups();
                    if (gl != null) {
                        for (Group g : gl) {
                            logger.debug("Group name: " + g.getName());
                            // when we find the Data_Products group, go down another group level and pull out
                            // what we will use for nominal day and time (for now anyway).
                            // XXX TJJ fileCount check is so we don't count the GEO file in time array!
                            if (g.getName().contains("Data_Products") && (fileCount != sources.size())) {
                                boolean foundDateTime = false;
                                List<Group> dpg = g.getGroups();

                                // cycle through once looking for XML Product Profiles
                                for (Group subG : dpg) {

                                    // determine the instrument name (VIIRS, ATMS, CrIS)
                                    instrumentName = subG.findAttribute("Instrument_Short_Name");

                                    // This is also where we find the attribute which tells us which
                                    // XML Product Profile to use!
                                    ucar.nc2.Attribute axpp = subG.findAttribute("N_Collection_Short_Name");
                                    if (axpp != null) {
                                        System.err.println("XML Product Profile N_Collection_Short_Name: " + axpp.getStringValue());
                                        String baseName = axpp.getStringValue();
                                        productName = baseName;
                                        String productProfileFileName = nppPP.getProfileFileName(baseName);
                                        logger.trace("Found profile: " + productProfileFileName);
                                        if (productProfileFileName == null) {
                                            throw new Exception("XML Product Profile not found in catalog");
                                        }
                                        try {
                                            nppPP.addMetaDataFromFile(productProfileFileName);
                                        } catch (Exception nppppe) {
                                            logger.error("Error parsing XML Product Profile: " + productProfileFileName);
                                            throw new Exception("XML Product Profile Error");
                                        }
                                    }
                                }

                                // 2nd pass through sub-group to extract date/time for aggregation
                                for (Group subG : dpg) {
                                    List<Variable> vl = subG.getVariables();
                                    for (Variable v : vl) {
                                        ucar.nc2.Attribute aDate = v.findAttribute("AggregateBeginningDate");
                                        ucar.nc2.Attribute aTime = v.findAttribute("AggregateBeginningTime");
                                        // did we find the attributes we are looking for?
                                        if ((aDate != null) && (aTime != null)) {
                                            String sDate = aDate.getStringValue();
                                            String sTime = aTime.getStringValue();
                                            logger.debug("For day/time, using: " + sDate + sTime.substring(0, sTime.indexOf('Z') - 3));
                                            Date d = sdf.parse(sDate + sTime.substring(0, sTime.indexOf('Z') - 3));
                                            productTimes.add(new Long(d.getTime()));
                                            logger.debug("ms since epoch: " + d.getTime());
                                            foundDateTime = true;
                                            break;
                                        }
                                    }
                                    if (foundDateTime) break;
                                }
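                                // a granule with no AggregateBeginningDate/Time pair cannot be
                                // placed on the aggregation timeline, so treat that as fatal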
                                if (!foundDateTime) {
                                    throw new VisADException("No date/time found in NPP granule");
                                }
                            }
                        }
                    }
                } catch (Exception e) {
                    logger.debug("Exception during processing of file: " + fileAbsPath);
                    throw (e);
                } finally {
                    if (ncfile != null) {
                        ncfile.close();
                    }
                }
            }

            for (Long l : productTimes) {
                logger.debug("Product time: " + l);
            }

            // build each union aggregation element
            for (int elementNum = 0; elementNum < sources.size(); elementNum++) {
                String s = (String) sources.get(elementNum);

                // build an XML (NCML actually) representation of the union aggregation of these files
                Namespace ns = Namespace.getNamespace("http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2");
                org.jdom.Element root = new org.jdom.Element("netcdf", ns);
                org.jdom.Document document = new org.jdom.Document(root);

                org.jdom.Element agg = new org.jdom.Element("aggregation", ns);
                agg.setAttribute("type", "union");

                org.jdom.Element fData = new org.jdom.Element("netcdf", ns);
                fData.setAttribute("location", s);
                agg.addContent(fData);

                if (!isCombinedProduct) {
                    org.jdom.Element fGeo = new org.jdom.Element("netcdf", ns);

                    String geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1);
                    // check if we have the whole file name or just the prefix
                    String geoProductID = geoProductIDs.get(elementNum);
                    if (geoProductID.endsWith("h5")) {
                        geoFilename += geoProductID;
                    } else {
                        geoFilename += geoProductID;
                        geoFilename += s.substring(s.lastIndexOf(File.separatorChar) + 6);
                    }
                    logger.debug("Cobbled together GEO file name: " + geoFilename);
                    fGeo.setAttribute("location", geoFilename);
                    agg.addContent(fGeo);
                }

                root.addContent(agg);
                XMLOutputter xmlOut = new XMLOutputter();
                String ncmlStr = xmlOut.outputString(document);
                ByteArrayInputStream is = new ByteArrayInputStream(ncmlStr.getBytes());
                MultiDimensionReader netCDFReader = new NetCDFFile(is);

                // let's try and look through the NetCDF reader and see what we can learn...
                NetcdfFile ncdff = ((NetCDFFile) netCDFReader).getNetCDFFile();

                Group rg = ncdff.getRootGroup();

                List<Group> gl = rg.getGroups();
                if (gl != null) {
                    for (Group g : gl) {
                        logger.debug("Group name: " + g.getName());
                        // XXX just temporary - we are looking through All_Data, finding displayable data
                        if (g.getName().contains("All_Data")) {
                            List<Group> adg = g.getGroups();
                            // again, iterate through
                            for (Group subG : adg) {
                                logger.debug("Sub group name: " + subG.getName());
                                String subName = subG.getName();
                                if (subName.contains("-GEO")) {
                                    // this is the geolocation data
                                    List<Variable> vl = subG.getVariables();
                                    for (Variable v : vl) {
                                        if (v.getName().contains("Latitude")) {
                                            pathToLat = v.getName();
                                            logger.debug("Lat/Lon Variable: " + v.getName());
                                        }
                                        if (v.getName().contains("Longitude")) {
                                            pathToLon = v.getName();
                                            logger.debug("Lat/Lon Variable: " + v.getName());
                                        }
                                    }
                                } else {
                                    // this is the product data
                                    List<Variable> vl = subG.getVariables();
                                    for (Variable v : vl) {
                                        boolean useThis = false;
                                        String vName = v.getName();
                                        logger.debug("Variable: " + vName);
                                        String varShortName = vName.substring(vName.lastIndexOf(SEPARATOR_CHAR) + 1);

                                        // skip Quality Flags for now.
                                        // XXX TJJ - should we show these? if so, note they sometimes
                                        // have different dimensions than the main variables.
                                        // For example, on high res bands QFs are 768 x 3200 while vars are 1536 x 6400
                                        if (varShortName.startsWith("QF")) {
                                            continue;
                                        }

                                        // for the CrIS instrument, only take real calibrated values for now
                                        logger.debug("INSTRUMENT NAME: " + instrumentName);
                                        if (instrumentName.getStringValue().equals("CrIS")) {
                                            if (!varShortName.startsWith(crisFilter)) {
                                                logger.debug("Skipping variable: " + varShortName);
                                                continue;
                                            }
                                        }

                                        logger.debug("Variable prefix for finding Factors: " + varShortName);
                                        DataType dt = v.getDataType();
                                        if ((dt.getSize() != 4) && (dt.getSize() != 2) && (dt.getSize() != 1)) {
                                            logger.debug("Skipping data of size: " + dt.getSize());
                                            continue;
                                        }
                                        List al = v.getAttributes();

                                        List<Dimension> dl = v.getDimensions();
                                        if (dl.size() > 4) {
                                            logger.debug("Skipping data of dimension: " + dl.size());
                                            continue;
                                        }
                                        boolean xScanOk = false;
                                        boolean yScanOk = false;
                                        boolean zScanOk = false;
                                        for (Dimension d : dl) {
                                            // in order to consider this a displayable product, make sure
                                            // both scan direction dimensions are present and look like a granule
                                            for (int xIdx = 0; xIdx < XSCAN_POSSIBILITIES.length; xIdx++) {
                                                if (d.getLength() == XSCAN_POSSIBILITIES[xIdx]) {
                                                    xScanOk = true;
                                                    break;
                                                }
                                            }
                                            for (int yIdx = 0; yIdx < YSCAN_POSSIBILITIES.length; yIdx++) {
                                                if (d.getLength() == YSCAN_POSSIBILITIES[yIdx]) {
                                                    yScanOk = true;
                                                    inTrackDimensionLength = YSCAN_POSSIBILITIES[yIdx];
                                                    break;
                                                }
                                            }
                                            for (int zIdx = 0; zIdx < ZSCAN_POSSIBILITIES.length; zIdx++) {
                                                if (d.getLength() == ZSCAN_POSSIBILITIES[zIdx]) {
                                                    zScanOk = true;
                                                    break;
                                                }
                                            }
                                        }

                                        if (xScanOk && yScanOk) {
                                            useThis = true;
                                        }

                                        if (zScanOk) {
                                            is3D = true;
                                            hasChannelSelect = true;
                                            logger.info("Handling 3D data source!");
                                        }

                                        if (useThis) {
                                            // loop through the variable list again, looking for a corresponding "Factors"
                                            float scaleVal = 1f;
                                            float offsetVal = 0f;
                                            boolean unpackFlag = false;
                                            // XXX TJJ - this is NOT DETERMINISTIC! The spec in
                                            // CDFCB-X, Vol 5, page 8, is too vague, and there is
                                            // no SURE way to map variable name to scale/offset parameter
                                            //
                                            //   if static map has an entry for this variable name
                                            //     get the data, data1 = scale, data2 = offset
                                            //     create and poke attributes with this data
                                            //   endif

                                            String factorsVarName = JPSSUtilities.mapDataVarNameToFactorsName(varShortName);
                                            logger.info("Mapping: " + varShortName + " to: " + factorsVarName);
                                            if (factorsVarName != null) {
                                                for (Variable fV : vl) {
                                                    if (fV.getName().endsWith(factorsVarName)) {
                                                        logger.debug("Pulling scale and offset values from variable: " + fV.getName());
                                                        ucar.ma2.Array a = fV.read();
                                                        ucar.ma2.Index i = a.getIndex();
                                                        scaleVal = a.getFloat(i);
                                                        logger.debug("Scale value: " + scaleVal);
                                                        i.incr();
                                                        offsetVal = a.getFloat(i);
                                                        logger.debug("Offset value: " + offsetVal);
                                                        unpackFlag = true;
                                                        break;
                                                    }
                                                }
                                            }

                                            // poke in scale/offset attributes for now

                                            ucar.nc2.Attribute a1 = new ucar.nc2.Attribute("scale_factor", scaleVal);
                                            v.addAttribute(a1);
                                            ucar.nc2.Attribute a2 = new ucar.nc2.Attribute("add_offset", offsetVal);
                                            v.addAttribute(a2);

                                            // add valid range and fill value attributes here
                                            // try to fill in valid range
                                            if (nppPP != null) {
                                                String translatedName = JPSSUtilities.mapProdNameToProfileName(vName.substring(vName.lastIndexOf(SEPARATOR_CHAR) + 1));
                                                logger.debug("mapped name: " + translatedName);
                                                if (translatedName != null) {
                                                    String rangeMin = nppPP.getRangeMin(translatedName);
                                                    String rangeMax = nppPP.getRangeMax(translatedName);
                                                    logger.debug("range min: " + rangeMin);
                                                    logger.debug("range max: " + rangeMax);
                                                    int[] shapeArr = new int[] { 2 };
                                                    ArrayFloat af = new ArrayFloat(shapeArr);
                                                    try {
                                                        af.setFloat(0, Float.parseFloat(rangeMin));
                                                    } catch (NumberFormatException nfe) {
                                                        af.setFloat(0, (float) Integer.MIN_VALUE);
                                                    }
                                                    try {
                                                        af.setFloat(1, Float.parseFloat(rangeMax));
                                                    } catch (NumberFormatException nfe) {
                                                        af.setFloat(1, (float) Integer.MAX_VALUE);
                                                    }
                                                    ucar.nc2.Attribute rangeAtt = new ucar.nc2.Attribute("valid_range", af);
                                                    v.addAttribute(rangeAtt);

                                                    // check for and load fill values too...

                                                    // we need to check two places: first, the XML product profile
                                                    ArrayList<Float> fval = nppPP.getFillValues(translatedName);

                                                    // second, does the variable already have one defined?
                                                    // if there was already a fill value associated with this variable, make
                                                    // sure we bring that along for the ride too...
                                                    ucar.nc2.Attribute aFill = v.findAttribute("_FillValue");

                                                    // determine size of our fill value array
                                                    int fvArraySize = 0;
                                                    if (aFill != null) fvArraySize++;
                                                    if (!fval.isEmpty()) fvArraySize += fval.size();
                                                    int[] fillShape = new int[] { fvArraySize };

                                                    // allocate the array
                                                    ArrayFloat afFill = new ArrayFloat(fillShape);

                                                    // and FINALLY, fill it!
                                                    if (!fval.isEmpty()) {
                                                        for (int fillIdx = 0; fillIdx < fval.size(); fillIdx++) {
                                                            afFill.setFloat(fillIdx, fval.get(fillIdx));
                                                            logger.info("Adding fill value (from XML): " + fval.get(fillIdx));
                                                        }
                                                    }

                                                    if (aFill != null) {
                                                        Number n = aFill.getNumericValue();
                                                        // is the data unsigned?
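                                                        // if so, the stored byte/short fill value is really an
                                                        // unsigned quantity and must be widened to its unsigned
                                                        // int equivalent (e.g. byte -1 -> 255) before it can
                                                        // stand in for missing data in the unpacked floats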
                                                        ucar.nc2.Attribute aUnsigned = v.findAttribute("_Unsigned");
                                                        float fillValAsFloat = Float.NaN;
                                                        if (aUnsigned != null) {
                                                            if (aUnsigned.getStringValue().equals("true")) {
                                                                DataType fvdt = aFill.getDataType();
                                                                logger.info("Data String: " + aFill.toString());
                                                                logger.info("DataType primitive type: " + fvdt.getPrimitiveClassType());
                                                                // signed byte that needs conversion?
                                                                if (fvdt.getPrimitiveClassType() == byte.class) {
                                                                    fillValAsFloat = (float) Util.unsignedByteToInt(n.byteValue());
                                                                } else if (fvdt.getPrimitiveClassType() == short.class) {
                                                                    fillValAsFloat = (float) Util.unsignedShortToInt(n.shortValue());
                                                                } else {
                                                                    fillValAsFloat = n.floatValue();
                                                                }
                                                            }
                                                        }
                                                        afFill.setFloat(fvArraySize - 1, fillValAsFloat);
                                                        logger.info("Adding fill value (from variable): " + fillValAsFloat);
                                                    }
                                                    ucar.nc2.Attribute fillAtt = new ucar.nc2.Attribute("_FillValue", afFill);
                                                    v.addAttribute(fillAtt);
                                                }
                                            }

                                            ucar.nc2.Attribute aUnsigned = v.findAttribute("_Unsigned");
                                            if (aUnsigned != null) {
                                                logger.debug("_Unsigned attribute value: " + aUnsigned.getStringValue());
                                                unsignedFlags.add(aUnsigned.getStringValue());
                                            } else {
                                                unsignedFlags.add("false");
                                            }

                                            if (unpackFlag) {
                                                unpackFlags.add("true");
                                            } else {
                                                unpackFlags.add("false");
                                            }

                                            logger.debug("Adding product: " + v.getName());
                                            pathToProducts.add(v.getName());

                                        }
                                    }

                                }
                            }
                        }
                    }
                }

                ncdfal.add((NetCDFFile) netCDFReader);
            }

        } catch (Exception e) {
            logger.error("cannot create NetCDF reader for files selected");
            if (e.getMessage() != null && e.getMessage().equals("XML Product Profile Error")) {
                throw new VisADException("Unable to extract metadata from required XML Product Profile");
            }
        }

        // initialize the aggregation reader object
        try {
            nppAggReader = new GranuleAggregation(ncdfal, inTrackDimensionLength, "Track", "XTrack");
        } catch (Exception e) {
            throw new VisADException("Unable to initialize aggregation reader");
        }

        // make sure we found valid data
        if (pathToProducts.size() == 0) {
            throw new VisADException("No data found in files selected");
        }

        logger.debug("Number of adapters needed: " + pathToProducts.size());
        adapters = new MultiDimensionAdapter[pathToProducts.size()];
        Hashtable<String, String[]> properties = new Hashtable<String, String[]>();

        Iterator<String> iterator = pathToProducts.iterator();
        int pIdx = 0;
        while (iterator.hasNext()) {
            String pStr = (String) iterator.next();
            logger.debug("Working on adapter number " + (pIdx + 1));
            HashMap<String, Object> swathTable = SwathAdapter.getEmptyMetadataTable();
            HashMap<String, Object> spectTable = SpectrumAdapter.getEmptyMetadataTable();
            swathTable.put("array_name", pStr);
            swathTable.put("lon_array_name", pathToLon);
            swathTable.put("lat_array_name", pathToLat);
            swathTable.put("XTrack", "XTrack");
            swathTable.put("Track", "Track");
            swathTable.put("geo_Track", "Track");
            swathTable.put("geo_XTrack", "XTrack");
            swathTable.put("product_name", productName);

            // array_name common to spectrum table
            spectTable.put("array_name", pStr);
            spectTable.put("product_name", productName);
            logger.trace("Product Name: " + productName);

            if (is3D) {

                // 3D data is either ATMS or CrIS
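                // ATMS uses named Track/XTrack/Channel dimensions, while CrIS SDRs
                // come through with anonymous dim0..dim3 dimensions, so each
                // instrument fills in the swath/spectrum metadata tables differently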
                if ((instrumentName.getName() != null) &&
                        (instrumentName.getStringValue().equals("ATMS"))) {
                    //hasChannelSelect = true;
                    spectTable.put(SpectrumAdapter.channelIndex_name, "Channel");
                    swathTable.put(SpectrumAdapter.channelIndex_name, "Channel");

                    swathTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
                    swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                    swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
                    spectTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"});
                    spectTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                    spectTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});

                    spectTable.put(SpectrumAdapter.channelType, "wavelength");
                    spectTable.put(SpectrumAdapter.channels_name, "Channel");
                    spectTable.put(SpectrumAdapter.x_dim_name, "XTrack");
                    spectTable.put(SpectrumAdapter.y_dim_name, "Track");

                    int numChannels = JPSSUtilities.ATMSChannelCenterFrequencies.length;
                    float[] bandArray = new float[numChannels];
                    String[] bandNames = new String[numChannels];
                    for (int bIdx = 0; bIdx < numChannels; bIdx++) {
                        bandArray[bIdx] = JPSSUtilities.ATMSChannelCenterFrequencies[bIdx];
                        bandNames[bIdx] = "Channel " + (bIdx + 1);
                    }
                    spectTable.put(SpectrumAdapter.channelValues, bandArray);
                    spectTable.put(SpectrumAdapter.bandNames, bandNames);

                } else {
                    if (instrumentName.getStringValue().equals("CrIS")) {

                        swathTable.put("XTrack", "dim1");
                        swathTable.put("Track", "dim0");
                        swathTable.put("geo_XTrack", "dim1");
                        swathTable.put("geo_Track", "dim0");
                        swathTable.put("product_name", "CrIS_SDR");
                        swathTable.put(SpectrumAdapter.channelIndex_name, "dim3");
                        swathTable.put(SpectrumAdapter.FOVindex_name, "dim2");

                        spectTable.put(SpectrumAdapter.channelIndex_name, "dim3");
                        spectTable.put(SpectrumAdapter.FOVindex_name, "dim2");
                        spectTable.put(SpectrumAdapter.x_dim_name, "dim1");
                        spectTable.put(SpectrumAdapter.y_dim_name, "dim0");

                    } else {
                        // sorry, if we can't id the instrument, we can't display the data!
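                        // (only ATMS and CrIS are recognized as 3D instruments here)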
                        throw new VisADException("Unable to determine instrument name");
                    }
                }

            } else {
                swathTable.put("array_dimension_names", new String[] {"Track", "XTrack"});
                swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"});
                swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"});
            }

            swathTable.put("scale_name", "scale_factor");
            swathTable.put("offset_name", "add_offset");
            swathTable.put("fill_value_name", "_FillValue");
            logger.info("Setting range_name to: " + pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1));
            swathTable.put("range_name", pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1));
            spectTable.put("range_name", pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1));

            // set the valid range hash if data is available
            if (nppPP != null) {
                String translatedName = JPSSUtilities.mapProdNameToProfileName(pStr.substring(pStr.lastIndexOf(SEPARATOR_CHAR) + 1));
                if (translatedName != null) {
                    swathTable.put("valid_range", "valid_range");
                }
            }

            String unsignedAttributeStr = unsignedFlags.get(pIdx);
            if (unsignedAttributeStr.equals("true")) {
                swathTable.put("unsigned", unsignedAttributeStr);
            }

            String unpackFlagStr = unpackFlags.get(pIdx);
            if (unpackFlagStr.equals("true")) {
                swathTable.put("unpack", "true");
            }

            // For NPP data, do valid range check AFTER applying scale/offset
            swathTable.put("range_check_after_scaling", "true");

            // pass in a GranuleAggregation reader...
            if (is3D) {
                if (instrumentName.getStringValue().equals("ATMS")) {
                    adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable);
                    SpectrumAdapter sa = new SpectrumAdapter(nppAggReader, spectTable);
                    DataCategory.createCategory("MultiSpectral");
                    categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE");
                    MultiSpectralData msd = new MultiSpectralData((SwathAdapter) adapters[pIdx], sa,
                            "BrightnessTemperature", "BrightnessTemperature", "NPP", "ATMS");
                    msd.setInitialWavenumber(JPSSUtilities.ATMSChannelCenterFrequencies[0]);
                    multiSpectralData.add(msd);
                } else {
                    adapters[pIdx] = new CrIS_SDR_SwathAdapter(nppAggReader, swathTable);
                    CrIS_SDR_Spectrum csa = new CrIS_SDR_Spectrum(nppAggReader, spectTable);
                    DataCategory.createCategory("MultiSpectral");
                    categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE");
                    MultiSpectralData msd = new MultiSpectralData((CrIS_SDR_SwathAdapter) adapters[pIdx],
                            csa);
                    msd.setInitialWavenumber(csa.getInitialWavenumber());
                    multiSpectralData.add(msd);
                }
                if (pIdx == 0) {
                    defaultSubset = multiSpectralData.get(pIdx).getDefaultSubset();
                    try {
                        previewImage = multiSpectralData.get(pIdx).getImage(defaultSubset);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }

            } else {
                adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable);
                if (pIdx == 0) {
                    defaultSubset = adapters[pIdx].getDefaultSubset();
                }
                categories = DataCategory.parseCategories("IMAGE");
            }
            pIdx++;
        }

        setProperties(properties);
    }

    public void initAfterUnpersistence() {
        try {
            setup();
        } catch (Exception e) {
            logger.error("setup failed after unpersistence", e);
        }
    }

    /**
     * Make and insert the <code>DataChoice</code>-s for this
     * <code>DataSource</code>.
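     * CrIS and ATMS products are exposed as MultiSpectralData choices; all
     * other products get one <code>DataChoice</code> per adapter.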
     */

    public void doMakeDataChoices() {

        // special loop for CrIS and ATMS data
        if (multiSpectralData.size() > 0) {
            for (int k = 0; k < multiSpectralData.size(); k++) {
                MultiSpectralData adapter = multiSpectralData.get(k);
                DataChoice choice = null;
                try {
                    choice = doMakeDataChoice(k, adapter);
                    msdMap.put(choice.getName(), adapter);
                    addDataChoice(choice);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            return;
        }
        // all other data (VIIRS and 2D EDRs)
        if (adapters != null) {
            for (int idx = 0; idx < adapters.length; idx++) {
                DataChoice choice = null;
                try {
                    choice = doMakeDataChoice(idx, adapters[idx].getArrayName());
                } catch (Exception e) {
                    e.printStackTrace();
                    logger.error("doMakeDataChoice failed");
                }

                if (choice != null) {
                    addDataChoice(choice);
                }
            }
        }
    }

    private DataChoice doMakeDataChoice(int idx, String var) throws Exception {
        String name = var;
        DataSelection dataSel = new MultiDimensionSubset(defaultSubset);
        Hashtable subset = new Hashtable();
        subset.put(new MultiDimensionSubset(), dataSel);
        DirectDataChoice ddc = new DirectDataChoice(this, idx, name, name, categories, subset);
        return ddc;
    }

    private DataChoice doMakeDataChoice(int idx, MultiSpectralData adapter) throws Exception {
        String name = adapter.getName();
        DataSelection dataSel = new MultiDimensionSubset(defaultSubset);
        Hashtable subset = new Hashtable();
        subset.put(MultiDimensionSubset.key, dataSel);
        subset.put(MultiSpectralDataSource.paramKey, adapter.getParameter());
        DirectDataChoice ddc = new DirectDataChoice(this, new Integer(idx), name, name, categories, subset);
        ddc.setProperties(subset);
        return ddc;
    }

    /**
     * Check to see if this <code>NPPDataSource</code> is equal to the object
     * in question.
     * @param o object in question
     * @return true if they are the same or equivalent objects
     */

    public boolean equals(Object o) {
        if (!(o instanceof NPPDataSource)) {
            return false;
        }
        return (this == (NPPDataSource) o);
    }

    public MultiSpectralData getMultiSpectralData() {
        return multiSpectralData.get(0);
    }

    public MultiSpectralData getMultiSpectralData(DataChoice choice) {
        return msdMap.get(choice.getName());
    }

    public String getDatasetName() {
        return filename;
    }

    public void setDatasetName(String name) {
        filename = name;
    }

    public HashMap getSubsetFromLonLatRect(MultiDimensionSubset select, GeoSelection geoSelection) {
        GeoLocationInfo ginfo = geoSelection.getBoundingBox();
        return adapters[0].getSubsetFromLonLatRect(select.getSubset(), ginfo.getMinLat(), ginfo.getMaxLat(),
                ginfo.getMinLon(), ginfo.getMaxLon());
    }

    public synchronized Data getData(DataChoice dataChoice, DataCategory category,
            DataSelection dataSelection, Hashtable requestProperties)
            throws VisADException, RemoteException {
        return this.getDataInner(dataChoice, category, dataSelection, requestProperties);
    }

    protected Data getDataInner(DataChoice dataChoice, DataCategory category,
            DataSelection dataSelection, Hashtable requestProperties)
            throws VisADException, RemoteException {

        //- this hack keeps the HydraImageProbe from doing a getData()
        //- TODO: need to use categories?
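        //- a request carrying only prop.requester=MultiSpectral is assumed to
        //- come from the probe, hence the string comparison below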
        if (requestProperties != null) {
            if ((requestProperties.toString()).equals("{prop.requester=MultiSpectral}")) {
                return null;
            }
        }

        GeoLocationInfo ginfo = null;
        GeoSelection geoSelection = null;

        if ((dataSelection != null) && (dataSelection.getGeoSelection() != null)) {
            geoSelection = (dataSelection.getGeoSelection().getBoundingBox() != null)
                    ? dataSelection.getGeoSelection()
                    : dataChoice.getDataSelection().getGeoSelection();
        }

        if (geoSelection != null) {
            ginfo = geoSelection.getBoundingBox();
        }

        Data data = null;
        if (adapters == null) {
            return data;
        }

        MultiDimensionAdapter adapter = null;

        // pick the adapter with the same index as the current data choice
        int aIdx = 0;
        List<DataChoice> dcl = getDataChoices();
        for (DataChoice dc : dcl) {
            if (dc.equals(dataChoice)) {
                aIdx = dcl.indexOf(dc);
                break;
            }
        }

        logger.debug("Found dataChoice index: " + aIdx);
        adapter = adapters[aIdx];

        try {
            HashMap subset = null;
            if (ginfo != null) {
                logger.debug("getting subset from lat-lon rect...");
                subset = adapter.getSubsetFromLonLatRect(ginfo.getMinLat(), ginfo.getMaxLat(),
                        ginfo.getMinLon(), ginfo.getMaxLon(),
                        geoSelection.getXStride(),
                        geoSelection.getYStride(),
                        geoSelection.getZStride());
            } else {

                MultiDimensionSubset select = null;
                Hashtable table = dataChoice.getProperties();
                Enumeration keys = table.keys();
                while (keys.hasMoreElements()) {
                    Object key = keys.nextElement();
                    logger.debug("Key: " + key.toString());
                    if (key instanceof MultiDimensionSubset) {
                        select = (MultiDimensionSubset) table.get(key);
                    }
                }
                subset = select.getSubset();
                logger.debug("Subset size: " + subset.size());

                if (dataSelection != null) {
                    Hashtable props = dataSelection.getProperties();
                    if (props != null) {
                        if (props.containsKey(SpectrumAdapter.channelIndex_name)) {
                            logger.debug("Props contains channel index key...");
                            double[] coords = (double[]) subset.get(SpectrumAdapter.channelIndex_name);
                            int idx = ((Integer) props.get(SpectrumAdapter.channelIndex_name)).intValue();
                            coords[0] = (double) idx;
                            coords[1] = (double) idx;
                            coords[2] = 1.0;
                        }
                    }
                }
            }

            if (subset != null) {
                data = adapter.getData(subset);
                data = applyProperties(data, requestProperties, subset, aIdx);
            }
        } catch (Exception e) {
            e.printStackTrace();
            logger.error("getData exception e=" + e);
        }
        return data;
    }

    protected Data applyProperties(Data data, Hashtable requestProperties, HashMap subset, int adapterIndex)
            throws VisADException, RemoteException {
        Data new_data = data;

        if (requestProperties == null) {
            new_data = data;
            return new_data;
        }

        if (requestProperties.containsKey("medianFilter")) {
            String[] items = (String[]) requestProperties.get("medianFilter");
            double window_lenx = Double.parseDouble(items[0]);
            double window_leny = Double.parseDouble(items[1]);
            GriddedSet domainSet = (GriddedSet) ((FlatField) data).getDomainSet();
            int[] lens = domainSet.getLengths();
            float[] range_values = (((FlatField) data).getFloats())[0];
            range_values = ProfileAlongTrack.medianFilter(range_values, lens[0], lens[1],
                    (int) window_lenx, (int) window_leny);
            ((FlatField) new_data).setSamples(new float[][] {range_values});
        }
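        // setBelowSfcMissing: use the surface elevation from the track adapter
        // to blank out (NaN) profile samples at or below ground level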
        if (requestProperties.containsKey("setBelowSfcMissing")) {
            String[] items = (String[]) requestProperties.get("setBelowSfcMissing");
            FlatField track = (FlatField) track_adapter.getData(subset);
            float[] sfcElev = (track.getFloats())[0];
            FlatField field = (FlatField) new_data;
            GriddedSet gset = (GriddedSet) field.getDomainSet();
            float[][] samples = gset.getSamples(false);
            int[] lens = gset.getLengths();
            float[] range_values = (field.getFloats())[0];

            int trkIdx = ((ProfileAlongTrack3D) adapters[adapterIndex]).adapter2D.getTrackTupIdx();
            int vrtIdx = ((ProfileAlongTrack3D) adapters[adapterIndex]).adapter2D.getVertTupIdx();

            int k = 0;
            for (int j = 0; j < lens[trkIdx]; j++) {
                float val = sfcElev[j];
                for (int i = 0; i < lens[vrtIdx]; i++) {
                    if (vrtIdx < trkIdx) k = i + j * lens[0];
                    if (trkIdx < vrtIdx) k = j + i * lens[0];
                    if (samples[2][k] <= val || samples[2][k] < 0.0) {
                        range_values[k] = Float.NaN;
                    }
                }
            }
            field.setSamples(new float[][] {range_values});
        }
        return new_data;
    }

    protected void initDataSelectionComponents(
            List<DataSelectionComponent> components,
            final DataChoice dataChoice) {

        if (System.getProperty("os.name").equals("Mac OS X") && hasImagePreview && hasChannelSelect) {
            try {
                components.add(new ImageChannelSelection(new PreviewSelection(dataChoice, previewImage, null),
                        new ChannelSelection(dataChoice)));
            } catch (Exception e) {
                e.printStackTrace();
            }
        } else {
            if (hasImagePreview) {
                try {
                    FlatField image = (FlatField) dataChoice.getData(null);
                    components.add(new PreviewSelection(dataChoice, image, null));
                } catch (Exception e) {
                    logger.error("Can't make PreviewSelection: " + e);
                    e.printStackTrace();
                }
            }
            if (hasChannelSelect) {
                try {
                    components.add(new ChannelSelection(dataChoice));
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }

    }

}