001 /*
002 * This file is part of McIDAS-V
003 *
004 * Copyright 2007-2013
005 * Space Science and Engineering Center (SSEC)
006 * University of Wisconsin - Madison
007 * 1225 W. Dayton Street, Madison, WI 53706, USA
008 * https://www.ssec.wisc.edu/mcidas
009 *
010 * All Rights Reserved
011 *
012 * McIDAS-V is built on Unidata's IDV and SSEC's VisAD libraries, and
013 * some McIDAS-V source code is based on IDV and VisAD source code.
014 *
015 * McIDAS-V is free software; you can redistribute it and/or modify
016 * it under the terms of the GNU Lesser Public License as published by
017 * the Free Software Foundation; either version 3 of the License, or
018 * (at your option) any later version.
019 *
020 * McIDAS-V is distributed in the hope that it will be useful,
021 * but WITHOUT ANY WARRANTY; without even the implied warranty of
022 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
023 * GNU Lesser Public License for more details.
024 *
025 * You should have received a copy of the GNU Lesser Public License
026 * along with this program. If not, see http://www.gnu.org/licenses.
027 */
028
029 package edu.wisc.ssec.mcidasv.data.hydra;
030
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import edu.wisc.ssec.mcidasv.data.QualityFlag;

import ucar.ma2.Array;
import ucar.ma2.DataType;
import ucar.ma2.Index;
import ucar.ma2.IndexIterator;
import ucar.ma2.Range;

import ucar.nc2.Attribute;
import ucar.nc2.Dimension;
import ucar.nc2.NetcdfFile;
import ucar.nc2.Structure;
import ucar.nc2.Variable;
053
054 /**
055 * Provides a view and operations on a set of contiguous data granules as if they
056 * were a single granule.
057 *
058 * This file needs to implement the same signatures NetCDFFile does,
059 * but for aggregations of consecutive granules.
060 *
061 * @author tommyj
062 *
063 */
064
065 public class GranuleAggregation implements MultiDimensionReader {
066
067 private static final Logger logger = LoggerFactory.getLogger(GranuleAggregation.class);
068
069 // this structure holds the NcML readers that get passed in
070 ArrayList<NetcdfFile> nclist = new ArrayList<NetcdfFile>();
071
072 // this holds the MultiDimensionReaders, here NetCDFFile
073 ArrayList<NetCDFFile> ncdfal = null;
074
075 // need an ArrayList for each variable hashmap structure
076 ArrayList<HashMap<String, Variable>> varMapList = new ArrayList<HashMap<String, Variable>>();
077 ArrayList<HashMap<String, String[]>> varDimNamesList = new ArrayList<HashMap<String, String[]>>();
078 ArrayList<HashMap<String, Class>> varDataTypeList = new ArrayList<HashMap<String, Class>>();
079
080 // map of granule index and granule in-track length for each variable
081 HashMap<String, HashMap<Integer, Integer>> varGranInTrackLengths = new HashMap<String, HashMap<Integer, Integer>>();
082 HashMap<String, int[]> varAggrDimLengths = new HashMap<String, int[]>();
083
084 // this object is used to handle granules like VIIRS Imagery EDRs, where scan
085 // gaps of varying sizes and locations in the granule must be removed. If
086 // present, an initial read with these "cut" ranges will be done before subsetting
087 HashMap<Integer, ArrayList<Range>> granCutRanges = new HashMap<Integer, ArrayList<Range>>();
088 HashMap<Integer, Integer> granCutScans = new HashMap<Integer, Integer>();
089
090 // except quality flags - only need one hashmap per aggregation
091 // it maps the broken out variable name back to the original packed variable name
092 HashMap<String, QualityFlag> qfMap = null;
093
094 // variable can have bulk array processor set by the application
095 HashMap<String, RangeProcessor> varToRangeProcessor = new HashMap<String, RangeProcessor>();
096
097 private int granuleCount = -1;
098 private String inTrackDimensionName = null;
099 private String inTrackGeoDimensionName = null;
100 private String crossTrackDimensionName = null;
101 private LinkedHashSet<String> products;
102 private String origName = null;
103 private boolean isEDR = false;
104
/**
 * Create an aggregation over a set of consecutive granules.
 *
 * @param ncdfal readers for the granules, one per granule, in along-track order
 * @param products variable names the caller wants exposed (Latitude/Longitude
 *        are always passed through for navigation — see init())
 * @param inTrackDimensionName name of the in-track (scan) dimension for data variables
 * @param inTrackGeoDimensionName name of the in-track dimension for geolocation variables
 * @param crossTrackDimensionName name of the cross-track dimension
 * @param isEDR true for granules (e.g. VIIRS EDRs) that may contain fill scans
 *        which must be detected and cut during init()
 * @throws Exception if ncdfal is null, or initialization of the granule set fails
 */
public GranuleAggregation(ArrayList<NetCDFFile> ncdfal, LinkedHashSet<String> products,
        String inTrackDimensionName, String inTrackGeoDimensionName,
        String crossTrackDimensionName, boolean isEDR) throws Exception {
    if (ncdfal == null) throw new Exception("No data: empty Suomi NPP aggregation object");
    this.inTrackDimensionName = inTrackDimensionName;
    this.crossTrackDimensionName = crossTrackDimensionName;
    this.inTrackGeoDimensionName = inTrackGeoDimensionName;
    this.ncdfal = ncdfal;
    this.products = products;
    this.isEDR = isEDR;
    init(ncdfal);
}

/**
 * Convenience constructor: non-EDR aggregation with distinct geolocation
 * in-track dimension name.
 */
public GranuleAggregation(ArrayList<NetCDFFile> ncdfal, LinkedHashSet<String> products,
        String inTrackDimensionName, String inTrackGeoDimensionName,
        String crossTrackDimensionName) throws Exception {
    this(ncdfal, products, inTrackDimensionName, inTrackGeoDimensionName, crossTrackDimensionName, false);
}

/**
 * Convenience constructor: non-EDR aggregation where geolocation variables
 * share the data variables' in-track dimension name.
 */
public GranuleAggregation(ArrayList<NetCDFFile> ncdfal, LinkedHashSet<String> products,
        String inTrackDimensionName, String crossTrackDimensionName) throws Exception {
    this(ncdfal, products, inTrackDimensionName, inTrackDimensionName, crossTrackDimensionName, false);
}

/**
 * Convenience constructor: aggregation where geolocation variables share the
 * data variables' in-track dimension name, with explicit EDR flag.
 */
public GranuleAggregation(ArrayList<NetCDFFile> ncdfal, LinkedHashSet<String> products,
        String inTrackDimensionName, String crossTrackDimensionName, boolean isEDR) throws Exception {
    this(ncdfal, products, inTrackDimensionName, inTrackDimensionName, crossTrackDimensionName, isEDR);
}
133
134 public Class getArrayType(String array_name) {
135 array_name = mapNameIfQualityFlag(array_name);
136 return varDataTypeList.get(0).get(array_name);
137 }
138
139 public String[] getDimensionNames(String array_name) {
140 array_name = mapNameIfQualityFlag(array_name);
141 return varDimNamesList.get(0).get(array_name);
142 }
143
144 public int[] getDimensionLengths(String array_name) {
145 array_name = mapNameIfQualityFlag(array_name);
146 logger.debug("For var " + array_name + ", sending back dim len: " + varAggrDimLengths.get(array_name));
147 return varAggrDimLengths.get(array_name);
148 }
149
150 private String mapNameIfQualityFlag(String array_name) {
151 // only applies if name is from a packed quality flag
152 // we pull data from the "mapped" variable name, a packed byte
153 if (qfMap != null) {
154 logger.debug("mapNameIfQualityFlag, checking key: " + array_name);
155 if (qfMap.containsKey(array_name)) {
156 origName = array_name;
157 QualityFlag qf = qfMap.get(array_name);
158 String mappedName = qf.getPackedName();
159 logger.debug("Mapped to: " + mappedName);
160 return mappedName;
161 }
162 }
163 return array_name;
164 }
165
/**
 * Whether this aggregation is over EDR granules (which may contain fill
 * scans that were cut during init()).
 *
 * @return the isEDR flag
 */
public boolean isEDR() {
    return isEDR;
}

/**
 * Set the EDR flag. NOTE(review): changing this after construction does not
 * re-run init(), so previously computed cut ranges are unaffected.
 *
 * @param isEDR the isEDR flag to set
 */
public void setEDR(boolean isEDR) {
    this.isEDR = isEDR;
}
179
// Typed read entry points required by MultiDimensionReader. Each delegates
// to readArray(), which handles the cross-granule subsetting, and casts the
// result; the caller is responsible for asking for the type that matches the
// variable (after any RangeProcessor, which produces float[] — see readArray).

public float[] getFloatArray(String array_name, int[] start, int[] count, int[] stride) throws Exception {
    return (float[]) readArray(array_name, start, count, stride);
}

public int[] getIntArray(String array_name, int[] start, int[] count, int[] stride) throws Exception {
    return (int[]) readArray(array_name, start, count, stride);
}

public double[] getDoubleArray(String array_name, int[] start, int[] count, int[] stride) throws Exception {
    return (double[]) readArray(array_name, start, count, stride);
}

public short[] getShortArray(String array_name, int[] start, int[] count, int[] stride) throws Exception {
    return (short[]) readArray(array_name, start, count, stride);
}

public byte[] getByteArray(String array_name, int[] start, int[] count, int[] stride) throws Exception {
    return (byte[]) readArray(array_name, start, count, stride);
}

// Untyped variant: returns whatever primitive array readArray produced.
public Object getArray(String array_name, int[] start, int[] count, int[] stride) throws Exception {
    return readArray(array_name, start, count, stride);
}
203
204 public HDFArray getGlobalAttribute(String attr_name) throws Exception {
205 throw new Exception("GranuleAggregation.getGlobalAttributes: Unimplemented");
206 }
207
208 public HDFArray getArrayAttribute(String array_name, String attr_name) throws Exception {
209 Variable var = varMapList.get(0).get(array_name);
210 if (var == null) return null;
211
212 Attribute attr = var.findAttribute(attr_name);
213 if (attr == null) return null;
214
215 Array attrVals = attr.getValues();
216 DataType dataType = attr.getDataType();
217 Object array = attrVals.copyTo1DJavaArray();
218
219 HDFArray harray = null;
220
221 if (dataType.getPrimitiveClassType() == Float.TYPE) {
222 harray = HDFArray.make((float[])array);
223 }
224 else if (dataType.getPrimitiveClassType() == Double.TYPE) {
225 harray = HDFArray.make((double[])array);
226 }
227 else if (dataType == DataType.STRING) {
228 harray = HDFArray.make((String[])array);
229 }
230 else if (dataType.getPrimitiveClassType() == Short.TYPE) {
231 harray = HDFArray.make((short[])array);
232 }
233 else if (dataType.getPrimitiveClassType() == Integer.TYPE) {
234 harray = HDFArray.make((int[])array);
235 }
236 return harray;
237 }
238
239 public void close() throws Exception {
240 // close each NetCDF file
241 for (NetcdfFile n : nclist) {
242 n.close();
243 }
244 }
245
/**
 * Initialize the aggregation: collect the NetcdfFile handles, detect and
 * record EDR fill ("cut") scans per granule, then build per-granule maps of
 * variable -> Variable/dimension-names/data-type and accumulate the
 * aggregated in-track dimension lengths.
 *
 * Left byte-identical; review notes are flagged inline.
 *
 * @param ncdfal the granule readers, in along-track order
 * @throws Exception on any read/Range construction failure
 */
private void init(ArrayList<NetCDFFile> ncdfal) throws Exception {

    logger.debug("init in...");
    // make a NetCDFFile object from the NcML for each granule
    for (NetCDFFile n : ncdfal) {
        logger.debug("loading another NetCDF file from NcML...");
        NetcdfFile ncfile = n.getNetCDFFile();
        nclist.add(ncfile);
    }

    granuleCount = nclist.size();
    logger.debug("Granule count: " + granuleCount);

    // All files do NOT have the same structure, so need to look at each ncfile
    // For ex, some MODIS granules have slightly different in-track and along-track
    // lengths

    // === Pass 1: per granule, size the bookkeeping maps and (for EDRs) scan
    // the Latitude grid for fill lines, recording "cut" Ranges to skip them.
    NetcdfFile ncfile = null;
    for (int ncIdx = 0; ncIdx < nclist.size(); ncIdx++) {

        // good place to initialize the cut Range ArrayList for each granule
        Integer granuleIndex = new Integer(ncIdx);
        ArrayList<Range> al = new ArrayList<Range>();
        granCutRanges.put(granuleIndex, al);
        int cutScanCount = 0;

        ncfile = nclist.get(ncIdx);

        Iterator<Variable> varIter = ncfile.getVariables().iterator();
        while (varIter.hasNext()) {
            Variable var = varIter.next();
            logger.debug("Variable " + var.getShortName() + ", Rank: " + var.getRank());
            varAggrDimLengths.put(var.getFullName(), new int[var.getRank()]);
            varGranInTrackLengths.put(var.getFullName(), new HashMap<Integer, Integer>());

            // Here, let's try to check the data for EDR fill lines
            // and if found, try to handle it by simply adjusting the dimensions
            // for this granule. Sound like a plan? We'll see...

            if (isEDR) {

                logger.debug("IS an EDR, need to look for fill scans...");
                // look through lat grid, look for missing scans
                String varName = var.getShortName();
                if (varName.endsWith("Latitude")) {
                    // iterate through the scan lines, looking for fill lines
                    // NOTE: we only need to check the first column! so set
                    // up an appropriate Range to cut the read down significantly
                    int[] shape = var.getShape();
                    ArrayList<Range> alr = new ArrayList<Range>();
                    alr.add(new Range(0, shape[0] - 1, 1));
                    alr.add(new Range(0, 1, 1));
                    Array a = var.read(alr);
                    logger.debug("Lat shape: " + shape[0] + " by " + shape[1]);
                    int scanLength = shape[1];
                    Index index = a.getIndex();
                    float fVal = 0.0f;

                    int rangeOffset = 1;
                    int rangeCount = 0;
                    boolean prvScanWasCut = false;
                    boolean needClosingRange = false;
                    boolean hadCutRanges = false;
                    boolean someMissing = false;

                    for (int i = 0; i < shape[0]; i++) {

                        someMissing = false;
                        // latitude below -90 is physically impossible -> fill scan
                        fVal = a.getFloat(index.set(i, 0));
                        if (fVal < -90.0f) {
                            someMissing = true;
                        }

                        if (someMissing) {
                            hadCutRanges = true;
                            cutScanCount++;
                            logger.debug("Found a cut scan " + (i + 1)
                                    + ", last val: " + fVal);
                            if ((prvScanWasCut) || (i == 0)) {
                                // consecutive cut scans (or a cut at the very start):
                                // just push the start of the next keep-Range forward
                                if (i == 0) {
                                    rangeOffset = 1;
                                } else {
                                    rangeOffset = i + 2;
                                }
                            } else {
                                // end of a run of good scans: emit a 2D keep-Range
                                try {
                                    // We are using 2D ranges
                                    logger.debug("Adding Range: " + rangeOffset
                                            + ", " + i + ", 1");
                                    al.add(new Range(rangeOffset, i, 1));
                                    // NOTE(review): log says "1" but the Range added
                                    // below starts at 0 — message and code disagree.
                                    logger.debug("Adding Range: " + 1 + ", "
                                            + (scanLength - 1) + ", 1");
                                    al.add(new Range(0, scanLength - 1, 1));
                                } catch (Exception e) {
                                    e.printStackTrace();
                                }
                                rangeCount = 0;
                                rangeOffset = i + 1;
                            }
                            prvScanWasCut = true;
                        } else {
                            prvScanWasCut = false;
                            rangeCount += scanLength;
                        }

                        // check to see if closing Range needed, good data at end
                        // NOTE(review): this compares the scan index i against the
                        // CROSS-track length (scanLength == shape[1]); it looks like
                        // it should be (i == shape[0] - 1) — confirm against data.
                        if ((! prvScanWasCut) && (i == (scanLength - 1))) {
                            needClosingRange = true;
                        }
                    }

                    if (needClosingRange) {
                        // We are using 2D ranges
                        // NOTE(review): the closing Range length is based on the full
                        // shape[0], not the remaining scans past rangeOffset — verify.
                        al.add(new Range(rangeOffset, rangeOffset + shape[0]
                                - 1, 1));
                        al.add(new Range(0, scanLength - 1, 1));
                        logger.debug("Adding closing cut Range, offs: "
                                + rangeOffset + ", len: " + rangeCount);
                    }

                    // if only one contiguous range, process as a normal clean granule
                    if (! hadCutRanges) {
                        al.clear();
                    }

                    granCutScans.put(granuleIndex, new Integer(cutScanCount));
                    logger.debug("Total scans cut this granule: "
                            + cutScanCount);

                }
            } else {
                granCutScans.put(granuleIndex, new Integer(0));
                logger.debug("is NOT an EDR, no need to check for fill scans...");
            }
        }
    }

    // === Pass 2: per granule, filter variables down to requested products
    // (plus Latitude/Longitude for navigation), record dimension names and
    // per-granule in-track lengths, and accumulate aggregated lengths.
    for (int ncIdx = 0; ncIdx < nclist.size(); ncIdx++) {

        ncfile = nclist.get(ncIdx);

        HashMap<String, Variable> varMap = new HashMap<String, Variable>();
        HashMap<String, String[]> varDimNames = new HashMap<String, String[]>();
        HashMap<String, Class> varDataType = new HashMap<String, Class>();

        Iterator<Variable> varIter = ncfile.getVariables().iterator();
        int varInTrackIndex = -1;
        while (varIter.hasNext()) {
            Variable var = (Variable) varIter.next();

            logger.debug("Working on variable: " + var.getFullName());

            boolean foundProduct = false;
            for (String s : products) {
                if (s.contains(var.getFullName())) {
                    logger.debug("Valid product: " + var.getFullName());
                    foundProduct = true;
                }
                // we'll also pass Lat and Lon, needed for nav
                if (var.getShortName().equals("Latitude")) {
                    foundProduct = true;
                }
                if (var.getShortName().equals("Longitude")) {
                    foundProduct = true;
                }
            }

            if (! foundProduct) {
                logger.debug("Skipping variable: " + var.getFullName());
                continue;
            }

            if (var instanceof Structure) {
                // simply skip these, applicable only to IASI far as I know
                continue;
            }

            int rank = var.getRank();

            // bypass any less-than-2D variables for now...
            if (rank < 2) {
                logger.debug("Skipping 1D variable: " + var.getFullName());
                continue;
            }

            String varName = var.getFullName();
            varMap.put(varName, var);
            Iterator<Dimension> dimIter = var.getDimensions().iterator();
            String[] dimNames = new String[rank];
            int[] dimLengths = new int[rank];
            int cnt = 0;
            boolean notDisplayable = false;
            varInTrackIndex = getInTrackIndex(var);

            while (dimIter.hasNext()) {
                Dimension dim = dimIter.next();
                String s = dim.getShortName();
                if ((s != null) && (!s.isEmpty())) {
                    // a variable whose FIRST dimension is none of the known
                    // in-track/cross-track/Band names (and isn't lat/lon) is
                    // not displayable imagery — skip it
                    if ((! s.equals(inTrackDimensionName)) &&
                            ((! s.startsWith("Band")) && (cnt == 0)) &&
                            (! varName.endsWith("Latitude")) &&
                            (! varName.endsWith("Longitude")) &&
                            (! s.equals(crossTrackDimensionName))) {
                        notDisplayable = true;
                        break;
                    }
                }
                String dimName = dim.getShortName();
                logger.debug("GranuleAggregation init, variable: " + varName + ", dimension name: " + dimName + ", length: " + dim.getLength());
                if (dimName == null) dimName = "dim" + cnt;
                dimNames[cnt] = dimName;
                dimLengths[cnt] = dim.getLength();
                cnt++;
            }

            // skip to next variable if it's not displayable data
            if (notDisplayable) continue;

            // adjust in-track dimension if needed (scans were cut)
            // NOTE(review): varInTrackIndex may still be -1 here — the < 0
            // guard is only applied further below, so a variable with an
            // unknown in-track dimension would throw AIOOBE on this line.
            int cutScans = granCutScans.get(ncIdx);
            dimLengths[varInTrackIndex] = dimLengths[varInTrackIndex] - cutScans;

            // XXX TJJ - can below block go away? Think so...
            int[] aggrDimLengths = varAggrDimLengths.get(varName);
            for (int i = 0; i < rank; i++) {
                if (i == varInTrackIndex) {
                    // in-track lengths accumulate across granules
                    aggrDimLengths[i] += dimLengths[i];
                } else {
                    aggrDimLengths[i] = dimLengths[i];
                }
            }

            varDimNames.put(varName, dimNames);
            varDataType.put(varName, var.getDataType().getPrimitiveClassType());

            if (varInTrackIndex < 0) {
                logger.debug("Skipping variable with unknown dimension: " + var.getFullName());
                continue;
            }

            HashMap<Integer, Integer> granIdxToInTrackLen = varGranInTrackLengths.get(varName);
            granIdxToInTrackLen.put(ncIdx, new Integer(dimLengths[varInTrackIndex]));

            dimLengths[varInTrackIndex] = dimLengths[varInTrackIndex] * granuleCount;
            varDataType.put(varName, var.getDataType().getPrimitiveClassType());
        }

        // add the new hashmaps to our enclosing lists
        varMapList.add(varMap);
        varDimNamesList.add(varDimNames);
        varDataTypeList.add(varDataType);

    }
}
500
501 /**
502 * Based on the names of the variable dimensions, determine the in-track index
503 * @param dimNames names of dimensions - should match static strings in relevant classes
504 * @return correct index (0 or greater), or -1 if error
505 */
506
507 private int getInTrackIndex(Variable v) {
508
509 int index = -1;
510 boolean is2D = false;
511 boolean is3D = false;
512
513 String inTrackName = null;
514
515 // typical sanity check
516 if (v == null) return index;
517 logger.debug("getInTrackIndex called for variable: " + v.getShortName());
518
519 // lat/lon vars have different dimension names
520 if ((v.getFullName().endsWith("Latitude")) || (v.getFullName().endsWith("Longitude"))) {
521 if (v.getFullName().startsWith("All_Data")) {
522 inTrackName = inTrackDimensionName;
523 } else {
524 inTrackName = inTrackGeoDimensionName;
525 }
526 } else {
527 inTrackName = inTrackDimensionName;
528 }
529 // pull out the dimensions
530 List<Dimension> dList = v.getDimensions();
531
532 // right now, we only handle 2D and 3D variables.
533 // TJJ XXX it does get trickier, and we will have to expand this
534 // to deal with for example CrIS data...
535 int numDimensions = dList.size();
536 logger.debug("Number of dimensions: " + numDimensions);
537
538 // the only 4D data right now is CrIS, return 0
539 if (numDimensions == 4) return 0;
540
541 if ((numDimensions == 2) || (numDimensions == 3)) {
542 if (numDimensions == 2) is2D = true;
543 if (numDimensions == 3) is3D = true;
544 } else {
545 return index;
546 }
547
548 // if the data is 2D, we use the SwathAdapter class,
549 // if 3D, we use the SpectrumAdapter class
550 for (int i = 0; i < numDimensions; i++) {
551 if (is2D) {
552 // XXX TJJ - if empty name, in-track index is 0
553 if ((dList.get(i).getShortName() == null) || (dList.get(i).getShortName().isEmpty())) {
554 logger.warn("Empty dimension name!, assuming in-track dim is 0");
555 return 0;
556 }
557 if (dList.get(i).getShortName().equals(inTrackName)) {
558 index = i;
559 break;
560 }
561 }
562 if (is3D) {
563 // XXX TJJ - if empty name, in-track index is 0
564 if ((dList.get(i).getShortName() == null) || (dList.get(i).getShortName().isEmpty())) {
565 logger.warn("Empty dimension name!, assuming in-track dim is 0");
566 return 0;
567 }
568 if (dList.get(i).getShortName().equals(inTrackName)) {
569 index = i;
570 break;
571 }
572 }
573 }
574
575 // hopefully we found the right one
576 return index;
577 }
578
/**
 * Core read: map a subset request (start/count/stride per dimension) over the
 * aggregated in-track dimension onto the individual granules it spans, read
 * each granule's piece (applying any EDR cut ranges first), run each piece
 * through the variable's RangeProcessor if one is registered, and concatenate
 * the results into a single 1D primitive array.
 *
 * Left byte-identical; the per-granule offset arithmetic is order-sensitive,
 * so only review notes are added.
 *
 * @param array_name variable name (quality-flag names are mapped to the packed name)
 * @param start per-dimension start indices into the AGGREGATED array
 * @param count per-dimension element counts
 * @param stride per-dimension strides
 * @return 1D primitive array; float[] when a RangeProcessor is registered,
 *         otherwise the variable's own primitive type
 */
private synchronized Object readArray(String array_name, int[] start, int[] count, int[] stride) throws Exception {

    array_name = mapNameIfQualityFlag(array_name);
    // how many dimensions are we dealing with
    int dimensionCount = start.length;

    // pull out a representative variable so we can determine which index is in-track
    Variable vTmp = varMapList.get(0).get(array_name);
    int vInTrackIndex = getInTrackIndex(vTmp);

    int loGranuleId = 0;
    int hiGranuleId = 0;

    HashMap<Integer, Integer> granIdxToInTrackLen = varGranInTrackLengths.get(array_name);
    int numGrans = granIdxToInTrackLen.size();

    int[] vGranuleLengths = new int[numGrans];
    for (int k = 0; k < numGrans; k++) {
        vGranuleLengths[k] = granIdxToInTrackLen.get(k);
        logger.debug("readArray, gran len: " + vGranuleLengths[k] + ", scans cut: " + granCutScans.get(k));
    }

    // find the granules containing the first (strt) and last (stp)
    // requested in-track line
    int strt = start[vInTrackIndex];
    int stp = strt + (count[vInTrackIndex] - 1) * stride[vInTrackIndex];
    int cnt = 0;
    for (int k = 0; k < numGrans; k++) {
        int granLen = granIdxToInTrackLen.get(k);
        cnt += granLen;
        if (strt < cnt) {
            loGranuleId = k;
            break;
        }
    }

    cnt = 0;
    for (int k = 0; k < numGrans; k++) {
        int granLen = granIdxToInTrackLen.get(k);
        cnt += granLen;
        if (stp < cnt) {
            hiGranuleId = k;
            break;
        }
    }
    logger.debug("loGranuleId: " + loGranuleId);
    logger.debug("hiGranuleId: " + hiGranuleId);


    // next, we break out the offsets, counts, and strides for each granule
    int granuleSpan = hiGranuleId - loGranuleId + 1;

    logger.debug("readArray req, loGran: " + loGranuleId + ", hiGran: " +
            hiGranuleId + ", granule span: " + granuleSpan + ", dimCount: " + dimensionCount);

    for (int i = 0; i < dimensionCount; i++) {
        logger.debug("start[" + i + "]: " + start[i]);
        logger.debug("count[" + i + "]: " + count[i]);
        logger.debug("stride[" + i + "]: " + stride[i]);
    }

    int [][] startSet = new int [granuleSpan][dimensionCount];
    int [][] countSet = new int [granuleSpan][dimensionCount];
    int [][] strideSet = new int [granuleSpan][dimensionCount];
    int countSubtotal = 0;

    // running total of in-track lines up to and including the current granule
    int inTrackTotal = 0;
    for (int i = 0; i < loGranuleId; i++) {
        inTrackTotal += vGranuleLengths[i];
    }

    // this part is a little tricky - set the values for each granule we need to access for this read
    for (int i = 0; i < granuleSpan; i++) {
        inTrackTotal += vGranuleLengths[loGranuleId+i];
        for (int j = 0; j < dimensionCount; j++) {
            // for all indices other than the in-track index, the numbers match what was passed in
            if (j != vInTrackIndex) {
                startSet[i][j] = start[j];
                countSet[i][j] = count[j] * stride[j];
                strideSet[i][j] = stride[j];
            } else {
                // for the in-track index, it's not so easy...
                // for first granule, start is what's passed in
                if (i == 0) {
                    // rebase the aggregate start onto this granule's local index
                    startSet[i][j] = start[j] - (inTrackTotal - vGranuleLengths[loGranuleId]);
                } else {
                    // stride phase carried over from the previous granule
                    startSet[i][j] = (inTrackTotal - start[j]) % stride[j];
                    // TJJ Sep 2013, zero-base starts that offset into subsequent granules
                    if (startSet[i][j] > 0) {
                        startSet[i][j]--;
                    }
                }
                // counts may be different for start, end, and middle granules
                if (i == 0) {
                    // is this the first and only granule?
                    if (granuleSpan == 1) {
                        countSet[i][j] = count[j] * stride[j];
                        // or is this the first of multiple granules...
                    } else {
                        if ((inTrackTotal - start[j]) < (count[j] * stride[j])) {
                            countSet[i][j] = inTrackTotal - start[j];
                        } else {
                            countSet[i][j] = count[j] * stride[j];
                        }
                        countSubtotal += countSet[i][j];
                    }
                } else {
                    // middle granules
                    if (i < (granuleSpan - 1)) {
                        countSet[i][j] = vGranuleLengths[loGranuleId+i];
                        countSubtotal += countSet[i][j];
                    } else {
                        // the end granule
                        countSet[i][j] = (count[j] * stride[j]) - countSubtotal;
                        // XXX TJJ - limiting count to valid numbers here, why??
                        // need to revisit, see why this condition manifests
                        if (countSet[i][j] > (vGranuleLengths[loGranuleId+i] - startSet[i][j]))
                            countSet[i][j] = vGranuleLengths[loGranuleId+i] - startSet[i][j];
                    }
                }
                // luckily, stride never changes
                strideSet[i][j] = stride[j];
            }
        }
    }

    int totalLength = 0;
    int rangeListCount = 0;
    ArrayList<Array> arrayList = new ArrayList<Array>();
    for (int granuleIdx = 0; granuleIdx < granuleCount; granuleIdx++) {
        if ((granuleIdx >= loGranuleId) && (granuleIdx <= hiGranuleId)) {
            // NOTE(review): loGranuleId + (granuleIdx-loGranuleId) is just
            // granuleIdx — per-granule maps all share the same keys.
            Variable var = varMapList.get(loGranuleId + (granuleIdx-loGranuleId)).get(array_name);

            if (var instanceof Structure) {
                // what to do here?
            } else {
                // build the Range list for this granule's slice of the request
                ArrayList<Range> rangeList = new ArrayList<Range>();
                for (int dimensionIdx = 0; dimensionIdx < dimensionCount; dimensionIdx++) {
                    logger.debug("Creating new Range: " + startSet[rangeListCount][dimensionIdx] +
                            ", " + (startSet[rangeListCount][dimensionIdx] + countSet[rangeListCount][dimensionIdx] - 1) + ", " + strideSet[rangeListCount][dimensionIdx]);
                    Range range = new Range(
                            startSet[rangeListCount][dimensionIdx],
                            startSet[rangeListCount][dimensionIdx] + countSet[rangeListCount][dimensionIdx] - 1,
                            strideSet[rangeListCount][dimensionIdx]
                    );
                    rangeList.add(dimensionIdx, range);
                }
                rangeListCount++;

                // If there were chunks of fill data to remove...
                ArrayList<Range> al = granCutRanges.get(new Integer(granuleIdx));
                if (! al.isEmpty()) {
                    // read each keep-chunk, then splice into one contiguous array
                    ArrayList<Variable> varChunks = new ArrayList<Variable>();
                    for (int rangeCount = 0; rangeCount < al.size(); rangeCount+=2) {
                        ArrayList<Range> rl = new ArrayList<Range>();
                        rl.add(al.get(rangeCount));
                        rl.add(al.get(rangeCount + 1));
                        varChunks.add(var.section(rl));
                    }

                    int [] newShape = var.getShape();
                    int cutScans = granCutScans.get(granuleIdx);
                    newShape[0] = newShape[0] - cutScans;
                    logger.debug("New Shape: " + newShape[0] + ", " + newShape[1]);
                    Array single = Array.factory(var.getDataType(), newShape);

                    // now read variable chunk data into single contiguous array
                    // NOTE(review): copies via setFloat/getFloatNext regardless of
                    // the variable's actual data type — confirm EDR cut variables
                    // are always float-valued.
                    int idx = 0;
                    for (Variable v : varChunks) {
                        Array data = v.read();
                        int [] tmpShape = v.getShape();
                        for (int tIdx = 0; tIdx < tmpShape.length; tIdx++) {
                            logger.debug("Shape[" + tIdx + "]: " + tmpShape[tIdx]);
                        }
                        IndexIterator ii = data.getIndexIterator();
                        while (ii.hasNext()) {
                            single.setFloat(idx, ii.getFloatNext());
                            idx++;
                        }
                    }

                    // finally, apply subset ranges
                    Array subarray = single.section(rangeList);
                    totalLength += subarray.getSize();
                    arrayList.add(subarray);
                    logger.debug("Size of final data array: " + subarray.getSize());

                } else {
                    Array subarray = var.read(rangeList);
                    totalLength += subarray.getSize();
                    arrayList.add(subarray);
                }

            }
            // put in an empty ArrayList placeholder to retain a slot for each granule
        } else {
            Array emptyArray = null;
            arrayList.add(emptyArray);
        }
    }

    // last, concatenate the individual NetCDF arrays pulled out

    // with a RangeProcessor the output is calibrated float data; without one,
    // the variable's native primitive type
    Class outType;
    Class arrayType = getArrayType(array_name);
    RangeProcessor rngProcessor = varToRangeProcessor.get(array_name);
    if (rngProcessor == null) {
        outType = getArrayType(array_name);
    }
    else {
        outType = java.lang.Float.TYPE;
    }
    Object o = java.lang.reflect.Array.newInstance(outType, totalLength);

    int destPos = 0;
    int granIdx = 0;

    for (Array a : arrayList) {
        if (a != null) {
            Object primArray = a.copyTo1DJavaArray();
            primArray = processArray(array_name, arrayType, granIdx, primArray, rngProcessor, start, count);
            System.arraycopy(primArray, 0, o, destPos, (int) a.getSize());
            destPos += a.getSize();
        }
        granIdx++;
    }

    return o;
}
806
/**
 * Register the quality-flag map: broken-out flag name -> QualityFlag, whose
 * packed variable name is used for reads (see mapNameIfQualityFlag).
 *
 * @param qfMap the qfMap to set
 */
public void setQfMap(HashMap<String, QualityFlag> qfMap) {
    this.qfMap = qfMap;
}

/**
 * The variable-name -> Variable map of the FIRST granule.
 * NOTE(review): returns the live internal map, not a copy; callers must not
 * mutate it.
 */
public HashMap getVarMap() {
    return varMapList.get(0);
}

/**
 * The NetCDFFile readers this aggregation was constructed with (live list).
 */
public ArrayList<NetCDFFile> getReaders() {
    return this.ncdfal;
}
821
822 /* pass individual granule pieces just read from dataset through the RangeProcessor */
823 private Object processArray(String array_name, Class arrayType, int granIdx, Object values, RangeProcessor rngProcessor, int[] start, int[] count) {
824
825 if (rngProcessor == null) {
826 return values;
827 }
828 else {
829 ((AggregationRangeProcessor)rngProcessor).setWhichRangeProcessor(granIdx);
830
831 boolean processAlongMultiScaleDim = false;
832
833 if (rngProcessor.hasMultiDimensionScale()) { // this data variable has an array > 1 of scale/offsets. For example, one for each band.
834 rngProcessor.setMultiScaleIndex(start[rngProcessor.getMultiScaleDimensionIndex()]);
835 if (count[rngProcessor.getMultiScaleDimensionIndex()] > 1) { // if the multiScaleDim is > 1, use processAlongMultiScaleDim below
836 processAlongMultiScaleDim = true;
837 }
838 }
839
840 Object outArray = null;
841
842 if (processAlongMultiScaleDim) {
843
844 if (arrayType == Short.TYPE) {
845 outArray = rngProcessor.processAlongMultiScaleDim((short[])values);
846 } else if (arrayType == Byte.TYPE) {
847 outArray = rngProcessor.processAlongMultiScaleDim((byte[])values);
848 } else if (arrayType == Float.TYPE) {
849 outArray = values;
850 } else if (arrayType == Double.TYPE) {
851 outArray = values;
852 }
853
854 }
855 else {
856
857 if (arrayType == Short.TYPE) {
858 outArray = rngProcessor.processRange((short[]) values, null);
859 } else if (arrayType == Byte.TYPE) {
860 // if variable is a bit-field quality flag, apply mask
861 if (qfMap.containsKey(origName)) {
862 QualityFlag qf = qfMap.get(origName);
863 outArray = rngProcessor.processRangeQualityFlag((byte[]) values, null, qf);
864 } else {
865 outArray = rngProcessor.processRange((byte[]) values, null);
866 }
867 } else if (arrayType == Float.TYPE) {
868 outArray = rngProcessor.processRange((float[]) values, null);
869 } else if (arrayType == Double.TYPE) {
870 outArray = rngProcessor.processRange((double[]) values, null);
871 }
872
873 }
874
875 return outArray;
876 }
877 }
878
/**
 * Application can supply a RangeProcessor for a variable 'arrayName'.
 * When set, reads of that variable return calibrated float data
 * (see readArray/processArray).
 *
 * @param arrayName variable name the processor applies to
 * @param rangeProcessor processor to run each granule piece through
 */
public void addRangeProcessor(String arrayName, RangeProcessor rangeProcessor) {
    varToRangeProcessor.put(arrayName, rangeProcessor);
}
883
884 }