001 /*
002 * This file is part of McIDAS-V
003 *
004 * Copyright 2007-2013
005 * Space Science and Engineering Center (SSEC)
006 * University of Wisconsin - Madison
007 * 1225 W. Dayton Street, Madison, WI 53706, USA
008 * https://www.ssec.wisc.edu/mcidas
009 *
010 * All Rights Reserved
011 *
012 * McIDAS-V is built on Unidata's IDV and SSEC's VisAD libraries, and
013 * some McIDAS-V source code is based on IDV and VisAD source code.
014 *
015 * McIDAS-V is free software; you can redistribute it and/or modify
016 * it under the terms of the GNU Lesser Public License as published by
017 * the Free Software Foundation; either version 3 of the License, or
018 * (at your option) any later version.
019 *
020 * McIDAS-V is distributed in the hope that it will be useful,
021 * but WITHOUT ANY WARRANTY; without even the implied warranty of
022 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
023 * GNU Lesser Public License for more details.
024 *
025 * You should have received a copy of the GNU Lesser Public License
026 * along with this program. If not, see http://www.gnu.org/licenses.
027 */
028
029 package edu.wisc.ssec.mcidasv.data.hydra;
030
031 import java.util.HashMap;
032 import java.util.ArrayList;
033
034 import org.slf4j.Logger;
035 import org.slf4j.LoggerFactory;
036
037 import edu.wisc.ssec.mcidasv.data.QualityFlag;
038
039 import visad.util.Util;
040
/**
 * Applies scale/offset unpacking, missing-value substitution, and valid-range
 * screening to raw data arrays read through a {@code MultiDimensionReader}.
 * Subclasses specialize behavior for particular products (IASI, CrIS,
 * CloudSat) and for granule aggregations.
 */
public class RangeProcessor {

    private static final Logger logger = LoggerFactory.getLogger(RangeProcessor.class);
044
045 static RangeProcessor createRangeProcessor(MultiDimensionReader reader, HashMap metadata) throws Exception {
046 if (reader instanceof GranuleAggregation) {
047 return new AggregationRangeProcessor((GranuleAggregation)reader, metadata);
048 }
049
050 if (metadata.get("scale_name") == null) {
051 String product_name = (String) metadata.get(SwathAdapter.product_name);
052 if (product_name == "IASI_L1C_xxx") {
053 return new IASI_RangeProcessor();
054 }
055 return null;
056 }
057 else {
058 String product_name = (String) metadata.get(ProfileAlongTrack.product_name);
059 if (product_name == "2B-GEOPROF") {
060 return new CloudSat_2B_GEOPROF_RangeProcessor(reader, metadata);
061 }
062 else {
063 return new RangeProcessor(reader, metadata);
064 }
065 }
066 }
067
    MultiDimensionReader reader;
    HashMap metadata;

    // Unpacking parameters read from the data's attributes; null means the
    // corresponding step is skipped.
    float[] scale = null;
    float[] offset = null;
    float[] missing = null;       // fill/missing sentinel values (possibly several)
    float[] valid_range = null;   // two-element range attribute, if supplied
    float valid_low = -Float.MAX_VALUE;
    float valid_high = Float.MAX_VALUE;
    float[] low = new float[] {-Float.MAX_VALUE};
    float[] high = new float[] {Float.MAX_VALUE};

    boolean unpack = false;                  // true: scale*v + offset; false: scale*(v - offset)
    boolean unsigned = false;                // treat raw bytes/shorts as unsigned
    boolean rangeCheckBeforeScaling = true;  // range-test raw values (true) or scaled values (false)

    int scaleOffsetLen = 1;  // length of the scale/offset arrays

    String multiScaleDimName = SpectrumAdapter.channelIndex_name;
    boolean hasMultiDimensionScale = false;  // true when scale varies along a dimension

    int multiScaleDimensionIndex = 0;

    int soIndex = 0;  // current index into scale/offset along the multi-scale dimension
092
    /** No-arg constructor for subclasses that configure themselves. */
    public RangeProcessor() {
    }
095
    /**
     * Constructs with explicit single-channel unpacking parameters instead of
     * reading them from file attributes.
     *
     * @param scale scale factor
     * @param offset offset value
     * @param valid_low lowest valid raw value
     * @param valid_high highest valid raw value
     * @param missing missing/fill sentinel value
     */
    public RangeProcessor(float scale, float offset, float valid_low, float valid_high, float missing) {
        this.scale = new float[] {scale};
        this.offset = new float[] {offset};
        this.missing = new float[] {missing};
        this.valid_low = valid_low;
        this.valid_high = valid_high;
    }
103
104
    /**
     * Same as {@code RangeProcessor(reader, metadata)} but with a
     * caller-supplied multi-scale dimension name.
     */
    public RangeProcessor(MultiDimensionReader reader, HashMap metadata, String multiScaleDimName) throws Exception {
        this(reader, metadata);
        this.multiScaleDimName = multiScaleDimName;
    }
109
110 public RangeProcessor(MultiDimensionReader reader, HashMap metadata) throws Exception {
111 this.reader = reader;
112 this.metadata = metadata;
113
114 if (metadata.get("unpack") != null) {
115 unpack = true;
116 }
117
118 if (metadata.get("unsigned") != null) {
119 unsigned = true;
120 }
121
122 if (metadata.get("range_check_after_scaling") != null) {
123 String s = (String) metadata.get("range_check_after_scaling");
124 logger.debug("range_check_after_scaling: " + s);
125 rangeCheckBeforeScaling = false;
126 }
127
128 String array_name = (String) metadata.get("array_name");
129
130 scale = getAttributeAsFloatArray(array_name, (String) metadata.get("scale_name"));
131
132 offset = getAttributeAsFloatArray(array_name, (String) metadata.get("offset_name"));
133
134 if (scale != null) {
135 scaleOffsetLen = scale.length;
136
137 if (offset != null) {
138 if (scale.length != offset.length) {
139 throw new Exception("RangeProcessor: scale and offset array lengths must be equal");
140 }
141 }
142 else {
143 offset = new float[scaleOffsetLen];
144 for (int i=0; i<offset.length; i++) offset[i] = 0f;
145 }
146
147 }
148
149 missing = getAttributeAsFloatArray(array_name, (String) metadata.get("fill_value_name"));
150
151 String metaStr = (String)metadata.get("valid_range");
152 // attr name not supplied, so try the convention default
153 if (metaStr == null) {
154 metaStr = "valid_range";
155 }
156
157 valid_range = getAttributeAsFloatArray(array_name, metaStr);
158 if (valid_range != null) {
159
160 valid_low = valid_range[0];
161 valid_high = valid_range[1];
162
163 if (valid_range[0] > valid_range[1]) {
164 valid_low = valid_range[1];
165 valid_high = valid_range[0];
166 }
167 }
168
169 String str = (String)metadata.get("multiScaleDimensionIndex");
170 hasMultiDimensionScale = (str != null);
171 multiScaleDimensionIndex = (str != null) ? Integer.parseInt(str) : 0;
172 }
173
174 public float[] getAttributeAsFloatArray(String arrayName, String attrName)
175 throws Exception
176 {
177 float[] fltArray = null;
178 HDFArray arrayAttr = reader.getArrayAttribute(arrayName, attrName);
179
180 if (arrayAttr != null) {
181
182 if (arrayAttr.getType().equals(Float.TYPE)) {
183 float[] attr = (float[]) arrayAttr.getArray();
184 fltArray = new float[attr.length];
185 for (int k=0; k<attr.length; k++) fltArray[k] = attr[k];
186 }
187 else if (arrayAttr.getType().equals(Short.TYPE)) {
188 short[] attr = (short[]) arrayAttr.getArray();
189 fltArray = new float[attr.length];
190 for (int k=0; k<attr.length; k++) fltArray[k] = (float) attr[k];
191 }
192 else if (arrayAttr.getType().equals(Integer.TYPE)) {
193 int[] attr = (int[]) arrayAttr.getArray();
194 fltArray = new float[attr.length];
195 for (int k=0; k<attr.length; k++) fltArray[k] = (float) attr[k];
196 }
197 else if (arrayAttr.getType().equals(Double.TYPE)) {
198 double[] attr = (double[]) arrayAttr.getArray();
199 fltArray = new float[attr.length];
200 for (int k=0; k<attr.length; k++) fltArray[k] = (float) attr[k];
201 }
202
203 }
204
205 return fltArray;
206 }
207
208 /**
209 * Process a range of data from a byte array where bytes are packed bit
210 * or multi-bit fields of quality flags. Based on info in a QualityFlag
211 * object passed in, we extract and return values for that flag.
212 *
213 * @param values input values
214 * @param subset optional subset
215 * @param qf quality flag
216 * @return processed range
217 */
218
219 public float[] processRangeQualityFlag(byte[] values, HashMap subset, QualityFlag qf) {
220
221 if (subset != null) {
222 if (subset.get(multiScaleDimName) != null) {
223 soIndex = (int) ((double[])subset.get(multiScaleDimName))[0];
224 }
225 }
226
227 float[] newValues = new float[values.length];
228
229 float val = 0f;
230 int bitOffset = qf.getBitOffset();
231 int divisor = -1;
232
233 // map bit offset to a divisor
234 switch (bitOffset) {
235 case 1:
236 divisor = 2;
237 break;
238 case 2:
239 divisor = 4;
240 break;
241 case 3:
242 divisor = 8;
243 break;
244 case 4:
245 divisor = 16;
246 break;
247 case 5:
248 divisor = 32;
249 break;
250 case 6:
251 divisor = 64;
252 break;
253 case 7:
254 divisor = 128;
255 break;
256 default:
257 divisor = 1;
258 break;
259 }
260
261 // now map bit width to a mask
262 int numBits = qf.getNumBits();
263 int mask = -1;
264 switch (numBits) {
265 case 1:
266 mask = (int) 0x00000001;
267 break;
268 case 2:
269 mask = (int) 0x00000003;
270 break;
271 case 3:
272 mask = (int) 0x00000007;
273 break;
274 case 4:
275 mask = (int) 0x0000000F;
276 break;
277 case 5:
278 mask = (int) 0x0000001F;
279 break;
280 case 6:
281 mask = (int) 0x0000003F;
282 break;
283 case 7:
284 mask = (int) 0x0000007F;
285 break;
286 default:
287 mask = (int) 0x00000000;
288 break;
289 }
290
291 int i = 0;
292 for (int k = 0; k < values.length; k++) {
293 val = (float) values[k];
294 i = Util.unsignedByteToInt(values[k]);
295 val = (float) ((i / divisor) & mask);
296 newValues[k] = val;
297 }
298
299 return newValues;
300 }
301
    /**
     * Process a range of data from a byte array: optionally convert to
     * unsigned, replace missing/fill values with NaN, screen against the
     * valid range, and apply scale/offset.
     *
     * NOTE(review): when {@code unsigned} is set this converts the
     * {@code missing} field in place on every call (the conversion is
     * idempotent, but it mutates shared state).
     *
     * @param values raw byte samples
     * @param subset optional subset; used to pick the scale/offset index along
     *               the multi-scale dimension
     * @return unpacked float values, NaN where missing or out of range
     */
    public float[] processRange(byte[] values, HashMap subset) {

        if (subset != null) {
            if (subset.get(multiScaleDimName) != null) {
                soIndex = (int) ((double[])subset.get(multiScaleDimName))[0];
            }
        }

        float[] new_values = new float[values.length];

        // if we are working with unsigned data, need to convert missing vals to unsigned too
        if (unsigned) {
            if (missing != null) {
                for (int i = 0; i < missing.length; i++) {
                    missing[i] = (float) Util.unsignedByteToInt((byte) missing[i]);
                }
            }
        }

        float val = 0f;
        int i = 0;
        boolean isMissing = false;

        for (int k = 0; k < values.length; k++) {

            val = (float) values[k];
            if (unsigned) {
                i = Util.unsignedByteToInt(values[k]);
                val = (float) i;
            }

            // first, check the (possibly multiple) missing values
            isMissing = false;
            if (missing != null) {
                for (int mvIdx = 0; mvIdx < missing.length; mvIdx++) {
                    if (val == missing[mvIdx]) {
                        isMissing = true;
                        break;
                    }
                }
            }

            if (isMissing) {
                new_values[k] = Float.NaN;
                continue;
            }

            // pre-scaling range check (the default)
            if (rangeCheckBeforeScaling) {
                if ((val < valid_low) || (val > valid_high)) {
                    new_values[k] = Float.NaN;
                    continue;
                }
            }

            if (scale != null) {
                // unpack: scale*v + offset; otherwise: scale*(v - offset)
                if (unpack) {
                    new_values[k] = scale[soIndex] * (val) + offset[soIndex];
                } else {
                    new_values[k] = scale[soIndex] * (val - offset[soIndex]);
                }
            }
            else {
                new_values[k] = val;
            }

            // do valid range check AFTER scaling?
            if (! rangeCheckBeforeScaling) {
                if ((new_values[k] < valid_low) || (new_values[k] > valid_high)) {
                    new_values[k] = Float.NaN;
                }
            }
        }
        return new_values;
    }
383
384 /**
385 * Process a range of data from a short array
386 * @param values
387 * @param subset
388 * @return
389 */
390
391 public float[] processRange(short[] values, HashMap subset) {
392
393
394 if (subset != null) {
395 if (subset.get(multiScaleDimName) != null) {
396 soIndex = (int) ((double[])subset.get(multiScaleDimName))[0];
397 }
398 }
399
400 float[] new_values = new float[values.length];
401
402 // if we are working with unsigned data, need to convert missing vals to unsigned too
403 if (unsigned) {
404 if (missing != null) {
405 for (int i = 0; i < missing.length; i++) {
406 missing[i] = (float) Util.unsignedShortToInt((short) missing[i]);
407 }
408 }
409 }
410
411 float val = 0f;
412 int i = 0;
413 boolean isMissing = false;
414
415 for (int k = 0; k < values.length; k++) {
416
417 val = (float) values[k];
418 if (unsigned) {
419 i = Util.unsignedShortToInt(values[k]);
420 val = (float) i;
421 }
422
423 // first, check the (possibly multiple) missing values
424 isMissing = false;
425 if (missing != null) {
426 for (int mvIdx = 0; mvIdx < missing.length; mvIdx++) {
427 if (val == missing[mvIdx]) {
428 isMissing = true;
429 break;
430 }
431 }
432 }
433
434 if (isMissing) {
435 new_values[k] = Float.NaN;
436 continue;
437 }
438
439 if (rangeCheckBeforeScaling) {
440 if ((val < valid_low) || (val > valid_high)) {
441 new_values[k] = Float.NaN;
442 continue;
443 }
444 }
445
446 if (scale != null) {
447 if (unpack) {
448 new_values[k] = (scale[soIndex] * val) + offset[soIndex];
449 } else {
450 new_values[k] = scale[soIndex] * (val - offset[soIndex]);
451 }
452 } else {
453 new_values[k] = val;
454 }
455
456 // do valid range check AFTER scaling?
457 if (! rangeCheckBeforeScaling) {
458 if ((new_values[k] < valid_low) || (new_values[k] > valid_high)) {
459 new_values[k] = Float.NaN;
460 }
461 }
462
463 }
464 return new_values;
465 }
466
467 /**
468 * Process a range of data from a float array
469 * @param values
470 * @param subset
471 * @return
472 */
473
474 public float[] processRange(float[] values, HashMap subset) {
475
476 float[] new_values = null;
477
478 if ((missing != null) || (valid_range != null)) {
479 new_values = new float[values.length];
480 }
481 else {
482 return values;
483 }
484
485 float val;
486
487 for (int k = 0; k < values.length; k++) {
488 val = values[k];
489 new_values[k] = val;
490
491 // first, check the (possibly multiple) missing values
492 if (missing != null) {
493 for (int mvIdx = 0; mvIdx < missing.length; mvIdx++) {
494 if (val == missing[mvIdx]) {
495 new_values[k] = Float.NaN;
496 break;
497 }
498 }
499 }
500
501 if ((valid_range != null) && ((val < valid_low) || (val > valid_high))) {
502 new_values[k] = Float.NaN;
503 }
504
505 }
506
507 return new_values;
508 }
509
510 /**
511 * Process a range of data from a double array
512 * @param values
513 * @param subset
514 * @return
515 */
516
517 public double[] processRange(double[] values, HashMap subset) {
518
519 double[] new_values = null;
520
521 if ((missing != null) || (valid_range != null)) {
522 new_values = new double[values.length];
523 }
524 else {
525 return values;
526 }
527
528 double val;
529
530 for (int k = 0; k < values.length; k++) {
531 val = values[k];
532 new_values[k] = val;
533
534 // first, check the (possibly multiple) missing values
535 if (missing != null) {
536 for (int mvIdx = 0; mvIdx < missing.length; mvIdx++) {
537 if (val == missing[mvIdx]) {
538 new_values[k] = Float.NaN;
539 break;
540 }
541 }
542 }
543
544 if ((valid_range != null) && ((val < valid_low) || (val > valid_high))) {
545 new_values[k] = Double.NaN;
546 }
547 }
548
549 return new_values;
550 }
551
    /**
     * Process a range of byte data along the multi-scale dimension: sample k
     * uses scale[k]/offset[k] rather than a single shared index.
     *
     * NOTE(review): unlike {@code processRange}, this assumes {@code scale}
     * and {@code offset} are non-null and at least values.length long —
     * confirm with callers.
     *
     * @param values raw byte samples, one per scale/offset entry
     * @return unpacked float values, NaN where missing or out of range
     */
    public float[] processAlongMultiScaleDim(byte[] values) {

        float[] new_values = new float[values.length];

        // if we are working with unsigned data, need to convert missing vals to unsigned too
        if (unsigned) {
            if (missing != null) {
                for (int i = 0; i < missing.length; i++) {
                    missing[i] = (float) Util.unsignedByteToInt((byte) missing[i]);
                }
            }
        }

        float val = 0f;
        int i = 0;
        boolean isMissing = false;

        for (int k = 0; k < values.length; k++) {

            val = (float) values[k];
            if (unsigned) {
                i = Util.unsignedByteToInt(values[k]);
                val = (float) i;
            }

            // first, check the (possibly multiple) missing values
            isMissing = false;
            if (missing != null) {
                for (int mvIdx = 0; mvIdx < missing.length; mvIdx++) {
                    if (val == missing[mvIdx]) {
                        isMissing = true;
                        break;
                    }
                }
            }

            if (isMissing) {
                new_values[k] = Float.NaN;
                continue;
            }

            // pre-scaling range check (the default)
            if (rangeCheckBeforeScaling) {
                if ((val < valid_low) || (val > valid_high)) {
                    new_values[k] = Float.NaN;
                    continue;
                }
            }

            // unpack: scale*v + offset; otherwise: scale*(v - offset)
            if (unpack) {
                new_values[k] = scale[k] * val + offset[k];
            } else {
                new_values[k] = scale[k] * (val - offset[k]);
            }

            // do valid range check AFTER scaling?
            if (! rangeCheckBeforeScaling) {
                if ((new_values[k] < valid_low) || (new_values[k] > valid_high)) {
                    new_values[k] = Float.NaN;
                }
            }
        }
        return new_values;
    }
621
    /**
     * Process a range of short data along the multi-scale dimension: sample k
     * uses scale[k]/offset[k] rather than a single shared index.
     *
     * NOTE(review): unlike {@code processRange}, this assumes {@code scale}
     * and {@code offset} are non-null and at least values.length long —
     * confirm with callers.
     *
     * @param values raw short samples, one per scale/offset entry
     * @return unpacked float values, NaN where missing or out of range
     */
    public float[] processAlongMultiScaleDim(short[] values) {

        float[] new_values = new float[values.length];

        // if we are working with unsigned data, need to convert missing vals to unsigned too
        if (unsigned) {
            if (missing != null) {
                for (int i = 0; i < missing.length; i++) {
                    missing[i] = (float) Util.unsignedShortToInt((short) missing[i]);
                }
            }
        }

        float val = 0f;
        int i = 0;
        boolean isMissing = false;

        for (int k = 0; k < values.length; k++) {

            val = (float) values[k];
            if (unsigned) {
                i = Util.unsignedShortToInt(values[k]);
                val = (float) i;
            }

            // first, check the (possibly multiple) missing values
            isMissing = false;
            if (missing != null) {
                for (int mvIdx = 0; mvIdx < missing.length; mvIdx++) {
                    if (val == missing[mvIdx]) {
                        isMissing = true;
                        break;
                    }
                }
            }

            if (isMissing) {
                new_values[k] = Float.NaN;
                continue;
            }

            // pre-scaling range check (the default)
            if (rangeCheckBeforeScaling) {
                if ((val < valid_low) || (val > valid_high)) {
                    new_values[k] = Float.NaN;
                    continue;
                }
            }

            // unpack: scale*v + offset; otherwise: scale*(v - offset)
            if (unpack) {
                new_values[k] = scale[k] * val + offset[k];
            } else {
                new_values[k] = scale[k] * (val - offset[k]);
            }

            // do valid range check AFTER scaling?
            if (! rangeCheckBeforeScaling) {
                if ((new_values[k] < valid_low) || (new_values[k] > valid_high)) {
                    new_values[k] = Float.NaN;
                }
            }
        }
        return new_values;
    }
691
    /** Sets the subset-key name used to locate the multi-scale dimension index. */
    public void setMultiScaleDimName(String multiScaleDimName) {
        this.multiScaleDimName = multiScaleDimName;
    }

    /** Returns the index (from metadata) of the multi-scale dimension. */
    public int getMultiScaleDimensionIndex() {
        return multiScaleDimensionIndex;
    }

    /** Returns true when scale/offset vary along a dimension. */
    public boolean hasMultiDimensionScale() {
        return hasMultiDimensionScale;
    }

    /** Marks whether scale/offset vary along a dimension. */
    public void setHasMultiDimensionScale(boolean yesno) {
        hasMultiDimensionScale = yesno;
    }

    /** Sets the current scale/offset index used by the processRange methods. */
    public void setMultiScaleIndex(int idx) {
        this.soIndex = idx;
    }

}
713
/**
 * RangeProcessor for IASI Level 1C data: decodes the packed image via
 * {@code IASI_L1C_Utility} and reorders it into pseudo-scan order instead of
 * applying the generic scale/offset path.
 */
class IASI_RangeProcessor extends RangeProcessor {

    public IASI_RangeProcessor() throws Exception {
        super();
    }

    /**
     * Decode and reorder one channel of IASI L1C data.
     *
     * @param values packed radiances
     * @param subset must contain channel index and track/xtrack coordinates
     * @return decoded, reordered radiances
     */
    public float[] processRange(short[] values, HashMap subset) {
        int channelIndex = (int) ((double[]) subset.get(SpectrumAdapter.channelIndex_name))[0];

        float[] new_values = IASI_L1C_Utility.getDecodedIASIImage(values, null, channelIndex);

        double[] track_coords = (double[]) subset.get(SwathAdapter.track_name);
        double[] xtrack_coords = (double[]) subset.get(SwathAdapter.xtrack_name);

        // NOTE(review): numElems is computed but unused; the element count is
        // hard-coded to 60 in the reorder call below — confirm intent.
        int numElems = ((int)(xtrack_coords[1] - xtrack_coords[0]) + 1);
        int numLines = ((int)(track_coords[1] - track_coords[0]) + 1);

        new_values = IASI_L1C_Utility.psuedoScanReorder2(new_values, 60, numLines*2);

        //- subset here, if necessary

        return new_values;
    }

}
739
/**
 * RangeProcessor for CrIS SDR data: reorders the input into pseudo-scan order
 * via {@code CrIS_SDR_Utility}; no scale/offset or range screening is applied.
 */
class CrIS_RangeProcessor extends RangeProcessor {

    public CrIS_RangeProcessor() throws Exception {
        super();
    }

    /**
     * Reorder CrIS SDR data into pseudo-scan order.
     *
     * @param values raw samples
     * @param subset must contain track/xtrack coordinates
     * @return reordered values
     */
    public float[] processRange(float[] values, HashMap subset) {

        double[] track_coords = (double[]) subset.get(SwathAdapter.track_name);
        double[] xtrack_coords = (double[]) subset.get(SwathAdapter.xtrack_name);

        // NOTE(review): numElems is computed but unused; the element count is
        // hard-coded to 90 in the reorder call below — confirm intent.
        int numElems = ((int)(xtrack_coords[1] - xtrack_coords[0]) + 1);
        int numLines = ((int)(track_coords[1] - track_coords[0]) + 1);

        values = CrIS_SDR_Utility.psuedoScanReorder(values, 90, numLines*3);

        //- subset here, if necessary

        return values;
    }

}
762
763
/**
 * RangeProcessor for CloudSat 2B-GEOPROF radar reflectivity: applies the
 * product's val/scale + offset convention (note: divide, not multiply) and
 * substitutes a floor of -40 dBZ for out-of-range samples.
 */
class CloudSat_2B_GEOPROF_RangeProcessor extends RangeProcessor {

    public CloudSat_2B_GEOPROF_RangeProcessor(MultiDimensionReader reader, HashMap metadata) throws Exception {
        super(reader, metadata);
        if (scale == null) { // use implicit default value since E05, E06 has removed the scale/offset from the Radar Refl variable
            scale = new float[] {100f};
            offset = new float[] {0f};
        }
    }

    /**
     * Unpack 2B-GEOPROF reflectivity.
     *
     * NOTE(review): assumes the fill-value attribute was found, i.e.
     * {@code missing} is non-null — confirm for all granule versions.
     *
     * @param values raw reflectivity counts
     * @param subset unused here
     * @return reflectivity in dBZ; NaN for fill, -40 for out-of-range
     */
    public float[] processRange(short[] values, HashMap subset) {
        float[] new_values = new float[values.length];
        for (int k=0; k<values.length;k++) {
            float val = (float) values[k];
            if (val == missing[0]) {
                new_values[k] = Float.NaN;
            }
            else if ((val < valid_low) || (val > valid_high)) {
                new_values[k] = -40f;
            }
            else {
                // product convention divides by the scale factor
                new_values[k] = val/scale[0] + offset[0];
            }
        }
        return new_values;
    }

}
792
/**
 * RangeProcessor for aggregations of granules: holds one RangeProcessor per
 * underlying reader and delegates every call to the processor selected via
 * {@link #setWhichRangeProcessor}. Methods are synchronized because the
 * active-processor index is shared mutable state.
 */
class AggregationRangeProcessor extends RangeProcessor {

    ArrayList<RangeProcessor> rangeProcessors = new ArrayList<RangeProcessor>();

    int rngIdx = 0;  // index of the currently active delegate

    public AggregationRangeProcessor(GranuleAggregation aggrReader, HashMap metadata) throws Exception {
        super();

        ArrayList readers = aggrReader.getReaders();

        int num = 0;  // count of delegates that define a multi-dimension scale

        for (int rdrIdx = 0; rdrIdx < readers.size(); rdrIdx++) {
            RangeProcessor rngProcessor =
                RangeProcessor.createRangeProcessor((MultiDimensionReader)readers.get(rdrIdx), metadata);

            if (rngProcessor.hasMultiDimensionScale()) {
                num++;
            }

            rangeProcessors.add(rngProcessor);
        }

        // either all delegates carry a multi-dimension scale, or none may
        if (num > 0 && num != readers.size()) {
            throw new Exception("AggregationRangeProcessor: all or none can define a multiDimensionScale");
        }
        else if (num == readers.size()) {
            setHasMultiDimensionScale(true);
        }

        // register with the aggregation so it can switch delegates per granule
        aggrReader.addRangeProcessor((String)metadata.get(SwathAdapter.array_name), this);
    }

    /** Selects which granule's RangeProcessor subsequent calls delegate to. */
    public synchronized void setWhichRangeProcessor(int index) {
        rngIdx = index;
    }

    public synchronized void setMultiScaleIndex(int idx) {
        rangeProcessors.get(rngIdx).setMultiScaleIndex(idx);
    }


    public synchronized float[] processRange(byte[] values, HashMap subset) {
        return rangeProcessors.get(rngIdx).processRange(values, subset);
    }

    public synchronized float[] processRange(short[] values, HashMap subset) {
        return rangeProcessors.get(rngIdx).processRange(values, subset);
    }

    public synchronized float[] processRange(float[] values, HashMap subset) {
        return rangeProcessors.get(rngIdx).processRange(values, subset);
    }

    public synchronized double[] processRange(double[] values, HashMap subset) {
        return rangeProcessors.get(rngIdx).processRange(values, subset);
    }

    public synchronized float[] processAlongMultiScaleDim(short[] values) {
        return rangeProcessors.get(rngIdx).processAlongMultiScaleDim(values);
    }

    public synchronized float[] processAlongMultiScaleDim(byte[] values) {
        return rangeProcessors.get(rngIdx).processAlongMultiScaleDim(values);
    }
}