001/* 002 * This file is part of McIDAS-V 003 * 004 * Copyright 2007-2017 005 * Space Science and Engineering Center (SSEC) 006 * University of Wisconsin - Madison 007 * 1225 W. Dayton Street, Madison, WI 53706, USA 008 * https://www.ssec.wisc.edu/mcidas 009 * 010 * All Rights Reserved 011 * 012 * McIDAS-V is built on Unidata's IDV and SSEC's VisAD libraries, and 013 * some McIDAS-V source code is based on IDV and VisAD source code. 014 * 015 * McIDAS-V is free software; you can redistribute it and/or modify 016 * it under the terms of the GNU Lesser Public License as published by 017 * the Free Software Foundation; either version 3 of the License, or 018 * (at your option) any later version. 019 * 020 * McIDAS-V is distributed in the hope that it will be useful, 021 * but WITHOUT ANY WARRANTY; without even the implied warranty of 022 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 023 * GNU Lesser Public License for more details. 024 * 025 * You should have received a copy of the GNU Lesser Public License 026 * along with this program. If not, see http://www.gnu.org/licenses. 
027 */ 028 029package edu.wisc.ssec.mcidasv.data.hydra; 030 031import edu.wisc.ssec.mcidasv.Constants; 032import edu.wisc.ssec.mcidasv.McIDASV; 033import edu.wisc.ssec.mcidasv.PersistenceManager; 034import edu.wisc.ssec.mcidasv.data.HydraDataSource; 035import edu.wisc.ssec.mcidasv.data.PreviewSelection; 036import edu.wisc.ssec.mcidasv.data.QualityFlag; 037 038import java.io.ByteArrayInputStream; 039import java.io.File; 040import java.io.FilenameFilter; 041import java.rmi.RemoteException; 042import java.text.SimpleDateFormat; 043import java.util.ArrayList; 044import java.util.Date; 045import java.util.Enumeration; 046import java.util.HashMap; 047import java.util.Hashtable; 048import java.util.Iterator; 049import java.util.LinkedHashMap; 050import java.util.LinkedHashSet; 051import java.util.List; 052import java.util.Map; 053import java.util.Set; 054import java.util.SimpleTimeZone; 055import java.util.StringTokenizer; 056 057import javax.swing.JCheckBox; 058import javax.swing.JOptionPane; 059 060import org.jdom2.Document; 061import org.jdom2.Element; 062import org.jdom2.Namespace; 063import org.jdom2.output.XMLOutputter; 064import org.slf4j.Logger; 065import org.slf4j.LoggerFactory; 066 067import ucar.ma2.ArrayFloat; 068import ucar.ma2.DataType; 069import ucar.nc2.Attribute; 070import ucar.nc2.Dimension; 071import ucar.nc2.Group; 072import ucar.nc2.NetcdfFile; 073import ucar.nc2.Variable; 074import ucar.nc2.dataset.VariableDS; 075import ucar.unidata.data.DataCategory; 076import ucar.unidata.data.DataChoice; 077import ucar.unidata.data.DataSelection; 078import ucar.unidata.data.DataSelectionComponent; 079import ucar.unidata.data.DataSourceDescriptor; 080import ucar.unidata.data.DirectDataChoice; 081import ucar.unidata.data.GeoLocationInfo; 082import ucar.unidata.data.GeoSelection; 083import ucar.unidata.data.grid.GridUtil; 084import ucar.unidata.idv.IdvPersistenceManager; 085import ucar.unidata.util.Misc; 086import visad.Data; 087import visad.DateTime; 088import 
visad.DerivedUnit;
import visad.FieldImpl;
import visad.FlatField;
import visad.FunctionType;
import visad.RealType;
import visad.SampledSet;
import visad.Unit;
import visad.VisADException;
import visad.data.units.NoSuchUnitException;
import visad.data.units.ParseException;
import visad.data.units.Parser;
import visad.util.Util;

/**
 * A data source for NPOESS Preparatory Project (Suomi NPP) data
 * This will probably move, but we are placing it here for now
 * since we are leveraging some existing code used for HYDRA.
 */

public class SuomiNPPDataSource extends HydraDataSource {

    private static final Logger logger = LoggerFactory.getLogger(SuomiNPPDataSource.class);

    /** Sources file (first file of the selected granule set) */
    protected String filename;

    // for loading bundles, store granule lists and geo lists here
    protected List<String> oldSources = new ArrayList<>();
    protected List<String> geoSources = new ArrayList<>();

    // integrity map for grouping sets/aggregations of selected products
    // (product prefix -> list of granule file names for that product)
    Map<String, List<String>> filenameMap = null;

    // reader over the union (NCML) aggregation of data + geolocation files
    protected MultiDimensionReader nppAggReader;

    protected MultiDimensionAdapter[] adapters = null;

    private List<MultiSpectralData> msd_CrIS = new ArrayList<>();
    private List<MultiSpectralData> multiSpectralData = new ArrayList<>();
    private Map<String, MultiSpectralData> msdMap = new HashMap<>();
    // quality-flag metadata keyed by variable name
    private Map<String, QualityFlag> qfMap = new HashMap<>();
    // radiance -> brightness temperature LUTs keyed by pseudo-variable name
    private Map<String, float[]> lutMap = new HashMap<>();

    private static final String DATA_DESCRIPTION = "Suomi NPP Data";

    // instrument related variables and flags
    Attribute instrumentName = null;
    private String productName = null;

    // product related variables and flags
    String whichEDR = "";

    // for now, we are only handling CrIS variables that match this filter and SCAN dimensions
    private String crisFilter = "ES_Real";

    // for now, we are only handling OMPS variables that match this filter and SCAN dimensions
    private String ompsFilter = "Radiance";

    private Map<String, double[]> defaultSubset;
    public TrackAdapter track_adapter;

    private List categories;
    private boolean isCombinedProduct = false;
    private boolean nameHasBeenSet = false;

    // true for NOAA-format (.h5) granules, false for NASA L1B granules
    private boolean isNOAA;

    // need our own separator char since it's always Unix-style in the Suomi NPP files
    private static final String SEPARATOR_CHAR = "/";

    // date formatter for NASA L1B data, ex 2016-02-07T00:06:00.000Z
    // NOTE(review): SimpleDateFormat is not thread-safe; these shared
    // formatter fields assume single-threaded use - confirm against callers.
    SimpleDateFormat sdfNASA = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");

    // LUTs for NASA L1B data
    float[] m12LUT = null;
    float[] m13LUT = null;
    float[] m14LUT = null;
    float[] m15LUT = null;
    float[] m16LUT = null;
    float[] i04LUT = null;
    float[] i05LUT = null;

    // Map to match NASA variables to units (XML Product Profiles used for NOAA)
    Map<String, String> unitsNASA = new HashMap<String, String>();

    // date formatter for converting Suomi NPP day/time to something we can use
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss.SSS");

    // date formatter for how we want to show granule day/time on display
    SimpleDateFormat sdfOut = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");

    // MJH keep track of date to add time dim to FieldImpl
    Date theDate;

    /**
     * Zero-argument constructor for construction via unpersistence.
     */

    public SuomiNPPDataSource() {
    }

    /**
     * Construct a data source from a single file name (no descriptor,
     * no properties).
     *
     * @param fileName name of the file to read
     *
     * @throws VisADException problem creating data
     */
    public SuomiNPPDataSource(String fileName) throws VisADException {
        this(null, Misc.newList(fileName), null);
        logger.debug("filename only constructor call..");
    }

    /**
     * Construct a new Suomi NPP HDF5 data source.
196 * @param descriptor descriptor for this {@code DataSource} 197 * @param fileName name of the hdf file to read 198 * @param properties hashtable of properties 199 * 200 * @throws VisADException problem creating data 201 */ 202 203 public SuomiNPPDataSource(DataSourceDescriptor descriptor, 204 String fileName, Hashtable properties) 205 throws VisADException { 206 this(descriptor, Misc.newList(fileName), properties); 207 logger.debug("SuomiNPPDataSource called, single file selected: " + fileName); 208 } 209 210 /** 211 * Construct a new Suomi NPP HDF5 data source. 212 * 213 * @param descriptor Descriptor for this {@code DataSource}. 214 * @param newSources List of filenames. 215 * @param properties Hashtable of properties. 216 * 217 * @throws VisADException problem creating data 218 */ 219 220 public SuomiNPPDataSource(DataSourceDescriptor descriptor, 221 List<String> newSources, Hashtable properties) 222 throws VisADException { 223 super(descriptor, newSources, DATA_DESCRIPTION, properties); 224 logger.debug("SuomiNPPDataSource constructor called, file count: " + sources.size()); 225 226 filename = (String) sources.get(0); 227 setDescription("Suomi NPP"); 228 229 // NASA data is UTC, pre-set time zone 230 SimpleTimeZone stz = new SimpleTimeZone(0, "UTC"); 231 sdfNASA.setTimeZone(stz);; 232 233 // build the filename map - matches each product to set of files for that product 234 filenameMap = new HashMap<>(); 235 236 // Pass 1, populate the list of products selected 237 for (Object o : sources) { 238 String filename = (String) o; 239 // first five characters of any product go together 240 int lastSeparator = filename.lastIndexOf(File.separatorChar); 241 int firstUnderscore = filename.indexOf("_", lastSeparator + 1); 242 String prodStr = filename.substring(lastSeparator + 1, firstUnderscore); 243 if (! 
filenameMap.containsKey(prodStr)) { 244 List<String> l = new ArrayList<String>(); 245 filenameMap.put(prodStr, l); 246 } 247 } 248 249 // pass 2, create a list of files for each product in this data source 250 for (Object o : sources) { 251 String filename = (String) o; 252 // first five characters of any product go together 253 int lastSeparator = filename.lastIndexOf(File.separatorChar); 254 int firstUnderscore = filename.indexOf("_", lastSeparator + 1); 255 String prodStr = filename.substring(lastSeparator + 1, firstUnderscore); 256 List l = (List) filenameMap.get(prodStr); 257 l.add(filename); 258 filenameMap.put(prodStr, l); 259 } 260 261 versionCheck(); 262 setup(); 263 initQfTranslations(); 264 } 265 266 // alert user about possible VIIRS plugin compatibility issues 267 private void versionCheck() { 268 boolean pluginDialog = getIdv().getStore().get(Constants.PREF_VIIRS_PLUGIN, false); 269 // don't create a dialog though if we are running in background/offscreen mode 270 boolean offScreen = getIdv().getArgsManager().getIsOffScreen(); 271 if (! offScreen) { 272 if (! pluginDialog) { 273 String msg = "There has been an update to the VIIRS Formulas plugin.\n" + 274 "If you use the plugin, you will need to uninstall the currently installed\n" + 275 "version of the plugin, and install the plugin called \"VIIRS Formulas\"."; 276 JCheckBox jcbPlugin = new JCheckBox("Do not show this message again"); 277 Object[] params = { msg, jcbPlugin }; 278 JOptionPane.showMessageDialog(null, params, "Plugin Compatibility Notice", JOptionPane.OK_OPTION); 279 boolean dontShow = jcbPlugin.isSelected(); 280 getIdv().getStore().put(Constants.PREF_VIIRS_PLUGIN, dontShow); 281 } 282 } else { 283 logger.warn("Make sure your VIIRS plugin is current, there was an update with McV 1.5"); 284 } 285 } 286 287 public void setup() throws VisADException { 288 289 // which format, NASA or NOAA? 
290 isNOAA = false; 291 292 // store filenames for possible bundle unpersistence 293 for (Object o : sources) { 294 oldSources.add((String) o); 295 } 296 297 // time zone for product labels 298 SimpleTimeZone stz = new SimpleTimeZone(0, "GMT"); 299 sdf.setTimeZone(stz); 300 sdfOut.setTimeZone(stz); 301 302 // looking to populate 3 things - path to lat, path to lon, path to relevant products 303 String pathToLat = null; 304 String pathToLon = null; 305 Set<String> pathToProducts = new LinkedHashSet<>(); 306 Map<String, String> prodToDesc = new HashMap<>(); 307 308 // flag to differentiate VIIRS from one of the other Suomi sensors 309 boolean isVIIRS = true; 310 311 // check source filenames to see if this is a combined product. everything 312 // from last file separator to first underscore should be product info 313 int lastSeparator = filename.lastIndexOf(File.separatorChar); 314 int firstUnderscore = filename.indexOf("_", lastSeparator + 1); 315 String prodStr = filename.substring(lastSeparator + 1, firstUnderscore); 316 // only do this check for NOAA data 317 if (filename.endsWith(".h5")) { 318 isNOAA = true; 319 StringTokenizer st = new StringTokenizer(prodStr, "-"); 320 logger.debug("SNPPDS check for embedded GEO, tokenizing: " + prodStr); 321 while (st.hasMoreTokens()) { 322 String singleProd = st.nextToken(); 323 for (int i = 0; i < JPSSUtilities.geoProductIDs.length; i++) { 324 if (singleProd.equals(JPSSUtilities.geoProductIDs[i])) { 325 logger.debug("Setting isCombinedProduct true, Found embedded GEO: " + singleProd); 326 isCombinedProduct = true; 327 break; 328 } 329 } 330 } 331 } 332 333 // various metatdata we'll need to gather on a per-product basis 334 Map<String, String> unsignedFlags = new LinkedHashMap<>(); 335 Map<String, String> unpackFlags = new LinkedHashMap<>(); 336 337 // geo product IDs for each granule 338 Set<String> geoProductIDs = new LinkedHashSet<>(); 339 340 // aggregations will use sets of NetCDFFile readers 341 List<NetCDFFile> 
ncdfal = new ArrayList<>(); 342 343 // we should be able to find an XML Product Profile for each data/product type 344 SuomiNPPProductProfile nppPP = null; 345 // and also Profile metadata for geolocation variables 346 boolean haveGeoMetaData = false; 347 348 // number of source granules which make up the data source 349 int granuleCount = 1; 350 351 try { 352 353 nppPP = new SuomiNPPProductProfile(); 354 355 // for each source file provided, find the appropriate geolocation, 356 // get the nominal time and various other granule-level metadata 357 Iterator keyIterator = filenameMap.keySet().iterator(); 358 while (keyIterator.hasNext()) { 359 String keyStr = (String) keyIterator.next(); 360 List fileNames = (List) filenameMap.get(keyStr); 361 granuleCount = fileNames.size(); 362 setProperty(Constants.PROP_GRANULE_COUNT, granuleCount + " Granule"); 363 for (int fileCount = 0; fileCount < granuleCount; fileCount++) { 364 // need to open the main NetCDF file to determine the geolocation product 365 NetcdfFile ncfile = null; 366 String fileAbsPath = null; 367 try { 368 fileAbsPath = (String) fileNames.get(fileCount); 369 logger.debug("Trying to open file: " + fileAbsPath); 370 ncfile = NetcdfFile.open(fileAbsPath); 371 if (! isCombinedProduct) { 372 if (isNOAA) { 373 Attribute a = ncfile.findGlobalAttribute("N_GEO_Ref"); 374 logger.debug("Value of GEO global attribute: " + a.getStringValue()); 375 String tmpGeoProductID = a.getStringValue(); 376 geoProductIDs.add(tmpGeoProductID); 377 } else { 378 geoProductIDs.add(keyStr.replace("L1B", "GEO")); 379 } 380 } 381 Group rg = ncfile.getRootGroup(); 382 383 List<Group> gl = rg.getGroups(); 384 if (gl != null) { 385 for (Group g : gl) { 386 logger.trace("Group name: " + g.getFullName()); 387 if (isNOAA) { 388 // when we find the Data_Products group, go down another group level and pull out 389 // what we will use for nominal day and time (for now anyway). 
390 // XXX TJJ fileCount check is so we don't count the GEO file in time array! 391 if (g.getFullName().contains("Data_Products") 392 && (fileCount != fileNames.size())) { 393 List<Group> dpg = g.getGroups(); 394 395 // cycle through once looking for XML Product Profiles 396 for (Group subG : dpg) { 397 398 String subName = subG.getFullName(); 399 // use actual product, not geolocation, to id XML Product Profile 400 if (!subName.contains("-GEO")) { 401 // determine the instrument name (VIIRS, ATMS, CrIS, OMPS) 402 instrumentName = subG.findAttribute("Instrument_Short_Name"); 403 404 // note any EDR products, will need to check for and remove 405 // fill scans later 406 Attribute adtt = subG.findAttribute("N_Dataset_Type_Tag"); 407 if (adtt != null) { 408 String baseName = adtt.getStringValue(); 409 if ((baseName != null) && (baseName.equals("EDR"))) { 410 // have to loop through sub groups variables to determine band 411 List<Variable> tmpVar = subG.getVariables(); 412 for (Variable v : tmpVar) { 413 // if Imagery EDR attribute for band is specified, save it 414 Attribute mBand = v.findAttribute("Band_ID"); 415 if (mBand != null) { 416 whichEDR = mBand.getStringValue(); 417 } 418 } 419 } 420 } 421 422 // This is also where we find the attribute which tells us which 423 // XML Product Profile to use! 
424 Attribute axpp = subG.findAttribute("N_Collection_Short_Name"); 425 if (axpp != null) { 426 String baseName = axpp.getStringValue(); 427 productName = baseName; 428 String productProfileFileName = nppPP 429 .getProfileFileName(baseName); 430 logger.trace("Found profile: " + productProfileFileName); 431 if (productProfileFileName == null) { 432 throw new Exception( 433 "XML Product Profile not found in catalog"); 434 } 435 try { 436 nppPP.addMetaDataFromFile(productProfileFileName); 437 } catch (Exception nppppe) { 438 logger.error("Error parsing XML Product Profile: " 439 + productProfileFileName); 440 throw new Exception( 441 "XML Product Profile Error", 442 nppppe); 443 } 444 } 445 } 446 } 447 448 // 2nd pass through sub-group to extract date/time for aggregation 449 for (Group subG : dpg) { 450 List<Variable> vl = subG.getVariables(); 451 for (Variable v : vl) { 452 Attribute aDate = v.findAttribute("AggregateBeginningDate"); 453 Attribute aTime = v.findAttribute("AggregateBeginningTime"); 454 // did we find the attributes we are looking for? 455 if ((aDate != null) && (aTime != null)) { 456 // set time for display to day/time of 1st granule examined 457 if (! nameHasBeenSet) { 458 String sDate = aDate.getStringValue(); 459 String sTime = aTime.getStringValue(); 460 logger.debug("For day/time, using: " + sDate 461 + sTime.substring(0, sTime.indexOf('Z') - 3)); 462 Date d = sdf.parse(sDate 463 + sTime.substring(0, sTime.indexOf('Z') - 3)); 464 theDate = d; 465 setName(instrumentName.getStringValue() + " " 466 + sdfOut.format(d)); 467 nameHasBeenSet = true; 468 } 469 break; 470 } 471 } 472 } 473 if (! 
nameHasBeenSet) { 474 throw new VisADException( 475 "No date time found in Suomi NPP granule"); 476 } 477 } 478 } else { 479 // NASA data - date/time from global attribute 480 // set time for display to day/time of 1st granule examined 481 Attribute timeStartNASA = ncfile.findGlobalAttribute("time_coverage_start"); 482 Date d = sdfNASA.parse(timeStartNASA.getStringValue()); 483 theDate = d; 484 if (! nameHasBeenSet) { 485 instrumentName = ncfile.findGlobalAttribute("instrument"); 486 setName(instrumentName.getStringValue() + " " + sdfOut.format(d)); 487 nameHasBeenSet = true; 488 } 489 } 490 } 491 } 492 } catch (Exception e) { 493 logger.warn("Exception during processing of file: " + fileAbsPath); 494 throw (e); 495 } finally { 496 ncfile.close(); 497 } 498 } 499 500 } 501 502 // build each union aggregation element 503 Iterator<String> iterator = geoProductIDs.iterator(); 504 for (int elementNum = 0; elementNum < granuleCount; elementNum++) { 505 506 String s = null; 507 508 // build an XML (NCML actually) representation of the union aggregation of these two files 509 Namespace ns = Namespace.getNamespace("http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2"); 510 Element root = new Element("netcdf", ns); 511 Document document = new Document(root); 512 513 Element agg = new Element("aggregation", ns); 514 agg.setAttribute("type", "union"); 515 516 // TJJ - Loop over filename map, could be several products that need to be aggregated 517 Set set = filenameMap.keySet(); 518 Iterator mapIter = set.iterator(); 519 while (mapIter.hasNext()) { 520 String key = (String) mapIter.next(); 521 List l = (List) filenameMap.get(key); 522 Element fData = new Element("netcdf", ns); 523 fData.setAttribute("location", (String) l.get(elementNum)); 524 agg.addContent(fData); 525 s = (String) l.get(elementNum); 526 } 527 528 String geoFilename = null; 529 Element fGeo = new Element("netcdf", ns);; 530 531 if (! 
isCombinedProduct) { 532 533 if (isNOAA) { 534 geoFilename = s.substring(0, 535 s.lastIndexOf(File.separatorChar) + 1); 536 // check if we have the whole file name or just the prefix 537 String geoProductID = iterator.next(); 538 if (geoProductID.endsWith("h5")) { 539 geoFilename += geoProductID; 540 } else { 541 geoFilename += geoProductID; 542 geoFilename += s.substring(s 543 .lastIndexOf(File.separatorChar) + 6); 544 } 545 // Be sure file as specified by N_GEO_Ref global attribute really is there. 546 File tmpGeo = new File(geoFilename); 547 if (!tmpGeo.exists()) { 548 // Ok, the expected file defined (supposedly) exactly by a global att is not there... 549 // We need to check for similar geo files with different creation dates 550 String geoFileRelative = geoFilename 551 .substring(geoFilename 552 .lastIndexOf(File.separatorChar) + 1); 553 // also check for Terrain Corrected version of geo 554 String geoTerrainCorrected = geoFileRelative; 555 geoTerrainCorrected = geoTerrainCorrected.replace( 556 "OD", "TC"); 557 geoTerrainCorrected = geoTerrainCorrected.replace( 558 "MG", "TC"); 559 560 // now we make a file filter, and see if a matching geo file is present 561 File fList = new File( 562 geoFilename.substring( 563 0, 564 geoFilename 565 .lastIndexOf(File.separatorChar) + 1)); // current directory 566 567 FilenameFilter geoFilter = new FilenameFilter() { 568 public boolean accept(File dir, String name) { 569 if (name.matches(JPSSUtilities.SUOMI_GEO_REGEX_NOAA)) { 570 return true; 571 } else { 572 return false; 573 } 574 } 575 }; 576 577 File[] files = fList.listFiles(geoFilter); 578 for (File file : files) { 579 if (file.isDirectory()) { 580 continue; 581 } 582 // get the file name for convenience 583 String fName = file.getName(); 584 // is it one of the standard Ellipsoid geo types we are looking for? 
585 if (fName.substring(0, 5).equals( 586 geoFileRelative.substring(0, 5))) { 587 int geoStartIdx = geoFileRelative 588 .indexOf("_d"); 589 int prdStartIdx = fName.indexOf("_d"); 590 String s1 = geoFileRelative.substring( 591 geoStartIdx, geoStartIdx + JPSSUtilities.NOAA_CREATION_DATE_INDEX); 592 String s2 = fName.substring(prdStartIdx, 593 prdStartIdx + JPSSUtilities.NOAA_CREATION_DATE_INDEX); 594 if (s1.equals(s2)) { 595 geoFilename = s 596 .substring( 597 0, 598 s.lastIndexOf(File.separatorChar) + 1) 599 + fName; 600 break; 601 } 602 } 603 // same check, but for Terrain Corrected version 604 if (fName.substring(0, 5).equals( 605 geoTerrainCorrected.substring(0, 5))) { 606 int geoStartIdx = geoTerrainCorrected 607 .indexOf("_d"); 608 int prdStartIdx = fName.indexOf("_d"); 609 String s1 = geoTerrainCorrected.substring( 610 geoStartIdx, geoStartIdx + JPSSUtilities.NOAA_CREATION_DATE_INDEX); 611 String s2 = fName.substring(prdStartIdx, 612 prdStartIdx + JPSSUtilities.NOAA_CREATION_DATE_INDEX); 613 if (s1.equals(s2)) { 614 geoFilename = s 615 .substring( 616 0, 617 s.lastIndexOf(File.separatorChar) + 1) 618 + fName; 619 break; 620 } 621 } 622 } 623 } 624 } else { 625 // NASA format 626 geoFilename = JPSSUtilities.replaceLast(s, "L1B", "GEO"); 627 // get list of files in current directory 628 File fList = 629 new File(geoFilename.substring(0, geoFilename.lastIndexOf(File.separatorChar) + 1)); 630 // make a NASA style file filter, and see if a matching geo file is present 631 FilenameFilter geoFilter = new FilenameFilter() { 632 public boolean accept(File dir, String name) { 633 if (name.matches(JPSSUtilities.SUOMI_GEO_REGEX_NASA)) { 634 return true; 635 } else { 636 return false; 637 } 638 } 639 }; 640 File[] files = fList.listFiles(geoFilter); 641 for (File file : files) { 642 if (file.isDirectory()) { 643 continue; 644 } 645 // get the file name for convenience 646 String fName = file.getName(); 647 String tmpStr = 
geoFilename.substring(s.lastIndexOf(File.separatorChar) + 1, 648 s.lastIndexOf(File.separatorChar) + (JPSSUtilities.NASA_CREATION_DATE_INDEX + 1)); 649 if (fName.substring(0, JPSSUtilities.NASA_CREATION_DATE_INDEX).equals(tmpStr.substring(0, JPSSUtilities.NASA_CREATION_DATE_INDEX))) { 650 geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1) + fName; 651 break; 652 } 653 } 654 } 655 logger.debug("Determined GEO file name should be: " + geoFilename); 656 fGeo.setAttribute("location", geoFilename); 657 // add this to list used if we create a zipped bundle 658 geoSources.add(geoFilename); 659 agg.addContent(fGeo); 660 } 661 662 root.addContent(agg); 663 XMLOutputter xmlOut = new XMLOutputter(); 664 String ncmlStr = xmlOut.outputString(document); 665 ByteArrayInputStream is = new ByteArrayInputStream(ncmlStr.getBytes()); 666 MultiDimensionReader netCDFReader = new NetCDFFile(is); 667 668 // let's try and look through the NetCDF reader and see what we can learn... 669 NetcdfFile ncdff = ((NetCDFFile) netCDFReader).getNetCDFFile(); 670 671 Group rg = ncdff.getRootGroup(); 672 // this is a list filled with unpacked qflag products, if any 673 ArrayList<VariableDS> qfProds = new ArrayList<VariableDS>(); 674 675 // this is a list filled with pseudo Brightness Temp variables converted from Radiance 676 ArrayList<VariableDS> btProds = new ArrayList<VariableDS>(); 677 678 List<Group> gl = rg.getGroups(); 679 if (gl != null) { 680 int xDimNASA = -1; 681 int yDimNASA = -1; 682 // Make a first pass to determine the shape of the geolocation data 683 for (Group g : gl) { 684 if (g.getFullName().contains("geolocation_data")) { 685 List<Variable> vl = g.getVariables(); 686 for (Variable v : vl) { 687 if (v.getShortName().equals("latitude")) { 688 // XXX TJJ Nov 2015 689 // Hack because fill value in attribute does not match 690 // what I am seeing in the data. 
691 Attribute fillAtt = new Attribute("_FillValue", -999.0); 692 v.addAttribute(fillAtt); 693 pathToLat = v.getFullName(); 694 pathToProducts.add(v.getFullName()); 695 prodToDesc.put(v.getFullName(), v.getDescription()); 696 xDimNASA = v.getDimension(0).getLength(); 697 yDimNASA = v.getDimension(1).getLength(); 698 } 699 if (v.getShortName().equals("longitude")) { 700 // XXX TJJ Nov 2015 701 // Hack because fill value in attribute does not match 702 // what I am seeing in the data. 703 Attribute fillAtt = new Attribute("_FillValue", -999.0); 704 v.addAttribute(fillAtt); 705 pathToLon = v.getFullName(); 706 pathToProducts.add(v.getFullName()); 707 prodToDesc.put(v.getFullName(), v.getDescription()); 708 } 709 } 710 } 711 } 712 for (Group g : gl) { 713 logger.debug("Group name: " + g.getFullName()); 714 // NASA only - looking through observation_data and geolocation_data 715 if (g.getFullName().contains("observation_data")) { 716 List<Variable> vl = g.getVariables(); 717 for (Variable v : vl) { 718 // keep any data which matches geolocation dimensions 719 if (v.getDimension(0).getLength() == xDimNASA && 720 v.getDimension(1).getLength() == yDimNASA) { 721 logger.debug("Adding product: " + v.getFullName()); 722 pathToProducts.add(v.getFullName()); 723 prodToDesc.put(v.getFullName(), v.getDescription()); 724 Attribute aUnsigned = v.findAttribute("_Unsigned"); 725 if (aUnsigned != null) { 726 unsignedFlags.put(v.getFullName(), aUnsigned.getStringValue()); 727 } else { 728 unsignedFlags.put(v.getFullName(), "false"); 729 } 730 731 // store units in a map for later 732 Attribute unitAtt = v.findAttribute("units"); 733 if (unitAtt != null) { 734 unitsNASA.put(v.getShortName(), unitAtt.getStringValue()); 735 } else { 736 unitsNASA.put(v.getShortName(), "Unknown"); 737 } 738 739 // TJJ Feb 2016 - Create BT variables where applicable 740 if ((v.getShortName().matches("M12|M13|M14|M15|M16")) || 741 (v.getShortName().matches("I04|I05"))) { 742 743 // Get the LUT variable, load 
into primitive array 744 Variable lut = g.findVariable(v.getShortName() + "_brightness_temperature_lut"); 745 int [] lutShape = lut.getShape(); 746 logger.debug("Handling NASA LUT Variable, LUT size: " + lutShape[0]); 747 748 // pull out valid min, max - these will be used for our new VariableDS 749 Attribute aVMin = lut.findAttribute("valid_min"); 750 Attribute aVMax = lut.findAttribute("valid_max"); 751 Attribute fillAtt = lut.findAttribute("_FillValue"); 752 logger.debug("valid_min from LUT: " + aVMin.getNumericValue()); 753 logger.debug("valid_max from LUT: " + aVMax.getNumericValue()); 754 755 // A little hacky, but at this point the class is such a mess 756 // that what's a little more, right? Load M12-M16, I4-I5 LUTS 757 758 if (v.getShortName().matches("M12")) { 759 m12LUT = new float[lutShape[0]]; 760 ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read(); 761 for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) { 762 m12LUT[lutIdx] = lutArray.get(lutIdx); 763 } 764 } 765 766 if (v.getShortName().matches("M13")) { 767 m13LUT = new float[lutShape[0]]; 768 ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read(); 769 for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) { 770 m13LUT[lutIdx] = lutArray.get(lutIdx); 771 } 772 } 773 774 if (v.getShortName().matches("M14")) { 775 m14LUT = new float[lutShape[0]]; 776 ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read(); 777 for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) { 778 m14LUT[lutIdx] = lutArray.get(lutIdx); 779 } 780 } 781 782 if (v.getShortName().matches("M15")) { 783 m15LUT = new float[lutShape[0]]; 784 ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read(); 785 for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) { 786 m15LUT[lutIdx] = lutArray.get(lutIdx); 787 } 788 } 789 790 if (v.getShortName().matches("M16")) { 791 m16LUT = new float[lutShape[0]]; 792 ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read(); 793 for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) { 794 m16LUT[lutIdx] = lutArray.get(lutIdx); 
795 } 796 } 797 798 if (v.getShortName().matches("I04")) { 799 i04LUT = new float[lutShape[0]]; 800 ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read(); 801 for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) { 802 i04LUT[lutIdx] = lutArray.get(lutIdx); 803 } 804 } 805 806 if (v.getShortName().matches("I05")) { 807 i05LUT = new float[lutShape[0]]; 808 ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read(); 809 for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) { 810 i05LUT[lutIdx] = lutArray.get(lutIdx); 811 } 812 } 813 814 // Create a pseudo-variable, fill using LUT 815 // make a copy of the source variable 816 // NOTE: by using a VariableDS here, the original 817 // variable is used for the I/O, this matters! 818 VariableDS vBT = new VariableDS(g, v, false); 819 820 // Name is orig name plus suffix 821 vBT.setShortName(v.getShortName() + "_BT"); 822 823 vBT.addAttribute(fillAtt); 824 vBT.addAttribute(aVMin); 825 vBT.addAttribute(aVMax); 826 827 if (v.getShortName().matches("M12")) { 828 lutMap.put(vBT.getFullName(), m12LUT); 829 } 830 if (v.getShortName().matches("M13")) { 831 lutMap.put(vBT.getFullName(), m13LUT); 832 } 833 if (v.getShortName().matches("M14")) { 834 lutMap.put(vBT.getFullName(), m14LUT); 835 } 836 if (v.getShortName().matches("M15")) { 837 lutMap.put(vBT.getFullName(), m15LUT); 838 } 839 if (v.getShortName().matches("M16")) { 840 lutMap.put(vBT.getFullName(), m16LUT); 841 } 842 if (v.getShortName().matches("I04")) { 843 lutMap.put(vBT.getFullName(), i04LUT); 844 } 845 if (v.getShortName().matches("I05")) { 846 lutMap.put(vBT.getFullName(), i05LUT); 847 } 848 pathToProducts.add(vBT.getFullName()); 849 String newName = vBT.getDescription().replace("radiance", "brightness temperature"); 850 prodToDesc.put(vBT.getFullName(), newName); 851 btProds.add(vBT); 852 } 853 } 854 } 855 } 856 if (g.getFullName().contains("geolocation_data")) { 857 List<Variable> vl = g.getVariables(); 858 for (Variable v : vl) { 859 // keep any data which matches geolocation 
dimensions 860 if (v.getDimension(0).getLength() == xDimNASA && 861 v.getDimension(1).getLength() == yDimNASA) { 862 // except we already found Lat and Lon, skip those 863 if ((v.getShortName().equals("latitude")) || 864 (v.getShortName().equals("latitude"))) continue; 865 logger.debug("Adding product: " + v.getFullName()); 866 pathToProducts.add(v.getFullName()); 867 prodToDesc.put(v.getFullName(), v.getDescription()); 868 } 869 } 870 } 871 872 // NOAA only - we are looking through All_Data, finding displayable data 873 if (g.getFullName().contains("All_Data")) { 874 List<Group> adg = g.getGroups(); 875 int xDim = -1; 876 int yDim = -1; 877 878 // two sub-iterations, first one to find geolocation and product dimensions 879 for (Group subG : adg) { 880 logger.debug("Sub group name: " + subG.getFullName()); 881 String subName = subG.getFullName(); 882 if (subName.contains("-GEO")) { 883 // this is the geolocation data 884 String geoBaseName = subG.getShortName(); 885 geoBaseName = geoBaseName.substring(0, geoBaseName.indexOf('_')); 886 if (! 
haveGeoMetaData) { 887 String geoProfileFileName = nppPP.getProfileFileName(geoBaseName); 888 // also add meta data from geolocation profile 889 nppPP.addMetaDataFromFile(geoProfileFileName); 890 haveGeoMetaData = true; 891 } 892 List<Variable> vl = subG.getVariables(); 893 for (Variable v : vl) { 894 if (v.getFullName().endsWith(SEPARATOR_CHAR + "Latitude")) { 895 pathToLat = v.getFullName(); 896 logger.debug("Ellipsoid Lat/Lon Variable: " + v.getFullName()); 897 // get the dimensions of the lat variable 898 Dimension dAlongTrack = v.getDimension(0); 899 yDim = dAlongTrack.getLength(); 900 Dimension dAcrossTrack = v.getDimension(1); 901 xDim = dAcrossTrack.getLength(); 902 logger.debug("Lat across track dim: " + dAcrossTrack.getLength()); 903 } 904 if (v.getFullName().endsWith(SEPARATOR_CHAR + "Longitude")) { 905 // we got dimensions from lat, don't need 'em twice, but need path 906 pathToLon = v.getFullName(); 907 } 908 } 909 // one more pass in case there is terrain-corrected Lat/Lon 910 for (Variable v : vl) { 911 if (v.getFullName().endsWith(SEPARATOR_CHAR + "Latitude_TC")) { 912 pathToLat = v.getFullName(); 913 logger.debug("Switched Lat/Lon Variable to TC: " + v.getFullName()); 914 // get the dimensions of the lat variable 915 Dimension dAlongTrack = v.getDimension(0); 916 yDim = dAlongTrack.getLength(); 917 Dimension dAcrossTrack = v.getDimension(1); 918 xDim = dAcrossTrack.getLength(); 919 logger.debug("Lat across track dim: " + dAcrossTrack.getLength()); 920 } 921 if (v.getFullName().endsWith(SEPARATOR_CHAR + "Longitude_TC")) { 922 // we got dimensions from lat, don't need 'em twice, but need path 923 pathToLon = v.getFullName(); 924 } 925 } 926 } 927 } 928 929 // second to identify displayable products 930 for (Group subG : adg) { 931 // this is the product data 932 List<Variable> vl = subG.getVariables(); 933 for (Variable v : vl) { 934 boolean useThis = false; 935 String vName = v.getFullName(); 936 logger.trace("Variable: " + vName); 937 String 
varShortName = vName.substring(vName.lastIndexOf(SEPARATOR_CHAR) + 1); 938 939 // Special code to handle quality flags. We throw out anything 940 // that does not match bounds of the geolocation data 941 942 if (varShortName.startsWith("QF")) { 943 944 logger.trace("Handling Quality Flag: " + varShortName); 945 946 // this check is done later for ALL variables, but we need 947 // it early here to weed out those quality flags that are 948 // simply a small set of data w/no granule geo nbounds 949 boolean xScanOk = false; 950 boolean yScanOk = false; 951 List<Dimension> dl = v.getDimensions(); 952 953 // toss out > 2D Quality Flags 954 if (dl.size() > 2) { 955 logger.trace("SKIPPING QF, > 2D: " + varShortName); 956 continue; 957 } 958 959 for (Dimension d : dl) { 960 // in order to consider this a displayable product, make sure 961 // both scan direction dimensions are present and look like a granule 962 if (d.getLength() == xDim) { 963 xScanOk = true; 964 } 965 if (d.getLength() == yDim) { 966 yScanOk = true; 967 } 968 } 969 970 if (! (xScanOk && yScanOk)) { 971 logger.trace("SKIPPING QF, does not match geo bounds: " + varShortName); 972 continue; 973 } 974 975 ArrayList<QualityFlag> qfal = nppPP.getQualityFlags(varShortName); 976 if (qfal != null) { 977 for (QualityFlag qf : qfal) { 978 qf.setPackedName(vName); 979 // make a copy of the qflag variable 980 // NOTE: by using a VariableDS here, the original 981 // variable is used for the I/O, this matters! 982 VariableDS vqf = new VariableDS(subG, v, false); 983 // prefix with QF num to help guarantee uniqueness across groups 984 // this will cover most cases, but could still be dupe names 985 // within a single QF. 
This is handled when fetching XMLPP metadata 986 vqf.setShortName( 987 varShortName.substring(0, 3) + "_" + qf.getName() 988 ); 989 logger.debug("New QF var full name: " + vqf.getFullName()); 990 qfProds.add(vqf); 991 qfMap.put(vqf.getFullName(), qf); 992 } 993 } 994 } 995 996 // for CrIS instrument, first find dimensions of var matching 997 // CrIS filter, then throw out all variables which don't match 998 // those dimensions 999 1000 if (instrumentName.getStringValue().equals("CrIS")) { 1001 if (! vName.contains("GEO")) { 1002 if (! varShortName.startsWith(crisFilter)) { 1003 logger.trace("Skipping variable: " + varShortName); 1004 continue; 1005 } 1006 } else { 1007 // these variables are all GEO-related 1008 // if they match lat/lon bounds, keep them 1009 List<Dimension> dl = v.getDimensions(); 1010 if (dl.size() == 3) { 1011 boolean isDisplayableCrIS = true; 1012 for (Dimension d : dl) { 1013 if ((d.getLength() != xDim) && (d.getLength() != yDim) && (d.getLength() != 9)) { 1014 isDisplayableCrIS = false; 1015 } 1016 } 1017 if (! isDisplayableCrIS) { 1018 continue; 1019 } 1020 } 1021 } 1022 } 1023 1024 // for OMPS, only Radiance for now... 1025 if (instrumentName.getStringValue().contains("OMPS")) { 1026 if (! 
varShortName.startsWith(ompsFilter)) { 1027 logger.trace("Skipping OMPS variable: " + varShortName); 1028 continue; 1029 } 1030 } 1031 1032 DataType dt = v.getDataType(); 1033 if ((dt.getSize() != 4) && (dt.getSize() != 2) && (dt.getSize() != 1)) { 1034 continue; 1035 } 1036 1037 List<Dimension> dl = v.getDimensions(); 1038 if (dl.size() > 4) { 1039 continue; 1040 } 1041 1042 // for now, skip any 3D VIIRS data 1043 if (instrumentName.getStringValue().equals("VIIRS")) { 1044 if (dl.size() == 3) { 1045 continue; 1046 } 1047 } 1048 1049 boolean xScanOk = false; 1050 boolean yScanOk = false; 1051 for (Dimension d : dl) { 1052 // in order to consider this a displayable product, make sure 1053 // both scan direction dimensions are present and look like a granule 1054 if (d.getLength() == xDim) { 1055 xScanOk = true; 1056 } 1057 if (d.getLength() == yDim) { 1058 yScanOk = true; 1059 } 1060 } 1061 1062 if (xScanOk && yScanOk) { 1063 useThis = true; 1064 } 1065 1066 // For ATMS, only 3-D variable we pass through is BrightnessTemperature 1067 // Dimensions for BT are (lon, lat, channel) 1068 if (instrumentName.getStringValue().equals("ATMS")) { 1069 if (dl.size() == 3) { 1070 boolean isDisplayableATMS = false; 1071 for (Dimension d : dl) { 1072 if (d.getLength() == JPSSUtilities.ATMSChannelCenterFrequencies.length) { 1073 isDisplayableATMS = true; 1074 logger.trace("This variable has a dimension matching num ATMS channels"); 1075 break; 1076 } 1077 } 1078 if (! 
isDisplayableATMS) useThis = false; 1079 } 1080 } 1081 1082 // sensor data with a channel dimension 1083 if (useThis) { 1084 if ((instrumentName.getStringValue().equals("CrIS")) || 1085 (instrumentName.getStringValue().equals("ATMS")) || 1086 (instrumentName.getStringValue().contains("OMPS"))) { 1087 isVIIRS = false; 1088 logger.debug("Handling non-VIIRS data source..."); 1089 } 1090 } 1091 1092 if (useThis) { 1093 // loop through the variable list again, looking for a corresponding "Factors" 1094 float scaleVal = 1f; 1095 float offsetVal = 0f; 1096 boolean unpackFlag = false; 1097 1098 // if the granule has an entry for this variable name 1099 // get the data, data1 = scale, data2 = offset 1100 // create and poke attributes with this data 1101 // endif 1102 1103 String factorsVarName = nppPP.getScaleFactorName(varShortName); 1104 logger.debug("Mapping: " + varShortName + " to: " + factorsVarName); 1105 if (factorsVarName != null) { 1106 for (Variable fV : vl) { 1107 if (fV.getShortName().equals(factorsVarName)) { 1108 logger.trace("Pulling scale and offset values from variable: " + fV.getShortName()); 1109 ucar.ma2.Array a = fV.read(); 1110 float[] so = (float[]) a.copyTo1DJavaArray(); 1111 scaleVal = so[0]; 1112 offsetVal = so[1]; 1113 logger.trace("Scale value: " + scaleVal + ", Offset value: " + offsetVal); 1114 unpackFlag = true; 1115 break; 1116 } 1117 } 1118 } 1119 1120 // poke in scale/offset attributes for now 1121 1122 Attribute a1 = new Attribute("scale_factor", scaleVal); 1123 v.addAttribute(a1); 1124 Attribute a2 = new Attribute("add_offset", offsetVal); 1125 v.addAttribute(a2); 1126 1127 // add valid range and fill value attributes here 1128 // try to fill in valid range 1129 if (nppPP.hasNameAndMetaData(varShortName)) { 1130 String rangeMin = nppPP.getRangeMin(varShortName); 1131 String rangeMax = nppPP.getRangeMax(varShortName); 1132 logger.trace("range min: " + rangeMin + ", range max: " + rangeMax); 1133 // only store range attribute if VALID 
range found 1134 if ((rangeMin != null) && (rangeMax != null)) { 1135 int [] shapeArr = new int [] { 2 }; 1136 ArrayFloat af = new ArrayFloat(shapeArr); 1137 try { 1138 af.setFloat(0, Float.parseFloat(rangeMin)); 1139 } catch (NumberFormatException nfe) { 1140 af.setFloat(0, new Float(Integer.MIN_VALUE)); 1141 } 1142 try { 1143 af.setFloat(1, Float.parseFloat(rangeMax)); 1144 } catch (NumberFormatException nfe) { 1145 af.setFloat(1, new Float(Integer.MAX_VALUE)); 1146 } 1147 Attribute rangeAtt = new Attribute("valid_range", af); 1148 v.addAttribute(rangeAtt); 1149 } 1150 1151 // check for and load fill values too... 1152 1153 // we need to check two places, first, the XML product profile 1154 ArrayList<Float> fval = nppPP.getFillValues(varShortName); 1155 1156 // 2nd, does the variable already have one defined? 1157 // if there was already a fill value associated with this variable, make 1158 // sure we bring that along for the ride too... 1159 Attribute aFill = v.findAttribute("_FillValue"); 1160 1161 // determine size of our fill value array 1162 int fvArraySize = 0; 1163 if (aFill != null) fvArraySize++; 1164 if (! fval.isEmpty()) fvArraySize += fval.size(); 1165 int [] fillShape = new int [] { fvArraySize }; 1166 1167 // allocate the array 1168 ArrayFloat afFill = new ArrayFloat(fillShape); 1169 1170 // and FINALLY, fill it! 1171 if (! fval.isEmpty()) { 1172 for (int fillIdx = 0; fillIdx < fval.size(); fillIdx++) { 1173 afFill.setFloat(fillIdx, fval.get(fillIdx)); 1174 logger.trace("Adding fill value (from XML): " + fval.get(fillIdx)); 1175 } 1176 } 1177 1178 if (aFill != null) { 1179 Number n = aFill.getNumericValue(); 1180 // is the data unsigned? 
1181 Attribute aUnsigned = v.findAttribute("_Unsigned"); 1182 float fillValAsFloat = Float.NaN; 1183 if (aUnsigned != null) { 1184 if (aUnsigned.getStringValue().equals("true")) { 1185 DataType fvdt = aFill.getDataType(); 1186 logger.trace("Data String: " + aFill.toString()); 1187 logger.trace("DataType primitive type: " + fvdt.getPrimitiveClassType()); 1188 // signed byte that needs conversion? 1189 if (fvdt.getPrimitiveClassType() == byte.class) { 1190 fillValAsFloat = (float) Util.unsignedByteToInt(n.byteValue()); 1191 } 1192 else if (fvdt.getPrimitiveClassType() == short.class) { 1193 fillValAsFloat = (float) Util.unsignedShortToInt(n.shortValue()); 1194 } else { 1195 fillValAsFloat = n.floatValue(); 1196 } 1197 } 1198 } 1199 afFill.setFloat(fvArraySize - 1, fillValAsFloat); 1200 logger.trace("Adding fill value (from variable): " + fillValAsFloat); 1201 } 1202 Attribute fillAtt = new Attribute("_FillValue", afFill); 1203 v.addAttribute(fillAtt); 1204 } 1205 1206 Attribute aUnsigned = v.findAttribute("_Unsigned"); 1207 if (aUnsigned != null) { 1208 unsignedFlags.put(v.getFullName(), aUnsigned.getStringValue()); 1209 } else { 1210 unsignedFlags.put(v.getFullName(), "false"); 1211 } 1212 1213 if (unpackFlag) { 1214 unpackFlags.put(v.getFullName(), "true"); 1215 } else { 1216 unpackFlags.put(v.getFullName(), "false"); 1217 } 1218 1219 logger.debug("Adding product: " + v.getFullName()); 1220 pathToProducts.add(v.getFullName()); 1221 prodToDesc.put(v.getFullName(), v.getDescription()); 1222 } 1223 } 1224 } 1225 } 1226 } 1227 } 1228 1229 // add in any unpacked qflag products 1230 for (VariableDS qfV: qfProds) { 1231 // skip the spares - they are reserved for future use 1232 if (qfV.getFullName().endsWith("Spare")) { 1233 continue; 1234 } 1235 // String.endsWith is case sensitive so gotta check both cases 1236 if (qfV.getFullName().endsWith("spare")) { 1237 continue; 1238 } 1239 ncdff.addVariable(qfV.getGroup(), qfV); 1240 logger.trace("Adding QF product: " + 
qfV.getFullName()); 1241 pathToProducts.add(qfV.getFullName()); 1242 prodToDesc.put(qfV.getFullName(), qfV.getDescription()); 1243 unsignedFlags.put(qfV.getFullName(), "true"); 1244 unpackFlags.put(qfV.getFullName(), "false"); 1245 } 1246 1247 // add in any pseudo BT products from NASA data 1248 for (Variable vBT: btProds) { 1249 logger.trace("Adding BT product: " + vBT.getFullName()); 1250 ncdff.addVariable(vBT.getGroup(), vBT); 1251 unsignedFlags.put(vBT.getFullName(), "true"); 1252 unpackFlags.put(vBT.getFullName(), "false"); 1253 } 1254 1255 ncdfal.add((NetCDFFile) netCDFReader); 1256 } 1257 1258 } catch (Exception e) { 1259 logger.error("cannot create NetCDF reader for files selected", e); 1260 if (e.getMessage() != null && e.getMessage().equals("XML Product Profile Error")) { 1261 throw new VisADException("Unable to extract metadata from required XML Product Profile", e); 1262 } 1263 } 1264 1265 // initialize the aggregation reader object 1266 try { 1267 if (isNOAA) { 1268 nppAggReader = new GranuleAggregation(ncdfal, pathToProducts, "Track", "XTrack", isVIIRS); 1269 ((GranuleAggregation) nppAggReader).setQfMap(qfMap); 1270 } else { 1271 nppAggReader = new GranuleAggregation(ncdfal, pathToProducts, "number_of_lines", "number_of_pixels", isVIIRS); 1272 ((GranuleAggregation) nppAggReader).setLUTMap(lutMap); 1273 } 1274 } catch (Exception e) { 1275 throw new VisADException("Unable to initialize aggregation reader", e); 1276 } 1277 1278 // make sure we found valid data 1279 if (pathToProducts.size() == 0) { 1280 throw new VisADException("No data found in files selected"); 1281 } 1282 1283 logger.debug("Number of adapters needed: " + pathToProducts.size()); 1284 adapters = new MultiDimensionAdapter[pathToProducts.size()]; 1285 Hashtable<String, String[]> properties = new Hashtable<>(); 1286 1287 Iterator<String> iterator = pathToProducts.iterator(); 1288 int pIdx = 0; 1289 boolean adapterCreated = false; 1290 while (iterator.hasNext()) { 1291 String pStr = 
iterator.next(); 1292 logger.debug("Working on adapter number " + (pIdx + 1) + ": " + pStr); 1293 Map<String, Object> swathTable = SwathAdapter.getEmptyMetadataTable(); 1294 Map<String, Object> spectTable = SpectrumAdapter.getEmptyMetadataTable(); 1295 swathTable.put("array_name", pStr); 1296 swathTable.put("lon_array_name", pathToLon); 1297 swathTable.put("lat_array_name", pathToLat); 1298 swathTable.put("XTrack", "XTrack"); 1299 swathTable.put("Track", "Track"); 1300 swathTable.put("geo_Track", "Track"); 1301 swathTable.put("geo_XTrack", "XTrack"); 1302 // TJJ is this even needed? Is product_name used anywhere? 1303 if (productName == null) productName = pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1); 1304 swathTable.put("product_name", productName); 1305 swathTable.put("_mapping", prodToDesc); 1306 // array_name common to spectrum table 1307 spectTable.put("array_name", pStr); 1308 spectTable.put("product_name", productName); 1309 spectTable.put("_mapping", prodToDesc); 1310 1311 if (! 
isVIIRS) { 1312 1313 // 3D data is either ATMS, OMPS, or CrIS 1314 if ((instrumentName.getShortName() != null) && (instrumentName.getStringValue().equals("ATMS"))) { 1315 1316 spectTable.put(SpectrumAdapter.channelIndex_name, "Channel"); 1317 swathTable.put(SpectrumAdapter.channelIndex_name, "Channel"); 1318 1319 swathTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"}); 1320 swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"}); 1321 swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"}); 1322 spectTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"}); 1323 spectTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"}); 1324 spectTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"}); 1325 1326 spectTable.put(SpectrumAdapter.channelType, "wavelength"); 1327 spectTable.put(SpectrumAdapter.channels_name, "Channel"); 1328 spectTable.put(SpectrumAdapter.x_dim_name, "XTrack"); 1329 spectTable.put(SpectrumAdapter.y_dim_name, "Track"); 1330 1331 int numChannels = JPSSUtilities.ATMSChannelCenterFrequencies.length; 1332 float[] bandArray = new float[numChannels]; 1333 String[] bandNames = new String[numChannels]; 1334 for (int bIdx = 0; bIdx < numChannels; bIdx++) { 1335 bandArray[bIdx] = JPSSUtilities.ATMSChannelCenterFrequencies[bIdx]; 1336 bandNames[bIdx] = "Channel " + (bIdx + 1); 1337 } 1338 spectTable.put(SpectrumAdapter.channelValues, bandArray); 1339 spectTable.put(SpectrumAdapter.bandNames, bandNames); 1340 1341 } else { 1342 if (instrumentName.getStringValue().equals("CrIS")) { 1343 1344 swathTable.put("XTrack", "dim1"); 1345 swathTable.put("Track", "dim0"); 1346 swathTable.put("geo_XTrack", "dim1"); 1347 swathTable.put("geo_Track", "dim0"); 1348 swathTable.put("product_name", "CrIS_SDR"); 1349 swathTable.put(SpectrumAdapter.channelIndex_name, "dim3"); 1350 swathTable.put(SpectrumAdapter.FOVindex_name, "dim2"); 1351 1352 
spectTable.put(SpectrumAdapter.channelIndex_name, "dim3"); 1353 spectTable.put(SpectrumAdapter.FOVindex_name, "dim2"); 1354 spectTable.put(SpectrumAdapter.x_dim_name, "dim1"); 1355 spectTable.put(SpectrumAdapter.y_dim_name, "dim0"); 1356 1357 } else if (instrumentName.getStringValue().contains("OMPS")) { 1358 1359 spectTable.put(SpectrumAdapter.channelIndex_name, "Channel"); 1360 swathTable.put(SpectrumAdapter.channelIndex_name, "Channel"); 1361 1362 swathTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"}); 1363 swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"}); 1364 swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"}); 1365 spectTable.put("array_dimension_names", new String[] {"Track", "XTrack", "Channel"}); 1366 spectTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"}); 1367 spectTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"}); 1368 1369 spectTable.put(SpectrumAdapter.channelType, "wavelength"); 1370 spectTable.put(SpectrumAdapter.channels_name, "Channel"); 1371 spectTable.put(SpectrumAdapter.x_dim_name, "XTrack"); 1372 spectTable.put(SpectrumAdapter.y_dim_name, "Track"); 1373 1374 int numChannels = 200; 1375 if (instrumentName.getStringValue().equals("OMPS-TC")) { 1376 numChannels = 260; 1377 } 1378 logger.debug("Setting up OMPS adapter, num channels: " + numChannels); 1379 float[] bandArray = new float[numChannels]; 1380 String[] bandNames = new String[numChannels]; 1381 for (int bIdx = 0; bIdx < numChannels; bIdx++) { 1382 bandArray[bIdx] = bIdx; 1383 bandNames[bIdx] = "Channel " + (bIdx + 1); 1384 } 1385 spectTable.put(SpectrumAdapter.channelValues, bandArray); 1386 spectTable.put(SpectrumAdapter.bandNames, bandNames); 1387 1388 } else { 1389 // sorry, if we can't id the instrument, we can't display the data! 
1390 throw new VisADException("Unable to determine instrument name"); 1391 } 1392 } 1393 1394 } else { 1395 swathTable.put("array_dimension_names", new String[] {"Track", "XTrack"}); 1396 swathTable.put("lon_array_dimension_names", new String[] {"Track", "XTrack"}); 1397 swathTable.put("lat_array_dimension_names", new String[] {"Track", "XTrack"}); 1398 } 1399 1400 swathTable.put("scale_name", "scale_factor"); 1401 swathTable.put("offset_name", "add_offset"); 1402 swathTable.put("fill_value_name", "_FillValue"); 1403 swathTable.put("range_name", pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1)); 1404 spectTable.put("range_name", pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1)); 1405 1406 // set the valid range hash if data is available 1407 if (nppPP != null) { 1408 if (nppPP.getRangeMin(pStr.substring(pStr.lastIndexOf(SEPARATOR_CHAR) + 1)) != null) { 1409 swathTable.put("valid_range", "valid_range"); 1410 } 1411 } 1412 1413 String unsignedAttributeStr = unsignedFlags.get(pStr); 1414 if ((unsignedAttributeStr != null) && (unsignedAttributeStr.equals("true"))) { 1415 swathTable.put("unsigned", unsignedAttributeStr); 1416 } 1417 1418 String unpackFlagStr = unpackFlags.get(pStr); 1419 if ((unpackFlagStr != null) && (unpackFlagStr.equals("true"))) { 1420 swathTable.put("unpack", "true"); 1421 } 1422 1423 // For Suomi NPP data, do valid range check AFTER applying scale/offset 1424 swathTable.put("range_check_after_scaling", "true"); 1425 1426 // pass in a GranuleAggregation reader... 1427 if (! 
isVIIRS) { 1428 if (instrumentName.getStringValue().equals("ATMS")) { 1429 adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable); 1430 adapterCreated = true; 1431 SpectrumAdapter sa = new SpectrumAdapter(nppAggReader, spectTable); 1432 DataCategory.createCategory("MultiSpectral"); 1433 categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE"); 1434 MultiSpectralData msd = new MultiSpectralData((SwathAdapter) adapters[pIdx], sa, 1435 "BrightnessTemperature", "BrightnessTemperature", "SuomiNPP", "ATMS"); 1436 msd.setInitialWavenumber(JPSSUtilities.ATMSChannelCenterFrequencies[0]); 1437 multiSpectralData.add(msd); 1438 } 1439 if (instrumentName.getStringValue().equals("CrIS")) { 1440 if (pStr.contains(crisFilter)) { 1441 adapters[pIdx] = new CrIS_SDR_SwathAdapter(nppAggReader, swathTable); 1442 adapterCreated = true; 1443 CrIS_SDR_Spectrum csa = new CrIS_SDR_Spectrum(nppAggReader, spectTable); 1444 DataCategory.createCategory("MultiSpectral"); 1445 categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE"); 1446 MultiSpectralData msd = new CrIS_SDR_MultiSpectralData((CrIS_SDR_SwathAdapter) adapters[pIdx], csa); 1447 msd.setInitialWavenumber(csa.getInitialWavenumber()); 1448 msd_CrIS.add(msd); 1449 } 1450 } 1451 if (instrumentName.getStringValue().contains("OMPS")) { 1452 adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable); 1453 adapterCreated = true; 1454 SpectrumAdapter sa = new SpectrumAdapter(nppAggReader, spectTable); 1455 DataCategory.createCategory("MultiSpectral"); 1456 categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE"); 1457 MultiSpectralData msd = new MultiSpectralData((SwathAdapter) adapters[pIdx], sa, 1458 "RadianceEarth", "RadianceEarth", "SuomiNPP", "OMPS"); 1459 msd.setInitialWavenumber(0); 1460 multiSpectralData.add(msd); 1461 } 1462 if (pIdx == 0) { 1463 // generate default subset for ATMS and OMPS 1464 if (! 
instrumentName.getStringValue().equals("CrIS")) { 1465 defaultSubset = multiSpectralData.get(pIdx).getDefaultSubset(); 1466 } 1467 } 1468 1469 } else { 1470 // setting NOAA-format units 1471 String varName = pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1); 1472 String varShortName = pStr.substring(pStr.lastIndexOf(SEPARATOR_CHAR) + 1); 1473 String units = nppPP.getUnits(varShortName); 1474 1475 // setting NASA-format units 1476 if (! isNOAA) { 1477 units = unitsNASA.get(varShortName); 1478 // Need to set _BT variables manually, since they are created on the fly 1479 if (varShortName.endsWith("_BT")) units = "Kelvin"; 1480 } 1481 if (units == null) units = "Unknown"; 1482 Unit u = null; 1483 try { 1484 u = Parser.parse(units); 1485 } catch (NoSuchUnitException e) { 1486 u = new DerivedUnit(units); 1487 logger.debug("Unknown units: " + units); 1488 } catch (ParseException e) { 1489 u = new DerivedUnit(units); 1490 logger.debug("Unparseable units: " + units); 1491 } 1492 // associate this variable with these units, if not done already 1493 RealType.getRealType(varName, u); 1494 adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable); 1495 adapterCreated = true; 1496 if (pIdx == 0) { 1497 defaultSubset = adapters[pIdx].getDefaultSubset(); 1498 } 1499 categories = DataCategory.parseCategories("IMAGE"); 1500 } 1501 // only increment count if we created an adapter, some products are skipped 1502 if (adapterCreated) pIdx++; 1503 adapterCreated = false; 1504 } 1505 1506 if (msd_CrIS.size() > 0) { 1507 try { 1508 MultiSpectralAggr aggr = new MultiSpectralAggr(msd_CrIS.toArray(new MultiSpectralData[msd_CrIS.size()])); 1509 aggr.setInitialWavenumber(902.25f); 1510 multiSpectralData.add(aggr); 1511 defaultSubset = ((MultiSpectralData) msd_CrIS.get(0)).getDefaultSubset(); 1512 } catch (Exception e) { 1513 logger.error("Exception: ", e); 1514 } 1515 } 1516 1517 // Merge with pre-set properties 1518 Hashtable tmpHt = getProperties(); 1519 tmpHt.putAll(properties); 1520 
setProperties(tmpHt); 1521 } 1522 1523 public void initAfterUnpersistence() { 1524 try { 1525 String zidvPath = 1526 McIDASV.getStaticMcv().getStateManager(). 1527 getProperty(IdvPersistenceManager.PROP_ZIDVPATH, ""); 1528 if (getTmpPaths() != null) { 1529 // New code for zipped bundles- 1530 // we want 'sources' to point to wherever the zipped data was unpacked. 1531 sources.clear(); 1532 // following PersistenceManager.fixBulkDataSources, get temporary data location 1533 for (Object o : getTmpPaths()) { 1534 String tempPath = (String) o; 1535 // replace macro string with actual path 1536 String expandedPath = tempPath.replace(PersistenceManager.MACRO_ZIDVPATH, zidvPath); 1537 // we don't want to add nav files to this list!: 1538 File f = new File(expandedPath); 1539 if (!f.getName().matches(JPSSUtilities.SUOMI_GEO_REGEX_NOAA)) { 1540 sources.add(expandedPath); 1541 } 1542 } 1543 1544 // mjh fix absolute paths in filenameMap 1545 logger.debug("original filenameMap: {}", filenameMap); 1546 Iterator keyIterator = filenameMap.keySet().iterator(); 1547 while (keyIterator.hasNext()) { 1548 String keyStr = (String) keyIterator.next(); 1549 List<String> fileNames = (List<String>) filenameMap.get(keyStr); 1550 for (int i = 0; i < fileNames.size(); i++) { 1551 String name = fileNames.get(i); 1552 int lastSeparator = name.lastIndexOf(File.separatorChar); 1553 String sub = name.substring(0, lastSeparator); 1554 name = name.replace(sub, zidvPath); 1555 fileNames.set(i, name); 1556 } 1557 } 1558 logger.debug("filenameMap with zidvPath: {}", filenameMap); 1559 } else { 1560 // leave in original unpersistence code - this will get run for unzipped bundles. 1561 // TODO: do we need to handle the "Save with relative paths" case specially? 1562 if (! 
oldSources.isEmpty()) { 1563 sources.clear(); 1564 for (Object o : oldSources) { 1565 sources.add((String) o); 1566 } 1567 } 1568 } 1569 oldSources.clear(); 1570 setup(); 1571 } catch (Exception e) { 1572 logger.error("Exception: ", e); 1573 } 1574 } 1575 1576 /* (non-Javadoc) 1577 * @see edu.wisc.ssec.mcidasv.data.HydraDataSource#canSaveDataToLocalDisk() 1578 */ 1579 @Override 1580 public boolean canSaveDataToLocalDisk() { 1581 // At present, Suomi data is always data granules on disk 1582 return true; 1583 } 1584 1585 /* (non-Javadoc) 1586 * @see ucar.unidata.data.DataSourceImpl#saveDataToLocalDisk(java.lang.String, java.lang.Object, boolean) 1587 */ 1588 @Override 1589 protected List saveDataToLocalDisk(String filePrefix, Object loadId, 1590 boolean changeLinks) throws Exception { 1591 // need to make a list of all data granule files 1592 // PLUS all geolocation granule files, but only if accessed separate! 1593 List<String> fileList = new ArrayList<String>(); 1594 for (Object o : sources) { 1595 fileList.add((String) o); 1596 } 1597 for (String s : geoSources) { 1598 fileList.add(s); 1599 } 1600 return fileList; 1601 } 1602 1603 public List<String> getOldSources() { 1604 return oldSources; 1605 } 1606 1607 public void setOldSources(List<String> oldSources) { 1608 this.oldSources = oldSources; 1609 } 1610 1611 public Map<String, List<String>> getFilenameMap() { 1612 return filenameMap; 1613 } 1614 1615 public void setFilenameMap(Map<String, List<String>> filenameMap) { 1616 this.filenameMap = filenameMap; 1617 } 1618 1619 /** 1620 * Make and insert the {@link DataChoice DataChoices} for this 1621 * {@code DataSource}. 
1622 */ 1623 1624 public void doMakeDataChoices() { 1625 1626 // special loop for CrIS, ATMS, and OMPS data 1627 if (multiSpectralData.size() > 0) { 1628 for (int k = 0; k < multiSpectralData.size(); k++) { 1629 MultiSpectralData adapter = multiSpectralData.get(k); 1630 DataChoice choice = null; 1631 try { 1632 choice = doMakeDataChoice(k, adapter); 1633 choice.setObjectProperty(Constants.PROP_GRANULE_COUNT, 1634 getProperty(Constants.PROP_GRANULE_COUNT, "1 Granule")); 1635 msdMap.put(choice.getName(), adapter); 1636 addDataChoice(choice); 1637 } catch (Exception e) { 1638 logger.error("Exception: ", e); 1639 } 1640 } 1641 return; 1642 } 1643 1644 // all other data (VIIRS and 2D EDRs) 1645 if (adapters != null) { 1646 for (int idx = 0; idx < adapters.length; idx++) { 1647 DataChoice choice = null; 1648 try { 1649 Map<String, Object> metadata = adapters[idx].getMetadata(); 1650 String description = null; 1651 if (metadata.containsKey("_mapping")) { 1652 String arrayName = metadata.get("array_name").toString(); 1653 Map<String, String> mapping = 1654 (Map<String, String>)metadata.get("_mapping"); 1655 description = mapping.get(arrayName); 1656 } 1657 choice = doMakeDataChoice(idx, adapters[idx].getArrayName(), description); 1658 choice.setObjectProperty(Constants.PROP_GRANULE_COUNT, 1659 getProperty(Constants.PROP_GRANULE_COUNT, "1 Granule")); 1660 } 1661 catch (Exception e) { 1662 logger.error("doMakeDataChoice failed", e); 1663 } 1664 1665 if (choice != null) { 1666 addDataChoice(choice); 1667 } 1668 } 1669 } 1670 } 1671 1672 private DataChoice doMakeDataChoice(int idx, String var, String description) throws Exception { 1673 String name = var; 1674 if (description == null) { 1675 description = name; 1676 } 1677 DataSelection dataSel = new MultiDimensionSubset(defaultSubset); 1678 Hashtable subset = new Hashtable(); 1679 subset.put(new MultiDimensionSubset(), dataSel); 1680 // TJJ Hack check for uber-odd case of data type varies for same variable 1681 // If it's M12 
- M16, it's a BrightnessTemperature, otherwise Reflectance 1682 if (name.endsWith("BrightnessTemperatureOrReflectance")) { 1683 name = name.substring(0, name.length() - "BrightnessTemperatureOrReflectance".length()); 1684 if (whichEDR.matches("M12|M13|M14|M15|M16")) { 1685 name = name + "BrightnessTemperature"; 1686 } else { 1687 name = name + "Reflectance"; 1688 } 1689 } 1690 DirectDataChoice ddc = new DirectDataChoice(this, idx, name, description, categories, subset); 1691 return ddc; 1692 } 1693 1694 private DataChoice doMakeDataChoice(int idx, MultiSpectralData adapter) throws Exception { 1695 String name = adapter.getName(); 1696 DataSelection dataSel = new MultiDimensionSubset(defaultSubset); 1697 Hashtable subset = new Hashtable(); 1698 subset.put(MultiDimensionSubset.key, dataSel); 1699 subset.put(MultiSpectralDataSource.paramKey, adapter.getParameter()); 1700 // TJJ Hack check for uber-odd case of data type varies for same variable 1701 // If it's M12 - M16, it's a BrightnessTemperature, otherwise Reflectance 1702 if (name.endsWith("BrightnessTemperatureOrReflectance")) { 1703 name = name.substring(0, name.length() - "BrightnessTemperatureOrReflectance".length()); 1704 if (whichEDR.matches("M12|M13|M14|M15|M16")) { 1705 name = name + "BrightnessTemperature"; 1706 } else { 1707 name = name + "Reflectance"; 1708 } 1709 } 1710 DirectDataChoice ddc = new DirectDataChoice(this, new Integer(idx), name, name, categories, subset); 1711 ddc.setProperties(subset); 1712 return ddc; 1713 } 1714 1715 /** 1716 * Check to see if this {@code SuomiNPPDataSource} is equal to the object 1717 * in question. 
1718 * @param o object in question 1719 * @return true if they are the same or equivalent objects 1720 */ 1721 1722 public boolean equals(Object o) { 1723 if ( !(o instanceof SuomiNPPDataSource)) { 1724 return false; 1725 } 1726 return (this == (SuomiNPPDataSource) o); 1727 } 1728 1729 public MultiSpectralData getMultiSpectralData() { 1730 return multiSpectralData.get(0); 1731 } 1732 1733 public MultiSpectralData getMultiSpectralData(DataChoice choice) { 1734 return msdMap.get(choice.getName()); 1735 } 1736 1737 public String getDatasetName() { 1738 return filename; 1739 } 1740 1741 /** 1742 * @return the qfMap 1743 */ 1744 public Map<String, QualityFlag> getQfMap() { 1745 return qfMap; 1746 } 1747 1748 public void setDatasetName(String name) { 1749 filename = name; 1750 } 1751 1752 /** 1753 * Determine if this data source originated from a 1754 * {@literal "NOAA file"}. 1755 * 1756 * @return {@code true} if file came from NOAA, {@code false} otherwise. 1757 */ 1758 public boolean isNOAA() { 1759 return isNOAA; 1760 } 1761 1762 public Map<String, double[]> getSubsetFromLonLatRect(MultiDimensionSubset select, GeoSelection geoSelection) { 1763 GeoLocationInfo ginfo = geoSelection.getBoundingBox(); 1764 return adapters[0].getSubsetFromLonLatRect(select.getSubset(), ginfo.getMinLat(), ginfo.getMaxLat(), 1765 ginfo.getMinLon(), ginfo.getMaxLon()); 1766 } 1767 1768 public synchronized Data getData(DataChoice dataChoice, DataCategory category, 1769 DataSelection dataSelection, Hashtable requestProperties) 1770 throws VisADException, RemoteException { 1771 return this.getDataInner(dataChoice, category, dataSelection, requestProperties); 1772 } 1773 1774 1775 protected Data getDataInner(DataChoice dataChoice, DataCategory category, 1776 DataSelection dataSelection, Hashtable requestProperties) 1777 throws VisADException, RemoteException { 1778 1779 //- this hack keeps the HydraImageProbe from doing a getData() 1780 //- TODO: need to use categories? 
1781 if (requestProperties != null) { 1782 if ((requestProperties.toString()).equals("{prop.requester=MultiSpectral}")) { 1783 return null; 1784 } 1785 } 1786 1787 GeoLocationInfo ginfo = null; 1788 GeoSelection geoSelection = null; 1789 1790 if ((dataSelection != null) && (dataSelection.getGeoSelection() != null)) { 1791 geoSelection = (dataSelection.getGeoSelection().getBoundingBox() != null) ? dataSelection.getGeoSelection() : 1792 dataChoice.getDataSelection().getGeoSelection(); 1793 } 1794 1795 if (geoSelection != null) { 1796 ginfo = geoSelection.getBoundingBox(); 1797 } 1798 1799 Data data = null; 1800 if (adapters == null) { 1801 return data; 1802 } 1803 1804 MultiDimensionAdapter adapter = null; 1805 1806 // pick the adapter with the same index as the current data choice 1807 int aIdx = 0; 1808 List<DataChoice> dcl = getDataChoices(); 1809 for (DataChoice dc : dcl) { 1810 if (dc.getName().equals(dataChoice.getName())) { 1811 aIdx = dcl.indexOf(dc); 1812 break; 1813 } 1814 } 1815 1816 adapter = adapters[aIdx]; 1817 1818 try { 1819 Map<String, double[]> subset = null; 1820 if (ginfo != null) { 1821 subset = adapter.getSubsetFromLonLatRect(ginfo.getMinLat(), ginfo.getMaxLat(), 1822 ginfo.getMinLon(), ginfo.getMaxLon(), 1823 geoSelection.getXStride(), 1824 geoSelection.getYStride(), 1825 geoSelection.getZStride()); 1826 } 1827 else { 1828 1829 MultiDimensionSubset select = null; 1830 Hashtable table = dataChoice.getProperties(); 1831 Enumeration keys = table.keys(); 1832 while (keys.hasMoreElements()) { 1833 Object key = keys.nextElement(); 1834 logger.debug("Key: " + key.toString()); 1835 if (key instanceof MultiDimensionSubset) { 1836 select = (MultiDimensionSubset) table.get(key); 1837 } 1838 } 1839 subset = select.getSubset(); 1840 logger.debug("Subset size: " + subset.size()); 1841 1842 if (dataSelection != null) { 1843 Hashtable props = dataSelection.getProperties(); 1844 if (props != null) { 1845 if (props.containsKey(SpectrumAdapter.channelIndex_name)) 
{ 1846 logger.debug("Props contains channel index key..."); 1847 double[] coords = subset.get(SpectrumAdapter.channelIndex_name); 1848 int idx = ((Integer) props.get(SpectrumAdapter.channelIndex_name)).intValue(); 1849 coords[0] = (double) idx; 1850 coords[1] = (double) idx; 1851 coords[2] = (double) 1; 1852 } 1853 } 1854 } 1855 } 1856 1857 if (subset != null) { 1858 data = adapter.getData(subset); 1859 data = applyProperties(data, requestProperties, subset, aIdx); 1860 } 1861 } catch (Exception e) { 1862 logger.error("getData Exception: ", e); 1863 } 1864 ////////// inq1429 return FieldImpl with time dim ///////////////// 1865 if (data != null) { 1866 List dateTimes = new ArrayList(); 1867 dateTimes.add(new DateTime(theDate)); 1868 SampledSet timeSet = (SampledSet) ucar.visad.Util.makeTimeSet(dateTimes); 1869 FunctionType ftype = new FunctionType(RealType.Time, data.getType()); 1870 FieldImpl fi = new FieldImpl(ftype, timeSet); 1871 fi.setSample(0, data); 1872 data = fi; 1873 } 1874 ////////////////////////////////////////////////////////////////// 1875 return data; 1876 } 1877 1878 protected Data applyProperties(Data data, Hashtable requestProperties, Map<String, double[]> subset, int adapterIndex) 1879 throws VisADException, RemoteException { 1880 Data new_data = data; 1881 1882 if (requestProperties == null) { 1883 new_data = data; 1884 return new_data; 1885 } 1886 1887 return new_data; 1888 } 1889 1890 protected void initDataSelectionComponents( 1891 List<DataSelectionComponent> components, 1892 final DataChoice dataChoice) { 1893 1894 try { 1895 // inq1429: need to handle FieldImpl here 1896 FieldImpl thing = (FieldImpl) dataChoice.getData(null); 1897 FlatField image; 1898 if (GridUtil.isTimeSequence(thing)) { 1899 image = (FlatField) thing.getSample(0); 1900 } else { 1901 image = (FlatField) thing; 1902 } 1903 if (image != null) { 1904 PreviewSelection ps = new PreviewSelection(dataChoice, image, null); 1905 // Region subsetting not yet implemented for CrIS 
data 1906 if (instrumentName.getStringValue().equals("CrIS")) { 1907 ps.enableSubsetting(false); 1908 } 1909 components.add(ps); 1910 } 1911 } catch (Exception e) { 1912 logger.error("Can't make PreviewSelection: ", e); 1913 } 1914 1915 } 1916 1917 /** 1918 * Add {@code Integer->String} translations to IDV's 1919 * {@literal "translations"} resource, so they will be made available to 1920 * the data probe of Image Display's. 1921 */ 1922 public void initQfTranslations() { 1923 1924 Map<String, Map<Integer, String>> translations = 1925 getIdv().getResourceManager(). 1926 getTranslationsHashtable(); 1927 1928 for (String qfKey : qfMap.keySet()) { 1929 // This string needs to match up with the data choice name: 1930 String qfKeySubstr = qfKey.replace("All_Data/", ""); 1931 // check if we've already added map for this QF 1932 if (!translations.containsKey(qfKeySubstr)) { 1933 Map<String, String> hm = qfMap.get(qfKey).getHm(); 1934 Map<Integer, String> newMap = 1935 new HashMap<Integer, String>(hm.size()); 1936 for (String dataValueKey : hm.keySet()) { 1937 // convert Map<String, String> to Map<Integer, String> 1938 Integer intKey = Integer.parseInt(dataValueKey); 1939 newMap.put(intKey, hm.get(dataValueKey)); 1940 } 1941 translations.put(qfKeySubstr, newMap); 1942 } 1943 } 1944 } 1945 1946}