4 * Copyright: Thomas McGlynn 1997-1999.
5 * This code may be used for any purpose, non-commercial
6 * or commercial so long as this copyright notice is retained
7 * in the source code or included in or referred to in any
14 import nom.tam.util.*;
16 /** This class provides access to routines to allow users
17 * to read and write FITS files.
21 * <b> Description of the Package </b>
23 * This FITS package attempts to make using FITS files easy,
24 * but does not do exhaustive error checking. Users should
25 * not assume that just because a FITS file can be read
26 * and written that it is necessarily legal FITS.
30 * <li> The Fits class provides capabilities to
31 * read and write data at the HDU level, and to
32 * add and delete HDU's from the current Fits object.
33 * A large number of constructors are provided which
34 * allow users to associate the Fits object with
35 * some form of external data. This external
36 * data may be in a compressed format.
37 * <li> The HDU class is a factory class which is used to
38 * create HDUs. HDU's can be of a number of types
39 * derived from the abstract class BasicHDU.
40 * The hierarchy of HDUs is:
45 * <li> RandomGroupsHDU
54 * <li> The Header class provides many functions to
55 * add, delete and read header keywords in a variety
57 * <li> The HeaderCard class provides access to the structure
58 * of a FITS header card.
59 * <li> The Data class is an abstract class which provides
60 * the basic methods for reading and writing FITS data.
61 * Users will likely only be interested in the getData
62 * method which returns the actual FITS data.
63 * <li> The TableHDU class provides a large number of
64 * methods to access and modify information in
66 * <li> The Column class
67 * combines the Header information and Data corresponding to
72 * @version 1.06.0 May 21, 2011
76 /** The input stream associated with this Fits object.
78 private ArrayDataInput dataStr;
80 /** A vector of HDUs that have been added to this
83 private Vector hduList = new Vector();
// NOTE(review): raw (pre-generics) Vector retained for API compatibility;
// elements are BasicHDU instances — confirm before adding type parameters.
85 /** Has the input stream reached the EOF?
// Checked by readHDU() before attempting a read (see readHDU).
87 private boolean atEOF;
89 /** The last offset we reached.
90 * A -1 is used to indicate that we
91 * cannot use the offset.
93 private long lastFileOffset = -1;
95 /** Indicate the version of these classes */
96 public static String version() {
98 // Version 0.1: Original test FITS classes -- 9/96
99 // Version 0.2: Pre-alpha release 10/97
100 // Complete rewrite using BufferedData*** and
101 // ArrayFuncs utilities.
102 // Version 0.3: Pre-alpha release 1/98
103 // Incorporation of HDU hierarchy developed
104 // by Dave Glowacki and various bug fixes.
105 // Version 0.4: Alpha-release 2/98
106 // BinaryTable classes revised to use
107 // ColumnTable classes.
108 // Version 0.5: Random Groups Data 3/98
109 // Version 0.6: Handling of bad/skipped FITS, FitsDate (D. Glowacki) 3/98
110 // Version 0.9: ASCII tables, Tiled images, Faux, Bad and SkippedHDU class
112 // Version 0.91: Changed visibility of some methods.
114 // Version 0.92: Fix bug in BinaryTable when reading from stream.
115 // Version 0.93: Supports HIERARCH header cards. Added FitsElement interface.
116 // Several bug fixes especially for null HDUs.
117 // Version 0.96: Address issues with mandatory keywords.
118 // Fix problem where some keywords were not properly keyed.
119 // Version 0.96a: Update version in FITS
120 // Version 0.99: Added support for Checksums (thanks to RJ Mathar).
121 // Fixed bug with COMMENT and HISTORY keywords (Rose Early)
122 // Changed checking for compression and fixed bug with TFORM
123 // handling in binary tables (Laurent Michel)
124 // Distinguished arrays of length 1 from scalars in
125 // binary tables (Jorgo Bakker)
126 // Fixed bug in handling of length 0 values in headers (Fred Romerfanger, Jorgo Bakker)
127 // Truncated BufferedFiles when finishing write (but only
128 // for FITS file as a whole.)
129 // Fixed bug writing binary tables with deferred reads.
130 // Made addLine methods in Header public.
131 // Changed ArrayFuncs.newInstance to handle inputs with dimensionality of 0.
133 // Added deleteRows and deleteColumns functionality to all tables.
134 // This includes changes
135 // to TableData, TableHDU, AsciiTable, BinaryTable and util/ColumnTable.
136 // Row deletions were suggested by code of R. Mathar but this works
137 // on all types of tables and implements the deletions at a lower level.
138 // Completely revised util.HashedList to use more standard features from
139 // Collections. The HashedList now melds a HashMap and ArrayList
140 // Added sort to HashedList function to enable sorting of the list.
141 // The logic now uses a simple index for the iterators rather than
142 // traversing a linked list.
143 // Added sort before write in Header to ensure keywords are in correct order.
144 // This uses a new HeaderOrder class which implements java.util.Comparator to
145 // indicate the required order for FITS keywords. Users should now
146 // be able to write required keywords anywhere without getting errors
147 // later when they try to write out the FITS file.
148 // Fixed bug in setColumn in util.Column table where the new column
149 // was not being pointed to. Any new column resets the table.
150 // Several fixes to BinaryTable to address errors in variable length
152 // Several fixes to the handling of variable length array in binary tables.
153 // (noted by Guillaume Belanger).
154 // Several fixes and changes suggested by Richard Mathar mostly
157 // Revised test routines to use Junit. Note that Junit tests
158 // use annotations and require Java 1.5.
159 // Added ArrayFuncs.arrayEquals() methods to compare
161 // Fixed bugs in handling of 0 length columns and table update.
163 // Additional fixes for 0 length strings.
165 // Changed handling of constructor for File objects
167 // Add ability to handle FILE, HTTPS and FTP URLs and to
168 // handle redirects amongst different protocols.
170 // Fixes to String handling (A. Kovacs)
171 // Truncating long doubles to fit in
173 // Made some methods public in FitsFactory
176 // Fix to BinaryTable (L. Michel)
178 // Support for .Z compressed data.
179 // Better detection of compressed data streams
180 // Bug fix for binary tables (A. Kovacs)
181 // Version 1.00.1 (2/09)
182 // Fix for exponential format in header keywords
183 // Version 1.00.2 (3/09)
184 // Fixed offsets when users read rows or elements
185 // within multiHDU files.
187 // Fixes bugs and adds some more graceful
188 // error handling for situations where arrays
189 // could exceed 2G. More work is needed here though.
190 // Data.getTrueSize() now returns a long.
192 // Fixed bug with initial blanks in HIERARCH
194 // Version 1.02.0 (7/09)
195 // Fixes bugs in ASCII tables where integer and real
196 // fields that are blank should be read as 0 per the FITS
197 // standard. (L. Michel)
199 // Adds PaddingException to allow users to read
200 // improperly padded last HDU in FITS file. (suggested by L. Michel)
201 // This required changes to the Fits.java and all of the Data subclasses
202 // as well as the new exception classes.
203 // Version 1.03.0 (7/09)
204 // Many changes to support long (>2GB) arrays in
205 // reads and size computations:
206 // ArrayDataInput.readArray deprecated in
207 // favor of ArrayDataInput.readLArray
208 // ArrayUtil.computeSize -> ArrayUtil.computeLSize
209 // ArrayUtil.nElements -> ArrayUtil.nLElements
210 // The skipBytes method in ArrayDataInput is overloaded
211 // to take a long argument and return a long value (in
212 // addition to the method inherited from DataInput
213 // that takes and returns an int).
215 // Corresponding changes in FITS classes.
216 // [Note that there are still many restrictions
217 // due to the array size limits in Java.]
219 // A number of obsolete comments regarding BITPIX=64 being non-standard
221 // If errors are found in reading the Header of an HDU
222 // an IOException is now returned in some situations
223 // where an Error was being returned.
225 // A bug in the new PaddingException was fixed that
226 // lets truncated ImageHDUs have Tilers.
228 // Version 1.03.1 (7/09)
229 // Changed FitsUtil.byteArrayToStrings to make
230 // sure that deleted white space is eligible for
231 // garbage collection. (J.C. Segovia)
233 // Version 1.04.0 (12/09)
235 // Added support for the long string convention
236 // (see JavaDocs for Header).
237 // Fixed errors in handling of strings with embedded
241 // Version 1.05.0 (12/10)
242 // Several fixes suggested by Laurent Bourges
243 // - Better handling of strings in binary tables
244 // including handling of truncated strings (with
245 // embedded nuls) and detection of illegal
246 // non-printing characters.
247 // - New table metadata functions in TableHDU
248 // - Handling of complex data including variable
250 // Added a number of convenience methods
251 // - FitsUtil.pad() is used to write padding
252 // rather than separate code in many classes
253 // - FitsUtil.HDUFactory will now create
254 // an HDU from a Header or input data.
255 // - reset() method added to FitsElement to simplify
256 // reading of Fits data using low level access.
257 // This is implemented in many classes.
258 // - dumpArray method in ArrayFuncs for convenience
260 // Version 1.05.1 (2/11)
261 // Fixed error in Long string support where the
262 // COMMENT keyword was being used instead of the
263 // correct CONTINUE. (V. Forchi)
264 // An error in the positioning of the Header cursor
265 // for primary images was noted by V. Forchi. Updates
266 // to the header could easily result in writing
267 // records before the EXTEND keyword which is a violation
268 // of the FITS standard.
269 // Version 1.06.0 (5/11)
270 // Substantial reworking of compression to accommodate
271 // BZIP2 compression. The Apache Bzip library is used or
272 // since this is very slow, the user can specify a local
273 // command to do the decompression using the BZIP_DECOMPRESSOR
274 // environment variable. This is assumed to require a
275 // '-' argument which is added if not supplied by the user.
276 // The decompressor should act as a filter between standard input
279 // User compression flags are now completely
280 // ignored and the compression and the compression
281 // is determined entirely by the content of the stream.
282 // The Apache library will be needed in the
283 // classpath to accommodate BZIP2 inputs if the user
284 // does not supply the BZIP_DECOMPRESSOR.
286 // Adding additional compression methods should be much easier and
287 // may only involve adding a couple of lines in the
288 // FitsUtil.decompress function if a decompressor class
291 // One subtle consequence of how compression is now handled
292 // is that there is no advantage for users to
293 // create their own BufferedDataInputStream's.
294 // Users should just provide a standard input stream
295 // and allow the FITS library to wrap it in a
296 // BufferedDataInputStream.
298 // A bug in the UndefinedData class was detected
299 // by Vincenzo Forzi and has been corrected.
301 // The nom.tam.util.AsciiFuncs class now handles
302 // ASCII encoding to more cleanly separate this
303 // functionality from the FITS library and to enable
304 // Java 1.5 compatibility. (Suggested by changes of L.Bourges)
305 // Some other V1.5 incompatibilities removed.
307 // The HeaderCommentsMap class is now provided to enable
308 // users to control the comments that are generated in system
309 // generated header cards. The map is initialized to values
310 // that should be the same as the current defaults. This
311 // should allow users to emulate the comments of other packages.
313 // All Java code has been passed through NetBeans formatter
314 // so that it should have a more uniform appearance.
319 /** Create an empty Fits object which is not
320 * associated with an input stream.
325 /** Create a Fits object associated with
326 * the given data stream.
327 * Compression is determined from the first few bytes of the stream.
328 * @param str The data stream.
 * @throws FitsException if the stream could not be wrapped/initialized.
330 public Fits(InputStream str) throws FitsException {
// Delegates to streamInit; the second argument (seekable) is ignored there.
331 streamInit(str, false);
334 /** Create a Fits object associated with a data stream.
335 * @param str The data stream.
336 * @param compressed Is the stream compressed? This is currently ignored.
337 * Compression is determined from the first two bytes in the stream.
 * @throws FitsException if the stream could not be initialized.
339 public Fits(InputStream str, boolean compressed)
340 throws FitsException {
344 /** Initialize the stream.
345 * @param str The user specified input stream
346 * @param seekable ignored
 * @throws FitsException if stream initialization fails.
348 protected void streamInit(InputStream str, boolean seekable)
349 throws FitsException {
353 /** Do the stream initialization.
355 * @param str The input stream.
356 * @param compressed Is this data compressed? This flag
357 * is ignored. The compression is determined from the stream content.
358 * @param seekable Can one seek on the stream. This parameter is ignored.
 * @throws FitsException if stream initialization fails.
360 protected void streamInit(InputStream str, boolean compressed,
362 throws FitsException {
366 /** Initialize the input stream. Mostly this
367 * checks to see if the stream is compressed and
368 * wraps the stream if necessary. Even if the
369 * stream is not compressed, it will likely be wrapped
370 * in a PushbackInputStream. So users should probably
371 * not supply a BufferedDataInputStream themselves, but
372 * should allow the Fits class to do the wrapping.
 * @param str the raw user-supplied input stream.
374 * @throws FitsException
376 protected void streamInit(InputStream str) throws FitsException {
// Transparently wraps/decompresses the stream based on its content
// (see FitsUtil.decompress); the original reference is replaced.
377 str = FitsUtil.decompress(str);
378 if (str instanceof ArrayDataInput) {
// Already an efficient FITS-aware stream: use it directly.
379 dataStr = (ArrayDataInput) str;
381 // Use efficient blocking for input.
382 dataStr = new BufferedDataInputStream(str);
386 /** Initialize using buffered random access.
387 * This implies that the data is uncompressed.
 * @param f the FITS file to open with random access.
389 * @throws FitsException
391 protected void randomInit(File f) throws FitsException {
393 String permissions = "r";
// Fail fast with a FitsException rather than letting the
// BufferedFile constructor throw a raw IOException later.
394 if (!f.exists() || !f.canRead()) {
395 throw new FitsException("Non-existent or unreadable file");
// NOTE(review): lines elided from this view may widen permissions
// (e.g. to "rw" when the file is writable) — confirm against full source.
401 dataStr = new BufferedFile(f, permissions);
// Position at the start of the file so reading begins at the primary HDU.
403 ((BufferedFile) dataStr).seek(0);
404 } catch (IOException e) {
405 throw new FitsException("Unable to open file " + f.getPath());
409 /** Associate FITS object with a File. If the file is
410 * compressed a stream will be used, otherwise random access
 * will be supported.
412 * @param myFile The File object.
 * @throws FitsException if the file could not be opened.
414 public Fits(File myFile) throws FitsException {
// Compression is detected from the file itself (see FitsUtil.isCompressed).
415 this(myFile, FitsUtil.isCompressed(myFile));
418 /** Associate the Fits object with a File
419 * @param myFile The File object.
420 * @param compressed Is the data compressed?
 * @throws FitsException if the file could not be opened.
422 public Fits(File myFile, boolean compressed) throws FitsException {
423 fileInit(myFile, compressed);
426 /** Get a stream from the file and then use the stream initialization.
427 * @param myFile The File to be associated.
428 * @param compressed Is the data compressed?
 * @throws FitsException if an input stream could not be created.
430 protected void fileInit(File myFile, boolean compressed) throws FitsException {
// NOTE(review): lines elided from this view likely route uncompressed
// files to randomInit() instead — confirm against full source.
434 FileInputStream str = new FileInputStream(myFile);
439 } catch (IOException e) {
440 throw new FitsException("Unable to create Input Stream from File: " + myFile);
444 /** Associate the FITS object with a file or URL.
446 * The string is assumed to be a URL if it begins one of the
 * supported protocol prefixes.
448 * If the string ends in .gz it is assumed that
449 * the data is in a compressed format.
450 * All string comparisons are case insensitive.
452 * @param filename The name of the file or URL to be processed.
453 * @exception FitsException Thrown if unable to find or open
454 * a file or URL from the string given.
456 public Fits(String filename) throws FitsException {
// Compression is inferred from the name (e.g. a .gz suffix).
457 this(filename, FitsUtil.isCompressed(filename));
460 /** Associate the FITS object with a file or URL.
462 * The string is assumed to be a URL if it begins one of the
 * supported protocol prefixes.
464 * If the string ends in .gz it is assumed that
465 * the data is in a compressed format.
466 * All string comparisons are case insensitive.
468 * @param filename The name of the file or URL to be processed.
 * @param compressed Is the data compressed?
469 * @exception FitsException Thrown if unable to find or open
470 * a file or URL from the string given.
472 public Fits(String filename, boolean compressed) throws FitsException {
476 if (filename == null) {
477 throw new FitsException("Null FITS Identifier String");
480 int len = filename.length();
481 String lc = filename.toLowerCase();
// Resolution order: first try the name as a URL ...
483 URL test = new URL(filename);
484 InputStream is = FitsUtil.getURLStream(new URL(filename), 0);
487 } catch (Exception e) {
488 // Just try it as a file
// ... then as a file on disk ...
491 File fil = new File(filename);
493 fileInit(fil, compressed);
// ... and finally as a classpath resource.
499 InputStream str = ClassLoader.getSystemClassLoader().getResourceAsStream(filename);
501 } catch (Exception e) {
507 /** Associate the FITS object with a given uncompressed URL
508 * @param myURL The URL to be associated with the FITS file.
509 * @param compressed Compression flag, ignored.
510 * @exception FitsException Thrown if unable to use the specified URL.
512 public Fits(URL myURL, boolean compressed) throws FitsException {
516 /** Associate the FITS object with a given URL
 * @param myURL The URL to be read.
518 * @exception FitsException Thrown if unable to find or open
519 * a file or URL from the string given.
521 public Fits(URL myURL) throws FitsException {
// getURLStream follows redirects; a retry count of 0 starts the chain.
523 streamInit(FitsUtil.getURLStream(myURL, 0));
524 } catch (IOException e) {
525 throw new FitsException("Unable to open input from URL:" + myURL);
529 /** Return all HDUs for the Fits object. If the
530 * FITS file is associated with an external stream make
531 * sure that we have exhausted the stream.
532 * @return an array of all HDUs in the Fits object. Returns
533 * null if there are no HDUs associated with this object.
 * @throws FitsException if an HDU could not be read from the stream.
535 public BasicHDU[] read() throws FitsException {
// NOTE(review): an elided line here presumably calls readToEnd()
// to exhaust the stream before counting — confirm against full source.
539 int size = getNumberOfHDUs();
// Snapshot the Vector contents into a plain array for the caller.
545 BasicHDU[] hdus = new BasicHDU[size];
546 hduList.copyInto(hdus);
550 /** Read the next HDU on the default input stream.
551 * @return The HDU read, or null if an EOF was detected.
552 * Note that null is only returned when the EOF is detected immediately
553 * at the beginning of reading the HDU.
 * @throws FitsException if the header or data could not be parsed.
 * @throws IOException on a low-level read failure.
555 public BasicHDU readHDU() throws FitsException, IOException {
// Nothing to read without a stream, or once EOF has been seen.
557 if (dataStr == null || atEOF) {
// For random-access inputs, reposition to where the last (possibly
// deferred) read left off before parsing the next header.
561 if (dataStr instanceof nom.tam.util.RandomAccess && lastFileOffset > 0) {
562 FitsUtil.reposition(dataStr, lastFileOffset);
565 Header hdr = Header.readHeader(dataStr);
571 Data datum = hdr.makeData();
574 } catch (PaddingException e) {
// Remember the post-read offset so a later readHDU can reposition.
579 lastFileOffset = FitsUtil.findOffset(dataStr);
580 BasicHDU nextHDU = FitsFactory.HDUFactory(hdr, datum);
582 hduList.addElement(nextHDU);
586 /** Skip HDUs on the associate input stream.
587 * @param n The number of HDUs to be skipped.
 * @throws FitsException if an HDU is malformed.
 * @throws IOException on a low-level read failure.
589 public void skipHDU(int n) throws FitsException, IOException {
590 for (int i = 0; i < n; i += 1) {
595 /** Skip the next HDU on the default input stream.
 * @throws FitsException if the header is malformed.
 * @throws IOException on a low-level read failure.
597 public void skipHDU() throws FitsException, IOException {
// Parse the header only, then skip past the data section.
602 Header hdr = new Header(dataStr);
// NOTE(review): narrowing the long data size to int could truncate
// for HDUs >2GB — confirm handling in the elided lines of full source.
607 int dataSize = (int) hdr.getDataSize();
608 dataStr.skip(dataSize);
612 /** Return the n'th HDU.
613 * If the HDU is already read simply return a pointer to the
614 * cached data. Otherwise read the associated stream
615 * until the n'th HDU is read.
616 * @param n The index of the HDU to be read. The primary HDU is index 0.
617 * @return The n'th HDU or null if it could not be found.
 * @throws FitsException if an HDU could not be parsed while reading ahead.
 * @throws IOException on a low-level read failure.
619 public BasicHDU getHDU(int n) throws FitsException, IOException {
621 int size = getNumberOfHDUs();
// Read forward from the current position until HDU n is cached.
623 for (int i = size; i <= n; i += 1) {
632 return (BasicHDU) hduList.elementAt(n);
633 } catch (NoSuchElementException e) {
634 throw new FitsException("Internal Error: hduList build failed");
638 /** Read to the end of the associated input stream */
639 private void readToEnd() throws FitsException {
// Keep pulling HDUs until EOF (readHDU returns null) or no stream.
641 while (dataStr != null && !atEOF) {
643 if (readHDU() == null) {
646 } catch (IOException e) {
// Convert the low-level IOException into the library's checked type.
647 throw new FitsException("IO error: " + e);
652 /** Return the number of HDUs in the Fits object. If the
653 * FITS file is associated with an external stream make
654 * sure that we have exhausted the stream.
655 * @return number of HDUs.
656 * @deprecated The meaning of size is ambiguous. Use
 * getNumberOfHDUs() instead.
658 public int size() throws FitsException {
// NOTE(review): an elided line here presumably calls readToEnd()
// to exhaust the stream first — confirm against full source.
660 return getNumberOfHDUs();
663 /** Add an HDU to the Fits object. Users may intermix
664 * calls to functions which read HDUs from an associated
665 * input stream with the addHDU and insertHDU calls,
666 * but should be careful to understand the consequences.
668 * @param myHDU The HDU to be added to the end of the FITS object.
 * @throws FitsException if the HDU cannot be inserted.
670 public void addHDU(BasicHDU myHDU)
671 throws FitsException {
// Appending is just insertion at the current end of the list.
672 insertHDU(myHDU, getNumberOfHDUs());
675 /** Insert a FITS object into the list of HDUs.
677 * @param myHDU The HDU to be inserted into the list of HDUs.
678 * @param n The location at which the HDU is to be inserted.
 * @throws FitsException if n is out of range or the list is inconsistent.
680 public void insertHDU(BasicHDU myHDU, int n)
681 throws FitsException {
// Valid positions are 0 .. current size inclusive (append allowed).
687 if (n < 0 || n > getNumberOfHDUs()) {
688 throw new FitsException("Attempt to insert HDU at invalid location: " + n);
695 // Note that the previous initial HDU is no longer the first.
696 // If we were to insert tables backwards from last to first,
697 // we could get a lot of extraneous DummyHDUs but we currently
698 // do not worry about that.
// Inserting at position 0 demotes the old primary HDU (if any).
700 if (getNumberOfHDUs() > 0) {
701 ((BasicHDU) hduList.elementAt(0)).setPrimaryHDU(false);
704 if (myHDU.canBePrimary()) {
705 myHDU.setPrimaryHDU(true);
706 hduList.insertElementAt(myHDU, 0);
// This HDU cannot be primary: put a dummy primary ahead of it.
708 insertHDU(BasicHDU.getDummyHDU(), 0);
709 myHDU.setPrimaryHDU(false);
710 hduList.insertElementAt(myHDU, 1);
// Non-zero positions are always extensions.
713 myHDU.setPrimaryHDU(false);
714 hduList.insertElementAt(myHDU, n);
716 } catch (NoSuchElementException e) {
717 throw new FitsException("hduList inconsistency in insertHDU");
722 /** Delete an HDU from the HDU list.
724 * @param n The index of the HDU to be deleted.
725 * If n is 0 and there is more than one HDU present, then
726 * the next HDU will be converted from an image to
727 * primary HDU if possible. If not a dummy header HDU
728 * will then be inserted.
 * @throws FitsException if n does not refer to an existing HDU.
730 public void deleteHDU(int n) throws FitsException {
731 int size = getNumberOfHDUs();
732 if (n < 0 || n >= size) {
733 throw new FitsException("Attempt to delete non-existent HDU:" + n);
736 hduList.removeElementAt(n);
// Deleting the primary HDU: promote the new first HDU if it can
// serve as primary, otherwise insert a dummy primary.
737 if (n == 0 && size > 1) {
738 BasicHDU newFirst = (BasicHDU) hduList.elementAt(0);
739 if (newFirst.canBePrimary()) {
740 newFirst.setPrimaryHDU(true);
742 insertHDU(BasicHDU.getDummyHDU(), 0);
745 } catch (NoSuchElementException e) {
// NOTE(review): "Inconsitency" typo is in the runtime message;
// left unchanged here since callers/logs may match on it.
746 throw new FitsException("Internal Error: hduList Vector Inconsitency");
750 /** Write a Fits Object to an external Stream.
752 * @param os A DataOutput stream.
 * @throws FitsException if the output type is unsupported or an HDU fails to write.
754 public void write(DataOutput os) throws FitsException {
757 boolean newOS = false;
// Reuse an ArrayDataOutput directly; otherwise wrap a plain
// DataOutputStream. Any other DataOutput type is rejected.
759 if (os instanceof ArrayDataOutput) {
760 obs = (ArrayDataOutput) os;
761 } else if (os instanceof DataOutputStream) {
763 obs = new BufferedDataOutputStream((DataOutputStream) os);
765 throw new FitsException("Cannot create ArrayDataOutput from class "
766 + os.getClass().getName());
// Write every cached HDU in order.
770 for (int i = 0; i < getNumberOfHDUs(); i += 1) {
772 hh = (BasicHDU) hduList.elementAt(i);
774 } catch (ArrayIndexOutOfBoundsException e) {
776 throw new FitsException("Internal Error: Vector Inconsistency" + e);
// Closing problems are reported but deliberately not fatal.
783 } catch (IOException e) {
784 System.err.println("Warning: error closing FITS output stream");
// Truncate a BufferedFile to the data actually written so stale
// bytes from a longer previous file do not survive.
788 if (obs instanceof BufferedFile) {
789 ((BufferedFile) obs).setLength(((BufferedFile) obs).getFilePointer());
791 } catch (IOException e) {
792 // Ignore problems...
797 /** Read a FITS file from an InputStream object.
799 * @param is The InputStream stream whence the FITS information
 * is found.
 * @throws FitsException if an HDU cannot be parsed.
 * @throws IOException on a low-level read failure.
802 public void read(InputStream is) throws FitsException, IOException {
804 boolean newIS = false;
// Wrap the stream only when it is not already FITS-aware.
806 if (is instanceof ArrayDataInput) {
807 dataStr = (ArrayDataInput) is;
809 dataStr = new BufferedDataInputStream(is);
821 /** Get the current number of HDUs in the Fits object.
822 * @return The number of HDU's in the object.
823 * @deprecated See getNumberOfHDUs()
825 public int currentSize() {
826 return hduList.size();
829 /** Get the current number of HDUs in the Fits object.
830 * @return The number of HDU's in the object.
 * Note: unlike size(), this does not read ahead on the stream.
832 public int getNumberOfHDUs() {
833 return hduList.size();
836 /** Get the data stream used for the Fits Data.
837 * @return The associated data stream. Users may wish to
838 * call this function after opening a Fits object when
839 * they wish detailed control for writing some part of the FITS file.
841 public ArrayDataInput getStream() {
845 /** Set the data stream to be used for future input.
847 * @param stream The data stream to be used.
849 public void setStream(ArrayDataInput stream) {
855 /** Create an HDU from the given header.
856 * @param h The header which describes the FITS extension
 * @return the HDU built from the header and its derived data.
 * @throws FitsException if no data can be constructed for the header.
858 public static BasicHDU makeHDU(Header h) throws FitsException {
// The data object is derived entirely from the header description.
859 Data d = FitsFactory.dataFactory(h);
860 return FitsFactory.HDUFactory(h, d);
863 /** Create an HDU from the given data kernel.
864 * @param o The data to be described in this HDU.
 * @return an HDU wrapping the given data kernel.
 * @throws FitsException if the object cannot be converted to an HDU.
866 public static BasicHDU makeHDU(Object o) throws FitsException {
867 return FitsFactory.HDUFactory(o);
870 /** Create an HDU from the given Data.
871 * @param datum The data to be described in this HDU.
 * @return an HDU whose header is freshly generated from the data.
 * @throws FitsException if the header cannot be filled from the data.
873 public static BasicHDU makeHDU(Data datum) throws FitsException {
874 Header hdr = new Header();
// Let the data populate a brand-new header describing itself.
875 datum.fillHeader(hdr);
876 return FitsFactory.HDUFactory(hdr, datum);
880 * Add or update the CHECKSUM keyword.
881 * @param hdu the HDU whose header receives the CHECKSUM keyword
882 * @throws nom.tam.fits.HeaderCardException
 * @throws nom.tam.fits.FitsException
 * @throws java.io.IOException if the HDU cannot be serialized for checksumming
886 public static void setChecksum(BasicHDU hdu)
887 throws nom.tam.fits.HeaderCardException, nom.tam.fits.FitsException, java.io.IOException {
888 /* the next line with the delete is needed to avoid some unexpected
889 * problems with nom.tam.fits.Header.checkCard() which otherwise says
890 * it expected PCOUNT and found DATE.
892 Header hdr = hdu.getHeader();
893 hdr.deleteKey("CHECKSUM");
894 /* This would need org.nevec.utils.DateUtils compiled before org.nevec.prima.fits ....
895 * final String doneAt = DateUtils.dateToISOstring(0) ;
896 * We need to save the value of the comment string because this is becoming part
897 * of the checksum calculated and needs to be re-inserted again - with the same string -
898 * when the second/final call to addValue() is made below.
900 final String doneAt = HeaderCommentsMap.getComment("fits:checksum:1");
// Seed the keyword with a placeholder of ASCII zeros before computing,
// so the card itself is included in the checksummed bytes.
901 hdr.addValue("CHECKSUM", "0000000000000000", doneAt);
903 /* Convert the entire sequence of 2880 byte header cards into a byte array.
904 * The main benefit compared to the C implementations is that we do not need to worry
905 * about the particular byte order on machines (Linux/VAX/MIPS vs Hp-UX, Sparc...) supposed that
906 * the correct implementation is in the write() interface.
908 ByteArrayOutputStream hduByteImage = new ByteArrayOutputStream();
910 hdu.write(new BufferedDataOutputStream(hduByteImage));
911 final byte[] data = hduByteImage.toByteArray();
912 final long csu = checksum(data);
914 /* This time we do not use a deleteKey() to ensure that the keyword is replaced "in place".
915 * Note that the value of the checksum is actually independent to a permutation of the
916 * 80-byte records within the header.
// true => store the one's complement, as the convention requires.
918 hdr.addValue("CHECKSUM", checksumEnc(csu, true), doneAt);
922 * Add or Modify the CHECKSUM keyword in all headers.
923 * @throws nom.tam.fits.HeaderCardException
924 * @throws nom.tam.fits.FitsException
 * @throws java.io.IOException if an HDU cannot be serialized for checksumming
928 public void setChecksum()
929 throws nom.tam.fits.HeaderCardException, nom.tam.fits.FitsException, java.io.IOException {
// getHDU may read further HDUs from the stream as the index advances.
930 for (int i = 0; i < getNumberOfHDUs(); i += 1) {
931 setChecksum(getHDU(i));
936 * Calculate the Seaman-Pence 32-bit 1's complement checksum over the byte stream. The option
937 * to start from an intermediate checksum accumulated over another previous
938 * byte stream is not implemented.
939 * The implementation accumulates in two 64-bit integer values the two low-order and the two
940 * high-order bytes of adjacent 4-byte groups. A carry-over of bits is never done within the main
941 * loop (only once at the end at reduction to a 32-bit positive integer) since an overflow
942 * of a 64-bit value (signed, with maximum at 2^63-1) by summation of 16-bit values could only
943 * occur after adding approximately 140G short values (=2^47) (280GBytes) or more. We assume
944 * for now that this routine here is never called to swallow FITS files of that size or larger.
945 * @param data the byte sequence
946 * @return the 32bit checksum in the range from 0 to 2^32-1
947 * @see http://heasarc.gsfc.nasa.gov/docs/heasarc/fits/checksum.html
951 private static long checksum(final byte[] data) {
// len counts 16-bit pairs over complete 4-byte groups.
954 final int len = 2 * (data.length / 4);
955 // System.out.println(data.length + " bytes") ;
956 final int remain = data.length % 4;
957 /* a write(2) on Sparc/PA-RISC would write the MSB first, on Linux the LSB; by some kind
958 * of coincidence, we can stay with the byte order known from the original C version of
 * the checksum.
961 for (int i = 0; i < len; i += 2) {
962 /* The four bytes in this block handled by a single 'i' are each signed (-128 to 127)
963 * in Java and need to be masked individually to avoid sign extension /propagation.
965 hi += (data[2 * i] << 8) & 0xff00L | data[2 * i + 1] & 0xffL;
966 lo += (data[2 * i + 2] << 8) & 0xff00L | data[2 * i + 3] & 0xffL;
969 /* The following three cases actually cannot happen since FITS records are multiples of 2880 bytes.
972 hi += (data[2 * len] << 8) & 0xff00L;
975 hi += data[2 * len + 1] & 0xffL;
978 lo += (data[2 * len + 2] << 8) & 0xff00L;
// Fold the accumulated carries back in until both are zero,
// reducing to a 32-bit one's-complement sum.
981 long hicarry = hi >>> 16;
982 long locarry = lo >>> 16;
983 while (hicarry != 0 || locarry != 0) {
984 hi = (hi & 0xffffL) + locarry;
985 lo = (lo & 0xffffL) + hicarry;
989 return (hi << 16) + lo;
993 * Encode a 32bit integer according to the Seaman-Pence proposal.
994 * @param c the checksum previously calculated
 * @param compl if true, encode the one's complement of c (the CHECKSUM convention).
995 * @return the encoded string of 16 bytes.
996 * @see http://heasarc.gsfc.nasa.gov/docs/heasarc/ofwg/docs/general/checksum/node14.html#SECTION00035000000000000000
1000 private static String checksumEnc(final long c, final boolean compl) {
1001 byte[] asc = new byte[16];
// ASCII punctuation codes that must not appear in the encoding.
1002 final int[] exclude = {0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, 0x40, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f, 0x60};
1003 final long[] mask = {0xff000000L, 0xff0000L, 0xff00L, 0xffL};
1004 final int offset = 0x30; /* ASCII 0 (zero) */
1005 final long value = compl ? ~c : c;
1006 for (int i = 0; i < 4; i++) {
1007 final int byt = (int) ((value & mask[i]) >>> (24 - 8 * i)); // each byte becomes four
// Spread the byte evenly over four characters (quotient each,
// remainder distributed in the elided lines).
1008 final int quotient = byt / 4 + offset;
1009 final int remainder = byt % 4;
1010 int[] ch = new int[4];
1011 for (int j = 0; j < 4; j++) {
1016 boolean check = true;
1017 for (; check;) // avoid ASCII punctuation
// Re-balance pairs of characters until none hits an excluded code.
1020 for (int k = 0; k < exclude.length; k++) {
1021 for (int j = 0; j < 4; j += 2) {
1022 if (ch[j] == exclude[k] || ch[j + 1] == exclude[k]) {
1031 for (int j = 0; j < 4; j++) // assign the bytes
1033 asc[4 * j + i] = (byte) (ch[j]);
1037 // shift the bytes 1 to the right circularly.
1039 String resul = AsciiFuncs.asciiString(asc, 15, 1);
1040 return resul.concat(AsciiFuncs.asciiString(asc, 0, 15));
1041 } catch (Exception e) {
1042 // Impossible I hope
1043 System.err.println("CheckSum Error finding ASCII encoding");