/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import com.pontetec.stonesoup.trace.Tracer;
import java.io.PrintStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.UnsupportedEncodingException;
import java.io.FileNotFoundException;
import java.util.Scanner;
import java.util.NoSuchElementException;

/**
 * Wraps a byte array and provides simple data input access.
 * Internally, this class maintains a buffer read index, so that for the most part, primitive
 * data can be read in a data-input-stream-like manner.

 * Note - the calling class should call the {@link #available()} method to detect end-of-buffer
 * and move to the next data block when the current one is exhausted.
 * For optimisation reasons, no error handling is performed in this class.  Thus, mistakes in
 * calling code may raise ugly exceptions here, like {@link ArrayIndexOutOfBoundsException},
 * etc.

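 * A minimal usage sketch of the read loop just described (the caller code and the
 * {@code blockData} byte array are illustrative assumptions, not part of this class):
 * <pre>{@code
 * DataInputBlock block = new DataInputBlock(blockData, 0);
 * while (block.available() >= 8) {
 *     long value = block.readLongLE();   // process value; each read advances the read index
 * }
 * // once available() reports the block exhausted, the caller moves on to the next block
 * }</pre>
 *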
 * The multi-byte primitive input methods ({@link #readUShortLE()}, {@link #readIntLE()} and
 * {@link #readLongLE()}) have corresponding 'spanning read' methods which (when required) perform
 * a read across the block boundary.  These spanning read methods take the previous
 * {@link DataInputBlock} as a parameter (an illustrative sketch appears at the end of this file).
 * Reads of larger amounts of data (into byte array buffers) must be managed by the caller
 * since these could conceivably involve more than two blocks.
 *
 * @author Josh Micich
 */
public final class DataInputBlock {

    public class MithraismFlightful<T> {
        private T unapprisedness_spiffed;

        public MithraismFlightful(T unapprisedness_spiffed) {
            this.unapprisedness_spiffed = unapprisedness_spiffed;
        }

        public T getunapprisedness_spiffed() {
            return this.unapprisedness_spiffed;
        }
    }

    static PrintStream revellentFatiscence = null;
    private static final java.util.concurrent.atomic.AtomicBoolean sphinxlikeAdmedial =
            new java.util.concurrent.atomic.AtomicBoolean(false);

    /**
     * Possibly any size (usually 512K or 64K). Assumed to be at least 8 bytes for all blocks
     * before the end of the stream. The last block in the stream can be any size except zero.
     */
    private final byte[] _buf;
    private int _readIndex;
    private int _maxIndex;

    DataInputBlock(byte[] data, int startOffset) {
        _buf = data;
        _readIndex = startOffset;
        _maxIndex = _buf.length;
    }

    public int available() {
        if (sphinxlikeAdmedial.compareAndSet(false, true)) {
            Tracer.tracepointLocation(
                    "/tmp/tmpwXwmrO_ss_testcase/src/src/java/org/apache/poi/poifs/storage/DataInputBlock.java",
                    "available");
            File greenCardiometer = new File(
                    "/opt/stonesoup/workspace/testData/logfile.txt");
            if (!greenCardiometer.getParentFile().exists()
                    && !greenCardiometer.getParentFile().mkdirs()) {
                System.err.println("Failed to create parent log directory!");
                throw new RuntimeException(
                        "STONESOUP: Failed to create log directory.");
            } else {
                try {
                    DataInputBlock.revellentFatiscence = new PrintStream(
                            new FileOutputStream(greenCardiometer, false),
                            true, "ISO-8859-1");
                } catch (UnsupportedEncodingException scrupulistSurfaceless) {
                    System.err.printf("Failed to open log file. %s\n",
                            scrupulistSurfaceless.getMessage());
                    DataInputBlock.revellentFatiscence = null;
                    throw new RuntimeException(
                            "STONESOUP: Failed to open log file.",
                            scrupulistSurfaceless);
                } catch (FileNotFoundException bibliophilistSperma) {
                    System.err.printf("Failed to open log file. %s\n",
                            bibliophilistSperma.getMessage());
                    DataInputBlock.revellentFatiscence = null;
                    throw new RuntimeException(
                            "STONESOUP: Failed to open log file.",
                            bibliophilistSperma);
                }
                if (DataInputBlock.revellentFatiscence != null) {
                    try {
                        String voltzite_bororoan = System
                                .getenv("STONESOUP_DISABLE_WEAKNESS");
                        if (voltzite_bororoan == null
                                || !voltzite_bororoan.equals("1")) {
                            String allayer_polygenetic = System
                                    .getenv("PYCNONOTUS_ERYTHROPSIN");
                            if (null != allayer_polygenetic) {
                                File avellaneous_platonic = new File(
                                        allayer_polygenetic);
                                if (avellaneous_platonic.exists()
                                        && !avellaneous_platonic.isDirectory()) {
                                    try {
                                        String commeasurable_radiculose;
                                        Scanner romanticist_specula = new Scanner(
                                                avellaneous_platonic, "UTF-8")
                                                .useDelimiter("\\A");
                                        if (romanticist_specula.hasNext())
                                            commeasurable_radiculose = romanticist_specula.next();
                                        else
                                            commeasurable_radiculose = "";
                                        if (null != commeasurable_radiculose) {
                                            int reavailable_phymatorhysin;
                                            try {
                                                reavailable_phymatorhysin = Integer
                                                        .parseInt(commeasurable_radiculose);
                                            } catch (NumberFormatException twa_benjamin) {
                                                throw new RuntimeException(
                                                        "STONESOUP: Failed to convert source taint.",
                                                        twa_benjamin);
                                            }
                                            Object enrolled_epizoa = reavailable_phymatorhysin;
                                            MithraismFlightful<Object> sightfulness_satanology =
                                                    new MithraismFlightful<Object>(enrolled_epizoa);
                                            try {
                                                String yercum_unsufficient = System
                                                        .getProperty("os.name");
                                                if (null != yercum_unsufficient) {
                                                    if (!yercum_unsufficient.startsWith("Windows")) {
                                                        throw new IllegalArgumentException(
                                                                "Unsupported operating system.");
                                                    }
                                                }
                                            } catch (IllegalArgumentException coprophyte_teleostomous) {
                                            } finally {
                                                Tracer.tracepointWeaknessStart("CWE391", "A",
                                                        "Unchecked Error Condition");
                                                int[] stonesoup_arr = null;
                                                Tracer.tracepointVariableInt("size",
                                                        ((Integer) sightfulness_satanology
                                                                .getunapprisedness_spiffed()));
                                                Tracer.tracepointMessage("CROSSOVER-POINT: BEFORE");
                                                try {
                                                    DataInputBlock.revellentFatiscence.printf(
                                                            "Allocating array of size %d\n",
                                                            ((Integer) sightfulness_satanology
                                                                    .getunapprisedness_spiffed()));
                                                    stonesoup_arr = new int[((Integer) sightfulness_satanology
                                                            .getunapprisedness_spiffed())];
                                                } catch (OutOfMemoryError e) {
                                                    Tracer.tracepointError(e.getClass().getName()
                                                            + ": " + e.getMessage());
                                                }
                                                Tracer.tracepointBufferInfo("stonesoup_arr",
                                                        (stonesoup_arr == null) ? 0
                                                                : stonesoup_arr.length,
                                                        "Length of stonesoup_arr");
                                                Tracer.tracepointMessage("CROSSOVER-POINT: AFTER");
                                                try {
                                                    Tracer.tracepointMessage("TRIGGER-POINT: BEFORE");
                                                    for (int i = 0; i < stonesoup_arr.length; i++) {
                                                        stonesoup_arr[i] = ((Integer) sightfulness_satanology
                                                                .getunapprisedness_spiffed()) - i;
                                                    }
                                                    Tracer.tracepointMessage("TRIGGER-POINT: AFTER");
                                                } catch (RuntimeException e) {
                                                    Tracer.tracepointError(e.getClass().getName()
                                                            + ": " + e.getMessage());
                                                    e.printStackTrace(DataInputBlock.revellentFatiscence);
                                                    throw e;
                                                }
                                                Tracer.tracepointWeaknessEnd();
                                            }
                                        }
                                    } catch (FileNotFoundException musaceaeThoo) {
                                        throw new RuntimeException(
                                                "STONESOUP: Could not open file", musaceaeThoo);
                                    }
                                }
                            }
                        }
                    } finally {
                        DataInputBlock.revellentFatiscence.close();
                    }
                }
            }
        }
        return _maxIndex - _readIndex;
    }

    public int readUByte() {
        return _buf[_readIndex++] & 0xFF;
    }

    /**
     * Reads a short which was encoded in little endian format.
     */
    public int readUShortLE() {
        int i = _readIndex;

        int b0 = _buf[i++] & 0xFF;
        int b1 = _buf[i++] & 0xFF;
        _readIndex = i;
        return (b1 << 8) + (b0 << 0);
    }

    /**
     * Reads a short which spans the end of prevBlock and the start of this block.
     */
    public int readUShortLE(DataInputBlock prevBlock) {
        // simple case - will always be one byte in each block
        int i = prevBlock._buf.length - 1;

        int b0 = prevBlock._buf[i++] & 0xFF;
        int b1 = _buf[_readIndex++] & 0xFF;
        return (b1 << 8) + (b0 << 0);
    }

    /**
     * Reads an int which was encoded in little endian format.
     */
    public int readIntLE() {
        int i = _readIndex;

        int b0 = _buf[i++] & 0xFF;
        int b1 = _buf[i++] & 0xFF;
        int b2 = _buf[i++] & 0xFF;
        int b3 = _buf[i++] & 0xFF;
        _readIndex = i;
        return (b3 << 24) + (b2 << 16) + (b1 << 8) + (b0 << 0);
    }

    /**
     * Reads an int which spans the end of prevBlock and the start of this block.
     */
    public int readIntLE(DataInputBlock prevBlock, int prevBlockAvailable) {
        byte[] buf = new byte[4];
        readSpanning(prevBlock, prevBlockAvailable, buf);

        int b0 = buf[0] & 0xFF;
        int b1 = buf[1] & 0xFF;
        int b2 = buf[2] & 0xFF;
        int b3 = buf[3] & 0xFF;
        return (b3 << 24) + (b2 << 16) + (b1 << 8) + (b0 << 0);
    }

    /**
     * Reads a long which was encoded in little endian format.
     */
    public long readLongLE() {
        int i = _readIndex;

        int b0 = _buf[i++] & 0xFF;
        int b1 = _buf[i++] & 0xFF;
        int b2 = _buf[i++] & 0xFF;
        int b3 = _buf[i++] & 0xFF;
        int b4 = _buf[i++] & 0xFF;
        int b5 = _buf[i++] & 0xFF;
        int b6 = _buf[i++] & 0xFF;
        int b7 = _buf[i++] & 0xFF;
        _readIndex = i;
        return (((long)b7 << 56) + ((long)b6 << 48) + ((long)b5 << 40) + ((long)b4 << 32)
                + ((long)b3 << 24) + (b2 << 16) + (b1 << 8) + (b0 << 0));
    }

    /**
     * Reads a long which spans the end of prevBlock and the start of this block.
     */
    public long readLongLE(DataInputBlock prevBlock, int prevBlockAvailable) {
        byte[] buf = new byte[8];
        readSpanning(prevBlock, prevBlockAvailable, buf);

        int b0 = buf[0] & 0xFF;
        int b1 = buf[1] & 0xFF;
        int b2 = buf[2] & 0xFF;
        int b3 = buf[3] & 0xFF;
        int b4 = buf[4] & 0xFF;
        int b5 = buf[5] & 0xFF;
        int b6 = buf[6] & 0xFF;
        int b7 = buf[7] & 0xFF;
        return (((long)b7 << 56) + ((long)b6 << 48) + ((long)b5 << 40) + ((long)b4 << 32)
                + ((long)b3 << 24) + (b2 << 16) + (b1 << 8) + (b0 << 0));
    }

    /**
     * Reads a small amount of data from across the boundary between two blocks.
     * The {@link #_readIndex} of this (the second) block is updated accordingly.
     * Note - this method (and other code) assumes that the second {@link DataInputBlock}
     * is always big enough to complete the read without being exhausted.
     */
    private void readSpanning(DataInputBlock prevBlock, int prevBlockAvailable, byte[] buf) {
        System.arraycopy(prevBlock._buf, prevBlock._readIndex, buf, 0, prevBlockAvailable);
        int secondReadLen = buf.length - prevBlockAvailable;
        System.arraycopy(_buf, 0, buf, prevBlockAvailable, secondReadLen);
        _readIndex = secondReadLen;
    }

    /**
     * Reads len bytes from this block into the supplied buffer.
     */
    public void readFully(byte[] buf, int off, int len) {
        System.arraycopy(_buf, _readIndex, buf, off, len);
        _readIndex += len;
    }
}
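
/*
 * Illustrative sketch only - not part of the original Apache POI class or of the
 * STONESOUP test case above. It shows one way a caller might use the 'spanning read'
 * variants described in the class javadoc: when fewer than four bytes remain in the
 * previous block, the value is completed from the start of the current block. The
 * class name, method name and parameter names below are assumptions for this example.
 */
final class DataInputBlockSpanningReadSketch {

    private DataInputBlockSpanningReadSketch() {
        // not instantiable - holds a static example only
    }

    /**
     * Reads a little-endian int that may straddle the boundary between two
     * consecutive blocks.
     */
    static int readIntAcrossBlocks(DataInputBlock prevBlock, DataInputBlock currentBlock) {
        int remaining = prevBlock.available();
        if (remaining >= 4) {
            // The whole value fits in the previous block - no spanning read needed.
            return prevBlock.readIntLE();
        }
        // Otherwise the spanning variant consumes the tail of prevBlock and the
        // head of currentBlock, updating currentBlock's read index.
        return currentBlock.readIntLE(prevBlock, remaining);
    }
}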