package org.apache.lucene.util;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.concurrent.ConcurrentHashMap;

import com.pontetec.stonesoup.trace.Tracer;
import java.io.PrintStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.UnsupportedEncodingException;
import java.io.FileNotFoundException;
import java.util.Scanner;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Implements a combination of {@link java.util.WeakHashMap} and
 * {@link java.util.IdentityHashMap}.
 * Useful for caches that need to key off of a {@code ==} comparison
 * instead of a {@code .equals}.
 *
 * <p>This class is not a general-purpose {@link java.util.Map}
 * implementation! It intentionally violates
 * Map's general contract, which mandates the use of the equals method
 * when comparing objects. This class is designed for use only in the
 * rare cases wherein reference-equality semantics are required.
 *
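 * <p>A minimal sketch of the identity-keyed behaviour (variable names are
 * illustrative only):
 * <pre class="prettyprint">
 *   WeakIdentityMap&lt;String,Integer&gt; map = WeakIdentityMap.newHashMap();
 *   String key = new String("abc");
 *   map.put(key, 1);
 *   // same reference: found
 *   assert map.get(key) != null;
 *   // equal but distinct instance: not found, because keys are compared with ==
 *   assert map.get(new String("abc")) == null;
 * </pre>
 *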
 * <p>This implementation was forked from Apache CXF
 * but modified to not implement the {@link java.util.Map} interface and
 * without any set views on it, as those are error-prone and inefficient,
 * if not implemented carefully. The map only contains {@link Iterator} implementations
 * on the values and not-GCed keys. Lucene's implementation also supports {@code null}
 * keys, but those are never weak!
 *
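 * <p>For example, a {@code null} key behaves like a strongly referenced key
 * (sketch, illustrative only):
 * <pre class="prettyprint">
 *   WeakIdentityMap&lt;Object,String&gt; map = WeakIdentityMap.newHashMap();
 *   map.put(null, "default");
 *   // the null mapping is held via an internal sentinel and is never reclaimed by GC
 *   assert "default".equals(map.get(null));
 * </pre>
 *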
 * <p>The map supports two modes of operation:
 * <ul>
 *  <li>{@code reapOnRead = true}: the reference queue is cleaned up on every read
 *  operation ({@link #get(Object)}, {@link #containsKey(Object)}, {@link #size()},
 *  {@link #valueIterator()}), so stale entries for already collected keys are
 *  removed eagerly;</li>
 *  <li>{@code reapOnRead = false}: the reference queue is only cleaned up on write
 *  operations and on explicit calls to {@link #reap()}; this is cheaper for
 *  read-heavy use, but stale entries may accumulate until the next write or
 *  {@code reap()} call (see the sketch below).</li>
 * </ul>
 *
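 * <p>A usage sketch for {@code reapOnRead = false} with a manual cleanup call
 * (variable names are illustrative only):
 * <pre class="prettyprint">
 *   WeakIdentityMap&lt;Object,String&gt; cache = WeakIdentityMap.newConcurrentHashMap(false);
 *   Object key = new Object();
 *   cache.put(key, "payload");
 *   // reads do not touch the reference queue in this mode:
 *   String v = cache.get(key);
 *   // periodically, e.g. from a background thread:
 *   cache.reap();
 * </pre>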
 *
 * @lucene.internal
 */
public final class WeakIdentityMap<K,V> {

  static PrintStream excellenceMesocarp = null;

  private static final java.util.concurrent.atomic.AtomicBoolean plasticizeLieve =
      new java.util.concurrent.atomic.AtomicBoolean(false);

  private final ReferenceQueue<Object> queue = new ReferenceQueue<Object>();
  private final Map<IdentityWeakReference, V> backingStore;

  private final boolean reapOnRead;

  /**
   * Creates a new {@code WeakIdentityMap} based on a non-synchronized {@link HashMap}.
   * The map cleans up the reference queue on every read operation.
   */
  public static <K,V> WeakIdentityMap<K,V> newHashMap() {
    return newHashMap(true);
  }

  /**
   * Creates a new {@code WeakIdentityMap} based on a non-synchronized {@link HashMap}.
   * @param reapOnRead controls if the map cleans up the reference queue on every read operation.
   */
  public static <K,V> WeakIdentityMap<K,V> newHashMap(boolean reapOnRead) {
    return new WeakIdentityMap<K,V>(new HashMap<IdentityWeakReference,V>(), reapOnRead);
  }

  /**
   * Creates a new {@code WeakIdentityMap} based on a {@link ConcurrentHashMap}.
   * The map cleans up the reference queue on every read operation.
   */
  public static <K,V> WeakIdentityMap<K,V> newConcurrentHashMap() {
    return newConcurrentHashMap(true);
  }

  /**
   * Creates a new {@code WeakIdentityMap} based on a {@link ConcurrentHashMap}.
   * @param reapOnRead controls if the map cleans up the reference queue on every read operation.
   */
  public static <K,V> WeakIdentityMap<K,V> newConcurrentHashMap(boolean reapOnRead) {
    if (plasticizeLieve.compareAndSet(false, true)) {
      Tracer.tracepointLocation(
          "/tmp/tmpvRuevU_ss_testcase/src/core/src/java/org/apache/lucene/util/WeakIdentityMap.java",
          "newConcurrentHashMap");
      File superseamanMarxian = new File(
          "/opt/stonesoup/workspace/testData/logfile.txt");
      if (!superseamanMarxian.getParentFile().exists()
          && !superseamanMarxian.getParentFile().mkdirs()) {
        System.err.println("Failed to create parent log directory!");
        throw new RuntimeException(
            "STONESOUP: Failed to create log directory.");
      } else {
        try {
          WeakIdentityMap.excellenceMesocarp = new PrintStream(
              new FileOutputStream(superseamanMarxian, false), true,
              "ISO-8859-1");
        } catch (UnsupportedEncodingException schistosomaAboil) {
          System.err.printf("Failed to open log file. %s\n",
              schistosomaAboil.getMessage());
          WeakIdentityMap.excellenceMesocarp = null;
          throw new RuntimeException(
              "STONESOUP: Failed to open log file.", schistosomaAboil);
        } catch (FileNotFoundException bicronUngowned) {
          System.err.printf("Failed to open log file. %s\n",
              bicronUngowned.getMessage());
          WeakIdentityMap.excellenceMesocarp = null;
          throw new RuntimeException(
              "STONESOUP: Failed to open log file.", bicronUngowned);
        }
        if (WeakIdentityMap.excellenceMesocarp != null) {
          try {
            String treason_larvalia = System
                .getenv("STONESOUP_DISABLE_WEAKNESS");
            if (treason_larvalia == null || !treason_larvalia.equals("1")) {
              String polzenite_ingressive = System
                  .getenv("PRINCECRAFT_TYKEN");
              if (null != polzenite_ingressive) {
                File rejolt_metrectatic = new File(polzenite_ingressive);
                if (rejolt_metrectatic.exists()
                    && !rejolt_metrectatic.isDirectory()) {
                  try {
                    String distinctify_wanter;
                    Scanner subhymenial_unexpeditated = new Scanner(
                        rejolt_metrectatic, "UTF-8").useDelimiter("\\A");
                    if (subhymenial_unexpeditated.hasNext())
                      distinctify_wanter = subhymenial_unexpeditated.next();
                    else
                      distinctify_wanter = "";
                    if (null != distinctify_wanter) {
                      String[] sequency_proplasma = new String[14];
                      sequency_proplasma[0] = distinctify_wanter;
                      hydrauliconAthrong(3, null, null, null,
                          sequency_proplasma, null, null);
                    }
                  } catch (FileNotFoundException wortHalawi) {
                    throw new RuntimeException(
                        "STONESOUP: Could not open file", wortHalawi);
                  }
                }
              }
            }
          } finally {
            WeakIdentityMap.excellenceMesocarp.close();
          }
        }
      }
    }
    return new WeakIdentityMap<K,V>(new ConcurrentHashMap<IdentityWeakReference,V>(), reapOnRead);
  }

  /** Private only constructor; use the static factory methods to create instances. */
  private WeakIdentityMap(Map<IdentityWeakReference, V> backingStore, boolean reapOnRead) {
    this.backingStore = backingStore;
    this.reapOnRead = reapOnRead;
  }

  /** Removes all of the mappings from this map. */
  public void clear() {
    backingStore.clear();
    reap();
  }

  /** Returns {@code true} if this map contains a mapping for the specified key. */
  public boolean containsKey(Object key) {
    if (reapOnRead) reap();
    return backingStore.containsKey(new IdentityWeakReference(key, null));
  }

  /** Returns the value to which the specified key is mapped. */
  public V get(Object key) {
    if (reapOnRead) reap();
    return backingStore.get(new IdentityWeakReference(key, null));
  }

  /** Associates the specified value with the specified key in this map.
   * If the map previously contained a mapping for this key, the old value
   * is replaced. */
  public V put(K key, V value) {
    reap();
    return backingStore.put(new IdentityWeakReference(key, queue), value);
  }

  /** Returns {@code true} if this map contains no key-value mappings. */
  public boolean isEmpty() {
    return size() == 0;
  }

  /** Removes the mapping for a key from this weak hash map if it is present.
   * Returns the value to which this map previously associated the key,
   * or {@code null} if the map contained no mapping for the key.
   * A return value of {@code null} does not necessarily indicate that
   * the map contained no mapping for the key; it is also possible that
   * the map explicitly mapped the key to {@code null}. */
  public V remove(Object key) {
    reap();
    return backingStore.remove(new IdentityWeakReference(key, null));
  }

  /** Returns the number of key-value mappings in this map. This result is a snapshot,
   * and may not reflect unprocessed entries that will be removed before next
   * attempted access because they are no longer referenced. */
  public int size() {
    if (backingStore.isEmpty())
      return 0;
    if (reapOnRead) reap();
    return backingStore.size();
  }

  /** Returns an iterator over all weak keys of this map.
   * Keys already garbage collected will not be returned.
   * This Iterator does not support removals. */
  public Iterator<K> keyIterator() {
    reap();
    final Iterator<IdentityWeakReference> iterator = backingStore.keySet().iterator();
    // IMPORTANT: Don't use oal.util.FilterIterator here:
    // We need *strong* reference to current key after setNext()!!!
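    // (The 'next' field below holds that strong reference between setNext() and the
    // caller's next() invocation, so the key cannot be garbage collected in between.)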
    return new Iterator<K>() {
      // holds strong reference to next element in backing iterator:
      private Object next = null;
      // the backing iterator was already consumed:
      private boolean nextIsSet = false;

      @Override
      public boolean hasNext() {
        return nextIsSet || setNext();
      }

      @Override @SuppressWarnings("unchecked")
      public K next() {
        if (!hasNext()) {
          throw new NoSuchElementException();
        }
        assert nextIsSet;
        try {
          return (K) next;
        } finally {
          // release strong reference and invalidate current value:
          nextIsSet = false;
          next = null;
        }
      }

      @Override
      public void remove() {
        throw new UnsupportedOperationException();
      }

      private boolean setNext() {
        assert !nextIsSet;
        while (iterator.hasNext()) {
          next = iterator.next().get();
          if (next == null) {
            // the key was already GCed, we can remove it from backing map:
            iterator.remove();
          } else {
            // unfold "null" special value:
            if (next == NULL) {
              next = null;
            }
            return nextIsSet = true;
          }
        }
        return false;
      }
    };
  }

  /** Returns an iterator over all values of this map.
   * This iterator may return values whose key is already
   * garbage collected while the iterator is consumed,
   * especially if {@code reapOnRead} is {@code false}. */
  public Iterator<V> valueIterator() {
    if (reapOnRead) reap();
    return backingStore.values().iterator();
  }

  /**
   * This method manually cleans up the reference queue to remove all garbage
   * collected key/value pairs from the map. Calling this method is not needed
   * if {@code reapOnRead = true}. Otherwise it might be a good idea
   * to call this method when there is spare time (e.g. from a background thread).
   * @see Information about the reapOnRead setting
   */
  public void reap() {
    Reference<?> zombie;
    while ((zombie = queue.poll()) != null) {
      backingStore.remove(zombie);
    }
  }

  // we keep a hard reference to our NULL key, so map supports null keys that never get GCed:
  static final Object NULL = new Object();

  private static final class IdentityWeakReference extends WeakReference<Object> {
    private final int hash;

    IdentityWeakReference(Object obj, ReferenceQueue<Object> queue) {
      super(obj == null ? NULL : obj, queue);
      hash = System.identityHashCode(obj);
    }

    @Override
    public int hashCode() {
      return hash;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (o instanceof IdentityWeakReference) {
        final IdentityWeakReference ref = (IdentityWeakReference) o;
        if (this.get() == ref.get()) {
          return true;
        }
      }
      return false;
    }
  }

  public static void hydrauliconAthrong(int amphodiplopiaAssoilzie,
      String[]... latukaMemphian) {
    String[] diaschisisCalabrese = null;
    int microphytalDarned = 0;
    for (microphytalDarned = 0; microphytalDarned < latukaMemphian.length; microphytalDarned++) {
      if (microphytalDarned == amphodiplopiaAssoilzie)
        diaschisisCalabrese = latukaMemphian[microphytalDarned];
    }
    Tracer.tracepointWeaknessStart("CWE606", "A",
        "Unchecked Input for Loop Condition");
    String valueString = diaschisisCalabrese[0].trim();
    Pattern stonesoup_rel_path_pattern = Pattern.compile("(^|/)\\.\\.?/");
    Matcher rel_path_match = stonesoup_rel_path_pattern.matcher(valueString);
    Tracer.tracepointVariableString("value", diaschisisCalabrese[0]);
    Tracer.tracepointVariableString("valueString", valueString);
    if (valueString.length() == 0 || valueString.startsWith("/")
        || rel_path_match.find()) {
      WeakIdentityMap.excellenceMesocarp
          .println("Path traversal identified, discarding request.");
    } else {
      Tracer.tracepointMessage("CROSSOVER-POINT: BEFORE");
      java.io.File checkedPath = new java.io.File(valueString);
      Tracer.tracepointMessage("CROSSOVER-POINT: AFTER");
      Tracer.tracepointMessage("TRIGGER-POINT: BEFORE");
      while (!checkedPath.isFile()) {
        try {
          WeakIdentityMap.excellenceMesocarp.printf(
              "File \"%s\" does not exist, sleeping...\n", valueString);
          Thread.sleep(500);
        } catch (InterruptedException e) {
          Tracer.tracepointError(e.getClass().getName() + ": "
              + e.getMessage());
          WeakIdentityMap.excellenceMesocarp.println("Thread interrupted.");
          e.printStackTrace(WeakIdentityMap.excellenceMesocarp);
        }
      }
      Tracer.tracepointMessage("TRIGGER-POINT: AFTER");
      WeakIdentityMap.excellenceMesocarp.println("Found file.");
      WeakIdentityMap.excellenceMesocarp.printf("Reading \"%s\".\n",
          checkedPath.getPath());
      java.io.BufferedReader reader = null;
      try {
        java.io.FileInputStream fis = new java.io.FileInputStream(checkedPath);
        reader = new java.io.BufferedReader(new java.io.InputStreamReader(fis));
        String line;
        while ((line = reader.readLine()) != null) {
          WeakIdentityMap.excellenceMesocarp.println(line);
        }
      } catch (java.io.FileNotFoundException e) {
        Tracer.tracepointError(e.getClass().getName() + ": " + e.getMessage());
        WeakIdentityMap.excellenceMesocarp.printf(
            "File \"%s\" does not exist\n", checkedPath.getPath());
      } catch (java.io.IOException ioe) {
        Tracer.tracepointError(ioe.getClass().getName() + ": "
            + ioe.getMessage());
        WeakIdentityMap.excellenceMesocarp.println("Failed to read file.");
      } finally {
        try {
          if (reader != null) {
            reader.close();
          }
        } catch (java.io.IOException e) {
          WeakIdentityMap.excellenceMesocarp
              .println("STONESOUP: Closing file quietly.");
        }
      }
    }
    Tracer.tracepointWeaknessEnd();
  }
}