Bug 508834 - [regression] Compile error with flatMap after updating to 3.12.2.v20161124-1400

- retrofitted for Java 7
- additional tests from Bug 509694 and Bug 510004
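
For reference, a stand-alone snippet that reproduces the reported compile
error, distilled from the new testBug508834 below (class and field names are
taken from that test and are illustrative only):

    import java.util.stream.Stream;

    public class FlatMapper {
        private String[] stuff;

        public static void main(String[] args) {
            Stream.of(new FlatMapper[]{})
                // nested generic invocation: its type bounds must reach the
                // outer flatMap inference (the push is now deferred until
                // after overload resolution)
                .flatMap(fl -> Stream.of(fl.stuff))
                .filter(st -> !st.isEmpty());
        }
    }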

Change-Id: Ia08f6eceda1c0f97179c25fe29b7312be27e807c
Signed-off-by: Stephan Herrmann <stephan.herrmann@berlin.de>
diff --git a/org.eclipse.jdt.core.tests.compiler/src/org/eclipse/jdt/core/tests/compiler/regression/GenericsRegressionTest_1_8.java b/org.eclipse.jdt.core.tests.compiler/src/org/eclipse/jdt/core/tests/compiler/regression/GenericsRegressionTest_1_8.java
index f632841..d2f5b21 100644
--- a/org.eclipse.jdt.core.tests.compiler/src/org/eclipse/jdt/core/tests/compiler/regression/GenericsRegressionTest_1_8.java
+++ b/org.eclipse.jdt.core.tests.compiler/src/org/eclipse/jdt/core/tests/compiler/regression/GenericsRegressionTest_1_8.java
@@ -7072,4 +7072,225 @@
 		"----------\n"
 	);
 }
+public void testBug508834() {
+	runConformTest(
+		new String[] {
+			"FlatMapper.java",
+			"import java.util.stream.Stream;\n" + 
+			"public class FlatMapper {\n" + 
+			"\n" + 
+			"	private String[] stuff;\n" + 
+			"	\n" + 
+			"	public static void main(String[] args) {\n" + 
+			"	    Stream.of(new FlatMapper[]{})\n" + 
+			"	        .flatMap(fl -> Stream.of(fl.stuff)) //\n" + 
+			"	        .filter(st -> !st.isEmpty()); //\n" + 
+			"	}\n" + 
+			"}\n"
+		},
+		"");
+}
+public void testBug508834_comment0() {
+	runConformTest(
+		new String[] {
+			"test/TypeB.java",
+			"package test;\n" + 
+			"public class TypeB {\n" + 
+			"    public String getText() {\n" + 
+			"        return \"\";\n" + 
+			"    }\n" + 
+			"\n" + 
+			"}\n",
+			"test/TypeA.java",
+			"package test;\n" + 
+			"public class TypeA {\n" + 
+			"    public TypeB[] getArrayOfB() {\n" + 
+			"        return null;\n" + 
+			"    }\n" + 
+			"    public TypeB getB() {\n" + 
+			"        return null;\n" + 
+			"    }\n" + 
+			"}\n",
+			"test/Test1.java",
+			"package test;\n" + 
+			"import java.util.stream.Stream;\n" + 
+			"public class Test1 {\n" + 
+			"    private TypeA[] arrayOfType() {\n" + 
+			"        return null;\n" + 
+			"    }\n" + 
+			"    private String[] test1() {\n" + 
+			"        return Stream\n" + 
+			"                .of(arrayOfType())\n" + 
+			"                .filter(a -> a.getB() != null)\n" + 
+			"                .flatMap(a -> Stream.of(a.getB()))\n" + 
+			"                .map(TypeB::getText)\n" + 
+			"                .sorted()\n" + 
+			"                .toArray(String[]::new);\n" + 
+			"    }\n" + 
+			"    private String[] test2() {\n" + 
+			"        return Stream\n" + 
+			"                .of(arrayOfType())\n" + 
+			"                .filter(a -> a.getArrayOfB() != null)\n" + 
+			"                .flatMap(a -> Stream.of(a.getArrayOfB()))\n" + 
+			"                .map(TypeB::getText)\n" + 
+			"                .sorted()\n" + 
+			"                .toArray(String[]::new);\n" + 
+			"    }\n" + 
+			"}\n"
+		},
+		"");
+}
+	public void testBug509694() {
+		runConformTest(
+			new String[] {
+				"NfaUtil.java",
+				"/*******************************************************************************\n" + 
+				" * Copyright (c) 2011 itemis AG (http://www.itemis.eu) and others.\n" + 
+				" * All rights reserved. This program and the accompanying materials\n" + 
+				" * are made available under the terms of the Eclipse Public License v1.0\n" + 
+				" * which accompanies this distribution, and is available at\n" + 
+				" * http://www.eclipse.org/legal/epl-v10.html\n" + 
+				" *******************************************************************************/\n" + 
+				"import java.util.*;\n" + 
+				"\n" + 
+				"class Lists {\n" + 
+				"	public static <E> LinkedList<E> newLinkedList() {\n" + 
+				"		return new LinkedList<E>();\n" + 
+				"	}\n" + 
+				"\n" + 
+				"	public static <E> LinkedList<E> newLinkedList(Iterable<? extends E> elements) {\n" + 
+				"		return newLinkedList();\n" + 
+				"	}\n" + 
+				"}\n" + 
+				"\n" + 
+				"class Maps {\n" + 
+				"	public static <K, V> HashMap<K, V> newHashMap() {\n" + 
+				"		return new HashMap<K, V>();\n" + 
+				"	}\n" + 
+				"\n" + 
+				"	public static <K, V> LinkedHashMap<K, V> newLinkedHashMap() {\n" + 
+				"		return new LinkedHashMap<K, V>();\n" + 
+				"	}\n" + 
+				"\n" + 
+				"	public static <K, V> LinkedHashMap<K, V> newLinkedHashMap(Map<? extends K, ? extends V> map) {\n" + 
+				"		return new LinkedHashMap<K, V>(map);\n" + 
+				"	}\n" + 
+				"}\n" + 
+				"\n" + 
+				"class Sets {\n" + 
+				"	public static <E> HashSet<E> newHashSet(Iterable<? extends E> elements) {\n" + 
+				"		return new HashSet<E>();\n" + 
+				"	}\n" + 
+				"\n" + 
+				"	public static <E> HashSet<E> newHashSet(E... elements) {\n" + 
+				"		HashSet<E> set = new HashSet<>();\n" + 
+				"		Collections.addAll(set, elements);\n" + 
+				"		return set;\n" + 
+				"	}\n" + 
+				"}\n" + 
+				"\n" + 
+				"interface IAcceptor<T> {\n" + 
+				"	void accept(T t);\n" + 
+				"}\n" + 
+				"\n" + 
+				"interface Nfa<STATE> extends DirectedGraph<STATE> {\n" + 
+				"	STATE getStop();\n" + 
+				"	STATE getStart();\n" + 
+				"}\n" + 
+				"interface DirectedGraph<NODE> {\n" + 
+				"	Iterable<NODE> getFollowers(NODE state);\n" + 
+				"}\n" + 
+				"\n" + 
+				"/**\n" + 
+				" * @author Moritz Eysholdt - Initial contribution and API\n" + 
+				" */\n" + 
+				"public class NfaUtil {\n" + 
+				"\n" + 
+				"	public <S> Map<S, Set<S>> findCycles(Nfa<S> nfa) {\n" + 
+				"		Map<S, Set<S>> cycles = Maps.newLinkedHashMap();\n" + 
+				"		findCycles(nfa, nfa.getStart(), (List<S> t) -> {\n" + 
+				"			Set<S> cycle = Sets.newHashSet(t);\n" + 
+				"			for (S cycleNode : t) {\n" + 
+				"				// We have two cycles that are connected via at least\n" + 
+				"				// one node. Treat them as one cycle.\n" + 
+				"				Set<S> existingCycle = cycles.get(cycleNode);\n" + 
+				"				if (existingCycle != null) {\n" + 
+				"					cycle.addAll(existingCycle);\n" + 
+				"				}\n" + 
+				"			}\n" + 
+				"			for (S n : cycle) {\n" + 
+				"				cycles.put(n, cycle);\n" + 
+				"			}\n" + 
+				"		}, Maps.newHashMap(), Lists.newLinkedList());\n" + 
+				"		return cycles;\n" + 
+				"	}\n" + 
+				"\n" + 
+				"	public <S> void findCycles(Nfa<S> nfa, IAcceptor<List<S>> cycleAcceptor) {\n" + 
+				"		findCycles(nfa, nfa.getStart(), cycleAcceptor, Maps.newHashMap(), Lists.newLinkedList());\n" + 
+				"	}\n" + 
+				"\n" + 
+				"	private static final int DFS_VISITED = 1;\n" + 
+				"	private static final int DFS_ON_STACK = 2;\n" + 
+				"\n" + 
+				"	protected <S> void findCycles(Nfa<S> nfa, S node, IAcceptor<List<S>> cycleAcceptor, Map<S, Integer> dfsMark,\n" + 
+				"			LinkedList<S> dfsStack) {\n" + 
+				"		dfsStack.push(node);\n" + 
+				"		dfsMark.put(node, DFS_ON_STACK);\n" + 
+				"		for (S follower : nfa.getFollowers(node)) {\n" + 
+				"			Integer followerMark = dfsMark.get(follower);\n" + 
+				"			if (followerMark == null) {\n" + 
+				"				findCycles(nfa, follower, cycleAcceptor, dfsMark, dfsStack);\n" + 
+				"			} else if (followerMark == DFS_ON_STACK) {\n" + 
+				"				LinkedList<S> cycle = Lists.newLinkedList();\n" + 
+				"				Iterator<S> stackIter = dfsStack.iterator();\n" + 
+				"				S cycleNode;\n" + 
+				"				do {\n" + 
+				"					cycleNode = stackIter.next();\n" + 
+				"					cycle.addFirst(cycleNode);\n" + 
+				"				} while (cycleNode != follower && stackIter.hasNext());\n" + 
+				"				cycleAcceptor.accept(cycle);\n" + 
+				"			}\n" + 
+				"		}\n" + 
+				"		dfsStack.pop();\n" + 
+				"		dfsMark.put(node, DFS_VISITED);\n" + 
+				"	}\n" + 
+				"}\n"
+			});
+	}
+	public void testBug510004_a() {
+		runConformTest(
+			new String[] {
+				"BrokenTypeInference.java",
+				"import java.util.Optional;\n" + 
+				"import java.util.stream.Stream;\n" + 
+				"\n" + 
+				"public class BrokenTypeInference {\n" + 
+				"    public static void main(String[] args) {\n" + 
+				"        Optional.of(\"42,43\").map(s -> Stream.of(s.split(\",\")));\n" + 
+				"    }\n" + 
+				"}\n"
+			});
+	}
+	public void testBug510004_b() {
+		runConformTest(
+			new String[] {
+				"BrokenTypeInference.java",
+				"import java.util.List;\n" + 
+				"import java.util.Optional;\n" + 
+				"\n" + 
+				"public class BrokenTypeInference {\n" + 
+				"    public static void main(String[] args) {\n" + 
+				"        Optional.of(\"42,43\").map(s -> x(s.split(\",\")));\n" + 
+				"    }\n" + 
+				"\n" + 
+				"    private static <X> List<X> x(X ... xs) {\n" + 
+				"        return java.util.Collections.emptyList();\n" + 
+				"    }\n" + 
+				"\n" + 
+				"    private static <X> List<X> x(X x) {\n" + 
+				"        return java.util.Collections.emptyList();\n" + 
+				"    }\n" + 
+				"}\n"
+			});
+	}
 }
diff --git a/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/ast/ASTNode.java b/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/ast/ASTNode.java
index 2007d33..7d5d9ba 100644
--- a/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/ast/ASTNode.java
+++ b/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/ast/ASTNode.java
@@ -64,6 +64,7 @@
 import org.eclipse.jdt.internal.compiler.lookup.MethodBinding;
 import org.eclipse.jdt.internal.compiler.lookup.PackageBinding;
 import org.eclipse.jdt.internal.compiler.lookup.ParameterizedGenericMethodBinding;
+import org.eclipse.jdt.internal.compiler.lookup.ParameterizedMethodBinding;
 import org.eclipse.jdt.internal.compiler.lookup.ProblemMethodBinding;
 import org.eclipse.jdt.internal.compiler.lookup.ProblemReasons;
 import org.eclipse.jdt.internal.compiler.lookup.ReferenceBinding;
@@ -699,6 +700,11 @@
 					argumentTypes[i] = updatedArgumentType;
 			}
 		}
+		if (method instanceof ParameterizedGenericMethodBinding) {
+			InferenceContext18 ic18 = invocation.getInferenceContext((ParameterizedMethodBinding) method);
+			if (ic18 != null)
+				ic18.flushBoundOutbox(); // overload resolution is done; now perform the deferred push of bounds from inner to outer
+		}
 	}
 
 	public static void resolveAnnotations(BlockScope scope, Annotation[] sourceAnnotations, Binding recipient) {
diff --git a/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/lookup/InferenceContext18.java b/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/lookup/InferenceContext18.java
index 14a6493..4d9e567 100644
--- a/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/lookup/InferenceContext18.java
+++ b/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/lookup/InferenceContext18.java
@@ -160,9 +160,11 @@
 	public BoundSet b2;
 	private BoundSet b3;
 	/** Not per JLS: inbox for emulation of how javac passes type bounds from inner to outer */
-	private BoundSet innerInbox; 
+	BoundSet innerInbox; 
 	/** Not per JLS: signal when current is ready to directly merge all bounds from inner. */
-	private boolean directlyAcceptingInnerBounds = false;
+	boolean directlyAcceptingInnerBounds = false;
+	/** Not per JLS: pushing bounds from inner to outer may have to be deferred until after overload resolution; stores a runnable that performs the deferred push. */
+	private Runnable pushToOuterJob = null;
 	
 	// InferenceVariable interning:
 	private InferenceVariable[] internedVariables;
@@ -495,17 +497,35 @@
 	// ---  not per JLS: emulate how javac passes type bounds from inner to outer: ---
 	/** Not per JLS: push current bounds to outer inference if outer is ready for it. */
 	private void pushBoundsToOuter() {
-		InferenceContext18 outer = this.outerContext;
+		final InferenceContext18 outer = this.outerContext;
 		if (outer != null && outer.stepCompleted >= APPLICABILITY_INFERRED) {
-			if (outer.directlyAcceptingInnerBounds) {
-				outer.currentBounds.addBounds(this.currentBounds, this.environment);
-			} else if (outer.innerInbox == null) {
-				outer.innerInbox = this.currentBounds.copy();
+			final boolean deferred = outer.currentInvocation instanceof Invocation; // need to wait till after overload resolution?
+			final BoundSet toPush = deferred ? this.currentBounds.copy() : this.currentBounds;
+			Runnable job = new Runnable() {
+				public void run() {
+					if (outer.directlyAcceptingInnerBounds) {
+						outer.currentBounds.addBounds(toPush, InferenceContext18.this.environment);
+					} else if (outer.innerInbox == null) {
+						outer.innerInbox = deferred ? toPush : toPush.copy(); // copy now, unless already copied on behalf of 'deferred'
+					} else {
+						outer.innerInbox.addBounds(toPush, InferenceContext18.this.environment);
+					}
+				}
+			};
+			if (deferred) {
+				this.pushToOuterJob = job;
 			} else {
-				outer.innerInbox.addBounds(this.currentBounds, this.environment);
+				job.run(); // TODO(stephan): ever reached? for ReferenceExpression? (would need a corresponding new call to flushBoundOutbox()).
 			}
 		}
 	}
+	/** Not per JLS: after overload resolution is done, perform the deferred push of type bounds to the outer inference, if any. */
+	public void flushBoundOutbox() {
+		if (this.pushToOuterJob != null) {
+			this.pushToOuterJob.run();
+			this.pushToOuterJob = null;
+		}
+	}
 	/** Not JLS: merge pending bounds of inner inference into current. */
 	private void mergeInnerBounds() {
 		if (this.innerInbox != null) {