summaryrefslogtreecommitdiff
path: root/BJC-Utils2/src/main/java/bjc/utils/parserutils
diff options
context:
space:
mode:
authorbculkin2442 <bjculkin@mix.wvu.edu>2016-04-03 19:22:48 -0400
committerbculkin2442 <bjculkin@mix.wvu.edu>2016-04-03 19:22:48 -0400
commit1c8bc7132d980c1ff2dbd6b9af579c3b2fd8c63e (patch)
treea29777f07ebd81fbef61b5ae02f13f1a9d8f65a2 /BJC-Utils2/src/main/java/bjc/utils/parserutils
parenta023de85aa08c8f2b8b2441c6b14064eabee2775 (diff)
General code refactoring and maintenance
Diffstat (limited to 'BJC-Utils2/src/main/java/bjc/utils/parserutils')
-rw-r--r--BJC-Utils2/src/main/java/bjc/utils/parserutils/AST.java179
-rw-r--r--BJC-Utils2/src/main/java/bjc/utils/parserutils/RuleBasedConfigReader.java110
-rw-r--r--BJC-Utils2/src/main/java/bjc/utils/parserutils/ShuntingYard.java119
-rw-r--r--BJC-Utils2/src/main/java/bjc/utils/parserutils/TreeConstructor.java151
4 files changed, 373 insertions, 186 deletions
diff --git a/BJC-Utils2/src/main/java/bjc/utils/parserutils/AST.java b/BJC-Utils2/src/main/java/bjc/utils/parserutils/AST.java
index 4bfb469..d5ae3f2 100644
--- a/BJC-Utils2/src/main/java/bjc/utils/parserutils/AST.java
+++ b/BJC-Utils2/src/main/java/bjc/utils/parserutils/AST.java
@@ -17,11 +17,26 @@ import bjc.utils.funcdata.bst.ITreePart.TreeLinearizationMethod;
* The type of token in this AST
*/
public class AST<T> {
- private T token;
+ /**
+ * Indent a string n levels
+ *
+ * @param builder
+ * The string to indent
+ * @param levels
+ * The number of levels to indent
+ */
+ protected static void indentNLevels(StringBuilder builder,
+ int levels) {
+ for (int i = 0; i <= levels; i++) {
+ builder.append("\t");
+ }
+ }
private AST<T> left;
private AST<T> right;
+ private T token;
+
/**
* Create a new leaf AST node
*
@@ -53,43 +68,6 @@ public class AST<T> {
}
/**
- * Traverse an AST
- *
- * @param linearizationMethod
- * The way to traverse the tree
- * @param action
- * The function to call on each traversed element
- */
- public void traverse(TreeLinearizationMethod linearizationMethod,
- Consumer<T> action) {
- if (left != null && right != null) {
- switch (linearizationMethod) {
- case INORDER:
- left.traverse(linearizationMethod, action);
- action.accept(token);
- right.traverse(linearizationMethod, action);
- break;
- case POSTORDER:
- left.traverse(linearizationMethod, action);
- right.traverse(linearizationMethod, action);
- action.accept(token);
- break;
- case PREORDER:
- action.accept(token);
- left.traverse(linearizationMethod, action);
- right.traverse(linearizationMethod, action);
- break;
- default:
- throw new IllegalArgumentException(
- "Got a invalid tree linearizer "
- + linearizationMethod + ". WAT");
- }
- } else {
- action.accept(token);
- }
- }
-
- /**
* Collapse this tree into a single node
*
* @param <E>
@@ -104,11 +82,44 @@ public class AST<T> {
* The function for transforming the result
* @return The collapsed value of the tree
*/
+ @SuppressWarnings("unchecked")
public <E, T2> E collapse(Function<T, T2> tokenTransformer,
Function<T, BinaryOperator<T2>> nodeTransformer,
Function<T2, E> resultTransformer) {
- return resultTransformer.apply(
- internalCollapse(tokenTransformer, nodeTransformer));
+ if (tokenTransformer == null) {
+ throw new NullPointerException(
+ "Token transformer must not be null");
+ } else if (nodeTransformer == null) {
+ throw new NullPointerException(
+ "Node transformer must not be null");
+ }
+
+ if (resultTransformer != null) {
+ return resultTransformer.apply(
+ internalCollapse(tokenTransformer, nodeTransformer));
+ } else {
+ // This is valid because if the user passes null as the last
+ // parameter, E will be inferred as Object, but will actually
+ // be T2
+ return (E) internalCollapse(tokenTransformer, nodeTransformer);
+ }
+ }
+
+ /**
+ * Expand the nodes in an AST
+ *
+ * @param expander
+ * The function to use for expanding nodes
+ * @return The expanded AST
+ */
+ public AST<T> expand(Function<T, AST<T>> expander) {
+ if (expander == null) {
+ throw new NullPointerException("Expander must not be null");
+ }
+
+ return collapse(expander, (operator) -> (leftAST, rightAST) -> {
+ return new AST<>(operator, leftAST, rightAST);
+ }, null);
}
/*
@@ -129,6 +140,7 @@ public class AST<T> {
}
T2 rightCollapsed;
+
if (right == null) {
rightCollapsed = null;
} else {
@@ -141,15 +153,6 @@ public class AST<T> {
}
}
- @Override
- public String toString() {
- StringBuilder builder = new StringBuilder();
-
- internalToString(builder, -1);
-
- return builder.toString();
- }
-
/**
* Internal version of toString for proper rendering
*
@@ -187,21 +190,6 @@ public class AST<T> {
}
/**
- * Indent a string n levels
- *
- * @param builder
- * The string to indent
- * @param levels
- * The number of levels to indent
- */
- protected static void indentNLevels(StringBuilder builder,
- int levels) {
- for (int i = 0; i <= levels; i++) {
- builder.append("\t");
- }
- }
-
- /**
* Execute a transform on selective nodes of the tree
*
* @param transformerPredicate
@@ -211,6 +199,12 @@ public class AST<T> {
*/
public void selectiveTransform(Predicate<T> transformerPredicate,
UnaryOperator<T> transformer) {
+ if (transformerPredicate == null) {
+ throw new NullPointerException("Predicate must not be null");
+ } else if (transformer == null) {
+ throw new NullPointerException("Transformer must not be null");
+ }
+
if (transformerPredicate.test(token)) {
token = transformer.apply(token);
}
@@ -224,6 +218,15 @@ public class AST<T> {
}
}
+ @Override
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+
+ internalToString(builder, -1);
+
+ return builder.toString();
+ }
+
/**
* Transmute the tokens in an AST into a different sort of token
*
@@ -235,6 +238,10 @@ public class AST<T> {
* @return The AST with transformed tokens
*/
public <E> AST<E> transmuteAST(Function<T, E> tokenTransformer) {
+ if (tokenTransformer == null) {
+ throw new NullPointerException("Transformer must not be null");
+ }
+
AST<E> leftBranch = null;
AST<E> rightBranch = null;
@@ -249,4 +256,48 @@ public class AST<T> {
return new AST<>(tokenTransformer.apply(token), leftBranch,
rightBranch);
}
+
+ /**
+ * Traverse an AST
+ *
+ * @param linearizationMethod
+ * The way to traverse the tree
+ * @param action
+ * The function to call on each traversed element
+ */
+ public void traverse(TreeLinearizationMethod linearizationMethod,
+ Consumer<T> action) {
+ if (linearizationMethod == null) {
+ throw new NullPointerException(
+ "Linearization method must not be null");
+ } else if (action == null) {
+ throw new NullPointerException("Action must not be null");
+ }
+
+ if (left != null && right != null) {
+ switch (linearizationMethod) {
+ case INORDER:
+ left.traverse(linearizationMethod, action);
+ action.accept(token);
+ right.traverse(linearizationMethod, action);
+ break;
+ case POSTORDER:
+ left.traverse(linearizationMethod, action);
+ right.traverse(linearizationMethod, action);
+ action.accept(token);
+ break;
+ case PREORDER:
+ action.accept(token);
+ left.traverse(linearizationMethod, action);
+ right.traverse(linearizationMethod, action);
+ break;
+ default:
+ throw new IllegalArgumentException(
+ "Got an invalid tree linearizer "
+ + linearizationMethod + ". WAT");
+ }
+ } else {
+ action.accept(token);
+ }
+ }
}
diff --git a/BJC-Utils2/src/main/java/bjc/utils/parserutils/RuleBasedConfigReader.java b/BJC-Utils2/src/main/java/bjc/utils/parserutils/RuleBasedConfigReader.java
index 46f5f1d..b6162e5 100644
--- a/BJC-Utils2/src/main/java/bjc/utils/parserutils/RuleBasedConfigReader.java
+++ b/BJC-Utils2/src/main/java/bjc/utils/parserutils/RuleBasedConfigReader.java
@@ -2,6 +2,7 @@ package bjc.utils.parserutils;
import java.io.InputStream;
import java.util.HashMap;
+import java.util.InputMismatchException;
import java.util.Map;
import java.util.Scanner;
import java.util.function.BiConsumer;
@@ -58,6 +59,13 @@ public class RuleBasedConfigReader<E> {
*/
public void addPragma(String pragmaName,
BiConsumer<FunctionalStringTokenizer, E> pragmaAction) {
+ if (pragmaName == null) {
+ throw new NullPointerException("Pragma name must not be null");
+ } else if (pragmaAction == null) {
+ throw new NullPointerException(
+ "Pragma action must not be null");
+ }
+
pragmas.put(pragmaName, pragmaAction);
}
@@ -71,42 +79,77 @@ public class RuleBasedConfigReader<E> {
* @return The final state of the reader
*/
public E fromStream(InputStream inputStream, E initialState) {
- Scanner inputSource = new Scanner(inputStream);
-
- E state = initialState;
-
- while (inputSource.hasNextLine()) {
- String line = inputSource.nextLine();
-
- if (line.equals("")) {
- endRule.accept(state);
- continue;
- } else if (line.startsWith("\t")) {
- continueRule.accept(new FunctionalStringTokenizer(
- line.substring(1), " "), state);
- } else {
- FunctionalStringTokenizer tokenizer =
- new FunctionalStringTokenizer(line, " ");
-
- String nextToken = tokenizer.nextToken();
- if (nextToken.equals("#")) {
- // Do nothing, this is a comment
- } else if (nextToken.equals("pragma")) {
- String token = tokenizer.nextToken();
-
- pragmas.getOrDefault(token, (tokenzer, stat) -> {
- throw new UnknownPragmaException(
- "Unknown pragma " + token);
- }).accept(tokenizer, state);
+ if (inputStream == null) {
+ throw new NullPointerException(
+ "Input stream must not be null");
+ }
+
+ E state;
+
+ try (Scanner inputSource = new Scanner(inputStream)) {
+
+ state = initialState;
+ boolean ruleOpen = false;
+ while (inputSource.hasNextLine()) {
+ String line = inputSource.nextLine();
+
+ if (line.equals("")) {
+ if (ruleOpen == false) {
+ // Ignore blank line without an open rule
+ }
+
+ if (endRule == null) {
+ // Nothing happens on rule end
+ } else {
+ endRule.accept(state);
+ }
+
+ continue;
+ } else if (line.startsWith("\t")) {
+ if (ruleOpen == false) {
+ throw new InputMismatchException(
+ "Can't continue rule with no rule currently open");
+ }
+
+ if (continueRule == null) {
+ throw new InputMismatchException(
+ "Attempted to continue rule with rule continuation disabled."
+ + " Check for extraneous tabs");
+ }
+
+ continueRule.accept(new FunctionalStringTokenizer(
+ line.substring(1), " "), state);
} else {
- startRule.accept(tokenizer,
- new Pair<>(nextToken, state));
+ FunctionalStringTokenizer tokenizer =
+ new FunctionalStringTokenizer(line, " ");
+
+ String nextToken = tokenizer.nextToken();
+
+ if (nextToken.equals("#") || nextToken.equals("//")) {
+ // Do nothing, this is a comment
+ } else if (nextToken.equals("pragma")) {
+ String token = tokenizer.nextToken();
+
+ pragmas.getOrDefault(token, (tokenzer, stat) -> {
+ throw new UnknownPragmaException(
+ "Unknown pragma " + token);
+ }).accept(tokenizer, state);
+ } else {
+ if (ruleOpen == true) {
+ throw new InputMismatchException(
+ "Attempted to open a"
+ + " rule with a rule already open. Make sure rules are"
+ + " separated by blank lines");
+ }
+
+ startRule.accept(tokenizer,
+ new Pair<>(nextToken, state));
+ ruleOpen = true;
+ }
}
}
}
- inputSource.close();
-
return state;
}
@@ -139,6 +182,11 @@ public class RuleBasedConfigReader<E> {
*/
public void setStartRule(
BiConsumer<FunctionalStringTokenizer, Pair<String, E>> startRule) {
+ if (startRule == null) {
+ throw new NullPointerException(
+ "Action on rule start must be non-null");
+ }
+
this.startRule = startRule;
}
}
diff --git a/BJC-Utils2/src/main/java/bjc/utils/parserutils/ShuntingYard.java b/BJC-Utils2/src/main/java/bjc/utils/parserutils/ShuntingYard.java
index 0ca1879..1e5d487 100644
--- a/BJC-Utils2/src/main/java/bjc/utils/parserutils/ShuntingYard.java
+++ b/BJC-Utils2/src/main/java/bjc/utils/parserutils/ShuntingYard.java
@@ -19,42 +19,6 @@ import bjc.utils.funcutils.StringUtils;
* The type of tokens being shunted
*/
public class ShuntingYard<E> {
-
- private final class TokenShunter implements Consumer<String> {
- private FunctionalList<E> output;
- private Deque<String> stack;
- private Function<String, E> transform;
-
- public TokenShunter(FunctionalList<E> outpt, Deque<String> stack,
- Function<String, E> transform) {
- this.output = outpt;
- this.stack = stack;
- this.transform = transform;
- }
-
- @Override
- public void accept(String token) {
- if (operators.containsKey(token)) {
- while (!stack.isEmpty()
- && isHigherPrec(token, stack.peek())) {
- output.add(transform.apply(stack.pop()));
- }
-
- stack.push(token);
- } else if (StringUtils.containsOnly(token, "\\(")) {
- stack.push(token);
- } else if (StringUtils.containsOnly(token, "\\)")) {
- while (stack.peek().equals(token.replace(')', '('))) {
- output.add(transform.apply(stack.pop()));
- }
-
- stack.pop();
- } else {
- output.add(transform.apply(token));
- }
- }
- }
-
/**
* A enum representing the fundamental operator types
*
@@ -96,6 +60,58 @@ public class ShuntingYard<E> {
}
}
+ private final class TokenShunter implements Consumer<String> {
+ private FunctionalList<E> output;
+ private Deque<String> stack;
+ private Function<String, E> transform;
+
+ public TokenShunter(FunctionalList<E> outpt, Deque<String> stack,
+ Function<String, E> transform) {
+ this.output = outpt;
+ this.stack = stack;
+ this.transform = transform;
+ }
+
+ @Override
+ public void accept(String token) {
+ if (operators.containsKey(token)) {
+ while (!stack.isEmpty()
+ && isHigherPrec(token, stack.peek())) {
+ output.add(transform.apply(stack.pop()));
+ }
+
+ stack.push(token);
+ } else if (StringUtils.containsOnly(token, "\\(")) {
+ // Handle groups of parentheses for multiple nesting levels
+ stack.push(token);
+ } else if (StringUtils.containsOnly(token, "\\)")) {
+ // Handle groups of parentheses for multiple nesting levels
+ while (stack.peek().equals(token.replace(')', '('))) {
+ output.add(transform.apply(stack.pop()));
+ }
+
+ stack.pop();
+ } else {
+ output.add(transform.apply(token));
+ }
+ }
+ }
+
+ private static boolean shouldConfigureBasicOperators = true;
+
+ /**
+ * Set whether the shunter should configure the four basic math
+ * operators
+ *
+ * @param configureBasicOperators
+ * Whether or not the four basic math operators should be
+ * configured
+ */
+ public static void setBasicOperatorConfiguration(
+ boolean configureBasicOperators) {
+ shouldConfigureBasicOperators = configureBasicOperators;
+ }
+
/**
* Holds all the shuntable operations
*/
@@ -107,10 +123,12 @@ public class ShuntingYard<E> {
public ShuntingYard() {
operators = new HashMap<>();
- operators.put("+", Operator.ADD);
- operators.put("-", Operator.SUBTRACT);
- operators.put("*", Operator.MULTIPLY);
- operators.put("/", Operator.DIVIDE);
+ if (shouldConfigureBasicOperators) {
+ operators.put("+", Operator.ADD);
+ operators.put("-", Operator.SUBTRACT);
+ operators.put("*", Operator.MULTIPLY);
+ operators.put("/", Operator.DIVIDE);
+ }
}
/**
@@ -129,13 +147,17 @@ public class ShuntingYard<E> {
/**
* Add an operator to the list of shuntable operators
*
- * @param token
+ * @param operatorToken
* The token representing the operator
* @param precedence
* The precedence of the operator
*/
- public void addOp(String token, IPrecedent precedence) {
- operators.put(token, precedence);
+ public void addOp(String operatorToken, IPrecedent precedence) {
+ if (operatorToken == null) {
+ throw new NullPointerException("Operator must not be null");
+ }
+
+ operators.put(operatorToken, precedence);
}
private boolean isHigherPrec(String operator, String rightOperator) {
@@ -155,7 +177,14 @@ public class ShuntingYard<E> {
*/
public FunctionalList<E> postfix(FunctionalList<String> input,
Function<String, E> tokenTransformer) {
+ if (input == null) {
+ throw new NullPointerException("Input must not be null");
+ } else if (tokenTransformer == null) {
+ throw new NullPointerException("Transformer must not be null");
+ }
+
FunctionalList<E> output = new FunctionalList<>();
+
Deque<String> stack = new LinkedList<>();
input.forEach(new TokenShunter(output, stack, tokenTransformer));
@@ -174,6 +203,10 @@ public class ShuntingYard<E> {
* The token representing the operator
*/
public void removeOp(String tok) {
+ if (tok == null) {
+ throw new NullPointerException("Token must not be null");
+ }
+
operators.remove(tok);
}
} \ No newline at end of file
diff --git a/BJC-Utils2/src/main/java/bjc/utils/parserutils/TreeConstructor.java b/BJC-Utils2/src/main/java/bjc/utils/parserutils/TreeConstructor.java
index 42d5a9d..28ca1e3 100644
--- a/BJC-Utils2/src/main/java/bjc/utils/parserutils/TreeConstructor.java
+++ b/BJC-Utils2/src/main/java/bjc/utils/parserutils/TreeConstructor.java
@@ -2,10 +2,12 @@ package bjc.utils.parserutils;
import java.util.Deque;
import java.util.LinkedList;
+import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import bjc.utils.data.GenHolder;
+import bjc.utils.data.IPair;
import bjc.utils.data.Pair;
import bjc.utils.funcdata.FunctionalList;
@@ -16,6 +18,89 @@ import bjc.utils.funcdata.FunctionalList;
*
*/
public class TreeConstructor {
+ private static final class TokenTransformer<T> implements Consumer<T> {
+ private final class OperatorHandler implements
+ Function<IPair<Deque<AST<T>>, AST<T>>, IPair<Deque<AST<T>>, AST<T>>> {
+ private T element;
+
+ public OperatorHandler(T element) {
+ this.element = element;
+ }
+
+ @Override
+ public IPair<Deque<AST<T>>, AST<T>>
+ apply(IPair<Deque<AST<T>>, AST<T>> pair) {
+ Deque<AST<T>> queuedASTs =
+ pair.merge((queue, currentAST) -> queue);
+
+ AST<T> mergedAST = pair.merge((queue, currentAST) -> {
+ AST<T> newAST;
+
+ if (isSpecialOperator.test(element)) {
+ newAST = handleSpecialOperator.apply(queue);
+ } else {
+ if (queue.size() < 2) {
+ throw new IllegalStateException(
+ "Attempted to parse binary operator without enough operands");
+ }
+
+ AST<T> rightAST = queue.pop();
+ AST<T> leftAST = queue.pop();
+
+ newAST = new AST<>(element, leftAST, rightAST);
+ }
+
+ queue.push(newAST);
+ return newAST;
+ });
+
+ Pair<Deque<AST<T>>, AST<T>> newPair =
+ new Pair<>(queuedASTs, mergedAST);
+
+ return newPair;
+ }
+ }
+
+ private GenHolder<IPair<Deque<AST<T>>, AST<T>>> initialState;
+ private Predicate<T> operatorPredicate;
+ private Predicate<T> isSpecialOperator;
+ private Function<Deque<AST<T>>, AST<T>> handleSpecialOperator;
+
+ public TokenTransformer(
+ GenHolder<IPair<Deque<AST<T>>, AST<T>>> initialState,
+ Predicate<T> operatorPredicate,
+ Predicate<T> isSpecialOperator,
+ Function<Deque<AST<T>>, AST<T>> handleSpecialOperator) {
+ this.initialState = initialState;
+ this.operatorPredicate = operatorPredicate;
+ this.isSpecialOperator = isSpecialOperator;
+ this.handleSpecialOperator = handleSpecialOperator;
+ }
+
+ @Override
+ public void accept(T element) {
+ if (operatorPredicate.test(element)) {
+ initialState.transform(new OperatorHandler(element));
+ } else {
+ AST<T> newAST = new AST<>(element);
+
+ initialState.doWith((pair) -> {
+ pair.doWith((queue, currentAST) -> {
+ queue.push(newAST);
+ });
+ });
+
+ initialState.transform((pair) -> {
+ return pair.apply((Deque<AST<T>> queue) -> {
+ return queue;
+ }, (AST<T> currentAST) -> {
+ return newAST;
+ });
+ });
+ }
+ }
+ }
+
/**
* Construct a tree from a list of tokens in postfix notation
*
@@ -29,10 +114,6 @@ public class TreeConstructor {
* The predicate to use to determine if something is a
* operator
* @return A AST from the expression
- *
- * @deprecated Use
- * {@link TreeConstructor#constructTree(FunctionalList, Predicate, Predicate, Function)}
- * instead
*/
public static <T> AST<T> constructTree(FunctionalList<T> tokens,
Predicate<T> operatorPredicate) {
@@ -43,7 +124,8 @@ public class TreeConstructor {
/**
* Construct a tree from a list of tokens in postfix notation
*
- * Only binary operators are accepted.
+ * Only binary operators are accepted by default. Use the last two
+ * parameters to handle non-binary operators
*
* @param <T>
* The elements of the parse tree
@@ -66,52 +148,25 @@ public class TreeConstructor {
public static <T> AST<T> constructTree(FunctionalList<T> tokens,
Predicate<T> operatorPredicate, Predicate<T> isSpecialOperator,
Function<Deque<AST<T>>, AST<T>> handleSpecialOperator) {
- GenHolder<Pair<Deque<AST<T>>, AST<T>>> initialState =
- new GenHolder<>(new Pair<>(new LinkedList<>(), null));
-
- tokens.forEach((element) -> {
- if (operatorPredicate.test(element)) {
- initialState.transform((pair) -> {
- Deque<AST<T>> queuedASTs =
- pair.merge((queue, currentAST) -> queue);
-
- AST<T> mergedAST = pair.merge((queue, currentAST) -> {
- AST<T> newAST;
-
- if (isSpecialOperator.test(element)) {
- newAST = handleSpecialOperator.apply(queue);
- } else {
- AST<T> rightAST = queue.pop();
- AST<T> leftAST = queue.pop();
-
- newAST = new AST<>(element, leftAST, rightAST);
- }
+ if (tokens == null) {
+ throw new NullPointerException("Tokens must not be null");
+ } else if (operatorPredicate == null) {
+ throw new NullPointerException(
+ "Operator predicate must not be null");
+ } else if (isSpecialOperator == null) {
+ throw new NullPointerException(
+ "Special operator determiner must not be null");
+ }
- queue.push(newAST);
- return newAST;
- });
-
- Pair<Deque<AST<T>>, AST<T>> newPair =
- new Pair<>(queuedASTs, mergedAST);
-
- return newPair;
- });
- } else {
- AST<T> newAST = new AST<>(element);
+ GenHolder<IPair<Deque<AST<T>>, AST<T>>> initialState =
+ new GenHolder<>(new Pair<>(new LinkedList<>(), null));
- initialState.doWith(
- (pair) -> pair.doWith((queue, currentAST) -> {
- queue.push(newAST);
- }));
+ tokens.forEach(
+ new TokenTransformer<>(initialState, operatorPredicate,
+ isSpecialOperator, handleSpecialOperator));
- initialState.transform((pair) -> {
- return (Pair<Deque<AST<T>>, AST<T>>) pair.apply(
- (queue) -> queue, (currentAST) -> newAST);
- });
- }
+ return initialState.unwrap((pair) -> {
+ return pair.merge((queue, currentAST) -> currentAST);
});
-
- return initialState.unwrap(
- (pair) -> pair.merge((queue, currentAST) -> currentAST));
}
}