summary refs log tree commit diff
path: root/BJC-Utils2/src/main/java/bjc/utils/parserutils
diff options
context:
space:
mode:
Diffstat (limited to 'BJC-Utils2/src/main/java/bjc/utils/parserutils')
-rw-r--r--  BJC-Utils2/src/main/java/bjc/utils/parserutils/RuleBasedConfigReader.java   55
-rw-r--r--  BJC-Utils2/src/main/java/bjc/utils/parserutils/RuleBasedReaderPragmas.java  17
-rw-r--r--  BJC-Utils2/src/main/java/bjc/utils/parserutils/ShuntingYard.java            46
-rw-r--r--  BJC-Utils2/src/main/java/bjc/utils/parserutils/TokenTransformer.java        16
-rw-r--r--  BJC-Utils2/src/main/java/bjc/utils/parserutils/TreeConstructor.java          5
5 files changed, 106 insertions, 33 deletions
diff --git a/BJC-Utils2/src/main/java/bjc/utils/parserutils/RuleBasedConfigReader.java b/BJC-Utils2/src/main/java/bjc/utils/parserutils/RuleBasedConfigReader.java
index 1ee19f5..9da76f8 100644
--- a/BJC-Utils2/src/main/java/bjc/utils/parserutils/RuleBasedConfigReader.java
+++ b/BJC-Utils2/src/main/java/bjc/utils/parserutils/RuleBasedConfigReader.java
@@ -26,13 +26,21 @@ import bjc.utils.funcdata.IMap;
*
*/
public class RuleBasedConfigReader<E> {
+ // Function to execute when starting a rule
+ // Takes the tokenizer, and a pair of the read token and application state
private BiConsumer<FunctionalStringTokenizer, IPair<String,
E>> startRule;
+ // Function to use when continuing a rule
+ // Takes a tokenizer and application state
private BiConsumer<FunctionalStringTokenizer,
E> continueRule;
+ // Function to use when ending a rule
+ // Takes an application state
private Consumer<
E> endRule;
+ // Map of pragma names to pragma actions
+ // Pragma actions are functions taking a tokenizer and application state
private IMap<String, BiConsumer<FunctionalStringTokenizer,
E>> pragmas;
@@ -79,36 +87,37 @@ public class RuleBasedConfigReader<E> {
}
private void continueRule(E state, boolean ruleOpen, String line) {
+ // Make sure our input is correct
if (ruleOpen == false) {
throw new InputMismatchException(
"Can't continue rule with no rule currently open");
- }
-
- if (continueRule == null) {
+ } else if (continueRule == null) {
throw new InputMismatchException(
"Attempted to continue rule with rule continuation disabled."
+ " Check for extraneous tabs");
}
+ // Accept the rule
continueRule.accept(
new FunctionalStringTokenizer(line.substring(1), " "),
state);
- }
+ }
private boolean endRule(E state, boolean ruleOpen) {
+ // Ignore blank line without an open rule
if (ruleOpen == false) {
- // Ignore blank line without an open rule
+ // Do nothing
+ return false;
} else {
- if (endRule == null) {
- // Nothing happens on rule end
- ruleOpen = false;
- } else {
+ // Nothing happens on rule end
+ if (endRule != null) {
+ // Process the rule ending
endRule.accept(state);
}
- ruleOpen = false;
+ // Return a closed rule
+ return false;
}
- return ruleOpen;
}
/**
@@ -126,30 +135,35 @@ public class RuleBasedConfigReader<E> {
"Input stream must not be null");
}
- E state;
+ // Application state: We're giving this back later
+ E state = initialState;
+ // Prepare our input source
try (Scanner inputSource = new Scanner(inputStream, "\n")) {
-
- state = initialState;
+ // This is true when a rule's open
IHolder<Boolean> ruleOpen = new Identity<>(false);
+ // Do something for every line of the file
inputSource.forEachRemaining((line) -> {
+ // Skip comment lines
if (line.startsWith("#") || line.startsWith("//")) {
// It's a comment
return;
} else if (line.equals("")) {
+ // End the rule
ruleOpen.replace(endRule(state, ruleOpen.getValue()));
-
- return;
} else if (line.startsWith("\t")) {
+ // Continue the rule
continueRule(state, ruleOpen.getValue(), line);
} else {
+ // Open a rule
ruleOpen.replace(
startRule(state, ruleOpen.getValue(), line));
}
});
}
+ // Return the state that the user has created
return state;
}
@@ -192,28 +206,37 @@ public class RuleBasedConfigReader<E> {
}
private boolean startRule(E state, boolean ruleOpen, String line) {
+ // Create the line tokenizer
FunctionalStringTokenizer tokenizer = new FunctionalStringTokenizer(
line, " ");
+ // Get the initial token
String nextToken = tokenizer.nextToken();
+ // Handle pragmas
if (nextToken.equals("pragma")) {
+ // Get the pragma name
String token = tokenizer.nextToken();
+ // Handle pragmas
pragmas.getOrDefault(token, (tokenzer, stat) -> {
throw new UnknownPragmaException(
"Unknown pragma " + token);
}).accept(tokenizer, state);
} else {
+ // Make sure input is correct
if (ruleOpen == true) {
throw new InputMismatchException("Attempted to open a"
+ " rule with a rule already open. Make sure rules are"
+ " seperated by blank lines");
}
+ // Start a rule
startRule.accept(tokenizer, new Pair<>(nextToken, state));
+
ruleOpen = true;
}
+
return ruleOpen;
}
}
diff --git a/BJC-Utils2/src/main/java/bjc/utils/parserutils/RuleBasedReaderPragmas.java b/BJC-Utils2/src/main/java/bjc/utils/parserutils/RuleBasedReaderPragmas.java
index 392a6c8..eef55a8 100644
--- a/BJC-Utils2/src/main/java/bjc/utils/parserutils/RuleBasedReaderPragmas.java
+++ b/BJC-Utils2/src/main/java/bjc/utils/parserutils/RuleBasedReaderPragmas.java
@@ -29,19 +29,23 @@ public class RuleBasedReaderPragmas {
StateType> buildInteger(String name,
BiConsumer<Integer, StateType> consumer) {
return (tokenizer, state) -> {
+ // Check our input is correct
if (!tokenizer.hasMoreTokens()) {
throw new PragmaFormatException("Pragma " + name
+ " requires one integer argument");
}
+ // Read the argument
String token = tokenizer.nextToken();
try {
+ // Run the pragma
consumer.accept(Integer.parseInt(token), state);
} catch (NumberFormatException nfex) {
+ // Tell the user their argument isn't correct
PragmaFormatException pfex = new PragmaFormatException(
"Argument " + token
- + " to version pragma isn't a valid integer. "
+ + " to " + name + " pragma isn't a valid integer. "
+ "This pragma requires a integer argument");
pfex.initCause(nfex);
@@ -67,14 +71,17 @@ public class RuleBasedReaderPragmas {
StateType> buildStringCollapser(String name,
BiConsumer<String, StateType> consumer) {
return (tokenizer, state) -> {
+ // Check our input
if (!tokenizer.hasMoreTokens()) {
throw new PragmaFormatException("Pragma " + name
- + " requires one string argument");
+ + " requires one or more string arguments");
}
- consumer.accept(ListUtils.collapseTokens(
- tokenizer.toList((strang) -> strang)), state);
+ // Build our argument
+ String collapsed = ListUtils.collapseTokens(tokenizer.toList());
+
+ // Run the pragma
+ consumer.accept(collapsed, state);
};
}
-
}
diff --git a/BJC-Utils2/src/main/java/bjc/utils/parserutils/ShuntingYard.java b/BJC-Utils2/src/main/java/bjc/utils/parserutils/ShuntingYard.java
index c1cd5c7..99e3e60 100644
--- a/BJC-Utils2/src/main/java/bjc/utils/parserutils/ShuntingYard.java
+++ b/BJC-Utils2/src/main/java/bjc/utils/parserutils/ShuntingYard.java
@@ -76,12 +76,15 @@ public class ShuntingYard<TokenType> {
@Override
public void accept(String token) {
+ // Handle operators
if (operators.containsKey(token)) {
+ // Pop operators while there isn't a higher precedence one
while (!stack.isEmpty()
&& isHigherPrec(token, stack.peek())) {
output.add(transform.apply(stack.pop()));
}
+ // Put this operator onto the stack
stack.push(token);
} else if (StringUtils.containsOnly(token, "\\(")) {
// Handle groups of parenthesis for multiple nesting levels
@@ -90,12 +93,15 @@ public class ShuntingYard<TokenType> {
// Handle groups of parenthesis for multiple nesting levels
String swappedToken = token.replace(')', '(');
+ // Remove tokens up to a matching parenthesis
while (!stack.peek().equals(swappedToken)) {
output.add(transform.apply(stack.pop()));
}
+ // Remove the parenthesis
stack.pop();
} else {
+ // Just add the transformed token
output.add(transform.apply(token));
}
}
@@ -115,6 +121,7 @@ public class ShuntingYard<TokenType> {
public ShuntingYard(boolean configureBasics) {
operators = new FunctionalMap<>();
+ // Add basic operators if we're configured to do so
if (configureBasics) {
operators.put("+", Operator.ADD);
operators.put("-", Operator.SUBTRACT);
@@ -132,8 +139,10 @@ public class ShuntingYard<TokenType> {
* The precedence of the operator to add
*/
public void addOp(String operatorToken, int precedence) {
- this.addOp(operatorToken,
- IPrecedent.newSimplePrecedent(precedence));
+ // Create the precedence marker
+ IPrecedent prec = IPrecedent.newSimplePrecedent(precedence);
+
+ this.addOp(operatorToken, prec);
}
/**
@@ -145,24 +154,31 @@ public class ShuntingYard<TokenType> {
* The precedence of the operator
*/
public void addOp(String operatorToken, IPrecedent precedence) {
+ // Complain about trying to add an incorrect operator
if (operatorToken == null) {
throw new NullPointerException("Operator must not be null");
+ } else if(precedence == null) {
+ throw new NullPointerException("Precedence must not be null");
}
+ // Add the operator to the ones we handle
operators.put(operatorToken, precedence);
}
- private boolean isHigherPrec(String leftOperator,
- String rightOperator) {
+ private boolean isHigherPrec(String leftOperator, String rightOperator) {
+ // Check if the right operator exists
boolean operatorExists = operators.containsKey(rightOperator);
+ // If it doesn't, the left is higher precedence.
if (!operatorExists) {
return false;
}
+ // Get the precedence of operators
int rightPrecedence = operators.get(rightOperator).getPrecedence();
int leftPrecedence = operators.get(leftOperator).getPrecedence();
+ // Evaluate what we were asked
return rightPrecedence >= leftPrecedence;
}
@@ -177,20 +193,26 @@ public class ShuntingYard<TokenType> {
*/
public IList<TokenType> postfix(IList<String> input,
Function<String, TokenType> tokenTransformer) {
+ // Check our input
if (input == null) {
throw new NullPointerException("Input must not be null");
} else if (tokenTransformer == null) {
throw new NullPointerException("Transformer must not be null");
}
+ // Here's what we're handing back
IList<TokenType> output = new FunctionalList<>();
+ // The stack to put operators on
Deque<String> stack = new LinkedList<>();
+ // Shunt the tokens
input.forEach(new TokenShunter(output, stack, tokenTransformer));
- stack.forEach(
- (token) -> output.add(tokenTransformer.apply(token)));
+ // Transform any resulting tokens
+ stack.forEach((token) -> {
+ output.add(tokenTransformer.apply(token));
+ });
return output;
}
@@ -199,13 +221,15 @@ public class ShuntingYard<TokenType> {
* Remove an operator from the list of shuntable operators
*
* @param token
- * The token representing the operator
+ * The token representing the operator. If null, remove all
+ * operators
*/
public void removeOp(String token) {
+ // Check if we want to remove all operators
if (token == null) {
- throw new NullPointerException("Token must not be null");
+ operators = new FunctionalMap<>();
+ } else {
+ operators.remove(token);
}
-
- operators.remove(token);
}
-} \ No newline at end of file
+}
diff --git a/BJC-Utils2/src/main/java/bjc/utils/parserutils/TokenTransformer.java b/BJC-Utils2/src/main/java/bjc/utils/parserutils/TokenTransformer.java
index 149cbbf..ff3a6b9 100644
--- a/BJC-Utils2/src/main/java/bjc/utils/parserutils/TokenTransformer.java
+++ b/BJC-Utils2/src/main/java/bjc/utils/parserutils/TokenTransformer.java
@@ -13,6 +13,7 @@ import bjc.utils.data.Pair;
import bjc.utils.data.Tree;
final class TokenTransformer<TokenType> implements Consumer<TokenType> {
+ // Handle operators
private final class OperatorHandler implements UnaryOperator<
IPair<Deque<ITree<TokenType>>, ITree<TokenType>>> {
private TokenType element;
@@ -24,6 +25,7 @@ final class TokenTransformer<TokenType> implements Consumer<TokenType> {
@Override
public IPair<Deque<ITree<TokenType>>, ITree<TokenType>> apply(
IPair<Deque<ITree<TokenType>>, ITree<TokenType>> pair) {
+ // Replace the current AST with the result of handling an operator
return pair.bindLeft((queuedASTs) -> {
return handleOperator(queuedASTs);
});
@@ -32,12 +34,15 @@ final class TokenTransformer<TokenType> implements Consumer<TokenType> {
private IPair<Deque<ITree<TokenType>>,
ITree<TokenType>> handleOperator(
Deque<ITree<TokenType>> queuedASTs) {
+ // The AST we're going to hand back
ITree<TokenType> newAST;
+ // Handle special operators
if (isSpecialOperator.test(element)) {
newAST = handleSpecialOperator.apply(element)
.apply(queuedASTs);
} else {
+ // Error if we don't have enough for a binary operator
if (queuedASTs.size() < 2) {
throw new IllegalStateException(
"Attempted to parse binary operator without enough operands.\n"
@@ -46,14 +51,18 @@ final class TokenTransformer<TokenType> implements Consumer<TokenType> {
+ queuedASTs.peek());
}
+ // Grab the two operands
ITree<TokenType> rightAST = queuedASTs.pop();
ITree<TokenType> leftAST = queuedASTs.pop();
+ // Create a new AST
newAST = new Tree<>(element, leftAST, rightAST);
}
+ // Stick it onto the stack
queuedASTs.push(newAST);
+ // Hand back the state
return new Pair<>(queuedASTs, newAST);
}
}
@@ -69,6 +78,7 @@ final class TokenTransformer<TokenType> implements Consumer<TokenType> {
private Function<TokenType, Function<Deque<ITree<TokenType>>,
ITree<TokenType>>> handleSpecialOperator;
+ // Create a new transformer
public TokenTransformer(
IHolder<IPair<Deque<ITree<TokenType>>,
ITree<TokenType>>> initialState,
@@ -84,12 +94,16 @@ final class TokenTransformer<TokenType> implements Consumer<TokenType> {
@Override
public void accept(TokenType element) {
+ // Handle operators
if (operatorPredicate.test(element)) {
initialState.transform(new OperatorHandler(element));
} else {
ITree<TokenType> newAST = new Tree<>(element);
+ // Insert the new tree into the AST
initialState.transform((pair) -> {
+ // Transform the pair, ignoring the current AST in favor of the
+ // one consisting of the current element
return pair.bindLeft((queue) -> {
queue.push(newAST);
@@ -98,4 +112,4 @@ final class TokenTransformer<TokenType> implements Consumer<TokenType> {
});
}
}
-} \ No newline at end of file
+}
diff --git a/BJC-Utils2/src/main/java/bjc/utils/parserutils/TreeConstructor.java b/BJC-Utils2/src/main/java/bjc/utils/parserutils/TreeConstructor.java
index 2ddde9d..0b61363 100644
--- a/BJC-Utils2/src/main/java/bjc/utils/parserutils/TreeConstructor.java
+++ b/BJC-Utils2/src/main/java/bjc/utils/parserutils/TreeConstructor.java
@@ -36,6 +36,7 @@ public class TreeConstructor {
public static <TokenType> ITree<TokenType> constructTree(
IList<TokenType> tokens,
Predicate<TokenType> operatorPredicate) {
+ // Construct a tree with no special operators
return constructTree(tokens, operatorPredicate, (op) -> false,
null);
}
@@ -70,6 +71,7 @@ public class TreeConstructor {
Predicate<TokenType> isSpecialOperator,
Function<TokenType, Function<Deque<ITree<TokenType>>,
ITree<TokenType>>> handleSpecialOperator) {
+ // Make sure our parameters are valid
if (tokens == null) {
throw new NullPointerException("Tokens must not be null");
} else if (operatorPredicate == null) {
@@ -80,14 +82,17 @@ public class TreeConstructor {
"Special operator determiner must not be null");
}
+ // Here is the state for the tree construction
IHolder<IPair<Deque<ITree<TokenType>>,
ITree<TokenType>>> initialState = new Identity<>(
new Pair<>(new LinkedList<>(), null));
+ // Transform each of the tokens
tokens.forEach(
new TokenTransformer<>(initialState, operatorPredicate,
isSpecialOperator, handleSpecialOperator));
+ // Grab the tree from the state
return initialState.unwrap((pair) -> {
return pair.getRight();
});