summaryrefslogtreecommitdiff
path: root/dice-lang/src/bjc/dicelang/DiceLangEngine.java
blob: d37332d0783563def4b0de0e86c9b122f9dc1452 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
package bjc.dicelang;

import bjc.dicelang.scl.StreamEngine;
import bjc.utils.data.ITree;
import bjc.utils.funcdata.FunctionalList;
import bjc.utils.funcdata.FunctionalMap;
import bjc.utils.funcdata.FunctionalStringTokenizer;
import bjc.utils.funcdata.IList;
import bjc.utils.funcdata.IMap;
import bjc.utils.funcutils.ListUtils;
import bjc.utils.parserutils.TokenUtils;
import bjc.utils.parserutils.splitter.ConfigurableTokenSplitter;

import java.util.Deque;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static bjc.dicelang.Errors.ErrorKey.EK_ENG_NOCLOSING;
import static bjc.dicelang.Errors.ErrorKey.EK_ENG_NOOPENING;
import static bjc.dicelang.Token.Type.*;

/**
 * Implements the orchestration necessary for processing DiceLang commands.
 *
 * <p>Pipeline: preprocess (stream commands, line defines, string-literal
 * removal, token expansion) -&gt; lex (token defines, tokenizing, preshunting,
 * shunting) -&gt; parse -&gt; evaluate.</p>
 *
 * @author Ben Culkin
 */
public class DiceLangEngine {
	/*
	 * The random fields that are package private instead of private-private
	 * are for the benefit of the tweaker, so that it can mess around with
	 * them.
	 */

	/*
	 * Splits string tokens around operators and grouping characters;
	 * delimiters are registered and compiled in the constructor.
	 */
	ConfigurableTokenSplitter opExpander;

	/*
	 * Next ID for generated names (string literals and non-expanding
	 * tokens); starts at 1 and only increases.
	 */
	int nextLiteral;

	/*
	 * Debug indicator; when true, every pipeline stage prints its output.
	 */
	private boolean debugMode;
	/*
	 * Should we do shunting? (true means the input is taken as already
	 * postfix, so the shunting stage is skipped.)
	 */
	private boolean postfixMode;
	/*
	 * Should we reverse the token stream? (prefix input is handled by
	 * reversing the stream and mirroring directional tokens.)
	 */
	private boolean prefixMode;
	/*
	 * Should we do step-by-step evaluation?
	 */
	private boolean stepEval;

	/*
	 * Shunter for token shunting (infix to postfix).
	 */
	Shunter shunt;
	/*
	 * Tokenizer for tokenizing.
	 *
	 * NOTE(review): the field name is a typo for "tokenizer", but it is
	 * package-visible, so renaming it could break other classes.
	 */
	Tokenizer tokenzer;
	/*
	 * Parser for tree construction.
	 */
	Parser parsr;
	/*
	 * Evaluator for evaluating.
	 */
	Evaluator eval;

	/*
	 * Tables for various things.
	 */
	/**
	 * The symbol table.
	 */
	public final IMap<Integer, String> symTable;

	/* Interned string literals keyed by numeric ID (see addStringLiteral). */
	private IMap<Integer, String>	stringLits;
	/* String literals found during preprocessing, keyed by generated name. */
	private IMap<String, String>	stringLiterals;

	/*
	 * Lists of defns (applied to whole lines and to individual tokens,
	 * respectively).
	 */
	private IList<Define>	lineDefns;
	private IList<Define>	tokenDefns;

	/*
	 * Are defns sorted by priority? Cleared whenever a defn is added.
	 */
	private boolean defnsSorted;

	/*
	 * Stream engine for processing streams.
	 */
	StreamEngine streamEng;

	/**
	 * Create a new DiceLang engine.
	 *
	 * <p>The engine starts in debug mode, with infix input (postfix and
	 * prefix modes off) and step-eval off.</p>
	 */
	public DiceLangEngine() {
		/*
		 * Initialize defns. An empty list counts as sorted.
		 */
		lineDefns = new FunctionalList<>();
		tokenDefns = new FunctionalList<>();
		defnsSorted = true;

		/*
		 * Initialize tables.
		 */
		symTable = new FunctionalMap<>();
		stringLits = new FunctionalMap<>();
		stringLiterals = new FunctionalMap<>();

		/*
		 * Initialize operator expansion list. Grouping characters are
		 * paired (multi) delimiters; operators are simple delimiters.
		 */
		opExpander = new ConfigurableTokenSplitter(true);

		opExpander.addMultiDelimiters("(", ")");
		opExpander.addMultiDelimiters("[", "]");
		opExpander.addMultiDelimiters("{", "}");

		/*
		 * NOTE(review): multi-character operators (".+.", ".*.") are
		 * registered before the single-character ones they contain —
		 * presumably so the longer form matches first; confirm against
		 * ConfigurableTokenSplitter before reordering.
		 */
		opExpander.addSimpleDelimiters(":=");
		opExpander.addSimpleDelimiters("=>");
		opExpander.addSimpleDelimiters("//");
		opExpander.addSimpleDelimiters(".+.");
		opExpander.addSimpleDelimiters(".*.");
		opExpander.addSimpleDelimiters("+");
		opExpander.addSimpleDelimiters("-");
		opExpander.addSimpleDelimiters("*");
		opExpander.addSimpleDelimiters("/");
		opExpander.compile();

		/*
		 * Initialize literal IDs; generated names start at 1.
		 */
		nextLiteral = 1;

		/*
		 * Initial mode settings: debug on, everything else off.
		 */
		debugMode = true;
		postfixMode = false;
		prefixMode = false;
		stepEval = false;

		/*
		 * Create components; several hold a back-reference to this
		 * engine.
		 */
		streamEng = new StreamEngine(this);
		shunt = new Shunter();
		tokenzer = new Tokenizer(this);
		parsr = new Parser();
		eval = new Evaluator(this);
	}

	/**
	 * Sort the registered defines by their priority.
	 */
	public void sortDefns() {
		/* Natural (priority) ordering for both define lists. */
		tokenDefns.sort(null);
		lineDefns.sort(null);

		/* Mark the ordering as valid again. */
		defnsSorted = true;
	}

	/**
	 * Register a define that is applied to whole input lines.
	 *
	 * @param dfn
	 *                The define to register.
	 */
	public void addLineDefine(Define dfn) {
		/* Adding a define invalidates the priority ordering. */
		defnsSorted = false;

		lineDefns.add(dfn);
	}

	/**
	 * Register a define that is applied to individual tokens.
	 *
	 * @param dfn
	 *                The define to register.
	 */
	public void addTokenDefine(Define dfn) {
		/* Adding a define invalidates the priority ordering. */
		defnsSorted = false;

		tokenDefns.add(dfn);
	}

	/**
	 * Toggle debug mode.
	 *
	 * @return The state of debug mode after the toggle.
	 */
	public boolean toggleDebug() {
		final boolean flipped = !debugMode;

		debugMode = flipped;

		return flipped;
	}

	/**
	 * Toggle postfix mode.
	 *
	 * @return The state of postfix mode after the toggle.
	 */
	public boolean togglePostfix() {
		/* XOR with true flips the flag. */
		postfixMode ^= true;

		return postfixMode;
	}

	/**
	 * Toggle prefix mode.
	 *
	 * @return The state of prefix mode after the toggle.
	 */
	public boolean togglePrefix() {
		final boolean flipped = !prefixMode;

		prefixMode = flipped;

		return flipped;
	}

	/**
	 * Toggle step-eval mode.
	 *
	 * @return The state of step-eval mode after the toggle.
	 */
	public boolean toggleStepEval() {
		/* XOR with true flips the flag. */
		stepEval ^= true;

		return stepEval;
	}

	/*
	 * Matches double-angle-bracketed (non-expanding) tokens, e.g.
	 * <<foo bar>>; group 1 captures the bracketed contents.
	 *
	 * static final so the regex is compiled once per class rather than
	 * once per engine instance (Pattern instances are immutable and
	 * thread-safe, so sharing is fine).
	 */
	private static final Pattern nonExpandPattern = Pattern.compile("<<([^\\>]*(?:\\>(?:[^\\>])*)*)>>");

	/**
	 * Run a command to completion.
	 *
	 * @param command
	 *                The command to run
	 *
	 * @return Whether or not the command ran successfully
	 */
	public boolean runCommand(String command) {
		/* Stage 1: preprocess the raw text into string tokens. */
		final IList<String> rawTokens = preprocessCommand(command);

		if(rawTokens == null) return false;

		/* Stage 2: lex the string tokens into Token objects. */
		final IList<Token> tokens = lexTokens(rawTokens);

		if(tokens == null) return false;

		/* Stage 3: parse the tokens into a forest of ASTs. */
		final IList<ITree<Node>> forest = new FunctionalList<>();

		if(!parsr.parseTokens(tokens, forest)) return false;

		/* Stage 4: evaluate every tree in the forest. */
		evaluateForest(forest);

		return true;
	}

	/*
	 * Lex string tokens into Token objects.
	 *
	 * Stages: apply the token defines to each string token, lex it,
	 * remove/shunt preshunt-marked groups, shunt the stream (infix mode)
	 * or reverse it (prefix mode), then flatten token groups back into
	 * the stream.
	 *
	 * Returns the ready token list, or null if any stage failed.
	 */
	private IList<Token> lexTokens(IList<String> preprocessedTokens) {
		IList<Token> lexedTokens = new FunctionalList<>();

		for(String token : preprocessedTokens) {
			String newTok = token;

			/*
			 * Apply token defns
			 */
			for(Define dfn : tokenDefns.toIterable()) {
				newTok = dfn.apply(newTok);
			}

			/*
			 * Lex the token.
			 *
			 * BUGFIX: lex the define-expanded token (newTok)
			 * instead of the original string; previously the
			 * token defines were applied and their result was
			 * silently discarded.
			 */
			Token tk = tokenzer.lexToken(newTok, stringLiterals);

			if(tk == null) {
				/*
				 * Ignore blank tokens
				 */
				continue;
			} else if(tk == Token.NIL_TOKEN) {
				/*
				 * Fail on bad tokens
				 */
				return null;
			} else {
				lexedTokens.add(tk);
			}
		}

		if(debugMode) {
			System.out.printf("\tCommand after tokenization: %s\n", lexedTokens.toString());
		}

		/*
		 * Preshunt preshunt-marked groups of tokens
		 */
		IList<Token> shuntedTokens = lexedTokens;
		IList<Token> preparedTokens = new FunctionalList<>();

		boolean succ = removePreshuntTokens(lexedTokens, preparedTokens);

		if(!succ) return null;

		if(debugMode && !postfixMode) {
			System.out.printf("\tCommand after pre-shunter removal: %s\n", preparedTokens.toString());
		}

		if(!postfixMode && !prefixMode) {
			/*
			 * Shunt the tokens
			 */
			shuntedTokens = new FunctionalList<>();
			succ = shunt.shuntTokens(preparedTokens, shuntedTokens);

			if(!succ) return null;
		} else if(prefixMode) {
			/*
			 * Reverse the stream, mirroring directional tokens
			 * (braces/parens/brackets) as we go.
			 */
			preparedTokens.reverse();
			shuntedTokens = preparedTokens.map(this::reverseToken);
		}
		/*
		 * NOTE(review): in postfix mode shuntedTokens stays bound to
		 * lexedTokens, so the preshunt-removal result is discarded;
		 * confirm whether preparedTokens was intended here.
		 */

		if(debugMode && !postfixMode) {
			System.out.printf("\tCommand after shunting: %s\n", shuntedTokens.toString());
		}

		/*
		 * Expand token groups back into the flat token stream.
		 */
		IList<Token> readyTokens = shuntedTokens.flatMap(tk -> {
			if(tk.type == Token.Type.TOKGROUP)
				return tk.tokenValues;
			else if(tk.type == Token.Type.TAGOP || tk.type == Token.Type.TAGOPR)
				return tk.tokenValues;
			else
				return new FunctionalList<>(tk);
		});

		if(debugMode && !postfixMode) {
			System.out.printf("\tCommand after re-preshunting: %s\n", readyTokens.toString());
		}

		return readyTokens;
	}

	/*
	 * Mirror an orientation-sensitive token.
	 *
	 * Opening braces/parens/brackets become their closing counterparts
	 * and vice versa; every other token is returned unchanged. Used when
	 * reversing a stream for prefix mode.
	 */
	private Token reverseToken(Token tk) {
		final Token.Type mirrored;

		switch(tk.type) {
		case OBRACE:
			mirrored = CBRACE;
			break;
		case OPAREN:
			mirrored = CPAREN;
			break;
		case OBRACKET:
			mirrored = CBRACKET;
			break;
		case CBRACE:
			mirrored = OBRACE;
			break;
		case CPAREN:
			mirrored = OPAREN;
			break;
		case CBRACKET:
			mirrored = OBRACKET;
			break;
		default:
			/* Not directional; hand it back untouched. */
			return tk;
		}

		return new Token(mirrored, tk.intValue);
	}

	/*
	 * Preprocess a command into a list of string tokens.
	 *
	 * Stages: sort defines if needed, run the command through the stream
	 * engine, apply line defines, pull out double-quoted string literals
	 * (replacing them with generated names), split on whitespace, hide
	 * non-expanding <<...>> tokens, expand operators/grouping characters,
	 * then restore the non-expanding tokens.
	 *
	 * Returns the token list, or null if stream processing failed.
	 */
	private IList<String> preprocessCommand(String command) {
		/*
		 * Sort the defines if they aren't sorted
		 */
		if(!defnsSorted) {
			sortDefns();
		}

		/*
		 * Run the tokens through the stream engine
		 */
		IList<String> streamToks = new FunctionalList<>();
		boolean succ = streamEng.doStreams(command.split(" "), streamToks);

		if(!succ) return null;

		/*
		 * Apply line defns
		 */
		String newComm = ListUtils.collapseTokens(streamToks, " ");

		if(debugMode) {
			System.out.println("\tCommand after stream commands: " + newComm);
		}

		for(Define dfn : lineDefns.toIterable()) {
			newComm = dfn.apply(newComm);
		}

		if(debugMode) {
			System.out.println("\tCommand after line defines: " + newComm);
		}

		/*
		 * Remove string literals.
		 *
		 * NOTE(review): stringLiterals is never cleared between
		 * commands, so entries accumulate across runs — confirm this
		 * is intended.
		 */
		List<String> destringedParts = TokenUtils.removeDQuotedStrings(newComm);
		StringBuffer destringedCommand = new StringBuffer();

		for(String part : destringedParts) {
			/*
			 * Handle string literals
			 */
			if(part.startsWith("\"") && part.endsWith("\"")) {
				/*
				 * Get the actual string (without the quotes).
				 */
				String litName = "stringLiteral" + nextLiteral;
				String litVal = part.substring(1, part.length() - 1);

				/*
				 * Insert the string with its escape sequences
				 * interpreted.
				 */
				stringLiterals.put(litName, TokenUtils.descapeString(litVal));
				nextLiteral += 1;

				/*
				 * Place a ref. to the string in the command;
				 * padding spaces keep it a separate token.
				 */
				destringedCommand.append(" " + litName + " ");
			} else {
				destringedCommand.append(part);
			}
		}

		if(debugMode) {
			System.out.println("\tCommand after destringing: " + destringedCommand);

			/*
			 * Print the string table if it exists.
			 */
			if(stringLiterals.size() > 0) {
				System.out.println("\tString literals in table");

				stringLiterals.forEach((key, val) -> {
					System.out.printf("\t\tName: (%s)\tValue: (%s)\n", key, val);
				});
			}
		}

		/*
		 * Split the command into tokens
		 */
		String strang = destringedCommand.toString();
		IList<String> tokens = FunctionalStringTokenizer.fromString(strang).toList();

		/*
		 * Temporarily remove non-expanding tokens so the operator
		 * expander can't split their contents.
		 */
		IMap<String, String> nonExpandedTokens = new FunctionalMap<>();
		tokens = tokens.map(tk -> {
			Matcher nonExpandMatcher = nonExpandPattern.matcher(tk);

			if(nonExpandMatcher.matches()) {
				String tkName = "nonExpandToken" + nextLiteral++;
				nonExpandedTokens.put(tkName, nonExpandMatcher.group(1));

				return tkName;
			} else
				return tk;
		});

		if(debugMode) {
			System.out.printf("\tCommand after removal of non-expanders: %s\n", tokens.toString());
		}

		/*
		 * Expand tokens
		 */
		IList<String> fullyExpandedTokens = tokens.flatMap(opExpander::split);

		/*
		 * BUGFIX: this stage's debug print was unconditional; guard it
		 * with debugMode like every other stage print.
		 */
		if(debugMode) {
			System.out.println("\tCommand after token expansion: " + fullyExpandedTokens.toString());
		}

		/*
		 * Reinsert non-expanded tokens
		 */
		fullyExpandedTokens = fullyExpandedTokens.map(tk -> {
			if(tk.startsWith("nonExpandToken"))
				return nonExpandedTokens.get(tk);
			else
				return tk;
		});

		if(debugMode) {
			System.out.printf("\tCommand after non-expander reinsertion: %s\n",
					fullyExpandedTokens.toString());
		}

		return fullyExpandedTokens;
	}

	/*
	 * Evaluate each AST in the forest, printing debug and/or step-by-step
	 * output as the current modes dictate.
	 *
	 * The exact interleaving of printf (no trailing newline) and println
	 * calls defines the output format; preserve the print order when
	 * editing.
	 */
	private void evaluateForest(IList<ITree<Node>> astForest) {
		if(debugMode) {
			System.out.println("\tParsed forest of asts");
		}

		/* 1-based index of the tree, for debug output only. */
		int treeNo = 1;

		for(ITree<Node> ast : astForest) {
			if(debugMode) {
				System.out.printf("\t\tTree %d in forest:\n%s\n", treeNo, ast.toString());
			}

			if(debugMode && stepEval) {
				int step = 1;

				/*
				 * Evaluate it step by step
				 */
				for(Iterator<ITree<Node>> itr = eval.stepDebug(ast); itr.hasNext();) {
					ITree<Node> nodeStep = itr.next();

					/* No newline yet; detail may follow. */
					System.out.printf("\t\tStep %d: Node is %s", step, nodeStep);

					/*
					 * Don't evaluate null steps
					 */
					if(nodeStep == null) {
						System.out.println();

						step += 1;
						continue;
					}

					/*
					 * Print out details for results
					 */
					if(nodeStep.getHead().type == Node.Type.RESULT) {
						EvaluatorResult res = nodeStep.getHead().resultVal;

						System.out.printf(" (result is %s", res);

						if(res.type == EvaluatorResult.Type.DICE) {
							/* value() rolls the dice once for a sample. */
							System.out.printf(" (sample roll %s)", res.diceVal.value());
						}

						if(res.origVal != null) {
							System.out.printf(" (original tree is %s)", res.origVal);
						}

						System.out.printf(")");
					}

					/*
					 * Advance a step
					 */
					System.out.println();
					step += 1;
				}
			} else {
				/*
				 * Evaluate it normally
				 */
				EvaluatorResult res = eval.evaluate(ast);

				if(debugMode) {
					System.out.printf("\t\tEvaluates to %s", res);

					if(res.type == EvaluatorResult.Type.DICE) {
						System.out.println("\t\t (sample roll " + res.diceVal.value() + ")");
					}
				}
			}

			System.out.println();

			treeNo += 1;
		}
	}

	/*
	 * Preshunt preshunt-marked groups of tokens.
	 *
	 * A preshunt group is delimited by OBRACE/CBRACE tokens whose
	 * intValue is 2. Each group is shunted immediately and replaced by a
	 * single TOKGROUP token; groups may nest, in which case an inner
	 * group becomes a TOKGROUP inside its enclosing group.
	 *
	 * Results accumulate into preparedTokens (an out-parameter). Returns
	 * false (after printing an error) on unbalanced group markers or a
	 * failed shunt.
	 */
	private boolean removePreshuntTokens(IList<Token> lexedTokens, IList<Token> preparedTokens) {
		/*
		 * Current nesting level of tokens.
		 */
		int curBraceCount = 0;

		/*
		 * Data storage: the stack holds the suspended enclosing groups;
		 * curBracedTokens collects the innermost open group.
		 */
		Deque<IList<Token>> bracedTokens = new LinkedList<>();
		IList<Token> curBracedTokens = new FunctionalList<>();

		for(Token tk : lexedTokens) {
			if(tk.type == Token.Type.OBRACE && tk.intValue == 2) {
				/*
				 * Open a preshunt group.
				 */
				curBraceCount += 1;

				if(curBraceCount != 1) {
					/*
					 * Push the old group onto the group
					 * stack.
					 */
					bracedTokens.push(curBracedTokens);
				}

				curBracedTokens = new FunctionalList<>();
			} else if(tk.type == Token.Type.CBRACE && tk.intValue == 2) {
				/*
				 * Close a preshunt group.
				 */
				if(curBraceCount == 0) {
					/*
					 * Error if there couldn't have been an
					 * opening.
					 */
					Errors.inst.printError(EK_ENG_NOOPENING);
					return false;
				}

				curBraceCount -= 1;

				IList<Token> preshuntTokens = new FunctionalList<>();

				/*
				 * Shunt preshunt group.
				 */
				boolean success = shunt.shuntTokens(curBracedTokens, preshuntTokens);

				/* Debug print happens even for a failed shunt. */
				if(debugMode) {
					System.out.println("\t\tPreshunted " + curBracedTokens + " into "
							+ preshuntTokens);
				}

				if(!success) return false;

				if(curBraceCount >= 1) {
					/*
					 * Add the preshunt group to the
					 * previous group.
					 */
					curBracedTokens = bracedTokens.pop();

					curBracedTokens.add(new Token(Token.Type.TOKGROUP, preshuntTokens));
				} else {
					/*
					 * Add the preshunt group to the token
					 * stream..
					 */
					preparedTokens.add(new Token(Token.Type.TOKGROUP, preshuntTokens));
				}
			} else {
				/*
				 * Add the token to the active preshunt group,
				 * if there is one..
				 */
				if(curBraceCount >= 1) {
					curBracedTokens.add(tk);
				} else {
					preparedTokens.add(tk);
				}
			}
		}

		if(curBraceCount > 0) {
			/*
			 * There was an unclosed group.
			 */
			Errors.inst.printError(EK_ENG_NOCLOSING);
			return false;
		}

		return true;
	}

	/*
	 * Look up an interned string literal by its numeric ID.
	 */
	String getStringLiteral(int key) {
		final String lit = stringLits.get(key);

		return lit;
	}

	/*
	 * Intern a string literal under the given numeric ID.
	 */
	void addStringLiteral(int key, String val) {
		stringLits.put(key, val);
	}
}