<?php

declare(strict_types=1);

namespace Antlr\Antlr4\Runtime\Atn;

use Antlr\Antlr4\Runtime\Dfa\DFAState;
use Antlr\Antlr4\Runtime\PredictionContexts\PredictionContext;
use Antlr\Antlr4\Runtime\PredictionContexts\PredictionContextCache;

/**
 * The context cache maps all PredictionContext objects that are ==
 * to a single cached copy. This cache is shared across all contexts
 * in all ATNConfigs in all DFA states. We rebuild each ATNConfigSet
 * to use only cached nodes/graphs in addDFAState(). We don't want to
 * fill this during closure() since there are lots of contexts that
 * pop up but are not used ever again. It also greatly slows down closure().
 *
 * This cache makes a huge difference in memory and a little bit in speed.
 * For the Java grammar on java.*, it dropped the memory requirements
 * at the end from 25M to 16M. We don't store any of the full context
 * graphs in the DFA because they are limited to local context only,
 * but apparently there's a lot of repetition there as well. We optimize
 * the config contexts before storing the config set in the DFA states
 * by literally rebuilding them with cached subgraphs only.
 *
 * I tried a cache for use during closure operations, that was
 * whacked after each adaptivePredict(). It cost a little bit
 * more time I think and doesn't save on the overall footprint
 * so it's not worth the complexity.
 */
abstract class ATNSimulator
{
    /** @var ATN */
    public $atn;

    /**
     * The context cache maps all PredictionContext objects that are equals()
     * to a single cached copy. This cache is shared across all contexts
     * in all ATNConfigs in all DFA states. We rebuild each ATNConfigSet
     * to use only cached nodes/graphs in addDFAState(). We don't want to
     * fill this during closure() since there are lots of contexts that
     * pop up but are not used ever again. It also greatly slows down closure().
     *
     * This cache makes a huge difference in memory and a little bit in speed.
     * For the Java grammar on java.*, it dropped the memory requirements
     * at the end from 25M to 16M. We don't store any of the full context
     * graphs in the DFA because they are limited to local context only,
     * but apparently there's a lot of repetition there as well. We optimize
     * the config contexts before storing the config set in the DFA states
     * by literally rebuilding them with cached subgraphs only.
     *
     * I tried a cache for use during closure operations, that was
     * whacked after each adaptivePredict(). It cost a little bit
     * more time I think and doesn't save on the overall footprint
     * so it's not worth the complexity.
     *
     * @var PredictionContextCache
     */
    protected $sharedContextCache;

    public function __construct(ATN $atn, PredictionContextCache $sharedContextCache)
    {
        $this->atn = $atn;
        $this->sharedContextCache = $sharedContextCache;
    }

    public static function error() : DFAState
    {
        static $error;

        return $error ?? ($error = new DFAState(new ATNConfigSet(), 0x7FFFFFFF));
    }

    abstract public function reset() : void;

    /**
     * Clear the DFA cache used by the current instance. Since the DFA cache
     * may be shared by multiple ATN simulators, this method may affect the
     * performance (but not accuracy) of other parsers which are being used
     * concurrently.
     *
     * @throws \InvalidArgumentException If the current instance does not
     *     support clearing the DFA.
     */
    public function clearDFA() : void
    {
        throw new \InvalidArgumentException('This ATN simulator does not support clearing the DFA.');
    }

    public function getSharedContextCache() : PredictionContextCache
    {
        return $this->sharedContextCache;
    }

    /**
     * Returns a version of the given context that is built only from nodes
     * held in the shared context cache, adding nodes to the cache as needed,
     * so that structurally equal context graphs share a single representation.
     */
    public function getCachedContext(PredictionContext $context) : PredictionContext
    {
        if ($this->sharedContextCache === null) {
            return $context;
        }

        $visited = [];

        return PredictionContext::getCachedPredictionContext(
            $context,
            $this->sharedContextCache,
            $visited
        );
    }
}
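
/*
 * Usage sketch (illustrative only; not part of the original runtime file).
 * A concrete simulator subclass (here the hypothetical MyLexerSimulator) hands
 * the ATN and a single shared PredictionContextCache to this base constructor.
 * Sharing one cache across simulator instances lets getCachedContext() collapse
 * structurally equal PredictionContext graphs onto one canonical copy before
 * config sets are stored in DFA states:
 *
 *     $cache = new PredictionContextCache();
 *     $simulator = new MyLexerSimulator($atn, $cache);          // hypothetical subclass of ATNSimulator
 *     $canonical = $simulator->getCachedContext($someContext);  // same value, rebuilt from cached nodes only
 */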