Lines Matching refs:template

32 $template = '{% § %}';
35 $stream = $lexer->tokenize(new Source($template, 'index'));
43 $template = '{{ §() }}';
46 $stream = $lexer->tokenize(new Source($template, 'index'));
54 $template = '{{ {"a":{"b":"c"}} }}';
56 $this->assertEquals(2, $this->countToken($template, Token::PUNCTUATION_TYPE, '{'));
57 $this->assertEquals(2, $this->countToken($template, Token::PUNCTUATION_TYPE, '}'));
60 protected function countToken($template, $type, $value = null) argument
63 $stream = $lexer->tokenize(new Source($template, 'index'));
80 $template = "foo\n"
88 $stream = $lexer->tokenize(new Source($template, 'index'));
102 $template = "foo\n"
108 $stream = $lexer->tokenize(new Source($template, 'index'));
120 $template = '{# '.str_repeat('*', 100000).' #}';
123 $lexer->tokenize(new Source($template, 'index'));
132 $template = '{% verbatim %}'.str_repeat('*', 100000).'{% endverbatim %}';
135 $lexer->tokenize(new Source($template, 'index'));
144 $template = '{{ '.str_repeat('x', 100000).' }}';
147 $lexer->tokenize(new Source($template, 'index'));
156 $template = '{% '.str_repeat('x', 100000).' %}';
159 $lexer->tokenize(new Source($template, 'index'));
168 $template = '{{ 922337203685477580700 }}';
171 $stream = $lexer->tokenize(new Source($template, 'index'));
184 foreach ($tests as $template => $expected) {
185 $stream = $lexer->tokenize(new Source($template, 'index'));
197 $template = 'foo {{ "bar #{ baz + 1 }" }}';
200 $stream = $lexer->tokenize(new Source($template, 'index'));
218 $template = '{{ "bar \#{baz+1}" }}';
221 $stream = $lexer->tokenize(new Source($template, 'index'));
233 $template = '{{ "bar # baz" }}';
236 $stream = $lexer->tokenize(new Source($template, 'index'));
252 $template = '{{ "bar #{x" }}';
255 $lexer->tokenize(new Source($template, 'index'));
260 $template = '{{ "bar #{ "foo#{bar}" }" }}';
263 $stream = $lexer->tokenize(new Source($template, 'index'));
281 $template = '{% foo "bar #{ "foo#{bar}" }" %}';
284 $stream = $lexer->tokenize(new Source($template, 'index'));
303 $template = "{{ 1 and\n0}}";
306 $stream = $lexer->tokenize(new Source($template, 'index'));
322 $template = '
332 $lexer->tokenize(new Source($template, 'index'));
341 $template = '
351 $lexer->tokenize(new Source($template, 'index'));
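Every hit above follows the same pattern from Twig's lexer tests: a template string is tokenized into a token stream, and the resulting tokens are inspected or counted. Below is a minimal, self-contained sketch of that pattern, not copied from the file itself; it assumes a Composer-based install of twig/twig and a recent Twig release where Environment takes a loader, and it only mirrors the countToken() helper shown in the listing.

<?php

// Assumes a Composer install of twig/twig.
require 'vendor/autoload.php';

use Twig\Environment;
use Twig\Lexer;
use Twig\Loader\ArrayLoader;
use Twig\Source;
use Twig\Token;

// Build a lexer the same way the tests do, then tokenize a Source.
$lexer = new Lexer(new Environment(new ArrayLoader()));
$template = '{{ {"a":{"b":"c"}} }}';
$stream = $lexer->tokenize(new Source($template, 'index'));

// Count punctuation tokens with a given value, mirroring countToken().
$count = 0;
while (!$stream->isEOF()) {
    $token = $stream->next();
    if ($token->test(Token::PUNCTUATION_TYPE, '{')) {
        ++$count;
    }
}

echo $count; // 2, matching the assertion on line 56 of the listing

TokenStream::next() returns the current token and advances the stream, so the loop visits every token up to, but not including, the final EOF token; the outer {{ }} delimiters become VAR_START/VAR_END tokens rather than punctuation, which is why only the two inner hash braces are counted.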