From 1e7d6487d514d6fbd1f0b5ab06f4eb1ff70abf81 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Hugo=20H=C3=B6rnquist?= <hugo@lysator.liu.se>
Date: Tue, 19 Sep 2023 07:36:45 +0200
Subject: [PATCH] Add tests for deserialization, HTML formatting, re-parsing, and syntax highlighting.

---
 tests/test_deserializable.py      | 65 ++++++++++++++++++++++++++++++++
 tests/test_format_html.py         |  8 +++
 tests/test_parse_elsif.py         | 92 +++++++++++++++++++++++++++++++++++++++++++++++
 tests/test_syntax_highlighters.py | 25 +++++++++
 4 files changed, 190 insertions(+)
 create mode 100644 tests/test_deserializable.py
 create mode 100644 tests/test_format_html.py
 create mode 100644 tests/test_parse_elsif.py
 create mode 100644 tests/test_syntax_highlighters.py

diff --git a/tests/test_deserializable.py b/tests/test_deserializable.py
new file mode 100644
index 0000000..e64914a
--- /dev/null
+++ b/tests/test_deserializable.py
@@ -0,0 +1,65 @@
+import pytest
+from muppet.puppet.strings import (
+    DocStringTag,
+    DocString,
+    DataTypeAlias,
+)
+
+
+def test_deserialize_from_json():
+    tagname = 'Tagname'
+    text = 'Contents of tagname'
+    types = ['what even is this?']
+
+    assert DocStringTag(tag_name=tagname, text=text, types=types) \
+        == DocStringTag.from_json({
+            'tag_name': tagname,
+            'text': text,
+            'types': types
+        })
+
+
+def test_deserialize_from_json_unknown_key():
+    # TODO: ensure that a warning is logged for the unknown key
+    DocStringTag.from_json({
+        'tag_name': 'Tagname',
+        'text': 'Contents of tagname',
+        'XXX': 'what even is this?'
+    })
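+    # Hedged sketch for the TODO above: pytest's ``caplog`` fixture could
+    # capture the warning, assuming ``from_json`` reports unknown keys via
+    # the standard ``logging`` module (this would need ``import logging``
+    # and a ``caplog`` parameter on the test before being enabled):
+    #
+    #     with caplog.at_level(logging.WARNING):
+    #         DocStringTag.from_json({'tag_name': 'x', 'XXX': 'y'})
+    #     assert any('XXX' in message for message in caplog.messages)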
+
+
+def test_deserialize_nested():
+    text = 'A'
+    tn = 'B'
+    ttext = 'C'
+
+    assert DocString(text=text,
+                     tags=[DocStringTag(tag_name=tn,
+                                        text=ttext)]) \
+        == DocString.from_json({
+            'text': text,
+            'tags': [
+                {
+                    'tag_name': tn,
+                    'text': ttext,
+                }
+            ]
+        })
+
+
+def test_deserialize_from_json_type_error():
+    with pytest.raises(TypeError):
+        DocString.from_json({
+            'text': 'text',
+            'tags': [
+                'no',
+            ]
+        })
diff --git a/tests/test_format_html.py b/tests/test_format_html.py
new file mode 100644
index 0000000..dde7c67
--- /dev/null
+++ b/tests/test_format_html.py
@@ -0,0 +1,8 @@
+from muppet.puppet.format.html import HTMLFormatter
+from muppet.puppet.ast import PuppetLiteral
+
+fmt = HTMLFormatter()
+
+
+def test_literal():
+    assert '<span class="literal">true</span>' == fmt._puppet_literal(PuppetLiteral('true'))
diff --git a/tests/test_parse_elsif.py b/tests/test_parse_elsif.py
new file mode 100644
index 0000000..3ef6a7f
--- /dev/null
+++ b/tests/test_parse_elsif.py
@@ -0,0 +1,92 @@
+"""Tests for Parser combinator re-parsers."""
+
+from pprint import pprint
+
+from muppet.puppet.format.parser import ParserFormatter
+from muppet.puppet.ast import build_ast
+from muppet.puppet.parser import puppet_parser
+from muppet.parser_combinator import ParserCombinator
+
+
+def test_parse_else_if():
+    s = """
+    if x {
+      1
+    } else {
+      if y {
+        2
+      } else {
+        3
+      }
+    }
+    """
+    ast = build_ast(puppet_parser(s))
+    parser = ParserFormatter(s, "s").serialize(ast)
+    match_objects = ParserCombinator(s, "s").get(parser)
+    print(match_objects)
+    # [`ws(['\n    '])`, `keyword(['if'])`, `ws([' '])`,
+    # `qn([`ws([])`, 'x'])`, `ws([' '])`, '{', `ws(['\n      '])`,
+    # `ws([])`, `ws([])`, '1', `ws(['\n    '])`, '}', `ws([' '])`,
+    # `keyword(['else'])`, `ws([' '])`, '{', `ws(['\n      '])`,
+    # `ws([])`, `keyword(['if'])`, `ws([' '])`, `qn([`ws([])`, 'y'])`,
+    # `ws([' '])`, '{', `ws(['\n        '])`, `ws([])`, `ws([])`, '2',
+    # `ws(['\n      '])`, '}', `ws([' '])`, `keyword(['else'])`,
+    # `ws([' '])`, '{', `ws(['\n        '])`, `ws([])`, `ws([])`, '3',
+    # `ws(['\n      '])`, '}', `ws(['\n    '])`, '}']
+
+
+def test_parse_elsif():
+    s = """
+    if x {
+      1
+    } elsif y {
+      2
+    } else {
+      3
+    }
+    """
+    ast = build_ast(puppet_parser(s))
+    parser = ParserFormatter(s, "s").serialize(ast)
+    match_objects = ParserCombinator(s, "s").get(parser)
+    print(match_objects)
+    # [`ws(['\n    '])`, `keyword(['if'])`, `ws([' '])`,
+    # `qn([`ws([])`, 'x'])`, `ws([' '])`, '{', `ws(['\n      '])`,
+    # `ws([])`, `ws([])`, '1', `ws(['\n    '])`, '}', `ws([' '])`,
+    # `keyword(['elsif'])`, `ws([' '])`, `qn([`ws([])`, 'y'])`, `ws(['
+    # '])`, '{', `ws(['\n      '])`, `ws([])`, `ws([])`, '2', `ws(['\n
+    # '])`, '}', `ws([' '])`, `keyword(['else'])`, `ws([' '])`, '{',
+    # `ws(['\n      '])`, `ws([])`, `ws([])`, '3', `ws(['\n    '])`,
+    # '}']
+
+
+def test_chained():
+    """Re-parse chained function calls; should not raise."""
+    s = "x.filter().join()"
+    ast = build_ast(puppet_parser(s))
+    pprint(ast)
+    parser = ParserFormatter(s, "s").serialize(ast)
+    pprint(parser)
+    match_objects = ParserCombinator(s, "s").get(parser)
+    pprint(match_objects)
+
+
+def test_lambda():
+    """Re-parse a function call with a lambda; should not raise."""
+    s = "x.filter |$x, $y| { $x + 1 }"
+    ast = build_ast(puppet_parser(s))
+    pprint(ast)
+    parser = ParserFormatter(s, "s").serialize(ast)
+    pprint(parser)
+    match_objects = ParserCombinator(s, "s").get(parser)
+    pprint(match_objects)
+
+
+def test_chained_and_lambda():
+    """Re-parse a chained call that takes a lambda; should not raise."""
+    s = "x.filter |$x, $y| { $x + 1 }.join()"
+    ast = build_ast(puppet_parser(s))
+    pprint(ast)
+    parser = ParserFormatter(s, "s").serialize(ast)
+    print("parser:\n" + str(parser))
+    match_objects = ParserCombinator(s, "s").get(parser)
+    pprint(match_objects)
diff --git a/tests/test_syntax_highlighters.py b/tests/test_syntax_highlighters.py
new file mode 100644
index 0000000..59dab3f
--- /dev/null
+++ b/tests/test_syntax_highlighters.py
@@ -0,0 +1,25 @@
+from muppet.syntax_highlight import pygments, andre_simon, plain
+
+
+def test_pygments():
+    assert """
+    <!-- Generated through pygments, as python -->
+    <div class="highlight-pygments"><table class="highlight-pygmentstable"><tr><td class="linenos"><div class="linenodiv"><pre><span class="normal"><a href="#line-1">1</a></span></pre></div></td><td class="code"><div><pre><span></span><a id="line-1" name="line-1"></a><span class="n">f</span><span class="p">(</span><span class="n">x</span><span class="o">+</span><span class="mi">1</span><span class="p">)</span>
+    </pre></div></td></tr></table></div>
+
+    """ == pygments.highlight("f(x+1)", "python")
+
+
+def test_andre_simon():
+    assert """
+    <!-- Generated through highlight(1), as language python -->
+    <div class="highlight-andre-simon"><pre><span class="lin" id="line_1">    1 </span><span class="kwd">f</span><span class="opt">(</span>x<span class="opt">+</span><span class="num">1</span><span class="opt">)</span>
+</pre></div>
+    """ == andre_simon.highlight("f(x+1)", "python")
+
+
+def test_plain():
+    assert """
+    <!-- "Genererated" as plain output -->
+    <div class"highlight-plain"><pre><code class="python">f(x+1)</code></pre></div>
+    """ == plain.highlight("f(x+1)", "python")
-- 
GitLab