diff --git a/r11k/gitutil.py b/r11k/gitutil.py
index 71f84e00282cfad3f37e63dd578211207036c852..bfa14e24a5a82bcb81351bf386a097f39d870fd1 100644
--- a/r11k/gitutil.py
+++ b/r11k/gitutil.py
@@ -3,13 +3,11 @@
 from datetime import datetime
 import os
 import os.path
+import subprocess
 from typing import (
     Optional,
 )
 
-# from git.repo.base import Repo
-# from git.util import IterableList
-# import git
 import pygit2
 
 
@@ -39,3 +37,25 @@ def find_remote(remotes: pygit2.remote.RemoteCollection,
         if target_url == remote.url:
             return remote
     return None
+
+
+def checkout(repo: pygit2.Repository,
+             refspec: str,
+             force: bool = False) -> None:
+    """
+    Run git's porcelain checkout.
+
+    Shells out to git checkout.
+
+    git-checkout(1) allows us to give a reference in any form,
+    including a commit hash or a non-local branch (e.g.
+    origin/master). A "proper" (pygit2) checkout would be something
+    like
+    >>> repo.checkout(f'refs/remotes/origin/{refspec}')
+    >>> repo.checkout(refspec)
+    """
+    cmdline = ['git', 'checkout', '--quiet', refspec]
+    if force:
+        cmdline += ['--force']
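+    # Run inside the work tree of the target repository; check=True
+    # raises CalledProcessError if the checkout fails.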
+    subprocess.run(cmdline,
+                   check=True,
+                   cwd=repo.workdir)
diff --git a/r11k/puppetfile.py b/r11k/puppetfile.py
index 72b2938a247401e69af05cf89cf36489e40e5331..f47a8de8eeb9b3e1681960c89e1eeb4c5e5eb447 100644
--- a/r11k/puppetfile.py
+++ b/r11k/puppetfile.py
@@ -252,6 +252,7 @@ def update_module_names(modules: list[PuppetModule]) -> None:
 
 
 # TODO figure out proper way to propagate config through everything
+# TODO shouldn't this be a method on PuppetFile?
 def find_all_modules_for_environment(puppetfile: PuppetFile) -> ResolvedPuppetFile:
     """
     Find all modules which would make out this puppet environment.
diff --git a/r11k/puppetmodule/git.py b/r11k/puppetmodule/git.py
index e99008c83c67da8001db7b94c315aaa2cd9fbd6c..b937718bf3c70c46d13a5017896a788152075c28 100644
--- a/r11k/puppetmodule/git.py
+++ b/r11k/puppetmodule/git.py
@@ -5,6 +5,8 @@ import json
 import logging
 import os.path
 import time
+import shutil
+import base64
 
 from typing import (
     Any,
@@ -30,6 +32,7 @@ import r11k.config
 from r11k.gitutil import (
     find_remote,
     repo_last_fetch,
+    checkout,
 )
 
 
@@ -40,6 +43,12 @@ class GitPuppetModule(PuppetModule):
     """
     Puppet module backed by git repo.
 
+    Three different git repos are handled by this class:
+    - The upstream, referenced by the URL in self.git.
+    - The published module, whose path is given as the argument to publish().
+    - Our local cache of the upstream, located at self.repo_path
+      (and held in self.__repo).
+
     :param name: If the given module dosen't provide any metadata,
                  then this becomes the name of the published directory
                  for module (and must therefor match what the module
@@ -68,25 +77,37 @@ class GitPuppetModule(PuppetModule):
 
     @property
     def repo_path(self) -> str:
-        """Return where this modules repo is located in the filesystem."""
-        return os.path.join(self.config.clone_base, self._og_name)
+        """Return the path of the local cache of the upstream."""
+        # Using self._og_name would be nice, but it leads to problems
+        # if two repos have the same name (which is rather common,
+        # since different modules for the same functionality are used
+        # in different parts of the puppet deployment).
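+        # base64 with '-_' as altchars (the URL-safe alphabet) keeps
+        # the encoded name free of '/', making it a valid directory name.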
+        cache_name: str = base64.b64encode(self.git.encode('UTF-8'), b'-_') \
+                                .decode('ASCII')
+        return os.path.join(self.config.clone_base, cache_name)
 
     @property
     def repo(self) -> Repository:
         """
         Return the coresponding git repo for this module, if available.
 
+        This will be the bare repo placed in the cache directory.
+
         This method only works if the module have git as its origin.
         Otherwise it will throw a value error.
         """
         if not self.__repo:
-            if os.path.exists(self.repo_path):
-                # self.__repo = git.Repo(self.repo_path)
+            try:
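+                # Try to open the existing cache; if that fails (most
+                # likely because it does not exist yet) we fall
+                # through to a fresh clone in the except clause.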
                 self.__repo = pygit2.Repository(self.repo_path, GIT_REPOSITORY_OPEN_BARE)
-            else:
+                if self.__repo.remotes['origin'].url != self.git:
+                    logger.warning('Cached repo "%s" already exists, but points to "%s" rather than "%s". Replacing.' % (self.name, self.__repo.remotes['origin'].url, self.git))  # noqa: E501
+                    shutil.rmtree(self.repo_path)
+                    self.__repo = pygit2.clone_repository(self.git, self.repo_path, bare=True)
+            except pygit2.GitError:
                 self.__repo = pygit2.clone_repository(self.git, self.repo_path, bare=True)
-                if self.__repo.is_empty:
-                    logger.warning('Empty repository cloned %s' % self.git)
+
+            if self.__repo.is_empty:
+                logger.warning('Empty repository cloned %s' % self.git)
 
         # This fetches updates all remote branches, and then merges
         # the local branches to match remote. Local branches is needed
@@ -101,24 +122,25 @@ class GitPuppetModule(PuppetModule):
         # This forces the download of upstream, which is needed
         # to publish it.
         assert self.repo
+        assert self.version
+
         try:
-            # repo = git.Repo(path)
             repo = pygit2.Repository(path)
-            # repo.remote().pull('+refs/heads/*:refs/heads/*')
             # TODO default origin name
-            repo.remotes['origin'].fetch(['+refs/heads/*:refs/heads/*'])
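+            # The checkout at `path' should track our local cache; if
+            # it tracks something else the module name collided with
+            # another module, so replace it.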
+            if repo.remotes['origin'].url == self.repo_path:
+                repo.remotes['origin'].fetch(['+refs/heads/*:refs/heads/*'])
+            else:
+                logger.warning('Collision when publishing "%s": expected remote "%s", got "%s". Replacing.' % (self.name, self.repo_path, repo.remotes['origin'].url))  # noqa: E501
+                shutil.rmtree(path)
+                repo = pygit2.clone_repository(self.repo_path, path)
         except pygit2.GitError:
-            # repo = git.Repo.clone_from(self.repo_path, path)
-            # TODO Would replace self.repo_path to self.repo.path
+            # TODO Would replacing self.repo_path with self.repo.path
             #      allow us to remove the assert above?
             repo = pygit2.clone_repository(self.repo_path, path)
-        # Not the prettiest way to force the correct branch, but it .works
-        # repo.git.checkout(self.version)
-        assert self.version
-        # if '/' not in self.version:
-        #     self.version = f'refs/heads/{self.version}'
-        # NOTE is this what we want?
-        repo.checkout(f'refs/remotes/origin/{self.version}')
+
+        # NOTE forcing the checkout without a warning isn't the best
+        # idea, but it will have to do for now.
+        checkout(repo, self.version, force=True)
 
     def versions(self) -> list[VersionInfo]:
         """
diff --git a/setup.cfg b/setup.cfg
index 90209c307c04f9e6b5036a55bf77fab42b026e40..ecad0055c2b4450659b7de115219c219adb69f77 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -58,3 +58,4 @@ strict_equality = True
 
 [mypy-tests.*]
 allow_untyped_defs = True
+check_untyped_defs = True
diff --git a/tests/fixtures/git_upstream.py b/tests/fixtures/git_upstream.py
index 0f82b172a9d971f31ab5954ba893220ff0dc0e30..14702a3e3fd592dd93c94a5893110428e54cbd62 100644
--- a/tests/fixtures/git_upstream.py
+++ b/tests/fixtures/git_upstream.py
@@ -6,10 +6,13 @@ from typing import (
     Any,
     Callable,
     Optional,
+    Tuple,
+    TypeAlias,
 )
 
 import pytest
 import pygit2
+from tests.util.git import DEFAULT_BRANCH
 
 try:
     # builtin fixture types started being exported in pytest 6.2.0.
@@ -29,9 +32,12 @@ def git_upstream(request: FixtureRequest, tmp_path: Path) -> pygit2.Repository:
     return repo
 
 
+UpstreamFactory: TypeAlias = Callable[[Optional[str]], pygit2.Repository]
+
+
 @pytest.fixture()
 def git_upstream_factory(request: FixtureRequest,
-                         tmp_path: Path) -> Callable[[Optional[str]], pygit2.Repository]:
+                         tmp_path: Path) -> UpstreamFactory:
     """Return a function returning a new bare git repo every time its called."""
     def new_repo(name: Optional[str] = None):
         # NOTE We technically have to check for collisions...
@@ -40,3 +46,63 @@ def git_upstream_factory(request: FixtureRequest,
         repo = pygit2.init_repository(path, bare=True)
         return repo
     return new_repo
+
+
+sample_author: pygit2.Signature = pygit2.Signature('Firstname Lastnameson',
+                                                   'author@example.com')
+
+
+@pytest.fixture()
+def populated_git_upstream_factory(request: FixtureRequest,
+                                   git_upstream_factory: UpstreamFactory,
+                                   tmp_path: Path):
+    """
+    Return a function which sets up a new git repo and returns the downstream and upstream.
+
+    The parameters are used to construct a single initial commit,
+    which is pushed to the upstream.
+    """
+    def new_populated_upstream(repo_name: str,
+                               content: dict[str, bytes],
+                               *,
+                               author: pygit2.Signature = sample_author,
+                               comitter: pygit2.Signature = sample_author,
+                               ref='HEAD') -> Tuple[pygit2.Repository, pygit2.Repository]:
+        """
+        :param repo_name: Name of the downstream repo, part of the
+                          name of the upstream, and used in the
+                          commit message. Mostly for ease of debugging.
+        :param content: File contents of the commit. Keys are
+                        filenames, values are file contents. Currently
+                        only a flat root directory is supported.
+        :param author: Author signature to use, defaults to `sample_author'.
+        :param comitter: Committer signature to use, defaults to `sample_author'.
+        :param ref: Reference to commit to, defaults to 'HEAD', but
+                    'refs/heads/<branch>' is a good alternative.
+        """
+
+        work_repo_path = os.path.join(tmp_path, 'workdir', repo_name)
+        work_repo = pygit2.init_repository(work_repo_path)
+
+        for filename, file_content in content.items():
+            with open(os.path.join(work_repo_path, filename), 'wb') as f:
+                f.write(file_content)
+            work_repo.index.add(filename)
+
+        work_repo.create_commit(ref, author, comitter,
+                                f'Initial commit {repo_name}',
+                                work_repo.index.write_tree(),
+                                [])
+
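+        # Push the new commit to a fresh bare upstream repository.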
+        upstream = git_upstream_factory(f'upstream-{repo_name}')
+        origin = work_repo.remotes.create('origin', upstream.path)
+        # TODO DEFAULT_BRANCH
+        # TODO non-hacky way to push, or distinguish default ref.
+        if ref == 'HEAD':
+            origin.push([f'refs/heads/{DEFAULT_BRANCH}'])
+        else:
+            origin.push([ref])
+
+        return work_repo, upstream
+
+    return new_populated_upstream
diff --git a/tests/fixtures/testconf.py b/tests/fixtures/testconf.py
index 13fe97f9af1aafd3e64547660a79e13882bc8406..56546acbf80b338599736d0005cd0c94b850c7ea 100644
--- a/tests/fixtures/testconf.py
+++ b/tests/fixtures/testconf.py
@@ -12,7 +12,16 @@ import r11k.config
 def testconf(forge: HTTPServer, tmp_path: Path) -> r11k.config.Config:
     """Alternative configuration containing our test Forge."""
     conf = r11k.config.Config(api_base=forge.url_for(''),
-                              http_ttl=0,
-                              git_ttl=0,
+                              http_ttl=3600,
+                              git_ttl=3600,
+                              cache_root=os.path.join(tmp_path, 'cache'))
+    return conf
+
+
+@pytest.fixture()
+def testconf_noforge(tmp_path: Path) -> r11k.config.Config:
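+    """Alternative configuration without a usable Forge backend."""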
+    conf = r11k.config.Config(api_base='http://localhost:0',
+                              http_ttl=3600,
+                              git_ttl=3600,
                               cache_root=os.path.join(tmp_path, 'cache'))
     return conf
diff --git a/tests/test_main.py b/tests/test_main.py
index d74e680a98791f2e6c3ceec144aab8357c69c688..f1879222c498a9bb0bec8a70bdb7de46e0ac9315 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -248,3 +248,12 @@ def test_git_dependencies(testconf, git_upstream_factory, tmp_path):
 
     modules = find_all_modules_for_environment(puppetfile).modules.values()
     assert {'HugoNikanor-A', 'HugoNikanor-B'} == set(module.name for module in modules)
+
+
+def test_publish(tmp_path, testconf):
+    """
+    Test publish for a complete puppet file.
+
+    TODO write this test
+    """
+    # puppetfile = parse_puppetfile({
+    # }, config=testconf)
diff --git a/tests/test_puppetmodule_git.py b/tests/test_puppetmodule_git.py
index ed7f4f2801a18563299cce6e1af0a2e8ee9ef8d1..866b906402f76ea1a7423dd1553822e47ac58bfe 100644
--- a/tests/test_puppetmodule_git.py
+++ b/tests/test_puppetmodule_git.py
@@ -1,3 +1,5 @@
+import pytest
+
 import os
 import os.path
 import json
@@ -5,6 +7,7 @@ import logging
 
 from semver import VersionInfo
 import pygit2
+from pygit2 import GIT_RESET_HARD, GIT_OBJ_COMMIT
 
 from r11k.puppetmodule.git import GitPuppetModule
 from r11k.puppet import PuppetMetadata
@@ -14,40 +17,28 @@ from tests.util.git import DEFAULT_BRANCH
 
 
 class TestGitStuff:
-    def test_publish_git(self, testconf, tmp_path, git_upstream):
+    def test_publish_git(self, testconf_noforge, tmp_path, populated_git_upstream_factory):
         """
-        Test most basic publish. A simple repo with a single commit,
-        using gits default branch.
+        Test basic publish.
+
+        - Setup
+          - Create a local repository
+          - Initialize it with some data
+          - Push it to the remote provided by the fixture
+        - Exec
+          - Create a single GitPuppetModule
+          - Publish it to a directory
+          - Check that the published data matches the committed data
         """
-        # Setup local repo
-        print('repo path =', tmp_path)
-        repo_path = os.path.join(tmp_path, 'workdir', 'gittest')
         content = b'Content'
-
-        repo = pygit2.init_repository(repo_path)
-        file = os.path.join(repo_path, 'file')
-        with open(file, 'wb') as f:
-            f.write(content)
-        repo.index.add('file')
-        # repo.index.commit('Initial commit')
-        repo.create_commit(f'refs/heads/{DEFAULT_BRANCH}',
-                           pygit2.Signature('Sample Author', 'author@example.com'),
-                           pygit2.Signature('Sample Commiter', 'comitter@example.com'),
-                           'Initial commit',
-                           repo.index.write_tree(),
-                           [])
-        # Push to remote
-        # origin = repo.create_remote('origin', git_upstream.workdir)
-        origin = repo.remotes.create('origin', git_upstream.path)
-        # origin.push('+refs/heads/*:refs/heads/*')
-        origin.push([f'refs/heads/{DEFAULT_BRANCH}'])
+        _, upstream = populated_git_upstream_factory('upstream', {'file': content})
 
         # Load from remote as if from puppetfile
         module = GitPuppetModule(
             name='HugoNikanor-gittest',
             version=DEFAULT_BRANCH,
-            git=git_upstream.path,
-            config=testconf)
+            git=upstream.path,
+            config=testconf_noforge)
 
         # publish module to target
         path_base = os.path.join(tmp_path, 'target', 'modules', 'gittest')
@@ -57,17 +48,27 @@ class TestGitStuff:
             '.git': b'IGNORE',
         }
 
+        # Published data is the same as the committed data
         module.publish(path_base)
-        assert check_dir(path_base, expected), "First check"
-        # Test update
+        assert check_dir(path_base, expected)
+
+        # A republish without an updated upstream is idempotent
         module.publish(path_base)
-        assert check_dir(path_base, expected), "Second check"
+        assert check_dir(path_base, expected)
 
-    def test_publish_weird_branch(self, testconf, tmp_path, git_upstream):
+    def test_publish_weird_branch(self, testconf_noforge, tmp_path, populated_git_upstream_factory):
         """
-        The upstream repo only has a devel branch (which should never
-        be git's default branch). This has at one point failed due to
-        the publish method not checking which version to publish.
+        Test publish when the default branch is missing.
+
+        We initialize the upstream repo with a single branch: 'devel'
+        (this assumes git's default branch isn't called 'devel'). Note
+        that no 'master' (or similar) branch exists.
+
+        The test then initializes a GitPuppetModule as in
+        test_publish_git, publishes it, and checks that the content
+        matches.
+
+        This has at one point failed because the publish method did
+        not check which version to publish.
         """
 
         # There are four instances of the repo in play here:
@@ -79,32 +80,16 @@ class TestGitStuff:
 
         content = b'Contents'
 
-        # Create working dir
-        repo_path = os.path.join(tmp_path, 'workdir', 'weird')
-        repo = pygit2.init_repository(repo_path)
-        # repo = pygit2.clone_repository(git_upstream.path, repo_path)
-        file = os.path.join(repo_path, 'file')
-        with open(file, 'wb') as f:
-            f.write(content)
-        repo.index.add('file')
-        repo.create_commit(
-            'refs/heads/devel',
-            pygit2.Signature('Sample Author', 'author@xample.com'),
-            pygit2.Signature('Sample Commiter', 'comitter@example.com'),
-            'Initial commit',
-            repo.index.write_tree(),
-            [])
-
-        # Publish to upstream
-        origin = repo.remotes.create('origin', git_upstream.path)
-        origin.push(['refs/heads/devel'])
-        print('origin = ', git_upstream.path)
+        _, git_upstream = populated_git_upstream_factory(
+            'weird',
+            {'file': content},
+            ref='refs/heads/devel')
 
         # Sets up repo in cache
         module = GitPuppetModule(name='gittest2',
                                  version='devel',
                                  git=git_upstream.path,
-                                 config=testconf)
+                                 config=testconf_noforge)
 
         path_base = os.path.join(tmp_path, 'target', 'modules', 'gittest2')
         expected = {
@@ -119,7 +104,7 @@ class TestGitStuff:
         module.publish(path_base)
         assert check_dir(path_base, expected), "Second check"
 
-    def test_git_versions(self, testconf, tmp_path):
+    def test_git_versions(self, testconf_noforge, tmp_path):
         """
         Check that all tags which are semver-tags gets reported.
 
@@ -145,6 +130,7 @@ class TestGitStuff:
                             pygit2.GIT_OBJ_COMMIT,
                             pygit2.Signature('Sample Author', 'author@example.com'),
                             '')
+
         create_tag('v1.0.0', commit)
         create_tag('v1.0.1', commit)   # Multiple tags on one commit
         commit = repo.create_commit(
@@ -162,7 +148,7 @@ class TestGitStuff:
             name='HugoNikanor-gittest',
             version='',   # Sentinel value, module only used to find available versions.
             git=repo_path,
-            config=testconf)
+            config=testconf_noforge)
 
         assert module.versions() == [VersionInfo.parse(x) for x in
                                      ['1.0.0',
@@ -170,35 +156,124 @@ class TestGitStuff:
                                       '2.0.0',
                                       ]]
 
-    def test_git_publish_diverged(self, testconf, tmp_path, git_upstream):
+    def test_hash_version(self, testconf_noforge, tmp_path, populated_git_upstream_factory):
+        """
+        Test specifying a commit hash as version.
+
+        Creates a repo with 2 commits, 2 being a child of 1:
+        * 2
+        * 1
+        Then publishes commit 1 (by hash) and checks that it contains
+        the expected data.
+
+        Currently the commit hash is taken from the commit object, but
+        specifying the committing and authoring dates in `sig` would
+        give a stable hash, allowing us to hard-code it.
+        """
+
+        downstream, upstream = populated_git_upstream_factory(
+            'hashversion',
+            {'file': b'1'})
+
+        commit1 = downstream.head.peel(GIT_OBJ_COMMIT)
+
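+        # Overwrite the file in the working directory (not inside
+        # .git/) and commit it as commit 2.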
+        with open(os.path.join(downstream.workdir, 'file'), 'wb') as f:
+            f.write(b'2')
+        downstream.index.add('file')
+
+        sig = pygit2.Signature('Sample Author', 'author@example.com')
+        downstream.create_commit('HEAD', sig, sig,
+                                 'Second commit',
+                                 downstream.index.write_tree(),
+                                 [commit1.id])
+
+        origin = downstream.remotes['origin']
+        origin.push([f'refs/heads/{DEFAULT_BRANCH}'])
+
+        module = GitPuppetModule(name='Anything',
+                                 version=str(commit1.id),
+                                 git=upstream.path,
+                                 config=testconf_noforge)
+
+        pub_dir = os.path.join(tmp_path, 'target', 'modules', 'anything')
+        module.publish(pub_dir)
+
+        with open(os.path.join(pub_dir, 'file'), 'rb') as f:
+            assert b'1' == f.read()
+
+    def test_git_publish_diverged(self, testconf_noforge, tmp_path, git_upstream):
         """
         Test publish of diverged branches.
 
         Sets up a local repo, which we commit to, then pushes that,
         and publishes it. The local repo then rewrites its history and
         force pushes, followed by us publishing the new data.
+
+        * new master
+        | * old master
+        |/
+        * common ancestor
+        ...
         """
-        # TODO write this test
-        pass
+        publish_path = os.path.join(tmp_path, 'target', 'modules', 'diverged')
+        module = GitPuppetModule(name='diverged',
+                                 version=DEFAULT_BRANCH,
+                                 git=git_upstream.path,
+                                 config=testconf_noforge)
+
+        sig = pygit2.Signature('Sample Author', 'author@example.com')
+
+        repo_path = os.path.join(tmp_path, "workdir", "diverging")
+        repo = pygit2.init_repository(repo_path)
+
+        origin = repo.remotes.create('origin', git_upstream.path)
 
-    # TODO add another test where we use commit hash as version
-    # Problem being that pygit2 doesn't seem to enable setting commit
-    # time, which prevents stable commit hashes.
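+        # Build the original history: a common ancestor ('1') with
+        # one commit ('2') on top of it.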
+        file = os.path.join(repo_path, 'file')
+        with open(file, 'wb') as f:
+            f.write(b'1\n')
+        repo.index.add('file')
+        ancestor = repo.create_commit('HEAD', sig, sig, '1',
+                                      repo.index.write_tree(),
+                                      [])
+
+        with open(file, 'wb') as f:
+            f.write(b'2\n')
+        repo.index.add('file')
+        repo.create_commit('HEAD', sig, sig, '2',
+                           repo.index.write_tree(),
+                           [ancestor])
+
+        # First check
+        origin.push([f'refs/heads/{DEFAULT_BRANCH}'])
+        module.publish(publish_path)
+        with open(os.path.join(publish_path, 'file'), 'rb') as f:
+            assert b'2\n' == f.read()
+        # end first check
+
+        repo.reset(ancestor, GIT_RESET_HARD)
+
+        with open(file, 'wb') as f:
+            f.write(b'3\n')
+        repo.index.add('file')
+        repo.create_commit('HEAD', sig, sig, '3',
+                           repo.index.write_tree(),
+                           [ancestor])
 
-    def test_git_metadata(self, testconf, tmp_path, git_upstream):
+        # Second check
+        # leading '+' allows non-fastforward update (force push)
+        origin.push([f'+refs/heads/{DEFAULT_BRANCH}'])
+        module.publish(publish_path)
+        with open(os.path.join(publish_path, 'file'), 'rb') as f:
+            assert b'3\n' == f.read()
+        # end second check
+
+    def test_git_metadata(self, testconf_noforge, tmp_path, populated_git_upstream_factory):
         """
         Checks that we can retrieve metadata.json from a git repo.
 
         Also tests that our git procedures handles version='1.0.0',
         when the actual tag is 'v1.0.0'
-
         """
-
-        repo_path = os.path.join(tmp_path, 'workdir', 'versiontest')
-
-        # Creates our test repository
-        repo = pygit2.init_repository(repo_path)
-
         owner = 'hugonikanor'
         name = 'mdtest'
         version = 'v1.0.0'
@@ -210,34 +285,24 @@ class TestGitStuff:
                                   source='',
                                   dependencies=[])
 
-        with open(os.path.join(repo_path, 'metadata.json'), 'w') as f:
-            json.dump(metadata, f, cls=Encoder)
-
-        repo.index.add('metadata.json')
-        commit = repo.create_commit(
-            f'refs/heads/{DEFAULT_BRANCH}',
-            pygit2.Signature('Sample Author', 'author@xample.com'),
-            pygit2.Signature('Sample Commiter', 'comitter@example.com'),
-            'Add metadata.json',
-            repo.index.write_tree(),
-            [])
-        repo.create_tag('v1.0.0',
-                        commit,
-                        pygit2.GIT_OBJ_COMMIT,
-                        pygit2.Signature('Sample Author', 'author@example.com'),
-                        '')
+        _, git_upstream = populated_git_upstream_factory(
+            'versiontest',
+            {'metadata.json': json.dumps(metadata, cls=Encoder).encode('ASCII')})
 
-        origin = repo.remotes.create('origin', git_upstream.path)
-        origin.push([f'refs/heads/{DEFAULT_BRANCH}'])
-        origin.push(['refs/tags/v1.0.0'])
-        print(git_upstream.path)
+        # It doesn't matter whether the tag is created downstream and
+        # pushed, or created directly in the upstream. Doing it
+        # upstream takes fewer instructions.
+        git_upstream.create_tag('v1.0.0',
+                                git_upstream.head.peel(GIT_OBJ_COMMIT).id,
+                                pygit2.GIT_OBJ_COMMIT,
+                                pygit2.Signature('Sample Author', 'author@example.com'),
+                                '')
 
         # We are done with our local repo, fetch it again from upstream
 
         module = GitPuppetModule(name=f'{owner}-{name}',
                                  version=version,
                                  git=git_upstream.path,
-                                 config=testconf)
+                                 config=testconf_noforge)
 
         assert module.metadata
         assert isinstance(module.metadata, PuppetMetadata)
@@ -250,29 +315,17 @@ class TestGitStuff:
         fd_json = json.dumps(module.metadata, cls=Encoder)
         assert ld_json == fd_json
 
-    def test_missing_metadata(self, tmp_path, testconf, git_upstream):
-        repo_path = os.path.join(tmp_path, 'workdir', 'missing_metadata')
-        repo = pygit2.init_repository(repo_path)
-        repo.create_commit(
-            f'refs/heads/{DEFAULT_BRANCH}',
-            pygit2.Signature('Sample Author', 'author@xample.com'),
-            pygit2.Signature('Sample Commiter', 'comitter@example.com'),
-            'Initial commit',
-            repo.index.write_tree(),
-            [])
-
-        # origin = repo.create_remote('origin', git_upstream.path)
-        origin = repo.remotes.create('origin', git_upstream.path)
-        origin.push([f'refs/heads/{DEFAULT_BRANCH}'])
+    def test_missing_metadata(self, tmp_path, testconf_noforge, populated_git_upstream_factory):
+        _, git_upstream = populated_git_upstream_factory('missing_metadata', {})
 
         module = GitPuppetModule(name='anything',
                                  version=DEFAULT_BRANCH,
                                  git=git_upstream.path,
-                                 config=testconf)
+                                 config=testconf_noforge)
         assert module.metadata
         assert isinstance(module.metadata, PuppetMetadata)
 
-    def test_empty_repo(self, testconf, git_upstream, caplog):
+    def test_empty_repo(self, testconf_noforge, git_upstream, caplog):
         """Ensure a warning is raised when remote is empty."""
 
         caplog.set_level(logging.WARNING)
@@ -280,7 +333,7 @@ class TestGitStuff:
         module = GitPuppetModule(name='HugoNikanor-empty',
                                  version=DEFAULT_BRANCH,
                                  git=git_upstream.path,
-                                 config=testconf)
+                                 config=testconf_noforge)
 
         # This forces the repo to be cloned, exposing if it's empty.
         assert module.repo
@@ -290,10 +343,140 @@ class TestGitStuff:
         assert caplog.records[0].levelname == 'WARNING'
         assert caplog.records[0].message.startswith('Empty repository cloned')
 
-    def test_existing_local_repo(self):  # TODO
+    def test_existing_local_repo_different(
+            self,
+            tmp_path,
+            populated_git_upstream_factory,
+            testconf_noforge):
+        """
+        Checks the case where the local cache repo (as returned by
+        .repo) already exists, but refers to a DIFFERENT upstream.
+        """
+
+        # --------------------------------------------------
+
+        _, upstream_a = populated_git_upstream_factory('a', {'filename.txt': b'a'})
+        _, upstream_b = populated_git_upstream_factory('b', {'filename.txt': b'b'})
+
+        # --------------------------------------------------
+
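+        # Publish module A, then module B, into the same destination
+        # directory.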
+        dest = os.path.join(tmp_path, 'target', 'testrepo')
+
+        module_a = GitPuppetModule(name='testrepo',
+                                   version=DEFAULT_BRANCH,
+                                   git=upstream_a.path,
+                                   config=testconf_noforge)
+
+        module_a.publish(dest)
+        assert check_dir(dest, {
+            'filename.txt': b'a',
+            '.git': b'IGNORE',
+        })
+
+        module_b = GitPuppetModule(name='testrepo',
+                                   version=DEFAULT_BRANCH,
+                                   git=upstream_b.path,
+                                   config=testconf_noforge)
+
+        module_b.publish(dest)
+        assert check_dir(dest, {
+            'filename.txt': b'b',
+            '.git': b'IGNORE',
+        })
+
+    def test_serialize(self, tmp_path, testconf_noforge, populated_git_upstream_factory):
+        """
+        Check that a GitPuppetModule serializes correctly for trace.
+
+        Each PuppetModule should serialize to the exact version used,
+        allowing us to produce a new puppetfile which exactly
+        recreates a given deployment.
+        """
+
+        work_repo, git_upstream = populated_git_upstream_factory('repo', {})
+
+        module = GitPuppetModule(name='repo',
+                                 version=DEFAULT_BRANCH,
+                                 git=git_upstream.path,
+                                 config=testconf_noforge)
+
+        serialized = module.serialize()
+
+        # Expected keys in the serialized output; may change in the future.
+        assert set(serialized) == set(['name', 'git', 'version'])
+
+        assert serialized.get('name') == 'repo'
+
+        # Output version should be the exact commit published, even
+        # though the input gave a "named" version
+        assert serialized.get('version') == work_repo.head.peel(GIT_OBJ_COMMIT).hex
+
+    @pytest.mark.xfail(reason="What to do when publishing to existing non-git repo is not yet decided")  # noqa: E501
+    def test_to_non_git_module(self, tmp_path, testconf_noforge, populated_git_upstream_factory):
+        """
+        Runs publish into a directory which is already a non-git module.
+        """
+        dest = os.path.join(tmp_path, 'target', 'module')
+        os.makedirs(dest, exist_ok=True)
+
+        with open(os.path.join(dest, 'file'), 'w') as f:
+            print('non-git content', file=f)
+
+        _, upstream = populated_git_upstream_factory('repo', {'file': b'from git'})
+        module = GitPuppetModule(name='module',
+                                 version=DEFAULT_BRANCH,
+                                 git=upstream.path,
+                                 config=testconf_noforge)
+
+        # This crashes (as expected)
+        module.publish(dest)
+
+    def test_to_modified_module(self, tmp_path, testconf_noforge, populated_git_upstream_factory):
+        """
+        Runs publish into a correctly configured repository to which
+        local (uncommitted) changes have been made.
+
+        TODO do we also need a case for committed local changes?
         """
-        Checks the case where the local repo (as returned by .repo()) already exists.
+        _, upstream = populated_git_upstream_factory('repo', {'file': b'from git'})
+
+        module = GitPuppetModule(name='module',
+                                 version=DEFAULT_BRANCH,
+                                 git=upstream.path,
+                                 config=testconf_noforge)
+
+        dest = os.path.join(tmp_path, 'target', 'module')
+        module.publish(dest)
+
+        # Introduce local changes, then republish.
+
+        # Check if the local change is a non-tracked file
+        with open(os.path.join(dest, 'new-file'), 'wb') as f:
+            f.write(b'some content')
+        module.publish(dest)
+        assert check_dir(dest, {
+            'file': b'from git',
+            'new-file': b'some content',
+            '.git': b'IGNORE',
+        })
+
+        # Check if the local change is a tracked file
+        # This works if we run 'git checkout --force', but not
+        # otherwise. Consider which behaviour is preferable, or
+        # whether a warning could be emitted here.
+        with open(os.path.join(dest, 'file'), 'w') as f:
+            print('some other content', file=f)
+        module.publish(dest)
+        assert check_dir(dest, {
+            'file': b'from git',
+            'new-file': b'some content',
+            '.git': b'IGNORE',
+        })
+
+    def test_publish_missing_ref(self):
         """
+        Attempt to publish a version which doesn't exist.
 
-    def test_serialize(self):  # TODO
-        " Depends on stable commits to work "
+        Should fail gracefully.
+        """
+        # TODO