author     aszlig <aszlig@nix.build>    2020-12-28 09:30:45 +0100
committer  aszlig <aszlig@nix.build>    2020-12-30 16:37:54 +0100
commit     2c24ba1d864d98d4c2c3a50501a41b61b43f854a (patch)
tree       3b7346ea52c84a34cc66fb94ed97c25c6a0e4ba5 /pkgs/games/gog/factorio/test_patch.py
parent     4f91df5db28a67fda6eca81ae9be7e580e637f98 (diff)
games: Add Factorio
This has been lying around for a few weeks, but instead of just using a
simple shell script wrapper (which is what I had in the first place), I
decided to patch the binary directly during rC3.

The main reason I went this route is that in the long run we want to
have a generic implementation we can use to patch all sorts of games,
similar to what we already have with monogame-patcher.

So the way the *current* patcher roughly works is by maintaining an area
called the "compost", which serves as an abstraction for allocating
space for the code and data needed by the references/logic that we patch
in.
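
For illustration only, here is a minimal sketch of what such a byte-range
allocator could look like. The names mirror the Compost API exercised by
the test file below (free, alloc, available), but the real class in
patch.py does more (it merges contiguous ranges and provides maybe_alloc,
smallest_chunk, largest_chunk, ...), so treat this purely as a rough
approximation of the idea:

    from typing import List, Tuple


    class CompostSketch:
        """Hand out offsets from donated free byte ranges in the binary."""

        def __init__(self) -> None:
            # Inclusive (start, end) pairs of unused byte ranges.
            self._chunks: List[Tuple[int, int]] = []

        def free(self, start: int, end: int) -> None:
            # Donate the byte range start..end (inclusive) to the compost.
            if start < 0 or end < start:
                raise ValueError(f'invalid range {start}..{end}')
            if any(s <= end and start <= e for s, e in self._chunks):
                raise ValueError('range overlaps an already free chunk')
            self._chunks.append((start, end))

        def alloc(self, size: int) -> int:
            # Reserve `size` bytes and return the offset they start at.
            if size <= 0:
                raise ValueError('size must be positive')
            for i, (s, e) in enumerate(self._chunks):
                if e - s + 1 >= size:
                    if s + size > e:
                        del self._chunks[i]           # chunk fully used up
                    else:
                        self._chunks[i] = (s + size, e)
                    return s
            raise LookupError('no chunk is large enough')

        @property
        def available(self) -> int:
            return sum(e - s + 1 for s, e in self._chunks)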

The latter involves patching XDG_DATA_HOME into the game and changing
the "/usr/share/factorio" path to the corresponding one within the store
path ($out/share/factorio). Fortunately, Factorio already assumes that
everything within /usr/share/factorio (or our path, for that matter) is
read-only, so we don't need to add extra code/conditions specifically to
handle that.
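
To make the path handling a little more concrete: embedding the new path
boils down to the same alloc/seek/write pattern that test_compost_writes
exercises further down. The offsets and the path in this snippet are made
up, the class is the CompostSketch from above rather than the actual
Compost, and the real patcher additionally has to re-point the existing
references at the newly written string:

    import io

    compost = CompostSketch()
    compost.free(0x2000, 0x20ff)              # made-up free area in the binary
    new_path = b'$out/share/factorio\x00'     # placeholder for the real store path

    binary = io.BytesIO(b'\x90' * 0x3000)     # stand-in for the game binary
    offset = compost.alloc(len(new_path))
    binary.seek(offset)
    binary.write(new_path)                    # references to /usr/share/factorio
                                              # would then point at this offset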

Patching both cases is made possible by patching a third location
(get_executable_name), which tries to determine the current executable
path at runtime via several methods (reading /proc/self/exe, running
"ps", ...). In our case this is entirely unnecessary, so it's a perfect
opportunity to replace the function's logic with the hardcoded path and
use the rest of the function as our compost area.
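
In terms of the compost abstraction, that step boils down to something
like the following. The offsets are invented here (in the real patcher
they come from the binary's function/opcode information) and the class
is again the CompostSketch from above, not the actual Compost:

    compost = CompostSketch()

    # Pretend get_executable_name's body occupies these file offsets; after
    # overwriting its prologue with a stub that just returns the hardcoded
    # $out path, the rest of the function is dead code and can be donated.
    FUNC_START, FUNC_END = 0x41000, 0x413ff   # made-up offsets
    STUB_LEN = 0x40                           # made-up length of the stub

    compost.free(FUNC_START + STUB_LEN, FUNC_END)
    print(compost.available)                  # bytes now usable for new code/data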

While patching might sound straightforward, I actually introduced two
little (but hard to debug) bugs, so I'm very grateful to all the folks
(you know who you are) at rC3 who were following along and provided
helpful input.

In the long term, the goal is to rewrite the patcher with more elaborate
type information (e.g. right now the function/opcode information is raw
JSON) and generalise it enough that we can use it to get rid of a few
other wrappers.

Signed-off-by: aszlig <aszlig@nix.build>
Diffstat (limited to 'pkgs/games/gog/factorio/test_patch.py')
-rw-r--r--    pkgs/games/gog/factorio/test_patch.py    210
1 file changed, 210 insertions, 0 deletions
diff --git a/pkgs/games/gog/factorio/test_patch.py b/pkgs/games/gog/factorio/test_patch.py
new file mode 100644
index 00000000..7d5fad82
--- /dev/null
+++ b/pkgs/games/gog/factorio/test_patch.py
@@ -0,0 +1,210 @@
+import pytest  # type: ignore
+
+from io import StringIO
+from itertools import groupby
+from typing import Set, Tuple, Iterable, Callable, Any
+
+from hypothesis import assume, given  # type: ignore
+import hypothesis.strategies as st  # type: ignore
+
+from patch import Compost
+
+MAX_OFFSET = 1000
+ints = st.integers(min_value=0, max_value=MAX_OFFSET)
+
+
+@given(ranges=st.iterables(st.tuples(ints, ints)))
+def test_compost_free(ranges: Iterable[Tuple[int, int]]) -> None:
+    compost = Compost()
+
+    # Use a completely different (however inefficient) implementation here to
+    # check for overlaps so that we can make sure the actual implementation
+    # isn't "accidentally correct".
+    bitset: Set[int] = set()
+    for start, end in ranges:
+        if end < start:
+            with pytest.raises(ValueError):
+                compost.free(start, end)
+        else:
+            must_fail = False
+            new_bitset = set()
+            for bit in range(start, end + 1):
+                if bit in bitset:
+                    must_fail = True
+                new_bitset.add(bit)
+            if must_fail:
+                with pytest.raises(ValueError):
+                    compost.free(start, end)
+            else:
+                bitset |= new_bitset
+                compost.free(start, end)
+
+    # Allocating from the biggest chunks to the smallest ones needs to always
+    # work and will also make sure that contiguous areas are properly merged.
+    grouped = groupby(range(MAX_OFFSET + 1), bitset.__contains__)
+    chunks = [len(list(chunk)) for valid, chunk in grouped if valid]
+    for size in sorted(chunks, reverse=True):
+        compost.alloc(size)
+
+    # The last allocation is just one byte and it must fail, since we exhausted
+    # all the space we have on the compost heap.
+    with pytest.raises(LookupError):
+        compost.alloc(1)
+
+
+@st.composite
+def valid_ranges(
+    draw: Callable[..., Any],
+    min_chunksize: int = 1,
+    max_chunksize: int = 10000,
+    min_ranges: int = 0,
+    max_ranges: int = 100,
+    allow_gaps: bool = True,
+):
+    gap = st.tuples(st.just(False), st.integers(min_value=0))
+    occupied = st.tuples(st.just(True),
+                         st.integers(min_value=min_chunksize,
+                                     max_value=max_chunksize))
+    elems = st.one_of(gap, occupied) if allow_gaps else occupied
+    sizes = draw(st.iterables(elems, min_size=min_ranges, max_size=max_ranges))
+    ranges = []
+    offset = 0
+    for is_occupied, size in sizes:
+        if is_occupied:
+            ranges.append((offset, offset + size))
+            offset += 1
+        offset += size
+    return ranges
+
+
+@given(ranges=valid_ranges())
+def test_compost_sizes(ranges: Iterable[Tuple[int, int]]) -> None:
+    compost = Compost()
+    total = 0
+    smallest = 0
+    largest = 0
+    for start, end in ranges:
+        size = end - start + 1
+        if smallest == 0 or size < smallest:
+            smallest = size
+        if size > largest:
+            largest = size
+        total += size
+        compost.free(start, end)
+
+    assert compost.available == total
+
+    # Values could have been merged, so the sizes will be at *least* the value
+    # we calculated here in the naive way.
+    assert compost.smallest_chunk >= smallest
+    assert compost.largest_chunk >= largest
+
+
+@given(ranges=valid_ranges(allow_gaps=False, min_ranges=1))
+def test_compost_gapless_alloc(ranges: Iterable[Tuple[int, int]]) -> None:
+    compost = Compost()
+    for start, end in ranges:
+        compost.free(start, end)
+
+    assert compost.available > 0
+    compost.alloc(compost.available)
+    assert compost.available == 0
+
+
+@given(ranges=valid_ranges(max_chunksize=100, min_ranges=1, max_ranges=10))
+def test_compost_alloc_nofrag(ranges: Iterable[Tuple[int, int]]) -> None:
+    compost = Compost()
+    for start, end in ranges:
+        compost.free(start, end)
+
+    total = compost.available
+    for n in range(compost.available):
+        compost.alloc(1)
+        total -= 1
+        assert compost.available == total
+
+    with pytest.raises(LookupError):
+        compost.alloc(1)
+
+
+@given(ranges=valid_ranges(min_chunksize=4, max_chunksize=20, min_ranges=1),
+       allocs=st.iterables(st.integers(min_value=1, max_value=21), min_size=1))
+def test_compost_alloc(ranges: Iterable[Tuple[int, int]],
+                       allocs: Iterable[int]) -> None:
+    compost = Compost()
+    for start, end in ranges:
+        compost.free(start, end)
+
+    for alloc_size in allocs:
+        assume(compost.largest_chunk >= alloc_size)
+        size_before = compost.available
+        compost.alloc(alloc_size)
+        assert size_before - alloc_size == compost.available
+
+
+@given(ranges=valid_ranges(min_chunksize=2, max_chunksize=5,
+                           min_ranges=10, max_ranges=15,
+                           allow_gaps=False),
+       use=st.integers(min_value=-1, max_value=21))
+def test_compost_maybe_alloc(ranges: Iterable[Tuple[int, int]],
+                             use: int) -> None:
+    compost = Compost()
+    for start, end in ranges:
+        compost.free(start, end)
+
+    # This is similar to the one in test_compost_free and it serves as a canary
+    # to make sure we don't accidentally overwrite something we don't want.
+    bitset_before = {i for start, end in compost.offsets
+                     for i in range(1000) if start <= i <= end}
+
+    initial_available = compost.available
+    with compost.maybe_alloc(20) as (_, mark_used):
+        assert initial_available - compost.available == 20
+        assume(0 <= use <= 20)
+        mark_used(use)
+
+    # If the bitset before the allocation is a superset, something is clearly
+    # wrong and we're now in the middle of invalid memory.
+    bitset_afterwards = {i for start, end in compost.offsets
+                         for i in range(1000) if start <= i <= end}
+    assert bitset_before.issuperset(bitset_afterwards)
+
+    assert initial_available - use == compost.available
+
+
+@given(start=st.integers(min_value=0, max_value=100),
+       alloc_sizes=st.iterables(st.integers(min_value=1, max_value=20),
+                                max_size=5))
+def test_compost_writes(start: int, alloc_sizes: Iterable[int]) -> None:
+    compost = Compost()
+    compost.free(start, start + 100)
+
+    io = StringIO('.' * 200)
+    alloc_chars = 'abcdefghijkl'
+
+    expected_written = ''
+    for n, asize in enumerate(alloc_sizes):
+        char = alloc_chars[n]
+        offset = compost.alloc(asize)
+        io.seek(offset)
+        expected_written += char * asize
+        io.write(char * asize)
+
+    io.seek(0)
+    assert io.read().startswith('.' * start + expected_written)
+
+
+def test_invalid_compost_calls() -> None:
+    compost = Compost()
+
+    with pytest.raises(ValueError):
+        compost.free(10, 9)
+
+    with pytest.raises(ValueError):
+        compost.free(-10, 0)
+
+    with pytest.raises(ValueError):
+        compost.alloc(0)
+
+    with pytest.raises(ValueError):
+        compost.alloc(-10)