SCons: Improve cache purging logic

• Implement caching via SCons arguments, rather than environment variables
Thaddeus Crews 2024-10-13 13:59:33 -05:00
parent 98ddec4b8b
commit 0e4a4e3c4d
7 changed files with 151 additions and 155 deletions
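
In practice, the cache is now configured on the SCons command line, e.g. scons cache_path=.scons_cache/ cache_limit=7, while the old SCONS_CACHE and SCONS_CACHE_LIMIT environment variables keep working behind a deprecation warning. A rough, standalone sketch of that precedence (resolve_cache_path is illustrative only; the real logic lives in methods.prepare_cache(env)):

# Rough sketch of the new configuration precedence: a cache_path=... SCons
# argument wins, while the legacy SCONS_CACHE environment variable still
# works but is reported as deprecated. resolve_cache_path is illustrative
# only, not a function in the codebase.
import os


def resolve_cache_path(arguments):
    cache_path = arguments.get("cache_path", "")
    if not cache_path and os.environ.get("SCONS_CACHE"):
        print("WARNING: SCONS_CACHE is deprecated; use the cache_path argument instead.")
        cache_path = os.environ["SCONS_CACHE"]
    return cache_path


if __name__ == "__main__":
    # Roughly what passing cache_path=.scons_cache/ on the scons command line provides.
    print(resolve_cache_path({"cache_path": ".scons_cache/"}))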

@@ -18,12 +18,12 @@ inputs:
     required: false
   scons-cache:
     description: The SCons cache path.
-    default: ${{ github.workspace }}/.scons-cache/
+    default: ${{ github.workspace }}/.scons_cache/
   scons-cache-limit:
     description: The SCons cache size limit.
     # actions/cache has 10 GiB limit, and GitHub runners have a 14 GiB disk.
     # Limit to 7 GiB to avoid having the extracted cache fill the disk.
-    default: 7168
+    default: 7
 
 runs:
   using: composite
@@ -32,10 +32,8 @@ runs:
       shell: sh
       env:
         SCONSFLAGS: ${{ inputs.sconsflags }}
-        SCONS_CACHE: ${{ inputs.scons-cache }}
-        SCONS_CACHE_LIMIT: ${{ inputs.scons-cache-limit }}
       run: |
-        echo "Building with flags:" platform=${{ inputs.platform }} target=${{ inputs.target }} tests=${{ inputs.tests }} ${{ env.SCONSFLAGS }}
+        echo "Building with flags:" platform=${{ inputs.platform }} target=${{ inputs.target }} tests=${{ inputs.tests }} ${{ env.SCONSFLAGS }} "cache_path=${{ inputs.scons-cache }}" cache_limit=${{ inputs.scons-cache-limit }}
 
         if [ "${{ inputs.target }}" != "editor" ]; then
           # Ensure we don't include editor code in export template builds.
@@ -49,5 +47,5 @@ runs:
           export BUILD_NAME="gh"
         fi
 
-        scons platform=${{ inputs.platform }} target=${{ inputs.target }} tests=${{ inputs.tests }} ${{ env.SCONSFLAGS }}
+        scons platform=${{ inputs.platform }} target=${{ inputs.target }} tests=${{ inputs.tests }} ${{ env.SCONSFLAGS }} "cache_path=${{ inputs.scons-cache }}" cache_limit=${{ inputs.scons-cache-limit }}
         ls -l bin/

@@ -6,7 +6,7 @@ inputs:
     default: ${{ github.job }}
   scons-cache:
     description: The SCons cache path.
-    default: ${{ github.workspace }}/.scons-cache/
+    default: ${{ github.workspace }}/.scons_cache/
 
 runs:
   using: composite
@@ -29,7 +29,6 @@ runs:
        # 4. A partial match for the same base branch only (not ideal, matches any PR with the same base branch).
        restore-keys: |
-          ${{ inputs.cache-name }}-${{ env.GODOT_BASE_BRANCH }}-${{ github.ref }}-${{ github.sha }}
           ${{ inputs.cache-name }}-${{ env.GODOT_BASE_BRANCH }}-${{ github.ref }}
           ${{ inputs.cache-name }}-${{ env.GODOT_BASE_BRANCH }}-refs/heads/${{ env.GODOT_BASE_BRANCH }}
           ${{ inputs.cache-name }}-${{ env.GODOT_BASE_BRANCH }}

@@ -6,7 +6,7 @@ inputs:
     default: ${{ github.job }}
   scons-cache:
     description: The SCons cache path.
-    default: ${{ github.workspace }}/.scons-cache/
+    default: ${{ github.workspace }}/.scons_cache/
 
 runs:
   using: composite

@@ -52,9 +52,6 @@ jobs:
       #   continue-on-error: true
       - name: Build godot-cpp test extension
-        env: # Keep synced with godot-build.
-          SCONS_CACHE: ${{ github.workspace }}/.scons-cache/
-          SCONS_CACHE_LIMIT: 7168
         run: scons --directory=./godot-cpp/test target=template_debug dev_build=yes verbose=yes
 
       # - name: Save Godot build cache

.gitignore

@@ -77,6 +77,9 @@ venv
 __pycache__/
 *.pyc
 
+# Python modules
+.*_cache/
+
 # Documentation
 doc/_build/
 
@@ -164,9 +167,6 @@ gmon.out
 # Kdevelop
 *.kdev4
 
-# Mypy
-.mypy_cache
-
 # Qt Creator
 *.config
 *.creator


@@ -271,6 +271,8 @@ opts.Add(BoolVariable("scu_build", "Use single compilation unit build", False))
 opts.Add("scu_limit", "Max includes per SCU file when using scu_build (determines RAM use)", "0")
 opts.Add(BoolVariable("engine_update_check", "Enable engine update checks in the Project Manager", True))
 opts.Add(BoolVariable("steamapi", "Enable minimal SteamAPI integration for usage time tracking (editor only)", False))
+opts.Add("cache_path", "Path to a directory where SCons cache files will be stored. No value disables the cache.", "")
+opts.Add("cache_limit", "Max size (in GiB) for the SCons cache. 0 means no limit.", "0")
 
 # Thirdparty libraries
 opts.Add(BoolVariable("builtin_brotli", "Use the built-in Brotli library", True))
@@ -321,6 +323,9 @@ opts.Add("rcflags", "Custom flags for Windows resource compiler")
 # in following code (especially platform and custom_modules).
 opts.Update(env)
 
+# Setup caching logic early to catch everything.
+methods.prepare_cache(env)
+
 # Copy custom environment variables if set.
 if env["import_env_vars"]:
     for env_var in str(env["import_env_vars"]).split(","):
@@ -354,7 +359,9 @@ if env["platform"] == "":
 if env["platform"] in compatibility_platform_aliases:
     alias = env["platform"]
     platform = compatibility_platform_aliases[alias]
-    print_warning(f'Platform "{alias}" has been renamed to "{platform}" in Godot 4. Building for platform "{platform}".')
+    print_warning(
+        f'Platform "{alias}" has been renamed to "{platform}" in Godot 4. Building for platform "{platform}".'
+    )
     env["platform"] = platform
 
 # Alias for convenience.
@@ -1039,11 +1046,6 @@ GLSL_BUILDERS = {
 }
 env.Append(BUILDERS=GLSL_BUILDERS)
 
-scons_cache_path = os.environ.get("SCONS_CACHE")
-if scons_cache_path is not None:
-    CacheDir(scons_cache_path)
-    print("Scons cache enabled... (path: '" + scons_cache_path + "')")
-
 if env["compiledb"]:
     env.Tool("compilation_db")
     env.Alias("compiledb", env.CompilationDatabase())
@@ -1126,5 +1128,3 @@ def purge_flaky_files():
 
 
 atexit.register(purge_flaky_files)
-
-methods.clean_cache(env)

@@ -1,5 +1,7 @@
+import atexit
 import contextlib
 import glob
+import math
 import os
 import re
 import subprocess
@@ -8,7 +10,7 @@ from collections import OrderedDict
 from enum import Enum
 from io import StringIO, TextIOWrapper
 from pathlib import Path
-from typing import Generator, List, Optional, Union
+from typing import Generator, List, Optional, Union, cast
 
 # Get the "Godot" folder name ahead of time
 base_folder_path = str(os.path.abspath(Path(__file__).parent)) + "/"
@@ -784,159 +786,159 @@ def using_emcc(env):
 def show_progress(env):
-    if env["ninja"]:
-        # Has its own progress/tracking tool that clashes with ours
-        return
-
-    import sys
-
-    from SCons.Script import AlwaysBuild, Command, Progress
-
-    screen = sys.stdout
-    # Progress reporting is not available in non-TTY environments since it
-    # messes with the output (for example, when writing to a file)
-    show_progress = env["progress"] and sys.stdout.isatty()
-    node_count = 0
-    node_count_max = 0
-    node_count_interval = 1
-    node_count_fname = str(env.Dir("#")) + "/.scons_node_count"
-
-    import math
-
-    class cache_progress:
-        # The default is 1 GB cache
-        def __init__(self, path=None, limit=pow(1024, 3)):
-            self.path = path
-            self.limit = limit
-            if env["verbose"] and path is not None:
-                screen.write(
-                    "Current cache limit is {} (used: {})\n".format(
-                        self.convert_size(limit), self.convert_size(self.get_size(path))
-                    )
-                )
-
-        def __call__(self, node, *args, **kw):
-            nonlocal node_count, node_count_max, node_count_interval, node_count_fname, show_progress
-            if show_progress:
-                # Print the progress percentage
-                node_count += node_count_interval
-                if node_count_max > 0 and node_count <= node_count_max:
-                    screen.write("\r[%3d%%] " % (node_count * 100 / node_count_max))
-                    screen.flush()
-                elif node_count_max > 0 and node_count > node_count_max:
-                    screen.write("\r[100%] ")
-                    screen.flush()
-                else:
-                    screen.write("\r[Initial build] ")
-                    screen.flush()
-
-        def convert_size(self, size_bytes):
-            if size_bytes == 0:
-                return "0 bytes"
-            size_name = ("bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
-            i = int(math.floor(math.log(size_bytes, 1024)))
-            p = math.pow(1024, i)
-            s = round(size_bytes / p, 2)
-            return "%s %s" % (int(s) if i == 0 else s, size_name[i])
-
-        def get_size(self, start_path="."):
-            total_size = 0
-            for dirpath, dirnames, filenames in os.walk(start_path):
-                for f in filenames:
-                    fp = os.path.join(dirpath, f)
-                    total_size += os.path.getsize(fp)
-            return total_size
-
-    def progress_finish(target, source, env):
-        nonlocal node_count, progressor
-        try:
-            with open(node_count_fname, "w", encoding="utf-8", newline="\n") as f:
-                f.write("%d\n" % node_count)
-        except Exception:
-            pass
-
-    try:
-        with open(node_count_fname, "r", encoding="utf-8") as f:
-            node_count_max = int(f.readline())
-    except Exception:
-        pass
-
-    cache_directory = os.environ.get("SCONS_CACHE")
-    # Simple cache pruning, attached to SCons' progress callback. Trim the
-    # cache directory to a size not larger than cache_limit.
-    cache_limit = float(os.getenv("SCONS_CACHE_LIMIT", 1024)) * 1024 * 1024
-    progressor = cache_progress(cache_directory, cache_limit)
-    Progress(progressor, interval=node_count_interval)
-
-    progress_finish_command = Command("progress_finish", [], progress_finish)
-    AlwaysBuild(progress_finish_command)
-
-
-def clean_cache(env):
-    import atexit
-    import time
-
-    class cache_clean:
-        def __init__(self, path=None, limit=pow(1024, 3)):
-            self.path = path
-            self.limit = limit
-
-        def clean(self):
-            self.delete(self.file_list())
-
-        def delete(self, files):
-            if len(files) == 0:
-                return
-            if env["verbose"]:
-                # Utter something
-                print("Purging %d %s from cache..." % (len(files), "files" if len(files) > 1 else "file"))
-            [os.remove(f) for f in files]
-
-        def file_list(self):
-            if self.path is None:
-                # Nothing to do
-                return []
-            # Gather a list of (filename, (size, atime)) within the
-            # cache directory
-            file_stat = [(x, os.stat(x)[6:8]) for x in glob.glob(os.path.join(self.path, "*", "*"))]
-            if file_stat == []:
-                # Nothing to do
-                return []
-            # Weight the cache files by size (assumed to be roughly
-            # proportional to the recompilation time) times an exponential
-            # decay since the ctime, and return a list with the entries
-            # (filename, size, weight).
-            current_time = time.time()
-            file_stat = [(x[0], x[1][0], (current_time - x[1][1])) for x in file_stat]
-            # Sort by the most recently accessed files (most sensible to keep) first
-            file_stat.sort(key=lambda x: x[2])
-            # Search for the first entry where the storage limit is
-            # reached
-            sum, mark = 0, None
-            for i, x in enumerate(file_stat):
-                sum += x[1]
-                if sum > self.limit:
-                    mark = i
-                    break
-            if mark is None:
-                return []
-            else:
-                return [x[0] for x in file_stat[mark:]]
-
-    def cache_finally():
-        nonlocal cleaner
-        try:
-            cleaner.clean()
-        except Exception:
-            pass
-
-    cache_directory = os.environ.get("SCONS_CACHE")
-    # Simple cache pruning, attached to SCons' progress callback. Trim the
-    # cache directory to a size not larger than cache_limit.
-    cache_limit = float(os.getenv("SCONS_CACHE_LIMIT", 1024)) * 1024 * 1024
-    cleaner = cache_clean(cache_directory, cache_limit)
-
-    atexit.register(cache_finally)
+    # Progress reporting is not available in non-TTY environments since it messes with the output
+    # (for example, when writing to a file). Ninja has its own progress/tracking tool that clashes
+    # with ours.
+    if not env["progress"] or not sys.stdout.isatty() or env["ninja"]:
+        return
+
+    NODE_COUNT_FILENAME = f"{base_folder_path}.scons_node_count"
+
+    class ShowProgress:
+        def __init__(self):
+            self.count = 0
+            self.max = 0
+            try:
+                with open(NODE_COUNT_FILENAME, "r", encoding="utf-8") as f:
+                    self.max = int(f.readline())
+            except OSError:
+                pass
+            if self.max == 0:
+                print("NOTE: Performing initial build, progress percentage unavailable!")
+
+        def __call__(self, node, *args, **kw):
+            self.count += 1
+            if self.max != 0:
+                percent = int(min(self.count * 100 / self.max, 100))
+                sys.stdout.write(f"\r[{percent:3d}%] ")
+                sys.stdout.flush()
+
+    from SCons.Script import Progress
+
+    progressor = ShowProgress()
+    Progress(progressor)
+
+    def progress_finish(target, source, env):
+        try:
+            with open(NODE_COUNT_FILENAME, "w", encoding="utf-8", newline="\n") as f:
+                f.write(f"{progressor.count}\n")
+        except OSError:
+            pass
+
+    env.AlwaysBuild(
+        env.CommandNoCache(
+            "progress_finish", [], env.Action(progress_finish, "Building node count database .scons_node_count")
+        )
+    )
+
+
+def convert_size(size_bytes: int) -> str:
+    if size_bytes == 0:
+        return "0 bytes"
+    SIZE_NAMES = ["bytes", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"]
+    index = math.floor(math.log(size_bytes, 1024))
+    power = math.pow(1024, index)
+    size = round(size_bytes / power, 2)
+    return f"{size} {SIZE_NAMES[index]}"
+
+
+def get_size(start_path: str = ".") -> int:
+    total_size = 0
+    for dirpath, _, filenames in os.walk(start_path):
+        for file in filenames:
+            path = os.path.join(dirpath, file)
+            total_size += os.path.getsize(path)
+    return total_size
+
+
+def clean_cache(cache_path: str, cache_limit: int, verbose: bool):
+    files = glob.glob(os.path.join(cache_path, "*", "*"))
+    if not files:
+        return
+
+    # Remove all text files, store binary files in list of (filename, size, atime).
+    purge = []
+    texts = []
+    stats = []
+    for file in files:
+        # Failing a utf-8 decode is the easiest way to determine if a file is binary.
+        try:
+            with open(file, encoding="utf-8") as out:
+                out.read(1024)
+        except UnicodeDecodeError:
+            stats.append((file, *os.stat(file)[6:8]))
+        except OSError:
+            print_error(f'Failed to access cache file "{file}"; skipping.')
+        else:
+            texts.append(file)
+
+    if texts:
+        count = len(texts)
+        for file in texts:
+            try:
+                os.remove(file)
+            except OSError:
+                print_error(f'Failed to remove cache file "{file}"; skipping.')
+                count -= 1
+        if verbose:
+            print("Purging %d text %s from cache..." % (count, "files" if count > 1 else "file"))
+
+    if cache_limit:
+        # Sort by most recent access (most sensible to keep) first. Search for the first entry where
+        # the cache limit is reached.
+        stats.sort(key=lambda x: x[2], reverse=True)
+        sum = 0
+        for index, stat in enumerate(stats):
+            sum += stat[1]
+            if sum > cache_limit:
+                purge.extend([x[0] for x in stats[index:]])
+                break
+
+    if purge:
+        count = len(purge)
+        for file in purge:
+            try:
+                os.remove(file)
+            except OSError:
+                print_error(f'Failed to remove cache file "{file}"; skipping.')
+                count -= 1
+        if verbose:
+            print("Purging %d %s from cache..." % (count, "files" if count > 1 else "file"))
+
+
+def prepare_cache(env) -> None:
+    if env.GetOption("clean"):
+        return
+
+    cache_path = ""
+    if env["cache_path"]:
+        cache_path = cast(str, env["cache_path"])
+    elif os.environ.get("SCONS_CACHE"):
+        print_warning("Environment variable `SCONS_CACHE` is deprecated; use `cache_path` argument instead.")
+        cache_path = cast(str, os.environ.get("SCONS_CACHE"))
+
+    if not cache_path:
+        return
+
+    env.CacheDir(cache_path)
+    print(f'SCons cache enabled... (path: "{cache_path}")')
+
+    if env["cache_limit"]:
+        cache_limit = float(env["cache_limit"])
+    elif os.environ.get("SCONS_CACHE_LIMIT"):
+        print_warning("Environment variable `SCONS_CACHE_LIMIT` is deprecated; use `cache_limit` argument instead.")
+        cache_limit = float(os.getenv("SCONS_CACHE_LIMIT", "0")) / 1024  # Old method used MiB, convert to GiB
+
+    # Convert GiB to bytes; treat negative numbers as 0 (unlimited).
+    cache_limit = max(0, int(cache_limit * 1024 * 1024 * 1024))
+
+    if env["verbose"]:
+        print(
+            "Current cache limit is {} (used: {})".format(
+                convert_size(cache_limit) if cache_limit else "∞",
+                convert_size(get_size(cache_path)),
+            )
+        )
+
+    atexit.register(clean_cache, cache_path, cache_limit, env["verbose"])
 
 
 def dump(env):
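
The size-limit purge at the core of the new clean_cache() keeps the most recently accessed cache entries and removes everything past the configured byte budget. A standalone sketch of that policy with made-up data (files_to_purge and the sample entries are illustrative, not engine code):

# Standalone sketch of the pruning policy in the new clean_cache(): sort cache
# entries by access time (newest first), keep entries until the byte budget is
# exceeded, and purge the rest. files_to_purge and the sample data are made up.
def files_to_purge(stats, cache_limit_bytes):
    # stats: list of (filename, size_in_bytes, access_time) tuples.
    stats = sorted(stats, key=lambda entry: entry[2], reverse=True)
    total = 0
    for index, (_, size, _) in enumerate(stats):
        total += size
        if total > cache_limit_bytes:
            return [name for name, _, _ in stats[index:]]
    return []


if __name__ == "__main__":
    sample = [("a.o", 600, 100.0), ("b.o", 500, 300.0), ("c.o", 400, 200.0)]
    print(files_to_purge(sample, 1000))  # ['a.o']: the oldest entry pushes the total past the 1000-byte budget.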