Added build scripts
This commit is contained in:
commit
7853bc5a94
1
.dockerignore
Normal file
1
.dockerignore
Normal file
@ -0,0 +1 @@
|
|||||||
|
**/*
|
161
.gitignore
vendored
Normal file
161
.gitignore
vendored
Normal file
@ -0,0 +1,161 @@
|
|||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
__pycache__/
|
||||||
|
cache/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
share/python-wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.nox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*.cover
|
||||||
|
*.py,cover
|
||||||
|
.hypothesis/
|
||||||
|
.pytest_cache/
|
||||||
|
cover/
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
local_settings.py
|
||||||
|
db.sqlite3
|
||||||
|
db.sqlite3-journal
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
.pybuilder/
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
|
# IPython
|
||||||
|
profile_default/
|
||||||
|
ipython_config.py
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
# For a library or package, you might want to ignore these files since the code is
|
||||||
|
# intended to run in multiple environments; otherwise, check them in:
|
||||||
|
# .python-version
|
||||||
|
|
||||||
|
# pipenv
|
||||||
|
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||||
|
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||||
|
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||||
|
# install all needed dependencies.
|
||||||
|
#Pipfile.lock
|
||||||
|
|
||||||
|
# poetry
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||||
|
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||||
|
# commonly ignored for libraries.
|
||||||
|
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||||
|
#poetry.lock
|
||||||
|
|
||||||
|
# pdm
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||||
|
#pdm.lock
|
||||||
|
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||||
|
# in version control.
|
||||||
|
# https://pdm.fming.dev/#use-with-ide
|
||||||
|
.pdm.toml
|
||||||
|
|
||||||
|
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||||
|
__pypackages__/
|
||||||
|
|
||||||
|
# Celery stuff
|
||||||
|
celerybeat-schedule
|
||||||
|
celerybeat.pid
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
|
# Environments
|
||||||
|
.env
|
||||||
|
.venv
|
||||||
|
env/
|
||||||
|
venv/
|
||||||
|
ENV/
|
||||||
|
env.bak/
|
||||||
|
venv.bak/
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
/site
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
|
dmypy.json
|
||||||
|
|
||||||
|
# Pyre type checker
|
||||||
|
.pyre/
|
||||||
|
|
||||||
|
# pytype static type analyzer
|
||||||
|
.pytype/
|
||||||
|
|
||||||
|
# Cython debug symbols
|
||||||
|
cython_debug/
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||||
|
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||||
|
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||||
|
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||||
|
#.idea/
|
27
Dockerfile
Normal file
27
Dockerfile
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
# Two-stage build: download and run the FTB server installer in a throwaway
# build stage, then copy the resulting /server directory onto a JRE base image.
ARG BASE_IMAGE
ARG BUILD_IMAGE=ubuntu

FROM ${BUILD_IMAGE} AS build
ARG INSTALLER_URL
ARG INSTALLER_CHECKSUM
ARG MODPACK_ID
ARG MODPACK_VERSION
# curl + CA certificates are only needed in this stage to fetch the installer.
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        curl \
        ca-certificates \
    && rm -rf /var/lib/apt/lists/*
WORKDIR /server
# -f: fail on HTTP error status instead of saving the error page (the checksum
#     would only catch that later with a confusing message); -L: follow redirects.
# The checksum line uses the canonical two-space "HASH  FILE" format sha256sum -c expects.
RUN curl -fL -o installer "${INSTALLER_URL}" \
    && echo "${INSTALLER_CHECKSUM}  installer" | sha256sum -c \
    && chmod +x installer \
    && ./installer ${MODPACK_ID} ${MODPACK_VERSION} --auto --verbose \
    && rm installer

FROM ${BASE_IMAGE} AS final
WORKDIR /server
VOLUME [ "/server" ]
COPY --from=build /server ./
# Accept the Minecraft EULA non-interactively so the server starts unattended.
RUN echo "eula=true" > eula.txt
EXPOSE 25565
CMD [ "/bin/bash", "/server/start.sh" ]
|
0
__init__.py
Normal file
0
__init__.py
Normal file
109
build.py
Normal file
109
build.py
Normal file
@ -0,0 +1,109 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
from argparse import ArgumentParser
|
||||||
|
from datetime import datetime
|
||||||
|
from re import search
|
||||||
|
from typing import Iterable
|
||||||
|
import docker
|
||||||
|
from ftbtypes import *
|
||||||
|
from jsongetcache import *
|
||||||
|
import jsongetcache
|
||||||
|
|
||||||
|
# Root URL of the public modpacks.ch (Feed the Beast) API.
default_base_url = "https://api.modpacks.ch"
# Directory for cached API responses (used by get_entity). `Path` presumably
# arrives via the star-import of jsongetcache — TODO confirm.
cache_path = Path("cache")
|
||||||
|
|
||||||
|
|
||||||
|
def get_entity(route: str, base_url: str = default_base_url):
    """Fetch a JSON API entity, backed by the on-disk cache under cache/.

    The cache file name mirrors the route with surrounding slashes stripped.
    """
    trimmed = route.strip('/\\')
    cachefile = cache_path / f"{trimmed}.json"
    return jsongetcache.load_cached(base_url + route, cachefile)
|
||||||
|
|
||||||
|
def get_latest_release(versions: Iterable[ModPackVersion]):
    """Return the most recently updated version whose type is "release".

    Raises ValueError (from max) when no release version exists.
    """
    releases = (v for v in versions if v["type"].lower() == "release")
    return max(releases, key=lambda v: v["updated"])
|
||||||
|
|
||||||
|
|
||||||
|
def get_modpack_route(modpack: ModPackManifest | int):
    """Build the public API route for a modpack, given its id or manifest."""
    if isinstance(modpack, int):
        modpack_id = modpack
    else:
        modpack_id = modpack["id"]
    return f"/public/modpack/{modpack_id}"
|
||||||
|
|
||||||
|
|
||||||
|
def get_modpack_manifest(modpack: ModPackManifest | int) -> ModPackManifest:
    """Fetch the (possibly cached) manifest for a modpack id or manifest."""
    route = get_modpack_route(modpack)
    return get_entity(route)
|
||||||
|
|
||||||
|
|
||||||
|
def get_version_route(modpack: ModPackManifest | int, version: ModPackVersion | int):
    """Build the API route for one specific version of a modpack."""
    if isinstance(version, int):
        version_id = version
    else:
        version_id = version["id"]
    return f"{get_modpack_route(modpack)}/{version_id}"
|
||||||
|
|
||||||
|
|
||||||
|
def get_version_manifest(modpack: ModPackManifest | int, version: ModPackVersion | int) -> ModPackVersionManifest:
    """Fetch the (possibly cached) manifest for one modpack version."""
    route = get_version_route(modpack, version)
    return get_entity(route)
|
||||||
|
|
||||||
|
|
||||||
|
def version_without_build(version: str):
    """Strip the build-metadata suffix from a version, e.g. "17.0.1+12" -> "17.0.1".

    Accepts dotted digit groups followed by '+' and an optional numeric build.

    Raises:
        ValueError: if the string does not match that shape.
    """
    # The dot must be escaped: the original `(.\d+)*` let any character
    # separate the digit groups (so "1a2+3" was accepted). Also dropped a
    # leftover debug print of the input.
    match = search(r"^(\d+(?:\.\d+)*)\+(\d+)?$", version)
    if match is None:
        raise ValueError(f"Invalid version string: {version}")
    return match.group(1)
|
||||||
|
|
||||||
|
|
||||||
|
def get_modpack_slug(modpack: ModPackManifest):
    """Derive a URL/image-friendly slug from the modpack name.

    Lower-cases the name and joins its space-separated words with '-',
    skipping the empty fragments produced by consecutive spaces.
    """
    words = modpack["name"].lower().split(' ')
    return '-'.join(word for word in words if word)
|
||||||
|
|
||||||
|
|
||||||
|
def get_installer(modpack: ModPackManifest | int, version: ModPackVersion | int, architecture: docker.Platforms):
    """Return (installer route, sha256 checksum) for the given platform.

    Only linux/arm64 and linux/amd64 are supported; any other platform raises.
    NOTE(review): the checksums are hard-coded per architecture — presumably
    they pin specific installer binaries; confirm they remain valid when the
    upstream installer changes.
    """
    base = f"{get_version_route(modpack, version)}/server"
    if architecture == "linux/arm64":
        return f"{base}/arm/linux", "83b9ef3f8b0f525da83c10fd8692c12a6a200c5ce79eba9da97ac29a414232fd"
    if architecture == "linux/amd64":
        return f"{base}/linux", "9c5eed5e160e329bb6c393db549db356b9cc6a9711a5461aba35607b4124485a"
    raise Exception(f"Invalid or unsupported architecture {architecture}!")
|
||||||
|
|
||||||
|
|
||||||
|
def parse_arguments():
|
||||||
|
parser = ArgumentParser("build.py", description="FTB Docker image build helper.")
|
||||||
|
parser.add_argument("modpack", type=int, help="Modpack ID")
|
||||||
|
parser.add_argument("--version", "-v", type=int, help="Specific Modpack Version ID, otherwise uses the latest")
|
||||||
|
return parser.parse_args()
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    args = parse_arguments()
    # Resolve the modpack manifest and derive a URL-friendly slug for the image name.
    modpack = get_modpack_manifest(args.modpack)
    slug = get_modpack_slug(modpack)
    print("Slug", slug)
    # No explicit version requested: fall back to the latest "release" build.
    if args.version is None:
        args.version = get_latest_release(modpack["versions"])["id"]
    version = get_version_manifest(modpack, args.version)
    print(f"{modpack['name']} version {version['name']}: updated {datetime.fromtimestamp(version['updated'])}")
    # The pack declares runtime targets; the "java" one picks the JRE base image.
    java_target = next((target for target in version["targets"] if target["type"] == "runtime" and target["name"] == "java"), None)
    if java_target is None:
        # NOTE(review): the ' ,' separator looks transposed — probably meant ', '.
        raise Exception(f"{modpack['name']} version {version['name']} has no java target: {' ,'.join(str(x) for x in version['targets'])}")

    # Map e.g. "17.0.1+12" to the "17.0.1_12" form used by the base-image tags.
    # java_version = version_without_build(java_target["version"])
    java_version = java_target["version"].replace('+', '_')
    print(f"Required java version is version {java_version}")
    # base_image = f"azul/zulu-openjdk:{java_version}-jre"
    base_image = f"eclipse-temurin:{java_version}-jre"

    repo = f"hub.cnml.de/{slug}"
    # Cumulative semver tags, e.g. "1.2.3" -> ["1", "1.2", "1.2.3"].
    semver_version_tags = docker.semver_tags(version["name"])
    platforms: list[docker.Platforms] = ["linux/arm64", "linux/amd64"]
    # Build and push one architecture-suffixed image per platform.
    for platform in platforms:
        installer, checksum = get_installer(modpack, version, platform)
        # Suffix each semver tag with the arch part of the platform ("arm64", "amd64").
        tags = list(f"{ver}-{platform[(platform.rfind('/')+1):]}" for ver in semver_version_tags)
        print(tags)
        print(installer, checksum)
        docker.buildx(repo,
                      tags,
                      [platform],
                      build_args={
                          "INSTALLER_URL": default_base_url + installer,
                          "INSTALLER_CHECKSUM": checksum,
                          "MODPACK_ID": str(modpack["id"]),
                          "MODPACK_VERSION": str(version["id"]),
                          "BASE_IMAGE": base_image
                      },
                      write_command=True)
    # Stitch the per-arch images into one manifest list per semver tag.
    # NOTE(review): docker.create_manifest currently raises unconditionally,
    # so this loop will abort on its first iteration — confirm intent.
    for version_tag in semver_version_tags:
        tags = list(f"{version_tag}-{platform[(platform.rfind('/')+1):]}" for platform in platforms)
        docker.create_manifest(repo, version_tag, tags, write_command=True)
|
52
docker.py
Normal file
52
docker.py
Normal file
@ -0,0 +1,52 @@
|
|||||||
|
from subprocess import run
|
||||||
|
from typing import Literal, get_args
|
||||||
|
|
||||||
|
# Platform strings accepted by `docker buildx build --platform`.
Platforms = Literal["linux/amd64", "linux/amd64/v2", "linux/amd64/v3", "linux/arm64", "linux/riscv64", "linux/ppc64le", "linux/s390x", "linux/386", "linux/mips64le", "linux/mips64", "linux/arm/v7", "linux/arm/v6"]
# Runtime list of the same platforms, derived from the Literal above.
supported_platforms: list[Platforms] = list(get_args(Platforms))

# Values accepted by buildx's --progress flag.
Progress = Literal["auto", "plain", "tty"]
|
||||||
|
|
||||||
|
def semver_tags(version: str):
    """Expand a dotted version into its cumulative prefixes.

    "1.2.3" -> ["1", "1.2", "1.2.3"], so an image can be tagged at every
    specificity level.
    """
    tags = []
    prefix = None
    for part in version.split('.'):
        prefix = part if prefix is None else f"{prefix}.{part}"
        tags.append(prefix)
    return tags
|
||||||
|
|
||||||
|
def pull(repository: str, tag: str):
    """Pull repository:tag via the docker CLI and return the full image label.

    Raises CalledProcessError if the pull fails (check=True).
    """
    image = f"{repository}:{tag}"
    run(["docker", "pull", image], check=True)
    return image
|
||||||
|
|
||||||
|
def push_image(repository: str, tag: str):
    """Push repository:tag via the docker CLI and return the full image label.

    Raises CalledProcessError if the push fails (check=True).
    """
    image = f"{repository}:{tag}"
    run(["docker", "push", image], check=True)
    return image
|
||||||
|
|
||||||
|
def create_manifest(repository: str, manifest_tag: str, image_tags: list[str], push: bool = True, write_command: bool = False):
    """Create (and optionally push) a multi-arch manifest list from per-arch tags.

    NOTE(review): deliberately disabled — the first statement raises
    unconditionally, so everything below it is unreachable dead code kept
    for when manifest support lands.
    """
    raise Exception("Creation of manifests is not yet supported!")
    # --- unreachable below this point ---
    manifest = f"{repository}:{manifest_tag}"
    images = [f"{repository}:{tag}" for tag in image_tags]
    # The per-arch images must exist locally before `docker manifest create`.
    for image_tag in image_tags:
        pull(repository, image_tag)
    command = ["docker", "manifest", "create", manifest, *images]
    if write_command:
        print(" ".join(command))
    run(command, check=True)
    if push:
        return push_image(repository, manifest_tag)
    return manifest
|
||||||
|
|
||||||
|
def buildx(repository: str, tags: list[str], build_platforms: list[Platforms], dockerfile: str = "Dockerfile", build_args: dict[str, str] | None = None, directory: str = ".", push: bool = True, pull: bool = False, progress: Progress = "auto", write_command: bool = False):
    """Run `docker buildx build` for the given repository/tags/platforms.

    Args:
        repository: image repository; each tag becomes "repository:tag".
        tags: tags to apply to the built image.
        build_platforms: passed comma-joined to --platform.
        dockerfile: path passed to --file.
        build_args: mapped to repeated --build-arg KEY=VALUE flags.
        directory: the build context.
        push: add --push (push after building).
        pull: add --pull (always pull base images). NOTE(review): this
            parameter shadows the module-level pull() function inside the body.
        progress: buildx --progress mode.
        write_command: echo the assembled command line before running it.

    Raises CalledProcessError if the build fails (check=True).
    """
    if build_args is None:
        build_args = dict()
    labels = [f"{repository}:{tag}" for tag in tags]
    # filter(None, ...) drops the optional flags when their ternary yields None.
    command = list(filter(None, ["docker", "buildx", "build",
                                 "--platform", ",".join(build_platforms),
                                 *[t for (key, value) in build_args.items() for t in ("--build-arg", f"{key}={value}")],
                                 "--file", dockerfile,
                                 *[t for label in labels for t in ("--tag", label)],
                                 f"--progress={progress}",
                                 "--pull" if pull else None,
                                 "--push" if push else None,
                                 directory]))
    if write_command:
        print(" ".join(command))
    run(command, check=True)
|
106
ftbtypes.py
Normal file
106
ftbtypes.py
Normal file
@ -0,0 +1,106 @@
|
|||||||
|
from typing import Any, Literal, NotRequired, TypedDict
|
||||||
|
|
||||||
|
class IdObject(TypedDict):
    """Base shape for modpacks.ch API objects: everything carries a numeric id."""
    id: int
|
||||||
|
|
||||||
|
class IdTypeObject(IdObject):
    """API object that also carries a type discriminator string."""
    type: str
|
||||||
|
|
||||||
|
class IdTypeUpdatedObject(IdTypeObject):
    """API object with a last-updated timestamp."""
    # Unix timestamp in seconds (build.py feeds it to datetime.fromtimestamp).
    updated: int
|
||||||
|
|
||||||
|
class IdTypeNamedObject(IdTypeObject):
    """API object with a human-readable name."""
    name: str
|
||||||
|
|
||||||
|
class IdTypeUpdatedNamedObject(IdTypeUpdatedObject):
    """API object with both an updated timestamp and a name."""
    name: str
|
||||||
|
|
||||||
|
class ModPackFileInfo(IdTypeUpdatedObject):
    """Metadata for a downloadable file: checksum, size and location."""
    sha1: str
    size: int  # presumably bytes — TODO confirm against the API docs
    url: str
    # Mirror download locations; element shape not visible from this code.
    mirrors: list[Any]
|
||||||
|
|
||||||
|
class ModPackArt(ModPackFileInfo):
    """Artwork asset (e.g. icon/splash) with its pixel dimensions."""
    width: int
    height: int
    compressed: bool
|
||||||
|
|
||||||
|
class Tag(IdTypeNamedObject):
    """A modpack tag; just id/type/name, no extra fields."""
    pass
|
||||||
|
|
||||||
|
class ModPackRating(IdObject):
    """Content/age-rating flags for a modpack."""
    configured: bool
    verified: bool
    age: int  # presumably a minimum recommended age — TODO confirm
    gambling: bool
    frightening: bool
    alcoholdrugs: bool
    nuditysexual: bool
    # NOTE(review): key is misspelled ("sterotypes") but must match the JSON
    # key the API actually returns — do not "fix" without checking upstream.
    sterotypeshate: bool
    language: bool
    violence: bool
|
||||||
|
|
||||||
|
class ModPackLink(IdTypeNamedObject):
    """A named external link attached to a pack (website, issues, ...)."""
    link: str
|
||||||
|
|
||||||
|
class ModPackAuthor(IdTypeUpdatedNamedObject):
    """A modpack author with their website URL."""
    website: str
|
||||||
|
|
||||||
|
class ModPackSpecs(IdObject):
    """Resource requirements; units are not visible here (presumably MB of RAM) — TODO confirm."""
    minimum: int
    recommended: int
|
||||||
|
|
||||||
|
class ModPackTarget(IdTypeUpdatedNamedObject):
    """A required target (e.g. type "runtime", name "java") with a version string."""
    version: str
|
||||||
|
|
||||||
|
class ModPackVersion(IdTypeUpdatedNamedObject):
    """One published version of a modpack as listed in the pack manifest."""
    specs: ModPackSpecs
    targets: list[ModPackTarget]
    private: bool
|
||||||
|
|
||||||
|
class ModManifest(IdTypeUpdatedNamedObject):
    """Common manifest fields shared by mods and modpacks."""
    synopsis: str
    description: str
    art: list[ModPackArt]
    links: list[ModPackLink]
    authors: list[ModPackAuthor]
    versions: list[ModPackVersion]
    installs: int
    status: str
    # Timestamp of the last refresh — presumably Unix seconds like `updated`.
    refreshed: int
|
||||||
|
|
||||||
|
class ModPackManifest(ModManifest):
    """Full modpack manifest as returned by /public/modpack/<id>."""
    plays: int
    tags: list[Tag]
    featured: bool
    notification: str
    rating: ModPackRating
    released: int
    plays_14d: int  # plays in the last 14 days — presumably; confirm
    private: bool
|
||||||
|
|
||||||
|
class ModPackFile(ModPackFileInfo):
    """A file belonging to a specific modpack version."""
    version: str
    path: str
    tags: list[str]
    # Side restrictions: whether the file is only needed on client or server.
    clientonly: bool
    serveronly: bool
    optional: bool
    name: str
|
||||||
|
|
||||||
|
class ApiError(TypedDict):
    """In-band error payload; status is "error" on failure (see jsongetcache.load_cached)."""
    status: Literal["error"] | str
    message: str
    target: NotRequired[str]
|
||||||
|
|
||||||
|
class ModPackVersionManifest(ModPackVersion):
    """Full manifest for one version, as returned by /public/modpack/<id>/<version>."""
    files: list[ModPackFile]
    installs: int
    plays: int
    refreshed: int
    changelog: str
    # Id of the parent modpack — presumably; confirm against the API.
    parent: int
    notification: str
    links: list[ModPackLink]
    status: str
|
54
jsongetcache.py
Normal file
54
jsongetcache.py
Normal file
@ -0,0 +1,54 @@
|
|||||||
|
from datetime import datetime, timedelta
|
||||||
|
from json import dump, load
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Callable, Generic, TypeVar, TypedDict
|
||||||
|
from requests import RequestException, get
|
||||||
|
|
||||||
|
|
||||||
|
# Payload type stored in the cache envelope (see Cached / load_cached).
T = TypeVar('T')
|
||||||
|
|
||||||
|
class Cached(TypedDict, Generic[T]):
    """On-disk cache envelope: the payload plus the timestamp it was fetched.

    NOTE(review): generic TypedDicts need Python 3.11+ — confirm the target
    interpreter version.
    """
    # Unix timestamp (seconds) of when `data` was fetched.
    loaded: int
    # The cached JSON payload.
    data: T
|
||||||
|
|
||||||
|
|
||||||
|
def get_resource(url: str):
    """GET `url` and return the decoded JSON body.

    Raises requests.HTTPError for error status codes; always closes the
    response.
    """
    response = get(url)
    try:
        response.raise_for_status()
        return response.json()
    finally:
        response.close()
|
||||||
|
|
||||||
|
|
||||||
|
def load_cached(url: str, cache_file: Path, loader: Callable[[str], T] = get_resource, max_age: timedelta = timedelta(hours=1), reload: bool = False, encoding: str = "utf-8") -> T:
    """Load a JSON resource from `cache_file` if fresh enough, else via `loader(url)`.

    Freshly fetched data is validated for the API's in-band error envelope
    and written back to the cache.

    Args:
        url: resource URL, passed to `loader`.
        cache_file: JSON cache location; parent directories are created.
        loader: callable fetching and decoding the resource.
        max_age: maximum cache age before refetching.
        reload: force a refetch, ignoring any cached entry.
        encoding: text encoding of the cache file.

    Raises:
        RequestException: when the fetched payload reports status "error".
    """
    if not reload:
        try:
            with cache_file.open("r", encoding=encoding) as f:
                cached = load(f)
            data: T = cached["data"]
            loaded_at = datetime.fromtimestamp(cached["loaded"])
            cache_age = datetime.now() - loaded_at
            if cache_age <= max_age:
                return data
            # Cache entry is stale; fall through and refetch.
        except FileNotFoundError:
            pass  # nothing cached yet
        except (KeyError, ValueError):
            # Corrupt or legacy cache entry (bad JSON — JSONDecodeError is a
            # ValueError — or missing keys): treat as a cache miss instead of
            # crashing, and refetch below.
            pass

    data = loader(url)
    # The API signals failures in-band with {"status": "error", "message": ...}.
    # Guard with isinstance: top-level JSON may also be a list.
    data_dict_tmp: Any = data
    if isinstance(data_dict_tmp, dict):
        data_dict: dict[str, Any] = data_dict_tmp
        status: None | str = data_dict.get("status", None)
        if status is not None and status.lower() == "error":
            raise RequestException(data_dict.get("message", "No error message!"))

    # Save to cache with the fetch timestamp.
    cached_entry: Cached[T] = {"data": data, "loaded": int(datetime.now().timestamp())}
    cache_file.parent.mkdir(parents=True, exist_ok=True)
    with cache_file.open("w", encoding=encoding) as f:
        dump(cached_entry, f)

    return data
|
Loading…
Reference in New Issue
Block a user