move scripts to nur subcommands
This commit is contained in:
parent
5c35d417ed
commit
475851ada5
13 changed files with 427 additions and 272 deletions
6
.gitignore
vendored
6
.gitignore
vendored
|
|
@ -1 +1,7 @@
|
|||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
|
|
|
|||
|
|
@ -214,14 +214,14 @@ $ git clone https://github.com/nix-community/NUR
|
|||
}
|
||||
```
|
||||
|
||||
At the moment each URL must point to a git repository. By running `nur/update.py`
|
||||
At the moment each URL must point to a git repository. By running `bin/nur update`
|
||||
the corresponding `repos.json.lock` is updated and the repository is tested. This will
|
||||
also perform an evaluation check, which your repository must pass. Commit the changed
|
||||
`repos.json` but NOT `repos.json.lock`
|
||||
|
||||
```
|
||||
$ git add repos.json
|
||||
$ ./nur/format_manifest.py # ensure repos.json is sorted alphabetically
|
||||
$ ./bin/nur format-manifest # ensure repos.json is sorted alphabetically
|
||||
$ git commit -m "add <your-repo-name> repository"
|
||||
$ git push
|
||||
```
|
||||
|
|
|
|||
12
bin/nur
Executable file
12
bin/nur
Executable file
|
|
@ -0,0 +1,12 @@
|
|||
#!/usr/bin/env nix-shell
#!nix-shell -p python3 -p nix-prefetch-git -p nix -i python3
"""Launcher for the nur management commands.

Puts the repository root on sys.path so the `nur` package is importable
no matter where the script is invoked from, then dispatches to main().
"""
import sys
import os

# Repository root is the parent of bin/ (the directory holding this file).
# The original wrapped this in a single-argument os.path.join, a no-op.
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.realpath(__file__))))

from nur import main

if __name__ == "__main__":
    main()
|
||||
|
|
@ -14,7 +14,7 @@ fi
|
|||
|
||||
export encrypted_025d6e877aa4_key= encrypted_025d6e877aa4_iv=
|
||||
|
||||
./nur/format-manifest.py
|
||||
./bin/nur format-manifest
|
||||
if [ -n "$(git diff --exit-code repos.json)" ]; then
|
||||
echo "repos.json was not formatted before committing repos.json:" >&2
|
||||
git diff --exit-code repos.json
|
||||
|
|
@ -22,7 +22,7 @@ if [ -n "$(git diff --exit-code repos.json)" ]; then
|
|||
exit 1
|
||||
fi
|
||||
|
||||
./nur/update.py
|
||||
./bin/nur update
|
||||
nix-build
|
||||
|
||||
# Pull requests and commits to other branches shouldn't try to deploy, just build to verify
|
||||
|
|
|
|||
36
nur/__init__.py
Normal file
36
nur/__init__.py
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
import argparse
|
||||
import sys
|
||||
from typing import List
|
||||
|
||||
from .format_manifest import format_manifest_command
|
||||
from .index import index_command
|
||||
from .update import update_command
|
||||
|
||||
# from .build import build_channel_command
|
||||
|
||||
|
||||
def parse_arguments(argv: List[str]) -> argparse.Namespace:
    """Parse the command line of the nur management tool.

    *argv* is the full argument vector (argv[0] is the program name).
    Returns a Namespace whose ``func`` attribute is the function that
    implements the chosen subcommand. When no subcommand is given,
    prints usage and exits with status 2 instead of returning a
    Namespace that would crash later with AttributeError on ``func``.
    """
    parser = argparse.ArgumentParser(
        prog=argv[0], description="nur management commands"
    )

    subparsers = parser.add_subparsers(description="subcommands")

    # build_channel = subparsers.add_parser("build-channel")
    # build_channel.set_defaults(func=build_channel_command)

    format_manifest = subparsers.add_parser("format-manifest")
    format_manifest.set_defaults(func=format_manifest_command)

    update = subparsers.add_parser("update")
    update.set_defaults(func=update_command)

    index = subparsers.add_parser("index")
    index.set_defaults(func=index_command)

    args = parser.parse_args(argv[1:])
    # argparse does not require a subcommand by default, so `func` is
    # absent when none was selected; fail with usage info up front.
    if not hasattr(args, "func"):
        parser.print_help(file=sys.stderr)
        sys.exit(2)
    return args


def main() -> None:
    """Entry point: parse arguments and run the selected subcommand."""
    args = parse_arguments(sys.argv)
    args.func(args)
|
||||
7
nur/channel.py
Normal file
7
nur/channel.py
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
from pathlib import Path
|
||||
|
||||
from .path import LOCK_PATH, MANIFEST_PATH
|
||||
|
||||
|
||||
def build_channel_command(_path: str):
    """Placeholder for the future build-channel subcommand; does nothing yet."""
|
||||
2
nur/error.py
Normal file
2
nur/error.py
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
class NurError(Exception):
    """Base exception type for failures raised by the nur tooling."""
|
||||
13
nur/format-manifest.py → nur/format_manifest.py
Executable file → Normal file
13
nur/format-manifest.py → nur/format_manifest.py
Executable file → Normal file
|
|
@ -1,14 +1,11 @@
|
|||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -p python3 -i python3
|
||||
|
||||
import json
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
from argparse import Namespace
|
||||
|
||||
ROOT = Path(__file__).parent.parent
|
||||
from .path import ROOT
|
||||
|
||||
|
||||
def main() -> None:
|
||||
def format_manifest_command(args: Namespace) -> None:
|
||||
path = ROOT.joinpath("repos.json")
|
||||
manifest = json.load(open(path))
|
||||
tmp_path = str(path) + ".tmp"
|
||||
|
|
@ -16,7 +13,3 @@ def main() -> None:
|
|||
json.dump(manifest, tmp, indent=4, sort_keys=True)
|
||||
tmp.write("\n")
|
||||
shutil.move(tmp_path, path)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
49
nur/index.py
Normal file
49
nur/index.py
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
import json
|
||||
import subprocess
|
||||
from argparse import Namespace
|
||||
from pathlib import Path
|
||||
from tempfile import NamedTemporaryFile
|
||||
from typing import Any, Dict
|
||||
|
||||
from .path import ROOT
|
||||
|
||||
|
||||
def index_repo(repo: str, expression_file: str) -> Dict[str, Any]:
    """Build one repository's source and list the packages it provides.

    Returns a mapping from fully qualified attribute path
    (``nur.repos.<repo>.<name>``) to the package metadata reported by
    ``nix-env``; each entry additionally carries its short attribute
    name under the ``_attr`` key.
    """
    # Realize the repository source through the top-level expression.
    build_cmd = [
        "nix-build",
        str(ROOT),
        "-A",
        f"repo-sources.{repo}",
        "--no-out-link",
    ]
    source_store_path = subprocess.check_output(build_cmd).strip()
    entry_point = Path(source_store_path.decode("utf-8")).joinpath(expression_file)

    with NamedTemporaryFile(mode="w") as wrapper:
        # Wrap the repository's entry point so nix-env can evaluate it.
        wrapper.write(f"with import <nixpkgs> {{}}; callPackage {entry_point} {{}}")
        wrapper.flush()
        listing = subprocess.check_output(
            ["nix-env", "-qa", "*", "--json", "-f", str(wrapper.name)]
        ).strip()
        packages: Dict[str, Any] = {}
        for attr, meta in json.loads(listing).items():
            meta["_attr"] = attr
            packages[f"nur.repos.{repo}.{attr}"] = meta
        return packages
|
||||
|
||||
|
||||
def index_command(args: Namespace) -> None:
    """Index every repository from repos.json into packages.json.

    Reads the manifest, queries each repository's packages via
    index_repo, and writes the combined mapping to packages.json.
    """
    manifest_path = ROOT.joinpath("repos.json")
    with open(manifest_path) as f:
        manifest = json.load(f)
    # "repos" maps repository names to their settings. Default to an
    # empty dict — the previous default of [] would crash on .items().
    repos = manifest.get("repos", {})
    pkgs: Dict[str, Any] = {}

    for (repo, data) in repos.items():
        pkgs.update(index_repo(repo, data.get("file", "default.nix")))

    with open(ROOT.joinpath("packages.json"), "w") as f:
        json.dump(pkgs, f, indent=4)
|
||||
118
nur/manifest.py
Normal file
118
nur/manifest.py
Normal file
|
|
@ -0,0 +1,118 @@
|
|||
import json
|
||||
from enum import Enum, auto
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional, Union
|
||||
from urllib.parse import ParseResult, urlparse
|
||||
|
||||
Url = ParseResult
|
||||
|
||||
|
||||
class LockedVersion:
    """A pinned repository revision, as stored in repos.json.lock."""

    def __init__(
        self, url: Url, rev: str, sha256: str, submodules: bool = False
    ) -> None:
        self.url = url  # parsed repository URL
        self.rev = rev  # pinned git revision
        self.sha256 = sha256  # hash of the fetched source
        self.submodules = submodules  # whether submodules were fetched

    def as_json(self) -> Dict[str, Union[bool, str]]:
        """Serialize this lock entry to its lock-file dictionary form."""
        serialized: Dict[str, Union[bool, str]] = dict(
            url=self.url.geturl(),
            rev=self.rev,
            sha256=self.sha256,
        )
        # The submodules flag is only written when it is set.
        if self.submodules:
            serialized["submodules"] = self.submodules
        return serialized
|
||||
|
||||
|
||||
class RepoType(Enum):
    """How a repository's source should be fetched."""

    GITHUB = auto()
    GITLAB = auto()
    GIT = auto()

    @staticmethod
    def from_repo(repo: "Repo", type_: str) -> "RepoType":
        """Classify *repo*: submodules force plain git, otherwise the
        hosting service decides."""
        # Only the generic git fetcher supports submodules.
        if repo.submodules:
            return RepoType.GIT
        host = repo.url.hostname
        if host == "github.com":
            return RepoType.GITHUB
        if host == "gitlab.com" or type_ == "gitlab":
            return RepoType.GITLAB
        return RepoType.GIT
|
||||
|
||||
|
||||
class Repo:
    """A repository entry from repos.json, joined with its lock data."""

    def __init__(
        self,
        name: str,
        url: Url,
        submodules: bool,
        type_: str,
        file_: Optional[str],
        locked_version: Optional[LockedVersion],
    ) -> None:
        self.name = name
        self.url = url
        self.submodules = submodules
        # Nix entry point within the repository.
        if file_ is None:
            self.file = "default.nix"
        else:
            self.file = file_
        self.locked_version = None

        # Reuse the locked version only when it still matches the
        # manifest: both the URL and the submodule setting must be
        # unchanged. The previous check compared a ParseResult against
        # a string (always unequal) and used `!=`, inverting the intent.
        if (
            locked_version is not None
            and locked_version.url == url
            and locked_version.submodules == submodules
        ):
            self.locked_version = locked_version

        self.type = RepoType.from_repo(self, type_)
|
||||
|
||||
|
||||
class Manifest:
    """The parsed repos.json: just the list of configured repositories."""

    def __init__(self, repos: List[Repo]) -> None:
        self.repos = repos
|
||||
|
||||
|
||||
def _load_locked_versions(path: Path) -> Dict[str, LockedVersion]:
    """Parse the lock file at *path* into LockedVersion objects by name."""
    with open(path) as handle:
        lock_data = json.load(handle)

    return {
        name: LockedVersion(urlparse(entry["url"]), entry["rev"], entry["sha256"])
        for name, entry in lock_data["repos"].items()
    }
|
||||
|
||||
|
||||
def load_locked_versions(path: Path) -> Dict[str, LockedVersion]:
    """Like _load_locked_versions, but a missing lock file yields {}."""
    if not path.exists():
        return {}
    return _load_locked_versions(path)
|
||||
|
||||
|
||||
def load_manifest(manifest_path: Union[str, Path], lock_path: Path) -> Manifest:
    """Read repos.json (plus its lock file) into a Manifest.

    Each repository entry is combined with its previously locked
    version, when one exists under the same name.
    """
    locked_versions = load_locked_versions(lock_path)

    with open(manifest_path) as f:
        data = json.load(f)

    repos = []
    for name, repo in data["repos"].items():
        url = urlparse(repo["url"])
        # Per-repository settings with their documented defaults.
        submodules = repo.get("submodules", False)
        file_ = repo.get("file", "default.nix")
        type_ = repo.get("type", None)
        locked_version = locked_versions.get(name)
        repos.append(Repo(name, url, submodules, type_, file_, locked_version))

    return Manifest(repos)
|
||||
20
nur/path.py
Normal file
20
nur/path.py
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
# Repository root: the parent of the nur/ package directory.
ROOT = Path(__file__).parent.parent.resolve()
LOCK_PATH = ROOT.joinpath("repos.json.lock")  # pinned revisions
MANIFEST_PATH = ROOT.joinpath("repos.json")  # user-edited manifest
EVALREPO_PATH = ROOT.joinpath("lib/evalRepo.nix")  # evaluation harness

# Cached result of nixpkgs_path(); resolved lazily on first call.
_NIXPKGS_PATH = None
|
||||
|
||||
|
||||
def nixpkgs_path() -> str:
    """Locate the <nixpkgs> search-path entry, caching the result."""
    global _NIXPKGS_PATH
    if _NIXPKGS_PATH is None:
        # Ask nix where <nixpkgs> resolves to and normalize the path.
        cmd = ["nix-instantiate", "--find-file", "nixpkgs"]
        out = subprocess.check_output(cmd).decode("utf-8").strip()
        _NIXPKGS_PATH = str(Path(out).resolve())
    return _NIXPKGS_PATH
|
||||
129
nur/prefetch.py
Normal file
129
nur/prefetch.py
Normal file
|
|
@ -0,0 +1,129 @@
|
|||
import json
|
||||
import re
|
||||
import subprocess
|
||||
import urllib.error
|
||||
import urllib.request
|
||||
import xml.etree.ElementTree as ET
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional, Tuple
|
||||
from urllib.parse import urljoin, urlparse
|
||||
|
||||
from .error import NurError
|
||||
from .manifest import LockedVersion, Repo, RepoType
|
||||
|
||||
|
||||
def fetch_commit_from_feed(url: str) -> str:
    """Return the newest commit id from an Atom commit feed.

    The commit id is the last path component of the first entry's link.
    Raises NurError when the feed has no entries or the feed URL
    returns HTTP 404.
    """
    try:
        # urlopen itself raises HTTPError for 404 responses, so it must
        # live inside the try block — outside it the not-found handling
        # below was dead code.
        req = urllib.request.urlopen(url)
        xml = req.read()
        root = ET.fromstring(xml)
        ns = "{http://www.w3.org/2005/Atom}"
        xpath = f"./{ns}entry/{ns}link"
        commit_link = root.find(xpath)
        if commit_link is None:
            raise NurError(f"No commits found in repository feed {url}")
        return Path(urlparse(commit_link.attrib["href"]).path).parts[-1]
    except urllib.error.HTTPError as e:
        if e.code == 404:
            raise NurError(f"Repository feed {url} not found")
        raise
|
||||
|
||||
|
||||
def nix_prefetch_zip(url: str) -> Tuple[str, Path]:
    """Prefetch *url* as an unpacked archive; return (sha256, store path)."""
    cmd = ["nix-prefetch-url", "--name", "source", "--unpack", "--print-path", url]
    output = subprocess.check_output(cmd).decode().strip()
    # nix-prefetch-url prints the hash on line one, the path on line two.
    digest, store_path = output.split("\n")
    return digest, Path(store_path)
|
||||
|
||||
|
||||
class GithubRepo:
    """Fetch helper for repositories hosted on github.com."""

    def __init__(self, owner: str, name: str) -> None:
        self.owner = owner
        self.name = name

    def url(self, path: str) -> str:
        """Resolve *path* relative to the repository's base URL."""
        base = f"https://github.com/{self.owner}/{self.name}/"
        return urljoin(base, path)

    def latest_commit(self) -> str:
        """Commit id of the newest commit on master, via the Atom feed."""
        return fetch_commit_from_feed(self.url("commits/master.atom"))

    def prefetch(self, ref: str) -> Tuple[str, Path]:
        """Download the release tarball for *ref*; return (sha256, path)."""
        return nix_prefetch_zip(self.url(f"archive/{ref}.tar.gz"))
|
||||
|
||||
|
||||
class GitlabRepo:
    """Fetch helper for GitLab-hosted repositories (any domain)."""

    def __init__(self, domain: str, owner: str, name: str) -> None:
        self.domain = domain
        self.owner = owner
        self.name = name

    def latest_commit(self) -> str:
        """Commit id of the newest commit on master, via the Atom feed."""
        feed_url = (
            f"https://{self.domain}/{self.owner}/{self.name}/commits/master?format=atom"
        )
        return fetch_commit_from_feed(feed_url)

    def prefetch(self, ref: str) -> Tuple[str, Path]:
        """Download the archive for *ref* through the GitLab API."""
        archive_url = f"https://{self.domain}/api/v4/projects/{self.owner}%2F{self.name}/repository/archive.tar.gz?sha={ref}"
        return nix_prefetch_zip(archive_url)
|
||||
|
||||
|
||||
def prefetch_git(repo: Repo) -> Tuple[LockedVersion, Path]:
    """Prefetch an arbitrary git repository with nix-prefetch-git.

    Returns the locked version (url/rev/sha256/submodules) and the
    local store path of the fetched source.
    """
    cmd = ["nix-prefetch-git"]
    if repo.submodules:
        cmd += ["--fetch-submodules"]
    cmd += [repo.url.geturl()]
    result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if result.returncode != 0:
        raise NurError(
            f"Failed to prefetch git repository {repo.url.geturl()}: {result.stderr}"
        )

    # stdout carries the JSON metadata (rev, sha256, ...).
    metadata = json.loads(result.stdout)
    lines = result.stderr.decode("utf-8").split("\n")
    # NOTE(review): relies on nix-prefetch-git printing "path is <p>" on
    # the fifth-to-last stderr line — fragile against output-format
    # changes in the tool; confirm when bumping nix-prefetch-git.
    repo_path = re.search("path is (.+)", lines[-5])
    assert repo_path is not None
    path = Path(repo_path.group(1))
    rev = metadata["rev"]
    sha256 = metadata["sha256"]
    return LockedVersion(repo.url, rev, sha256, repo.submodules), path
|
||||
|
||||
|
||||
def prefetch_github(repo: Repo) -> Tuple[LockedVersion, Optional[Path]]:
    """Prefetch a github.com repository.

    Returns the (possibly reused) locked version, plus the fetched
    source path — or None for the path when the existing lock already
    points at the tip commit.
    """
    segments = Path(repo.url.path)
    gh_repo = GithubRepo(segments.parts[1], segments.parts[2])
    commit = gh_repo.latest_commit()

    # Nothing to download when the lock is already up to date.
    locked = repo.locked_version
    if locked is not None and locked.rev == commit:
        return locked, None

    sha256, path = gh_repo.prefetch(commit)
    return LockedVersion(repo.url, commit, sha256), path
|
||||
|
||||
|
||||
def prefetch_gitlab(repo: Repo) -> Tuple[LockedVersion, Optional[Path]]:
    """Prefetch a GitLab-hosted repository.

    Returns the (possibly reused) locked version, plus the fetched
    source path — or None for the path when the existing lock already
    points at the tip commit.
    """
    segments = Path(repo.url.path)
    gl_repo = GitlabRepo(repo.url.hostname, segments.parts[-2], segments.parts[-1])
    commit = gl_repo.latest_commit()

    # Nothing to download when the lock is already up to date.
    locked = repo.locked_version
    if locked is not None and locked.rev == commit:
        return locked, None

    sha256, path = gl_repo.prefetch(commit)
    return LockedVersion(repo.url, commit, sha256), path
|
||||
|
||||
|
||||
def prefetch(repo: Repo) -> Tuple[Repo, LockedVersion, Optional[Path]]:
    """Prefetch *repo* with the fetcher matching its hosting type.

    Returns the repository, its fresh locked version, and the source
    path (None when the existing lock was reused unchanged).
    """
    dedicated_fetchers = {
        RepoType.GITHUB: prefetch_github,
        RepoType.GITLAB: prefetch_gitlab,
    }
    fetcher = dedicated_fetchers.get(repo.type, prefetch_git)
    locked_version, path = fetcher(repo)
    return repo, locked_version, path
|
||||
299
nur/update.py
Executable file → Normal file
299
nur/update.py
Executable file → Normal file
|
|
@ -1,231 +1,38 @@
|
|||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -p python3 -p nix-prefetch-git -p nix -i python3
|
||||
|
||||
import json
|
||||
import shutil
|
||||
import re
|
||||
import sys
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import List, Optional, Tuple, Dict, Any, Tuple
|
||||
import xml.etree.ElementTree as ET
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
#from dataclasses import dataclass, field, InitVar
|
||||
from enum import Enum, auto
|
||||
from urllib.parse import urlparse, urljoin, ParseResult
|
||||
import logging
|
||||
from argparse import Namespace
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
ROOT = Path(__file__).parent.parent.resolve();
|
||||
LOCK_PATH = ROOT.joinpath("repos.json.lock")
|
||||
MANIFEST_PATH = ROOT.joinpath("repos.json")
|
||||
EVALREPO_PATH = ROOT.joinpath("lib/evalRepo.nix")
|
||||
|
||||
Url = ParseResult
|
||||
from .error import NurError
|
||||
from .manifest import Repo, load_manifest
|
||||
from .path import EVALREPO_PATH, LOCK_PATH, MANIFEST_PATH, nixpkgs_path
|
||||
from .prefetch import prefetch
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class NurError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def fetch_commit_from_feed(url: str) -> str:
|
||||
req = urllib.request.urlopen(url)
|
||||
try:
|
||||
xml = req.read()
|
||||
root = ET.fromstring(xml)
|
||||
ns = "{http://www.w3.org/2005/Atom}"
|
||||
xpath = f"./{ns}entry/{ns}link"
|
||||
commit_link = root.find(xpath)
|
||||
if commit_link is None:
|
||||
raise NurError(f"No commits found in repository feed {url}")
|
||||
return Path(urlparse(commit_link.attrib["href"]).path).parts[-1]
|
||||
except urllib.error.HTTPError as e:
|
||||
if e.code == 404:
|
||||
raise NurError(f"Repository feed {url} not found")
|
||||
raise
|
||||
|
||||
|
||||
def nix_prefetch_zip(url: str) -> Tuple[str, Path]:
|
||||
data = subprocess.check_output(
|
||||
["nix-prefetch-url", "--name", "source", "--unpack", "--print-path", url])
|
||||
sha256, path = data.decode().strip().split("\n")
|
||||
return sha256, Path(path)
|
||||
|
||||
|
||||
#@dataclass
|
||||
class GithubRepo():
|
||||
def __init__(self, owner: str, name: str) -> None:
|
||||
self.owner = owner
|
||||
self.name = name
|
||||
|
||||
#owner: str
|
||||
#name: str
|
||||
|
||||
def url(self, path: str) -> str:
|
||||
return urljoin(f"https://github.com/{self.owner}/{self.name}/", path)
|
||||
|
||||
def latest_commit(self) -> str:
|
||||
return fetch_commit_from_feed(self.url("commits/master.atom"))
|
||||
|
||||
def prefetch(self, ref: str) -> Tuple[str, Path]:
|
||||
return nix_prefetch_zip(self.url(f"archive/{ref}.tar.gz"))
|
||||
|
||||
|
||||
class GitlabRepo():
|
||||
def __init__(self, domain: str, owner: str, name: str) -> None:
|
||||
self.domain = domain
|
||||
self.owner = owner
|
||||
self.name = name
|
||||
|
||||
def latest_commit(self) -> str:
|
||||
url = f"https://{self.domain}/{self.owner}/{self.name}/commits/master?format=atom"
|
||||
return fetch_commit_from_feed(url)
|
||||
|
||||
def prefetch(self, ref: str) -> Tuple[str, Path]:
|
||||
url = f"https://{self.domain}/api/v4/projects/{self.owner}%2F{self.name}/repository/archive.tar.gz?sha={ref}"
|
||||
return nix_prefetch_zip(url)
|
||||
|
||||
|
||||
class RepoType(Enum):
|
||||
GITHUB = auto()
|
||||
GITLAB = auto()
|
||||
GIT = auto()
|
||||
|
||||
@staticmethod
|
||||
def from_spec(spec: 'RepoSpec') -> 'RepoType':
|
||||
if spec.url.hostname == "github.com" and not spec.submodules:
|
||||
return RepoType.GITHUB
|
||||
if (spec.url.hostname == "gitlab.com" or spec.type == "gitlab") \
|
||||
and not spec.submodules:
|
||||
return RepoType.GITLAB
|
||||
else:
|
||||
return RepoType.GIT
|
||||
|
||||
|
||||
#@dataclass
|
||||
class Repo():
|
||||
def __init__(self, spec: 'RepoSpec', rev: str, sha256: str) -> None:
|
||||
self.__post_init__(spec)
|
||||
self.rev = rev
|
||||
self.sha256 = sha256
|
||||
|
||||
#spec: InitVar['RepoSpec']
|
||||
#rev: str
|
||||
#sha256: str
|
||||
|
||||
#name: str = field(init=False)
|
||||
#url: Url = field(init=False)
|
||||
##type: RepoType = field(init=False)
|
||||
#submodules: bool = field(init=False)
|
||||
|
||||
def __post_init__(self, spec: 'RepoSpec'):
|
||||
self.name = spec.name
|
||||
self.url = spec.url
|
||||
self.submodules = spec.submodules
|
||||
self.type = RepoType.from_spec(spec)
|
||||
|
||||
|
||||
#@dataclass
|
||||
class RepoSpec():
|
||||
def __init__(self, name: str, url: Url, nix_file: str, submodules: bool,
|
||||
type_: str) -> None:
|
||||
self.name = name
|
||||
self.url = url
|
||||
self.nix_file = nix_file
|
||||
self.submodules = submodules
|
||||
self.type = type_
|
||||
|
||||
#name: str
|
||||
#url: Url
|
||||
#nix_file: str
|
||||
#submodules: bool
|
||||
|
||||
|
||||
def prefetch_git(spec: RepoSpec) -> Tuple[str, str, Path]:
|
||||
url = spec.url.geturl()
|
||||
cmd = ["nix-prefetch-git"]
|
||||
if spec.submodules:
|
||||
cmd += ["--fetch-submodules"]
|
||||
cmd += [url]
|
||||
result = subprocess.run(
|
||||
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
if result.returncode != 0:
|
||||
raise NurError(
|
||||
f"Failed to prefetch git repository {url}: {result.stderr}")
|
||||
|
||||
metadata = json.loads(result.stdout)
|
||||
lines = result.stderr.decode("utf-8").split("\n")
|
||||
repo_path = re.search("path is (.+)", lines[-5])
|
||||
assert repo_path is not None
|
||||
path = Path(repo_path.group(1))
|
||||
return metadata["rev"], metadata["sha256"], path
|
||||
|
||||
|
||||
def prefetch_github(spec: RepoSpec, locked_repo: Optional[Repo]
|
||||
) -> Tuple[str, str, Optional[Path]]:
|
||||
github_path = Path(spec.url.path)
|
||||
repo = GithubRepo(github_path.parts[1], github_path.parts[2])
|
||||
commit = repo.latest_commit()
|
||||
if locked_repo is not None:
|
||||
if locked_repo.rev == commit and \
|
||||
locked_repo.submodules == spec.submodules:
|
||||
return locked_repo.rev, locked_repo.sha256, None
|
||||
sha256, path = repo.prefetch(commit)
|
||||
return commit, sha256, path
|
||||
|
||||
|
||||
def prefetch_gitlab(spec: RepoSpec, locked_repo: Optional[Repo]
|
||||
) -> Tuple[str, str, Optional[Path]]:
|
||||
gitlab_path = Path(spec.url.path)
|
||||
repo = GitlabRepo(spec.url.hostname, gitlab_path.parts[-2],
|
||||
gitlab_path.parts[-1])
|
||||
commit = repo.latest_commit()
|
||||
if locked_repo is not None:
|
||||
if locked_repo.rev == commit and \
|
||||
locked_repo.submodules == spec.submodules:
|
||||
return locked_repo.rev, locked_repo.sha256, None
|
||||
sha256, path = repo.prefetch(commit)
|
||||
return commit, sha256, path
|
||||
|
||||
|
||||
def prefetch(spec: RepoSpec,
|
||||
locked_repo: Optional[Repo]) -> Tuple[Repo, Optional[Path]]:
|
||||
|
||||
repo_type = RepoType.from_spec(spec)
|
||||
if repo_type == RepoType.GITHUB:
|
||||
commit, sha256, path = prefetch_github(spec, locked_repo)
|
||||
elif repo_type == RepoType.GITLAB:
|
||||
commit, sha256, path = prefetch_gitlab(spec, locked_repo)
|
||||
else:
|
||||
commit, sha256, path = prefetch_git(spec)
|
||||
|
||||
return Repo(spec, commit, sha256), path
|
||||
|
||||
|
||||
def nixpkgs_path() -> str:
|
||||
cmd = ["nix-instantiate", "--find-file", "nixpkgs"]
|
||||
path = subprocess.check_output(cmd).decode("utf-8").strip()
|
||||
return str(Path(path).resolve())
|
||||
|
||||
|
||||
def eval_repo(spec: RepoSpec, repo_path: Path) -> None:
|
||||
def eval_repo(repo: Repo, repo_path: Path) -> None:
|
||||
with tempfile.TemporaryDirectory() as d:
|
||||
eval_path = Path(d).joinpath("default.nix")
|
||||
with open(eval_path, "w") as f:
|
||||
f.write(f"""
|
||||
f.write(
|
||||
f"""
|
||||
with import <nixpkgs> {{}};
|
||||
import {EVALREPO_PATH} {{
|
||||
name = "{spec.name}";
|
||||
url = "{spec.url}";
|
||||
src = {repo_path.joinpath(spec.nix_file)};
|
||||
name = "{repo.name}";
|
||||
url = "{repo.url}";
|
||||
src = {repo_path.joinpath(repo.file)};
|
||||
inherit pkgs lib;
|
||||
}}
|
||||
""")
|
||||
"""
|
||||
)
|
||||
|
||||
# fmt: off
|
||||
cmd = [
|
||||
"nix-env",
|
||||
"-f", str(eval_path),
|
||||
|
|
@ -233,82 +40,58 @@ import {EVALREPO_PATH} {{
|
|||
"--meta",
|
||||
"--xml",
|
||||
"--option", "restrict-eval", "true",
|
||||
"--option", "allow-import-from-derivation", "true",
|
||||
"--drv-path",
|
||||
"--show-trace",
|
||||
"-I", f"nixpkgs={nixpkgs_path()}",
|
||||
"-I", str(repo_path),
|
||||
"-I", str(eval_path),
|
||||
"-I", str(EVALREPO_PATH),
|
||||
] # yapf: disable
|
||||
]
|
||||
# fmt: on
|
||||
|
||||
logger.info(f"Evaluate repository {spec.name}")
|
||||
logger.info(f"Evaluate repository {repo.name}")
|
||||
proc = subprocess.Popen(
|
||||
cmd, env=dict(PATH=os.environ["PATH"]), stdout=subprocess.PIPE)
|
||||
cmd, env=dict(PATH=os.environ["PATH"]), stdout=subprocess.PIPE
|
||||
)
|
||||
res = proc.wait()
|
||||
if res != 0:
|
||||
raise NurError(
|
||||
f"{spec.name} does not evaluate:\n$ {' '.join(cmd)}")
|
||||
raise NurError(f"{repo.name} does not evaluate:\n$ {' '.join(cmd)}")
|
||||
|
||||
|
||||
def update(spec: RepoSpec, locked_repo: Optional[Repo]) -> Repo:
|
||||
repo, repo_path = prefetch(spec, locked_repo)
|
||||
def update(repo: Repo) -> Repo:
|
||||
repo, locked_version, repo_path = prefetch(repo)
|
||||
|
||||
if repo_path:
|
||||
eval_repo(spec, repo_path)
|
||||
eval_repo(repo, repo_path)
|
||||
|
||||
repo.locked_version = locked_version
|
||||
return repo
|
||||
|
||||
|
||||
def update_lock_file(repos: List[Repo]):
|
||||
locked_repos = {}
|
||||
for repo in repos:
|
||||
locked_repo: Dict[str, Any] = dict(
|
||||
rev=repo.rev, sha256=repo.sha256, url=repo.url.geturl())
|
||||
if repo.submodules:
|
||||
locked_repo["submodules"] = True
|
||||
locked_repos[repo.name] = locked_repo
|
||||
if repo.locked_version:
|
||||
locked_repos[repo.name] = repo.locked_version.as_json()
|
||||
|
||||
tmp_file = str(LOCK_PATH) + "-new"
|
||||
with open(tmp_file, "w") as lock_file:
|
||||
json.dump(
|
||||
dict(repos=locked_repos), lock_file, indent=4, sort_keys=True)
|
||||
json.dump(dict(repos=locked_repos), lock_file, indent=4, sort_keys=True)
|
||||
|
||||
shutil.move(tmp_file, LOCK_PATH)
|
||||
|
||||
|
||||
def main() -> None:
|
||||
if LOCK_PATH.exists():
|
||||
with open(LOCK_PATH) as f:
|
||||
lock_manifest = json.load(f)
|
||||
else:
|
||||
lock_manifest = dict(repos={})
|
||||
|
||||
with open(MANIFEST_PATH) as f:
|
||||
manifest = json.load(f)
|
||||
|
||||
repos = []
|
||||
|
||||
for name, repo in manifest["repos"].items():
|
||||
url = urlparse(repo["url"])
|
||||
repo_json = lock_manifest["repos"].get(name, None)
|
||||
spec = RepoSpec(name, url, repo.get("file", "default.nix"),
|
||||
repo.get("submodules", False), repo.get("type", None))
|
||||
if repo_json and repo_json["url"] != url.geturl():
|
||||
repo_json = None
|
||||
locked_repo = None
|
||||
if repo_json is not None:
|
||||
locked_repo = Repo(spec, repo_json["rev"], repo_json["sha256"])
|
||||
def update_command(args: Namespace) -> None:
|
||||
manifest = load_manifest(MANIFEST_PATH, LOCK_PATH)
|
||||
|
||||
for repo in manifest.repos:
|
||||
try:
|
||||
repos.append(update(spec, locked_repo))
|
||||
except Exception as e:
|
||||
if locked_repo is None:
|
||||
update(repo)
|
||||
except Exception:
|
||||
if repo.locked_version is None:
|
||||
# likely a repository added in a pull request, make it fatal then
|
||||
raise
|
||||
logger.exception(f"Failed to updated repository {spec.name}")
|
||||
repos.append(locked_repo)
|
||||
logger.exception(f"Failed to updated repository {repo.name}")
|
||||
|
||||
update_lock_file(repos)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
update_lock_file(manifest.repos)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue