treewide(python): format using ruff
PerchunPak committed Dec 15, 2025
commit 0137840114d1444c766db37c01e96258e4edf5fb
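For context, a treewide reformat like this is normally reproducible in a single pass; a minimal sketch, assuming ruff is installed and on PATH, and that only the directories touched by this commit need formatting:

    import subprocess

    # Re-run the formatter over the directories this commit touches.
    # (The path list is an assumption based on the files changed below.)
    subprocess.run(["ruff", "format", "doc/", "maintainers/"], check=True)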
60 changes: 35 additions & 25 deletions doc/tests/manpage-urls.py
@@ -14,17 +14,24 @@
from structlog.contextvars import bound_contextvars as log_context


LogLevel = IntEnum('LogLevel', {
lvl: getattr(logging, lvl)
for lvl in ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL')
})
LogLevel = IntEnum(
"LogLevel",
{
lvl: getattr(logging, lvl)
for lvl in ("DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL")
},
)
LogLevel.__str__ = lambda self: self.name
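A side note on the construct being reformatted here: the functional IntEnum call builds one member per stdlib logging level, and the __str__ override makes argparse display bare level names. A minimal standalone sketch of the same idiom:

    import logging
    from enum import IntEnum

    # Two levels are enough to illustrate; the real script uses five.
    LogLevel = IntEnum("LogLevel", {lvl: getattr(logging, lvl) for lvl in ("DEBUG", "INFO")})
    LogLevel.__str__ = lambda self: self.name

    assert LogLevel.INFO == logging.INFO   # members equal the stdlib constants
    assert str(LogLevel.DEBUG) == "DEBUG"  # override: rendered as a bare name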


EXPECTED_STATUS=frozenset((
HTTPStatus.OK, HTTPStatus.FOUND,
HTTPStatus.NOT_FOUND,
))
EXPECTED_STATUS = frozenset(
(
HTTPStatus.OK,
HTTPStatus.FOUND,
HTTPStatus.NOT_FOUND,
)
)


async def check(session: aiohttp.ClientSession, manpage: str, url: str) -> HTTPStatus:
with log_context(manpage=manpage, url=url):
@@ -43,6 +50,7 @@ async def check(session: aiohttp.ClientSession, manpage: str, url: str) -> HTTPStatus:

return st


async def main(urls_path: Path) -> Mapping[HTTPStatus, int]:
logger.info(f"Parsing {urls_path}")
with urls_path.open() as urls_file:
@@ -52,36 +60,38 @@ async def main(urls_path: Path) -> Mapping[HTTPStatus, int]:

logger.info(f"Checking URLs from {urls_path}")
async with aiohttp.ClientSession() as session:
for status in asyncio.as_completed([
check(session, manpage, url)
for manpage, url in urls.items()
]):
count[await status]+=1
for status in asyncio.as_completed(
[check(session, manpage, url) for manpage, url in urls.items()]
):
count[await status] += 1

ok = count[HTTPStatus.OK] + count[HTTPStatus.FOUND]
broken = count[HTTPStatus.NOT_FOUND]
unknown = sum(c for st, c in count.items() if st not in EXPECTED_STATUS)
logger.info(f"Done: {broken} broken links, "
f"{ok} correct links, and {unknown} unexpected status")
logger.info(
f"Done: {broken} broken links, "
f"{ok} correct links, and {unknown} unexpected status"
)

return count
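The loop above tallies statuses in completion order rather than submission order; the same counting idiom in a self-contained sketch (dummy coroutines standing in for the HTTP checks):

    import asyncio
    from collections import Counter

    async def fake_check(status: int) -> int:
        await asyncio.sleep(0)  # stand-in for the aiohttp round-trip
        return status

    async def tally() -> Counter:
        count: Counter[int] = Counter()
        for fut in asyncio.as_completed([fake_check(s) for s in (200, 404, 200)]):
            count[await fut] += 1  # results arrive as they finish
        return count

    print(asyncio.run(tally()))  # Counter({200: 2, 404: 1})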


def parse_args(args: Optional[Sequence[str]] = None) -> Namespace:
parser = ArgumentParser(
prog = 'check-manpage-urls',
description = 'Check the validity of the manpage URLs linked in the nixpkgs manual',
prog="check-manpage-urls",
description="Check the validity of the manpage URLs linked in the nixpkgs manual",
)
parser.add_argument(
'-l', '--log-level',
default = os.getenv('LOG_LEVEL', 'INFO'),
type = lambda s: LogLevel[s],
choices = list(LogLevel),
"-l",
"--log-level",
default=os.getenv("LOG_LEVEL", "INFO"),
type=lambda s: LogLevel[s],
choices=list(LogLevel),
)
parser.add_argument(
'file',
type = Path,
nargs = '?',
"file",
type=Path,
nargs="?",
)

return parser.parse_args(args)
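Since type=lambda s: LogLevel[s] indexes the enum by name, the flag accepts level names directly; a quick check using the parse_args defined above:

    args = parse_args(["-l", "DEBUG"])
    assert args.log_level is LogLevel.DEBUG  # name lookup via LogLevel["DEBUG"]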
@@ -102,7 +112,7 @@ def parse_args(args: Optional[Sequence[str]] = None) -> Namespace:
REPO_ROOT = Path(__file__).parent.parent.parent.parent
logger.info(f"Assuming we are in a nixpkgs repo rooted at {REPO_ROOT}")

urls_path = REPO_ROOT / 'doc' / 'manpage-urls.json'
urls_path = REPO_ROOT / "doc" / "manpage-urls.json"

count = asyncio.run(main(urls_path))

14 changes: 10 additions & 4 deletions maintainers/scripts/doc/escape-code-markup.py
@@ -10,6 +10,7 @@
import re
import sys


def replace_element_by_text(el: ET.Element, text: str) -> None:
"""
Author: bernulf
@@ -26,6 +27,7 @@ def replace_element_by_text(el: ET.Element, text: str) -> None:
parent.text = (parent.text or "") + text
parent.remove(el)


DOCBOOK_NS = "http://docbook.org/ns/docbook"

# List of elements that pandoc’s DocBook reader strips markup from.
@@ -58,23 +60,27 @@ def replace_element_by_text(el: ET.Element, text: str) -> None:
]

XMLNS_REGEX = re.compile(r'\s+xmlns(?::[^=]+)?="[^"]*"')
ROOT_ELEMENT_REGEX = re.compile(r'^\s*<[^>]+>')
ROOT_ELEMENT_REGEX = re.compile(r"^\s*<[^>]+>")


def remove_xmlns(match: re.Match) -> str:
"""
Removes xmlns attributes.

Expects a match containing an opening tag.
"""
return XMLNS_REGEX.sub('', match.group(0))
return XMLNS_REGEX.sub("", match.group(0))


if __name__ == '__main__':
if __name__ == "__main__":
assert len(sys.argv) >= 3, "usage: escape-code-markup.py <input> <output>"

tree = ET.parse(sys.argv[1])
name_predicate = " or ".join([f"local-name()='{el}'" for el in code_elements])

for markup in tree.xpath(f"//*[({name_predicate}) and namespace-uri()='{DOCBOOK_NS}']/*"):
for markup in tree.xpath(
f"//*[({name_predicate}) and namespace-uri()='{DOCBOOK_NS}']/*"
):
text = ET.tostring(markup, encoding=str)

# tostring adds xmlns attributes to the element we want to stringify
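For readers following the XPath construction: name_predicate expands to an alternation over code_elements, so the query matches children of any listed element in the DocBook namespace. For example, assuming a two-element subset:

    code_elements = ["code", "literal"]  # illustrative subset of the real list
    name_predicate = " or ".join([f"local-name()='{el}'" for el in code_elements])
    print(name_predicate)  # local-name()='code' or local-name()='literal'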
4 changes: 2 additions & 2 deletions maintainers/scripts/doc/replace-xrefs-by-empty-links.py
@@ -17,14 +17,14 @@
}


if __name__ == '__main__':
if __name__ == "__main__":
assert len(sys.argv) >= 3, "usage: replace-xrefs-by-empty-links.py <input> <output>"

tree = ET.parse(sys.argv[1])
for xref in tree.findall(".//db:xref", ns):
text = ET.tostring(xref, encoding=str)
parent = xref.getparent()
link = parent.makeelement('link')
link = parent.makeelement("link")
target_name = xref.get("linkend")
link.set(f"{{{XLINK_NS}}}href", f"#{target_name}")
parent.replace(xref, link)
96 changes: 55 additions & 41 deletions maintainers/scripts/hydra-eval-failures.py
@@ -11,68 +11,83 @@
import requests
from pyquery import PyQuery as pq

def map_dict (f, d):
for k,v in d.items():

def map_dict(f, d):
for k, v in d.items():
d[k] = f(v)

maintainers_json = subprocess.check_output([
'nix-instantiate', '-A', 'lib.maintainers', '--eval', '--strict', '--json'
])

maintainers_json = subprocess.check_output(
["nix-instantiate", "-A", "lib.maintainers", "--eval", "--strict", "--json"]
)
maintainers = json.loads(maintainers_json)
MAINTAINERS = map_dict(lambda v: v.get('github', None), maintainers)
MAINTAINERS = map_dict(lambda v: v.get("github", None), maintainers)
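A pre-existing quirk worth flagging (untouched by this formatting-only commit): map_dict mutates d in place but returns None, so MAINTAINERS ends up as None. A returning variant would look like:

    def map_dict(f, d):
        for k, v in d.items():
            d[k] = f(v)
        return d  # return the mutated dict so the assignment above is usable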


def get_response_text(url):
return pq(requests.get(url).text) # IO


EVAL_FILE = {
'nixos': 'nixos/release.nix',
'nixpkgs': 'pkgs/top-level/release.nix',
"nixos": "nixos/release.nix",
"nixpkgs": "pkgs/top-level/release.nix",
}
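EVAL_FILE keys correspond to the first dotted component of a Hydra attribute name; get_maintainers below relies on this to pick the release expression. For example (hypothetical job name):

    attr_name = "nixpkgs.hello.x86_64-linux"  # hypothetical Hydra job
    nixname = attr_name.split(".")
    assert EVAL_FILE[nixname[0]] == "pkgs/top-level/release.nix"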


def get_maintainers(attr_name):
try:
nixname = attr_name.split('.')
meta_json = subprocess.check_output([
'nix-instantiate',
'--eval',
'--strict',
'-A',
'.'.join(nixname[1:]) + '.meta',
EVAL_FILE[nixname[0]],
'--arg',
'nixpkgs',
'./.',
'--json'])
nixname = attr_name.split(".")
meta_json = subprocess.check_output(
[
"nix-instantiate",
"--eval",
"--strict",
"-A",
".".join(nixname[1:]) + ".meta",
EVAL_FILE[nixname[0]],
"--arg",
"nixpkgs",
"./.",
"--json",
]
)
meta = json.loads(meta_json)
return meta.get('maintainers', [])
return meta.get("maintainers", [])
except:
return []
return []


def filter_github_users(maintainers):
github_only = []
for i in maintainers:
if i.get('github'):
if i.get("github"):
github_only.append(i)
return github_only


def print_build(table_row):
a = pq(table_row)('a')[1]
print("- [ ] [{}]({})".format(a.text, a.get('href')), flush=True)
a = pq(table_row)("a")[1]
print("- [ ] [{}]({})".format(a.text, a.get("href")), flush=True)

job_maintainers = filter_github_users(get_maintainers(a.text))
if job_maintainers:
print(" - maintainers: {}".format(" ".join(map(lambda u: '@' + u.get('github'), job_maintainers))))
print(
" - maintainers: {}".format(
" ".join(map(lambda u: "@" + u.get("github"), job_maintainers))
)
)
# TODO: print last three persons that touched this file
# TODO: pinpoint the diff that broke this build, or maybe it's transient or maybe it never worked?

sys.stdout.flush()


@click.command()
@click.option(
'--jobset',
"--jobset",
default="nixos/release-19.09",
help='Hydra project like nixos/release-19.09')
help="Hydra project like nixos/release-19.09",
)
def cli(jobset):
"""
Given a Hydra project, inspect latest evaluation
@@ -82,31 +97,30 @@ def cli(jobset):
url = "https://hydra.nixos.org/jobset/{}".format(jobset)

# get the last evaluation
click.echo(click.style(
'Getting latest evaluation for {}'.format(url), fg='green'))
click.echo(click.style("Getting latest evaluation for {}".format(url), fg="green"))
d = get_response_text(url)
evaluations = d('#tabs-evaluations').find('a[class="row-link"]')
latest_eval_url = evaluations[0].get('href')
evaluations = d("#tabs-evaluations").find('a[class="row-link"]')
latest_eval_url = evaluations[0].get("href")

# parse last evaluation page
click.echo(click.style(
'Parsing evaluation {}'.format(latest_eval_url), fg='green'))
d = get_response_text(latest_eval_url + '?full=1')
click.echo(click.style("Parsing evaluation {}".format(latest_eval_url), fg="green"))
d = get_response_text(latest_eval_url + "?full=1")

# TODO: aborted evaluations
# TODO: dependency failed without propagated builds
print('\nFailures:')
for tr in d('img[alt="Failed"]').parents('tr'):
print("\nFailures:")
for tr in d('img[alt="Failed"]').parents("tr"):
print_build(tr)

print('\nDependency failures:')
for tr in d('img[alt="Dependency failed"]').parents('tr'):
print("\nDependency failures:")
for tr in d('img[alt="Dependency failed"]').parents("tr"):
print_build(tr)



if __name__ == "__main__":
try:
cli()
except Exception as e:
import pdb;pdb.post_mortem()
import pdb

pdb.post_mortem()
14 changes: 7 additions & 7 deletions maintainers/scripts/kde/collect-metadata.py
@@ -6,6 +6,7 @@

import utils


@click.command
@click.argument(
"repo-metadata",
@@ -25,17 +26,16 @@
writable=True,
path_type=pathlib.Path,
),
default=pathlib.Path(__file__).parent.parent.parent.parent
)
@click.option(
"--unstable",
default=False,
is_flag=True
default=pathlib.Path(__file__).parent.parent.parent.parent,
)
@click.option("--unstable", default=False, is_flag=True)
def main(repo_metadata: pathlib.Path, nixpkgs: pathlib.Path, unstable: bool):
metadata = utils.KDERepoMetadata.from_repo_metadata_checkout(repo_metadata, unstable)
metadata = utils.KDERepoMetadata.from_repo_metadata_checkout(
repo_metadata, unstable
)
out_dir = nixpkgs / "pkgs/kde/generated"
metadata.write_json(out_dir)


if __name__ == "__main__":
main() # type: ignore