docsets: ship nixpkgs lib as a Dash/Zeal docset

This commit is contained in:
2024-11-04 18:16:33 +00:00
parent 598e55380c
commit 7e1624d017
4 changed files with 181 additions and 0 deletions

View File

@@ -60,6 +60,7 @@ in {
];
sane.programs.docsets.config.pkgs = with pkgs; [
# packages which ship docsets natively:
docsets.nixpkgs-lib
docsets.rust-std
] ++ lib.map
# reconfigure all the rustPkgs so they ship docsets:

View File

@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleIdentifier</key>
<string>nixpkgs-lib</string>
<key>CFBundleName</key>
<string>nixpkgs.lib</string>
<key>DocSetPlatformFamily</key>
<string>nix</string>
<key>isDashDocset</key>
<true/>
<key>dashIndexFilePath</key>
<string>manual.html</string>
</dict>
</plist>

View File

@@ -0,0 +1,104 @@
#!/usr/bin/env nix-shell
#!nix-shell -i python3 -p python3
import argparse
import json
import logging
import re
import sqlite3
from dataclasses import dataclass
from pathlib import Path
logger = logging.getLogger(__name__)
@dataclass
class Items:
    # attrpaths of documented lib functions, e.g. `lib.strings.concatStrings`
    functions: list[str]
    # attrpaths which group functions, e.g. `lib.strings`; indexed as Dash "Property" entries
    sections: list[str]
def read_items(locations_json: Path) -> Items:
    """Load all attrpaths from `locations_json` and split them into functions and sections.

    A "section" is any attrpath which is a (transitive) parent of some documented
    item, e.g. `lib.strings` for `lib.strings.concatStrings`. Everything else is a
    "function". Both lists are returned sorted component-wise.
    """
    # TODO: this seems to yield some items which don't have documentation? like `lib.gvariant.type`
    with open(locations_json, "r") as f:
        functions = list(json.load(f).keys())

    # collect the transitive closure of parent attrpaths.
    # walking each item's parent chain until we hit an already-seen prefix is
    # equivalent to the fixed-point expansion, but does the membership tests
    # against a set instead of re-unioning whole sets each round.
    # the nameless root section `""` is never added (the `while parent` guard).
    sections: set[str] = set()
    for fn in functions:
        parent = parent_attrpath(fn)
        while parent and parent not in sections:
            sections.add(parent)
            parent = parent_attrpath(parent)

    # for some cases (lib.gvariant.type), both the section and its members show up in `locations.json`
    functions = [f for f in functions if f not in sections]
    return Items(functions=sort_attrpaths(functions), sections=sort_attrpaths(sections))
def sort_attrpaths(ps: list[str]) -> list[str]:
    """Sort attrpaths component-wise, so that `lib.a` sorts before `lib.a.b`."""
    return [".".join(components) for components in sorted(p.split(".") for p in ps)]
def parent_attrpath(p: str) -> str:
    """ `lib.foo.bar` -> `lib.foo` (empty string if `p` has no parent) """
    prefix, _dot, _leaf = p.rpartition(".")
    return prefix
def chomp_attrpath(p: str) -> str:
    """ `lib.foo.bar` -> `foo.bar` (empty string if `p` has a single component) """
    _head, _dot, rest = p.partition(".")
    return rest
def init_db(db) -> None:
    """Create the Dash docset `searchIndex` schema on a fresh sqlite cursor."""
    schema = [
        "CREATE TABLE searchIndex(id INTEGER PRIMARY KEY, name TEXT, type TEXT, path TEXT);",
        "CREATE UNIQUE INDEX anchor ON searchIndex (name, type, path);",
    ]
    for statement in schema:
        db.execute(statement)
def register_item(db, attrpath: str, kind: str, path: str) -> None:
    """Insert one entry into the docset search index.

    `kind` is a Dash entry type (e.g. "Function", "Property"); `path` is the
    html location, relative to the Documents directory, including any anchor.
    """
    # lazy %-args: the f-string version formatted on every call even when
    # DEBUG logging is disabled; logging defers formatting until emit time.
    logger.debug("register (%s, %s, %s)", attrpath, kind, path)
    db.execute("INSERT INTO searchIndex(name, type, path) values (?, ?, ?);", (attrpath, kind, path))
def register_function(db, attrpath: str) -> None:
    """Index a documented lib function, anchored at its `#function-library-` fragment."""
    anchor = f"manual.html#function-library-{attrpath}"
    register_item(db, attrpath, kind="Function", path=anchor)
def register_section(db, attrpath: str) -> None:
    """Index a section (e.g. `lib.strings`), anchored at its `#sec-functions-library-` fragment."""
    # the manual's section anchors omit the leading `lib.` component;
    # the top-level `lib` section itself maps to the un-suffixed anchor.
    relative = chomp_attrpath(attrpath)
    if relative == "":
        path = "manual.html#sec-functions-library"
    else:
        path = f"manual.html#sec-functions-library-{relative}"
    register_item(db, attrpath, kind="Property", path=path)
def main() -> None:
    """CLI entry point: read `locations`, write the docset sqlite index to `--output`."""
    logging.basicConfig()
    parser = argparse.ArgumentParser(description = "Generate Dash/Zeal docset index for use with nixpkgs html manual")
    parser.add_argument("locations", type=Path, help="path to locations.json, a dict mapping attrpath to code location for all nixpkgs.lib attrs")
    # required: previously an omitted --output passed None to sqlite3.connect,
    # which fails with an opaque TypeError instead of a usage message.
    parser.add_argument("--output", type=Path, required=True, help="path at which to create the sqlite search index (docSet.dsidx)")
    parser.add_argument("--verbose", action="store_true")
    args = parser.parse_args()
    if args.verbose:
        logging.getLogger().setLevel(logging.DEBUG)

    items = read_items(args.locations)

    conn = sqlite3.connect(args.output)
    logger.debug("database opened")
    try:
        db = conn.cursor()
        init_db(db)
        # register sections first, then functions, so index ids are grouped predictably
        for section in items.sections:
            register_section(db, section)
        for fn in items.functions:
            register_function(db, fn)
        conn.commit()
    finally:
        # close even if indexing fails, so we don't leak the handle
        conn.close()
        logger.debug("database closed")
# entry-point guard: lets this file be imported (e.g. by tests) without side effects
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,60 @@
# dash nixpkgs docset tracking issue: <https://github.com/Kapeli/Dash-User-Contributions/issues/4812>
# this package is heavily based on:
# - <https://github.com/nixosbrasil/nix-docgen>
# - <https://kapeli.com/docsets#dashDocset>
{
  lib,
  nixpkgs-manual,
  static-nix-shell,
  stdenv,
}:
let
  # nixpkgs has logic to build an attrset of all the items which make it into nixpkgs-manual.
  # this is a json dictionary with each entry like:
  # - `"lib.asserts.assertEachOneOf": "[lib/asserts.nix:135](https://github.com/NixOS/nixpkgs/blob/master/lib/asserts.nix#L135) in `<nixpkgs>`"`
  lib-locations = nixpkgs-manual.lib-docs.overrideAttrs (base: {
    # NOTE(review): presumably lib-docs computes locations.json without installing it;
    # keep a copy in $out so we can consume it below — confirm against nixpkgs-manual.
    installPhase = base.installPhase + ''
      cp locations.json $out/locations.json
    '';
  });
  # the indexing script that lives next to this file (shipped with a nix-shell shebang);
  # NOTE(review): assumes static-nix-shell.mkPython3 repackages it as a plain python3
  # program runnable inside the sandboxed build — confirm.
  generate_index = static-nix-shell.mkPython3 {
    pname = "generate_index";
    srcRoot = ./.;
  };
  docset = stdenv.mkDerivation {
    pname = "nixpkgs-lib";
    version = lib.version;
    nativeBuildInputs = [ generate_index ];
    # there is no real source tree to unpack: assemble the three inputs by hand
    # (the plist metadata, the attrpath->location map, and the rendered html manual).
    unpackPhase = ''
      cp ${./Info.plist} Info.plist
      cp ${lib-locations}/locations.json locations.json
      cp -R ${nixpkgs-manual}/share/doc/nixpkgs nixpkgs-manual
    '';
    # docset layout (see <https://kapeli.com/docsets#dashDocset>):
    #   nixpkgs-lib.docset/Contents/Info.plist
    #   nixpkgs-lib.docset/Contents/Resources/Documents/   <- the html manual
    #   nixpkgs-lib.docset/Contents/Resources/docSet.dsidx <- sqlite search index
    buildPhase = ''
      runHook preBuild
      mkdir -p nixpkgs-lib.docset/Contents/Resources/
      cp Info.plist nixpkgs-lib.docset/Contents/
      cp -R nixpkgs-manual nixpkgs-lib.docset/Contents/Resources/Documents
      generate_index --verbose locations.json --output nixpkgs-lib.docset/Contents/Resources/docSet.dsidx
      runHook postBuild
    '';
    # docsets are usually distributed as .tgz, but compression is actually optional at least for tools like `dasht`
    installPhase = ''
      mkdir -p $out/share/docsets
      cp -R nixpkgs-lib.docset $out/share/docsets/nixpkgs-lib.docset
    '';
    # expose the intermediate derivations so they can be built/inspected directly
    passthru = {
      inherit generate_index lib-locations;
    };
  };
in
docset