Mirror of https://github.com/nix-community/nixvim.git (synced 2025-06-21 00:25:42 +02:00)
ci: rename update-scripts → ci
This changes how we think about this directory; it does not need to be exclusively for scripts related to updates, but should be a place for any scripts intended to be run by CI workflows. This mindset should make it easier to develop and test the business logic of workflows, without always needing to test "in production" on the nixvim repo or a fork.
Parent: 7388c85c54
Commit: 2b2b1e6d8f
18 changed files with 13 additions and 12 deletions

ci/README.md (new file, 31 lines)
@@ -0,0 +1,31 @@
# CI scripts

This directory contains CI-related scripts that are not part of the actual flake.
Unless developing or testing changes, you shouldn't need to run them manually.

## Developing

Because these scripts aren't packaged in the flake, you should use `nix-build` and `nix-shell` instead of `nix build`, `nix run`, and `nix develop`, etc.

For example, `nix-build -A generate` will build `./generate.nix` into `./result/bin/generate`.

A `shell.nix` is available that will place `generate` on your PATH.

You can use this directory's shell/packages from another working directory by supplying `nix-build` or `nix-shell` with a path.
E.g. `nix-shell ./ci`.

## Explanation

These packages are not in the flake outputs for three main reasons:
- Packages built using the flake must follow the flake's `nixConfig`
- Packages included in the flake's output are checked by `nix flake check`
- Some of the packages should have no dependency on the flake at all,
  allowing this directory to be [sparse checked out][sparse-checkout] by a workflow

Being unable to bypass `nixConfig` is an issue because we want to disable [IFD] for the flake, but not for these scripts.

If something changes upstream that causes the builds to fail, we don't want this to block us from updating `flake.lock`.
We'd still be made aware of any issues by the `update` CI workflow failing.

[sparse-checkout]: https://github.com/actions/checkout#scenarios
[IFD]: https://nixos.org/manual/nix/stable/language/import-from-derivation
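
The README's `nix-build -A generate` workflow corresponds to the attribute set defined in `ci/default.nix` below. As a quick illustration (not part of the commit, and assuming you are in the repository root), the same packages can be reached by importing the directory directly:

```nix
let
  ci = import ./ci { }; # the defaults load nixvim via flake-compat, as described above
in
ci.generate # the same derivation that `nix-build ./ci -A generate` builds
```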

ci/default.nix (new file, 23 lines)
@@ -0,0 +1,23 @@
{
  # By default, load nixvim using flake-compat
  nixvim ? import ../.,
  pkgs ? nixvim.inputs.nixpkgs.legacyPackages.${builtins.currentSystem},
  lib ? nixvim.inputs.nixpkgs.lib,
}:
lib.fix (self: {
  # The main script
  default = self.generate;
  generate = lib.callPackageWith (pkgs // self) ./generate.nix { };

  update = lib.callPackageWith (pkgs // self) ./update.nix { };

  # A shell that has the generate script
  shell = pkgs.mkShell { nativeBuildInputs = [ self.generate ]; };

  # Derivations that build the generated files
  efmls-configs-sources = pkgs.callPackage ./efmls-configs.nix { };
  none-ls-builtins = pkgs.callPackage ./none-ls.nix { };
  rust-analyzer-options = pkgs.callPackage ./rust-analyzer { };
  lspconfig-servers = pkgs.callPackage ./nvim-lspconfig { };
  version-info = pkgs.callPackage ./version-info { };
})
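
The `lib.fix (self: ...)` plus `lib.callPackageWith (pkgs // self)` combination means entries can depend on each other exactly like `callPackage`-style nixpkgs dependencies. A minimal standalone sketch of that pattern (hypothetical names, not from this repo):

```nix
let
  pkgs = import <nixpkgs> { };
  inherit (pkgs) lib;
  packages = lib.fix (self: {
    # A sibling package that other entries can reference through `self`.
    greeting = pkgs.writeText "greeting" "hello from CI";
    # callPackageWith (pkgs // self) would let a hypothetical ./consumer.nix declare
    # `{ greeting, runCommand }:` and receive both automatically:
    #   consumer = lib.callPackageWith (pkgs // self) ./consumer.nix { };
    shout = pkgs.runCommand "shout" { } "tr a-z A-Z < ${self.greeting} > $out";
  });
in
packages.shout
```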

ci/efmls-configs.nix (new file, 54 lines)
@@ -0,0 +1,54 @@
{
  lib,
  vimPlugins,
  writeText,
}:
let
  tools = lib.trivial.importJSON "${vimPlugins.efmls-configs-nvim.src}/doc/supported-list.json";
  languages = lib.attrNames tools;

  toLangTools' = lang: kind: lib.map (lib.getAttr "name") (tools.${lang}.${kind} or [ ]);

  miscLinters = toLangTools' "misc" "linters";
  miscFormatters = toLangTools' "misc" "formatters";

  sources =
    (lib.listToAttrs (
      lib.map (
        lang:
        let
          toLangTools = toLangTools' lang;
        in
        {
          name = lang;
          value = {
            linter = {
              inherit lang;
              possible = (toLangTools "linters") ++ miscLinters;
            };
            formatter = {
              inherit lang;
              possible = (toLangTools "formatters") ++ miscFormatters;
            };
          };
        }
      ) languages
    ))
    // {
      all = {
        linter = {
          lang = "all languages";
          possible = miscLinters;
        };
        formatter = {
          lang = "all languages";
          possible = miscFormatters;
        };
      };
    };
in
writeText "efmls-configs-sources.nix" (
  "# WARNING: DO NOT EDIT\n"
  + "# This file is generated with packages.<system>.efmls-configs-sources, which is run automatically by CI\n"
  + (lib.generators.toPretty { } sources)
)
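
To make the `toLangTools'` helper above concrete, here is a toy evaluation against a hand-written stand-in for `supported-list.json` (the tool names are illustrative, not taken from the real file):

```nix
let
  lib = (import <nixpkgs> { }).lib;
  # Stand-in: the real JSON maps each language to { linters, formatters } lists of { name, ... }.
  tools = {
    misc = {
      linters = [ { name = "vale"; } ];
    };
    python = {
      linters = [ { name = "ruff"; } ];
      formatters = [ { name = "black"; } ];
    };
  };
  toLangTools' = lang: kind: lib.map (lib.getAttr "name") (tools.${lang}.${kind} or [ ]);
in
toLangTools' "python" "linters" ++ toLangTools' "misc" "linters"
# => [ "ruff" "vale" ], i.e. the "possible" linters recorded for python in the generated file
```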

ci/generate.nix (new file, 86 lines)
@@ -0,0 +1,86 @@
{
  writeShellApplication,
  rust-analyzer-options,
  efmls-configs-sources,
  none-ls-builtins,
  lspconfig-servers,
  nixfmt-rfc-style,
  nodePackages,
}:
writeShellApplication {
  name = "generate";

  runtimeInputs = [
    nixfmt-rfc-style
    nodePackages.prettier
  ];

  text = ''
    repo_root=$(git rev-parse --show-toplevel)
    generated_dir=$repo_root/generated

    commit=
    while [ $# -gt 0 ]; do
      case "$1" in
      --commit) commit=1
        ;;
      --*) echo "unknown option $1"
        ;;
      *) echo "unexpected argument $1"
        ;;
      esac
      shift
    done

    generate() {
      echo "$2"
      cp "$1" "$generated_dir/$2.nix"
      nixfmt "$generated_dir/$2.nix"
    }

    mkdir -p "$generated_dir"
    generate "${rust-analyzer-options}" "rust-analyzer"
    generate "${efmls-configs-sources}" "efmls-configs"
    generate "${none-ls-builtins}" "none-ls"
    echo "lspconfig servers"
    prettier --parser=json "${lspconfig-servers}" >"$generated_dir/lspconfig-servers.json"

    if [ -n "$commit" ]; then
      cd "$generated_dir"
      git add .

      # Construct a msg body from `git status -- .`
      body=$(
        git status \
          --short \
          --ignored=no \
          --untracked-files=no \
          --no-ahead-behind \
          -- . \
          | sed \
            -e 's/^\s*\([A-Z]\)\s*/\1 /' \
            -e 's/^A/Added/' \
            -e 's/^M/Updated/' \
            -e 's/^R/Renamed/' \
            -e 's/^D/Removed/' \
            -e 's/^/- /'
      )

      # Construct the commit message based on the body
      # NOTE: Can't use `wc -l` due to how `echo` pipes its output
      count=$(echo -n "$body" | awk 'END {print NR}')
      if [ "$count" -gt 1 ] || [ ''${#body} -gt 50 ]; then
        msg=$(echo -e "generated: Update\n\n$body")
      else
        msg="generated:''${body:1}"
      fi

      # Commit if there are changes
      if [ "$count" -gt 0 ]; then
        echo "Committing $count changes..."
        echo "$msg"
        git commit -m "$msg" --no-verify
      fi
    fi
  '';
}
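
`generate.nix` leans on two `writeShellApplication` behaviours: packages in `runtimeInputs` end up on `PATH`, and interpolating a derivation such as `${rust-analyzer-options}` splices its store path into the script text. A standalone sketch of that pattern (hypothetical names, not from this repo):

```nix
let
  pkgs = import <nixpkgs> { };
  # Stand-in for one of the generated-file derivations built above.
  example-data = pkgs.writeText "example-data" ''{ "hello": "world" }'';
in
pkgs.writeShellApplication {
  name = "example-generate";
  runtimeInputs = [ pkgs.jq ]; # analogous to nixfmt-rfc-style / prettier above
  text = ''
    # The interpolation below becomes the /nix/store path of example-data.
    jq . ${example-data}
  '';
}
```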

ci/none-ls.nix (new file, 14 lines)
@@ -0,0 +1,14 @@
{
  vimPlugins,
  lib,
  writeText,
}:
let
  builtinSources = lib.trivial.importJSON "${vimPlugins.none-ls-nvim.src}/doc/builtins.json";
  builtinSourceNames = lib.mapAttrs (_: lib.attrNames) builtinSources;
in
writeText "none-ls-sources.nix" (
  "# WARNING: DO NOT EDIT\n"
  + "# This file is generated with packages.<system>.none-ls-builtins, which is run automatically by CI\n"
  + (lib.generators.toPretty { } builtinSourceNames)
)
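
The `mapAttrs`/`attrNames` step above reduces none-ls's `builtins.json` to plain category-to-name lists. A toy evaluation on a made-up shape (builtin names are only illustrative):

```nix
let
  lib = (import <nixpkgs> { }).lib;
  builtinSources = {
    # made-up stand-in for none-ls-nvim's doc/builtins.json
    diagnostics = { eslint = { }; shellcheck = { }; };
    formatting = { prettier = { }; };
  };
in
lib.mapAttrs (_: lib.attrNames) builtinSources
# => { diagnostics = [ "eslint" "shellcheck" ]; formatting = [ "prettier" ]; }
```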

ci/nvim-lspconfig/clean-desc.py (new executable file, 23 lines)
@@ -0,0 +1,23 @@
#!/usr/bin/env python3

import json
import os
import subprocess
import sys

filter = os.environ.get("LUA_FILTER")
if filter is None:
    filter = os.path.dirname(__file__) + "/desc-filter.lua"

with open(sys.argv[1]) as f:
    data = json.load(f)
    for d in data:
        if "desc" in d:
            if "#" in d["desc"]:
                d["desc"] = subprocess.run(
                    ["pandoc", "-t", "markdown", f"--lua-filter={filter}"],
                    input=d["desc"],
                    capture_output=True,
                    text=True,
                ).stdout
    print(json.dumps(data, sort_keys=True))

ci/nvim-lspconfig/default.nix (new file, 37 lines)
@@ -0,0 +1,37 @@
{
  lib,
  vimPlugins,
  neovimUtils,
  wrapNeovimUnstable,
  neovim-unwrapped,
  runCommand,
  pandoc,
  python3,
}:
let
  nvimConfig = neovimUtils.makeNeovimConfig {
    plugins = [
      {
        plugin = vimPlugins.nvim-lspconfig;
        config = null;
        optional = false;
      }
    ];
  };

  nvim = wrapNeovimUnstable neovim-unwrapped nvimConfig;
in
runCommand "lspconfig-servers"
  {
    lspconfig = "${vimPlugins.nvim-lspconfig}";
    nativeBuildInputs = [
      pandoc
      python3
    ];
  }
  ''
    export HOME=$(realpath .)
    # Generates `lsp.json`
    ${lib.getExe nvim} -u NONE -E -R --headless +'luafile ${./lspconfig-servers.lua}' +q
    LUA_FILTER=${./desc-filter.lua} python3 ${./clean-desc.py} "lsp.json" >$out
  ''

ci/nvim-lspconfig/desc-filter.lua (new file, 3 lines)
@@ -0,0 +1,3 @@
function Header(elem)
  return pandoc.Strong(elem.content)
end

ci/nvim-lspconfig/lspconfig-servers.lua (new file, 52 lines)
@@ -0,0 +1,52 @@
-- This script is heavily inspired by https://github.com/neovim/nvim-lspconfig/blob/master/scripts/docgen.lua
require("lspconfig")
local configs = require("lspconfig.configs")
local util = require("lspconfig.util")

local function require_all_configs()
  for _, v in ipairs(vim.fn.glob(vim.env.lspconfig .. "/lua/lspconfig/configs/*.lua", 1, 1)) do
    local module_name = v:gsub(".*/", ""):gsub("%.lua$", "")
    configs[module_name] = require("lspconfig.configs." .. module_name)
  end
end

local function map_list(t, func)
  local res = {}
  for i, v in ipairs(t) do
    local x = func(v, i)
    if x ~= nil then
      table.insert(res, x)
    end
  end
  return res
end

local function sorted_map_table(t, func)
  local keys = vim.tbl_keys(t)
  table.sort(keys)
  return map_list(keys, function(k)
    return func(k, t[k])
  end)
end

require_all_configs()

info = sorted_map_table(configs, function(server_name, server_info)
  local description = nil
  if server_info.document_config.docs ~= nil then
    description = server_info.document_config.docs.description
  end
  local cmd = server_info.document_config.default_config.cmd
  if type(cmd) == "function" then
    cmd = "see source file"
  end
  return {
    name = server_name,
    cmd = cmd,
    desc = description,
  }
end)

local writer = io.open("lsp.json", "w")
writer:write(vim.json.encode(info))
writer:close()

ci/rust-analyzer/default.nix (new file, 234 lines)
@@ -0,0 +1,234 @@
#
# This derivation creates a Nix file that describes the Nix module that needs to be instantiated
#
# The created file is of the form:
#
# {
#   "<rust-analyzer.option.name>" = {
#     description = "<option description>";
#     type = {
#       kind = "<name of the type>";
#       # Other values depending on the kind, like values for enum or subTypes for oneOf
#     };
#   };
# }
#
{
  lib,
  rust-analyzer,
  writeText,
  pandoc,
  runCommand,
}:
let
  packageJSON = "${rust-analyzer.src}/editors/code/package.json";
  options = (lib.importJSON packageJSON).contributes.configuration;

  generatedStart = lib.lists.findFirstIndex (
    e: e == { title = "$generated-start"; }
  ) (throw "missing generated start") options;
  generatedEnd = lib.lists.findFirstIndex (
    e: e == { title = "$generated-end"; }
  ) (throw "missing generated end") options;

  # Extract only the generated properties, removing vscode specific options
  rustAnalyzerProperties = lib.lists.sublist (generatedStart + 1) (
    generatedEnd - generatedStart - 1
  ) options;

  mkRustAnalyzerOptionType =
    nullable: property_name: property:
    let
      inner =
        {
          type ? null,
          enum ? null,
          minimum ? null,
          maximum ? null,
          items ? null,
          anyOf ? null,
          properties ? null,
          # Not used in the function, but anyOf values contain it
          enumDescriptions ? null,
        }@property:
        if enum != null then
          {
            kind = "enum";
            values = enum;
          }
        else if anyOf != null then
          let
            possibleTypes = lib.filter (sub: !(sub.type == "null" && nullable)) anyOf;
          in
          {
            kind = "oneOf";
            subTypes = builtins.map (
              t: mkRustAnalyzerOptionType nullable "${property_name}-sub" t
            ) possibleTypes;
          }
        else
          (
            assert lib.assertMsg (type != null) "property is neither anyOf nor enum, it must have a type";
            if lib.isList type then
              (
                if lib.head type == "null" then
                  assert lib.assertMsg (
                    lib.length type == 2
                  ) "Lists starting with null are assumed to mean nullOr, so length 2";
                  let
                    innerType = property // {
                      type = lib.elemAt type 1;
                    };
                    inner = mkRustAnalyzerOptionType nullable "${property_name}-inner" innerType;
                  in
                  assert lib.assertMsg nullable "nullOr types are not yet handled";
                  inner
                else
                  let
                    innerTypes = builtins.map (
                      t: mkRustAnalyzerOptionType nullable "${property_name}-inner" (property // { type = t; })
                    ) type;
                  in
                  {
                    kind = "oneOf";
                    subTypes = innerTypes;
                  }
              )
            else if type == "array" then
              {
                kind = "list";
                item = mkRustAnalyzerOptionType false "${property_name}-item" items;
              }
            else if type == "number" || type == "integer" then
              {
                kind = type;
                inherit minimum maximum;
              }
            else if type == "object" && properties != null then
              {
                kind = "submodule";
                options = lib.mapAttrs (
                  name: value: mkRustAnalyzerOptionType false "${property_name}.${name}" value
                ) properties;
              }
            else if
              lib.elem type [
                "object"
                "string"
                "boolean"
              ]
            then
              { kind = type; }
            else
              throw "Unhandled value in ${property_name}: ${lib.generators.toPretty { } property}"
          );
    in
    builtins.addErrorContext "While creating type for ${property_name}:\n${lib.generators.toPretty { } property}" (
      inner property
    );

  mkRustAnalyzerOption =
    property_name:
    {
      # List all possible values so that we are sure no new values are introduced
      default,
      markdownDescription,
      enum ? null,
      enumDescriptions ? null,
      anyOf ? null,
      minimum ? null,
      maximum ? null,
      items ? null,
      # TODO: add this in the documentation ?
      uniqueItems ? null,
      type ? null,
    }:
    let
      filteredMarkdownDesc =
        # If there is a risk that the string contains a heading, filter it out
        if lib.hasInfix "# " markdownDescription then
          builtins.readFile (
            runCommand "filtered-documentation" { inherit markdownDescription; } ''
              ${lib.getExe pandoc} -o $out -t markdown \
                --lua-filter=${./heading_filter.lua} <<<"$markdownDescription"
            ''
          )
        else
          markdownDescription;

      enumDesc =
        values: descriptions:
        let
          valueDesc = builtins.map ({ fst, snd }: ''- ${fst}: ${snd}'') (
            lib.lists.zipLists values descriptions
          );
        in
        ''
          ${filteredMarkdownDesc}

          Values:
          ${builtins.concatStringsSep "\n" valueDesc}
        '';
    in
    {
      type = mkRustAnalyzerOptionType true property_name {
        inherit
          type
          enum
          minimum
          maximum
          items
          anyOf
          enumDescriptions
          ;
      };
      pluginDefault = default;
      description =
        let
          globalDescription = ''
            ${filteredMarkdownDesc}
          '';
        in
        if
          enum == null && (anyOf == null || builtins.all (subProp: !(lib.hasAttr "enum" subProp)) anyOf)
        then
          globalDescription
        else if enum != null then
          assert lib.assertMsg (anyOf == null) "enum + anyOf types are not yet handled";
          enumDesc enum enumDescriptions
        else
          let
            subEnums = lib.filter (lib.hasAttr "enum") anyOf;
            subEnum =
              assert lib.assertMsg (
                lib.length subEnums == 1
              ) "anyOf types may currently only contain a single enum";
              lib.head subEnums;
          in
          if subEnum ? enumDescriptions then
            enumDesc subEnum.enum subEnum.enumDescriptions
          else
            globalDescription;

    };

  rustAnalyzerOptions = builtins.map (
    v:
    let
      props = lib.attrsToList v.properties;
      prop =
        assert lib.assertMsg (
          lib.length props == 1
        ) "Rust analyzer configuration items are only supported with a single element";
        lib.head props;
    in
    {
      "${prop.name}" = mkRustAnalyzerOption prop.name prop.value;
    }
  ) rustAnalyzerProperties;
in
writeText "rust-analyzer-options.nix" (
  "# WARNING: DO NOT EDIT\n"
  + "# This file is generated with packages.<system>.rust-analyzer-options, which is run automatically by CI\n"
  + (lib.generators.toPretty { } (lib.mergeAttrsList rustAnalyzerOptions))
)
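
The `$generated-start` / `$generated-end` markers are located with `lib.lists.findFirstIndex`, and the slice between them is taken with `lib.lists.sublist`. A toy version of that slicing (option names are hypothetical):

```nix
let
  lib = (import <nixpkgs> { }).lib;
  options = [
    { title = "$generated-start"; }
    { name = "rust-analyzer.example.a"; }
    { name = "rust-analyzer.example.b"; }
    { title = "$generated-end"; }
  ];
  start = lib.lists.findFirstIndex (e: e == { title = "$generated-start"; }) (throw "missing start") options;
  end = lib.lists.findFirstIndex (e: e == { title = "$generated-end"; }) (throw "missing end") options;
in
lib.lists.sublist (start + 1) (end - start - 1) options
# => [ { name = "rust-analyzer.example.a"; } { name = "rust-analyzer.example.b"; } ]
```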

ci/rust-analyzer/heading_filter.lua (new file, 3 lines)
@@ -0,0 +1,3 @@
function Header(elem)
  return pandoc.Strong(elem.content)
end

ci/shell.nix (new file, 4 lines)
@@ -0,0 +1,4 @@
let
  packages = import ./. { };
in
packages.shell

ci/update.nix (new file, 109 lines)
@@ -0,0 +1,109 @@
{
  nix,
  writeShellApplication,
}:
writeShellApplication {
  name = "update";

  runtimeInputs = [
    nix
  ];

  text = ''
    commit=
    use_github_output=
    while [ $# -gt 0 ]; do
      case "$1" in
      --commit) commit=1
        ;;
      --github-output) use_github_output=1
        ;;
      --*) echo "unknown option $1"
        ;;
      *) echo "unexpected argument $1"
        ;;
      esac
      shift
    done

    update_args=( )
    if [ -n "$commit" ]; then
      update_args+=( "--commit-lock-file" )
    fi

    currentCommit() {
      git show --no-patch --format=%h
    }

    hasChanges() {
      old="$1"
      new="$2"
      if [ -n "$commit" ]; then
        [ "$old" != "$new" ]
      elif git diff --quiet; then
        return 1
      else
        return 0
      fi
    }

    writeGitHubOutput() {
      if [ -n "$use_github_output" ] && [ -n "$commit" ]; then
        {
          echo "$1<<EOF"
          git show --no-patch --format=%b
          echo "EOF"
        } >> "$GITHUB_OUTPUT"
      fi
    }

    versionInfo() {
      extra_args=( )
      if [ "$1" = "--amend" ]; then
        extra_args+=(
          "--amend"
          "--no-edit"
        )
      fi

      nix-build ./ci -A version-info
      ./result/bin/version-info

      if [ -n "$commit" ]; then
        git add version-info.toml
        git commit "''${extra_args[@]}"
      fi
    }

    # Initialise version-info.toml
    if [ ! -f version-info.toml ]; then
      echo "Creating version-info file"
      versionInfo -m "version-info: init"
    fi

    # Update the root lockfile
    old=$(currentCommit)
    echo "Updating root lockfile"
    nix flake update "''${update_args[@]}"
    new=$(currentCommit)
    if hasChanges "$old" "$new"; then
      echo "Updating version-info"
      versionInfo --amend
      writeGitHubOutput root_lock_body
    fi

    # Update the dev lockfile
    root_nixpkgs=$(nix eval --raw --file . 'inputs.nixpkgs.rev')
    old=$(currentCommit)
    echo "Updating dev lockfile"
    nix flake update "''${update_args[@]}" \
      --override-input 'dev-nixpkgs' "github:NixOS/nixpkgs/$root_nixpkgs" \
      --flake './flake/dev'
    new=$(currentCommit)
    if hasChanges "$old" "$new"; then
      echo "Updating version-info"
      versionInfo --amend
      writeGitHubOutput dev_lock_body
    fi
  '';
}

ci/version-info/default.nix (new file, 42 lines)
@@ -0,0 +1,42 @@
{
  lib,
  callPackage,
  writeShellApplication,
  stdenv,
}:
let
  mainInfo = callPackage ./main.nix { };
  channelsURL = "https://raw.githubusercontent.com/NixOS/infra/refs/heads/main/channels.nix";
in
writeShellApplication {
  name = "version-info";

  runtimeEnv = {
    NIX_CONFIG = ''
      experimental-features = nix-command flakes pipe-operators
    '';
  };

  text = ''
    # Download channel info from NixOS/infra
    curl ${channelsURL} | nix eval --file - --json > channels.json

    # Use channels.nix to build channels.toml
    nix build --impure \
      --inputs-from ${toString ../..} \
      --file ${./supported-versions.nix} \
      --argstr system ${stdenv.hostPlatform.system} \
      --arg-from-file channelsJSON channels.json \
      --out-link channels.toml

    (
      echo "# DO NOT MODIFY!"
      echo "# This file was generated by ${
        lib.strings.removePrefix (toString ../.. + "/") (toString ./default.nix)
      }"
      cat ${mainInfo}
      echo
      cat channels.toml
    ) > version-info.toml
  '';
}

ci/version-info/main.nix (new file, 27 lines)
@@ -0,0 +1,27 @@
{
  lib,
  writers,
}:
let
  inherit (builtins)
    all
    match
    attrNames
    ;
  inherit (lib)
    importJSON
    ;

  lockFile = importJSON ../../flake.lock;
  nixpkgsLock =
    # Assert there is only one nixpkgs node
    assert all (node: match "nixpkgs_[0-9]+" node == null) (attrNames lockFile.nodes);
    lockFile.nodes.nixpkgs.original;

  info = {
    inherit (lib.trivial) release;
    nixpkgs_rev = lib.trivial.revisionWithDefault (throw "nixpkgs revision not available");
    unstable = lib.strings.hasSuffix "-unstable" nixpkgsLock.ref;
  };
in
writers.writeTOML "version-info.toml" info
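
The `assert` above guards against a duplicated nixpkgs input: a second node in `flake.lock` would be renamed to something like `nixpkgs_2`. A toy check with assumed node names:

```nix
let
  nodes = [ "root" "nixpkgs" "flake-parts" ]; # assumed flake.lock node names
in
builtins.all (node: builtins.match "nixpkgs_[0-9]+" node == null) nodes
# => true; an extra "nixpkgs_2" node would make this false and trip the assert
```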

ci/version-info/supported-versions.nix (new file, 62 lines)
@@ -0,0 +1,62 @@
{
  system ? builtins.currentSystem,
  pkgs ? import <nixpkgs> { inherit system; },
  lib ? import <nixpkgs/lib>,
  channelsJSON ? throw "Neither `channels` or `channelsJSON` provided",
  channels ? builtins.fromJSON channelsJSON,
}:
let
  # Pick out supported stable channels
  supported = [
    "beta"
    "stable"
    "deprecated"
  ];

  stable_versions =
    channels.channels
    |> builtins.mapAttrs (channel: entry: entry // { inherit channel; })
    |> builtins.attrValues
    |> builtins.filter (entry: entry.variant or null == "primary")
    |> builtins.filter (entry: builtins.elem entry.status supported)
    |> builtins.map (entry: {
      name = entry.channel |> builtins.match "nixos-(.+)" |> builtins.head;
      value = {
        inherit (entry) channel status;
        # Currently, Nixvim stable branches match NixOS channel names
        branch = entry.channel;
      };
    })
    |> builtins.listToAttrs;

  newest_stable =
    stable_versions |> builtins.attrNames |> builtins.sort (a: b: a > b) |> builtins.head;

  bumpYear = y: toString (lib.toIntBase10 y + 1);
  bumpMonth =
    m:
    assert m == "05" || m == "11";
    if m == "05" then "11" else "05";

  unstable =
    newest_stable
    |> builtins.match "(.+)[.](.+)"
    |> (v: {
      y = builtins.elemAt v 0;
      m = builtins.elemAt v 1;
    })
    |> (v: {
      y = if v.m == "11" then bumpYear v.y else v.y;
      m = bumpMonth v.m;
    })
    |> (v: "${v.y}.${v.m}");
in
pkgs.writers.writeTOML "channels.toml" {
  versions = stable_versions // {
    ${unstable} = {
      branch = "main";
      channel = "nixpkgs-unstable";
      status = "rolling";
    };
  };
}
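
The `|>` chains above rely on Nix's experimental `pipe-operators` feature, which `ci/version-info/default.nix` enables through `NIX_CONFIG`. On a Nix build without that feature, the same shape can be expressed with `lib.pipe`; a toy equivalent of the `newest_stable` selection:

```nix
let
  pkgs = import <nixpkgs> { };
in
pkgs.lib.pipe [ "nixos-24.05" "nixos-24.11" ] [
  # strip the "nixos-" prefix, then sort descending and take the first element
  (builtins.map (c: builtins.head (builtins.match "nixos-(.+)" c)))
  (builtins.sort (a: b: a > b))
  builtins.head
]
# => "24.11"
```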