pluginupdate.py: add support for adding/updating individual plugins

Changed files (+203 -45):

doc/languages-frameworks/vim.section.md
maintainers/scripts/pluginupdate-py/pluginupdate.py
pkgs/applications/editors/kakoune/plugins/update.py
pkgs/applications/editors/vim/plugins/get-plugins.nix
pkgs/by-name/lu/luarocks-packages-updater/updater.py
+6 -1
doc/languages-frameworks/vim.section.md
···
Alternatively, set the number of processes to a lower count to avoid rate-limiting.

```sh
nix-shell -p vimPluginsUpdater --run 'vim-plugins-updater --proc 1'
```

+If you want to update only certain plugins, you can specify them after the `update` command. Note that you must use the same plugin names as the `pkgs/applications/editors/vim/plugins/vim-plugin-names` file.
+
+```sh
+nix-shell -p vimPluginsUpdater --run 'vim-plugins-updater update "nvim-treesitter" "LazyVim"'
+```
+
## How to maintain an out-of-tree overlay of vim plugins ? {#vim-out-of-tree-overlays}
+145 -12
maintainers/scripts/pluginupdate-py/pluginupdate.py
···
                fetch_config, args.input_file, editor.deprecated, append=append
            )
            plugin, _ = prefetch_plugin(pdesc)
+
+            if (  # the lua updater doesn't support updating individual plugins
+                self.name != "lua"
+            ):
+                # update generated.nix
+                update = self.get_update(
+                    args.input_file,
+                    args.outfile,
+                    fetch_config,
+                    [plugin.normalized_name],
+                )
+                update()
+
            autocommit = not args.no_commit
            if autocommit:
                commit(
···
"""CSV spec"""
print("the update member function should be overridden in subclasses")
-
def get_current_plugins(self, nixpkgs: str) -> List[Plugin]:
+
def get_current_plugins(
+
self, config: FetchConfig, nixpkgs: str
+
) -> List[Tuple[PluginDesc, Plugin]]:
"""To fill the cache"""
data = run_nix_expr(self.get_plugins, nixpkgs)
plugins = []
for name, attr in data.items():
-
p = Plugin(name, attr["rev"], attr["submodules"], attr["sha256"])
-
plugins.append(p)
+
checksum = attr["checksum"]
+
+
# https://github.com/NixOS/nixpkgs/blob/8a335419/pkgs/applications/editors/neovim/build-neovim-plugin.nix#L36
+
# https://github.com/NixOS/nixpkgs/pull/344478#discussion_r1786646055
+
version = re.search(r"\d\d\d\d-\d\d?-\d\d?", attr["version"])
+
if version is None:
+
raise ValueError(f"Cannot parse version: {attr['version']}")
+
date = datetime.strptime(version.group(), "%Y-%m-%d")
+
+
pdesc = PluginDesc.load_from_string(config, f'{attr["homePage"]} as {name}')
+
p = Plugin(
+
attr["pname"],
+
checksum["rev"],
+
checksum["submodules"],
+
checksum["sha256"],
+
date,
+
)
+
+
plugins.append((pdesc, p))
return plugins
def load_plugin_spec(self, config: FetchConfig, plugin_file) -> List[PluginDesc]:
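
The rewritten `get_current_plugins` derives a date for each plugin from the generated `version` attribute, which is a date-shaped string per the linked `build-neovim-plugin.nix` comment. A minimal standalone sketch of that parse, with a made-up sample value:

```python
import re
from datetime import datetime

version_attr = "2025-01-17"  # hypothetical value of attr["version"]
match = re.search(r"\d\d\d\d-\d\d?-\d\d?", version_attr)
if match is None:
    raise ValueError(f"Cannot parse version: {version_attr}")
date = datetime.strptime(match.group(), "%Y-%m-%d")
print(date)  # 2025-01-17 00:00:00
```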
···
"""Returns nothing for now, writes directly to outfile"""
raise NotImplementedError()
-
def get_update(self, input_file: str, outfile: str, config: FetchConfig):
-
cache: Cache = Cache(self.get_current_plugins(self.nixpkgs), self.cache_file)
+
def filter_plugins_to_update(
+
self, plugin: PluginDesc, to_update: List[str]
+
) -> bool:
+
"""Function for filtering out plugins, that user doesn't want to update.
+
+
It is mainly used for updating only specific plugins, not all of them.
+
By default it filters out plugins not present in `to_update`,
+
assuming `to_update` is a list of plugin names (the same as in the
+
result expression).
+
+
This function is never called if `to_update` is empty.
+
Feel free to override this function in derived classes.
+
+
Note:
+
Known bug: you have to use a deprecated name, instead of new one.
+
This is because we resolve deprecations later and can't get new
+
plugin URL before we request info about it.
+
+
Although, we could parse deprecated.json, but it's a whole bunch
+
of spaghetti code, which I don't want to write.
+
+
Arguments:
+
plugin: Plugin on which you decide whether to ignore or not.
+
to_update:
+
List of strings passed to via the `--update` command line parameter.
+
By default, we assume it is a list of URIs identical to what
+
is in the input file.
+
+
Returns:
+
True if we should update plugin and False if not.
+
"""
+
return plugin.name.replace(".", "-") in to_update
+
+
def get_update(
+
self,
+
input_file: str,
+
output_file: str,
+
config: FetchConfig,
+
to_update: Optional[List[str]],
+
):
+
if to_update is None:
+
to_update = []
+
+
current_plugins = self.get_current_plugins(config, self.nixpkgs)
+
current_plugin_specs = self.load_plugin_spec(config, input_file)
+
+
cache: Cache = Cache(
+
[plugin for _description, plugin in current_plugins], self.cache_file
+
)
_prefetch = functools.partial(prefetch, cache=cache)
-
def update() -> dict:
-
plugins = self.load_plugin_spec(config, input_file)
+
plugins_to_update = (
+
current_plugin_specs
+
if len(to_update) == 0
+
else [
+
description
+
for description in current_plugin_specs
+
if self.filter_plugins_to_update(description, to_update)
+
]
+
)
+
+
def update() -> Redirects:
+
if len(plugins_to_update) == 0:
+
log.error(
+
"\n\n\n\nIt seems like you provided some arguments to `--update`:\n"
+
+ ", ".join(to_update)
+
+ "\nBut after filtering, the result list of plugins is empty\n"
+
"\n"
+
"Are you sure you provided the same URIs as in your input file?\n"
+
"(" + str(input_file) + ")\n\n"
+
)
+
return {}
try:
pool = Pool(processes=config.proc)
-
results = pool.map(_prefetch, plugins)
+
results = pool.map(_prefetch, plugins_to_update)
finally:
cache.store()
+
print(f"{len(results)} of {len(current_plugins)} were checked")
+
# Do only partial update of out file
+
if len(results) != len(current_plugins):
+
results = self.merge_results(current_plugins, results)
plugins, redirects = check_results(results)
plugins = sorted(plugins, key=lambda v: v[1].normalized_name)
-
self.generate_nix(plugins, outfile)
+
self.generate_nix(plugins, output_file)
return redirects
return update
+    def merge_results(
+        self,
+        current: list[Tuple[PluginDesc, Plugin]],
+        fetched: List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]],
+    ) -> List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]]:
+        # transforming this to dict, so lookup is O(1) instead of O(n) (n is len(current))
+        result: Dict[
+            str, Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]
+        ] = {
+            # also adding redirect (third item in the result tuple)
+            pl.normalized_name: (pdesc, pl, None)
+            for pdesc, pl in current
+        }
+
+        for plugin_desc, plugin, redirect in fetched:
+            result[plugin.normalized_name] = (plugin_desc, plugin, redirect)
+
+        return list(result.values())
+
    @property
    def attr_path(self):
        return self.name + "Plugins"
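
The docstring above invites overriding `filter_plugins_to_update` in editor subclasses. A minimal sketch of such an override that also accepts the full repository URI; the subclass name is hypothetical, and the `plugin.repo.uri` attribute access is an assumption about the `Repo` class:

```python
from typing import List

import pluginupdate


class MyEditor(pluginupdate.Editor):
    def filter_plugins_to_update(
        self, plugin: pluginupdate.PluginDesc, to_update: List[str]
    ) -> bool:
        # Match either the name used in the input file (dots normalized to dashes,
        # as in the default implementation) or the repository URI.
        # `plugin.repo.uri` is assumed to exist on the Repo class.
        candidates = {plugin.name.replace(".", "-"), str(plugin.repo.uri)}
        return bool(candidates & set(to_update))
```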
···
description="Update all or a subset of existing plugins",
add_help=False,
)
+
pupdate.add_argument(
+
"update_only",
+
default=None,
+
nargs="*",
+
help="Plugin URLs to update (must be the same as in the input file)",
+
)
pupdate.set_defaults(func=self.update)
return main
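
For reference, a standalone sketch (not the real parser, which defines many more options) of how a `nargs="*"` positional such as `update_only` collects the names given after the `update` subcommand:

```python
import argparse

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(dest="command")
pupdate = subparsers.add_parser("update")
pupdate.add_argument("update_only", default=None, nargs="*")

args = parser.parse_args(["update", "nvim-treesitter", "LazyVim"])
print(args.update_only)  # ['nvim-treesitter', 'LazyVim']
```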
···
            new_pdesc = PluginDesc(redirect, pdesc.branch, pdesc.alias)
            plugins.append((new_pdesc, result))

-    print(f"{len(results) - len(failures)} plugins were checked", end="")
    if len(failures) == 0:
        return plugins, redirects
    else:
-        log.error(f", {len(failures)} plugin(s) could not be downloaded:\n")
+        log.error(f"{len(failures)} plugin(s) could not be downloaded:\n")
        for plugin, exception in failures:
            print_download_error(plugin, exception)
···
    )
    fetch_config = FetchConfig(args.proc, args.github_token)
-    update = editor.get_update(args.input_file, args.outfile, fetch_config)
+    update = editor.get_update(
+        input_file=args.input_file,
+        output_file=args.outfile,
+        config=fetch_config,
+        to_update=getattr(  # None when the subcommand defines no update_only argument
+            args, "update_only", None
+        ),
+    )
    start_time = time.time()
    redirects = update()
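
With the keyword-style call above, the updater can also be driven from Python directly. A hedged sketch, assuming `editor` is an already configured `Editor` subclass instance and using the usual vim plugin paths; `FetchConfig`'s positional fields follow its call site above:

```python
import pluginupdate

# `editor` is assumed to be a configured pluginupdate.Editor subclass instance.
config = pluginupdate.FetchConfig(1, "")  # one process, empty GitHub token (assumed field order)
update = editor.get_update(
    input_file="pkgs/applications/editors/vim/plugins/vim-plugin-names",
    output_file="pkgs/applications/editors/vim/plugins/generated.nix",
    config=config,
    to_update=["nvim-treesitter", "LazyVim"],
)
redirects = update()  # prefetches only the filtered plugins and merges them into generated.nix
```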
+20 -18
pkgs/applications/editors/kakoune/plugins/update.py
···
)
import pluginupdate

-GET_PLUGINS = f"""(
-with import <localpkgs> {{ }};
+GET_PLUGINS = f"""with import <localpkgs> {{ }};
let
  inherit (kakouneUtils.override {{ }}) buildKakounePluginFrom2Nix;
-  generated = callPackage {ROOT}/generated.nix {{
-    inherit buildKakounePluginFrom2Nix;
-  }};
+  generated = callPackage {ROOT}/generated.nix {{ inherit buildKakounePluginFrom2Nix; }};
+
  hasChecksum =
    value:
    lib.isAttrs value
···
"src"
"outputHash"
] value;
-
getChecksum =
-
name: value:
-
if hasChecksum value then
-
{{
-
submodules = value.src.fetchSubmodules or false;
-
sha256 = value.src.outputHash;
-
rev = value.src.rev;
-
}}
-
else
-
null;
-
checksums = lib.mapAttrs getChecksum generated;
+
+
parse = name: value: {{
+
pname = value.pname;
+
version = value.version;
+
homePage = value.meta.homepage;
+
checksum =
+
if hasChecksum value then
+
{{
+
submodules = value.src.fetchSubmodules or false;
+
sha256 = value.src.outputHash;
+
rev = value.src.rev;
+
}}
+
else
+
null;
+
}};
in
-
lib.filterAttrs (n: v: v != null) checksums
-
)"""
+
lib.mapAttrs parse generated"""
HEADER = "# This file has been @generated by ./pkgs/applications/editors/kakoune/plugins/update.py. Do not edit!"
+
class KakouneEditor(pluginupdate.Editor):
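
Both this expression and the vim `get-plugins.nix` change below now return one attrset per plugin (rather than just a checksum set), which is the shape the new `get_current_plugins` expects. A sketch of a single entry, with invented values:

```python
# Hypothetical entry as seen by get_current_plugins after run_nix_expr (values invented):
example_attr = {
    "pname": "example-plugin",
    "version": "2025-01-17",  # date-shaped version, parsed with %Y-%m-%d
    "homePage": "https://github.com/example/example-plugin",
    "checksum": {  # null in the Nix output when the source has no fixed-output hash
        "rev": "0123456789abcdef",
        "submodules": False,
        "sha256": "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=",
    },
}
```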
+17 -12
pkgs/applications/editors/vim/plugins/get-plugins.nix
···
  generated = callPackage <localpkgs/pkgs/applications/editors/vim/plugins/generated.nix> {
    inherit buildNeovimPlugin buildVimPlugin;
  } { } { };
+
  hasChecksum =
    value:
    lib.isAttrs value
···
"src"
"outputHash"
] value;
-
getChecksum =
-
name: value:
-
if hasChecksum value then
-
{
-
submodules = value.src.fetchSubmodules or false;
-
sha256 = value.src.outputHash;
-
rev = value.src.rev;
-
}
-
else
-
null;
-
checksums = lib.mapAttrs getChecksum generated;
+
+
parse = name: value: {
+
pname = value.pname;
+
version = value.version;
+
homePage = value.meta.homepage;
+
checksum =
+
if hasChecksum value then
+
{
+
submodules = value.src.fetchSubmodules or false;
+
sha256 = value.src.outputHash;
+
rev = value.src.rev;
+
}
+
else
+
null;
+
};
in
-
lib.filterAttrs (n: v: v != null) checksums
+
lib.mapAttrs parse generated
+15 -2
pkgs/by-name/lu/luarocks-packages-updater/updater.py
···
log = logging.getLogger()
log.addHandler(logging.StreamHandler())

-ROOT = Path(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))).parent.parent  # type: ignore
+ROOT = Path(
+    os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
+).parent.parent  # type: ignore
PKG_LIST = "maintainers/scripts/luarocks-packages.csv"
TMP_FILE = "$(mktemp)"
···
    def attr_path(self):
        return "luaPackages"

-    def get_update(self, input_file: str, outfile: str, config: FetchConfig):
+    def get_update(
+        self,
+        input_file: str,
+        outfile: str,
+        config: FetchConfig,
+        # TODO: implement support for adding/updating individual plugins
+        to_update: Optional[List[str]],
+    ):
+        if to_update is not None:
+            raise NotImplementedError(
+                "For now, lua updater doesn't support updating individual packages."
+            )
        _prefetch = generate_pkg_nix

        def update() -> dict: