···
from multiprocessing.dummy import Pool
from tempfile import NamedTemporaryFile
-from typing import Any, Callable, Dict, List, Optional, Tuple, Union
from urllib.parse import urljoin, urlparse
···
# a dictionary of plugins and their new repositories
-Redirects = Dict["PluginDesc", "Repo"]
···
# Redirect is the new Repo to use
-self.redirect: Optional["Repo"] = None
self.token = "dummy_token"
···
@retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
-def latest_commit(self) -> Tuple[str, datetime]:
log.debug("Latest commit")
loaded = self._prefetch(None)
updated = datetime.strptime(loaded["date"], "%Y-%m-%dT%H:%M:%S%z")
return loaded["rev"], updated
-def _prefetch(self, ref: Optional[str]):
cmd = ["nix-prefetch-git", "--quiet", "--fetch-submodules", self.uri]
···
loaded = json.loads(data)
return loaded
-def prefetch(self, ref: Optional[str]) -> str:
log.info("Prefetching %s", self.uri)
loaded = self._prefetch(ref)
···
@retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
-def latest_commit(self) -> Tuple[str, datetime]:
commit_url = self.url(f"commits/{self.branch}.atom")
log.debug("Sending request to %s", commit_url)
commit_req = make_request(commit_url, self.token)
···
return self.alias or self.repo.name
-def load_from_csv(config: FetchConfig, row: Dict[str, str]) -> "PluginDesc":
log.debug("Loading row %s", row)
branch = row["branch"]
repo = make_repo(row["repo"], branch.strip())
···
-date: Optional[datetime] = None
@property
def normalized_name(self) -> str:
···
assert self.date is not None
return self.date.strftime("%Y-%m-%d")
-def as_json(self) -> Dict[str, str]:
copy = self.__dict__.copy()
···
def load_plugins_from_csv(
log.debug("Load plugins from csv %s", input_file)
with open(input_file, newline="") as csvfile:
···
-default_in: Optional[Path] = None,
-default_out: Optional[Path] = None,
-deprecated: Optional[Path] = None,
-cache_file: Optional[str] = None,
log.debug("get_plugins: %s", get_plugins)
···
self, config: FetchConfig, nixpkgs: str
-) -> List[Tuple[PluginDesc, Plugin]]:
data = run_nix_expr(self.get_plugins, nixpkgs)
···
plugins.append((pdesc, p))
-def load_plugin_spec(self, config: FetchConfig, plugin_file) -> List[PluginDesc]:
return load_plugins_from_csv(config, plugin_file)
···
raise NotImplementedError()
def filter_plugins_to_update(
-self, plugin: PluginDesc, to_update: List[str]
) -> bool:
"""Function for filtering out plugins that the user doesn't want to update.
···
-to_update: Optional[List[str]],
···
-current: list[Tuple[PluginDesc, Plugin]],
-fetched: List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]],
-) -> List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]]:
# transform this into a dict so each lookup is O(1) instead of O(n) (n = len(current))
-str, Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]
# also add the redirect (third item in the result tuple)
pl.normalized_name: (pdesc, pl, None)
···
-cache: "Optional[Cache]" = None,
-) -> Tuple[Plugin, Optional[Repo]]:
log.info(f"Fetching last commit for plugin {p.name} from {p.repo.uri}@{p.branch}")
commit, date = p.repo.latest_commit()
···
-results: List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]],
-) -> Tuple[List[Tuple[PluginDesc, Plugin]], Redirects]:
-failures: List[Tuple[PluginDesc, Exception]] = []
redirects: Redirects = {}
for pdesc, result, redirect in results:
···
-def get_cache_path(cache_file_name: str) -> Optional[Path]:
xdg_cache = os.environ.get("XDG_CACHE_HOME", None)
if xdg_cache is None:
home = os.environ.get("HOME", None)
···
-def __init__(self, initial_plugins: List[Plugin], cache_file_name: str) -> None:
self.cache_file = get_cache_path(cache_file_name)
···
downloads.update(self.load())
self.downloads = downloads
-def load(self) -> Dict[str, Plugin]:
if self.cache_file is None or not self.cache_file.exists():
return {}
-downloads: Dict[str, Plugin] = {}
with open(self.cache_file) as f:
data = json.load(f)
for attr in data.values():
···
data[name] = attr.as_json()
json.dump(data, f, indent=4, sort_keys=True)
-def __getitem__(self, key: str) -> Optional[Plugin]:
return self.downloads.get(key, None)
def __setitem__(self, key: str, value: Plugin) -> None:
···
pluginDesc: PluginDesc, cache: Cache
-) -> Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]:
try:
plugin, redirect = prefetch_plugin(pluginDesc, cache)
cache[plugin.commit] = plugin
···
# maps the old PluginDesc to the new Repo it moved to
redirects: Redirects = {},
-append: List[PluginDesc] = [],
log.info("Rewriting input file %s", input_file)
plugins = load_plugins_from_csv(config, input_file)
···
writer.writerow(asdict(plugin))
-def commit(repo: git.Repo, message: str, files: List[Path]) -> None:
repo.index.add([str(f.resolve()) for f in files])
if repo.index.diff("HEAD"):
···
from multiprocessing.dummy import Pool
from tempfile import NamedTemporaryFile
+from typing import Any, Callable
from urllib.parse import urljoin, urlparse
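This import hunk sets the theme for the whole diff: the deprecated `typing` aliases (`Dict`, `List`, `Optional`, `Tuple`, `Union`) give way to PEP 585 builtin generics (Python 3.9+) and PEP 604 `|` unions (Python 3.10+). `Any` and `Callable` stay imported, since `Any` has no builtin equivalent (`Callable` could come from `collections.abc`, which this diff does not do). A minimal before/after sketch:

from typing import Dict, List, Optional, Tuple, Union  # old spellings

def old(xs: List[int], m: Dict[str, int]) -> Optional[Tuple[int, Union[str, int]]]: ...

def new(xs: list[int], m: dict[str, int]) -> tuple[int, str | int] | None: ...  # 3.9/3.10+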
···
# a dictionary of plugins and their new repositories
+Redirects = dict["PluginDesc", "Repo"]
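`Redirects` maps a plugin whose repository has moved to the new `Repo` to fetch from; `rewrite_input` later consumes it to rewrite the plugin CSV. A sketch of how entries accumulate, modeled on the `(pdesc, result, redirect)` triples that the `check_results` loop further down iterates (illustrative, not the script's exact code):

redirects: Redirects = {}
for pdesc, result, redirect in results:
    if redirect is not None:
        redirects[pdesc] = redirect  # this plugin now lives at a new repository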
···
# Redirect is the new Repo to use
+self.redirect: "Repo | None" = None
self.token = "dummy_token"
···
@retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
+def latest_commit(self) -> tuple[str, datetime]:
log.debug("Latest commit")
loaded = self._prefetch(None)
updated = datetime.strptime(loaded["date"], "%Y-%m-%dT%H:%M:%S%z")
return loaded["rev"], updated
+def _prefetch(self, ref: str | None):
cmd = ["nix-prefetch-git", "--quiet", "--fetch-submodules", self.uri]
···
loaded = json.loads(data)
return loaded
+def prefetch(self, ref: str | None) -> str:
log.info("Prefetching %s", self.uri)
loaded = self._prefetch(ref)
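`_prefetch` shells out to nix-prefetch-git and parses its JSON output: `latest_commit` reads the `rev` and `date` fields (hence the `%Y-%m-%dT%H:%M:%S%z` format above), while `prefetch` is after the hash. For orientation, the parsed payload looks roughly like this; the values are placeholders and the exact field set depends on the nix-prefetch-git version:

loaded = {
    "url": "https://github.com/example/example-plugin",   # hypothetical
    "rev": "0000000000000000000000000000000000000000",
    "date": "2024-01-01T12:00:00+01:00",
    "sha256": "...",                                      # hash consumed by prefetch
    "fetchSubmodules": True,
}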
···
@retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
+def latest_commit(self) -> tuple[str, datetime]:
commit_url = self.url(f"commits/{self.branch}.atom")
log.debug("Sending request to %s", commit_url)
commit_req = make_request(commit_url, self.token)
···
return self.alias or self.repo.name
+def load_from_csv(config: FetchConfig, row: dict[str, str]) -> "PluginDesc":
log.debug("Loading row %s", row)
branch = row["branch"]
repo = make_repo(row["repo"], branch.strip())
···
+date: datetime | None = None
@property
def normalized_name(self) -> str:
···
assert self.date is not None
return self.date.strftime("%Y-%m-%d")
+def as_json(self) -> dict[str, str]:
copy = self.__dict__.copy()
···
def load_plugins_from_csv(
log.debug("Load plugins from csv %s", input_file)
with open(input_file, newline="") as csvfile:
···
+default_in: Path | None = None,
+default_out: Path | None = None,
+deprecated: Path | None = None,
+cache_file: str | None = None,
log.debug("get_plugins: %s", get_plugins)
···
self, config: FetchConfig, nixpkgs: str
+) -> list[tuple[PluginDesc, Plugin]]:
data = run_nix_expr(self.get_plugins, nixpkgs)
···
plugins.append((pdesc, p))
+def load_plugin_spec(self, config: FetchConfig, plugin_file) -> list[PluginDesc]:
return load_plugins_from_csv(config, plugin_file)
···
raise NotImplementedError()
def filter_plugins_to_update(
+self, plugin: PluginDesc, to_update: list[str]
) -> bool:
"""Function for filtering out plugins that the user doesn't want to update.
···
+to_update: list[str] | None,
···
+current: list[tuple[PluginDesc, Plugin]],
+fetched: list[tuple[PluginDesc, Exception | Plugin, Repo | None]],
+) -> list[tuple[PluginDesc, Exception | Plugin, Repo | None]]:
# transform this into a dict so each lookup is O(1) instead of O(n) (n = len(current))
+result: dict[str, tuple[PluginDesc, Exception | Plugin, Repo | None]] = {
# also add the redirect (third item in the result tuple)
pl.normalized_name: (pdesc, pl, None)
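Keying the current plugins by normalized name lets each fetched result replace its counterpart with one hash lookup rather than a scan of `current`. A condensed sketch of the whole merge; the update loop is elided from this hunk, and keying fetched entries by the `PluginDesc` (assumed to expose a matching `normalized_name`) matters because the middle element may be an `Exception`:

result = {pl.normalized_name: (pdesc, pl, None) for pdesc, pl in current}
for pdesc, pl, redirect in fetched:
    result[pdesc.normalized_name] = (pdesc, pl, redirect)  # fresh wins over stale
return list(result.values())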
···
+cache: "Cache | None" = None,
+) -> tuple[Plugin, Repo | None]:
log.info(f"Fetching last commit for plugin {p.name} from {p.repo.uri}@{p.branch}")
commit, date = p.repo.latest_commit()
···
+results: list[tuple[PluginDesc, Exception | Plugin, Repo | None]],
+) -> tuple[list[tuple[PluginDesc, Plugin]], Redirects]:
+failures: list[tuple[PluginDesc, Exception]] = []
redirects: Redirects = {}
for pdesc, result, redirect in results:
···
+def get_cache_path(cache_file_name: str) -> Path | None:
xdg_cache = os.environ.get("XDG_CACHE_HOME", None)
if xdg_cache is None:
home = os.environ.get("HOME", None)
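`get_cache_path` resolves the cache directory XDG-style: honor `$XDG_CACHE_HOME` when set, fall back to `$HOME/.cache`, and return None when neither variable exists (which `load` below treats as "no cache"). A sketch of that resolution, consistent with the two environment lookups shown:

import os
from pathlib import Path

def get_cache_path_sketch(cache_file_name: str) -> Path | None:
    xdg_cache = os.environ.get("XDG_CACHE_HOME", None)
    if xdg_cache is None:
        home = os.environ.get("HOME", None)
        if home is None:
            return None                  # nowhere sensible to cache
        xdg_cache = f"{home}/.cache"
    return Path(xdg_cache) / cache_file_name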
···
+def __init__(self, initial_plugins: list[Plugin], cache_file_name: str) -> None:
self.cache_file = get_cache_path(cache_file_name)
···
downloads.update(self.load())
self.downloads = downloads
+def load(self) -> dict[str, Plugin]:
if self.cache_file is None or not self.cache_file.exists():
return {}
+downloads: dict[str, Plugin] = {}
with open(self.cache_file) as f:
data = json.load(f)
for attr in data.values():
···
data[name] = attr.as_json()
json.dump(data, f, indent=4, sort_keys=True)
+def __getitem__(self, key: str) -> Plugin | None:
return self.downloads.get(key, None)
def __setitem__(self, key: str, value: Plugin) -> None:
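The dunder pair makes `Cache` read like a dict keyed by commit hash, and a miss returns None instead of raising KeyError, so callers can probe with a plain `is None` check before prefetching. Hypothetical usage (the file name is made up):

cache = Cache([], "test-plugin-cache.json")
plugin = cache["0123abcd"]   # None on a miss, never a KeyError
if plugin is None:
    ...                      # fall back to prefetch_plugin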
···
pluginDesc: PluginDesc, cache: Cache
+) -> tuple[PluginDesc, Exception | Plugin, Repo | None]:
try:
plugin, redirect = prefetch_plugin(pluginDesc, cache)
cache[plugin.commit] = plugin
···
# maps the old PluginDesc to the new Repo it moved to
redirects: Redirects = {},
+append: list[PluginDesc] = [],
log.info("Rewriting input file %s", input_file)
plugins = load_plugins_from_csv(config, input_file)
···
writer.writerow(asdict(plugin))
+def commit(repo: git.Repo, message: str, files: list[Path]) -> None:
repo.index.add([str(f.resolve()) for f in files])
if repo.index.diff("HEAD"):