Merge master into staging-next

Changed files (+718 -400)

.github/workflows
lib
maintainers
pkgs/
  applications/
    emulators/libretro/cores
    emulators/mame
    networking/browsers/palemoon
  by-name/
    az/azure-cli
    ca/caesura
    ch/check-jsonschema
    cl/cloak-pt
    ew/eww
    fa/fawltydeps
    gh/ghostfolio
    gl/glusterfs
    go/gof5
    ju/jujutsu
    ma/mapnik
    mi/microsoft-edge
    n9/n98-magerun2
    ne/nexusmods-app
    ok/oklch-color-picker
    ol/ollama
    ph/phpunit
    re/release-plz
    sa/saga
    sp/sparrow
    tb/tbls
    we/
  development/
    libraries/mesa
    python-modules/azure-mgmt-containerservice
    python-modules/azure-multiapi-storage
    tools/misc/binutils
  games/path-of-building
  servers/
    home-assistant/custom-lovelace-modules/universal-remote-card
    mastodon
    minio
  tools/package-management/nix
-9
.github/stale.yml
···
-# Configuration for probot-stale - https://github.com/probot/stale
-daysUntilStale: 180
-daysUntilClose: false
-exemptLabels:
-  - "1.severity: security"
-  - "2.status: never-stale"
-staleLabel: "2.status: stale"
-markComment: false
-closeComment: false
+201 -162
.github/workflows/labels.yml
···
# This is used as fallback without app only.
# This happens when testing in forks without setting up that app.
-# Labels will most likely not exist in forks, yet. For this case,
-# we add the issues permission only here.
permissions:
-  issues: write # needed to create *new* labels
+  issues: write
  pull-requests: write
defaults:
···
with:
app-id: ${{ vars.NIXPKGS_CI_APP_ID }}
private-key: ${{ secrets.NIXPKGS_CI_APP_PRIVATE_KEY }}
-# No issues: write permission here, because labels in Nixpkgs should
-# be created explicitly via the UI with color and description.
+permission-issues: write
permission-pull-requests: write
- name: Log current API rate limits
···
const Bottleneck = require('bottleneck')
const path = require('node:path')
const { DefaultArtifactClient } = require('@actions/artifact')
-const { readFile } = require('node:fs/promises')
+const { readFile, writeFile } = require('node:fs/promises')
const artifactClient = new DefaultArtifactClient()
const stats = {
+  issues: 0,
  prs: 0,
  requests: 0,
  artifacts: 0
···
// Update remaining requests every minute to account for other jobs running in parallel.
const reservoirUpdater = setInterval(updateReservoir, 60 * 1000)
-async function handle(item) {
-  try {
-    const log = (k,v,skip) => {
-      core.info(`#${item.number} - ${k}: ${v}` + (skip ? ' (skipped)' : ''))
-      return skip
-    }
+async function handlePullRequest(item) {
+  const log = (k,v) => core.info(`PR #${item.number} - ${k}: ${v}`)
-    log('Last updated at', item.updated_at)
-    stats.prs++
-    log('URL', item.html_url)
+  const pull_number = item.number
-    const pull_number = item.number
-    const issue_number = item.number
+  // This API request is important for the merge-conflict label, because it triggers the
+  // creation of a new test merge commit. This is needed to actually determine the state of a PR.
+  const pull_request = (await github.rest.pulls.get({
+    ...context.repo,
+    pull_number
+  })).data
-    // This API request is important for the merge-conflict label, because it triggers the
-    // creation of a new test merge commit. This is needed to actually determine the state of a PR.
-    const pull_request = (await github.rest.pulls.get({
+  const approvals = new Set(
+    (await github.paginate(github.rest.pulls.listReviews, {
      ...context.repo,
      pull_number
-    })).data
+    }))
+      .filter(review => review.state == 'APPROVED')
+      .map(review => review.user?.id)
+  )
+
+  // After creation of a Pull Request, `merge_commit_sha` will be null initially:
+  // The very first merge commit will only be calculated after a little while.
+  // To avoid labeling the PR as conflicted before that, we wait a few minutes.
+  // This is intentionally less than the time that Eval takes, so that the label job
+  // running after Eval can indeed label the PR as conflicted if that is the case.
+  const merge_commit_sha_valid = new Date() - new Date(pull_request.created_at) > 3 * 60 * 1000
+
+  const prLabels = {
+    // We intentionally don't use the mergeable or mergeable_state attributes.
+    // Those have an intermediate state while the test merge commit is created.
+    // This doesn't work well for us, because we might have just triggered another
+    // test merge commit creation by requesting the pull request via API at the start
+    // of this function.
+    // The attribute merge_commit_sha keeps the old value of null or the hash *until*
+    // the new test merge commit has either successfully been created or failed to.
+    // This essentially means we are updating the merge conflict label in two steps:
+    // On the first pass of the day, we just fetch the pull request, which triggers
+    // the creation. At this stage, the label is likely not updated, yet.
+    // The second pass will then read the result from the first pass and set the label.
+    '2.status: merge conflict': merge_commit_sha_valid && !pull_request.merge_commit_sha,
+    '12.approvals: 1': approvals.size == 1,
+    '12.approvals: 2': approvals.size == 2,
+    '12.approvals: 3+': approvals.size >= 3,
+    '12.first-time contribution':
+      [ 'NONE', 'FIRST_TIMER', 'FIRST_TIME_CONTRIBUTOR' ].includes(pull_request.author_association),
+  }
-    const run_id = (await github.rest.actions.listWorkflowRuns({
+  const run_id = (await github.rest.actions.listWorkflowRuns({
+    ...context.repo,
+    workflow_id: 'pr.yml',
+    event: 'pull_request_target',
+    exclude_pull_requests: true,
+    head_sha: pull_request.head.sha
+  })).data.workflow_runs[0]?.id ??
+    // TODO: Remove this after 2025-09-17, at which point all eval.yml artifacts will have expired.
+    (await github.rest.actions.listWorkflowRuns({
      ...context.repo,
-      workflow_id: 'pr.yml',
+      // In older PRs, we need eval.yml instead of pr.yml.
+      workflow_id: 'eval.yml',
      event: 'pull_request_target',
+      status: 'success',
      exclude_pull_requests: true,
      head_sha: pull_request.head.sha
-    })).data.workflow_runs[0]?.id ??
-    // TODO: Remove this after 2025-09-17, at which point all eval.yml artifacts will have expired.
-    (await github.rest.actions.listWorkflowRuns({
-      ...context.repo,
-      // In older PRs, we need eval.yml instead of pr.yml.
-      workflow_id: 'eval.yml',
-      event: 'pull_request_target',
-      status: 'success',
-      exclude_pull_requests: true,
-      head_sha: pull_request.head.sha
-    })).data.workflow_runs[0]?.id
+    })).data.workflow_runs[0]?.id
-    // Newer PRs might not have run Eval to completion, yet.
-    // Older PRs might not have an eval.yml workflow, yet.
-    // In either case we continue without fetching an artifact on a best-effort basis.
-    log('Last eval run', run_id ?? '<n/a>')
+  // Newer PRs might not have run Eval to completion, yet.
+  // Older PRs might not have an eval.yml workflow, yet.
+  // In either case we continue without fetching an artifact on a best-effort basis.
+  log('Last eval run', run_id ?? '<n/a>')
+
+  const artifact = run_id && (await github.rest.actions.listWorkflowRunArtifacts({
+    ...context.repo,
+    run_id,
+    name: 'comparison'
+  })).data.artifacts[0]
-    const artifact = run_id && (await github.rest.actions.listWorkflowRunArtifacts({
-      ...context.repo,
-      run_id,
-      name: 'comparison'
-    })).data.artifacts[0]
+  // Instead of checking the boolean artifact.expired, we give ourselves a minute to
+  // actually download the artifact in the next step and avoid that race condition.
+  // Older PRs, where the workflow run was already eval.yml, but the artifact was not
+  // called "comparison", yet, will skip the download.
+  const expired = !artifact || new Date(artifact?.expires_at ?? 0) < new Date(new Date().getTime() + 60 * 1000)
+  log('Artifact expires at', artifact?.expires_at ?? '<n/a>')
+  if (!expired) {
+    stats.artifacts++
+
+    await artifactClient.downloadArtifact(artifact.id, {
+      findBy: {
+        repositoryName: context.repo.repo,
+        repositoryOwner: context.repo.owner,
+        token: core.getInput('github-token')
+      },
+      path: path.resolve(pull_number.toString()),
+      expectedHash: artifact.digest
+    })
+
+    const maintainers = new Set(Object.keys(
+      JSON.parse(await readFile(`${pull_number}/maintainers.json`, 'utf-8'))
+    ).map(m => Number.parseInt(m, 10)))
+
+    const evalLabels = JSON.parse(await readFile(`${pull_number}/changed-paths.json`, 'utf-8')).labels
+
+    Object.assign(
+      prLabels,
+      // Ignore `evalLabels` if it's an array.
+      // This can happen for older eval runs, before we switched to objects.
+      // The old eval labels would have been set by the eval run,
+      // so now they'll be present in `before`.
+      // TODO: Simplify once old eval results have expired (~2025-10)
+      (Array.isArray(evalLabels) ? undefined : evalLabels),
+      {
+        '12.approved-by: package-maintainer': Array.from(maintainers).some(m => approvals.has(m)),
+      }
+    )
+  }
-    // Instead of checking the boolean artifact.expired, we will give us a minute to
-    // actually download the artifact in the next step and avoid that race condition.
-    // Older PRs, where the workflow run was already eval.yml, but the artifact was not
-    // called "comparison", yet, will skip the download.
-    const expired = !artifact || new Date(artifact?.expires_at ?? 0) < new Date(new Date().getTime() + 60 * 1000)
-    log('Artifact expires at', artifact?.expires_at ?? '<n/a>')
-    if (!expired) {
-      stats.artifacts++
+  return prLabels
+}
-      await artifactClient.downloadArtifact(artifact.id, {
-        findBy: {
-          repositoryName: context.repo.repo,
-          repositoryOwner: context.repo.owner,
-          token: core.getInput('github-token')
-        },
-        path: path.resolve(pull_number.toString()),
-        expectedHash: artifact.digest
-      })
+async function handle(item) {
+  try {
+    const log = (k,v,skip) => {
+      core.info(`#${item.number} - ${k}: ${v}` + (skip ? ' (skipped)' : ''))
+      return skip
    }
-    // Create a map (Label -> Boolean) of all currently set labels.
-    // Each label is set to True and can be disabled later.
-    const before = Object.fromEntries(
-      (await github.paginate(github.rest.issues.listLabelsOnIssue, {
-        ...context.repo,
-        issue_number
-      }))
-      .map(({ name }) => [name, true])
-    )
+    log('Last updated at', item.updated_at)
+    log('URL', item.html_url)
+
+    const issue_number = item.number
+
+    const itemLabels = {}
-    const approvals = new Set(
-      (await github.paginate(github.rest.pulls.listReviews, {
-        ...context.repo,
-        pull_number
-      }))
-      .filter(review => review.state == 'APPROVED')
-      .map(review => review.user?.id)
-    )
+    if (item.pull_request) {
+      stats.prs++
+      Object.assign(itemLabels, await handlePullRequest(item))
+    } else {
+      stats.issues++
+    }
    const latest_event_at = new Date(
      (await github.paginate(
···
    const stale_at = new Date(new Date().setDate(new Date().getDate() - 180))
-    // After creation of a Pull Request, `merge_commit_sha` will be null initially:
-    // The very first merge commit will only be calculated after a little while.
-    // To avoid labeling the PR as conflicted before that, we wait a few minutes.
-    // This is intentionally less than the time that Eval takes, so that the label job
-    // running after Eval can indeed label the PR as conflicted if that is the case.
-    const merge_commit_sha_valid = new Date() - new Date(pull_request.created_at) > 3 * 60 * 1000
-
-    // Manage most of the labels, without eval results
-    const after = Object.assign(
-      {},
-      before,
-      {
-        // We intentionally don't use the mergeable or mergeable_state attributes.
-        // Those have an intermediate state while the test merge commit is created.
-        // This doesn't work well for us, because we might have just triggered another
-        // test merge commit creation by requesting the pull request via API at the start
-        // of this function.
-        // The attribute merge_commit_sha keeps the old value of null or the hash *until*
-        // the new test merge commit has either successfully been created or failed to.
-        // This essentially means we are updating the merge conflict label in two steps:
-        // On the first pass of the day, we just fetch the pull request, which triggers
-        // the creation. At this stage, the label is likely not updated, yet.
-        // The second pass will then read the result from the first pass and set the label.
-        '2.status: merge conflict': merge_commit_sha_valid && !pull_request.merge_commit_sha,
-        '2.status: stale': !before['1.severity: security'] && latest_event_at < stale_at,
-        '12.approvals: 1': approvals.size == 1,
-        '12.approvals: 2': approvals.size == 2,
-        '12.approvals: 3+': approvals.size >= 3,
-        '12.first-time contribution':
-          [ 'NONE', 'FIRST_TIMER', 'FIRST_TIME_CONTRIBUTOR' ].includes(pull_request.author_association),
-      }
+    // Create a map (Label -> Boolean) of all currently set labels.
+    // Each label is set to True and can be disabled later.
+    const before = Object.fromEntries(
+      (await github.paginate(github.rest.issues.listLabelsOnIssue, {
+        ...context.repo,
+        issue_number
+      }))
+      .map(({ name }) => [name, true])
    )
-    // Manage labels based on eval results
-    if (!expired) {
-      const maintainers = new Set(Object.keys(
-        JSON.parse(await readFile(`${pull_number}/maintainers.json`, 'utf-8'))
-      ).map(m => Number.parseInt(m, 10)))
+    Object.assign(itemLabels, {
+      '2.status: stale': !before['1.severity: security'] && latest_event_at < stale_at,
+    })
-      const evalLabels = JSON.parse(await readFile(`${pull_number}/changed-paths.json`, 'utf-8')).labels
-
-      Object.assign(
-        after,
-        // Ignore `evalLabels` if it's an array.
-        // This can happen for older eval runs, before we switched to objects.
-        // The old eval labels would have been set by the eval run,
-        // so now they'll be present in `before`.
-        // TODO: Simplify once old eval results have expired (~2025-10)
-        (Array.isArray(evalLabels) ? undefined : evalLabels),
-        {
-          '12.approved-by: package-maintainer': Array.from(maintainers).some(m => approvals.has(m)),
-        }
-      )
-    }
+    const after = Object.assign({}, before, itemLabels)
    // No need for an API request if all labels are the same.
    const hasChanges = Object.keys(after).some(name => (before[name] ?? false) != after[name])
···
  if (context.payload.pull_request) {
    await handle(context.payload.pull_request)
  } else {
-    const workflowData = (await github.rest.actions.listWorkflowRuns({
+    const lastRun = (await github.rest.actions.listWorkflowRuns({
      ...context.repo,
      workflow_id: 'labels.yml',
      event: 'schedule',
      status: 'success',
      exclude_pull_requests: true,
      per_page: 1
-    })).data
+    })).data.workflow_runs[0]
    // Go back as far as the last successful run of this workflow to make sure
    // we are not leaving anyone behind on GHA failures.
    // Defaults to go back 1 hour on the first run.
-    const cutoff = new Date(workflowData.workflow_runs[0]?.created_at ?? new Date().getTime() - 1 * 60 * 60 * 1000)
+    const cutoff = new Date(lastRun?.created_at ?? new Date().getTime() - 1 * 60 * 60 * 1000)
    core.info('cutoff timestamp: ' + cutoff.toISOString())
    const updatedItems = await github.paginate(
···
      {
        q: [
          `repo:"${process.env.GITHUB_REPOSITORY}"`,
-          'type:pr',
          'is:open',
          `updated:>=${cutoff.toISOString()}`
        ].join(' AND '),
···
      }
    )
-    // The search endpoint only allows fetching the first 1000 records, but the
-    // pull request list endpoint does not support counting the total number
-    // of results.
-    // Thus, we use /search for counting and /pulls for reading the response.
-    const { total_count: total_pulls } = (await github.rest.search.issuesAndPullRequests({
-      q: [
-        `repo:"${process.env.GITHUB_REPOSITORY}"`,
-        'type:pr',
-        'is:open'
-      ].join(' AND '),
-      sort: 'created',
-      direction: 'asc',
-      // TODO: Remove in 2025-10, when it becomes the default.
-      advanced_search: true,
-      per_page: 1
-    })).data
-    const { total_count: total_runs } = workflowData
+    let cursor
+
+    // No workflow run available the first time.
+    if (lastRun) {
+      // The cursor to iterate through the full list of issues and pull requests
+      // is passed between jobs as an artifact.
+      const artifact = (await github.rest.actions.listWorkflowRunArtifacts({
+        ...context.repo,
+        run_id: lastRun.id,
+        name: 'pagination-cursor'
+      })).data.artifacts[0]
+
+      // If the artifact is not available, the next iteration starts at the beginning.
+      if (artifact) {
+        stats.artifacts++
+
+        const { downloadPath } = await artifactClient.downloadArtifact(artifact.id, {
+          findBy: {
+            repositoryName: context.repo.repo,
+            repositoryOwner: context.repo.owner,
+            token: core.getInput('github-token')
+          },
+          expectedHash: artifact.digest
+        })
-    const allPulls = (await github.rest.pulls.list({
+        cursor = await readFile(path.resolve(downloadPath, 'cursor'), 'utf-8')
+      }
+    }
+
+    // From GitHub's API docs:
+    // GitHub's REST API considers every pull request an issue, but not every issue is a pull request.
+    // For this reason, "Issues" endpoints may return both issues and pull requests in the response.
+    // You can identify pull requests by the pull_request key.
+    const allItems = await github.rest.issues.listForRepo({
      ...context.repo,
      state: 'open',
      sort: 'created',
      direction: 'asc',
      per_page: 100,
-      // We iterate through pages of 100 items across scheduled runs. With currently ~7000 open PRs and
-      // up to 6*24=144 scheduled runs per day, we hit every PR twice each day.
-      // We might not hit every PR on one iteration, because the pages will shift slightly when
-      // PRs are closed or merged. We assume this to be OK on the bigger scale, because a PR which was
-      // missed once would have to move through the whole page to be missed again. This is very unlikely,
-      // so it should certainly be hit on the next iteration.
-      // TODO: Evaluate after a while whether the above still holds true, and potentially implement
-      // an overlap between runs.
-      page: (total_runs % Math.ceil(total_pulls / 100)) + 1
-    })).data
+      after: cursor
+    })
+
+    // Regex taken and comment adjusted from:
+    // https://github.com/octokit/plugin-paginate-rest.js/blob/8e5da25f975d2f31dda6b8b588d71f2c768a8df2/src/iterator.ts#L36-L41
+    // `allItems.headers.link` format:
+    // <https://api.github.com/repositories/4542716/issues?page=3&per_page=100&after=Y3Vyc29yOnYyOpLPAAABl8qNnYDOvnSJxA%3D%3D>; rel="next",
+    // <https://api.github.com/repositories/4542716/issues?page=1&per_page=100&before=Y3Vyc29yOnYyOpLPAAABl8xFV9DOvoouJg%3D%3D>; rel="prev"
+    // Sets `next` to undefined if the "next" URL is not present or the `link` header is not set.
+    const next = ((allItems.headers.link ?? '').match(/<([^<>]+)>;\s*rel="next"/) ?? [])[1]
+    if (next) {
+      cursor = new URL(next).searchParams.get('after')
+      const uploadPath = path.resolve('cursor')
+      await writeFile(uploadPath, cursor, 'utf-8')
+      // No stats.artifacts++, because this does not allow passing a custom token.
+      // Thus, the upload will not happen with the app token, but the default github.token.
+      await artifactClient.uploadArtifact(
+        'pagination-cursor',
+        [uploadPath],
+        path.resolve('.'),
+        {
+          retentionDays: 1
+        }
+      )
+    }
    // Some items might be in both search results, so filtering out duplicates as well.
-    const items = [].concat(updatedItems, allPulls)
+    const items = [].concat(updatedItems, allItems.data)
      .filter((thisItem, idx, arr) => idx == arr.findIndex(firstItem => firstItem.number == thisItem.number))
    ;(await Promise.allSettled(items.map(handle)))
      .filter(({ status }) => status == 'rejected')
      .map(({ reason }) => core.setFailed(`${reason.message}\n${reason.cause.stack}`))
-    core.notice(`Processed ${stats.prs} PRs, made ${stats.requests + stats.artifacts} API requests and downloaded ${stats.artifacts} artifacts.`)
+    core.notice(`Processed ${stats.prs} PRs, ${stats.issues} Issues, made ${stats.requests + stats.artifacts} API requests and downloaded ${stats.artifacts} artifacts.`)
  }
} finally {
  clearInterval(reservoirUpdater)
+1 -1
lib/customisation.nix
···
    if loc != null then
      loc.file + ":" + toString loc.line
    else if !isFunction fn then
-      toString fn + optionalString (pathIsDirectory fn) "/default.nix"
+      toString (lib.filesystem.resolveDefaultNix fn)
    else
      "<unknown location>";
in
+42
lib/filesystem.nix
···
      )
    else
      processDir args;
+
+  /**
+    Append `/default.nix` if the passed path is a directory.
+
+    # Type
+
+    ```
+    resolveDefaultNix :: (Path | String) -> (Path | String)
+    ```
+
+    # Inputs
+
+    A single argument which can be a [path](https://nix.dev/manual/nix/stable/language/types#type-path) value or a string containing an absolute path.
+
+    # Output
+
+    If the input refers to a directory that exists, the output is that same path with `/default.nix` appended.
+    Furthermore, if the input is a string that ends with `/`, `default.nix` is appended to it.
+    Otherwise, the input is returned unchanged.
+
+    # Examples
+    :::{.example}
+    ## `lib.filesystem.resolveDefaultNix` usage example
+
+    This expression checks whether `a` and `b` refer to the same locally available Nix file path.
+
+    ```nix
+    resolveDefaultNix a == resolveDefaultNix b
+    ```
+
+    For instance, if `a` is `/some/dir` and `b` is `/some/dir/default.nix`, and `/some/dir/` exists, the expression evaluates to `true`, despite `a` and `b` being different references to the same Nix file.
+    :::
+  */
+  resolveDefaultNix =
+    v:
+    if pathIsDirectory v then
+      v + "/default.nix"
+    else if lib.isString v && hasSuffix "/" v then
+      # A path ending in `/` can only refer to a directory, so we take the hint,
+      # even if we cannot verify that the path actually exists.
+      # A `/` is already present, so we don't add another one.
+      v + "default.nix"
+    else
+      v;
}
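As a quick spot-check of the new helper's contract (a REPL-style sketch; the paths are illustrative and assume `/some/dir` exists):

```nix
lib.filesystem.resolveDefaultNix ./some/dir       # => ./some/dir/default.nix    (existing directory)
lib.filesystem.resolveDefaultNix "/some/dir/"     # => "/some/dir/default.nix"   (trailing slash implies a directory)
lib.filesystem.resolveDefaultNix ./some/file.nix  # => ./some/file.nix           (anything else is returned unchanged)
```

The tests added to `lib/tests/misc.nix` below cover exactly these cases, including the `toString` string variants and non-existent paths.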
+46
lib/tests/misc.nix
···
      };
    };
  };
+
+  testFilesystemResolveDefaultNixFile1 = {
+    expr = lib.filesystem.resolveDefaultNix ./foo.nix;
+    expected = ./foo.nix;
+  };
+
+  testFilesystemResolveDefaultNixFile2 = {
+    expr = lib.filesystem.resolveDefaultNix ./default.nix;
+    expected = ./default.nix;
+  };
+
+  testFilesystemResolveDefaultNixDir1 = {
+    expr = lib.filesystem.resolveDefaultNix ./.;
+    expected = ./default.nix;
+  };
+
+  testFilesystemResolveDefaultNixFile1_toString = {
+    expr = lib.filesystem.resolveDefaultNix (toString ./foo.nix);
+    expected = toString ./foo.nix;
+  };
+
+  testFilesystemResolveDefaultNixFile2_toString = {
+    expr = lib.filesystem.resolveDefaultNix (toString ./default.nix);
+    expected = toString ./default.nix;
+  };
+
+  testFilesystemResolveDefaultNixDir1_toString = {
+    expr = lib.filesystem.resolveDefaultNix (toString ./.);
+    expected = toString ./default.nix;
+  };
+
+  testFilesystemResolveDefaultNixDir1_toString2 = {
+    expr = lib.filesystem.resolveDefaultNix (toString ./.);
+    expected = toString ./. + "/default.nix";
+  };
+
+  testFilesystemResolveDefaultNixNonExistent = {
+    expr = lib.filesystem.resolveDefaultNix "/non-existent/this/does/not/exist/for/real/please-dont-mess-with-your-local-fs";
+    expected = "/non-existent/this/does/not/exist/for/real/please-dont-mess-with-your-local-fs";
+  };
+
+  testFilesystemResolveDefaultNixNonExistentDir = {
+    expr = lib.filesystem.resolveDefaultNix "/non-existent/this/does/not/exist/for/real/please-dont-mess-with-your-local-fs/";
+    expected = "/non-existent/this/does/not/exist/for/real/please-dont-mess-with-your-local-fs/default.nix";
+  };
+13
maintainers/maintainer-list.nix
···
    githubId = 30654959;
    name = "Michele Sciabarra";
  };
+  msgilligan = {
+    email = "sean@msgilligan.com";
+    github = "msgilligan";
+    githubId = 61612;
+    name = "Sean Gilligan";
+    keys = [ { fingerprint = "3B66 ACFA D10F 02AA B1D5  2CB1 8DD0 D81D 7D1F C61A"; } ];
+  };
  msiedlarek = {
    email = "mikolaj@siedlarek.pl";
    github = "msiedlarek";
···
    github = "vidbina";
    githubId = 335406;
    name = "David Asabina";
+  };
+  videl = {
+    email = "thibaut.smith@mailbox.org";
+    github = "videl";
+    githubId = 123554;
+    name = "Thibaut Smith";
  };
  vidister = {
    email = "v@vidister.de";
+3 -3
pkgs/applications/emulators/libretro/cores/gambatte.nix
···
}:
mkLibretroCore {
  core = "gambatte";
-  version = "0-unstable-2025-06-20";
+  version = "0-unstable-2025-06-27";
  src = fetchFromGitHub {
    owner = "libretro";
    repo = "gambatte-libretro";
-    rev = "a693367ab1aea60266c7fa7c666b0779035d4745";
-    hash = "sha256-nQ/hh9EkcftcdV0MvPl3kRUGBxukOxbgLCM9786rtd4=";
+    rev = "9f591132e67f101780495a43df8da9bca43e08db";
+    hash = "sha256-wauSnUlZRAtZwheONd+NusM0D1q2pLwha6H90R4R1aU=";
  };
  meta = {
+2 -2
pkgs/applications/emulators/mame/default.nix
···
stdenv.mkDerivation rec {
  pname = "mame";
-  version = "0.277";
+  version = "0.278";
  srcVersion = builtins.replaceStrings [ "." ] [ "" ] version;
  src = fetchFromGitHub {
    owner = "mamedev";
    repo = "mame";
    rev = "mame${srcVersion}";
-    hash = "sha256-mGKTZ8/gvGQv9oXK4pgbJk580GAAXUS16hRQu4uHhdA=";
+    hash = "sha256-YJt+in9QV7a0tQZnfqFP3Iu6XQD0sryjud4FcgokYFg=";
  };
  outputs = [
+3 -3
pkgs/applications/networking/browsers/palemoon/bin.nix
···
stdenv.mkDerivation (finalAttrs: {
  pname = "palemoon-bin";
-  version = "33.7.2";
+  version = "33.8.0";
  src = finalAttrs.passthru.sources."gtk${if withGTK3 then "3" else "2"}";
···
  {
    gtk3 = fetchzip {
      urls = urlRegionVariants "gtk3";
-      hash = "sha256-GE45GZ+OmNNwRLTD2pcZpqRA66k4q/+lkQnGJG+z6nQ=";
+      hash = "sha256-cdPFMYlVEr6D+0mH7Mg5nGpf0KvePGLm3Y/ZytdFHHA=";
    };
    gtk2 = fetchzip {
      urls = urlRegionVariants "gtk2";
-      hash = "sha256-yJPmmQ9IkGzort9OPPWzv+LSeJci8VNoso3NLYev51Q=";
+      hash = "sha256-dgWKmkHl5B1ri3uev63MNz/+E767ip9wJ/YzSog8vdQ=";
    };
  };
+98 -91
pkgs/by-name/az/azure-cli/extensions-generated.json
···
  },
  "aem": {
    "pname": "aem",
-    "version": "0.3.0",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/aem-0.3.0-py2.py3-none-any.whl",
-    "hash": "sha256-Jar5AGqx0RXXxITP2hya0ONhevbSFA24dJmq6oG2f/g=",
+    "version": "1.0.0",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/aem-1.0.0-py2.py3-none-any.whl",
+    "hash": "sha256-3QYsiwJ7avGM4Fg8H88shs3JKBo2cOSh0F39bTN/8vA=",
    "description": "Manage Azure Enhanced Monitoring Extensions for SAP"
  },
  "ai-examples": {
    "pname": "ai-examples",
    "version": "0.2.5",
-    "url": "https://azurecliprod.blob.core.windows.net/cli-extensions/ai_examples-0.2.5-py2.py3-none-any.whl",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/ai_examples-0.2.5-py2.py3-none-any.whl",
    "hash": "sha256-utvfX8LgtKhcQSTT/JKFm1gq348w9XJ0QM6BlCFACZo=",
    "description": "Add AI powered examples to help content"
  },
  "aks-preview": {
    "pname": "aks-preview",
-    "version": "18.0.0b7",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/aks_preview-18.0.0b7-py2.py3-none-any.whl",
-    "hash": "sha256-5xRE/WGe/PbTavC/b9MrrXMwXVsBoEEog44A8YJu9cY=",
+    "version": "18.0.0b15",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/aks_preview-18.0.0b15-py2.py3-none-any.whl",
+    "hash": "sha256-YjRPELWfLshLQHgZTa7jXS96nyYiZ7h2Gu25/wKvw7c=",
    "description": "Provides a preview for upcoming AKS features"
  },
  "akshybrid": {
···
  },
  "amg": {
    "pname": "amg",
-    "version": "2.6.0",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/amg-2.6.0-py3-none-any.whl",
-    "hash": "sha256-I8WRrhs2VdqwpIuT5fqPeITwvfy14X3hWZLFN5IEz80=",
+    "version": "2.6.1",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/amg-2.6.1-py3-none-any.whl",
+    "hash": "sha256-ERGri8mXJtLy/acz8R2UqdmILr7JT4bTQaU6GtxhKjs=",
    "description": "Microsoft Azure Command-Line Tools Azure Managed Grafana Extension"
  },
  "amlfs": {
    "pname": "amlfs",
-    "version": "1.0.0",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/amlfs-1.0.0-py3-none-any.whl",
-    "hash": "sha256-IbWhKUPnJzFSiKoMocSaJYA6ZWt/OIw8Y3WWz99nvR0=",
+    "version": "1.1.0",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/amlfs-1.1.0-py3-none-any.whl",
+    "hash": "sha256-RbZNYIHcVsgziXGOHHcawoJSpEzphcv1loHY9dBpPvA=",
    "description": "Microsoft Azure Command-Line Tools Amlfs Extension"
  },
  "apic-extension": {
    "pname": "apic-extension",
-    "version": "1.2.0b1",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/apic_extension-1.2.0b1-py3-none-any.whl",
-    "hash": "sha256-v8y6Jgg9dYCO/GuLEl44il77WC9nuKT9cRnMI43wZaM=",
+    "version": "1.2.0b2",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/apic_extension-1.2.0b2-py3-none-any.whl",
+    "hash": "sha256-QtaiixX5daX3pOpi7jnJFH2cXe+J0+J/q9PJVZqkE28=",
    "description": "Microsoft Azure Command-Line Tools ApicExtension Extension"
  },
  "appservice-kube": {
    "pname": "appservice-kube",
-    "version": "0.1.10",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/appservice_kube-0.1.10-py2.py3-none-any.whl",
-    "hash": "sha256-f9ctJ+Sw7O2jsrTzAcegwwaP6ouW1w+fyq0UIkDefQ0=",
+    "version": "0.1.11",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/appservice_kube-0.1.11-py2.py3-none-any.whl",
+    "hash": "sha256-gTI/rjFHJ8mx1QfWeWgJStemOubwqEPqKuS3jCPnuKI=",
    "description": "Microsoft Azure Command-Line Tools App Service on Kubernetes Extension"
  },
  "arcgateway": {
···
  },
  "azure-firewall": {
    "pname": "azure-firewall",
-    "version": "1.2.3",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/azure_firewall-1.2.3-py2.py3-none-any.whl",
-    "hash": "sha256-bSUGhZI7L+XUsubSKhFwzw//uIXuA7qSLuEkyottgb4=",
+    "version": "1.3.0",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/azure_firewall-1.3.0-py2.py3-none-any.whl",
+    "hash": "sha256-AkelAJEIignnFomg7HJTrjl8UA1oeQZQTWjRqibUdDE=",
    "description": "Manage Azure Firewall resources"
  },
  "azurelargeinstance": {
···
    "hash": "sha256-b+5Hi9kZkioFMlc/3qO1Qikl03S6ZknqAV1NM5QegZo=",
    "description": "Microsoft Azure Command-Line Tools Azurelargeinstance Extension"
  },
-  "azurestackhci": {
-    "pname": "azurestackhci",
-    "version": "0.2.9",
-    "url": "https://hybridaksstorage.z13.web.core.windows.net/SelfServiceVM/CLI/azurestackhci-0.2.9-py3-none-any.whl",
-    "hash": "sha256-JVey/j+i+VGieUupZ1VbpUwuk+t1U4FS8hqy+1aP7xY=",
-    "description": "Microsoft Azure Command-Line Tools AzureStackHCI Extension"
-  },
  "baremetal-infrastructure": {
    "pname": "baremetal-infrastructure",
    "version": "3.0.0b2",
···
  },
  "bastion": {
    "pname": "bastion",
-    "version": "1.4.0",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/bastion-1.4.0-py3-none-any.whl",
-    "hash": "sha256-G3kYIjI8MiRLO9ug2F6DjzL/V+I13xhIDTDhuq0t3jQ=",
+    "version": "1.4.1",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/bastion-1.4.1-py3-none-any.whl",
+    "hash": "sha256-Zx50jU9Vmt8vNe/50g8jWaxjeuXlHlMYVNG/v8exRmU=",
    "description": "Microsoft Azure Command-Line Tools Bastion Extension"
  },
  "billing-benefits": {
···
  "cli-translator": {
    "pname": "cli-translator",
    "version": "0.3.0",
-    "url": "https://azurecliprod.blob.core.windows.net/cli-extensions/cli_translator-0.3.0-py3-none-any.whl",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/cli_translator-0.3.0-py3-none-any.whl",
    "hash": "sha256-nqYWLTf8M5C+Tc5kywXFxYgHAQTz6SpwGrR1RzVlqKk=",
    "description": "Translate ARM template to executable Azure CLI scripts"
  },
+  "cloudhsm": {
+    "pname": "cloudhsm",
+    "version": "1.0.0b1",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/cloudhsm-1.0.0b1-py3-none-any.whl",
+    "hash": "sha256-GOcr3wcJ+FE/UUcvaoB90bWmdthmYZpDznaAtLr48LU=",
+    "description": "Microsoft Azure Command-Line Tools Cloudhsm Extension"
+  },
  "computeschedule": {
    "pname": "computeschedule",
    "version": "1.0.0b1",
···
  },
  "confluent": {
    "pname": "confluent",
-    "version": "0.6.0",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/confluent-0.6.0-py3-none-any.whl",
-    "hash": "sha256-eYfSLg6craKAh6kAv6U0hlUxlB8rv+ln60bJCy4KEr4=",
+    "version": "1.0.0",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/confluent-1.0.0-py3-none-any.whl",
+    "hash": "sha256-CmHPSaZdVmhow/CwCb45gua03Dw2m+nP1Cu35agO7xk=",
    "description": "Microsoft Azure Command-Line Tools ConfluentManagementClient Extension"
  },
  "connectedmachine": {
···
    "hash": "sha256-4Ou6/8XRwH5c1hXZy54hJE7fxEeyjLAYcTmhGNyIkrc=",
    "description": "Microsoft Azure Command-Line Tools Customlocation Extension"
  },
+  "data-transfer": {
+    "pname": "data-transfer",
+    "version": "1.0.0b1",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/data_transfer-1.0.0b1-py3-none-any.whl",
+    "hash": "sha256-dnF7ZpUcGP7d3uywoXZdMmr/INH5y46glKuJwrlenfU=",
+    "description": "Microsoft Azure Command-Line Tools DataTransfer Extension"
+  },
  "databox": {
    "pname": "databox",
    "version": "1.2.0",
···
    "hash": "sha256-8agBvQw46y6/nC+04LQ6mEcK57QLvNBesqpZbWlXnJ4=",
    "description": "Microsoft Azure Command-Line Tools DataShareManagementClient Extension"
  },
+  "dependency-map": {
+    "pname": "dependency-map",
+    "version": "1.0.0b1",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/dependency_map-1.0.0b1-py3-none-any.whl",
+    "hash": "sha256-/iqgYB47nWSUwT5AJ+1CnlVDX5+fw1DRew53AHRdVng=",
+    "description": "Microsoft Azure Command-Line Tools DependencyMap Extension"
+  },
  "deploy-to-azure": {
    "pname": "deploy-to-azure",
    "version": "0.2.0",
···
  "dev-spaces": {
    "pname": "dev-spaces",
    "version": "1.0.6",
-    "url": "https://azurecliprod.blob.core.windows.net/cli-extensions/dev_spaces-1.0.6-py2.py3-none-any.whl",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/dev_spaces-1.0.6-py2.py3-none-any.whl",
    "hash": "sha256-cQQYCLJ82dM/2QXFCAyX9hKRgW8t3dbc2y5muftuv1k=",
    "description": "Dev Spaces provides a rapid, iterative Kubernetes development experience for teams"
  },
···
  },
  "dns-resolver": {
    "pname": "dns-resolver",
-    "version": "1.0.0",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/dns_resolver-1.0.0-py3-none-any.whl",
-    "hash": "sha256-DdTqcuNTVT8gVFyv8lePy3YqniY3pFamM1ERCOLsAOM=",
+    "version": "1.1.0",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/dns_resolver-1.1.0-py3-none-any.whl",
+    "hash": "sha256-xRmWnmsEcHW/TXuR2RsWCz7W3JDLrqPCxx19H8rZ2JE=",
    "description": "Microsoft Azure Command-Line Tools DnsResolverManagementClient Extension"
  },
  "durabletask": {
···
  },
  "elastic-san": {
    "pname": "elastic-san",
-    "version": "1.3.0",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/elastic_san-1.3.0-py3-none-any.whl",
-    "hash": "sha256-Y1XlsJaX3nixL9AeENaVufA2rFwLTIwowGc7pt1OoOw=",
+    "version": "1.3.1b1",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/elastic_san-1.3.1b1-py3-none-any.whl",
+    "hash": "sha256-L0ps/X+laOJO2aj3kqE7PVntTPxuY30BYeCmN/VWC44=",
    "description": "Microsoft Azure Command-Line Tools ElasticSan Extension"
  },
  "eventgrid": {
···
  "footprint": {
    "pname": "footprint",
    "version": "1.0.0",
-    "url": "https://azurecliprod.blob.core.windows.net/cli-extensions/footprint-1.0.0-py3-none-any.whl",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/footprint-1.0.0-py3-none-any.whl",
    "hash": "sha256-SqWSiL9Gz9aFGfH39j0+M68W2AYyuEwoPMcVISkmCyw=",
    "description": "Microsoft Azure Command-Line Tools FootprintMonitoringManagementClient Extension"
  },
···
  },
  "managednetworkfabric": {
    "pname": "managednetworkfabric",
-    "version": "8.0.0b3",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/managednetworkfabric-8.0.0b3-py3-none-any.whl",
-    "hash": "sha256-RoqlmB/Bl7S81w3IDL1MTooGkiabI14TxYFHaQ9Qi9U=",
+    "version": "8.0.0b5",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/managednetworkfabric-8.0.0b5-py3-none-any.whl",
+    "hash": "sha256-+6ueiYJnSMnGbawqTGoKkbN9Fvl5NCJuz3RUXW6mGBk=",
    "description": "Support for managednetworkfabric commands based on 2024-06-15-preview API version"
  },
  "managementpartner": {
···
  },
  "mcc": {
    "pname": "mcc",
-    "version": "1.0.0b2",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/mcc-1.0.0b2-py3-none-any.whl",
-    "hash": "sha256-z8u9+D5A5bq8WAUqeZx1H1Y+2ukQQXnAyefW51OvEU0=",
+    "version": "1.0.0b3",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/mcc-1.0.0b3-py3-none-any.whl",
+    "hash": "sha256-dY4oEzEbtxRSrnuDPYD2Jh2Rf5A+txfLFhy1wGevFSU=",
    "description": "Microsoft Connected Cache CLI Commands"
  },
  "mdp": {
···
  },
  "neon": {
    "pname": "neon",
-    "version": "1.0.0b3",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/neon-1.0.0b3-py3-none-any.whl",
-    "hash": "sha256-tvnHv5o0GxltVyZNQmraPsJBMXE+/XAIaJcMAVKTUBo=",
+    "version": "1.0.0b4",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/neon-1.0.0b4-py3-none-any.whl",
+    "hash": "sha256-iak+Dt5UD+YpVO1mQzatzIYybEyVIZaRabL0Jbgr17M=",
    "description": "Microsoft Azure Command-Line Tools Neon Extension"
  },
  "network-analytics": {
···
  },
  "notification-hub": {
    "pname": "notification-hub",
-    "version": "1.0.0a1",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/notification_hub-1.0.0a1-py3-none-any.whl",
-    "hash": "sha256-oDdRtxVwDg0Yo46Ai/7tFkM1AkyWCMS/1TrqzHMdEJk=",
+    "version": "2.0.0b1",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/notification_hub-2.0.0b1-py3-none-any.whl",
+    "hash": "sha256-vjdwj26RG1HlatFfN/Jqh5BObSJQ1WmM8DWL5kbD3uo=",
    "description": "Microsoft Azure Command-Line Tools Notification Hub Extension"
  },
  "nsp": {
···
  },
  "providerhub": {
    "pname": "providerhub",
-    "version": "1.0.0b1",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/providerhub-1.0.0b1-py3-none-any.whl",
-    "hash": "sha256-e5PLfssfo6UgkJ1F5uZZfIun2qxPvBomw95mBDZ43Q0=",
+    "version": "1.0.0b2",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/providerhub-1.0.0b2-py3-none-any.whl",
+    "hash": "sha256-mqrXcCKvAEqWOzQZrBDSM4IO81Jduen2+fx5fhqFmtY=",
    "description": "Microsoft Azure Command-Line Tools ProviderHub Extension"
  },
  "purview": {
···
  },
  "qumulo": {
    "pname": "qumulo",
-    "version": "2.0.0",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/qumulo-2.0.0-py3-none-any.whl",
-    "hash": "sha256-fsUZyd0s+Rv1Sy6Lm2iq2xNMsrv+xU6KLLCOo6DkfmI=",
+    "version": "2.0.1",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/qumulo-2.0.1-py3-none-any.whl",
+    "hash": "sha256-HlMtkVEYvu86KqPPVBG/3i0zPg0S1P4qBedSnJwjIgg=",
    "description": "Microsoft Azure Command-Line Tools Qumulo Extension"
  },
  "quota": {
···
    "hash": "sha256-p+Ug4CXDyrokp4JTY3cEjxdJqZzrVPAJ8zOS/JZDVw8=",
    "description": "Microsoft Azure Command-Line Tools ResourceMoverServiceAPI Extension"
  },
-  "sap-hana": {
-    "pname": "sap-hana",
-    "version": "0.6.5",
-    "url": "https://github.com/Azure/azure-hanaonazure-cli-extension/releases/download/0.6.5/sap_hana-0.6.5-py2.py3-none-any.whl",
-    "hash": "sha256-tFVMEl86DrXIkc7DludwX26R1NgXiazvIOPE0XL6RUM=",
-    "description": "Additional commands for working with SAP HanaOnAzure instances"
-  },
  "scenario-guide": {
    "pname": "scenario-guide",
    "version": "0.1.1",
···
  },
  "scvmm": {
    "pname": "scvmm",
-    "version": "1.1.2",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/scvmm-1.1.2-py2.py3-none-any.whl",
-    "hash": "sha256-sbLmbA/wV5dtSPGKQ5YPT/WAK1UC6ebS1aXY8bTotvI=",
+    "version": "1.2.0",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/scvmm-1.2.0-py2.py3-none-any.whl",
+    "hash": "sha256-Cq6dSNRIM/vWgwDCaxTRhosVR9dCD5URYQUI+eBpN0Y=",
    "description": "Microsoft Azure Command-Line Tools SCVMM Extension"
  },
  "self-help": {
···
    "hash": "sha256-qxkULJouBhkLbawnLYzynhecnig/ll+OOk0pJ1uEfOU=",
    "description": "Microsoft Azure Command-Line Tools SiteRecovery Extension"
  },
-  "spring-cloud": {
-    "pname": "spring-cloud",
-    "version": "3.1.9",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/spring_cloud-3.1.9-py3-none-any.whl",
-    "hash": "sha256-ySMrn4B/ff7cLESAZJUFrR5AajwTbAYeC0hd3ypJivU=",
-    "description": "Microsoft Azure Command-Line Tools spring-cloud Extension"
-  },
  "stack-hci": {
    "pname": "stack-hci",
    "version": "1.1.0",
···
  },
  "storage-actions": {
    "pname": "storage-actions",
-    "version": "1.0.0b1",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/storage_actions-1.0.0b1-py3-none-any.whl",
-    "hash": "sha256-B8W+JW7bviyB2DnkxtPZF6Vrk5IVFQKM+WI5PhF2Mxs=",
+    "version": "1.0.0",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/storage_actions-1.0.0-py3-none-any.whl",
+    "hash": "sha256-OzMuN2blvmoj9GuzU1X3O2PJ0Yr8rsnFXYzypAJlF58=",
    "description": "Microsoft Azure Command-Line Tools StorageActions Extension"
  },
  "storage-blob-preview": {
···
  },
  "vme": {
    "pname": "vme",
-    "version": "1.0.0b1",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/vme-1.0.0b1-py3-none-any.whl",
-    "hash": "sha256-cCAZh8ytxz5sEtyNuV/EqZ9KqOifBXr1W8PBJWctz/8=",
+    "version": "1.0.0b4",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/vme-1.0.0b4-py3-none-any.whl",
+    "hash": "sha256-TMOUIL/cntB9DcQubv7B1OMs+nSwv2RRcFD6KCRwFrk=",
    "description": "Microsoft Azure Command-Line Tools Vme Extension"
  },
  "vmware": {
    "pname": "vmware",
-    "version": "7.2.0",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/vmware-7.2.0-py2.py3-none-any.whl",
-    "hash": "sha256-4Pkx39w6vQ+sdw7P0DqUY/zM8v37nwmU2XqPqRLFdrI=",
+    "version": "8.0.0",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/vmware-8.0.0-py2.py3-none-any.whl",
+    "hash": "sha256-Y+3qoZWD1Jx+BrRsHoTZp2lwuFp/NI/l7EYTP8bebuw=",
    "description": "Azure VMware Solution commands"
  },
  "webapp": {
···
    "hash": "sha256-kIsN8HzvZSF2oPK/D9z1i10W+0kD7jwG9z8Ls5E6XA8=",
    "description": "Additional commands for Azure AppService"
  },
+  "workload-orchestration": {
+    "pname": "workload-orchestration",
+    "version": "1.0.0",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/workload_orchestration-1.0.0-py3-none-any.whl",
+    "hash": "sha256-5o0meWmZDeM45AGTTkD9weX1/tcdg7JJzW1XJRWVdkE=",
+    "description": "Microsoft Azure Command-Line Tools WorkloadOperations Extension"
+  },
  "workloads": {
    "pname": "workloads",
    "version": "1.1.0",
···
  },
  "zones": {
    "pname": "zones",
-    "version": "1.0.0b3",
-    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/zones-1.0.0b3-py3-none-any.whl",
-    "hash": "sha256-O9gcKKvWDFmlnZabIioDGNIxqn8XDaE4xeOt3/q+7Rk=",
+    "version": "1.0.0b4",
+    "url": "https://azcliprod.blob.core.windows.net/cli-extensions/zones-1.0.0b4-py3-none-any.whl",
+    "hash": "sha256-isb/41prZrUTuM4GYR4tL6KyBlmArIyNM9y4eMk28OQ=",
    "description": "Microsoft Azure Command-Line Tools Zones Extension"
+3
pkgs/by-name/az/azure-cli/extensions-manual.nix
···
// lib.optionalAttrs config.allowAliases {
  # Removed extensions
  adp = throw "The 'adp' extension for azure-cli was deprecated upstream"; # Added 2024-11-02, https://github.com/Azure/azure-cli-extensions/pull/8038
+  azurestackhci = throw "The 'azurestackhci' extension for azure-cli was deprecated upstream"; # Added 2025-07-01, https://github.com/Azure/azure-cli-extensions/pull/8898
  blockchain = throw "The 'blockchain' extension for azure-cli was deprecated upstream"; # Added 2024-04-26, https://github.com/Azure/azure-cli-extensions/pull/7370
  compute-diagnostic-rp = throw "The 'compute-diagnostic-rp' extension for azure-cli was deprecated upstream"; # Added 2024-11-12, https://github.com/Azure/azure-cli-extensions/pull/8240
  connection-monitor-preview = throw "The 'connection-monitor-preview' extension for azure-cli was deprecated upstream"; # Added 2024-11-02, https://github.com/Azure/azure-cli-extensions/pull/8194
  deidservice = throw "The 'deidservice' extension for azure-cli was moved under healthcareapis"; # Added 2024-11-19, https://github.com/Azure/azure-cli-extensions/pull/8224
  logz = throw "The 'logz' extension for azure-cli was deprecated upstream"; # Added 2024-11-02, https://github.com/Azure/azure-cli-extensions/pull/8459
  pinecone = throw "The 'pinecone' extension for azure-cli was removed upstream"; # Added 2025-06-03, https://github.com/Azure/azure-cli-extensions/pull/8763
+  sap-hana = throw "The 'sap-hana' extension for azure-cli was deprecated upstream"; # Added 2025-07-01, https://github.com/Azure/azure-cli-extensions/pull/8904
  spring = throw "The 'spring' extension for azure-cli was deprecated upstream"; # Added 2025-05-07, https://github.com/Azure/azure-cli-extensions/pull/8652
+  spring-cloud = throw "The 'spring-cloud' extension for azure-cli was deprecated upstream"; # Added 2025-07-01, https://github.com/Azure/azure-cli-extensions/pull/8897
  weights-and-biases = throw "The 'weights-and-biases' was removed upstream"; # Added 2025-06-03, https://github.com/Azure/azure-cli-extensions/pull/8764
}
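With `allowAliases` enabled, referencing one of these removed extensions now fails at evaluation time instead of producing a stale build. A minimal sketch, assuming the `azure-cli.withExtensions` helper from the azure-cli package (not shown in this diff):

```nix
# Hypothetical usage; evaluation aborts with the throw message defined above:
azure-cli.withExtensions [ azure-cli.extensions.sap-hana ]
# => error: The 'sap-hana' extension for azure-cli was deprecated upstream
```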
+2 -2
pkgs/by-name/az/azure-cli/package.nix
···
}:
let
-  version = "2.74.0";
+  version = "2.75.0";
  src = fetchFromGitHub {
    name = "azure-cli-${version}-src";
    owner = "Azure";
    repo = "azure-cli";
    tag = "azure-cli-${version}";
-    hash = "sha256-wX1XKC3snnKEQeqlW+btshjdcMR/5m2Z69QtcJe2Opc=";
+    hash = "sha256-u6umAqRUfiACt23mxTtfosLdxKSPvDVJMkVjPCtxr24=";
  };
  # put packages that need to be overridden in the py package scope
+91
pkgs/by-name/ca/caesura/package.nix
···
+{
+  lib,
+  fetchFromGitHub,
+  fetchzip,
+  rustPlatform,
+  eyed3,
+  flac,
+  imagemagick,
+  intermodal,
+  lame,
+  makeBinaryWrapper,
+  sox,
+  writableTmpDirAsHomeHook,
+}:
+let
+  runtimeDeps = [
+    eyed3
+    flac
+    intermodal
+    imagemagick
+    lame
+    sox
+  ];
+
+  testSampleContent = fetchzip {
+    url = "https://archive.org/download/tennyson-discography_/Tennyson%20-%20With%20You%20-%20Lay-by.zip";
+    hash = "sha256-/MgnOgn+OSPPg9wkJ32hq+1MXDdW+Qo9MqLtZMLQYBY=";
+    stripRoot = false;
+  };
+in
+rustPlatform.buildRustPackage (finalAttrs: {
+  pname = "caesura";
+  version = "0.25.2";
+
+  src = fetchFromGitHub {
+    owner = "RogueOneEcho";
+    repo = "caesura";
+    tag = "v${finalAttrs.version}";
+    hash = "sha256-rpaOFmD/0/c5F6TIS7vGn7G3+rLOoBZKMW/HuzroUxM=";
+  };
+
+  cargoHash = "sha256-agdhYEhhw3gMdZmYiQZVeLARkMsYQ/AWLTrpiaH0mtA=";
+
+  nativeBuildInputs = [
+    makeBinaryWrapper
+  ];
+
+  postPatch = ''
+    substituteInPlace Cargo.toml \
+      --replace-fail 'version = "0.0.0"' 'version = "${finalAttrs.version}"'
+  '';
+
+  checkFlags = [
+    # These tests need internet access for their `Source` (i.e. tracker)
+    "--skip=commands::spectrogram::tests::spectrogram_command_tests::spectrogram_command"
+    "--skip=commands::transcode::tests::transcode_command_tests::transcode_command"
+    "--skip=utils::source::tests::source_provider_tests::source_provider"
+  ];
+
+  preCheck = ''
+    # From samples/download-sample
+    mkdir samples/content/
+    ln -s ${finalAttrs.passthru.testSampleContent} "samples/content/Tennyson - With You (2014) [Digital] "'{'"16-44.1 Bandcamp"'}'" (FLAC)"
+    # Adapted from .github/workflows/on-push.yml
+    tee config.yml <<EOF
+    announce_url: https://flacsfor.me/YOUR_ANNOUNCE_KEY/announce
+    api_key: YOUR_API_KEY
+    content:
+      - samples/content
+    source: $(mktemp -d)
+    verbosity: trace
+    EOF
+  '';
+
+  postInstall = ''
+    wrapProgram $out/bin/caesura \
+      --prefix PATH : ${lib.makeBinPath finalAttrs.passthru.runtimeDeps}
+  '';
+
+  passthru = {
+    inherit runtimeDeps testSampleContent;
+  };
+
+  meta = {
+    description = "Versatile command line tool for automated verifying and transcoding of all your torrents";
+    homepage = "https://github.com/RogueOneEcho/caesura";
+    license = lib.licenses.agpl3Only;
+    maintainers = with lib.maintainers; [ ambroisie ];
+    mainProgram = "caesura";
+  };
+})
+3 -3
pkgs/by-name/ch/check-jsonschema/package.nix
···
python3Packages.buildPythonApplication rec {
  pname = "check-jsonschema";
-  version = "0.33.0";
+  version = "0.33.1";
  pyproject = true;
  src = fetchFromGitHub {
    owner = "python-jsonschema";
    repo = "check-jsonschema";
    tag = version;
-    hash = "sha256-dygE9vFQpoDtTBtN4zoWY1JXUxBSgiX3GDzdk72BmgI=";
+    hash = "sha256-rcoZZ4fd6ATBL+aG1Lqvch6wnKtGmEYdCBd9F2danoE=";
  };
  build-system = with python3Packages; [ setuptools ];
···
description = "Jsonschema CLI and pre-commit hook";
mainProgram = "check-jsonschema";
homepage = "https://github.com/python-jsonschema/check-jsonschema";
-
changelog = "https://github.com/python-jsonschema/check-jsonschema/blob/${version}/CHANGELOG.rst";
+
changelog = "https://github.com/python-jsonschema/check-jsonschema/blob/${src.tag}/CHANGELOG.rst";
license = lib.licenses.asl20;
maintainers = with lib.maintainers; [ sudosubin ];
};
+3 -3
pkgs/by-name/cl/cloak-pt/package.nix
···
  fetchFromGitHub,
}:
let
-  version = "2.10.0";
+  version = "2.11.0";
in
buildGoModule {
  pname = "Cloak";
···
owner = "cbeuw";
repo = "Cloak";
rev = "v${version}";
-
hash = "sha256-JbwjsLVOxQc6v47+6rG2f1JLS8ieZI6jYV/twtaVx9M=";
+
hash = "sha256-afFOWjJiqlMeo8M8D2RsW572c2qTthMNbQvxEf7edHE=";
};
-
vendorHash = "sha256-0veClhg9GujI5VrHVzAevIXkjqtZ6r7RGTP2QeWbO2w=";
+
vendorHash = "sha256-P3/fB1vJjEMETyFxH9XNQySCEDQWrbZdaf0V4qFucbI=";
doCheck = false;
+4 -4
pkgs/by-name/ew/eww/package.nix
···
rustPlatform.buildRustPackage rec {
  pname = "eww";
-  version = "0.6.0-unstable-2025-06-17";
+  version = "0.6.0-unstable-2025-06-30";
  src = fetchFromGitHub {
    owner = "elkowar";
    repo = "eww";
-    rev = "0e409d4a52bd3d37d0aa0ad4e2d7f3b9a8adcdb7";
-    hash = "sha256-QGs9H+SBoMjvznTh3RZVjlwQPkcz6S6CbxC71cS49dk=";
+    rev = "fddb4a09b107237819e661151e007b99b5cab36d";
+    hash = "sha256-PJW4LvW9FmkG9HyUtgXOq7MDjYtBc/iJuOxyf29nD0Y=";
  };
  useFetchCargoVendor = true;
-  cargoHash = "sha256-SEdr9nW5nBm1g6fjC5fZhqPbHQ7H6Kk0RL1V6OEQRdA=";
+  cargoHash = "sha256-Kf99eojqXvdbZ3eRS8GBgyLYNpZKJGIJtsOsvhhSVDk=";
  nativeBuildInputs = [
    installShellFiles
+57
pkgs/by-name/fa/fawltydeps/package.nix
···
+{
+  lib,
+  python3Packages,
+  fetchFromGitHub,
+  writableTmpDirAsHomeHook,
+}:
+
+python3Packages.buildPythonApplication rec {
+  pname = "fawltydeps";
+  version = "0.20.0";
+  pyproject = true;
+
+  src = fetchFromGitHub {
+    owner = "tweag";
+    repo = "FawltyDeps";
+    tag = "v${version}";
+    hash = "sha256-RGwCi4SD0khuOZXcR9Leh9WtRautnlJIfuLBnosyUgk=";
+  };
+
+  build-system = with python3Packages; [ poetry-core ];
+
+  dependencies = with python3Packages; [
+    pyyaml
+    importlib-metadata
+    isort
+    pip-requirements-parser
+    pydantic
+  ];
+
+  nativeCheckInputs =
+    [
+      writableTmpDirAsHomeHook
+    ]
+    ++ (with python3Packages; [
+      pytestCheckHook
+      hypothesis
+    ]);
+
+  disabledTestPaths = [
+    # Disable tests that require network
+    "tests/test_install_deps.py"
+    "tests/test_resolver.py"
+  ];
+
+  pythonImportsCheck = [ "fawltydeps" ];
+
+  meta = {
+    description = "Find undeclared and/or unused 3rd-party dependencies in your Python project";
+    homepage = "https://tweag.github.io/FawltyDeps";
+    license = lib.licenses.mit;
+    mainProgram = "fawltydeps";
+    maintainers = with lib.maintainers; [
+      aleksana
+      jherland
+    ];
+  };
+}
+3 -3
pkgs/by-name/gh/ghostfolio/package.nix
···
buildNpmPackage rec {
  pname = "ghostfolio";
-  version = "2.173.0";
+  version = "2.176.0";
  src = fetchFromGitHub {
    owner = "ghostfolio";
    repo = "ghostfolio";
    tag = version;
-    hash = "sha256-+x9xpY0Yd0tj8zZdMbfstMznypn1Up4hxFXkp6bjcAo=";
+    hash = "sha256-T14omi5NkMCrqiXF+gSi6ELEdfH4QMp7luJtuCWhGM4=";
    # populate values that require us to use git. By doing this in postFetch we
    # can delete .git afterwards and maintain better reproducibility of the src.
    leaveDotGit = true;
···
    '';
  };
-  npmDepsHash = "sha256-0Kme7RwXfxJuJ/6vWPPalvBYhGy0SpRViP5o4YrVeLI=";
+  npmDepsHash = "sha256-0vFH4gdrtaBca1lWxm2uZ1VerP4hJEJgBQzygDbja3I=";
  nativeBuildInputs = [
    prisma
+2 -2
pkgs/by-name/gl/glusterfs/package.nix
···
in
stdenv.mkDerivation rec {
  pname = "glusterfs";
-  version = "11.1";
+  version = "11.2";
  src = fetchFromGitHub {
    owner = "gluster";
    repo = "glusterfs";
    rev = "v${version}";
-    sha256 = "sha256-ZClMfozeFO3266fkuCSV04QwpZaYa8B0uq2lTPEN2rQ=";
+    sha256 = "sha256-MGTntR9SVmejgpAkZnhJOaIkZeCMNBGaQSorLOStdjo=";
  };
  inherit buildInputs propagatedBuildInputs;
+3 -3
pkgs/by-name/go/gof5/package.nix
···
buildGoModule rec {
  pname = "gof5";
-  version = "0.1.4";
+  version = "0.1.5";
  src = fetchFromGitHub {
    owner = "kayrus";
    repo = "gof5";
    rev = "v${version}";
-    sha256 = "10qh7rj8s540ghjdvymly53vny3n0qd0z0ixy24n026jjhgjvnpl";
+    sha256 = "sha256-tvahwd/UBKGYOXIgGwN98P4udcf6Bqrsy9mZ/3YVkvM=";
  };
-  vendorHash = null;
+  vendorHash = "sha256-kTdAjNYp/qQnUhHaCD6Hn1MlMpUsWaRxTSHWSUf6Uz8=";
  # The tests are broken and apparently you need to uncomment some lines in the
  # code in order for it to work.
+3 -3
pkgs/by-name/ju/jujutsu/package.nix
···
rustPlatform.buildRustPackage (finalAttrs: {
  pname = "jujutsu";
-  version = "0.30.0";
+  version = "0.31.0";
  src = fetchFromGitHub {
    owner = "jj-vcs";
    repo = "jj";
    tag = "v${finalAttrs.version}";
-    hash = "sha256-l+E3os5At/PV4zKvUDSv4Aez9Bg0M+BZDvwVOHX+h9s=";
+    hash = "sha256-4zDHSpi7Kk7rramrWFOlBelZnOxt0zgXIrHucYQUOz0=";
  };
  useFetchCargoVendor = true;
-  cargoHash = "sha256-5H4yPbJ5364CM8YEt40rTbks3+tuQsrb6OQ0wRUQZRw=";
+  cargoHash = "sha256-QmMc7pG2FMJBI9AIGPRRh2juFoz7gRFw5CQIcNK6QZI=";
  nativeBuildInputs = [
    installShellFiles
+2 -2
pkgs/by-name/ma/mapnik/package.nix
···
stdenv.mkDerivation (finalAttrs: {
  pname = "mapnik";
-  version = "4.1.0";
+  version = "4.1.1";
  src = fetchFromGitHub {
    owner = "mapnik";
    repo = "mapnik";
    tag = "v${finalAttrs.version}";
-    hash = "sha256-EhRMG0xPOGwcRAMQD2B4z7nVlXQf4HFFfL3oUaUfXBY=";
+    hash = "sha256-+PCN3bjLGqfK4MF6fWApnSua4Pn/mKo2m9CY8/c5xC4=";
    fetchSubmodules = true;
  };
+2 -2
pkgs/by-name/mi/microsoft-edge/package.nix
···
stdenvNoCC.mkDerivation (finalAttrs: {
  pname = "microsoft-edge";
-  version = "138.0.3351.55";
+  version = "138.0.3351.65";
  src = fetchurl {
    url = "https://packages.microsoft.com/repos/edge/pool/main/m/microsoft-edge-stable/microsoft-edge-stable_${finalAttrs.version}-1_amd64.deb";
-    hash = "sha256-SZCtAjhzY8BqwM9IMS2081RWxRT+4gQgrjve7avM7Bo=";
+    hash = "sha256-+8bV3pwoYvp4e0eJHj5/NSu15QiFwVJuGxFJkS76gwI=";
  };
  # With strictDeps on, some shebangs were not being patched correctly
+3 -3
pkgs/by-name/n9/n98-magerun2/package.nix
···
php83.buildComposerProject2 (finalAttrs: {
  pname = "n98-magerun2";
-  version = "8.1.1";
+  version = "9.0.1";
  src = fetchFromGitHub {
    owner = "netz98";
    repo = "n98-magerun2";
    tag = finalAttrs.version;
-    hash = "sha256-GnyIYgVNPumX+GLgPotSzD6BcUiUTlsfYFwFMX94hEk=";
+    hash = "sha256-Lq9TEwhcsoO4Cau2S7i/idEZYIzBeI0iXX1Ol7LnbAo=";
  };
-  vendorHash = "sha256-kF8VXE0K/Gzho5K40H94hXtgSS2rogCtMow2ET8PinU=";
+  vendorHash = "sha256-JxUVqQjSBh8FYW1JbwooHHkzDRtMRaCuVO6o45UMzOk=";
  nativeInstallCheckInputs = [ versionCheckHook ];
  versionCheckProgramArg = "--version";
+14 -1
pkgs/by-name/ne/nexusmods-app/package.nix
···
"--property:DefineConstants=${lib.strings.concatStringsSep "%3B" constants}"
];
-
doCheck = true;
+
# Avoid running `dotnet test` in the main package:
+
# - The test-suite is slow
+
# - Some tests fail intermittently
+
# - The package is often uncached; especially the unfree variant
+
# - We can enable tests in a `passthru.tests` override
+
doCheck = false;
dotnetTestFlags = [
"--environment=USER=nobody"
···
    runHook postInstallCheck
  '';
+
+  passthru.tests = {
+    # Build the package and run `dotnet test`
+    app = finalAttrs.finalPackage.overrideAttrs {
+      pname = "${finalAttrs.pname}-tested";
+      doCheck = true;
+    };
+  };
  passthru.updateScript = nix-update-script { };
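Since `passthru.tests.app` is simply the package rebuilt with the test-suite enabled, nothing is lost by defaulting `doCheck = false`; CI or a reviewer can still exercise `dotnet test` on demand. A sketch of the equivalent manual override (`nexusmods-app` stands in for the final attribute name):

```nix
# Same effect as passthru.tests.app: rebuild the package with tests enabled.
nexusmods-app.overrideAttrs (old: {
  pname = "${old.pname}-tested";
  doCheck = true;
})
```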
+52
pkgs/by-name/ok/oklch-color-picker/package.nix
···
+{
+  lib,
+  nix-update-script,
+  rustPlatform,
+  fetchFromGitHub,
+  versionCheckHook,
+  autoPatchelfHook,
+  wayland,
+  libxkbcommon,
+  libGL,
+  stdenv,
+}:
+
+rustPlatform.buildRustPackage (finalAttrs: {
+  pname = "oklch-color-picker";
+  version = "2.2.1";
+
+  src = fetchFromGitHub {
+    owner = "eero-lehtinen";
+    repo = "oklch-color-picker";
+    tag = "${finalAttrs.version}";
+    hash = "sha256-tPYxcZghGR1YZl1bwoDDIBmbTVGuksCpfgLYwG+k4Ws=";
+  };
+
+  cargoHash = "sha256-tdIkvBYKfcbCYXhDbIwXNNbNb4X32uBwDh3mAyqt/IM=";
+
+  nativeBuildInputs = lib.optionals stdenv.hostPlatform.isLinux [ autoPatchelfHook ];
+
+  runtimeDependencies = [
+    wayland
+    libxkbcommon
+    libGL
+  ];
+
+  nativeInstallCheckInputs = [ versionCheckHook ];
+  versionCheckProgramArg = "--version";
+  doInstallCheck = true;
+  passthru.updateScript = nix-update-script { };
+
+  meta = {
+    description = "Color picker for Oklch";
+    longDescription = ''
+      A standalone color picker application using the Oklch
+      colorspace (based on Oklab)
+    '';
+    homepage = "https://github.com/eero-lehtinen/oklch-color-picker";
+    changelog = "https://github.com/eero-lehtinen/oklch-color-picker/releases/tag/${finalAttrs.version}";
+    license = lib.licenses.mit;
+    maintainers = with lib.maintainers; [ videl ];
+    broken = stdenv.hostPlatform.isDarwin;
+  };
+})
+2 -2
pkgs/by-name/ol/ollama/package.nix
···
goBuild (finalAttrs: {
  pname = "ollama";
  # don't forget to invalidate all hashes each update
-  version = "0.9.3";
+  version = "0.9.5";
  src = fetchFromGitHub {
    owner = "ollama";
    repo = "ollama";
    tag = "v${finalAttrs.version}";
-    hash = "sha256-bAxvlFeCxrxE8PuLbsjAwJYDeZfKb8BDuGBgX8uMgr8=";
+    hash = "sha256-QP70s6gPL1GJv5G4VhYwWpf5raRIcOVsjPq3Jdw89eU=";
    fetchSubmodules = true;
  };
+3 -3
pkgs/by-name/ph/phpunit/package.nix
···
php.buildComposerProject2 (finalAttrs: {
  pname = "phpunit";
-  version = "12.2.3";
+  version = "12.2.5";
  src = fetchFromGitHub {
    owner = "sebastianbergmann";
    repo = "phpunit";
    tag = finalAttrs.version;
-    hash = "sha256-wdUx2/f+VGaclDO5DtJprqsGuKMXXdw/CE10py19Dvc=";
+    hash = "sha256-xpIpcjteIC9rpDxySqcDwJu1e3oMs6qC8u0zYlInxMw=";
  };
-  vendorHash = "sha256-zc9ZXFhS78gZ5VevbAs0r+R30+It5BzUkgPau8qLjFE=";
+  vendorHash = "sha256-G67bYh61xTtqg2dj2laxYed/wXVIRZsG31mZETxohjM=";
  passthru = {
    updateScript = nix-update-script { };
+3 -3
pkgs/by-name/re/release-plz/package.nix
···
rustPlatform.buildRustPackage rec {
  pname = "release-plz";
-  version = "0.3.136";
+  version = "0.3.137";
  src = fetchFromGitHub {
    owner = "MarcoIeni";
    repo = "release-plz";
    rev = "release-plz-v${version}";
-    hash = "sha256-C/8/lukqnuxCeHZR0kSuJSJzWl71kr3hdTTmS5lMRLA=";
+    hash = "sha256-7sCwnUhCLfA/MACseZUT6IWR5+JjxKUyBfLSGwno/qQ=";
  };
  useFetchCargoVendor = true;
-  cargoHash = "sha256-MsSGFNMmIaKVYXqJks4NzLNnrip8cz7K9pETtxHyLuI=";
+  cargoHash = "sha256-IoFJPRW4SXAWxfBKNBrgtxBAYfbRwxuN9Aig3P9QkOk=";
  nativeBuildInputs = [
    installShellFiles
-30
pkgs/by-name/sa/saga/darwin-patch-1.patch
···
-
commit 3bbd15676dfc077d7836e9d51810c1d6731f5789
-
Author: Palmer Cox <p@lmercox.com>
-
Date: Sun Feb 23 16:41:18 2025 -0500
-
-
Fix copy/paste error in FindPostgres.cmake
-
-
In f51c6b1513e312002c108fe87d26e33c48671406, EXEC_PROGRAM was changed to
-
execute_process. As part of that, it looks like the second and third
-
invocations were accidentally changed.
-
-
diff --git a/saga-gis/cmake/modules/FindPostgres.cmake b/saga-gis/cmake/modules/FindPostgres.cmake
-
index f22806fd9..a4b6ec9ac 100644
-
--- a/cmake/modules/FindPostgres.cmake
-
+++ b/cmake/modules/FindPostgres.cmake
-
@@ -77,13 +77,13 @@ ELSE(WIN32)
-
SET(POSTGRES_INCLUDE_DIR ${PG_TMP} CACHE STRING INTERNAL)
-
-
# set LIBRARY_DIR
-
- execute_process(COMMAND ${POSTGRES_CONFIG} --includedir
-
+ execute_process(COMMAND ${POSTGRES_CONFIG} --libdir
-
OUTPUT_VARIABLE PG_TMP
-
OUTPUT_STRIP_TRAILING_WHITESPACE)
-
IF (APPLE)
-
SET(POSTGRES_LIBRARY ${PG_TMP}/libpq.dylib CACHE STRING INTERNAL)
-
ELSEIF (CYGWIN)
-
- execute_process(COMMAND ${POSTGRES_CONFIG} --includedir
-
+ execute_process(COMMAND ${POSTGRES_CONFIG} --libs
-
OUTPUT_VARIABLE PG_TMP
-
OUTPUT_STRIP_TRAILING_WHITESPACE)
-
-24
pkgs/by-name/sa/saga/darwin-patch-2.patch
···
-
commit eb69f594ec439309432e87834bead5276b7dbc9b
-
Author: Palmer Cox <p@lmercox.com>
-
Date: Sun Feb 23 16:45:34 2025 -0500
-
-
On Apple, use FIND_LIBRARY to locate libpq
-
-
I think FIND_LIBRARY() is better than just relying on what pg_config
-
said its libdir was, since, depending on how libpq was installed, it may
-
or may not be in that directory. If its not, FIND_LIBRARY() is able to
-
find it in other locations.
-
-
diff --git a/saga-gis/cmake/modules/FindPostgres.cmake b/saga-gis/cmake/modules/FindPostgres.cmake
-
index a4b6ec9ac..65e7ac69b 100644
-
--- a/cmake/modules/FindPostgres.cmake
-
+++ b/cmake/modules/FindPostgres.cmake
-
@@ -81,7 +81,7 @@ ELSE(WIN32)
-
OUTPUT_VARIABLE PG_TMP
-
OUTPUT_STRIP_TRAILING_WHITESPACE)
-
IF (APPLE)
-
- SET(POSTGRES_LIBRARY ${PG_TMP}/libpq.dylib CACHE STRING INTERNAL)
-
+ FIND_LIBRARY(POSTGRES_LIBRARY NAMES pq libpq PATHS ${PG_TMP})
-
ELSEIF (CYGWIN)
-
execute_process(COMMAND ${POSTGRES_CONFIG} --libs
-
OUTPUT_VARIABLE PG_TMP
+2 -10
pkgs/by-name/sa/saga/package.nix
···
stdenv.mkDerivation rec {
pname = "saga";
-
version = "9.7.2";
+
version = "9.8.1";
src = fetchurl {
url = "mirror://sourceforge/saga-gis/saga-${version}.tar.gz";
-
hash = "sha256-1nWpFGRBS49uzKl7m/4YWFI+3lvm2zKByYpR9llxsgY=";
+
hash = "sha256-NCNeTxR4eWMJ3OHcBEQ2MZky9XiEExPscGhriDvXYf8=";
};
sourceRoot = "saga-${version}/saga-gis";
-
-
patches = [
-
# Patches from https://sourceforge.net/p/saga-gis/code/merge-requests/38/.
-
# These are needed to fix building on Darwin (technically the first is not
-
# required, but the second doesn't apply without it).
-
./darwin-patch-1.patch
-
./darwin-patch-2.patch
-
];
nativeBuildInputs = [
cmake
+1
pkgs/by-name/sp/sparrow/package.nix
···
license = licenses.asl20;
maintainers = with maintainers; [
emmanuelrosa
+
msgilligan
_1000101
];
platforms = [ "x86_64-linux" ];
+2 -2
pkgs/by-name/tb/tbls/package.nix
···
buildGoModule rec {
pname = "tbls";
-
version = "1.85.5";
+
version = "1.86.0";
src = fetchFromGitHub {
owner = "k1LoW";
repo = "tbls";
tag = "v${version}";
-
hash = "sha256-djIGgZ5qehrkQZlxe2+3XzRb5FfewZVcquYiitGfFdo=";
+
hash = "sha256-vV7eAjPrPlNNw+rLyrMe9G1KzVvtyFIOSrb+BrK3l00=";
};
vendorHash = "sha256-9IvnIFOlLdqmntisNomO5K6PU8gw7CSuEb46zG5ox2A=";
+3 -3
pkgs/by-name/we/werf/package.nix
···
}:
buildGoModule (finalAttrs: {
pname = "werf";
-
version = "2.38.0";
+
version = "2.39.1";
src = fetchFromGitHub {
owner = "werf";
repo = "werf";
tag = "v${finalAttrs.version}";
-
hash = "sha256-cZUzkThVKgPc8bsxmDc2+gsq9YxVswokO1rORvKVIws=";
+
hash = "sha256-xQ1nh9YL198/ih9rw52rItR3t5Nq901MpDlFVht6kAc=";
};
proxyVendor = true;
-
vendorHash = "sha256-aQVDt6VDtQjHCkY2xcbmoKn+UUplJ+a6xfdwPSF/j9Y=";
+
vendorHash = "sha256-CLe5UuHwAXLk9c+6baOpfFqrE/pl4889PhlajBRV+UU=";
subPackages = [ "cmd/werf" ];
+2 -2
pkgs/development/libraries/mesa/common.nix
···
# nix build .#legacyPackages.x86_64-darwin.mesa .#legacyPackages.aarch64-darwin.mesa
rec {
pname = "mesa";
-
version = "25.1.4";
+
version = "25.1.5";
src = fetchFromGitLab {
domain = "gitlab.freedesktop.org";
owner = "mesa";
repo = "mesa";
rev = "mesa-${version}";
-
hash = "sha256-DA6fE+Ns91z146KbGlQldqkJlvGAxhzNdcmdIO0lHK8=";
+
hash = "sha256-AZAd1/wiz8d0lXpim9obp6/K7ySP12rGFe8jZrc9Gl0=";
};
meta = {
+2 -2
pkgs/development/python-modules/azure-mgmt-containerservice/default.nix
···
buildPythonPackage rec {
pname = "azure-mgmt-containerservice";
-
version = "36.0.0";
+
version = "37.0.0";
pyproject = true;
disabled = pythonOlder "3.8";
···
src = fetchPypi {
pname = "azure_mgmt_containerservice";
inherit version;
-
hash = "sha256-l/PnbSs6auieHmxzmEjx4OB1jHKCqjNNV7MAhvbzbJ8=";
+
hash = "sha256-F02cVmGhYuxDoK95BbzxHNIJpugARaj0I31TcB0qkTs=";
};
build-system = [ setuptools ];
+4 -3
pkgs/development/python-modules/azure-multiapi-storage/default.nix
···
buildPythonPackage rec {
pname = "azure-multiapi-storage";
-
version = "1.4.0";
+
version = "1.4.1";
pyproject = true;
disabled = pythonOlder "3.9";
src = fetchPypi {
-
inherit pname version;
-
hash = "sha256-RfFd+1xL2ouWJ3NLXMcsRfQ215bi4ha+iCOcYXjND3E=";
+
pname = "azure_multiapi_storage";
+
inherit version;
+
hash = "sha256-INTvVn+1ysQHKRyI0Q4p43Ynyyj2BiBPVMcfaAEDCyg=";
};
build-system = [ setuptools ];
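The `fetchPypi` change above presumably tracks PyPI's move to underscore-normalized sdist filenames (PEP 625): the Nix attribute keeps the hyphenated `pname`, while the fetcher now needs the normalized `azure_multiapi_storage`.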
+8 -1
pkgs/development/tools/misc/binutils/default.nix
···
# on the PATH to both be usable.
targetPrefix = lib.optionalString (targetPlatform != hostPlatform) "${targetPlatform.config}-";
+
# gas is disabled for some targets via noconfigdirs in configure.
+
targetHasGas = !stdenv.targetPlatform.isDarwin;
+
# gas isn't multi-target, even with --enable-targets=all, so we do
# separate builds of just gas for each target.
#
···
# additional targets here as required.
allGasTargets =
allGasTargets'
-
++ lib.optional (!lib.elem targetPlatform.config allGasTargets') targetPlatform.config;
+
++ lib.optional (
+
targetHasGas && !lib.elem targetPlatform.config allGasTargets'
+
) targetPlatform.config;
allGasTargets' = [
"aarch64-unknown-linux-gnu"
"alpha-unknown-linux-gnu"
···
$makeFlags "''${makeFlagsArray[@]}" $installFlags "''${installFlagsArray[@]}" \
install-exec-bindir
done
+
''
+
+ lib.optionalString (withAllTargets && targetHasGas) ''
ln -s $out/bin/${stdenv.targetPlatform.config}-as $out/bin/as
'';
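The new `targetHasGas` guard keeps assembler-less targets (currently Darwin) out of both the per-target gas builds and the final `as` symlink. A minimal sketch of how the `lib.optional` guard behaves in `nix repl` (illustrative target string):

    nix-repl> lib.optional true "x86_64-unknown-linux-gnu"
    [ "x86_64-unknown-linux-gnu" ]
    nix-repl> lib.optional false "x86_64-unknown-linux-gnu"
    [ ]

When the condition is false, the target is simply not appended to `allGasTargets`, so no gas build is attempted for it.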
+2 -2
pkgs/games/path-of-building/default.nix
···
let
data = stdenv.mkDerivation (finalAttrs: {
pname = "path-of-building-data";
-
version = "2.55.2";
+
version = "2.55.3";
src = fetchFromGitHub {
owner = "PathOfBuildingCommunity";
repo = "PathOfBuilding";
rev = "v${finalAttrs.version}";
-
hash = "sha256-i+9WeASdOj9QSB0HjDMP7qM7wQh3tyHuh74QlVWhi1c=";
+
hash = "sha256-LGn5dDH1oRD6bi3KGqyiQh7Gu/8k+RRgGRFkUaFa19E=";
};
nativeBuildInputs = [ unzip ];
+3 -3
pkgs/servers/home-assistant/custom-lovelace-modules/universal-remote-card/package.nix
···
buildNpmPackage rec {
pname = "universal-remote-card";
-
version = "4.6.0";
+
version = "4.6.1";
src = fetchFromGitHub {
owner = "Nerwyn";
repo = "android-tv-card";
rev = version;
-
hash = "sha256-CiJJDMOl50QrMIdPIDLJa259FZSjyUhmwiZN29/oBsM=";
+
hash = "sha256-cJu07eIluZFZfIq+3D0xlQs2L3NmSKf3EBSA/S2jx7Y=";
};
patches = [ ./dont-call-git.patch ];
-
npmDepsHash = "sha256-Hv4TcUqwSGjA1OpAdd0RY0v+F9oTMIHgVm54sPjyrpQ=";
+
npmDepsHash = "sha256-eldbaWZq/TV7V3wPOmgZrYNQsNP1Dgt6vqEc0hiqy+c=";
installPhase = ''
runHook preInstall
+2 -2
pkgs/servers/mastodon/source.nix
···
patches ? [ ],
}:
let
-
version = "4.3.8";
+
version = "4.3.9";
in
applyPatches {
src = fetchFromGitHub {
owner = "mastodon";
repo = "mastodon";
rev = "v${version}";
-
hash = "sha256-08AApylDOz8oExZ0cRaZTgNAuP+1wiLkx0SDhkO2fMM=";
+
hash = "sha256-A2WxVwaarT866s97uwfStBVtv7T5czF7ymRswtZ2K4M=";
passthru = {
inherit version;
+2 -2
pkgs/servers/minio/default.nix
···
in
buildGoModule rec {
pname = "minio";
-
version = "2025-05-24T17-08-30Z";
+
version = "2025-06-13T11-33-47Z";
src = fetchFromGitHub {
owner = "minio";
repo = "minio";
rev = "RELEASE.${version}";
-
hash = "sha256-BB7uEBc0JSJ3nBAy+0i6s4js7Nv/jYw51tbIE6bWjkI=";
+
hash = "sha256-pck/K/BJZC0OdjgeCr+3ErkOyqmVTCdZv61jG24tp2E=";
};
vendorHash = "sha256-0UoEIlxbAveYlCbGZ2z1q+RAksJrVjdE+ymc6ozDGcE=";
+21 -4
pkgs/tools/package-management/nix/tests.nix
···
srcVersion=$(cat ${src}/.version)
echo "Version in nix nix expression: $version"
echo "Version in nix.src: $srcVersion"
-
if [ "$version" != "$srcVersion" ]; then
-
echo "Version mismatch!"
-
exit 1
-
fi
+
${
+
if self_attribute_name == "git" then
+
# Major and minor must match; the patch component may be missing or carry a suffix like a commit hash. That's all fine.
+
''
+
majorMinor() {
+
echo "$1" | sed -n -e 's/\([0-9]*\.[0-9]*\).*/\1/p'
+
}
+
if (set -x; [ "$(majorMinor "$version")" != "$(majorMinor "$srcVersion")" ]); then
+
echo "Version mismatch!"
+
exit 1
+
fi
+
''
+
else
+
# exact match
+
''
+
if [ "$version" != "$srcVersion" ]; then
+
echo "Version mismatch!"
+
exit 1
+
fi
+
''
+
}
touch $out
'';
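The `majorMinor` helper keeps only the leading `major.minor` of a version string, which is what makes the relaxed check for the `git` attribute work. Illustrative invocations of the sed expression above:

    $ majorMinor "2.29.1"                  # prints 2.29
    $ majorMinor "2.30pre20250612_abc1234" # prints 2.30

A missing patch component or a pre-release suffix therefore still compares equal as long as major and minor agree.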