Merge remote-tracking branch 'upstream/main' into merge-against-upstream

Cole Helbling 2024-02-27 09:32:36 -08:00
commit e4bda5a74b
4 changed files with 23 additions and 12 deletions


@@ -45,7 +45,9 @@ jobs:
- uses: actions/checkout@v3
- uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/magic-nix-cache-action@main
- run: nix flake check
- uses: DeterminateSystems/flake-checker-action@main
- name: Run `nix build`
run: nix build .
```
That's it.
@@ -108,7 +110,7 @@ cat action.yml| nix run nixpkgs#yq-go -- '[[ "Parameter", "Description", "Requir
[action]: https://github.com/DeterminateSystems/magic-nix-cache-action/
[installer]: https://github.com/DeterminateSystems/nix-installer/
[ghacache]: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows
[privacy]: https://determinate.systems/privacy
[privacy]: https://determinate.systems/policies/privacy
[telemetry]: https://github.com/DeterminateSystems/magic-nix-cache/blob/main/magic-nix-cache/src/telemetry.rs
[semantics]: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache
[z2ncache]: https://zero-to-nix.com/concepts/caching#binary-caches


@@ -54,6 +54,6 @@ inputs:
default: 41239
runs:
using: "node16"
using: "node20"
main: "./dist/index.js"
post: "./dist/index.js"

dist/index.js (generated, vendored)

@@ -3,8 +3,7 @@ import * as os$2 from 'node:os';
import os__default from 'node:os';
import * as path$1 from 'node:path';
import { spawn, exec } from 'node:child_process';
import { openSync } from 'node:fs';
import { setTimeout as setTimeout$1 } from 'timers/promises';
import { openSync, readFileSync } from 'node:fs';
import { promisify as promisify$1, inspect } from 'node:util';
import * as require$$1 from 'http';
import require$$1__default from 'http';
@@ -12105,7 +12104,7 @@ var got$1 = got;
const ENV_CACHE_DAEMONDIR = 'MAGIC_NIX_CACHE_DAEMONDIR';
const gotClient = got$1.extend({
retry: {
limit: 5,
limit: 1,
methods: ['POST', 'GET', 'PUT', 'HEAD', 'DELETE', 'OPTIONS', 'TRACE'],
},
hooks: {
@@ -12224,6 +12223,7 @@ async function setUpAutoCache() {
'--flakehub-cache-server', coreExports.getInput('flakehub-cache-server'),
'--flakehub-api-server', coreExports.getInput('flakehub-api-server'),
'--flakehub-api-server-netrc', netrc,
'--flakehub-flake-name', coreExports.getInput('flakehub-flake-name'),
] : []).concat(coreExports.getInput('use-gha-cache') === 'true' ? [
'--use-gha-cache'
] : []), {
@@ -12319,7 +12319,6 @@ async function tearDownAutoCache() {
coreExports.debug(res);
}
finally {
await setTimeout$1(5000);
coreExports.debug(`unwatching the daemon log`);
log.unwatch();
}
@@ -12332,6 +12331,13 @@ async function tearDownAutoCache() {
throw e;
}
}
finally {
if (coreExports.isDebug()) {
coreExports.info("Entire log:");
const log = readFileSync(path$1.join(daemonDir, 'daemon.log'));
coreExports.info(log.toString());
}
}
}
const isPost = !!process.env['STATE_isPost'];
try {


@@ -4,8 +4,7 @@ import * as fs from 'node:fs/promises';
import * as os from 'node:os';
import * as path from 'node:path';
import { spawn, exec } from 'node:child_process';
import { openSync } from 'node:fs';
import { setTimeout } from 'timers/promises';
import { openSync, readFileSync } from 'node:fs';
import { inspect, promisify } from 'node:util';
import * as http from 'http';
@@ -17,7 +16,7 @@ const ENV_CACHE_DAEMONDIR = 'MAGIC_NIX_CACHE_DAEMONDIR';
const gotClient = got.extend({
retry: {
limit: 5,
limit: 1,
methods: [ 'POST', 'GET', 'PUT', 'HEAD', 'DELETE', 'OPTIONS', 'TRACE' ],
},
hooks: {
@@ -277,8 +276,6 @@ async function tearDownAutoCache() {
core.debug(`back from post`);
core.debug(res);
} finally {
await setTimeout(5000);
core.debug(`unwatching the daemon log`);
log.unwatch();
}
@@ -290,6 +287,12 @@ async function tearDownAutoCache() {
if (e.code !== 'ESRCH') {
throw e;
}
} finally {
if (core.isDebug()) {
core.info("Entire log:");
const log = readFileSync(path.join(daemonDir, 'daemon.log'));
core.info(log.toString());
}
}
}