Mirror of https://github.com/DeterminateSystems/magic-nix-cache-action.git (synced 2024-12-27 01:30:35 +01:00)

Commit e4bda5a74b: Merge remote-tracking branch 'upstream/main' into merge-against-upstream

4 changed files with 23 additions and 12 deletions
README.md

@@ -45,7 +45,9 @@ jobs:
       - uses: actions/checkout@v3
       - uses: DeterminateSystems/nix-installer-action@main
       - uses: DeterminateSystems/magic-nix-cache-action@main
-      - run: nix flake check
+      - uses: DeterminateSystems/flake-checker-action@main
+      - name: Run `nix build`
+        run: nix build .
 ```

 That's it.
@@ -108,7 +110,7 @@ cat action.yml| nix run nixpkgs#yq-go -- '[[ "Parameter", "Description", "Requir
 [action]: https://github.com/DeterminateSystems/magic-nix-cache-action/
 [installer]: https://github.com/DeterminateSystems/nix-installer/
 [ghacache]: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows
-[privacy]: https://determinate.systems/privacy
+[privacy]: https://determinate.systems/policies/privacy
 [telemetry]: https://github.com/DeterminateSystems/magic-nix-cache/blob/main/magic-nix-cache/src/telemetry.rs
 [semantics]: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache
 [z2ncache]: https://zero-to-nix.com/concepts/caching#binary-caches
action.yml

@@ -54,6 +54,6 @@ inputs:
     default: 41239

 runs:
-  using: "node16"
+  using: "node20"
   main: "./dist/index.js"
   post: "./dist/index.js"
dist/index.js (generated, vendored): 14 changes
@@ -3,8 +3,7 @@ import * as os$2 from 'node:os';
 import os__default from 'node:os';
 import * as path$1 from 'node:path';
 import { spawn, exec } from 'node:child_process';
-import { openSync } from 'node:fs';
-import { setTimeout as setTimeout$1 } from 'timers/promises';
+import { openSync, readFileSync } from 'node:fs';
 import { promisify as promisify$1, inspect } from 'node:util';
 import * as require$$1 from 'http';
 import require$$1__default from 'http';
@@ -12105,7 +12104,7 @@ var got$1 = got;
 const ENV_CACHE_DAEMONDIR = 'MAGIC_NIX_CACHE_DAEMONDIR';
 const gotClient = got$1.extend({
     retry: {
-        limit: 5,
+        limit: 1,
         methods: ['POST', 'GET', 'PUT', 'HEAD', 'DELETE', 'OPTIONS', 'TRACE'],
     },
     hooks: {
@@ -12224,6 +12223,7 @@ async function setUpAutoCache() {
         '--flakehub-cache-server', coreExports.getInput('flakehub-cache-server'),
         '--flakehub-api-server', coreExports.getInput('flakehub-api-server'),
         '--flakehub-api-server-netrc', netrc,
+        '--flakehub-flake-name', coreExports.getInput('flakehub-flake-name'),
     ] : []).concat(coreExports.getInput('use-gha-cache') === 'true' ? [
         '--use-gha-cache'
     ] : []), {
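As a point of reference, the hunk above extends an argument-assembly pattern: optional flag groups are appended with `concat()` so that the daemon spawn call receives one flat argv. A rough self-contained sketch of that pattern follows; `baseArgs`, `netrc`, and `useFlakeHub` are illustrative placeholders for values that exist in the real function's scope, and only the flags themselves are taken from the diff.

```typescript
import * as core from '@actions/core';

// Placeholders standing in for values defined earlier in setUpAutoCache().
const baseArgs: string[] = [];
const netrc = '/tmp/netrc'; // assumed path, for illustration only
const useFlakeHub = true;   // stands in for the action's FlakeHub check

// Each optional group contributes its flags only when its condition holds,
// so the final array is a flat argv ready to hand to spawn().
const daemonArgs = baseArgs
  .concat(useFlakeHub ? [
    '--flakehub-cache-server', core.getInput('flakehub-cache-server'),
    '--flakehub-api-server', core.getInput('flakehub-api-server'),
    '--flakehub-api-server-netrc', netrc,
    // New in this commit: forward the configured flake name to the daemon.
    '--flakehub-flake-name', core.getInput('flakehub-flake-name'),
  ] : [])
  .concat(core.getInput('use-gha-cache') === 'true' ? ['--use-gha-cache'] : []);
```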
@@ -12319,7 +12319,6 @@ async function tearDownAutoCache() {
         coreExports.debug(res);
     }
     finally {
-        await setTimeout$1(5000);
         coreExports.debug(`unwatching the daemon log`);
         log.unwatch();
     }
@@ -12332,6 +12331,13 @@ async function tearDownAutoCache() {
             throw e;
         }
     }
+    finally {
+        if (coreExports.isDebug()) {
+            coreExports.info("Entire log:");
+            const log = readFileSync(path$1.join(daemonDir, 'daemon.log'));
+            coreExports.info(log.toString());
+        }
+    }
 }
 const isPost = !!process.env['STATE_isPost'];
 try {
src/index.ts: 13 changes
@@ -4,8 +4,7 @@ import * as fs from 'node:fs/promises';
 import * as os from 'node:os';
 import * as path from 'node:path';
 import { spawn, exec } from 'node:child_process';
-import { openSync } from 'node:fs';
-import { setTimeout } from 'timers/promises';
+import { openSync, readFileSync } from 'node:fs';
 import { inspect, promisify } from 'node:util';
 import * as http from 'http';
 
@@ -17,7 +16,7 @@ const ENV_CACHE_DAEMONDIR = 'MAGIC_NIX_CACHE_DAEMONDIR';
 
 const gotClient = got.extend({
   retry: {
-    limit: 5,
+    limit: 1,
     methods: [ 'POST', 'GET', 'PUT', 'HEAD', 'DELETE', 'OPTIONS', 'TRACE' ],
   },
   hooks: {
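Both `dist/index.js` and `src/index.ts` drop the HTTP client's retry budget from five to one. A minimal sketch of what this `got` configuration does in isolation (the endpoint in the trailing comment is illustrative, not the daemon's real API):

```typescript
import got from 'got';

const client = got.extend({
  retry: {
    // A failed request is retried at most once before the error propagates,
    // rather than up to five times as before this commit.
    limit: 1,
    // Every verb is opted in, including non-idempotent POST; by default got
    // retries only GET, PUT, HEAD, DELETE, OPTIONS, and TRACE.
    methods: ['POST', 'GET', 'PUT', 'HEAD', 'DELETE', 'OPTIONS', 'TRACE'],
  },
});

// e.g. await client.post('http://127.0.0.1:41239/some-endpoint');
```

A plausible motivation: against a daemon on localhost, failing fast surfaces a crashed process sooner than a long retry loop would.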
@@ -277,8 +276,6 @@ async function tearDownAutoCache() {
     core.debug(`back from post`);
     core.debug(res);
   } finally {
-    await setTimeout(5000);
-
     core.debug(`unwatching the daemon log`);
     log.unwatch();
   }
@@ -290,6 +287,12 @@ async function tearDownAutoCache() {
       if (e.code !== 'ESRCH') {
         throw e;
       }
+    } finally {
+      if (core.isDebug()) {
+        core.info("Entire log:");
+        const log = readFileSync(path.join(daemonDir, 'daemon.log'));
+        core.info(log.toString());
+      }
     }
   }
 
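The new `finally` arm makes the post-step print the daemon's entire log whenever debug logging is enabled, even when stopping the daemon throws. Pulled out of `tearDownAutoCache()` as a standalone helper, the pattern looks roughly like this (`daemonDir` is a parameter here, but a scoped variable in the real code):

```typescript
import * as core from '@actions/core';
import { readFileSync } from 'node:fs';
import * as path from 'node:path';

// Dump the daemon's log through the Actions logger, but only when the run
// has step debug logging enabled (core.isDebug()); otherwise stay quiet.
function dumpDaemonLog(daemonDir: string): void {
  if (core.isDebug()) {
    core.info('Entire log:');
    const log = readFileSync(path.join(daemonDir, 'daemon.log'));
    core.info(log.toString());
  }
}

// Usage in teardown: a finally block guarantees the dump runs whether or
// not shutting the daemon down succeeded.
// try { /* stop the daemon */ } finally { dumpDaemonLog(daemonDir); }
```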