Merge pull request #35 from DeterminateSystems/flakehub-cache

Introduce FlakeHub Cache
This commit is contained in:
Graham Christensen 2024-03-06 16:27:38 -05:00 committed by GitHub
commit 122e91d346
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 381 additions and 113 deletions

View file

@@ -1,14 +1,47 @@
#!/bin/sh #! /usr/bin/env bash
set -e set -e
set -ux set -ux
seed=$(date) seed=$(date)
log="${MAGIC_NIX_CACHE_DAEMONDIR}/daemon.log"
binary_cache=https://cache.flakehub.com
# Check that the action initialized correctly.
grep 'FlakeHub cache is enabled' "${log}"
grep 'Using cache' "${log}"
grep 'GitHub Action cache is enabled' "${log}"
# Build something.
outpath=$(nix-build .github/workflows/cache-tester.nix --argstr seed "$seed") outpath=$(nix-build .github/workflows/cache-tester.nix --argstr seed "$seed")
nix copy --to 'http://127.0.0.1:37515' "$outpath"
# Check that the path was enqueued to be pushed to the cache.
grep "Enqueueing.*${outpath}" "${log}"
# Wait until it has been pushed successfully.
found=
for ((i = 0; i < 60; i++)); do
sleep 1
if grep "$(basename "${outpath}")" "${log}"; then
found=1
break
fi
done
if [[ -z $found ]]; then
echo "FlakeHub push did not happen." >&2
exit 1
fi
# Check the FlakeHub binary cache to see if the path is really there.
nix path-info --store "${binary_cache}" "${outpath}"
# FIXME: remove this once the daemon also uploads to GHA automatically.
nix copy --to 'http://127.0.0.1:37515' "${outpath}"
rm ./result rm ./result
nix store delete "$outpath" nix store delete "${outpath}"
if [ -f "$outpath" ]; then if [ -f "$outpath" ]; then
echo "$outpath still exists? can't test" echo "$outpath still exists? can't test"
exit 1 exit 1

View file

@ -16,9 +16,12 @@ jobs:
- name: Record existing bundle hash - name: Record existing bundle hash
run: | run: |
echo "BUNDLE_HASH=$(sha256sum <dist/index.js | sed 's/ -//')" >>$GITHUB_ENV echo "BUNDLE_HASH=$(sha256sum <dist/index.js | sed 's/ -//')" >>$GITHUB_ENV
- name: Check shell scripts
run: |
nix develop --command shellcheck ./.github/workflows/cache-test.sh
- name: Build action - name: Build action
run: | run: |
nix develop --command -- just build nix develop --command just build
- name: Check bundle consistency - name: Check bundle consistency
run: | run: |
NEW_BUNDLE_HASH=$(sha256sum <dist/index.js | sed 's/ -//') NEW_BUNDLE_HASH=$(sha256sum <dist/index.js | sed 's/ -//')
@ -35,11 +38,15 @@ jobs:
run-x86_64-linux: run-x86_64-linux:
name: Run x86_64 Linux name: Run x86_64 Linux
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
permissions:
id-token: "write"
contents: "read"
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Install Nix - name: Install Nix
uses: DeterminateSystems/nix-installer-action@main uses: DeterminateSystems/nix-installer-action@main
with: with:
flakehub: true
extra-conf: | extra-conf: |
narinfo-cache-negative-ttl = 0 narinfo-cache-negative-ttl = 0
- name: Cache the store - name: Cache the store
@ -48,18 +55,43 @@ jobs:
run: | run: |
.github/workflows/cache-test.sh .github/workflows/cache-test.sh
# run-x86_64-darwin: run-x86_64-darwin:
# name: Run x86_64 Darwin name: Run x86_64 Darwin
# runs-on: macos-12 runs-on: macos-12
# steps: permissions:
# - uses: actions/checkout@v3 id-token: "write"
# - name: Install Nix contents: "read"
# uses: DeterminateSystems/nix-installer-action@main steps:
# with: - uses: actions/checkout@v3
# extra-conf: | - name: Install Nix
# narinfo-cache-negative-ttl = 0 uses: DeterminateSystems/nix-installer-action@main
# - name: Cache the store with:
# uses: ./ flakehub: true
# - name: Check the cache for liveness extra-conf: |
# run: | narinfo-cache-negative-ttl = 0
# .github/workflows/cache-test.sh - name: Cache the store
uses: ./
- name: Check the cache for liveness
run: |
.github/workflows/cache-test.sh
run-aarch64-darwin:
name: Run aarch64 Darwin
concurrency: build-ARM64-macOS
runs-on: macos-latest-xlarge
permissions:
id-token: "write"
contents: "read"
steps:
- uses: actions/checkout@v3
- name: Install Nix
uses: DeterminateSystems/nix-installer-action@main
with:
flakehub: true
extra-conf: |
narinfo-cache-negative-ttl = 0
- name: Cache the store
uses: ./
- name: Check the cache for liveness
run: |
.github/workflows/cache-test.sh

28
.github/workflows/publish.yml vendored Normal file
View file

@ -0,0 +1,28 @@
name: Publish
on:
workflow_dispatch:
push:
branches:
- main
jobs:
publish:
name: Publish
runs-on: ubuntu-22.04
permissions:
contents: read
id-token: write
steps:
- uses: actions/checkout@v3
- run: |
mkdir artifacts
git archive --format=tar.gz HEAD -o ./artifacts/universal.tar.gz
- uses: DeterminateSystems/push-artifact-ids@main
with:
s3_upload_role: ${{ secrets.AWS_S3_UPLOAD_ROLE }}
bucket: ${{ secrets.AWS_S3_UPLOAD_BUCKET }}
directory: ./artifacts
ids_project_name: magic-nix-cache-action-priv
ids_binary_prefix: magic-nix-cache-action-priv

View file

@ -90,14 +90,20 @@ cat action.yml| nix run nixpkgs#yq-go -- '[[ "Parameter", "Description", "Requir
| Parameter | Description | Required | Default | | Parameter | Description | Required | Default |
| - | - | - | - | | - | - | - | - |
| `diagnostic-endpoint` | Diagnostic endpoint url where diagnostics and performance data is sent. To disable set this to an empty string. | | https://install.determinate.systems/magic-nix-cache/perf | | `diagnostic-endpoint` | Diagnostic endpoint url where diagnostics and performance data is sent. To disable set this to an empty string. | | https://install.determinate.systems/magic-nix-cache/perf |
| `flakehub-api-server` | The FlakeHub API server. | | https://api.flakehub.com |
| `flakehub-cache-server` | The FlakeHub binary cache server. | | https://cache.flakehub.com |
| `flakehub-flake-name` | The name of your flake on FlakeHub. Defaults to the current name of the GitHub repository it is running in. | | |
| `listen` | The host and port to listen on. | | 127.0.0.1:37515 | | `listen` | The host and port to listen on. | | 127.0.0.1:37515 |
| `source-binary` | Run a version of the cache binary from somewhere already on disk. Conflicts with all other `source-*` options. | | | | `source-binary` | Run a version of the cache binary from somewhere already on disk. Conflicts with all other `source-*` options. | | |
| `source-branch` | The branch of `magic-nix-cache` to use. Conflicts with all other `source-*` options. | | | | `source-branch` | The branch of `magic-nix-cache` to use. Conflicts with all other `source-*` options. | | main |
| `source-pr` | The PR of `magic-nix-cache` to use. Conflicts with all other `source-*` options. | | | | `source-pr` | The PR of `magic-nix-cache` to use. Conflicts with all other `source-*` options. | | |
| `source-revision` | The revision of `nix-magic-nix-cache` to use. Conflicts with all other `source-*` options. | | | | `source-revision` | The revision of `nix-magic-nix-cache` to use. Conflicts with all other `source-*` options. | | |
| `source-tag` | The tag of `magic-nix-cache` to use. Conflicts with all other `source-*` options. | | | | `source-tag` | The tag of `magic-nix-cache` to use. Conflicts with all other `source-*` options. | | |
| `source-url` | A URL pointing to a `magic-nix-cache` binary. Overrides all other `source-*` options. | | | | `source-url` | A URL pointing to a `magic-nix-cache` binary. Overrides all other `source-*` options. | | |
| `upstream-cache` | Your preferred [upstream cache](#upstream-cache). Store paths fetched from this store will not be cached in the [GitHub Actions Cache][gha-cache]. | | https://cache.nixos.org | | `startup-notification-port` | The port magic-nix-cache uses for daemon startup notification. | | 41239 |
| `upstream-cache` | Your preferred upstream cache. Store paths in this store will not be cached in GitHub Actions' cache. | | https://cache.nixos.org |
| `use-flakehub` | Whether to upload build results to FlakeHub Cache. | | true |
| `use-gha-cache` | Whether to upload build results to the GitHub Actions cache. | | true |
[gha-cache]: https://docs.github.com/en/rest/actions/cache [gha-cache]: https://docs.github.com/en/rest/actions/cache
[detsys]: https://determinate.systems/ [detsys]: https://determinate.systems/

View file

@ -4,6 +4,10 @@ branding:
color: "purple" color: "purple"
description: "Free, no-configuration Nix cache. Cut CI time by 50% or more by caching to GitHub Actions' cache." description: "Free, no-configuration Nix cache. Cut CI time by 50% or more by caching to GitHub Actions' cache."
inputs: inputs:
use-gha-cache:
description: "Whether to upload build results to the GitHub Actions cache."
default: true
required: false
listen: listen:
description: The host and port to listen on. description: The host and port to listen on.
default: 127.0.0.1:37515 default: 127.0.0.1:37515
@ -30,6 +34,23 @@ inputs:
diagnostic-endpoint: diagnostic-endpoint:
description: "Diagnostic endpoint url where diagnostics and performance data is sent. To disable set this to an empty string." description: "Diagnostic endpoint url where diagnostics and performance data is sent. To disable set this to an empty string."
default: "https://install.determinate.systems/magic-nix-cache/perf" default: "https://install.determinate.systems/magic-nix-cache/perf"
use-flakehub:
description: "Whether to upload build results to FlakeHub Cache."
default: true
required: false
flakehub-cache-server:
description: "The FlakeHub binary cache server."
default: "https://cache.flakehub.com"
flakehub-api-server:
description: "The FlakeHub API server."
default: "https://api.flakehub.com"
flakehub-flake-name:
description: "The name of your flake on FlakeHub. Defaults to the current name of the GitHub repository it is running in."
default: ${{ github.repository }}
required: false
startup-notification-port:
description: "The port magic-nix-cache uses for daemon startup notification."
default: 41239
runs: runs:
using: "node20" using: "node20"

BIN
bun.lockb

Binary file not shown.

164
dist/index.js generated vendored
View file

@ -2,18 +2,18 @@ import * as fs$2 from 'node:fs/promises';
import * as os$2 from 'node:os'; import * as os$2 from 'node:os';
import os__default from 'node:os'; import os__default from 'node:os';
import * as path$1 from 'node:path'; import * as path$1 from 'node:path';
import { spawn } from 'node:child_process'; import { spawn, exec } from 'node:child_process';
import { openSync, writeSync, close, readFileSync, createWriteStream } from 'node:fs'; import { openSync, readFileSync } from 'node:fs';
import { pipeline } from 'node:stream/promises';
import { promisify as promisify$1, inspect } from 'node:util'; import { promisify as promisify$1, inspect } from 'node:util';
import * as require$$1 from 'http';
import require$$1__default from 'http';
import require$$0 from 'os'; import require$$0 from 'os';
import require$$1 from 'fs'; import require$$1$1 from 'fs';
import crypto from 'crypto'; import crypto from 'crypto';
import require$$0$2 from 'path'; import require$$0$2 from 'path';
import require$$1$2 from 'http';
import require$$2$1 from 'https'; import require$$2$1 from 'https';
import require$$0$6 from 'net'; import require$$0$6 from 'net';
import require$$1$1 from 'tls'; import require$$1$2 from 'tls';
import require$$0$1, { errorMonitor } from 'events'; import require$$0$1, { errorMonitor } from 'events';
import require$$5 from 'assert'; import require$$5 from 'assert';
import require$$6, { types } from 'util'; import require$$6, { types } from 'util';
@ -532,7 +532,7 @@ Object.defineProperty(fileCommand, "__esModule", { value: true });
fileCommand.prepareKeyValueMessage = fileCommand.issueFileCommand = void 0; fileCommand.prepareKeyValueMessage = fileCommand.issueFileCommand = void 0;
// We use any as a valid input type // We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */ /* eslint-disable @typescript-eslint/no-explicit-any */
const fs$1 = __importStar(require$$1); const fs$1 = __importStar(require$$1$1);
const os = __importStar(require$$0); const os = __importStar(require$$0);
const uuid_1 = require$$2; const uuid_1 = require$$2;
const utils_1 = utils; const utils_1 = utils;
@ -648,8 +648,8 @@ function isLoopbackAddress(host) {
var tunnel$1 = {}; var tunnel$1 = {};
var tls$4 = require$$1$1; var tls$4 = require$$1$2;
var http$3 = require$$1$2; var http$3 = require$$1__default;
var https$3 = require$$2$1; var https$3 = require$$2$1;
var events$1 = require$$0$1; var events$1 = require$$0$1;
var util = require$$6; var util = require$$6;
@ -943,7 +943,7 @@ var tunnel = tunnel$1;
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
const http = __importStar(require$$1$2); const http = __importStar(require$$1__default);
const https = __importStar(require$$2$1); const https = __importStar(require$$2$1);
const pm = __importStar(proxy); const pm = __importStar(proxy);
const tunnel$1 = __importStar(tunnel); const tunnel$1 = __importStar(tunnel);
@ -1704,7 +1704,7 @@ function requireSummary () {
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
const os_1 = require$$0; const os_1 = require$$0;
const fs_1 = require$$1; const fs_1 = require$$1$1;
const { access, appendFile, writeFile } = fs_1.promises; const { access, appendFile, writeFile } = fs_1.promises;
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
@ -2393,7 +2393,7 @@ function requireCore () {
var coreExports = requireCore(); var coreExports = requireCore();
let events = require$$0$1; let events = require$$0$1;
let fs = require$$1; let fs = require$$1$1;
let path = require$$0$2; let path = require$$0$2;
// const environment = process.env['NODE_ENV'] || 'development' // const environment = process.env['NODE_ENV'] || 'development'
@ -6664,7 +6664,7 @@ var delayAsyncDestroy$2 = stream => {
// eslint-disable-next-line node/prefer-global/url // eslint-disable-next-line node/prefer-global/url
const {URL: URL$4} = require$$0$5; const {URL: URL$4} = require$$0$5;
const EventEmitter = require$$0$1; const EventEmitter = require$$0$1;
const tls$3 = require$$1$1; const tls$3 = require$$1$2;
const http2$2 = require$$3; const http2$2 = require$$3;
const QuickLRU$1 = quickLru; const QuickLRU$1 = quickLru;
const delayAsyncDestroy$1 = delayAsyncDestroy$2; const delayAsyncDestroy$1 = delayAsyncDestroy$2;
@ -8299,7 +8299,7 @@ var clientRequest = ClientRequest$1;
var auto$1 = {exports: {}}; var auto$1 = {exports: {}};
const tls$2 = require$$1$1; const tls$2 = require$$1$2;
var resolveAlpn = (options = {}, connect = tls$2.connect) => new Promise((resolve, reject) => { var resolveAlpn = (options = {}, connect = tls$2.connect) => new Promise((resolve, reject) => {
let timeout = false; let timeout = false;
@ -8374,7 +8374,7 @@ var calculateServerName$1 = host => {
// See https://github.com/facebook/jest/issues/2549 // See https://github.com/facebook/jest/issues/2549
// eslint-disable-next-line node/prefer-global/url // eslint-disable-next-line node/prefer-global/url
const {URL: URL$2, urlToHttpOptions} = require$$0$5; const {URL: URL$2, urlToHttpOptions} = require$$0$5;
const http$2 = require$$1$2; const http$2 = require$$1__default;
const https$2 = require$$2$1; const https$2 = require$$2$1;
const resolveALPN = resolveAlpn; const resolveALPN = resolveAlpn;
const QuickLRU = quickLru; const QuickLRU = quickLru;
@ -8580,7 +8580,7 @@ auto$1.exports.createResolveProtocol = createResolveProtocol;
var autoExports = auto$1.exports; var autoExports = auto$1.exports;
const stream = require$$0$3; const stream = require$$0$3;
const tls$1 = require$$1$1; const tls$1 = require$$1$2;
// Really awesome hack. // Really awesome hack.
const JSStreamSocket$2 = (new tls$1.TLSSocket(new stream.PassThrough()))._handle._parentWrap.constructor; const JSStreamSocket$2 = (new tls$1.TLSSocket(new stream.PassThrough()))._handle._parentWrap.constructor;
@ -8653,8 +8653,8 @@ var getAuthHeaders = self => {
return {}; return {};
}; };
const tls = require$$1$1; const tls = require$$1$2;
const http$1 = require$$1$2; const http$1 = require$$1__default;
const https$1 = require$$2$1; const https$1 = require$$2$1;
const JSStreamSocket$1 = jsStreamSocket; const JSStreamSocket$1 = jsStreamSocket;
const {globalAgent: globalAgent$2} = agent; const {globalAgent: globalAgent$2} = agent;
@ -8815,7 +8815,7 @@ let Http2OverHttp2$1 = class Http2OverHttp2 extends Http2OverHttpX$1 {
var h2OverH2 = Http2OverHttp2$1; var h2OverH2 = Http2OverHttp2$1;
const http = require$$1$2; const http = require$$1__default;
const https = require$$2$1; const https = require$$2$1;
const Http2OverHttpX = h2OverHx; const Http2OverHttpX = h2OverHx;
const getAuthorizationHeaders = getAuthHeaders; const getAuthorizationHeaders = getAuthHeaders;
@ -12119,7 +12119,7 @@ function getCacherUrl() {
const runnerArch = process.env.RUNNER_ARCH; const runnerArch = process.env.RUNNER_ARCH;
const runnerOs = process.env.RUNNER_OS; const runnerOs = process.env.RUNNER_OS;
const binarySuffix = `${runnerArch}-${runnerOs}`; const binarySuffix = `${runnerArch}-${runnerOs}`;
const urlPrefix = `https://install.determinate.systems/magic-nix-cache`; const urlPrefix = `https://install.determinate.systems/magic-nix-cache-closure`;
if (coreExports.getInput('source-url')) { if (coreExports.getInput('source-url')) {
return coreExports.getInput('source-url'); return coreExports.getInput('source-url');
} }
@ -12137,14 +12137,22 @@ function getCacherUrl() {
} }
return `${urlPrefix}/stable/${binarySuffix}`; return `${urlPrefix}/stable/${binarySuffix}`;
} }
async function fetchAutoCacher(destination) { async function fetchAutoCacher() {
const stream = createWriteStream(destination, {
encoding: "binary",
mode: 0o755,
});
const binary_url = getCacherUrl(); const binary_url = getCacherUrl();
coreExports.debug(`Fetching the Magic Nix Cache from ${binary_url}`); coreExports.info(`Fetching the Magic Nix Cache from ${binary_url}`);
return pipeline(gotClient.stream(binary_url), stream); const { stdout } = await promisify$1(exec)(`curl -L "${binary_url}" | xz -d | nix-store --import`);
const paths = stdout.split(os$2.EOL);
// Since the export is in reverse topologically sorted order, magic-nix-cache is always the penultimate entry in the list (the empty string left by split being the last).
const last_path = paths.at(-2);
return `${last_path}/bin/magic-nix-cache`;
}
function tailLog(daemonDir) {
const log = new Tail_1(path$1.join(daemonDir, 'daemon.log'));
coreExports.debug(`tailing daemon.log...`);
log.on('line', (line) => {
coreExports.info(line);
});
return log;
} }
async function setUpAutoCache() { async function setUpAutoCache() {
const tmpdir = process.env['RUNNER_TEMP'] || os$2.tmpdir(); const tmpdir = process.env['RUNNER_TEMP'] || os$2.tmpdir();
@ -12166,8 +12174,7 @@ async function setUpAutoCache() {
daemonBin = coreExports.getInput('source-binary'); daemonBin = coreExports.getInput('source-binary');
} }
else { else {
daemonBin = `${daemonDir}/magic-nix-cache`; daemonBin = await fetchAutoCacher();
await fetchAutoCacher(daemonBin);
} }
var runEnv; var runEnv;
if (coreExports.isDebug()) { if (coreExports.isDebug()) {
@ -12180,18 +12187,58 @@ async function setUpAutoCache() {
else { else {
runEnv = process.env; runEnv = process.env;
} }
const output = openSync(`${daemonDir}/daemon.log`, 'a'); const notifyPort = coreExports.getInput('startup-notification-port');
const launch = spawn(daemonBin, [ const notifyPromise = new Promise((resolveListening) => {
'--daemon-dir', daemonDir, const promise = new Promise(async (resolveQuit) => {
const notifyServer = require$$1.createServer((req, res) => {
if (req.method === 'POST' && req.url === '/') {
coreExports.debug(`Notify server shutting down.`);
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end('{}');
notifyServer.close(() => {
resolveQuit();
});
}
});
notifyServer.listen(notifyPort, () => {
coreExports.debug(`Notify server running.`);
resolveListening(promise);
});
});
});
// Start tailing the daemon log.
const outputPath = `${daemonDir}/daemon.log`;
const output = openSync(outputPath, 'a');
const log = tailLog(daemonDir);
const netrc = await netrcPath();
// Start the server. Once it is ready, it will notify us via the notification server.
const daemon = spawn(daemonBin, [
'--startup-notification-url', `http://127.0.0.1:${notifyPort}`,
'--listen', coreExports.getInput('listen'), '--listen', coreExports.getInput('listen'),
'--upstream', coreExports.getInput('upstream-cache'), '--upstream', coreExports.getInput('upstream-cache'),
'--diagnostic-endpoint', coreExports.getInput('diagnostic-endpoint') '--diagnostic-endpoint', coreExports.getInput('diagnostic-endpoint'),
], { '--nix-conf', `${process.env["HOME"]}/.config/nix/nix.conf`
].concat(coreExports.getInput('use-flakehub') === 'true' ? [
'--use-flakehub',
'--flakehub-cache-server', coreExports.getInput('flakehub-cache-server'),
'--flakehub-api-server', coreExports.getInput('flakehub-api-server'),
'--flakehub-api-server-netrc', netrc,
'--flakehub-flake-name', coreExports.getInput('flakehub-flake-name'),
] : []).concat(coreExports.getInput('use-gha-cache') === 'true' ? [
'--use-gha-cache'
] : []), {
stdio: ['ignore', output, output], stdio: ['ignore', output, output],
env: runEnv env: runEnv,
detached: true
}); });
const pidFile = path$1.join(daemonDir, 'daemon.pid');
await fs$2.writeFile(pidFile, `${daemon.pid}`);
coreExports.info("Waiting for magic-nix-cache to start...");
await new Promise((resolve, reject) => { await new Promise((resolve, reject) => {
launch.on('exit', (code, signal) => { notifyPromise.then((value) => {
resolve();
});
daemon.on('exit', async (code, signal) => {
if (signal) { if (signal) {
reject(new Error(`Daemon was killed by signal ${signal}`)); reject(new Error(`Daemon was killed by signal ${signal}`));
} }
@ -12199,17 +12246,14 @@ async function setUpAutoCache() {
reject(new Error(`Daemon exited with code ${code}`)); reject(new Error(`Daemon exited with code ${code}`));
} }
else { else {
resolve(); reject(new Error(`Daemon unexpectedly exited`));
} }
}); });
}); });
await fs$2.mkdir(`${process.env["HOME"]}/.config/nix`, { recursive: true }); daemon.unref();
const nixConf = openSync(`${process.env["HOME"]}/.config/nix/nix.conf`, 'a'); coreExports.info('Launched Magic Nix Cache');
writeSync(nixConf, `${"\n"}extra-substituters = http://${coreExports.getInput('listen')}/?trusted=1&compression=zstd&parallel-compression=true${"\n"}`);
writeSync(nixConf, `fallback = true${"\n"}`);
close(nixConf);
coreExports.debug('Launched Magic Nix Cache');
coreExports.exportVariable(ENV_CACHE_DAEMONDIR, daemonDir); coreExports.exportVariable(ENV_CACHE_DAEMONDIR, daemonDir);
log.unwatch();
} }
async function notifyAutoCache() { async function notifyAutoCache() {
const daemonDir = process.env[ENV_CACHE_DAEMONDIR]; const daemonDir = process.env[ENV_CACHE_DAEMONDIR];
@ -12228,6 +12272,34 @@ async function notifyAutoCache() {
coreExports.info(`Magic Nix Cache may not be running for this workflow.`); coreExports.info(`Magic Nix Cache may not be running for this workflow.`);
} }
} }
async function netrcPath() {
const expectedNetrcPath = path$1.join(process.env['RUNNER_TEMP'], 'determinate-nix-installer-netrc');
try {
await fs$2.access(expectedNetrcPath);
return expectedNetrcPath;
}
catch {
// `nix-installer` was not used, the user may be registered with FlakeHub though.
const destinedNetrcPath = path$1.join(process.env['RUNNER_TEMP'], 'magic-nix-cache-netrc');
try {
await flakehub_login(destinedNetrcPath);
}
catch (e) {
coreExports.info("FlakeHub cache disabled.");
coreExports.debug(`Error while logging into FlakeHub: ${e}`);
}
return destinedNetrcPath;
}
}
async function flakehub_login(netrc) {
const jwt = await coreExports.getIDToken("api.flakehub.com");
await fs$2.writeFile(netrc, [
`machine api.flakehub.com login flakehub password ${jwt}`,
`machine flakehub.com login flakehub password ${jwt}`,
`machine cache.flakehub.com login flakehub password ${jwt}`,
].join("\n"));
coreExports.info("Logged in to FlakeHub.");
}
async function tearDownAutoCache() { async function tearDownAutoCache() {
const daemonDir = process.env[ENV_CACHE_DAEMONDIR]; const daemonDir = process.env[ENV_CACHE_DAEMONDIR];
if (!daemonDir) { if (!daemonDir) {
@ -12240,11 +12312,7 @@ async function tearDownAutoCache() {
if (!pid) { if (!pid) {
throw new Error("magic-nix-cache did not start successfully"); throw new Error("magic-nix-cache did not start successfully");
} }
const log = new Tail_1(path$1.join(daemonDir, 'daemon.log')); const log = tailLog(daemonDir);
coreExports.debug(`tailing daemon.log...`);
log.on('line', (line) => {
coreExports.info(line);
});
try { try {
coreExports.debug(`about to post to localhost`); coreExports.debug(`about to post to localhost`);
const res = await gotClient.post(`http://${coreExports.getInput('listen')}/api/workflow-finish`).json(); const res = await gotClient.post(`http://${coreExports.getInput('listen')}/api/workflow-finish`).json();

View file

@ -19,10 +19,10 @@
default = pkgs.mkShell { default = pkgs.mkShell {
packages = with pkgs; [ packages = with pkgs; [
bun bun
nodejs
jq jq
act act
just just
shellcheck
]; ];
}; };
}); });

View file

@ -10,15 +10,15 @@
"license": "LGPL", "license": "LGPL",
"dependencies": { "dependencies": {
"@actions/core": "^1.10.0", "@actions/core": "^1.10.0",
"got": "^12.6.0",
"tail": "^2.2.6", "tail": "^2.2.6",
"tslib": "^2.5.2", "tslib": "^2.5.2"
"got": "^12.6.0"
}, },
"devDependencies": { "devDependencies": {
"@rollup/plugin-commonjs": "^25.0.0", "@rollup/plugin-commonjs": "^25.0.0",
"@rollup/plugin-node-resolve": "^15.0.2", "@rollup/plugin-node-resolve": "^15.0.2",
"@rollup/plugin-typescript": "^11.1.1", "@rollup/plugin-typescript": "^11.1.1",
"@types/node": "^20.2.1", "@types/node": "^20.11.17",
"rollup": "^3.22.0", "rollup": "^3.22.0",
"typescript": "^5.0.4" "typescript": "^5.0.4"
} }

View file

@ -3,10 +3,10 @@
import * as fs from 'node:fs/promises'; import * as fs from 'node:fs/promises';
import * as os from 'node:os'; import * as os from 'node:os';
import * as path from 'node:path'; import * as path from 'node:path';
import { spawn } from 'node:child_process'; import { spawn, exec } from 'node:child_process';
import { createWriteStream, openSync, writeSync, close, readFileSync } from 'node:fs'; import { openSync, readFileSync } from 'node:fs';
import { pipeline } from 'node:stream/promises'; import { inspect, promisify } from 'node:util';
import { inspect } from 'node:util'; import * as http from 'http';
import * as core from '@actions/core'; import * as core from '@actions/core';
import { Tail } from 'tail'; import { Tail } from 'tail';
@ -33,8 +33,7 @@ function getCacherUrl() : string {
const runnerArch = process.env.RUNNER_ARCH; const runnerArch = process.env.RUNNER_ARCH;
const runnerOs = process.env.RUNNER_OS; const runnerOs = process.env.RUNNER_OS;
const binarySuffix = `${runnerArch}-${runnerOs}`; const binarySuffix = `${runnerArch}-${runnerOs}`;
const urlPrefix = `https://install.determinate.systems/magic-nix-cache`; const urlPrefix = `https://install.determinate.systems/magic-nix-cache-closure`;
if (core.getInput('source-url')) { if (core.getInput('source-url')) {
return core.getInput('source-url'); return core.getInput('source-url');
} }
@ -58,19 +57,27 @@ function getCacherUrl() : string {
return `${urlPrefix}/stable/${binarySuffix}`; return `${urlPrefix}/stable/${binarySuffix}`;
} }
async function fetchAutoCacher(destination: string) { async function fetchAutoCacher() {
const stream = createWriteStream(destination, {
encoding: "binary",
mode: 0o755,
});
const binary_url = getCacherUrl(); const binary_url = getCacherUrl();
core.debug(`Fetching the Magic Nix Cache from ${binary_url}`); core.info(`Fetching the Magic Nix Cache from ${binary_url}`);
return pipeline( const { stdout } = await promisify(exec)(`curl -L "${binary_url}" | xz -d | nix-store --import`);
gotClient.stream(binary_url),
stream const paths = stdout.split(os.EOL);
);
// Since the export is in reverse topologically sorted order, magic-nix-cache is always the penultimate entry in the list (the empty string left by split being the last).
const last_path = paths.at(-2);
return `${last_path}/bin/magic-nix-cache`;
}
function tailLog(daemonDir) {
const log = new Tail(path.join(daemonDir, 'daemon.log'));
core.debug(`tailing daemon.log...`);
log.on('line', (line) => {
core.info(line);
});
return log;
} }
async function setUpAutoCache() { async function setUpAutoCache() {
@ -97,8 +104,7 @@ async function setUpAutoCache() {
if (core.getInput('source-binary')) { if (core.getInput('source-binary')) {
daemonBin = core.getInput('source-binary'); daemonBin = core.getInput('source-binary');
} else { } else {
daemonBin = `${daemonDir}/magic-nix-cache`; daemonBin = await fetchAutoCacher();
await fetchAutoCacher(daemonBin);
} }
var runEnv; var runEnv;
@ -112,41 +118,87 @@ async function setUpAutoCache() {
runEnv = process.env; runEnv = process.env;
} }
const output = openSync(`${daemonDir}/daemon.log`, 'a'); const notifyPort = core.getInput('startup-notification-port');
const launch = spawn(
const notifyPromise = new Promise<Promise<void>>((resolveListening) => {
const promise = new Promise<void>(async (resolveQuit) => {
const notifyServer = http.createServer((req, res) => {
if (req.method === 'POST' && req.url === '/') {
core.debug(`Notify server shutting down.`);
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end('{}');
notifyServer.close(() => {
resolveQuit();
});
}
});
notifyServer.listen(notifyPort, () => {
core.debug(`Notify server running.`);
resolveListening(promise);
});
});
});
// Start tailing the daemon log.
const outputPath = `${daemonDir}/daemon.log`;
const output = openSync(outputPath, 'a');
const log = tailLog(daemonDir);
const netrc = await netrcPath();
// Start the server. Once it is ready, it will notify us via the notification server.
const daemon = spawn(
daemonBin, daemonBin,
[ [
'--daemon-dir', daemonDir, '--startup-notification-url', `http://127.0.0.1:${notifyPort}`,
'--listen', core.getInput('listen'), '--listen', core.getInput('listen'),
'--upstream', core.getInput('upstream-cache'), '--upstream', core.getInput('upstream-cache'),
'--diagnostic-endpoint', core.getInput('diagnostic-endpoint') '--diagnostic-endpoint', core.getInput('diagnostic-endpoint'),
], '--nix-conf', `${process.env["HOME"]}/.config/nix/nix.conf`
].concat(
core.getInput('use-flakehub') === 'true' ? [
'--use-flakehub',
'--flakehub-cache-server', core.getInput('flakehub-cache-server'),
'--flakehub-api-server', core.getInput('flakehub-api-server'),
'--flakehub-api-server-netrc', netrc,
'--flakehub-flake-name', core.getInput('flakehub-flake-name'),
] : []).concat(
core.getInput('use-gha-cache') === 'true' ? [
'--use-gha-cache'
] : []),
{ {
stdio: ['ignore', output, output], stdio: ['ignore', output, output],
env: runEnv env: runEnv,
detached: true
} }
); );
const pidFile = path.join(daemonDir, 'daemon.pid');
await fs.writeFile(pidFile, `${daemon.pid}`);
core.info("Waiting for magic-nix-cache to start...");
await new Promise<void>((resolve, reject) => { await new Promise<void>((resolve, reject) => {
launch.on('exit', (code, signal) => { notifyPromise.then((value) => {
resolve();
});
daemon.on('exit', async (code, signal) => {
if (signal) { if (signal) {
reject(new Error(`Daemon was killed by signal ${signal}`)); reject(new Error(`Daemon was killed by signal ${signal}`));
} else if (code) { } else if (code) {
reject(new Error(`Daemon exited with code ${code}`)); reject(new Error(`Daemon exited with code ${code}`));
} else { } else {
resolve(); reject(new Error(`Daemon unexpectedly exited`));
} }
}); });
}); });
await fs.mkdir(`${process.env["HOME"]}/.config/nix`, { recursive: true }); daemon.unref();
const nixConf = openSync(`${process.env["HOME"]}/.config/nix/nix.conf`, 'a');
writeSync(nixConf, `${"\n"}extra-substituters = http://${core.getInput('listen')}/?trusted=1&compression=zstd&parallel-compression=true${"\n"}`);
writeSync(nixConf, `fallback = true${"\n"}`);
close(nixConf);
core.debug('Launched Magic Nix Cache'); core.info('Launched Magic Nix Cache');
core.exportVariable(ENV_CACHE_DAEMONDIR, daemonDir); core.exportVariable(ENV_CACHE_DAEMONDIR, daemonDir);
log.unwatch();
} }
async function notifyAutoCache() { async function notifyAutoCache() {
@ -168,6 +220,40 @@ async function notifyAutoCache() {
} }
} }
async function netrcPath() {
const expectedNetrcPath = path.join(process.env['RUNNER_TEMP'], 'determinate-nix-installer-netrc')
try {
await fs.access(expectedNetrcPath)
return expectedNetrcPath;
} catch {
// `nix-installer` was not used, the user may be registered with FlakeHub though.
const destinedNetrcPath = path.join(process.env['RUNNER_TEMP'], 'magic-nix-cache-netrc')
try {
await flakehub_login(destinedNetrcPath);
} catch (e) {
core.info("FlakeHub cache disabled.");
core.debug(`Error while logging into FlakeHub: ${e}`)
}
return destinedNetrcPath;
}
}
async function flakehub_login(netrc: string) {
const jwt = await core.getIDToken("api.flakehub.com");
await fs.writeFile(
netrc,
[
`machine api.flakehub.com login flakehub password ${jwt}`,
`machine flakehub.com login flakehub password ${jwt}`,
`machine cache.flakehub.com login flakehub password ${jwt}`,
].join("\n"),
);
core.info("Logged in to FlakeHub.");
}
async function tearDownAutoCache() { async function tearDownAutoCache() {
const daemonDir = process.env[ENV_CACHE_DAEMONDIR]; const daemonDir = process.env[ENV_CACHE_DAEMONDIR];
@ -183,12 +269,7 @@ async function tearDownAutoCache() {
throw new Error("magic-nix-cache did not start successfully"); throw new Error("magic-nix-cache did not start successfully");
} }
const log = new Tail(path.join(daemonDir, 'daemon.log')); const log = tailLog(daemonDir);
core.debug(`tailing daemon.log...`);
log.on('line', (line) => {
core.info(line);
});
try { try {
core.debug(`about to post to localhost`); core.debug(`about to post to localhost`);
@ -239,4 +320,3 @@ try {
}} }}
core.debug(`rip`); core.debug(`rip`);