// Main
import * as fs from 'node:fs/promises';
import * as os from 'node:os';
import * as path from 'node:path';
import { spawn, exec } from 'node:child_process';
import { openSync } from 'node:fs';
import { setTimeout } from 'timers/promises';
import { inspect, promisify } from 'node:util';

import * as core from '@actions/core';
import { Tail } from 'tail';
import got from "got";

const ENV_CACHE_DAEMONDIR = 'MAGIC_NIX_CACHE_DAEMONDIR';
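
// HTTP client used to talk to the local magic-nix-cache daemon, with retries
// for transient failures.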
const gotClient = got.extend({
  retry: {
    limit: 5,
    methods: ['POST', 'GET', 'PUT', 'HEAD', 'DELETE', 'OPTIONS', 'TRACE'],
  },
  hooks: {
    beforeRetry: [
      (error, retryCount) => {
        core.info(`Retrying after error ${error.code}, retry #: ${retryCount}`);
      }
    ],
  },
});
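
// Pick the download URL for the magic-nix-cache closure from the action's
// source-* inputs, falling back to the latest build for this platform.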
function getCacherUrl(): string {
  const runnerArch = process.env.RUNNER_ARCH;
  const runnerOs = process.env.RUNNER_OS;
  const binarySuffix = `${runnerArch}-${runnerOs}`;
  const urlPrefix = `https://magic-nix-cache-priv20231208150408868500000001.s3.us-east-2.amazonaws.com`;

  if (core.getInput('source-url')) {
    return core.getInput('source-url');
  }
  if (core.getInput('source-tag')) {
    return `${urlPrefix}/tag/${core.getInput('source-tag')}/${binarySuffix}`;
  }
  if (core.getInput('source-pr')) {
    return `${urlPrefix}/pr_${core.getInput('source-pr')}/magic-nix-cache-${binarySuffix}`;
  }
  if (core.getInput('source-branch')) {
    return `${urlPrefix}/branch/${core.getInput('source-branch')}/${binarySuffix}`;
  }
  if (core.getInput('source-revision')) {
    return `${urlPrefix}/rev/${core.getInput('source-revision')}/${binarySuffix}`;
  }
  return `${urlPrefix}/latest/${binarySuffix}`;
}
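
// Download the magic-nix-cache closure, import it into the local Nix store,
// and return the path to the daemon binary inside the imported store path.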
async function fetchAutoCacher() {
  const binary_url = getCacherUrl();
  core.info(`Fetching the Magic Nix Cache from ${binary_url}`);

  const { stdout } = await promisify(exec)(`curl "${binary_url}" | xz -d | nix-store --import`);

  const paths = stdout.split(os.EOL);
  // Since the export is in reverse topologically sorted order, magic-nix-cache
  // is always the penultimate entry in the list (the empty string left by split
  // being the last).
  const last_path = paths.at(-2);
  return `${last_path}/bin/magic-nix-cache`;
}
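
// Spawn the magic-nix-cache daemon in the background and wait until it
// signals that it is ready.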
async function setUpAutoCache() {
  const tmpdir = process.env['RUNNER_TEMP'] || os.tmpdir();
  const required_env = ['ACTIONS_CACHE_URL', 'ACTIONS_RUNTIME_URL', 'ACTIONS_RUNTIME_TOKEN'];

  var anyMissing = false;
  for (const n of required_env) {
    if (!process.env.hasOwnProperty(n)) {
      anyMissing = true;
      core.warning(`Disabling automatic caching since required environment ${n} isn't available`);
    }
  }

  if (anyMissing) {
    return;
  }

  core.debug(`GitHub Action Cache URL: ${process.env['ACTIONS_CACHE_URL']}`);

  const daemonDir = await fs.mkdtemp(path.join(tmpdir, 'magic-nix-cache-'));

  var daemonBin: string;
  if (core.getInput('source-binary')) {
    daemonBin = core.getInput('source-binary');
  } else {
    daemonBin = await fetchAutoCacher();
  }

  var runEnv;
  if (core.isDebug()) {
    runEnv = {
      RUST_LOG: "trace,magic_nix_cache=debug,gha_cache=debug",
      RUST_BACKTRACE: "full",
      ...process.env
    };
  } else {
    runEnv = process.env;
  }

  // Start the server. Once it is ready, it will notify us via file descriptor 3.
  const outputPath = `${daemonDir}/daemon.log`;
  const output = openSync(outputPath, 'a');
  const notifyFd = 3;

  const daemon = spawn(
    daemonBin,
    [
      '--notify-fd', String(notifyFd),
      '--listen', core.getInput('listen'),
      '--upstream', core.getInput('upstream-cache'),
      '--diagnostic-endpoint', core.getInput('diagnostic-endpoint'),
      '--nix-conf', `${process.env["HOME"]}/.config/nix/nix.conf`
    ].concat(
      core.getInput('use-flakehub') === 'true' ? [
        '--use-flakehub',
        '--flakehub-cache-server', core.getInput('flakehub-cache-server'),
        '--flakehub-api-server', core.getInput('flakehub-api-server'),
        '--flakehub-api-server-netrc', path.join(process.env['RUNNER_TEMP'], 'determinate-nix-installer-netrc'),
      ] : []).concat(
      core.getInput('use-gha-cache') === 'true' ? [
        '--use-gha-cache'
      ] : []),
    {
      stdio: ['ignore', output, output, 'pipe'],
      env: runEnv,
      detached: true
    }
  );

  const pidFile = path.join(daemonDir, 'daemon.pid');
  await fs.writeFile(pidFile, `${daemon.pid}`);
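
  // Wait for the daemon to report readiness on the notification pipe, or
  // reject with its log if it exits first.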
  await new Promise<void>((resolve, reject) => {
    daemon.stdio[notifyFd].on('data', (data) => {
      if (data.toString().trim() == 'INIT') {
        resolve();
      }
    });

    daemon.on('exit', async (code, signal) => {
      const log: string = await fs.readFile(outputPath, 'utf-8');
      if (signal) {
        reject(new Error(`Daemon was killed by signal ${signal}: ${log}`));
      } else if (code) {
        reject(new Error(`Daemon exited with code ${code}: ${log}`));
      } else {
        reject(new Error(`Daemon unexpectedly exited: ${log}`));
      }
    });
  });
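
  // The daemon was spawned detached; unref() lets this step finish while the
  // daemon keeps running for the rest of the job.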
  daemon.unref();

  core.info('Launched Magic Nix Cache');
  core.exportVariable(ENV_CACHE_DAEMONDIR, daemonDir);
}
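
// Notify the daemon that the workflow has started via its workflow-start
// endpoint; failures are logged but not fatal.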
async function notifyAutoCache() {
  const daemonDir = process.env[ENV_CACHE_DAEMONDIR];
  if (!daemonDir) {
    return;
  }

  try {
    core.debug(`Indicating workflow start`);
    const res: any = await gotClient.post(`http://${core.getInput('listen')}/api/workflow-start`).json();
    core.debug(`back from post`);
    core.debug(res);
  } catch (e) {
    core.info(`Error marking the workflow as started:`);
    core.info(inspect(e));
    core.info(`Magic Nix Cache may not be running for this workflow.`);
  }
}
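
// Post-step cleanup: ask the daemon to finish via its workflow-finish
// endpoint, surface its log lines, then terminate it.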
async function tearDownAutoCache() {
  const daemonDir = process.env[ENV_CACHE_DAEMONDIR];
  if (!daemonDir) {
    core.debug('magic-nix-cache not started - Skipping');
    return;
  }

  const pidFile = path.join(daemonDir, 'daemon.pid');
  const pid = parseInt(await fs.readFile(pidFile, { encoding: 'ascii' }));
  core.debug(`found daemon pid: ${pid}`);
  if (!pid) {
    throw new Error("magic-nix-cache did not start successfully");
  }

  const log = new Tail(path.join(daemonDir, 'daemon.log'));
  core.debug(`tailing daemon.log...`);
  log.on('line', (line) => {
    core.debug(`got a log line`);
    core.info(line);
  });

  try {
    core.debug(`about to post to localhost`);
    const res: any = await gotClient.post(`http://${core.getInput('listen')}/api/workflow-finish`).json();
    core.debug(`back from post`);
    core.debug(res);
  } finally {
    await setTimeout(5000);
    core.debug(`unwatching the daemon log`);
    log.unwatch();
  }

  core.debug(`killing`);
  try {
    process.kill(pid, 'SIGTERM');
  } catch (e) {
    if (e.code !== 'ESRCH') {
      throw e;
    }
  }
}
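
// This file runs both as the main step and as the post step; the saved
// `isPost` state distinguishes the two phases.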
const isPost = !!process.env['STATE_isPost'];

try {
  if (!isPost) {
    core.saveState('isPost', 'true');
    await setUpAutoCache();
    await notifyAutoCache();
  } else {
    await tearDownAutoCache();
  }
} catch (e) {
  core.info(`got an exception:`);
  core.info(inspect(e));

  if (!isPost) {
    core.setFailed(e.message);
    throw e;
  } else {
    core.info("not considering this a failure: finishing the upload is optional, anyway.");
    process.exit();
  }
}

core.debug(`rip`);