#!/usr/bin/env node
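
// Fetch a prebuilt Element Web tarball and pack it into webapp.asar for the
// desktop app. By default this downloads the release matching package.json;
// pass 'develop' to fetch the latest passing develop build from Buildkite,
// or an explicit version tag to fetch that release.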

const process = require('process');
const path = require('path');
const fs = require('fs');
const fsPromises = require('fs').promises;
const childProcess = require('child_process');

const tar = require('tar');
const asar = require('asar');
const needle = require('needle');

const riotDesktopPackageJson = require('../package.json');
const { setPackageVersion } = require('./set-version.js');

const PUB_KEY_URL = "https://packages.riot.im/element-release-key.asc";
const PACKAGE_URL_PREFIX = "https://github.com/vector-im/element-web/releases/download/";
const ASAR_PATH = 'webapp.asar';
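
// Ask the Buildkite API for the most recent passing build of element-web's
// develop branch and return [filename, url] for its element-*.tar.gz artifact.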
async function getLatestDevelopUrl(bkToken) {
    const buildsResult = await needle('get',
        "https://api.buildkite.com/v2/organizations/matrix-dot-org/pipelines/element-web/builds",
        {
            branch: 'develop',
            state: 'passed',
            per_page: 1,
        },
        {
            headers: {
                authorization: "Bearer " + bkToken,
            },
        },
    );
    const latestBuild = buildsResult.body[0];
    console.log("Latest build is " + latestBuild.number);

    let artifactUrl;
    for (const job of latestBuild.jobs) {
        // Strip any colon-form emoji from the job name
        if (job.name && job.name.replace(/:\w*:\s*/, '') === 'Package') {
            artifactUrl = job.artifacts_url;
            break;
        }
    }
    if (artifactUrl === undefined) {
        throw new Error("Couldn't find artifact URL - has the name of the package job changed?");
    }

    const artifactsResult = await needle('get', artifactUrl, {},
        {
            headers: {
                authorization: "Bearer " + bkToken,
            },
        },
    );
    let dlUrl;
    let dlFilename;
    for (const artifact of artifactsResult.body) {
        if (artifact.filename && /^element-.*\.tar.gz$/.test(artifact.filename)) {
            dlUrl = artifact.download_url;
            dlFilename = artifact.filename;
            break;
        }
    }
    if (dlUrl === undefined) {
        throw new Error("Couldn't find artifact download URL - has the artifact filename changed?");
    }

    console.log("Fetching artifact URL...");
    const dlResult = await needle('get', dlUrl, {},
        {
            headers: {
                authorization: "Bearer " + bkToken,
            },
            // This URL will give us a Location header, but will also give us
            // a JSON object with the direct URL. We'll take the URL and pass it
            // back, then we can easily support specifying a URL directly.
            follow_max: 0,
        },
    );
    return [dlFilename, dlResult.body.url];
}
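
// Download url to filename on disk, cleaning up any partial file if the
// download fails.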
async function downloadToFile(url, filename) {
    console.log("Downloading " + url + "...");

    try {
        await needle('get', url, null,
            {
                follow_max: 5,
                output: filename,
            },
        );
    } catch (e) {
        try {
            await fsPromises.unlink(filename);
        } catch (_) {}
        throw e;
    }
}
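
// Check filename against its detached signature (filename + '.asc') with gpg;
// resolves if the signature verifies, rejects otherwise.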
async function verifyFile(filename) {
    return new Promise((resolve, reject) => {
        childProcess.execFile('gpg', ['--verify', filename + '.asc', filename], (error) => {
            if (error) {
                reject(error);
            } else {
                resolve();
            }
        });
    });
}
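
// Entry point: work out which webapp tarball to fetch, download (and optionally
// GPG-verify) it, extract it into the deploy directory, and pack the result
// into webapp.asar. Returns a non-zero exit code on failure.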
async function main() {
    let verify = true;
    let importkey = false;
    let pkgDir = 'packages';
    let deployDir = 'deploys';
    let cfgDir;
    let targetVersion;
    let filename;
    let url;
    let setVersion = false;
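
    // Parse command-line options; any bare argument is taken as the target version.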
    while (process.argv.length > 2) {
        switch (process.argv[2]) {
            case '--noverify':
                verify = false;
                break;
            case '--importkey':
                importkey = true;
                break;
            case '--packages':
                process.argv.shift();
                pkgDir = process.argv[2];
                break;
            case '--deploys':
                process.argv.shift();
                deployDir = process.argv[2];
                break;
            case '--cfgdir':
            case '-d':
                process.argv.shift();
                cfgDir = process.argv[2];
                break;
            default:
                targetVersion = process.argv[2];
        }
        process.argv.shift();
    }
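
    // Decide what to fetch: the version from package.json by default, the latest
    // develop build from Buildkite, or an explicit version tag.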
    if (targetVersion === undefined) {
        targetVersion = 'v' + riotDesktopPackageJson.version;
        filename = 'element-' + targetVersion + '.tar.gz';
        url = PACKAGE_URL_PREFIX + targetVersion + '/' + filename;
    } else if (targetVersion === 'develop') {
        const buildKiteApiKey = process.env.BUILDKITE_API_KEY;
        if (buildKiteApiKey === undefined) {
            console.log("Set BUILDKITE_API_KEY to fetch latest develop version");
            console.log(
                "Sorry - Buildkite's API requires authentication to access builds, " +
                "even if those builds are accessible on the web with no auth.",
            );
            process.exit(1);
        }

        [filename, url] = await getLatestDevelopUrl(buildKiteApiKey);
        verify = false; // develop builds aren't signed
    } else {
        filename = 'element-' + targetVersion + '.tar.gz';
        url = PACKAGE_URL_PREFIX + targetVersion + '/' + filename;
        setVersion = true;
    }
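
    // Signature verification and key import both need a working gpg binary.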
    const haveGpg = await new Promise((resolve) => {
        childProcess.execFile('gpg', ['--version'], (error) => {
            resolve(!error);
        });
    });
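
    // With --importkey, fetch the Element release key and import it into the
    // local GPG keyring, then exit.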
    if (importkey) {
        if (!haveGpg) {
            console.log("Can't import key without working GPG binary: install GPG and try again");
            return 1;
        }

        await new Promise((resolve) => {
            const gpgProc = childProcess.execFile('gpg', ['--import'], (error) => {
                if (error) {
                    console.log("Failed to import key", error);
                } else {
                    console.log("Key imported!");
                }
                resolve(!error);
            });
            needle.get(PUB_KEY_URL).pipe(gpgProc.stdin);
        });
        return 0;
    }

    if (cfgDir === undefined) {
        console.log("No config directory set");
        console.log("Specify a config directory with --cfgdir or -d");
        console.log("To build with no config (and no auto-update), pass the empty string (-d '')");
        return 1;
    }

    if (verify && !haveGpg) {
        console.log("No working GPG binary: install GPG or pass --noverify to skip verification");
        return 1;
    }
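
    // If this version has already been extracted into the deploy directory,
    // skip the download and extraction steps.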
    let haveDeploy = false;
    const expectedDeployDir = path.join(deployDir, path.basename(filename).replace(/\.tar\.gz/, ''));
    try {
        await fsPromises.opendir(expectedDeployDir);
        console.log(expectedDeployDir + " already exists");
        haveDeploy = true;
    } catch (e) {
    }
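
    // Download the tarball (plus its .asc signature when verifying) into the
    // packages directory, verify it if requested, then extract it into the
    // deploy directory.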
    if (!haveDeploy) {
        const outPath = path.join(pkgDir, filename);
        try {
            await fsPromises.stat(outPath);
            console.log("Already have " + filename + ": not redownloading");
        } catch (e) {
            try {
                await downloadToFile(url, outPath);
            } catch (e) {
                console.log("Failed to download " + url, e);
                return 1;
            }
        }

        if (verify) {
            try {
                await fsPromises.stat(outPath + '.asc');
                console.log("Already have " + filename + ".asc: not redownloading");
            } catch (e) {
                try {
                    await downloadToFile(url + '.asc', outPath + '.asc');
                } catch (e) {
                    console.log("Failed to download " + url + '.asc', e);
                    return 1;
                }
            }

            try {
                await verifyFile(outPath);
                console.log(outPath + " downloaded and verified");
            } catch (e) {
                console.log("Signature verification failed!", e);
                return 1;
            }
        } else {
            console.log(outPath + " downloaded but NOT verified");
        }

        await tar.x({
            file: outPath,
            cwd: deployDir,
        });
    }
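
    // Remove any previously-built webapp.asar before repacking.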
    try {
        await fsPromises.stat(ASAR_PATH);
        console.log(ASAR_PATH + " already present: removing");
        await fsPromises.unlink(ASAR_PATH);
    } catch (e) {
    }
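
    // Copy config.json from the chosen config directory into the deploy,
    // unless an empty config dir was given (-d '').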
    if (cfgDir.length) {
        const configJsonSource = path.join(cfgDir, 'config.json');
        const configJsonDest = path.join(expectedDeployDir, 'config.json');
        console.log(configJsonSource + ' -> ' + configJsonDest);
        await fsPromises.copyFile(configJsonSource, configJsonDest);
    } else {
        console.log("Skipping config file");
    }

    console.log("Pack " + expectedDeployDir + " -> " + ASAR_PATH);
    await asar.createPackage(expectedDeployDir, ASAR_PATH);
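
    // When an explicit version was requested, update package.json to match
    // (stripping the leading 'v' to get a semver string).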
    if (setVersion) {
        const semVer = targetVersion.slice(1);
        console.log("Updating version to " + semVer);
        await setPackageVersion(semVer);
    }

    console.log("Done!");
}

main().then((ret) => process.exit(ret)).catch((e) => {
    console.error(e);
    process.exit(1);
});