Mirror of https://github.com/CringeStudios/element-desktop.git (synced 2025-01-31 05:29:58 +01:00)

Merge pull request #21 from vector-im/dbkr/hak

Native module builds: matrix-seshat for mac & win

Commit 76e40746b0
.eslintrc.js
@@ -5,6 +5,7 @@ module.exports = {
 ecmaVersion: 8,
 },
 env: {
+es6: true,
 node: true,
 // we also have some browser code (ie. the preload script)
 browser: true,
.gitignore (vendored, 2 additions)
@@ -6,3 +6,5 @@
 /node_modules
 /docker_node_modules
 /pkg/control
+/.hak
+/.yarnrc
hak/matrix-seshat/build.js (new file, 256 lines)

/*
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const path = require('path');
const childProcess = require('child_process');

const mkdirp = require('mkdirp');
const fsExtra = require('fs-extra');

module.exports = async function(hakEnv, moduleInfo) {
    if (hakEnv.isWin()) {
        await buildOpenSslWin(hakEnv, moduleInfo);
        await buildSqlCipherWin(hakEnv, moduleInfo);
    } else {
        await buildSqlCipherUnix(hakEnv, moduleInfo);
    }
    await buildMatrixSeshat(hakEnv, moduleInfo);
};

async function buildOpenSslWin(hakEnv, moduleInfo) {
    const openSslDir = path.join(moduleInfo.moduleDotHakDir, 'openssl-1.1.1d');

    const openSslArch = hakEnv.arch === 'x64' ? 'VC-WIN64A' : 'VC-WIN32';

    console.log("Building openssl in " + openSslDir);
    await new Promise((resolve, reject) => {
        const proc = childProcess.spawn(
            'perl',
            [
                'Configure',
                '--prefix=' + moduleInfo.depPrefix,
                // sqlcipher only uses about a tiny part of openssl. We link statically
                // so will only pull in the symbols we use, but we may as well turn off
                // as much as possible to save on build time.
                'no-afalgeng',
                'no-capieng',
                'no-cms',
                'no-ct',
                'no-deprecated',
                'no-dgram',
                'no-dso',
                'no-ec',
                'no-ec2m',
                'no-gost',
                'no-nextprotoneg',
                'no-ocsp',
                'no-sock',
                'no-srp',
                'no-srtp',
                'no-tests',
                'no-ssl',
                'no-tls',
                'no-dtls',
                'no-shared',
                'no-aria',
                'no-camellia',
                'no-cast',
                'no-chacha',
                'no-cmac',
                'no-des',
                'no-dh',
                'no-dsa',
                'no-ecdh',
                'no-ecdsa',
                'no-idea',
                'no-md4',
                'no-mdc2',
                'no-ocb',
                'no-poly1305',
                'no-rc2',
                'no-rc4',
                'no-rmd160',
                'no-scrypt',
                'no-seed',
                'no-siphash',
                'no-sm2',
                'no-sm3',
                'no-sm4',
                'no-whirlpool',
                openSslArch,
            ],
            {
                cwd: openSslDir,
                stdio: 'inherit',
            },
        );
        proc.on('exit', (code) => {
            code ? reject(code) : resolve();
        });
    });

    await new Promise((resolve, reject) => {
        const proc = childProcess.spawn(
            'nmake',
            ['build_libs'],
            {
                cwd: openSslDir,
                stdio: 'inherit',
            },
        );
        proc.on('exit', (code) => {
            code ? reject(code) : resolve();
        });
    });

    await new Promise((resolve, reject) => {
        const proc = childProcess.spawn(
            'nmake',
            ['install_dev'],
            {
                cwd: openSslDir,
                stdio: 'inherit',
            },
        );
        proc.on('exit', (code) => {
            code ? reject(code) : resolve();
        });
    });
}

async function buildSqlCipherWin(hakEnv, moduleInfo) {
    const sqlCipherDir = path.join(moduleInfo.moduleDotHakDir, 'sqlcipher-4.3.0');
    const buildDir = path.join(sqlCipherDir, 'bld');

    await mkdirp(buildDir);

    await new Promise((resolve, reject) => {
        const proc = childProcess.spawn(
            'nmake',
            ['/f', path.join('..', 'Makefile.msc'), 'libsqlite3.lib', 'TOP=..'],
            {
                cwd: buildDir,
                stdio: 'inherit',
                env: Object.assign({}, process.env, {
                    CCOPTS: "-DSQLITE_HAS_CODEC -I" + path.join(moduleInfo.depPrefix, 'include'),
                    LTLIBPATHS: "/LIBPATH:" + path.join(moduleInfo.depPrefix, 'lib'),
                    LTLIBS: "libcrypto.lib",
                }),
            },
        );
        proc.on('exit', (code) => {
            code ? reject(code) : resolve();
        });
    });

    await fsExtra.copy(
        path.join(buildDir, 'libsqlite3.lib'),
        path.join(moduleInfo.depPrefix, 'lib', 'sqlcipher.lib'),
    );

    await fsExtra.copy(
        path.join(buildDir, 'sqlite3.h'),
        path.join(moduleInfo.depPrefix, 'include', 'sqlcipher.h'),
    );
}

async function buildSqlCipherUnix(hakEnv, moduleInfo) {
    const sqlCipherDir = path.join(moduleInfo.moduleDotHakDir, 'sqlcipher-4.3.0');

    const args = [
        '--prefix=' + moduleInfo.depPrefix + '',
        '--enable-tempstore=yes',
        '--enable-shared=no',
    ];

    if (hakEnv.isMac()) {
        args.push('--with-crypto-lib=commoncrypto');
    }
    args.push('CFLAGS=-DSQLITE_HAS_CODEC');
    if (hakEnv.isMac()) {
        args.push('LDFLAGS=-framework Security -framework Foundation');
    }

    await new Promise((resolve, reject) => {
        const proc = childProcess.spawn(
            path.join(sqlCipherDir, 'configure'),
            args,
            {
                cwd: sqlCipherDir,
                stdio: 'inherit',
            },
        );
        proc.on('exit', (code) => {
            code ? reject(code) : resolve();
        });
    });

    await new Promise((resolve, reject) => {
        const proc = childProcess.spawn(
            'make',
            [],
            {
                cwd: sqlCipherDir,
                stdio: 'inherit',
            },
        );
        proc.on('exit', (code) => {
            code ? reject(code) : resolve();
        });
    });

    await new Promise((resolve, reject) => {
        const proc = childProcess.spawn(
            'make',
            ['install'],
            {
                cwd: sqlCipherDir,
                stdio: 'inherit',
            },
        );
        proc.on('exit', (code) => {
            code ? reject(code) : resolve();
        });
    });
}

async function buildMatrixSeshat(hakEnv, moduleInfo) {
    const env = Object.assign({
        SQLCIPHER_STATIC: 1,
        SQLCIPHER_LIB_DIR: path.join(moduleInfo.depPrefix, 'lib'),
        SQLCIPHER_INCLUDE_DIR: path.join(moduleInfo.depPrefix, 'include'),
    }, hakEnv.makeGypEnv());

    if (hakEnv.isWin()) {
        env.RUSTFLAGS = '-Ctarget-feature=+crt-static -Clink-args=libcrypto.lib';
    }

    console.log("Running neon with env", env);
    await new Promise((resolve, reject) => {
        const proc = childProcess.spawn(
            path.join(moduleInfo.nodeModuleBinDir, 'neon' + (hakEnv.isWin() ? '.cmd' : '')),
            ['build', '--release'],
            {
                cwd: moduleInfo.moduleBuildDir,
                env,
                stdio: 'inherit',
            },
        );
        proc.on('exit', (code) => {
            code ? reject(code) : resolve();
        });
    });
}
hak/matrix-seshat/check.js (new file, 57 lines)

/*
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const childProcess = require('child_process');

module.exports = async function(hakEnv, moduleInfo) {
    // of course tcl doesn't have a --version
    await new Promise((resolve, reject) => {
        const proc = childProcess.spawn('tclsh', [], {
            stdio: ['pipe', 'ignore', 'ignore'],
        });
        proc.on('exit', (code) => {
            if (code !== 0) {
                reject("Can't find tclsh - have you installed TCL?");
            } else {
                resolve();
            }
        });
        proc.stdin.end();
    });

    const tools = [];
    if (hakEnv.isWin()) {
        tools.push(['perl', '--version']); // for openssl configure
        tools.push(['nmake', '/?']);
    } else {
        tools.push(['make', '--version']);
    }

    for (const tool of tools) {
        await new Promise((resolve, reject) => {
            const proc = childProcess.spawn(tool[0], tool.slice(1), {
                stdio: ['ignore'],
            });
            proc.on('exit', (code) => {
                if (code !== 0) {
                    reject("Can't find " + tool);
                } else {
                    resolve();
                }
            });
        });
    }
};
hak/matrix-seshat/fetchDeps.js (new file, 125 lines)

/*
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const path = require('path');
const childProcess = require('child_process');

const fs = require('fs');
const fsProm = require('fs').promises;
const needle = require('needle');
const tar = require('tar');

module.exports = async function(hakEnv, moduleInfo) {
    await getSqlCipher(hakEnv, moduleInfo);

    if (hakEnv.isWin()) {
        getOpenSsl(hakEnv, moduleInfo);
    }
};

async function getSqlCipher(hakEnv, moduleInfo) {
    const sqlCipherDir = path.join(moduleInfo.moduleDotHakDir, 'sqlcipher-4.3.0');

    let haveSqlcipher;
    try {
        await fsProm.stat(sqlCipherDir);
        haveSqlcipher = true;
    } catch (e) {
        haveSqlcipher = false;
    }

    if (haveSqlcipher) return;

    const sqlCipherTarball = path.join(moduleInfo.moduleDotHakDir, 'sqlcipher-4.3.0.tar.gz');
    let haveSqlcipherTar;
    try {
        await fsProm.stat(sqlCipherTarball);
        haveSqlcipherTar = true;
    } catch (e) {
        haveSqlcipherTar = false;
    }
    if (!haveSqlcipherTar) {
        const bob = needle('get', 'https://github.com/sqlcipher/sqlcipher/archive/v4.3.0.tar.gz', {
            follow: 10,
            output: sqlCipherTarball,
        });
        await bob;
    }

    await tar.x({
        file: sqlCipherTarball,
        cwd: moduleInfo.moduleDotHakDir,
    });

    if (hakEnv.isWin()) {
        // On Windows, we need to patch the makefile because it forces TEMP_STORE to
        // default to files (1) but the README specifically says you '*must*' set it
        // to 2 (default to memory).
        const patchFile = path.join(moduleInfo.moduleHakDir, 'sqlcipher-4.3.0-win.patch');

        await new Promise((resolve, reject) => {
            const readStream = fs.createReadStream(patchFile);

            const proc = childProcess.spawn(
                'patch',
                ['-p1'],
                {
                    cwd: sqlCipherDir,
                    stdio: ['pipe', 'inherit', 'inherit'],
                },
            );
            proc.on('exit', (code) => {
                code ? reject(code) : resolve();
            });
            readStream.pipe(proc.stdin);
        });
    }
}

async function getOpenSsl(hakEnv, moduleInfo) {
    const openSslDir = path.join(moduleInfo.moduleDotHakDir, 'openssl-1.1.1d');

    let haveOpenSsl;
    try {
        await fsProm.stat(openSslDir);
        haveOpenSsl = true;
    } catch (e) {
        haveOpenSsl = false;
    }

    if (haveOpenSsl) return;

    const openSslTarball = path.join(moduleInfo.moduleDotHakDir, 'openssl-1.1.1d.tar.gz');
    let haveOpenSslTar;
    try {
        await fsProm.stat(openSslTarball);
        haveOpenSslTar = true;
    } catch (e) {
        haveOpenSslTar = false;
    }
    if (!haveOpenSslTar) {
        await needle('get', 'https://www.openssl.org/source/openssl-1.1.1d.tar.gz', {
            follow: 10,
            output: openSslTarball,
        });
    }

    console.log("extracting " + openSslTarball + " in " + moduleInfo.moduleDotHakDir);
    await tar.x({
        file: openSslTarball,
        cwd: moduleInfo.moduleDotHakDir,
    });
}
hak/matrix-seshat/hak.json (new file, 9 lines)

{
    "scripts": {
        "check": "check.js",
        "fetchDeps": "fetchDeps.js",
        "build": "build.js"
    },
    "prune": "native",
    "copy": "native/index.node"
}
hak/matrix-seshat/sqlcipher-4.3.0-win.patch (new file, 14 lines)

diff -Nur sqlcipher-4.3.0-orig/Makefile.msc sqlcipher-4.3.0-mod/Makefile.msc
--- sqlcipher-4.3.0-orig/Makefile.msc	2019-12-20 16:40:26.000000000 +0000
+++ sqlcipher-4.3.0-mod/Makefile.msc	2020-02-14 11:31:39.000000000 +0000
@@ -985,8 +985,8 @@
 # default to file, 2 to default to memory, and 3 to force temporary
 # tables to always be in memory.
 #
-TCC = $(TCC) -DSQLITE_TEMP_STORE=1
-RCC = $(RCC) -DSQLITE_TEMP_STORE=1
+TCC = $(TCC) -DSQLITE_TEMP_STORE=2
+RCC = $(RCC) -DSQLITE_TEMP_STORE=2
 
 # Enable/disable loadable extensions, and other optional features
 # based on configuration. (-DSQLITE_OMIT*, -DSQLITE_ENABLE*).
package.json (21 lines changed)
@@ -16,13 +16,14 @@
 "fetch": "yarn run mkdirs && node scripts/fetch-package.js",
 "setversion": "node scripts/set-version.js",
 "start": "electron .",
-"lint": "eslint src/",
+"lint": "eslint src/ scripts/ hak/",
 "build": "yarn run setversion && electron-builder",
 "in-docker": "scripts/in-docker.sh",
 "docker:build": "yarn run in-docker yarn run build",
 "docker:install": "yarn run in-docker yarn install",
 "debrepo": "scripts/mkrepo.sh",
-"clean": "rimraf webapp.asar dist packages deploys"
+"clean": "rimraf webapp.asar dist packages deploys",
+"hak": "node scripts/hak/index.js"
 },
 "dependencies": {
 "auto-launch": "^5.0.1",
@@ -40,11 +41,21 @@
 "eslint": "^5.8.0",
 "eslint-config-google": "^0.7.1",
 "eslint-plugin-babel": "^4.1.2",
+"find-npm-prefix": "^1.0.2",
 "follow-redirects": "^1.9.0",
+"fs-extra": "^8.1.0",
+"glob": "^7.1.6",
 "matrix-js-sdk": "^2.4.6-rc.1",
-"mkdirp": "^0.5.1",
-"rimraf": "^3.0.0",
-"tar": "^5.0.5"
+"mkdirp": "^1.0.3",
+"needle": "^2.3.2",
+"node-pre-gyp": "^0.14.0",
+"npm": "^6.13.7",
+"rimraf": "^3.0.2",
+"semver": "^7.1.3",
+"tar": "^6.0.1"
+},
+"hakDependencies": {
+"matrix-seshat": "^1.0.0"
 },
 "build": {
 "appId": "im.riot.app",
riot.im/config.json
@@ -1,5 +1,4 @@
 {
-"update_base_url": "https://packages.riot.im/desktop/update/",
 "default_server_name": "matrix.org",
 "brand": "Riot",
 "integrations_ui_url": "https://scalar.vector.im/",
@@ -31,7 +30,8 @@
 }
 },
 "features": {
-"feature_lazyloading": "enable"
+"feature_lazyloading": "enable",
+"feature_event_indexing": "labs"
 },
 "enable_presence_by_hs_url": {
 "https://matrix.org": false,
scripts/fetch-package.js
@@ -5,7 +5,7 @@ const path = require('path');
 const fs = require('fs');
 const fsPromises = require('fs').promises;
 const { https } = require('follow-redirects');
-const child_process = require('child_process');
+const childProcess = require('child_process');
 const tar = require('tar');
 const asar = require('asar');
 
@@ -43,7 +43,7 @@ async function downloadToFile(url, filename) {
 
 async function verifyFile(filename) {
 return new Promise((resolve, reject) => {
-const gpgProc = child_process.execFile('gpg', ['--verify', filename + '.asc', filename], (error) => {
+childProcess.execFile('gpg', ['--verify', filename + '.asc', filename], (error) => {
 if (error) {
 reject(error);
 } else {
@@ -93,7 +93,7 @@ async function main() {
 }
 
 const haveGpg = await new Promise((resolve) => {
-child_process.execFile('gpg', ['--version'], (error) => {
+childProcess.execFile('gpg', ['--version'], (error) => {
 resolve(!error);
 });
 });
@@ -105,7 +105,7 @@ async function main() {
 }
 
 await new Promise((resolve) => {
-const gpgProc = child_process.execFile('gpg', ['--import'], (error) => {
+const gpgProc = childProcess.execFile('gpg', ['--import'], (error) => {
 if (error) {
 console.log("Failed to import key", error);
 } else {
@@ -131,16 +131,16 @@ async function main() {
 console.log("To build with no config (and no auto-update), pass the empty string (-d '')");
 return 1;
 }
 
 if (verify && !haveGpg) {
 console.log("No working GPG binary: install GPG or pass --noverify to skip verification");
 return 1;
 }
 
-const haveDeploy = false;
+let haveDeploy = false;
 const expectedDeployDir = path.join(deployDir, 'riot-' + targetVersion);
 try {
-const webappDir = await fs.opendir(expectedDeployDir);
+await fs.opendir(expectedDeployDir);
 console.log(expectedDeployDir + "already exists");
 haveDeploy = true;
 } catch (e) {
scripts/hak/README.md (new file, 81 lines)

hak
===

This tool builds native dependencies for riot-desktop. Here follows some very minimal
documentation for it.

Goals:
 * Must build compiled native node modules in a shippable state
   (ie. only dynamically linked against libraries that will be on the
   target system, all unnecessary files removed).
 * Must be able to build any native module, no matter what build system
   it uses (electron-rebuild is supposed to do this job but only works
   for modules that use gyp).

It's also loosely designed to be a general tool and agnostic to what it's
actually building. It's used here to build modules for the electron app
but should work equally well for building modules for normal node.

Running
=======
Hak is invoked with a command and a dependency, eg. `yarn run hak fetch matrix-seshat`.
If no dependencies are given, hak runs the command on all dependencies.

Files
=====
There are a lot of files involved:

 * scripts/hak/... - The tool itself
 * hak/[dependency] - Files provided by the app that tell hak how to build each of its native dependencies.
   Contains a hak.json file and also some script files, each of which must be referenced in hak.json.
 * .hak/ - Files generated by hak in the course of doing its job. Includes the dependency module itself and
   any of the native dependency's native dependencies.
 * .hak/[dependency]/build - An extracted copy of the dependency's node module used to build it.
 * .hak/[dependency]/out - Another extracted copy of the dependency, this one contains only what will be shipped.

Workings
========
Hak works around native node modules that try to fetch or build their native component in
the npm 'install' phase - modules that do this will typically end up with native components
targeted to the build platform and the node that npm/yarn is using, which is no good for an
electron app.

Hak gets around this by installing the module with `--ignore-scripts` and then using `yarn link` to keep the
dependency module separate so yarn doesn't try to run its install / postinstall script
at other points (eg. whenever you `yarn add` a random other dependency).

This also means that the dependencies cannot be listed in `dependencies` or
`devDependencies` in the project, since this would cause npm / yarn to install them and
try to fetch their native parts. Instead, they are listed in `hakDependencies` which
hak reads to install them for you.

Hak will *not* install dependencies for the copy of the module it links into your
project, so if your native module has javascript dependencies that are actually needed at
runtime (and not just to fetch / build the native parts), it won't work.

Hak will generate a `.yarnrc` in the project directory to set the link directory to its
own in the .hak directory (unless one already exists, in which case this is your problem).

Lifecycle
=========
Hak is divided into lifecycle stages, in order:
 * fetch - Download and extract the source of the dependency
 * link - Link the copy of the dependency into your node_modules directory
 * fetchDeps - Fetch & extract any native dependencies required to build the module.
 * build - The Good Stuff. Configure and build any native dependencies, then the module itself.
 * copy - Copy the built artifact from the module build directory to the module output directory.

hak.json
========
The scripts section contains scripts used for lifecycle stages that need them (fetch, fetchDeps, build).
It also contains 'prune' and 'copy' which are globs of files to delete from the output module directory
and copy over from the module build directory to the output module directory, respectively.

Shortcomings
============
Hak doesn't know about dependencies between lifecycle stages, ie. it doesn't know that you need to
'fetch' and 'fetchDeps' before you can 'build', etc. You get to run each individually, and remember
the right order.

There is also a *lot* of duplication in the command execution: we should abstract away
some of the boilerplate required to run commands & so forth.
scripts/hak/build.js (new file, 21 lines)

/*
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

async function build(hakEnv, moduleInfo) {
    moduleInfo.scripts.build(hakEnv, moduleInfo);
}

module.exports = build;
scripts/hak/check.js (new file, 23 lines)

/*
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

async function check(hakEnv, moduleInfo) {
    if (moduleInfo.scripts.check) {
        await moduleInfo.scripts.check(hakEnv, moduleInfo);
    }
}

module.exports = check;
scripts/hak/clean.js (new file, 53 lines)

/*
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const path = require('path');

const rimraf = require('rimraf');

async function clean(hakEnv, moduleInfo) {
    await new Promise((resolve, reject) => {
        rimraf(moduleInfo.moduleDotHakDir, (err) => {
            if (err) {
                reject(err);
            } else {
                resolve();
            }
        });
    });

    await new Promise((resolve, reject) => {
        rimraf(path.join(hakEnv.dotHakDir, 'links', moduleInfo.name), (err) => {
            if (err) {
                reject(err);
            } else {
                resolve();
            }
        });
    });

    await new Promise((resolve, reject) => {
        rimraf(path.join(hakEnv.projectRoot, 'node_modules', moduleInfo.name), (err) => {
            if (err) {
                reject(err);
            } else {
                resolve();
            }
        });
    });
}

module.exports = clean;
scripts/hak/copy.js (new file, 67 lines)

/*
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const path = require('path');
const fsProm = require('fs').promises;

const rimraf = require('rimraf');
const glob = require('glob');
const mkdirp = require('mkdirp');

async function copy(hakEnv, moduleInfo) {
    if (moduleInfo.cfg.prune) {
        console.log("Removing " + moduleInfo.cfg.prune + " from " + moduleInfo.moduleOutDir);
        // rimraf doesn't have a 'cwd' option: it always uses process.cwd()
        // (and if you set glob.cwd it just breaks because it can't find the files)
        const oldCwd = process.cwd();
        try {
            process.chdir(moduleInfo.moduleOutDir);
            await new Promise((resolve, reject) => {
                rimraf(moduleInfo.cfg.prune, {}, err => {
                    err ? reject(err) : resolve();
                });
            });
        } finally {
            process.chdir(oldCwd);
        }
    }

    if (moduleInfo.cfg.copy) {
        console.log(
            "Copying " + moduleInfo.cfg.prune + " from " +
            moduleInfo.moduleOutDir + " to " + moduleInfo.moduleOutDir,
        );
        const files = await new Promise(async (resolve, reject) => {
            glob(moduleInfo.cfg.copy, {
                nosort: true,
                silent: true,
                cwd: moduleInfo.moduleBuildDir,
            }, (err, files) => {
                err ? reject(err) : resolve(files);
            });
        });
        for (const f of files) {
            console.log("\t" + f);
            const src = path.join(moduleInfo.moduleBuildDir, f);
            const dst = path.join(moduleInfo.moduleOutDir, f);

            await mkdirp(path.dirname(dst));
            await fsProm.copyFile(src, dst);
        }
    }
}

module.exports = copy;
scripts/hak/fetch.js (new file, 117 lines)

/*
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const path = require('path');
const url = require('url');
const fsProm = require('fs').promises;
const childProcess = require('child_process');

const npm = require('npm');
const semver = require('semver');
const needle = require('needle');
const mkdirp = require('mkdirp');
const tar = require('tar');

async function fetch(hakEnv, moduleInfo) {
    let haveModuleBuildDir;
    try {
        const stats = await fsProm.stat(moduleInfo.moduleBuildDir);
        haveModuleBuildDir = stats.isDirectory();
    } catch (e) {
        haveModuleBuildDir = false;
    }

    if (haveModuleBuildDir) return;

    await new Promise((resolve) => {
        npm.load({'loglevel': 'silent'}, resolve);
    });

    console.log("Fetching " + moduleInfo.name + " at version " + moduleInfo.version);
    const versions = await new Promise((resolve, reject) => {
        npm.view([
            moduleInfo.name + '@' + moduleInfo.version,
            'dist.tarball',
            (err, versions) => {
                if (err) {
                    reject(err);
                } else {
                    resolve(versions);
                }
            },
        ]);
    });

    const orderedVersions = Object.keys(versions);
    semver.sort(orderedVersions);

    console.log("Resolved version " + orderedVersions[0] + " for " + moduleInfo.name);

    const tarballUrl = versions[orderedVersions[0]]['dist.tarball'];

    await mkdirp(moduleInfo.moduleDotHakDir);

    const parsedUrl = url.parse(tarballUrl);
    const tarballFile = path.join(moduleInfo.moduleDotHakDir, path.basename(parsedUrl.path));

    let haveTarball;
    try {
        await fsProm.stat(tarballFile);
        haveTarball = true;
    } catch (e) {
        haveTarball = false;
    }
    if (!haveTarball) {
        console.log("Downloading " + tarballUrl);
        await needle('get', tarballUrl, { output: tarballFile });
    } else {
        console.log(tarballFile + " already exists.");
    }

    await mkdirp(moduleInfo.moduleBuildDir);

    await tar.x({
        file: tarballFile,
        cwd: moduleInfo.moduleBuildDir,
        strip: 1,
    });

    console.log("Running yarn install in " + moduleInfo.moduleBuildDir);
    await new Promise((resolve, reject) => {
        const proc = childProcess.spawn(
            hakEnv.isWin() ? 'yarn.cmd' : 'yarn',
            ['install', '--ignore-scripts'],
            {
                stdio: 'inherit',
                cwd: moduleInfo.moduleBuildDir,
            },
        );
        proc.on('exit', code => {
            code ? reject(code) : resolve();
        });
    });

    // also extract another copy to the output directory at this point
    // nb. we do not yarn install in the output copy
    await mkdirp(moduleInfo.moduleOutDir);
    await tar.x({
        file: tarballFile,
        cwd: moduleInfo.moduleOutDir,
        strip: 1,
    });
}

module.exports = fetch;
scripts/hak/fetchDeps.js (new file, 26 lines)

/*
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const mkdirp = require('mkdirp');

async function fetchDeps(hakEnv, moduleInfo) {
    await mkdirp(moduleInfo.moduleDotHakDir);
    if (moduleInfo.scripts.fetchDeps) {
        await moduleInfo.scripts.fetchDeps(hakEnv, moduleInfo);
    }
}

module.exports = fetchDeps;
scripts/hak/hakEnv.js (new file, 100 lines)

/*
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const path = require('path');
const os = require('os');

const nodePreGypVersioning = require('node-pre-gyp/lib/util/versioning');

function getElectronVersion(packageJson) {
    // should we pick the version of an installed electron
    // dependency, and if so, before or after electronVersion?
    if (packageJson.build && packageJson.build.electronVersion) {
        return packageJson.build.electronVersion;
    }
    return null;
}

function getRuntime(packageJson) {
    const electronVersion = getElectronVersion(packageJson);
    return electronVersion ? 'electron' : 'node-webkit';
}

function getTarget(packageJson) {
    const electronVersion = getElectronVersion(packageJson);
    if (electronVersion) {
        return electronVersion;
    } else {
        return process.version.substr(1);
    }
}

module.exports = class HakEnv {
    constructor(prefix, packageJson) {
        Object.assign(this, {
            // what we're targeting
            runtime: getRuntime(packageJson),
            target: getTarget(packageJson),
            platform: process.platform,
            arch: process.arch,

            // paths
            projectRoot: prefix,
            dotHakDir: path.join(prefix, '.hak'),
        });
    }

    getRuntimeAbi() {
        return nodePreGypVersioning.get_runtime_abi(
            this.runtime,
            this.target,
        );
    }

    // {node_abi}-{platform}-{arch}
    getNodeTriple() {
        return this.getRuntimeAbi() + '-' + this.platform + '-' + this.arch;
    }

    isWin() {
        return this.platform === 'win32';
    }

    isMac() {
        return this.platform === 'darwin';
    }

    isLinux() {
        return this.platform === 'linux';
    }

    makeGypEnv() {
        return Object.assign({}, process.env, {
            npm_config_target: this.target,
            npm_config_arch: this.arch,
            npm_config_target_arch: this.arch,
            npm_config_disturl: 'https://atom.io/download/electron',
            npm_config_runtime: this.runtime,
            npm_config_build_from_source: true,
            npm_config_devdir: path.join(os.homedir(), ".electron-gyp"),
        });
    }

    getNodeModuleBin(name) {
        return path.join(this.projectRoot, 'node_modules', '.bin', name);
    }
};
scripts/hak/index.js (new file, 134 lines)

/*
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const path = require('path');

const findNpmPrefix = require('find-npm-prefix');

const HakEnv = require('./hakEnv');

const GENERALCOMMANDS = [
    'target',
];

// These can only be run on specific modules
const MODULECOMMANDS = [
    'check',
    'fetch',
    'link',
    'fetchDeps',
    'build',
    'copy',
    'clean',
];

// Scripts valid in a hak.json 'scripts' section
const HAKSCRIPTS = [
    'check',
    'fetch',
    'fetchDeps',
    'build',
];

async function main() {
    const prefix = await findNpmPrefix(process.cwd());
    let packageJson;
    try {
        packageJson = require(path.join(prefix, "package.json"));
    } catch (e) {
        console.error("Can't find a package.json!");
        process.exit(1);
    }

    const hakEnv = new HakEnv(prefix, packageJson);

    const deps = {};

    const hakDepsCfg = packageJson.hakDependencies || {};

    for (const dep of Object.keys(hakDepsCfg)) {
        const hakJsonPath = path.join(prefix, 'hak', dep, 'hak.json');
        let hakJson;
        try {
            hakJson = await require(hakJsonPath);
        } catch (e) {
            console.error("No hak.json found for " + dep + ".");
            console.log("Expecting " + hakJsonPath);
            process.exit(1);
        }
        deps[dep] = {
            name: dep,
            version: hakDepsCfg[dep],
            cfg: hakJson,
            moduleHakDir: path.join(prefix, 'hak', dep),
            moduleDotHakDir: path.join(hakEnv.dotHakDir, dep),
            moduleBuildDir: path.join(hakEnv.dotHakDir, dep, 'build'),
            moduleOutDir: path.join(hakEnv.dotHakDir, dep, 'out'),
            nodeModuleBinDir: path.join(hakEnv.dotHakDir, dep, 'build', 'node_modules', '.bin'),
            depPrefix: path.join(hakEnv.dotHakDir, dep, 'opt'),
            scripts: {},
        };

        for (const s of HAKSCRIPTS) {
            if (hakJson.scripts && hakJson.scripts[s]) {
                deps[dep].scripts[s] = require(path.join(prefix, 'hak', dep, hakJson.scripts[s]));
            }
        }
    }

    if (process.argv.length < 3) {
        console.log("Usage: hak <command> [modules...]");
        process.exit(1);
    }

    const cmd = process.argv[2];
    if (GENERALCOMMANDS.includes(cmd)) {
        if (cmd === 'target') {
            console.log(hakEnv.getNodeTriple());
        }
        return;
    }

    if (!MODULECOMMANDS.includes(cmd)) {
        console.error("Unknown command: " + cmd);
        console.log("Commands I know about:");
        for (const cmd of MODULECOMMANDS) {
            console.log("\t" + cmd);
        }
        process.exit(1);
    }

    const cmdFunc = require('./' + cmd);

    let modules = process.argv.slice(3);

    if (modules.length === 0) modules = Object.keys(deps);

    for (const mod of modules) {
        const depInfo = deps[mod];
        if (depInfo === undefined) {
            console.log(
                "Module " + mod + " not found - is it in hakDependencies " +
                "in your package.json?",
            );
            process.exit(1);
        }
        console.log("hak " + cmd + ": " + mod);
        await cmdFunc(hakEnv, depInfo);
    }
}

main();
scripts/hak/link.js (new file, 70 lines)

/*
Copyright 2020 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

const path = require('path');
const os = require('os');
const fsProm = require('fs').promises;
const childProcess = require('child_process');

async function link(hakEnv, moduleInfo) {
    const yarnrc = path.join(hakEnv.projectRoot, '.yarnrc');
    // this is fairly terrible but it's reasonably clunky to either parse a yarnrc
    // properly or get yarn to do it, so this will probably suffice for now.
    // We just check to see if there is a local .yarnrc at all, and assume that
    // if you've put one there yourself, you probably know what you're doing and
    // we won't meddle with it.
    // Also we do this for each module which is unnecessary, but meh.
    try {
        await fsProm.stat(yarnrc);
    } catch (e) {
        await fsProm.writeFile(
            yarnrc,
            // XXX: 1. This must be absolute, as yarn will resolve link directories
            //      relative to the closest project root, which means when we run it
            //      in the dependency project, it will put the link directory in its
            //      own project folder rather than the main project.
            //      2. The parser gets very confused by strings with colons in them
            //      (ie. Windows absolute paths) but strings in quotes get parsed as
            //      JSON so need to be valid JSON encoded strings (ie. have the
            //      backslashes escaped). JSON.stringify will add quotes and escape.
            '--link-folder ' + JSON.stringify(path.join(hakEnv.dotHakDir, 'links')) + os.EOL,
        );
    }

    const yarnCmd = 'yarn' + (hakEnv.isWin() ? '.cmd' : '');

    await new Promise((resolve, reject) => {
        const proc = childProcess.spawn(yarnCmd, ['link'], {
            cwd: moduleInfo.moduleOutDir,
            stdio: 'inherit',
        });
        proc.on('exit', code => {
            code ? reject(code) : resolve();
        });
    });

    await new Promise((resolve, reject) => {
        const proc = childProcess.spawn(yarnCmd, ['link', moduleInfo.name], {
            cwd: hakEnv.projectRoot,
            stdio: 'inherit',
        });
        proc.on('exit', code => {
            code ? reject(code) : resolve();
        });
    });
}

module.exports = link;
scripts/set-version.js
@@ -7,11 +7,11 @@
 
 const fs = require('fs').promises;
 const asar = require('asar');
-const child_process = require('child_process');
+const childProcess = require('child_process');
 
 async function main() {
 try {
-const webappDir = await fs.stat('webapp.asar');
+await fs.stat('webapp.asar');
 } catch (e) {
 console.log("No 'webapp.asar' found. Run 'yarn run fetch'");
 return 1;
@@ -22,7 +22,7 @@ async function main() {
 // set version in package.json: electron-builder will use this to populate
 // all the various version fields
 await new Promise((resolve, reject) => {
-child_process.execFile('yarn', [
+childProcess.execFile('yarn', [
 'version',
 '-s',
 '--no-git-tag-version', // This also means "don't commit to git" as it turns out