🎨 Run prettier

trickypr 2021-09-20 11:10:52 +10:00
parent 7a81587f9f
commit 4714fb4bc8
42 changed files with 1786 additions and 2445 deletions

@@ -1,2 +1,3 @@
# melon
🍉 Build Firefox-based browsers with ease

@@ -5,7 +5,8 @@
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"build": "tsc"
"build": "tsc",
"format": "prettier . -w"
},
"repository": {
"type": "git",

@@ -16,146 +16,133 @@ import {
run,
setBranch,
status,
test
} from "./commands";
import { Cmd } from "./types";
test,
} from './commands'
import { Cmd } from './types'
export const commands: Cmd[] = [
{
cmd: "bootstrap",
description: "Bootstrap Dot Browser.",
controller: bootstrap
cmd: 'bootstrap',
description: 'Bootstrap Dot Browser.',
controller: bootstrap,
},
{
cmd: "build [os]",
aliases: ["b"],
cmd: 'build [os]',
aliases: ['b'],
description:
"Build Dot Browser. Specify the OS param for cross-platform builds.",
'Build Dot Browser. Specify the OS param for cross-platform builds.',
options: [
{
arg: "--a, --arch <architecture>",
description:
"Specify architecture for build"
}
arg: '--a, --arch <architecture>',
description: 'Specify architecture for build',
},
],
controller: build
controller: build,
},
{
cmd: "discard <file>",
description: "Discard a files changes.",
cmd: 'discard <file>',
description: 'Discard a files changes.',
options: [
{
arg: "--keep, --keep-patch",
description:
"Keep the patch file instead of removing it"
}
arg: '--keep, --keep-patch',
description: 'Keep the patch file instead of removing it',
},
],
controller: discard
controller: discard,
},
{
cmd: "download [ffVersion]",
description: "Download Firefox.",
controller: download
cmd: 'download [ffVersion]',
description: 'Download Firefox.',
controller: download,
},
{
cmd: "download-artifacts",
description:
"Download Windows artifacts from GitHub.",
cmd: 'download-artifacts',
description: 'Download Windows artifacts from GitHub.',
flags: {
platforms: ["win32"]
platforms: ['win32'],
},
controller: downloadArtifacts
controller: downloadArtifacts,
},
{
cmd: "execute",
description:
"Execute a command inside the engine directory.",
controller: execute
cmd: 'execute',
description: 'Execute a command inside the engine directory.',
controller: execute,
},
{
cmd: "export-file <file>",
description: "Export a changed file as a patch.",
controller: exportFile
cmd: 'export-file <file>',
description: 'Export a changed file as a patch.',
controller: exportFile,
},
{
cmd: "export",
aliases: ["export-patches"],
description:
"Export the changed files as patches.",
controller: exportPatches
cmd: 'export',
aliases: ['export-patches'],
description: 'Export the changed files as patches.',
controller: exportPatches,
},
{
cmd: "lfify",
aliases: ["fix-le"],
description:
"Convert CRLF line endings to Unix LF line endings.",
controller: fixLineEndings
cmd: 'lfify',
aliases: ['fix-le'],
description: 'Convert CRLF line endings to Unix LF line endings.',
controller: fixLineEndings,
},
{
cmd: "import [type]",
aliases: ["import-patches", "i"],
description: "Import patches into the browser.",
cmd: 'import [type]',
aliases: ['import-patches', 'i'],
description: 'Import patches into the browser.',
options: [
{
arg: "-m, --minimal",
description:
"Import patches in minimal mode"
arg: '-m, --minimal',
description: 'Import patches in minimal mode',
},
{
arg: "--noignore",
description:
"Bypass .gitignore. You shouldn't really use this."
}
arg: '--noignore',
description: "Bypass .gitignore. You shouldn't really use this.",
},
],
controller: importPatches
controller: importPatches,
},
{
cmd: "init <source>",
aliases: ["initialise", "initialize"],
description: "Initialise the Firefox directory.",
controller: init
cmd: 'init <source>',
aliases: ['initialise', 'initialize'],
description: 'Initialise the Firefox directory.',
controller: init,
},
{
cmd: "license-check",
aliases: ["lc"],
description:
"Check the src directory for the absence of MPL-2.0 header.",
controller: licenseCheck
cmd: 'license-check',
aliases: ['lc'],
description: 'Check the src directory for the absence of MPL-2.0 header.',
controller: licenseCheck,
},
{
cmd: "package",
aliases: ["pack"],
description:
"Package the browser for distribution.",
controller: melonPackage
cmd: 'package',
aliases: ['pack'],
description: 'Package the browser for distribution.',
controller: melonPackage,
},
{
cmd: "reset",
description:
"Reset the source directory to stock Firefox.",
controller: reset
cmd: 'reset',
description: 'Reset the source directory to stock Firefox.',
controller: reset,
},
{
cmd: "run [chrome]",
aliases: ["r", "open"],
description: "Run the browser.",
controller: run
cmd: 'run [chrome]',
aliases: ['r', 'open'],
description: 'Run the browser.',
controller: run,
},
{
cmd: "set-branch <branch>",
description: "Change the default branch.",
controller: setBranch
cmd: 'set-branch <branch>',
description: 'Change the default branch.',
controller: setBranch,
},
{
cmd: "status",
description:
"Status and files changed for src directory.",
controller: status
cmd: 'status',
description: 'Status and files changed for src directory.',
controller: status,
},
{
cmd: "test",
description:
"Run the test suite for Dot Browser.",
controller: test
}
];
cmd: 'test',
description: 'Run the test suite for Dot Browser.',
controller: test,
},
]

@@ -1,89 +1,81 @@
/// <reference path="./linus.d.ts"/>
import distro from "linus";
import { bin_name } from "..";
import { log } from "../";
import { ENGINE_DIR } from "../constants";
import { dispatch } from "../utils";
import { pacmanInstall } from "./bootstrap/arch";
import distro from 'linus'
import { bin_name } from '..'
import { log } from '../'
import { ENGINE_DIR } from '../constants'
import { dispatch } from '../utils'
import { pacmanInstall } from './bootstrap/arch'
export const bootstrap = async () => {
if (process.platform == "win32")
if (process.platform == 'win32')
log.error(
`You do not need to bootstrap on Windows. As long as you ran |${bin_name} download-artifacts| everything should work fine.`
);
)
log.info(`Bootstrapping Dot Browser for Desktop...`);
log.info(`Bootstrapping Dot Browser for Desktop...`)
const args = ["--application-choice", "browser"];
const args = ['--application-choice', 'browser']
if (process.platform === "linux") {
linuxBootstrap();
if (process.platform === 'linux') {
linuxBootstrap()
} else {
console.info(
`Custom bootstrapping doesn't work on ${process.platform}. Consider contributing to improve support`
);
)
console.info(
`Passing through to |mach bootstrap|`
);
console.info(`Passing through to |mach bootstrap|`)
await dispatch(
`./mach`,
["bootstrap", ...args],
ENGINE_DIR
);
await dispatch(`./mach`, ['bootstrap', ...args], ENGINE_DIR)
}
}
};
function getDistro(): Promise<string> {
return new Promise((resolve, reject) => {
distro.name((err: Error, name: string) => {
if (name) resolve(name);
if (name) resolve(name)
else {
reject(
err || "Failed to get linux distro"
);
reject(err || 'Failed to get linux distro')
}
});
});
})
})
}
async function linuxBootstrap() {
const distro = await getDistro();
const distro = await getDistro()
switch (distro) {
// Both arch and manjaro use the same package repo and the same package manager
case "ManjaroLinux":
case "ArchLinux":
case 'ManjaroLinux':
case 'ArchLinux':
console.log(
await pacmanInstall(
// Shared packages
"base-devel",
"nodejs",
"unzip",
"zip",
'base-devel',
'nodejs',
'unzip',
'zip',
// Needed for desktop apps
"alsa-lib",
"dbus-glib",
"gtk3",
"libevent",
"libvpx",
"libxt",
"mime-types",
"nasm",
"startup-notification",
"gst-plugins-base-libs",
"libpulse",
"xorg-server-xvfb",
"gst-libav",
"gst-plugins-good"
'alsa-lib',
'dbus-glib',
'gtk3',
'libevent',
'libvpx',
'libxt',
'mime-types',
'nasm',
'startup-notification',
'gst-plugins-base-libs',
'libpulse',
'xorg-server-xvfb',
'gst-libav',
'gst-plugins-good'
)
);
break;
)
break
default:
log.error(`Unimplemented distro '${distro}'`);
log.error(`Unimplemented distro '${distro}'`)
}
}

@@ -1,14 +1,6 @@
import execa from "execa";
import execa from 'execa'
export async function pacmanInstall(
...packages: string[]
): Promise<string> {
return (
await execa("sudo", [
"pacman",
"--noconfirm",
"-S",
...packages
])
).stdout;
export async function pacmanInstall(...packages: string[]): Promise<string> {
return (await execa('sudo', ['pacman', '--noconfirm', '-S', ...packages]))
.stdout
}

@@ -1,168 +1,112 @@
import execa from "execa";
import {
existsSync,
readFileSync,
writeFileSync
} from "fs";
import { join, resolve } from "path";
import { bin_name, log } from "..";
import execa from 'execa'
import { existsSync, readFileSync, writeFileSync } from 'fs'
import { join, resolve } from 'path'
import { bin_name, log } from '..'
import {
ARCHITECTURE,
BUILD_TARGETS,
CONFIGS_DIR,
ENGINE_DIR
} from "../constants";
import { dispatch } from "../utils";
ENGINE_DIR,
} from '../constants'
import { dispatch } from '../utils'
const platform: any = {
win32: "windows",
darwin: "macos",
linux: "linux"
};
win32: 'windows',
darwin: 'macos',
linux: 'linux',
}
const applyConfig = async (os: string, arch: string) => {
log.info("Applying mozconfig...");
log.info('Applying mozconfig...')
let commonConfig = readFileSync(
resolve(CONFIGS_DIR, "common", "mozconfig"),
"utf-8"
);
resolve(CONFIGS_DIR, 'common', 'mozconfig'),
'utf-8'
)
const changesetPrefix = commonConfig
.split("\n")
.find((ln) =>
ln.startsWith("export MOZ_SOURCE_CHANGESET=")
);
.split('\n')
.find((ln) => ln.startsWith('export MOZ_SOURCE_CHANGESET='))
const changeset = changesetPrefix?.replace(
/export MOZ_SOURCE_CHANGESET=/,
""
);
const changeset = changesetPrefix?.replace(/export MOZ_SOURCE_CHANGESET=/, '')
const { stdout: gitSha } = await execa("git", [
"rev-parse",
"HEAD"
]);
const { stdout: gitSha } = await execa('git', ['rev-parse', 'HEAD'])
console.log(changeset, gitSha);
console.log(changeset, gitSha)
if (changeset)
commonConfig = commonConfig.replace(
changeset,
gitSha
);
if (changeset) commonConfig = commonConfig.replace(changeset, gitSha)
writeFileSync(
resolve(CONFIGS_DIR, "common", "mozconfig"),
commonConfig
);
writeFileSync(resolve(CONFIGS_DIR, 'common', 'mozconfig'), commonConfig)
const osConfig = readFileSync(
resolve(
CONFIGS_DIR,
os,
arch === "i686"
? "mozconfig-i686"
: "mozconfig"
),
"utf-8"
);
resolve(CONFIGS_DIR, os, arch === 'i686' ? 'mozconfig-i686' : 'mozconfig'),
'utf-8'
)
// Allow a custom config to be placed in /mozconfig. This will not be committed
// to origin
const customConfig = existsSync(
join(process.cwd(), "mozconfig")
)
? readFileSync(
join(process.cwd(), "mozconfig")
).toString()
: "";
const customConfig = existsSync(join(process.cwd(), 'mozconfig'))
? readFileSync(join(process.cwd(), 'mozconfig')).toString()
: ''
const mergedConfig = `# This file is automatically generated. You should only modify this if you know what you are doing!\n\n${commonConfig}\n\n${osConfig}\n\n${customConfig}`;
const mergedConfig = `# This file is automatically generated. You should only modify this if you know what you are doing!\n\n${commonConfig}\n\n${osConfig}\n\n${customConfig}`
writeFileSync(
resolve(ENGINE_DIR, "mozconfig"),
mergedConfig
);
writeFileSync(resolve(ENGINE_DIR, 'mozconfig'), mergedConfig)
log.info(`Config for this \`${os}\` build:`);
log.info(`Config for this \`${os}\` build:`)
mergedConfig.split("\n").map((ln) => {
if (
ln.startsWith("mk") ||
ln.startsWith("ac") ||
ln.startsWith("export")
)
mergedConfig.split('\n').map((ln) => {
if (ln.startsWith('mk') || ln.startsWith('ac') || ln.startsWith('export'))
log.info(
`\t${ln
.replace(/mk_add_options /, "")
.replace(/ac_add_options /, "")
.replace(/export /, "")}`
);
});
};
.replace(/mk_add_options /, '')
.replace(/ac_add_options /, '')
.replace(/export /, '')}`
)
})
}
const genericBuild = async (os: string, tier: string) => {
log.info(`Building for "${os}"...`);
log.info(`Building for "${os}"...`)
log.warning(
`If you get any dependency errors, try running |${bin_name} bootstrap|.`
);
)
await dispatch(
`./mach`,
["build"].concat(tier ? [tier] : []),
ENGINE_DIR
);
};
await dispatch(`./mach`, ['build'].concat(tier ? [tier] : []), ENGINE_DIR)
}
const parseDate = (d: number) => {
d = d / 1000;
var h = Math.floor(d / 3600);
var m = Math.floor((d % 3600) / 60);
var s = Math.floor((d % 3600) % 60);
d = d / 1000
var h = Math.floor(d / 3600)
var m = Math.floor((d % 3600) / 60)
var s = Math.floor((d % 3600) % 60)
var hDisplay =
h > 0
? h + (h == 1 ? " hour, " : " hours, ")
: "";
var mDisplay =
m > 0
? m + (m == 1 ? " minute, " : " minutes, ")
: "";
var sDisplay =
s > 0
? s + (s == 1 ? " second" : " seconds")
: "";
return hDisplay + mDisplay + sDisplay;
};
var hDisplay = h > 0 ? h + (h == 1 ? ' hour, ' : ' hours, ') : ''
var mDisplay = m > 0 ? m + (m == 1 ? ' minute, ' : ' minutes, ') : ''
var sDisplay = s > 0 ? s + (s == 1 ? ' second' : ' seconds') : ''
return hDisplay + mDisplay + sDisplay
}
const success = (date: number) => {
// mach handles the success messages
console.log();
log.info(
`Total build time: ${parseDate(
Date.now() - date
)}.`
);
};
interface Options {
arch: string;
console.log()
log.info(`Total build time: ${parseDate(Date.now() - date)}.`)
}
export const build = async (
tier: string,
options: Options
) => {
let d = Date.now();
interface Options {
arch: string
}
export const build = async (tier: string, options: Options) => {
let d = Date.now()
// Host build
const prettyHost = platform[process.platform as any];
const prettyHost = platform[process.platform as any]
if (BUILD_TARGETS.includes(prettyHost)) {
let arch = "64bit";
let arch = '64bit'
if (options.arch) {
if (!ARCHITECTURE.includes(options.arch))
@@ -172,16 +116,14 @@ export const build = async (
}" build right now.\nWe only currently support ${JSON.stringify(
ARCHITECTURE
)}.`
);
else arch = options.arch;
)
else arch = options.arch
}
applyConfig(prettyHost, options.arch);
applyConfig(prettyHost, options.arch)
setTimeout(async () => {
await genericBuild(prettyHost, tier).then(
(_) => success(d)
);
}, 2500);
await genericBuild(prettyHost, tier).then((_) => success(d))
}, 2500)
}
}
};

@@ -1,71 +1,61 @@
import execa from "execa";
import { existsSync, statSync } from "fs";
import { resolve } from "path";
import rimraf from "rimraf";
import { log } from "..";
import { ENGINE_DIR, PATCHES_DIR } from "../constants";
import execa from 'execa'
import { existsSync, statSync } from 'fs'
import { resolve } from 'path'
import rimraf from 'rimraf'
import { log } from '..'
import { ENGINE_DIR, PATCHES_DIR } from '../constants'
interface Options {
keep?: boolean;
fromRemote?: string;
keep?: boolean
fromRemote?: string
}
const remotes = {
ff: (file: string, version: string) =>
`https://hg.mozilla.org/experimental/firefox-unified-stage/raw-file/FIREFOX_${version
.replace(" ", "_")
.replace(".", "_")}_RELEASE/${file}`,
.replace(' ', '_')
.replace('.', '_')}_RELEASE/${file}`,
dot: (file: string, ref: string) =>
`https://raw.githubusercontent.com/dothq/browser-desktop/${ref}/${file}`
};
`https://raw.githubusercontent.com/dothq/browser-desktop/${ref}/${file}`,
}
export const discard = async (
file: string,
options: Options
) => {
log.info(`Discarding ${file}...`);
export const discard = async (file: string, options: Options) => {
log.info(`Discarding ${file}...`)
if (!statSync(file).isFile())
throw new Error("Target must be a file.");
if (!statSync(file).isFile()) throw new Error('Target must be a file.')
// @todo add remote discard
if (options.fromRemote) {
if (
options.fromRemote == "ff" ||
options.fromRemote == "firefox"
) {
} else if (options.fromRemote == "dot") {
if (options.fromRemote == 'ff' || options.fromRemote == 'firefox') {
} else if (options.fromRemote == 'dot') {
} else {
throw new Error(
"Unrecognised remote type. Expected `ff` or `dot`."
);
throw new Error('Unrecognised remote type. Expected `ff` or `dot`.')
}
} else {
if (!existsSync(resolve(ENGINE_DIR, file)))
throw new Error(
`File ${file} could not be found in src directory. Check the path for any mistakes and try again.`
);
)
const patchFile = resolve(
PATCHES_DIR,
file.replace(/\//g, "-").replace(/\./g, "-") +
".patch"
);
file.replace(/\//g, '-').replace(/\./g, '-') + '.patch'
)
if (!existsSync(patchFile))
throw new Error(
`File ${file} does have an associated patch in the patches directory.`
);
)
const { stdout, exitCode } = await execa(
"git",
["apply", "-R", "-p", "1", patchFile],
'git',
['apply', '-R', '-p', '1', patchFile],
{ cwd: ENGINE_DIR }
);
)
if (exitCode == 0) {
log.success(`Discarded changes to ${file}.`);
if (!options.keep) rimraf.sync(patchFile);
} else throw new Error(stdout);
log.success(`Discarded changes to ${file}.`)
if (!options.keep) rimraf.sync(patchFile)
} else throw new Error(stdout)
}
}
};

@@ -1,81 +1,61 @@
import axios from "axios";
import execa from "execa";
import fs from "fs";
import { homedir } from "os";
import { posix, resolve, sep } from "path";
import { log } from "..";
import axios from 'axios'
import execa from 'execa'
import fs from 'fs'
import { homedir } from 'os'
import { posix, resolve, sep } from 'path'
import { log } from '..'
export const downloadArtifacts = async () => {
if (process.platform !== "win32")
if (process.platform !== 'win32')
return log.error(
"This is not a Windows machine, will not download artifacts."
);
'This is not a Windows machine, will not download artifacts.'
)
if (process.env.MOZILLABUILD)
return log.error(
"Run this command in Git Bash, it does not work in Mozilla Build."
);
'Run this command in Git Bash, it does not work in Mozilla Build.'
)
const filename = "mozbuild.tar.bz2";
const url = `https://github.com/dothq/windows-artifacts/releases/latest/download/mozbuild.tar.bz2`;
let home = homedir().split(sep).join(posix.sep);
const filename = 'mozbuild.tar.bz2'
const url = `https://github.com/dothq/windows-artifacts/releases/latest/download/mozbuild.tar.bz2`
let home = homedir().split(sep).join(posix.sep)
if (process.platform == "win32") {
home =
"/" +
home
.replace(/\:/, "")
.replace(/\\/g, "/")
.toLowerCase();
if (process.platform == 'win32') {
home = '/' + home.replace(/\:/, '').replace(/\\/g, '/').toLowerCase()
}
log.info(`Downloading Windows artifacts...`);
log.info(`Downloading Windows artifacts...`)
const { data, headers } = await axios.get(url, {
responseType: "stream"
});
responseType: 'stream',
})
const length = headers["content-length"];
const length = headers['content-length']
const writer = fs.createWriteStream(
resolve(process.cwd(), filename)
);
const writer = fs.createWriteStream(resolve(process.cwd(), filename))
let receivedBytes = 0;
let receivedBytes = 0
data.on("data", (chunk: any) => {
receivedBytes += chunk.length;
data.on('data', (chunk: any) => {
receivedBytes += chunk.length
let rand = Math.floor(Math.random() * 1000 + 1);
let rand = Math.floor(Math.random() * 1000 + 1)
if (rand > 999.5) {
let percentCompleted = parseInt(
Math.round(
(receivedBytes * 100) / length
).toFixed(0)
);
if (
percentCompleted % 2 == 0 ||
percentCompleted >= 100
Math.round((receivedBytes * 100) / length).toFixed(0)
)
return;
log.info(
`\t${filename}\t${percentCompleted}%...`
);
if (percentCompleted % 2 == 0 || percentCompleted >= 100) return
log.info(`\t${filename}\t${percentCompleted}%...`)
}
});
})
data.pipe(writer);
data.pipe(writer)
data.on("end", async () => {
log.info("Unpacking mozbuild...");
data.on('end', async () => {
log.info('Unpacking mozbuild...')
await execa("tar", [
"-xvf",
filename,
"-C",
home
]);
await execa('tar', ['-xvf', filename, '-C', home])
log.info("Done extracting mozbuild artifacts.");
});
};
log.info('Done extracting mozbuild artifacts.')
})
}

@@ -1,154 +1,135 @@
import axios from "axios";
import chalk from "chalk";
import execa from "execa";
import fs, {
existsSync,
rmdirSync,
writeFileSync
} from "fs";
import { ensureDirSync, removeSync } from "fs-extra";
import ora from "ora";
import { homedir } from "os";
import { posix, resolve, sep } from "path";
import { bin_name, log } from "..";
import { ENGINE_DIR } from "../constants";
import { getLatestFF, writeMetadata } from "../utils";
import { downloadArtifacts } from "./download-artifacts";
import axios from 'axios'
import chalk from 'chalk'
import execa from 'execa'
import fs, { existsSync, rmdirSync, writeFileSync } from 'fs'
import { ensureDirSync, removeSync } from 'fs-extra'
import ora from 'ora'
import { homedir } from 'os'
import { posix, resolve, sep } from 'path'
import { bin_name, log } from '..'
import { ENGINE_DIR } from '../constants'
import { getLatestFF, writeMetadata } from '../utils'
import { downloadArtifacts } from './download-artifacts'
const pjson = require("../../package.json");
const pjson = require('../../package.json')
let initProgressText = "Initialising...";
let initProgressText = 'Initialising...'
let initProgress: any = ora({
text: `Initialising...`,
prefixText: chalk.blueBright.bold("00:00:00"),
prefixText: chalk.blueBright.bold('00:00:00'),
spinner: {
frames: [""]
frames: [''],
},
indent: 0
});
indent: 0,
})
const onData = (data: any) => {
const d = data.toString();
const d = data.toString()
d.split("\n").forEach((line: any) => {
d.split('\n').forEach((line: any) => {
if (line.trim().length !== 0) {
let t = line.split(" ");
t.shift();
initProgressText = t.join(" ");
let t = line.split(' ')
t.shift()
initProgressText = t.join(' ')
}
})
}
});
};
const unpack = async (name: string, version: string) => {
let cwd = process.cwd().split(sep).join(posix.sep);
let cwd = process.cwd().split(sep).join(posix.sep)
if (process.platform == "win32") {
cwd = "./";
if (process.platform == 'win32') {
cwd = './'
}
initProgress.start();
initProgress.start()
setInterval(() => {
if (initProgress) {
initProgress.text = initProgressText;
initProgress.prefixText =
chalk.blueBright.bold(log.getDiff());
initProgress.text = initProgressText
initProgress.prefixText = chalk.blueBright.bold(log.getDiff())
}
}, 100);
}, 100)
initProgressText = `Unpacking Firefox...`;
initProgressText = `Unpacking Firefox...`
try {
rmdirSync(ENGINE_DIR);
rmdirSync(ENGINE_DIR)
} catch (e) {}
ensureDirSync(ENGINE_DIR);
ensureDirSync(ENGINE_DIR)
let tarProc = execa("tar", [
"--transform",
"s,firefox-89.0,engine,",
let tarProc = execa('tar', [
'--transform',
's,firefox-89.0,engine,',
`--show-transformed`,
"-xf",
resolve(cwd, ".dotbuild", "engines", name)
]);
'-xf',
resolve(cwd, '.dotbuild', 'engines', name),
])
(tarProc.stdout as any).on("data", onData);
(tarProc.stdout as any).on("error", onData);
;(tarProc.stdout as any).on('data', onData)
;(tarProc.stdout as any).on('error', onData)
tarProc.on("exit", () => {
if (process.env.CI_SKIP_INIT)
return log.info("Skipping initialisation.");
tarProc.on('exit', () => {
if (process.env.CI_SKIP_INIT) return log.info('Skipping initialisation.')
const initProc = execa(`./${bin_name}`, [
"init",
"engine"
]);
const initProc = execa(`./${bin_name}`, ['init', 'engine'])
(initProc.stdout as any).on("data", onData);
(initProc.stdout as any).on("error", onData);
;(initProc.stdout as any).on('data', onData)
;(initProc.stdout as any).on('error', onData)
initProc.on("exit", async () => {
initProgressText = "";
initProgress.stop();
initProgress = null;
initProc.on('exit', async () => {
initProgressText = ''
initProgress.stop()
initProgress = null
await new Promise((resolve) =>
setTimeout(resolve, 5000)
);
await new Promise((resolve) => setTimeout(resolve, 5000))
log.success(
`You should be ready to make changes to Dot Browser.\n\n\t You should import the patches next, run |${bin_name} import|.\n\t To begin building Dot, run |${bin_name} build|.`
);
console.log();
)
console.log()
pjson.versions["firefox-display"] = version;
pjson.versions["firefox"] =
version.split("b")[0];
pjson.versions['firefox-display'] = version
pjson.versions['firefox'] = version.split('b')[0]
writeFileSync(
resolve(process.cwd(), "package.json"),
resolve(process.cwd(), 'package.json'),
JSON.stringify(pjson, null, 4)
);
)
await writeMetadata();
await writeMetadata()
removeSync(
resolve(cwd, ".dotbuild", "engines", name)
);
removeSync(resolve(cwd, '.dotbuild', 'engines', name))
process.exit(0);
});
});
};
process.exit(0)
})
})
}
export const download = async (
firefoxVersion?: string
) => {
export const download = async (firefoxVersion?: string) => {
if (firefoxVersion)
log.warning(
`A custom Firefox version is being used. Some features of Dot may not work as expected.`
);
)
if (!firefoxVersion) {
firefoxVersion =
pjson.versions["firefox-display"];
firefoxVersion = pjson.versions['firefox-display']
}
let version = await getLatestFF();
let version = await getLatestFF()
if (firefoxVersion) {
version = firefoxVersion;
version = firefoxVersion
}
const base = `https://archive.mozilla.org/pub/firefox/releases/${version}/source/`;
const filename = `firefox-${version}.source.tar.xz`;
const base = `https://archive.mozilla.org/pub/firefox/releases/${version}/source/`
const filename = `firefox-${version}.source.tar.xz`
const url = `${base}${filename}`;
const url = `${base}${filename}`
log.info(`Locating Firefox release ${version}...`);
log.info(`Locating Firefox release ${version}...`)
ensureDirSync(
resolve(process.cwd(), `.dotbuild`, `engines`)
);
ensureDirSync(resolve(process.cwd(), `.dotbuild`, `engines`))
if (
existsSync(
@@ -156,28 +137,26 @@ export const download = async (
process.cwd(),
`.dotbuild`,
`engines`,
`firefox-${version.split("b")[0]}`
`firefox-${version.split('b')[0]}`
)
)
) {
log.error(
`Cannot download version ${
version.split("b")[0]
version.split('b')[0]
} as it already exists at "${resolve(
process.cwd(),
`firefox-${version.split("b")[0]}`
`firefox-${version.split('b')[0]}`
)}"`
);
)
}
if (version == firefoxVersion)
log.info(
`Version is frozen at ${firefoxVersion}!`
);
if (version.includes("b"))
log.info(`Version is frozen at ${firefoxVersion}!`)
if (version.includes('b'))
log.warning(
"Version includes non-numeric characters. This is probably a beta."
);
'Version includes non-numeric characters. This is probably a beta.'
)
if (
fs.existsSync(
@@ -185,85 +164,60 @@ export const download = async (
process.cwd(),
`.dotbuild`,
`engines`,
"firefox",
version.split("b")[0]
'firefox',
version.split('b')[0]
)
) ||
fs.existsSync(
resolve(
process.cwd(),
"firefox",
"firefox-" + version.split("b")[0]
)
resolve(process.cwd(), 'firefox', 'firefox-' + version.split('b')[0])
)
)
log.error(
`Workspace with version "${
version.split("b")[0]
version.split('b')[0]
}" already exists.\nRemove that workspace and run |${bin_name} download ${version}| again.`
);
)
log.info(`Downloading Firefox release ${version}...`);
log.info(`Downloading Firefox release ${version}...`)
const { data, headers } = await axios.get(url, {
responseType: "stream"
});
responseType: 'stream',
})
const length = headers["content-length"];
const length = headers['content-length']
const writer = fs.createWriteStream(
resolve(
process.cwd(),
`.dotbuild`,
`engines`,
filename
resolve(process.cwd(), `.dotbuild`, `engines`, filename)
)
);
let receivedBytes = 0;
let receivedBytes = 0
data.on("data", (chunk: any) => {
receivedBytes += chunk.length;
data.on('data', (chunk: any) => {
receivedBytes += chunk.length
let rand = Math.floor(Math.random() * 1000 + 1);
let rand = Math.floor(Math.random() * 1000 + 1)
if (rand > 999.5) {
let percentCompleted = parseInt(
Math.round(
(receivedBytes * 100) / length
).toFixed(0)
);
if (
percentCompleted % 2 == 0 ||
percentCompleted >= 100
Math.round((receivedBytes * 100) / length).toFixed(0)
)
return;
log.info(
`\t${filename}\t${percentCompleted}%...`
);
if (percentCompleted % 2 == 0 || percentCompleted >= 100) return
log.info(`\t${filename}\t${percentCompleted}%...`)
}
});
})
data.pipe(writer);
data.pipe(writer)
data.on("end", async () => {
await unpack(filename, version);
data.on('end', async () => {
await unpack(filename, version)
if (process.platform === "win32") {
if (
existsSync(
resolve(homedir(), ".mozbuild")
)
) {
log.info(
"Mozbuild directory already exists, not redownloading"
);
if (process.platform === 'win32') {
if (existsSync(resolve(homedir(), '.mozbuild'))) {
log.info('Mozbuild directory already exists, not redownloading')
} else {
log.info(
"Mozbuild not found, downloading artifacts."
);
await downloadArtifacts();
log.info('Mozbuild not found, downloading artifacts.')
await downloadArtifacts()
}
}
});
};
})
}

@@ -1,26 +1,24 @@
import { existsSync } from "fs";
import { log } from "..";
import { ENGINE_DIR } from "../constants";
import { dispatch } from "../utils";
import { existsSync } from 'fs'
import { log } from '..'
import { ENGINE_DIR } from '../constants'
import { dispatch } from '../utils'
export const execute = async (_: any, cmd: any[]) => {
if (existsSync(ENGINE_DIR)) {
if (!cmd || cmd.length == 0)
log.error(
"You need to specify a command to run."
);
log.error('You need to specify a command to run.')
const bin = cmd[0];
const args = cmd;
args.shift();
const bin = cmd[0]
const args = cmd
args.shift()
log.info(
`Executing \`${bin}${
args.length !== 0 ? ` ` : ``
}${args.join(" ")}\` in \`src\`...`
);
dispatch(bin, args, ENGINE_DIR, true);
`Executing \`${bin}${args.length !== 0 ? ` ` : ``}${args.join(
' '
)}\` in \`src\`...`
)
dispatch(bin, args, ENGINE_DIR, true)
} else {
log.error(`Unable to locate src directory.`);
log.error(`Unable to locate src directory.`)
}
}
};

@@ -1,64 +1,56 @@
import execa from "execa";
import { existsSync, writeFileSync } from "fs";
import { ensureDirSync } from "fs-extra";
import { resolve } from "path";
import { log } from "..";
import { ENGINE_DIR, SRC_DIR } from "../constants";
import { delay } from "../utils";
import execa from 'execa'
import { existsSync, writeFileSync } from 'fs'
import { ensureDirSync } from 'fs-extra'
import { resolve } from 'path'
import { log } from '..'
import { ENGINE_DIR, SRC_DIR } from '../constants'
import { delay } from '../utils'
export const exportFile = async (file: string) => {
log.info(`Exporting ${file}...`);
log.info(`Exporting ${file}...`)
if (!existsSync(resolve(ENGINE_DIR, file)))
throw new Error(
`File ${file} could not be found in engine directory. Check the path for any mistakes and try again.`
);
)
const proc = await execa(
"git",
'git',
[
"diff",
"--src-prefix=a/",
"--dst-prefix=b/",
"--full-index",
resolve(ENGINE_DIR, file)
'diff',
'--src-prefix=a/',
'--dst-prefix=b/',
'--full-index',
resolve(ENGINE_DIR, file),
],
{
cwd: ENGINE_DIR,
stripFinalNewline: false
stripFinalNewline: false,
}
);
)
const name =
file
.split("/")
[
file.replace(/\./g, "-").split("/")
.length - 1
].replace(/\./g, "-") + ".patch";
.split('/')
[file.replace(/\./g, '-').split('/').length - 1].replace(/\./g, '-') +
'.patch'
const patchPath = file
.replace(/\./g, "-")
.split("/")
.slice(0, -1);
const patchPath = file.replace(/\./g, '-').split('/').slice(0, -1)
ensureDirSync(resolve(SRC_DIR, ...patchPath));
ensureDirSync(resolve(SRC_DIR, ...patchPath))
if (proc.stdout.length >= 8000) {
log.warning("");
log.warning('')
log.warning(
`Exported patch is over 8000 characters. This patch may become hard to manage in the future.`
);
)
log.warning(
`We recommend trying to decrease your patch size by making minimal edits to the source.`
);
log.warning("");
await delay(2000);
)
log.warning('')
await delay(2000)
}
writeFileSync(
resolve(SRC_DIR, ...patchPath, name),
proc.stdout
);
log.info(`Wrote "${name}" to patches directory.`);
console.log();
};
writeFileSync(resolve(SRC_DIR, ...patchPath, name), proc.stdout)
log.info(`Wrote "${name}" to patches directory.`)
console.log()
}

@@ -1,218 +1,163 @@
import execa from "execa";
import execa from 'execa'
import {
appendFileSync,
createWriteStream,
existsSync,
mkdirSync,
rmdirSync,
writeFileSync
} from "fs";
import { copySync, ensureDirSync } from "fs-extra";
import { resolve } from "path";
import { log } from "..";
import {
COMMON_DIR,
ENGINE_DIR,
PATCHES_DIR
} from "../constants";
import manualPatches from "../manual-patches";
writeFileSync,
} from 'fs'
import { copySync, ensureDirSync } from 'fs-extra'
import { resolve } from 'path'
import { log } from '..'
import { COMMON_DIR, ENGINE_DIR, PATCHES_DIR } from '../constants'
import manualPatches from '../manual-patches'
const flags: {
[key: string]: string;
[key: string]: string
} = {
D: "delete",
M: "modify",
A: "add"
};
D: 'delete',
M: 'modify',
A: 'add',
}
const getFiles = async (flags: string, cwd: string) => {
let { stdout: ignored } = await execa(
"git",
[
"ls-files",
`-${flags.toLowerCase()}`,
"-i",
"-o",
"--exclude-standard"
],
'git',
['ls-files', `-${flags.toLowerCase()}`, '-i', '-o', '--exclude-standard'],
{ cwd }
);
)
let { stdout: fls } = await execa(
"git",
[
"diff",
`--diff-filter=${flags}`,
"--name-only",
"--ignore-space-at-eol"
],
'git',
['diff', `--diff-filter=${flags}`, '--name-only', '--ignore-space-at-eol'],
{ cwd }
);
)
const files = fls.split("\n").filter((i: any) => {
return !(
ignored.split("\n").includes(i) ||
i == ".gitignore"
);
}); // this filters out the manual patches
const files = fls.split('\n').filter((i: any) => {
return !(ignored.split('\n').includes(i) || i == '.gitignore')
}) // this filters out the manual patches
log.info(
`Ignoring ${ignored.split("\n").length} files...`
);
log.info(`Ignoring ${ignored.split('\n').length} files...`)
const fileNames: any = files.map((f: any) => {
if (f.length !== 0) {
return (
f
.replace(/\//g, "-")
.replace(/\./g, "-") + ".patch"
);
return f.replace(/\//g, '-').replace(/\./g, '-') + '.patch'
}
});
})
return { files, fileNames };
};
return { files, fileNames }
}
const exportModified = async (
patchesDir: string,
cwd: string
) => {
const { files, fileNames } = await getFiles("M", cwd);
const exportModified = async (patchesDir: string, cwd: string) => {
const { files, fileNames } = await getFiles('M', cwd)
var filesWritten = 0;
var filesWritten = 0
await Promise.all(
files.map(async (file: any, i: any) => {
if (file) {
try {
const proc = execa(
"git",
'git',
[
"diff",
"--src-prefix=a/",
"--dst-prefix=b/",
"--full-index",
file
'diff',
'--src-prefix=a/',
'--dst-prefix=b/',
'--full-index',
file,
],
{
cwd,
stripFinalNewline: false
stripFinalNewline: false,
}
);
const name = fileNames[i];
proc.stdout?.pipe(
createWriteStream(
resolve(patchesDir, name)
)
);
const name = fileNames[i]
appendFileSync(
resolve(PATCHES_DIR, ".index"),
`${name} - ${file}\n`
);
proc.stdout?.pipe(createWriteStream(resolve(patchesDir, name)))
++filesWritten;
appendFileSync(resolve(PATCHES_DIR, '.index'), `${name} - ${file}\n`)
++filesWritten
} catch (e) {
log.error(e);
return;
log.error(e)
return
}
}
})
);
)
log.info(
`Wrote ${filesWritten} to patches directory.`
);
};
log.info(`Wrote ${filesWritten} to patches directory.`)
}
const exportFlag = async (
flag: string,
cwd: string,
actions: any[]
) => {
const { files } = await getFiles(flag, cwd);
const exportFlag = async (flag: string, cwd: string, actions: any[]) => {
const { files } = await getFiles(flag, cwd)
actions.push({
action: flags[flag],
target: files
});
target: files,
})
return actions;
};
return actions
}
const exportManual = async (cwd: string) => {
return new Promise(async (resol) => {
manualPatches.forEach((patch) => {
if (patch.action == "copy") {
if (typeof patch.src == "string") {
const inSrc = resolve(cwd, patch.src);
const outsideSrc = resolve(
COMMON_DIR,
patch.src
);
if (patch.action == 'copy') {
if (typeof patch.src == 'string') {
const inSrc = resolve(cwd, patch.src)
const outsideSrc = resolve(COMMON_DIR, patch.src)
if (!existsSync(inSrc))
return log.error(
`Cannot find "${patch.src}" from manual patches.`
);
if (!existsSync(outsideSrc))
ensureDirSync(outsideSrc); // make sure target dir exists before copying
return log.error(`Cannot find "${patch.src}" from manual patches.`)
if (!existsSync(outsideSrc)) ensureDirSync(outsideSrc) // make sure target dir exists before copying
copySync(inSrc, outsideSrc);
copySync(inSrc, outsideSrc)
} else if (Array.isArray(patch.src)) {
patch.src.forEach((p) => {
const inSrc = resolve(cwd, p);
const outsideSrc = resolve(
COMMON_DIR,
p
);
const inSrc = resolve(cwd, p)
const outsideSrc = resolve(COMMON_DIR, p)
if (!existsSync(inSrc))
return log.error(
`Cannot find "${p}" from manual patches.`
);
if (!existsSync(outsideSrc))
ensureDirSync(outsideSrc); // make sure target dir exists before copying
return log.error(`Cannot find "${p}" from manual patches.`)
if (!existsSync(outsideSrc)) ensureDirSync(outsideSrc) // make sure target dir exists before copying
copySync(inSrc, outsideSrc);
});
copySync(inSrc, outsideSrc)
})
}
}
});
});
};
})
})
}
export const exportPatches = async () => {
throw new Error(
"export-patches has been deprecated in favour of export-file. This change has been made to limit the amount of active patches we have in the tree."
);
'export-patches has been deprecated in favour of export-file. This change has been made to limit the amount of active patches we have in the tree.'
)
let actions: any[] = [];
let actions: any[] = []
log.info(`Wiping patches directory...`);
console.log();
log.info(`Wiping patches directory...`)
console.log()
// TODO: Replace this with fs.rmSync(path, { recursive: true }) when node 12 is deprecated
// This function has been depriciated, however its replacement was only available
// from v14.14.0 onwards (https://nodejs.org/dist/latest-v16.x/docs/api/fs.html#fs_fs_rmsync_path_options)
rmdirSync(PATCHES_DIR, { recursive: true });
mkdirSync(PATCHES_DIR);
writeFileSync(resolve(PATCHES_DIR, ".index"), "");
rmdirSync(PATCHES_DIR, { recursive: true })
mkdirSync(PATCHES_DIR)
writeFileSync(resolve(PATCHES_DIR, '.index'), '')
log.info("Exporting modified files...");
await exportModified(PATCHES_DIR, ENGINE_DIR);
console.log();
log.info('Exporting modified files...')
await exportModified(PATCHES_DIR, ENGINE_DIR)
console.log()
log.info("Exporting deleted files...");
await exportFlag("D", ENGINE_DIR, actions);
console.log();
log.info('Exporting deleted files...')
await exportFlag('D', ENGINE_DIR, actions)
console.log()
log.info("Exporting manual patches...");
await exportManual(ENGINE_DIR);
console.log();
log.info('Exporting manual patches...')
await exportManual(ENGINE_DIR)
console.log()
copySync(
resolve(ENGINE_DIR, "dot"),
resolve(process.cwd(), "browser")
);
};
copySync(resolve(ENGINE_DIR, 'dot'), resolve(process.cwd(), 'browser'))
}

@@ -1,49 +1,28 @@
import {
existsSync,
readdirSync,
readFileSync
} from "fs-extra";
import { resolve } from "path";
import { log } from "..";
import { ENGINE_DIR, PATCHES_DIR } from "../constants";
import { dispatch } from "../utils";
import { existsSync, readdirSync, readFileSync } from 'fs-extra'
import { resolve } from 'path'
import { log } from '..'
import { ENGINE_DIR, PATCHES_DIR } from '../constants'
import { dispatch } from '../utils'
export const fixLineEndings = async () => {
let patches = readdirSync(PATCHES_DIR);
let patches = readdirSync(PATCHES_DIR)
patches = patches.filter((p) => p !== ".index");
patches = patches.filter((p) => p !== '.index')
await Promise.all(
patches.map(async (patch) => {
const patchContents = readFileSync(
resolve(PATCHES_DIR, patch),
"utf-8"
);
const patchContents = readFileSync(resolve(PATCHES_DIR, patch), 'utf-8')
const originalPath = patchContents
.split("diff --git a/")[1]
.split(" b/")[0];
.split('diff --git a/')[1]
.split(' b/')[0]
if (
existsSync(
resolve(ENGINE_DIR, originalPath)
)
) {
dispatch(
"dos2unix",
[originalPath],
ENGINE_DIR
).then(async (_) => {
await dispatch(
"dos2unix",
[patch],
PATCHES_DIR
);
});
if (existsSync(resolve(ENGINE_DIR, originalPath))) {
dispatch('dos2unix', [originalPath], ENGINE_DIR).then(async (_) => {
await dispatch('dos2unix', [patch], PATCHES_DIR)
})
} else {
log.warning(
`Skipping ${patch} as it no longer exists in tree...`
);
log.warning(`Skipping ${patch} as it no longer exists in tree...`)
}
})
);
};
)
}

@@ -1,141 +1,115 @@
import { sync } from "glob";
import { bin_name, log } from "..";
import { SRC_DIR } from "../constants";
import Patch from "../controllers/patch";
import manualPatches from "../manual-patches";
import { delay, dispatch } from "../utils";
import { sync } from 'glob'
import { bin_name, log } from '..'
import { SRC_DIR } from '../constants'
import Patch from '../controllers/patch'
import manualPatches from '../manual-patches'
import { delay, dispatch } from '../utils'
const {
versions: { dot }
} = require("../../package.json");
versions: { dot },
} = require('../../package.json')
const importManual = async (
minimal?: boolean,
noIgnore?: boolean
) => {
log.info(
`Applying ${manualPatches.length} manual patches...`
);
const importManual = async (minimal?: boolean, noIgnore?: boolean) => {
log.info(`Applying ${manualPatches.length} manual patches...`)
if (!minimal) console.log();
if (!minimal) console.log()
await delay(500);
await delay(500)
return new Promise(async (res, rej) => {
var total = 0;
var total = 0
var i = 0;
var i = 0
for await (let {
name,
action,
src,
markers,
indent
} of manualPatches) {
++i;
for await (let { name, action, src, markers, indent } of manualPatches) {
++i
const p = new Patch({
name,
action,
src,
type: "manual",
type: 'manual',
status: [i, manualPatches.length],
markers,
indent,
options: {
minimal,
noIgnore
}
});
noIgnore,
},
})
await delay(100);
await delay(100)
await p.apply();
await p.apply()
}
log.success(
`Successfully imported ${manualPatches.length} manual patches!`
);
console.log();
log.success(`Successfully imported ${manualPatches.length} manual patches!`)
console.log()
await delay(1000);
await delay(1000)
res(total);
});
};
res(total)
})
}
const importPatchFiles = async (
minimal?: boolean,
noIgnore?: boolean
) => {
let patches = sync("**/*.patch", {
const importPatchFiles = async (minimal?: boolean, noIgnore?: boolean) => {
let patches = sync('**/*.patch', {
nodir: true,
cwd: SRC_DIR
});
cwd: SRC_DIR,
})
patches = patches
.filter((p) => p !== ".index")
.filter((p) => !p.includes("node_modules"));
.filter((p) => p !== '.index')
.filter((p) => !p.includes('node_modules'))
log.info(`Applying ${patches.length} patch files...`);
log.info(`Applying ${patches.length} patch files...`)
if (!minimal) console.log();
if (!minimal) console.log()
await delay(500);
await delay(500)
var i = 0;
var i = 0
for await (const patch of patches) {
++i;
++i
const p = new Patch({
name: patch,
type: "file",
type: 'file',
status: [i, patches.length],
options: {
minimal,
noIgnore
}
});
noIgnore,
},
})
await delay(100);
await delay(100)
await p.apply();
await p.apply()
}
console.log();
console.log()
await dispatch(
`./${bin_name}`,
["doctor", "patches"],
['doctor', 'patches'],
process.cwd(),
true,
true
);
)
log.success(
`Successfully imported ${patches.length} patch files!`
);
};
log.success(`Successfully imported ${patches.length} patch files!`)
}
interface Args {
minimal?: boolean;
noignore?: boolean;
minimal?: boolean
noignore?: boolean
}
export const importPatches = async (
type: string,
args: Args
) => {
export const importPatches = async (type: string, args: Args) => {
if (type) {
if (type == "manual")
await importManual(args.minimal);
else if (type == "file")
await importPatchFiles(args.minimal);
if (type == 'manual') await importManual(args.minimal)
else if (type == 'file') await importPatchFiles(args.minimal)
} else {
await importManual(args.minimal, args.noignore);
await importPatchFiles(
args.minimal,
args.noignore
);
await importManual(args.minimal, args.noignore)
await importPatchFiles(args.minimal, args.noignore)
}
}
};

@@ -1,18 +1,18 @@
export * from "./bootstrap";
export * from "./build";
export * from "./discard";
export * from "./download";
export * from "./download-artifacts";
export * from "./execute";
export * from "./export-file";
export * from "./export-patches";
export * from "./fix-le";
export * from "./import-patches";
export * from "./init";
export * from "./license-check";
export * from "./package";
export * from "./reset";
export * from "./run";
export * from "./set-branch";
export * from "./status";
export * from "./test";
export * from './bootstrap'
export * from './build'
export * from './discard'
export * from './download'
export * from './download-artifacts'
export * from './execute'
export * from './export-file'
export * from './export-patches'
export * from './fix-le'
export * from './import-patches'
export * from './init'
export * from './license-check'
export * from './package'
export * from './reset'
export * from './run'
export * from './set-branch'
export * from './status'
export * from './test'

@@ -1,68 +1,53 @@
import { Command } from "commander";
import { existsSync, readFileSync } from "fs";
import { resolve } from "path";
import { bin_name, log } from "..";
import { dispatch } from "../utils";
import { Command } from 'commander'
import { existsSync, readFileSync } from 'fs'
import { resolve } from 'path'
import { bin_name, log } from '..'
import { dispatch } from '../utils'
export const init = async (directory: Command) => {
if (process.platform == "win32") {
if (process.platform == 'win32') {
// Because Windows cannot handle paths correctly, we're just calling a script as the workaround.
log.info(
"Successfully downloaded browser source. Please run |./windows-init.sh| to finish up."
);
process.exit(0);
'Successfully downloaded browser source. Please run |./windows-init.sh| to finish up.'
)
process.exit(0)
}
const cwd = process.cwd();
const cwd = process.cwd()
const dir = resolve(
cwd as string,
directory.toString()
);
const dir = resolve(cwd as string, directory.toString())
if (!existsSync(dir)) {
log.error(
`Directory "${directory}" not found.\nCheck the directory exists and run |${bin_name} init| again.`
);
)
}
let version = readFileSync(
resolve(
cwd,
directory.toString(),
"browser",
"config",
"version_display.txt"
'browser',
'config',
'version_display.txt'
),
"utf-8"
);
'utf-8'
)
if (!version)
log.error(
`Directory "${directory}" not found.\nCheck the directory exists and run |${bin_name} init| again.`
);
)
version = version.trim().replace(/\\n/g, "");
version = version.trim().replace(/\\n/g, '')
await dispatch("git", ["init"], dir as string);
await dispatch('git', ['init'], dir as string)
await dispatch('git', ['checkout', '--orphan', version], dir as string)
await dispatch('git', ['add', '-v', '-f', '.'], dir as string)
await dispatch(
"git",
["checkout", "--orphan", version],
'git',
['commit', '-am', `"Firefox ${version}"`],
dir as string
);
await dispatch(
"git",
["add", "-v", "-f", "."],
dir as string
);
await dispatch(
"git",
["commit", "-am", `"Firefox ${version}"`],
dir as string
);
await dispatch(
"git",
["checkout", "-b", "dot"],
dir as string
);
};
)
await dispatch('git', ['checkout', '-b', 'dot'], dir as string)
}

@@ -1,92 +1,69 @@
import chalk from "chalk";
import { readdirSync, readFileSync } from "fs-extra";
import { resolve } from "path";
import { log } from "..";
import { ENGINE_DIR, PATCHES_DIR } from "../constants";
import chalk from 'chalk'
import { readdirSync, readFileSync } from 'fs-extra'
import { resolve } from 'path'
import { log } from '..'
import { ENGINE_DIR, PATCHES_DIR } from '../constants'
const ignoredExt = [".json", ".bundle.js"];
const ignoredExt = ['.json', '.bundle.js']
export const licenseCheck = async () => {
log.info("Checking project...");
log.info('Checking project...')
let patches = readdirSync(PATCHES_DIR).map((p) => p);
let patches = readdirSync(PATCHES_DIR).map((p) => p)
patches = patches.filter((p) => p !== ".index");
patches = patches.filter((p) => p !== '.index')
const originalPaths = patches.map((p) => {
const data = readFileSync(
resolve(PATCHES_DIR, p),
"utf-8"
);
const data = readFileSync(resolve(PATCHES_DIR, p), 'utf-8')
return data
.split("diff --git a/")[1]
.split(" b/")[0];
});
return data.split('diff --git a/')[1].split(' b/')[0]
})
let passed: string[] = [];
let failed: string[] = [];
let ignored: string[] = [];
let passed: string[] = []
let failed: string[] = []
let ignored: string[] = []
originalPaths.forEach((p) => {
const data = readFileSync(
resolve(ENGINE_DIR, p),
"utf-8"
);
const headerRegion = data
.split("\n")
.slice(0, 32)
.join(" ");
const data = readFileSync(resolve(ENGINE_DIR, p), 'utf-8')
const headerRegion = data.split('\n').slice(0, 32).join(' ')
const passes =
headerRegion.includes(
"http://mozilla.org/MPL/2.0"
) &&
headerRegion.includes(
"This Source Code Form"
) &&
headerRegion.includes("copy of the MPL");
headerRegion.includes('http://mozilla.org/MPL/2.0') &&
headerRegion.includes('This Source Code Form') &&
headerRegion.includes('copy of the MPL')
const isIgnored = ignoredExt.find((i) =>
p.endsWith(i)
)
? true
: false;
isIgnored && ignored.push(p);
const isIgnored = ignoredExt.find((i) => p.endsWith(i)) ? true : false
isIgnored && ignored.push(p)
if (!isIgnored) {
if (passes) passed.push(p);
else if (!passes) failed.push(p);
if (passes) passed.push(p)
else if (!passes) failed.push(p)
}
});
})
let maxPassed = 5;
let i = 0;
let maxPassed = 5
let i = 0
for (const p of passed) {
log.info(
`${p}... ${chalk.green("✔ Pass - MPL-2.0")}`
);
log.info(`${p}... ${chalk.green('✔ Pass - MPL-2.0')}`)
if (i >= maxPassed) {
log.info(
`${chalk.gray.italic(
`${
passed.length - maxPassed
} other files...`
)} ${chalk.green("✔ Pass - MPL-2.0")}`
);
break;
`${passed.length - maxPassed} other files...`
)} ${chalk.green('✔ Pass - MPL-2.0')}`
)
break
}
++i;
++i
}
failed.forEach((p, i) => {
log.info(`${p}... ${chalk.red("❗ Failed")}`);
});
log.info(`${p}... ${chalk.red('❗ Failed')}`)
})
ignored.forEach((p, i) => {
log.info(`${p}... ${chalk.gray(" Ignored")}`);
});
};
log.info(`${p}... ${chalk.gray(' Ignored')}`)
})
}

@@ -1,5 +1,3 @@
declare module "linus" {
export function name(
callback: (error: Error, name: string) => void
): void;
declare module 'linus' {
export function name(callback: (error: Error, name: string) => void): void
}

@@ -1,35 +1,27 @@
import execa from "execa";
import { existsSync } from "fs";
import { resolve } from "path";
import { bin_name, log } from "..";
import { ENGINE_DIR } from "../constants";
import execa from 'execa'
import { existsSync } from 'fs'
import { resolve } from 'path'
import { bin_name, log } from '..'
import { ENGINE_DIR } from '../constants'
export const melonPackage = async () => {
if (existsSync(ENGINE_DIR)) {
const artifactPath = resolve(ENGINE_DIR, "mach");
const artifactPath = resolve(ENGINE_DIR, 'mach')
if (existsSync(artifactPath)) {
const args = ["package"];
const args = ['package']
log.info(
`Packaging \`dot\` with args ${JSON.stringify(
args.slice(1, 0)
)}...`
);
`Packaging \`dot\` with args ${JSON.stringify(args.slice(1, 0))}...`
)
execa(artifactPath, args).stdout?.pipe(
process.stdout
);
execa(artifactPath, args).stdout?.pipe(process.stdout)
} else {
log.error(
`Cannot binary with name \`mach\` in ${resolve(
ENGINE_DIR
)}`
);
log.error(`Cannot binary with name \`mach\` in ${resolve(ENGINE_DIR)}`)
}
} else {
log.error(
`Unable to locate any source directories.\nRun |${bin_name} download| to generate the source directory.`
);
)
}
}
};

@@ -1,171 +1,93 @@
import execa from "execa";
import { existsSync } from "fs-extra";
import { resolve } from "path";
import { confirm } from "promptly";
import rimraf from "rimraf";
import { bin_name, log } from "..";
import { ENGINE_DIR } from "../constants";
import { IPatch } from "../interfaces/patch";
import manualPatches from "../manual-patches";
import execa from 'execa'
import { existsSync } from 'fs-extra'
import { resolve } from 'path'
import { confirm } from 'promptly'
import rimraf from 'rimraf'
import { bin_name, log } from '..'
import { ENGINE_DIR } from '../constants'
import { IPatch } from '../interfaces/patch'
import manualPatches from '../manual-patches'
export const reset = async () => {
try {
log.warning(
"This will clear all your unexported changes in the `src` directory!"
);
log.warning(
`You can export your changes by running |${bin_name} export|.`
);
'This will clear all your unexported changes in the `src` directory!'
)
log.warning(`You can export your changes by running |${bin_name} export|.`)
confirm(`Are you sure you want to continue?`, {
default: "false"
default: 'false',
})
.then(async (answer) => {
if (answer) {
await execa(
"git",
["checkout", "."],
{ cwd: ENGINE_DIR }
);
await execa('git', ['checkout', '.'], { cwd: ENGINE_DIR })
manualPatches.forEach(
async (patch: IPatch) => {
const { src, action } = patch;
manualPatches.forEach(async (patch: IPatch) => {
const { src, action } = patch
if (action == "copy") {
if (
typeof src == "string"
) {
const path = resolve(
ENGINE_DIR,
src
);
if (action == 'copy') {
if (typeof src == 'string') {
const path = resolve(ENGINE_DIR, src)
if (
path !==
ENGINE_DIR
) {
log.info(
`Deleting ${src}...`
);
if (path !== ENGINE_DIR) {
log.info(`Deleting ${src}...`)
if (
existsSync(
path
)
)
rimraf.sync(
path
);
if (existsSync(path)) rimraf.sync(path)
}
} else if (
Array.isArray(src)
) {
} else if (Array.isArray(src)) {
src.forEach((i) => {
const path =
resolve(
ENGINE_DIR,
i
);
const path = resolve(ENGINE_DIR, i)
if (
path !==
ENGINE_DIR
) {
log.info(
`Deleting ${i}...`
);
if (path !== ENGINE_DIR) {
log.info(`Deleting ${i}...`)
if (
existsSync(
path
)
)
rimraf.sync(
path
);
if (existsSync(path)) rimraf.sync(path)
}
});
})
}
} else {
log.warning(
"Resetting does not work on manual patches that have a `delete` action, skipping..."
);
}
}
);
let leftovers = new Set();
const { stdout: origFiles } =
await execa(
"git",
[
"clean",
"-e",
"'!*.orig'",
"--dry-run"
],
{ cwd: ENGINE_DIR }
);
const { stdout: rejFiles } =
await execa(
"git",
[
"clean",
"-e",
"'!*.rej'",
"--dry-run"
],
{ cwd: ENGINE_DIR }
);
origFiles
.split("\n")
.map((f) =>
leftovers.add(
f.replace(
/Would remove /,
""
'Resetting does not work on manual patches that have a `delete` action, skipping...'
)
)
);
rejFiles
.split("\n")
.map((f) =>
leftovers.add(
f.replace(
/Would remove /,
""
)
)
);
Array.from(leftovers).forEach(
(f: any) => {
const path = resolve(
ENGINE_DIR,
f
);
if (path !== ENGINE_DIR) {
log.info(
`Deleting ${f}...`
);
rimraf.sync(
resolve(ENGINE_DIR, f)
);
}
}
);
log.success("Reset successfully.");
log.info(
"Next time you build, it may need to recompile parts of the program because the cache was invalidated."
);
}
})
.catch((e) => e);
let leftovers = new Set()
const { stdout: origFiles } = await execa(
'git',
['clean', '-e', "'!*.orig'", '--dry-run'],
{ cwd: ENGINE_DIR }
)
const { stdout: rejFiles } = await execa(
'git',
['clean', '-e', "'!*.rej'", '--dry-run'],
{ cwd: ENGINE_DIR }
)
origFiles
.split('\n')
.map((f) => leftovers.add(f.replace(/Would remove /, '')))
rejFiles
.split('\n')
.map((f) => leftovers.add(f.replace(/Would remove /, '')))
Array.from(leftovers).forEach((f: any) => {
const path = resolve(ENGINE_DIR, f)
if (path !== ENGINE_DIR) {
log.info(`Deleting ${f}...`)
rimraf.sync(resolve(ENGINE_DIR, f))
}
})
log.success('Reset successfully.')
log.info(
'Next time you build, it may need to recompile parts of the program because the cache was invalidated.'
)
}
})
.catch((e) => e)
} catch (e) {}
};
}

@@ -1,36 +1,32 @@
import { existsSync, readdirSync } from "fs";
import { resolve } from "path";
import { bin_name, log } from "..";
import { ENGINE_DIR } from "../constants";
import { dispatch } from "../utils";
import { existsSync, readdirSync } from 'fs'
import { resolve } from 'path'
import { bin_name, log } from '..'
import { ENGINE_DIR } from '../constants'
import { dispatch } from '../utils'
export const run = async (chrome?: string) => {
const dirs = readdirSync(ENGINE_DIR);
const dirs = readdirSync(ENGINE_DIR)
const objDirname: any = dirs.find((dir) => {
return dir.startsWith("obj-");
});
return dir.startsWith('obj-')
})
if (!objDirname) {
throw new Error(
"Dot Browser needs to be built before you can do this."
);
throw new Error('Dot Browser needs to be built before you can do this.')
}
const objDir = resolve(ENGINE_DIR, objDirname);
const objDir = resolve(ENGINE_DIR, objDirname)
if (existsSync(objDir)) {
dispatch(
"./mach",
["run"].concat(
chrome ? ["-chrome", chrome] : []
),
'./mach',
['run'].concat(chrome ? ['-chrome', chrome] : []),
ENGINE_DIR,
true,
true
);
)
} else {
log.error(
`Unable to locate any built binaries.\nRun |${bin_name} build| to initiate a build.`
);
)
}
}
};

@@ -1,62 +1,29 @@
import execa from "execa";
import {
existsSync,
readFileSync,
writeFileSync
} from "fs-extra";
import { resolve } from "path";
import { log } from "..";
import execa from 'execa'
import { existsSync, readFileSync, writeFileSync } from 'fs-extra'
import { resolve } from 'path'
import { log } from '..'
export const setBranch = async (branch: string) => {
if (
!existsSync(
resolve(
process.cwd(),
".dotbuild",
"metadata"
)
)
) {
return log.error(
"Cannot find metadata, aborting..."
);
if (!existsSync(resolve(process.cwd(), '.dotbuild', 'metadata'))) {
return log.error('Cannot find metadata, aborting...')
}
const metadata = JSON.parse(
readFileSync(
resolve(
process.cwd(),
".dotbuild",
"metadata"
),
"utf-8"
readFileSync(resolve(process.cwd(), '.dotbuild', 'metadata'), 'utf-8')
)
);
try {
await execa("git", [
"rev-parse",
"--verify",
branch
]);
await execa('git', ['rev-parse', '--verify', branch])
metadata.branch = branch;
metadata.branch = branch
writeFileSync(
resolve(
process.cwd(),
".dotbuild",
"metadata"
),
resolve(process.cwd(), '.dotbuild', 'metadata'),
JSON.stringify(metadata)
);
)
log.success(
`Default branch is at \`${branch}\`.`
);
log.success(`Default branch is at \`${branch}\`.`)
} catch (e) {
return log.error(
`Branch with name \`${branch}\` does not exist.`
);
return log.error(`Branch with name \`${branch}\` does not exist.`)
}
}
};
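
A rough usage sketch for the command above. The import path and branch name are assumptions; it expects `.dotbuild/metadata` to exist and the branch to pass `git rev-parse --verify`:

import { setBranch } from './commands' // assumed import path

const example = async () => {
  await setBranch('release') // 'release' is an illustrative branch name
}

example()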

View file

@@ -1,12 +1,12 @@
import { existsSync } from "fs";
import { log } from "..";
import { ENGINE_DIR } from "../constants";
import { dispatch } from "../utils";
import { existsSync } from 'fs'
import { log } from '..'
import { ENGINE_DIR } from '../constants'
import { dispatch } from '../utils'
export const status = async () => {
if (existsSync(ENGINE_DIR)) {
dispatch("git", ["status"], ENGINE_DIR, true);
dispatch('git', ['status'], ENGINE_DIR, true)
} else {
log.error(`Unable to locate src directory.`);
log.error(`Unable to locate src directory.`)
}
}
};

View file

@@ -1,10 +1,6 @@
import { resolve } from "path";
import { dispatch } from "../utils";
import { resolve } from 'path'
import { dispatch } from '../utils'
export const test = async () => {
dispatch(
"yarn",
["test"],
resolve(process.cwd(), "src", "dot")
);
};
dispatch('yarn', ['test'], resolve(process.cwd(), 'src', 'dot'))
}

View file

@@ -1,51 +1,31 @@
import execa from "execa";
import { resolve } from "path";
import execa from 'execa'
import { resolve } from 'path'
export const BUILD_TARGETS = [
"linux",
"windows",
"macos"
];
export const BUILD_TARGETS = ['linux', 'windows', 'macos']
export const ARCHITECTURE = ["i686", "x86_64"];
export const ARCHITECTURE = ['i686', 'x86_64']
export const PATCH_ARGS = [
"--ignore-space-change",
"--ignore-whitespace",
"--verbose"
];
'--ignore-space-change',
'--ignore-whitespace',
'--verbose',
]
export const ENGINE_DIR = resolve(
process.cwd(),
"engine"
);
export const SRC_DIR = resolve(process.cwd(), "src");
export const PATCHES_DIR = resolve(
process.cwd(),
"patches"
);
export const COMMON_DIR = resolve(
process.cwd(),
"common"
);
export const CONFIGS_DIR = resolve(
process.cwd(),
"configs"
);
export const ENGINE_DIR = resolve(process.cwd(), 'engine')
export const SRC_DIR = resolve(process.cwd(), 'src')
export const PATCHES_DIR = resolve(process.cwd(), 'patches')
export const COMMON_DIR = resolve(process.cwd(), 'common')
export const CONFIGS_DIR = resolve(process.cwd(), 'configs')
export let CONFIG_GUESS: any = null;
export let CONFIG_GUESS: any = null
try {
CONFIG_GUESS = execa.commandSync(
"./build/autoconf/config.guess",
{ cwd: ENGINE_DIR }
).stdout;
CONFIG_GUESS = execa.commandSync('./build/autoconf/config.guess', {
cwd: ENGINE_DIR,
}).stdout
} catch (e) {}
export const OBJ_DIR = resolve(
ENGINE_DIR,
`obj-${CONFIG_GUESS}`
);
export const OBJ_DIR = resolve(ENGINE_DIR, `obj-${CONFIG_GUESS}`)
export const FTL_STRING_LINE_REGEX =
/(([a-zA-Z0-9\-]*|\.[a-z\-]*) =(.*|\.)|\[[a-zA-Z0-9]*\].*(\n\s?\s?})?|\*\[[a-zA-Z0-9]*\] .*(\n\s?\s?})?)/gm;
/(([a-zA-Z0-9\-]*|\.[a-z\-]*) =(.*|\.)|\[[a-zA-Z0-9]*\].*(\n\s?\s?})?|\*\[[a-zA-Z0-9]*\] .*(\n\s?\s?})?)/gm
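
To illustrate how the constants above compose, here is a sketch with made-up values for the repository root and the config.guess triplet (neither comes from this commit):

import { ENGINE_DIR, OBJ_DIR, SRC_DIR } from './constants' // assumed relative path

// With process.cwd() === '/home/dev/dot' and config.guess printing 'x86_64-pc-linux-gnu':
//   SRC_DIR    -> '/home/dev/dot/src'
//   ENGINE_DIR -> '/home/dev/dot/engine'
//   OBJ_DIR    -> '/home/dev/dot/engine/obj-x86_64-pc-linux-gnu'
console.log(SRC_DIR, ENGINE_DIR, OBJ_DIR)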

View file

@@ -1,231 +1,152 @@
import chalk from "chalk";
import execa from "execa";
import {
existsSync,
rmdirSync,
rmSync,
statSync
} from "fs-extra";
import { resolve } from "path";
import readline from "readline";
import { log } from "..";
import {
ENGINE_DIR,
PATCH_ARGS,
SRC_DIR
} from "../constants";
import { copyManual } from "../utils";
import chalk from 'chalk'
import execa from 'execa'
import { existsSync, rmdirSync, rmSync, statSync } from 'fs-extra'
import { resolve } from 'path'
import readline from 'readline'
import { log } from '..'
import { ENGINE_DIR, PATCH_ARGS, SRC_DIR } from '../constants'
import { copyManual } from '../utils'
class Patch {
public name: string;
public action: string;
public src: string | string[];
public type: "file" | "manual";
public status: number[];
public name: string
public action: string
public src: string | string[]
public type: 'file' | 'manual'
public status: number[]
public markers?: {
[key: string]: [string, string];
};
public indent?: number;
[key: string]: [string, string]
}
public indent?: number
public options: {
minimal?: boolean;
noIgnore?: boolean;
};
private _done: boolean = false;
minimal?: boolean
noIgnore?: boolean
}
private _done: boolean = false
private error: Error | unknown;
private error: Error | unknown
private async applyAsManual() {
return new Promise(async (res, rej) => {
try {
switch (this.action) {
case "copy":
if (typeof this.src == "string") {
copyManual(
this.src,
this.options.noIgnore
);
case 'copy':
if (typeof this.src == 'string') {
copyManual(this.src, this.options.noIgnore)
}
if (Array.isArray(this.src)) {
this.src.forEach((i) => {
copyManual(
i,
this.options.noIgnore
);
});
copyManual(i, this.options.noIgnore)
})
}
break;
case "delete":
if (typeof this.src == "string") {
if (
!existsSync(
resolve(
ENGINE_DIR,
this.src
)
)
)
break
case 'delete':
if (typeof this.src == 'string') {
if (!existsSync(resolve(ENGINE_DIR, this.src)))
return log.error(
`We were unable to delete the file or directory \`${this.src}\` as it doesn't exist in the src directory.`
);
)
if (
statSync(
resolve(
ENGINE_DIR,
this.src
)
).isDirectory()
) {
rmdirSync(
resolve(
ENGINE_DIR,
this.src
)
);
if (statSync(resolve(ENGINE_DIR, this.src)).isDirectory()) {
rmdirSync(resolve(ENGINE_DIR, this.src))
} else {
rmSync(
resolve(
ENGINE_DIR,
this.src
)
);
rmSync(resolve(ENGINE_DIR, this.src))
}
}
if (Array.isArray(this.src)) {
this.src.forEach((i) => {
if (
!existsSync(
resolve(
ENGINE_DIR,
i
)
)
)
if (!existsSync(resolve(ENGINE_DIR, i)))
return log.error(
`We were unable to delete the file or directory \`${i}\` as it doesn't exist in the src directory.`
);
)
if (
statSync(
resolve(
ENGINE_DIR,
i
)
).isDirectory()
) {
rmdirSync(
resolve(
ENGINE_DIR,
i
)
);
if (statSync(resolve(ENGINE_DIR, i)).isDirectory()) {
rmdirSync(resolve(ENGINE_DIR, i))
} else {
rmSync(
resolve(
ENGINE_DIR,
i
),
{ force: true }
);
rmSync(resolve(ENGINE_DIR, i), { force: true })
}
});
})
}
break;
break
}
res(true);
res(true)
} catch (e) {
rej(e);
rej(e)
}
});
})
}
private async applyAsPatch() {
return new Promise(async (res, rej) => {
try {
try {
await execa(
"git",
[
"apply",
"-R",
...PATCH_ARGS,
this.src as any
],
{ cwd: ENGINE_DIR }
);
await execa('git', ['apply', '-R', ...PATCH_ARGS, this.src as any], {
cwd: ENGINE_DIR,
})
} catch (e) {
null;
null
}
const { stdout, exitCode } = await execa(
"git",
[
"apply",
...PATCH_ARGS,
this.src as any
],
'git',
['apply', ...PATCH_ARGS, this.src as any],
{ cwd: ENGINE_DIR }
);
)
if (exitCode == 0) res(true);
else throw stdout;
if (exitCode == 0) res(true)
else throw stdout
} catch (e) {
rej(e);
rej(e)
}
});
})
}
public async apply() {
if (!this.options.minimal) {
log.info(
`${chalk.gray(
`(${this.status[0]}/${this.status[1]})`
)} Applying ${this.name}...`
);
`${chalk.gray(`(${this.status[0]}/${this.status[1]})`)} Applying ${
this.name
}...`
)
}
try {
if (this.type == "manual")
await this.applyAsManual();
if (this.type == "file")
await this.applyAsPatch();
if (this.type == 'manual') await this.applyAsManual()
if (this.type == 'file') await this.applyAsPatch()
this.done = true;
this.done = true
} catch (e) {
this.error = e;
this.done = false;
this.error = e
this.done = false
}
}
public get done() {
return this._done;
return this._done
}
public set done(_: any) {
this._done = _;
this._done = _
if (!this.options.minimal) {
readline.moveCursor(process.stdout, 0, -1);
readline.clearLine(process.stdout, 1);
readline.moveCursor(process.stdout, 0, -1)
readline.clearLine(process.stdout, 1)
log.info(
`${chalk.gray(
`(${this.status[0]}/${this.status[1]})`
)} Applying ${this.name}... ${chalk[
this._done ? "green" : "red"
].bold(
this._done ? "Done ✔" : "Error ❗"
`${chalk.gray(`(${this.status[0]}/${this.status[1]})`)} Applying ${
this.name
}... ${chalk[this._done ? 'green' : 'red'].bold(
this._done ? 'Done ✔' : 'Error ❗'
)}`
);
)
}
if (this.error) {
throw this.error;
throw this.error
}
}
@@ -237,31 +158,31 @@ class Patch {
status,
markers,
indent,
options
options,
}: {
name: string;
action?: string;
src?: string | string[];
type: "file" | "manual";
status: number[];
name: string
action?: string
src?: string | string[]
type: 'file' | 'manual'
status: number[]
markers?: {
[key: string]: [string, string];
};
indent?: number;
[key: string]: [string, string]
}
indent?: number
options: {
minimal?: boolean;
noIgnore?: boolean;
};
minimal?: boolean
noIgnore?: boolean
}
}) {
this.name = name;
this.action = action || "";
this.src = src || resolve(SRC_DIR, name);
this.type = type;
this.status = status;
this.markers = markers;
this.indent = indent;
this.options = options;
this.name = name
this.action = action || ''
this.src = src || resolve(SRC_DIR, name)
this.type = type
this.status = status
this.markers = markers
this.indent = indent
this.options = options
}
}
export default Patch;
export default Patch
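
A hedged sketch of driving the class above. The module path and patch name are invented; `src` is omitted so it falls back to `resolve(SRC_DIR, name)` as the constructor does:

import Patch from './controllers/patch' // assumed module path

const applyOne = async () => {
  const patch = new Patch({
    name: 'example-fix.patch', // hypothetical patch file under src/
    type: 'file',              // 'file' goes through `git apply`; 'manual' copies or deletes sources
    status: [1, 1],            // (current, total), used for the progress log line
    options: { minimal: false },
  })
  await patch.apply()          // the `done` setter rethrows this.error if application failed
}

applyOne()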

View file

@@ -1,60 +1,41 @@
import chalk from "chalk";
import commander, { Command } from "commander";
import { existsSync, readFileSync } from "fs";
import { resolve } from "path";
import { commands } from "./cmds";
import { ENGINE_DIR } from "./constants";
import Log from "./log";
import { shaCheck } from "./middleware/sha-check";
import { updateCheck } from "./middleware/update-check";
import { errorHandler } from "./utils";
import chalk from 'chalk'
import commander, { Command } from 'commander'
import { existsSync, readFileSync } from 'fs'
import { resolve } from 'path'
import { commands } from './cmds'
import { ENGINE_DIR } from './constants'
import Log from './log'
import { shaCheck } from './middleware/sha-check'
import { updateCheck } from './middleware/update-check'
import { errorHandler } from './utils'
const program = new Command();
const program = new Command()
export let log = new Log();
export let log = new Log()
program
.storeOptionsAsProperties(false)
.passCommandToAction(false);
program.storeOptionsAsProperties(false).passCommandToAction(false)
const { dot, firefox, melon } =
require("../package.json").versions;
const { dot, firefox, melon } = require('../package.json').versions
let reportedFFVersion;
let reportedFFVersion
if (
existsSync(
resolve(
ENGINE_DIR,
"browser",
"config",
"version.txt"
)
)
) {
if (existsSync(resolve(ENGINE_DIR, 'browser', 'config', 'version.txt'))) {
const version = readFileSync(
resolve(
ENGINE_DIR,
"browser",
"config",
"version.txt"
),
"utf-8"
).replace(/\n/g, "");
resolve(ENGINE_DIR, 'browser', 'config', 'version.txt'),
'utf-8'
).replace(/\n/g, '')
if (version !== firefox) reportedFFVersion = version;
if (version !== firefox) reportedFFVersion = version
}
export const bin_name = "melon";
export const bin_name = 'melon'
program.version(`
\t${chalk.bold("Dot Browser")} ${dot}
\t${chalk.bold("Firefox")} ${firefox} ${
reportedFFVersion
? `(being reported as ${reportedFFVersion})`
: ``
\t${chalk.bold('Dot Browser')} ${dot}
\t${chalk.bold('Firefox')} ${firefox} ${
reportedFFVersion ? `(being reported as ${reportedFFVersion})` : ``
}
\t${chalk.bold("Melon")} ${melon}
\t${chalk.bold('Melon')} ${melon}
${
reportedFFVersion
@@ -63,46 +44,42 @@ You may have downloaded the source code using a different version and
then switched to another branch.`
: ``
}
`);
program.name(bin_name);
`)
program.name(bin_name)
commands.forEach((command) => {
if (command.flags) {
if (
command.flags.platforms &&
!command.flags.platforms.includes(
process.platform
)
!command.flags.platforms.includes(process.platform)
) {
return;
return
}
}
const _cmd = commander.command(command.cmd);
const _cmd = commander.command(command.cmd)
_cmd.description(command.description);
_cmd.description(command.description)
command?.aliases?.forEach((alias) => {
_cmd.alias(alias);
});
_cmd.alias(alias)
})
command?.options?.forEach((opt) => {
_cmd.option(opt.arg, opt.description);
});
_cmd.option(opt.arg, opt.description)
})
_cmd.action(async (...args: any) => {
await shaCheck(command.cmd);
await updateCheck();
await shaCheck(command.cmd)
await updateCheck()
command.controller(...args);
});
command.controller(...args)
})
program.addCommand(_cmd);
});
program.addCommand(_cmd)
})
process.on("uncaughtException", errorHandler);
process.on("unhandledException", (err) =>
errorHandler(err, true)
);
process.on('uncaughtException', errorHandler)
process.on('unhandledException', (err) => errorHandler(err, true))
program.parse(process.argv);
program.parse(process.argv)

View file

@@ -1,9 +1,9 @@
export interface IPatch {
name: string;
action: string;
src: string | string[];
name: string
action: string
src: string | string[]
markers?: {
[key: string]: [string, string];
};
indent?: number;
[key: string]: [string, string]
}
indent?: number
}

View file

@@ -1,70 +1,51 @@
import chalk from "chalk";
import chalk from 'chalk'
class Log {
private startTime: number;
private startTime: number
constructor() {
const d = new Date();
const d = new Date()
this.startTime = d.getTime();
this.startTime = d.getTime()
}
getDiff() {
const d = new Date();
const d = new Date()
const currentTime = d.getTime();
const currentTime = d.getTime()
const elapsedTime = currentTime - this.startTime;
const elapsedTime = currentTime - this.startTime
var secs = Math.floor((elapsedTime / 1000) % 60);
var mins = Math.floor(
(elapsedTime / (60 * 1000)) % 60
);
var hours = Math.floor(
(elapsedTime / (60 * 60 * 1000)) % 24
);
var secs = Math.floor((elapsedTime / 1000) % 60)
var mins = Math.floor((elapsedTime / (60 * 1000)) % 60)
var hours = Math.floor((elapsedTime / (60 * 60 * 1000)) % 24)
const format = (r: number) => {
return r.toString().length == 1 ? "0" + r : r;
};
return r.toString().length == 1 ? '0' + r : r
}
return `${format(hours)}:${format(mins)}:${format(
secs
)}`;
return `${format(hours)}:${format(mins)}:${format(secs)}`
}
info(...args: any[]) {
console.info(
chalk.blueBright.bold(this.getDiff()),
...args
);
console.info(chalk.blueBright.bold(this.getDiff()), ...args)
}
warning(...args: any[]) {
console.info(
chalk.yellowBright.bold(" WARNING"),
...args
);
console.info(chalk.yellowBright.bold(' WARNING'), ...args)
}
hardWarning(...args: any[]) {
console.info(
"",
chalk.bgRed.bold("WARNING"),
...args
);
console.info('', chalk.bgRed.bold('WARNING'), ...args)
}
success(...args: any[]) {
console.log(
`\n${chalk.greenBright.bold("SUCCESS")}`,
...args
);
console.log(`\n${chalk.greenBright.bold('SUCCESS')}`, ...args)
}
error(...args: any[]) {
throw new Error(...args);
throw new Error(...args)
}
}
export default Log;
export default Log
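
For orientation, a small usage sketch of the logger above; the messages are placeholders. The import path matches the one used by the CLI entry point:

import Log from './log'

const log = new Log()
log.info('Doing some work...')      // prefixed with the elapsed time as hh:mm:ss
log.warning('Something looks off')  // prefixed with " WARNING"
log.success('All done')             // printed on a new line with "SUCCESS"
// Note: log.error() throws an Error rather than printing, so only call it for fatal conditions.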

View file

@@ -1,32 +1,26 @@
import { sync } from "glob";
import { SRC_DIR } from "./constants";
import { IPatch } from "./interfaces/patch";
import { sync } from 'glob'
import { SRC_DIR } from './constants'
import { IPatch } from './interfaces/patch'
let files = sync("**/*", {
let files = sync('**/*', {
nodir: true,
cwd: SRC_DIR
cwd: SRC_DIR,
}).filter(
(f) =>
!(
f.endsWith(".patch") ||
f.split("/").includes("node_modules")
(f) => !(f.endsWith('.patch') || f.split('/').includes('node_modules'))
)
);
const manualPatches: IPatch[] = [];
const manualPatches: IPatch[] = []
files.map((i) => {
const group = i.split("/")[0];
const group = i.split('/')[0]
if (!manualPatches.find((m) => m.name == group)) {
manualPatches.push({
name: group,
action: "copy",
src: files.filter(
(f) => f.split("/")[0] == group
)
});
action: 'copy',
src: files.filter((f) => f.split('/')[0] == group),
})
}
});
})
export default manualPatches;
export default manualPatches
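
To make the grouping above concrete: every non-.patch file under SRC_DIR is bucketed by its top-level directory into a single copy patch. A hypothetical result, with invented paths:

import { IPatch } from './interfaces/patch'

// If src/ contained browser/themes/custom.css and browser/base/browser.js (invented files),
// manualPatches would hold one entry covering both:
const expected: IPatch[] = [
  {
    name: 'browser',
    action: 'copy',
    src: ['browser/themes/custom.css', 'browser/base/browser.js'],
  },
]
console.log(expected)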

View file

@@ -1,44 +1,25 @@
import execa from "execa";
import { existsSync, readFileSync } from "fs-extra";
import { resolve } from "path";
import { bin_name, log } from "..";
import execa from 'execa'
import { existsSync, readFileSync } from 'fs-extra'
import { resolve } from 'path'
import { bin_name, log } from '..'
const blacklistedCommands = [
"reset",
"init",
"set-branch"
];
const blacklistedCommands = ['reset', 'init', 'set-branch']
export const shaCheck = async (command: string) => {
if (
blacklistedCommands.filter((c) =>
command.startsWith(c)
).length !== 0 ||
!existsSync(
resolve(
process.cwd(),
".dotbuild",
"metadata"
blacklistedCommands.filter((c) => command.startsWith(c)).length !== 0 ||
!existsSync(resolve(process.cwd(), '.dotbuild', 'metadata'))
)
)
)
return;
return
const metadata = JSON.parse(
readFileSync(
resolve(
process.cwd(),
".dotbuild",
"metadata"
),
"utf-8"
readFileSync(resolve(process.cwd(), '.dotbuild', 'metadata'), 'utf-8')
)
);
const { stdout: currentBranch } = await execa("git", [
"branch",
"--show-current"
]);
const { stdout: currentBranch } = await execa('git', [
'branch',
'--show-current',
])
if (metadata && metadata.branch) {
if (metadata.branch !== currentBranch) {
@@ -47,7 +28,7 @@ export const shaCheck = async (command: string) => {
\t If you are changing the Firefox version, you will need to reset the tree
\t with |${bin_name} reset --hard| and then |${bin_name} download|.
\t Or you can change the default branch by typing |${bin_name} set-branch <branch>|.`);
\t Or you can change the default branch by typing |${bin_name} set-branch <branch>|.`)
}
}
}
};

View file

@@ -1,31 +1,24 @@
import axios from "axios";
import { log } from "../";
import axios from 'axios'
import { log } from '../'
const pjson = require("../../package.json");
const pjson = require('../../package.json')
export const updateCheck = async () => {
const firefoxVersion =
pjson.versions["firefox-display"];
const firefoxVersion = pjson.versions['firefox-display']
try {
const { data } = await axios.get(
`https://product-details.mozilla.org/1.0/firefox_history_major_releases.json`,
{ timeout: 1000 }
);
)
if (data) {
let version =
Object.keys(data)[
Object.keys(data).length - 1
];
let version = Object.keys(data)[Object.keys(data).length - 1]
if (
firefoxVersion &&
version !== firefoxVersion
)
if (firefoxVersion && version !== firefoxVersion)
log.warning(
`Latest version of Firefox (${version}) does not match frozen version (${firefoxVersion}).`
);
)
}
} catch (e) {}
};
}

src/types.d.ts vendored
View file

@@ -1,19 +1,19 @@
export interface Cmd {
cmd: string;
description: string;
cmd: string
description: string
controller: (...args: any) => void;
controller: (...args: any) => void
options?: CmdOption[];
aliases?: string[];
options?: CmdOption[]
aliases?: string[]
flags?: {
platforms?: CmdFlagPlatform[];
};
platforms?: CmdFlagPlatform[]
}
}
export interface CmdOption {
arg: string;
description: string;
arg: string
description: string
}
export type CmdFlagPlatform = NodeJS.Platform;
export type CmdFlagPlatform = NodeJS.Platform

View file

@ -1,5 +1,5 @@
export const delay = (delay: number) => {
return new Promise((resolve) => {
setTimeout(() => resolve(true), delay);
});
};
setTimeout(() => resolve(true), delay)
})
}

View file

@@ -1,21 +1,18 @@
import execa from "execa";
import { log } from "..";
import execa from 'execa'
import { log } from '..'
const handle = (data: any, killOnError?: boolean) => {
const d = data.toString();
const d = data.toString()
d.split("\n").forEach((line: any) => {
if (line.length !== 0)
log.info(
line.replace(/\s\d{1,5}:\d\d\.\d\d /g, "")
);
});
d.split('\n').forEach((line: any) => {
if (line.length !== 0) log.info(line.replace(/\s\d{1,5}:\d\d\.\d\d /g, ''))
})
if (killOnError) {
log.error("Command failed. See error above.");
process.exit(1);
log.error('Command failed. See error above.')
process.exit(1)
}
}
};
export const dispatch = (
cmd: string,
@@ -25,25 +22,21 @@ export const dispatch = (
killOnError?: boolean
) => {
return new Promise((resolve, reject) => {
process.env.MACH_USE_SYSTEM_PYTHON = "true";
process.env.MACH_USE_SYSTEM_PYTHON = 'true'
const proc = execa(cmd, args, {
cwd: cwd ? cwd : process.cwd(),
env: process.env
});
env: process.env,
})
proc.stdout?.on("data", (d) => handle(d));
proc.stderr?.on("data", (d) => handle(d));
proc.stdout?.on('data', (d) => handle(d))
proc.stderr?.on('data', (d) => handle(d))
proc.stdout?.on("error", (d) =>
handle(d, killOnError)
);
proc.stderr?.on("error", (d) =>
handle(d, killOnError)
);
proc.stdout?.on('error', (d) => handle(d, killOnError))
proc.stderr?.on('error', (d) => handle(d, killOnError))
proc.on("exit", () => {
resolve(true);
});
});
};
proc.on('exit', () => {
resolve(true)
})
})
}
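
A usage sketch mirroring how the commands above call this helper (for example, the status command runs `dispatch('git', ['status'], ENGINE_DIR, true)`); the import paths are assumptions:

import { dispatch } from './utils'
import { ENGINE_DIR } from './constants'

const showStatus = async () => {
  // Streams stdout/stderr through the logger and resolves once the child process exits.
  await dispatch('git', ['status'], ENGINE_DIR, true)
}

showStatus()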

View file

@@ -1,45 +1,39 @@
import chalk from "chalk";
import { readFileSync } from "fs-extra";
import { resolve } from "path";
import { log } from "..";
import chalk from 'chalk'
import { readFileSync } from 'fs-extra'
import { resolve } from 'path'
import { log } from '..'
export const errorHandler = (
err: Error,
isUnhandledRej: boolean
) => {
let cc = readFileSync(
resolve(process.cwd(), ".dotbuild", "command"),
"utf-8"
);
cc = cc.replace(/(\r\n|\n|\r)/gm, "");
export const errorHandler = (err: Error, isUnhandledRej: boolean) => {
let cc = readFileSync(resolve(process.cwd(), '.dotbuild', 'command'), 'utf-8')
cc = cc.replace(/(\r\n|\n|\r)/gm, '')
console.log(
`\n ${chalk.redBright.bold(
"ERROR"
'ERROR'
)} An error occurred while running command ["${cc
.split(" ")
.split(' ')
.join('", "')}"]:`
);
)
console.log(
`\n\t`,
isUnhandledRej
? err.toString().replace(/\n/g, "\n\t ")
: err.message.replace(/\n/g, "\n\t ")
);
? err.toString().replace(/\n/g, '\n\t ')
: err.message.replace(/\n/g, '\n\t ')
)
if (err.stack || isUnhandledRej) {
const stack: any = err.stack?.split("\n");
stack.shift();
stack.shift();
const stack: any = err.stack?.split('\n')
stack.shift()
stack.shift()
console.log(
`\t`,
stack
.join("\n")
.replace(/(\r\n|\n|\r)/gm, "")
.replace(/ at /g, "\n\t • ")
);
.join('\n')
.replace(/(\r\n|\n|\r)/gm, '')
.replace(/ at /g, '\n\t • ')
)
}
console.log();
log.info("Exiting due to error.");
process.exit(1);
};
console.log()
log.info('Exiting due to error.')
process.exit(1)
}

View file

@@ -2,65 +2,45 @@ import {
appendFileSync,
ensureSymlink,
lstatSync,
readFileSync
} from "fs-extra";
import { resolve } from "path";
import rimraf from "rimraf";
import { ENGINE_DIR, SRC_DIR } from "../constants";
readFileSync,
} from 'fs-extra'
import { resolve } from 'path'
import rimraf from 'rimraf'
import { ENGINE_DIR, SRC_DIR } from '../constants'
const getChunked = (location: string) => {
return location.replace(/\\/g, "/").split("/");
};
return location.replace(/\\/g, '/').split('/')
}
export const copyManual = (
name: string,
noIgnore?: boolean
) => {
export const copyManual = (name: string, noIgnore?: boolean) => {
try {
try {
if (
!lstatSync(
resolve(
ENGINE_DIR,
...getChunked(name)
)
).isSymbolicLink()
!lstatSync(resolve(ENGINE_DIR, ...getChunked(name))).isSymbolicLink()
) {
rimraf.sync(
resolve(
ENGINE_DIR,
...getChunked(name)
)
);
rimraf.sync(resolve(ENGINE_DIR, ...getChunked(name)))
}
} catch (e) {}
ensureSymlink(
resolve(SRC_DIR, ...getChunked(name)),
resolve(ENGINE_DIR, ...getChunked(name))
);
)
if (!noIgnore) {
const gitignore = readFileSync(
resolve(ENGINE_DIR, ".gitignore"),
"utf-8"
);
const gitignore = readFileSync(resolve(ENGINE_DIR, '.gitignore'), 'utf-8')
if (
!gitignore.includes(
getChunked(name).join("/")
)
)
if (!gitignore.includes(getChunked(name).join('/')))
appendFileSync(
resolve(ENGINE_DIR, ".gitignore"),
`\n${getChunked(name).join("/")}`
);
resolve(ENGINE_DIR, '.gitignore'),
`\n${getChunked(name).join('/')}`
)
}
return;
return
} catch (e) {
console.log(e);
process.exit(0);
console.log(e)
process.exit(0)
// return e;
}
};
}
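
A hedged sketch of calling the helper above with an invented path: it symlinks the file from SRC_DIR into ENGINE_DIR and, unless the second argument is set, records the path in engine/.gitignore:

import { copyManual } from './utils' // assumed relative path

copyManual('browser/themes/custom.css')       // hypothetical file living under src/
copyManual('browser/themes/custom.css', true) // same, but skip the .gitignore bookkeeping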

View file

@@ -1,6 +1,6 @@
export * from "./delay";
export * from "./dispatch";
export * from "./error-handler";
export * from "./import";
export * from "./version";
export * from "./write-metadata";
export * from './delay'
export * from './dispatch'
export * from './error-handler'
export * from './import'
export * from './version'
export * from './write-metadata'

View file

@ -1,11 +1,9 @@
import axios from "axios";
import axios from 'axios'
export const getLatestFF = async () => {
const { data } = await axios.get(
`https://product-details.mozilla.org/1.0/firefox_history_major_releases.json`
);
)
return Object.keys(data)[
Object.keys(data).length - 1
];
};
return Object.keys(data)[Object.keys(data).length - 1]
}
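
A small usage sketch; the function resolves to the last key of Mozilla's firefox_history_major_releases.json, and the value shown in the comment is only illustrative:

import { getLatestFF } from './utils' // assumed relative path

getLatestFF().then((version) => {
  console.log(`Latest major Firefox release: ${version}`) // e.g. '92.0' (illustrative)
})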

View file

@@ -1,26 +1,20 @@
import execa from "execa";
import { writeFileSync } from "fs-extra";
import { resolve } from "path";
import execa from 'execa'
import { writeFileSync } from 'fs-extra'
import { resolve } from 'path'
const pjson = require("../../package.json");
const pjson = require('../../package.json')
export const writeMetadata = async () => {
const { stdout: sha } = await execa("git", [
"rev-parse",
"HEAD"
]);
const { stdout: branch } = await execa("git", [
"branch",
"--show-current"
]);
const { stdout: sha } = await execa('git', ['rev-parse', 'HEAD'])
const { stdout: branch } = await execa('git', ['branch', '--show-current'])
writeFileSync(
resolve(process.cwd(), ".dotbuild", "metadata"),
resolve(process.cwd(), '.dotbuild', 'metadata'),
JSON.stringify({
sha,
branch,
birth: Date.now(),
versions: pjson.versions
versions: pjson.versions,
})
);
};
)
}
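
For reference, a sketch of the object the function above serialises into .dotbuild/metadata; every value below is invented:

// Illustrative shape only: the real values come from git and package.json at write time.
const exampleMetadata = {
  sha: '0123456789abcdef0123456789abcdef01234567', // `git rev-parse HEAD` (fake)
  branch: 'main',                                   // `git branch --show-current` (fake)
  birth: 1632096000000,                             // Date.now() when the metadata was written (fake)
  versions: {},                                     // the `versions` block from package.json
}
console.log(JSON.stringify(exampleMetadata))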

View file

@@ -60,10 +60,5 @@
"skipLibCheck": true /* Skip type checking of declaration files. */,
"forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */
},
"exclude": [
"node_modules/**/*",
"firefox-*/**/*",
"gecko",
"engine/**/*"
]
"exclude": ["node_modules/**/*", "firefox-*/**/*", "gecko", "engine/**/*"]
}