Mirror of https://github.com/zen-browser/surfer.git, synced 2025-07-08 09:20:01 +02:00
✨ Generate update manifests for addons
parent d848fd1848, commit 5f2b57c826
9 changed files with 90 additions and 27 deletions
@@ -140,10 +140,17 @@ export const commands: Cmd[] = [
     requestController: async () => (await import('./commands/status')).status,
   },
   {
-    cmd: 'updates browser',
+    cmd: 'updates-browser',
     description:
       'Generate update manifest for the browser binary. This should be run after packaging',
     requestController: async () =>
       (await import('./commands/updates/browser')).generateBrowserUpdateFiles,
   },
+  {
+    cmd: 'updates-addons',
+    description:
+      'Generates update manifests for system addons that are included in the browser',
+    requestController: async () =>
+      (await import('./commands/updates/addons')).generateAddonUpdateFiles,
+  },
 ]
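Each entry above defers loading its implementation until the command is actually invoked, via the dynamic import in requestController. Below is a minimal sketch of how such an entry could be dispatched; the Cmd shape and runCommand helper are illustrative assumptions, not surfer's actual runner.

// Illustrative only: surfer's real Cmd type and dispatcher may differ.
interface Cmd {
  cmd: string
  description: string
  requestController: () => Promise<() => void | Promise<void>>
}

async function runCommand(commands: Cmd[], name: string): Promise<void> {
  const entry = commands.find((candidate) => candidate.cmd === name)
  if (!entry) {
    throw new Error(`Unknown command: ${name}`)
  }

  // The controller module (e.g. ./commands/updates/addons) is only imported
  // here, so commands that are never run never pay their import cost.
  const controller = await entry.requestController()
  await controller()
}

// e.g. runCommand(commands, 'updates-addons')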
@@ -13,6 +13,7 @@ import {
   addAddonsToMozBuild,
   downloadAddon,
   generateAddonMozBuild,
+  getAddons,
   initializeAddon,
   resolveAddonDownloadUrl,
   unpackAddon,
@@ -29,30 +30,20 @@ export const download = async (): Promise<void> => {
     process.exit(1)
   }
 
-  const addons = Object.keys(config.addons).map((addon) => ({
-    name: addon,
-    ...config.addons[addon],
-  }))
-
   if (shouldSetupFirefoxSource()) {
     await setupFirefoxSource(version)
   }
 
-  for (const addon of addons) {
+  for (const addon of getAddons()) {
     const downloadUrl = await resolveAddonDownloadUrl(addon)
     const downloadedXPI = await downloadAddon(downloadUrl, addon)
 
-    if (!downloadedXPI) {
-      log.info(`Skipping ${addon.name}... Already installed`)
-      continue
-    }
-
     await unpackAddon(downloadedXPI, addon)
     await generateAddonMozBuild(addon)
     await initializeAddon(addon)
   }
 
-  await addAddonsToMozBuild(addons)
+  await addAddonsToMozBuild(getAddons())
 
   log.success(
     `You should be ready to make changes to ${config.name}.`,
@@ -1,6 +1,7 @@
 import { existsSync, readFileSync, unlinkSync, writeFileSync } from 'fs'
 import { join } from 'path'
 import { isMatch } from 'picomatch'
 import { config } from '../..'
 import { ENGINE_DIR, MELON_TMP_DIR } from '../../constants'
 import { log } from '../../log'
@@ -16,6 +17,12 @@ import { downloadFileToLocation } from '../../utils/download'
 import { readItem } from '../../utils/store'
 import { discard } from '../discard'
 
+export const getAddons = (): (AddonInfo & { name: string })[] =>
+  Object.keys(config.addons).map((addon) => ({
+    name: addon,
+    ...config.addons[addon],
+  }))
+
 export async function resolveAddonDownloadUrl(
   addon: AddonInfo
 ): Promise<string> {
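getAddons simply flattens the addons section of the surfer config into an array, attaching each object key as the addon's name. As a rough illustration, with a hypothetical config entry like the one below (the name, id, version, and url are placeholders, and the interface mirrors the UrlAddonInfo type further down in this commit), the helper yields one element per key.

// Hypothetical addons section of a surfer config; all values are placeholders.
interface UrlAddonInfo {
  platform: 'url'
  version: string
  id: string
  url: string
}

const addons: Record<string, UrlAddonInfo> = {
  'example-addon': {
    platform: 'url',
    version: '1.2.3',
    id: 'example@example.com',
    url: 'https://example.com/example-addon-1.2.3.xpi',
  },
}

const withNames = Object.keys(addons).map((name) => ({ name, ...addons[name] }))
// -> [{ name: 'example-addon', platform: 'url', version: '1.2.3', ... }]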
@@ -55,27 +62,22 @@ export async function resolveAddonDownloadUrl(
 export async function downloadAddon(
   url: string,
   addon: AddonInfo & { name: string }
-): Promise<string | false> {
+): Promise<string> {
   const tempFile = join(MELON_TMP_DIR, addon.name + '.xpi')
   const outPath = join(ENGINE_DIR, 'browser', 'extensions', addon.name)
 
   log.info(`Download addon from ${url}`)
 
   if (existsSync(outPath)) {
     // Now we need to do some tests. First, if there is no cache file,
     // we must discard the existing folder and download the file again.
     // If there is a cache file and the cache file points to the same path
     // we can return and skip the download.
 
     {
       const extensionCache = readItem<{ url: string }>(addon.name)
 
       if (extensionCache.isNone()) {
-        // We haven't stored it in the cache, therefore we need to redonwload
+        // We haven't stored it in the cache, therefore we need to redownload
         // it
       } else {
         const cache = extensionCache.unwrap()
-        if (cache.url == url) {
-          return false
+        if (cache.url == url && existsSync(tempFile)) {
+          log.info(`Using cached version of ${addon.name}`)
+          return tempFile
         }
       }
     }
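The cache check above reduces to: reuse the previously downloaded XPI only when a cache entry exists, it records the same URL, and the temp file is still on disk; otherwise fall through and download again. The same decision in isolation (shouldReuseCachedXpi and its parameters are illustrative, not part of surfer):

import { existsSync } from 'fs'

// Illustrative helper mirroring the decision above; not part of surfer.
function shouldReuseCachedXpi(
  cachedUrl: string | undefined,
  requestedUrl: string,
  tempFile: string
): boolean {
  // No cache entry: we cannot trust whatever is on disk, so download again.
  if (cachedUrl === undefined) return false

  // Reuse only when the recorded URL matches and the XPI is still present.
  return cachedUrl === requestedUrl && existsSync(tempFile)
}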
@@ -98,6 +100,13 @@ export async function unpackAddon(
 ) {
   const outPath = join(ENGINE_DIR, 'browser', 'extensions', addon.name)
 
+  if (existsSync(outPath)) {
+    log.info(
+      `The extension ${addon.name} has already been unpacked... skipping`
+    )
+    return
+  }
+
   log.info(`Unpacking extension...`)
 
   // I do not know why, but this delay causes unzip to work reliably
@@ -80,7 +80,7 @@ async function downloadFirefoxSource(version: string) {
 
   log.info(`Locating Firefox release ${version}...`)
 
-  await ensureDir(fsParent)
+  await ensureDir(fsSaveLocation)
 
   if (existsSync(fsSaveLocation)) {
     log.info('Using cached download')
src/commands/updates/addons.ts (new file, 50 lines)
@@ -0,0 +1,50 @@
+import { stat, writeFile } from 'fs/promises'
+import { dirname, join } from 'path'
+import { create } from 'xmlbuilder2'
+import { DIST_DIR } from '../../constants'
+import { dynamicConfig, ensureDir, generateHash, getSize } from '../../utils'
+import {
+  downloadAddon,
+  getAddons,
+  resolveAddonDownloadUrl,
+} from '../download/addon'
+
+export async function generateAddonUpdateFiles() {
+  const addons = []
+
+  for (const addon of getAddons()) {
+    const url = await resolveAddonDownloadUrl(addon)
+    const xpi = await downloadAddon(url, addon)
+
+    addons.push({
+      ...addon,
+      url,
+      xpi,
+      hash: await generateHash(xpi, 'sha256'),
+      hashType: 'sha256',
+      size: await getSize(xpi),
+    })
+  }
+
+  const root = create().ele('updates').ele('addons')
+
+  for (const addon of addons) {
+    const addonNode = root.ele('addon')
+    addonNode.att('id', addon.id)
+    addonNode.att('URL', addon.url)
+    addonNode.att('hashFunction', addon.hashType)
+    addonNode.att('hashValue', addon.hash)
+    addonNode.att('size', addon.size.toString())
+    addonNode.att('version', addon.version)
+  }
+
+  const path = join(
+    DIST_DIR,
+    'update/browser/addons',
+    dynamicConfig.get('brand'),
+    'update.xml'
+  )
+
+  ensureDir(path)
+  await writeFile(path, root.end({ prettyPrint: true }))
+}
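For reference, the manifest that generateAddonUpdateFiles writes to DIST_DIR/update/browser/addons/<brand>/update.xml would look roughly like the string below; every value is a placeholder, and exact whitespace depends on xmlbuilder2's prettyPrint output.

// Placeholder illustration of the generated update.xml; not real addon data.
const exampleManifest = `<?xml version="1.0"?>
<updates>
  <addons>
    <addon id="example@example.com"
           URL="https://example.com/example-addon-1.2.3.xpi"
           hashFunction="sha256"
           hashValue="(sha256 of the xpi)"
           size="123456"
           version="1.2.3"/>
  </addons>
</updates>`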
@@ -1,5 +1,5 @@
 import { existsSync } from 'fs'
-import { readFile, stat, writeFile } from 'fs/promises'
+import { readFile, writeFile } from 'fs/promises'
 import { parse } from 'ini'
 import { isAppleSilicon } from 'is-apple-silicon'
 import { dirname, join } from 'path'
@@ -11,6 +11,7 @@ import {
   dynamicConfig,
   ensureEmpty,
   generateHash,
+  getSize,
   ReleaseInfo,
 } from '../../utils'
 
@@ -165,7 +166,7 @@ export async function generateBrowserUpdateFiles() {
           '@URL': completeMarURL,
           '@hashFunction': 'sha512',
           '@hashValue': marHash,
-          '@size': (await stat(marPath)).size,
+          '@size': await getSize(marPath),
         },
       },
     },
src/utils/addons.ts (new file, 0 lines)
@@ -86,6 +86,7 @@ export interface AMOAddonInfo {
 
 export interface UrlAddonInfo {
   platform: 'url'
   version: string
+  id: string
   url: string
 }
@@ -173,3 +173,7 @@ export function ensureEmpty(path: string) {
 
   mkdirSync(path, { recursive: true })
 }
+
+export async function getSize(path: string): Promise<number> {
+  return (await stat(path)).size
+}