Mirror of https://github.com/zen-browser/desktop.git (synced 2025-07-08 01:19:59 +02:00)
Add autopep8 configuration and integrate into workflows for code formatting
parent 838569e386
commit 225ab67a50
9 changed files with 156 additions and 99 deletions
.github/workflows/code-linter.yml (vendored, 3 changes)
@@ -23,6 +23,9 @@ jobs:
         with:
           node-version-file: '.nvmrc'

+      - name: Setup autopep8
+        run: sudo apt install python3-autopep8
+
       - name: Setup pnpm
         run: npm install -g pnpm
@@ -17,8 +17,8 @@
     "update-ff": "python3 scripts/update_ff.py",
     "update-ff:raw": "surfer update",
     "update-newtab": "python3 scripts/update_newtab.py",
-    "pretty": "prettier . --write",
-    "lint": "npx prettier . --check",
+    "pretty": "prettier . --write && autopep8 -r scripts/ --in-place",
+    "lint": "npx prettier . --check && autopep8 --diff -r scripts/",
     "prepare": "husky"
   },
   "repository": {
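For local use, the updated "lint" script chains two checks: prettier over the whole tree, then autopep8 in diff mode over scripts/. A minimal Python sketch of the same check, assuming both tools are on PATH (the script name and the exit handling are illustrative, not part of the commit):

# lint_check.py - illustrative sketch only, not part of the commit.
import subprocess
import sys


def run_checks() -> int:
  """Run the same two checks as the npm "lint" script."""
  prettier = subprocess.run(["npx", "prettier", ".", "--check"])
  if prettier.returncode != 0:
    return prettier.returncode

  # autopep8 may exit 0 even when it would reformat files (unless --exit-code
  # is passed), so any printed diff is also treated as a failure here.
  result = subprocess.run(["autopep8", "--diff", "-r", "scripts/"],
                          capture_output=True, text=True)
  if result.returncode != 0 or result.stdout.strip():
    print(result.stdout)
    return 1
  return 0


if __name__ == "__main__":
  sys.exit(run_checks())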
pyproject.toml (new file, 6 lines)
@@ -0,0 +1,6 @@
+[tool.autopep8]
+max_line_length = 120
+ignore = "E501,W6" # or ["E501", "W6"]
+recursive = true
+aggressive = 3
+indent_size = 2
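autopep8 picks up this [tool.autopep8] table when run from the project root. The same options can also be exercised in-process; a minimal sketch, assuming autopep8 2.x's fix_code API (the helper name and the checked path are illustrative; recursive is a command-line option and is not passed here):

# check_format.py - illustrative sketch only, not part of the commit.
import autopep8

# Mirrors the [tool.autopep8] table above.
OPTIONS = {
    "max_line_length": 120,
    "ignore": ["E501", "W6"],
    "aggressive": 3,
    "indent_size": 2,
}


def is_clean(path: str) -> bool:
  """Return True if autopep8 would leave the file unchanged."""
  with open(path) as f:
    source = f.read()
  return autopep8.fix_code(source, options=OPTIONS) == source


if __name__ == "__main__":
  print(is_clean("scripts/update_ff.py"))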
requirements.txt (new file, 7 lines)
@@ -0,0 +1,7 @@
+autopep8==2.3.1
+click==8.1.8
+mypy-extensions==1.0.0
+packaging==24.2
+pathspec==0.12.1
+platformdirs==4.3.6
+pycodestyle==2.12.1
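These pins back the autopep8 CLI used by the lint and pretty scripts. A small optional sketch for confirming that the active environment matches the pins (package names and versions are taken from the file above):

# pin_check.py - illustrative sketch only, not part of the commit.
from importlib.metadata import version

PINS = {"autopep8": "2.3.1", "pycodestyle": "2.12.1"}

for package, pinned in PINS.items():
  installed = version(package)
  status = "ok" if installed == pinned else "mismatch"
  print(f"{package}: pinned {pinned}, installed {installed} ({status})")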
@@ -1,61 +1,76 @@
 import json
 import os
 import sys
 import requests
 from typing import Optional

-RESPONSE_FILENAME = 'rc-response.json'
-METADATA_FILENAME = 'surfer.json'
+RESPONSE_FILENAME = "rc-response.json"
+METADATA_FILENAME = "surfer.json"


 def get_current_version() -> Optional[str]:
-    """Retrieve the current version from the metadata file."""
-    try:
-        with open(METADATA_FILENAME) as f:
-            metadata = json.load(f)
-            return metadata['version']['candidate']
-    except (FileNotFoundError, json.JSONDecodeError) as e:
-        print(f"Error reading current version: {e}")
-        return None
+  """Retrieve the current version from the metadata file."""
+  try:
+    with open(METADATA_FILENAME) as f:
+      metadata = json.load(f)
+      return metadata["version"]["candidate"]
+  except (FileNotFoundError, json.JSONDecodeError) as e:
+    print(f"Error reading current version: {e}")
+    return None


 def get_rc_response() -> Optional[str]:
-    """Get the release candidate response from the response file."""
-    try:
-        with open(RESPONSE_FILENAME) as f:
-            data = json.load(f)
-            for tag_dict in data['tags']:
-                tag = tag_dict['tag']
-                if (tag.startswith('FIREFOX') and tag.endswith('_BUILD1')
-                        and 'ESR' not in tag and 'b' not in tag):
-                    return tag.replace('FIREFOX_', '').replace('_BUILD1', '').replace('_', '.')
-    except (FileNotFoundError, json.JSONDecodeError) as e:
-        print(f"Error reading RC response: {e}")
-        return None
+  """Get the release candidate response from the response file."""
+  try:
+    with open(RESPONSE_FILENAME) as f:
+      data = json.load(f)
+      for tag_dict in data["tags"]:
+        tag = tag_dict["tag"]
+        if (tag.startswith("FIREFOX") and tag.endswith("_BUILD1")
+                and "ESR" not in tag and "b" not in tag):
+          return (tag.replace("FIREFOX_", "").replace("_BUILD1",
+                                                      "").replace("_", "."))
+  except (FileNotFoundError, json.JSONDecodeError) as e:
+    print(f"Error reading RC response: {e}")
+    return None


 def get_pings() -> str:
-    """Build a string of Discord user IDs for mentions."""
-    ping_ids = os.getenv('DISCORD_PING_IDS', '')
-    return ' '.join(f"<@{ping.strip()}>" for ping in ping_ids.split(',') if ping.strip())
+  """Build a string of Discord user IDs for mentions."""
+  ping_ids = os.getenv("DISCORD_PING_IDS", "")
+  return " ".join(f"<@{ping.strip()}>" for ping in ping_ids.split(",")
+                  if ping.strip())


 def send_webhook(rc: str) -> None:
-    """Send a message to the Discord webhook."""
-    text = f"||{get_pings()}|| New Firefox RC version is available: **{rc}**"
-    webhook_url = os.getenv('DISCORD_WEBHOOK_URL')
-
-    if webhook_url:
-        message = {
-            "content": text,
-            "username": "Firefox RC Checker",
-        }
-        try:
-            response = requests.post(webhook_url, json=message)
-            response.raise_for_status()  # Raise an error for bad responses
-        except requests.RequestException as e:
-            print(f"Error sending webhook: {e}")
-    else:
-        print("Webhook URL not set.")
+  """Send a message to the Discord webhook."""
+  text = f"||{get_pings()}|| New Firefox RC version is available: **{rc}**"
+  webhook_url = os.getenv("DISCORD_WEBHOOK_URL")
+
+  if webhook_url:
+    message = {
+        "content": text,
+        "username": "Firefox RC Checker",
+    }
+    try:
+      response = requests.post(webhook_url, json=message)
+      response.raise_for_status()  # Raise an error for bad responses
+    except requests.RequestException as e:
+      print(f"Error sending webhook: {e}")
+  else:
+    print("Webhook URL not set.")


+def main() -> int:
+  current_version = get_current_version()
+  rc_response = get_rc_response()
+
+  if rc_response and rc_response != current_version:
+    send_webhook(rc_response)
+    return 0
+
+  print(f"Current version: {current_version}, RC version: {rc_response}")
+  return 1
+
+
 if __name__ == "__main__":
-    current_version = get_current_version()
-    rc_response = get_rc_response()
-
-    if rc_response and rc_response != current_version:
-        send_webhook(rc_response)
+  sys.exit(main())
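For illustration, get_rc_response() above expects rc-response.json to contain a "tags" list and turns a FIREFOX_*_BUILD1 tag name into a dotted version string. A small sketch of that transformation with made-up tag values (not data from the commit):

# Illustrative only: the tag values below are invented.
data = {"tags": [{"tag": "FIREFOX_140_0b3_BUILD1"},   # beta, rejected by the "b" check
                 {"tag": "FIREFOX_140_0_BUILD1"}]}    # release candidate, accepted

for tag_dict in data["tags"]:
  tag = tag_dict["tag"]
  if (tag.startswith("FIREFOX") and tag.endswith("_BUILD1")
          and "ESR" not in tag and "b" not in tag):
    print(tag.replace("FIREFOX_", "").replace("_BUILD1", "").replace("_", "."))
    # prints: 140.0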
@@ -5,7 +5,8 @@ import os

 FLATID = "io.github.zen_browser.zen"

-def get_sha256sum(filename):
+
+def get_sha256sum(filename):
   """Calculate the SHA256 checksum of a file."""
   sha256 = hashlib.sha256()
   try:

@@ -17,16 +18,16 @@ def get_sha256sum(filename):
     sys.exit(1)
   return sha256.hexdigest()


 def build_template(template, linux_sha256, flatpak_sha256, version):
   """Build the template with the provided hashes and version."""
   print(f"Building template with version {version}")
   print(f"\tLinux archive sha256: {linux_sha256}")
   print(f"\tFlatpak archive sha256: {flatpak_sha256}")
-  return template.format(
-      linux_sha256=linux_sha256,
-      flatpak_sha256=flatpak_sha256,
-      version=version
-  )
+  return template.format(linux_sha256=linux_sha256,
+                         flatpak_sha256=flatpak_sha256,
+                         version=version)


 def get_template(template_root):
   """Get the template content from the specified root directory."""

@@ -39,23 +40,32 @@
     print(f"Template {file} not found.")
     sys.exit(1)


 def main():
   """Main function to parse arguments and process files."""
-  parser = argparse.ArgumentParser(description='Prepare flatpak release')
-  parser.add_argument('--version', help='Version of the release', required=True)
-  parser.add_argument('--linux-archive', help='Linux archive', required=True)
-  parser.add_argument('--flatpak-archive', help='Flatpak archive', required=True)
-  parser.add_argument('--output', help='Output file', default=f"{FLATID}.yml")
-  parser.add_argument('--template-root', help='Template root', default="flatpak")
+  parser = argparse.ArgumentParser(description="Prepare flatpak release")
+  parser.add_argument("--version",
+                      help="Version of the release",
+                      required=True)
+  parser.add_argument("--linux-archive", help="Linux archive", required=True)
+  parser.add_argument("--flatpak-archive",
+                      help="Flatpak archive",
+                      required=True)
+  parser.add_argument("--output", help="Output file", default=f"{FLATID}.yml")
+  parser.add_argument("--template-root",
+                      help="Template root",
+                      default="flatpak")
   args = parser.parse_args()

   linux_sha256 = get_sha256sum(args.linux_archive)
   flatpak_sha256 = get_sha256sum(args.flatpak_archive)
-  template = build_template(get_template(args.template_root), linux_sha256, flatpak_sha256, args.version)
+  template = build_template(get_template(args.template_root), linux_sha256,
+                            flatpak_sha256, args.version)

   print(f"Writing output to {args.output}")
   with open(args.output, "w") as f:
     f.write(template)


 if __name__ == "__main__":
   main()
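build_template() simply fills three named placeholders in the template found under --template-root. A made-up illustration of that mechanism (the keys and values here are invented; the real flatpak manifest template is not part of this diff):

# Illustrative only: a stand-in template with the placeholders that
# build_template() fills; the real template lives under flatpak/.
TEMPLATE = (
    "app-id: io.github.zen_browser.zen\n"
    "version: {version}\n"
    "linux-archive-sha256: {linux_sha256}\n"
    "flatpak-archive-sha256: {flatpak_sha256}\n"
)

print(TEMPLATE.format(version="1.0.0",
                      linux_sha256="0" * 64,
                      flatpak_sha256="1" * 64))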
@@ -1,12 +1,14 @@
 import os
 import json

+
 def update_ff():
   """Runs the npm command to update the 'ff' component."""
   result = os.system("npm run update-ff:raw")
   if result != 0:
     raise RuntimeError("Failed to update 'ff' component.")

+
 def get_version_from_file(filename):
   """Retrieves the version from the specified JSON file."""
   try:

@@ -16,6 +18,7 @@ def get_version_from_file(filename):
   except (FileNotFoundError, json.JSONDecodeError) as e:
     raise RuntimeError(f"Error reading version from {filename}: {e}")

+
 def update_readme(last_version, new_version):
   """Updates the README.md file to reflect the new version."""
   try:

@@ -28,6 +31,7 @@ def update_readme(last_version, new_version):
   except FileNotFoundError as e:
     raise RuntimeError(f"README.md file not found: {e}")

+
 def main():
   """Main function to update versions and README."""
   try:

@@ -35,9 +39,11 @@ def main():
     update_ff()
     new_version = get_version_from_file("surfer.json")
     update_readme(last_version, new_version)
-    print(f"Updated version from {last_version} to {new_version} in README.md.")
+    print(
+        f"Updated version from {last_version} to {new_version} in README.md.")
   except Exception as e:
     print(f"An error occurred: {e}")

+
 if __name__ == "__main__":
   main()
@@ -6,35 +6,36 @@ import logging
 logging.basicConfig(level=logging.INFO)

 # Constants for paths
-NEW_TAB_DIR = './engine/browser/components/newtab'
-ENGINE_DIR = './engine'
-NPM_INSTALL_COMMANDS = [
-    "npm install",
-    "npm install meow@9.0.0"
-]
+NEW_TAB_DIR = "./engine/browser/components/newtab"
+ENGINE_DIR = "./engine"
+NPM_INSTALL_COMMANDS = ["npm install", "npm install meow@9.0.0"]
 BUNDLE_COMMAND = "npm run bundle --prefix=browser/components/newtab"


 def install_dependencies():
   """Install necessary npm packages for the newtab component."""
   for command in NPM_INSTALL_COMMANDS:
     logging.info(f"Running command: {command} in {NEW_TAB_DIR}")
     subprocess.run(command.split(), cwd=NEW_TAB_DIR, check=True)


 def bundle_newtab_components():
   """Bundle the newtab components."""
   logging.info(f"Bundling newtab components in {ENGINE_DIR}")
   subprocess.run(BUNDLE_COMMAND.split(), cwd=ENGINE_DIR, check=True)


 def update_newtab(init: bool = True):
   """Update the newtab components, optionally initializing dependencies."""
   try:
     if init:
       install_dependencies()

     bundle_newtab_components()
   except subprocess.CalledProcessError as e:
     logging.error(f"An error occurred: {e}")
     raise


 if __name__ == "__main__":
   update_newtab(init=False)
@@ -9,48 +9,57 @@ JAR_ENTRY_TEMPLATE = "\tcontent/browser/zen-avatars/{0}\t(content/zen-avatars/{0
 URL = "https://source.boringavatars.com/bauhaus/120/${}?colors={}"

 COLORS = {
-    "dark": ["DDDDDD", "5E9188", "3E5954", "253342", "232226"],
-    "light": ["9D9382", "FFC1B2", "FFDBC8", "FFF6C7", "DCD7C2"]
+  "dark": ["DDDDDD", "5E9188", "3E5954", "253342", "232226"],
+  "light": ["9D9382", "FFC1B2", "FFDBC8", "FFF6C7", "DCD7C2"],
 }


 def random_string(length):
-    return ''.join(random.choices("abcdefghijklmnopqrstuvwxyz", k=length))
+  return "".join(random.choices("abcdefghijklmnopqrstuvwxyz", k=length))


 def generate_list_names():
-    names = []
-    for i in range(1, 101):
-        names.append(random_string(random.randint(5, 10)))
-    return names
+  names = []
+  for i in range(1, 101):
+    names.append(random_string(random.randint(5, 10)))
+  return names


 def write_jar_file(jar_file):
-    with open("jar.inc.mn", "w") as f:
-        f.write(jar_file)
+  with open("jar.inc.mn", "w") as f:
+    f.write(jar_file)


 def fetch_all_avatars():
-    names = generate_list_names()
-    jar_file = ""
-    for theme in COLORS:
-        for [i, name] in enumerate(names):
-            url = URL.format(name, ",".join(COLORS[theme]))
-            response = requests.get(url)
-            with open(f"avatar-{i}-{theme}.svg", "w") as f:
-                f.write(response.text)
-            jar_file += JAR_ENTRY_TEMPLATE.format(f"avatar-{i}-{theme}.svg") + "\n"
-            print(f"SUCCESS: Fetched 'avatar-{i}-{theme}.svg' for name '{name}' with theme '{theme}'")
-    write_jar_file(jar_file)
+  names = generate_list_names()
+  jar_file = ""
+  for theme in COLORS:
+    for [i, name] in enumerate(names):
+      url = URL.format(name, ",".join(COLORS[theme]))
+      response = requests.get(url)
+      with open(f"avatar-{i}-{theme}.svg", "w") as f:
+        f.write(response.text)
+      jar_file += JAR_ENTRY_TEMPLATE.format(f"avatar-{i}-{theme}.svg") + "\n"
+      print(
+          f"SUCCESS: Fetched 'avatar-{i}-{theme}.svg' for name '{name}' with theme '{theme}'"
+      )
+  write_jar_file(jar_file)


 def clear_all_avatars():
-    for file in os.listdir():
-        if file.startswith("avatar-") and file.endswith(".svg"):
-            os.remove(file)
-            print(f"SUCCESS: Removed '{file}'")
+  for file in os.listdir():
+    if file.startswith("avatar-") and file.endswith(".svg"):
+      os.remove(file)
+      print(f"SUCCESS: Removed '{file}'")


 def main():
-    if not os.getcwd().endswith("zen-avatars"):
-        print("ERROR: Please run this script from the 'zen-avatars' directory")
-        return
-    clear_all_avatars()
-    fetch_all_avatars()
+  if not os.getcwd().endswith("zen-avatars"):
+    print("ERROR: Please run this script from the 'zen-avatars' directory")
+    return
+  clear_all_avatars()
+  fetch_all_avatars()


 if __name__ == "__main__":
-    main()
+  main()