initial commit

jingrow 2025-12-23 19:17:16 +08:00
commit e3fc0a705a
3206 changed files with 292337 additions and 0 deletions

6
.coveragerc Normal file
View File

@ -0,0 +1,6 @@
[report]
exclude_lines =
pragma: no cover
raise NotImplementedError
if TYPE_CHECKING:
if typing.TYPE_CHECKING:

716
.cspell.json Normal file
View File

@ -0,0 +1,716 @@
{
"version": "0.2",
"language": "en",
"allowCompoundWords": true,
"ignorePaths": [
"dashboard/node_modules",
"**/assets",
"*.json",
"**.jinja2",
"**.j2",
"**.service",
"**.yml",
"test_**",
"**.conf",
"requirements.txt",
"press/utils/country_timezone.py"
],
"words": [
"2.4.6",
"Aaiun",
"Ababa",
"activites",
"Adak",
"adblockers",
"Addis",
"aditya",
"Adminstrator",
"aescts",
"afero",
"Agejt",
"aggs",
"Ajkr",
"Akbary",
"Akts",
"Åland",
"Anadyr",
"Andhra",
"ansari",
"Aqtau",
"Aqtobe",
"Araguaina",
"Arunachal",
"Asmera",
"asmfmt",
"asname",
"asrc",
"ATEXT",
"athul",
"Atikokan",
"Atka",
"atleast",
"atotto",
"Atyrau",
"auid",
"awwzvf",
"aymanbagabas",
"backgound",
"Baja",
"Balamurali",
"Barthelemy",
"Barthélemy",
"Bator",
"behavior",
"behaviour",
"benbjohnson",
"BENTO",
"binlog",
"biosdevname",
"blkid",
"bofq",
"boto",
"Bouvet",
"bouy",
"buildx",
"Busingen",
"Cabo",
"CCONTENT",
"cellbuf",
"cellbug",
"CFWS",
"chdir",
"Chhattisgarh",
"Choibalsan",
"Chuuk",
"chzyer",
"cidata",
"cint",
"clamav",
"clas",
"cli",
"cloudimg",
"CMDLINE",
"CNAME",
"cnsistency",
"CODECOV",
"codespell",
"cofig",
"commitlint",
"Comod",
"COMPATBILITY",
"confs",
"Consolas",
"Containerised",
"coveragerc",
"cpath",
"cpcommerce",
"cpuid",
"cpus",
"creat",
"creds",
"Creston",
"CSpell: Files checked: 14, Issues found: 124 in 2 files.",
"Csvg",
"csvg",
"CTEXT",
"CTPBJ",
"Cuiaba",
"Cunha",
"cust",
"Dacca",
"Dadra",
"Danmarkshavn",
"Darkify",
"dateutil",
"davecgh",
"DAYOFMONTH",
"DAYOFWEEK",
"DAYOFYEAR",
"dbgsym",
"dboptimize",
"dbserver",
"dcbs",
"DCONTENT",
"ddeb",
"ddebs",
"dearmor",
"devscripts",
"devtmpfs",
"dffx",
"Dgzr",
"Dili",
"dmypy",
"DNOQOHHMYYI",
"dnsmasq",
"dnspython",
"dnsutils",
"doesnt",
"dont",
"DONTNEED",
"dpkg",
"dribbble",
"DSEes",
"DTEXT",
"duckdb",
"DUID",
"Dumont",
"EACCES",
"earlyoom",
"ecommerce",
"EDITMSG",
"Efate",
"efi",
"EHIKF",
"Eirunepe",
"elif",
"elts",
"emaill",
"Ensenada",
"EPERM",
"equivs",
"erikgeiser",
"erpdb",
"erpnext",
"erpnextcom",
"erpnextsmb",
"errgo",
"Eswatini",
"Eucla",
"euid",
"EVHT",
"execv",
"execve",
"exitst",
"Exlude",
"FADV",
"Fakaofo",
"faris",
"Faso",
"fchmod",
"fchmodat",
"fchown",
"fchownat",
"fcrestore",
"Fdvmq",
"FEFF",
"Fffphu",
"filippo",
"Fmbeo",
"Fpww",
"frappeclient",
"frappehr",
"Frappeio",
"frappeui",
"fremovexattr",
"fsetxattr",
"fstype",
"ftrace",
"ftruncate",
"Fung",
"FWUP",
"Fzqt",
"gcore",
"Gekx",
"genproto",
"getdate",
"getitimer",
"gget",
"ghaction",
"ghead",
"githubusercontent",
"glfw",
"glog",
"gmxxxxcom",
"gnueabi",
"GOARCH",
"goasm",
"goccy",
"godebug",
"gofork",
"goidentity",
"gokrb",
"goleak",
"gonum",
"gopkg",
"gotool",
"Gozu",
"Gqttikk",
"grequests",
"gshadow",
"GSSAPI",
"gstin",
"gstinhide",
"gstinshow",
"gtid",
"gunicorn",
"gxzc",
"hakanensari",
"Haryana",
"hase",
"Haveli",
"hdel",
"hdfs",
"hget",
"Himachal",
"honnef",
"hookpy",
"Hovd",
"hrms",
"hrtimers",
"hset",
"hsts",
"htpasswd",
"Hvyanc",
"ibdata",
"Ibhfb",
"ibtmp",
"iceber",
"ifaces",
"Ifalt",
"ifnames",
"ifnull",
"IGST",
"ikxn",
"ILIKE",
"imds",
"Incase",
"innodb",
"innoterra",
"inodes",
"inplace",
"interactjs",
"Inuvik",
"invs",
"iour",
"iowait",
"ipaddress",
"IPEYBICE",
"iputils",
"ipython",
"IRET",
"isatty",
"isin",
"isnotnull",
"istable",
"ITIMER",
"Jammu",
"jcmturner",
"jemalloc",
"Jharkhand",
"Jhuj",
"jmespath",
"JMWS",
"Jnsl",
"joomla",
"joxit",
"jscache",
"jsons",
"jstemmer",
"Jujuy",
"JZNG",
"Karnataka",
"kcontinue",
"kdhz",
"KGUJ",
"Khandyga",
"KHTML",
"Kiritimati",
"kisielk",
"Kitts",
"Kolkata",
"kontinue",
"Kralendijk",
"Kuala",
"kwarg",
"kwargs",
"Ladakh",
"Lakshadweep",
"Latrh",
"lchown",
"Leste",
"libc",
"libdevel",
"libharfbuzz",
"libpango",
"libpangocairo",
"libsm",
"libstdc",
"libx",
"libxcb",
"libxext",
"libxmuu",
"libxrender",
"Lindeman",
"llen",
"localds",
"logex",
"Longyearbyen",
"LOUAA",
"lpush",
"lqez",
"lrange",
"lremovexattr",
"lsetxattr",
"lucasb",
"Lumpur",
"luxon",
"Maarten",
"Madhya",
"MADKY",
"Mahe",
"makeprg",
"marcboeker",
"MARIADB",
"mariadbd",
"Marino",
"Marketpalce",
"mattn",
"Mayen",
"mbps",
"mccabe",
"Meghalaya",
"Menlo",
"mergify",
"Metlakatla",
"mhpd",
"Mhsc",
"Minh",
"missingok",
"Mizoram",
"mkisofs",
"Mmckchk",
"mname",
"momentjs",
"Moresby",
"moto",
"Mpesa",
"msgprint",
"msisdn",
"Mtay",
"muieblackcat",
"Murdo",
"myadmin",
"Mycp",
"myisam",
"mypma",
"mypy",
"mysqladmin",
"mysqld",
"mysqldb",
"Mywk",
"Nadu",
"Nagar",
"ncdu",
"nedded",
"NEFT",
"Nera",
"netcfg",
"NFKH",
"NGROK",
"nineth",
"Nipigon",
"nistp",
"njsproj",
"nocompress",
"nofail",
"NOFORK",
"noozm",
"NOPASSWD",
"Noronha",
"Norte",
"notifempty",
"notin",
"nqhxc",
"ntvs",
"Nuuk",
"nvme",
"Nxzjr",
"objx",
"Occurred",
"OCI",
"ocpu",
"ocpus",
"ocsp",
"Odisha",
"Ojinaga",
"Olgu",
"OLQY",
"onfail",
"oom",
"opasswd",
"OPENBLAS",
"opions",
"overriden",
"OWUVXXW",
"oxxk",
"Paasphrase",
"packagejsons",
"Pago",
"paise",
"Pangnirtung",
"paramiko",
"parentfield",
"parenttype",
"pborman",
"pckj",
"pckjs",
"Pedning",
"Pesa",
"pexpect",
"pfiles",
"pflag",
"Pfrw",
"pgrep",
"phpmyadmin",
"pids",
"Pjpw",
"pmadb",
"pmezard",
"Pmirojx",
"Pohnpei",
"popperjs",
"pppconfig",
"pppoeconf",
"pprof",
"Pradesh",
"primarys",
"prm",
"probability",
"proces",
"procs",
"promql",
"protoc",
"psync",
"ptype",
"Puducherry",
"Punta",
"Pushkarev",
"pycache",
"pycups",
"pyngrok",
"pypr",
"pyproject",
"pypt",
"pyspy",
"PYTHONUNBUFFERED",
"pytz",
"pyunit",
"Pziu",
"QCONTENT",
"Qostanay",
"Qrcode",
"qrcode",
"QTEXT",
"Qyzylorda",
"rcfile",
"rdata",
"rdatatype",
"recognise",
"redisearch",
"referer",
"Regs",
"Releas",
"removexattr",
"reqd",
"Rerunnability",
"rerunnable",
"Réunion",
"Rhxk",
"Rica",
"RIOHXQEHM",
"Rioja",
"rivo",
"rname",
"rnyq",
"rogpeppe",
"rootfs",
"rpush",
"rrset",
"Rsya",
"rtype",
"rutwikhdev",
"ruzy",
"saas",
"sadd",
"sahilm",
"Santo",
"saurabh",
"sbool",
"Scoresbysund",
"sda",
"sdext",
"sdf",
"sdg",
"sdist",
"sdomain",
"secho",
"Segoe",
"seperate",
"serializability",
"setxattr",
"shadrak",
"shuralyov",
"signup",
"SLXVDP",
"smembers",
"SNUBA",
"snuba",
"socketio",
"softirq",
"somes",
"sonner",
"spamd",
"splited",
"squashfs",
"Srednekolymsk",
"Starke",
"stdc",
"stime",
"stkpush",
"Storge",
"stretchr",
"stripnl",
"Strnm",
"supectl",
"supervisorctl",
"supervisord",
"swapuuid",
"SYMBOLICATOR",
"symbolicator",
"synchronise",
"Syowa",
"Syrus",
"sysrq",
"tanmoy",
"tanmoysrt",
"tanxxxxxxkar",
"Telangana",
"termenv",
"Thgcy",
"tidb",
"Tiraspol",
"Tkndys",
"tldextract",
"tmpfs",
"Tokelau",
"tomli",
"Tongatapu",
"TOOD",
"TOTP",
"totp",
"tqdm",
"Troso",
"TSZK",
"tupple",
"Tvyn",
"Twillio",
"udiff",
"Udxsrq",
"uefi",
"Uenf",
"Ujung",
"Ulaanbaatar",
"Ulan",
"unarchived",
"Unbilled",
"uncollectible",
"unfollow",
"unindex",
"unindexed",
"unindexing",
"uniseg",
"unlinkat",
"unparse",
"unpatch",
"unplugin",
"Unprovisioned",
"unscrub",
"unsuspended",
"Unsuspending",
"unsuspension",
"updadted",
"urandom",
"uring",
"Urville",
"USEDNS",
"Ushuaia",
"Uttar",
"Uttarakhand",
"Uzhgorod",
"vagrant",
"varkw",
"vasile",
"VBDHE",
"vcpu",
"vcpus",
"vda",
"Velho",
"venv",
"Vetur",
"vetur",
"Vevay",
"vfat",
"vimrc",
"virsh",
"virtualenv",
"Vite",
"vite",
"vitess",
"VMI",
"vmis",
"vnic",
"volid",
"vpus",
"vtprotobuf",
"vueuse",
"Vzzq",
"wazuh",
"weasyprint",
"webp",
"Winamac",
"witht",
"Wiue",
"wkhtmlto",
"WKHTMLTOPDF",
"wkhtmltox",
"Wpym",
"xampp",
"xauth",
"xcall",
"xerrors",
"xfonts",
"XHOMZ",
"xitongsys",
"xlink",
"Xpai",
"XPUT",
"Xrwmb",
"xvda",
"xvdf",
"xvdg",
"Xwgt",
"Xyrw",
"Xzmq",
"Yakutat",
"Yancowinna",
"Ynel",
"yxei",
"Yzuve",
"zeebo",
"zloirock",
"Zpyihv",
"Zvkq",
"segs",
"interner",
"sprintf",
"xyproto",
"vxeg",
"rzgre",
"Yekq",
"Rhiv",
"Kvsc",
"ZSTD",
"Wazuh",
"DBUS"
]
}

19
.editorconfig Normal file
View File

@ -0,0 +1,19 @@
# Root editor config file
root = true
# Common settings
[*]
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
charset = utf-8
# python, js indentation settings
[{*.py}]
indent_style = tab
indent_size = 4
[{*.js,*.vue}]
indent_style = tab
indent_size = 2

11
.git-blame-ignore-revs Normal file
View File

@ -0,0 +1,11 @@
# Regenerate fixtures
9db90c9a790ad8b74e8f476c846898f3450e5c6d
# Mess up Agent Job Type fixtures
b7d4540c32075cbf569d9c8e256a8ce9898c7115
# Fix Agent Job Type fixtures
0c88a71473a906c87c58c94cc11743f79711d240
# Generate DocType types
a965b98b90fadf438c5f0a22c5778896743a94e7

39
.github/helper/install.sh vendored Normal file
View File

@ -0,0 +1,39 @@
#!/bin/bash
set -e
cd ~ || exit
sudo apt update && sudo apt install -y redis-server libcups2-dev
pip install frappe-bench
bench init --skip-assets --python "$(which python)" ~/frappe-bench --frappe-path https://github.com/balamurali27/frappe --frappe-branch fc-ci
mysql --host 127.0.0.1 --port 3306 -u root -proot -e "SET GLOBAL character_set_server = 'utf8mb4'"
mysql --host 127.0.0.1 --port 3306 -u root -proot -e "SET GLOBAL collation_server = 'utf8mb4_unicode_ci'"
install_whktml() {
wget -O /tmp/wkhtmltox.tar.xz https://github.com/frappe/wkhtmltopdf/raw/master/wkhtmltox-0.12.3_linux-generic-amd64.tar.xz
tar -xf /tmp/wkhtmltox.tar.xz -C /tmp
sudo mv /tmp/wkhtmltox/bin/wkhtmltopdf /usr/local/bin/wkhtmltopdf
sudo chmod o+x /usr/local/bin/wkhtmltopdf
}
install_whktml &
cd ~/frappe-bench || exit
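# Comment out Procfile processes that are not needed in CI (watcher, scheduler, socketio)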
sed -i 's/watch:/# watch:/g' Procfile
sed -i 's/schedule:/# schedule:/g' Procfile
sed -i 's/socketio:/# socketio:/g' Procfile
sed -i 's/redis_socketio:/# redis_socketio:/g' Procfile
bench get-app press "${GITHUB_WORKSPACE}"
bench setup requirements --dev
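# Start bench in the background and keep its output in bench_start_logs.txt so CI can upload it later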
bench start &> bench_start_logs.txt &
CI=Yes bench build --app frappe &
bench new-site --db-root-password root --admin-password admin test_site
bench --site test_site install-app press
bench set-config -g server_script_enabled 1
bench set-config -g http_port 8000

29
.github/hooks/todo-warning.sh vendored Executable file
View File

@ -0,0 +1,29 @@
#!/usr/bin/env bash
ORANGE='\033[0;33m'
NC='\033[0m'
BOLD='\033[1m'
NORMAL='\033[0m'
echo $GIT_COMMIT
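# Print a warning (with a few lines of context) for each staged addition that matches the given pattern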
check_file() {
local file=$1
local match_pattern=$2
local file_changes_with_context=$(git diff -U999999999 -p --cached --color=always -- $file)
local matched_additions=$(echo "$file_changes_with_context" | grep -C4 $'^\e\\[32m\+.*'"$match_pattern")
if [ -n "$matched_additions" ]; then
echo -e "${ORANGE}[WARNING]${NC} ${BOLD}$file${NORMAL} contains some $match_pattern."
echo "$matched_additions"
echo -e "\n"
fi
}
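# Warn about TODO markers added in any staged file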
for file in `git diff --cached -p --name-status | cut -c3-`; do
check_file $file 'TODO'
done
exit

BIN
.github/images/press-cluster.png vendored Normal file

Binary file not shown. Size: 228 KiB

View File

@ -0,0 +1,90 @@
name: Auto PR from Master to Develop
on:
push:
branches:
- master
permissions:
contents: write
pull-requests: write
jobs:
create-pr:
runs-on: ubuntu-latest
if: |
!contains(github.event.head_commit.message, 'Merge pull request') &&
!contains(github.event.head_commit.message, 'Merge branch') &&
!startsWith(github.event.head_commit.message, 'Merge')
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
- name: Configure Git
run: |
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
- name: Prepare backport_master branch (Rebase)
run: |
git fetch origin develop
git fetch origin master
# Create/update backport_master from develop
git checkout -B backport_master origin/develop
# Rebase onto master to avoid merge commits
git rebase origin/master || {
echo "Rebase conflict detected, aborting."
git rebase --abort
exit 1
}
git push origin backport_master --force-with-lease
- name: Get commit details
run: |
git fetch origin develop
COMMITS=$(git log origin/develop..origin/backport_master --pretty=format:"- %h %s")
echo "$COMMITS" > commits.txt
- name: Check if PR already exists
id: check-pr
run: |
EXISTING_PR=$(gh pr list --head backport_master --base develop --state open --json number --jq '.[0].number // empty')
echo "existing_pr=$EXISTING_PR" >> $GITHUB_OUTPUT
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Create Pull Request
if: steps.check-pr.outputs.existing_pr == ''
run: |
COMMITS_LIST=$(cat commits.txt)
PR_DESCRIPTION="**Please perform \`Merge & Commit\` to preserve commit history**
$COMMITS_LIST"
gh pr create \
--title "chore: Sync Changes from Master to Develop" \
--body "$PR_DESCRIPTION" \
--head backport_master \
--base develop
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Update existing PR
if: steps.check-pr.outputs.existing_pr != ''
run: |
COMMITS_LIST=$(cat commits.txt)
PR_DESCRIPTION="**Please perform \`Merge & Commit\` to preserve commit history**
$COMMITS_LIST"
gh pr edit ${{ steps.check-pr.outputs.existing_pr }} \
--body "$PR_DESCRIPTION"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

322
.github/workflows/main.yaml vendored Normal file
View File

@ -0,0 +1,322 @@
name: Press Tests
on:
push:
branches:
- master
pull_request:
branches:
- '*'
workflow_dispatch:
env:
DB_NAME: test_frappe
DB_USER: test_frappe
DB_PASSWORD: test_frappe
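# Allow only one run per ref; a newer push cancels any in-progress run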
concurrency:
group: ${{ github.ref }}
cancel-in-progress: true
jobs:
format:
name: 'Lint and Format'
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Install dependencies
run: |
pip install -r dev-requirements.txt
- name: Get changed files
id: changed-python-files
uses: tj-actions/changed-files@v45
with:
files: |
**.py
- name: List all changed files
if: steps.changed-python-files.outputs.any_changed == 'true'
env:
ALL_CHANGED_FILES: ${{ steps.changed-python-files.outputs.all_changed_files }}
run: |
for file in ${ALL_CHANGED_FILES}; do
echo "$file was changed"
done
- name: Lint Check
if: steps.changed-python-files.outputs.any_changed == 'true'
env:
ALL_CHANGED_FILES: ${{ steps.changed-python-files.outputs.all_changed_files }}
run: |
ruff check --output-format github ${ALL_CHANGED_FILES}
- name: Format Check
if: steps.changed-python-files.outputs.any_changed == 'true'
env:
ALL_CHANGED_FILES: ${{ steps.changed-python-files.outputs.all_changed_files }}
run: |
ruff format --check ${ALL_CHANGED_FILES} --diff
- name: Typing Check
if: steps.changed-python-files.outputs.any_changed == 'true'
env:
ALL_CHANGED_FILES: ${{ steps.changed-python-files.outputs.all_changed_files }}
run: |
mypy ${ALL_CHANGED_FILES}
semgrep:
name: Semgrep Rules
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Run Semgrep rules
run: |
pip install semgrep==1.32
semgrep ci --config ./press-semgrep-rules.yml --config r/python.lang.correctness
ui-tests:
name: Client
runs-on: ubuntu-latest
env:
PRESS_ADMIN_USER_EMAIL: playwright@example.com
PRESS_ADMIN_USER_PASSWORD: playwright
BASE_URL: http://test_site:8000
services:
mariadb:
image: mariadb:10.6
env:
MYSQL_ROOT_PASSWORD: root
ports:
- 3306:3306
options: --health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3
steps:
- name: Remove unnecessary files
run: |
sudo rm -rf /usr/share/dotnet
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Clone
uses: actions/checkout@v4
- name: Check for valid Python & Merge Conflicts
run: |
python -m compileall -q -f "${GITHUB_WORKSPACE}"
if grep -lr --exclude-dir=node_modules "^<<<<<<< " "${GITHUB_WORKSPACE}"
then echo "Found merge conflicts"
exit 1
fi
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: 18
check-latest: true
- name: Cache pip
uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/*requirements.txt', '**/pyproject.toml', '**/setup.py', '**/setup.cfg') }}
restore-keys: |
${{ runner.os }}-pip-
${{ runner.os }}-
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: 'echo "::set-output name=dir::$(yarn cache dir)"'
- uses: actions/cache@v4
id: yarn-cache
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install
run: bash ${GITHUB_WORKSPACE}/.github/helper/install.sh
- name: Add to Hosts
run: |
echo "127.0.0.1 test_site" | sudo tee -a /etc/hosts
- name: Expose GitHub Runtime
uses: crazy-max/ghaction-github-runtime@v3
- name: Install Playwright browsers
working-directory: /home/runner/frappe-bench/apps/press/dashboard
run: npx playwright install chromium
- name: Setup Test Users
working-directory: /home/runner/frappe-bench/apps/press/dashboard
run: |
bench --site test_site execute press.press.doctype.team.test_team.create_test_press_admin_team \
--kwargs "{\"email\": \"${PRESS_ADMIN_USER_EMAIL}\", \"free_account\": True, \"skip_onboarding\": True}"
bench --site test_site set-password "$PRESS_ADMIN_USER_EMAIL" "$PRESS_ADMIN_USER_PASSWORD"
- name: Run Playwright Tests
working-directory: /home/runner/frappe-bench/apps/press/dashboard
run: npx playwright test --project=chromium
- name: Generate Coverage Report
working-directory: /home/runner/frappe-bench/apps/press/dashboard
run: npx nyc report --reporter=cobertura --reporter=text --report-dir=./coverage
- name: Upload Playwright Test report
if: always()
uses: actions/upload-artifact@v4
with:
name: playwright-report
path: /home/runner/frappe-bench/apps/press/dashboard/playwright-report
- name: Upload coverage data
uses: codecov/codecov-action@v5
with:
disable_search: true
files: /home/runner/frappe-bench/apps/press/dashboard/coverage/cobertura-coverage.xml
fail_ci_if_error: false
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}
name: press-dashboard
flags: dashboard
tests:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
container: [1, 2]
name: Server
services:
mariadb:
image: mariadb:10.6
env:
MYSQL_ROOT_PASSWORD: root
ports:
- 3306:3306
options: --health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3
steps:
- name: Remove unnecessary files
run: |
sudo rm -rf /usr/share/dotnet
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Clone
uses: actions/checkout@v4
- name: Check for valid Python & Merge Conflicts
run: |
python -m compileall -q -f "${GITHUB_WORKSPACE}"
if grep -lr --exclude-dir=node_modules "^<<<<<<< " "${GITHUB_WORKSPACE}"
then echo "Found merge conflicts"
exit 1
fi
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: 18
check-latest: true
- name: Cache pip
uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/*requirements.txt', '**/pyproject.toml', '**/setup.py', '**/setup.cfg') }}
restore-keys: |
${{ runner.os }}-pip-
${{ runner.os }}-
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: 'echo "::set-output name=dir::$(yarn cache dir)"'
- uses: actions/cache@v4
id: yarn-cache
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install
run: bash ${GITHUB_WORKSPACE}/.github/helper/install.sh
- name: Add to Hosts
run: |
echo "127.0.0.1 test_site" | sudo tee -a /etc/hosts
- name: Expose GitHub Runtime
uses: crazy-max/ghaction-github-runtime@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
install: true
- name: Run Tests
working-directory: /home/runner/frappe-bench
run: |
bench --site test_site set-config allow_tests true
bench --site test_site run-parallel-tests --app press --with-coverage --total-builds 2 --build-number ${{ matrix.container }}
env:
TYPE: server
COVERAGE_RCFILE: /home/runner/frappe-bench/apps/press/.coveragerc
- name: Upload coverage reports to Codecov
uses: actions/upload-artifact@v4
with:
name: coverage-${{ matrix.container }}
path: /home/runner/frappe-bench/sites/coverage.xml
if: always()
- name: Upload bench start logs
uses: actions/upload-artifact@v4
with:
name: bench-start-logs-${{ matrix.container }}
path: /home/runner/frappe-bench/bench_start_logs.txt
if: always()
coverage:
name: Coverage Wrap Up
if: always()
needs: tests
runs-on: ubuntu-latest
steps:
- name: Clone
uses: actions/checkout@v4
- name: Download artifacts
uses: actions/download-artifact@v4
- name: Upload coverage data
uses: codecov/codecov-action@v4
with:
fail_ci_if_error: false
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}

View File

@ -0,0 +1,73 @@
name: Validate PR Title
on:
pull_request:
types: [opened, edited, synchronize]
permissions:
pull-requests: write
contents: read
jobs:
lint:
name: 'Lint'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
- name: Install Commitlint and CSpell
run: npm install --save-dev @commitlint/{config-conventional,cli} cspell
- run: echo "${{ github.event.pull_request.title }}" > pr-title.txt
- name: Run Commitlint
id: commitlint
run: npx commitlint --edit pr-title.txt > commitlint_output.txt 2>&1
if: github.event.pull_request.user.login != 'mergify[bot]'
- name: Run CSpell
id: cspell
run: npx cspell --config .cspell.json pr-title.txt > cspell_output.txt 2>&1
if: github.event.pull_request.user.login != 'mergify[bot]'
- name: Delete Old Bot Comments
if: always()
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Fetch all comments on the PR
COMMENTS=$(gh api repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments --jq '.[] | select(.user.login == "github-actions[bot]") | .id')
# Delete comments authored by the bot
for COMMENT_ID in $COMMENTS; do
gh api repos/${{ github.repository }}/issues/comments/$COMMENT_ID -X DELETE
done
- name: Post PR Comment
if: github.event.pull_request.user.login != 'mergify[bot]'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Initialize status messages
STATUS_COMMITLINT="PASSED"
STATUS_CSPELL="PASSED"
COMMITLINT_OUTPUT=$(cat commitlint_output.txt)
CSPELL_OUTPUT=$(cat cspell_output.txt)
COMMENT_BODY="### Found Issues In PR Title\n"
if [ "${{ steps.commitlint.outcome }}" == "failure" ]; then
STATUS_COMMITLINT="FAILED"
fi
if [ "${{ steps.cspell.outcome }}" == "failure" ]; then
STATUS_CSPELL="FAILED"
fi
if [ "$STATUS_COMMITLINT" == "FAILED" ]; then
COMMENT_BODY+="**❌ Conventional Commit Format**\n"
COMMENT_BODY+="\n\`\`\`\n$COMMITLINT_OUTPUT\n\`\`\`\n"
fi
if [ "$STATUS_CSPELL" == "FAILED" ]; then
COMMENT_BODY+="**❌ Spelling Error**\n"
COMMENT_BODY+="\n\`\`\`\n$CSPELL_OUTPUT\n\`\`\`\n"
COMMENT_BODY+="\n> If you believe the spelling error is a false positive, please add the word in **cspell.json** file.\n"
fi
if [ "$STATUS_COMMITLINT" == "FAILED" ] || [ "$STATUS_CSPELL" == "FAILED" ]; then
# Post the comment
echo -e "$COMMENT_BODY" | gh pr comment ${{ github.event.pull_request.number }} --body-file -
fi

170
.gitignore vendored Normal file
View File

@ -0,0 +1,170 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
test-results
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# Added by frappe
.DS_Store
*.pyc
*.egg-info
*.swp
tags
node_modules
press/docs/current
press/public/dashboard
press/www/dashboard.html
press/www/dashboard-old.html
press/public/css/email.css
press/public/css/saas-next.css
dashboard/tailwind.theme.json
dashboard/components.d.ts
# Session file
libs/fc_cli/session.json
# Backbone artefacts
backbone/packer/builds/
backbone/packer/scratch/
backbone/packer/images/
backbone/packer/cloud-init.img
backbone/packer/user-data
backbone/packer/meta-data
backbone/packer/cloud-init-scaleway.img
# marketplace
press/public/css/marketplace.css
press/public/css/marketplace-next.css
# Vim
.vim
.nvimrc
# IDE
.idea
.vscode

42
.mergify.yml Normal file
View File

@ -0,0 +1,42 @@
pull_request_rules:
- name: Auto-close PRs on stable branch
conditions:
- and:
- and:
- author!=frappe-pr-bot
- author!=mergify[bot]
- author!=github-actions
- or:
- base=master
actions:
comment:
message: |
@{{author}}, thanks for the contribution, but we do not accept pull requests on the master branch. Please close this PR and raise it against the develop branch.
- name: backport to develop
conditions:
- label="backport-develop"
actions:
backport:
branches:
- develop
assignees:
- "{{ author }}"
- name: backport to master
conditions:
- label="backport-master"
actions:
backport:
branches:
- master
assignees:
- "{{ author }}"
- name: Label PRs targeting develop with 'backport-master'
conditions:
- base=develop
actions:
label:
add:
- backport-master

89
.pre-commit-config.yaml Normal file
View File

@ -0,0 +1,89 @@
exclude: 'node_modules|.git'
default_stages: [pre-commit]
fail_fast: false
repos:
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v4.0.0-alpha.8
hooks:
- id: prettier
types_or: [javascript, vue]
# Ignore any files that might contain jinja / bundles
exclude: |
(?x)^(
press/public/dist/.*|
.*node_modules.*|
.*boilerplate.*|
press/www/website_script.js|
press/templates/includes/.*|
press/public/js/.*min.js
)$
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0
hooks:
- id: debug-statements
- id: trailing-whitespace
files: 'press.*'
exclude: '.*json$|.*txt$|.*csv|.*md|.*svg'
- id: check-merge-conflict
- id: check-ast
- id: check-json
- id: check-toml
- id: check-yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.13.1
hooks:
- id: ruff
args: [--fix]
- id: ruff-format
- repo: local
hooks:
- id: commitlint
name: check commit message format
entry: npx commitlint --edit .git/COMMIT_EDITMSG
language: system
stages: [commit-msg]
always_run: true
- id: cspell-commit-msg
name: check commit message spelling
entry: npx cspell --config .cspell.json .git/COMMIT_EDITMSG
language: system
stages: [commit-msg]
always_run: true
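# Spell-check only the files staged for this commit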
- id: cspell-modified-files
name: check spelling of files
entry: sh -c "npx cspell --no-must-find-files --config .cspell.json `git diff --cached -p --name-status | cut -c3- | tr '\n' ' '`"
language: system
stages: [pre-commit]
- id: todo-warning
name: check todos
entry: .github/hooks/todo-warning.sh
language: script
stages: [pre-commit]
verbose: true
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.18.2
hooks:
- id: mypy
args:
[
--ignore-missing-imports,
--follow-imports=skip,
--exclude-gitignore,
--no-warn-no-return,
--disable-error-code=annotation-unchecked,
]
exclude: ^tests/|test_.*\.py$ # Exclude the 'tests/' directory and files starting with 'test_'
additional_dependencies:
[
types-requests<2.32,
types-pytz~=2025.2,
types-python-dateutil~=2.9.0,
]

5
.prettierrc.json Normal file
View File

@ -0,0 +1,5 @@
{
"useTabs": true,
"singleQuote": true,
"tabWidth": 2
}

16
.semgrepignore Normal file
View File

@ -0,0 +1,16 @@
# Common large paths
node_modules/
build/
dist/
vendor/
.env/
.venv/
.tox/
*.min.js
.npm/
# Semgrep rules folder
.semgrep
# Semgrep-action log folder
.semgrep_logs/

25
CODEOWNERS Normal file
View File

@ -0,0 +1,25 @@
# Each line is a file pattern followed by one or more owners
# These owners will be the default owners for everything in
# the repo, unless a later match takes precedence.
backbone/ @adityahase
ssh* @adityahase
nginx.conf @adityahase
*server @adityahase
playbooks/ @adityahase @balamurali27
site* @balamurali27
team/ @shadrak98
invoice/ @shadrak98
stripe* @shadrak98
razorpay* @shadrak98
subscription/ @shadrak98
dashboard/ @regdocs
saas @rutwikhdev
deploy* @18alantom
press/Dockerfile @18alantom

95
README.md Normal file
View File

@ -0,0 +1,95 @@
<div align="center" markdown="1">
<img src="https://frappe.io/files/Group%202%20(1).png" alt="Press logo" width="80"/>
<h1>Press</h1>
**Full Service Cloud Hosting For The Frappe Stack - Powers Frappe Cloud**
[![codecov](https://codecov.io/gh/frappe/press/branch/master/graph/badge.svg?token=0puvH0jUx9)](https://codecov.io/gh/frappe/press)
[![unittests](https://github.com/frappe/press/actions/workflows/main.yaml/badge.svg)](https://github.com/frappe/press/actions/workflows/main.yaml)
</div>
<div align="center">
<img width="889" alt="Managed press" src="https://github.com/user-attachments/assets/2675e828-d5ed-4527-a038-7742a5cfa3db" />
</div>
<br />
<div align="center">
<a href="https://frappe.io/press">Website</a>
-
<a href="https://docs.frappe.io/cloud/">Documentation</a>
</div>
## Press
Press is a 100% open-source cloud hosting platform for the Frappe stack.
### Motivation
We originally hosted our customer sites on an internal cloud platform called "Central," designed to automate creating and hosting sites when customers signed up on our website. Central was primarily built to host ERPNext, our flagship product. However, as our customers' needs evolved, they began requesting the ability to host custom applications, a feature that was not a priority in Central.
Additionally, customers lacked full control over their servers—no SSH access, no ability to manage updates, and limited flexibility in interacting with their environment. This led us to launch Frappe Cloud, a self-serve cloud platform that empowers our customers with complete control over their hosting experience.
### Key Features
- **Multitenancy Made Easy**: Press simplifies multi-tenancy by enabling multiple sites on a single platform, each with its own app versions, allowing independent updates with minimal downtime, even for large sites.
- **Dashboard**: The dashboard provides a centralized interface to manage apps, servers, sites, billing, backups, and updates, offering real-time insights and streamlined control of complex operations.
- **Permissions**: Granular access controls let team owners manage roles and resources efficiently, ensuring users have access only to relevant information and actions for their roles.
- **Simplified Management**: Press streamlines site management with automated backups, real-time monitoring, role-based access, and easy scaling, making it ideal for growing Frappe environments.
- **Billing**: Automated billing supports daily or monthly subscriptions, flexible payment methods, wallet credits, and ERP integration, simplifying customer invoicing and payments.
- **Marketplace**: The marketplace allows developers to list apps with flexible pricing models, ensures compatibility checks, and provides a streamlined system for sales and payouts.
<details>
<summary>Screenshots</summary>
![Dashboard](https://github.com/user-attachments/assets/1904fa3e-39aa-4151-8276-d3cc622ed582)
![Permissions](https://github.com/user-attachments/assets/60da6b5e-8f48-4483-99cf-67886ccc8bd6)
![Bench Group Update](https://github.com/user-attachments/assets/2be6b0ee-084d-4949-8d13-218b5a218d3d)
![Marketplace](https://github.com/user-attachments/assets/2f325737-7929-485d-a670-549f986fd07e)
</details>
### Under the Hood
- [**Frappe Framework**](https://github.com/frappe/frappe): A full-stack web application framework written in Python and Javascript. The framework provides a robust foundation for building web applications, including a database abstraction layer, user authentication, and a REST API.
- [**Frappe UI**](https://github.com/frappe/frappe-ui): A Vue-based UI library, to provide a modern user interface. The Frappe UI library provides a variety of components that can be used to build single-page applications on top of the Frappe Framework.
- [**Agent**](https://github.com/frappe/agent): A Flask app designed to work alongside Press. It provides a CLI interface for Press to communicate with the sites and benches.
- [**Docker**](https://www.docker.com): An open-source platform that enables developers to build, package, and deploy applications in lightweight, portable containers.
- [**Ansible**](https://www.ansible.com): An open-source IT automation tool that simplifies the management, configuration, and deployment of systems and applications.
## Setup
To self-host Press or to set it up locally, follow the steps in the [Local Development Environment Setup Guide](https://docs.frappe.io/cloud/local-fc-setup) or watch [this YouTube video](https://www.youtube.com/watch?v=Xb9QHnUrIEk).
### Pre-commit
There's a [pre-commit](https://pre-commit.com/) hook included in the repo. You can set it up by doing
```bash
pip install pre-commit
pre-commit install
```
## Learn and connect
- [Telegram Public Group](https://t.me/frappecloud)
- [Discuss Forum](https://discuss.frappe.io/c/frappe-cloud/77)
- [Documentation](https://docs.frappe.io/cloud)
<br/>
<br/>
<div align="center" style="padding-top: 0.75rem;">
<a href="https://frappe.io" target="_blank">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://frappe.io/files/Frappe-white.png">
<img src="https://frappe.io/files/Frappe-black.png" alt="Frappe Technologies" height="28"/>
</picture>
</a>
</div>

View File

@ -0,0 +1,32 @@
# https://github.com/devgeniem/RediSearch/blob/master/Dockerfile
ARG REDIS_VER=6.2
ARG REDISEARCH_VER=v2.4.6
ARG RUST_VER=1.60.0
FROM rust:${RUST_VER} AS builder
ARG REDISEARCH_VER
RUN apt clean && apt -y update && apt -y install --no-install-recommends \
clang && rm -rf /var/lib/apt/lists/*
WORKDIR /
RUN git clone --recursive --depth 1 --branch ${REDISEARCH_VER} https://github.com/RediSearch/RediSearch.git
WORKDIR /RediSearch
RUN make setup
RUN make build
# Run module in official Redis
FROM redis:${REDIS_VER}
WORKDIR /data
RUN mkdir -p /usr/lib/redis/modules
COPY --from=builder /RediSearch/bin/linux-arm64v8-release/search/redisearch.so /usr/lib/redis/modules
EXPOSE 6379
CMD ["redis-server", "--loadmodule", "/usr/lib/redis/modules/redisearch.so"]

7
backbone/README.md Normal file
View File

@ -0,0 +1,7 @@
# Backbone
> Note: Not to be confused with the scrapped project **Frappe Backbone**
## Installation
Automatically installed with Press

3
backbone/__init__.py Normal file
View File

@ -0,0 +1,3 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt

55
backbone/cli.py Normal file
View File

@ -0,0 +1,55 @@
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
import click
from backbone.hypervisor import Hypervisor, Shell
from backbone.tests import run_tests
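# CLI entry point: a `hypervisor` group (setup/build/up/ssh) plus a standalone `tests` command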
@click.group()
def cli():
pass
@cli.group()
def hypervisor():
pass
@hypervisor.command()
def setup():
shell = Shell()
hypervisor = Hypervisor(shell=shell)
hypervisor.setup()
@hypervisor.command()
@click.option("--size", default=16384, type=int)
@click.option("--scaleway", is_flag=True)
def build(size, scaleway):
shell = Shell()
hypervisor = Hypervisor(shell=shell)
if scaleway:
hypervisor.build_scaleway(size=size)
else:
hypervisor.build(size=size)
@hypervisor.command()
def up():
shell = Shell()
hypervisor = Hypervisor(shell=shell)
hypervisor.up()
@hypervisor.command()
@click.option("-c", "--command")
def ssh(command):
shell = Shell()
hypervisor = Hypervisor(shell=shell)
hypervisor.ssh(command=command)
@cli.command()
def tests():
run_tests()

118
backbone/hypervisor.py Normal file
View File

@ -0,0 +1,118 @@
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
import platform
import subprocess
from pathlib import Path
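# Builds VM images with Packer and cloud-init, and manages local VMs with Vagrant/libvirt, shelling out for each step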
class Hypervisor:
def __init__(self, shell=None):
self.shell = shell
def setup(self):
system = platform.system()
if system == "Linux":
self.preinstall()
self.install()
self.verify()
elif system == "Darwin":
self.verify_mac()
def build(self, size):
system = platform.system()
if system == "Linux":
self.build_cloud_init_linux()
elif system == "Darwin":
self.build_cloud_init_mac()
self.build_packer("backbone", size=size)
def build_cloud_init_linux(self):
cloud_init_yml = str(Path(__file__).parent.joinpath("packer", "cloud-init.yml"))
cloud_init_image = str(Path(__file__).parent.joinpath("packer", "cloud-init.img"))
self.shell.execute(f"cloud-localds {cloud_init_image} {cloud_init_yml}")
def build_cloud_init_mac(self):
# cloud-localds isn't available on macOS.
# So we do what it does ourselves
# user-data is the same as cloud-init.yml
# https://github.com/canonical/cloud-utils/blob/49e5dd7849ee3c662f3db35e857148d02e72694b/bin/cloud-localds#L168-L187
cloud_init_yml = str(Path(__file__).parent.joinpath("packer", "cloud-init.yml"))
user_data = str(Path(__file__).parent.joinpath("packer", "user-data"))
self.shell.execute(f"cp {cloud_init_yml} {user_data}")
# meta-data has some inconsequential values
# but the file is needed
meta_data = str(Path(__file__).parent.joinpath("packer", "meta-data"))
self.shell.execute(f"touch {meta_data}")
cloud_init_image = str(Path(__file__).parent.joinpath("packer", "cloud-init.img"))
# Reference: https://github.com/canonical/cloud-utils/blob/49e5dd7849ee3c662f3db35e857148d02e72694b/bin/cloud-localds#L235-L237
self.shell.execute(
f"mkisofs -joliet -rock -volid cidata -output {cloud_init_image} {user_data} {meta_data}"
)
def build_packer(self, template, size):
packer_template = str(Path(__file__).parent.joinpath("packer", f"{template}.json"))
packer = self.shell.execute(f"packer build -var 'disk_size={size}' {packer_template}")
if packer.returncode:
raise Exception("Build Failed")
box = str(Path(__file__).parent.joinpath("packer", "builds", f"{template}.box"))
add = self.shell.execute(f"vagrant box add {box} --name {template} --force")
if add.returncode:
raise Exception(f"Cannot add box {box}")
def build_scaleway(self, size):
self.build_cloud_init_scaleway()
self.build_packer("scaleway", size=size)
def build_cloud_init_scaleway(self):
cloud_init_yml = str(Path(__file__).parent.joinpath("packer", "cloud-init-scaleway.yml"))
cloud_init_image = str(Path(__file__).parent.joinpath("packer", "cloud-init-scaleway.img"))
self.shell.execute(f"cloud-localds {cloud_init_image} {cloud_init_yml}")
def up(self):
vagrant = self.shell.execute("vagrant init backbone")
vagrant = self.shell.execute("vagrant up --provider=libvirt")
if vagrant.returncode:
raise Exception("Cannot start hypervisor")
def ssh(self, command=None):
if command:
vagrant = self.shell.execute(f'vagrant ssh -c "{command}"')
else:
vagrant = self.shell.execute("vagrant ssh")
if vagrant.returncode:
raise Exception("Cannot ssh")
def preinstall(self):
kvm_ok = self.shell.execute("kvm-ok")
if kvm_ok.returncode:
raise Exception("Cannot use KVM")
def install(self):
kvm_install = self.shell.execute("sudo apt install qemu-kvm")
if kvm_install.returncode:
raise Exception("Cannot install KVM")
def verify(self):
kvm_connect = self.shell.execute("virsh list --all")
if kvm_connect.returncode:
raise Exception("Cannot connect to KVM")
def verify_mac(self):
kvm_connect = self.shell.execute("virsh list --all")
if kvm_connect.returncode:
raise Exception("Cannot connect to KVM")
class Shell:
def __init__(self, directory=None):
self.directory = directory
def execute(self, command, directory=None):
directory = directory or self.directory
return subprocess.run(
command, check=False, stderr=subprocess.STDOUT, cwd=directory, shell=True, text=True
)

View File

@ -0,0 +1,56 @@
{
"builders": [
{
"boot_wait": "10s",
"cpus": "2",
"disk_image": true,
"disk_size": "{{user `disk_size`}}",
"iso_checksum": "1bf86f40534c7c4c5491bbc8064bf1b0764da8c88d5a12edce0f442bc3055784",
"iso_urls": [
"{{template_dir}}/images/ubuntu-20.04-server-cloudimg-amd64.img",
"{{template_dir}}/images/79f46c38b9e000a66d0edecf3222e2371fccd8a1.img",
"https://cloud-images.ubuntu.com/releases/focal/release-20221213/ubuntu-20.04-server-cloudimg-amd64.img"
],
"iso_target_path": "{{template_dir}}/images",
"iso_target_extension": "img",
"memory": "4096",
"output_directory": "{{template_dir}}/scratch",
"headless": true,
"qemuargs": [
[
"-cdrom",
"{{template_dir}}/cloud-init.img"
]
],
"shutdown_command": "echo 'vagrant' | sudo -S shutdown -P now",
"ssh_password": "vagrant",
"ssh_username": "vagrant",
"type": "qemu",
"use_backing_file": false,
"vm_name": "backbone"
}
],
"post-processors": [
{
"output": "{{template_dir}}/builds/backbone.box",
"type": "vagrant"
}
],
"provisioners": [
{
"execute_command": "echo 'vagrant' | {{.Vars}} sudo -S -E sh -eux '{{.Path}}'",
"expect_disconnect": true,
"scripts": [
"{{template_dir}}/scripts/sshd.sh",
"{{template_dir}}/scripts/networking.sh",
"{{template_dir}}/scripts/update.sh",
"{{template_dir}}/scripts/cleanup.sh",
"{{template_dir}}/scripts/minimize.sh"
],
"type": "shell"
}
],
"variables": {
"disk_size": "16384"
}
}

View File

@ -0,0 +1,27 @@
#cloud-config
ssh_pwauth: true
users:
- name: root
shell: /usr/bin/bash
- name: frappe
gecos: Frappe
groups: sudo
lock_passwd: false
passwd: $6$rounds=4096$GytYXpLxIgl5SZ$C3zfa5zfD66lfm/TEgtlAVYbl3IjEK9ZAND4qnI7fXGWGhqUFl7m2DD25VjimMfqH3SepUBTUwuyiubwpTUtc/
ssh_authorized_keys:
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDB3zVjTzHQSEHQG7OD3bYi7V1xk+PCwko0W3+d1fSUvSDCxSaMKtR31+CfMKmjnvoHubOHYI9wvLpx6KdZUl2uOzKnoLazi/FCGD+m75PS4lraNU6S/B62OQk0xaClDNYBKC3H3rdXCwTU4aWflWLcfc0bmffFDTDZBJa4ySy9ne9FomGYsaMMdYtt2GNwqOOAkhzI96RFz3d4/HvHDqAeR1zv5hdqpoRL49H+3PYHIpu3rz+oMGIrN/ZM7EhvXP3yCgBMIYDTpihbv0+KTJx9rQmGNdLObM+M3HHq2C4/Xj0yAd2xQYBSr/orUyJKeGB367k72M2NADT5EzPr99AV aditya@aditya
shell: /usr/bin/bash
uid: "1000"
- name: vagrant
gecos: Vagrant
lock_passwd: false
passwd: $6$rounds=4096$GytYXpLxIgl5SZ$C3zfa5zfD66lfm/TEgtlAVYbl3IjEK9ZAND4qnI7fXGWGhqUFl7m2DD25VjimMfqH3SepUBTUwuyiubwpTUtc/
shell: /usr/bin/bash
ssh_authorized_keys:
- ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA6NF8iallvQVp22WDkTkyrtvp9eWW6A8YVr+kz4TjGYe7gHzIw+niNltGEFHzD8+v1I2YJ6oXevct1YeS0o9HZyN1Q9qgCgzUFtdOKLv6IedplqoPkcmF0aYet2PkEDo3MlTBckFXPITAMzF8dJSIFo9D8HfdOV0IAdx4O7PtixWKn5y2hMNG0zQPyUecp4pzC6kivAIhyfHilFR61RGL+GPXQ2MWZWFYbAGjyiYJnAmCP3NOTd0jMZEnDkbUvxhMmBYSdETk1rRgm+R4LOzFUGaHqHDLKLX+FIPKcF96hrucXzcWyLbIbEgE98OHlnVYCzRdK8jlqm8tehUc9c9WhQ== vagrant insecure public key
sudo: ALL=(ALL) NOPASSWD:ALL
uid: "2000"

View File

@ -0,0 +1,19 @@
#cloud-config
ssh_pwauth: true
users:
- name: root
shell: /usr/bin/bash
ssh_authorized_keys:
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDB3zVjTzHQSEHQG7OD3bYi7V1xk+PCwko0W3+d1fSUvSDCxSaMKtR31+CfMKmjnvoHubOHYI9wvLpx6KdZUl2uOzKnoLazi/FCGD+m75PS4lraNU6S/B62OQk0xaClDNYBKC3H3rdXCwTU4aWflWLcfc0bmffFDTDZBJa4ySy9ne9FomGYsaMMdYtt2GNwqOOAkhzI96RFz3d4/HvHDqAeR1zv5hdqpoRL49H+3PYHIpu3rz+oMGIrN/ZM7EhvXP3yCgBMIYDTpihbv0+KTJx9rQmGNdLObM+M3HHq2C4/Xj0yAd2xQYBSr/orUyJKeGB367k72M2NADT5EzPr99AV aditya@aditya
- name: vagrant
gecos: Vagrant
lock_passwd: false
passwd: $6$rounds=4096$GytYXpLxIgl5SZ$C3zfa5zfD66lfm/TEgtlAVYbl3IjEK9ZAND4qnI7fXGWGhqUFl7m2DD25VjimMfqH3SepUBTUwuyiubwpTUtc/
shell: /usr/bin/bash
ssh_authorized_keys:
- ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA6NF8iallvQVp22WDkTkyrtvp9eWW6A8YVr+kz4TjGYe7gHzIw+niNltGEFHzD8+v1I2YJ6oXevct1YeS0o9HZyN1Q9qgCgzUFtdOKLv6IedplqoPkcmF0aYet2PkEDo3MlTBckFXPITAMzF8dJSIFo9D8HfdOV0IAdx4O7PtixWKn5y2hMNG0zQPyUecp4pzC6kivAIhyfHilFR61RGL+GPXQ2MWZWFYbAGjyiYJnAmCP3NOTd0jMZEnDkbUvxhMmBYSdETk1rRgm+R4LOzFUGaHqHDLKLX+FIPKcF96hrucXzcWyLbIbEgE98OHlnVYCzRdK8jlqm8tehUc9c9WhQ== vagrant insecure public key
sudo: ALL=(ALL) NOPASSWD:ALL
uid: "2000"

View File

@ -0,0 +1,57 @@
{
"builders": [
{
"boot_wait": "10s",
"cpus": "2",
"disk_image": true,
"disk_size": "{{user `disk_size`}}",
"iso_checksum": "36403f9562949545e2a6c38d4b840008acae674e20b67a67f4facba610b82aec",
"iso_urls": [
"{{template_dir}}/images/ubuntu-20.04-server-cloudimg-amd64.img",
"{{template_dir}}/images/02b24c4cf15cb4f576c262aa4efa6bca3c64c620.img",
"https://cloud-images.ubuntu.com/releases/focal/release-20201210/ubuntu-20.04-server-cloudimg-amd64.img"
],
"iso_target_path": "{{template_dir}}/images",
"iso_target_extension": "img",
"memory": "4096",
"output_directory": "{{template_dir}}/scratch",
"headless": true,
"qemuargs": [
[
"-fda",
"{{template_dir}}/cloud-init-scaleway.img"
]
],
"shutdown_command": "echo 'vagrant' | sudo -S shutdown -P now",
"ssh_password": "vagrant",
"ssh_username": "vagrant",
"type": "qemu",
"use_backing_file": false,
"vm_name": "scaleway"
}
],
"post-processors": [
{
"output": "{{template_dir}}/builds/scaleway.box",
"type": "vagrant"
}
],
"provisioners": [
{
"execute_command": "echo 'vagrant' | {{.Vars}} sudo -S -E sh -eux '{{.Path}}'",
"expect_disconnect": true,
"scripts": [
"{{template_dir}}/scripts/sshd.sh",
"{{template_dir}}/scripts/scaleway-sshd.sh",
"{{template_dir}}/scripts/networking.sh",
"{{template_dir}}/scripts/update.sh",
"{{template_dir}}/scripts/cleanup.sh",
"{{template_dir}}/scripts/minimize.sh"
],
"type": "shell"
}
],
"variables": {
"disk_size": "16384"
}
}

View File

@ -0,0 +1,113 @@
#!/bin/sh -eux
export DEBIAN_FRONTEND=noninteractive
# Remove open-vm-tools
apt-get -y purge open-vm-tools
# Remove git and vim
apt-get -y purge git vim-common
# Remove snapd
apt-get -y purge snapd
rm -rf /var/cache/snapd/
rm -rf /snap
# Remove cloud init
apt-get -y purge cloud-init
rm -rf /etc/cloud/
rm -rf /var/lib/cloud/
# Delete all Linux headers
dpkg --list \
| awk '{ print $2 }' \
| grep 'linux-headers' \
| xargs apt-get -y purge
# Remove specific Linux kernels, such as linux-image-3.11.0-15-generic but
# keeps the current kernel and does not touch the virtual packages,
# e.g. 'linux-image-generic', etc.
dpkg --list \
| awk '{ print $2 }' \
| grep 'linux-image-.*-generic' \
| grep -v `uname -r` \
| xargs apt-get -y purge
# Delete Linux source
dpkg --list \
| awk '{ print $2 }' \
| grep linux-source \
| xargs apt-get -y purge
# Delete development packages
dpkg --list \
| awk '{ print $2 }' \
| grep -- '-dev$' \
| xargs apt-get -y purge
# delete docs packages
dpkg --list \
| awk '{ print $2 }' \
| grep -- '-doc$' \
| xargs apt-get -y purge
# Delete X11 libraries
apt-get -y purge libx11-data xauth libxmuu1 libxcb1 libx11-6 libxext6
# Delete obsolete networking
apt-get -y purge ppp pppconfig pppoeconf
# Delete oddities
apt-get -y purge popularity-contest installation-report command-not-found friendly-recovery bash-completion fonts-ubuntu-font-family-console laptop-detect
# Exclude the files we don't need without uninstalling linux-firmware
echo "==> Setup dpkg excludes for linux-firmware"
cat <<_EOF_ | cat >> /etc/dpkg/dpkg.cfg.d/excludes
#BENTO-BEGIN
path-exclude=/lib/firmware/*
path-exclude=/usr/share/doc/linux-firmware/*
#BENTO-END
_EOF_
# Delete the massive firmware packages
rm -rf /lib/firmware/*
rm -rf /usr/share/doc/linux-firmware/*
# Clean up orphaned packages with deborphan
apt-get -y install deborphan
while [ -n "$(deborphan --guess-all --libdevel)" ]
do
deborphan --guess-all --libdevel | xargs apt-get -y purge
done
apt-get -y purge deborphan dialog
apt-get -y autoremove
apt-get -y autoclean
apt-get -y clean
# Remove docs
rm -rf /usr/share/doc/*
# Remove man pages
rm -rf /usr/share/man/*
# Remove cache files
find /var/cache -type f -exec rm -rf {} \;
# Remove APT files
find /var/lib/apt -type f | xargs rm -f
# truncate any logs that have built up during the install
find /var/log -type f -exec truncate --size=0 {} \;
# Blank netplan machine-id (DUID) so machines get unique ID generated on boot.
truncate -s 0 /etc/machine-id
# remove the contents of /tmp and /var/tmp
rm -rf /tmp/* /var/tmp/*
# clear the history so our install isn't there
export HISTSIZE=0
rm -f /root/.wget-hsts
# Remove unused blocks
/sbin/fstrim -v /

View File

@ -0,0 +1,33 @@
#!/bin/sh -eux
# Whiteout root
count=$(df --sync -kP / | tail -n1 | awk -F ' ' '{print $4}')
count=$(($count-1))
dd if=/dev/zero of=/tmp/whitespace bs=1M count=$count || echo "dd exit code $? is suppressed"
rm /tmp/whitespace
# Whiteout /boot
count=$(df --sync -kP /boot | tail -n1 | awk -F ' ' '{print $4}')
count=$(($count-1))
dd if=/dev/zero of=/boot/whitespace bs=1M count=$count || echo "dd exit code $? is suppressed"
rm /boot/whitespace
set +e
swapuuid="`/sbin/blkid -o value -l -s UUID -t TYPE=swap`"
case "$?" in
2|0) ;;
*) exit 1 ;;
esac
set -e
if [ "x${swapuuid}" != "x" ]
then
# Whiteout the swap partition to reduce box size
# Swap is disabled till reboot
swappart="`readlink -f /dev/disk/by-uuid/$swapuuid`"
/sbin/swapoff "$swappart"
dd if=/dev/zero of="$swappart" bs=1M || echo "dd exit code $? is suppressed"
/sbin/mkswap -U "$swapuuid" "$swappart"
fi
sync

View File

@ -0,0 +1,14 @@
#!/bin/sh -eux
echo "Create netplan config for eth0"
cat <<EOF >/etc/netplan/01-netcfg.yaml
network:
version: 2
ethernets:
eth0:
dhcp4: true
EOF
# Disable Predictable Network Interface names and use eth0
sed -i 's/GRUB_CMDLINE_LINUX="\(.*\)"/GRUB_CMDLINE_LINUX="net.ifnames=0 biosdevname=0 \1"/g' /etc/default/grub
update-grub

View File

@ -0,0 +1,14 @@
#!/bin/sh -eux
SSHD_CONFIG="/etc/ssh/sshd_config"
# ensure that there is a trailing newline before attempting to concatenate
sed -i -e '$a\' "$SSHD_CONFIG"
ENABLE_PASSWORD_AUTHENTICATION="PasswordAuthentication yes"
if grep -q -E "^[[:space:]]*PasswordAuthentication" "$SSHD_CONFIG"
then
sed -i "s/^\s*PasswordAuthentication.*/${ENABLE_PASSWORD_AUTHENTICATION}/" "$SSHD_CONFIG"
else
echo "$ENABLE_PASSWORD_AUTHENTICATION" >>"$SSHD_CONFIG"
fi

View File

@ -0,0 +1,22 @@
#!/bin/sh -eux
SSHD_CONFIG="/etc/ssh/sshd_config"
# ensure that there is a trailing newline before attempting to concatenate
sed -i -e '$a\' "$SSHD_CONFIG"
USEDNS="UseDNS no"
if grep -q -E "^[[:space:]]*UseDNS" "$SSHD_CONFIG"
then
sed -i "s/^\s*UseDNS.*/${USEDNS}/" "$SSHD_CONFIG"
else
echo "$USEDNS" >>"$SSHD_CONFIG"
fi
GSSAPI="GSSAPIAuthentication no"
if grep -q -E "^[[:space:]]*GSSAPIAuthentication" "$SSHD_CONFIG"
then
sed -i "s/^\s*GSSAPIAuthentication.*/${GSSAPI}/" "$SSHD_CONFIG"
else
echo "$GSSAPI" >>"$SSHD_CONFIG"
fi

View File

@ -0,0 +1,38 @@
#!/bin/sh -eux
export DEBIAN_FRONTEND=noninteractive
# Disable release-upgrades
sed -i.bak 's/^Prompt=.*$/Prompt=never/' /etc/update-manager/release-upgrades
# Disable systemd apt timers/services
systemctl stop apt-daily.timer
systemctl stop apt-daily-upgrade.timer
systemctl disable apt-daily.timer
systemctl disable apt-daily-upgrade.timer
systemctl mask apt-daily.service
systemctl mask apt-daily-upgrade.service
systemctl daemon-reload
# Disable periodic activities of apt to be safe
cat <<EOF >/etc/apt/apt.conf.d/10periodic;
APT::Periodic::Enable "0";
APT::Periodic::Update-Package-Lists "0";
APT::Periodic::Download-Upgradeable-Packages "0";
APT::Periodic::AutocleanInterval "0";
APT::Periodic::Unattended-Upgrade "0";
EOF
# Clean and nuke the package from orbit
rm -rf /var/log/unattended-upgrades
apt-get -y purge unattended-upgrades
# Update the package list
apt-get -y update
# Upgrade all installed packages incl. kernel and kernel headers
apt-get -y dist-upgrade -o Dpkg::Options::="--force-confnew"
# Install QEMU guest agent
apt-get -y install qemu-guest-agent
reboot

53
backbone/setup.py Normal file
View File

@ -0,0 +1,53 @@
import sys
from backbone.hypervisor import Shell
shell = Shell()
def apt_install(packages):
shell.execute(f"sudo apt install --yes --no-install-suggests --no-install-recommends {packages}")
def main(args):
prepare()
setup_vagrant()
setup_kvm()
setup_libvirt()
setup_packer()
def prepare():
shell.execute("sudo apt update")
apt_install("build-essential")
def setup_vagrant():
VAGRANT_SERVER = "https://releases.hashicorp.com/vagrant/2.4.6"
VAGRANT_PACKAGE = "vagrant_2.4.6-1_amd64.deb"
shell.execute(f"wget {VAGRANT_SERVER}/{VAGRANT_PACKAGE} -O {VAGRANT_PACKAGE}")
shell.execute(f"sudo dpkg -i {VAGRANT_PACKAGE}")
def setup_packer():
PACKER_KEY = "https://apt.releases.hashicorp.com/gpg"
PACKER_REPO = '"deb [arch=amd64] https://apt.releases.hashicorp.com $(lsb_release -cs) main"'
shell.execute(f"curl -fsSL {PACKER_KEY} | sudo apt-key add -")
shell.execute(f"sudo apt-add-repository {PACKER_REPO}")
apt_install("packer cloud-utils")
def setup_kvm():
apt_install("qemu-kvm")
shell.execute("sudo usermod -aG kvm $USER")
def setup_libvirt():
apt_install("libvirt-dev libvirt-daemon-system qemu-utils dnsmasq-base")
shell.execute("sudo usermod -aG libvirt $USER")
shell.execute("vagrant plugin install vagrant-libvirt")
shell.execute("vagrant plugin install vagrant-hostmanager")
if __name__ == "__main__":
sys.exit(main(sys.argv))

57
backbone/setup_mac.py Normal file
View File

@ -0,0 +1,57 @@
import sys
from backbone.hypervisor import Shell
shell = Shell()
def brew_install(packages):
shell.execute(f"brew install {packages}")
def main(args):
prepare()
setup_qemu()
setup_vagrant()
setup_libvirt()
setup_packer()
def prepare():
shell.execute("brew update")
brew_install("cdrtools iproute2mac")
def setup_qemu():
brew_install("qemu")
# We might need to disable a few things
# echo 'security_driver = "none"' >> /opt/homebrew/etc/libvirt/qemu.conf
# echo "dynamic_ownership = 0" >> /opt/homebrew/etc/libvirt/qemu.conf
# echo "remember_owner = 0" >> /opt/homebrew/etc/libvirt/qemu.conf
def setup_vagrant():
# At the time of writing, the hashicorp tap has an older 2.4.2 version
# We need 2.4.3
# Reference: https://github.com/vagrant-libvirt/vagrant-libvirt/issues/1831
brew_install("vagrant")
def setup_libvirt():
brew_install("libvirt")
shell.execute("brew services start libvirt")
# Make sure you haven't installed macports
# It overrides pkg-config, and we won't find brew libvirt packages
shell.execute("vagrant plugin install vagrant-libvirt")
shell.execute("vagrant plugin install vagrant-hostmanager")
def setup_packer():
shell.execute("brew tap hashicorp/tap")
brew_install("hashicorp/tap/packer")
shell.execute("packer plugins install github.com/hashicorp/qemu")
shell.execute("packer plugins install github.com/hashicorp/vagrant")
if __name__ == "__main__":
sys.exit(main(sys.argv))

View File

@ -0,0 +1,19 @@
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
import unittest
from pathlib import Path
from coverage import Coverage
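# Runs backbone's unittest suite under coverage and writes an HTML report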
def run_tests():
coverage = Coverage(source=[str(Path(__file__).parent.parent)], omit=["*/tests/*"], branch=True)
coverage.start()
unittest.main(module=None, argv=["", "discover", "-s", "backbone"], exit=False)
coverage.stop()
coverage.save()
coverage.html_report()
if __name__ == "__main__":
unittest.main()

View File

@ -0,0 +1,48 @@
# Copyright (c) 2020, Frappe and contributors
# For license information, please see license.txt
from unittest.mock import MagicMock
from frappe.tests.utils import FrappeTestCase
from backbone.hypervisor import Hypervisor
class TestHypervisor(FrappeTestCase):
def test_preinstall_pass(self):
shell = MagicMock()
shell.execute.return_value.returncode = 0
hypervisor = Hypervisor(shell=shell)
self.assertEqual(hypervisor.preinstall(), None)
shell.execute.assert_called_with("kvm-ok")
def test_preinstall_fail(self):
shell = MagicMock()
shell.execute.return_value.returncode = 1
hypervisor = Hypervisor(shell=shell)
self.assertRaisesRegex(Exception, "Cannot use KVM", hypervisor.preinstall)
def test_install_pass(self):
shell = MagicMock()
shell.execute.return_value.returncode = 0
hypervisor = Hypervisor(shell=shell)
self.assertEqual(hypervisor.install(), None)
shell.execute.assert_called_with("sudo apt install qemu-kvm")
def test_install_fail(self):
shell = MagicMock()
shell.execute.return_value.returncode = 1
hypervisor = Hypervisor(shell=shell)
self.assertRaisesRegex(Exception, "Cannot install KVM", hypervisor.install)
def test_verify_pass(self):
shell = MagicMock()
shell.execute.return_value.returncode = 0
hypervisor = Hypervisor(shell=shell)
self.assertEqual(hypervisor.verify(), None)
shell.execute.assert_called_with("virsh list --all")
def test_verify_fail(self):
shell = MagicMock()
shell.execute.return_value.returncode = 1
hypervisor = Hypervisor(shell=shell)
self.assertRaisesRegex(Exception, "Cannot connect to KVM", hypervisor.verify)

169
backbone/vagrant/Vagrantfile vendored Normal file
View File

@ -0,0 +1,169 @@
Vagrant.configure("2") do |config|
config.vm.box = "backbone"
config.vm.synced_folder ".", "/vagrant", disabled: true
  # This lets us access all guests by their names from the host and other guests
config.hostmanager.enabled = true
config.hostmanager.manage_host = true
config.hostmanager.manage_guest = true
config.vm.provider :libvirt do |libvirt|
libvirt.qemu_use_session = false
# Enable qemu_use_session or set this on macOS
# Also run **sudo** brew services start
# libvirt.uri = "qemu:///session"
libvirt.driver = "kvm"
libvirt.default_prefix = ""
# VMs with little disk space may fail to boot
libvirt.machine_virtual_size = 16
libvirt.cpus = 1
libvirt.cpu_mode = "host-passthrough"
libvirt.memory = 512
end
# We will add two static IPs to simulate public and private interfaces
  # The host manager plugin will only work with the first interface in this list
# Public 10.0.x.x
# Private 10.1.x.x
# IP Pattern based on server types
# Proxy x.x.1.x
# Frappe x.x.2.x
# Database x.x.3.x
# Other x.x.4.x
# We'll start IPs from x.x.x.101
# Default Cluster
# Reverse Proxy Server
config.vm.define "n1.local.frappe.dev" do |n1|
n1.vm.hostname = "n1.local.frappe.dev"
n1.vm.network "private_network", ip: "10.0.1.101", netmask: "255.255.0.0"
n1.vm.network "private_network", ip: "10.1.1.101", netmask: "255.255.0.0"
n1.vm.provider :libvirt do |libvirt|
libvirt.memory = 1024
end
end
# Primary App Server
config.vm.define "f1.local.frappe.dev" do |f1|
f1.vm.hostname = "f1.local.frappe.dev"
f1.vm.network "private_network", ip: "10.0.2.101", netmask: "255.255.0.0"
f1.vm.network "private_network", ip: "10.1.2.101", netmask: "255.255.0.0"
f1.vm.provider :libvirt do |libvirt|
libvirt.cpus = 2
libvirt.memory = 4096
end
end
# Replica of f1
# config.vm.define "f2.local.frappe.dev" do |f2|
# f2.vm.hostname = "f2.local.frappe.dev"
# f2.vm.network "private_network", ip: "10.0.2.102", netmask: "255.255.0.0"
# f2.vm.network "private_network", ip: "10.1.2.102", netmask: "255.255.0.0"
# f2.vm.provider :libvirt do |libvirt|
# libvirt.cpus = 2
# libvirt.memory = 4096
# end
# end
# Primary DB Server
config.vm.define "m1.local.frappe.dev" do |m1|
m1.vm.hostname = "m1.local.frappe.dev"
m1.vm.network "private_network", ip: "10.0.3.101", netmask: "255.255.0.0"
m1.vm.network "private_network", ip: "10.1.3.101", netmask: "255.255.0.0"
m1.vm.provider :libvirt do |libvirt|
libvirt.cpus = 1
libvirt.memory = 2048
end
end
# Replica of m1
# config.vm.define "m2.local.frappe.dev" do |m2|
# m2.vm.hostname = "m2.local.frappe.dev"
# m2.vm.network "private_network", ip: "10.0.3.102", netmask: "255.255.0.0"
# m2.vm.network "private_network", ip: "10.1.3.102", netmask: "255.255.0.0"
# m2.vm.provider :libvirt do |libvirt|
# libvirt.cpus = 1
# libvirt.memory = 2048
# end
# end
# # Secondary Cluster
# config.vm.define "n2.frappe.dev" do |n2|
# n2.vm.hostname = "n2.frappe.dev"
# n2.vm.network "private_network", ip: "10.0.1.102", netmask: "255.255.0.0"
# n2.vm.network "private_network", ip: "10.1.1.102", netmask: "255.255.0.0"
# end
# Additional Hosts.
# Docker Registry
config.vm.define "registry.local.frappe.dev" do |registry|
registry.vm.hostname = "registry.local.frappe.dev"
registry.vm.network "private_network", ip: "10.0.4.101", netmask: "255.255.0.0"
registry.vm.network "private_network", ip: "10.1.4.101", netmask: "255.255.0.0"
end
# Log Server = ElasticSearch + Logstash + Kibana
config.vm.define "log.local.frappe.dev" do |log|
log.vm.hostname = "log.local.frappe.dev"
log.vm.network "private_network", ip: "10.0.4.102", netmask: "255.255.0.0"
log.vm.network "private_network", ip: "10.1.4.102", netmask: "255.255.0.0"
log.vm.provider :libvirt do |libvirt|
libvirt.cpus = 2
libvirt.memory = 4096
end
end
# Uptime Server = Prometheus + Grafana
config.vm.define "monitor.local.frappe.dev" do |monitor|
monitor.vm.hostname = "monitor.local.frappe.dev"
monitor.vm.network "private_network", ip: "10.0.4.103", netmask: "255.255.0.0"
monitor.vm.network "private_network", ip: "10.1.4.103", netmask: "255.255.0.0"
monitor.vm.provider :libvirt do |libvirt|
libvirt.memory = 1024
end
end
# Analytics Server = Plausible
# config.vm.define "analytics.local.frappe.dev" do |analytics|
# analytics.vm.hostname = "analytics.local.frappe.dev"
# analytics.vm.network "private_network", ip: "10.0.4.104", netmask: "255.255.0.0"
# analytics.vm.network "private_network", ip: "10.1.4.104", netmask: "255.255.0.0"
# analytics.vm.provider :libvirt do |libvirt|
# libvirt.memory = 1024
# end
# end
# Trace Server = Sentry
config.vm.define "trace.local.frappe.dev" do |trace|
trace.vm.hostname = "trace.local.frappe.dev"
trace.vm.network "private_network", ip: "10.0.4.105", netmask: "255.255.0.0"
trace.vm.network "private_network", ip: "10.1.4.105", netmask: "255.255.0.0"
trace.vm.provider :libvirt do |libvirt|
libvirt.cpus = 2
libvirt.memory = 4096
end
end
# config.vm.define "sn1.local.frappe.dev" do |sn1|
# sn1.vm.box = "scaleway"
# sn1.vm.hostname = "sn1.local.frappe.dev"
# sn1.vm.network "private_network", ip: "10.2.0.101", netmask: "255.255.0.0"
# sn1.vm.network "private_network", ip: "10.3.0.101", netmask: "255.255.0.0", auto_config: false
# end
# config.vm.define "sf1.local.frappe.dev" do |sf1|
# sf1.vm.box = "scaleway"
# sf1.vm.hostname = "sf1.local.frappe.dev"
# sf1.vm.network "private_network", ip: "10.2.1.101", netmask: "255.255.0.0"
# sf1.vm.network "private_network", ip: "10.3.1.101", netmask: "255.255.0.0", auto_config: false
# sf1.vm.provider :libvirt do |libvirt|
# libvirt.cpus = 2
# libvirt.memory = 4096
# end
# end
end

13
codecov.yml Normal file
View File

@ -0,0 +1,13 @@
coverage:
status:
project:
default:
target: auto
threshold: 0.5%
patch:
default:
target: 75%
threshold: 0%
if_ci_failed: ignore
ignore:
- press/press/report/**

11
commitlint.config.js Normal file
View File

@ -0,0 +1,11 @@
export default {
extends: ['@commitlint/config-conventional'],
rules: {
'header-max-length': [2, 'always', 72],
'subject-case': [2, 'always', 'sentence-case'],
'scope-case': [2, 'always', 'kebab-case'],
'body-case': [2, 'always', 'sentence-case'],
'body-leading-blank': [2, 'always'],
'footer-leading-blank': [2, 'always'],
},
};

View File

@ -0,0 +1,2 @@
defaults
not IE 11

14
dashboard/.eslintrc.js Normal file
View File

@ -0,0 +1,14 @@
module.exports = {
root: true,
env: {
node: true
},
extends: ['plugin:vue/essential', 'eslint:recommended', '@vue/prettier'],
parserOptions: {
parser: 'babel-eslint'
},
rules: {
'no-console': process.env.NODE_ENV === 'production' ? 'error' : 'off',
'no-debugger': process.env.NODE_ENV === 'production' ? 'error' : 'off'
}
};

32
dashboard/.gitignore vendored Normal file
View File

@ -0,0 +1,32 @@
.DS_Store
node_modules
/dist
/coverage
# local env files
.env.local
.env.*.local
# Log files
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Editor directories and files
.idea
.vscode
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
# Playwright E2E artifacts
playwright-report/
test-results/
tests-e2e/.env
tests-e2e/.auth
blob-report/
playwright/.cache/
*.trace.zip
.nyc_output/

View File

@ -0,0 +1,4 @@
{
"singleQuote": true,
"useTabs": true
}

64
dashboard/README.md Normal file
View File

@ -0,0 +1,64 @@
# Dashboard
Dashboard is a VueJS application that is the face of Frappe Cloud. It is what end users (tenants) see and use to manage their FC resources. Tenants do not have access to the desk, so this is their dashboard for managing sites, apps, updates, etc.
Technologies at the heart of dashboard:
1. [VueJS 3](https://vuejs.org/): The JavaScript framework of our choice.
2. [TailwindCSS 3](https://tailwindcss.com/): We love it.
3. [ViteJS](https://vitejs.dev/guide/): Build tooling for dev server and build command.
4. [Feather Icons](https://feathericons.com/): Those Shiny & Crisp Open Source icons.
## Development
We use Vite's development server, which gives us super-fast hot reload and more.
### Running the development server
Run:
```bash
yarn run dev
```
> Note: If you are getting `CSRFTokenError` on your local development machine, please add the following key-value pair to your `site_config.json`
>
> ```json
> "ignore_csrf": 1
> ```
### Proxy
While running the Vite dev server, requests to paths like `/app`, `/files` and `/api` are redirected to the actual site inside the bench. This makes sure these paths and other backend APIs keep working properly. You can look at the [proxyOptions.js](./proxyOptions.js) file to see how the proxying happens. These options are then loaded and used in the [vite config](./vite.config.js) file.
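For reference, a minimal sketch of what such a proxy map can look like (this is not the repo's actual `proxyOptions.js`; the port and site name below are assumptions for a local bench):

```js
// Hypothetical proxy map: forward backend paths to a bench site on localhost:8000.
// Adjust the target port and Host header to match your local site.
const proxyOptions = {
	'^/(app|api|assets|files)': {
		target: 'http://localhost:8000',
		ws: true,
		changeOrigin: true,
		// Bench routes requests to the right site based on the Host header
		headers: { Host: 'dashboard.localhost' }
	}
};

export default proxyOptions;
```

An object shaped like this can be passed to Vite's `server.proxy` option, which is roughly what the vite config does with the real proxy options.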
## Testing
There is a separate setup for testing the frontend.
### The Stack
1. [MSW](https://mswjs.io/)
2. [Vitest](https://vitest.dev/)
### Running the tests
```bash
yarn run test
```
The tests run in CI too.
## Learning More
You can start by taking a look at the [main.js](./src/main.js) file. This is where the VueJS app is initialized and the following things are attached (registered) to the instance:
1. Vue Router
2. Plugins
3. Controllers
4. Global Components
The logic to register each of the above lives in its own separate file; you can take a look at the imports as required. Until we have more docs, you will have to dig into some `js` and `vue` files. If you find something that you can add here, feel free to raise a PR!
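For orientation, here is a rough sketch of how a Vue 3 app like this is typically wired up. The actual `main.js` differs; the import paths and component names below are illustrative only.

```js
import { createApp } from 'vue';
import App from './App.vue';
import router from './router'; // Vue Router instance (illustrative path)

const app = createApp(App);

app.use(router); // 1. Vue Router
// 2 & 3. Plugins and controllers are registered the same way via app.use()
// 4. Global components are registered on the app instance, e.g.:
// app.component('Button', Button);

app.mount('#app');
```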

10
dashboard/auto-imports.d.ts vendored Normal file
View File

@ -0,0 +1,10 @@
/* eslint-disable */
/* prettier-ignore */
// @ts-nocheck
// noinspection JSUnusedGlobalSymbols
// Generated by unplugin-auto-import
// biome-ignore lint: disable
export {}
declare global {
}

View File

@ -0,0 +1,3 @@
module.exports = {
presets: ['@babel/preset-env']
};

27
dashboard/index.html Normal file
View File

@ -0,0 +1,27 @@
<!DOCTYPE html>
<html class="h-full overflow-hidden" lang="en">
<head>
<meta charset="utf-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta name="viewport" content="width=device-width,initial-scale=1.0" />
<title>Frappe Cloud</title>
<link rel="icon" href="/assets/press/dashboard/favicon.png" type="image/x-icon" />
</head>
<body class="h-full">
<noscript>
<strong>
Frappe Cloud Dashboard doesn't work properly without JavaScript enabled.
Please enable it to continue.
</strong>
</noscript>
<!-- Main Vue App -->
<div id="app" class="h-full"></div>
<!-- For Teleports -->
<div id="modals"></div>
<div id="popovers"></div>
<script type="module" src="/src/main.js"></script>
</body>
</html>

9
dashboard/jsconfig.json Normal file
View File

@ -0,0 +1,9 @@
{
"include": ["./src/**/*", "src2/components/AddressableErrorDialog.vue"],
"compilerOptions": {
"baseUrl": ".",
"paths": {
"@/*": ["src/*"]
}
}
}

119
dashboard/package.json Normal file
View File

@ -0,0 +1,119 @@
{
"name": "dashboard",
"version": "0.1.0",
"private": true,
"type": "module",
"scripts": {
"dev": "vite",
"build": "vite build",
"test": "vitest",
"coverage": "vitest run --coverage",
"lint": "eslint src",
"test:e2e": "playwright test",
"test:e2e:headed": "playwright test --headed",
"test:e2e:report": "playwright show-report"
},
"dependencies": {
"@codemirror/autocomplete": "^6.18.1",
"@codemirror/lang-sql": "^6.8.0",
"@headlessui/vue": "^1.7.14",
"@popperjs/core": "^2.11.2",
"@sentry/vite-plugin": "^2.19.0",
"@sentry/vue": "^8.10.0",
"@stripe/stripe-js": "^1.3.0",
"@tailwindcss/container-queries": "^0.1.1",
"@tanstack/vue-table": "^8.20.5",
"@vueuse/components": "^10.7.0",
"@vueuse/core": "^10.3.0",
"codemirror": "^6.0.1",
"core-js": "^3.6.4",
"dayjs": "^1.10.7",
"echarts": "^5.4.3",
"feather-icons": "^4.26.0",
"frappe-charts": "2.0.0-rc22",
"frappe-ui": "0.1.168",
"fuse.js": "6.6.2",
"libarchive.js": "^1.3.0",
"lodash": "^4.17.19",
"luxon": "^1.22.0",
"markdown-it": "^12.3.2",
"papaparse": "^5.4.1",
"qrcode": "^1.5.4",
"register-service-worker": "^1.6.2",
"socket.io-client": "^4.5.1",
"sql-formatter": "^15.4.10",
"unplugin-icons": "^0.17.0",
"unplugin-vue-components": "^0.25.2",
"vue": "^3.5.15",
"vue-codemirror": "^6.1.1",
"vue-echarts": "^6.6.1",
"vue-qrcode": "^2.2.2",
"vue-router": "^4.1.6",
"vue-sonner": "^1.2.5"
},
"devDependencies": {
"@iconify/json": "^2.2.123",
"@playwright/test": "^1.54.2",
"@tailwindcss/forms": "^0.4.0",
"@tailwindcss/postcss7-compat": "^2.0.2",
"@tailwindcss/typography": "^0.5.1",
"@vitejs/plugin-legacy": "^4.1.1",
"@vitejs/plugin-vue": "^5.0.3",
"@vitejs/plugin-vue-jsx": "^3.1.0",
"@vue/compiler-sfc": "^3.1.0",
"@vue/eslint-config-prettier": "^6.0.0",
"@vue/test-utils": "^2.0.0-rc.19",
"autoprefixer": "^10.4.2",
"babel-eslint": "^10.0.3",
"c8": "^7.11.0",
"dotenv": "^17.2.3",
"eslint": "^6.7.2",
"eslint-plugin-prettier": "^3.1.1",
"eslint-plugin-vue": "^6.2.2",
"jsdom": "^19.0.0",
"lint-staged": "^9.5.0",
"msw": "^0.36.8",
"node-fetch": "^3.2.10",
"postcss": "^8.4.6",
"postcss-easy-import": "^4.0.0",
"prettier": "^2.5.1",
"prettier-plugin-tailwindcss": "^0.1.8",
"tailwindcss": "^3.4",
"typescript": "^5.4.3",
"vite": "5.4.20",
"vite-plugin-rewrite-all": "^1.0.1",
"vite-plugin-vue-devtools": "7.6.7",
"vitest": "^0.9.3",
"vue-tsc": "^2.0.7",
"yorkie": "^2.0.0"
},
"gitHooks": {
"pre-commit": "lint-staged"
},
"lint-staged": {
"*.{js,jsx,vue}": [
"yarn lint",
"git add"
]
},
"nyc": {
"include": [
"src/**/*.vue",
"src/**/*.js"
],
"extension": [
".js",
".ts",
".vue"
],
"reporter": [
"html",
"text",
"lcov"
],
"all": true,
"require": [
"source-map-support/register"
]
}
}

View File

@ -0,0 +1,39 @@
import { defineConfig, devices } from '@playwright/test';
import dotenv from "dotenv";
dotenv.config({ path: "./tests-e2e/.env", quiet: true });
export default defineConfig({
testDir: './tests-e2e',
fullyParallel: true,
retries: process.env.CI ? 2 : 0,
workers: process.env.CI ? 4 : undefined,
use: {
baseURL: process.env.BASE_URL || 'http://localhost:8010',
trace: 'retain-on-failure',
screenshot: 'only-on-failure',
video: 'retain-on-failure'
},
reporter: [['list'], ['html', { open: 'never' }]],
projects: [
{
name: 'cron',
testMatch: /.*\.cron\.spec\.ts/,
dependencies: undefined
},
{
name: 'setup',
testMatch: /.*\.setup\.ts/
},
{
name: 'chromium',
use: {
...devices['Desktop Chrome'],
storageState: 'tests-e2e/.auth/session.json'
},
// must match all *.test.ts files only
testMatch: /^(?!.*(\.cron|\.setup)\.spec\.ts$).*\.test\.ts$/,
dependencies: ['setup']
}
]
});

View File

@ -0,0 +1,6 @@
module.exports = {
plugins: {
tailwindcss: {},
autoprefixer: {}
}
};

Binary file not shown.


View File

@ -0,0 +1,2 @@
User-agent: *
Disallow:

7
dashboard/shims-global.d.ts vendored Normal file
View File

@ -0,0 +1,7 @@
declare global {
interface Window {
is_system_user?: boolean;
}
}
export {};

5
dashboard/shims.d.ts vendored Normal file
View File

@ -0,0 +1,5 @@
declare module '~icons/*' {
import { FunctionalComponent, SVGAttributes } from 'vue';
const component: FunctionalComponent<SVGAttributes>;
export default component;
}

98
dashboard/src/App.vue Normal file
View File

@ -0,0 +1,98 @@
<template>
<div class="relative flex h-full flex-col">
<div class="h-full flex-1">
<div class="flex h-full">
<div
v-if="!isSignupFlow && !$isMobile && !isHideSidebar"
class="relative block min-h-0 flex-shrink-0 overflow-hidden hover:overflow-auto"
>
<AppSidebar v-if="$session.user" />
</div>
<div class="w-full overflow-auto" id="scrollContainer">
<MobileNav
v-if="!isSignupFlow && $isMobile && !isHideSidebar && $session.user"
/>
<div
v-if="
!isSignupFlow &&
!isSiteLogin &&
!$session.user &&
!$route.meta.isLoginPage
"
class="border bg-red-200 px-5 py-3 text-base text-red-900"
>
You are not logged in.
<router-link to="/login" class="underline">Login</router-link> to
access dashboard.
</div>
<router-view />
</div>
</div>
</div>
<Toaster position="top-right" />
<component v-for="dialog in dialogs" :is="dialog" :key="dialog.id" />
</div>
</template>
<script setup>
import { defineAsyncComponent, computed, watch, ref, provide } from 'vue';
import { Toaster } from 'vue-sonner';
import { dialogs } from './utils/components';
import { useRoute } from 'vue-router';
import { getTeam } from './data/team';
import { session } from './data/session.js';
const AppSidebar = defineAsyncComponent(
() => import('./components/AppSidebar.vue'),
);
const MobileNav = defineAsyncComponent(
() => import('./components/MobileNav.vue'),
);
const route = useRoute();
const team = getTeam();
const isHideSidebar = computed(() => {
const alwaysHideSidebarRoutes = [
'Site Login',
'SignupLoginToSite',
'SignupSetup',
];
const alwaysHideSidebarPaths = ['/dashboard/site-login'];
if (!session.user) return false;
if (
alwaysHideSidebarRoutes.includes(route.name) ||
alwaysHideSidebarPaths.includes(window.location.pathname)
)
return true;
return (
route.meta.hideSidebar && session.user && team?.doc?.hide_sidebar === true
);
});
const isSignupFlow = ref(
window.location.pathname.startsWith('/dashboard/create-site') ||
window.location.pathname.startsWith('/dashboard/setup-account') ||
window.location.pathname.startsWith('/dashboard/site-login') ||
window.location.pathname.startsWith('/dashboard/signup'),
);
const isSiteLogin = ref(window.location.pathname.endsWith('/site-login'));
watch(
() => route.name,
() => {
isSignupFlow.value =
window.location.pathname.startsWith('/dashboard/create-site') ||
window.location.pathname.startsWith('/dashboard/setup-account') ||
window.location.pathname.startsWith('/dashboard/site-login') ||
window.location.pathname.startsWith('/dashboard/signup');
},
);
provide('team', team);
provide('session', session);
</script>
<style src="./assets/style.css"></style>

Multiple binary files not shown.

View File

@ -0,0 +1,152 @@
@font-face {
font-family: 'Inter';
font-style: normal;
font-weight: 100;
font-display: swap;
src: url('Inter-Thin.woff2?v=3.12') format('woff2'),
url('Inter-Thin.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: italic;
font-weight: 100;
font-display: swap;
src: url('Inter-ThinItalic.woff2?v=3.12') format('woff2'),
url('Inter-ThinItalic.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: normal;
font-weight: 200;
font-display: swap;
src: url('Inter-ExtraLight.woff2?v=3.12') format('woff2'),
url('Inter-ExtraLight.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: italic;
font-weight: 200;
font-display: swap;
src: url('Inter-ExtraLightItalic.woff2?v=3.12') format('woff2'),
url('Inter-ExtraLightItalic.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: normal;
font-weight: 300;
font-display: swap;
src: url('Inter-Light.woff2?v=3.12') format('woff2'),
url('Inter-Light.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: italic;
font-weight: 300;
font-display: swap;
src: url('Inter-LightItalic.woff2?v=3.12') format('woff2'),
url('Inter-LightItalic.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: normal;
font-weight: 400;
font-display: swap;
src: url('Inter-Regular.woff2?v=3.12') format('woff2'),
url('Inter-Regular.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: italic;
font-weight: 400;
font-display: swap;
src: url('Inter-Italic.woff2?v=3.12') format('woff2'),
url('Inter-Italic.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: normal;
font-weight: 500;
font-display: swap;
src: url('Inter-Medium.woff2?v=3.12') format('woff2'),
url('Inter-Medium.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: italic;
font-weight: 500;
font-display: swap;
src: url('Inter-MediumItalic.woff2?v=3.12') format('woff2'),
url('Inter-MediumItalic.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: normal;
font-weight: 600;
font-display: swap;
src: url('Inter-SemiBold.woff2?v=3.12') format('woff2'),
url('Inter-SemiBold.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: italic;
font-weight: 600;
font-display: swap;
src: url('Inter-SemiBoldItalic.woff2?v=3.12') format('woff2'),
url('Inter-SemiBoldItalic.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: normal;
font-weight: 700;
font-display: swap;
src: url('Inter-Bold.woff2?v=3.12') format('woff2'),
url('Inter-Bold.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: italic;
font-weight: 700;
font-display: swap;
src: url('Inter-BoldItalic.woff2?v=3.12') format('woff2'),
url('Inter-BoldItalic.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: normal;
font-weight: 800;
font-display: swap;
src: url('Inter-ExtraBold.woff2?v=3.12') format('woff2'),
url('Inter-ExtraBold.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: italic;
font-weight: 800;
font-display: swap;
src: url('Inter-ExtraBoldItalic.woff2?v=3.12') format('woff2'),
url('Inter-ExtraBoldItalic.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: normal;
font-weight: 900;
font-display: swap;
src: url('Inter-Black.woff2?v=3.12') format('woff2'),
url('Inter-Black.woff?v=3.12') format('woff');
}
@font-face {
font-family: 'Inter';
font-style: italic;
font-weight: 900;
font-display: swap;
src: url('Inter-BlackItalic.woff2?v=3.12') format('woff2'),
url('Inter-BlackItalic.woff?v=3.12') format('woff');
}

View File

@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="88px" height="88px" viewBox="0 0 88 88" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<!-- Generator: Sketch 56.2 (81672) - https://sketch.com -->
<title>erpnext-logo</title>
<desc>Created with Sketch.</desc>
<g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
<g id="erpnext-logo" fill-rule="nonzero">
<path d="M74.0833425,8.65999998e-07 C81.4123344,8.65999998e-07 87.3125279,5.90019975 87.3125279,13.2291584 L87.3125279,74.0833368 C87.3125279,81.4122954 81.4123344,87.3125043 74.0833425,87.3125043 L13.2292094,87.3125043 C5.90031746,87.3125043 2.3949e-05,81.4122954 2.3949e-05,74.0833368 L2.3949e-05,13.2291584 C2.3949e-05,5.90019975 5.90031746,8.65999998e-07 13.2292094,8.65999998e-07 L74.0833425,8.65999998e-07 Z" id="Box" fill="#5A67D8"></path>
<path d="M29.7776717,21.8279666 C29.5028225,21.8279666 29.2349848,21.8558854 28.9761694,21.9085798 C28.7173275,21.9612743 28.4675079,22.0389083 28.229965,22.1390584 C28.1111671,22.1891334 27.9957029,22.2450161 27.8832021,22.3059734 C27.6582269,22.4278829 27.4465337,22.5707975 27.2506891,22.7323019 C26.3693091,23.4590779 25.8089216,24.5599536 25.8089216,25.7967167 L25.8089216,26.0385617 L25.8089216,61.2741253 L25.8089216,61.5154782 C25.8089216,63.7141657 27.5789842,65.4842283 29.7776717,65.4842283 L57.5346337,65.4842283 C59.7333212,65.4842283 61.5033838,63.7141657 61.5033838,61.5154782 L61.5033838,61.2741253 C61.5033838,59.0754377 59.7333212,57.3053752 57.5346337,57.3053752 L33.988251,57.3053752 L33.988251,47.4201976 L51.4957041,47.4201976 C53.6943917,47.4201976 55.4644542,45.6501351 55.4644542,43.4514475 L55.4644542,43.2101211 C55.4644542,41.0114335 53.6943917,39.241371 51.4957041,39.241371 L33.988251,39.241371 L33.988251,30.0073118 L57.5346337,30.0073118 C59.7333212,30.0073118 61.5033838,28.2372493 61.5033838,26.0385617 L61.5033838,25.7967167 C61.5033838,23.5980291 59.7333212,21.8279666 57.5346337,21.8279666 L29.7776717,21.8279666 Z" id="E" fill="#FFFFFF"></path>
</g>
</g>
</svg>


View File

@ -0,0 +1,4 @@
<svg viewBox="0 0 956 941" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M909.491 851.754H468.772C258.897 851.754 87.321 680.178 87.321 470.303C87.321 260.429 258.897 88.852 468.772 88.852C583.666 88.852 690.902 137.874 765.966 228.258C781.286 246.641 810.392 249.705 828.775 234.386C847.159 219.066 850.223 189.96 834.903 171.576C744.519 62.809 612.773 0 471.835 0C209.875 0 0 212.939 0 470.303C0 727.668 212.939 940.606 470.303 940.606H911.023C935.534 940.606 955.449 922.223 955.449 896.18C955.449 870.137 934.002 851.754 909.491 851.754Z" fill="#4794E9"/>
<path d="M226.852 470.961C226.852 337.683 335.62 227.384 470.429 227.384C542.43 227.384 611.367 259.555 657.325 314.704C672.644 333.087 701.751 336.151 720.134 320.832C738.518 305.513 741.581 276.406 726.262 258.023C663.453 181.426 570.005 137 470.429 137C286.598 137 138 285.597 138 469.429C138 653.261 286.598 801.858 470.429 801.858H811.574C836.084 801.858 856 783.475 856 757.432C856 731.39 837.616 713.006 811.574 713.006H468.898C335.62 714.538 226.852 604.239 226.852 470.961Z" fill="#8CC0F1"/>
</svg>


Binary file not shown.


Some files were not shown because too many files have changed in this diff.