Skip to content

Commit

Permalink
feat: move testbed build to python & add more info
Browse files Browse the repository at this point in the history
  • Loading branch information
tydeu committed Dec 7, 2023
1 parent b4b4220 commit b557316
Show file tree
Hide file tree
Showing 8 changed files with 134 additions and 57 deletions.
14 changes: 8 additions & 6 deletions .github/workflows/testbed.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -115,17 +115,19 @@ jobs:
uses: actions/checkout@v3
- id: build
name: Build
continue-on-error: true
continue-on-error: true # e.g., clone failures
# Note that this runs arbitrary untrusted code so we cannot give it secrets
run: ./build.sh ${{ matrix.gitUrl }} testbed ${{ matrix.toolchain }}
- name: Output Outcome
run: |
echo ${{ steps.build.outcome }} > outcome.txt
- name: Upload Results
./testbed-build.py -o result.json -v \
${{ matrix.gitUrl }} testbed ${{ matrix.toolchain }}
- name: Check Build
run: test "`jq -r '.outcome' result.json`" = success
continue-on-error: true # build failures
- name: Upload Result
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.artifact }}
path: outcome.txt
path: result.json
collect:
needs: [setup, build]
name: Collect Results
Expand Down
21 changes: 0 additions & 21 deletions build.sh

This file was deleted.

4 changes: 2 additions & 2 deletions bundle.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
#!/usr/bin/env python3
from utils import run_cmd, load_index, add_build
from utils import capture_cmd, load_index, add_build
from datetime import datetime
import argparse
import json

RELEASE_REPO = 'leanprover/lean4'
def query_toolchain_releases():
out = run_cmd(
out = capture_cmd(
'gh', 'api', '--paginate',
f'repos/{RELEASE_REPO}/releases',
'-q', '.[] | .tag_name'
Expand Down
19 changes: 6 additions & 13 deletions query.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
from utils import paginate, run_cmd
from utils import configure_logging, paginate, capture_cmd
from datetime import datetime
import argparse
import os
Expand Down Expand Up @@ -34,7 +34,7 @@ def query_repo_data(repoIds: 'list[str]') -> dict:
for id in page:
fields.append('-f')
fields.append(f'repoIds[]={id}')
out = run_cmd(
out = capture_cmd(
'gh', 'api', 'graphql',
"-H", "X-Github-Next-Global-ID: 1",
'-f', f'query={REPO_QUERY}', *fields
Expand All @@ -47,14 +47,14 @@ def query_repo_data(repoIds: 'list[str]') -> dict:
def query_lake_repos(limit: int) -> 'list[str]':
query='filename:lakefile.lean path:/'
if limit == 0:
out = run_cmd(
out = capture_cmd(
'gh', 'api', 'search/code',
'--paginate', '--cache', '1h',
'-X', 'GET', '-f', f'q={query}',
'-q', '.items[] | .repository.node_id'
)
else:
out = run_cmd(
out = capture_cmd(
'gh', 'search', 'code', *query.split(' '), '-L', str(limit),
'--json', 'path,repository', '-q', '.[] | .repository.id'
)
Expand Down Expand Up @@ -86,13 +86,7 @@ def filter_ws(value: str | None):
help='print verbose logging information')
args = parser.parse_args()

if args.verbosity == 0:
level = logging.CRITICAL
elif args.verbosity == 1:
level = logging.INFO
else:
level = logging.DEBUG
logging.basicConfig(level=level, format='%(levelname)s: %(message)s')
configure_logging(args.verbosity)

exclusions = set()
with open(args.exclusions, 'r') as f:
Expand Down Expand Up @@ -140,8 +134,7 @@ def curate(pkg: dict):
if args.index_dir is not None:
for pkg in pkgs:
pkg_dir = os.path.join(args.index_dir, pkg['owner'], pkg['name'])
if not os.path.exists(pkg_dir):
os.makedirs(pkg_dir)
os.makedirs(pkg_dir, exist_ok=True)
with open(os.path.join(pkg_dir, "metadata.json"), 'w') as f:
f.write(json.dumps(pkg, indent=2))
f.write("\n")
Expand Down
1 change: 1 addition & 0 deletions site/utils/manifest.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ export type Source = GitHubSource
export interface Build {
url?: string
builtAt: string
revision: string
toolchain: string
outcome: string
}
Expand Down
86 changes: 86 additions & 0 deletions testbed-build.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
#!/usr/bin/env python3
from utils import CommandError, configure_logging, run_cmd, capture_cmd
import subprocess
import os
import argparse
import shutil
import logging
import json

DEFAULT_ORIGIN = 'leanprover/lean4'
def normalize_toolchain(toolchain: str) -> str:
    """Normalize a Lean toolchain spec to the canonical ``origin:version`` form.

    A bare version with no ``origin:`` prefix is attributed to
    ``DEFAULT_ORIGIN``, and a version starting with a digit gets a ``v``
    prefix (e.g. ``4.3.0`` -> ``v4.3.0``).
    """
    origin, sep, ver = toolchain.strip().partition(':')
    if not sep:
        # No explicit origin: the whole string is the version.
        origin, ver = DEFAULT_ORIGIN, origin
    # ver[:1] (not ver[0]) avoids an IndexError on an empty version string.
    if ver[:1].isdecimal():
        ver = f'v{ver}'
    return f'{origin}:{ver}'

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('url',
        help="Git URL of the repository to clone")
    parser.add_argument('test_dir',
        help="directory to clone the package into")
    parser.add_argument('toolchain', nargs='?', type=str, default='',
        help="Lean toolchain to build the package on")
    parser.add_argument('-o', '--output',
        help='file to output the build results')
    parser.add_argument('-q', '--quiet', dest="verbosity", action='store_const', const=0, default=1,
        help='print no logging information')
    parser.add_argument('-v', '--verbose', dest="verbosity", action='store_const', const=2,
        help='print verbose logging information')
    parser.add_argument('-r', '--reuse-clone', action='store_true',
        help='reuse cloned repository if it already exists')
    args = parser.parse_args()

    configure_logging(args.verbosity)
    result = dict()

    # Clone the package, wiping any stale checkout unless -r allows reuse.
    if not (args.reuse_clone and os.path.exists(args.test_dir)):
        # ignore_errors makes this a no-op when the directory does not exist;
        # git clone recreates it.
        shutil.rmtree(args.test_dir, ignore_errors=True)
        run_cmd('git', 'clone', args.url, args.test_dir)
    iwd = os.getcwd()
    os.chdir(args.test_dir)
    result['revision'] = capture_cmd('git', 'rev-parse', 'HEAD').decode().strip()

    # Set the toolchain: an explicit argument overwrites the repository's
    # lean-toolchain file; otherwise the file's content is used as-is.
    toolchain = args.toolchain.strip()
    toolchain_file = 'lean-toolchain'
    if len(toolchain) > 0:
        with open(toolchain_file, 'w') as f:
            f.write(toolchain)
            f.write('\n')
    else:
        with open(toolchain_file, 'r') as f:
            toolchain = normalize_toolchain(f.read())
    result['toolchain'] = toolchain

    # Try to build the package. Cache fetch failures are tolerated; a failed
    # first build is retried once after `lake update` + `lake clean`.
    try:
        run_cmd('lake', 'exe', 'cache', 'get', allow_failure=True)
        if run_cmd('lake', 'build', allow_failure=True) != 0:
            logging.info('build failed, updating and trying again')
            run_cmd('lake', 'update')
            run_cmd('lake', 'clean')
            run_cmd('lake', 'exe', 'cache', 'get', allow_failure=True)
            run_cmd('lake', 'build')
        logging.info(f'successfully built {args.url} on {toolchain}')
        result['outcome'] = 'success'
    except CommandError:
        logging.error(f'failed to build {args.url} on {toolchain}')
        result['outcome'] = 'failure'

    # Output the result as JSON (to stdout when no output file is given).
    os.chdir(iwd)
    if args.output is None:
        print(json.dumps(result, indent=2))
    else:
        with open(args.output, 'w') as f:
            f.write(json.dumps(result, indent=2))
22 changes: 9 additions & 13 deletions testbed-collect.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,30 +61,26 @@ def find_build_job(repo: str) -> Job:
result = {
'url': f"https://github.com/{TESTBED_REPO}/actions/runs/{args.run_id}/job/{jobId}#step:5:1",
'builtAt': datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
'toolchain': entry['toolchain'],
}
outcome_file = os.path.join(args.results, entry['artifact'], 'outcome.txt')
if os.path.exists(outcome_file):
with open(outcome_file, 'r') as f:
result['outcome'] = f.read().strip()
else:
result['outcome'] = None
result_file = os.path.join(args.results, entry['artifact'], 'result.json')
if not os.path.exists(result_file):
continue
with open(result_file, 'r') as f:
result |= json.load(f)
results[entry['fullName']] = result

if args.index_dir is not None:
for entry in matrix:
if args.index_dir is not None:
pkg_dir = os.path.join(args.index_dir, entry['fullName'])
if not os.path.exists(pkg_dir):
continue
builds_file = os.path.join(pkg_dir, "builds.json")
if os.path.exists(builds_file):
with open(os.path.join(pkg_dir, "builds.json"), 'r') as f:
builds = json.loads(f)
builds = add_build(builds, results[entry['fullName']])
builds = add_build(builds, result)
else:
builds = list()
builds = [result]
with open(builds_file, 'w') as f:
f.write(json.dumps([results[entry['fullName']]], indent=2))
f.write(json.dumps(builds, indent=2))
f.write("\n")

if args.output is None:
Expand Down
24 changes: 22 additions & 2 deletions utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,15 @@
import os
import subprocess

def configure_logging(verbosity):
    """Initialize root logging for the given verbosity.

    0 shows only CRITICAL messages, 1 shows INFO and above,
    and anything else (i.e. 2+) shows DEBUG and above.
    """
    level_by_verbosity = {0: logging.CRITICAL, 1: logging.INFO}
    logging.basicConfig(
        level=level_by_verbosity.get(verbosity, logging.DEBUG),
        format='%(levelname)s: %(message)s')

def load_index(path: str, include_builds=False):
if os.path.isdir(path):
pkgs = list()
Expand Down Expand Up @@ -41,10 +50,21 @@ def paginate(iterable, page_size):
slicer = lambda: list(itertools.islice(it, page_size))
return iter(slicer, [])

def run_cmd(*args: str) -> bytes:
class CommandError(RuntimeError):
    """Raised when an external command invoked via this module exits nonzero."""
    pass

def run_cmd(*args: str, allow_failure=False):
    """Run an external command with inherited stdio, echoing it at INFO level.

    Returns the command's exit code. Unless *allow_failure* is set, a
    nonzero exit code raises CommandError instead of being returned.
    """
    logging.info(f'> {" ".join(args)}')
    code = subprocess.run(args).returncode
    if code == 0 or allow_failure:
        return code
    raise CommandError(f'external command exited with code {code}')

def capture_cmd(*args: str) -> bytes:
    """Run an external command and return its captured stdout bytes.

    The invocation is echoed at DEBUG level. A nonzero exit raises
    CommandError carrying the command's stderr text; on success, any
    stderr output is still logged as an error.
    """
    logging.debug(f'> {" ".join(args)}')
    proc = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if proc.returncode != 0:
        raise CommandError(proc.stderr.decode().strip())
    if proc.stderr:
        logging.error(proc.stderr.decode())
    return proc.stdout

0 comments on commit b557316

Please sign in to comment.