Compress-Stream-Zstd
view release on metacpan or search on metacpan
ext/zstd/tests/automated_benchmarking.py view on Meta::CPAN
# ################################################################
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under both the BSD-style license (found in the
# LICENSE file in the root directory of this source tree) and the GPLv2 (found
# in the COPYING file in the root directory of this source tree).
# You may select, at your option, one of the above-listed licenses.
# ##########################################################################
import argparse
import glob
import json
import os
import time
import pickle as pk
import subprocess
import urllib.request
# GitHub REST endpoint listing all open pull requests against facebook/zstd.
GITHUB_API_PR_URL = "https://api.github.com/repos/facebook/zstd/pulls?state=open"
# Clone-URL template; formatted with the PR author's GitHub username.
GITHUB_URL_TEMPLATE = "https://github.com/{}/zstd"
# Baseline build to benchmark PR builds against ("hash" None means branch tip).
RELEASE_BUILD = {"user": "facebook", "branch": "dev", "hash": None}
# check to see if there are any new PRs every minute
DEFAULT_MAX_API_CALL_FREQUENCY_SEC = 60
# Pickle file caching the PR set seen on the previous poll (see get_new_open_pr_builds).
PREVIOUS_PRS_FILENAME = "prev_prs.pk"
# Not sure what the threshold for triggering alarms should be
# 1% regression sounds like a little too sensitive but the desktop
# that I'm running it on is pretty stable so I think this is fine
CSPEED_REGRESSION_TOLERANCE = 0.01
DSPEED_REGRESSION_TOLERANCE = 0.01
def get_new_open_pr_builds(prev_state=True):
    """Fetch open zstd PRs from the GitHub API and return build descriptors.

    Each descriptor is a dict with keys "user", "branch" and "hash". The full
    PR set is persisted to PREVIOUS_PRS_FILENAME (pickle) so the next call can
    diff against it.

    Args:
        prev_state: when True (default), return only PRs that are new or whose
            head changed since the previous call; when False, return all open
            PRs.
    """
    prev_prs = None
    if os.path.exists(PREVIOUS_PRS_FILENAME):
        with open(PREVIOUS_PRS_FILENAME, "rb") as f:
            prev_prs = pk.load(f)
    # Close the HTTP response deterministically (the original leaked it).
    with urllib.request.urlopen(GITHUB_API_PR_URL) as response:
        data = json.loads(response.read().decode("utf-8"))
    prs = {
        d["url"]: {
            "user": d["user"]["login"],
            "branch": d["head"]["ref"],
            "hash": d["head"]["sha"].strip(),
        }
        for d in data
    }
    with open(PREVIOUS_PRS_FILENAME, "wb") as f:
        pk.dump(prs, f)
    if not prev_state or prev_prs is None:
        return list(prs.values())
    return [pr for url, pr in prs.items() if url not in prev_prs or prev_prs[url] != pr]
def get_latest_hashes():
    """Return [HEAD sha, first-parent sha, second-parent sha] for this repo.

    Parses the first line of `git log -1` / `git show <sha>^N` output, which
    looks like "commit <sha> ...". A missing parent yields "" in its slot
    (e.g. a non-merge commit has no second parent).

    NOTE(review): assumes the CWD is inside a git checkout — verify at caller.
    """
    tmp = subprocess.run(["git", "log", "-1"], stdout=subprocess.PIPE).stdout.decode(
        "utf-8"
    )
    sha1 = tmp.split("\n")[0].split(" ")[1]
    tmp = subprocess.run(
        ["git", "show", "{}^1".format(sha1)], stdout=subprocess.PIPE
    ).stdout.decode("utf-8")
    # Guard against empty output (root commit has no first parent); the
    # original only guarded sha3 and raised IndexError here in that case.
    sha2 = "" if len(tmp) == 0 else tmp.split("\n")[0].split(" ")[1]
    tmp = subprocess.run(
        ["git", "show", "{}^2".format(sha1)], stdout=subprocess.PIPE
    ).stdout.decode("utf-8")
    sha3 = "" if len(tmp) == 0 else tmp.split("\n")[0].split(" ")[1]
    return [sha1.strip(), sha2.strip(), sha3.strip()]
def get_builds_for_latest_hash():
    """Return at most one build descriptor whose head sha matches a commit
    reachable from the local HEAD (HEAD itself or one of its parents).

    Returns an empty list when no open PR corresponds to the local commits.
    """
    recent_hashes = get_latest_hashes()
    matching = [
        build
        for build in get_new_open_pr_builds(False)
        if build["hash"] in recent_hashes
    ]
    # Preserve the original contract: a single-element list or [].
    return matching[:1]
def clone_and_build(build):
if build["user"] != None:
github_url = GITHUB_URL_TEMPLATE.format(build["user"])
os.system(
"""
rm -rf zstd-{user}-{sha} &&
git clone {github_url} zstd-{user}-{sha} &&
cd zstd-{user}-{sha} &&
{checkout_command}
make -j &&
( run in 1.142 second using v1.01-cache-2.11-cpan-39bf76dae61 )