Initial commit
This commit is contained in:
@@ -0,0 +1 @@
|
||||
This directory contains various useful scripts for working with OE builds.
|
||||
Executable
+93
@@ -0,0 +1,93 @@
|
||||
#!/bin/bash
#
# Copyright OpenEmbedded Contributors
#
# Script which can be run on new autobuilder workers to check all needed configuration is present.
# Designed to be run in a repo where bitbake/oe-core are already present.
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Todo
# Add testtools/subunit import test
# Add python3-git test
# Add pigz test
# vnc tests/checkvnc?
# test sendmail works (for QA email notification)
# test error report submission works
# test buildhistory git repo works?
#

# The autobuilder-helper checkout provides the VNC sanity-check script.
if [ ! -x "$HOME/yocto-autobuilder-helper/scripts/checkvnc" ]; then
    echo "$HOME/yocto-autobuilder-helper should be created."
    exit 1
fi
"$HOME/yocto-autobuilder-helper/scripts/checkvnc"

# Everything below needs the build environment to be set up.
if ! . ./oe-init-build-env > /dev/null; then
    exit 1
fi

# Builds need a git identity configured.
if ! git config --global user.name > /dev/null; then
    echo "Please set git config --global user.name"
    exit 1
fi
if ! git config --global user.email > /dev/null; then
    echo "Please set git config --global user.email"
    exit 1
fi

# jinja2 is needed by build/reporting tooling.
if ! python3 -c "import jinja2"; then
    echo "Please ensure jinja2 is available"
    exit 1
fi

# Sanity check that the metadata parses at all.
if ! bitbake -p; then
    echo "Bitbake parsing failed"
    exit 1
fi

# bitbake relies on inotify; too few watches breaks builds.
WATCHES=$(PATH="/sbin:/usr/sbin:$PATH" sysctl fs.inotify.max_user_watches -n)
if (( $WATCHES < 65000 )); then
    # (fixed: the closing parenthesis was missing from this message)
    echo 'Need to increase watches (echo fs.inotify.max_user_watches=65536 | sudo tee -a /etc/sysctl.conf)'
    exit 1
fi
OPEN_FILES=$(ulimit -n)
if (( $OPEN_FILES < 65535 )); then
    echo 'Increase maximum open files in /etc/security/limits.conf'
    echo '* soft nofile 131072'
    echo '* hard nofile 131072'
    exit 1
fi
MAX_PROCESSES=$(ulimit -u)
if (( $MAX_PROCESSES < 514542 )); then
    echo 'Increase maximum user processes in /etc/security/limits.conf'
    echo '* hard nproc 515294'
    echo '* soft nproc 514543'
    exit 1
fi

# Fetch a known-good prebuilt image so runqemu can be tested without a full build.
mkdir -p tmp/deploy/images/qemux86-64
pushd tmp/deploy/images/qemux86-64
if [ ! -e core-image-minimal-qemux86-64.ext4 ]; then
    wget http://downloads.yoctoproject.org/releases/yocto/yocto-4.0/machines/qemu/qemux86-64/core-image-minimal-qemux86-64.ext4
fi
if [ ! -e core-image-minimal-qemux86-64.qemuboot.conf ]; then
    wget http://downloads.yoctoproject.org/releases/yocto/yocto-4.0/machines/qemu/qemux86-64/core-image-minimal-qemux86-64.qemuboot.conf
fi
if [ ! -e bzImage-qemux86-64.bin ]; then
    wget http://downloads.yoctoproject.org/releases/yocto/yocto-4.0/machines/qemu/qemux86-64/bzImage-qemux86-64.bin
fi
popd
bitbake qemu-helper-native

# Check qemu boots both with and without kvm acceleration.
if ! DISPLAY=:1 runqemu serialstdio qemux86-64; then
    echo "Unable to use runqemu"
    exit 1
fi
if ! DISPLAY=:1 runqemu serialstdio qemux86-64 kvm; then
    echo "Unable to use runqemu with kvm"
    exit 1
fi
|
||||
Executable
+120
@@ -0,0 +1,120 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
help ()
{
    # Print usage information for this tool.
    # (fixed: "Avaliable" -> "Available" in the user-visible message)
    base=`basename $0`
    echo -e "Usage: $base command"
    echo "Available commands:"
    echo -e "\texport <file.conf>: export and lock down the AUTOPR values from the PR service into a file for release."
    echo -e "\timport <file.conf>: import the AUTOPR values from the exported file into the PR service."
}
|
||||
|
||||
clean_cache()
{
    # Locate bitbake's parse-cache directory from its environment dump.
    cachedir=$(bitbake -e | grep ^CACHE= | cut -f2 -d\")
    # Stop any active memory resident server
    bitbake -m
    # Drop the cache entries so the next bitbake invocation does a full reparse.
    if [ -n "${cachedir}" ]; then
        rm -f ${cachedir}/bb_cache*.dat.*
    fi
}
|
||||
|
||||
do_export ()
{
    # Export and lock down the AUTOPR values from the PR service into $1.
    file=$1
    if [ "x${file}" == "x" ]; then
        help
        exit 1
    fi
    rm -f ${file}

    clean_cache
    bitbake -R conf/prexport.conf -p
    # Ask bitbake where it wrote the PR dump.
    dumpfile=$(bitbake -R conf/prexport.conf -e | grep ^PRSERV_DUMPFILE= | cut -f2 -d\")
    if [ "x${dumpfile}" != "x" ]; then
        [ -e $dumpfile ] && mv -f $dumpfile $file && echo "Exporting to file $file succeeded!"
        return 0
    fi
    echo "Exporting to file $file failed!"
    return 1
}
|
||||
|
||||
do_import ()
{
    # Import the AUTOPR values from the exported file $1 into the PR service.
    file=$1
    if [ "x${file}" == "x" ]; then
        help
        exit 1
    fi

    clean_cache
    bitbake -R conf/primport.conf -R $file -p
    ret=$?
    if [ $ret -eq 0 ]; then
        echo "Importing from file $file succeeded!"
    else
        echo "Importing from file $file failed!"
    fi
    return $ret
}
|
||||
|
||||
do_migrate_localcount ()
{
    # Convert LOCALCOUNT-based PR values into AUTOINC entries and feed the
    # result into the PR service via do_import.
    df=`bitbake -R conf/migrate_localcount.conf -e | \
            grep ^LOCALCOUNT_DUMPFILE= | cut -f2 -d\"`
    if [ "x${df}" == "x" ];
    then
        echo "LOCALCOUNT_DUMPFILE is not defined!"
        return 1
    fi

    rm -f $df
    clean_cache
    echo "Exporting LOCALCOUNT to AUTOINCs..."
    bitbake -R conf/migrate_localcount.conf -p
    # NOTE(review): uses exit (not return) on failure, aborting the whole
    # script rather than returning to the dispatcher.
    [ ! $? -eq 0 ] && echo "Exporting to file $df failed!" && exit 1

    if [ -e $df ];
    then
        echo "Exporting to file $df succeeded!"
    else
        echo "Exporting to file $df failed!"
        exit 1
    fi

    echo "Importing generated AUTOINC entries..."
    [ -e $df ] && do_import $df

    # $? here is do_import's exit status (the [ -e ] test just passed above).
    if [ ! $? -eq 0 ]
    then
        echo "Migration from LOCALCOUNT to AUTOINCs failed!"
        return 1
    fi

    echo "Migration from LOCALCOUNT to AUTOINCs succeeded!"
    return 0
}
|
||||
|
||||
[ $# -eq 0 ] && help && exit 1

# Validate the file argument only when one is given.  Previously an empty
# $2 fell through to the error branch, which made the argument-less
# "migrate_localcount" command unusable.
if [ -n "$2" ]; then
    case $2 in
    *.conf|*.inc)
        ;;
    *)
        echo ERROR: $2 must end with .conf or .inc!
        exit 1
        ;;
    esac
fi

# Dispatch on the requested command.
case $1 in
export)
    do_export $2
    ;;
import)
    do_import $2
    ;;
migrate_localcount)
    do_migrate_localcount
    ;;
*)
    help
    exit 1
    ;;
esac
|
||||
Executable
+318
@@ -0,0 +1,318 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright (c) 2013 Wind River Systems, Inc.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
import getopt
|
||||
import shutil
|
||||
import re
|
||||
import warnings
|
||||
import subprocess
|
||||
import argparse
|
||||
|
||||
scripts_path = os.path.abspath(os.path.dirname(os.path.abspath(sys.argv[0])))
|
||||
lib_path = scripts_path + '/lib'
|
||||
sys.path = sys.path + [lib_path]
|
||||
|
||||
import scriptpath
|
||||
|
||||
# Figure out where is the bitbake/lib/bb since we need bb.siggen and bb.process
|
||||
bitbakepath = scriptpath.add_bitbake_lib_path()
|
||||
if not bitbakepath:
|
||||
sys.stderr.write("Unable to find bitbake by searching parent directory of this script or PATH\n")
|
||||
sys.exit(1)
|
||||
scriptpath.add_oe_lib_path()
|
||||
import argparse_oe
|
||||
|
||||
import bb.siggen
|
||||
import bb.process
|
||||
|
||||
# Match the stamp's filename
# group(1): PE_PV (PE may be absent)
# group(2): PR
# group(3): TASK
# group(4): HASH
# Raw strings so \d, \w and \. are regex escapes rather than (invalid)
# string escapes, which trigger DeprecationWarning on modern Python.
stamp_re = re.compile(r"(?P<pv>.*)-(?P<pr>r\d+)\.(?P<task>do_\w+)\.(?P<hash>[^\.]*)")
sigdata_re = re.compile(r".*\.sigdata\..*")
|
||||
|
||||
def gen_dict(stamps):
    """
    Generate the dict from the stamps dir.
    The output dict format is:
    {fake_f: {pn: PN, pv: PV, pr: PR, task: TASK, path: PATH}}
    Where:
    fake_f: pv + task + hash
    path: the path to the stamp file
    """
    # The member of the sub dict (A "path" will be appended below)
    sub_mem = ("pv", "pr", "task")
    d = {}
    for dirpath, _, files in os.walk(stamps):
        for f in files:
            # The "bitbake -S" would generate ".sigdata", but no "_setscene".
            # Escape the dots so they match literally; previously the bare
            # '.' matched any character.
            fake_f = re.sub(r'_setscene\.', '.', f)
            fake_f = re.sub(r'\.sigdata', '', fake_f)
            subdict = {}
            tmp = stamp_re.match(fake_f)
            if tmp:
                for i in sub_mem:
                    subdict[i] = tmp.group(i)
            if len(subdict) != 0:
                pn = os.path.basename(dirpath)
                subdict['pn'] = pn
                # The path will be used by os.stat() and bb.siggen
                subdict['path'] = dirpath + "/" + f
                fake_f = tmp.group('pv') + tmp.group('task') + tmp.group('hash')
                d[fake_f] = subdict
    return d
|
||||
|
||||
# Re-construct the dict
|
||||
def recon_dict(dict_in):
|
||||
"""
|
||||
The output dict format is:
|
||||
{pn_task: {pv: PV, pr: PR, path: PATH}}
|
||||
"""
|
||||
dict_out = {}
|
||||
for k in dict_in.keys():
|
||||
subdict = {}
|
||||
# The key
|
||||
pn_task = "%s_%s" % (dict_in.get(k).get('pn'), dict_in.get(k).get('task'))
|
||||
# If more than one stamps are found, use the latest one.
|
||||
if pn_task in dict_out:
|
||||
full_path_pre = dict_out.get(pn_task).get('path')
|
||||
full_path_cur = dict_in.get(k).get('path')
|
||||
if os.stat(full_path_pre).st_mtime > os.stat(full_path_cur).st_mtime:
|
||||
continue
|
||||
subdict['pv'] = dict_in.get(k).get('pv')
|
||||
subdict['pr'] = dict_in.get(k).get('pr')
|
||||
subdict['path'] = dict_in.get(k).get('path')
|
||||
dict_out[pn_task] = subdict
|
||||
|
||||
return dict_out
|
||||
|
||||
def split_pntask(s):
    """
    Split a "pn_task" key into a (pn, task) tuple and return it.
    """
    matched = re.match("(.*)_(do_.*)", s)
    return matched.groups()
|
||||
|
||||
|
||||
def print_added(d_new = None, d_old = None):
    """
    Print tasks present in d_new but not in d_old.

    Matching keys are removed from d_new so that the remaining entries are
    the changed (rather than added) ones.  Returns the number of newly
    added tasks.
    """
    added = {}
    for key in list(d_new.keys()):
        if key not in d_old:
            added[key] = d_new.pop(key)

    if not added:
        return 0

    # Group the output by recipe: {pn: "task1 task2 ..."}
    added_format = {}
    counter = 0
    for key in added:
        pn, task = split_pntask(key)
        if pn in added_format:
            added_format[pn] = "%s %s" % (added_format.get(pn), task)
        else:
            added_format[pn] = task
        counter += 1
    print("=== Newly added tasks: (%s tasks)" % counter)
    for pn in added_format:
        print(" %s: %s" % (pn, added_format.get(pn)))

    return counter
|
||||
|
||||
def print_vrchanged(d_new = None, d_old = None, vr = None):
    """
    Print tasks whose "pv" or "pr" value differs between d_old and d_new.
    The arg "vr" selects the field ("pv" or "pr").

    Changed keys are removed from d_new; returns the number of changed
    tasks.
    """
    changed = {}
    counter = 0
    for key in list(d_new.keys()):
        if d_new.get(key).get(vr) != d_old.get(key).get(vr):
            counter += 1
            pn, task = split_pntask(key)
            # Print each recipe only once: all of its tasks change
            # together when PV or PR changes.
            if pn not in changed:
                changed[pn] = "%s -> %s" % (d_old.get(key).get(vr), d_new.get(key).get(vr))
            del(d_new[key])

    if not changed:
        return 0

    print("\n=== %s changed: (%s tasks)" % (vr.upper(), counter))
    for pn in changed:
        print(" %s: %s" % (pn, changed.get(pn)))

    return counter
|
||||
|
||||
def print_depchanged(d_new = None, d_old = None, verbose = False):
    """
    Print the tasks whose dependencies changed.

    In verbose mode the actual signature differences are printed via
    bb.siggen; otherwise the tasks are summarised per recipe.  Returns the
    number of tasks counted.
    """
    depchanged = {}
    counter = 0
    for key in d_new.keys():
        counter += 1
        pn, task = split_pntask(key)
        if verbose:
            path_old = d_old.get(key).get("path")
            path_new = d_new.get(key).get("path")
            # Only sigdata stamps can be diffed by bb.siggen
            if sigdata_re.match(path_old) and sigdata_re.match(path_new):
                sig_diff = bb.siggen.compare_sigfiles(path_old, path_new)
                if sig_diff:
                    print("\n=== The verbose changes of %s.%s:" % (pn, task))
                    print('\n'.join(sig_diff))
        elif pn in depchanged:
            depchanged[pn] = "%s %s" % (depchanged.get(pn), task)
        else:
            depchanged[pn] = task

    if len(depchanged) > 0:
        print("\n=== Dependencies changed: (%s tasks)" % counter)
        for pn in depchanged.keys():
            print(" %s: %s" % (pn, depchanged[pn]))

    return counter
|
||||
|
||||
|
||||
def main():
    """
    Print what will be done between the current and last builds:
    1) Run "STAMPS_DIR=<path> bitbake -S recipe" to re-generate the stamps
    2) Figure out what are newly added and changed, can't figure out
       what are removed since we can't know the previous stamps
       clearly, for example, if there are several builds, we can't know
       which stamps the last build has used exactly.
    3) Use bb.siggen.compare_sigfiles to diff the old and new stamps
    """

    parser = argparse_oe.ArgumentParser(usage = """%(prog)s [options] [package ...]
print what will be done between the current and last builds, for example:

    $ bitbake core-image-sato
    # Edit the recipes
    $ bitbake-whatchanged core-image-sato

The changes will be printed.

Note:
    The amount of tasks is not accurate when the task is "do_build" since
    it usually depends on other tasks.
    The "nostamp" task is not included.
"""
)
    parser.add_argument("recipe", help="recipe to check")
    parser.add_argument("-v", "--verbose", help = "print the verbose changes", action = "store_true")
    args = parser.parse_args()

    # Get the STAMPS_DIR by asking bitbake for its environment.
    print("Figuring out the STAMPS_DIR ...")
    cmdline = "bitbake -e | sed -ne 's/^STAMPS_DIR=\"\(.*\)\"/\\1/p'"
    # NOTE(review): this try/except only re-raises and is a no-op.
    try:
        stampsdir, err = bb.process.run(cmdline)
    except:
        raise
    if not stampsdir:
        print("ERROR: No STAMPS_DIR found for '%s'" % args.recipe, file=sys.stderr)
        return 2
    stampsdir = stampsdir.rstrip("\n")
    if not os.path.isdir(stampsdir):
        print("ERROR: stamps directory \"%s\" not found!" % stampsdir, file=sys.stderr)
        return 2

    # The new stamps dir (".bbs" sibling of the real one); refuse to
    # clobber a leftover from a previous run.
    new_stampsdir = stampsdir + ".bbs"
    if os.path.exists(new_stampsdir):
        print("ERROR: %s already exists!" % new_stampsdir, file=sys.stderr)
        return 2

    try:
        # Generate the new stamps dir
        print("Generating the new stamps ... (need several minutes)")
        cmdline = "STAMPS_DIR=%s bitbake -S none %s" % (new_stampsdir, args.recipe)
        # FIXME
        # The "bitbake -S" may fail, not fatal error, the stamps will still
        # be generated, this might be a bug of "bitbake -S".
        try:
            bb.process.run(cmdline)
        except Exception as exc:
            print(exc)

        # The dict for the new and old stamps.
        old_dict = gen_dict(stampsdir)
        new_dict = gen_dict(new_stampsdir)

        # Remove the same one from both stamps.
        cnt_unchanged = 0
        for k in list(new_dict.keys()):
            if k in old_dict:
                cnt_unchanged += 1
                del(new_dict[k])
                del(old_dict[k])

        # Re-construct the dict to easily find out what is added or changed.
        # The dict format is:
        # {pn_task: {pv: PV, pr: PR, path: PATH}}
        new_recon = recon_dict(new_dict)
        old_recon = recon_dict(old_dict)

        # Free the (potentially large) raw dicts early.
        del new_dict
        del old_dict

        # Figure out what are changed, the new_recon would be changed
        # by the print_xxx function.
        # Newly added
        cnt_added = print_added(new_recon, old_recon)

        # PV (including PE) and PR changed
        # Let the bb.siggen handle them if verbose
        cnt_rv = {}
        if not args.verbose:
            for i in ('pv', 'pr'):
                cnt_rv[i] = print_vrchanged(new_recon, old_recon, i)

        # Dependencies changed (use bitbake-diffsigs)
        cnt_dep = print_depchanged(new_recon, old_recon, args.verbose)

        total_changed = cnt_added + (cnt_rv.get('pv') or 0) + (cnt_rv.get('pr') or 0) + cnt_dep

        print("\n=== Summary: (%s changed, %s unchanged)" % (total_changed, cnt_unchanged))
        if args.verbose:
            print("Newly added: %s\nDependencies changed: %s\n" % \
                (cnt_added, cnt_dep))
        else:
            print("Newly added: %s\nPV changed: %s\nPR changed: %s\nDependencies changed: %s\n" % \
                (cnt_added, cnt_rv.get('pv') or 0, cnt_rv.get('pr') or 0, cnt_dep))
    except:
        print("ERROR occurred!")
        raise
    finally:
        # Remove the newly generated stamps dir even on failure so a
        # later run does not hit the "already exists" check above.
        if os.path.exists(new_stampsdir):
            print("Removing the newly generated stamps dir ...")
            shutil.rmtree(new_stampsdir)

if __name__ == "__main__":
    sys.exit(main())
|
||||
Executable
+120
@@ -0,0 +1,120 @@
|
||||
#!/bin/sh
|
||||
# Copyright (c) 2020 Wind River Systems, Inc.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
# buildall-qemu: a tool for automating build testing of recipes
|
||||
# TODO: Add support for selecting which qemu architectures to build
|
||||
# TODO: Add support for queueing up multiple recipe builds
|
||||
# TODO: Add more logging options (e.g. local.conf info, bitbake env info)
|
||||
|
||||
usage ()
{
    # Print tool usage.
    # (fixed: the default libc list is "glibc and musl", not "libc and musl")
    base=$(basename "$0")
    echo "Usage: $base [options] [recipename/target]"
    echo "Executes a build of a given target for selected LIBCs. With no options, default to both glibc and musl."
    echo "Options:"
    echo "-l, --libc Specify one of \"glibc\" or \"musl\""
}
|
||||
|
||||
|
||||
# Build recipe $1 for every qemu MACHINE, once per libc in $2, logging
# PASS/FAIL per machine into "<recipe>-buildall.log".
buildall ()
{
    # Get path to oe-core directory. Since oe-init-build-env prepends $PATH with
    # the path to the scripts directory, get it from there
    SCRIPTS_PATH="$(echo "$PATH" | cut -d ":" -f 1)"
    OE_CORE_PATH=$(echo "$SCRIPTS_PATH" | sed 's|\(.*\)/.*|\1|')

    # Get target list and host machine information
    # (all qemu* machine .conf files, with the extension stripped)
    TARGET_LIST=$(find "$OE_CORE_PATH"/meta/conf/machine -maxdepth 1 -type f | grep qemu | sed 's|.*/||' | sed -e 's/\.conf//')

    # Set LIBC value to use for the builds based on options provided by the user
    if [ -n "$2" ]
    then
        LIBC_LIST="$2"
        echo "$LIBC_LIST"
    else
        LIBC_LIST="glibc musl"
        echo "$LIBC_LIST"
    fi

    START_TIME=$(date "+%Y-%m-%d_%H:%M:%S")
    LOG_FILE="$1-buildall.log"
    # Host distro pretty name, with the surrounding quotes stripped.
    OS_INFO=$(grep "PRETTY_NAME=" /etc/os-release | awk -F "=" '{print $2}' | sed -e 's/^"//' -e 's/"$//')

    # Append an existing log file for this build with .old if one exists
    if [ -f "${LOG_FILE}" ]
    then
        mv "${LOG_FILE}" "${LOG_FILE}.old"
    else
        touch "${LOG_FILE}"
    fi

    # Fill the log file with build and host info
    echo "BUILDALL-QEMU LOG FOR $1" >> "${LOG_FILE}"
    echo "START TIME: ${START_TIME}" >> "${LOG_FILE}"
    echo "HOSTNAME: $(uname -n)" >> "${LOG_FILE}"
    echo "HOST OS: ${OS_INFO}" >> "${LOG_FILE}"
    echo "HOST KERNEL: $(uname -r)" >> "${LOG_FILE}"
    echo "===============" >> "${LOG_FILE}"
    echo "BUILD RESULTS:" >> "${LOG_FILE}"

    # start the builds for each MACHINE and TCLIBC
    for j in ${LIBC_LIST}
    do
        echo "[$j]" >> "${LOG_FILE}"
        for i in ${TARGET_LIST}
        do
            echo "$i" "$j"; \
            TCLIBC=$j MACHINE=$i bitbake "$1" && echo "PASS: $i" >> "${LOG_FILE}" || echo "FAIL: $i" >> "${LOG_FILE}"
        done
    done

    # Get pass/fail totals and add them to the end of the log
    PASSED=$(grep "PASS:" "${LOG_FILE}" | wc -l)
    FAILED=$(grep "FAIL:" "${LOG_FILE}" | wc -l)

    echo "===============" >> "${LOG_FILE}"
    echo "PASSED: ${PASSED}" >> "${LOG_FILE}"
    echo "FAILED: ${FAILED}" >> "${LOG_FILE}"
}
|
||||
|
||||
|
||||
# Abort the whole script as soon as any command fails.
set -e

# With no arguments there is nothing to build; show usage and bail out.
[ $# -eq 0 ] && usage && exit 1

# Walk the argument list, remembering the libc selection and the recipe.
RECIPE=
while [ $# -gt 0 ]; do
    case "$1" in
    -l|--libc)
        if [ "$2" = "glibc" ] || [ "$2" = "musl" ]; then
            LIBC_LIST="$2"
        else
            echo "Unrecognized libc option."
            usage && exit 1
        fi
        shift
        shift
        ;;
    *)
        RECIPE="$1"
        shift
        ;;
    esac
done

set -- "$RECIPE"

# Kick off the build if a recipe/target was given.
if [ -n "$1" ]; then
    buildall "$1" "$LIBC_LIST"
fi
|
||||
|
||||
Executable
+108
@@ -0,0 +1,108 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Collects the recorded SRCREV values from buildhistory and reports on them
|
||||
#
|
||||
# Copyright 2013 Intel Corporation
|
||||
# Authored-by: Paul Eggleton <paul.eggleton@intel.com>
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import collections
|
||||
import os
|
||||
import sys
|
||||
import optparse
|
||||
import logging
|
||||
|
||||
def logger_create():
    """Create and return the "buildhistory" logger.

    Logs INFO and above to stderr as "LEVEL: message".
    """
    log = logging.getLogger("buildhistory")
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
    log.addHandler(handler)
    log.setLevel(logging.INFO)
    return log

# Module-level logger shared by the script.
logger = logger_create()
|
||||
|
||||
def main():
    """Walk a buildhistory tree, collect SRCREV values from latest_srcrev
    files and print them as SRCREV:pn-<pn> override assignments."""
    parser = optparse.OptionParser(
        description = "Collects the recorded SRCREV values from buildhistory and reports on them.",
        usage = """
    %prog [options]""")

    parser.add_option("-a", "--report-all",
            help = "Report all SRCREV values, not just ones where AUTOREV has been used",
            action="store_true", dest="reportall")
    parser.add_option("-f", "--forcevariable",
            help = "Use forcevariable override for all output lines",
            action="store_true", dest="forcevariable")
    parser.add_option("-p", "--buildhistory-dir",
            help = "Specify path to buildhistory directory (defaults to buildhistory/ under cwd)",
            action="store", dest="buildhistory_dir", default='buildhistory/')

    options, args = parser.parse_args(sys.argv)

    # args[0] is the program name, so more than one entry means stray
    # positional arguments were given.
    if len(args) > 1:
        sys.stderr.write('Invalid argument(s) specified: %s\n\n' % ' '.join(args[1:]))
        parser.print_help()
        sys.exit(1)

    if not os.path.exists(options.buildhistory_dir):
        sys.stderr.write('Buildhistory directory "%s" does not exist\n\n' % options.buildhistory_dir)
        parser.print_help()
        sys.exit(1)

    if options.forcevariable:
        forcevariable = ':forcevariable'
    else:
        forcevariable = ''

    # {layer-ish parent dir: [(pn, srcrev name or None, srcrev), ...]}
    all_srcrevs = collections.defaultdict(list)
    for root, dirs, files in os.walk(options.buildhistory_dir):
        # Sort for deterministic output; skip the history git metadata.
        dirs.sort()
        if '.git' in dirs:
            dirs.remove('.git')
        for fn in files:
            if fn == 'latest_srcrev':
                curdir = os.path.basename(os.path.dirname(root))
                fullpath = os.path.join(root, fn)
                pn = os.path.basename(root)
                srcrev = None
                orig_srcrev = None
                orig_srcrevs = {}
                srcrevs = {}
                with open(fullpath) as f:
                    for line in f:
                        # "value" is the RHS of the most recent '=' line;
                        # the branches below rely on it.
                        if '=' in line:
                            splitval = line.split('=')
                            value = splitval[1].strip('" \t\n\r')
                        # "# SRCREV" lines record the original (pre-AUTOREV)
                        # values; bare SRCREV lines record the resolved ones.
                        if line.startswith('# SRCREV = '):
                            orig_srcrev = value
                        elif line.startswith('# SRCREV_'):
                            splitval = line.split('=')
                            # NOTE(review): takes only the first '_'-separated
                            # component, so names containing '_' are truncated
                            # — confirm against latest_srcrev contents.
                            name = splitval[0].split('_')[1].strip()
                            orig_srcrevs[name] = value
                        elif line.startswith('SRCREV ='):
                            srcrev = value
                        elif line.startswith('SRCREV_'):
                            name = splitval[0].split('_')[1].strip()
                            srcrevs[name] = value
                # Only report revisions that differ from the recorded
                # original, unless --report-all was given.
                if srcrev and (options.reportall or srcrev != orig_srcrev):
                    all_srcrevs[curdir].append((pn, None, srcrev))
                for name, value in srcrevs.items():
                    orig = orig_srcrevs.get(name, orig_srcrev)
                    if options.reportall or value != orig:
                        all_srcrevs[curdir].append((pn, name, value))

    # Emit the collected values grouped by directory.
    for curdir, srcrevs in sorted(all_srcrevs.items()):
        if srcrevs:
            print('# %s' % curdir)
            for pn, name, srcrev in srcrevs:
                if name:
                    print('SRCREV_%s:pn-%s%s = "%s"' % (name, pn, forcevariable, srcrev))
                else:
                    print('SRCREV:pn-%s%s = "%s"' % (pn, forcevariable, srcrev))


if __name__ == "__main__":
    main()
|
||||
Executable
+134
@@ -0,0 +1,134 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Report significant differences in the buildhistory repository since a specific revision
|
||||
#
|
||||
# Copyright (C) 2013 Intel Corporation
|
||||
# Author: Paul Eggleton <paul.eggleton@linux.intel.com>
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
|
||||
# Ensure PythonGit is installed (buildhistory_analysis needs it)
|
||||
try:
|
||||
import git
|
||||
except ImportError:
|
||||
print("Please install GitPython (python3-git) 0.3.4 or later in order to use this script")
|
||||
sys.exit(1)
|
||||
|
||||
def get_args_parser():
    """Build and return the argument parser for buildhistory-diff.

    Fixes the '--signatures' long option, which was previously declared
    with three leading dashes ('---signatures') and therefore could not
    be used as '--signatures'.
    """
    description = "Reports significant differences in the buildhistory repository."

    parser = argparse.ArgumentParser(description=description,
                                     usage="""
%(prog)s [options] [from-revision [to-revision]]
(if not specified, from-revision defaults to build-minus-1, and to-revision defaults to HEAD)""")

    default_dir = os.path.join(os.environ.get('BUILDDIR', '.'), 'buildhistory')

    parser.add_argument('-p', '--buildhistory-dir',
                        action='store',
                        dest='buildhistory_dir',
                        default=default_dir,
                        help="Specify path to buildhistory directory (defaults to buildhistory/ under cwd)")
    parser.add_argument('-v', '--report-version',
                        action='store_true',
                        dest='report_ver',
                        default=False,
                        help="Report changes in PKGE/PKGV/PKGR even when the values are still the default (PE/PV/PR)")
    parser.add_argument('-a', '--report-all',
                        action='store_true',
                        dest='report_all',
                        default=False,
                        help="Report all changes, not just the default significant ones")
    parser.add_argument('-s', '--signatures',
                        action='store_true',
                        dest='sigs',
                        default=False,
                        help="Report list of signatures differing instead of output")
    parser.add_argument('-S', '--signatures-with-diff',
                        action='store_true',
                        dest='sigsdiff',
                        default=False,
                        help="Report on actual signature differences instead of output (requires signature data to have been generated, either by running the actual tasks or using bitbake -S)")
    parser.add_argument('-e', '--exclude-path',
                        action='append',
                        help="Exclude path from the output")
    parser.add_argument('-c', '--colour',
                        choices=('yes', 'no', 'auto'),
                        default="auto",
                        help="Whether to colourise (defaults to auto)")
    # Positional from/to revisions, hidden from --help output.
    parser.add_argument('revisions',
                        default = ['build-minus-1', 'HEAD'],
                        nargs='*',
                        help=argparse.SUPPRESS)
    return parser
|
||||
|
||||
def main():
    """Parse arguments, resolve the revision range and print significant
    buildhistory changes between the two revisions."""

    parser = get_args_parser()
    args = parser.parse_args()

    # At most two positional revisions (from, to) are accepted.
    if len(args.revisions) > 2:
        sys.stderr.write('Invalid argument(s) specified: %s\n\n' % ' '.join(args.revisions[2:]))
        parser.print_help()

        sys.exit(1)

    if not os.path.exists(args.buildhistory_dir):
        sys.stderr.write('Buildhistory directory "%s" does not exist\n\n' % args.buildhistory_dir)
        parser.print_help()
        sys.exit(1)

    # Make the script's lib/ directory importable.
    scripts_path = os.path.abspath(os.path.dirname(os.path.abspath(sys.argv[0])))
    lib_path = scripts_path + '/lib'
    sys.path = sys.path + [lib_path]

    import scriptpath

    # Set path to OE lib dir so we can import the buildhistory_analysis module
    scriptpath.add_oe_lib_path()
    # Set path to bitbake lib dir so the buildhistory_analysis module can load bb.utils
    bitbakepath = scriptpath.add_bitbake_lib_path()

    if not bitbakepath:
        sys.stderr.write("Unable to find bitbake by searching parent directory of this script or PATH\n")
        sys.exit(1)

    # One positional may be "from..to" or just "from" (to defaults to HEAD);
    # two positionals are taken as (from, to).  The argparse default
    # guarantees at least one entry.
    if len(args.revisions) == 1:
        if '..' in args.revisions[0]:
            fromrev, torev = args.revisions[0].split('..')
        else:
            fromrev, torev = args.revisions[0], 'HEAD'
    elif len(args.revisions) == 2:
        fromrev, torev = args.revisions

    # Deferred imports: only resolvable after the sys.path setup above.
    from oe.buildhistory_analysis import init_colours, process_changes
    import gitdb

    init_colours({"yes": True, "no": False, "auto": sys.stdout.isatty()}[args.colour])

    try:
        changes = process_changes(args.buildhistory_dir, fromrev, torev,
                                  args.report_all, args.report_ver, args.sigs,
                                  args.sigsdiff, args.exclude_path)
    except gitdb.exc.BadObject as e:
        # NOTE(review): args.revisions defaults to a non-empty list, so
        # this first branch looks unreachable — confirm intent.
        if not args.revisions:
            sys.stderr.write("Unable to find previous build revision in buildhistory repository\n\n")
            parser.print_help()
        else:
            sys.stderr.write('Specified git revision "%s" is not valid\n' % e.args[0])
        sys.exit(1)

    # Print only changes that render to non-empty text.
    for chg in changes:
        out = str(chg)
        if out:
            print(out)

    sys.exit(0)

if __name__ == "__main__":
    main()
|
||||
Executable
+302
@@ -0,0 +1,302 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Script for comparing buildstats from two different builds
|
||||
#
|
||||
# Copyright (c) 2016, Intel Corporation.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import argparse
|
||||
import glob
|
||||
import logging
|
||||
import math
|
||||
import os
|
||||
import sys
|
||||
from operator import attrgetter
|
||||
|
||||
# Import oe libs
|
||||
scripts_path = os.path.dirname(os.path.realpath(__file__))
|
||||
sys.path.append(os.path.join(scripts_path, 'lib'))
|
||||
from buildstats import BuildStats, diff_buildstats, taskdiff_fields, BSVerDiff
|
||||
|
||||
|
||||
# Setup logging
|
||||
logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
|
||||
log = logging.getLogger()
|
||||
|
||||
|
||||
class ScriptError(Exception):
    """Raised for errors internal to this script so they can be reported cleanly."""
|
||||
|
||||
|
||||
def read_buildstats(path, multi):
    """Read buildstats from path.

    path may be a JSON buildstats file, a single buildstat directory
    (containing a 'build_stats' file), or — with multi=True — a directory
    holding several buildstats, which are aggregated.

    Raises ScriptError if path is missing, ambiguous without multi, or
    contains no buildstats.
    """
    if not os.path.exists(path):
        raise ScriptError("No such file or directory: {}".format(path))

    if os.path.isfile(path):
        return BuildStats.from_file_json(path)

    if os.path.isfile(os.path.join(path, 'build_stats')):
        return BuildStats.from_dir(path)

    # Handle a non-buildstat directory
    subpaths = sorted(glob.glob(path + '/*'))
    if len(subpaths) > 1:
        if multi:
            log.info("Averaging over {} buildstats from {}".format(
                len(subpaths), path))
        else:
            # (fixed: message said "of use the --multi option")
            raise ScriptError("Multiple buildstats found in '{}'. Please give "
                              "a single buildstat directory or use the --multi "
                              "option".format(path))
    bs = None
    for subpath in subpaths:
        if os.path.isfile(subpath):
            _bs = BuildStats.from_file_json(subpath)
        else:
            _bs = BuildStats.from_dir(subpath)
        if bs is None:
            bs = _bs
        else:
            bs.aggregate(_bs)
    if not bs:
        raise ScriptError("No buildstats found under {}".format(path))

    return bs
|
||||
|
||||
|
||||
def print_ver_diff(bs1, bs2):
    """Print a report of package version differences between two builds."""

    diff = BSVerDiff(bs1, bs2)

    # Column width: longest recipe name seen in either build
    maxlen = max(len(name) for name in set(bs1.keys()).union(set(bs2.keys())))

    simple_fmt = "  {:{maxlen}} ({})"
    for heading, entries in (("NEW RECIPES", diff.new),
                             ("DROPPED RECIPES", diff.dropped)):
        if entries:
            print("\n{}:".format(heading))
            print("-" * (len(heading) + 1))
            for name, val in sorted(entries.items()):
                print(simple_fmt.format(name, val.nevr, maxlen=maxlen))

    change_fmt = "  {0:{maxlen}} {1:<20} ({2})"
    for heading, entries, attr in (("REVISION CHANGED", diff.rchanged, 'revision'),
                                   ("VERSION CHANGED", diff.vchanged, 'version'),
                                   ("EPOCH CHANGED", diff.echanged, 'epoch')):
        if entries:
            print("\n{}:".format(heading))
            print("-" * (len(heading) + 1))
            for name, val in sorted(entries.items()):
                change = "{} -> {}".format(getattr(val.left, attr),
                                           getattr(val.right, attr))
                nevr = "{} -> {}".format(val.left.nevr, val.right.nevr)
                print(change_fmt.format(name, change, nevr, maxlen=maxlen))
|
||||
|
||||
|
||||
def print_task_diff(bs1, bs2, val_type, min_val=0, min_absdiff=0, sort_by=('absdiff',), only_tasks=[]):
    """Diff task execution times.

    Prints a table of per-task differences of the buildstat attribute
    'val_type' between two builds, followed by a cumulative summary.
    Tasks below 'min_val' or whose difference is below 'min_absdiff'
    are filtered out by diff_buildstats().

    NOTE(review): only_tasks=[] is a mutable default; safe here because
    it is only read, never mutated.
    """
    def val_to_str(val, human_readable=False):
        """Convert raw value to printable string"""
        def hms_time(secs):
            """Get time in human-readable HH:MM:SS format"""
            h = int(secs / 3600)
            m = int((secs % 3600) / 60)
            s = secs % 60
            if h == 0:
                return "{:02d}:{:04.1f}".format(m, s)
            else:
                return "{:d}:{:02d}:{:04.1f}".format(h, m, s)

        # Formatting depends on the attribute name: anything containing
        # 'time' is seconds, 'bytes' gets binary prefixes, 'ops' decimal
        if 'time' in val_type:
            if human_readable:
                return hms_time(val)
            else:
                return "{:.1f}s".format(val)
        elif 'bytes' in val_type and human_readable:
            # NOTE(review): math.log raises ValueError for val == 0;
            # presumably filtered out upstream via min_val -- confirm
            prefix = ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi']
            dec = int(math.log(val, 2) / 10)
            prec = 1 if dec > 0 else 0
            return "{:.{prec}f}{}B".format(val / (2 ** (10 * dec)),
                                           prefix[dec], prec=prec)
        elif 'ops' in val_type and human_readable:
            prefix = ['', 'k', 'M', 'G', 'T', 'P']
            dec = int(math.log(val, 1000))
            prec = 1 if dec > 0 else 0
            return "{:.{prec}f}{}ops".format(val / (1000 ** dec),
                                             prefix[dec], prec=prec)
        # Fallback: plain integer rendering
        return str(int(val))

    def sum_vals(buildstats):
        """Get cumulative sum of all tasks"""
        total = 0.0
        for recipe_data in buildstats.values():
            for name, bs_task in recipe_data.tasks.items():
                if not only_tasks or name in only_tasks:
                    total += getattr(bs_task, val_type)
        return total

    if min_val:
        print("Ignoring tasks less than {} ({})".format(
            val_to_str(min_val, True), val_to_str(min_val)))
    if min_absdiff:
        print("Ignoring differences less than {} ({})".format(
            val_to_str(min_absdiff, True), val_to_str(min_absdiff)))

    # Prepare the data
    tasks_diff = diff_buildstats(bs1, bs2, val_type, min_val, min_absdiff, only_tasks)

    # Sort our list. Applying the sort keys in reverse order relies on
    # sorted() being stable, so the first key in sort_by wins ties last.
    for field in reversed(sort_by):
        if field.startswith('-'):
            # Leading '-' means descending order on this field
            field = field[1:]
            reverse = True
        else:
            reverse = False
        tasks_diff = sorted(tasks_diff, key=attrgetter(field), reverse=reverse)

    # First row is the header; field widths are grown below to fit data
    linedata = [(' ', 'PKG', ' ', 'TASK', 'ABSDIFF', 'RELDIFF',
                 val_type.upper() + '1', val_type.upper() + '2')]
    field_lens = dict([('len_{}'.format(i), len(f)) for i, f in enumerate(linedata[0])])

    # Prepare fields in string format and measure field lengths
    for diff in tasks_diff:
        # Show the task-level op marker only when the package itself is
        # unchanged (otherwise the package op already tells the story)
        task_prefix = diff.task_op if diff.pkg_op == ' ' else ' '
        linedata.append((diff.pkg_op, diff.pkg, task_prefix, diff.task,
                         val_to_str(diff.absdiff),
                         '{:+.1f}%'.format(diff.reldiff),
                         val_to_str(diff.value1),
                         val_to_str(diff.value2)))
        for i, field in enumerate(linedata[-1]):
            key = 'len_{}'.format(i)
            if len(field) > field_lens[key]:
                field_lens[key] = len(field)

    # Print data
    print()
    for fields in linedata:
        print("{:{len_0}}{:{len_1}} {:{len_2}}{:{len_3}} {:>{len_4}} {:>{len_5}} {:>{len_6}} -> {:{len_7}}".format(
            *fields, **field_lens))

    # Print summary of the diffs
    total1 = sum_vals(bs1)
    total2 = sum_vals(bs2)
    print("\nCumulative {}:".format(val_type))
    print (" {} {:+.1f}% {} ({}) -> {} ({})".format(
        val_to_str(total2 - total1), 100 * (total2-total1) / total1,
        val_to_str(total1, True), val_to_str(total1),
        val_to_str(total2, True), val_to_str(total2)))
|
||||
|
||||
|
||||
def parse_args(argv):
    """Parse cmdline arguments.

    Returns the parsed argparse.Namespace. The defaults of --min-val and
    --min-absdiff depend on --diff-attr and are resolved after parsing
    (see the sentinel trick below).
    """
    description="""
Script for comparing buildstats of two separate builds."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description=description)

    # Per-attribute default filter thresholds. The dict objects also act
    # as sentinels: after parsing, an identity check tells us whether the
    # user supplied an explicit value or left the default untouched.
    min_val_defaults = {'cputime': 3.0,
                        'read_bytes': 524288,
                        'write_bytes': 524288,
                        'read_ops': 500,
                        'write_ops': 500,
                        'walltime': 5}
    min_absdiff_defaults = {'cputime': 1.0,
                            'read_bytes': 131072,
                            'write_bytes': 131072,
                            'read_ops': 50,
                            'write_ops': 50,
                            'walltime': 2}

    parser.add_argument('--debug', '-d', action='store_true',
                        help="Verbose logging")
    parser.add_argument('--ver-diff', action='store_true',
                        help="Show package version differences and exit")
    parser.add_argument('--diff-attr', default='cputime',
                        choices=min_val_defaults.keys(),
                        help="Buildstat attribute which to compare")
    parser.add_argument('--min-val', default=min_val_defaults, type=float,
                        help="Filter out tasks less than MIN_VAL. "
                             "Default depends on --diff-attr.")
    parser.add_argument('--min-absdiff', default=min_absdiff_defaults, type=float,
                        help="Filter out tasks whose difference is less than "
                             "MIN_ABSDIFF, Default depends on --diff-attr.")
    parser.add_argument('--sort-by', default='absdiff',
                        help="Comma-separated list of field sort order. "
                             "Prepend the field name with '-' for reversed sort. "
                             "Available fields are: {}".format(', '.join(taskdiff_fields)))
    parser.add_argument('--multi', action='store_true',
                        help="Read all buildstats from the given paths and "
                             "average over them")
    parser.add_argument('--only-task', dest='only_tasks', metavar='TASK', action='append', default=[],
                        help="Only include TASK in report. May be specified multiple times")
    parser.add_argument('buildstats1', metavar='BUILDSTATS1', help="'Left' buildstat")
    parser.add_argument('buildstats2', metavar='BUILDSTATS2', help="'Right' buildstat")

    args = parser.parse_args(argv)

    # We do not need/want to read all buildstats if we just want to look at the
    # package versions
    if args.ver_diff:
        args.multi = False

    # Handle defaults for the filter arguments: 'is' detects the untouched
    # sentinel dict and replaces it with the per-attribute default value
    if args.min_val is min_val_defaults:
        args.min_val = min_val_defaults[args.diff_attr]
    if args.min_absdiff is min_absdiff_defaults:
        args.min_absdiff = min_absdiff_defaults[args.diff_attr]

    return args
|
||||
|
||||
def main(argv=None):
    """Script entry point: parse args, read both buildstats, print the diff."""
    args = parse_args(argv)
    if args.debug:
        log.setLevel(logging.DEBUG)

    # Validate sort fields before doing any heavy lifting
    sort_by = []
    for field in args.sort_by.split(','):
        if field.lstrip('-') not in taskdiff_fields:
            log.error("Invalid sort field '%s' (must be one of: %s)" %
                      (field, ', '.join(taskdiff_fields)))
            sys.exit(1)
        sort_by.append(field)

    try:
        bs1 = read_buildstats(args.buildstats1, args.multi)
        bs2 = read_buildstats(args.buildstats2, args.multi)

        if args.ver_diff:
            print_ver_diff(bs1, bs2)
            return 0

        print_task_diff(bs1, bs2, args.diff_attr, args.min_val,
                        args.min_absdiff, sort_by, args.only_tasks)
    except ScriptError as err:
        log.error(str(err))
        return 1
    return 0
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
Executable
+126
@@ -0,0 +1,126 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Dump a summary of the specified buildstats to the terminal, filtering and
|
||||
# sorting by walltime.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
|
||||
import argparse
|
||||
import dataclasses
|
||||
import datetime
|
||||
import enum
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
scripts_path = os.path.dirname(os.path.realpath(__file__))
|
||||
sys.path.append(os.path.join(scripts_path, "lib"))
|
||||
import buildstats
|
||||
|
||||
|
||||
@dataclasses.dataclass
class Task:
    """One executed build task and its timing information."""
    # Name of the recipe the task belongs to
    recipe: str
    # Task name (e.g. "do_compile")
    task: str
    # Wall-clock time at which the task started
    start: datetime.datetime
    # How long the task ran (whole seconds)
    duration: datetime.timedelta
|
||||
|
||||
|
||||
class Sorting(enum.Enum):
    """Available sort orders for the task listing."""
    start = 1
    duration = 2

    # argparse integration: render members by their bare name so that
    # --help and error messages show "start"/"duration"
    def __str__(self) -> str:
        return self.name

    def __repr__(self) -> str:
        return self.name

    @staticmethod
    def from_string(s: str):
        """Map a command-line string to a member; unknown strings pass
        through unchanged so argparse can reject them via 'choices'."""
        return Sorting.__members__.get(s, s)
|
||||
|
||||
|
||||
def read_buildstats(path: pathlib.Path) -> buildstats.BuildStats:
    """Load buildstats from either a JSON file or a buildstats directory."""
    if path.is_file():
        # A single JSON buildstats file
        return buildstats.BuildStats.from_file_json(path)
    if path.exists():
        # A buildstats directory is identified by its 'build_stats' file
        if (path / "build_stats").is_file():
            return buildstats.BuildStats.from_dir(path)
        raise Exception(f"Cannot find buildstats in {path}")
    raise Exception(f"No such file or directory: {path}")
|
||||
|
||||
|
||||
def dump_buildstats(args, bs: buildstats.BuildStats):
    """Print one line per task, filtered and sorted per the parsed args.

    args: argparse.Namespace with 'sort' (a Sorting member), 'shortest'
          and 'highlight' (both seconds).
    bs:   loaded BuildStats to summarise.
    """
    # Flatten the per-recipe buildstats into a list of Task records
    tasks = []
    for recipe in bs.values():
        for task, stats in recipe.tasks.items():
            t = Task(
                recipe.name,
                task,
                datetime.datetime.fromtimestamp(stats["start_time"]),
                datetime.timedelta(seconds=int(stats.walltime)),
            )
            tasks.append(t)

    # args.sort.name is "start" or "duration", matching Task attributes
    tasks.sort(key=lambda t: getattr(t, args.sort.name))

    minimum = datetime.timedelta(seconds=args.shortest)
    highlight = datetime.timedelta(seconds=args.highlight)

    for t in tasks:
        if t.duration >= minimum:
            line = f"{t.duration} {t.recipe}:{t.task}"
            if args.highlight and t.duration >= highlight:
                # Bold long-running tasks using ANSI escape codes
                print(f"\033[1m{line}\033[0m")
            else:
                print(line)
|
||||
|
||||
|
||||
def main(argv=None) -> int:
    """Script entry point: parse arguments, load stats, dump the summary."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )

    parser.add_argument(
        "buildstats", metavar="BUILDSTATS", help="Buildstats file", type=pathlib.Path
    )
    parser.add_argument(
        "--sort", "-s",
        type=Sorting.from_string,
        choices=list(Sorting),
        default=Sorting.start,
        help="Sort tasks",
    )
    parser.add_argument(
        "--shortest", "-t",
        type=int,
        default=1,
        metavar="SECS",
        help="Hide tasks shorter than SECS seconds",
    )
    parser.add_argument(
        "--highlight", "-g",
        type=int,
        default=60,
        metavar="SECS",
        help="Highlight tasks longer than SECS seconds (0 disabled)",
    )

    options = parser.parse_args(argv)

    # Load first, then render
    stats = read_buildstats(options.buildstats)
    dump_buildstats(options, stats)

    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
Executable
+1384
File diff suppressed because it is too large
Load Diff
Executable
+25
@@ -0,0 +1,25 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
# Hook to add source component/revision info to commit message
|
||||
# Parameter:
|
||||
# $1 patch-file
|
||||
# $2 revision
|
||||
# $3 reponame
|
||||
|
||||
# Positional parameters (see header): patch file, source revision, repo name
patchfile=$1
rev=$2
reponame=$3

# Prefix the Subject with the component name. The "\($reponame: \)*"
# strips any existing "<reponame>: " prefix first, so re-running the
# hook does not stack prefixes.
sed -i -e "0,/^Subject:/s#^Subject: \[PATCH\] \($reponame: \)*\(.*\)#Subject: \[PATCH\] $reponame: \2#" $patchfile
if grep -q '^Signed-off-by:' $patchfile; then
    # Insert before Signed-off-by.
    sed -i -e "0,/^Signed-off-by:/s#\(^Signed-off-by:.*\)#\(From $reponame rev: $rev\)\n\n\1#" $patchfile
else
    # Insert before final --- separator, with extra blank lines removed.
    # perl is used because the substitution spans multiple lines (/s).
    perl -e "\$_ = join('', <>); s/^(.*\S[ \t]*)(\n|\n\s*\n)---\n/\$1\n\nFrom $reponame rev: $rev\n---\n/s; print;" $patchfile >$patchfile.tmp
    mv $patchfile.tmp $patchfile
fi
|
||||
@@ -0,0 +1,93 @@
|
||||
# combo-layer example configuration file
|
||||
|
||||
# Default values for all sections.
|
||||
[DEFAULT]
|
||||
|
||||
# Add 'Signed-off-by' to all commits that get imported automatically.
|
||||
signoff = True
|
||||
|
||||
# component name
|
||||
[bitbake]
|
||||
|
||||
# Override signedoff default above (not very useful, but possible).
|
||||
signoff = False
|
||||
|
||||
# mandatory options
|
||||
# git upstream uri
|
||||
src_uri = git://git.openembedded.org/bitbake
|
||||
|
||||
# the directory to clone the component repo
|
||||
local_repo_dir = /home/kyu3/src/test/bitbake
|
||||
|
||||
# the relative dir within the combo repo to put the component files
|
||||
# use "." if the files should be in the root dir
|
||||
dest_dir = bitbake
|
||||
|
||||
# the last update revision.
|
||||
# "init" will set this to the latest revision automatically, however if it
|
||||
# is empty when "update" is run, the tool will start from the first commit.
|
||||
# Note that this value will get updated by "update" if the component repo's
|
||||
# latest revision changed and the operation completes successfully.
|
||||
last_revision =
|
||||
|
||||
# optional options:
|
||||
|
||||
# branch: specify the branch in the component repo to pull from
|
||||
# (master if not specified)
|
||||
|
||||
# file_filter: only include the specified file(s)
|
||||
# file_filter = [path] [path] ...
|
||||
# example:
|
||||
# file_filter = src/ : only include the subdir src
|
||||
# file_filter = src/*.c : only include the src *.c file
|
||||
# file_filter = src/main.c src/Makefile.am : only include these two files
|
||||
|
||||
# file_exclude: filter out these file(s)
|
||||
# file_exclude = [path] [path] ...
|
||||
#
|
||||
# Each entry must match a file name. In contrast to file_filter, matching
|
||||
# a directory has no effect. To achieve that, append a * wildcard
|
||||
# at the end.
|
||||
#
|
||||
# Wildcards are applied to the complete path and also match slashes.
|
||||
#
|
||||
# example:
|
||||
# file_exclude = src/foobar/* : exclude everything under src/foobar
|
||||
# file_exclude = src/main.c : filter out main.c after including it with file_filter = src/*.c
|
||||
# file_exclude = *~ : exclude backup files
|
||||
|
||||
# hook: if provided, the tool will call the hook to process the generated
|
||||
# patch from upstream, and then apply the modified patch to the combo
|
||||
# repo.
|
||||
# the hook script is called as follows: ./hook patchpath revision reponame
|
||||
# example:
|
||||
# hook = combo-layer-hook-default.sh
|
||||
|
||||
# since_revision:
|
||||
# since_revision = release-1-2
|
||||
# since_revision = 12345 abcdf
|
||||
#
|
||||
# If provided, truncate imported history during "combo-layer --history
|
||||
# init" at the specified revision(s). More than one can be specified
|
||||
# to cut off multiple component branches.
|
||||
#
|
||||
# The specified commits themselves do not get imported. Instead, an
|
||||
# artificial commit with "unknown" author is created with a content
|
||||
# that matches the original commit.
|
||||
|
||||
[oe-core]
|
||||
src_uri = git://git.openembedded.org/openembedded-core
|
||||
local_repo_dir = /home/kyu3/src/test/oecore
|
||||
dest_dir = .
|
||||
last_revision =
|
||||
since_revision = some-tag-or-commit-on-master-branch
|
||||
|
||||
# It is also possible to embed python code in the config values. Similar
|
||||
# to bitbake it considers every value starting with @ to be a python
|
||||
# script.
|
||||
# e.g. local_repo_dir could easily be configured using an environment
|
||||
# variable:
|
||||
#
|
||||
# [bitbake]
|
||||
# local_repo_dir = @os.getenv("LOCAL_REPO_DIR") + "/bitbake"
|
||||
#
|
||||
+124
@@ -0,0 +1,124 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright (c) 2011, Intel Corporation.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
#
|
||||
# DESCRIPTION
|
||||
# This script operates on the .dat file generated by bb-matrix.sh. It tolerates
|
||||
# the header by skipping the first line, but error messages and bad data records
|
||||
# need to be removed first. It will generate three views of the plot, and leave
|
||||
# an interactive view open for further analysis.
|
||||
#
|
||||
# AUTHORS
|
||||
# Darren Hart <dvhart@linux.intel.com>
|
||||
#
|
||||
|
||||
# Setup the defaults
|
||||
DATFILE="bb-matrix.dat"
|
||||
XLABEL="BB_NUMBER_THREADS"
|
||||
YLABEL="PARALLEL_MAKE"
|
||||
FIELD=3
|
||||
DEF_TITLE="Elapsed Time (seconds)"
|
||||
PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100"
|
||||
SIZE="640,480"
|
||||
|
||||
function usage {
    # Print command-line help for this script on stdout
    CMD=$(basename $0)
    cat <<EOM
Usage: $CMD [-d datfile] [-f field] [-h] [-t title] [-w]
  -d datfile    The data file generated by bb-matrix.sh (default: $DATFILE)
  -f field      The field index to plot as the Z axis from the data file
                (default: $FIELD, "$DEF_TITLE")
  -h            Display this help message
  -s W,H        PNG and window size in pixels (default: $SIZE)
  -t title      The title to display, should describe the field (-f) and units
                (default: "$DEF_TITLE")
  -w            Render the plot as wireframe with a 2D colormap projected on the
                XY plane rather than as the texture for the surface
EOM
}
|
||||
|
||||
# Parse and validate arguments
|
||||
while getopts "d:f:hs:t:w" OPT; do
|
||||
case $OPT in
|
||||
d)
|
||||
DATFILE="$OPTARG"
|
||||
;;
|
||||
f)
|
||||
FIELD="$OPTARG"
|
||||
;;
|
||||
h)
|
||||
usage
|
||||
exit 0
|
||||
;;
|
||||
s)
|
||||
SIZE="$OPTARG"
|
||||
;;
|
||||
t)
|
||||
TITLE="$OPTARG"
|
||||
;;
|
||||
w)
|
||||
PM3D_FRAGMENT="set pm3d at b"
|
||||
W="-w"
|
||||
;;
|
||||
*)
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Ensure the data file exists
|
||||
if [ ! -f "$DATFILE" ]; then
|
||||
echo "ERROR: $DATFILE does not exist"
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
PLOT_BASENAME=${DATFILE%.*}-f$FIELD$W
|
||||
|
||||
# Set a sane title
|
||||
# TODO: parse the header and define titles for each format parameter for TIME(1)
|
||||
if [ -z "$TITLE" ]; then
|
||||
if [ ! "$FIELD" == "3" ]; then
|
||||
TITLE="Field $FIELD"
|
||||
else
|
||||
TITLE="$DEF_TITLE"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Determine the dgrid3d mesh dimensions size.
# Column 1 is BB_NUMBER_THREADS, column 2 is PARALLEL_MAKE; leading zeros
# are stripped so the values sort numerically.
MIN=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 1 | sed 's/^0*//' | sort -n | uniq | head -n1)
MAX=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 1 | sed 's/^0*//' | sort -n | uniq | tail -n1)
# Use POSIX $(( )) arithmetic; the old $[ ] form is deprecated in bash
BB_CNT=$(( MAX - MIN + 1 ))
MIN=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 2 | sed 's/^0*//' | sort -n | uniq | head -n1)
MAX=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 2 | sed 's/^0*//' | sort -n | uniq | tail -n1)
PM_CNT=$(( MAX - MIN + 1 ))
|
||||
|
||||
|
||||
(cat <<EOF
|
||||
set title "$TITLE"
|
||||
set xlabel "$XLABEL"
|
||||
set ylabel "$YLABEL"
|
||||
set style line 100 lt 5 lw 1.5
|
||||
$PM3D_FRAGMENT
|
||||
set dgrid3d $PM_CNT,$BB_CNT splines
|
||||
set ticslevel 0.2
|
||||
|
||||
set term png size $SIZE
|
||||
set output "$PLOT_BASENAME.png"
|
||||
splot "$DATFILE" every ::1 using 1:2:$FIELD with lines ls 100
|
||||
|
||||
set view 90,0
|
||||
set output "$PLOT_BASENAME-bb.png"
|
||||
replot
|
||||
|
||||
set view 90,90
|
||||
set output "$PLOT_BASENAME-pm.png"
|
||||
replot
|
||||
|
||||
set view 60,30
|
||||
set term wxt size $SIZE
|
||||
replot
|
||||
EOF
|
||||
) | gnuplot --persist
|
||||
Executable
+66
@@ -0,0 +1,66 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright (c) 2011, Intel Corporation.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
#
|
||||
# DESCRIPTION
|
||||
# This script runs BB_CMD (typically building core-image-sato) for all
|
||||
# combinations of BB_RANGE and PM_RANGE values. It saves off all the console
|
||||
# logs, the buildstats directories, and creates a bb-pm-runtime.dat file which
|
||||
# can be used to postprocess the results with a plotting tool, spreadsheet, etc.
|
||||
# Before running this script, it is recommended that you pre-download all the
|
||||
# necessary sources by performing the BB_CMD once manually. It is also a good
|
||||
# idea to disable cron to avoid runtime variations caused by things like the
|
||||
# locate process. Be sure to sanitize the dat file prior to post-processing as
|
||||
# it may contain error messages or bad runs that should be removed.
|
||||
#
|
||||
# AUTHORS
|
||||
# Darren Hart <dvhart@linux.intel.com>
|
||||
#
|
||||
|
||||
# The following ranges are appropriate for a 4 core system with 8 logical units
|
||||
# Use leading 0s to ensure all digits are the same string length, this results
|
||||
# in nice log file names and columnar dat files.
|
||||
BB_RANGE="04 05 06 07 08 09 10 11 12 13 14 15 16"
PM_RANGE="04 05 06 07 08 09 10 11 12 13 14 15 16"

# One data directory per run, keyed by this shell's PID
DATADIR="bb-matrix-$$"
BB_CMD="bitbake core-image-minimal"
RUNTIME_LOG="$DATADIR/bb-matrix.dat"

# See TIME(1) for a description of the time format parameters
# The following all report 0: W K r s t w
TIME_STR="%e %S %U %P %c %w %R %F %M %x"

# Prepare the DATADIR
mkdir $DATADIR
if [ $? -ne 0 ]; then
    echo "Failed to create $DATADIR."
    exit 1
fi

# Add a simple header
echo "BB PM $TIME_STR" > $RUNTIME_LOG
# Run the build once for every BB_NUMBER_THREADS x PARALLEL_MAKE pair
for BB in $BB_RANGE; do
    for PM in $PM_RANGE; do
        RUNDIR="$DATADIR/$BB-$PM-build"
        mkdir $RUNDIR
        BB_LOG=$RUNDIR/$BB-$PM-bitbake.log
        date
        echo "BB=$BB PM=$PM Logging to $BB_LOG"

        # Start from a clean build tree so each run is comparable
        echo -n "  Preparing the work directory... "
        rm -rf pseudodone tmp sstate-cache tmp-eglibc &> /dev/null
        echo "done"

        # Export the variables under test and run the bitbake command
        # Strip any leading zeroes before passing to bitbake
        export BB_NUMBER_THREADS=$(echo $BB | sed 's/^0*//')
        export PARALLEL_MAKE="-j $(echo $PM | sed 's/^0*//')"
        /usr/bin/time -f "$BB $PM $TIME_STR" -a -o $RUNTIME_LOG $BB_CMD &> $BB_LOG

        # Echo this run's timing record and keep its buildstats
        echo "  $(tail -n1 $RUNTIME_LOG)"
        cp -a tmp/buildstats $RUNDIR/$BB-$PM-buildstats
    done
done
|
||||
+160
@@ -0,0 +1,160 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Copyright (c) 2011, Intel Corporation.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
#
|
||||
# DESCRIPTION
|
||||
#
|
||||
# Produces script data to be consumed by gnuplot. There are two possible plots
|
||||
# depending if either the -S parameter is present or not:
|
||||
#
|
||||
# * without -S: Produces a histogram listing top N recipes/tasks versus
|
||||
# stats. The first stat defined in the -s parameter is the one taken
|
||||
# into account for ranking
|
||||
# * -S: Produces a histogram listing tasks versus stats. In this case,
|
||||
# the value of each stat is the sum for that particular stat in all recipes found.
|
||||
# Stats values are in descending order defined by the first stat defined on -s
|
||||
#
|
||||
# EXAMPLES
|
||||
#
|
||||
# 1. Top recipes' tasks taking into account utime
|
||||
#
|
||||
# $ buildstats-plot.sh -s utime | gnuplot -p
|
||||
#
|
||||
# 2. Tasks versus utime:stime
|
||||
#
|
||||
# $ buildstats-plot.sh -s utime:stime -S | gnuplot -p
|
||||
#
|
||||
# 3. Tasks versus IO write_bytes:IO read_bytes
|
||||
#
|
||||
# $ buildstats-plot.sh -s 'IO write_bytes:IO read_bytes' -S | gnuplot -p
|
||||
#
|
||||
# AUTHORS
|
||||
# Leonardo Sandoval <leonardo.sandoval.gonzalez@linux.intel.com>
|
||||
#
|
||||
|
||||
set -o nounset
|
||||
set -o errexit
|
||||
|
||||
BS_DIR="tmp/buildstats"
|
||||
N=10
|
||||
RECIPE=""
|
||||
TASKS="compile:configure:fetch:install:patch:populate_lic:populate_sysroot:unpack"
|
||||
STATS="utime"
|
||||
ACCUMULATE=""
|
||||
SUM=""
|
||||
OUTDATA_FILE="$PWD/buildstats-plot.out"
|
||||
|
||||
function usage {
|
||||
CMD=$(basename $0)
|
||||
cat <<EOM
|
||||
Usage: $CMD [-b buildstats_dir] [-t do_task]
|
||||
-b buildstats The path where the folder resides
|
||||
(default: "$BS_DIR")
|
||||
-n N Top N recipes to display. Ignored if -S is present
|
||||
(default: "$N")
|
||||
-r recipe The recipe mask to be searched
|
||||
-t tasks The tasks to be computed
|
||||
(default: "$TASKS")
|
||||
-s stats The stats to be matched. If more that one stat, units
|
||||
should be the same because data is plot as histogram.
|
||||
(see buildstats.sh -h for all options) or any other defined
|
||||
(build)stat separated by colons, i.e. stime:utime
|
||||
(default: "$STATS")
|
||||
-a Accumulate all stats values for found recipes
|
||||
-S Sum values for a particular stat for found recipes
|
||||
-o Output data file.
|
||||
(default: "$OUTDATA_FILE")
|
||||
-h Display this help message
|
||||
EOM
|
||||
}
|
||||
|
||||
# Parse and validate arguments
|
||||
while getopts "b:n:r:t:s:o:aSh" OPT; do
|
||||
case $OPT in
|
||||
b)
|
||||
BS_DIR="$OPTARG"
|
||||
;;
|
||||
n)
|
||||
N="$OPTARG"
|
||||
;;
|
||||
r)
|
||||
RECIPE="-r $OPTARG"
|
||||
;;
|
||||
t)
|
||||
TASKS="$OPTARG"
|
||||
;;
|
||||
s)
|
||||
STATS="$OPTARG"
|
||||
;;
|
||||
a)
|
||||
ACCUMULATE="-a"
|
||||
;;
|
||||
S)
|
||||
SUM="y"
|
||||
;;
|
||||
o)
|
||||
OUTDATA_FILE="$OPTARG"
|
||||
;;
|
||||
h)
|
||||
usage
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Get number of stats
|
||||
IFS=':'; statsarray=(${STATS}); unset IFS
|
||||
nstats=${#statsarray[@]}
|
||||
|
||||
# Get script folder, use to run buildstats.sh
|
||||
CD=$(dirname $0)
|
||||
|
||||
# Parse buildstats recipes to produce a single table
|
||||
OUTBUILDSTATS="$PWD/buildstats.log"
|
||||
$CD/buildstats.sh -b "$BS_DIR" -s "$STATS" -t "$TASKS" $RECIPE $ACCUMULATE -H > $OUTBUILDSTATS
|
||||
|
||||
# Get headers
|
||||
HEADERS=$(cat $OUTBUILDSTATS | sed -n -e 's/\(.*\)/"\1"/' -e '1s/ /\\\\\\\\ /g' -e 's/_/\\\\\\\\_/g' -e '1s/:/" "/gp')
|
||||
|
||||
echo -e "set boxwidth 0.9 relative"
|
||||
echo -e "set style data histograms"
|
||||
echo -e "set style fill solid 1.0 border lt -1"
|
||||
echo -e "set xtics rotate by 45 right"
|
||||
|
||||
# Get output data
|
||||
if [ -z "$SUM" ]; then
|
||||
cat $OUTBUILDSTATS | sed -e '1d' -e 's/_/\\\\_/g' | sort -k3 -n -r | head -$N > $OUTDATA_FILE
|
||||
# include task at recipe column
|
||||
sed -i -e "1i\
|
||||
${HEADERS}" $OUTDATA_FILE
|
||||
echo -e "set title \"Top task/recipes\""
|
||||
echo -e "plot for [COL=3:`expr 3 + ${nstats} - 1`] '${OUTDATA_FILE}' using COL:xtic(stringcolumn(1).' '.stringcolumn(2)) title columnheader(COL)"
|
||||
else
|
||||
|
||||
# Construct datamash sum argument (sum 3 sum 4 ...)
|
||||
declare -a sumargs
|
||||
j=0
|
||||
for i in `seq $nstats`; do
|
||||
sumargs[j]=sum; j=$(( $j + 1 ))
|
||||
sumargs[j]=`expr 3 + $i - 1`; j=$(( $j + 1 ))
|
||||
done
|
||||
|
||||
# Do the processing with datamash
|
||||
cat $OUTBUILDSTATS | sed -e '1d' | datamash -t ' ' -g1 ${sumargs[*]} | sort -k2 -n -r > $OUTDATA_FILE
|
||||
|
||||
# Include headers into resulted file, so we can include gnuplot xtics
|
||||
HEADERS=$(echo $HEADERS | sed -e 's/recipe//1')
|
||||
sed -i -e "1i\
|
||||
${HEADERS}" $OUTDATA_FILE
|
||||
|
||||
# Plot
|
||||
echo -e "set title \"Sum stats values per task for all recipes\""
|
||||
echo -e "plot for [COL=2:`expr 2 + ${nstats} - 1`] '${OUTDATA_FILE}' using COL:xtic(1) title columnheader(COL)"
|
||||
fi
|
||||
|
||||
Executable
+167
@@ -0,0 +1,167 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright (c) 2011, Intel Corporation.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
#
|
||||
# DESCRIPTION
|
||||
# Given 'buildstats' data (generate by bitbake when setting
|
||||
# USER_CLASSES ?= "buildstats" on local.conf), task names and a stats values
|
||||
# (these are the ones preset on the buildstats files), outputs
|
||||
# '<task> <recipe> <value_1> <value_2> ... <value_n>'. The units are the ones
|
||||
# defined at buildstats, which in turn takes data from /proc/[pid] files
|
||||
#
|
||||
# Some useful pipelines
|
||||
#
|
||||
# 1. Tasks with largest stime (Amount of time that this process has been scheduled
|
||||
# in kernel mode) values
|
||||
# $ buildstats.sh -b <buildstats> -s stime | sort -k3 -n -r | head
|
||||
#
|
||||
# 2. Min, max, sum utime (Amount of time that this process has been scheduled
|
||||
# in user mode) per task (in needs GNU datamash)
|
||||
# $ buildstats.sh -b <buildstats> -s utime | datamash -t' ' -g1 min 3 max 3 sum 3 | sort -k4 -n -r
|
||||
#
|
||||
# AUTHORS
|
||||
# Leonardo Sandoval <leonardo.sandoval.gonzalez@linux.intel.com>
|
||||
#
|
||||
|
||||
# Stats, by type
|
||||
TIME="utime:stime:cutime:cstime"
|
||||
IO="IO wchar:IO write_bytes:IO syscr:IO read_bytes:IO rchar:IO syscw:IO cancelled_write_bytes"
|
||||
RUSAGE="rusage ru_utime:rusage ru_stime:rusage ru_maxrss:rusage ru_minflt:rusage ru_majflt:\
|
||||
rusage ru_inblock:rusage ru_oublock:rusage ru_nvcsw:rusage ru_nivcsw"
|
||||
|
||||
CHILD_RUSAGE="Child rusage ru_utime:Child rusage ru_stime:Child rusage ru_maxrss:Child rusage ru_minflt:\
|
||||
Child rusage ru_majflt:Child rusage ru_inblock:Child rusage ru_oublock:Child rusage ru_nvcsw:\
|
||||
Child rusage ru_nivcsw"
|
||||
|
||||
BS_DIR="tmp/buildstats"
|
||||
RECIPE=""
|
||||
TASKS="compile:configure:fetch:install:patch:populate_lic:populate_sysroot:unpack"
|
||||
STATS="$TIME"
|
||||
ACCUMULATE=""
|
||||
HEADER="" # No header by default
|
||||
|
||||
# Print command usage together with the currently-effective defaults.
function usage {
	CMD=$(basename $0)
	cat <<EOM
Usage: $CMD [-b buildstats_dir] [-t do_task]
  -b buildstats The path where the folder resides
                (default: "$BS_DIR")
  -r recipe     The recipe to be computed
  -t tasks      The tasks to be computed
                (default: "$TASKS")
  -s stats      The stats to be matched. Options: TIME, IO, RUSAGE, CHILD_RUSAGE
                or any other defined buildstat separated by colons, i.e. stime:utime
                (default: "$STATS")
                Default stat sets:
                  TIME=$TIME
                  IO=$IO
                  RUSAGE=$RUSAGE
                  CHILD_RUSAGE=$CHILD_RUSAGE
  -a            Accumulate all stats values for found recipes
  -h            Display this help message
EOM
}
|
||||
|
||||
# Parse and validate arguments
# -b dir, -r recipe, -t tasks, -s stats set the corresponding globals;
# -a enables accumulation, -H prints a header line, -h shows usage.
while getopts "b:r:t:s:aHh" OPT; do
	case $OPT in
	b)
		BS_DIR="$OPTARG"
		;;
	r)
		RECIPE="$OPTARG"
		;;
	t)
		TASKS="$OPTARG"
		;;
	s)
		STATS="$OPTARG"
		;;
	a)
		ACCUMULATE="y"
		;;
	H)
		HEADER="y"
		;;
	h)
		usage
		exit 0
		;;
	*)
		# Unknown flag: show usage and fail
		usage
		exit 1
		;;
	esac
done

# Ensure the buildstats folder exists
if [ ! -d "$BS_DIR" ]; then
	echo "ERROR: $BS_DIR does not exist"
	usage
	exit 1
fi
|
||||
|
||||
# Expand the user-supplied stat list into a single colon-separated list,
# replacing the named stat-set shortcuts (TIME/IO/RUSAGE/CHILD_RUSAGE)
# with their contents and passing individual stat names through as-is.
stats=""
IFS=":"
for stat in ${STATS}; do
	case $stat in
		TIME)
			stats="${stats}:${TIME}"
			;;
		IO)
			stats="${stats}:${IO}"
			;;
		RUSAGE)
			stats="${stats}:${RUSAGE}"
			;;
		CHILD_RUSAGE)
			stats="${stats}:${CHILD_RUSAGE}"
			;;
		*)
			# Append the individual stat. The previous code set
			# stats="${STATS}" here, which threw away any named sets
			# already expanded when mixing them with custom stats
			# (e.g. -s TIME:stime left TIME unexpanded).
			stats="${stats}:${stat}"
			;;
	esac
done

# remove possible colon at the beginning
stats="$(echo "$stats" | sed -e 's/^://1')"
|
||||
|
||||
# Provide a header if required by the user
if [ -n "$HEADER" ] ; then
	if [ -n "$ACCUMULATE" ]; then
		echo "task:recipe:accumulated(${stats//:/;})"
	else
		echo "task:recipe:$stats"
	fi
fi

# Walk the buildstats tree: for every requested task, locate the matching
# per-recipe stat files and print one '<task> <recipe> <values...>' line each.
for task in ${TASKS}; do
	task="do_${task}"
	# find emits matching file paths; awk glues them into one colon-separated
	# string so the surrounding IFS=":" splits it back into individual items
	# (paths may contain spaces).
	for file in $(find ${BS_DIR} -type f -path *${RECIPE}*/${task} | awk 'BEGIN{ ORS=""; OFS=":" } { print $0,"" }'); do
		recipe="$(basename $(dirname $file))"
		times=""
		for stat in ${stats}; do
			[ -z "$stat" ] && { echo "empty stats"; }
			# Extract the value following "<stat>: " in the buildstats file
			time=$(sed -n -e "s/^\($stat\): \\(.*\\)/\\2/p" $file)
			# in case the stat is not present, set the value as NA
			[ -z "$time" ] && { time="NA"; }
			# Append it to times
			if [ -z "$times" ]; then
				times="${time}"
			else
				times="${times} ${time}"
			fi
		done
		if [ -n "$ACCUMULATE" ]; then
			# Sum all collected values; NA aborts since it cannot be added
			IFS=' '; valuesarray=(${times}); IFS=':'
			times=0
			for value in "${valuesarray[@]}"; do
				[ "$value" == "NA" ] && { echo "ERROR: stat is not present."; usage; exit 1; }
				times=$(( $times + $value ))
			done
		fi
		echo "${task} ${recipe} ${times}"
	done
done
|
||||
Executable
+168
@@ -0,0 +1,168 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
#
|
||||
# Copyright (C) Darren Hart <dvhart@linux.intel.com>, 2010
|
||||
|
||||
|
||||
import sys
|
||||
import getopt
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
|
||||
# Set up sys.path to let us import tinfoil
|
||||
scripts_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
||||
lib_path = scripts_path + '/lib'
|
||||
sys.path.insert(0, lib_path)
|
||||
import scriptpath
|
||||
scriptpath.add_bitbake_lib_path()
|
||||
import bb.tinfoil
|
||||
|
||||
def usage():
    """Print command-line usage information for this script to stdout."""
    prog = os.path.basename(sys.argv[0])
    help_lines = (
        'Usage: %s -d FILENAME [-d FILENAME]*' % prog,
        ' -d FILENAME documentation file to search',
        ' -h, --help display this help and exit',
        ' -t FILENAME documentation config file (for doc tags)',
        ' -T Only display variables with doc tags (requires -t)',
    )
    for help_line in help_lines:
        print(help_line)
||||
|
||||
def bbvar_is_documented(var, documented_vars):
    ''' Check if variable (var) is in the list of documented variables(documented_vars) '''
    # The membership test already yields the boolean we want; the original
    # if/else returning True/False literals was redundant.
    return var in documented_vars
|
||||
|
||||
def collect_documented_vars(docfiles):
    ''' Walk the docfiles and collect the documented variables.

    Each file is scanned for <glossentry id='var-NAME'> markers and the
    NAME parts are returned as a list (duplicates preserved, in file order).
    '''
    documented_vars = []
    # NOTE: the original also compiled a second, never-used pattern
    # (".*($|[^A-Z_])<glossentry id='var-"); it has been removed.
    var_prog = re.compile(r"<glossentry id='var-(.*)'>")
    for d in docfiles:
        with open(d) as f:
            documented_vars += var_prog.findall(f.read())

    return documented_vars
|
||||
|
||||
def bbvar_doctag(var, docconf):
    """Return the [doc] tag text for *var* from the doc config file.

    Returns "?" when no docconf was given, the OS error message when the
    file cannot be opened, the tag text on the first match, or "" when
    *var* has no [doc] entry in *docconf*.
    """
    if docconf == "":
        return "?"

    # Matches lines of the form: VAR[doc] = "some text"
    # (raw string: the original non-raw pattern relied on invalid escape
    # sequences like '\[' which newer Pythons warn about)
    prog = re.compile(r'^%s\[doc\] *= *"(.*)"' % (var))

    try:
        # 'with' guarantees the file is closed even on the early return
        # below; the original leaked the handle whenever a match was found.
        with open(docconf) as f:
            for line in f:
                m = prog.search(line)
                if m:
                    return m.group(1)
    except IOError as err:
        return err.args[1]

    return ""
|
||||
|
||||
def main():
    """Collect all bitbake variable names via tinfoil and report the ones
    that are not documented in the given documentation files."""
    docfiles = []
    bbvars = set()
    undocumented = []
    docconf = ""
    onlydoctags = False

    # Collect and validate input
    try:
        opts, args = getopt.getopt(sys.argv[1:], "d:hm:t:T", ["help"])
    except getopt.GetoptError as err:
        print('%s' % str(err))
        usage()
        sys.exit(2)

    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit(0)
        elif o == '-d':
            # Documentation file to scan for <glossentry> markers
            if os.path.isfile(a):
                docfiles.append(a)
            else:
                print('ERROR: documentation file %s is not a regular file' % a)
                sys.exit(3)
        elif o == "-t":
            # Documentation config file providing VAR[doc] = "..." tags
            if os.path.isfile(a):
                docconf = a
        elif o == "-T":
            onlydoctags = True
        else:
            assert False, "unhandled option"

    if len(docfiles) == 0:
        print('ERROR: no docfile specified')
        usage()
        sys.exit(5)

    if onlydoctags and docconf == "":
        print('ERROR: no docconf specified')
        usage()
        sys.exit(7)

    # Only names with no lowercase letters are treated as bitbake variables
    prog = re.compile("^[^a-z]*$")
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)
        parser = bb.codeparser.PythonParser('parser', None)
        datastore = tinfoil.config_data

        def bbvars_update(data):
            # Record the variable itself plus any variables referenced
            # from its value (python functions are parsed, ordinary
            # values are expanded with reference tracking).
            if prog.match(data):
                bbvars.add(data)
            if tinfoil.config_data.getVarFlag(data, 'python'):
                try:
                    parser.parse_python(tinfoil.config_data.getVar(data))
                except bb.data_smart.ExpansionError:
                    pass
                for var in parser.references:
                    if prog.match(var):
                        bbvars.add(var)
            else:
                try:
                    expandedVar = datastore.expandWithRefs(datastore.getVar(data, False), data)
                    for var in expandedVar.references:
                        if prog.match(var):
                            bbvars.add(var)
                except bb.data_smart.ExpansionError:
                    pass

        # Use tinfoil to collect all the variable names globally
        for data in datastore:
            bbvars_update(data)

        # Collect variables from all recipes
        for recipe in tinfoil.all_recipe_files(variants=False):
            print("Checking %s" % recipe)
            for data in tinfoil.parse_recipe_file(recipe):
                bbvars_update(data)

    documented_vars = collect_documented_vars(docfiles)

    # Check each var for documentation, tracking the longest name for
    # column alignment of the report below
    varlen = 0
    for v in bbvars:
        if len(v) > varlen:
            varlen = len(v)
        if not bbvar_is_documented(v, documented_vars):
            undocumented.append(v)
    undocumented.sort()
    varlen = varlen + 1

    # Report all undocumented variables
    print('Found %d undocumented bb variables (out of %d):' % (len(undocumented), len(bbvars)))
    header = '%s%s' % (str("VARIABLE").ljust(varlen), str("DOCTAG").ljust(7))
    print(header)
    print(str("").ljust(len(header), '='))
    for v in undocumented:
        doctag = bbvar_doctag(v, docconf)
        # With -T only variables that actually carry a doc tag are shown
        if not onlydoctags or not doctag == "":
            print('%s%s' % (v.ljust(varlen), doctag))


if __name__ == "__main__":
    main()
|
||||
+247
@@ -0,0 +1,247 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Build performance test script wrapper
|
||||
#
|
||||
# Copyright (c) 2016, Intel Corporation.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
# This script is a simple wrapper around the actual build performance tester
|
||||
# script. This script initializes the build environment, runs
|
||||
# oe-build-perf-test and archives the results.
|
||||
|
||||
# Script identity and default location for archived result tarballs
script=`basename $0`
script_dir=$(realpath $(dirname $0))
archive_dir=~/perf-results/archives

# Print command usage; defaults shown reflect the current settings.
usage () {
cat << EOF
Usage: $script [-h] [-c COMMITISH] [-C GIT_REPO]

Optional arguments:
  -h                show this help and exit.
  -a ARCHIVE_DIR    archive results tarball here, give an empty string to
                    disable tarball archiving (default: $archive_dir)
  -c COMMITISH      test (checkout) this commit, <branch>:<commit> can be
                    specified to test specific commit of certain branch
  -C GIT_REPO       commit results into Git
  -d DOWNLOAD_DIR   directory to store downloaded sources in
  -E EMAIL_ADDR     send email report
  -g GLOBALRES_DIR  where to place the globalres file
  -P GIT_REMOTE     push results to a remote Git repository
  -R DEST           rsync reports to a remote destination
  -w WORK_DIR       work dir for this script
                    (default: GIT_TOP_DIR/build-perf-test)
  -x                create xml report (instead of json)
EOF
}

# Echo the value of the named variable from /etc/os-release; the subshell
# keeps the sourced variables out of this script's environment.
get_os_release_var () {
    ( source /etc/os-release; eval echo '$'$1 )
}
|
||||
|
||||
|
||||
# Parse command line arguments
commitish=""
oe_build_perf_test_extra_opts=()
oe_git_archive_extra_opts=()
while getopts "ha:c:C:d:E:g:P:R:w:x" opt; do
    case $opt in
        h)  usage
            exit 0
            ;;
        a)  # Archive dir for result tarballs; created up front
            mkdir -p "$OPTARG"
            archive_dir=`realpath -s "$OPTARG"`
            ;;
        c)  commitish=$OPTARG
            ;;
        C)  # Git repo to commit results into; created up front
            mkdir -p "$OPTARG"
            results_repo=`realpath -s "$OPTARG"`
            ;;
        d)  download_dir=`realpath -s "$OPTARG"`
            ;;
        E)  email_to="$OPTARG"
            ;;
        g)  mkdir -p "$OPTARG"
            globalres_dir=`realpath -s "$OPTARG"`
            ;;
        P)  # Forwarded to oe-git-archive to push results upstream
            oe_git_archive_extra_opts+=("--push" "$OPTARG")
            ;;
        R)  rsync_dst="$OPTARG"
            ;;
        w)  base_dir=`realpath -s "$OPTARG"`
            ;;
        x)  oe_build_perf_test_extra_opts+=("--xml")
            ;;
        *)  usage
            exit 1
            ;;
    esac
done

# Check positional args
shift "$((OPTIND - 1))"
if [ $# -ne 0 ]; then
    echo "ERROR: No positional args are accepted."
    usage
    exit 1
fi
|
||||
|
||||
# Open a file descriptor for flock and acquire lock so that only one
# instance of this wrapper runs at a time on this machine.
LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock"
if ! exec 3> "$LOCK_FILE"; then
    # Fixed garbled message: was "Unable to open loemack file"
    echo "ERROR: Unable to open lock file"
    exit 1
fi
if ! flock -n 3; then
    echo "ERROR: Another instance of this script is running"
    exit 1
fi
|
||||
|
||||
echo "Running on `uname -n`"
# The wrapper must run inside the git clone that is to be tested
if ! git_topdir=$(git rev-parse --show-toplevel); then
    echo "The current working dir doesn't seem to be a git clone. Please cd there before running `basename $0`"
    exit 1
fi

cd "$git_topdir"

if [ -n "$commitish" ]; then
    echo "Running git fetch"
    git fetch &> /dev/null
    # Detach from the current branch so it can be deleted/re-created below
    git checkout HEAD^0 &> /dev/null

    # Handle <branch>:<commit> format
    if echo "$commitish" | grep -q ":"; then
        commit=`echo "$commitish" | cut -d":" -f2`
        branch=`echo "$commitish" | cut -d":" -f1`
    else
        commit="$commitish"
        branch="$commitish"
    fi

    echo "Checking out $commitish"
    git branch -D $branch &> /dev/null
    if ! git checkout -f $branch &> /dev/null; then
        echo "ERROR: Git checkout failed"
        exit 1
    fi

    # Check that the specified branch really contains the commit
    commit_hash=`git rev-parse --revs-only $commit --`
    if [ -z "$commit_hash" -o "`git merge-base $branch $commit`" != "$commit_hash" ]; then
        echo "ERROR: branch $branch does not contain commit $commit"
        exit 1
    fi
    git reset --hard $commit > /dev/null
fi

# Determine name of the current branch
branch=`git symbolic-ref HEAD 2> /dev/null`
# Strip refs/heads/
branch=${branch:11}

# Setup build environment
if [ -z "$base_dir" ]; then
    base_dir="$git_topdir/build-perf-test"
fi
echo "Using working dir $base_dir"

if [ -z "$download_dir" ]; then
    download_dir="$base_dir/downloads"
fi
if [ -z "$globalres_dir" ]; then
    globalres_dir="$base_dir"
fi

# Build and results dirs are tagged with the revision under test plus a
# timestamp so repeated runs never collide
timestamp=`date "+%Y%m%d%H%M%S"`
git_rev=$(git rev-parse --short HEAD)  || exit 1
build_dir="$base_dir/build-$git_rev-$timestamp"
results_dir="$base_dir/results-$git_rev-$timestamp"
globalres_log="$globalres_dir/globalres.log"
machine="qemux86"

mkdir -p "$base_dir"
source ./oe-init-build-env $build_dir >/dev/null || exit 1

# Additional config
auto_conf="$build_dir/conf/auto.conf"
echo "MACHINE = \"$machine\"" > "$auto_conf"
echo 'BB_NUMBER_THREADS = "8"' >> "$auto_conf"
echo 'PARALLEL_MAKE = "-j 8"' >> "$auto_conf"
echo "DL_DIR = \"$download_dir\"" >> "$auto_conf"
# Disabling network sanity check slightly reduces the variance of timing results
echo 'CONNECTIVITY_CHECK_URIS = ""' >> "$auto_conf"
# Possibility to define extra settings
if [ -f "$base_dir/auto.conf.extra" ]; then
    cat "$base_dir/auto.conf.extra" >> "$auto_conf"
fi

# Run actual test script
oe-build-perf-test --out-dir "$results_dir" \
                   --globalres-file "$globalres_log" \
                   "${oe_build_perf_test_extra_opts[@]}" \
                   --lock-file "$base_dir/oe-build-perf.lock"

# Exit code 1 = hard failure, 2 = individual test failures (non-fatal)
case $? in
    1) echo "ERROR: oe-build-perf-test script failed!"
       exit 1
       ;;
    2) echo "NOTE: some tests failed!"
       ;;
esac

# Commit results to git
if [ -n "$results_repo" ]; then
    echo -e "\nArchiving results in $results_repo"
    oe-git-archive \
        --git-dir "$results_repo" \
        --branch-name "{hostname}/{branch}/{machine}" \
        --tag-name "{hostname}/{branch}/{machine}/{commit_count}-g{commit}/{tag_number}" \
        --exclude "buildstats.json" \
        --notes "buildstats/{branch_name}" "$results_dir/buildstats.json" \
        "${oe_git_archive_extra_opts[@]}" \
        "$results_dir"

    # Generate test reports
    sanitized_branch=`echo $branch | tr / _`
    report_txt=`hostname`_${sanitized_branch}_${machine}.txt
    report_html=`hostname`_${sanitized_branch}_${machine}.html
    echo -e "\nGenerating test report"
    oe-build-perf-report -r "$results_repo" > $report_txt
    oe-build-perf-report -r "$results_repo" --html > $report_html

    # Send email report
    if [ -n "$email_to" ]; then
        echo "Emailing test report"
        os_name=`get_os_release_var PRETTY_NAME`
        "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
    fi

    # Upload report files, unless we're on detached head
    if [ -n "$rsync_dst" -a -n "$branch" ]; then
        echo "Uploading test report"
        rsync $report_txt $report_html $rsync_dst
    fi
fi


echo -ne "\n\n-----------------\n"
echo "Global results file:"
echo -ne "\n"

cat "$globalres_log"

if [ -n "$archive_dir" ]; then
    echo -ne "\n\n-----------------\n"
    echo "Archiving results in $archive_dir"
    mkdir -p "$archive_dir"
    results_basename=`basename "$results_dir"`
    results_dirname=`dirname "$results_dir"`
    tar -czf "$archive_dir/`uname -n`-${results_basename}.tar.gz" -C "$results_dirname" "$results_basename"
fi

# Build and raw results dirs are disposable once archived/reported
rm -rf "$build_dir"
rm -rf "$results_dir"

echo "DONE"
|
||||
Executable
+155
@@ -0,0 +1,155 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Conversion script to add new override syntax to existing bitbake metadata
|
||||
#
|
||||
# Copyright (C) 2021 Richard Purdie
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
#
|
||||
# To use this script on a new layer you need to list the overrides the
|
||||
# layer is known to use in the list below.
|
||||
#
|
||||
# Known constraint: Matching is 'loose' and in particular will find variable
|
||||
# and function names with "_append" and "_remove" in them. Those need to be
|
||||
# filtered out manually or in the skip list below.
|
||||
#
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import shutil
|
||||
import mimetypes
|
||||
import argparse
|
||||
|
||||
parser = argparse.ArgumentParser(description="Convert override syntax")
parser.add_argument("--override", "-o", action="append", default=[], help="Add additional strings to consider as an override (e.g. custom machines/distros")
parser.add_argument("--skip", "-s", action="append", default=[], help="Add additional string to skip and not consider an override")
parser.add_argument("--skip-ext", "-e", action="append", default=[], help="Additional file suffixes to skip when processing (e.g. '.foo')")
parser.add_argument("--package-vars", action="append", default=[], help="Additional variables to treat as package variables")
parser.add_argument("--image-vars", action="append", default=[], help="Additional variables to treat as image variables")
parser.add_argument("--short-override", action="append", default=[], help="Additional strings to treat as short overrides")
parser.add_argument("path", nargs="+", help="Paths to convert")

args = parser.parse_args()

# List of strings to treat as overrides
# (machines, architectures, classes, libc variants, task names, distros, ...)
vars = args.override
vars += ["append", "prepend", "remove"]
vars += ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"]
vars += ["genericx86", "edgerouter", "beaglebone-yocto"]
vars += ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"]
vars += ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"]
vars += ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"]
vars += ["tune-", "pn-", "forcevariable"]
vars += ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"]
vars += ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"]
vars += ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"]
vars += ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"]
vars += ["linux-gnueabi", "eabi"]
vars += ["virtclass-multilib", "virtclass-mcextend"]

# List of strings to treat as overrides but only with whitespace following or another override (more restricted matching).
# Handles issues with arc matching arch.
shortvars = ["arc", "mips", "mipsel", "sh4"] + args.short_override

# Variables which take packagenames as an override
packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY",
               "pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE",
               "PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst",
               "pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA",
               "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"] + args.package_vars

# Expressions to skip if encountered, these are not overrides
# (mostly function/variable names that merely contain _append/_remove/...)
skips = args.skip
skips += ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"]
skips += ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"]
skips += ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"]
skips += ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"]
skips += ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"]
skips += ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"]
skips += ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"]
skips += ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"]
skips += ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"]

# Image variables also take overrides like package variables do
imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"] + args.image_vars
packagevars += imagevars

skip_ext = [".html", ".patch", ".m4", ".diff"] + args.skip_ext

# Pre-compile one (pattern, replacement) pair per override string
vars_re = {}
for exp in vars:
    vars_re[exp] = (re.compile('((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp)

shortvars_re = {}
for exp in shortvars:
    shortvars_re[exp] = (re.compile('((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + '([\(\'"\s:])'), r"\1:" + exp + r"\3")

package_re = {}
for exp in packagevars:
    package_re[exp] = (re.compile('(^|[#\'"\s\-\+]+)' + exp + '_' + '([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2")

# Other substitutions to make
subs = {
    'r = re.compile("([^:]+):\s*(.*)")' : 'r = re.compile("(^.+?):\s+(.*)")',
    "val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))",
    "f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))",
    "d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))",
    'ret.append(v + "_" + p)' : 'ret.append(v + ":" + p)',
}
|
||||
|
||||
def processfile(fn):
    """Convert old-style '_' override syntax to ':' syntax in *fn*, in place.

    Relies on the module-level tables (skips, subs, packagevars/package_re,
    vars/vars_re, shortvars/shortvars_re) built from the command line.
    Files that are not valid text (UnicodeDecodeError) are left untouched.
    """
    print("processing file '%s'" % fn)
    try:
        # Write the converted content to a temp file, then move it over fn
        # preserving the original permission bits.
        fh, abs_path = tempfile.mkstemp()
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                for line in old_file:
                    # Lines containing known non-override identifiers pass
                    # through without the generic regex conversion.
                    skip = False
                    for s in skips:
                        if s in line:
                            skip = True
                    # ptest_* really are overrides despite matching skips
                    if "ptest_append" in line or "ptest_remove" in line or "ptest_prepend" in line:
                        skip = False
                    # Fixed one-off replacements; they also suppress the
                    # generic conversion for the line.
                    for sub in subs:
                        if sub in line:
                            line = line.replace(sub, subs[sub])
                            skip = True
                    if not skip:
                        for pvar in packagevars:
                            line = package_re[pvar][0].sub(package_re[pvar][1], line)
                        for var in vars:
                            line = vars_re[var][0].sub(vars_re[var][1], line)
                        for shortvar in shortvars:
                            line = shortvars_re[shortvar][0].sub(shortvars_re[shortvar][1], line)
                    # pkg_postinst_ontarget is a real function name, not an
                    # override; undo any conversion made above.
                    if "pkg_postinst:ontarget" in line:
                        line = line.replace("pkg_postinst:ontarget", "pkg_postinst_ontarget")
                    new_file.write(line)
        shutil.copymode(fn, abs_path)
        os.remove(fn)
        shutil.move(abs_path, fn)
    except UnicodeDecodeError:
        # Not a text file - skip it, but clean up the temp file the
        # original version leaked here.
        os.remove(abs_path)
|
||||
|
||||
# Script name/version: used to avoid converting this script itself and
# for the final report line.
ourname = os.path.basename(sys.argv[0])
ourversion = "0.9.3"

for p in args.path:
    if os.path.isfile(p):
        processfile(p)
    else:
        print("processing directory '%s'" % p)
        for root, dirs, files in os.walk(p):
            for name in files:
                # Never rewrite this conversion script itself
                if name == ourname:
                    continue
                fn = os.path.join(root, name)
                # Skip symlinks so targets aren't processed twice
                if os.path.islink(fn):
                    continue
                # Skip git internals and file types that must not change
                if "/.git/" in fn or any(fn.endswith(ext) for ext in skip_ext):
                    continue
                processfile(fn)

print("All files processed with version %s" % ourversion)
|
||||
+145
@@ -0,0 +1,145 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Conversion script to change LICENSE entries to SPDX identifiers
|
||||
#
|
||||
# Copyright (C) 2021-2022 Richard Purdie
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import shutil
|
||||
import mimetypes
|
||||
|
||||
# At least one path argument is required
if len(sys.argv) < 2:
    print("Please specify a directory to run the conversion script against.")
    sys.exit(1)

# Map of legacy/ad-hoc license names to official SPDX identifiers.
# Keys are replaced on LICENSE lines only when followed by a delimiter
# (quote, space or parenthesis) - see processfile() below.
license_map = {
"AGPL-3" : "AGPL-3.0-only",
"AGPL-3+" : "AGPL-3.0-or-later",
"AGPLv3" : "AGPL-3.0-only",
"AGPLv3+" : "AGPL-3.0-or-later",
"AGPLv3.0" : "AGPL-3.0-only",
"AGPLv3.0+" : "AGPL-3.0-or-later",
"AGPL-3.0" : "AGPL-3.0-only",
"AGPL-3.0+" : "AGPL-3.0-or-later",
"BSD-0-Clause" : "0BSD",
"GPL-1" : "GPL-1.0-only",
"GPL-1+" : "GPL-1.0-or-later",
"GPLv1" : "GPL-1.0-only",
"GPLv1+" : "GPL-1.0-or-later",
"GPLv1.0" : "GPL-1.0-only",
"GPLv1.0+" : "GPL-1.0-or-later",
"GPL-1.0" : "GPL-1.0-only",
"GPL-1.0+" : "GPL-1.0-or-later",
"GPL-2" : "GPL-2.0-only",
"GPL-2+" : "GPL-2.0-or-later",
"GPLv2" : "GPL-2.0-only",
"GPLv2+" : "GPL-2.0-or-later",
"GPLv2.0" : "GPL-2.0-only",
"GPLv2.0+" : "GPL-2.0-or-later",
"GPL-2.0" : "GPL-2.0-only",
"GPL-2.0+" : "GPL-2.0-or-later",
"GPL-3" : "GPL-3.0-only",
"GPL-3+" : "GPL-3.0-or-later",
"GPLv3" : "GPL-3.0-only",
"GPLv3+" : "GPL-3.0-or-later",
"GPLv3.0" : "GPL-3.0-only",
"GPLv3.0+" : "GPL-3.0-or-later",
"GPL-3.0" : "GPL-3.0-only",
"GPL-3.0+" : "GPL-3.0-or-later",
"LGPLv2" : "LGPL-2.0-only",
"LGPLv2+" : "LGPL-2.0-or-later",
"LGPLv2.0" : "LGPL-2.0-only",
"LGPLv2.0+" : "LGPL-2.0-or-later",
"LGPL-2.0" : "LGPL-2.0-only",
"LGPL-2.0+" : "LGPL-2.0-or-later",
"LGPL2.1" : "LGPL-2.1-only",
"LGPL2.1+" : "LGPL-2.1-or-later",
"LGPLv2.1" : "LGPL-2.1-only",
"LGPLv2.1+" : "LGPL-2.1-or-later",
"LGPL-2.1" : "LGPL-2.1-only",
"LGPL-2.1+" : "LGPL-2.1-or-later",
"LGPLv3" : "LGPL-3.0-only",
"LGPLv3+" : "LGPL-3.0-or-later",
"LGPL-3.0" : "LGPL-3.0-only",
"LGPL-3.0+" : "LGPL-3.0-or-later",
"MPL-1" : "MPL-1.0",
"MPLv1" : "MPL-1.0",
"MPLv1.1" : "MPL-1.1",
"MPLv2" : "MPL-2.0",
"MIT-X" : "MIT",
"MIT-style" : "MIT",
"openssl" : "OpenSSL",
"PSF" : "PSF-2.0",
"PSFv2" : "PSF-2.0",
"Python-2" : "Python-2.0",
"Apachev2" : "Apache-2.0",
"Apache-2" : "Apache-2.0",
"Artisticv1" : "Artistic-1.0",
"Artistic-1" : "Artistic-1.0",
"AFL-2" : "AFL-2.0",
"AFL-1" : "AFL-1.2",
"AFLv2" : "AFL-2.0",
"AFLv1" : "AFL-1.2",
"CDDLv1" : "CDDL-1.0",
"CDDL-1" : "CDDL-1.0",
"EPLv1.0" : "EPL-1.0",
"FreeType" : "FTL",
"Nauman" : "Naumen",
"tcl" : "TCL",
"vim" : "Vim",
"SGIv1" : "SGI-1",
}
|
||||
|
||||
def processfile(fn):
    """Replace legacy license names with SPDX identifiers on LICENSE lines of *fn*.

    Only lines starting with "LICENSE" are rewritten; the file is replaced
    via a temp file and only when something actually changed. Files that
    are not valid text (UnicodeDecodeError) are left untouched.
    """
    print("processing file '%s'" % fn)
    try:
        fh, abs_path = tempfile.mkstemp()
        modified = False
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                for line in old_file:
                    if not line.startswith("LICENSE"):
                        new_file.write(line)
                        continue
                    orig = line
                    # Longest names first so e.g. "GPL-2.0+" is not clobbered
                    # by the shorter "GPL-2.0" mapping; a trailing delimiter
                    # is required so prefixes of longer names don't match.
                    for license in sorted(license_map, key=len, reverse=True):
                        for ending in ['"', "'", " ", ")"]:
                            line = line.replace(license + ending, license_map[license] + ending)
                    if orig != line:
                        modified = True
                    new_file.write(line)
            # (the explicit new_file.close() the original had here was
            # redundant inside the 'with' block and has been removed)
        if modified:
            shutil.copymode(fn, abs_path)
            os.remove(fn)
            shutil.move(abs_path, fn)
        else:
            # Nothing changed: the original leaked the temp file here.
            os.remove(abs_path)
    except UnicodeDecodeError:
        # Not a text file - skip it, cleaning up the temp file.
        os.remove(abs_path)
|
||||
|
||||
# Script name/version: used to avoid converting this script itself and
# for the final report line.
ourname = os.path.basename(sys.argv[0])
ourversion = "0.01"

# Single-file mode: convert just that file and exit
if os.path.isfile(sys.argv[1]):
    processfile(sys.argv[1])
    sys.exit(0)

for targetdir in sys.argv[1:]:
    print("processing directory '%s'" % targetdir)
    for root, dirs, files in os.walk(targetdir):
        for name in files:
            # Never rewrite this conversion script itself
            if name == ourname:
                continue
            fn = os.path.join(root, name)
            # Skip symlinks so targets aren't processed twice
            if os.path.islink(fn):
                continue
            # Skip git internals and file types that must not change
            if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
                continue
            processfile(fn)

print("All files processed with version %s" % ourversion)
|
||||
Executable
+77
@@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Conversion script to update SRC_URI to add branch to git urls
|
||||
#
|
||||
# Copyright (C) 2021 Richard Purdie
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import shutil
|
||||
import mimetypes
|
||||
|
||||
# At least one path argument is required; everything on the command line
# is a file or directory to convert.
if len(sys.argv) < 2:
    print("Please specify a directory to run the conversion script against.")
    sys.exit(1)
|
||||
|
||||
def processfile(fn):
    """Add ';branch=master' (and https protocol for github.com) to git/gitsm
    SRC_URI entries in *fn*, rewriting the file in place.

    Files that are not valid text (UnicodeDecodeError) are left untouched.
    """
    def matchline(line):
        # Mirror/wildcard entries are URL templates, not real fetch URIs
        if "MIRROR" in line or ".*" in line or "GNOME_GIT" in line:
            return False
        return True
    print("processing file '%s'" % fn)
    try:
        # These files intentionally carry bare git URLs
        if "distro_alias.inc" in fn or "linux-yocto-custom.bb" in fn:
            return
        fh, abs_path = tempfile.mkstemp()
        modified = False
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                for line in old_file:
                    # Append ;branch=master to git URLs without an explicit branch;
                    # handle both `...url"` line endings and line continuations.
                    # (regexes are raw strings now; same patterns as before)
                    if ("git://" in line or "gitsm://" in line) and "branch=" not in line and matchline(line):
                        if line.endswith('"\n'):
                            line = line.replace('"\n', ';branch=master"\n')
                        elif re.search(r'\s*\\$', line):
                            line = re.sub(r'\s*\\$', r';branch=master \\', line)
                        modified = True
                    # github.com no longer serves the git protocol: force https
                    if ("git://" in line or "gitsm://" in line) and "github.com" in line and "protocol=https" not in line and matchline(line):
                        if "protocol=git" in line:
                            line = line.replace('protocol=git', 'protocol=https')
                        elif line.endswith('"\n'):
                            line = line.replace('"\n', ';protocol=https"\n')
                        elif re.search(r'\s*\\$', line):
                            line = re.sub(r'\s*\\$', r';protocol=https \\', line)
                        modified = True
                    new_file.write(line)
        if modified:
            shutil.copymode(fn, abs_path)
            os.remove(fn)
            shutil.move(abs_path, fn)
        else:
            # Nothing changed: the original leaked the temp file here.
            os.remove(abs_path)
    except UnicodeDecodeError:
        # Not a text file - skip it, cleaning up the temp file.
        os.remove(abs_path)
|
||||
|
||||
# Script name/version: used to avoid converting this script itself and
# for the final report line.
ourname = os.path.basename(sys.argv[0])
ourversion = "0.1"

# Single-file mode: convert just that file and exit
if os.path.isfile(sys.argv[1]):
    processfile(sys.argv[1])
    sys.exit(0)

for targetdir in sys.argv[1:]:
    print("processing directory '%s'" % targetdir)
    for root, dirs, files in os.walk(targetdir):
        for name in files:
            # Never rewrite this conversion script itself
            if name == ourname:
                continue
            fn = os.path.join(root, name)
            # Skip symlinks so targets aren't processed twice
            if os.path.islink(fn):
                continue
            # Skip git internals and file types that must not change
            if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"):
                continue
            processfile(fn)

print("All files processed with version %s" % ourversion)
|
||||
+116
@@ -0,0 +1,116 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Conversion script to rename variables to versions with improved terminology.
|
||||
# Also highlights potentially problematic language and removed variables.
|
||||
#
|
||||
# Copyright (C) 2021 Richard Purdie
|
||||
# Copyright (C) 2022 Wind River Systems, Inc.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import shutil
|
||||
import mimetypes
|
||||
|
||||
# Require at least one file/directory argument before doing any work.
if len(sys.argv) < 2:
    print("Please specify a directory to run the conversion script against.")
    sys.exit(1)
|
||||
|
||||
# Old variable name -> replacement with improved terminology (BitBake 2.0 /
# oe-core kirkstone renames). Values must match the BB_RENAMED_VARIABLE
# definitions in bitbake/oe-core.
renames = {
    # bitbake environment/signature variables
    "BB_ENV_WHITELIST": "BB_ENV_PASSTHROUGH",
    "BB_ENV_EXTRAWHITE": "BB_ENV_PASSTHROUGH_ADDITIONS",
    "BB_HASHCONFIG_WHITELIST": "BB_HASHCONFIG_IGNORE_VARS",
    "BB_SETSCENE_ENFORCE_WHITELIST": "BB_SETSCENE_ENFORCE_IGNORE_TASKS",
    "BB_HASHBASE_WHITELIST": "BB_BASEHASH_IGNORE_VARS",
    "BB_HASHTASK_WHITELIST": "BB_TASKHASH_IGNORE_TASKS",
    # CVE checking
    "CVE_CHECK_PN_WHITELIST": "CVE_CHECK_SKIP_RECIPE",
    "CVE_CHECK_WHITELIST": "CVE_CHECK_IGNORE",
    # recipe/provider selection
    "MULTI_PROVIDER_WHITELIST": "BB_MULTI_PROVIDER_ALLOWED",
    "PNBLACKLIST": "SKIP_RECIPE",
    # eSDK configuration
    "SDK_LOCAL_CONF_BLACKLIST": "ESDK_LOCALCONF_REMOVE",
    "SDK_LOCAL_CONF_WHITELIST": "ESDK_LOCALCONF_ALLOW",
    "SDK_INHERIT_BLACKLIST": "ESDK_CLASS_INHERIT_DISABLE",
    # sstate / sysroot / configure checks
    "SSTATE_DUPWHITELIST": "SSTATE_ALLOW_OVERLAP_FILES",
    "SYSROOT_DIRS_BLACKLIST": "SYSROOT_DIRS_IGNORE",
    "UNKNOWN_CONFIGURE_WHITELIST": "UNKNOWN_CONFIGURE_OPT_IGNORE",
    # icecc class (note: several old names collapse into one new name)
    "ICECC_USER_CLASS_BL": "ICECC_CLASS_DISABLE",
    "ICECC_SYSTEM_CLASS_BL": "ICECC_CLASS_DISABLE",
    "ICECC_USER_PACKAGE_WL": "ICECC_RECIPE_ENABLE",
    "ICECC_USER_PACKAGE_BL": "ICECC_RECIPE_DISABLE",
    "ICECC_SYSTEM_PACKAGE_BL": "ICECC_RECIPE_DISABLE",
    # licensing
    "LICENSE_FLAGS_WHITELIST": "LICENSE_FLAGS_ACCEPTED",
}

# Variables removed outright; uses need manual attention rather than a rename.
removed_list = [
    "BB_STAMP_WHITELIST",
    "BB_STAMP_POLICY",
    "INHERIT_BLACKLIST",
    "TUNEABI_WHITELIST",
]

# Words whose presence suggests terminology that should be reviewed manually.
context_check_list = [
    "blacklist",
    "whitelist",
    "abort",
]
|
||||
|
||||
def processfile(fn, var_renames=None, removed_vars=None, context_words=None):
    """Rewrite file fn in place, renaming deprecated variables to their new
    names and printing warnings for removed variables and problematic wording.

    The table arguments default to the module-level renames, removed_list and
    context_check_list; they are parameters so the function can be driven with
    custom tables (e.g. for testing). Binary files are silently skipped.
    """
    if var_renames is None:
        var_renames = renames
    if removed_vars is None:
        removed_vars = removed_list
    if context_words is None:
        context_words = context_check_list

    print("processing file '%s'" % fn)
    try:
        fh, abs_path = tempfile.mkstemp()
        modified = False
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                lineno = 0
                for line in old_file:
                    lineno += 1
                    if "BB_RENAMED_VARIABLE" in line:
                        # Already using the rename mechanism - copy through
                        # unchanged. (The original 'continue' skipped the
                        # write, silently dropping these lines whenever any
                        # other line triggered a rewrite.)
                        new_file.write(line)
                        continue
                    # Do the renames
                    for old_name, new_name in var_renames.items():
                        if old_name in line:
                            line = line.replace(old_name, new_name)
                            modified = True
                    # Flag removed names - these need manual attention
                    for removed_name in removed_vars:
                        if removed_name in line:
                            print("%s needs further work at line %s because %s has been deprecated" % (fn, lineno, removed_name))
                    # Flag terminology that should be reviewed manually
                    for check_word in context_words:
                        if re.search(check_word, line, re.IGNORECASE):
                            print("%s needs further work at line %s since it contains %s"% (fn, lineno, check_word))
                    new_file.write(line)
        if modified:
            print("*** Modified file '%s'" % (fn))
            # Preserve permissions, then swap the rewritten copy into place
            shutil.copymode(fn, abs_path)
            os.remove(fn)
            shutil.move(abs_path, fn)
        else:
            # Original leaked the unused temporary file here
            os.remove(abs_path)
    except UnicodeDecodeError:
        pass
|
||||
|
||||
ourname = os.path.basename(sys.argv[0])
ourversion = "0.1"

# Single-file mode: convert just the named file and stop.
if os.path.isfile(sys.argv[1]):
    processfile(sys.argv[1])
    sys.exit(0)

# Directory mode: walk every argument recursively.
for targetdir in sys.argv[1:]:
    print("processing directory '%s'" % targetdir)
    for root, dirs, files in os.walk(targetdir):
        for fname in files:
            if fname == ourname:
                # Never rewrite this script itself
                continue
            fn = os.path.join(root, fname)
            if os.path.islink(fn):
                continue
            if "ChangeLog" in fn or "/.git/" in fn or fn.endswith((".html", ".patch", ".m4", ".diff", ".orig")):
                continue
            processfile(fn)

print("All files processed with version %s" % ourversion)
|
||||
Executable
+172
@@ -0,0 +1,172 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
# 1MB blocksize
|
||||
BLOCKSIZE=1048576
|
||||
|
||||
# Print the command-line synopsis.
usage() {
    printf '%s\n' "Usage: $(basename $0) IMAGE DEVICE"
}
|
||||
|
||||
# Print name, size, mtime and file type of the image passed as $1.
# All expansions are quoted so image paths containing spaces work.
image_details() {
    IMG=$1
    echo "Image details"
    echo "============="
    echo " image: $(basename "$IMG")"
    # stat format is different on Mac OS and Linux
    if [ "$(uname)" = "Darwin" ]; then
        echo " size: $(stat -L -f '%z bytes' "$IMG")"
        echo " modified: $(stat -L -f '%Sm' "$IMG")"
    else
        echo " size: $(stat -L -c '%s bytes' "$IMG")"
        echo " modified: $(stat -L -c '%y' "$IMG")"
    fi
    echo " type: $(file -L -b "$IMG")"
    echo ""
}
|
||||
|
||||
# Print vendor/model/size details for the global $DEVICE.
device_details() {
    # The kernel exports device sizes in 512-byte sectors
    BLOCK_SIZE=512

    echo "Device details"
    echo "=============="

    # Collect disk info using diskutil on Mac OS
    if [ "$(uname)" = "Darwin" ]; then
        diskutil info $DEVICE | egrep "(Device Node|Media Name|Total Size)"
        return
    fi

    # Default / Linux information collection
    ACTUAL_DEVICE=$(readlink -f "$DEVICE")
    DEV=$(basename "$ACTUAL_DEVICE")
    if [ "$ACTUAL_DEVICE" != "$DEVICE" ] ; then
        echo " device: $DEVICE -> $ACTUAL_DEVICE"
    else
        echo " device: $DEVICE"
    fi
    if [ -f "/sys/class/block/$DEV/device/vendor" ]; then
        echo " vendor: $(cat /sys/class/block/$DEV/device/vendor)"
    else
        echo " vendor: UNKNOWN"
    fi
    if [ -f "/sys/class/block/$DEV/device/model" ]; then
        echo " model: $(cat /sys/class/block/$DEV/device/model)"
    else
        echo " model: UNKNOWN"
    fi
    if [ -f "/sys/class/block/$DEV/size" ]; then
        echo " size: $(($(cat /sys/class/block/$DEV/size) * $BLOCK_SIZE)) bytes"
    else
        echo " size: UNKNOWN"
    fi
    echo ""
}
|
||||
|
||||
# Succeed (0) if $1 appears verbatim as a mounted /dev device in
# /proc/self/mounts; fail (1) otherwise.
check_mount_device() {
    awk '{ print $1 }' /proc/self/mounts | grep /dev/ | grep -q -E "^$1$"
}
|
||||
|
||||
# Succeed (0) if device $1, its parent disk, or any of its partitions is
# currently mounted.
is_mounted() {
    if [ "$(uname)" = "Darwin" ]; then
        # On MacOS mounted filesystems show up as slices ($1s1, $1s2, ...)
        if df | awk '{ print $1 }' | grep /dev/ | grep -q -E "^$1(s[0-9]+)?$" ; then
            return 0
        fi
    else
        if check_mount_device $1 ; then
            return 0
        fi
        DEV=$(basename $1)
        if [ -d /sys/class/block/$DEV/ ] ; then
            # If $1 is a partition, also check whether the parent disk is
            # mounted. check_mount_device matches full /dev/ paths, so the
            # bare name the original passed here could never match.
            PARENT_BLKDEV=$(basename $(readlink -f "/sys/class/block/$DEV/.."))
            if [ "$PARENT_BLKDEV" != "block" ] ; then
                if check_mount_device /dev/$PARENT_BLKDEV ; then
                    return 0
                fi
            fi
            # If $1 is a whole disk, check each of its partitions
            for CHILD_BLKDEV in $(find /sys/class/block/$DEV/ -mindepth 1 -maxdepth 1 -name "$DEV*" -type d)
            do
                if check_mount_device /dev/$(basename $CHILD_BLKDEV) ; then
                    return 0
                fi
            done
        fi
    fi
    return 1
}
|
||||
|
||||
# Succeed (0) if device $1 has holders (e.g. it is part of LVM/RAID).
is_inuse() {
    HOLDERS_DIR="/sys/class/block/$(basename "$1")/holders"
    # Use [ -n "..." ]: the original bare `[ $(ls -A dir) ]` produced a
    # multi-word test expression (a syntax error) when more than one
    # holder existed.
    if [ -d "$HOLDERS_DIR" ] && [ -n "$(ls -A "$HOLDERS_DIR")" ] ; then
        return 0
    fi
    return 1
}
|
||||
|
||||
# Main: validate arguments, refuse mounted/in-use devices, confirm, then write.
if [ $# -ne 2 ]; then
    usage
    exit 1
fi

IMAGE=$1
DEVICE=$2

if [ ! -e "$IMAGE" ]; then
    echo "ERROR: Image $IMAGE does not exist"
    usage
    exit 1
fi

if [ ! -e "$DEVICE" ]; then
    echo "ERROR: Device $DEVICE does not exist"
    usage
    exit 1
fi

if [ "$(uname)" = "Darwin" ]; then
    # readlink doesn't support -f on MacOS, just assume it isn't a symlink
    ACTUAL_DEVICE=$DEVICE
else
    ACTUAL_DEVICE=$(readlink -f "$DEVICE")
fi
if is_mounted "$ACTUAL_DEVICE" ; then
    echo "ERROR: Device $DEVICE is currently mounted - check if this is the right device, and unmount it first if so"
    device_details
    exit 1
fi
if is_inuse "$ACTUAL_DEVICE" ; then
    echo "ERROR: Device $DEVICE is currently in use (possibly part of LVM) - check if this is the right device!"
    device_details
    exit 1
fi

if [ ! -w "$DEVICE" ]; then
    echo "ERROR: Device $DEVICE is not writable - possibly use sudo?"
    usage
    exit 1
fi

image_details "$IMAGE"
device_details

# Last chance to bail out before the device is overwritten
printf "Write $IMAGE to $DEVICE [y/N]? "
read RESPONSE
if [ "$RESPONSE" != "y" ]; then
    echo "Write aborted"
    exit 0
fi

echo "Writing image..."
# Use pv for a progress bar when available
if which pv >/dev/null 2>&1; then
    pv "$IMAGE" | dd of="$DEVICE" bs="$BLOCKSIZE"
else
    dd if="$IMAGE" of="$DEVICE" bs="$BLOCKSIZE"
fi
sync
|
||||
Executable
+245
@@ -0,0 +1,245 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# devtool stress tester
|
||||
#
|
||||
# Written by: Paul Eggleton <paul.eggleton@linux.intel.com>
|
||||
#
|
||||
# Copyright 2015 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import sys
|
||||
import os
|
||||
import os.path
|
||||
import subprocess
|
||||
import re
|
||||
import argparse
|
||||
import logging
|
||||
import tempfile
|
||||
import shutil
|
||||
import signal
|
||||
import fnmatch
|
||||
|
||||
scripts_lib_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'lib'))
|
||||
sys.path.insert(0, scripts_lib_path)
|
||||
import scriptutils
|
||||
import argparse_oe
|
||||
logger = scriptutils.logger_create('devtool-stress')
|
||||
|
||||
def select_recipes(args):
    """Return the list of recipes to stress-test, honouring the
    --skip-classes, --resume-from, --only and --skip arguments.

    Exits with status 1 on invalid --resume-from/--only values. (The
    original returned the int 1 here, which callers then tried to
    iterate, raising TypeError.)
    """
    import bb.tinfoil
    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(False)

    pkg_pn = tinfoil.cooker.recipecaches[''].pkg_pn
    (latest_versions, preferred_versions) = bb.providers.findProviders(tinfoil.config_data, tinfoil.cooker.recipecaches[''], pkg_pn)

    skip_classes = args.skip_classes.split(',')

    # Drop any recipe inheriting one of the skipped classes
    recipelist = []
    for pn in sorted(pkg_pn):
        pref = preferred_versions[pn]
        inherits = [os.path.splitext(os.path.basename(f))[0] for f in tinfoil.cooker.recipecaches[''].inherits[pref[1]]]
        if not any(cls in inherits for cls in skip_classes):
            recipelist.append(pn)

    tinfoil.shutdown()

    # Validate the filtering arguments up front so typos fail fast
    resume_from = args.resume_from
    if resume_from and resume_from not in recipelist:
        print('%s is not a testable recipe' % resume_from)
        sys.exit(1)
    if args.only:
        only = args.only.split(',')
        for onlyitem in only:
            if not any(fnmatch.fnmatch(pn, onlyitem) for pn in recipelist):
                print('%s does not match any testable recipe' % onlyitem)
                sys.exit(1)
    else:
        only = None
    skip = args.skip.split(',') if args.skip else []

    recipes = []
    for pn in recipelist:
        # Skip everything before the --resume-from recipe (inclusive restart)
        if resume_from:
            if pn == resume_from:
                resume_from = None
            else:
                continue

        if only and not any(fnmatch.fnmatch(pn, item) for item in only):
            continue

        if any(fnmatch.fnmatch(pn, item) for item in skip):
            continue

        recipes.append(pn)

    return recipes
|
||||
|
||||
|
||||
def stress_extract(args):
    """Run 'devtool extract' on every selected recipe; return 1 if any failed."""
    import bb.process

    recipes = select_recipes(args)

    failures = 0
    tmpdir = tempfile.mkdtemp()
    # Become a process group leader so a Ctrl+C can take the children down too
    os.setpgrp()
    try:
        for pn in recipes:
            sys.stdout.write('Testing %s ' % (pn + ' ').ljust(40, '.'))
            sys.stdout.flush()
            failed = False
            skipped = None

            srctree = os.path.join(tmpdir, pn)
            try:
                bb.process.run('devtool extract %s %s' % (pn, srctree))
            except bb.process.ExecutionError as exc:
                if exc.exitcode == 4:
                    # devtool exit code 4 == recipe unsupported for extraction
                    skipped = 'incompatible'
                else:
                    failed = True
                    with open('stress_%s_extract.log' % pn, 'w') as f:
                        f.write(str(exc))

            if os.path.exists(srctree):
                shutil.rmtree(srctree)

            if failed:
                print('failed')
                failures += 1
            elif skipped:
                print('skipped (%s)' % skipped)
            else:
                print('ok')
    except KeyboardInterrupt:
        # We want any child processes killed. This is crude, but effective.
        os.killpg(0, signal.SIGTERM)

    return 1 if failures else 0
|
||||
|
||||
|
||||
def stress_modify(args):
    """Run 'devtool modify' plus a bitbake install on every selected recipe;
    return 1 if any recipe failed."""
    import bb.process

    recipes = select_recipes(args)

    failures = 0
    tmpdir = tempfile.mkdtemp()
    # Become a process group leader so a Ctrl+C can take the children down too
    os.setpgrp()
    try:
        for pn in recipes:
            sys.stdout.write('Testing %s ' % (pn + ' ').ljust(40, '.'))
            sys.stdout.flush()
            failed = False
            reset = True
            skipped = None

            srctree = os.path.join(tmpdir, pn)
            try:
                bb.process.run('devtool modify -x %s %s' % (pn, srctree))
            except bb.process.ExecutionError as exc:
                if exc.exitcode == 4:
                    # devtool exit code 4 == recipe unsupported by modify
                    skipped = 'incompatible'
                else:
                    with open('stress_%s_modify.log' % pn, 'w') as f:
                        f.write(str(exc))
                    failed = 'modify'
                    # modify didn't take effect, so there is nothing to reset
                    reset = False

            if not skipped:
                if not failed:
                    try:
                        bb.process.run('bitbake -c install %s' % pn)
                    except bb.process.CmdError as exc:
                        with open('stress_%s_install.log' % pn, 'w') as f:
                            f.write(str(exc))
                        failed = 'build'
                if reset:
                    try:
                        bb.process.run('devtool reset %s' % pn)
                    except bb.process.CmdError as exc:
                        # A failed reset poisons the workspace for every
                        # following recipe, so stop the whole run
                        print('devtool reset failed: %s' % str(exc))
                        break

            if os.path.exists(srctree):
                shutil.rmtree(srctree)

            if failed:
                print('failed (%s)' % failed)
                failures += 1
            elif skipped:
                print('skipped (%s)' % skipped)
            else:
                print('ok')
    except KeyboardInterrupt:
        # We want any child processes killed. This is crude, but effective.
        os.killpg(0, signal.SIGTERM)

    return 1 if failures else 0
|
||||
|
||||
|
||||
def main():
    """Parse arguments and dispatch to the selected stress subcommand.

    Returns the subcommand's exit status (the original discarded it, so
    the script always exited 0).
    """
    parser = argparse_oe.ArgumentParser(description="devtool stress tester",
                                        epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
    parser.add_argument('-r', '--resume-from', help='Resume from specified recipe', metavar='PN')
    parser.add_argument('-o', '--only', help='Only test specified recipes (comma-separated without spaces, wildcards allowed)', metavar='PNLIST')
    parser.add_argument('-s', '--skip', help='Skip specified recipes (comma-separated without spaces, wildcards allowed)', metavar='PNLIST', default='gcc-source-*,kernel-devsrc,package-index,perf,meta-world-pkgdata,glibc-locale,glibc-mtrace,glibc-scripts,os-release')
    parser.add_argument('-c', '--skip-classes', help='Skip recipes inheriting specified classes (comma-separated) - default %(default)s', metavar='CLASSLIST', default='native,nativesdk,cross,cross-canadian,image,populate_sdk,meta,packagegroup')
    subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
    subparsers.required = True

    parser_modify = subparsers.add_parser('modify',
                                          help='Run "devtool modify" followed by a build with bitbake on matching recipes',
                                          description='Runs "devtool modify" followed by a build with bitbake on matching recipes')
    parser_modify.set_defaults(func=stress_modify)

    parser_extract = subparsers.add_parser('extract',
                                           help='Run "devtool extract" on matching recipes',
                                           description='Runs "devtool extract" on matching recipes')
    parser_extract.set_defaults(func=stress_extract)

    args = parser.parse_args()

    if args.debug:
        logger.setLevel(logging.DEBUG)

    import scriptpath
    bitbakepath = scriptpath.add_bitbake_lib_path()
    if not bitbakepath:
        logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
        return 1
    logger.debug('Found bitbake path: %s' % bitbakepath)

    # Propagate the subcommand's result as our exit status
    return args.func(args)
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit code
    sys.exit(main())
|
||||
Executable
+57
@@ -0,0 +1,57 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
# Simple script to show a manual power prompt for when you want to use
|
||||
# automated hardware testing with testimage.bbclass but you don't have a
|
||||
# web-enabled power strip or similar to do the power on/off/cycle.
|
||||
#
|
||||
# You can enable it by enabling testimage (see the Yocto Project
|
||||
# Development manual "Performing Automated Runtime Testing" section)
|
||||
# and setting the following in your local.conf:
|
||||
#
|
||||
# TEST_POWERCONTROL_CMD = "${COREBASE}/scripts/contrib/dialog-power-control"
|
||||
#
|
||||
|
||||
# Build the prompt from the requested power action(s); unknown words are
# simply skipped, an empty argument list terminates the loop.
PROMPT=""
while true; do
    case $1 in
        on)
            PROMPT="Please turn device power on";;
        off)
            PROMPT="Please turn device power off";;
        cycle)
            PROMPT="Please click Done, then turn the device power off then on";;
        "")
            break;;
    esac
    shift
done

if [ "$PROMPT" = "" ] ; then
    echo "ERROR: no power action specified on command line"
    exit 2
fi

# Prefer kdialog (KDE), fall back to zenity (GNOME). command -v is the
# POSIX-standard availability test; which(1) is not standardised.
if command -v kdialog >/dev/null 2>&1 ; then
    DIALOGUTIL="kdialog"
elif command -v zenity >/dev/null 2>&1 ; then
    DIALOGUTIL="zenity"
else
    echo "ERROR: couldn't find program to display a message, install kdialog or zenity"
    exit 3
fi

if [ "$DIALOGUTIL" = "kdialog" ] ; then
    kdialog --yesno "$PROMPT" --title "TestImage Power Control" --yes-label "Done" --no-label "Cancel test"
elif [ "$DIALOGUTIL" = "zenity" ] ; then
    zenity --question --text="$PROMPT" --title="TestImage Power Control" --ok-label="Done" --cancel-label="Cancel test"
fi

# Non-zero means the user pressed "Cancel test" in the dialog
if [ "$?" != "0" ] ; then
    echo "User cancelled test at power prompt"
    exit 1
fi
|
||||
|
||||
Executable
+97
@@ -0,0 +1,97 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
# Perform an audit of which packages provide documentation and which
|
||||
# are missing -doc packages.
|
||||
#
|
||||
# Setup requirements: be sure to be building for MACHINE=qemux86. Run
|
||||
# this script after source'ing the build environment script, so you're
|
||||
# running it from build/ directory.
|
||||
#
|
||||
|
||||
# Report files: *_SIMPLE hold bare package lists, *_DETAIL hold explanations.
REPORT_DOC_SIMPLE="documentation_exists.txt"
REPORT_DOC_DETAIL="documentation_exists_detail.txt"
REPORT_MISSING_SIMPLE="documentation_missing.txt"
REPORT_MISSING_DETAIL="documentation_missing_detail.txt"
REPORT_BUILD_ERRORS="build_errors.txt"

# Start from a clean slate. The original omitted $REPORT_BUILD_ERRORS here,
# so stale build errors leaked into subsequent runs.
rm -rf $REPORT_DOC_SIMPLE $REPORT_DOC_DETAIL $REPORT_MISSING_SIMPLE $REPORT_MISSING_DETAIL $REPORT_BUILD_ERRORS

BITBAKE=`which bitbake`
if [ -z "$BITBAKE" ]; then
    echo "Error: bitbake command not found."
    echo "Did you forget to source the build environment script?"
    exit 1
fi

echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results"
echo "REMINDER: you need to set LICENSE_FLAGS_ACCEPTED appropriately in local.conf or "
echo " you'll get false positives. For example, LICENSE_FLAGS_ACCEPTED = \"commercial\""

for pkg in `bitbake -s | awk '{ print \$1 }'`; do
    if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" ||
        "$pkg" == "Recipe" ||
        "$pkg" == "Parsing" || "$pkg" == "Package" ||
        "$pkg" == "NOTE:" || "$pkg" == "WARNING:" ||
        "$pkg" == "done." || "$pkg" == "===========" ]]
    then
        # Skip initial bitbake output
        continue
    fi
    if [[ "$pkg" =~ -native$ || "$pkg" =~ -nativesdk$ ||
        "$pkg" =~ -cross-canadian ]]; then
        # Skip native/nativesdk/cross-canadian recipes
        continue
    fi
    if [[ "$pkg" =~ ^meta- || "$pkg" =~ ^packagegroup- || "$pkg" =~ -image ]]; then
        # Skip meta, task and image recipes
        continue
    fi
    if [[ "$pkg" =~ ^glibc- || "$pkg" =~ ^libiconv$ ||
        "$pkg" =~ -toolchain$ || "$pkg" =~ ^package-index$ ||
        "$pkg" =~ ^linux- || "$pkg" =~ ^adt-installer$ ||
        "$pkg" =~ ^eds-tools$ || "$pkg" =~ ^external-python-tarball$ ||
        "$pkg" =~ ^qt4-embedded$ || "$pkg" =~ ^qt-mobility ]]; then
        # Skip glibc, libiconv, -toolchain, and other recipes known
        # to cause build conflicts or trigger false positives.
        continue
    fi

    echo "Building package $pkg..."
    bitbake $pkg > /dev/null
    if [ $? -ne 0 ]; then
        echo "There was an error building package $pkg" >> "$REPORT_MISSING_DETAIL"
        echo "$pkg" >> $REPORT_BUILD_ERRORS

        # Do not skip the remaining tests, as sometimes the
        # exit status is 1 due to QA errors, and we can still
        # perform the -doc checks.
    fi

    # (Previously claimed "built successfully" even when the build failed.)
    echo "Checking $pkg for a documentation package..."
    WORKDIR=`bitbake -e $pkg | grep ^WORKDIR | awk -F '=' '{ print \$2 }' | awk -F '"' '{ print \$2 }'`
    FIND_DOC_PKG=`find $WORKDIR/packages-split/*-doc -maxdepth 0 -type d`
    if [ -z "$FIND_DOC_PKG" ]; then
        # No -doc package was generated:
        echo "No -doc package: $pkg" >> "$REPORT_MISSING_DETAIL"
        echo "$pkg" >> $REPORT_MISSING_SIMPLE
        continue
    fi

    FIND_DOC_FILES=`find $FIND_DOC_PKG -type f`
    if [ -z "$FIND_DOC_FILES" ]; then
        # No files shipped with the -doc package:
        echo "No files shipped with the -doc package: $pkg" >> "$REPORT_MISSING_DETAIL"
        echo "$pkg" >> $REPORT_MISSING_SIMPLE
        continue
    fi

    echo "Documentation shipped with $pkg:" >> "$REPORT_DOC_DETAIL"
    echo "$FIND_DOC_FILES" >> "$REPORT_DOC_DETAIL"
    echo "" >> "$REPORT_DOC_DETAIL"

    echo "$pkg" >> "$REPORT_DOC_SIMPLE"
done
|
||||
Executable
+118
@@ -0,0 +1,118 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Simple graph query utility
|
||||
# useful for getting answers from .dot files produced by bitbake -g
|
||||
#
|
||||
# Written by: Paul Eggleton <paul.eggleton@linux.intel.com>
|
||||
#
|
||||
# Copyright 2013 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
|
||||
scripts_lib_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'lib'))
|
||||
sys.path.insert(0, scripts_lib_path)
|
||||
import argparse_oe
|
||||
|
||||
|
||||
def get_path_networkx(dotfile, fromnode, tonode):
    """Return an iterator of all simple paths from fromnode to tonode in the
    .dot graph file dotfile. Exits with an error if either node is absent."""
    try:
        import networkx
    except ImportError:
        print('ERROR: Please install the networkx python module')
        sys.exit(1)

    graph = networkx.DiGraph(networkx.nx_pydot.read_dot(dotfile))

    def node_missing(node):
        # Suggest near-miss node names to help diagnose typos. Always exit:
        # the original only exited when close matches existed, otherwise
        # falling through to a networkx traceback.
        import difflib
        close_matches = difflib.get_close_matches(node, graph.nodes(), cutoff=0.7)
        if close_matches:
            print('ERROR: no node "%s" in graph. Close matches:\n %s' % (node, '\n '.join(close_matches)))
        else:
            print('ERROR: no node "%s" in graph' % node)
        sys.exit(1)

    if not fromnode in graph:
        node_missing(fromnode)
    if not tonode in graph:
        node_missing(tonode)
    return networkx.all_simple_paths(graph, source=fromnode, target=tonode)
|
||||
|
||||
|
||||
def find_paths(args):
    """Print every simple path between the two requested nodes; return 1 if none."""
    path = None
    for path in get_path_networkx(args.dotfile, args.fromnode, args.tonode):
        print(" -> ".join(map(str, path)))
    if path is None:
        # The generator yielded nothing at all
        print("ERROR: no path from %s to %s in graph" % (args.fromnode, args.tonode))
        return 1
|
||||
|
||||
|
||||
def filter_graph(args):
    """Print a filtered copy of a task-depends.dot file, keeping only lines
    whose task references match args.ref and whose task names are not in
    the excluded-task list."""
    import fnmatch

    exclude_tasks = []
    if args.exclude_tasks:
        # Accept task names with or without the do_ prefix
        exclude_tasks = [t if t.startswith('do_') else 'do_%s' % t
                         for t in args.exclude_tasks.split(',')]

    def checkref(strval):
        # strval is one side of an edge, e.g. '"recipe.do_task"'
        strval = strval.strip().strip('"')
        target, taskname = strval.rsplit('.', 1)
        if any(fnmatch.fnmatch(taskname, extask) for extask in exclude_tasks):
            return False
        return strval in args.ref or target in args.ref

    with open(args.infile, 'r') as f:
        for line in f:
            line = line.rstrip()
            if line.startswith(('digraph', '}')):
                # Keep the graph header/footer untouched
                print(line)
            elif '->' in line:
                # Edge: keep only if both endpoints match
                lhs, rhs = line.split('->')[0], line.split('->')[1]
                if checkref(lhs) and checkref(rhs):
                    print(line)
            elif (not args.no_nodes) and checkref(line.split()[0]):
                # Node formatting line
                print(line)
|
||||
|
||||
|
||||
def main():
    """Parse the command line and run the chosen subcommand."""
    parser = argparse_oe.ArgumentParser(description='Small utility for working with .dot graph files')

    subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
    subparsers.required = True

    # find-paths: enumerate paths between two nodes
    parser_find_paths = subparsers.add_parser('find-paths',
                                              help='Find all of the paths between two nodes in a dot graph',
                                              description='Finds all of the paths between two nodes in a dot graph')
    parser_find_paths.add_argument('dotfile', help='.dot graph to search in')
    parser_find_paths.add_argument('fromnode', help='starting node name')
    parser_find_paths.add_argument('tonode', help='ending node name')
    parser_find_paths.set_defaults(func=find_paths)

    # filter: strip a task graph down to the requested references
    parser_filter = subparsers.add_parser('filter',
                                          help='Pare down a task graph to contain only the specified references',
                                          description='Pares down a task-depends.dot graph produced by bitbake -g to contain only the specified references')
    parser_filter.add_argument('infile', help='Input file')
    parser_filter.add_argument('ref', nargs='+', help='Reference to include (either recipe/target name or full target.taskname specification)')
    parser_filter.add_argument('-n', '--no-nodes', action='store_true', help='Skip node formatting lines')
    parser_filter.add_argument('-x', '--exclude-tasks', help='Comma-separated list of tasks to exclude (do_ prefix optional, wildcards allowed)')
    parser_filter.set_defaults(func=filter_graph)

    args = parser.parse_args()

    return args.func(args)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Use the subcommand's return value as the exit code
    sys.exit(main())
|
||||
Executable
+523
@@ -0,0 +1,523 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Script to extract information from image manifests
|
||||
#
|
||||
# Copyright (C) 2018 Intel Corporation
|
||||
# Copyright (C) 2021 Wind River Systems, Inc.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
import logging
|
||||
import json
|
||||
import shutil
|
||||
import tempfile
|
||||
import tarfile
|
||||
from collections import OrderedDict
|
||||
|
||||
scripts_path = os.path.dirname(__file__)
|
||||
lib_path = scripts_path + '/../lib'
|
||||
sys.path = sys.path + [lib_path]
|
||||
|
||||
import scriptutils
|
||||
logger = scriptutils.logger_create(os.path.basename(__file__))
|
||||
|
||||
import argparse_oe
|
||||
import scriptpath
|
||||
bitbakepath = scriptpath.add_bitbake_lib_path()
|
||||
if not bitbakepath:
|
||||
logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
|
||||
sys.exit(1)
|
||||
logger.debug('Using standard bitbake path %s' % bitbakepath)
|
||||
scriptpath.add_oe_lib_path()
|
||||
|
||||
import bb.tinfoil
|
||||
import bb.utils
|
||||
import oe.utils
|
||||
import oe.recipeutils
|
||||
|
||||
def get_pkg_list(manifest):
    """Return the sorted package names listed in manifest.

    Accepts either an image manifest ("name arch version" per line) or a
    build-dependency file (one name per line); other line shapes are ignored.
    """
    pkglist = []
    with open(manifest, 'r') as f:
        for line in f:
            fields = line.split()
            if len(fields) in (3, 1):
                pkglist.append(fields[0])
    return sorted(pkglist)
|
||||
|
||||
def list_packages(args):
    """Print each package named in the manifest, one per line."""
    for pkg in get_pkg_list(args.manifest):
        print(pkg)
|
||||
|
||||
def pkg2recipe(tinfoil, pkg):
    """Return the recipe name (PN) that produced binary package pkg, or None.

    Looks pkg up in the pkgdata runtime-reverse index of the current build.
    """
    if "-native" in pkg:
        # pkgdata only covers target packages
        logger.info('skipping %s' % pkg)
        return None

    pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
    pkgdatafile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
    logger.debug('pkgdatafile %s' % pkgdatafile)
    try:
        # with-statement fixes the file handle leak in the original,
        # and OSError replaces the over-broad 'except Exception'
        with open(pkgdatafile, 'r') as f:
            for line in f:
                if line.startswith('PN:'):
                    return line.split(':', 1)[1].strip()
    except OSError:
        logger.warning('%s is missing' % pkgdatafile)
        return None
    return None
|
||||
|
||||
def get_recipe_list(manifest, tinfoil):
    """Return the sorted, de-duplicated recipes that built the manifest's packages."""
    recipes = []
    for pkg in get_pkg_list(manifest):
        recipe = pkg2recipe(tinfoil, pkg)
        if recipe and recipe not in recipes:
            recipes.append(recipe)
    return sorted(recipes)
|
||||
|
||||
def list_recipes(args):
    """Print, one per line, the recipes that produced the manifest's packages."""
    import bb.tinfoil
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=True)
        for recipe in sorted(get_recipe_list(args.manifest, tinfoil)):
            print(recipe)
|
||||
|
||||
def list_layers(args):
    """Dump the configured BBLAYERS as JSON on args.output, annotating each
    layer with the git URL, branch, commit and subdirectory it comes from
    (where a containing git repository can be found)."""

    def find_git_repo(pth):
        # Walk upwards until a directory containing .git is found
        checkpth = pth
        while checkpth != os.sep:
            if os.path.exists(os.path.join(checkpth, '.git')):
                return checkpth
            checkpth = os.path.dirname(checkpth)
        return None

    def rungit(repodir, *gitargs):
        # Run a git command in repodir; return stripped stdout, or None on
        # failure / empty output
        import bb.process
        try:
            stdout, _ = bb.process.run(['git'] + list(gitargs), cwd=repodir)
        except bb.process.ExecutionError:
            return None
        return stdout.strip() if stdout else None

    def get_git_remote_branch(repodir):
        return rungit(repodir, 'rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{u}')

    def get_git_head_commit(repodir):
        return rungit(repodir, 'rev-parse', 'HEAD')

    def get_git_repo_url(repodir, remote='origin'):
        # Try to get the upstream repo location from the named remote
        remotes = rungit(repodir, 'remote', '-v')
        if remotes:
            for line in remotes.splitlines():
                splitline = line.split()
                if len(splitline) > 1:
                    if splitline[0] == remote and scriptutils.is_src_url(splitline[1]):
                        return splitline[1]
        return None

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=False)
        layers = OrderedDict()
        for layerdir in tinfoil.config_data.getVar('BBLAYERS').split():
            layerdata = OrderedDict()
            layername = os.path.basename(layerdir)
            logger.debug('layername %s, layerdir %s' % (layername, layerdir))
            if layername in layers:
                # Disambiguate duplicate basenames with the parent directory
                logger.warning('layername %s is not unique in configuration' % layername)
                layername = os.path.basename(os.path.dirname(layerdir)) + '_' + os.path.basename(layerdir)
                logger.debug('trying layername %s' % layername)
                if layername in layers:
                    logger.error('Layer name %s is not unique in configuration' % layername)
                    sys.exit(2)
            repodir = find_git_repo(layerdir)
            if repodir:
                remotebranch = get_git_remote_branch(repodir)
                remote = 'origin'
                if remotebranch and '/' in remotebranch:
                    # e.g. "origin/kirkstone" -> remote "origin", branch "kirkstone"
                    rbsplit = remotebranch.split('/', 1)
                    layerdata['actual_branch'] = rbsplit[1]
                    remote = rbsplit[0]
                layerdata['vcs_url'] = get_git_repo_url(repodir, remote)
                if os.path.abspath(repodir) != os.path.abspath(layerdir):
                    layerdata['vcs_subdir'] = os.path.relpath(layerdir, repodir)
                commit = get_git_head_commit(repodir)
                if commit:
                    layerdata['vcs_commit'] = commit
            layers[layername] = layerdata

        json.dump(layers, args.output, indent=2)
|
||||
|
||||
def get_recipe(args):
    """Print the recipe providing args.package (recipe-info subcommand)."""
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=True)

        # NOTE(review): pkg2recipe() can return None when the pkgdata file is
        # missing, in which case this prints "provided by None" — confirm
        # whether an explicit error message would be preferable.
        recipe = pkg2recipe(tinfoil, args.package)
        print(' %s package provided by %s' % (args.package, recipe))
|
||||
|
||||
def pkg_dependencies(args):
    """Print the recipes needed to build args.package (list-depends
    subcommand); with args.native also print native/cross recipes.
    """

    def get_recipe_info(tinfoil, recipe):
        """Parse the recipe (with its bbappends) and return its datastore,
        with .pn/.pv attached; exits the process on any failure."""
        try:
            info = tinfoil.get_recipe_info(recipe)
        except Exception:
            logger.error('Failed to get recipe info for: %s' % recipe)
            sys.exit(1)
        if not info:
            logger.warning('No recipe info found for: %s' % recipe)
            sys.exit(1)
        append_files = tinfoil.get_file_appends(info.fn)
        appends = True
        data = tinfoil.parse_recipe_file(info.fn, appends, append_files)
        data.pn = info.pn
        data.pv = info.pv
        return data

    def find_dependencies(tinfoil, assume_provided, recipe_info, packages, rn, order):
        """Recursively accumulate the DEPENDS closure of recipe `rn` into
        `packages`, caching parsed data in `recipe_info`. `order` is only
        used for debug-output indentation."""
        spaces = ' ' * order
        data = recipe_info[rn]
        if args.native:
            logger.debug('%s- %s' % (spaces, data.pn))
        elif "-native" not in data.pn:
            if "cross" not in data.pn:
                logger.debug('%s- %s' % (spaces, data.pn))

        # Filter out dependencies satisfied by the host (ASSUME_PROVIDED).
        depends = []
        for dep in data.depends:
            if dep not in assume_provided:
                depends.append(dep)

        # First find all dependencies not in package list.
        for dep in depends:
            if dep not in packages:
                packages.append(dep)
                dep_data = get_recipe_info(tinfoil, dep)
                # Do this once now to reduce the number of bitbake calls.
                dep_data.depends = dep_data.getVar('DEPENDS').split()
                recipe_info[dep] = dep_data

        # Then recursively analyze all of the dependencies for the current recipe.
        for dep in depends:
            find_dependencies(tinfoil, assume_provided, recipe_info, packages, dep, order + 1)

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare()

        assume_provided = tinfoil.config_data.getVar('ASSUME_PROVIDED').split()
        logger.debug('assumed provided:')
        for ap in sorted(assume_provided):
            logger.debug(' - %s' % ap)

        # Seed the walk with the recipe providing the requested package.
        recipe = pkg2recipe(tinfoil, args.package)
        data = get_recipe_info(tinfoil, recipe)
        data.depends = []
        depends = data.getVar('DEPENDS').split()
        for dep in depends:
            if dep not in assume_provided:
                data.depends.append(dep)

        recipe_info = dict([(recipe, data)])
        packages = []
        find_dependencies(tinfoil, assume_provided, recipe_info, packages, recipe, order=1)

        # Target recipes first ...
        print('\nThe following packages are required to build %s' % recipe)
        for p in sorted(packages):
            data = recipe_info[p]
            if "-native" not in data.pn:
                if "cross" not in data.pn:
                    print(" %s (%s)" % (data.pn,p))

        # ... then native/cross recipes when requested.
        if args.native:
            print('\nThe following native packages are required to build %s' % recipe)
            for p in sorted(packages):
                data = recipe_info[p]
                if "-native" in data.pn:
                    print(" %s(%s)" % (data.pn,p))
                if "cross" in data.pn:
                    print(" %s(%s)" % (data.pn,p))
|
||||
|
||||
def default_config():
    """Return the default export configuration used by manifest-info.

    'variables' maps recipe variable names to 'yes'/'no' (export or not);
    the remaining keys toggle per-recipe extras such as patches, checksums
    and layer information.
    """
    vlist = OrderedDict([
        ('PV', 'yes'),
        ('SUMMARY', 'no'),
        ('DESCRIPTION', 'no'),
        ('SECTION', 'no'),
        ('LICENSE', 'yes'),
        ('HOMEPAGE', 'no'),
        ('BUGTRACKER', 'no'),
        ('PROVIDES', 'no'),
        ('BBCLASSEXTEND', 'no'),
        ('DEPENDS', 'no'),
        ('PACKAGECONFIG', 'no'),
        ('SRC_URI', 'yes'),
        ('SRCREV', 'yes'),
        ('EXTRA_OECONF', 'no'),
        ('EXTRA_OESCONS', 'no'),
        ('EXTRA_OECMAKE', 'no'),
        ('EXTRA_OEMESON', 'no'),
    ])

    return OrderedDict([
        ('variables', vlist),
        ('filepath', 'no'),
        ('sha256sum', 'no'),
        ('layerdir', 'no'),
        ('layer', 'no'),
        ('inherits', 'no'),
        ('source_urls', 'no'),
        ('packageconfig_opts', 'no'),
        ('patches', 'no'),
        ('packagedir', 'no'),
    ])
|
||||
|
||||
def dump_config(args):
    """Write the default export configuration to default_config.json in the
    current directory (dump-config subcommand). *args* is unused.
    """
    config = default_config()
    # Use a context manager so the file is flushed and closed even on error
    # (the original opened the file and never closed it).
    with open('default_config.json', 'w') as f:
        json.dump(config, f, indent=2)
    logger.info('Default config list dumped to default_config.json')
|
||||
|
||||
def export_manifest_info(args):
    """Export recipe information for every package in args.manifest into a
    .tar.gz archive (manifest-info subcommand). What gets exported is driven
    by a config dict (see default_config()), optionally loaded from
    args.config.
    """

    def handle_value(value):
        # Collapse runs of whitespace in exported variable values.
        if value:
            return oe.utils.squashspaces(value)
        else:
            return value

    if args.config:
        logger.debug('config: %s' % args.config)
        # NOTE(review): this file handle is never closed explicitly.
        f = open(args.config, 'r')
        config = json.load(f, object_pairs_hook=OrderedDict)
    else:
        config = default_config()
    if logger.isEnabledFor(logging.DEBUG):
        print('Configuration:')
        json.dump(config, sys.stdout, indent=2)
        print('')

    # All output is staged in a temp dir, tarred up at the end, then removed.
    tmpoutdir = tempfile.mkdtemp(prefix=os.path.basename(__file__)+'-')
    logger.debug('tmp dir: %s' % tmpoutdir)

    # export manifest
    shutil.copy2(args.manifest,os.path.join(tmpoutdir, "manifest"))

    with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=False)

        pkglist = get_pkg_list(args.manifest)
        # export pkg list
        f = open(os.path.join(tmpoutdir, "pkgs"), 'w')
        for pkg in pkglist:
            f.write('%s\n' % pkg)
        f.close()

        # Resolve packages to their providing recipes, de-duplicated.
        recipelist = []
        for pkg in pkglist:
            recipe = pkg2recipe(tinfoil,pkg)
            if recipe:
                if not recipe in recipelist:
                    recipelist.append(recipe)
        recipelist.sort()
        # export recipe list
        f = open(os.path.join(tmpoutdir, "recipes"), 'w')
        for recipe in recipelist:
            f.write('%s\n' % recipe)
        f.close()

        try:
            rvalues = OrderedDict()
            for pn in sorted(recipelist):
                logger.debug('Package: %s' % pn)
                rd = tinfoil.parse_recipe(pn)

                rvalues[pn] = OrderedDict()

                # Export the plain variables selected in the config.
                for varname in config['variables']:
                    if config['variables'][varname] == 'yes':
                        rvalues[pn][varname] = handle_value(rd.getVar(varname))

                fpth = rd.getVar('FILE')
                layerdir = oe.recipeutils.find_layerdir(fpth)
                if config['filepath'] == 'yes':
                    rvalues[pn]['filepath'] = os.path.relpath(fpth, layerdir)
                if config['sha256sum'] == 'yes':
                    rvalues[pn]['sha256sum'] = bb.utils.sha256_file(fpth)

                if config['layerdir'] == 'yes':
                    rvalues[pn]['layerdir'] = layerdir

                if config['layer'] == 'yes':
                    rvalues[pn]['layer'] = os.path.basename(layerdir)

                if config['inherits'] == 'yes':
                    # Only report classes inherited by the recipe itself, not
                    # those inherited globally via the base configuration.
                    gr = set(tinfoil.config_data.getVar("__inherit_cache") or [])
                    lr = set(rd.getVar("__inherit_cache") or [])
                    rvalues[pn]['inherits'] = sorted({os.path.splitext(os.path.basename(r))[0] for r in lr if r not in gr})

                if config['source_urls'] == 'yes':
                    # Remote URLs only, with fetcher parameters stripped.
                    rvalues[pn]['source_urls'] = []
                    for url in (rd.getVar('SRC_URI') or '').split():
                        if not url.startswith('file://'):
                            url = url.split(';')[0]
                            rvalues[pn]['source_urls'].append(url)

                if config['packageconfig_opts'] == 'yes':
                    rvalues[pn]['packageconfig_opts'] = OrderedDict()
                    for key in rd.getVarFlags('PACKAGECONFIG').keys():
                        if key == 'doc':
                            continue
                        rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key)

                if config['patches'] == 'yes':
                    patches = oe.recipeutils.get_recipe_patches(rd)
                    rvalues[pn]['patches'] = []
                    if patches:
                        recipeoutdir = os.path.join(tmpoutdir, pn, 'patches')
                        bb.utils.mkdirhier(recipeoutdir)
                        for patch in patches:
                            # Patches may be in other layers too
                            patchlayerdir = oe.recipeutils.find_layerdir(patch)
                            # patchlayerdir will be None for remote patches, which we ignore
                            # (since currently they are considered as part of sources)
                            if patchlayerdir:
                                rvalues[pn]['patches'].append((os.path.basename(patchlayerdir), os.path.relpath(patch, patchlayerdir)))
                                shutil.copy(patch, recipeoutdir)

                if config['packagedir'] == 'yes':
                    # Also drop a per-recipe recipe.json inside the archive.
                    pn_dir = os.path.join(tmpoutdir, pn)
                    bb.utils.mkdirhier(pn_dir)
                    f = open(os.path.join(pn_dir, 'recipe.json'), 'w')
                    json.dump(rvalues[pn], f, indent=2)
                    f.close()

            with open(os.path.join(tmpoutdir, 'recipes.json'), 'w') as f:
                json.dump(rvalues, f, indent=2)

            # Derive the output tarball name from -o or the manifest name.
            if args.output:
                outname = os.path.basename(args.output)
            else:
                outname = os.path.splitext(os.path.basename(args.manifest))[0]
            if outname.endswith('.tar.gz'):
                outname = outname[:-7]
            elif outname.endswith('.tgz'):
                outname = outname[:-4]

            tarfn = outname
            if tarfn.endswith(os.sep):
                tarfn = tarfn[:-1]
            if not tarfn.endswith(('.tar.gz', '.tgz')):
                tarfn += '.tar.gz'
            with open(tarfn, 'wb') as f:
                with tarfile.open(None, "w:gz", f) as tar:
                    tar.add(tmpoutdir, outname)
        finally:
            # Always remove the staging directory, even if export failed.
            shutil.rmtree(tmpoutdir)
|
||||
|
||||
|
||||
def main():
    """Command-line entry point: build the subcommand parser, configure
    logging verbosity, then dispatch to the selected subcommand handler.
    Returns the handler's return value (used as the exit status).
    """
    parser = argparse_oe.ArgumentParser(description="Image manifest utility",
                                        epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
    parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
    subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
    subparsers.required = True

    # get recipe info
    parser_get_recipes = subparsers.add_parser('recipe-info',
                                               help='Get recipe info',
                                               description='Get recipe information for a package')
    parser_get_recipes.add_argument('package', help='Package name')
    parser_get_recipes.set_defaults(func=get_recipe)

    # list runtime dependencies
    parser_pkg_dep = subparsers.add_parser('list-depends',
                                           help='List dependencies',
                                           description='List dependencies required to build the package')
    parser_pkg_dep.add_argument('--native', help='also print native and cross packages', action='store_true')
    parser_pkg_dep.add_argument('package', help='Package name')
    parser_pkg_dep.set_defaults(func=pkg_dependencies)

    # list recipes
    parser_recipes = subparsers.add_parser('list-recipes',
                                           help='List recipes producing packages within an image',
                                           description='Lists recipes producing the packages that went into an image, using the manifest and pkgdata')
    parser_recipes.add_argument('manifest', help='Manifest file')
    parser_recipes.set_defaults(func=list_recipes)

    # list packages
    parser_packages = subparsers.add_parser('list-packages',
                                            help='List packages within an image',
                                            description='Lists packages that went into an image, using the manifest')
    parser_packages.add_argument('manifest', help='Manifest file')
    parser_packages.set_defaults(func=list_packages)

    # list layers
    parser_layers = subparsers.add_parser('list-layers',
                                          help='List included layers',
                                          description='Lists included layers')
    parser_layers.add_argument('-o', '--output', help='Output file - defaults to stdout if not specified',
                               default=sys.stdout, type=argparse.FileType('w'))
    parser_layers.set_defaults(func=list_layers)

    # dump default configuration file
    parser_dconfig = subparsers.add_parser('dump-config',
                                           help='Dump default config',
                                           description='Dump default config to default_config.json')
    parser_dconfig.set_defaults(func=dump_config)

    # export recipe info for packages in manifest
    parser_export = subparsers.add_parser('manifest-info',
                                          help='Export recipe info for a manifest',
                                          description='Export recipe information using the manifest')
    parser_export.add_argument('-c', '--config', help='load config from json file')
    parser_export.add_argument('-o', '--output', help='Output file (tarball) - defaults to manifest name if not specified')
    parser_export.add_argument('manifest', help='Manifest file')
    parser_export.set_defaults(func=export_manifest_info)

    args = parser.parse_args()

    if args.debug:
        logger.setLevel(logging.DEBUG)
        logger.debug("Debug Enabled")
    elif args.quiet:
        logger.setLevel(logging.ERROR)

    ret = args.func(args)

    return ret
|
||||
|
||||
|
||||
if __name__ == "__main__":
    try:
        ret = main()
    except Exception:
        # Print the traceback for diagnosis but still exit non-zero so
        # callers (e.g. CI scripts) see the failure.
        ret = 1
        import traceback
        traceback.print_exc()
    sys.exit(ret)
|
||||
+167
@@ -0,0 +1,167 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (C) 2013 Wind River Systems, Inc.
|
||||
# Copyright (C) 2014 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
#
|
||||
# - list available recipes which have PACKAGECONFIG flags
|
||||
# - list available PACKAGECONFIG flags and all affected recipes
|
||||
# - list all recipes and PACKAGECONFIG information
|
||||
|
||||
import sys
|
||||
import optparse
|
||||
import os
|
||||
|
||||
|
||||
# Locate this script's directory and make the sibling ../lib directory
# importable so the local 'scriptpath' helper module can be loaded.
scripts_path = os.path.abspath(os.path.dirname(os.path.abspath(sys.argv[0])))
lib_path = os.path.abspath(scripts_path + '/../lib')
sys.path = sys.path + [lib_path]

import scriptpath

# For importing the following modules
bitbakepath = scriptpath.add_bitbake_lib_path()
if not bitbakepath:
    sys.stderr.write("Unable to find bitbake by searching parent directory of this script or PATH\n")
    sys.exit(1)
|
||||
|
||||
import bb.cooker
|
||||
import bb.providers
|
||||
import bb.tinfoil
|
||||
|
||||
def get_fnlist(bbhandler, pkg_pn, preferred):
    ''' Get all recipe file names '''
    if preferred:
        # Only the preferred provider of each recipe name.
        (latest_versions, preferred_versions, required_versions) = bb.providers.findProviders(bbhandler.config_data, bbhandler.cooker.recipecaches[''], pkg_pn)
        return [preferred_versions[pn][1] for pn in sorted(pkg_pn)]

    # Otherwise every known file for every recipe name, in sorted-name order.
    fn_list = []
    for pn in sorted(pkg_pn):
        fn_list.extend(pkg_pn[pn])
    return fn_list
|
||||
|
||||
def get_recipesdata(bbhandler, preferred):
    ''' Get data of all available recipes which have PACKAGECONFIG flags '''
    pkg_pn = bbhandler.cooker.recipecaches[''].pkg_pn

    data_dict = {}
    for filename in get_fnlist(bbhandler, pkg_pn, preferred):
        recipe_data = bbhandler.parse_recipe_file(filename)
        # 'doc' only documents the variable itself, so it does not count as
        # a real PACKAGECONFIG option.
        pcfg_flags = recipe_data.getVarFlags("PACKAGECONFIG")
        pcfg_flags.pop('doc', None)
        if pcfg_flags:
            data_dict[filename] = recipe_data
    return data_dict
|
||||
|
||||
def collect_pkgs(data_dict):
    ''' Collect available pkgs in which have PACKAGECONFIG flags '''
    # pkg_dict = {'pkg1': ['flag1', 'flag2',...]}
    pkg_dict = {}
    for fn, data in data_dict.items():
        flags = data.getVarFlags("PACKAGECONFIG")
        # 'doc' documents the variable, it is not a real option.
        flags.pop('doc', None)
        pkg_dict[data.getVar("PN")] = sorted(flags.keys())

    return pkg_dict
|
||||
|
||||
def collect_flags(pkg_dict):
    ''' Collect available PACKAGECONFIG flags and all affected pkgs '''
    # flag_dict = {'flag': ['pkg1', 'pkg2',...]}
    flag_dict = {}
    for pkgname, flaglist in pkg_dict.items():
        for flag in flaglist:
            # setdefault replaces the manual "if flag in dict" branching of
            # the original with the idiomatic grouping pattern.
            flag_dict.setdefault(flag, []).append(pkgname)

    return flag_dict
|
||||
|
||||
def display_pkgs(pkg_dict):
    ''' Display available pkgs which have PACKAGECONFIG flags '''
    # Column width: the longer of the header and the longest recipe name,
    # plus one space of padding.
    width = max([len("RECIPE NAME") + 1] + [len(name) for name in pkg_dict]) + 1

    header = '%-*s%s' % (width, str("RECIPE NAME"), str("PACKAGECONFIG FLAGS"))
    print(header)
    print('=' * len(header))
    for name in sorted(pkg_dict):
        print('%-*s%s' % (width, name, ' '.join(pkg_dict[name])))
|
||||
|
||||
|
||||
def display_flags(flag_dict):
    ''' Display available PACKAGECONFIG flags and all affected pkgs '''
    # Fixed column width: header length plus padding.
    width = len("PACKAGECONFIG FLAG") + 5

    header = '%-*s%s' % (width, str("PACKAGECONFIG FLAG"), str("RECIPE NAMES"))
    print(header)
    print('=' * len(header))

    for flag in sorted(flag_dict):
        print('%-*s%s' % (width, flag, ' '.join(sorted(flag_dict[flag]))))
|
||||
|
||||
def display_all(data_dict):
    ''' Display all pkgs and PACKAGECONFIG information '''
    print('=' * 50)
    for fn, data in data_dict.items():
        print('%s' % data.getVar("P"))
        print(fn)
        packageconfig = data.getVar("PACKAGECONFIG") or ''
        if not packageconfig.strip():
            packageconfig = 'None'
        print('PACKAGECONFIG %s' % packageconfig)

        for flag, flag_val in data.getVarFlags("PACKAGECONFIG").items():
            # 'doc' is documentation, not an option.
            if flag == "doc":
                continue
            print('PACKAGECONFIG[%s] %s' % (flag, flag_val))
        print('')
|
||||
|
||||
def main():
    """Parse options, gather recipe data via tinfoil and run the selected
    listing (recipes by default, flags with -f, everything with -a)."""
    pkg_dict = {}
    flag_dict = {}

    # Collect and validate input
    parser = optparse.OptionParser(
        description = "Lists recipes and PACKAGECONFIG flags. Without -a or -f, recipes and their available PACKAGECONFIG flags are listed.",
        usage = """
    %prog [options]""")

    parser.add_option("-f", "--flags",
            help = "list available PACKAGECONFIG flags and affected recipes",
            action="store_const", dest="listtype", const="flags", default="recipes")
    parser.add_option("-a", "--all",
            help = "list all recipes and PACKAGECONFIG information",
            action="store_const", dest="listtype", const="all")
    parser.add_option("-p", "--preferred-only",
            help = "where multiple recipe versions are available, list only the preferred version",
            action="store_true", dest="preferred", default=False)

    options, args = parser.parse_args(sys.argv)

    with bb.tinfoil.Tinfoil() as bbhandler:
        bbhandler.prepare()
        print("Gathering recipe data...")
        data_dict = get_recipesdata(bbhandler, options.preferred)

        if options.listtype == 'flags':
            pkg_dict = collect_pkgs(data_dict)
            flag_dict = collect_flags(pkg_dict)
            display_flags(flag_dict)
        elif options.listtype == 'recipes':
            pkg_dict = collect_pkgs(data_dict)
            display_pkgs(pkg_dict)
        elif options.listtype == 'all':
            display_all(data_dict)
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point.
    main()
|
||||
+121
@@ -0,0 +1,121 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# Send build performance test report emails
|
||||
#
|
||||
# Copyright (c) 2017, Intel Corporation.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import argparse
|
||||
import base64
|
||||
import logging
|
||||
import os
|
||||
import pwd
|
||||
import re
|
||||
import shutil
|
||||
import smtplib
|
||||
import socket
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from email.mime.text import MIMEText
|
||||
|
||||
|
||||
# Setup logging
|
||||
logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
|
||||
log = logging.getLogger('oe-build-perf-report')
|
||||
|
||||
|
||||
def parse_args(argv):
    """Parse command line arguments"""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description="""Email build perf test report""")

    parser.add_argument('--debug', '-d', action='store_true',
                        help="Verbose logging")
    parser.add_argument('--quiet', '-q', action='store_true',
                        help="Only print errors")
    parser.add_argument('--to', action='append',
                        help="Recipients of the email")
    parser.add_argument('--cc', action='append',
                        help="Carbon copy recipients of the email")
    parser.add_argument('--bcc', action='append',
                        help="Blind carbon copy recipients of the email")
    parser.add_argument('--subject', default="Yocto build perf test report",
                        help="Email subject")
    parser.add_argument('--outdir', '-o',
                        help="Store files in OUTDIR. Can be used to preserve "
                             "the email parts")
    parser.add_argument('--text',
                        help="Plain text message")

    args = parser.parse_args(argv)

    # --text is effectively mandatory; fail early with a usage message.
    if not args.text:
        parser.error("Please specify --text")

    return args
|
||||
|
||||
|
||||
def send_email(text_fn, subject, recipients, copy=None, blind_copy=None):
    """Send the plain-text report in *text_fn* via the local SMTP server.

    text_fn: path of the plain-text report body
    subject: email subject line
    recipients: list of To: addresses
    copy / blind_copy: optional lists of Cc: / Bcc: addresses

    The original used mutable default arguments (copy=[], blind_copy=[]);
    None sentinels avoid that pitfall and behave identically here since the
    values are only tested for truthiness.
    """
    # Generate email message
    with open(text_fn) as f:
        msg = MIMEText("Yocto build performance test report.\n" + f.read(), 'plain')

    # Derive the sender from the password database, unless EMAIL is set.
    pw_data = pwd.getpwuid(os.getuid())
    full_name = pw_data.pw_gecos.split(',')[0]
    email = os.environ.get('EMAIL',
                           '{}@{}'.format(pw_data.pw_name, socket.getfqdn()))
    msg['From'] = "{} <{}>".format(full_name, email)
    msg['To'] = ', '.join(recipients)
    if copy:
        msg['Cc'] = ', '.join(copy)
    if blind_copy:
        msg['Bcc'] = ', '.join(blind_copy)
    msg['Subject'] = subject

    # Send email
    with smtplib.SMTP('localhost') as smtp:
        smtp.send_message(msg)
|
||||
|
||||
|
||||
def main(argv=None):
    """Script entry point: parse args, send the report email, and clean up.
    Returns 0 on success, 1 if a subprocess invoked during sending fails."""
    args = parse_args(argv)
    if args.quiet:
        log.setLevel(logging.ERROR)
    if args.debug:
        log.setLevel(logging.DEBUG)

    # Use the requested output directory (creating it if needed), or a
    # throwaway temp dir under the cwd.
    if args.outdir:
        outdir = args.outdir
        if not os.path.exists(outdir):
            os.mkdir(outdir)
    else:
        outdir = tempfile.mkdtemp(dir='.')

    try:
        log.debug("Storing email parts in %s", outdir)
        if args.to:
            log.info("Sending email to %s", ', '.join(args.to))
        if args.cc:
            log.info("Copying to %s", ', '.join(args.cc))
        if args.bcc:
            log.info("Blind copying to %s", ', '.join(args.bcc))
        send_email(args.text, args.subject, args.to, args.cc, args.bcc)
    except subprocess.CalledProcessError as err:
        log.error("%s, with output:\n%s", str(err), err.output.decode())
        return 1
    finally:
        # Only remove the directory when we created a temporary one ourselves;
        # a user-supplied --outdir is preserved.
        if not args.outdir:
            log.debug("Wiping %s", outdir)
            shutil.rmtree(outdir)

    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
|
||||
Executable
+241
@@ -0,0 +1,241 @@
|
||||
#! /usr/bin/env python3
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
# TODO
|
||||
# - option to just list all broken files
|
||||
# - test suite
|
||||
# - validate signed-off-by
|
||||
|
||||
# Recognised (lower-cased) Upstream-Status values a patch header may carry.
status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied", "inactive-upstream")
|
||||
|
||||
class Result:
    """Per-patch review findings.

    The attributes below are class-level defaults; patchreview() overwrites
    them on individual instances as tags are found in each patch file. All
    defaults are immutable, so sharing them at class level is safe.
    """
    # Whether the patch has an Upstream-Status or not
    missing_upstream_status = False
    # If the Upstream-Status tag is malformed in some way (string for bad bit)
    malformed_upstream_status = None
    # If the Upstream-Status value is unknown (boolean)
    unknown_upstream_status = False
    # The upstream status value (Pending, etc)
    upstream_status = None
    # Whether the patch has a Signed-off-by or not
    missing_sob = False
    # Whether the Signed-off-by tag is malformed in some way
    malformed_sob = False
    # The Signed-off-by tag value
    sob = None
    # Whether a patch looks like a CVE but doesn't have a CVE tag
    missing_cve = False
|
||||
|
||||
def blame_patch(patch):
    """
    From a patch filename, return a list of "commit summary (author name <author
    email>)" strings representing the history.
    """
    import subprocess
    cmd = ("git", "log",
           "--follow", "--find-renames", "--diff-filter=A",
           "--format=%s (%aN <%aE>)",
           "--", patch)
    output = subprocess.check_output(cmd)
    return output.decode("utf-8").splitlines()
|
||||
|
||||
def patchreview(path, patches):
    """Scan each patch file in *patches* (paths relative to *path*) for the
    Signed-off-by, Upstream-Status and CVE tags, returning a dict mapping
    the full patch path to a populated Result.
    """
    import re, os.path

    # General pattern: start of line, optional whitespace, tag with optional
    # hyphen or spaces, maybe a colon, some whitespace, then the value, all case
    # insensitive.
    sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE)
    status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)", re.IGNORECASE | re.MULTILINE)
    cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE)
    cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE)

    results = {}

    for patch in patches:

        fullpath = os.path.join(path, patch)
        result = Result()
        results[fullpath] = result

        # Use a context manager so the file handle is closed promptly
        # (the original opened the file without ever closing it).
        with open(fullpath, encoding='ascii', errors='ignore') as f:
            content = f.read()

        # Find the Signed-off-by tag
        match = sob_re.search(content)
        if match:
            value = match.group(1)
            if value != "Signed-off-by:":
                result.malformed_sob = value
            result.sob = match.group(2)
        else:
            result.missing_sob = True

        # Find the Upstream-Status tag
        match = status_re.search(content)
        if match:
            value = match.group(1)
            if value != "Upstream-Status:":
                result.malformed_upstream_status = value

            value = match.group(2).lower()
            # TODO: check case
            if value not in status_values:
                result.unknown_upstream_status = True
            result.upstream_status = value
        else:
            result.missing_upstream_status = True

        # Check that patches which looks like CVEs have CVE tags
        if cve_re.search(patch) or cve_re.search(content):
            if not cve_tag_re.search(content):
                result.missing_cve = True
            # TODO: extract CVE list

    return results
|
||||
|
||||
|
||||
def analyse(results, want_blame=False, verbose=True):
    """
    want_blame: display blame data for each malformed patch
    verbose: display per-file results instead of just summary
    """

    # want_blame requires verbose, so disable blame if we're not verbose
    if want_blame and not verbose:
        want_blame = False

    total_patches = 0
    missing_sob = 0
    malformed_sob = 0
    missing_status = 0
    malformed_status = 0
    missing_cve = 0
    pending_patches = 0

    for patch in sorted(results):
        r = results[patch]
        total_patches += 1
        need_blame = False

        # Build statistics
        if r.missing_sob:
            missing_sob += 1
        if r.malformed_sob:
            malformed_sob += 1
        if r.missing_upstream_status:
            missing_status += 1
        if r.malformed_upstream_status or r.unknown_upstream_status:
            malformed_status += 1
            # Count patches with no status as pending
            # NOTE(review): a malformed tag whose value is actually "pending"
            # is counted again below, double-counting it — confirm intended.
            pending_patches +=1
        if r.missing_cve:
            missing_cve += 1
        if r.upstream_status == "pending":
            pending_patches += 1

        # Output warnings
        if r.missing_sob:
            need_blame = True
            if verbose:
                print("Missing Signed-off-by tag (%s)" % patch)
        if r.malformed_sob:
            need_blame = True
            if verbose:
                print("Malformed Signed-off-by '%s' (%s)" % (r.malformed_sob, patch))
        if r.missing_cve:
            need_blame = True
            if verbose:
                print("Missing CVE tag (%s)" % patch)
        if r.missing_upstream_status:
            need_blame = True
            if verbose:
                print("Missing Upstream-Status tag (%s)" % patch)
        if r.malformed_upstream_status:
            need_blame = True
            if verbose:
                print("Malformed Upstream-Status '%s' (%s)" % (r.malformed_upstream_status, patch))
        if r.unknown_upstream_status:
            need_blame = True
            if verbose:
                print("Unknown Upstream-Status value '%s' (%s)" % (r.upstream_status, patch))

        if want_blame and need_blame:
            print("\n".join(blame_patch(patch)) + "\n")

    def percent(num):
        # Format a count with its share of total_patches; N/A when empty.
        try:
            return "%d (%d%%)" % (num, round(num * 100.0 / total_patches))
        except ZeroDivisionError:
            return "N/A"

    if verbose:
        print()

    print("""Total patches found: %d
Patches missing Signed-off-by: %s
Patches with malformed Signed-off-by: %s
Patches missing CVE: %s
Patches missing Upstream-Status: %s
Patches with malformed Upstream-Status: %s
Patches in Pending state: %s""" % (total_patches,
                                   percent(missing_sob),
                                   percent(malformed_sob),
                                   percent(missing_cve),
                                   percent(missing_status),
                                   percent(malformed_status),
                                   percent(pending_patches)))
|
||||
|
||||
|
||||
|
||||
def histogram(results):
    """Print an ASCII bar chart of Upstream-Status value frequencies.

    NOTE: depends on the third-party 'toolz' package (imported lazily so the
    rest of the tool works without it).
    """
    from toolz import recipes, dicttoolz
    import math
    # Bucket results by status value, then scale each bucket to a 0-100 bar.
    counts = recipes.countby(lambda r: r.upstream_status, results.values())
    bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts)
    for k in bars:
        print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k]))
|
||||
|
||||
|
||||
if __name__ == "__main__":
    import argparse, subprocess, os

    args = argparse.ArgumentParser(description="Patch Review Tool")
    args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches")
    args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results")
    args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram")
    args.add_argument("-j", "--json", help="update JSON")
    args.add_argument("directory", help="directory to scan")
    args = args.parse_args()

    # Only review patches tracked by git under the standard recipes-* layout.
    patches = subprocess.check_output(("git", "-C", args.directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff")).decode("utf-8").split()
    results = patchreview(args.directory, patches)
    analyse(results, want_blame=args.blame, verbose=args.verbose)

    if args.json:
        # Append a per-commit summary row to the (possibly existing) JSON file.
        import json, os.path, collections
        if os.path.isfile(args.json):
            data = json.load(open(args.json))
        else:
            data = []

        row = collections.Counter()
        row["total"] = len(results)
        row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"]).decode("utf-8").strip()
        row["commit"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%H"]).decode("utf-8").strip()
        for r in results.values():
            if r.upstream_status in status_values:
                row[r.upstream_status] += 1
            if r.malformed_upstream_status or r.missing_upstream_status:
                row['malformed-upstream-status'] += 1
            if r.malformed_sob or r.missing_sob:
                row['malformed-sob'] += 1

        data.append(row)
        json.dump(data, open(args.json, "w"), sort_keys=True, indent="\t")

    if args.histogram:
        print()
        histogram(results)
|
||||
Executable
+104
@@ -0,0 +1,104 @@
|
||||
#!/bin/bash
#
# patchtest: Run patchtest on commits starting at master
#
# Copyright (c) 2017, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#

set -o errexit

# Default values
pokydir=''

# Print usage and exit non-zero.
usage() {
    CMD=$(basename $0)
    cat <<EOM
Usage: $CMD [-h] [-p pokydir]
  -p pokydir  Defaults to current directory
EOM
# NOTE(review): this '>&2' is a standalone command redirecting nothing —
# presumably it was meant to be appended to the 'cat' above; confirm.
>&2
    exit 1
}

# Clone REPOREMOTE into REPODIR, or pull if the checkout already exists.
function clone() {
    local REPOREMOTE=$1
    local REPODIR=$2
    if [ ! -d $REPODIR ]; then
        git clone $REPOREMOTE $REPODIR --quiet
    else
        ( cd $REPODIR; git pull --quiet )
    fi
}

while getopts ":p:h" opt; do
    case $opt in
    p)
        pokydir=$OPTARG
        ;;
    h)
        usage
        ;;
    \?)
        echo "Invalid option: -$OPTARG" >&2
        usage
        ;;
    :)
        echo "Option -$OPTARG requires an argument." >&2
        usage
        ;;
    esac
done
shift $((OPTIND-1))

CDIR="$PWD"

# default pokydir to current directory if user did not specify one
if [ -z "$pokydir" ]; then
    pokydir="$CDIR"
fi

# Virtualenv layout: patchtest and patchtest-oe are checked out inside it.
PTENV="$PWD/patchtest"
PT="$PTENV/patchtest"
PTOE="$PTENV/patchtest-oe"

if ! which virtualenv > /dev/null; then
    echo "Install virtualenv before proceeding"
    exit 1;
fi

# activate the virtual env
virtualenv $PTENV --quiet
source $PTENV/bin/activate

cd $PTENV

# clone or pull
clone git://git.yoctoproject.org/patchtest $PT
clone git://git.yoctoproject.org/patchtest-oe $PTOE

# install requirements
pip install -r $PT/requirements.txt --quiet
pip install -r $PTOE/requirements.txt --quiet

PATH="$PT:$PT/scripts:$PATH"

# loop through parent to HEAD and execute patchtest on each commit
for commit in $(git rev-list master..HEAD --reverse)
do
    shortlog="$(git log "$commit^1..$commit" --pretty='%h: %aN: %cd: %s')"
    # create-summary prints only failures; empty output means the commit passed.
    log="$(git format-patch "$commit^1..$commit" --stdout | patchtest - -r $pokydir -s $PTOE/tests --base-commit $commit^1 --json 2>/dev/null | create-summary --fail --only-results)"
    if [ -z "$log" ]; then
        shortlog="$shortlog: OK"
    else
        shortlog="$shortlog: FAIL"
    fi
    echo "$shortlog"
    echo "$log" | sed -n -e '/Issue/p' -e '/Suggested fix/p'
    echo ""
done

deactivate

cd $CDIR
|
||||
Executable
+61
@@ -0,0 +1,61 @@
|
||||
#!/bin/sh

# Copyright (C) 2014 Intel Corporation
#
# SPDX-License-Identifier: MIT
#

if [ "$1" = "" -o "$1" = "--help" ] ; then
    echo "Usage: $0 <serial terminal command>"
    echo
    echo "Simple script to handle maintaining a terminal for serial devices that"
    echo "disappear when a device is powered down or reset, such as the USB"
    echo "serial console on the original BeagleBone (white version)."
    echo
    echo "e.g. $0 picocom -b 115200 /dev/ttyUSB0"
    echo
    exit
fi

# Find the first /dev/* argument in the terminal command: that is the
# device node we will wait on.
args="$@"
DEVICE=""
while [ "$1" != "" ]; do
    case "$1" in
        /dev/*)
            DEVICE=$1
            break;;
    esac
    shift
done

if [ "$DEVICE" != "" ] ; then
    # Re-run the terminal command each time the device reappears; stop only
    # when the terminal exits while the device still exists (normal quit).
    while true; do
        if [ ! -e $DEVICE ] ; then
            echo "serdevtry: waiting for $DEVICE to exist..."
            while [ ! -e $DEVICE ]; do
                sleep 0.1
            done
        fi
        if [ ! -w $DEVICE ] ; then
            # Sometimes (presumably because of a race with udev) we get to
            # the device before its permissions have been set up
            RETRYNUM=0
            while [ ! -w $DEVICE ]; do
                if [ "$RETRYNUM" = "2" ] ; then
                    echo "Device $DEVICE exists but is not writable!"
                    exit 1
                fi
                RETRYNUM=$((RETRYNUM+1))
                sleep 0.1
            done
        fi
        $args
        # If the device node survived the terminal session, the user quit
        # deliberately; otherwise loop and wait for it to come back.
        if [ -e $DEVICE ] ; then
            break
        fi
    done
else
    echo "Unable to determine device node from command: $args"
    exit 1
fi
|
||||
|
||||
Executable
+223
@@ -0,0 +1,223 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Build performance regression test script
|
||||
#
|
||||
# Copyright 2011 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
#
|
||||
# DESCRIPTION
|
||||
# This script is intended to be used in conjunction with "git bisect run"
|
||||
# in order to find regressions in build time, however it can also be used
|
||||
# independently. It cleans out the build output directories, runs a
|
||||
# specified worker script (an example is test_build_time_worker.sh) under
|
||||
# TIME(1), logs the results to TEST_LOGDIR (default /tmp) and returns a
|
||||
# value telling "git bisect run" whether the build time is good (under
|
||||
# the specified threshold) or bad (over it). There is also a tolerance
|
||||
# option but it is not particularly useful as it only subtracts the
|
||||
# tolerance from the given threshold and uses it as the actual threshold.
|
||||
#
|
||||
# It is also capable of taking a file listing git revision hashes to be
|
||||
# test-applied to the repository in order to get past build failures that
|
||||
# would otherwise cause certain revisions to have to be skipped; if a
|
||||
# revision does not apply cleanly then the script assumes it does not
|
||||
# need to be applied and ignores it.
|
||||
#
|
||||
# Please see the help output (syntax below) for some important setup
|
||||
# instructions.
|
||||
#
|
||||
# AUTHORS
|
||||
# Paul Eggleton <paul.eggleton@linux.intel.com>
|
||||
|
||||
|
||||
# Print usage/help text describing arguments and the required environment
# (TEST_BUILDDIR, sudoers entry for dropping VM caches).
syntax() {
    echo "syntax: $0 <script> <time> <tolerance> [patchrevlist]"
    echo ""
    echo "  script - worker script file (if in current dir, prefix with ./)"
    echo "  time - time threshold (in seconds, suffix m for minutes)"
    echo "  tolerance - tolerance (in seconds, suffix m for minutes or % for"
    echo "              percentage, can be 0)"
    echo "  patchrevlist - optional file listing revisions to apply as patches on top"
    echo ""
    echo "You must set TEST_BUILDDIR to point to a previously created build directory,"
    echo "however please note that this script will wipe out the TMPDIR defined in"
    echo "TEST_BUILDDIR/conf/local.conf as part of its initial setup (as well as your"
    echo "~/.ccache)"
    echo ""
    echo "To get rid of the sudo prompt, please add the following line to /etc/sudoers"
    echo "(use 'visudo' to edit this; also it is assumed that the user you are running"
    echo "as is a member of the 'wheel' group):"
    echo ""
    echo "%wheel ALL=(ALL) NOPASSWD: /sbin/sysctl -w vm.drop_caches=[1-3]"
    echo ""
    echo "Note: it is recommended that you disable crond and any other process that"
    echo "may cause significant CPU or I/O usage during build performance tests."
}
|
||||
|
||||
# Note - we exit with 250 here because that will tell git bisect run that
# something bad happened and stop
if [ "$1" = "" ] ; then
    syntax
    exit 250
fi

if [ "$2" = "" ] ; then
    syntax
    exit 250
fi

if [ "$3" = "" ] ; then
    syntax
    exit 250
fi

# Threshold: digits optionally followed by digits, 'm' (minutes) or '.'
if ! [[ "$2" =~ ^[0-9][0-9m.]*$ ]] ; then
    echo "'$2' is not a valid number for threshold"
    exit 250
fi

# Tolerance additionally allows a '%' (percentage of threshold)
if ! [[ "$3" =~ ^[0-9][0-9m.%]*$ ]] ; then
    echo "'$3' is not a valid number for tolerance"
    exit 250
fi

if [ "$TEST_BUILDDIR" = "" ] ; then
    echo "Please set TEST_BUILDDIR to a previously created build directory"
    exit 250
fi

if [ ! -d "$TEST_BUILDDIR" ] ; then
    echo "TEST_BUILDDIR $TEST_BUILDDIR not found"
    exit 250
fi
|
||||
|
||||
# Refuse to run with local modifications: the script applies patches and
# hard-resets the tree afterwards, which would destroy them.
git diff --quiet
if [ $? != 0 ] ; then
    echo "Working tree is dirty, cannot proceed"
    exit 251
fi

# FIX: the variable must be dereferenced with '$'; the original compared the
# literal string "BB_ENV_PASSTHROUGH_ADDITIONS", which is always non-empty,
# so the warning fired unconditionally instead of only after sourcing the
# build environment script.
if [ "$BB_ENV_PASSTHROUGH_ADDITIONS" != "" ] ; then
    echo "WARNING: you are running after sourcing the build environment script, this is not recommended"
fi
|
||||
|
||||
runscript=$1
timethreshold=$2
tolerance=$3

# Optional fourth argument: file listing revisions to apply as patches.
if [ "$4" != "" ] ; then
    patchrevlist=`cat $4`
else
    patchrevlist=""
fi

# Convert an 'm' (minutes) suffix on the threshold to seconds.
# FIX: the original tested the literal string [[ timethreshold == *m* ]]
# (missing '$'), so the minute suffix on the threshold was never converted.
if [[ $timethreshold == *m* ]] ; then
    timethreshold=`echo $timethreshold | sed s/m/*60/ | bc`
fi

# Tolerance: 'm' suffix means minutes; '%' means percentage of the threshold.
if [[ $tolerance == *m* ]] ; then
    tolerance=`echo $tolerance | sed s/m/*60/ | bc`
elif [[ $tolerance == *%* ]] ; then
    tolerance=`echo $tolerance | sed s/%//`
    tolerance=`echo "scale = 2; (($tolerance * $timethreshold) / 100)" | bc`
fi
|
||||
|
||||
# Extract TMPDIR and SSTATE_DIR values from the build's local.conf.
tmpdir=`grep "^TMPDIR" $TEST_BUILDDIR/conf/local.conf | sed -e 's/TMPDIR[ \t]*=[ \t\?]*"//' -e 's/"//'`
if [ "x$tmpdir" = "x" ]; then
    echo "Unable to determine TMPDIR from $TEST_BUILDDIR/conf/local.conf, bailing out"
    exit 250
fi
sstatedir=`grep "^SSTATE_DIR" $TEST_BUILDDIR/conf/local.conf | sed -e 's/SSTATE_DIR[ \t\?]*=[ \t]*"//' -e 's/"//'`
if [ "x$sstatedir" = "x" ]; then
    echo "Unable to determine SSTATE_DIR from $TEST_BUILDDIR/conf/local.conf, bailing out"
    exit 250
fi

# Guard against catastrophic 'rm -rf' of very short (e.g. root-ish) paths.
if [ `expr length $tmpdir` -lt 4 ] ; then
    echo "TMPDIR $tmpdir is less than 4 characters, bailing out"
    exit 250
fi

if [ `expr length $sstatedir` -lt 4 ] ; then
    echo "SSTATE_DIR $sstatedir is less than 4 characters, bailing out"
    exit 250
fi

# Last chance to abort before wiping build output (9-second countdown).
echo -n "About to wipe out TMPDIR $tmpdir, press Ctrl+C to break out... "
for i in 9 8 7 6 5 4 3 2 1
do
    echo -ne "\x08$i"
    sleep 1
done
echo

pushd . > /dev/null

# Clean all build output so each bisect step measures a from-scratch build.
rm -f pseudodone
echo "Removing TMPDIR $tmpdir..."
rm -rf $tmpdir
echo "Removing TMPDIR $tmpdir-*libc..."
rm -rf $tmpdir-*libc
echo "Removing SSTATE_DIR $sstatedir..."
rm -rf $sstatedir
echo "Removing ~/.ccache..."
rm -rf ~/.ccache

# Flush dirty pages and drop the VM cache so timings start from a cold cache.
echo "Syncing..."
sync
sync
echo "Dropping VM cache..."
#echo 3 > /proc/sys/vm/drop_caches
sudo /sbin/sysctl -w vm.drop_caches=3 > /dev/null

if [ "$TEST_LOGDIR" = "" ] ; then
    logdir="/tmp"
else
    logdir="$TEST_LOGDIR"
fi
rev=`git rev-parse HEAD`
logfile="$logdir/timelog_$rev.log"
echo -n > $logfile

# Test-apply each listed revision; skip silently any that do not apply
# cleanly (assumed already present at this revision).
gitroot=`git rev-parse --show-toplevel`
cd $gitroot
for patchrev in $patchrevlist ; do
    echo "Applying $patchrev"
    patchfile=`mktemp`
    git show $patchrev > $patchfile
    git apply --check $patchfile &> /dev/null
    if [ $? != 0 ] ; then
        echo " ... patch does not apply without errors, ignoring"
    else
        echo "Applied $patchrev" >> $logfile
        git apply $patchfile &> /dev/null
    fi
    rm $patchfile
done

sync
echo "Quiescing for 5s..."
sleep 5

# Run the worker under TIME(1); first line of the output file is the
# elapsed seconds, used for the threshold comparison below.
echo "Running $runscript at $rev..."
timeoutfile=`mktemp`
/usr/bin/time -o $timeoutfile -f "%e\nreal\t%E\nuser\t%Us\nsys\t%Ss\nmaxm\t%Mk" $runscript 2>&1 | tee -a $logfile
exitstatus=$PIPESTATUS

# Drop the test-applied patches again.
git reset --hard HEAD > /dev/null
popd > /dev/null

timeresult=`head -n1 $timeoutfile`
cat $timeoutfile | tee -a $logfile
rm $timeoutfile

if [ $exitstatus != 0 ] ; then
    # Build failed, exit with 125 to tell git bisect run to skip this rev
    echo "*** Build failed (exit code $exitstatus), skipping..." | tee -a $logfile
    exit 125
fi

# Exit 1 ("bad") when the measured time exceeds threshold - tolerance,
# 0 ("good") otherwise — exactly what git bisect run expects.
ret=`echo "scale = 2; $timeresult > $timethreshold - $tolerance" | bc`
echo "Returning $ret" | tee -a $logfile
exit $ret
|
||||
|
||||
+41
@@ -0,0 +1,41 @@
|
||||
#!/bin/bash
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
# This is an example script to be used in conjunction with test_build_time.sh

# The caller (test_build_time.sh) must export TEST_BUILDDIR.
if [ "$TEST_BUILDDIR" = "" ] ; then
    echo "TEST_BUILDDIR is not set"
    exit 1
fi

buildsubdir=`basename $TEST_BUILDDIR`
if [ ! -d $buildsubdir ] ; then
    echo "Unable to find build subdir $buildsubdir in current directory"
    exit 1
fi

# Source whichever environment setup script this tree provides (OE or poky).
if [ -f oe-init-build-env ] ; then
    . ./oe-init-build-env $buildsubdir
elif [ -f poky-init-build-env ] ; then
    . ./poky-init-build-env $buildsubdir
else
    echo "Unable to find build environment setup script"
    exit 1
fi

# Older trees named the sato image 'poky-image-sato'; pick whichever exists.
if [ -f ../meta/recipes-sato/images/core-image-sato.bb ] ; then
    target="core-image-sato"
else
    target="poky-image-sato"
fi

echo "Build started at `date "+%Y-%m-%d %H:%M:%S"`"
echo "bitbake $target"
bitbake $target
ret=$?
echo "Build finished at `date "+%Y-%m-%d %H:%M:%S"`"
exit $ret
|
||||
|
||||
Executable
+26
@@ -0,0 +1,26 @@
|
||||
#!/bin/bash -eur
#
# Find python modules uncovered by oe-seltest
#
# Copyright (c) 2016, Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Author: Ed Bartosh <ed.bartosh@linux.intel.com>
#

# Require exactly one directory argument and a coverage report piped on
# stdin ('-t 0' rejects an interactive terminal as stdin).
if [ ! "$#" -eq 1 -o -t 0 ] ; then
    echo 'Usage: coverage report | ./scripts/contrib/uncovered <dir>' 1>&2
    exit 1
fi

path=$(readlink -ev $1)

if [ ! -d "$path" ] ; then
    echo "directory $1 doesn't exist" 1>&2
    exit 1
fi

# Diff the files mentioned in the coverage report (excluding 0%-covered
# ones) against all Python scripts under the directory; lines present only
# in the latter ('+'-prefixed) are the uncovered modules.
diff -u <(grep "$path" | grep -v '0%$' | cut -f1 -d: | sort) \
        <(find $path | xargs file | grep 'Python script' | cut -f1 -d:| sort) | \
        grep "^+$path" | cut -c2-
|
||||
Executable
+66
@@ -0,0 +1,66 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
# This script can be used to verify HOMEPAGE values for all recipes in
|
||||
# the current configuration.
|
||||
# The result is influenced by network environment, since the timeout of connect url is 5 seconds as default.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import subprocess
|
||||
import urllib.request
|
||||
|
||||
|
||||
# Allow importing scripts/lib modules
|
||||
scripts_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/..')
|
||||
lib_path = scripts_path + '/lib'
|
||||
sys.path = sys.path + [lib_path]
|
||||
import scriptpath
|
||||
import scriptutils
|
||||
|
||||
# Allow importing bitbake modules
|
||||
bitbakepath = scriptpath.add_bitbake_lib_path()
|
||||
|
||||
import bb.tinfoil
|
||||
|
||||
logger = scriptutils.logger_create('verify_homepage')
|
||||
|
||||
def wgetHomepage(pn, homepage):
    """Re-check *homepage* with 'wget --spider' as a fallback verifier.

    Returns 0 if wget can reach the URL; otherwise logs a warning for
    recipe *pn* and returns 1 so callers can sum failures.
    """
    # SECURITY FIX: pass the URL as a single argv element instead of
    # building a shell command string (shell=True), so shell metacharacters
    # in a HOMEPAGE value cannot be interpreted by the shell.
    try:
        result = subprocess.call(['wget', '-q', '-T', '5', '-t', '1', '--spider', homepage])
    except OSError:
        # wget not installed: with the old shell=True form this surfaced as
        # a non-zero exit status, so treat it as a verification failure too.
        result = 1
    if result:
        logger.warning("%s: failed to verify HOMEPAGE: %s " % (pn, homepage))
        return 1
    else:
        return 0
|
||||
|
||||
def verifyHomepage(bbhandler):
    """Verify the HOMEPAGE of every recipe in the current configuration.

    Tries urllib first (5s timeout) and falls back to wgetHomepage() on any
    failure. Returns the number of recipes whose HOMEPAGE was unreachable.
    """
    pkg_pn = bbhandler.cooker.recipecaches[''].pkg_pn
    failures = 0
    seen = []
    for pn in sorted(pkg_pn):
        for fn in pkg_pn[pn]:
            # There's no point checking multiple BBCLASSEXTENDed variants of the same recipe
            realfn, _, _ = bb.cache.virtualfn2realfn(fn)
            if realfn in seen:
                continue
            recipe_data = bbhandler.parse_recipe_file(realfn)
            homepage = recipe_data.getVar("HOMEPAGE")
            if homepage:
                try:
                    urllib.request.urlopen(homepage, timeout=5)
                except Exception:
                    failures += wgetHomepage(os.path.basename(realfn), homepage)
            seen.append(realfn)
    return failures
|
||||
|
||||
if __name__ == '__main__':
    # Tinfoil provides a prepared bitbake cooker for parsing recipes.
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare()
        logger.info("Start verifying HOMEPAGE:")
        failed = verifyHomepage(tinfoil)
        logger.info("Finished verifying HOMEPAGE.")
        logger.info("Summary: %s failed" % failed)
|
||||
Executable
+56
@@ -0,0 +1,56 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
# Allow copying of $1 to $2 but if files in $1 disappear during the copy operation,
|
||||
# don't error.
|
||||
# Also don't error if $1 disappears.
|
||||
#
|
||||
|
||||
import sys
|
||||
import os
|
||||
import shutil
|
||||
|
||||
def copytree(src, dst, symlinks=False, ignore=None):
    """Copy the contents of directory *src* into *dst* (based on shutil.copytree).

    Unlike shutil.copytree: an existing *dst* is not an error, existing
    destination entries are skipped, and a hard link is attempted before
    falling back to a real copy. Per-file failures are collected and raised
    at the end as a single shutil.Error.

    NOTE(review): *symlinks* and *ignore* are accepted for signature
    compatibility with shutil.copytree but are currently unused.
    """
    names = os.listdir(src)
    try:
        os.makedirs(dst)
    except OSError:
        # Already exists
        pass
    errors = []
    for name in names:
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            d = dstname
            if os.path.isdir(dstname):
                d = os.path.join(dstname, os.path.basename(srcname))
            if os.path.exists(d):
                continue
            try:
                # Hard-link when possible (cheap); fall back to a full copy.
                os.link(srcname, dstname)
            except OSError:
                shutil.copy2(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except shutil.Error as err:
            errors.extend(err.args[0])
        except EnvironmentError as why:
            errors.append((srcname, dstname, str(why)))
    try:
        shutil.copystat(src, dst)
    except OSError as why:
        # FIX: append the (src, dst, msg) tuple as one entry; the original
        # used extend(), which flattened it into three bare strings and
        # produced a malformed shutil.Error payload.
        errors.append((src, dst, str(why)))
    if errors:
        raise shutil.Error(errors)
|
||||
|
||||
# Best-effort entry point: sys.argv[1] is the source tree, sys.argv[2] the
# destination. Races where individual files (or the whole source) vanish
# mid-copy are expected and deliberately ignored.
try:
    copytree(sys.argv[1], sys.argv[2])
except (shutil.Error, OSError):
    pass
|
||||
Executable
+282
@@ -0,0 +1,282 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright (c) 2010-2013, Intel Corporation.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
#
|
||||
|
||||
#
|
||||
# This script is intended to be used to prepare a series of patches
|
||||
# and a cover letter in an appropriate and consistent format for
|
||||
# submission to Open Embedded and The Yocto Project, as well as to
|
||||
# related projects and layers.
|
||||
#
|
||||
|
||||
# Defaults; most can be overridden by command-line options below.
ODIR=pull-$$
RELATIVE_TO="master"
COMMIT_ID="HEAD"
PREFIX="PATCH"
RFC=0

# Print usage/help text with worked examples.
usage() {
    CMD=$(basename $0)
    cat <<EOM
Usage: $CMD [-h] [-o output_dir] [-m msg_body_file] [-s subject] [-r relative_to] [-i commit_id] [-d relative_dir] -u remote [-b branch] [-- <format-patch options>]
  -b branch           Branch name in the specified remote (default: current branch)
  -l local branch     Local branch name (default: HEAD)
  -c                  Create an RFC (Request for Comment) patch series
  -h                  Display this help message
  -a                  Automatically push local branch (-l) to remote branch (-b),
                      or set CPR_CONTRIB_AUTO_PUSH in env
  -i commit_id        Ending commit (default: HEAD)
  -m msg_body_file    The file containing a blurb to be inserted into the summary email
  -o output_dir       Specify the output directory for the messages (default: pull-PID)
  -p prefix           Use [prefix N/M] instead of [PATCH N/M] as the subject prefix
  -r relative_to      Starting commit (default: master)
  -s subject          The subject to be inserted into the summary email
  -u remote           The git remote where the branch is located, or set CPR_CONTRIB_REMOTE in env
  -d relative_dir     Generate patches relative to directory

 Examples:
   $CMD -u contrib -b nitin/basic
   $CMD -u contrib -r distro/master -i nitin/distro -b nitin/distro
   $CMD -u contrib -r distro/master -i nitin/distro -b nitin/distro -l distro
   $CMD -u contrib -r master -i misc -b nitin/misc -o pull-misc
   $CMD -u contrib -p "RFC PATCH" -b nitin/experimental
   $CMD -u contrib -i misc -b nitin/misc -d ./bitbake
   $CMD -u contrib -r origin/master -o /tmp/out.v3 -- -v3 --in-reply-to=20170511120134.XX7799@site.com
EOM
}
|
||||
|
||||
# Remote may come from the environment; -u overrides it.
REMOTE="$CPR_CONTRIB_REMOTE"
# Parse and validate arguments
while getopts "b:acd:hi:m:o:p:r:s:u:l:" OPT; do
    case $OPT in
    b)
        BRANCH="$OPTARG"
        ;;
    l)
        L_BRANCH="$OPTARG"
        ;;
    c)
        RFC=1
        ;;
    d)
        RELDIR="$OPTARG"
        ;;
    h)
        usage
        exit 0
        ;;
    i)
        COMMIT_ID="$OPTARG"
        ;;
    m)
        BODY="$OPTARG"
        if [ ! -e "$BODY" ]; then
            echo "ERROR: Body file does not exist"
            exit 1
        fi
        ;;
    o)
        ODIR="$OPTARG"
        ;;
    p)
        PREFIX="$OPTARG"
        ;;
    r)
        RELATIVE_TO="$OPTARG"
        ;;
    s)
        SUBJECT="$OPTARG"
        ;;
    u)
        REMOTE="$OPTARG"
        ;;
    a)
        CPR_CONTRIB_AUTO_PUSH="1"
        ;;
    # NOTE(review): getopts stops at '--' itself, so this case is
    # presumably never matched; everything after '--' lands in extraopts
    # via the shift below anyway. Confirm before removing.
    --)
        shift
        break
        ;;
    esac
done

# Remaining arguments are passed straight to git format-patch.
shift "$((OPTIND - 1))"
extraopts="$@"
|
||||
|
||||
# A remote is mandatory (from -u or CPR_CONTRIB_REMOTE in the environment).
if [ -z "$REMOTE" ]; then
    echo "ERROR: Missing parameter -u or CPR_CONTRIB_REMOTE in env, no git remote!"
    usage
    exit 1
fi

# The remote must have a configured URL; it is needed for request-pull
# and for constructing the public WEB_URL below.
REMOTE_URL=$(git config remote.$REMOTE.url)
if [ $? -ne 0 ]; then
    echo "ERROR: git config failed to find a url for '$REMOTE'"
    echo
    echo "To add a remote url for $REMOTE, use:"
    echo "  git config remote.$REMOTE.url <url>"
    exit 1
fi
|
||||
|
||||
# Rewrite private URLs to public URLs
# Determine the repository name for use in the WEB_URL later
# GIT_RE captures: \2 protocol, \3 user@, \4 host, \5 repo path.
USER_RE="[A-Za-z0-9_.@][A-Za-z0-9_.@-]*\$\?"
PROTO_RE="[a-z][a-z+]*://"
GIT_RE="\(^\($PROTO_RE\)\?\)\($USER_RE@\)\?\([^:/]*\)[:/]\(.*\)"
REMOTE_URL=${REMOTE_URL%.git}
REMOTE_REPO=$(echo $REMOTE_URL | sed "s#$GIT_RE#\5#")
REMOTE_URL=$(echo $REMOTE_URL | sed "s#$GIT_RE#https://\4/\5#")

# Default the remote branch to the currently checked-out branch.
if [ -z "$BRANCH" ]; then
    BRANCH=$(git branch | grep -e "^\* " | cut -d' ' -f2)
    echo "NOTE: Assuming remote branch '$BRANCH', use -b to override."
fi

if [ -z "$L_BRANCH" ]; then
    L_BRANCH=HEAD
    echo "NOTE: Assuming local branch HEAD, use -l to override."
fi

# -c prepends "RFC " to the subject prefix.
if [ $RFC -eq 1 ]; then
    PREFIX="RFC $PREFIX"
fi
|
||||
|
||||
|
||||
# Set WEB_URL from known remotes
WEB_URL=""
case "$REMOTE_URL" in
    *git.yoctoproject.org*)
        WEB_URL="http://git.yoctoproject.org/cgit.cgi/$REMOTE_REPO/log/?h=$BRANCH"
        ;;
    *git.pokylinux.org*)
        WEB_URL="http://git.pokylinux.org/cgit.cgi/$REMOTE_REPO/log/?h=$BRANCH"
        ;;
    *git.openembedded.org*)
        WEB_URL="http://cgit.openembedded.org/$REMOTE_REPO/log/?h=$BRANCH"
        ;;
    *github.com*)
        WEB_URL="https://github.com/$REMOTE_REPO/tree/$BRANCH"
        ;;
esac

# Perform a sanity test on the web URL. Issue a warning if it is not
# accessible, but do not abort as users may want to run offline.
if [ -n "$WEB_URL" ]; then
    # Optionally push the local branch first so the check (and the pull
    # request) refer to up-to-date contents.
    if [ "$CPR_CONTRIB_AUTO_PUSH" = "1" ]; then
        echo "Pushing '$BRANCH' on '$REMOTE' as requested..."
        git push $REMOTE $L_BRANCH:$BRANCH
        echo ""
    fi
    wget --no-check-certificate -q $WEB_URL -O /dev/null
    if [ $? -ne 0 ]; then
        echo "WARNING: Branch '$BRANCH' was not found on the contrib git tree."
        echo "  Please check your remote and branch parameter before sending."
        echo ""
    fi
fi
|
||||
|
||||
# Refuse to clobber an existing output directory.
if [ -e $ODIR ]; then
    echo "ERROR: output directory $ODIR exists."
    exit 1
fi
mkdir $ODIR

# With -d, generate patches relative to that subdirectory (e.g. ./bitbake).
if [ -n "$RELDIR" ]; then
    ODIR=$(realpath $ODIR)
    pdir=$(pwd)
    cd $RELDIR
    extraopts="$extraopts --relative"
fi

# Generate the patches and cover letter
git format-patch $extraopts -M40 --subject-prefix="$PREFIX" -n -o $ODIR --thread=shallow --cover-letter $RELATIVE_TO..$COMMIT_ID > /dev/null

if [ -z "$(ls -A $ODIR 2> /dev/null)" ]; then
    echo "ERROR: $ODIR is empty, no cover letter and patches was generated!"
    # FIX: the message previously referred to the non-existent variable
    # name "$RRELATIVE_TO" (typo for $RELATIVE_TO).
    echo "      This is most likely due to that \$RELATIVE_TO..\$COMMIT_ID"
    echo "      ($RELATIVE_TO..$COMMIT_ID) don't contain any differences."
    rmdir $ODIR
    exit 1
fi

[ -n "$RELDIR" ] && cd $pdir
|
||||
|
||||
# Customize the cover letter
CL="$(echo $ODIR/*0000-cover-letter.patch)"
PM="$ODIR/pull-msg"
# Reduce e.g. "git version 2.39.2" to its first three digits ("239") for a
# numeric comparison against NEWER_GIT_VERSION.
# FIX: the command was previously written as $(`git --version` | ...), i.e.
# backticks nested inside $(), which executed the *output* of
# 'git --version' as a command and left GIT_VERSION empty. Pipe the
# command's own output instead.
GIT_VERSION=$(git --version | tr -d '[:alpha:][:space:].' | sed 's/\(...\).*/\1/')
NEWER_GIT_VERSION=210
# Newer git can resolve local:remote branch pairs in request-pull.
if [ $GIT_VERSION -lt $NEWER_GIT_VERSION ]; then
    git request-pull $RELATIVE_TO $REMOTE_URL $COMMIT_ID >> "$PM"
else
    git request-pull $RELATIVE_TO $REMOTE_URL $L_BRANCH:$BRANCH >> "$PM"
fi
if [ $? -ne 0 ]; then
    echo "ERROR: git request-pull reported an error"
    rm -rf $ODIR
    exit 1
fi
|
||||
|
||||
# The cover letter already has a diffstat, remove it from the pull-msg
# before inserting it.
sed -n "0,\#$REMOTE_URL# p" "$PM" | sed -i "/BLURB HERE/ r /dev/stdin" "$CL"
rm "$PM"

# If this is an RFC, make that clear in the cover letter
if [ $RFC -eq 1 ]; then
(cat <<EOM
Please review the following changes for suitability for inclusion. If you have
any objections or suggestions for improvement, please respond to the patches. If
you agree with the changes, please provide your Acked-by.

EOM
) | sed -i "/BLURB HERE/ r /dev/stdin" "$CL"
fi
|
||||
|
||||
# Insert the WEB_URL if there is one
if [ -n "$WEB_URL" ]; then
    echo " $WEB_URL" | sed -i "\#$REMOTE_URL# r /dev/stdin" "$CL"
fi


# If the user specified a message body, insert it into the cover letter and
# remove the BLURB token.
if [ -n "$BODY" ]; then
    sed -i "/BLURB HERE/ r $BODY" "$CL"
    sed -i "/BLURB HERE/ d" "$CL"
fi

# Set subject automatically if there is only one patch
patch_cnt=`git log --pretty=oneline ${RELATIVE_TO}..${L_BRANCH} | wc -l`
if [ -z "$SUBJECT" -a $patch_cnt -eq 1 ]; then
    SUBJECT="`git log --format=%s ${RELATIVE_TO}..${L_BRANCH}`"
fi

# Replace the SUBJECT token with it.
if [ -n "$SUBJECT" ]; then
    sed -i -e "s\`\*\*\* SUBJECT HERE \*\*\*\`$SUBJECT\`" "$CL"
fi


# Generate report for user
cat <<EOM
The following patches have been prepared:
$(for PATCH in $(ls $ODIR/*); do echo " $PATCH"; done)

Review their content, especially the summary mail:
    $CL

When you are satisfied, you can send them with:
    send-pull-request -a -p $ODIR
EOM

# Check the patches for trailing white space
# (egrep -q exits 1 only when nothing matched, so -ne 1 means matches found)
egrep -q -e "^\+.*\s+$" $ODIR/*
if [ $? -ne 1 ]; then
    echo
    echo "WARNING: Trailing white space detected at these locations"
    egrep -nH --color -e "^\+.*\s+$" $ODIR/*
fi
|
||||
Symlink
+1
@@ -0,0 +1 @@
|
||||
../native-intercept/ar
|
||||
Executable
+458
@@ -0,0 +1,458 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Build a systemtap script for a given image, kernel
|
||||
#
|
||||
# Effectively script extracts needed information from set of
|
||||
# 'bitbake -e' commands and contructs proper invocation of stap on
|
||||
# host to build systemtap script for a given target.
|
||||
#
|
||||
# By default script will compile scriptname.ko that could be copied
|
||||
# to taget and activated with 'staprun scriptname.ko' command. Or if
|
||||
# --remote user@hostname option is specified script will build, load
|
||||
# execute script on target.
|
||||
#
|
||||
# This script is very similar and inspired by crosstap shell script.
|
||||
# The major difference that this script supports user-land related
|
||||
# systemtap script, whereas crosstap could deal only with scripts
|
||||
# related to kernel.
|
||||
#
|
||||
# Copyright (c) 2018, Cisco Systems.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import sys
|
||||
import re
|
||||
import subprocess
|
||||
import os
|
||||
import optparse
|
||||
|
||||
class Stap(object):
    """Builds the cross 'stap' invocation for a target and can render it.

    Most attributes (stap, sysroot, runtime, ...) start as None and are
    filled in later from 'bitbake -e' output by the caller.
    """

    def __init__(self, script, module, remote):
        self.script = script
        self.module = module
        self.remote = remote

        # Populated later from bitbake environment data.
        self.stap = None
        self.sysroot = None
        self.runtime = None
        self.tapset = None
        self.arch = None
        self.cross_compile = None
        self.kernel_release = None
        self.target_path = None
        self.target_ld_library_path = None

        if not self.remote and not self.module:
            # Local builds need a module name; derive it from the script
            # file name ("foo-bar.stp" -> "foo_bar").
            base = os.path.basename(self.script)
            if base[-4:] == ".stp":
                base = base[:-4]
            self.module = base.replace("-", "_")

    def command(self, args):
        """Return the full stap argument vector for this configuration.

        *args* are extra options inserted just before the script path.
        """
        cmd = [self.stap]

        if self.remote:
            # Build, load and run directly on the remote target.
            cmd += ["--remote", self.remote]
        else:
            # Stop after pass 4 (compile) and emit a named .ko module.
            cmd += ["-p4", "-m", self.module]

        cmd += ["-a", self.arch]
        cmd += ["-B", "CROSS_COMPILE=" + self.cross_compile]
        cmd += ["-r", self.kernel_release]
        cmd += ["-I", self.tapset]
        cmd += ["-R", self.runtime]

        if self.sysroot:
            cmd += ["--sysroot", self.sysroot]

        cmd.append("--sysenv=PATH=" + self.target_path)
        cmd.append("--sysenv=LD_LIBRARY_PATH=" + self.target_ld_library_path)

        cmd.extend(args)
        cmd.append(self.script)
        return cmd

    def additional_environment(self):
        """Extra environment variables needed by the stap invocation."""
        return {"SYSTEMTAP_DEBUGINFO_PATH": "+:.debug:build"}

    def environment(self):
        """Return a copy of os.environ with the extra variables applied."""
        env = os.environ.copy()
        env.update(self.additional_environment())
        return env

    def display_command(self, args):
        """Print a shell-script equivalent of the stap invocation."""
        print("#!/bin/sh")
        for name, value in self.additional_environment().items():
            print("export %s=\"%s\"" % (name, value))
        print(" ".join(self.command(args)))
|
||||
|
||||
class BitbakeEnvInvocationException(Exception):
    """Raised when a 'bitbake -e <package>' invocation fails.

    Carries the failure description in .message and also passes it to
    Exception.__init__ so that str(exc) / tracebacks show the message
    (the original implementation left str(exc) empty).
    """
    def __init__(self, message):
        super().__init__(message)
        self.message = message
|
||||
|
||||
class BitbakeEnv(object):
    """Run 'bitbake -e <package>' once and scrape variables from its output."""

    BITBAKE="bitbake"

    def __init__(self, package):
        # Capture the full 'bitbake -e' output (stdout + stderr merged)
        # up front; get_vars() parses it later.
        self.package = package
        self.cmd = BitbakeEnv.BITBAKE + " -e " + self.package
        self.popen = subprocess.Popen(self.cmd, shell=True,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.STDOUT)
        self.__lines = self.popen.stdout.readlines()
        self.popen.wait()

        # decode bytes once so the regex matching below works on str
        self.lines = []
        for line in self.__lines:
            self.lines.append(line.decode('utf-8'))

    def get_vars(self, vars):
        """Return the values of the requested variables.

        vars: iterable of variable names.
        Returns a list of values in request order (None for variables
        not found) — or, if exactly one variable was requested, just
        that scalar value.  Raises BitbakeEnvInvocationException if the
        bitbake invocation failed.
        """
        if self.popen.returncode:
            raise BitbakeEnvInvocationException(
                "\nFailed to execute '" + self.cmd +
                "' with the following message:\n" +
                ''.join(self.lines))

        # Each variable may appear either plain or 'export'-ed in the
        # -e output, so compile a pattern for both forms.
        search_patterns = []
        retdict = {}
        for var in vars:
            # regular not exported variable
            rexpr = "^" + var + "=\"(.*)\""
            re_compiled = re.compile(rexpr)
            search_patterns.append((var, re_compiled))

            # exported variable
            rexpr = "^export " + var + "=\"(.*)\""
            re_compiled = re.compile(rexpr)
            search_patterns.append((var, re_compiled))

        for line in self.lines:
            for var, rexpr in search_patterns:
                m = rexpr.match(line)
                if m:
                    value = m.group(1)
                    retdict[var] = value

        # fill variables values in order how they were requested
        ret = []
        for var in vars:
            ret.append(retdict.get(var))

        # if it is single value list return it as scalar, not the list
        if len(ret) == 1:
            ret = ret[0]

        return ret
|
||||
|
||||
class ParamDiscovery(object):
    """Discover all build parameters needed for a cross stap invocation.

    Uses BitbakeEnv ('bitbake -e') to read variables from the image
    recipe, virtual/kernel and systemtap-native, validates that the
    required artifacts exist (check()), and fills them into a Stap
    object (fill_stap()).
    """

    SYMBOLS_CHECK_MESSAGE = """
WARNING: image '%s' does not have dbg-pkgs IMAGE_FEATURES enabled and no
"image-combined-dbg" in inherited classes is specified. As result the image
does not have symbols for user-land processes DWARF based probes. Consider
adding 'dbg-pkgs' to EXTRA_IMAGE_FEATURES or adding "image-combined-dbg" to
USER_CLASSES. I.e add this line 'USER_CLASSES += "image-combined-dbg"' to
local.conf file.

Or you may use IMAGE_GEN_DEBUGFS="1" option, and then after build you need
recombine/unpack image and image-dbg tarballs and pass resulting dir location
with --sysroot option.
"""

    def __init__(self, image):
        # image: image recipe name, or None when only kernel-side
        # parameters can be discovered
        self.image = image

        # variables from the image recipe
        self.image_rootfs = None
        self.image_features = None
        self.image_gen_debugfs = None
        self.inherit = None
        self.base_bindir = None
        self.base_sbindir = None
        self.base_libdir = None
        self.bindir = None
        self.sbindir = None
        self.libdir = None

        # variables from virtual/kernel
        self.staging_bindir_toolchain = None
        self.target_prefix = None
        self.target_arch = None
        self.target_kernel_builddir = None

        # variables from systemtap-native
        self.staging_dir_native = None

        # whether the image-combined-dbg class is inherited
        self.image_combined_dbg = False

    def discover(self):
        """Populate all fields by querying bitbake for each recipe."""
        if self.image:
            benv_image = BitbakeEnv(self.image)
            (self.image_rootfs,
             self.image_features,
             self.image_gen_debugfs,
             self.inherit,
             self.base_bindir,
             self.base_sbindir,
             self.base_libdir,
             self.bindir,
             self.sbindir,
             self.libdir
             ) = benv_image.get_vars(
                     ("IMAGE_ROOTFS",
                      "IMAGE_FEATURES",
                      "IMAGE_GEN_DEBUGFS",
                      "INHERIT",
                      "base_bindir",
                      "base_sbindir",
                      "base_libdir",
                      "bindir",
                      "sbindir",
                      "libdir"
                      ))

        benv_kernel = BitbakeEnv("virtual/kernel")
        (self.staging_bindir_toolchain,
         self.target_prefix,
         self.target_arch,
         self.target_kernel_builddir
         ) = benv_kernel.get_vars(
                 ("STAGING_BINDIR_TOOLCHAIN",
                  "TARGET_PREFIX",
                  "TRANSLATED_TARGET_ARCH",
                  "B"
                  ))

        benv_systemtap = BitbakeEnv("systemtap-native")
        (self.staging_dir_native
         ) = benv_systemtap.get_vars(["STAGING_DIR_NATIVE"])

        if self.inherit:
            if "image-combined-dbg" in self.inherit.split():
                self.image_combined_dbg = True

    def check(self, sysroot_option):
        """Verify discovered paths exist; print errors and warnings.

        sysroot_option: the user-supplied --sysroot value (truthy skips
        the debug-symbols warning).  Returns True when all required
        artifacts are present.
        """
        ret = True
        if self.image_rootfs:
            sysroot = self.image_rootfs
            if not os.path.isdir(self.image_rootfs):
                print("ERROR: Cannot find '" + sysroot +
                      "' directory. Was '" + self.image + "' image built?")
                ret = False

        stap = self.staging_dir_native + "/usr/bin/stap"
        if not os.path.isfile(stap):
            print("ERROR: Cannot find '" + stap +
                  "'. Was 'systemtap-native' built?")
            ret = False

        if not os.path.isdir(self.target_kernel_builddir):
            # fix: the recipe name is 'virtual/kernel' (message used to
            # say 'kernel/virtual')
            print("ERROR: Cannot find '" + self.target_kernel_builddir +
                  "' directory. Was 'virtual/kernel' built?")
            ret = False

        if not sysroot_option and self.image_rootfs:
            dbg_pkgs_found = False

            if self.image_features:
                image_features = self.image_features.split()
                if "dbg-pkgs" in image_features:
                    dbg_pkgs_found = True

            # warn when user-land debug symbols will be missing
            if not dbg_pkgs_found \
               and not self.image_combined_dbg:
                print(ParamDiscovery.SYMBOLS_CHECK_MESSAGE % (self.image))

        if not ret:
            print("")

        return ret

    def __map_systemtap_arch(self):
        """Translate the bitbake target arch into stap's '-a' arch name."""
        a = self.target_arch
        ret = a
        if re.match('(athlon|x86.64)$', a):
            ret = 'x86_64'
        elif re.match('i.86$', a):
            ret = 'i386'
        elif re.match('arm$', a):
            ret = 'arm'
        elif re.match('aarch64$', a):
            ret = 'arm64'
        elif re.match('mips(isa|)(32|64|)(r6|)(el|)$', a):
            ret = 'mips'
        elif re.match('p(pc|owerpc)(|64)', a):
            ret = 'powerpc'
        return ret

    def fill_stap(self, stap):
        """Copy the discovered parameters into the given Stap object."""
        stap.stap = self.staging_dir_native + "/usr/bin/stap"
        if not stap.sysroot:
            if self.image_rootfs:
                if self.image_combined_dbg:
                    # combined rootfs+symbols tree produced by the
                    # image-combined-dbg class
                    stap.sysroot = self.image_rootfs + "-dbg"
                else:
                    stap.sysroot = self.image_rootfs
        stap.runtime = self.staging_dir_native + "/usr/share/systemtap/runtime"
        stap.tapset = self.staging_dir_native + "/usr/share/systemtap/tapset"
        stap.arch = self.__map_systemtap_arch()
        stap.cross_compile = self.staging_bindir_toolchain + "/" + \
            self.target_prefix
        stap.kernel_release = self.target_kernel_builddir

        # do we have standard that tells in which order these need to appear
        target_path = []
        if self.sbindir:
            target_path.append(self.sbindir)
        if self.bindir:
            target_path.append(self.bindir)
        if self.base_sbindir:
            target_path.append(self.base_sbindir)
        if self.base_bindir:
            target_path.append(self.base_bindir)
        stap.target_path = ":".join(target_path)

        target_ld_library_path = []
        if self.libdir:
            target_ld_library_path.append(self.libdir)
        if self.base_libdir:
            target_ld_library_path.append(self.base_libdir)
        stap.target_ld_library_path = ":".join(target_ld_library_path)
|
||||
|
||||
|
||||
def main():
    """Entry point: parse arguments, discover parameters, run stap.

    Supports the documented option syntax and the deprecated legacy
    positional syntax ('crosstap user@host script.stp ...').
    """
    usage = """usage: %prog -s <systemtap-script> [options] [-- [systemtap options]]

%prog cross compile given SystemTap script against given image, kernel

It needs to run in environtment set for bitbake - it uses bitbake -e
invocations to retrieve information to construct proper stap cross build
invocation arguments. It assumes that systemtap-native is built in given
bitbake workspace.

Anything after -- option is passed directly to stap.

Legacy script invocation style supported but deprecated:
%prog <user@hostname> <sytemtap-script> [systemtap options]

To enable most out of systemtap the following site.conf or local.conf
configuration is recommended:

# enables symbol + target binaries rootfs-dbg in workspace
IMAGE_GEN_DEBUGFS = "1"
IMAGE_FSTYPES_DEBUGFS = "tar.bz2"
USER_CLASSES += "image-combined-dbg"

# enables kernel debug symbols
KERNEL_EXTRA_FEATURES:append = " features/debug/debug-kernel.scc"

# minimal, just run-time systemtap configuration in target image
PACKAGECONFIG:pn-systemtap = "monitor"

# add systemtap run-time into target image if it is not there yet
IMAGE_INSTALL:append = " systemtap"
"""
    option_parser = optparse.OptionParser(usage=usage)

    option_parser.add_option("-s", "--script", dest="script",
                             help="specify input script FILE name",
                             metavar="FILE")

    option_parser.add_option("-i", "--image", dest="image",
                             help="specify image name for which script should be compiled")

    option_parser.add_option("-r", "--remote", dest="remote",
                             help="specify username@hostname of remote target to run script "
                             "optional, it assumes that remote target can be accessed through ssh")

    option_parser.add_option("-m", "--module", dest="module",
                             help="specify module name, optional, has effect only if --remote is not used, "
                             "if not specified module name will be derived from passed script name")

    option_parser.add_option("-y", "--sysroot", dest="sysroot",
                             help="explicitely specify image sysroot location. May need to use it in case "
                             "when IMAGE_GEN_DEBUGFS=\"1\" option is used and recombined with symbols "
                             "in different location",
                             metavar="DIR")

    option_parser.add_option("-o", "--out", dest="out",
                             action="store_true",
                             help="output shell script that equvivalent invocation of this script with "
                             "given set of arguments, in given bitbake environment. It could be stored in "
                             "separate shell script and could be repeated without incuring bitbake -e "
                             "invocation overhead",
                             default=False)

    option_parser.add_option("-d", "--debug", dest="debug",
                             action="store_true",
                             help="enable debug output. Use this option to see resulting stap invocation",
                             default=False)

    # is invocation follow syntax from orignal crosstap shell script
    legacy_args = False

    # check if we called the legacy way
    if len(sys.argv) >= 3:
        if sys.argv[1].find("@") != -1 and os.path.exists(sys.argv[2]):
            legacy_args = True

            # fill options values for legacy invocation case
            # fix: instantiate Values; assigning the class itself would
            # set attributes on optparse.Values globally
            options = optparse.Values()
            options.script = sys.argv[2]
            options.remote = sys.argv[1]
            options.image = None
            options.module = None
            options.sysroot = None
            options.out = None
            options.debug = None
            remaining_args = sys.argv[3:]

    if not legacy_args:
        (options, remaining_args) = option_parser.parse_args()

    if not options.script or not os.path.exists(options.script):
        print("'-s FILE' option is missing\n")
        option_parser.print_help()
    else:
        stap = Stap(options.script, options.module, options.remote)
        discovery = ParamDiscovery(options.image)
        discovery.discover()
        if not discovery.check(options.sysroot):
            option_parser.print_help()
        else:
            stap.sysroot = options.sysroot
            discovery.fill_stap(stap)

            if options.out:
                # print an equivalent shell script instead of running
                stap.display_command(remaining_args)
            else:
                cmd = stap.command(remaining_args)
                env = stap.environment()

                if options.debug:
                    print(" ".join(cmd))

                # replace this process with stap
                os.execve(cmd[0], cmd, env)

main()
|
||||
Executable
+354
@@ -0,0 +1,354 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# OpenEmbedded Development tool
|
||||
#
|
||||
# Copyright (C) 2014-2015 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
import glob
|
||||
import re
|
||||
import configparser
|
||||
import subprocess
|
||||
import logging
|
||||
|
||||
basepath = ''
|
||||
workspace = {}
|
||||
config = None
|
||||
context = None
|
||||
|
||||
|
||||
scripts_path = os.path.dirname(os.path.realpath(__file__))
|
||||
lib_path = scripts_path + '/lib'
|
||||
sys.path = sys.path + [lib_path]
|
||||
from devtool import DevtoolError, setup_tinfoil
|
||||
import scriptutils
|
||||
import argparse_oe
|
||||
logger = scriptutils.logger_create('devtool')
|
||||
|
||||
plugins = []
|
||||
|
||||
|
||||
class ConfigHandler(object):
    """Read/write helper for the devtool configuration file.

    Wraps a configparser.ConfigParser around conf/devtool.conf and
    exposes get/set with an optional default plus read()/write() of the
    whole file.
    """
    config_file = ''
    config_obj = None
    init_path = ''
    workspace_path = ''

    def __init__(self, filename):
        self.config_file = filename
        self.config_obj = configparser.ConfigParser()

    def get(self, section, option, default=None):
        """Return the option value, or 'default' if the section/option
        is missing; re-raise when no default was supplied."""
        try:
            ret = self.config_obj.get(section, option)
        except (configparser.NoOptionError, configparser.NoSectionError):
            # idiom fix: identity comparison with None (was '!= None')
            if default is not None:
                ret = default
            else:
                raise
        return ret

    def read(self):
        """Load the config file (if present) and derive paths.

        Returns False when a configured init_path does not exist,
        True otherwise.
        """
        if os.path.exists(self.config_file):
            self.config_obj.read(self.config_file)

            if self.config_obj.has_option('General', 'init_path'):
                pth = self.get('General', 'init_path')
                self.init_path = os.path.join(basepath, pth)
                if not os.path.exists(self.init_path):
                    logger.error('init_path %s specified in config file cannot be found' % pth)
                    return False
        else:
            self.config_obj.add_section('General')

        # default workspace lives under the build directory
        self.workspace_path = self.get('General', 'workspace_path', os.path.join(basepath, 'workspace'))
        return True


    def write(self):
        """Persist the current settings back to the config file."""
        logger.debug('writing to config file %s' % self.config_file)
        self.config_obj.set('General', 'workspace_path', self.workspace_path)
        with open(self.config_file, 'w') as f:
            self.config_obj.write(f)

    def set(self, section, option, value):
        """Set an option, creating the section on demand."""
        if not self.config_obj.has_section(section):
            self.config_obj.add_section(section)
        self.config_obj.set(section, option, value)
|
||||
|
||||
class Context:
    """Lightweight attribute bag for global devtool state.

    Any keyword arguments given at construction become attributes of
    the instance (e.g. Context(fixed_setup=False).fixed_setup).
    """
    def __init__(self, **kwargs):
        for name, value in kwargs.items():
            setattr(self, name, value)
|
||||
|
||||
|
||||
def read_workspace():
    """Populate the global 'workspace' dict from the workspace layer.

    Creates/enables the workspace layer on first use (unless running in
    a fixed SDK setup), then parses every appends/*.bbappend for its
    EXTERNALSRC assignment to map recipe name -> srctree/bbappend/
    recipefile/srctreebase.
    """
    global workspace
    workspace = {}
    if not os.path.exists(os.path.join(config.workspace_path, 'conf', 'layer.conf')):
        if context.fixed_setup:
            # in an SDK the layer must already exist; refuse to create it
            logger.error("workspace layer not set up")
            sys.exit(1)
        else:
            logger.info('Creating workspace layer in %s' % config.workspace_path)
            _create_workspace(config.workspace_path, config, basepath)
    if not context.fixed_setup:
        _enable_workspace_layer(config.workspace_path, config, basepath)

    logger.debug('Reading workspace in %s' % config.workspace_path)
    # matches 'EXTERNALSRC = "..."' and 'EXTERNALSRC:pn-<recipe> = "..."'
    externalsrc_re = re.compile(r'^EXTERNALSRC(:pn-([^ =]+))? *= *"([^"]*)"$')
    for fn in glob.glob(os.path.join(config.workspace_path, 'appends', '*.bbappend')):
        with open(fn, 'r') as f:
            pnvalues = {}
            pn = None
            for line in f:
                res = externalsrc_re.match(line.rstrip())
                if res:
                    # recipe name: the :pn- override if given, else the
                    # bbappend file name up to the first underscore
                    recipepn = os.path.splitext(os.path.basename(fn))[0].split('_')[0]
                    pn = res.group(2) or recipepn
                    # Find the recipe file within the workspace, if any
                    bbfile = os.path.basename(fn).replace('.bbappend', '.bb').replace('%', '*')
                    recipefile = glob.glob(os.path.join(config.workspace_path,
                                                        'recipes',
                                                        recipepn,
                                                        bbfile))
                    if recipefile:
                        recipefile = recipefile[0]
                    pnvalues['srctree'] = res.group(3)
                    pnvalues['bbappend'] = fn
                    pnvalues['recipefile'] = recipefile
                elif line.startswith('# srctreebase: '):
                    pnvalues['srctreebase'] = line.split(':', 1)[1].strip()
            if pnvalues:
                if not pn:
                    raise DevtoolError("Found *.bbappend in %s, but could not determine EXTERNALSRC:pn-*. "
                                       "Maybe still using old syntax?" % config.workspace_path)
                if not pnvalues.get('srctreebase', None):
                    # older appends lack the srctreebase marker; fall
                    # back to the source tree itself
                    pnvalues['srctreebase'] = pnvalues['srctree']
                logger.debug('Found recipe %s' % pnvalues)
                workspace[pn] = pnvalues
|
||||
|
||||
def create_workspace(args, config, basepath, workspace):
    """Handler for the 'devtool create-workspace' subcommand.

    Creates the workspace layer at args.layerpath (default:
    <basepath>/workspace) and, unless --create-only was given, enables
    it in bblayers.conf.
    """
    if args.layerpath:
        workspacedir = os.path.abspath(args.layerpath)
    else:
        workspacedir = os.path.abspath(os.path.join(basepath, 'workspace'))

    layerseries = args.layerseries if args.layerseries else None

    _create_workspace(workspacedir, config, basepath, layerseries)

    if not args.create_only:
        _enable_workspace_layer(workspacedir, config, basepath)
|
||||
|
||||
def _create_workspace(workspacedir, config, basepath, layerseries=None):
    """Create the workspace layer directory with layer.conf and README.

    layerseries: LAYERSERIES_COMPAT value to use; when None it is read
    from the bitbake configuration via tinfoil.  Existing workspaces
    are left untouched.
    """
    import bb

    confdir = os.path.join(workspacedir, 'conf')
    if os.path.exists(os.path.join(confdir, 'layer.conf')):
        logger.info('Specified workspace already set up, leaving as-is')
    else:
        if not layerseries:
            tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
            try:
                layerseries = tinfoil.config_data.getVar('LAYERSERIES_CORENAMES')
            finally:
                tinfoil.shutdown()

        # Add a config file
        # ('$' + '{...}' keeps literal ${...} out of this script's own
        # variable expansion when it is processed as recipe text)
        bb.utils.mkdirhier(confdir)
        with open(os.path.join(confdir, 'layer.conf'), 'w') as f:
            f.write('# ### workspace layer auto-generated by devtool ###\n')
            f.write('BBPATH =. "$' + '{LAYERDIR}:"\n')
            f.write('BBFILES += "$' + '{LAYERDIR}/recipes/*/*.bb \\\n')
            f.write('            $' + '{LAYERDIR}/appends/*.bbappend"\n')
            f.write('BBFILE_COLLECTIONS += "workspacelayer"\n')
            f.write('BBFILE_PATTERN_workspacelayer = "^$' + '{LAYERDIR}/"\n')
            f.write('BBFILE_PATTERN_IGNORE_EMPTY_workspacelayer = "1"\n')
            f.write('BBFILE_PRIORITY_workspacelayer = "99"\n')
            f.write('LAYERSERIES_COMPAT_workspacelayer = "%s"\n' % layerseries)
        # Add a README file
        with open(os.path.join(workspacedir, 'README'), 'w') as f:
            f.write('This layer was created by the OpenEmbedded devtool utility in order to\n')
            f.write('contain recipes and bbappends that are currently being worked on. The idea\n')
            f.write('is that the contents is temporary - once you have finished working on a\n')
            f.write('recipe you use the appropriate method to move the files you have been\n')
            f.write('working on to a proper layer. In most instances you should use the\n')
            f.write('devtool utility to manage files within it rather than modifying files\n')
            f.write('directly (although recipes added with "devtool add" will often need\n')
            f.write('direct modification.)\n')
            f.write('\nIf you no longer need to use devtool or the workspace layer\'s contents\n')
            f.write('you can remove the path to this workspace layer from your conf/bblayers.conf\n')
            f.write('file (and then delete the layer, if you wish).\n')
            f.write('\nNote that by default, if devtool fetches and unpacks source code, it\n')
            f.write('will place it in a subdirectory of a "sources" subdirectory of the\n')
            f.write('layer. If you prefer it to be elsewhere you can specify the source\n')
            f.write('tree path on the command line.\n')
|
||||
|
||||
def _enable_workspace_layer(workspacedir, config, basepath):
    """Ensure the workspace layer is in bblayers.conf"""
    import bb
    bblayers_conf = os.path.join(basepath, 'conf', 'bblayers.conf')
    if not os.path.exists(bblayers_conf):
        logger.error('Unable to find bblayers.conf')
        return
    # if the workspace moved, drop the old path while adding the new one
    if os.path.abspath(workspacedir) != os.path.abspath(config.workspace_path):
        removedir = config.workspace_path
    else:
        removedir = None
    _, added = bb.utils.edit_bblayers_conf(bblayers_conf, workspacedir, removedir)
    if added:
        logger.info('Enabling workspace layer in bblayers.conf')
    if config.workspace_path != workspacedir:
        # Update our config to point to the new location
        config.workspace_path = workspacedir
        config.write()
|
||||
|
||||
|
||||
def main():
    """devtool entry point.

    Sets up global state (basepath/config/context), configures logging
    and bitbake library paths, loads subcommand plugins, then parses
    and dispatches the chosen subcommand.  Returns the subcommand's
    exit code (or a negative/positive error code on setup failure).
    """
    global basepath
    global config
    global context

    if sys.getfilesystemencoding() != "utf-8":
        sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.")

    context = Context(fixed_setup=False)

    # Default basepath
    basepath = os.path.dirname(os.path.abspath(__file__))

    parser = argparse_oe.ArgumentParser(description="OpenEmbedded development tool",
                                        add_help=False,
                                        epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
    parser.add_argument('--basepath', help='Base directory of SDK / build directory')
    parser.add_argument('--bbpath', help='Explicitly specify the BBPATH, rather than getting it from the metadata')
    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
    parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
    parser.add_argument('--color', choices=['auto', 'always', 'never'], default='auto', help='Colorize output (where %(metavar)s is %(choices)s)', metavar='COLOR')

    global_args, unparsed_args = parser.parse_known_args()

    # Help is added here rather than via add_help=True, as we don't want it to
    # be handled by parse_known_args()
    parser.add_argument('-h', '--help', action='help', default=argparse.SUPPRESS,
                        help='show this help message and exit')

    if global_args.debug:
        logger.setLevel(logging.DEBUG)
    elif global_args.quiet:
        logger.setLevel(logging.ERROR)

    if global_args.basepath:
        # Override
        basepath = global_args.basepath
        if os.path.exists(os.path.join(basepath, '.devtoolbase')):
            context.fixed_setup = True
    else:
        # walk up from the script location looking for an SDK marker
        pth = basepath
        while pth != '' and pth != os.sep:
            if os.path.exists(os.path.join(pth, '.devtoolbase')):
                context.fixed_setup = True
                basepath = pth
                break
            pth = os.path.dirname(pth)

        if not context.fixed_setup:
            # normal build: require a sourced build environment
            basepath = os.environ.get('BUILDDIR')
            if not basepath:
                logger.error("This script can only be run after initialising the build environment (e.g. by using oe-init-build-env)")
                sys.exit(1)

    logger.debug('Using basepath %s' % basepath)

    config = ConfigHandler(os.path.join(basepath, 'conf', 'devtool.conf'))
    if not config.read():
        return -1
    context.config = config

    bitbake_subdir = config.get('General', 'bitbake_subdir', '')
    if bitbake_subdir:
        # Normally set for use within the SDK
        logger.debug('Using bitbake subdir %s' % bitbake_subdir)
        sys.path.insert(0, os.path.join(basepath, bitbake_subdir, 'lib'))
        core_meta_subdir = config.get('General', 'core_meta_subdir')
        sys.path.insert(0, os.path.join(basepath, core_meta_subdir, 'lib'))
    else:
        # Standard location
        import scriptpath
        bitbakepath = scriptpath.add_bitbake_lib_path()
        if not bitbakepath:
            logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
            sys.exit(1)
        logger.debug('Using standard bitbake path %s' % bitbakepath)
        scriptpath.add_oe_lib_path()

    scriptutils.logger_setup_color(logger, global_args.color)

    if global_args.bbpath is None:
        try:
            tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
            try:
                global_args.bbpath = tinfoil.config_data.getVar('BBPATH')
            finally:
                tinfoil.shutdown()
        except bb.BBHandledException:
            # NOTE(review): 'bb' is only importable after the sys.path
            # setup above — verify it is in scope here
            return 2

    # Search BBPATH first to allow layers to override plugins in scripts_path
    for path in global_args.bbpath.split(':') + [scripts_path]:
        pluginpath = os.path.join(path, 'lib', 'devtool')
        scriptutils.load_plugins(logger, plugins, pluginpath)

    subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
    subparsers.required = True

    subparsers.add_subparser_group('sdk', 'SDK maintenance', -2)
    subparsers.add_subparser_group('advanced', 'Advanced', -1)
    subparsers.add_subparser_group('starting', 'Beginning work on a recipe', 100)
    subparsers.add_subparser_group('info', 'Getting information')
    subparsers.add_subparser_group('working', 'Working on a recipe in the workspace')
    subparsers.add_subparser_group('testbuild', 'Testing changes on target')

    if not context.fixed_setup:
        parser_create_workspace = subparsers.add_parser('create-workspace',
                                                        help='Set up workspace in an alternative location',
                                                        description='Sets up a new workspace. NOTE: other devtool subcommands will create a workspace automatically as needed, so you only need to use %(prog)s if you want to specify where the workspace should be located.',
                                                        group='advanced')
        parser_create_workspace.add_argument('layerpath', nargs='?', help='Path in which the workspace layer should be created')
        parser_create_workspace.add_argument('--layerseries', help='Layer series the workspace should be set to be compatible with')
        parser_create_workspace.add_argument('--create-only', action="store_true", help='Only create the workspace layer, do not alter configuration')
        parser_create_workspace.set_defaults(func=create_workspace, no_workspace=True)

    # let each discovered plugin register its own subcommands
    for plugin in plugins:
        if hasattr(plugin, 'register_commands'):
            plugin.register_commands(subparsers, context)

    args = parser.parse_args(unparsed_args, namespace=global_args)

    try:
        # subcommands flagged no_workspace run before a workspace exists
        if not getattr(args, 'no_workspace', False):
            read_workspace()

        ret = args.func(args, config, basepath, workspace)
    except DevtoolError as err:
        if str(err):
            logger.error(str(err))
        ret = err.exitcode
    except argparse_oe.ArgumentUsageError as ae:
        # NOTE(review): presumably error_subcommand() exits; otherwise
        # 'ret' would be unbound below — confirm
        parser.error_subcommand(ae.message, ae.subcommand)

    return ret
|
||||
|
||||
|
||||
if __name__ == "__main__":
    try:
        ret = main()
    except Exception:
        # catch-all so an unexpected failure still produces a traceback
        # and a non-zero exit code
        ret = 1
        import traceback
        traceback.print_exc()
    sys.exit(ret)
|
||||
Executable
+122
@@ -0,0 +1,122 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import errno
|
||||
import time
|
||||
|
||||
def mkdir(d):
    """Create directory 'd' (with parents), tolerating its existence.

    Any OSError other than EEXIST is re-raised.  Uses a bare 'raise'
    (the original 're-raise e' added an extra traceback frame).
    """
    try:
        os.makedirs(d)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
|
||||
|
||||
# extract the hash from past the last colon to last underscore
|
||||
# extract the hash from past the last colon to last underscore
def extract_sha(filename):
    """Return the sstate hash embedded in *filename*.

    The hash is the eighth ':'-separated field, truncated at the first
    underscore.  Raises IndexError for names with fewer fields.
    """
    eighth_field = filename.split(':')[7]
    sha, _, _ = eighth_field.partition('_')
    return sha
|
||||
|
||||
# get all files in a directory, extract hash and make
|
||||
# a map from hash to list of file with that hash
|
||||
# get all files in a directory, extract hash and make
# a map from hash to list of file with that hash
def map_sha_to_files(dir_, prefix, sha_map):
    """Scan <dir_>/<prefix>/ and group file paths by their sstate hash.

    For every file whose name parses with extract_sha(), append its
    full path to sha_map[hash]; files that don't follow the sstate
    naming scheme are skipped.  Missing directories are a no-op.
    """
    base = dir_ + '/' + prefix + '/'
    if not os.path.exists(base):
        return
    for name in os.listdir(base):
        try:
            sha = extract_sha(name)
        except IndexError:
            # not an sstate-style filename; ignore it
            continue
        sha_map.setdefault(sha, []).append(base + name)
|
||||
|
||||
# given a prefix build a map of hash to list of files
|
||||
# given a prefix build a map of hash to list of files
def build_sha_cache(prefix):
    """Return {hash: [paths]} for one two-level prefix (e.g. 'ab/cd').

    Scans both the main sstate dir (sys.argv[2]) and its native
    subdirectory (sys.argv[2]/<nativelsbstring from sys.argv[4]>).
    """
    sha_map = {}

    sstate_dir = sys.argv[2]
    map_sha_to_files(sstate_dir, prefix, sha_map)

    native_sstate_dir = sys.argv[2] + '/' + sys.argv[4]
    map_sha_to_files(native_sstate_dir, prefix, sha_map)

    return sha_map
|
||||
|
||||
# --- script body: copy the sstate files named by a locked-sigs.inc ---
# argv: <locked-sigs.inc> <input-cachedir> <output-cachedir>
#       <nativelsbstring> [filterfile]
if len(sys.argv) < 5:
    print("Incorrect number of arguments specified")
    print("syntax: gen-lockedsig-cache <locked-sigs.inc> <input-cachedir> <output-cachedir> <nativelsbstring> [filterfile]")
    sys.exit(1)

# optional task filter: only tasks listed in the filter file are kept
filterlist = []
if len(sys.argv) > 5:
    print('Reading filter file %s' % sys.argv[5])
    with open(sys.argv[5]) as f:
        for l in f.readlines():
            if ":" in l:
                filterlist.append(l.rstrip())

# collect signatures from locked-sigs.inc lines of the form 'task:sig'
print('Reading %s' % sys.argv[1])
sigs = []
with open(sys.argv[1]) as f:
    for l in f.readlines():
        if ":" in l:
            task, sig = l.split()[0].rsplit(':', 1)
            if filterlist and not task in filterlist:
                print('Filtering out %s' % task)
            else:
                sigs.append(sig)

print('Gathering file list')
start_time = time.perf_counter()
files = set()
# two-level cache keyed by the first two / next two hash characters,
# mirroring the sstate directory layout, so each dir is listed once
sstate_content_cache = {}
for s in sigs:
    prefix = s[:2]
    prefix2 = s[2:4]
    if prefix not in sstate_content_cache:
        sstate_content_cache[prefix] = {}
    if prefix2 not in sstate_content_cache[prefix]:
        sstate_content_cache[prefix][prefix2] = build_sha_cache(prefix + "/" + prefix2)

    if s in sstate_content_cache[prefix][prefix2]:
        for f in sstate_content_cache[prefix][prefix2][s]:
            files.add(f)

elapsed = time.perf_counter() - start_time
print("Gathering file list took %.1fs" % elapsed)

print('Processing files')
for f in files:
    sys.stdout.write('Processing %s... ' % f)
    if not f.endswith(('.tar.zst', '.siginfo', '.sig')):
        # Most likely a temp file, skip it
        print('skipping')
        continue
    # mirror the input-cache-relative path under the output cache dir
    dst = os.path.join(sys.argv[3], os.path.relpath(f, sys.argv[2]))
    destdir = os.path.dirname(dst)
    mkdir(destdir)

    # hard-link when source and destination share a filesystem,
    # otherwise (or if linking fails) fall back to copying
    src = os.path.realpath(f)
    if os.path.exists(dst):
        os.remove(dst)
    if (os.stat(src).st_dev == os.stat(destdir).st_dev):
        print('linking')
        try:
            os.link(src, dst)
        except OSError as e:
            print('hard linking failed, copying')
            shutil.copyfile(src, dst)
    else:
        print('copying')
        shutil.copyfile(src, dst)

print('Done!')
|
||||
Executable
+43
@@ -0,0 +1,43 @@
|
||||
#! /bin/sh
# Copyright (c) 2005-2008 Wind River Systems, Inc.
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Emit a configure.ac on stdout that probes the type sizes, functions
# and headers listed (one per line) in $1/types, $1/funcs and
# $1/headers.
#
# Fix: quote "$1" throughout so directory paths containing whitespace
# are handled correctly.

cat << EOF
AC_PREREQ(2.57)
AC_INIT([site_wide],[1.0.0])

EOF

# Disable as endian is set in the default config
#echo AC_C_BIGENDIAN
#echo

if [ -e "$1/types" ] ; then
  while read type ; do
    echo "AC_CHECK_SIZEOF([$type])"
  done < "$1/types"

  echo
fi

if [ -e "$1/funcs" ]; then
  while read func ; do
    echo "AC_CHECK_FUNCS([$func])"
  done < "$1/funcs"

  echo
fi

if [ -e "$1/headers" ]; then
  while read header ; do
    echo "AC_CHECK_HEADERS([$header])"
  done < "$1/headers"

  echo
fi

cat << EOF
AC_OUTPUT
EOF
|
||||
Executable
+30
@@ -0,0 +1,30 @@
|
||||
#!/usr/bin/env python3
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: MIT
#
# Wrapper around 'git' that doesn't think we are root

import os
import shutil
import sys

# Stop pseudo from intercepting this process so git sees the real user
os.environ['PSEUDO_UNLOAD'] = '1'

# calculate path to the real 'git'
path = os.environ['PATH']
# we need to remove our path but also any other copy of this script which
# may be present, e.g. eSDK.
replacements = [os.path.dirname(sys.argv[0])]
for p in path.split(":"):
    if p.endswith("/scripts"):
        replacements.append(p)
for r in replacements:
    path = path.replace(r, '/ignoreme')
real_git = shutil.which('git', path=path)

# shutil.which() returns None when nothing was found; fail with a clear
# message instead of an opaque TypeError from os.exec*.
if real_git is None:
    sys.stderr.write("%s: unable to find the real 'git' executable in PATH\n" % sys.argv[0])
    sys.exit(1)

if len(sys.argv) == 1:
    os.execl(real_git, 'git')

os.execv(real_git, sys.argv)
|
||||
Executable
+357
@@ -0,0 +1,357 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Buildtools and buildtools extended installer helper script
|
||||
#
|
||||
# Copyright (C) 2017-2020 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
# NOTE: --with-extended-buildtools is on by default
|
||||
#
|
||||
# Example usage (extended buildtools from milestone):
|
||||
# (1) using --url and --filename
|
||||
# $ install-buildtools \
|
||||
# --url http://downloads.yoctoproject.org/releases/yocto/milestones/yocto-3.1_M3/buildtools \
|
||||
# --filename x86_64-buildtools-extended-nativesdk-standalone-3.0+snapshot-20200315.sh
|
||||
# (2) using --base-url, --release, --installer-version and --build-date
|
||||
# $ install-buildtools \
|
||||
# --base-url http://downloads.yoctoproject.org/releases/yocto \
|
||||
# --release yocto-3.1_M3 \
|
||||
# --installer-version 3.0+snapshot
|
||||
# --build-date 202000315
|
||||
#
|
||||
# Example usage (standard buildtools from release):
|
||||
# (3) using --url and --filename
|
||||
# $ install-buildtools --without-extended-buildtools \
|
||||
# --url http://downloads.yoctoproject.org/releases/yocto/yocto-3.0.2/buildtools \
|
||||
# --filename x86_64-buildtools-nativesdk-standalone-3.0.2.sh
|
||||
# (4) using --base-url, --release and --installer-version
|
||||
# $ install-buildtools --without-extended-buildtools \
|
||||
# --base-url http://downloads.yoctoproject.org/releases/yocto \
|
||||
# --release yocto-3.0.2 \
|
||||
# --installer-version 3.0.2
|
||||
#
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import shutil
|
||||
import shlex
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from urllib.parse import quote
|
||||
|
||||
scripts_path = os.path.dirname(os.path.realpath(__file__))
|
||||
lib_path = scripts_path + '/lib'
|
||||
sys.path = sys.path + [lib_path]
|
||||
import scriptutils
|
||||
import scriptpath
|
||||
|
||||
|
||||
PROGNAME = 'install-buildtools'
|
||||
logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout)
|
||||
|
||||
DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools')
|
||||
DEFAULT_BASE_URL = 'http://downloads.yoctoproject.org/releases/yocto'
|
||||
DEFAULT_RELEASE = 'yocto-4.1'
|
||||
DEFAULT_INSTALLER_VERSION = '4.1'
|
||||
DEFAULT_BUILDDATE = '202110XX'
|
||||
|
||||
# Python version sanity check
|
||||
if not (sys.version_info.major == 3 and sys.version_info.minor >= 4):
|
||||
logger.error("This script requires Python 3.4 or greater")
|
||||
logger.error("You have Python %s.%s" %
|
||||
(sys.version_info.major, sys.version_info.minor))
|
||||
sys.exit(1)
|
||||
|
||||
# The following three functions are copied directly from
|
||||
# bitbake/lib/bb/utils.py, in order to allow this script
|
||||
# to run on versions of python earlier than what bitbake
|
||||
# supports (e.g. less than Python 3.5 for YP 3.1 release)
|
||||
|
||||
def _hasher(method, filename):
    """Feed the contents of *filename* into hash object *method* and
    return the resulting hex digest string."""
    import mmap

    with open(filename, "rb") as fobj:
        try:
            with mmap.mmap(fobj.fileno(), 0, access=mmap.ACCESS_READ) as mapped:
                while True:
                    block = mapped.read(8192)
                    if not block:
                        break
                    method.update(block)
        except ValueError:
            # You can't mmap() an empty file so silence this exception
            pass
    return method.hexdigest()
|
||||
|
||||
|
||||
def md5_file(filename):
    """Return the hex string representation of the MD5 checksum of filename."""
    import hashlib

    digest = hashlib.md5()
    return _hasher(digest, filename)
|
||||
|
||||
def sha256_file(filename):
    """Return the hex string representation of the 256-bit SHA checksum
    of filename."""
    import hashlib

    digest = hashlib.sha256()
    return _hasher(digest, filename)
|
||||
|
||||
|
||||
def main():
|
||||
global DEFAULT_INSTALL_DIR
|
||||
global DEFAULT_BASE_URL
|
||||
global DEFAULT_RELEASE
|
||||
global DEFAULT_INSTALLER_VERSION
|
||||
global DEFAULT_BUILDDATE
|
||||
filename = ""
|
||||
release = ""
|
||||
buildtools_url = ""
|
||||
install_dir = ""
|
||||
arch = platform.machine()
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Buildtools installation helper",
|
||||
add_help=False)
|
||||
parser.add_argument('-u', '--url',
|
||||
help='URL from where to fetch buildtools SDK installer, not '
|
||||
'including filename (optional)\n'
|
||||
'Requires --filename.',
|
||||
action='store')
|
||||
parser.add_argument('-f', '--filename',
|
||||
help='filename for the buildtools SDK installer to be installed '
|
||||
'(optional)\nRequires --url',
|
||||
action='store')
|
||||
parser.add_argument('-d', '--directory',
|
||||
default=DEFAULT_INSTALL_DIR,
|
||||
help='directory where buildtools SDK will be installed (optional)',
|
||||
action='store')
|
||||
parser.add_argument('-r', '--release',
|
||||
default=DEFAULT_RELEASE,
|
||||
help='Yocto Project release string for SDK which will be '
|
||||
'installed (optional)',
|
||||
action='store')
|
||||
parser.add_argument('-V', '--installer-version',
|
||||
default=DEFAULT_INSTALLER_VERSION,
|
||||
help='version string for the SDK to be installed (optional)',
|
||||
action='store')
|
||||
parser.add_argument('-b', '--base-url',
|
||||
default=DEFAULT_BASE_URL,
|
||||
help='base URL from which to fetch SDK (optional)', action='store')
|
||||
parser.add_argument('-t', '--build-date',
|
||||
default=DEFAULT_BUILDDATE,
|
||||
help='Build date of pre-release SDK (optional)', action='store')
|
||||
group = parser.add_mutually_exclusive_group()
|
||||
group.add_argument('--with-extended-buildtools', action='store_true',
|
||||
dest='with_extended_buildtools',
|
||||
default=True,
|
||||
help='enable extended buildtools tarball (on by default)')
|
||||
group.add_argument('--without-extended-buildtools', action='store_false',
|
||||
dest='with_extended_buildtools',
|
||||
help='disable extended buildtools (traditional buildtools tarball)')
|
||||
group.add_argument('--make-only', action='store_true',
|
||||
help='only install make tarball')
|
||||
group = parser.add_mutually_exclusive_group()
|
||||
group.add_argument('-c', '--check', help='enable checksum validation',
|
||||
default=True, action='store_true')
|
||||
group.add_argument('-n', '--no-check', help='disable checksum validation',
|
||||
dest="check", action='store_false')
|
||||
parser.add_argument('-D', '--debug', help='enable debug output',
|
||||
action='store_true')
|
||||
parser.add_argument('-q', '--quiet', help='print only errors',
|
||||
action='store_true')
|
||||
|
||||
parser.add_argument('-h', '--help', action='help',
|
||||
default=argparse.SUPPRESS,
|
||||
help='show this help message and exit')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.make_only:
|
||||
args.with_extended_buildtools = False
|
||||
|
||||
if args.debug:
|
||||
logger.setLevel(logging.DEBUG)
|
||||
elif args.quiet:
|
||||
logger.setLevel(logging.ERROR)
|
||||
|
||||
if args.url and args.filename:
|
||||
logger.debug("--url and --filename detected. Ignoring --base-url "
|
||||
"--release --installer-version arguments.")
|
||||
filename = args.filename
|
||||
buildtools_url = "%s/%s" % (args.url, filename)
|
||||
else:
|
||||
if args.base_url:
|
||||
base_url = args.base_url
|
||||
else:
|
||||
base_url = DEFAULT_BASE_URL
|
||||
if args.release:
|
||||
# check if this is a pre-release "milestone" SDK
|
||||
m = re.search(r"^(?P<distro>[a-zA-Z\-]+)(?P<version>[0-9.]+)(?P<milestone>_M[1-9])$",
|
||||
args.release)
|
||||
logger.debug("milestone regex: %s" % m)
|
||||
if m and m.group('milestone'):
|
||||
logger.debug("release[distro]: %s" % m.group('distro'))
|
||||
logger.debug("release[version]: %s" % m.group('version'))
|
||||
logger.debug("release[milestone]: %s" % m.group('milestone'))
|
||||
if not args.build_date:
|
||||
logger.error("Milestone installers require --build-date")
|
||||
else:
|
||||
if args.make_only:
|
||||
filename = "%s-buildtools-make-nativesdk-standalone-%s-%s.sh" % (
|
||||
arch, args.installer_version, args.build_date)
|
||||
elif args.with_extended_buildtools:
|
||||
filename = "%s-buildtools-extended-nativesdk-standalone-%s-%s.sh" % (
|
||||
arch, args.installer_version, args.build_date)
|
||||
else:
|
||||
filename = "%s-buildtools-nativesdk-standalone-%s-%s.sh" % (
|
||||
arch, args.installer_version, args.build_date)
|
||||
safe_filename = quote(filename)
|
||||
buildtools_url = "%s/milestones/%s/buildtools/%s" % (base_url, args.release, safe_filename)
|
||||
# regular release SDK
|
||||
else:
|
||||
if args.make_only:
|
||||
filename = "%s-buildtools-make-nativesdk-standalone-%s.sh" % (arch, args.installer_version)
|
||||
if args.with_extended_buildtools:
|
||||
filename = "%s-buildtools-extended-nativesdk-standalone-%s.sh" % (arch, args.installer_version)
|
||||
else:
|
||||
filename = "%s-buildtools-nativesdk-standalone-%s.sh" % (arch, args.installer_version)
|
||||
safe_filename = quote(filename)
|
||||
buildtools_url = "%s/%s/buildtools/%s" % (base_url, args.release, safe_filename)
|
||||
|
||||
tmpsdk_dir = tempfile.mkdtemp()
|
||||
try:
|
||||
# Fetch installer
|
||||
logger.info("Fetching buildtools installer")
|
||||
tmpbuildtools = os.path.join(tmpsdk_dir, filename)
|
||||
ret = subprocess.call("wget -q -O %s %s" %
|
||||
(tmpbuildtools, buildtools_url), shell=True)
|
||||
if ret != 0:
|
||||
logger.error("Could not download file from %s" % buildtools_url)
|
||||
return ret
|
||||
|
||||
# Verify checksum
|
||||
if args.check:
|
||||
logger.info("Fetching buildtools installer checksum")
|
||||
checksum_type = ""
|
||||
for checksum_type in ["md5sum", "sha256sum"]:
|
||||
check_url = "{}.{}".format(buildtools_url, checksum_type)
|
||||
checksum_filename = "{}.{}".format(filename, checksum_type)
|
||||
tmpbuildtools_checksum = os.path.join(tmpsdk_dir, checksum_filename)
|
||||
ret = subprocess.call("wget -q -O %s %s" %
|
||||
(tmpbuildtools_checksum, check_url), shell=True)
|
||||
if ret == 0:
|
||||
break
|
||||
else:
|
||||
if ret != 0:
|
||||
logger.error("Could not download file from %s" % check_url)
|
||||
return ret
|
||||
regex = re.compile(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$")
|
||||
with open(tmpbuildtools_checksum, 'rb') as f:
|
||||
original = f.read()
|
||||
m = re.search(regex, original.decode("utf-8"))
|
||||
logger.debug("checksum regex match: %s" % m)
|
||||
logger.debug("checksum: %s" % m.group('checksum'))
|
||||
logger.debug("path: %s" % m.group('path'))
|
||||
logger.debug("filename: %s" % m.group('filename'))
|
||||
if filename != m.group('filename'):
|
||||
logger.error("Filename does not match name in checksum")
|
||||
return 1
|
||||
checksum = m.group('checksum')
|
||||
if checksum_type == "md5sum":
|
||||
checksum_value = md5_file(tmpbuildtools)
|
||||
else:
|
||||
checksum_value = sha256_file(tmpbuildtools)
|
||||
if checksum == checksum_value:
|
||||
logger.info("Checksum success")
|
||||
else:
|
||||
logger.error("Checksum %s expected. Actual checksum is %s." %
|
||||
(checksum, checksum_value))
|
||||
return 1
|
||||
|
||||
# Make installer executable
|
||||
logger.info("Making installer executable")
|
||||
st = os.stat(tmpbuildtools)
|
||||
os.chmod(tmpbuildtools, st.st_mode | stat.S_IEXEC)
|
||||
logger.debug(os.stat(tmpbuildtools))
|
||||
if args.directory:
|
||||
install_dir = args.directory
|
||||
ret = subprocess.call("%s -d %s -y" %
|
||||
(tmpbuildtools, install_dir), shell=True)
|
||||
else:
|
||||
install_dir = "/opt/poky/%s" % args.installer_version
|
||||
ret = subprocess.call("%s -y" % tmpbuildtools, shell=True)
|
||||
if ret != 0:
|
||||
logger.error("Could not run buildtools installer")
|
||||
return ret
|
||||
|
||||
# Setup the environment
|
||||
logger.info("Setting up the environment")
|
||||
regex = re.compile(r'^(?P<export>export )?(?P<env_var>[A-Z_]+)=(?P<env_val>.+)$')
|
||||
with open("%s/environment-setup-%s-pokysdk-linux" %
|
||||
(install_dir, arch), 'rb') as f:
|
||||
for line in f:
|
||||
match = regex.search(line.decode('utf-8'))
|
||||
logger.debug("export regex: %s" % match)
|
||||
if match:
|
||||
env_var = match.group('env_var')
|
||||
logger.debug("env_var: %s" % env_var)
|
||||
env_val = match.group('env_val')
|
||||
logger.debug("env_val: %s" % env_val)
|
||||
os.environ[env_var] = env_val
|
||||
|
||||
# Test installation
|
||||
logger.info("Testing installation")
|
||||
tool = ""
|
||||
m = re.search("extended", tmpbuildtools)
|
||||
logger.debug("extended regex: %s" % m)
|
||||
if args.with_extended_buildtools and not m:
|
||||
logger.info("Ignoring --with-extended-buildtools as filename "
|
||||
"does not contain 'extended'")
|
||||
if args.make_only:
|
||||
tool = 'make'
|
||||
elif args.with_extended_buildtools and m:
|
||||
tool = 'gcc'
|
||||
else:
|
||||
tool = 'tar'
|
||||
logger.debug("install_dir: %s" % install_dir)
|
||||
cmd = shlex.split("/usr/bin/which %s" % tool)
|
||||
logger.debug("cmd: %s" % cmd)
|
||||
logger.debug("tool: %s" % tool)
|
||||
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
|
||||
output, errors = proc.communicate()
|
||||
logger.debug("proc.args: %s" % proc.args)
|
||||
logger.debug("proc.communicate(): output %s" % output)
|
||||
logger.debug("proc.communicate(): errors %s" % errors)
|
||||
which_tool = output.decode('utf-8')
|
||||
logger.debug("which %s: %s" % (tool, which_tool))
|
||||
ret = proc.returncode
|
||||
if not which_tool.startswith(install_dir):
|
||||
logger.error("Something went wrong: %s not found in %s" %
|
||||
(tool, install_dir))
|
||||
if ret != 0:
|
||||
logger.error("Something went wrong: installation failed")
|
||||
else:
|
||||
logger.info("Installation successful. Remember to source the "
|
||||
"environment setup script now and in any new session.")
|
||||
return ret
|
||||
|
||||
finally:
|
||||
# cleanup tmp directory
|
||||
shutil.rmtree(tmpsdk_dir)
|
||||
|
||||
|
||||
# Script entry point: run main() and convert any uncaught exception into a
# non-zero exit status, printing the traceback for diagnosis.
if __name__ == '__main__':
    try:
        ret = main()
    except Exception:
        ret = 1
        import traceback

        traceback.print_exc()
    sys.exit(ret)
|
||||
@@ -0,0 +1,182 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
from collections import defaultdict, OrderedDict
|
||||
|
||||
class ArgumentUsageError(Exception):
    """Exception class you can raise (and catch) in order to show the help"""

    def __init__(self, message, subcommand=None):
        # Human-readable description of the usage problem
        self.message = message
        # Subcommand whose help/usage should be shown (None = top level)
        self.subcommand = subcommand
|
||||
class ArgumentParser(argparse.ArgumentParser):
    """Our own version of argparse's ArgumentParser"""
    def __init__(self, *args, **kwargs):
        # Use the grouping-aware help formatter unless the caller overrides it
        kwargs.setdefault('formatter_class', OeHelpFormatter)
        # Maps group name -> (description, sort order); filled in via
        # add_subparser_group() and consumed by OeHelpFormatter
        self._subparser_groups = OrderedDict()
        super(ArgumentParser, self).__init__(*args, **kwargs)
        self._positionals.title = 'arguments'
        self._optionals.title = 'options'

    def error(self, message):
        """error(message: string)

        Prints a help message incorporating the message to stderr and
        exits.
        """
        self._print_message('%s: error: %s\n' % (self.prog, message), sys.stderr)
        self.print_help(sys.stderr)
        sys.exit(2)

    def error_subcommand(self, message, subcommand):
        """Report *message* through the named subcommand's parser so its
        usage/help is shown; fall back to this parser when no subcommand
        is given (or the lookup fails and error() exits)."""
        if subcommand:
            action = self._get_subparser_action()
            try:
                subparser = action._name_parser_map[subcommand]
            except KeyError:
                self.error('no subparser for name "%s"' % subcommand)
            else:
                subparser.error(message)

        self.error(message)

    def add_subparsers(self, *args, **kwargs):
        """Like argparse's add_subparsers(), but records the selected
        subcommand name by default and wires up group support."""
        if 'dest' not in kwargs:
            kwargs['dest'] = '_subparser_name'

        ret = super(ArgumentParser, self).add_subparsers(*args, **kwargs)
        # Need a way of accessing the parent parser
        ret._parent_parser = self
        # Ensure our class gets instantiated
        ret._parser_class = ArgumentSubParser
        # Hacky way of adding a method to the subparsers object
        ret.add_subparser_group = self.add_subparser_group
        return ret

    def add_subparser_group(self, groupname, groupdesc, order=0):
        """Register a named subcommand group with a description and a
        sort order for the --help listing."""
        self._subparser_groups[groupname] = (groupdesc, order)

    def parse_args(self, args=None, namespace=None):
        """Parse arguments, using the correct subparser to show the error."""
        args, argv = self.parse_known_args(args, namespace)
        if argv:
            message = 'unrecognized arguments: %s' % ' '.join(argv)
            if self._subparsers:
                subparser = self._get_subparser(args)
                subparser.error(message)
            else:
                self.error(message)
            sys.exit(2)
        return args

    def _get_subparser(self, args):
        """Return the subparser selected in the parsed *args* namespace."""
        action = self._get_subparser_action()
        if action.dest == argparse.SUPPRESS:
            self.error('cannot get subparser, the subparser action dest is suppressed')

        name = getattr(args, action.dest)
        try:
            return action._name_parser_map[name]
        except KeyError:
            self.error('no subparser for name "%s"' % name)

    def _get_subparser_action(self):
        """Return the argparse._SubParsersAction attached to this parser
        (error() exits if no subparsers were added)."""
        if not self._subparsers:
            self.error('cannot return the subparser action, no subparsers added')

        for action in self._subparsers._group_actions:
            if isinstance(action, argparse._SubParsersAction):
                return action
|
||||
|
||||
|
||||
class ArgumentSubParser(ArgumentParser):
    """Parser class used for subcommands; accepts the extra 'group' and
    'order' keyword arguments that control where the subcommand appears
    in the grouped --help listing."""
    def __init__(self, *args, **kwargs):
        if 'group' in kwargs:
            # Name of the subparser group this command is listed under
            self._group = kwargs.pop('group')
        if 'order' in kwargs:
            # Sort key within the group
            self._order = kwargs.pop('order')
        super(ArgumentSubParser, self).__init__(*args, **kwargs)

    def parse_known_args(self, args=None, namespace=None):
        # This works around argparse not handling optional positional arguments being
        # intermixed with other options. A pretty horrible hack, but we're not left
        # with much choice given that the bug in argparse exists and it's difficult
        # to subclass.
        # Borrowed from http://stackoverflow.com/questions/20165843/argparse-how-to-handle-variable-number-of-arguments-nargs
        # with an extra workaround (in format_help() below) for the positional
        # arguments disappearing from the --help output, as well as structural tweaks.
        # Originally simplified from http://bugs.python.org/file30204/test_intermixed.py
        positionals = self._get_positional_actions()
        for action in positionals:
            # deactivate positionals
            action.save_nargs = action.nargs
            action.nargs = 0

        namespace, remaining_args = super(ArgumentSubParser, self).parse_known_args(args, namespace)
        for action in positionals:
            # remove the empty positional values from namespace
            if hasattr(namespace, action.dest):
                delattr(namespace, action.dest)
        for action in positionals:
            action.nargs = action.save_nargs
        # parse positionals
        namespace, extras = super(ArgumentSubParser, self).parse_known_args(remaining_args, namespace)
        return namespace, extras

    def format_help(self):
        # Quick, restore the positionals!
        positionals = self._get_positional_actions()
        for action in positionals:
            if hasattr(action, 'save_nargs'):
                action.nargs = action.save_nargs
        return super(ArgumentParser, self).format_help()
|
||||
|
||||
|
||||
class OeHelpFormatter(argparse.HelpFormatter):
    """Help formatter which renders the subcommand list grouped by the
    groups registered via ArgumentParser.add_subparser_group()."""
    def _format_action(self, action):
        if hasattr(action, '_get_subactions'):
            # subcommands list
            groupmap = defaultdict(list)
            ordermap = {}
            subparser_groups = action._parent_parser._subparser_groups
            # Groups with a higher registered order are listed first
            groups = sorted(subparser_groups.keys(), key=lambda item: subparser_groups[item][1], reverse=True)
            for subaction in self._iter_indented_subactions(action):
                parser = action._name_parser_map[subaction.dest]
                group = getattr(parser, '_group', None)
                groupmap[group].append(subaction)
                if group not in groups:
                    groups.append(group)
                order = getattr(parser, '_order', 0)
                ordermap[subaction.dest] = order

            lines = []
            # Only indent under group headings when more than one group exists
            if len(groupmap) > 1:
                groupindent = ' '
            else:
                groupindent = ''
            for group in groups:
                subactions = groupmap[group]
                if not subactions:
                    continue
                if groupindent:
                    if not group:
                        group = 'other'
                    groupdesc = subparser_groups.get(group, (group, 0))[0]
                    lines.append(' %s:' % groupdesc)
                # Within a group, higher 'order' sorts first
                for subaction in sorted(subactions, key=lambda item: ordermap[item.dest], reverse=True):
                    lines.append('%s%s' % (groupindent, self._format_action(subaction).rstrip()))
            return '\n'.join(lines)
        else:
            return super(OeHelpFormatter, self)._format_action(action)
|
||||
|
||||
def int_positive(value):
    """Argparse type callable that accepts only strictly positive integers.

    Raises argparse.ArgumentTypeError for zero or negative values.
    """
    number = int(value)
    if number > 0:
        return number
    raise argparse.ArgumentTypeError(
        "%s is not a positive int value" % value)
|
||||
@@ -0,0 +1,24 @@
|
||||
#
|
||||
# Copyright (c) 2017, Intel Corporation.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Build performance test library functions"""
|
||||
|
||||
def print_table(rows, row_fmt=None):
    """Print *rows* as an aligned text table.

    row_fmt is a list of per-column format templates; each must accept a
    'wid' keyword for the column width. Defaults to left-aligned cells.
    """
    if not rows:
        return
    if not row_fmt:
        row_fmt = ['{:{wid}} '] * len(rows[0])

    # Find the widest cell in every column so the output lines up
    widths = [0] * len(row_fmt)
    for row in rows:
        for idx, cell in enumerate(row):
            if len(str(cell)) > widths[idx]:
                widths[idx] = len(str(cell))

    # Emit one formatted line per data row
    for row in rows:
        cells = [row_fmt[idx].format(cell, wid=widths[idx])
                 for idx, cell in enumerate(row)]
        print(*cells)
|
||||
|
||||
@@ -0,0 +1,12 @@
|
||||
#
# Copyright (c) 2017, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Helper module for HTML reporting"""
from jinja2 import Environment, PackageLoader


# Template environment loading templates bundled in the 'html' directory
# of the build_perf package
env = Environment(loader=PackageLoader('build_perf', 'html'))

# Top-level report template used for rendering the build perf HTML report
template = env.get_template('report.html')
|
||||
@@ -0,0 +1,50 @@
|
||||
<script type="text/javascript">
|
||||
chartsDrawing += 1;
|
||||
google.charts.setOnLoadCallback(drawChart_{{ chart_elem_id }});
|
||||
function drawChart_{{ chart_elem_id }}() {
|
||||
var data = new google.visualization.DataTable();
|
||||
|
||||
// Chart options
|
||||
var options = {
|
||||
theme : 'material',
|
||||
legend: 'none',
|
||||
hAxis: { format: '', title: 'Commit number',
|
||||
minValue: {{ chart_opts.haxis.min }},
|
||||
maxValue: {{ chart_opts.haxis.max }} },
|
||||
{% if measurement.type == 'time' %}
|
||||
vAxis: { format: 'h:mm:ss' },
|
||||
{% else %}
|
||||
vAxis: { format: '' },
|
||||
{% endif %}
|
||||
pointSize: 5,
|
||||
chartArea: { left: 80, right: 15 },
|
||||
};
|
||||
|
||||
// Define data columns
|
||||
data.addColumn('number', 'Commit');
|
||||
data.addColumn('{{ measurement.value_type.gv_data_type }}',
|
||||
'{{ measurement.value_type.quantity }}');
|
||||
// Add data rows
|
||||
data.addRows([
|
||||
{% for sample in measurement.samples %}
|
||||
[{{ sample.commit_num }}, {{ sample.mean.gv_value() }}],
|
||||
{% endfor %}
|
||||
]);
|
||||
|
||||
// Finally, draw the chart
|
||||
chart_div = document.getElementById('{{ chart_elem_id }}');
|
||||
var chart = new google.visualization.LineChart(chart_div);
|
||||
google.visualization.events.addListener(chart, 'ready', function () {
|
||||
//chart_div = document.getElementById('{{ chart_elem_id }}');
|
||||
//chart_div.innerHTML = '<img src="' + chart.getImageURI() + '">';
|
||||
png_div = document.getElementById('{{ chart_elem_id }}_png');
|
||||
png_div.outerHTML = '<a id="{{ chart_elem_id }}_png" href="' + chart.getImageURI() + '">PNG</a>';
|
||||
console.log("CHART READY: {{ chart_elem_id }}");
|
||||
chartsDrawing -= 1;
|
||||
if (chartsDrawing == 0)
|
||||
console.log("ALL CHARTS READY");
|
||||
});
|
||||
chart.draw(data, options);
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -0,0 +1,289 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
{# Scripts, for visualization#}
|
||||
<!--START-OF-SCRIPTS-->
|
||||
<script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
|
||||
<script type="text/javascript">
|
||||
google.charts.load('current', {'packages':['corechart']});
|
||||
var chartsDrawing = 0;
|
||||
</script>
|
||||
|
||||
{# Render measurement result charts #}
|
||||
{% for test in test_data %}
|
||||
{% if test.status == 'SUCCESS' %}
|
||||
{% for measurement in test.measurements %}
|
||||
{% set chart_elem_id = test.name + '_' + measurement.name + '_chart' %}
|
||||
{% include 'measurement_chart.html' %}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
<!--END-OF-SCRIPTS-->
|
||||
|
||||
{# Styles #}
|
||||
<style>
|
||||
.meta-table {
|
||||
font-size: 14px;
|
||||
text-align: left;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
.meta-table tr:nth-child(even){background-color: #f2f2f2}
|
||||
.meta-table th, .meta-table td {
|
||||
padding: 4px;
|
||||
}
|
||||
.summary {
|
||||
margin: 0;
|
||||
font-size: 14px;
|
||||
text-align: left;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
.summary th, .summary td {
|
||||
padding: 4px;
|
||||
}
|
||||
.measurement {
|
||||
padding: 8px 0px 8px 8px;
|
||||
border: 2px solid #f0f0f0;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
.details {
|
||||
margin: 0;
|
||||
font-size: 12px;
|
||||
text-align: left;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
.details th {
|
||||
padding-right: 8px;
|
||||
}
|
||||
.details.plain th {
|
||||
font-weight: normal;
|
||||
}
|
||||
.preformatted {
|
||||
font-family: monospace;
|
||||
white-space: pre-wrap;
|
||||
background-color: #f0f0f0;
|
||||
margin-left: 10px;
|
||||
}
|
||||
hr {
|
||||
color: #f0f0f0;
|
||||
}
|
||||
h2 {
|
||||
font-size: 20px;
|
||||
margin-bottom: 0px;
|
||||
color: #707070;
|
||||
}
|
||||
h3 {
|
||||
font-size: 16px;
|
||||
margin: 0px;
|
||||
color: #707070;
|
||||
}
|
||||
</style>
|
||||
|
||||
<title>{{ title }}</title>
|
||||
</head>
|
||||
|
||||
{% macro poky_link(commit) -%}
|
||||
<a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?id={{ commit }}">{{ commit[0:11] }}</a>
|
||||
{%- endmacro %}
|
||||
|
||||
<body><div style="width: 700px">
|
||||
{# Test metadata #}
|
||||
<h2>General</h2>
|
||||
<hr>
|
||||
<table class="meta-table" style="width: 100%">
|
||||
<tr>
|
||||
<th></th>
|
||||
<th>Current commit</th>
|
||||
<th>Comparing with</th>
|
||||
</tr>
|
||||
{% for key, item in metadata.items() %}
|
||||
<tr>
|
||||
<th>{{ item.title }}</th>
|
||||
{%if key == 'commit' %}
|
||||
<td>{{ poky_link(item.value) }}</td>
|
||||
<td>{{ poky_link(item.value_old) }}</td>
|
||||
{% else %}
|
||||
<td>{{ item.value }}</td>
|
||||
<td>{{ item.value_old }}</td>
|
||||
{% endif %}
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</table>
|
||||
|
||||
{# Test result summary #}
|
||||
<h2>Test result summary</h2>
|
||||
<hr>
|
||||
<table class="summary" style="width: 100%">
|
||||
{% for test in test_data %}
|
||||
{% if loop.index is even %}
|
||||
{% set row_style = 'style="background-color: #f2f2f2"' %}
|
||||
{% else %}
|
||||
{% set row_style = 'style="background-color: #ffffff"' %}
|
||||
{% endif %}
|
||||
{% if test.status == 'SUCCESS' %}
|
||||
{% for measurement in test.measurements %}
|
||||
<tr {{ row_style }}>
|
||||
{% if loop.index == 1 %}
|
||||
<td>{{ test.name }}: {{ test.description }}</td>
|
||||
{% else %}
|
||||
{# add empty cell in place of the test name#}
|
||||
<td></td>
|
||||
{% endif %}
|
||||
{% if measurement.absdiff > 0 %}
|
||||
{% set result_style = "color: red" %}
|
||||
{% elif measurement.absdiff == measurement.absdiff %}
|
||||
{% set result_style = "color: green" %}
|
||||
{% else %}
|
||||
{% set result_style = "color: orange" %}
|
||||
{%endif %}
|
||||
{% if measurement.reldiff|abs > 2 %}
|
||||
{% set result_style = result_style + "; font-weight: bold" %}
|
||||
{% endif %}
|
||||
<td>{{ measurement.description }}</td>
|
||||
<td style="font-weight: bold">{{ measurement.value.mean }}</td>
|
||||
<td style="{{ result_style }}">{{ measurement.absdiff_str }}</td>
|
||||
<td style="{{ result_style }}">{{ measurement.reldiff_str }}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
{% else %}
|
||||
<td style="font-weight: bold; color: red;">{{test.status }}</td>
|
||||
<td></td> <td></td> <td></td> <td></td>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</table>
|
||||
|
||||
{# Detailed test results #}
|
||||
{% for test in test_data %}
|
||||
<h2>{{ test.name }}: {{ test.description }}</h2>
|
||||
<hr>
|
||||
{% if test.status == 'SUCCESS' %}
|
||||
{% for measurement in test.measurements %}
|
||||
<div class="measurement">
|
||||
<h3>{{ measurement.description }}</h3>
|
||||
<div style="font-weight:bold;">
|
||||
<span style="font-size: 23px;">{{ measurement.value.mean }}</span>
|
||||
<span style="font-size: 20px; margin-left: 12px">
|
||||
{% if measurement.absdiff > 0 %}
|
||||
<span style="color: red">
|
||||
{% elif measurement.absdiff == measurement.absdiff %}
|
||||
<span style="color: green">
|
||||
{% else %}
|
||||
<span style="color: orange">
|
||||
{% endif %}
|
||||
{{ measurement.absdiff_str }} ({{measurement.reldiff_str}})
|
||||
</span></span>
|
||||
</div>
|
||||
{# Table for trendchart and the statistics #}
|
||||
<table style="width: 100%">
|
||||
<tr>
|
||||
<td style="width: 75%">
|
||||
{# Linechart #}
|
||||
<div id="{{ test.name }}_{{ measurement.name }}_chart"></div>
|
||||
</td>
|
||||
<td>
|
||||
{# Measurement statistics #}
|
||||
<table class="details plain">
|
||||
<tr>
|
||||
<th>Test runs</th><td>{{ measurement.value.sample_cnt }}</td>
|
||||
</tr><tr>
|
||||
<th>-/+</th><td>-{{ measurement.value.minus }} / +{{ measurement.value.plus }}</td>
|
||||
</tr><tr>
|
||||
<th>Min</th><td>{{ measurement.value.min }}</td>
|
||||
</tr><tr>
|
||||
<th>Max</th><td>{{ measurement.value.max }}</td>
|
||||
</tr><tr>
|
||||
<th>Stdev</th><td>{{ measurement.value.stdev }}</td>
|
||||
</tr><tr>
|
||||
<th><div id="{{ test.name }}_{{ measurement.name }}_chart_png"></div></th>
|
||||
<td></td>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
{# Task and recipe summary from buildstats #}
|
||||
{% if 'buildstats' in measurement %}
|
||||
Task resource usage
|
||||
<table class="details" style="width:100%">
|
||||
<tr>
|
||||
<th>Number of tasks</th>
|
||||
<th>Top consumers of cputime</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<td style="vertical-align: top">{{ measurement.buildstats.tasks.count }} ({{ measurement.buildstats.tasks.change }})</td>
|
||||
{# Table of most resource-hungry tasks #}
|
||||
<td>
|
||||
<table class="details plain">
|
||||
{% for diff in measurement.buildstats.top_consumer|reverse %}
|
||||
<tr>
|
||||
<th>{{ diff.pkg }}.{{ diff.task }}</th>
|
||||
<td>{{ '%0.0f' % diff.value2 }} s</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th>Biggest increase in cputime</th>
|
||||
<th>Biggest decrease in cputime</th>
|
||||
</tr>
|
||||
<tr>
|
||||
{# Table biggest increase in resource usage #}
|
||||
<td>
|
||||
<table class="details plain">
|
||||
{% for diff in measurement.buildstats.top_increase|reverse %}
|
||||
<tr>
|
||||
<th>{{ diff.pkg }}.{{ diff.task }}</th>
|
||||
<td>{{ '%+0.0f' % diff.absdiff }} s</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</table>
|
||||
</td>
|
||||
{# Table biggest decrease in resource usage #}
|
||||
<td>
|
||||
<table class="details plain">
|
||||
{% for diff in measurement.buildstats.top_decrease %}
|
||||
<tr>
|
||||
<th>{{ diff.pkg }}.{{ diff.task }}</th>
|
||||
<td>{{ '%+0.0f' % diff.absdiff }} s</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
{# Recipe version differences #}
|
||||
{% if measurement.buildstats.ver_diff %}
|
||||
<div style="margin-top: 16px">Recipe version changes</div>
|
||||
<table class="details">
|
||||
{% for head, recipes in measurement.buildstats.ver_diff.items() %}
|
||||
<tr>
|
||||
<th colspan="2">{{ head }}</th>
|
||||
</tr>
|
||||
{% for name, info in recipes|sort %}
|
||||
<tr>
|
||||
<td>{{ name }}</td>
|
||||
<td>{{ info }}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
</table>
|
||||
{% else %}
|
||||
<div style="margin-top: 16px">No recipe version changes detected</div>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endfor %}
|
||||
{# Unsuccessful test #}
|
||||
{% else %}
|
||||
<span style="font-size: 150%; font-weight: bold; color: red;">{{ test.status }}
|
||||
{% if test.err_type %}<span style="font-size: 75%; font-weight: normal">({{ test.err_type }})</span>{% endif %}
|
||||
</span>
|
||||
<div class="preformatted">{{ test.message }}</div>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</div></body>
|
||||
</html>
|
||||
|
||||
@@ -0,0 +1,339 @@
|
||||
#
|
||||
# Copyright (c) 2017, Intel Corporation.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Handling of build perf test reports"""
|
||||
from collections import OrderedDict, namedtuple
|
||||
from collections.abc import Mapping
|
||||
from datetime import datetime, timezone
|
||||
from numbers import Number
|
||||
from statistics import mean, stdev, variance
|
||||
|
||||
|
||||
# Pairs the aggregated metadata of a set of test runs with their results
AggregateTestData = namedtuple('AggregateTestData', ['metadata', 'results'])
|
||||
|
||||
|
||||
def isofmt_to_timestamp(string):
    """Convert an ISO 8601 timestamp string into a unix timestamp.

    The timestamp is interpreted as UTC.
    """
    # Fractional seconds are optional in the input
    fmt = '%Y-%m-%dT%H:%M:%S.%f' if '.' in string else '%Y-%m-%dT%H:%M:%S'
    parsed = datetime.strptime(string, fmt)
    return parsed.replace(tzinfo=timezone.utc).timestamp()
|
||||
|
||||
|
||||
def metadata_xml_to_json(elem):
    """Convert metadata xml into JSON format.

    Args:
        elem: root 'metadata' ElementTree element

    Returns:
        OrderedDict mirroring the XML tree: each child is keyed by its
        'name' attribute (falling back to its tag) and mapped to either
        its text or a nested dict.
    """
    assert elem.tag == 'metadata', "Invalid metadata file format"

    def _xml_to_json(elem):
        """Convert xml element to JSON object"""
        out = OrderedDict()
        # Iterate the element directly: Element.getchildren() was removed
        # in Python 3.9
        for child in elem:
            key = child.attrib.get('name', child.tag)
            if len(child):
                out[key] = _xml_to_json(child)
            else:
                out[key] = child.text
        return out
    return _xml_to_json(elem)
|
||||
|
||||
|
||||
def results_xml_to_json(elem):
    """Convert results xml into JSON format.

    Expects a 'testsuites' root element containing exactly one
    'testsuite', and returns that suite converted into ordered
    JSON-style dicts.
    """
    rusage_fields = ('ru_utime', 'ru_stime', 'ru_maxrss', 'ru_minflt',
                     'ru_majflt', 'ru_inblock', 'ru_oublock', 'ru_nvcsw',
                     'ru_nivcsw')
    iostat_fields = ('rchar', 'wchar', 'syscr', 'syscw', 'read_bytes',
                     'write_bytes', 'cancelled_write_bytes')

    def _read_measurement(elem):
        """Convert measurement to JSON"""
        data = OrderedDict()
        data['type'] = elem.tag
        data['name'] = elem.attrib['name']
        data['legend'] = elem.attrib['legend']
        values = OrderedDict()

        # SYSRES measurement
        if elem.tag == 'sysres':
            # Iterate the element directly: Element.getchildren() was
            # removed in Python 3.9
            for subel in elem:
                if subel.tag == 'time':
                    values['start_time'] = isofmt_to_timestamp(subel.attrib['timestamp'])
                    values['elapsed_time'] = float(subel.text)
                elif subel.tag == 'rusage':
                    rusage = OrderedDict()
                    for field in rusage_fields:
                        # Time fields are floats, the rest are integer counts
                        if 'time' in field:
                            rusage[field] = float(subel.attrib[field])
                        else:
                            rusage[field] = int(subel.attrib[field])
                    values['rusage'] = rusage
                elif subel.tag == 'iostat':
                    values['iostat'] = OrderedDict([(f, int(subel.attrib[f]))
                                                    for f in iostat_fields])
                elif subel.tag == 'buildstats_file':
                    values['buildstats_file'] = subel.text
                else:
                    raise TypeError("Unknown sysres value element '{}'".format(subel.tag))
        # DISKUSAGE measurement
        elif elem.tag == 'diskusage':
            values['size'] = int(elem.find('size').text)
        else:
            raise Exception("Unknown measurement tag '{}'".format(elem.tag))
        data['values'] = values
        return data

    def _read_testcase(elem):
        """Convert testcase into JSON"""
        assert elem.tag == 'testcase', "Expecting 'testcase' element instead of {}".format(elem.tag)

        data = OrderedDict()
        data['name'] = elem.attrib['name']
        data['description'] = elem.attrib['description']
        data['status'] = 'SUCCESS'
        data['start_time'] = isofmt_to_timestamp(elem.attrib['timestamp'])
        data['elapsed_time'] = float(elem.attrib['time'])
        measurements = OrderedDict()

        # Iterate the element directly (getchildren() removed in 3.9)
        for subel in elem:
            if subel.tag == 'error' or subel.tag == 'failure':
                data['status'] = subel.tag.upper()
                data['message'] = subel.attrib['message']
                data['err_type'] = subel.attrib['type']
                data['err_output'] = subel.text
            elif subel.tag == 'skipped':
                data['status'] = 'SKIPPED'
                data['message'] = subel.text
            else:
                # Anything else is a measurement element
                measurements[subel.attrib['name']] = _read_measurement(subel)
        data['measurements'] = measurements
        return data

    def _read_testsuite(elem):
        """Convert suite to JSON"""
        assert elem.tag == 'testsuite', \
               "Expecting 'testsuite' element instead of {}".format(elem.tag)

        data = OrderedDict()
        if 'hostname' in elem.attrib:
            data['tester_host'] = elem.attrib['hostname']
        data['start_time'] = isofmt_to_timestamp(elem.attrib['timestamp'])
        data['elapsed_time'] = float(elem.attrib['time'])
        tests = OrderedDict()

        for case in elem:
            tests[case.attrib['name']] = _read_testcase(case)
        data['tests'] = tests
        return data

    # Main function
    assert elem.tag == 'testsuites', "Invalid test report format"
    assert len(elem) == 1, "Too many testsuites"

    # Indexing works on Element and replaces getchildren()[0]
    return _read_testsuite(elem[0])
|
||||
|
||||
|
||||
def aggregate_metadata(metadata):
    """Aggregate metadata into one, basically a sanity check"""
    # These keys are allowed to differ between test runs
    mutable_keys = ('pretty_name', 'version_id')

    def merge_and_check(agg, new, assert_str=True):
        """Recursively verify that 'new' matches the aggregate"""
        assert type(agg) is type(new), \
            "Type mismatch: {} != {}".format(type(agg), type(new))
        if isinstance(new, Mapping):
            assert set(agg.keys()) == set(new.keys())
            for key, value in new.items():
                merge_and_check(agg[key], value, key not in mutable_keys)
        elif isinstance(new, list):
            assert len(agg) == len(new)
            for i, value in enumerate(new):
                merge_and_check(agg[i], value)
        elif not isinstance(new, str) or assert_str:
            # Scalars (and strings, unless marked mutable) must be identical
            assert agg == new, "Data mismatch {} != {}".format(agg, new)

    if not metadata:
        return {}

    # Do the aggregation
    aggregate = metadata[0].copy()
    for testrun in metadata[1:]:
        merge_and_check(aggregate, testrun)
    aggregate['testrun_count'] = len(metadata)
    return aggregate
|
||||
|
||||
|
||||
def aggregate_data(data):
    """Aggregate multiple test results JSON structures into one.

    Numbers from the individual test runs are collected into SampleList
    containers; other values are sanity-checked to be equal between runs
    (except for keys listed in mutable_keys).
    """

    # These keys may legitimately differ between test runs
    mutable_keys = ('status', 'message', 'err_type', 'err_output')

    class SampleList(list):
        """Container for numerical samples"""
        pass

    def new_aggregate_obj(obj):
        """Create new object for aggregate"""
        if isinstance(obj, Number):
            new_obj = SampleList()
            new_obj.append(obj)
        elif isinstance(obj, str):
            new_obj = obj
        else:
            # Lists and dicts are kept as is
            new_obj = obj.__class__()
            aggregate_obj(new_obj, obj)
        return new_obj

    def aggregate_obj(aggregate, obj, assert_str=True):
        """Recursive "aggregation" of JSON objects"""
        if isinstance(obj, Number):
            assert isinstance(aggregate, SampleList)
            aggregate.append(obj)
            return

        assert type(aggregate) == type(obj), \
            "Type mismatch: {} != {}".format(type(aggregate), type(obj))
        if isinstance(obj, Mapping):
            for key, val in obj.items():
                if not key in aggregate:
                    aggregate[key] = new_aggregate_obj(val)
                else:
                    aggregate_obj(aggregate[key], val, key not in mutable_keys)
        elif isinstance(obj, list):
            for i, val in enumerate(obj):
                if i >= len(aggregate):
                    # BUGFIX: extend the list; the original indexed with an
                    # out-of-scope 'key' ('aggregate[key] = ...'), raising
                    # NameError whenever a run's list was longer than the
                    # aggregate
                    aggregate.append(new_aggregate_obj(val))
                else:
                    aggregate_obj(aggregate[i], val)
        elif isinstance(obj, str):
            # Sanity check for data
            if assert_str:
                assert aggregate == obj, "Data mismatch {} != {}".format(aggregate, obj)
        else:
            raise Exception("BUG: unable to aggregate '{}' ({})".format(type(obj), str(obj)))

    if not data:
        return {}

    # Do the aggregation
    aggregate = data[0].__class__()
    for testrun in data:
        aggregate_obj(aggregate, testrun)
    return aggregate
|
||||
|
||||
|
||||
class MeasurementVal(float):
    """Base class representing measurement values"""
    # Google Visualization column data type for this kind of value
    gv_data_type = 'number'

    def gv_value(self):
        """Value formatting for visualization"""
        # NaN is the only float not equal to itself; report it as the
        # string "null" so the visualization layer can handle it
        return "null" if self != self else self
|
||||
|
||||
|
||||
class TimeVal(MeasurementVal):
    """Class representing time values (in seconds)"""
    quantity = 'time'
    gv_title = 'elapsed time'
    # Google Visualization 'timeofday' columns take [h, m, s, ms] lists
    gv_data_type = 'timeofday'

    def hms(self):
        """Split time into hours, minutes and seconds"""
        # Work on the absolute value; the sign is handled by callers
        hhh = int(abs(self) / 3600)
        mmm = int((abs(self) % 3600) / 60)
        sss = abs(self) % 60
        return hhh, mmm, sss

    def __str__(self):
        # NaN is the only value not equal to itself
        if self != self:
            return "nan"
        hh, mm, ss = self.hms()
        sign = '-' if self < 0 else ''
        # Pick the shortest representation that keeps the magnitude readable
        if hh > 0:
            return '{}{:d}:{:02d}:{:02.0f}'.format(sign, hh, mm, ss)
        elif mm > 0:
            return '{}{:d}:{:04.1f}'.format(sign, mm, ss)
        elif ss > 1:
            return '{}{:.1f} s'.format(sign, ss)
        else:
            return '{}{:.2f} s'.format(sign, ss)

    def gv_value(self):
        """Value formatting for visualization"""
        if self != self:
            return "null"
        hh, mm, ss = self.hms()
        # [hours, minutes, whole seconds, milliseconds]
        return [hh, mm, int(ss), int(ss*1000) % 1000]
|
||||
|
||||
|
||||
class SizeVal(MeasurementVal):
    """Class representing size values, stored in kiB.

    (Docstring fixed: it was copy-pasted from TimeVal and incorrectly
    described this class as representing time values.)
    """
    quantity = 'size'
    gv_title = 'size in MiB'
    gv_data_type = 'number'

    def __str__(self):
        # NaN is the only value not equal to itself
        if self != self:
            return "nan"
        # The raw value is kiB; scale up the unit for readability
        if abs(self) < 1024:
            return '{:.1f} kiB'.format(self)
        elif abs(self) < 1048576:
            return '{:.2f} MiB'.format(self / 1024)
        else:
            return '{:.2f} GiB'.format(self / 1048576)

    def gv_value(self):
        """Value formatting for visualization (kiB -> MiB)"""
        if self != self:
            return "null"
        return self / 1024
|
||||
|
||||
def measurement_stats(meas, prefix=''):
    """Get statistics of a measurement.

    Args:
        meas: measurement JSON object (as produced by results_xml_to_json
              and aggregated by aggregate_data), or a falsy value meaning
              "no data"
        prefix: optional string prepended to the statistics key names

    Returns:
        dict with sample count, mean, stdev, variance, min, max, and the
        distances from mean to min ('minus') and to max ('plus').
    """
    if not meas:
        # No samples at all: every statistic is NaN
        return {prefix + 'sample_cnt': 0,
                prefix + 'mean': MeasurementVal('nan'),
                prefix + 'stdev': MeasurementVal('nan'),
                prefix + 'variance': MeasurementVal('nan'),
                prefix + 'min': MeasurementVal('nan'),
                prefix + 'max': MeasurementVal('nan'),
                prefix + 'minus': MeasurementVal('nan'),
                prefix + 'plus': MeasurementVal('nan')}

    stats = {'name': meas['name']}
    if meas['type'] == 'sysres':
        val_cls = TimeVal
        values = meas['values']['elapsed_time']
    elif meas['type'] == 'diskusage':
        val_cls = SizeVal
        values = meas['values']['size']
    else:
        raise Exception("Unknown measurement type '{}'".format(meas['type']))
    stats['val_cls'] = val_cls
    stats['quantity'] = val_cls.quantity
    stats[prefix + 'sample_cnt'] = len(values)

    mean_val = val_cls(mean(values))
    min_val = val_cls(min(values))
    max_val = val_cls(max(values))

    stats[prefix + 'mean'] = mean_val
    if len(values) > 1:
        stats[prefix + 'stdev'] = val_cls(stdev(values))
        stats[prefix + 'variance'] = val_cls(variance(values))
    else:
        # Consistency fix: wrap in val_cls like every other statistic,
        # instead of returning a plain float('nan')
        stats[prefix + 'stdev'] = val_cls('nan')
        stats[prefix + 'variance'] = val_cls('nan')
    stats[prefix + 'min'] = min_val
    stats[prefix + 'max'] = max_val
    stats[prefix + 'minus'] = val_cls(mean_val - min_val)
    stats[prefix + 'plus'] = val_cls(max_val - mean_val)

    return stats
|
||||
|
||||
@@ -0,0 +1,56 @@
|
||||
// PhantomJS helper: load a build-perf report HTML page, wait for its
// charts to render, grab each chart's PNG markup and save the fully
// rendered page.
var fs = require('fs');
var system = require('system');
var page = require('webpage').create();

// Examine console log for message from chart drawing
page.onConsoleMessage = function(msg) {
    console.log(msg);
    if (msg === "ALL CHARTS READY") {
        // Signals the polling loop below that the page can be saved
        window.charts_ready = true;
    }
    else if (msg.slice(0, 11) === "CHART READY") {
        // Message format is "CHART READY <id>"; grab that chart's markup
        var chart_id = msg.split(" ")[2];
        console.log('grabbing ' + chart_id);
        var png_data = page.evaluate(function (chart_id) {
            var chart_div = document.getElementById(chart_id + '_png');
            return chart_div.outerHTML;
        }, chart_id);
        // NOTE(review): 'args' is only assigned further down; this relies
        // on var hoisting plus console messages arriving only after
        // page.open() has run — confirm
        fs.write(args[2] + '/' + chart_id + '.png', png_data, 'w');
    }
};

// Check command line arguments
var args = system.args;
if (args.length != 3) {
    console.log("USAGE: " + args[0] + " REPORT_HTML OUT_DIR\n");
    phantom.exit(1);
}

// Open the web page
page.open(args[1], function(status) {
    if (status == 'fail') {
        console.log("Failed to open file '" + args[1] + "'");
        phantom.exit(1);
    }
});

// Check status every 100 ms
interval = window.setInterval(function () {
    //console.log('waiting');
    if (window.charts_ready) {
        clearTimeout(timer);
        clearInterval(interval);

        // Save the rendered page into OUT_DIR under its original file name
        var fname = args[1].replace(/\/+$/, "").split("/").pop()
        console.log("saving " + fname);
        fs.write(args[2] + '/' + fname, page.content, 'w');
        phantom.exit(0);
    }
}, 100);

// Time-out after 10 seconds
timer = window.setTimeout(function () {
    clearInterval(interval);
    console.log("ERROR: timeout");
    phantom.exit(1);
}, 10000);
|
||||
@@ -0,0 +1,368 @@
|
||||
#
|
||||
# Copyright (c) 2017, Intel Corporation.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Functionality for analyzing buildstats"""
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from collections import namedtuple
|
||||
from statistics import mean
|
||||
|
||||
|
||||
log = logging.getLogger()


# Field names shared by the TaskDiff tuple and diff reporting code
taskdiff_fields = ('pkg', 'pkg_op', 'task', 'task_op', 'value1', 'value2',
                   'absdiff', 'reldiff')
# One row of a task-by-task comparison between two buildstats
# (see diff_buildstats())
TaskDiff = namedtuple('TaskDiff', ' '.join(taskdiff_fields))
|
||||
|
||||
|
||||
class BSError(Exception):
    """Error raised for invalid or unparseable buildstats data"""
    pass
|
||||
|
||||
|
||||
class BSTask(dict):
    """Buildstats of a single task execution.

    Stored as dict keys: start_time, elapsed_time, status, iostat,
    rusage and child_rusage; the properties derive aggregate metrics
    from them.
    """
    def __init__(self, *args, **kwargs):
        self['start_time'] = None
        self['elapsed_time'] = None
        self['status'] = None
        self['iostat'] = {}
        self['rusage'] = {}
        self['child_rusage'] = {}
        super(BSTask, self).__init__(*args, **kwargs)

    @property
    def cputime(self):
        """Sum of user and system time taken by the task"""
        rusage = self['rusage']['ru_stime'] + self['rusage']['ru_utime']
        if self['child_rusage']:
            # Child rusage may have been optimized out
            return rusage + self['child_rusage']['ru_stime'] + self['child_rusage']['ru_utime']
        else:
            return rusage

    @property
    def walltime(self):
        """Elapsed wall clock time"""
        return self['elapsed_time']

    @property
    def read_bytes(self):
        """Bytes read from the block layer"""
        return self['iostat']['read_bytes']

    @property
    def write_bytes(self):
        """Bytes written to the block layer"""
        return self['iostat']['write_bytes']

    @property
    def read_ops(self):
        """Number of read operations on the block layer"""
        if self['child_rusage']:
            # Child rusage may have been optimized out
            return self['rusage']['ru_inblock'] + self['child_rusage']['ru_inblock']
        else:
            return self['rusage']['ru_inblock']

    @property
    def write_ops(self):
        """Number of write operations on the block layer"""
        if self['child_rusage']:
            # Child rusage may have been optimized out
            return self['rusage']['ru_oublock'] + self['child_rusage']['ru_oublock']
        else:
            return self['rusage']['ru_oublock']

    @classmethod
    def from_file(cls, buildstat_file, fallback_end=0):
        """Read buildstat text file. fallback_end is an optional end time for tasks that are not recorded as finishing."""
        bs_task = cls()
        log.debug("Reading task buildstats from %s", buildstat_file)
        # BUGFIX: initialize start_time so a file without a 'Started' line
        # raises BSError below instead of an unexpected NameError
        start_time = None
        end_time = None
        with open(buildstat_file) as fobj:
            for line in fobj.readlines():
                key, val = line.split(':', 1)
                val = val.strip()
                if key == 'Started':
                    start_time = float(val)
                    bs_task['start_time'] = start_time
                elif key == 'Ended':
                    end_time = float(val)
                elif key.startswith('IO '):
                    split = key.split()
                    bs_task['iostat'][split[1]] = int(val)
                elif key.find('rusage') >= 0:
                    split = key.split()
                    ru_key = split[-1]
                    # Times are floats, the other rusage fields are counts
                    if ru_key in ('ru_stime', 'ru_utime'):
                        val = float(val)
                    else:
                        val = int(val)
                    # 'rusage ...' lines are the task's own usage,
                    # 'Child rusage ...' lines come from its children
                    ru_type = 'rusage' if split[0] == 'rusage' else \
                                          'child_rusage'
                    bs_task[ru_type][ru_key] = val
                elif key == 'Status':
                    bs_task['status'] = val
        # If the task didn't finish, fill in the fallback end time if specified
        if start_time and not end_time and fallback_end:
            end_time = fallback_end
        if start_time and end_time:
            bs_task['elapsed_time'] = end_time - start_time
        else:
            raise BSError("{} looks like a invalid buildstats file".format(buildstat_file))
        return bs_task
|
||||
|
||||
|
||||
class BSTaskAggregate(object):
    """Class representing multiple runs of the same task"""
    properties = ('cputime', 'walltime', 'read_bytes', 'write_bytes',
                  'read_ops', 'write_ops')

    def __init__(self, tasks=None):
        self._tasks = tasks or []
        # Lazily-computed cache of mean values, keyed by property name
        self._properties = {}

    def __getattr__(self, name):
        if name not in self.properties:
            raise AttributeError("'BSTaskAggregate' has no attribute '{}'".format(name))
        # Calculate properties on demand only. We only provide mean
        # value, so far
        if name not in self._properties:
            samples = [getattr(task, name) for task in self._tasks]
            self._properties[name] = mean(samples)
        return self._properties[name]

    def append(self, task):
        """Append new task"""
        assert isinstance(task, BSTask), "Type is '{}' instead of 'BSTask'".format(type(task))
        # Reset pre-calculated properties
        self._properties = {}
        self._tasks.append(task)
|
||||
|
||||
|
||||
class BSRecipe(object):
    """Class representing buildstats of one recipe"""
    def __init__(self, name, epoch, version, revision):
        self.name = name
        self.epoch = epoch
        self.version = version
        self.revision = revision
        # evr string: "version-revision", prefixed by "epoch_" when set
        self.evr = ("{}-{}".format(version, revision) if epoch is None
                    else "{}_{}-{}".format(epoch, version, revision))
        self.tasks = {}

    def aggregate(self, bsrecipe):
        """Aggregate data of another recipe buildstats"""
        if self.nevr != bsrecipe.nevr:
            raise ValueError("Refusing to aggregate buildstats, recipe version "
                             "differs: {} vs. {}".format(self.nevr, bsrecipe.nevr))
        if set(self.tasks.keys()) != set(bsrecipe.tasks.keys()):
            raise ValueError("Refusing to aggregate buildstats, set of tasks "
                             "in {} differ".format(self.name))

        # Promote each task to an aggregate container, then add the new data
        for taskname, taskdata in bsrecipe.tasks.items():
            current = self.tasks[taskname]
            if not isinstance(current, BSTaskAggregate):
                current = BSTaskAggregate([current])
                self.tasks[taskname] = current
            current.append(taskdata)

    @property
    def nevr(self):
        return self.name + '-' + self.evr
|
||||
|
||||
|
||||
class BuildStats(dict):
    """Class representing buildstats of one build.

    Maps recipe name -> BSRecipe.
    """

    @property
    def num_tasks(self):
        """Get number of tasks"""
        num = 0
        for recipe in self.values():
            num += len(recipe.tasks)
        return num

    @classmethod
    def from_json(cls, bs_json):
        """Create new BuildStats object from JSON object"""
        buildstats = cls()
        for recipe in bs_json:
            # Only one version per recipe name is supported
            if recipe['name'] in buildstats:
                raise BSError("Cannot handle multiple versions of the same "
                              "package ({})".format(recipe['name']))
            bsrecipe = BSRecipe(recipe['name'], recipe['epoch'],
                                recipe['version'], recipe['revision'])
            for task, data in recipe['tasks'].items():
                bsrecipe.tasks[task] = BSTask(data)

            buildstats[recipe['name']] = bsrecipe

        return buildstats

    @staticmethod
    def from_file_json(path):
        """Load buildstats from a JSON file"""
        with open(path) as fobj:
            bs_json = json.load(fobj)
        return BuildStats.from_json(bs_json)


    @staticmethod
    def split_nevr(nevr):
        """Split name and version information from recipe "nevr" string"""
        n_e_v, revision = nevr.rsplit('-', 1)
        # First try to anchor the version on a leading digit
        match = re.match(r'^(?P<name>\S+)-((?P<epoch>[0-9]{1,5})_)?(?P<version>[0-9]\S*)$',
                         n_e_v)
        if not match:
            # If we're not able to parse a version starting with a number, just
            # take the part after last dash
            match = re.match(r'^(?P<name>\S+)-((?P<epoch>[0-9]{1,5})_)?(?P<version>[^-]+)$',
                             n_e_v)
        name = match.group('name')
        version = match.group('version')
        epoch = match.group('epoch')
        return name, epoch, version, revision

    @staticmethod
    def parse_top_build_stats(path):
        """
        Parse the top-level build_stats file for build-wide start and duration.
        """
        start = elapsed = 0
        with open(path) as fobj:
            for line in fobj.readlines():
                key, val = line.split(':', 1)
                val = val.strip()
                if key == 'Build Started':
                    start = float(val)
                elif key == "Elapsed time":
                    # Value is of the form "<seconds> ..."; keep the number
                    elapsed = float(val.split()[0])
        return start, elapsed

    @classmethod
    def from_dir(cls, path):
        """Load buildstats from a buildstats directory"""
        top_stats = os.path.join(path, 'build_stats')
        if not os.path.isfile(top_stats):
            raise BSError("{} does not look like a buildstats directory".format(path))

        log.debug("Reading buildstats directory %s", path)
        buildstats = cls()
        # Build-wide end time is used as a fallback for unfinished tasks
        build_started, build_elapsed = buildstats.parse_top_build_stats(top_stats)
        build_end = build_started + build_elapsed

        subdirs = os.listdir(path)
        for dirname in subdirs:
            recipe_dir = os.path.join(path, dirname)
            # Skip non-recipe entries (e.g. the reduced_proc_pressure dir)
            if dirname == "reduced_proc_pressure" or not os.path.isdir(recipe_dir):
                continue
            name, epoch, version, revision = cls.split_nevr(dirname)
            bsrecipe = BSRecipe(name, epoch, version, revision)
            for task in os.listdir(recipe_dir):
                bsrecipe.tasks[task] = BSTask.from_file(
                    os.path.join(recipe_dir, task), build_end)
            if name in buildstats:
                raise BSError("Cannot handle multiple versions of the same "
                              "package ({})".format(name))
            buildstats[name] = bsrecipe

        return buildstats

    def aggregate(self, buildstats):
        """Aggregate other buildstats into this"""
        if set(self.keys()) != set(buildstats.keys()):
            raise ValueError("Refusing to aggregate buildstats, set of "
                             "recipes is different: %s" % (set(self.keys()) ^ set(buildstats.keys())))
        for pkg, data in buildstats.items():
            self[pkg].aggregate(data)
|
||||
|
||||
def aggregate(self, buildstats):
|
||||
"""Aggregate other buildstats into this"""
|
||||
if set(self.keys()) != set(buildstats.keys()):
|
||||
raise ValueError("Refusing to aggregate buildstats, set of "
|
||||
"recipes is different: %s" % (set(self.keys()) ^ set(buildstats.keys())))
|
||||
for pkg, data in buildstats.items():
|
||||
self[pkg].aggregate(data)
|
||||
|
||||
|
||||
def diff_buildstats(bs1, bs2, stat_attr, min_val=None, min_absdiff=None, only_tasks=None):
    """Compare the tasks of two buildstats.

    Args:
        bs1, bs2: BuildStats objects (mappings of pkg name -> BSRecipe)
        stat_attr: per-task attribute to compare (e.g. 'cputime')
        min_val: skip tasks whose bigger value is below this threshold
        min_absdiff: skip tasks whose absolute difference is below this
        only_tasks: optional list of task names to restrict the diff to

    Returns:
        list of TaskDiff tuples, one per (pkg, task) pair.
    """
    # BUGFIX: default for only_tasks changed from a shared mutable []
    # to None (identical behavior for callers, both are falsy)
    tasks_diff = []
    pkgs = set(bs1.keys()).union(set(bs2.keys()))
    for pkg in pkgs:
        tasks1 = bs1[pkg].tasks if pkg in bs1 else {}
        tasks2 = bs2[pkg].tasks if pkg in bs2 else {}
        if only_tasks:
            tasks1 = {k: v for k, v in tasks1.items() if k in only_tasks}
            tasks2 = {k: v for k, v in tasks2.items() if k in only_tasks}

        # Package-level added/removed marker
        if not tasks1:
            pkg_op = '+'
        elif not tasks2:
            pkg_op = '-'
        else:
            pkg_op = ' '

        for task in set(tasks1.keys()).union(set(tasks2.keys())):
            task_op = ' '
            if task in tasks1:
                val1 = getattr(bs1[pkg].tasks[task], stat_attr)
            else:
                task_op = '+'
                val1 = 0
            if task in tasks2:
                val2 = getattr(bs2[pkg].tasks[task], stat_attr)
            else:
                val2 = 0
                task_op = '-'

            # Relative difference in percent (infinite when starting from 0)
            if val1 == 0:
                reldiff = float('inf')
            else:
                reldiff = 100 * (val2 - val1) / val1

            if min_val and max(val1, val2) < min_val:
                log.debug("Filtering out %s:%s (%s)", pkg, task,
                          max(val1, val2))
                continue
            if min_absdiff and abs(val2 - val1) < min_absdiff:
                log.debug("Filtering out %s:%s (difference of %s)", pkg, task,
                          val2-val1)
                continue
            tasks_diff.append(TaskDiff(pkg, pkg_op, task, task_op, val1, val2,
                                       val2-val1, reldiff))
    return tasks_diff
||||
|
||||
|
||||
class BSVerDiff(object):
    """Class representing recipe version differences between two buildstats"""
    def __init__(self, bs1, bs2):
        RecipeVerDiff = namedtuple('RecipeVerDiff', 'left right')

        recipes1 = set(bs1.keys())
        recipes2 = set(bs2.keys())

        # Recipes present in only one of the two builds
        self.new = {r: bs2[r] for r in sorted(recipes2 - recipes1)}
        self.dropped = {r: bs1[r] for r in sorted(recipes1 - recipes2)}
        # Common recipes, bucketed by what changed
        self.echanged = {}
        self.vchanged = {}
        self.rchanged = {}
        self.unchanged = {}
        self.empty_diff = False

        for recipe in recipes2.intersection(recipes1):
            pair = RecipeVerDiff(bs1[recipe], bs2[recipe])
            if pair.left.epoch != pair.right.epoch:
                self.echanged[recipe] = pair
            elif pair.left.version != pair.right.version:
                self.vchanged[recipe] = pair
            elif pair.left.revision != pair.right.revision:
                self.rchanged[recipe] = pair
            else:
                self.unchanged[recipe] = pair

        # Identical recipe sets with every version unchanged -> no diff
        if len(recipes1) == len(recipes2) == len(self.unchanged):
            self.empty_diff = True

    def __bool__(self):
        return not self.empty_diff
|
||||
@@ -0,0 +1,454 @@
|
||||
# Yocto Project layer check tool
|
||||
#
|
||||
# Copyright (C) 2017 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
from enum import Enum
|
||||
|
||||
import bb.tinfoil
|
||||
|
||||
class LayerType(Enum):
    """Classification of a layer as detected from its conf/ contents"""
    BSP = 0
    DISTRO = 1
    SOFTWARE = 2
    CORE = 3
    # Pseudo-types used to report detection errors
    ERROR_NO_LAYER_CONF = 98
    ERROR_BSP_DISTRO = 99
||||
|
||||
def _get_configurations(path):
    """Return the names of all .conf files in path, with the suffix stripped"""
    return [entry[:-5] for entry in os.listdir(path)
            if entry.endswith('.conf') and os.path.isfile(os.path.join(path, entry))]
|
||||
|
||||
def _get_layer_collections(layer_path, lconf=None, data=None):
    """Parse conf/layer.conf and return the collections the layer defines.

    Args:
        layer_path: path to the layer
        lconf: optional path to the layer.conf file (defaults to
               conf/layer.conf inside the layer)
        data: optional base datastore to copy; a fresh one is created
              when not given

    Returns:
        dict mapping collection name to a dict with its priority,
        pattern, depends and compat settings.
    """
    import bb.parse
    import bb.data

    if lconf is None:
        lconf = os.path.join(layer_path, 'conf', 'layer.conf')

    if data is None:
        ldata = bb.data.init()
        bb.parse.init_parser(ldata)
    else:
        ldata = data.createCopy()

    ldata.setVar('LAYERDIR', layer_path)
    try:
        ldata = bb.parse.handle(lconf, ldata, include=True, baseconfig=True)
    except Exception:
        # BUGFIX: was a bare 'except:', which would also swallow
        # SystemExit and KeyboardInterrupt
        raise RuntimeError("Parsing of layer.conf from layer: %s failed" % layer_path)
    ldata.expandVarref('LAYERDIR')

    collections = (ldata.getVar('BBFILE_COLLECTIONS') or '').split()
    if not collections:
        # Fall back to the directory name when the layer declares no
        # collection of its own
        name = os.path.basename(layer_path)
        collections = [name]

    collections = {c: {} for c in collections}
    for name in collections:
        priority = ldata.getVar('BBFILE_PRIORITY_%s' % name)
        pattern = ldata.getVar('BBFILE_PATTERN_%s' % name)
        depends = ldata.getVar('LAYERDEPENDS_%s' % name)
        compat = ldata.getVar('LAYERSERIES_COMPAT_%s' % name)
        try:
            depDict = bb.utils.explode_dep_versions2(depends or "")
        except bb.utils.VersionStringException as vse:
            bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (name, str(vse)))

        collections[name]['priority'] = priority
        collections[name]['pattern'] = pattern
        # Version constraints are dropped; only the dependency names are kept
        collections[name]['depends'] = ' '.join(depDict.keys())
        collections[name]['compat'] = compat

    return collections
|
||||
|
||||
def _detect_layer(layer_path):
    """
    Scans layer directory to detect what type of layer
    is BSP, Distro or Software.

    Returns a dictionary with layer name, type and path.
    """

    layer = {}
    layer_name = os.path.basename(layer_path)

    layer['name'] = layer_name
    layer['path'] = layer_path
    layer['conf'] = {}

    if not os.path.isfile(os.path.join(layer_path, 'conf', 'layer.conf')):
        layer['type'] = LayerType.ERROR_NO_LAYER_CONF
        return layer

    machine_conf = os.path.join(layer_path, 'conf', 'machine')
    distro_conf = os.path.join(layer_path, 'conf', 'distro')

    is_bsp = False
    is_distro = False
    # BUGFIX: initialize to empty lists so the CORE branch below cannot
    # hit a NameError when conf/machine or conf/distro does not exist
    machines = []
    distros = []

    if os.path.isdir(machine_conf):
        machines = _get_configurations(machine_conf)
        if machines:
            is_bsp = True

    if os.path.isdir(distro_conf):
        distros = _get_configurations(distro_conf)
        if distros:
            is_distro = True

    layer['collections'] = _get_layer_collections(layer['path'])

    if layer_name == "meta" and "core" in layer['collections']:
        layer['type'] = LayerType.CORE
        layer['conf']['machines'] = machines
        layer['conf']['distros'] = distros
    elif is_bsp and is_distro:
        # A single layer must not provide both machines and distros
        layer['type'] = LayerType.ERROR_BSP_DISTRO
    elif is_bsp:
        layer['type'] = LayerType.BSP
        layer['conf']['machines'] = machines
    elif is_distro:
        layer['type'] = LayerType.DISTRO
        layer['conf']['distros'] = distros
    else:
        layer['type'] = LayerType.SOFTWARE

    return layer
|
||||
|
||||
def detect_layers(layer_directories, no_auto):
    """Detect layers under the given directories.

    Args:
        layer_directories: iterable of directories to scan
        no_auto: when True only the given directories themselves are
                 considered; when False they are walked recursively

    Returns:
        list of layer dictionaries as produced by _detect_layer().
    """
    layers = []

    for directory in layer_directories:
        directory = os.path.realpath(directory)
        if directory[-1] == '/':
            directory = directory[0:-1]

        if no_auto:
            conf_dir = os.path.join(directory, 'conf')
            if os.path.isdir(conf_dir):
                layer = _detect_layer(directory)
                if layer:
                    layers.append(layer)
        else:
            # Walk the tree; every directory containing a conf/ subdir is
            # a candidate layer. (Unused 'dir_name' local removed.)
            for root, _dirs, _files in os.walk(directory):
                conf_dir = os.path.join(root, 'conf')
                if os.path.isdir(conf_dir):
                    layer = _detect_layer(root)
                    if layer:
                        layers.append(layer)

    return layers
|
||||
|
||||
def _find_layer(depend, layers):
    """Return the layer providing collection 'depend', or None if none does"""
    for candidate in layers:
        if 'collections' in candidate and depend in candidate['collections']:
            return candidate
    return None
|
||||
|
||||
def sanity_check_layers(layers, logger):
    """
    Check that we didn't find duplicate collection names, as the layer that will
    be used is non-deterministic. The precise check is duplicate collections
    with different patterns, as the same pattern being repeated won't cause
    problems.
    """
    import collections

    # Gather all patterns seen per collection name
    patterns_by_name = collections.defaultdict(set)
    for layer in layers:
        for name, data in layer.get("collections", {}).items():
            patterns_by_name[name].add(data["pattern"])

    passed = True
    for name, patterns in patterns_by_name.items():
        if len(patterns) > 1:
            passed = False
            logger.error("Collection %s found multiple times: %s" % (name, ", ".join(patterns)))
    return passed
|
||||
|
||||
def get_layer_dependencies(layer, layers, logger):
    """
    Resolve the transitive layer dependencies of *layer* within *layers*.

    Returns a list of layer dicts ([] when the layer has no dependencies),
    or None when at least one dependency could not be found. Callers must
    distinguish [] (fine) from None (error).
    """
    # NOTE: 'ret' is deliberately a required parameter. The original code
    # used a mutable default (ret=[]), which is shared across calls; and
    # None is already a meaningful value for ret (error marker), so it
    # cannot serve as the usual sentinel either.
    def recurse_dependencies(depends, layer, layers, logger, ret):
        logger.debug('Processing dependencies %s for layer %s.' % \
                     (depends, layer['name']))

        for depend in depends.split():
            # core (oe-core) is suppose to be provided
            if depend == 'core':
                continue

            layer_depend = _find_layer(depend, layers)
            if not layer_depend:
                logger.error('Layer %s depends on %s and isn\'t found.' % \
                             (layer['name'], depend))
                ret = None
                continue

            # We keep processing, even if ret is None, this allows us to report
            # multiple errors at once
            if ret is not None and layer_depend not in ret:
                ret.append(layer_depend)
            else:
                # we might have processed this dependency already, in which case
                # we should not do it again (avoid recursive loop)
                continue

            # Recursively process...
            if 'collections' not in layer_depend:
                continue

            for collection in layer_depend['collections']:
                collect_deps = layer_depend['collections'][collection]['depends']
                if not collect_deps:
                    continue
                ret = recurse_dependencies(collect_deps, layer_depend, layers, logger, ret)

        return ret

    layer_depends = []
    for collection in layer['collections']:
        depends = layer['collections'][collection]['depends']
        if not depends:
            continue

        layer_depends = recurse_dependencies(depends, layer, layers, logger, layer_depends)

    # Note: [] (empty) is allowed, None is not!
    return layer_depends
|
||||
|
||||
def add_layer_dependencies(bblayersconf, layer, layers, logger):
    """
    Resolve the dependencies of *layer* and append them to *bblayersconf*.
    Returns False when a dependency could not be resolved, True otherwise.
    """
    depends = get_layer_dependencies(layer, layers, logger)
    if depends is None:
        # get_layer_dependencies() already logged which dependency is missing.
        return False
    add_layers(bblayersconf, depends, logger)
    return True
|
||||
|
||||
def add_layers(bblayersconf, layers, logger):
    """
    Append the path of every layer in *layers* to BBLAYERS in *bblayersconf*,
    skipping paths bitbake already knows about. Always returns True.
    """
    # Don't add a layer that is already present: seed the 'added' set with
    # the paths reported by "bitbake-layers show-layers" (lines of the form
    # "<name> <path> <priority>").
    output = check_command('Getting existing layers failed.', 'bitbake-layers show-layers').decode('utf-8')
    added = set()
    for _name, path, _pri in re.findall(r'^(\S+) +([^\n]*?) +(\d+)$', output, re.MULTILINE):
        added.add(path)

    with open(bblayersconf, 'a+') as f:
        for layer in layers:
            name = layer['name']
            path = layer['path']
            logger.info('Adding layer %s' % name)
            if path in added:
                logger.info('%s is already in %s' % (name, bblayersconf))
                continue
            added.add(path)
            f.write("\nBBLAYERS += \"%s\"\n" % path)
    return True
|
||||
|
||||
def check_bblayers(bblayersconf, layer_path, logger):
    '''
    Return True if layer_path is already listed in BBLAYERS inside
    bblayersconf, False otherwise.
    '''
    import bb.parse
    import bb.data

    # Parse the conf file with bitbake itself so BBLAYERS is evaluated the
    # same way a real build would evaluate it (includes, appends, etc.).
    ldata = bb.parse.handle(bblayersconf, bb.data.init(), include=True)
    # BBLAYERS may be unset; fall back to '' so split() yields no entries.
    for bblayer in (ldata.getVar('BBLAYERS') or '').split():
        # Compare normalized paths so trailing slashes / '..' don't cause
        # false negatives.
        if os.path.normpath(bblayer) == os.path.normpath(layer_path):
            return True

    return False
|
||||
|
||||
def check_command(error_msg, cmd, cwd=None):
    '''
    Run a command under a shell, capture stdout and stderr in a single stream,
    throw an error when command returns non-zero exit code. Returns the output.
    '''
    # subprocess.run() with stderr folded into stdout gives us the same
    # single combined stream the original Popen/communicate pair produced.
    result = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, cwd=cwd)
    if result.returncode:
        raise RuntimeError("%s\nCommand: %s\nOutput:\n%s"
                           % (error_msg, cmd, result.stdout.decode('utf-8')))
    return result.stdout
|
||||
|
||||
def get_signatures(builddir, failsafe=False, machine=None, extravars=None):
    """
    Run 'bitbake -S none world' in *builddir* and parse the resulting
    locked-sigs.inc file.

    Returns (sigs, tune2tasks) where sigs maps '<recipe>:<task>' to its
    signature hash and tune2tasks maps a tune name to the list of tasks
    recorded under that tune.

    failsafe -- pass -k to bitbake and tolerate a failing world build as
                long as a signatures file was still produced.
    machine -- optional MACHINE value to prepend to the command.
    extravars -- optional extra 'VAR=value' assignments to prepend.
    Raises RuntimeError when signature generation fails (and failsafe does
    not apply) or when no signatures could be parsed.
    """
    import re

    # some recipes needs to be excluded like meta-world-pkgdata
    # because a layer can add recipes to a world build so signature
    # will be change
    exclude_recipes = ('meta-world-pkgdata',)

    sigs = {}
    tune2tasks = {}

    cmd = 'BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS BB_SIGNATURE_HANDLER" BB_SIGNATURE_HANDLER="OEBasicHash" '
    if extravars:
        cmd += extravars
        cmd += ' '
    if machine:
        cmd += 'MACHINE=%s ' % machine
    cmd += 'bitbake '
    if failsafe:
        cmd += '-k '
    cmd += '-S none world'
    sigs_file = os.path.join(builddir, 'locked-sigs.inc')
    # Remove any stale file so a failed run can't be mistaken for a fresh one.
    if os.path.exists(sigs_file):
        os.unlink(sigs_file)
    try:
        check_command('Generating signatures failed. This might be due to some parse error and/or general layer incompatibilities.',
                      cmd, builddir)
    except RuntimeError:
        if failsafe and os.path.exists(sigs_file):
            # Ignore the error here. Most likely some recipes active
            # in a world build lack some dependencies. There is a
            # separate test_machine_world_build which exposes the
            # failure.
            pass
        else:
            raise

    # Raw strings: \s and \S inside a plain string literal are invalid
    # escape sequences and warn on modern Python.
    sig_regex = re.compile(r"^(?P<task>.*:.*):(?P<hash>.*) .$")
    tune_regex = re.compile(r"(^|\s)SIGGEN_LOCKEDSIGS_t-(?P<tune>\S*)\s*=\s*")
    current_tune = None
    with open(sigs_file, 'r') as f:
        for line in f.readlines():
            line = line.strip()
            # Track which tune section of the file we are currently inside.
            t = tune_regex.search(line)
            if t:
                current_tune = t.group('tune')
            s = sig_regex.match(line)
            if s:
                exclude = False
                for er in exclude_recipes:
                    (recipe, task) = s.group('task').split(':')
                    if er == recipe:
                        exclude = True
                        break
                if exclude:
                    continue

                sigs[s.group('task')] = s.group('hash')
                tune2tasks.setdefault(current_tune, []).append(s.group('task'))

    if not sigs:
        raise RuntimeError('Can\'t load signatures from %s' % sigs_file)

    return (sigs, tune2tasks)
|
||||
|
||||
def get_depgraph(targets=None, failsafe=False):
    '''
    Returns the dependency graph for the given target(s), defaulting to
    ['world']. The dependency graph is taken directly from DepTreeEvent.

    failsafe -- when True, ignore NoProvider events without reasons (the
                remaining dependency information still arrives later).
    Raises RuntimeError when the event run fails or no depgraph arrives.
    '''
    # Avoid the mutable default argument targets=['world']; None is the
    # sentinel and the historical default is applied here.
    if targets is None:
        targets = ['world']
    depgraph = None
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)
        tinfoil.set_event_mask(['bb.event.NoProvider', 'bb.event.DepTreeGenerated', 'bb.command.CommandCompleted'])
        if not tinfoil.run_command('generateDepTreeEvent', targets, 'do_build'):
            raise RuntimeError('starting generateDepTreeEvent failed')
        while True:
            event = tinfoil.wait_event(timeout=1000)
            if event:
                if isinstance(event, bb.command.CommandFailed):
                    raise RuntimeError('Generating dependency information failed: %s' % event.error)
                elif isinstance(event, bb.command.CommandCompleted):
                    break
                elif isinstance(event, bb.event.NoProvider):
                    if failsafe:
                        # The event is informational, we will get information about the
                        # remaining dependencies eventually and thus can ignore this
                        # here like we do in get_signatures(), if desired.
                        continue
                    if event._reasons:
                        raise RuntimeError('Nothing provides %s: %s' % (event._item, event._reasons))
                    else:
                        raise RuntimeError('Nothing provides %s.' % (event._item))
                elif isinstance(event, bb.event.DepTreeGenerated):
                    depgraph = event._depgraph

    if depgraph is None:
        raise RuntimeError('Could not retrieve the depgraph.')
    return depgraph
|
||||
|
||||
def compare_signatures(old_sigs, curr_sigs):
    '''
    Compares the result of two get_signatures() calls. Returns None if no
    problems found, otherwise a string that can be used as additional
    explanation in self.fail().
    '''
    # task -> (old signature, new signature)
    sig_diff = {}
    for task in old_sigs:
        # Only tasks present in both runs can meaningfully differ.
        if task in curr_sigs and \
           old_sigs[task] != curr_sigs[task]:
            sig_diff[task] = (old_sigs[task], curr_sigs[task])

    if not sig_diff:
        return None

    # Beware, depgraph uses task=<pn>.<taskname> whereas get_signatures()
    # uses <pn>:<taskname>. Need to convert sometimes. The output follows
    # the convention from get_signatures() because that seems closer to
    # normal bitbake output.
    def sig2graph(task):
        # '<pn>:<taskname>' -> '<pn>.<taskname>'
        pn, taskname = task.rsplit(':', 1)
        return pn + '.' + taskname
    def graph2sig(task):
        # '<pn>.<taskname>' -> '<pn>:<taskname>'
        pn, taskname = task.rsplit('.', 1)
        return pn + ':' + taskname
    depgraph = get_depgraph(failsafe=True)
    depends = depgraph['tdepends']

    # If a task A has a changed signature, but none of its
    # dependencies, then we need to report it because it is
    # the one which introduces a change. Any task depending on
    # A (directly or indirectly) will also have a changed
    # signature, but we don't need to report it. It might have
    # its own changes, which will become apparent once the
    # issues that we do report are fixed and the test gets run
    # again.
    sig_diff_filtered = []
    for task, (old_sig, new_sig) in sig_diff.items():
        deps_tainted = False
        for dep in depends.get(sig2graph(task), ()):
            if graph2sig(dep) in sig_diff:
                deps_tainted = True
                break
        if not deps_tainted:
            sig_diff_filtered.append((task, old_sig, new_sig))

    msg = []
    msg.append('%d signatures changed, initial differences (first hash before, second after):' %
               len(sig_diff))
    for diff in sorted(sig_diff_filtered):
        # diff is (task, old_hash, new_hash); the task is '<recipe>:<taskname>'.
        recipe, taskname = diff[0].rsplit(':', 1)
        cmd = 'bitbake-diffsigs --task %s %s --signature %s %s' % \
              (recipe, taskname, diff[1], diff[2])
        msg.append(' %s: %s -> %s' % diff)
        msg.append(' %s' % cmd)
        # Run bitbake-diffsigs to include a human-readable explanation of the
        # difference; a failure of that command is reported but not fatal.
        try:
            output = check_command('Determining signature difference failed.',
                                   cmd).decode('utf-8')
        except RuntimeError as error:
            output = str(error)
        if output:
            msg.extend([' ' + line for line in output.splitlines()])
            msg.append('')
    return '\n'.join(msg)
|
||||
@@ -0,0 +1,9 @@
|
||||
# Copyright (C) 2017 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.core.case import OETestCase
|
||||
|
||||
class OECheckLayerTestCase(OETestCase):
    """Common base class for yocto-check-layer test cases.

    Adds no behavior of its own; it exists so the check-layer cases share a
    dedicated ancestor within the OETestCase hierarchy.
    """
    pass
|
||||
@@ -0,0 +1,206 @@
|
||||
# Copyright (C) 2017 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import unittest
|
||||
|
||||
from checklayer import LayerType, get_signatures, check_command, get_depgraph
|
||||
from checklayer.case import OECheckLayerTestCase
|
||||
|
||||
class BSPCheckLayer(OECheckLayerTestCase):
    """Checks that apply to BSP (and core) layers."""

    @classmethod
    def setUpClass(self):
        # Skip the whole case class unless the layer under test is a BSP
        # (or the core) layer.
        if self.tc.layer['type'] not in (LayerType.BSP, LayerType.CORE):
            raise unittest.SkipTest("BSPCheckLayer: Layer %s isn't BSP one." %\
                self.tc.layer['name'])

    def test_bsp_defines_machines(self):
        """A BSP layer must provide at least one conf/machine/*.conf."""
        self.assertTrue(self.tc.layer['conf']['machines'],
                "Layer is BSP but doesn't defines machines.")

    def test_bsp_no_set_machine(self):
        """Merely adding the layer must not change the active MACHINE."""
        from oeqa.utils.commands import get_bb_var

        # Compare MACHINE as seen before adding the layer (td['bbvars'])
        # with the value bitbake reports now.
        machine = get_bb_var('MACHINE')
        self.assertEqual(self.td['bbvars']['MACHINE'], machine,
                msg="Layer %s modified machine %s -> %s" % \
                    (self.tc.layer['name'], self.td['bbvars']['MACHINE'], machine))

    def test_machine_world(self):
        '''
        "bitbake world" is expected to work regardless which machine is selected.
        BSP layers sometimes break that by enabling a recipe for a certain machine
        without checking whether that recipe actually can be built in the current
        distro configuration (for example, OpenGL might not enabled).

        This test iterates over all machines. It would be nicer to instantiate
        it once per machine. It merely checks for errors during parse
        time. It does not actually attempt to build anything.
        '''
        if not self.td['machines']:
            self.skipTest('No machines set with --machines.')
        msg = []
        for machine in self.td['machines']:
            # In contrast to test_machine_signatures() below, errors are fatal here.
            try:
                get_signatures(self.td['builddir'], failsafe=False, machine=machine)
            except RuntimeError as ex:
                msg.append(str(ex))
        if msg:
            msg.insert(0, 'The following machines broke a world build:')
            self.fail('\n'.join(msg))

    def test_machine_signatures(self):
        '''
        Selecting a machine may only affect the signature of tasks that are specific
        to that machine. In other words, when MACHINE=A and MACHINE=B share a recipe
        foo and the output of foo, then both machine configurations must build foo
        in exactly the same way. Otherwise it is not possible to use both machines
        in the same distribution.

        This criteria can only be tested by testing different machines in combination,
        i.e. one main layer, potentially several additional BSP layers and an explicit
        choice of machines:
        yocto-check-layer --additional-layers .../meta-intel --machines intel-corei7-64 imx6slevk -- .../meta-freescale
        '''
        if not self.td['machines']:
            self.skipTest('No machines set with --machines.')

        # Collect signatures for all machines that we are testing
        # and merge that into a hash:
        # tune -> task -> signature -> list of machines with that combination
        #
        # It is an error if any tune/task pair has more than one signature,
        # because that implies that the machines that caused those different
        # signatures do not agree on how to execute the task.
        tunes = {}
        # Preserve ordering of machines as chosen by the user.
        for machine in self.td['machines']:
            curr_sigs, tune2tasks = get_signatures(self.td['builddir'], failsafe=True, machine=machine)
            # Invert the tune -> [tasks] mapping.
            tasks2tune = {}
            for tune, tasks in tune2tasks.items():
                for task in tasks:
                    tasks2tune[task] = tune
            for task, sighash in curr_sigs.items():
                tunes.setdefault(tasks2tune[task], {}).setdefault(task, {}).setdefault(sighash, []).append(machine)

        msg = []
        pruned = 0
        last_line_key = None
        # do_fetch, do_unpack, ..., do_build
        taskname_list = []
        if tunes:
            # The output below is most useful when we start with tasks that are at
            # the bottom of the dependency chain, i.e. those that run first. If
            # those tasks differ, the rest also does.
            #
            # To get an ordering of tasks, we do a topological sort of the entire
            # depgraph for the base configuration, then on-the-fly flatten that list by stripping
            # out the recipe names and removing duplicates. The base configuration
            # is not necessarily representative, but should be close enough. Tasks
            # that were not encountered get a default priority.
            depgraph = get_depgraph()
            depends = depgraph['tdepends']
            # Depth-first search colors for the topological sort.
            WHITE = 1
            GRAY = 2
            BLACK = 3
            color = {}
            found = set()
            def visit(task):
                # Standard DFS: mark GRAY while descending, BLACK when done;
                # record each task *name* the first time it finishes.
                color[task] = GRAY
                for dep in depends.get(task, ()):
                    if color.setdefault(dep, WHITE) == WHITE:
                        visit(dep)
                color[task] = BLACK
                pn, taskname = task.rsplit('.', 1)
                if taskname not in found:
                    taskname_list.append(taskname)
                    found.add(taskname)
            for task in depends.keys():
                if color.setdefault(task, WHITE) == WHITE:
                    visit(task)

            # Map taskname -> position in topological order; unknown tasknames
            # sort last via the .get() default below.
            taskname_order = dict([(task, index) for index, task in enumerate(taskname_list) ])
            def task_key(task):
                pn, taskname = task.rsplit(':', 1)
                return (pn, taskname_order.get(taskname, len(taskname_list)), taskname)

            for tune in sorted(tunes.keys()):
                tasks = tunes[tune]
                # As for test_signatures it would be nicer to sort tasks
                # by dependencies here, but that is harder because we have
                # to report on tasks from different machines, which might
                # have different dependencies. We resort to pruning the
                # output by reporting only one task per recipe if the set
                # of machines matches.
                #
                # "bitbake-diffsigs -t -s" is intelligent enough to print
                # diffs recursively, so often it does not matter that much
                # if we don't pick the underlying difference
                # here. However, sometimes recursion fails
                # (https://bugzilla.yoctoproject.org/show_bug.cgi?id=6428).
                #
                # To mitigate that a bit, we use a hard-coded ordering of
                # tasks that represents how they normally run and prefer
                # to print the ones that run first.
                for task in sorted(tasks.keys(), key=task_key):
                    signatures = tasks[task]
                    # do_build can be ignored: it is know to have
                    # different signatures in some cases, for example in
                    # the allarch ca-certificates due to RDEPENDS=openssl.
                    # That particular dependency is marked via
                    # SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS, but still shows up
                    # in the sstate signature hash because filtering it
                    # out would be hard and running do_build multiple
                    # times doesn't really matter.
                    if len(signatures.keys()) > 1 and \
                       not task.endswith(':do_build'):
                        # Error!
                        #
                        # Sort signatures by machines, because the hex values don't mean anything.
                        # => all-arch adwaita-icon-theme:do_build: 1234... (beaglebone, qemux86) != abcdf... (qemux86-64)
                        #
                        # Skip the line if it is covered already by the predecessor (same pn, same sets of machines).
                        pn, taskname = task.rsplit(':', 1)
                        next_line_key = (pn, sorted(signatures.values()))
                        if next_line_key != last_line_key:
                            line = ' %s %s: ' % (tune, task)
                            line += ' != '.join(['%s (%s)' % (signature, ', '.join([m for m in signatures[signature]])) for
                                                 signature in sorted(signatures.keys(), key=lambda s: signatures[s])])
                            last_line_key = next_line_key
                            msg.append(line)
                            # Randomly pick two mismatched signatures and remember how to invoke
                            # bitbake-diffsigs for them.
                            iterator = iter(signatures.items())
                            a = next(iterator)
                            b = next(iterator)
                            diffsig_machines = '(%s) != (%s)' % (', '.join(a[1]), ', '.join(b[1]))
                            diffsig_params = '-t %s %s -s %s %s' % (pn, taskname, a[0], b[0])
                        else:
                            pruned += 1

        if msg:
            msg.insert(0, 'The machines have conflicting signatures for some shared tasks:')
            if pruned > 0:
                msg.append('')
                msg.append('%d tasks where not listed because some other task of the recipe already differed.' % pruned)
                msg.append('It is likely that differences from different recipes also have the same root cause.')
            msg.append('')
            # Explain how to investigate...
            msg.append('To investigate, run bitbake-diffsigs -t recipename taskname -s fromsig tosig.')
            cmd = 'bitbake-diffsigs %s' % diffsig_params
            msg.append('Example: %s in the last line' % diffsig_machines)
            msg.append('Command: %s' % cmd)
            # ... and actually do it automatically for that example, but without aborting
            # when that fails.
            try:
                output = check_command('Comparing signatures failed.', cmd).decode('utf-8')
            except RuntimeError as ex:
                output = str(ex)
            msg.extend([' ' + line for line in output.splitlines()])
            self.fail('\n'.join(msg))
|
||||
@@ -0,0 +1,104 @@
|
||||
# Copyright (C) 2017 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import glob
|
||||
import os
|
||||
import unittest
|
||||
import re
|
||||
from checklayer import get_signatures, LayerType, check_command, get_depgraph, compare_signatures
|
||||
from checklayer.case import OECheckLayerTestCase
|
||||
|
||||
class CommonCheckLayer(OECheckLayerTestCase):
    """Checks that apply to every layer type."""

    def test_readme(self):
        """The layer must ship a non-empty README naming a maintainer and
        how to submit patches (unless it defers to another README)."""
        if self.tc.layer['type'] == LayerType.CORE:
            raise unittest.SkipTest("Core layer's README is top level")

        # The top-level README file may have a suffix (like README.rst or README.txt).
        readme_files = glob.glob(os.path.join(self.tc.layer['path'], '[Rr][Ee][Aa][Dd][Mm][Ee]*'))
        self.assertTrue(readme_files,
                        msg="Layer doesn't contain a README file.")

        # There might be more than one file matching the file pattern above
        # (for example, README.rst and README-COPYING.rst). The one with the
        # shortest name is considered the "main" one. A plain alphabetical
        # sort would pick README-COPYING.rst over README.rst ('-' < '.'), so
        # sort by length first, with the name as a deterministic tie-breaker.
        readme_file = sorted(readme_files, key=lambda f: (len(f), f))[0]
        data = ''
        with open(readme_file, 'r') as f:
            data = f.read()
        self.assertTrue(data,
                        msg="Layer contains a README file but it is empty.")

        # If a layer's README references another README, then the checks below are not valid
        if re.search('README', data, re.IGNORECASE):
            return

        self.assertIn('maintainer', data.lower())
        self.assertIn('patch', data.lower())
        # Check that there is an email address in the README
        email_regex = re.compile(r"[^@]+@[^@]+")
        self.assertTrue(email_regex.match(data))

    def test_parse(self):
        """The layer must not break 'bitbake -p'."""
        check_command('Layer %s failed to parse.' % self.tc.layer['name'],
                      'bitbake -p')

    def test_show_environment(self):
        """The layer must not break 'bitbake -e'."""
        check_command('Layer %s failed to show environment.' % self.tc.layer['name'],
                      'bitbake -e')

    def test_world(self):
        '''
        "bitbake world" is expected to work. test_signatures does not cover that
        because it is more lenient and ignores recipes in a world build that
        are not actually buildable, so here we fail when "bitbake -S none world"
        fails.
        '''
        get_signatures(self.td['builddir'], failsafe=False)

    def test_world_inherit_class(self):
        '''
        This also does "bitbake -S none world" along with inheriting "yocto-check-layer"
        class, which can do additional per-recipe test cases.
        '''
        msg = []
        try:
            get_signatures(self.td['builddir'], failsafe=False, machine=None, extravars='BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS INHERIT" INHERIT="yocto-check-layer"')
        except RuntimeError as ex:
            msg.append(str(ex))
        if msg:
            msg.insert(0, 'Layer %s failed additional checks from yocto-check-layer.bbclass\nSee below log for specific recipe parsing errors:\n' % \
                self.tc.layer['name'])
            self.fail('\n'.join(msg))

    @unittest.expectedFailure
    def test_patches_upstream_status(self):
        """Every .patch file in the layer should carry a well-formed
        Upstream-Status tag (currently an expected failure)."""
        import sys
        sys.path.append(os.path.join(sys.path[0], '../../../../meta/lib/'))
        import oe.qa
        patches = []
        for dirpath, dirs, files in os.walk(self.tc.layer['path']):
            for filename in files:
                if filename.endswith(".patch"):
                    ppath = os.path.join(dirpath, filename)
                    if oe.qa.check_upstream_status(ppath):
                        patches.append(ppath)
        self.assertEqual(len(patches), 0,
                         msg="Found following patches with malformed or missing upstream status:\n%s" % '\n'.join([str(patch) for patch in patches]))

    def test_signatures(self):
        """Adding the layer must not change signatures of existing tasks."""
        if self.tc.layer['type'] == LayerType.SOFTWARE and \
           not self.tc.test_software_layer_signatures:
            raise unittest.SkipTest("Not testing for signature changes in a software layer %s." \
                     % self.tc.layer['name'])

        curr_sigs, _ = get_signatures(self.td['builddir'], failsafe=True)
        msg = compare_signatures(self.td['sigs'], curr_sigs)
        if msg is not None:
            self.fail('Adding layer %s changed signatures.\n%s' % (self.tc.layer['name'], msg))

    def test_layerseries_compat(self):
        """Every collection must declare compatible oe-core releases."""
        for collection_name, collection_data in self.tc.layer['collections'].items():
            self.assertTrue(collection_data['compat'], "Collection %s from layer %s does not set compatible oe-core versions via LAYERSERIES_COMPAT_collection." \
                 % (collection_name, self.tc.layer['name']))
|
||||
@@ -0,0 +1,28 @@
|
||||
# Copyright (C) 2017 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import unittest
|
||||
|
||||
from checklayer import LayerType
|
||||
from checklayer.case import OECheckLayerTestCase
|
||||
|
||||
class DistroCheckLayer(OECheckLayerTestCase):
    """Checks that apply to distro (and core) layers."""

    @classmethod
    def setUpClass(self):
        # Skip the whole case class unless the layer under test is a distro
        # (or the core) layer.
        if self.tc.layer['type'] not in (LayerType.DISTRO, LayerType.CORE):
            raise unittest.SkipTest("DistroCheckLayer: Layer %s isn't Distro one." %\
                self.tc.layer['name'])

    def test_distro_defines_distros(self):
        """A distro layer must provide at least one conf/distro/*.conf."""
        # Fixed copy-pasted message from the BSP test ("Layer is BSP but
        # doesn't defines machines.") which was wrong for this check.
        self.assertTrue(self.tc.layer['conf']['distros'],
                "Layer is distro but doesn't define distros.")

    def test_distro_no_set_distros(self):
        """Merely adding the layer must not change the active DISTRO."""
        from oeqa.utils.commands import get_bb_var

        distro = get_bb_var('DISTRO')
        self.assertEqual(self.td['bbvars']['DISTRO'], distro,
                msg="Layer %s modified distro %s -> %s" % \
                    (self.tc.layer['name'], self.td['bbvars']['DISTRO'], distro))
|
||||
@@ -0,0 +1,17 @@
|
||||
# Copyright (C) 2017 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
import glob
|
||||
import re
|
||||
|
||||
from oeqa.core.context import OETestContext
|
||||
|
||||
class CheckLayerTestContext(OETestContext):
    """Test context for yocto-check-layer runs.

    Carries the layer under test and whether signature checks should also be
    applied to software layers.
    """
    def __init__(self, td=None, logger=None, layer=None, test_software_layer_signatures=True):
        super().__init__(td, logger)
        # Layer dict (as produced by checklayer.detect_layers) under test.
        self.layer = layer
        # When False, test_signatures is skipped for software layers.
        self.test_software_layer_signatures = test_software_layer_signatures
|
||||
@@ -0,0 +1,379 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Development tool - utility functions for plugins
|
||||
#
|
||||
# Copyright (C) 2014 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool plugins module"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import logging
|
||||
import re
|
||||
import codecs
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
class DevtoolError(Exception):
    """Error raised for devtool failures.

    Carries the exit status the devtool command line should terminate with.
    """

    def __init__(self, message, exitcode=1):
        super().__init__(message)
        # Process exit code for the CLI; defaults to a generic failure (1).
        self.exitcode = exitcode
|
||||
|
||||
|
||||
def exec_build_env_command(init_path, builddir, cmd, watch=False, **options):
    """Run a program in bitbake build context.

    init_path -- path to the OE init script; when set, it is sourced (with
                 builddir as argument) before cmd runs.
    builddir -- build directory; also the default working directory.
    watch -- when True, stream output live via exec_watch() instead of
             capturing it silently with bb.process.run().
    Remaining keyword options are passed through to the process runner.
    """
    import bb
    # Idiomatic membership tests ('x not in y' instead of 'not x in y').
    if 'cwd' not in options:
        options['cwd'] = builddir
    if init_path:
        # As the OE init script makes use of BASH_SOURCE to determine OEROOT,
        # and can't determine it when running under dash, we need to set
        # the executable to bash to correctly set things up
        if 'executable' not in options:
            options['executable'] = 'bash'
        logger.debug('Executing command: "%s" using init path %s' % (cmd, init_path))
        init_prefix = '. %s %s > /dev/null && ' % (init_path, builddir)
    else:
        logger.debug('Executing command "%s"' % cmd)
        init_prefix = ''
    if watch:
        if sys.stdout.isatty():
            # Fool bitbake into thinking it's outputting to a terminal (because it is, indirectly)
            cmd = 'script -e -q -c "%s" /dev/null' % cmd
        return exec_watch('%s%s' % (init_prefix, cmd), **options)
    else:
        return bb.process.run('%s%s' % (init_prefix, cmd), **options)
|
||||
|
||||
def exec_watch(cmd, **options):
    """Run *cmd* with combined stdout/stderr echoed live to sys.stdout while
    also capturing it.

    Returns (output, None); raises bb.process.ExecutionError on a non-zero
    exit code.
    """
    import bb
    if isinstance(cmd, str) and 'shell' not in options:
        options['shell'] = True

    process = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **options
    )

    reader = codecs.getreader('utf-8')(process.stdout)
    buf = ''
    while True:
        # Read one character at a time so output appears as it is produced.
        out = reader.read(1, 1)
        if out:
            sys.stdout.write(out)
            sys.stdout.flush()
            buf += out
        elif out == '' and process.poll() is not None:
            # EOF and the process has exited: we are done.
            # ('is not None' replaces the '!= None' comparison.)
            break

    if process.returncode != 0:
        raise bb.process.ExecutionError(cmd, process.returncode, buf, None)

    return buf, None
|
||||
|
||||
def exec_fakeroot(d, cmd, **kwargs):
    """Run a command under fakeroot (pseudo, in fact) so that it picks up the
    appropriate file permissions.

    d -- bitbake datastore providing FAKEROOTCMD and FAKEROOTENV.
    Returns the command's exit status, or 2 when pseudo is missing.
    """
    # Grab the command and check it actually exists
    fakerootcmd = d.getVar('FAKEROOTCMD')
    if not os.path.exists(fakerootcmd):
        # Fix: the message had a %s placeholder but the path was never
        # passed; use logging's lazy %-args to interpolate it.
        logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built', fakerootcmd)
        return 2
    # Set up the appropriate environment
    newenv = dict(os.environ)
    fakerootenv = d.getVar('FAKEROOTENV')
    for varvalue in fakerootenv.split():
        if '=' in varvalue:
            splitval = varvalue.split('=', 1)
            newenv[splitval[0]] = splitval[1]
    return subprocess.call("%s %s" % (fakerootcmd, cmd), env=newenv, **kwargs)
|
||||
|
||||
def setup_tinfoil(config_only=False, basepath=None, tracking=False):
    """Initialize tinfoil api from bitbake.

    config_only -- parse only the configuration, not all recipes.
    basepath -- directory to chdir into while locating/initializing bitbake.
    tracking -- enable variable-history tracking in tinfoil.
    Returns a prepared bb.tinfoil.Tinfoil instance; exits the process when
    bitbake cannot be located; raises DevtoolError when the UI fails to start.
    """
    import scriptpath
    # Remember where we were: the finally block below always restores the
    # caller's working directory, even on failure.
    orig_cwd = os.path.abspath(os.curdir)
    try:
        if basepath:
            os.chdir(basepath)
        bitbakepath = scriptpath.add_bitbake_lib_path()
        if not bitbakepath:
            logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
            sys.exit(1)

        # Import deferred until bitbake's lib dir is on sys.path.
        import bb.tinfoil
        tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
        try:
            tinfoil.logger.setLevel(logger.getEffectiveLevel())
            tinfoil.prepare(config_only)
        except bb.tinfoil.TinfoilUIException:
            # Shut tinfoil down before surfacing a friendlier error.
            tinfoil.shutdown()
            raise DevtoolError('Failed to start bitbake environment')
        except:
            # Deliberately broad: whatever went wrong, the tinfoil instance
            # must be shut down before re-raising the original exception.
            tinfoil.shutdown()
            raise
    finally:
        os.chdir(orig_cwd)
    return tinfoil
|
||||
|
||||
def parse_recipe(config, tinfoil, pn, appends, filter_workspace=True):
    """Parse the specified recipe.

    config -- devtool config object (provides workspace_path).
    tinfoil -- prepared tinfoil instance.
    pn -- recipe name to parse.
    appends -- when True, apply bbappend files to the recipe.
    filter_workspace -- when True, ignore bbappends coming from the devtool
                        workspace itself.
    Returns the parsed recipe datastore, or None on error (already logged).
    """
    try:
        recipefile = tinfoil.get_recipe_file(pn)
    except bb.providers.NoProvider as e:
        logger.error(str(e))
        return None
    if appends:
        append_files = tinfoil.get_file_appends(recipefile)
        if filter_workspace:
            # Filter out appends from the workspace
            append_files = [path for path in append_files if
                            not path.startswith(config.workspace_path)]
    else:
        append_files = None
    try:
        rd = tinfoil.parse_recipe_file(recipefile, appends, append_files)
    except Exception as e:
        # Parse failures are reported, not propagated; callers check for None.
        logger.error(str(e))
        return None
    return rd
|
||||
|
||||
def check_workspace_recipe(workspace, pn, checksrc=True, bbclassextend=False):
    """
    Check that a recipe is in the workspace and (optionally) that source
    is present.

    workspace -- mapping of recipe name -> workspace entry dict (with
                 'recipefile' and 'srctree' keys).
    pn -- recipe (or BBCLASSEXTEND variant) name to look up.
    checksrc -- also require the source tree to exist and be non-empty.
    bbclassextend -- also match pn against BBCLASSEXTEND variants
                     (e.g. pn-native) of workspace recipes.
    Returns the workspace recipe name that matched; raises DevtoolError
    when nothing matches or the source tree check fails.
    """

    workspacepn = pn

    for recipe, value in workspace.items():
        if recipe == pn:
            break
        if bbclassextend:
            recipefile = value['recipefile']
            if recipefile:
                targets = get_bbclassextend_targets(recipefile, recipe)
                if pn in targets:
                    # pn is a variant of this workspace recipe; report the
                    # base recipe name.
                    workspacepn = recipe
                    break
    else:
        # for/else: the loop never hit a break, so no recipe matched.
        raise DevtoolError("No recipe named '%s' in your workspace" % pn)

    if checksrc:
        srctree = workspace[workspacepn]['srctree']
        if not os.path.exists(srctree):
            raise DevtoolError("Source tree %s for recipe %s does not exist" % (srctree, workspacepn))
        if not os.listdir(srctree):
            raise DevtoolError("Source tree %s for recipe %s is empty" % (srctree, workspacepn))

    return workspacepn
|
||||
|
||||
def use_external_build(same_dir, no_same_dir, d):
    """
    Determine if we should use B!=S (separate build and source directories) or not
    """
    if no_same_dir:
        logger.info('Using separate build directory since --no-same-dir specified')
        return False
    if same_dir:
        logger.info('Using source tree as build directory since --same-dir specified')
        return True
    if bb.data.inherits_class('autotools-brokensep', d):
        logger.info('Using source tree as build directory since recipe inherits autotools-brokensep')
        return True
    if os.path.abspath(d.getVar('B')) == os.path.abspath(d.getVar('S')):
        logger.info('Using source tree as build directory since that would be the default for this recipe')
        return True
    return False
|
||||
|
||||
def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
    """
    Set up the git repository for the source tree.

    repodir: directory containing the (possibly not yet initialised) source
    version: upstream version string used in the initial commit message (may be None)
    devbranch: name of the development branch to check out
    basetag: tag applied to the initial state of the tree
    d: optional datastore passed through for git user options
    """
    import bb.process
    import oe.patch
    if not os.path.exists(os.path.join(repodir, '.git')):
        # Fresh tree - initialise a repo and commit everything as a baseline
        bb.process.run('git init', cwd=repodir)
        bb.process.run('git config --local gc.autodetach 0', cwd=repodir)
        bb.process.run('git add -f -A .', cwd=repodir)
        commit_cmd = ['git']
        # Inject user.name/user.email options so the commit works even
        # without global git configuration
        oe.patch.GitApplyTree.gitCommandUserOptions(commit_cmd, d=d)
        commit_cmd += ['commit', '-q']
        stdout, _ = bb.process.run('git status --porcelain', cwd=repodir)
        if not stdout:
            # Nothing staged (empty source tree) - still create a commit so
            # the branch/tag below have something to point at
            commit_cmd.append('--allow-empty')
            commitmsg = "Initial empty commit with no upstream sources"
        elif version:
            commitmsg = "Initial commit from upstream at version %s" % version
        else:
            commitmsg = "Initial commit from upstream"
        commit_cmd += ['-m', commitmsg]
        bb.process.run(commit_cmd, cwd=repodir)

    # Ensure singletask.lock (as used by externalsrc.bbclass) is ignored by git
    gitinfodir = os.path.join(repodir, '.git', 'info')
    try:
        os.mkdir(gitinfodir)
    except FileExistsError:
        pass
    excludes = []
    excludefile = os.path.join(gitinfodir, 'exclude')
    try:
        with open(excludefile, 'r') as f:
            excludes = f.readlines()
    except FileNotFoundError:
        pass
    if 'singletask.lock\n' not in excludes:
        excludes.append('singletask.lock\n')
    with open(excludefile, 'w') as f:
        for line in excludes:
            f.write(line)

    # -f on the tag so re-running against an existing repo moves the base tag
    bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
    bb.process.run('git tag -f %s' % basetag, cwd=repodir)
|
||||
|
||||
def recipe_to_append(recipefile, config, wildcard=False):
    """
    Convert a recipe file to a bbappend file path within the workspace.
    NOTE: if the bbappend already exists, you should be using
    workspace[args.recipename]['bbappend'] instead of calling this
    function.
    """
    stem = os.path.splitext(os.path.basename(recipefile))[0]
    if wildcard:
        # Replace the version suffix with '%' so the append matches any version
        stem = re.sub(r'_.*', '_%', stem)
    return os.path.join(config.workspace_path, 'appends', stem + '.bbappend')
|
||||
|
||||
def get_bbclassextend_targets(recipefile, pn):
    """
    Cheap function to get BBCLASSEXTEND and then convert that to the
    list of targets that would result.
    """
    import bb.utils

    values = {}
    def record_varfunc(varname, origvalue, op, newlines):
        # Capture the value without modifying the metadata
        values[varname] = origvalue
        return origvalue, None, 0, True
    with open(recipefile, 'r') as f:
        bb.utils.edit_metadata(f, ['BBCLASSEXTEND'], record_varfunc)

    targets = []
    for variant in values.get('BBCLASSEXTEND', '').split():
        if variant == 'nativesdk':
            # nativesdk extends as a prefix (nativesdk-foo)
            targets.append('%s-%s' % (variant, pn))
        elif variant in ('native', 'cross', 'crosssdk'):
            # these extend as suffixes (foo-native etc.)
            targets.append('%s-%s' % (pn, variant))
    return targets
|
||||
|
||||
def replace_from_file(path, old, new):
    """Replace every occurrence of *old* with *new* in the file at *path*,
    rewriting it in place.

    Each line is processed independently; the rewritten content is
    normalised to end in exactly one trailing newline. Silently does
    nothing if *old* is None or the file does not exist; any other I/O
    error propagates.
    """
    # In case old is None, return immediately
    if old is None:
        return
    try:
        with open(path) as f:
            rdata = f.read()
    except FileNotFoundError:
        # A missing file is not an error for this helper - just quit
        # (other I/O errors still propagate)
        return

    # NOTE: the previous version wrapped str.replace() in
    # "except ValueError: pass", but str.replace never raises ValueError,
    # so that handler was dead code and has been removed.
    new_contents = [line.replace(old, new) for line in rdata.splitlines()]
    with open(path, "w") as f:
        f.write("\n".join(new_contents).rstrip() + "\n")
|
||||
|
||||
|
||||
def update_unlockedsigs(basepath, workspace, fixed_setup, extra=None):
    """ This function will make unlocked-sigs.inc match the recipes in the
    workspace plus any extras we want unlocked.

    basepath: build directory root (conf/unlocked-sigs.inc lives beneath it)
    workspace: dict of workspace recipes; its keys become unlocked recipes
    fixed_setup: True only within the eSDK - the file is only relevant there
    extra: optional list of additional recipe names to unlock
    """

    if not fixed_setup:
        # Only need to write this out within the eSDK
        return

    if not extra:
        extra = []

    confdir = os.path.join(basepath, 'conf')
    unlockedsigs = os.path.join(confdir, 'unlocked-sigs.inc')

    # Get current unlocked list if any
    values = {}
    def get_unlockedsigs_varfunc(varname, origvalue, op, newlines):
        # Capture the current value without changing the file
        values[varname] = origvalue
        return origvalue, None, 0, True
    if os.path.exists(unlockedsigs):
        with open(unlockedsigs, 'r') as f:
            bb.utils.edit_metadata(f, ['SIGGEN_UNLOCKED_RECIPES'], get_unlockedsigs_varfunc)
    # NOTE(review): assumes edit_metadata reports the multi-line value as a
    # list of recipe names here - confirm against bb.utils.edit_metadata
    unlocked = sorted(values.get('SIGGEN_UNLOCKED_RECIPES', []))

    # If the new list is different to the current list, write it out
    newunlocked = sorted(list(workspace.keys()) + extra)
    if unlocked != newunlocked:
        bb.utils.mkdirhier(confdir)
        with open(unlockedsigs, 'w') as f:
            f.write("# DO NOT MODIFY! YOUR CHANGES WILL BE LOST.\n" +
                    "# This layer was created by the OpenEmbedded devtool" +
                    " utility in order to\n" +
                    "# contain recipes that are unlocked.\n")

            f.write('SIGGEN_UNLOCKED_RECIPES += "\\\n')
            for pn in newunlocked:
                f.write(' ' + pn)
            f.write('"')
|
||||
|
||||
def check_prerelease_version(ver, operation):
    """Warn if *ver* looks like a pre-release version string.

    Upgrading to e.g. "2.0-rc2" and later to the final "2.0" would appear
    to go backwards to version comparison, so recommend the
    <current>+<prerelease> form instead. *operation* names the devtool
    operation in progress and is used only in the message text.
    """
    if 'pre' in ver or 'rc' in ver:
        # Fixed typo in the user-facing message: "recommmended" -> "recommended"
        logger.warning('Version "%s" looks like a pre-release version. '
                       'If that is the case, in order to ensure that the '
                       'version doesn\'t appear to go backwards when you '
                       'later upgrade to the final release version, it is '
                       'recommended that instead you use '
                       '<current version>+<pre-release version> e.g. if '
                       'upgrading from 1.9 to 2.0-rc2 use "1.9+2.0-rc2". '
                       'If you prefer not to reset and re-try, you can change '
                       'the version after %s succeeds using "devtool rename" '
                       'with -V/--version.' % (ver, operation))
|
||||
|
||||
def check_git_repo_dirty(repodir):
    """Check if a git repository is clean or not"""
    # Empty porcelain output means the tree is clean; a non-empty (truthy)
    # string means there are uncommitted changes
    output, _ = bb.process.run('git status --porcelain', cwd=repodir)
    return output
|
||||
|
||||
def check_git_repo_op(srctree, ignoredirs=None):
    """Check if a git repository is in the middle of a rebase"""
    out, _ = bb.process.run('git rev-parse --show-toplevel', cwd=srctree)
    toplevel = out.strip()
    if ignoredirs and toplevel in ignoredirs:
        # Caller asked for this repository to be skipped
        return
    # git keeps these state directories around while a rebase/am is in flight
    gitdir = os.path.join(toplevel, '.git')
    if os.path.exists(os.path.join(gitdir, 'rebase-merge')):
        raise DevtoolError("Source tree %s appears to be in the middle of a rebase - please resolve this first" % srctree)
    if os.path.exists(os.path.join(gitdir, 'rebase-apply')):
        raise DevtoolError("Source tree %s appears to be in the middle of 'git am' or 'git apply' - please resolve this first" % srctree)
|
||||
@@ -0,0 +1,92 @@
|
||||
# Development tool - build command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2015 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool build plugin"""
|
||||
|
||||
import os
|
||||
import bb
|
||||
import logging
|
||||
import argparse
|
||||
import tempfile
|
||||
from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
|
||||
from devtool import parse_recipe
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
|
||||
def _set_file_values(fn, values):
    """Set each variable in the dict *values* to its given value in file *fn*.

    Existing assignments are edited in place; variables not already
    present in the file are appended. Returns True if the file was
    modified.
    """
    remaining = list(values.keys())

    def varfunc(varname, origvalue, op, newlines):
        newvalue = values.get(varname, origvalue)
        # A variable may legitimately be assigned more than once in the
        # file; only the first occurrence should knock it off the pending
        # list (previously a second occurrence raised ValueError)
        if varname in remaining:
            remaining.remove(varname)
        return (newvalue, '=', 0, True)

    with open(fn, 'r') as f:
        (updated, newlines) = bb.utils.edit_metadata(f, values, varfunc)

    # Append any requested variables that the file did not already set
    for item in remaining:
        updated = True
        newlines.append('%s = "%s"' % (item, values[item]))

    if updated:
        with open(fn, 'w') as f:
            f.writelines(newlines)
    return updated
|
||||
|
||||
def _get_build_tasks(config):
|
||||
tasks = config.get('Build', 'build_task', 'populate_sysroot,packagedata').split(',')
|
||||
return ['do_%s' % task.strip() for task in tasks]
|
||||
|
||||
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand.

    Builds the named workspace recipe with bitbake (or cleans it with
    --clean), optionally disabling make parallelism first. Returns a
    shell-style exit code (0 on success).
    """
    # Resolve pn in case the name given is a BBCLASSEXTEND variant of a
    # workspace recipe
    workspacepn = check_workspace_recipe(workspace, args.recipename, bbclassextend=True)
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            return 1
        # Only schedule do_deploy below if the recipe actually defines it
        deploytask = 'do_deploy' in rd.getVar('__BBTASKS')
    finally:
        tinfoil.shutdown()

    if args.clean:
        # use clean instead of cleansstate to avoid messing things up in eSDK
        build_tasks = ['do_clean']
    else:
        build_tasks = _get_build_tasks(config)
        if deploytask:
            build_tasks.append('do_deploy')

    bbappend = workspace[workspacepn]['bbappend']
    if args.disable_parallel_make:
        logger.info("Disabling 'make' parallelism")
        _set_file_values(bbappend, {'PARALLEL_MAKE': ''})
    try:
        bbargs = []
        for task in build_tasks:
            # -native recipes have no packaging tasks, so skip those
            if args.recipename.endswith('-native') and 'package' in task:
                continue
            bbargs.append('%s:%s' % (args.recipename, task))
        exec_build_env_command(config.init_path, basepath, 'bitbake %s' % ' '.join(bbargs), watch=True)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode
    finally:
        if args.disable_parallel_make:
            # Restore PARALLEL_MAKE in the bbappend whether or not the build succeeded
            _set_file_values(bbappend, {'PARALLEL_MAKE': None})

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    # NOTE(review): 'group' and 'order' appear to be extensions supplied by
    # the project's argparse wrapper rather than stock argparse - confirm
    parser_build = subparsers.add_parser('build', help='Build a recipe',
                                         description='Builds the specified recipe using bitbake (up to and including %s)' % ', '.join(_get_build_tasks(context.config)),
                                         group='working', order=50)
    parser_build.add_argument('recipename', help='Recipe to build')
    parser_build.add_argument('-s', '--disable-parallel-make', action="store_true", help='Disable make parallelism')
    parser_build.add_argument('-c', '--clean', action='store_true', help='clean up recipe building results')
    parser_build.set_defaults(func=build)
|
||||
@@ -0,0 +1,164 @@
|
||||
# Development tool - build-image plugin
|
||||
#
|
||||
# Copyright (C) 2015 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
"""Devtool plugin containing the build-image subcommand."""
|
||||
|
||||
import os
|
||||
import errno
|
||||
import logging
|
||||
|
||||
from bb.process import ExecutionError
|
||||
from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
class TargetNotImageError(Exception):
    """Raised when the requested build target is not an image recipe."""
    pass
|
||||
|
||||
def _get_packages(tinfoil, workspace, config):
    """Get list of packages from recipes in the workspace.

    A workspace recipe is included only if it targets the machine
    (class-target) and produces a package with the same name as the
    recipe. Recipes that fail to parse are skipped with a warning
    instead of crashing (parse_recipe returns None on failure).
    """
    result = []
    for recipe in workspace:
        data = parse_recipe(config, tinfoil, recipe, True)
        if not data:
            # The parse error has already been logged by parse_recipe
            logger.warning("Skipping recipe %s as it could not be parsed", recipe)
            continue
        if 'class-target' in data.getVar('OVERRIDES').split(':'):
            if recipe in data.getVar('PACKAGES').split():
                result.append(recipe)
            else:
                logger.warning("Skipping recipe %s as it doesn't produce a "
                               "package with the same name", recipe)
    return result
|
||||
|
||||
def build_image(args, config, basepath, workspace):
    """Entry point for the devtool 'build-image' subcommand.

    Determines the image to build (explicit argument, or the first
    configured SDK target as a fallback), then delegates to
    build_image_task(). Returns its shell-style exit code.
    """

    image = args.imagename
    auto_image = False
    if not image:
        # No image given on the command line - fall back to the first
        # configured SDK target, if any
        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
        if sdk_targets:
            image = sdk_targets[0]
            auto_image = True
    if not image:
        raise DevtoolError('Unable to determine image to build, please specify one')

    try:
        if args.add_packages:
            add_packages = args.add_packages.split(',')
        else:
            add_packages = None
        result, outputdir = build_image_task(config, basepath, workspace, image, add_packages)
    except TargetNotImageError:
        # Tailor the message: an auto-selected non-image target is a
        # configuration problem, an explicit one is a user error
        if auto_image:
            raise DevtoolError('Unable to determine image to build, please specify one')
        else:
            raise DevtoolError('Specified recipe %s is not an image recipe' % image)

    if result == 0:
        logger.info('Successfully built %s. You can find output files in %s'
                    % (image, outputdir))
    return result
|
||||
|
||||
def build_image_task(config, basepath, workspace, image, add_packages=None, task=None, extra_append=None):
    """Build an image (or a task of it such as populate_sdk_ext), extending
    it with packages from the workspace via a temporary bbappend.

    Returns a tuple (exitcode, outputdir); outputdir is None on failure.
    Raises TargetNotImageError if *image* does not inherit the image class.
    """
    # remove <image>.bbappend to make sure setup_tinfoil doesn't
    # break because of it
    target_basename = config.get('SDK', 'target_basename', '')
    if target_basename:
        appendfile = os.path.join(config.workspace_path, 'appends',
                                  '%s.bbappend' % target_basename)
        try:
            os.unlink(appendfile)
        except OSError as exc:
            # A missing append is fine; anything else is a real error
            if exc.errno != errno.ENOENT:
                raise

    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, image, True)
        if not rd:
            # Error already shown
            return (1, None)
        if not bb.data.inherits_class('image', rd):
            raise TargetNotImageError()

        # Get the actual filename used and strip the .bb and full path
        target_basename = rd.getVar('FILE')
        target_basename = os.path.splitext(os.path.basename(target_basename))[0]
        # Remember the basename so the next invocation can clean up our append
        config.set('SDK', 'target_basename', target_basename)
        config.write()

        appendfile = os.path.join(config.workspace_path, 'appends',
                                  '%s.bbappend' % target_basename)

        outputdir = None
        try:
            if workspace or add_packages:
                if add_packages:
                    packages = add_packages
                else:
                    packages = _get_packages(tinfoil, workspace, config)
            else:
                packages = None
            if not task:
                if not packages and not add_packages and workspace:
                    logger.warning('No recipes in workspace, building image %s unmodified', image)
                elif not packages:
                    logger.warning('No packages to add, building image %s unmodified', image)

            if packages or extra_append:
                # Write a transient bbappend that injects the extra packages
                # and/or extra lines into the image recipe
                bb.utils.mkdirhier(os.path.dirname(appendfile))
                with open(appendfile, 'w') as afile:
                    if packages:
                        # include packages from workspace recipes into the image
                        afile.write('IMAGE_INSTALL:append = " %s"\n' % ' '.join(packages))
                        if not task:
                            logger.info('Building image %s with the following '
                                        'additional packages: %s', image, ' '.join(packages))
                    if extra_append:
                        for line in extra_append:
                            afile.write('%s\n' % line)

            if task in ['populate_sdk', 'populate_sdk_ext']:
                outputdir = rd.getVar('SDK_DEPLOY')
            else:
                outputdir = rd.getVar('DEPLOY_DIR_IMAGE')

            # Shut tinfoil down before invoking bitbake below; clearing the
            # local first keeps the outer finally from shutting it down twice
            tmp_tinfoil = tinfoil
            tinfoil = None
            tmp_tinfoil.shutdown()

            options = ''
            if task:
                options += '-c %s' % task

            # run bitbake to build image (or specified task)
            try:
                exec_build_env_command(config.init_path, basepath,
                                       'bitbake %s %s' % (options, image), watch=True)
            except ExecutionError as err:
                return (err.exitcode, None)
        finally:
            # Always remove the transient bbappend again
            if os.path.isfile(appendfile):
                os.unlink(appendfile)
    finally:
        if tinfoil:
            tinfoil.shutdown()
    return (0, outputdir)
|
||||
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from the build-image plugin"""
    parser = subparsers.add_parser('build-image',
                                   help='Build image including workspace recipe packages',
                                   description='Builds an image, extending it to include '
                                   'packages from recipes in the workspace',
                                   group='testbuild', order=-10)
    # imagename is optional; build_image() falls back to the configured
    # SDK targets when it is omitted
    parser.add_argument('imagename', help='Image recipe to build', nargs='?')
    parser.add_argument('-p', '--add-packages', help='Instead of adding packages for the '
                        'entire workspace, specify packages to be added to the image '
                        '(separate multiple packages by commas)',
                        metavar='PACKAGES')
    parser.set_defaults(func=build_image)
|
||||
@@ -0,0 +1,55 @@
|
||||
# Development tool - build-sdk command plugin
|
||||
#
|
||||
# Copyright (C) 2015-2016 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import logging
|
||||
import glob
|
||||
import shutil
|
||||
import errno
|
||||
import sys
|
||||
import tempfile
|
||||
from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
|
||||
from devtool import build_image
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
|
||||
def build_sdk(args, config, basepath, workspace):
    """Entry point for the devtool build-sdk command"""
    # The derivative SDK is based on the first configured SDK target image
    sdk_targets = config.get('SDK', 'sdk_targets', '').split()
    if not sdk_targets:
        raise DevtoolError('Unable to determine image to build SDK for')
    image = sdk_targets[0]

    # Mark the build as a derivative SDK
    extra_append = ['SDK_DERIVATIVE = "1"']
    try:
        result, outputdir = build_image.build_image_task(
            config, basepath, workspace, image,
            task='populate_sdk_ext', extra_append=extra_append)
    except build_image.TargetNotImageError:
        raise DevtoolError('Unable to determine image to build SDK for')

    if result == 0:
        logger.info('Successfully built SDK. You can find output files in %s'
                    % outputdir)
    return result
|
||||
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands"""
    # build-sdk only makes sense inside the extensible SDK (fixed_setup),
    # where a derivative SDK can be produced from the current one
    if context.fixed_setup:
        parser_build_sdk = subparsers.add_parser('build-sdk',
                                                 help='Build a derivative SDK of this one',
                                                 description='Builds an extensible SDK based upon this one and the items in your workspace',
                                                 group='advanced')
        parser_build_sdk.set_defaults(func=build_sdk)
|
||||
@@ -0,0 +1,366 @@
|
||||
# Development tool - deploy/undeploy command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2016 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool plugin containing the deploy subcommands"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
import bb.utils
|
||||
import argparse_oe
|
||||
import oe.types
|
||||
|
||||
from devtool import exec_fakeroot, setup_tinfoil, check_workspace_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
deploylist_path = '/.devtool'
|
||||
|
||||
def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=False, nopreserve=False, nocheckspace=False):
    """
    Prepare a shell script for running on the target to
    deploy/undeploy files. We have to be careful what we put in this
    script - only commands that are likely to be available on the
    target are suitable (the target might be constrained, e.g. using
    busybox rather than bash with coreutils).

    deploy: True to generate a deployment script (expects a tar stream on
        stdin), False to generate an undeploy script
    verbose: echo extracted files while deploying
    dryrun: only list what would be done, change nothing
    undeployall: loop over every recorded manifest instead of just $1
    nopreserve: do not move aside pre-existing files before deploying
    nocheckspace: skip the free-space check before deploying

    The generated script's positional parameters are: $1 recipe name,
    $2 destination directory, $3 file-list path (deploy only).
    """
    lines = []
    lines.append('#!/bin/sh')
    lines.append('set -e')
    if undeployall:
        # Yes, I know this is crude - but it does work
        lines.append('for entry in %s/*.list; do' % deploylist_path)
        lines.append('[ ! -f $entry ] && exit')
        lines.append('set `basename $entry | sed "s/.list//"`')
    if dryrun:
        if not deploy:
            lines.append('echo "Previously deployed files for $1:"')
    lines.append('manifest="%s/$1.list"' % deploylist_path)
    lines.append('preservedir="%s/$1.preserve"' % deploylist_path)
    lines.append('if [ -f $manifest ] ; then')
    # Read manifest in reverse and delete files / remove empty dirs
    lines.append('    sed \'1!G;h;$!d\' $manifest | while read file')
    lines.append('    do')
    if dryrun:
        lines.append('        if [ ! -d $file ] ; then')
        lines.append('            echo $file')
        lines.append('        fi')
    else:
        lines.append('        if [ -d $file ] ; then')
        # Avoid deleting a preserved directory in case it has special perms
        lines.append('            if [ ! -d $preservedir/$file ] ; then')
        lines.append('                rmdir $file > /dev/null 2>&1 || true')
        lines.append('            fi')
        lines.append('        else')
        lines.append('            rm -f $file')
        lines.append('        fi')
    lines.append('    done')
    if not dryrun:
        lines.append('    rm $manifest')
    if not deploy and not dryrun:
        # May as well remove all traces
        lines.append('    rmdir `dirname $manifest` > /dev/null 2>&1 || true')
    lines.append('fi')

    if deploy:
        if not nocheckspace:
            # Check for available space
            # FIXME This doesn't take into account files spread across multiple
            # partitions, but doing that is non-trivial
            # Find the part of the destination path that exists
            lines.append('checkpath="$2"')
            lines.append('while [ "$checkpath" != "/" ] && [ ! -e $checkpath ]')
            lines.append('do')
            lines.append('    checkpath=`dirname "$checkpath"`')
            lines.append('done')
            lines.append(r'freespace=$(df -P $checkpath | sed -nre "s/^(\S+\s+){3}([0-9]+).*/\2/p")')
            # First line of the file is the total space
            lines.append('total=`head -n1 $3`')
            lines.append('if [ $total -gt $freespace ] ; then')
            lines.append('    echo "ERROR: insufficient space on target (available ${freespace}, needed ${total})"')
            lines.append('    exit 1')
            lines.append('fi')
        if not nopreserve:
            # Preserve any files that exist. Note that this will add to the
            # preserved list with successive deployments if the list of files
            # deployed changes, but because we've deleted any previously
            # deployed files at this point it will never preserve anything
            # that was deployed, only files that existed prior to any deploying
            # (which makes the most sense)
            lines.append('cat $3 | sed "1d" | while read file fsize')
            lines.append('do')
            lines.append('    if [ -e $file ] ; then')
            lines.append('    dest="$preservedir/$file"')
            lines.append('    mkdir -p `dirname $dest`')
            lines.append('    mv $file $dest')
            lines.append('    fi')
            lines.append('done')
        lines.append('rm $3')
        lines.append('mkdir -p `dirname $manifest`')
        lines.append('mkdir -p $2')
        # The tar stream arrives on stdin; the manifest records what was written
        if verbose:
            lines.append('    tar xv -C $2 -f - | tee $manifest')
        else:
            lines.append('    tar xv -C $2 -f - > $manifest')
        lines.append('sed -i "s!^./!$2!" $manifest')
    elif not dryrun:
        # Put any preserved files back
        lines.append('if [ -d $preservedir ] ; then')
        lines.append('    cd $preservedir')
        # find from busybox might not have -exec, so we don't use that
        lines.append('    find . -type f | while read file')
        lines.append('    do')
        lines.append('        mv $file /$file')
        lines.append('    done')
        lines.append('    cd /')
        lines.append('    rm -rf $preservedir')
        lines.append('fi')

    if undeployall:
        if not dryrun:
            lines.append('echo "NOTE: Successfully undeployed $1"')
        lines.append('done')

    # Delete the script itself
    lines.append('rm $0')
    lines.append('')

    return '\n'.join(lines)
|
||||
|
||||
|
||||
|
||||
def deploy(args, config, basepath, workspace):
    """Entry point for the devtool 'deploy' subcommand.

    Deploys the recipe's do_install output (${D}) to a remote machine
    over ssh, recording a manifest on the target so undeploy can remove
    the files again. Returns 0 on success; raises DevtoolError on
    failure.
    """
    import math
    import oe.recipeutils
    import oe.package

    # Source need not be present to deploy already-built output
    check_workspace_recipe(workspace, args.recipename, checksrc=False)

    try:
        # target may be user@host or user@host:/dest/dir
        host, destdir = args.target.split(':')
    except ValueError:
        destdir = '/'
    else:
        args.target = host
    if not destdir.endswith('/'):
        destdir += '/'

    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        try:
            rd = tinfoil.parse_recipe(args.recipename)
        except Exception as e:
            raise DevtoolError('Exception parsing recipe %s: %s' %
                               (args.recipename, e))
        recipe_outdir = rd.getVar('D')
        if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
            raise DevtoolError('No files to deploy - have you built the %s '
                               'recipe? If so, the install step has not installed '
                               'any files.' % args.recipename)

        if args.strip and not args.dry_run:
            # Fakeroot copy to new destination
            srcdir = recipe_outdir
            recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'devtool-deploy-target-stripped')
            if os.path.isdir(recipe_outdir):
                exec_fakeroot(rd, "rm -rf %s" % recipe_outdir, shell=True)
            exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
            # Make the cross strip tool findable for strip_execs()
            os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or ''])
            oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'),
                                   rd.getVar('base_libdir'), rd)

        # Walk the output tree collecting target paths and sizes for the
        # free-space check on the target
        filelist = []
        inodes = set({})
        ftotalsize = 0
        for root, _, files in os.walk(recipe_outdir):
            for fn in files:
                fstat = os.lstat(os.path.join(root, fn))
                # Get the size in kiB (since we'll be comparing it to the output of du -k)
                # MUST use lstat() here not stat() or getfilesize() since we don't want to
                # dereference symlinks
                if fstat.st_ino in inodes:
                    # Hardlink to an already-counted file - costs no extra space
                    fsize = 0
                else:
                    fsize = int(math.ceil(float(fstat.st_size)/1024))
                inodes.add(fstat.st_ino)
                ftotalsize += fsize
                # The path as it would appear on the target
                fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
                filelist.append((fpath, fsize))

        if args.dry_run:
            print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
            for item, _ in filelist:
                print('  %s' % item)
            return 0

        extraoptions = ''
        if args.no_host_check:
            extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
        if not args.show_status:
            extraoptions += ' -q'

        scp_sshexec = ''
        ssh_sshexec = 'ssh'
        if args.ssh_exec:
            scp_sshexec = "-S %s" % args.ssh_exec
            ssh_sshexec = args.ssh_exec
        scp_port = ''
        ssh_port = ''
        if args.port:
            scp_port = "-P %s" % args.port
            ssh_port = "-p %s" % args.port

        if args.key:
            extraoptions += ' -i %s' % args.key

        # In order to delete previously deployed files and have the manifest file on
        # the target, we write out a shell script and then copy it to the target
        # so we can then run it (piping tar output to it).
        # (We cannot use scp here, because it doesn't preserve symlinks.)
        tmpdir = tempfile.mkdtemp(prefix='devtool')
        try:
            tmpscript = '/tmp/devtool_deploy.sh'
            tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
            shellscript = _prepare_remote_script(deploy=True,
                                                 verbose=args.show_status,
                                                 nopreserve=args.no_preserve,
                                                 nocheckspace=args.no_check_space)
            # Write out the script to a file
            with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
                f.write(shellscript)
            # Write out the file list
            with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
                f.write('%d\n' % ftotalsize)
                for fpath, fsize in filelist:
                    f.write('%s %d\n' % (fpath, fsize))
            # Copy them to the target
            ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
            if ret != 0:
                raise DevtoolError('Failed to copy script to %s - rerun with -s to '
                                   'get a complete error message' % args.target)
        finally:
            shutil.rmtree(tmpdir)

        # Now run the script
        ret = exec_fakeroot(rd, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
        if ret != 0:
            raise DevtoolError('Deploy failed - rerun with -s to get a complete '
                               'error message')

        logger.info('Successfully deployed %s' % recipe_outdir)

        # NOTE(review): files_list is built here but not used within this
        # function as visible in this chunk - possibly left over; confirm
        files_list = []
        for root, _, files in os.walk(recipe_outdir):
            for filename in files:
                filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
                files_list.append(os.path.join(destdir, filename))
    finally:
        tinfoil.shutdown()

    return 0
|
||||
|
||||
def undeploy(args, config, basepath, workspace):
    """Entry point for the devtool 'undeploy' subcommand.

    Removes files previously deployed with deploy-target from a remote
    machine (one recipe, or all with -a/--all), restoring any files that
    were preserved at deploy time. Returns 0 on success.
    """
    if args.all and args.recipename:
        raise argparse_oe.ArgumentUsageError('Cannot specify -a/--all with a recipe name', 'undeploy-target')
    elif not args.recipename and not args.all:
        raise argparse_oe.ArgumentUsageError('If you don\'t specify a recipe, you must specify -a/--all', 'undeploy-target')

    extraoptions = ''
    if args.no_host_check:
        extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
    if not args.show_status:
        extraoptions += ' -q'

    scp_sshexec = ''
    ssh_sshexec = 'ssh'
    if args.ssh_exec:
        scp_sshexec = "-S %s" % args.ssh_exec
        ssh_sshexec = args.ssh_exec
    scp_port = ''
    ssh_port = ''
    if args.port:
        scp_port = "-P %s" % args.port
        ssh_port = "-p %s" % args.port

    # Strip any :/dest/dir suffix - undeploy only needs the host part
    args.target = args.target.split(':')[0]

    tmpdir = tempfile.mkdtemp(prefix='devtool')
    try:
        tmpscript = '/tmp/devtool_undeploy.sh'
        shellscript = _prepare_remote_script(deploy=False, dryrun=args.dry_run, undeployall=args.all)
        # Write out the script to a file
        with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
            f.write(shellscript)
        # Copy it to the target
        ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
        if ret != 0:
            raise DevtoolError('Failed to copy script to %s - rerun with -s to '
                               'get a complete error message' % args.target)
    finally:
        shutil.rmtree(tmpdir)

    # Now run the script
    ret = subprocess.call('%s %s %s %s \'sh %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename), shell=True)
    if ret != 0:
        raise DevtoolError('Undeploy failed - rerun with -s to get a complete '
                           'error message')

    if not args.all and not args.dry_run:
        logger.info('Successfully undeployed %s' % args.recipename)
    return 0
|
||||
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from the deploy plugin"""

    # deploy-target: copy a recipe's do_install output to a running machine over ssh
    parser_deploy = subparsers.add_parser('deploy-target',
                                          help='Deploy recipe output files to live target machine',
                                          description='Deploys a recipe\'s build output (i.e. the output of the do_install task) to a live target machine over ssh. By default, any existing files will be preserved instead of being overwritten and will be restored if you run devtool undeploy-target. Note: this only deploys the recipe itself and not any runtime dependencies, so it is assumed that those have been installed on the target beforehand.',
                                          group='testbuild')
    parser_deploy.add_argument('recipename', help='Recipe to deploy')
    parser_deploy.add_argument('target', help='Live target machine running an ssh server: user@hostname[:destdir]')
    parser_deploy.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
    parser_deploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
    parser_deploy.add_argument('-n', '--dry-run', help='List files to be deployed only', action='store_true')
    parser_deploy.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
    parser_deploy.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true')
    parser_deploy.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
    parser_deploy.add_argument('-P', '--port', help='Specify port to use for connection to the target')
    parser_deploy.add_argument('-I', '--key',
                               help='Specify ssh private key for connection to the target')

    # --strip and --no-strip both write dest='strip', so they must be mutually exclusive;
    # the default comes from the [Deploy] section of the devtool config file
    strip_opts = parser_deploy.add_mutually_exclusive_group(required=False)
    strip_opts.add_argument('-S', '--strip',
                            help='Strip executables prior to deploying (default: %(default)s). '
                                 'The default value of this option can be controlled by setting the strip option in the [Deploy] section to True or False.',
                            default=oe.types.boolean(context.config.get('Deploy', 'strip', default='0')),
                            action='store_true')
    strip_opts.add_argument('--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')

    parser_deploy.set_defaults(func=deploy)

    # undeploy-target: reverse of deploy-target, restoring any preserved files
    parser_undeploy = subparsers.add_parser('undeploy-target',
                                            help='Undeploy recipe output files in live target machine',
                                            description='Un-deploys recipe output files previously deployed to a live target machine by devtool deploy-target.',
                                            group='testbuild')
    # recipename is optional here because -a/--all may be used instead
    parser_undeploy.add_argument('recipename', help='Recipe to undeploy (if not using -a/--all)', nargs='?')
    parser_undeploy.add_argument('target', help='Live target machine running an ssh server: user@hostname')
    parser_undeploy.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
    parser_undeploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
    parser_undeploy.add_argument('-a', '--all', help='Undeploy all recipes deployed on the target', action='store_true')
    parser_undeploy.add_argument('-n', '--dry-run', help='List files to be undeployed only', action='store_true')
    parser_undeploy.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
    parser_undeploy.add_argument('-P', '--port', help='Specify port to use for connection to the target')
    parser_undeploy.add_argument('-I', '--key',
                                 help='Specify ssh private key for connection to the target')

    parser_undeploy.set_defaults(func=undeploy)
|
||||
@@ -0,0 +1,109 @@
|
||||
# Development tool - export command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2017 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool export plugin"""
|
||||
|
||||
import os
|
||||
import argparse
|
||||
import tarfile
|
||||
import logging
|
||||
import datetime
|
||||
import json
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
# output files
|
||||
default_arcname_prefix = "workspace-export"
|
||||
metadata = '.export_metadata'
|
||||
|
||||
def export(args, config, basepath, workspace):
    """Entry point for the devtool 'export' subcommand.

    Archives the recipes/appends/sources tracked in *workspace* (optionally
    filtered by args.include / args.exclude) into a gzipped tar file together
    with a metadata file, so the archive can later be restored with
    'devtool import'.

    Returns 0 on success (including "nothing to export"), 1 on user error.
    """

    def add_metadata(tar):
        """Store the workspace metadata (workspace path + recipe map) in the archive."""
        with open(metadata, 'w') as fd:
            fd.write(json.dumps((config.workspace_path, workspace)))
        tar.add(metadata)
        # the metadata file was written to the current directory; clean it up
        os.unlink(metadata)

    def add_recipe(tar, recipe, data):
        """Archive one recipe's files, mapping each to a stable arcname."""
        # Create a map of name/arcnames
        arcnames = []
        for key, name in data.items():
            if name:
                if key == 'srctree':
                    # all sources, no matter where they are located, go into the sources directory
                    arcname = 'sources/%s' % recipe
                else:
                    arcname = name.replace(config.workspace_path, '')
                arcnames.append((name, arcname))

        for name, arcname in arcnames:
            tar.add(name, arcname=arcname)

    # Make sure workspace is non-empty and any listed include/exclude recipes are in it
    if not workspace:
        logger.info('Workspace contains no recipes, nothing to export')
        return 0
    for param, recipes in {'include': args.include, 'exclude': args.exclude}.items():
        for recipe in recipes:
            if recipe not in workspace:
                logger.error('Recipe (%s) on %s argument not in the current workspace' % (recipe, param))
                return 1

    name = args.file
    default_name = "%s-%s.tar.gz" % (default_arcname_prefix, datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
    if not name:
        name = default_name
    elif os.path.isdir(name):
        # if name is a directory, append the default name
        name = os.path.join(name, default_name)

    if os.path.exists(name) and not args.overwrite:
        # Bug fix: the message previously lacked the '% name' argument, so the
        # literal '%s' was printed instead of the archive path
        logger.error('Tar archive %s exists. Use --overwrite/-o to overwrite it' % name)
        return 1

    # if the entire workspace is excluded, quit
    if not set(workspace.keys()).difference(set(args.exclude)):
        logger.warning('All recipes in workspace excluded, nothing to export')
        return 0

    exported = []
    with tarfile.open(name, 'w:gz') as tar:
        if args.include:
            # --include and --exclude are mutually exclusive (see register_commands)
            for recipe in args.include:
                add_recipe(tar, recipe, workspace[recipe])
                exported.append(recipe)
        else:
            for recipe, data in workspace.items():
                if recipe not in args.exclude:
                    add_recipe(tar, recipe, data)
                    exported.append(recipe)

        add_metadata(tar)

    logger.info('Tar archive created at %s with the following recipes: %s' % (name, ', '.join(exported)))
    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool export subcommands"""
    parser = subparsers.add_parser('export',
                                   help='Export workspace into a tar archive',
                                   description='Export one or more recipes from current workspace into a tar archive',
                                   group='advanced')

    parser.add_argument('--file', '-f', help='Output archive file name')
    parser.add_argument('--overwrite', '-o', action="store_true", help='Overwrite previous export tar archive')
    # --include and --exclude are contradictory filters, so only one may be given
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--include', '-i', nargs='+', default=[], help='Include recipes into the tar archive')
    group.add_argument('--exclude', '-e', nargs='+', default=[], help='Exclude recipes into the tar archive')
    parser.set_defaults(func=export)
|
||||
@@ -0,0 +1,134 @@
|
||||
# Development tool - import command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2017 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool import plugin"""
|
||||
|
||||
import os
|
||||
import tarfile
|
||||
import logging
|
||||
import collections
|
||||
import json
|
||||
import fnmatch
|
||||
|
||||
from devtool import standard, setup_tinfoil, replace_from_file, DevtoolError
|
||||
from devtool import export
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def devimport(args, config, basepath, workspace):
    """Entry point for the devtool 'import' subcommand.

    Extracts a tar archive previously created by 'devtool export' into the
    current workspace, skipping appends whose corresponding recipe is not
    available in the current metadata, fixing up workspace paths inside the
    extracted appends, and registering each imported recipe's md5 record.

    Raises DevtoolError if the archive is missing or was not created by
    'devtool export'. Returns 0.
    """

    def get_pn(name):
        """Return the base filename (no directory, no extension) of a workspace recipe/append."""
        metadata = name.split('/')[-1]
        fn, _ = os.path.splitext(metadata)
        return fn

    if not os.path.exists(args.file):
        raise DevtoolError('Tar archive %s does not exist. Export your workspace using "devtool export"' % args.file)

    with tarfile.open(args.file) as tar:
        # Get exported metadata; its absence means this is not an export archive
        export_workspace_path = export_workspace = None
        try:
            metadata = tar.getmember(export.metadata)
        except KeyError:
            raise DevtoolError('The export metadata file created by "devtool export" was not found. "devtool import" can only be used to import tar archives created by "devtool export".')

        tar.extract(metadata)
        with open(metadata.name) as fdm:
            export_workspace_path, export_workspace = json.load(fdm)
        os.unlink(metadata.name)

        members = tar.getmembers()

        # Get appends and recipes from the exported archive, these
        # will be needed to find out those appends without corresponding
        # recipe pair
        append_fns, recipe_fns = set(), set()
        for member in members:
            if member.name.startswith('appends'):
                append_fns.add(get_pn(member.name))
            elif member.name.startswith('recipes'):
                recipe_fns.add(get_pn(member.name))

        # Setup tinfoil, get required data and shutdown
        tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
        try:
            current_fns = [os.path.basename(recipe[0]) for recipe in tinfoil.cooker.recipecaches[''].pkg_fn.items()]
        finally:
            tinfoil.shutdown()

        # Find those appends that do not have recipes in current metadata
        non_importables = []
        for fn in append_fns - recipe_fns:
            # Check on current metadata (covering those layers indicated in bblayers.conf)
            for current_fn in current_fns:
                if fnmatch.fnmatch(current_fn, '*' + fn.replace('%', '') + '*'):
                    break
            else:
                non_importables.append(fn)
                # Bug fix: message previously misspelled the extension as 'bbapppend'
                logger.warning('No recipe to append %s.bbappend, skipping' % fn)

        # Extract everything except the metadata file and non-importable recipes
        imported = []
        for member in members:
            if member.name == export.metadata:
                continue

            for nonimp in non_importables:
                pn = nonimp.split('_')[0]
                # do not extract data from non-importable recipes or metadata
                if member.name.startswith('appends/%s' % nonimp) or \
                   member.name.startswith('recipes/%s' % nonimp) or \
                   member.name.startswith('sources/%s' % pn):
                    break
            else:
                path = os.path.join(config.workspace_path, member.name)
                if os.path.exists(path):
                    # by default, no file overwrite is done unless -o is given by the user
                    if args.overwrite:
                        try:
                            tar.extract(member, path=config.workspace_path)
                        except PermissionError as pe:
                            logger.warning(pe)
                    else:
                        logger.warning('File already present. Use --overwrite/-o to overwrite it: %s' % member.name)
                        continue
                else:
                    tar.extract(member, path=config.workspace_path)

                # Update EXTERNALSRC and the devtool md5 file
                if member.name.startswith('appends'):
                    if export_workspace_path:
                        # appends created by 'devtool modify' just need to update the workspace
                        replace_from_file(path, export_workspace_path, config.workspace_path)

                        # appends created by 'devtool add' need replacement of exported source tree
                        pn = get_pn(member.name).split('_')[0]
                        exported_srctree = export_workspace[pn]['srctree']
                        if exported_srctree:
                            replace_from_file(path, exported_srctree, os.path.join(config.workspace_path, 'sources', pn))

                    # NOTE(review): if export_workspace_path is falsy and there were no
                    # non-importables, 'pn' may be unbound here — confirm against callers
                    standard._add_md5(config, pn, path)
                    imported.append(pn)

    if imported:
        logger.info('Imported recipes into workspace %s: %s' % (config.workspace_path, ', '.join(imported)))
    else:
        logger.warning('No recipes imported into the workspace')

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool import subcommands"""
    parser = subparsers.add_parser('import',
                                   help='Import exported tar archive into workspace',
                                   description='Import tar archive previously created by "devtool export" into workspace',
                                   group='advanced')
    parser.add_argument('file', metavar='FILE', help='Name of the tar archive to import')
    parser.add_argument('--overwrite', '-o', action="store_true", help='Overwrite files when extracting')
    parser.set_defaults(func=devimport)
|
||||
@@ -0,0 +1,81 @@
|
||||
# OpenEmbedded Development tool - menuconfig command plugin
|
||||
#
|
||||
# Copyright (C) 2018 Xilinx
|
||||
# Written by: Chandana Kalluri <ckalluri@xilinx.com>
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""Devtool menuconfig plugin"""
|
||||
|
||||
import os
|
||||
import bb
|
||||
import logging
|
||||
import argparse
|
||||
import re
|
||||
import glob
|
||||
from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command
|
||||
from devtool import check_workspace_recipe
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def menuconfig(args, config, basepath, workspace):
    """Entry point for the devtool 'menuconfig' subcommand"""

    rd = ""
    kconfigpath = ""
    pn_src = ""
    localfilesdir = ""
    workspace_dir = ""
    # Parse the recipe to check menuconfig support and gather paths
    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.component, appends=True, filter_workspace=False)
        if not rd:
            return 1

        # The recipe must be in the devtool workspace (raises DevtoolError otherwise)
        check_workspace_recipe(workspace, args.component)
        pn = rd.getVar('PN')

        # Only recipes that define a do_menuconfig task can be configured this way
        if not rd.getVarFlag('do_menuconfig','task'):
            raise DevtoolError("This recipe does not support menuconfig option")

        workspace_dir = os.path.join(config.workspace_path,'sources')
        # NOTE(review): kconfigpath (recipe build dir B) is computed but not used
        # in this function — presumably consumed elsewhere; confirm
        kconfigpath = rd.getVar('B')
        pn_src = os.path.join(workspace_dir,pn)

        # add check to see if oe_local_files exists or not
        localfilesdir = os.path.join(pn_src,'oe-local-files')
        if not os.path.exists(localfilesdir):
            bb.utils.mkdirhier(localfilesdir)
            # Add gitignore to ensure source tree is clean
            gitignorefile = os.path.join(localfilesdir,'.gitignore')
            with open(gitignorefile, 'w') as f:
                f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n')
                f.write('*\n')

    finally:
        tinfoil.shutdown()

    # Run the interactive menuconfig, then save the resulting changes as a
    # config fragment under oe-local-files
    logger.info('Launching menuconfig')
    exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True)
    fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg')
    # NOTE(review): the diff result is ignored; _create_kconfig_diff writes the
    # fragment file as a side effect
    res = standard._create_kconfig_diff(pn_src,rd,fragment)

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    parser_menuconfig = subparsers.add_parser('menuconfig',
                                              help='Alter build-time configuration for a recipe',
                                              description='Launches the make menuconfig command (for recipes where do_menuconfig is available), allowing users to make changes to the build-time configuration. Creates a config fragment corresponding to changes made.',
                                              group='advanced')
    # Bug fix: help text previously misspelled 'component' as 'compenent'
    parser_menuconfig.add_argument('component', help='component to alter config')
    parser_menuconfig.set_defaults(func=menuconfig, fixed_setup=context.fixed_setup)
|
||||
@@ -0,0 +1,50 @@
|
||||
# Development tool - package command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2015 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool plugin containing the package subcommands"""
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import logging
|
||||
from bb.process import ExecutionError
|
||||
from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def package(args, config, basepath, workspace):
    """Entry point for the devtool 'package' subcommand.

    Runs the appropriate package_write task for a workspace recipe and
    reports where the resulting packages were deployed.

    Returns 0 on success, or the bitbake exit code on failure.
    """
    # The recipe must be in the devtool workspace (raises DevtoolError otherwise)
    check_workspace_recipe(workspace, args.recipename)

    tinfoil = setup_tinfoil(basepath=basepath, config_only=True)
    try:
        # devtool config may override the package type; fall back to IMAGE_PKGTYPE
        image_pkgtype = config.get('Package', 'image_pkgtype', '')
        if not image_pkgtype:
            image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE')

        deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper())
    finally:
        tinfoil.shutdown()

    package_task = config.get('Package', 'package_task', 'package_write_%s' % image_pkgtype)
    try:
        exec_build_env_command(config.init_path, basepath, 'bitbake -c %s %s' % (package_task, args.recipename), watch=True)
    except ExecutionError as e:
        # Bug fix: this module imports ExecutionError directly
        # ('from bb.process import ExecutionError') but never imports 'bb', so
        # the previous 'except bb.process.ExecutionError' raised a NameError
        # whenever the bitbake invocation failed. Use the imported name.
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode

    logger.info('Your packages are in %s' % deploy_dir_pkg)

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from the package plugin"""
    # The package command is only useful inside the extensible SDK (fixed setup)
    if context.fixed_setup:
        parser_package = subparsers.add_parser('package',
                                               help='Build packages for a recipe',
                                               description='Builds packages for a recipe\'s output files',
                                               group='testbuild', order=-5)
        parser_package.add_argument('recipename', help='Recipe to package')
        parser_package.set_defaults(func=package)
|
||||
@@ -0,0 +1,64 @@
|
||||
# Development tool - runqemu command plugin
|
||||
#
|
||||
# Copyright (C) 2015 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
"""Devtool runqemu plugin"""
|
||||
|
||||
import os
|
||||
import bb
|
||||
import logging
|
||||
import argparse
|
||||
import glob
|
||||
from devtool import exec_build_env_command, setup_tinfoil, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def runqemu(args, config, basepath, workspace):
    """Entry point for the devtool 'runqemu' subcommand"""

    # Gather MACHINE and the native binary directory from bitbake configuration
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        machine = tinfoil.config_data.getVar('MACHINE')
        bindir_native = os.path.join(tinfoil.config_data.getVar('STAGING_DIR'),
                                     tinfoil.config_data.getVar('BUILD_ARCH'),
                                     tinfoil.config_data.getVar('bindir_native').lstrip(os.path.sep))
    finally:
        tinfoil.shutdown()

    # A qemu-system-* binary in the native sysroot indicates QEMU support
    if not glob.glob(os.path.join(bindir_native, 'qemu-system-*')):
        raise DevtoolError('QEMU is not available within this SDK')

    # Image name defaults to the first SDK target if not given on the command line
    imagename = args.imagename
    if not imagename:
        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
        if sdk_targets:
            imagename = sdk_targets[0]
    if not imagename:
        raise DevtoolError('Unable to determine image name to run, please specify one')

    try:
        # FIXME runqemu assumes that if OECORE_NATIVE_SYSROOT is set then it shouldn't
        # run bitbake to find out the values of various environment variables, which
        # isn't the case for the extensible SDK. Work around it for now.
        newenv = dict(os.environ)
        newenv.pop('OECORE_NATIVE_SYSROOT', '')
        exec_build_env_command(config.init_path, basepath, 'runqemu %s %s %s' % (machine, imagename, " ".join(args.args)), watch=True, env=newenv)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    # runqemu is only offered inside the extensible SDK (fixed setup)
    if context.fixed_setup:
        parser_runqemu = subparsers.add_parser('runqemu', help='Run QEMU on the specified image',
                                               description='Runs QEMU to boot the specified image',
                                               group='testbuild', order=-20)
        parser_runqemu.add_argument('imagename', help='Name of built image to boot within QEMU', nargs='?')
        # Everything after imagename is forwarded verbatim to the runqemu script
        parser_runqemu.add_argument('args', help='Any remaining arguments are passed to the runqemu script (pass --help after imagename to see what these are)',
                                    nargs=argparse.REMAINDER)
        parser_runqemu.set_defaults(func=runqemu)
|
||||
@@ -0,0 +1,329 @@
|
||||
# Development tool - sdk-update command plugin
|
||||
#
|
||||
# Copyright (C) 2015-2016 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import logging
|
||||
import glob
|
||||
import shutil
|
||||
import errno
|
||||
import sys
|
||||
import tempfile
|
||||
import re
|
||||
from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def parse_locked_sigs(sigfile_path):
    """Parse a locked-sigs file into a {'<pn>:<task>': '<hash>'} dictionary.

    Lines without a ':' separator (e.g. the variable assignment wrapper
    lines) are ignored; everything after the final ':' on a line is taken
    as the signature hash.
    """
    signatures = {}
    with open(sigfile_path) as sigfile:
        for entry in sigfile:
            if ':' not in entry:
                continue
            task_part, _, hash_part = entry.rpartition(':')
            signatures[task_part.strip()] = hash_part.split()[0]
    return signatures
|
||||
|
||||
def generate_update_dict(sigfile_new, sigfile_old):
    """Return a {'<pn>:<task>': '<hash>'} dict of entries needing an update.

    An entry needs updating when it is absent from the old locked-sigs file
    or when its signature hash differs from the old one.
    """
    new_sigs = parse_locked_sigs(sigfile_new)
    old_sigs = parse_locked_sigs(sigfile_old)
    # .get() returns None for missing keys, which never equals a hash string,
    # so both "new entry" and "changed hash" cases are covered in one test
    return {taskkey: sighash
            for taskkey, sighash in new_sigs.items()
            if old_sigs.get(taskkey) != sighash}
|
||||
|
||||
def get_sstate_objects(update_dict, sstate_dir):
    """Return a list of sstate archive paths to install.

    For each signature hash in *update_dict*, the sstate cache is searched
    both at the top level and one directory deeper (native/arch subdirs).
    Exactly one match is expected per hash; multiple matches are reported
    as an error and contribute nothing to the result.
    """
    objects = []
    for taskkey, sighash in update_dict.items():
        # hash-prefixed subdirectory layout: <dir>/<first two hex chars>/...
        patterns = (
            sstate_dir + '/' + sighash[:2] + '/*' + sighash + '*.tgz',
            sstate_dir + '/*/' + sighash[:2] + '/*' + sighash + '*.tgz',
        )
        matches = set()
        for pattern in patterns:
            matches.update(glob.glob(pattern))
        if len(matches) == 1:
            objects.extend(matches)
        elif len(matches) > 1:
            logger.error("More than one matching sstate object found for %s" % sighash)

    return objects
|
||||
|
||||
def mkdir(d):
    """Create directory *d* (including parents), tolerating its prior existence."""
    try:
        os.makedirs(d)
    except OSError as exc:
        # Only "already exists" is benign; anything else is a real failure
        if exc.errno != errno.EEXIST:
            raise
|
||||
|
||||
def install_sstate_objects(sstate_objects, src_sdk, dest_sdk):
    """Install sstate objects into the destination SDK.

    Each path in *sstate_objects* (located under *src_sdk*) is copied to the
    corresponding location under *dest_sdk*, creating directories as needed.

    Raises DevtoolError if dest_sdk has no sstate-cache directory.
    """
    sstate_dir = os.path.join(dest_sdk, 'sstate-cache')
    if not os.path.exists(sstate_dir):
        # Bug fix: this used logger.error() followed by a bare 'raise', which
        # (with no active exception) produced a confusing
        # "RuntimeError: No active exception to re-raise". Raise a proper
        # DevtoolError carrying the same message instead.
        raise DevtoolError("Missing sstate-cache directory in %s, it might not be an extensible SDK." % dest_sdk)
    for sb in sstate_objects:
        dst = sb.replace(src_sdk, dest_sdk)
        destdir = os.path.dirname(dst)
        mkdir(destdir)
        logger.debug("Copying %s to %s" % (sb, dst))
        shutil.copy(sb, dst)
|
||||
|
||||
def check_manifest(fn, basepath):
    """Return the manifest-relative paths of files whose checksum has changed.

    *fn* is a manifest of '<sha256> <relative path>' lines; each listed file
    is re-hashed relative to *basepath* and compared against the recorded sum.
    """
    import bb.utils
    changedfiles = []
    with open(fn, 'r') as f:
        for line in f:
            fields = line.split()
            if len(fields) < 2:
                # blank or malformed line - nothing to verify
                continue
            recorded_sum = fields[0]
            relpath = fields[1]
            current_sum = bb.utils.sha256_file(os.path.join(basepath, relpath))
            if recorded_sum != current_sum:
                logger.debug('File %s changed: old csum = %s, new = %s' % (os.path.join(basepath, relpath), current_sum, recorded_sum))
                changedfiles.append(relpath)
    return changedfiles
|
||||
|
||||
def sdk_update(args, config, basepath, workspace):
    """Entry point for devtool sdk-update command"""
    # Update server can come from the command line or the [SDK] config section
    updateserver = args.updateserver
    if not updateserver:
        updateserver = config.get('SDK', 'updateserver', '')
    logger.debug("updateserver: %s" % updateserver)

    # Make sure we are using sdk-update from within SDK
    logger.debug("basepath = %s" % basepath)
    old_locked_sig_file_path = os.path.join(basepath, 'conf/locked-sigs.inc')
    if not os.path.exists(old_locked_sig_file_path):
        logger.error("Not using devtool's sdk-update command from within an extensible SDK. Please specify correct basepath via --basepath option")
        return -1
    else:
        logger.debug("Found conf/locked-sigs.inc in %s" % basepath)

    if not '://' in updateserver:
        logger.error("Update server must be a URL")
        return -1

    layers_dir = os.path.join(basepath, 'layers')
    conf_dir = os.path.join(basepath, 'conf')

    # Grab variable values
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR')
        sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS')
        site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION')
    finally:
        tinfoil.shutdown()

    # Stage the downloaded update in a temp dir; only move it into place once
    # everything has been fetched successfully
    tmpsdk_dir = tempfile.mkdtemp()
    try:
        os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
        # NOTE(review): new_locked_sig_file_path is assigned but not used in
        # this function - confirm whether it is dead code
        new_locked_sig_file_path = os.path.join(tmpsdk_dir, 'conf', 'locked-sigs.inc')
        # Fetch manifest from server
        tmpmanifest = os.path.join(tmpsdk_dir, 'conf', 'sdk-conf-manifest')
        ret = subprocess.call("wget -q -O %s %s/conf/sdk-conf-manifest" % (tmpmanifest, updateserver), shell=True)
        if ret != 0:
            # NOTE(review): "dowload" typo in user-visible message
            logger.error("Cannot dowload files from %s" % updateserver)
            return ret
        # Compare the fetched manifest against local files to find what changed
        changedfiles = check_manifest(tmpmanifest, basepath)
        if not changedfiles:
            logger.info("Already up-to-date")
            return 0
        # Update metadata
        logger.debug("Updating metadata via git ...")
        #Check for the status before doing a fetch and reset
        if os.path.exists(os.path.join(basepath, 'layers/.git')):
            out = subprocess.check_output("git status --porcelain", shell=True, cwd=layers_dir)
            if not out:
                # Clean tree: hard-reset the layers checkout to the upstream branch
                ret = subprocess.call("git fetch --all; git reset --hard @{u}", shell=True, cwd=layers_dir)
            else:
                logger.error("Failed to update metadata as there have been changes made to it. Aborting.");
                logger.error("Changed files:\n%s" % out);
                return -1
        else:
            # No git checkout present - force the clone fallback below
            ret = -1
        if ret != 0:
            # Fall back to a fresh clone into the staging area
            ret = subprocess.call("git clone %s/layers/.git" % updateserver, shell=True, cwd=tmpsdk_dir)
            if ret != 0:
                logger.error("Updating metadata via git failed")
                return ret
        logger.debug("Updating conf files ...")
        for changedfile in changedfiles:
            ret = subprocess.call("wget -q -O %s %s/%s" % (changedfile, updateserver, changedfile), shell=True, cwd=tmpsdk_dir)
            if ret != 0:
                logger.error("Updating %s failed" % changedfile)
                return ret

        # Check if UNINATIVE_CHECKSUM changed
        uninative = False
        if 'conf/local.conf' in changedfiles:
            def read_uninative_checksums(fn):
                """Extract (buildarch, checksum) pairs from UNINATIVE_CHECKSUM lines in fn."""
                chksumitems = []
                with open(fn, 'r') as f:
                    for line in f:
                        if line.startswith('UNINATIVE_CHECKSUM'):
                            # Split on brackets/quotes: field 1 is the arch key,
                            # field 3 is the checksum value
                            splitline = re.split(r'[\[\]"\']', line)
                            if len(splitline) > 3:
                                chksumitems.append((splitline[1], splitline[3]))
                return chksumitems

            oldsums = read_uninative_checksums(os.path.join(basepath, 'conf/local.conf'))
            newsums = read_uninative_checksums(os.path.join(tmpsdk_dir, 'conf/local.conf'))
            if oldsums != newsums:
                # Download the new uninative tarballs into the staging area
                uninative = True
                for buildarch, chksum in newsums:
                    uninative_file = os.path.join('downloads', 'uninative', chksum, '%s-nativesdk-libc.tar.bz2' % buildarch)
                    mkdir(os.path.join(tmpsdk_dir, os.path.dirname(uninative_file)))
                    # NOTE(review): the return code of this wget is not checked
                    ret = subprocess.call("wget -q -O %s %s/%s" % (uninative_file, updateserver, uninative_file), shell=True, cwd=tmpsdk_dir)

        # Ok, all is well at this point - move everything over
        tmplayers_dir = os.path.join(tmpsdk_dir, 'layers')
        if os.path.exists(tmplayers_dir):
            shutil.rmtree(layers_dir)
            shutil.move(tmplayers_dir, layers_dir)
        for changedfile in changedfiles:
            destfile = os.path.join(basepath, changedfile)
            os.remove(destfile)
            shutil.move(os.path.join(tmpsdk_dir, changedfile), destfile)
        os.remove(os.path.join(conf_dir, 'sdk-conf-manifest'))
        shutil.move(tmpmanifest, conf_dir)
        if uninative:
            shutil.rmtree(os.path.join(basepath, 'downloads', 'uninative'))
            shutil.move(os.path.join(tmpsdk_dir, 'downloads', 'uninative'), os.path.join(basepath, 'downloads'))

        if not sstate_mirrors:
            # Point sstate at the update server if no mirror is configured yet
            with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
                f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
                f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH"\n' % updateserver)
    finally:
        shutil.rmtree(tmpsdk_dir)

    if not args.skip_prepare:
        # Find all potentially updateable tasks
        sdk_update_targets = []
        tasks = ['do_populate_sysroot', 'do_packagedata']
        for root, _, files in os.walk(stamps_dir):
            for fn in files:
                if not '.sigdata.' in fn:
                    for task in tasks:
                        if '.%s.' % task in fn or '.%s_setscene.' % task in fn:
                            sdk_update_targets.append('%s:%s' % (os.path.basename(root), task))
        # Run bitbake command for the whole SDK
        logger.info("Preparing build system... (This may take some time.)")
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake --setscene-only %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            # A dry run afterwards should show nothing left to execute; any
            # "Running task" lines mean the sstate preparation was incomplete
            output, _ = exec_build_env_command(config.init_path, basepath, 'bitbake -n %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            runlines = []
            for line in output.splitlines():
                if 'Running task ' in line:
                    runlines.append(line)
            if runlines:
                logger.error('Unexecuted tasks found in preparation log:\n %s' % '\n '.join(runlines))
                return -1
        except bb.process.ExecutionError as e:
            # NOTE(review): 'bb' does not appear among this module's visible
            # imports; confirm it is in scope, otherwise this clause would
            # raise NameError when the bitbake command fails
            logger.error('Preparation failed:\n%s' % e.stdout)
            return -1
    return 0
|
||||
|
||||
def sdk_install(args, config, basepath, workspace):
    """Entry point for the devtool sdk-install command.

    Installs the development artifacts (sysroot contents and package data)
    for one or more recipes into an extensible SDK, preferring shared-state
    restoration unless --allow-build was given. Returns 1 if a recipe fails
    to parse; raises DevtoolError on installation failure.
    """
    import oe.recipeutils
    import bb.process

    for recipe in args.recipename:
        if recipe in workspace:
            raise DevtoolError('recipe %s is a recipe in your workspace' % recipe)

    tasks = ['do_populate_sysroot', 'do_packagedata']
    stampprefixes = {}
    def checkstamp(recipe):
        # A recipe counts as installed if a non-sigdata stamp exists for the
        # first task, in either its normal or setscene variant.
        stampprefix = stampprefixes[recipe]
        for stamp in glob.glob(stampprefix + '*'):
            if '.sigdata.' not in stamp and stamp.startswith((stampprefix + '.', stampprefix + '_setscene.')):
                return True
        return False

    install_recipes = []
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        for recipe in args.recipename:
            rd = parse_recipe(config, tinfoil, recipe, True)
            if not rd:
                return 1
            stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP'), tasks[0])
            if checkstamp(recipe):
                logger.info('%s is already installed' % recipe)
            else:
                install_recipes.append(recipe)
    finally:
        tinfoil.shutdown()

    if install_recipes:
        logger.info('Installing %s...' % ', '.join(install_recipes))
        install_tasks = []
        for recipe in install_recipes:
            for task in tasks:
                # -native recipes have no package data to install
                if recipe.endswith('-native') and 'package' in task:
                    continue
                install_tasks.append('%s:%s' % (recipe, task))
        options = ''
        if not args.allow_build:
            # By default only restore from sstate; never build from source
            options += ' --setscene-only'
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake %s %s' % (options, ' '.join(install_tasks)), watch=True)
        except bb.process.ExecutionError as e:
            raise DevtoolError('Failed to install %s:\n%s' % (recipe, str(e)))
        # Verify every requested recipe now has a stamp. (The original code
        # set a 'failed' flag *after* the raise below — unreachable — and
        # checked it afterwards; that dead machinery has been removed.)
        for recipe in install_recipes:
            if checkstamp(recipe):
                logger.info('Successfully installed %s' % recipe)
            else:
                raise DevtoolError('Failed to install %s - unavailable' % recipe)

    try:
        exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots', watch=True)
    except bb.process.ExecutionError as e:
        raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e)))
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from the sdk plugin"""
    # These commands only make sense inside an extensible SDK (fixed setup)
    if context.fixed_setup:
        parser_sdk = subparsers.add_parser('sdk-update',
                                           help='Update SDK components',
                                           description='Updates installed SDK components from a remote server',
                                           group='sdk')
        updateserver = context.config.get('SDK', 'updateserver', '')
        # If a default server is configured, the positional becomes optional
        if updateserver:
            parser_sdk.add_argument('updateserver', help='The update server to fetch latest SDK components from (default %s)' % updateserver, nargs='?')
        else:
            parser_sdk.add_argument('updateserver', help='The update server to fetch latest SDK components from')
        parser_sdk.add_argument('--skip-prepare', action="store_true", help='Skip re-preparing the build system after updating (for debugging only)')
        parser_sdk.set_defaults(func=sdk_update)

        parser_sdk_install = subparsers.add_parser('sdk-install',
                                                   help='Install additional SDK components',
                                                   description='Installs additional recipe development files into the SDK. (You can use "devtool search" to find available recipes.)',
                                                   group='sdk')
        parser_sdk_install.add_argument('recipename', help='Name of the recipe to install the development artifacts for', nargs='+')
        parser_sdk_install.add_argument('-s', '--allow-build', help='Allow building requested item(s) from source', action='store_true')
        parser_sdk_install.set_defaults(func=sdk_install)
||||
@@ -0,0 +1,109 @@
|
||||
# Development tool - search command plugin
|
||||
#
|
||||
# Copyright (C) 2015 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
"""Devtool search plugin"""
|
||||
|
||||
import os
|
||||
import bb
|
||||
import logging
|
||||
import argparse
|
||||
import re
|
||||
from devtool import setup_tinfoil, parse_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def search(args, config, basepath, workspace):
    """Entry point for the devtool 'search' subcommand"""

    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
        defsummary = tinfoil.config_data.getVar('SUMMARY', False) or ''

        keyword_rc = re.compile(args.keyword)

        def print_match(pn):
            # Print "name  summary", suppressing the summary if it is just
            # the distro-wide default value
            rd = parse_recipe(config, tinfoil, pn, True)
            if not rd:
                return
            summary = rd.getVar('SUMMARY')
            if summary == rd.expand(defsummary):
                summary = ''
            print("%s %s" % (pn.ljust(20), summary))

        matches = []
        if os.path.exists(pkgdata_dir):
            # First pass: search the pkgdata files (built recipes)
            for fn in os.listdir(pkgdata_dir):
                pfn = os.path.join(pkgdata_dir, fn)
                if not os.path.isfile(pfn):
                    continue

                packages = []
                match = bool(keyword_rc.search(fn))

                if not match:
                    # Collect the recipe's packages from its pkgdata file
                    with open(pfn, 'r') as f:
                        for line in f:
                            if line.startswith('PACKAGES:'):
                                packages = line.split(':', 1)[1].strip().split()

                    for pkg in packages:
                        if keyword_rc.search(pkg):
                            match = True
                            break
                        # Only consult runtime data for packages actually packaged
                        if os.path.exists(os.path.join(pkgdata_dir, 'runtime', pkg + '.packaged')):
                            with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f:
                                for line in f:
                                    if ': ' not in line:
                                        continue
                                    key, value = line.split(': ', 1)
                                    value = value.strip()
                                    key = key.replace(":" + pkg, "")
                                    if key in ['PKG', 'DESCRIPTION', 'FILES_INFO', 'FILERPROVIDES']:
                                        if keyword_rc.search(value):
                                            match = True
                                            break
                if match:
                    print_match(fn)
                    matches.append(fn)
        else:
            logger.warning('Package data is not available, results may be limited')

        # Second pass: search all parseable recipes (covers unbuilt ones)
        for recipe in tinfoil.all_recipes():
            if args.fixed_setup and 'nativesdk' in recipe.inherits():
                continue

            match = bool(keyword_rc.search(recipe.pn))
            if not match:
                match = any(keyword_rc.search(prov) for prov in recipe.provides)
            if not match:
                match = any(keyword_rc.search(rprov) for rprov in recipe.rprovides)
            if match and recipe.pn not in matches:
                print_match(recipe.pn)
    finally:
        tinfoil.shutdown()

    return 0
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    parser_search = subparsers.add_parser('search', help='Search available recipes',
                                          description='Searches for available recipes. Matches on recipe name, package name, description and installed files, and prints the recipe name and summary on match.',
                                          group='info')
    parser_search.add_argument('keyword', help='Keyword to search for (regular expression syntax allowed, use quotes to avoid shell expansion)')
    # no_workspace: search works without a devtool workspace being set up
    parser_search.set_defaults(func=search, no_workspace=True, fixed_setup=context.fixed_setup)
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,658 @@
|
||||
# Development tool - upgrade command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2017 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool upgrade plugin"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import shutil
|
||||
import tempfile
|
||||
import logging
|
||||
import argparse
|
||||
import scriptutils
|
||||
import errno
|
||||
import bb
|
||||
|
||||
devtool_path = os.path.dirname(os.path.realpath(__file__)) + '/../../../meta/lib'
|
||||
sys.path = sys.path + [devtool_path]
|
||||
|
||||
import oe.recipeutils
|
||||
from devtool import standard
|
||||
from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build, update_unlockedsigs, check_prerelease_version
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def _run(cmd, cwd=''):
    """Run a shell command in *cwd*, logging it first at debug level."""
    logger.debug("Running command %s> %s", cwd, cmd)
    return bb.process.run('%s' % cmd, cwd=cwd)
||||
|
||||
def _get_srctree(tmpdir):
    """Return the effective source root under *tmpdir*.

    If the unpacked archive produced exactly one source subdirectory,
    descend into it; otherwise the temp dir itself is the source tree.
    """
    dirs = scriptutils.filter_src_subdirs(tmpdir)
    if len(dirs) == 1:
        return os.path.join(tmpdir, dirs[0])
    return tmpdir
||||
|
||||
def _copy_source_code(orig, dest):
    """Move every file listed by git in *orig* into *dest*, creating
    destination directories as needed."""
    for relpath in standard._ls_tree(orig):
        target = os.path.join(dest, relpath)
        bb.utils.mkdirhier(os.path.dirname(target))
        shutil.move(os.path.join(orig, relpath), target)
|
||||
def _remove_patch_dirs(recipefolder):
|
||||
for root, dirs, files in os.walk(recipefolder):
|
||||
for d in dirs:
|
||||
shutil.rmtree(os.path.join(root,d))
|
||||
|
||||
def _recipe_contains(rd, var):
    """Return True if *var* is set in the recipe file itself or in a file
    under the recipe's own directory (rather than e.g. a class or conf)."""
    rf = rd.getVar('FILE')
    recipedir = os.path.dirname(rf) + os.sep
    varfiles = oe.recipeutils.get_var_files(rf, [var], rd)
    return any(fn and fn.startswith(recipedir) for fn in varfiles.values())
||||
|
||||
def _rename_recipe_dirs(oldpv, newpv, path):
|
||||
for root, dirs, files in os.walk(path):
|
||||
# Rename directories with the version in their name
|
||||
for olddir in dirs:
|
||||
if olddir.find(oldpv) != -1:
|
||||
newdir = olddir.replace(oldpv, newpv)
|
||||
if olddir != newdir:
|
||||
shutil.move(os.path.join(path, olddir), os.path.join(path, newdir))
|
||||
# Rename any inc files with the version in their name (unusual, but possible)
|
||||
for oldfile in files:
|
||||
if oldfile.endswith('.inc'):
|
||||
if oldfile.find(oldpv) != -1:
|
||||
newfile = oldfile.replace(oldpv, newpv)
|
||||
if oldfile != newfile:
|
||||
bb.utils.rename(os.path.join(path, oldfile),
|
||||
os.path.join(path, newfile))
|
||||
|
||||
def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path):
|
||||
oldrecipe = os.path.basename(oldrecipe)
|
||||
if oldrecipe.endswith('_%s.bb' % oldpv):
|
||||
newrecipe = '%s_%s.bb' % (bpn, newpv)
|
||||
if oldrecipe != newrecipe:
|
||||
shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe))
|
||||
else:
|
||||
newrecipe = oldrecipe
|
||||
return os.path.join(path, newrecipe)
|
||||
|
||||
def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path):
    """Rename version-specific directories/includes under *path*, then the
    recipe file itself; return the resulting recipe path."""
    _rename_recipe_dirs(oldpv, newpv, path)
    return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path)
||||
|
||||
def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, rev, copied, workspace, d):
    """Write a workspace bbappend for the upgraded recipe.

    Points the recipe at the external source tree via externalsrc and
    records bookkeeping comments (initial_rev, original files). Returns
    the path of the bbappend written. Raises DevtoolError if the recipe
    file *rc* does not exist.
    """
    if not os.path.exists(rc):
        raise DevtoolError("bbappend not created because %s does not exist" % rc)

    appendpath = os.path.join(workspace, 'appends')
    if not os.path.exists(appendpath):
        bb.utils.mkdirhier(appendpath)

    brf = os.path.basename(os.path.splitext(rc)[0]) # rc basename

    srctree = os.path.abspath(srctree)
    pn = d.getVar('PN')
    af = os.path.join(appendpath, '%s.bbappend' % brf)
    with open(af, 'w') as f:
        f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n')
        # Local files can be modified/tracked in separate subdir under srctree
        # Mostly useful for packages with S != WORKDIR
        f.write('FILESPATH:prepend := "%s:"\n' %
                os.path.join(srctreebase, 'oe-local-files'))
        f.write('# srctreebase: %s\n' % srctreebase)
        f.write('inherit externalsrc\n')
        # Fix: the two fragments below previously concatenated with no
        # separating space, writing "affectingmultiple" into the bbappend
        f.write(('# NOTE: We use pn- overrides here to avoid affecting '
                 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
        f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))
        b_is_s = use_external_build(same_dir, no_same_dir, d)
        if b_is_s:
            f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
        f.write('\n')
        if rev:
            f.write('# initial_rev: %s\n' % rev)
        if copied:
            f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE')))
            f.write('# original_files: %s\n' % ' '.join(copied))
    return af
||||
|
||||
def _cleanup_on_error(rd, srctree):
|
||||
if os.path.exists(rd):
|
||||
shutil.rmtree(rd)
|
||||
srctree = os.path.abspath(srctree)
|
||||
if os.path.exists(srctree):
|
||||
shutil.rmtree(srctree)
|
||||
|
||||
def _upgrade_error(e, rd, srctree, keep_failure=False, extramsg=None):
    """Report a failed upgrade and exit the process.

    Cleans up the partial recipe dir and source tree unless *keep_failure*
    was requested, logs the error (and *extramsg* if given), then exits 1.
    """
    if not keep_failure:
        _cleanup_on_error(rd, srctree)
    logger.error(e)
    if extramsg:
        logger.error(extramsg)
    if keep_failure:
        logger.info('Preserving failed upgrade files (--keep-failure)')
    sys.exit(1)
||||
|
||||
def _get_uri(rd):
|
||||
srcuris = rd.getVar('SRC_URI').split()
|
||||
if not len(srcuris):
|
||||
raise DevtoolError('SRC_URI not found on recipe')
|
||||
# Get first non-local entry in SRC_URI - usually by convention it's
|
||||
# the first entry, but not always!
|
||||
srcuri = None
|
||||
for entry in srcuris:
|
||||
if not entry.startswith('file://'):
|
||||
srcuri = entry
|
||||
break
|
||||
if not srcuri:
|
||||
raise DevtoolError('Unable to find non-local entry in SRC_URI')
|
||||
srcrev = '${AUTOREV}'
|
||||
if '://' in srcuri:
|
||||
# Fetch a URL
|
||||
rev_re = re.compile(';rev=([^;]+)')
|
||||
res = rev_re.search(srcuri)
|
||||
if res:
|
||||
srcrev = res.group(1)
|
||||
srcuri = rev_re.sub('', srcuri)
|
||||
return srcuri, srcrev
|
||||
|
||||
def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd):
    """Extract sources of a recipe with a new version.

    For git recipes, checks out the new revision in-place; for archive
    recipes, fetches and unpacks the new tarball, replacing the old tree.
    Unless *no_patch* is set, rebases the devtool patch branches onto the
    new base. Returns (rev, md5, sha256, srcbranch, srcsubdir_rel).
    """

    def __run(cmd):
        """Simple wrapper which calls _run with srctree as cwd"""
        return _run(cmd, srctree)

    crd = rd.createCopy()

    pv = crd.getVar('PV')
    crd.setVar('PV', newpv)

    tmpsrctree = None
    uri, rev = _get_uri(crd)
    if srcrev:
        rev = srcrev
    if uri.startswith('git://') or uri.startswith('gitsm://'):
        # Git recipe: fetch and check out the requested revision in-place
        __run('git fetch')
        __run('git checkout %s' % rev)
        __run('git tag -f devtool-base-new')
        md5 = None
        sha256 = None
        _, _, _, _, _, params = bb.fetch2.decodeurl(uri)
        srcsubdir_rel = params.get('destsuffix', 'git')
        if not srcbranch:
            check_branch, check_branch_err = __run('git branch -r --contains %s' % srcrev)
            get_branch = [x.strip() for x in check_branch.splitlines()]
            # Remove HEAD reference point and drop remote prefix
            get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
            if len(get_branch) == 1:
                # If srcrev is on only ONE branch, then use that branch
                srcbranch = get_branch[0]
            elif 'main' in get_branch:
                # If srcrev is on multiple branches, then choose 'main' if it is one of them
                srcbranch = 'main'
            elif 'master' in get_branch:
                # Otherwise choose 'master' if it is one of the branches
                srcbranch = 'master'
            else:
                # If get_branch contains more than one objects, then display error and exit.
                mbrch = '\n ' + '\n '.join(get_branch)
                raise DevtoolError('Revision %s was found on multiple branches: %s\nPlease provide the correct branch in the devtool command with "--srcbranch" or "-B" option.' % (srcrev, mbrch))
    else:
        # Archive recipe: start a fresh branch from the original base
        __run('git checkout devtool-base -b devtool-%s' % newpv)

        tmpdir = tempfile.mkdtemp(prefix='devtool')
        try:
            checksums, ftmpdir = scriptutils.fetch_url(tinfoil, uri, rev, tmpdir, logger, preserve_tmp=keep_temp)
        except scriptutils.FetchUrlFailure as e:
            raise DevtoolError(e)

        if ftmpdir and keep_temp:
            logger.info('Fetch temp directory is %s' % ftmpdir)

        md5 = checksums['md5sum']
        sha256 = checksums['sha256sum']

        tmpsrctree = _get_srctree(tmpdir)
        srctree = os.path.abspath(srctree)
        srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir)

        # Delete all sources so we ensure no stray files are left over
        for item in os.listdir(srctree):
            if item in ['.git', 'oe-local-files']:
                continue
            itempath = os.path.join(srctree, item)
            if os.path.isdir(itempath):
                shutil.rmtree(itempath)
            else:
                os.remove(itempath)

        # Copy in new ones
        _copy_source_code(tmpsrctree, srctree)

        (stdout, _) = __run('git ls-files --modified --others')
        filelist = stdout.splitlines()
        pbar = bb.ui.knotty.BBProgress('Adding changed files', len(filelist))
        pbar.start()
        # Batch the git adds to avoid exceeding command-line length limits
        batchsize = 100
        for i in range(0, len(filelist), batchsize):
            batch = filelist[i:i+batchsize]
            __run('git add -f -A %s' % ' '.join(['"%s"' % item for item in batch]))
            pbar.update(i)
        pbar.finish()

        useroptions = []
        oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
        __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv))
        __run('git tag -f devtool-base-%s' % newpv)

    (stdout, _) = __run('git rev-parse HEAD')
    rev = stdout.rstrip()

    if no_patch:
        patches = oe.recipeutils.get_recipe_patches(crd)
        if patches:
            logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n  %s' % '\n  '.join([os.path.basename(patch) for patch in patches]))
    else:
        # Rebase the patched branch (and any override branches) onto the new base
        __run('git checkout devtool-patched -b %s' % branch)
        (stdout, _) = __run('git branch --list devtool-override-*')
        branches_to_rebase = [branch] + stdout.split()
        for b in branches_to_rebase:
            logger.info("Rebasing {} onto {}".format(b, rev))
            __run('git checkout %s' % b)
            try:
                __run('git rebase %s' % rev)
            except bb.process.ExecutionError as e:
                if 'conflict' in e.stdout:
                    logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
                    __run('git rebase --abort')
                else:
                    logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
        __run('git checkout %s' % branch)

    if tmpsrctree:
        if keep_temp:
            logger.info('Preserving temporary directory %s' % tmpsrctree)
        else:
            shutil.rmtree(tmpsrctree)
            if tmpdir != tmpsrctree:
                shutil.rmtree(tmpdir)

    return (rev, md5, sha256, srcbranch, srcsubdir_rel)
||||
|
||||
def _add_license_diff_to_recipe(path, diff):
|
||||
notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'.
|
||||
# The following is the difference between the old and the new license text.
|
||||
# Please update the LICENSE value if needed, and summarize the changes in
|
||||
# the commit message via 'License-Update:' tag.
|
||||
# (example: 'License-Update: copyright years updated.')
|
||||
#
|
||||
# The changes:
|
||||
#
|
||||
"""
|
||||
commented_diff = "\n".join(["# {}".format(l) for l in diff.split('\n')])
|
||||
with open(path, 'rb') as f:
|
||||
orig_content = f.read()
|
||||
with open(path, 'wb') as f:
|
||||
f.write(notice_text.encode())
|
||||
f.write(commented_diff.encode())
|
||||
f.write("\n#\n\n".encode())
|
||||
f.write(orig_content)
|
||||
|
||||
def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
    """Creates the new recipe under workspace.

    Copies the recipe files into the workspace, renames them for the new
    version and patches SRCREV/SRC_URI/checksums/S/LIC_FILES_CHKSUM as
    needed. Returns (fullpath, copied).
    """

    bpn = rd.getVar('BPN')
    path = os.path.join(workspace, 'recipes', bpn)
    bb.utils.mkdirhier(path)
    copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True)
    if not copied:
        raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn)
    logger.debug('Copied %s to %s' % (copied, path))

    oldpv = rd.getVar('PV')
    if not newpv:
        newpv = oldpv
    origpath = rd.getVar('FILE')
    fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path)
    logger.debug('Upgraded %s => %s' % (origpath, fullpath))

    newvalues = {}
    if _recipe_contains(rd, 'PV') and newpv != oldpv:
        newvalues['PV'] = newpv

    if srcrev:
        newvalues['SRCREV'] = srcrev

    if srcbranch:
        # Update (only) the first git entry's branch parameter in SRC_URI
        src_uri = oe.recipeutils.split_var_value(rd.getVar('SRC_URI', False) or '')
        changed = False
        replacing = True
        new_src_uri = []
        for entry in src_uri:
            try:
                scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
            except bb.fetch2.MalformedUrl as e:
                raise DevtoolError("Could not decode SRC_URI: {}".format(e))
            if replacing and scheme in ['git', 'gitsm']:
                branch = params.get('branch', 'master')
                if rd.expand(branch) != srcbranch:
                    # Handle case where branch is set through a variable
                    res = re.match(r'\$\{([^}@]+)\}', branch)
                    if res:
                        newvalues[res.group(1)] = srcbranch
                        # We know we won't change SRC_URI now, so break out
                        break
                    else:
                        params['branch'] = srcbranch
                        entry = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
                        changed = True
                replacing = False
            new_src_uri.append(entry)
        if changed:
            newvalues['SRC_URI'] = ' '.join(new_src_uri)

    newvalues['PR'] = None

    # Work out which SRC_URI entries have changed in case the entry uses a name
    crd = rd.createCopy()
    crd.setVar('PV', newpv)
    for var, value in newvalues.items():
        crd.setVar(var, value)
    old_src_uri = (rd.getVar('SRC_URI') or '').split()
    new_src_uri = (crd.getVar('SRC_URI') or '').split()
    newnames = []
    addnames = []
    for newentry in new_src_uri:
        _, _, _, _, _, params = bb.fetch2.decodeurl(newentry)
        if 'name' in params:
            newnames.append(params['name'])
            if newentry not in old_src_uri:
                addnames.append(params['name'])
    # Find what's been set in the original recipe
    oldnames = []
    noname = False
    for varflag in rd.getVarFlags('SRC_URI'):
        if varflag.endswith(('.md5sum', '.sha256sum')):
            name = varflag.rsplit('.', 1)[0]
            if name not in oldnames:
                oldnames.append(name)
        elif varflag in ['md5sum', 'sha256sum']:
            noname = True
    # Even if SRC_URI has named entries it doesn't have to actually use the name
    if noname and addnames and addnames[0] not in oldnames:
        addnames = []
    # Drop any old names (the name actually might include ${PV})
    for name in oldnames:
        if name not in newnames:
            newvalues['SRC_URI[%s.md5sum]' % name] = None
            newvalues['SRC_URI[%s.sha256sum]' % name] = None

    if sha256:
        if addnames:
            nameprefix = '%s.' % addnames[0]
        else:
            nameprefix = ''
        # md5sum is deprecated; drop it and record only the sha256sum
        newvalues['SRC_URI[%smd5sum]' % nameprefix] = None
        newvalues['SRC_URI[%ssha256sum]' % nameprefix] = sha256

    if srcsubdir_new != srcsubdir_old:
        s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR'))
        s_subdir_new = os.path.relpath(os.path.abspath(crd.getVar('S')), crd.getVar('WORKDIR'))
        if srcsubdir_old == s_subdir_old and srcsubdir_new != s_subdir_new:
            # Subdir for old extracted source matches what S points to (it should!)
            # but subdir for new extracted source doesn't match what S will be
            newvalues['S'] = '${WORKDIR}/%s' % srcsubdir_new.replace(newpv, '${PV}')
            if crd.expand(newvalues['S']) == crd.expand('${WORKDIR}/${BP}'):
                # It's the default, drop it
                # FIXME what if S is being set in a .inc?
                newvalues['S'] = None
                logger.info('Source subdirectory has changed, dropping S value since it now matches the default ("${WORKDIR}/${BP}")')
            else:
                logger.info('Source subdirectory has changed, updating S value')

    if license_diff:
        newlicchksum = " ".join(["file://{}".format(l['path']) +
                                 (";beginline={}".format(l['beginline']) if l['beginline'] else "") +
                                 (";endline={}".format(l['endline']) if l['endline'] else "") +
                                 (";md5={}".format(l['actual_md5'])) for l in new_licenses])
        newvalues["LIC_FILES_CHKSUM"] = newlicchksum
        _add_license_diff_to_recipe(fullpath, license_diff)

    try:
        rd = tinfoil.parse_recipe_file(fullpath, False)
    except bb.tinfoil.TinfoilCommandFailed as e:
        _upgrade_error(e, os.path.dirname(fullpath), srctree, keep_failure, 'Parsing of upgraded recipe failed')
    oe.recipeutils.patch_recipe(rd, fullpath, newvalues)

    return fullpath, copied
||||
|
||||
|
||||
def _check_git_config():
    """Verify git user.name and user.email are configured (required for the
    rebases devtool performs); raise DevtoolError listing what is missing."""
    def getconfig(name):
        try:
            return bb.process.run('git config --global %s' % name)[0].strip()
        except bb.process.ExecutionError as e:
            # Exit code 1 simply means the option is unset
            if e.exitcode == 1:
                return None
            raise

    configerr = []
    if not getconfig('user.name'):
        configerr.append('Please set your name using:\n git config --global user.name')
    if not getconfig('user.email'):
        configerr.append('Please set your email using:\n git config --global user.email')
    if configerr:
        raise DevtoolError('Your git configuration is incomplete which will prevent rebases from working:\n' + '\n'.join(configerr))
||||
|
||||
def _extract_licenses(srcpath, recipe_licenses):
    """Parse each LIC_FILES_CHKSUM entry and read the referenced file.

    Returns a list of dicts with the license path, recorded md5,
    begin/end line bounds, the captured text lines, and the actual md5
    of that region of the file under *srcpath*.
    """
    import hashlib
    licenses = []
    for url in recipe_licenses.split():
        # 'lic' avoids shadowing the 'license' builtin
        lic = {}
        (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
        lic['path'] = path
        lic['md5'] = parm.get('md5', '')
        lic['beginline'], lic['endline'] = 0, 0
        if 'beginline' in parm:
            lic['beginline'] = int(parm['beginline'])
        if 'endline' in parm:
            lic['endline'] = int(parm['endline'])
        lic['text'] = []
        with open(os.path.join(srcpath, path), 'rb') as f:
            actual_md5 = hashlib.md5()
            lineno = 0
            for line in f:
                lineno += 1
                # endline == 0 means "to end of file"
                if (lineno >= lic['beginline']) and ((lineno <= lic['endline']) or not lic['endline']):
                    lic['text'].append(line.decode(errors='ignore'))
                    actual_md5.update(line)
        lic['actual_md5'] = actual_md5.hexdigest()
        licenses.append(lic)
    return licenses
||||
|
||||
def _generate_license_diff(old_licenses, new_licenses):
|
||||
need_diff = False
|
||||
for l in new_licenses:
|
||||
if l['md5'] != l['actual_md5']:
|
||||
need_diff = True
|
||||
break
|
||||
if need_diff == False:
|
||||
return None
|
||||
|
||||
import difflib
|
||||
diff = ''
|
||||
for old, new in zip(old_licenses, new_licenses):
|
||||
for line in difflib.unified_diff(old['text'], new['text'], old['path'], new['path']):
|
||||
diff = diff + line
|
||||
return diff
|
||||
|
||||
def upgrade(args, config, basepath, workspace):
    """Entry point for the devtool 'upgrade' subcommand.

    Extracts the current and new versions of the recipe's source, rebases
    devtool patches onto the new base, and writes an upgraded recipe plus
    bbappend into the workspace. Returns 0 on success, 1 on parse failure.
    """

    if args.recipename in workspace:
        raise DevtoolError("recipe %s is already in your workspace" % args.recipename)
    if args.srcbranch and not args.srcrev:
        # Fix: the original applied "% args.recipename" to a format string
        # with no placeholder, which raised TypeError instead of the
        # intended DevtoolError
        raise DevtoolError("If you specify --srcbranch/-B then you must use --srcrev/-S to specify the revision")

    _check_git_config()

    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        pn = rd.getVar('PN')
        if pn != args.recipename:
            logger.info('Mapping %s to %s' % (args.recipename, pn))
        if pn in workspace:
            raise DevtoolError("recipe %s is already in your workspace" % pn)

        if args.srctree:
            srctree = os.path.abspath(args.srctree)
        else:
            srctree = standard.get_default_srctree(config, pn)

        srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))

        # try to automatically discover latest version and revision if not provided on command line
        if not args.version and not args.srcrev:
            version_info = oe.recipeutils.get_recipe_upstream_version(rd)
            if version_info['version'] and not version_info['version'].endswith("new-commits-available"):
                args.version = version_info['version']
            if version_info['revision']:
                args.srcrev = version_info['revision']
        if not args.version and not args.srcrev:
            raise DevtoolError("Automatic discovery of latest version/revision failed - you must provide a version using the --version/-V option, or for recipes that fetch from an SCM such as git, the --srcrev/-S option.")

        standard._check_compatible_recipe(pn, rd)
        old_srcrev = rd.getVar('SRCREV')
        if old_srcrev == 'INVALID':
            old_srcrev = None
        if old_srcrev and not args.srcrev:
            raise DevtoolError("Recipe specifies a SRCREV value; you must specify a new one when upgrading")
        old_ver = rd.getVar('PV')
        if old_ver == args.version and old_srcrev == args.srcrev:
            raise DevtoolError("Current and upgrade versions are the same version")
        if args.version:
            if bb.utils.vercmp_string(args.version, old_ver) < 0:
                logger.warning('Upgrade version %s compares as less than the current version %s. If you are using a package feed for on-target upgrades or providing this recipe for general consumption, then you should increment PE in the recipe (or if there is no current PE value set, set it to "1")' % (args.version, old_ver))
            check_prerelease_version(args.version, 'devtool upgrade')

        rf = None
        license_diff = None
        try:
            logger.info('Extracting current version source...')
            rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
            old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
            logger.info('Extracting upgraded version source...')
            rev2, md5, sha256, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
                                                    args.srcrev, args.srcbranch, args.branch, args.keep_temp,
                                                    tinfoil, rd)
            new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
            license_diff = _generate_license_diff(old_licenses, new_licenses)
            rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
        except (bb.process.CmdError, DevtoolError) as e:
            # _upgrade_error cleans up (unless --keep-failure) and exits
            recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('BPN'))
            _upgrade_error(e, recipedir, srctree, args.keep_failure)
        standard._add_md5(config, pn, os.path.dirname(rf))

        af = _write_append(rf, srctree, srctree_s, args.same_dir, args.no_same_dir, rev2,
                           copied, config.workspace_path, rd)
        standard._add_md5(config, pn, af)

        update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])

        logger.info('Upgraded source extracted to %s' % srctree)
        logger.info('New recipe is %s' % rf)
        if license_diff:
            logger.info('License checksums have been updated in the new recipe; please refer to it for the difference between the old and the new license texts.')
        preferred_version = rd.getVar('PREFERRED_VERSION_%s' % rd.getVar('PN'))
        if preferred_version:
            logger.warning('Version is pinned to %s via PREFERRED_VERSION; it may need adjustment to match the new version before any further steps are taken' % preferred_version)
    finally:
        tinfoil.shutdown()
    return 0
||||
|
||||
def latest_version(args, config, basepath, workspace):
    """Entry point for the devtool 'latest_version' subcommand"""
    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1
        version_info = oe.recipeutils.get_recipe_upstream_version(rd)
        # "new-commits-available" is an indication that upstream never issues version tags
        if version_info['version'].endswith("new-commits-available"):
            logger.info("Latest commit: {}".format(version_info['revision']))
        else:
            logger.info("Current version: {}".format(version_info['current_version']))
            logger.info("Latest version: {}".format(version_info['version']))
            if version_info['revision']:
                logger.info("Latest version's commit: {}".format(version_info['revision']))
    finally:
        tinfoil.shutdown()
    return 0
|
||||
|
||||
def check_upgrade_status(args, config, basepath, workspace):
    """Entry point for the devtool 'check-upgrade-status' subcommand"""
    if not args.recipe:
        logger.info("Checking the upstream status for all recipes may take a few minutes")
    results = oe.recipeutils.get_recipe_upgrade_status(args.recipe)
    for result in results:
        # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason
        pn, status, current, latest, maintainer, latest_commit, no_update_reason = result
        if not args.all and status == 'MATCH':
            continue
        if status == 'UPDATE':
            # Upstream with no version tags is reported as "<hash>new-commits-available"
            shown = "new commits" if latest.endswith("new-commits-available") else latest
        else:
            shown = status
        logger.info("{:25} {:15} {:15} {} {} {}".format(pn,
                    current,
                    shown,
                    maintainer,
                    latest_commit if latest_commit != 'N/A' else "",
                    "cannot be updated due to: %s" % (no_update_reason) if no_update_reason else ""))
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""

    defsrctree = standard.get_default_srctree(context.config)

    # upgrade
    parser_upgrade = subparsers.add_parser('upgrade', help='Upgrade an existing recipe',
                                           description='Upgrades an existing recipe to a new upstream version. Puts the upgraded recipe file into the workspace along with any associated files, and extracts the source tree to a specified location (in case patches need rebasing or adding to as a result of the upgrade).',
                                           group='starting')
    parser_upgrade.add_argument('recipename', help='Name of recipe to upgrade (just name - no version, path or extension)')
    parser_upgrade.add_argument('srctree', nargs='?', help='Path to where to extract the source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
    parser_upgrade.add_argument('--version', '-V', help='Version to upgrade to (PV). If omitted, latest upstream version will be determined and used, if possible.')
    parser_upgrade.add_argument('--srcrev', '-S', help='Source revision to upgrade to (useful when fetching from an SCM such as git)')
    parser_upgrade.add_argument('--srcbranch', '-B', help='Branch in source repository containing the revision to use (if fetching from an SCM such as git)')
    parser_upgrade.add_argument('--branch', '-b', default="devtool", help='Name for new development branch to checkout (default "%(default)s")')
    parser_upgrade.add_argument('--no-patch', action="store_true", help='Do not apply patches from the recipe to the new source code')
    parser_upgrade.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
    # --same-dir and --no-same-dir are mutually exclusive
    dir_group = parser_upgrade.add_mutually_exclusive_group()
    dir_group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    dir_group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser_upgrade.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser_upgrade.add_argument('--keep-failure', action="store_true", help='Keep failed upgrade recipe and associated files (for debugging)')
    parser_upgrade.set_defaults(func=upgrade, fixed_setup=context.fixed_setup)

    # latest-version
    parser_latest_version = subparsers.add_parser('latest-version', help='Report the latest version of an existing recipe',
                                                  description='Queries the upstream server for what the latest upstream release is (for git, tags are checked, for tarballs, a list of them is obtained, and one with the highest version number is reported)',
                                                  group='info')
    parser_latest_version.add_argument('recipename', help='Name of recipe to query (just name - no version, path or extension)')
    parser_latest_version.set_defaults(func=latest_version)

    # check-upgrade-status
    parser_check_upgrade_status = subparsers.add_parser('check-upgrade-status', help="Report upgradability for multiple (or all) recipes",
                                                        description="Prints a table of recipes together with versions currently provided by recipes, and latest upstream versions, when there is a later version available",
                                                        group='info')
    parser_check_upgrade_status.add_argument('recipe', help='Name of the recipe to report (omit to report upgrade info for all recipes)', nargs='*')
    parser_check_upgrade_status.add_argument('--all', '-a', help='Show all recipes, not just recipes needing upgrade', action="store_true")
    parser_check_upgrade_status.set_defaults(func=check_upgrade_status)
|
||||
@@ -0,0 +1,242 @@
|
||||
# Development tool - utility commands plugin
|
||||
#
|
||||
# Copyright (C) 2015-2016 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
"""Devtool utility plugins"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import tempfile
|
||||
import logging
|
||||
import argparse
|
||||
import subprocess
|
||||
import scriptutils
|
||||
from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
|
||||
from devtool import parse_recipe
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def _find_recipe_path(args, config, basepath, workspace):
    """Return the path to the recipe file for args.recipename.

    Prefers the workspace copy (no cache load needed); falls back to a full
    tinfoil parse otherwise. Raises DevtoolError if the recipe is unknown.
    """
    if args.any_recipe:
        logger.warning('-a/--any-recipe option is now always active, and thus the option will be removed in a future release')
    recipefile = None
    if args.recipename in workspace:
        recipefile = workspace[args.recipename]['recipefile']
    if not recipefile:
        # Not in the workspace (or workspace entry has no recipe file) - parse it
        tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
        try:
            rd = parse_recipe(config, tinfoil, args.recipename, True)
            if not rd:
                raise DevtoolError("Failed to find specified recipe")
            recipefile = rd.getVar('FILE')
        finally:
            tinfoil.shutdown()
    return recipefile
|
||||
|
||||
|
||||
def find_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'find-recipe' subcommand"""
    print(_find_recipe_path(args, config, basepath, workspace))
    return 0
|
||||
|
||||
|
||||
def edit_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'edit-recipe' subcommand"""
    recipefile = _find_recipe_path(args, config, basepath, workspace)
    return scriptutils.run_editor(recipefile, logger)
|
||||
|
||||
|
||||
def configure_help(args, config, basepath, workspace):
    """Entry point for the devtool 'configure-help' subcommand"""
    import oe.utils

    check_workspace_recipe(workspace, args.recipename)
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            return 1
        # Gather everything we need from the datastore before shutting tinfoil down
        b = rd.getVar('B')
        s = rd.getVar('S')
        configurescript = os.path.join(s, 'configure')
        confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or [])
        configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '')
        extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '')
        extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '')
        do_configure = rd.getVar('do_configure') or ''
        do_configure_noexpand = rd.getVar('do_configure', False) or ''
        packageconfig = rd.getVarFlags('PACKAGECONFIG') or []
        autotools = bb.data.inherits_class('autotools', rd) and ('oe_runconf' in do_configure or 'autotools_do_configure' in do_configure)
        cmake = bb.data.inherits_class('cmake', rd) and ('cmake_do_configure' in do_configure)
        cmake_do_configure = rd.getVar('cmake_do_configure')
        pn = rd.getVar('PN')
    finally:
        tinfoil.shutdown()

    # 'doc' is a varflag holding documentation, not a PACKAGECONFIG option
    if 'doc' in packageconfig:
        del packageconfig['doc']

    if autotools and not os.path.exists(configurescript):
        logger.info('Running do_configure to generate configure script')
        try:
            stdout, _ = exec_build_env_command(config.init_path, basepath,
                                               'bitbake -c configure %s' % args.recipename,
                                               stderr=subprocess.STDOUT)
        except bb.process.ExecutionError:
            # Best-effort: we may still be able to show something below
            pass

    if confdisabled or do_configure.strip() in ('', ':'):
        raise DevtoolError("do_configure task has been disabled for this recipe")
    elif args.no_pager and not os.path.exists(configurescript):
        raise DevtoolError("No configure script found and no other information to display")
    else:
        configopttext = ''
        if autotools and configureopts:
            configopttext = '''
Arguments currently passed to the configure script:

%s

Some of those are fixed.''' % (configureopts + ' ' + extra_oeconf)
            if extra_oeconf:
                configopttext += ''' The ones that are specified through EXTRA_OECONF (which you can change or add to easily):

%s''' % extra_oeconf

        elif cmake:
            # Pull the (possibly line-continued) cmake invocation out of cmake_do_configure
            in_cmake = False
            cmake_cmd = ''
            for line in cmake_do_configure.splitlines():
                if in_cmake:
                    cmake_cmd = cmake_cmd + ' ' + line.strip().rstrip('\\')
                    if not line.endswith('\\'):
                        break
                if line.lstrip().startswith('cmake '):
                    cmake_cmd = line.strip().rstrip('\\')
                    if line.endswith('\\'):
                        in_cmake = True
                    else:
                        break
            if cmake_cmd:
                configopttext = '''
The current cmake command line:

%s

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (oe.utils.squashspaces(cmake_cmd), extra_oecmake)
            else:
                configopttext = '''
The current implementation of cmake_do_configure:

cmake_do_configure() {
%s
}

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (cmake_do_configure.rstrip(), extra_oecmake)

        elif do_configure:
            configopttext = '''
The current implementation of do_configure:

do_configure() {
%s
}''' % do_configure.rstrip()
            if '${EXTRA_OECONF}' in do_configure_noexpand:
                configopttext += '''

Arguments specified through EXTRA_OECONF (which you can change or add to easily):

%s''' % extra_oeconf

        if packageconfig:
            configopttext += '''

Some of these options may be controlled through PACKAGECONFIG; for more details please see the recipe.'''

        if args.arg:
            helpargs = ' '.join(args.arg)
        elif cmake:
            helpargs = '-LH'
        else:
            helpargs = '--help'

        msg = '''configure information for %s
------------------------------------------
%s''' % (pn, configopttext)

        if cmake:
            msg += '''

The cmake %s output for %s follows. After "-- Cache values" you should see a list of variables you can add to EXTRA_OECMAKE (prefixed with -D and suffixed with = followed by the desired value, without any spaces).
------------------------------------------''' % (helpargs, pn)
        elif os.path.exists(configurescript):
            msg += '''

The ./configure %s output for %s follows.
------------------------------------------''' % (helpargs, pn)

        olddir = os.getcwd()
        tmppath = tempfile.mkdtemp()
        with tempfile.NamedTemporaryFile('w', delete=False) as tf:
            if not args.no_header:
                tf.write(msg + '\n')
            tf.close()
            try:
                try:
                    # Build a shell pipeline: header file, then the configure/cmake help output
                    cmd = 'cat %s' % tf.name
                    if cmake:
                        cmd += '; cmake %s %s 2>&1' % (helpargs, s)
                        os.chdir(b)
                    elif os.path.exists(configurescript):
                        cmd += '; %s %s' % (configurescript, helpargs)
                    if sys.stdout.isatty() and not args.no_pager:
                        pager = os.environ.get('PAGER', 'less')
                        cmd = '(%s) | %s' % (cmd, pager)
                    subprocess.check_call(cmd, shell=True)
                except subprocess.CalledProcessError as e:
                    return e.returncode
            finally:
                os.chdir(olddir)
                shutil.rmtree(tmppath)
                os.remove(tf.name)
|
||||
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    # edit-recipe
    parser_edit_recipe = subparsers.add_parser('edit-recipe', help='Edit a recipe file',
                                               description='Runs the default editor (as specified by the EDITOR variable) on the specified recipe. Note that this will be quicker for recipes in the workspace as the cache does not need to be loaded in that case.',
                                               group='working')
    parser_edit_recipe.add_argument('recipename', help='Recipe to edit')
    # FIXME drop -a at some point in future
    parser_edit_recipe.add_argument('--any-recipe', '-a', action="store_true", help='Does nothing (exists for backwards-compatibility)')
    parser_edit_recipe.set_defaults(func=edit_recipe)

    # Find-recipe
    parser_find_recipe = subparsers.add_parser('find-recipe', help='Find a recipe file',
                                               description='Finds a recipe file. Note that this will be quicker for recipes in the workspace as the cache does not need to be loaded in that case.',
                                               group='working')
    parser_find_recipe.add_argument('recipename', help='Recipe to find')
    # FIXME drop -a at some point in future
    parser_find_recipe.add_argument('--any-recipe', '-a', action="store_true", help='Does nothing (exists for backwards-compatibility)')
    parser_find_recipe.set_defaults(func=find_recipe)

    # NOTE: Needed to override the usage string here since the default
    # gets the order wrong - recipename must come before --arg
    parser_configure_help = subparsers.add_parser('configure-help', help='Get help on configure script options',
                                                  usage='devtool configure-help [options] recipename [--arg ...]',
                                                  description='Displays the help for the configure script for the specified recipe (i.e. runs ./configure --help) prefaced by a header describing the current options being specified. Output is piped through less (or whatever PAGER is set to, if set) for easy browsing.',
                                                  group='working')
    parser_configure_help.add_argument('recipename', help='Recipe to show configure help for')
    parser_configure_help.add_argument('-p', '--no-pager', help='Disable paged output', action="store_true")
    parser_configure_help.add_argument('-n', '--no-header', help='Disable explanatory header text', action="store_true")
    parser_configure_help.add_argument('--arg', help='Pass remaining arguments to the configure script instead of --help (useful if the script has additional help options)', nargs=argparse.REMAINDER)
    parser_configure_help.set_defaults(func=configure_help)
|
||||
@@ -0,0 +1,447 @@
|
||||
# Recipe creation tool - append plugin
|
||||
#
|
||||
# Copyright (C) 2015 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
import glob
|
||||
import fnmatch
|
||||
import re
|
||||
import subprocess
|
||||
import logging
|
||||
import stat
|
||||
import shutil
|
||||
import scriptutils
|
||||
import errno
|
||||
from collections import defaultdict
|
||||
|
||||
logger = logging.getLogger('recipetool')
|
||||
|
||||
tinfoil = None
|
||||
|
||||
def tinfoil_init(instance):
    """Store the shared tinfoil instance for use by this plugin's commands."""
    global tinfoil
    tinfoil = instance
|
||||
|
||||
|
||||
# FIXME guessing when we don't have pkgdata?
|
||||
# FIXME mode to create patch rather than directly substitute
|
||||
|
||||
class InvalidTargetFileError(Exception):
    """Raised when the requested target path cannot be handled by this tool."""
|
||||
|
||||
def find_target_file(targetpath, d, pkglist=None):
    """Find the recipe installing the specified target path, optionally limited to a select list of packages"""
    import json

    pkgdata_dir = d.getVar('PKGDATA_DIR')

    # The mix between /etc and ${sysconfdir} here may look odd, but it is just
    # being consistent with usage elsewhere
    invalidtargets = {'${sysconfdir}/version': '${sysconfdir}/version is written out at image creation time',
                      '/etc/timestamp': '/etc/timestamp is written out at image creation time',
                      '/dev/*': '/dev is handled by udev (or equivalent) and the kernel (devtmpfs)',
                      '/etc/passwd': '/etc/passwd should be managed through the useradd and extrausers classes',
                      '/etc/group': '/etc/group should be managed through the useradd and extrausers classes',
                      '/etc/shadow': '/etc/shadow should be managed through the useradd and extrausers classes',
                      '/etc/gshadow': '/etc/gshadow should be managed through the useradd and extrausers classes',
                      '${sysconfdir}/hostname': '${sysconfdir}/hostname contents should be set by setting hostname:pn-base-files = "value" in configuration',}

    for pthspec, message in invalidtargets.items():
        if fnmatch.fnmatchcase(targetpath, d.expand(pthspec)):
            raise InvalidTargetFileError(d.expand(message))

    # Matches the target path used as an argument within pre/postinst scripts
    targetpath_re = re.compile(r'\s+(\$D)?%s(\s|$)' % targetpath)

    recipes = defaultdict(list)
    for root, dirs, files in os.walk(os.path.join(pkgdata_dir, 'runtime')):
        filelist = pkglist if pkglist else files
        for fn in filelist:
            pkgdatafile = os.path.join(root, fn)
            if pkglist and not os.path.exists(pkgdatafile):
                continue
            with open(pkgdatafile, 'r') as f:
                pn = ''
                # This does assume that PN comes before other values, but that's a fairly safe assumption
                for line in f:
                    if line.startswith('PN:'):
                        pn = line.split(': ', 1)[1].strip()
                    elif line.startswith('FILES_INFO'):
                        val = line.split(': ', 1)[1].strip()
                        dictval = json.loads(val)
                        for fullpth in dictval.keys():
                            if fnmatch.fnmatchcase(fullpth, targetpath):
                                recipes[targetpath].append(pn)
                    elif line.startswith('pkg_preinst:') or line.startswith('pkg_postinst:'):
                        scriptval = line.split(': ', 1)[1].strip().encode('utf-8').decode('unicode_escape')
                        if 'update-alternatives --install %s ' % targetpath in scriptval:
                            # '?' prefix: provided via update-alternatives
                            recipes[targetpath].append('?%s' % pn)
                        elif targetpath_re.search(scriptval):
                            # '!' prefix: written by a pre/postinst script
                            recipes[targetpath].append('!%s' % pn)
    return recipes
|
||||
|
||||
def _parse_recipe(pn, tinfoil):
|
||||
try:
|
||||
rd = tinfoil.parse_recipe(pn)
|
||||
except bb.providers.NoProvider as e:
|
||||
logger.error(str(e))
|
||||
return None
|
||||
return rd
|
||||
|
||||
def determine_file_source(targetpath, rd):
    """Assuming we know a file came from a specific recipe, figure out exactly where it came from"""
    import oe.recipeutils

    # See if it's in do_install for the recipe
    workdir = rd.getVar('WORKDIR')
    src_uri = rd.getVar('SRC_URI')
    srcfile = ''
    modpatches = []
    elements = check_do_install(rd, targetpath)
    if elements:
        logger.debug('do_install line:\n%s' % ' '.join(elements))
        srcpath = get_source_path(elements)
        logger.debug('source path: %s' % srcpath)
        if not srcpath.startswith('/'):
            # Handle non-absolute path
            srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath))
        if srcpath.startswith(workdir):
            # OK, now we have the source file name, look for it in SRC_URI
            workdirfile = os.path.relpath(srcpath, workdir)
            # FIXME this is where we ought to have some code in the fetcher, because this is naive
            for item in src_uri.split():
                localpath = bb.fetch2.localpath(item, rd)
                # Source path specified in do_install might be a glob
                if fnmatch.fnmatch(os.path.basename(localpath), workdirfile):
                    srcfile = 'file://%s' % localpath
                elif '/' in workdirfile:
                    if item == 'file://%s' % workdirfile:
                        srcfile = 'file://%s' % localpath

        # Check patches
        srcpatches = []
        patchedfiles = oe.recipeutils.get_recipe_patched_files(rd)
        for patch, filelist in patchedfiles.items():
            for fileitem in filelist:
                if fileitem[0] == srcpath:
                    srcpatches.append((patch, fileitem[1]))
        if srcpatches:
            addpatch = None
            for patch in srcpatches:
                if patch[1] == 'A':
                    # The file was added by this patch
                    addpatch = patch[0]
                else:
                    modpatches.append(patch[0])
            if addpatch:
                srcfile = 'patch://%s' % addpatch

    return (srcfile, elements, modpatches)
|
||||
|
||||
def get_source_path(cmdelements):
    """Find the source path specified within a command.

    cmdelements is an already-split 'install' or 'cp' command line; the
    source is the last non-option argument before the destination (the
    final element). Single-letter options that consume a value (discovered
    by scraping the command's --help output) cause the following element
    to be skipped. Returns None if no source argument is found; raises
    Exception for unsupported commands.
    """
    command = cmdelements[0]
    if command not in ('install', 'cp'):
        # Bug fix: the original raised this message without formatting in the
        # command name ('... "%s"' with no % operand)
        raise Exception('get_source_path: no handling for command "%s"' % command)

    # Determine which single-letter options take a value by parsing the
    # command's --help output (lines of the form "-X, --long-option=VALUE")
    helptext = subprocess.check_output('LC_ALL=C %s --help' % command, shell=True).decode('utf-8')
    argopts = ''
    argopt_line_re = re.compile(r'^-([a-zA-Z0-9]), --[a-z-]+=')
    for line in helptext.splitlines():
        res = argopt_line_re.search(line.lstrip())
        if res:
            argopts += res.group(1)
    if not argopts:
        # Fallback in case --help output could not be parsed
        if command == 'install':
            argopts = 'gmoSt'
        elif command == 'cp':
            argopts = 't'
        else:
            raise Exception('No fallback arguments for command %s' % command)

    skipnext = False
    # The last element is the destination, so don't consider it
    for elem in cmdelements[1:-1]:
        if elem.startswith('-'):
            if len(elem) > 1 and elem[1] in argopts:
                # This option consumes the next element as its value
                skipnext = True
            continue
        if skipnext:
            skipnext = False
            continue
        return elem
|
||||
|
||||
def get_func_deps(func, d):
    """Find the function dependencies of a shell function"""
    deps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func))
    deps |= set((d.getVarFlag(func, "vardeps") or "").split())
    # Keep only the dependencies that are themselves functions
    return [dep for dep in deps if d.getVarFlag(dep, 'func')]
|
||||
|
||||
def check_do_install(rd, targetpath):
    """Look at do_install for a command that installs/copies the specified target path"""
    instpath = os.path.abspath(os.path.join(rd.getVar('D'), targetpath.lstrip('/')))
    do_install = rd.getVar('do_install')
    # Handle where do_install calls other functions (somewhat crudely, but good enough for this purpose)
    for dep in get_func_deps('do_install', rd):
        do_install = do_install.replace(dep, rd.getVar(dep))

    # Look backwards through do_install as we want to catch where a later line (perhaps
    # from a bbappend) is writing over the top
    for line in reversed(do_install.splitlines()):
        line = line.strip()
        if (line.startswith('install ') and ' -m' in line) or line.startswith('cp '):
            elements = line.split()
            destpath = os.path.abspath(elements[-1])
            if destpath == instpath:
                return elements
            elif destpath.rstrip('/') == os.path.dirname(instpath):
                # FIXME this doesn't take recursive copy into account; unsure if it's practical to do so
                srcpath = get_source_path(elements)
                if fnmatch.fnmatchcase(os.path.basename(instpath), os.path.basename(srcpath)):
                    return elements
    return None
|
||||
|
||||
|
||||
def appendfile(args):
    """Entry point for 'recipetool appendfile': create/update a bbappend replacing a target file"""
    import oe.recipeutils

    # Use file(1) to warn if the replacement looks like a compiled binary
    stdout = ''
    try:
        (stdout, _) = bb.process.run('LANG=C file -b %s' % args.newfile, shell=True)
        if 'cannot open' in stdout:
            raise bb.process.ExecutionError(stdout)
    except bb.process.ExecutionError as err:
        logger.debug('file command returned error: %s' % err)
        stdout = ''
    if stdout:
        logger.debug('file command output: %s' % stdout.rstrip())
        if ('executable' in stdout and not 'shell script' in stdout) or 'shared object' in stdout:
            logger.warning('This file looks like it is a binary or otherwise the output of compilation. If it is, you should consider building it properly instead of substituting a binary file directly.')

    if args.recipe:
        recipes = {args.targetpath: [args.recipe],}
    else:
        try:
            recipes = find_target_file(args.targetpath, tinfoil.config_data)
        except InvalidTargetFileError as e:
            logger.error('%s cannot be handled by this tool: %s' % (args.targetpath, e))
            return 1
        if not recipes:
            logger.error('Unable to find any package producing path %s - this may be because the recipe packaging it has not been built yet' % args.targetpath)
            return 1

    # Categorise the candidates: '?' prefix = update-alternatives provider,
    # '!' prefix = written by a pre/postinst script, otherwise a direct packager
    alternative_pns = []
    postinst_pns = []

    selectpn = None
    for targetpath, pnlist in recipes.items():
        for pn in pnlist:
            if pn.startswith('?'):
                alternative_pns.append(pn[1:])
            elif pn.startswith('!'):
                postinst_pns.append(pn[1:])
            elif selectpn:
                # hit here with multilibs
                continue
            else:
                selectpn = pn

    if not selectpn and len(alternative_pns) == 1:
        selectpn = alternative_pns[0]
        logger.error('File %s is an alternative possibly provided by recipe %s but seemingly no other, selecting it by default - you should double check other recipes' % (args.targetpath, selectpn))

    if selectpn:
        logger.debug('Selecting recipe %s for file %s' % (selectpn, args.targetpath))
        if postinst_pns:
            logger.warning('%s be modified by postinstall scripts for the following recipes:\n %s\nThis may or may not be an issue depending on what modifications these postinstall scripts make.' % (args.targetpath, '\n '.join(postinst_pns)))
        rd = _parse_recipe(selectpn, tinfoil)
        if not rd:
            # Error message already shown
            return 1
        sourcefile, instelements, modpatches = determine_file_source(args.targetpath, rd)
        sourcepath = None
        if sourcefile:
            sourcetype, sourcepath = sourcefile.split('://', 1)
            logger.debug('Original source file is %s (%s)' % (sourcepath, sourcetype))
            if sourcetype == 'patch':
                logger.warning('File %s is added by the patch %s - you may need to remove or replace this patch in order to replace the file.' % (args.targetpath, sourcepath))
                sourcepath = None
        else:
            logger.debug('Unable to determine source file, proceeding anyway')
        if modpatches:
            logger.warning('File %s is modified by the following patches:\n %s' % (args.targetpath, '\n '.join(modpatches)))

        if instelements and sourcepath:
            install = None
        else:
            # Auto-determine permissions
            # Check destination
            binpaths = '${bindir}:${sbindir}:${base_bindir}:${base_sbindir}:${libexecdir}:${sysconfdir}/init.d'
            perms = '0644'
            if os.path.abspath(os.path.dirname(args.targetpath)) in rd.expand(binpaths).split(':'):
                # File is going into a directory normally reserved for executables, so it should be executable
                perms = '0755'
            else:
                # Check source
                st = os.stat(args.newfile)
                if st.st_mode & stat.S_IXUSR:
                    perms = '0755'
            install = {args.newfile: (args.targetpath, perms)}
        oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: sourcepath}, install, wildcardver=args.wildcard_version, machine=args.machine)
        return 0
    else:
        if alternative_pns:
            logger.error('File %s is an alternative possibly provided by the following recipes:\n %s\nPlease select recipe with -r/--recipe' % (targetpath, '\n '.join(alternative_pns)))
        elif postinst_pns:
            logger.error('File %s may be written out in a pre/postinstall script of the following recipes:\n %s\nPlease select recipe with -r/--recipe' % (targetpath, '\n '.join(postinst_pns)))
        return 3
|
||||
|
||||
|
||||
def appendsrc(args, files, rd, extralines=None):
    """Create/update a bbappend adding the given files to SRC_URI for recipe rd"""
    import oe.recipeutils

    srcdir = rd.getVar('S')
    workdir = rd.getVar('WORKDIR')

    import bb.fetch
    # Map each existing SRC_URI entry, stripped of its parameters, back to
    # the full entry so duplicates can be detected regardless of params
    simplified = {}
    src_uri = rd.getVar('SRC_URI').split()
    for uri in src_uri:
        if uri.endswith(';'):
            uri = uri[:-1]
        simple_uri = bb.fetch.URI(uri)
        simple_uri.params = {}
        simplified[str(simple_uri)] = uri

    copyfiles = {}
    extralines = extralines or []
    for newfile, srcfile in files.items():
        src_destdir = os.path.dirname(srcfile)
        if not args.use_workdir:
            if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'):
                srcdir = os.path.join(workdir, 'git')
                if not bb.data.inherits_class('kernel-yocto', rd):
                    logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git')
            src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir)
        src_destdir = os.path.normpath(src_destdir)

        source_uri = 'file://{0}'.format(os.path.basename(srcfile))
        if src_destdir and src_destdir != '.':
            source_uri += ';subdir={0}'.format(src_destdir)

        # Compare against existing entries with parameters stripped
        simple = bb.fetch.URI(source_uri)
        simple.params = {}
        simple_str = str(simple)
        if simple_str in simplified:
            existing = simplified[simple_str]
            if source_uri != existing:
                logger.warning('{0!r} is already in SRC_URI, with different parameters: {1!r}, not adding'.format(source_uri, existing))
            else:
                logger.warning('{0!r} is already in SRC_URI, not adding'.format(source_uri))
        else:
            extralines.append('SRC_URI += {0}'.format(source_uri))
        copyfiles[newfile] = srcfile

    oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines)
|
||||
|
||||
|
||||
def appendsrcfiles(parser, args):
    """Entry point for 'recipetool appendsrcfiles': add multiple files to a recipe's sources"""
    recipedata = _parse_recipe(args.recipe, tinfoil)
    if not recipedata:
        parser.error('RECIPE must be a valid recipe name')

    # Each file goes under destdir keeping only its base name
    files = {f: os.path.join(args.destdir, os.path.basename(f)) for f in args.files}
    return appendsrc(args, files, recipedata)
|
||||
|
||||
|
||||
def appendsrcfile(parser, args):
    """Entry point for 'recipetool appendsrcfile': add a single file to a recipe's sources"""
    recipedata = _parse_recipe(args.recipe, tinfoil)
    if not recipedata:
        parser.error('RECIPE must be a valid recipe name')

    # Default the destination to the file's base name; a trailing '/' means
    # "place the file inside this directory"
    if not args.destfile:
        args.destfile = os.path.basename(args.file)
    elif args.destfile.endswith('/'):
        args.destfile = os.path.join(args.destfile, os.path.basename(args.file))

    return appendsrc(args, {args.file: args.destfile}, recipedata)
|
||||
|
||||
|
||||
def layer(layerpath):
    """argparse type check: layerpath must contain conf/layer.conf (i.e. be a layer)."""
    layerconf = os.path.join(layerpath, 'conf', 'layer.conf')
    if not os.path.exists(layerconf):
        raise argparse.ArgumentTypeError('{0!r} must be a path to a valid layer'.format(layerpath))
    return layerpath
|
||||
|
||||
|
||||
def existing_path(filepath):
    """argparse type function: the given path must exist on disk."""
    if os.path.exists(filepath):
        return filepath
    raise argparse.ArgumentTypeError('{0!r} must be an existing path'.format(filepath))
|
||||
|
||||
|
||||
def existing_file(filepath):
    """argparse type function: the given path must exist and must not be
    a directory."""
    checked = existing_path(filepath)
    if not os.path.isdir(checked):
        return checked
    raise argparse.ArgumentTypeError('{0!r} must be a file, not a directory'.format(checked))
|
||||
|
||||
|
||||
def destination_path(destpath):
    """argparse type function: destination paths must be relative."""
    if not os.path.isabs(destpath):
        return destpath
    raise argparse.ArgumentTypeError('{0!r} must be a relative path, not absolute'.format(destpath))
|
||||
|
||||
|
||||
def target_path(targetpath):
    """argparse type function: target paths must be absolute."""
    if os.path.isabs(targetpath):
        return targetpath
    raise argparse.ArgumentTypeError('{0!r} must be an absolute path, not relative'.format(targetpath))
|
||||
|
||||
|
||||
def register_commands(subparsers):
    """Register the appendfile, appendsrcfiles and appendsrcfile subcommands.

    Called by the recipetool frontend with its argparse subparsers object;
    each subcommand's func= default is the handler invoked when that
    subcommand is selected on the command line.
    """
    # Options shared by all three subcommands
    common = argparse.ArgumentParser(add_help=False)
    common.add_argument('-m', '--machine', help='Make bbappend changes specific to a machine only', metavar='MACHINE')
    common.add_argument('-w', '--wildcard-version', help='Use wildcard to make the bbappend apply to any recipe version', action='store_true')
    common.add_argument('destlayer', metavar='DESTLAYER', help='Base directory of the destination layer to write the bbappend to', type=layer)

    # appendfile: replace a file in the target image
    parser_appendfile = subparsers.add_parser('appendfile',
                                              parents=[common],
                                              help='Create/update a bbappend to replace a target file',
                                              description='Creates a bbappend (or updates an existing one) to replace the specified file that appears in the target system, determining the recipe that packages the file and the required path and name for the bbappend automatically. Note that the ability to determine the recipe packaging a particular file depends upon the recipe\'s do_packagedata task having already run prior to running this command (which it will have when the recipe has been built successfully, which in turn will have happened if one or more of the recipe\'s packages is included in an image that has been built successfully).')
    parser_appendfile.add_argument('targetpath', help='Path to the file to be replaced (as it would appear within the target image, e.g. /etc/motd)', type=target_path)
    parser_appendfile.add_argument('newfile', help='Custom file to replace the target file with', type=existing_file)
    parser_appendfile.add_argument('-r', '--recipe', help='Override recipe to apply to (default is to find which recipe already packages the file)')
    parser_appendfile.set_defaults(func=appendfile, parserecipes=True)

    # Additional options shared by the two appendsrc* subcommands
    common_src = argparse.ArgumentParser(add_help=False, parents=[common])
    common_src.add_argument('-W', '--workdir', help='Unpack file into WORKDIR rather than S', dest='use_workdir', action='store_true')
    common_src.add_argument('recipe', metavar='RECIPE', help='Override recipe to apply to')

    # appendsrcfiles: multiple files, destination directory only
    parser = subparsers.add_parser('appendsrcfiles',
                                   parents=[common_src],
                                   help='Create/update a bbappend to add or replace source files',
                                   description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.')
    parser.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path)
    parser.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path)
    parser.set_defaults(func=lambda a: appendsrcfiles(parser, a), parserecipes=True)

    # appendsrcfile: a single file, destination filename may be given
    parser = subparsers.add_parser('appendsrcfile',
                                   parents=[common_src],
                                   help='Create/update a bbappend to add or replace a source file',
                                   description='Creates a bbappend (or updates an existing one) to add or replace the specified files in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.')
    parser.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path)
    parser.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path)
    parser.set_defaults(func=lambda a: appendsrcfile(parser, a), parserecipes=True)
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,875 @@
|
||||
# Recipe creation tool - create command build system handlers
|
||||
#
|
||||
# Copyright (C) 2014-2016 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import glob
import logging
import os
import re

import bb

from recipetool.create import RecipeHandler, validate_pv
|
||||
|
||||
# Shared logger used by all recipetool plugin modules
logger = logging.getLogger('recipetool')

# Populated by tinfoil_init() / plugin_init() when the frontend loads this
# plugin; None until then
tinfoil = None
plugins = None
|
||||
|
||||
def plugin_init(pluginlist):
    """Keep a reference to the frontend's plugin list for later handler
    discovery (see extract_cmake_deps / extract_autotools_deps)."""
    global plugins
    plugins = pluginlist
|
||||
|
||||
def tinfoil_init(instance):
    """Record the tinfoil instance handed to us by the frontend."""
    global tinfoil
    tinfoil = instance
|
||||
|
||||
|
||||
class CmakeRecipeHandler(RecipeHandler):
    """Recipe creation handler for CMake-based projects."""

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        """Detect a CMake build system in srctree and set up the recipe.

        Appends the 'cmake' class (plus any classes implied by detected
        dependencies), emits variable assignments into lines_before and an
        EXTRA_OECMAKE placeholder into lines_after.

        Returns True if the build system was recognised and handled.
        """
        if 'buildsystem' in handled:
            return False

        if RecipeHandler.checkfiles(srctree, ['CMakeLists.txt']):
            classes.append('cmake')
            values = CmakeRecipeHandler.extract_cmake_deps(lines_before, srctree, extravalues)
            classes.extend(values.pop('inherit', '').split())
            for var, value in values.items():
                lines_before.append('%s = "%s"' % (var, value))
            lines_after.append('# Specify any options you want to pass to cmake using EXTRA_OECMAKE:')
            lines_after.append('EXTRA_OECMAKE = ""')
            lines_after.append('')
            handled.append('buildsystem')
            return True
        return False

    @staticmethod
    def extract_cmake_deps(outlines, srctree, extravalues, cmakelistsfile=None):
        """Parse CMakeLists.txt (or cmakelistsfile if given) to guess
        dependencies and classes to inherit.

        Returns a dict of variable name -> value; the special key
        'inherit' holds a space-separated list of classes.
        """
        # Find all plugins that want to register handlers
        logger.debug('Loading cmake handlers')
        handlers = []
        for plugin in plugins:
            if hasattr(plugin, 'register_cmake_handlers'):
                plugin.register_cmake_handlers(handlers)

        values = {}
        inherits = []

        if cmakelistsfile:
            srcfiles = [cmakelistsfile]
        else:
            srcfiles = RecipeHandler.checkfiles(srctree, ['CMakeLists.txt'])

        # Map CMake package names (lowercased) to recipe names.
        # Note that some of these are non-standard, but probably better to
        # be able to map them anyway if we see them
        cmake_pkgmap = {'alsa': 'alsa-lib',
                        'aspell': 'aspell',
                        'atk': 'atk',
                        'bison': 'bison-native',
                        'boost': 'boost',
                        'bzip2': 'bzip2',
                        'cairo': 'cairo',
                        'cups': 'cups',
                        'curl': 'curl',
                        'curses': 'ncurses',
                        'cvs': 'cvs',
                        'drm': 'libdrm',
                        'dbus': 'dbus',
                        'dbusglib': 'dbus-glib',
                        'egl': 'virtual/egl',
                        'expat': 'expat',
                        'flex': 'flex-native',
                        'fontconfig': 'fontconfig',
                        'freetype': 'freetype',
                        'gettext': '',
                        'git': '',
                        'gio': 'glib-2.0',
                        'giounix': 'glib-2.0',
                        'glew': 'glew',
                        'glib': 'glib-2.0',
                        'glib2': 'glib-2.0',
                        'glu': 'libglu',
                        'glut': 'freeglut',
                        'gobject': 'glib-2.0',
                        'gperf': 'gperf-native',
                        'gnutls': 'gnutls',
                        'gtk2': 'gtk+',
                        'gtk3': 'gtk+3',
                        'gtk': 'gtk+3',
                        'harfbuzz': 'harfbuzz',
                        'icu': 'icu',
                        'intl': 'virtual/libintl',
                        'jpeg': 'jpeg',
                        'libarchive': 'libarchive',
                        'libiconv': 'virtual/libiconv',
                        'liblzma': 'xz',
                        'libxml2': 'libxml2',
                        'libxslt': 'libxslt',
                        'opengl': 'virtual/libgl',
                        'openmp': '',
                        'openssl': 'openssl',
                        'pango': 'pango',
                        'perl': '',
                        'perllibs': '',
                        'pkgconfig': '',
                        'png': 'libpng',
                        'pthread': '',
                        'pythoninterp': '',
                        'pythonlibs': '',
                        'ruby': 'ruby-native',
                        'sdl': 'libsdl',
                        'sdl2': 'libsdl2',
                        'subversion': 'subversion-native',
                        'swig': 'swig-native',
                        'tcl': 'tcl-native',
                        'threads': '',
                        'tiff': 'tiff',
                        'wget': 'wget',
                        'x11': 'libx11',
                        'xcb': 'libxcb',
                        'xext': 'libxext',
                        'xfixes': 'libxfixes',
                        'zlib': 'zlib',
                        }

        pcdeps = []
        libdeps = []
        deps = []
        unmappedpkgs = []

        # Raw strings here: these previously used plain strings containing
        # \s etc., which are invalid escape sequences (SyntaxWarning on
        # Python 3.12+, SyntaxError in future versions)
        proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)
        pkgcm_re = re.compile(r'pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
        pkgsm_re = re.compile(r'pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
        findpackage_re = re.compile(r'find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
        findlibrary_re = re.compile(r'find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
        checklib_re = re.compile(r'check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
        include_re = re.compile(r'include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
        subdir_re = re.compile(r'add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
        dep_re = re.compile(r'([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')

        def find_cmake_package(pkg):
            # Map a CMake package to a recipe by looking for installed
            # <pkg>-config.cmake / <pkg>Config.cmake / Find<pkg>.cmake files
            RecipeHandler.load_devel_filemap(tinfoil.config_data)
            for fn, pn in RecipeHandler.recipecmakefilemap.items():
                splitname = fn.split('/')
                if len(splitname) > 1:
                    if splitname[0].lower().startswith(pkg.lower()):
                        if splitname[1] == '%s-config.cmake' % pkg.lower() or splitname[1] == '%sConfig.cmake' % pkg or splitname[1] == 'Find%s.cmake' % pkg:
                            return pn
            return None

        def interpret_value(value):
            # CMake values may be quoted
            return value.strip('"')

        def parse_cmake_file(fn, paths=None):
            # Parse one CMake file, recursing into include()s and
            # add_subdirectory()s; accumulates into the enclosing lists
            searchpaths = (paths or []) + [os.path.dirname(fn)]
            logger.debug('Parsing file %s' % fn)
            with open(fn, 'r', errors='surrogateescape') as f:
                for line in f:
                    line = line.strip()
                    for handler in handlers:
                        if handler.process_line(srctree, fn, line, libdeps, pcdeps, deps, outlines, inherits, values):
                            # NOTE(review): this only skips to the next
                            # handler, the line still falls through to the
                            # default parsing below - confirm intended
                            continue
                    res = include_re.match(line)
                    if res:
                        includefn = bb.utils.which(':'.join(searchpaths), res.group(1))
                        if includefn:
                            parse_cmake_file(includefn, searchpaths)
                        else:
                            logger.debug('Unable to recurse into include file %s' % res.group(1))
                        continue
                    res = subdir_re.match(line)
                    if res:
                        subdirfn = os.path.join(os.path.dirname(fn), res.group(1), 'CMakeLists.txt')
                        if os.path.exists(subdirfn):
                            parse_cmake_file(subdirfn, searchpaths)
                        else:
                            logger.debug('Unable to recurse into subdirectory file %s' % subdirfn)
                        continue
                    res = proj_re.match(line)
                    if res:
                        extravalues['PN'] = interpret_value(res.group(1).split()[0])
                        continue
                    res = pkgcm_re.match(line)
                    if res:
                        res = dep_re.findall(res.group(2))
                        if res:
                            pcdeps.extend([interpret_value(x[0]) for x in res])
                        inherits.append('pkgconfig')
                        continue
                    res = pkgsm_re.match(line)
                    if res:
                        res = dep_re.findall(res.group(2))
                        if res:
                            # Note: appending a tuple here!
                            item = tuple((interpret_value(x[0]) for x in res))
                            if len(item) == 1:
                                item = item[0]
                            pcdeps.append(item)
                        inherits.append('pkgconfig')
                        continue
                    res = findpackage_re.match(line)
                    if res:
                        origpkg = res.group(1)
                        pkg = interpret_value(origpkg)
                        found = False
                        for handler in handlers:
                            if handler.process_findpackage(srctree, fn, pkg, deps, outlines, inherits, values):
                                logger.debug('Mapped CMake package %s via handler %s' % (pkg, handler.__class__.__name__))
                                found = True
                                break
                        if found:
                            continue
                        elif pkg == 'Gettext':
                            inherits.append('gettext')
                        elif pkg == 'Perl':
                            inherits.append('perlnative')
                        elif pkg == 'PkgConfig':
                            inherits.append('pkgconfig')
                        elif pkg == 'PythonInterp':
                            inherits.append('python3native')
                        elif pkg == 'PythonLibs':
                            inherits.append('python3-dir')
                        else:
                            # Try to map via looking at installed CMake packages in pkgdata
                            dep = find_cmake_package(pkg)
                            if dep:
                                logger.debug('Mapped CMake package %s to recipe %s via pkgdata' % (pkg, dep))
                                deps.append(dep)
                            else:
                                dep = cmake_pkgmap.get(pkg.lower(), None)
                                if dep:
                                    logger.debug('Mapped CMake package %s to recipe %s via internal list' % (pkg, dep))
                                    deps.append(dep)
                                elif dep is None:
                                    # '' in the map means "known, no dependency needed"
                                    unmappedpkgs.append(origpkg)
                        continue
                    res = checklib_re.match(line)
                    if res:
                        lib = interpret_value(res.group(1))
                        if not lib.startswith('$'):
                            libdeps.append(lib)
                    res = findlibrary_re.match(line)
                    if res:
                        libs = res.group(2).split()
                        for lib in libs:
                            # Stop at keyword arguments following the library names
                            if lib in ['HINTS', 'PATHS', 'PATH_SUFFIXES', 'DOC', 'NAMES_PER_DIR'] or lib.startswith(('NO_', 'CMAKE_', 'ONLY_CMAKE_')):
                                break
                            lib = interpret_value(lib)
                            if not lib.startswith('$'):
                                libdeps.append(lib)
                    if line.lower().startswith('useswig'):
                        deps.append('swig-native')
                        continue

        # Guard against an empty result when called directly with no
        # CMakeLists.txt present (previously an IndexError)
        if srcfiles:
            parse_cmake_file(srcfiles[0])

        if unmappedpkgs:
            outlines.append('# NOTE: unable to map the following CMake package dependencies: %s' % ' '.join(list(set(unmappedpkgs))))

        RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data)

        for handler in handlers:
            handler.post_process(srctree, libdeps, pcdeps, deps, outlines, inherits, values)

        if inherits:
            values['inherit'] = ' '.join(list(set(inherits)))

        return values
|
||||
|
||||
|
||||
class CmakeExtensionHandler(object):
    """Base class for plugins extending CMake file parsing.

    Subclasses override any of the hooks below; the defaults do nothing
    and leave handling to the core parser.
    """

    def process_line(self, srctree, fn, line, libdeps, pcdeps, deps, outlines, inherits, values):
        """Hook called for each line parsed out of a CMake file.

        Return True if the line is fully handled, False to let the
        default parsing continue.
        """
        return False

    def process_findpackage(self, srctree, fn, pkg, deps, outlines, inherits, values):
        """Hook called for each find_package() name parsed out of a CMake file.

        Return True if the package is fully handled, False to let the
        default mapping logic run.
        """
        return False

    def post_process(self, srctree, fn, pkg, deps, outlines, inherits, values):
        """Hook for any final adjustments to the collected output."""
        return
|
||||
|
||||
|
||||
|
||||
class SconsRecipeHandler(RecipeHandler):
    """Recipe creation handler for SCons-based projects."""

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        """Inherit the scons class if an SConstruct file is present."""
        if 'buildsystem' in handled:
            return False
        if not RecipeHandler.checkfiles(srctree, ['SConstruct', 'Sconstruct', 'sconstruct']):
            return False

        classes.append('scons')
        lines_after.extend([
            '# Specify any options you want to pass to scons using EXTRA_OESCONS:',
            'EXTRA_OESCONS = ""',
            '',
        ])
        handled.append('buildsystem')
        return True
|
||||
|
||||
|
||||
class QmakeRecipeHandler(RecipeHandler):
    """Recipe creation handler for qmake-based projects."""

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        """Inherit the qmake2 class if any .pro project file is present."""
        if 'buildsystem' in handled:
            return False
        if not RecipeHandler.checkfiles(srctree, ['*.pro']):
            return False

        classes.append('qmake2')
        handled.append('buildsystem')
        return True
|
||||
|
||||
|
||||
class AutotoolsRecipeHandler(RecipeHandler):
    """Recipe creation handler for autotools-based projects."""

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        """Detect an autotools build system and set up the recipe.

        Handles both trees with configure.ac/configure.in and trees that
        ship only a pre-generated autoconf configure script. Returns True
        if the build system was recognised and handled.
        """
        if 'buildsystem' in handled:
            return False

        autoconf = False
        if RecipeHandler.checkfiles(srctree, ['configure.ac', 'configure.in']):
            autoconf = True
            values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, extravalues)
            classes.extend(values.pop('inherit', '').split())
            for var, value in values.items():
                lines_before.append('%s = "%s"' % (var, value))
        else:
            conffile = RecipeHandler.checkfiles(srctree, ['configure'])
            if conffile:
                # Check if this is just a pre-generated autoconf configure script
                with open(conffile[0], 'r', errors='surrogateescape') as f:
                    for i in range(1, 10):
                        if 'Generated by GNU Autoconf' in f.readline():
                            autoconf = True
                            break

        if autoconf and not ('PV' in extravalues and 'PN' in extravalues):
            # Last resort: pull name/version out of the generated configure
            conffile = RecipeHandler.checkfiles(srctree, ['configure'])
            if conffile:
                with open(conffile[0], 'r', errors='surrogateescape') as f:
                    for line in f:
                        line = line.strip()
                        if line.startswith('VERSION=') or line.startswith('PACKAGE_VERSION='):
                            pv = line.split('=')[1].strip('"\'')
                            if pv and 'PV' not in extravalues and validate_pv(pv):
                                extravalues['PV'] = pv
                        elif line.startswith('PACKAGE_NAME=') or line.startswith('PACKAGE='):
                            pn = line.split('=')[1].strip('"\'')
                            if pn and 'PN' not in extravalues:
                                extravalues['PN'] = pn

        if autoconf:
            lines_before.append('')
            lines_before.append('# NOTE: if this software is not capable of being built in a separate build directory')
            lines_before.append('# from the source, you should replace autotools with autotools-brokensep in the')
            lines_before.append('# inherit line')
            classes.append('autotools')
            lines_after.append('# Specify any options you want to pass to the configure script using EXTRA_OECONF:')
            lines_after.append('EXTRA_OECONF = ""')
            lines_after.append('')
            handled.append('buildsystem')
            return True

        return False

    @staticmethod
    def extract_autotools_deps(outlines, srctree, extravalues=None, acfile=None):
        """Scan configure.ac / configure.in / acinclude.m4 (or acfile if
        given) for macros implying dependencies and classes to inherit.

        Returns a dict of variable name -> value; the special key
        'inherit' holds a space-separated list of classes.
        """
        import shlex

        # Find all plugins that want to register handlers
        logger.debug('Loading autotools handlers')
        handlers = []
        for plugin in plugins:
            if hasattr(plugin, 'register_autotools_handlers'):
                plugin.register_autotools_handlers(handlers)

        values = {}
        inherits = []

        # Hardcoded map, we also use a dynamic one based on what's in the sysroot
        progmap = {'flex': 'flex-native',
                   'bison': 'bison-native',
                   'm4': 'm4-native',
                   'tar': 'tar-native',
                   'ar': 'binutils-native',
                   'ranlib': 'binutils-native',
                   'ld': 'binutils-native',
                   'strip': 'binutils-native',
                   'libtool': '',
                   'autoconf': '',
                   'autoheader': '',
                   'automake': '',
                   'uname': '',
                   'rm': '',
                   'cp': '',
                   'mv': '',
                   'find': '',
                   'awk': '',
                   'sed': '',
                   }
        # Programs whose presence implies inheriting a class rather than DEPENDS
        progclassmap = {'gconftool-2': 'gconf',
                        'pkg-config': 'pkgconfig',
                        'python': 'python3native',
                        'python3': 'python3native',
                        'perl': 'perlnative',
                        'makeinfo': 'texinfo',
                        }

        # Raw strings here: these previously used plain strings containing
        # \s etc., which are invalid escape sequences (SyntaxWarning on
        # Python 3.12+, SyntaxError in future versions)
        pkg_re = re.compile(r'PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
        pkgce_re = re.compile(r'PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*')
        lib_re = re.compile(r'AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*')
        libx_re = re.compile(r'AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*')
        progs_re = re.compile(r'_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
        dep_re = re.compile(r'([^ ><=]+)( [<>=]+ [^ ><=]+)?')
        ac_init_re = re.compile(r'AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*')
        am_init_re = re.compile(r'AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*')
        define_re = re.compile(r'\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)')
        version_re = re.compile(r'([0-9.]+)')  # NOTE(review): currently unused

        defines = {}
        def subst_defines(value):
            # Recursively substitute m4 define()d values into value
            newvalue = value
            for define, defval in defines.items():
                newvalue = newvalue.replace(define, defval)
            if newvalue != value:
                return subst_defines(newvalue)
            return value

        def process_value(value):
            # Resolve an m4/autoconf value to a plain string (running
            # m4_esyscmd commands if needed); returns None if unresolvable
            value = value.replace('[', '').replace(']', '')
            if value.startswith('m4_esyscmd(') or value.startswith('m4_esyscmd_s('):
                cmd = subst_defines(value[value.index('(')+1:-1])
                try:
                    if '|' in cmd:
                        cmd = 'set -o pipefail; ' + cmd
                    stdout, _ = bb.process.run(cmd, cwd=srctree, shell=True)
                    ret = stdout.rstrip()
                except bb.process.ExecutionError:
                    ret = ''
            elif value.startswith('m4_'):
                return None
            else:
                ret = subst_defines(value)
            if ret:
                ret = ret.strip('"\'')
            return ret

        # Since a configure.ac file is essentially a program, this is only ever going to be
        # a hack unfortunately; but it ought to be enough of an approximation
        if acfile:
            srcfiles = [acfile]
        else:
            srcfiles = RecipeHandler.checkfiles(srctree, ['acinclude.m4', 'configure.ac', 'configure.in'])

        pcdeps = []
        libdeps = []
        deps = []
        unmapped = []

        RecipeHandler.load_binmap(tinfoil.config_data)

        def process_macro(keyword, value):
            # Dispatch one fully-assembled macro invocation
            for handler in handlers:
                if handler.process_macro(srctree, keyword, value, process_value, libdeps, pcdeps, deps, outlines, inherits, values):
                    return
            logger.debug('Found keyword %s with value "%s"' % (keyword, value))
            if keyword == 'PKG_CHECK_MODULES':
                res = pkg_re.search(value)
                if res:
                    res = dep_re.findall(res.group(1))
                    if res:
                        pcdeps.extend([x[0] for x in res])
                inherits.append('pkgconfig')
            elif keyword == 'PKG_CHECK_EXISTS':
                res = pkgce_re.search(value)
                if res:
                    res = dep_re.findall(res.group(1))
                    if res:
                        pcdeps.extend([x[0] for x in res])
                inherits.append('pkgconfig')
            elif keyword in ('AM_GNU_GETTEXT', 'AM_GLIB_GNU_GETTEXT', 'GETTEXT_PACKAGE'):
                inherits.append('gettext')
            elif keyword in ('AC_PROG_INTLTOOL', 'IT_PROG_INTLTOOL'):
                deps.append('intltool-native')
            elif keyword == 'AM_PATH_GLIB_2_0':
                deps.append('glib-2.0')
            elif keyword in ('AC_CHECK_PROG', 'AC_PATH_PROG', 'AX_WITH_PROG'):
                res = progs_re.search(value)
                if res:
                    for prog in shlex.split(res.group(1)):
                        prog = prog.split()[0]
                        for handler in handlers:
                            if handler.process_prog(srctree, keyword, value, prog, deps, outlines, inherits, values):
                                return
                        progclass = progclassmap.get(prog, None)
                        if progclass:
                            inherits.append(progclass)
                        else:
                            progdep = RecipeHandler.recipebinmap.get(prog, None)
                            if not progdep:
                                progdep = progmap.get(prog, None)
                            if progdep:
                                deps.append(progdep)
                            elif progdep is None:
                                if not prog.startswith('$'):
                                    unmapped.append(prog)
            elif keyword == 'AC_CHECK_LIB':
                res = lib_re.search(value)
                if res:
                    lib = res.group(1)
                    if not lib.startswith('$'):
                        libdeps.append(lib)
            elif keyword == 'AX_CHECK_LIBRARY':
                res = libx_re.search(value)
                if res:
                    lib = res.group(2)
                    if not lib.startswith('$'):
                        header = res.group(1)
                        libdeps.append((lib, header))
            elif keyword == 'AC_PATH_X':
                deps.append('libx11')
            elif keyword in ('AX_BOOST', 'BOOST_REQUIRE'):
                deps.append('boost')
            elif keyword in ('AC_PROG_LEX', 'AM_PROG_LEX', 'AX_PROG_FLEX'):
                deps.append('flex-native')
            elif keyword in ('AC_PROG_YACC', 'AX_PROG_BISON'):
                deps.append('bison-native')
            elif keyword == 'AX_CHECK_ZLIB':
                deps.append('zlib')
            elif keyword in ('AX_CHECK_OPENSSL', 'AX_LIB_CRYPTO'):
                deps.append('openssl')
            elif keyword in ('AX_LIB_CURL', 'LIBCURL_CHECK_CONFIG'):
                deps.append('curl')
            elif keyword == 'AX_LIB_BEECRYPT':
                deps.append('beecrypt')
            elif keyword == 'AX_LIB_EXPAT':
                deps.append('expat')
            elif keyword == 'AX_LIB_GCRYPT':
                deps.append('libgcrypt')
            elif keyword == 'AX_LIB_NETTLE':
                deps.append('nettle')
            elif keyword == 'AX_LIB_READLINE':
                deps.append('readline')
            elif keyword == 'AX_LIB_SQLITE3':
                deps.append('sqlite3')
            elif keyword == 'AX_LIB_TAGLIB':
                deps.append('taglib')
            elif keyword in ['AX_PKG_SWIG', 'AC_PROG_SWIG']:
                deps.append('swig-native')
            elif keyword == 'AX_PROG_XSLTPROC':
                deps.append('libxslt-native')
            elif keyword in ['AC_PYTHON_DEVEL', 'AX_PYTHON_DEVEL', 'AM_PATH_PYTHON']:
                # NOTE(review): this assignment has no effect (local
                # variable, never read) - presumably it should append to
                # inherits; left as-is to preserve existing behaviour
                pythonclass = 'python3native'
            elif keyword == 'AX_WITH_CURSES':
                deps.append('ncurses')
            elif keyword == 'AX_PATH_BDB':
                deps.append('db')
            elif keyword == 'AX_PATH_LIB_PCRE':
                deps.append('libpcre')
            elif keyword == 'AC_INIT':
                if extravalues is not None:
                    res = ac_init_re.match(value)
                    if res:
                        extravalues['PN'] = process_value(res.group(1))
                        pv = process_value(res.group(2))
                        if validate_pv(pv):
                            extravalues['PV'] = pv
            elif keyword == 'AM_INIT_AUTOMAKE':
                if extravalues is not None:
                    if 'PN' not in extravalues:
                        res = am_init_re.match(value)
                        if res:
                            if res.group(1) != 'AC_PACKAGE_NAME':
                                extravalues['PN'] = process_value(res.group(1))
                            pv = process_value(res.group(2))
                            if validate_pv(pv):
                                extravalues['PV'] = pv
            elif keyword == 'define(':
                res = define_re.match(value)
                if res:
                    key = res.group(2).strip('[]')
                    value = process_value(res.group(3))
                    if value is not None:
                        defines[key] = value

        # FIX: 'AX_LIB_READLINE' previously had no trailing comma, so
        # implicit string concatenation produced the bogus keyword
        # 'AX_LIB_READLINEAX_LIB_SQLITE3' and neither macro was detected
        keywords = ['PKG_CHECK_MODULES',
                    'PKG_CHECK_EXISTS',
                    'AM_GNU_GETTEXT',
                    'AM_GLIB_GNU_GETTEXT',
                    'GETTEXT_PACKAGE',
                    'AC_PROG_INTLTOOL',
                    'IT_PROG_INTLTOOL',
                    'AM_PATH_GLIB_2_0',
                    'AC_CHECK_PROG',
                    'AC_PATH_PROG',
                    'AX_WITH_PROG',
                    'AC_CHECK_LIB',
                    'AX_CHECK_LIBRARY',
                    'AC_PATH_X',
                    'AX_BOOST',
                    'BOOST_REQUIRE',
                    'AC_PROG_LEX',
                    'AM_PROG_LEX',
                    'AX_PROG_FLEX',
                    'AC_PROG_YACC',
                    'AX_PROG_BISON',
                    'AX_CHECK_ZLIB',
                    'AX_CHECK_OPENSSL',
                    'AX_LIB_CRYPTO',
                    'AX_LIB_CURL',
                    'LIBCURL_CHECK_CONFIG',
                    'AX_LIB_BEECRYPT',
                    'AX_LIB_EXPAT',
                    'AX_LIB_GCRYPT',
                    'AX_LIB_NETTLE',
                    'AX_LIB_READLINE',
                    'AX_LIB_SQLITE3',
                    'AX_LIB_TAGLIB',
                    'AX_PKG_SWIG',
                    'AC_PROG_SWIG',
                    'AX_PROG_XSLTPROC',
                    'AC_PYTHON_DEVEL',
                    'AX_PYTHON_DEVEL',
                    'AM_PATH_PYTHON',
                    'AX_WITH_CURSES',
                    'AX_PATH_BDB',
                    'AX_PATH_LIB_PCRE',
                    'AC_INIT',
                    'AM_INIT_AUTOMAKE',
                    'define(',
                    ]

        for handler in handlers:
            handler.extend_keywords(keywords)

        for srcfile in srcfiles:
            nesting = 0
            in_keyword = ''
            partial = ''
            with open(srcfile, 'r', errors='surrogateescape') as f:
                for line in f:
                    if in_keyword:
                        # Continuation of a multi-line macro invocation
                        partial += ' ' + line.strip()
                        if partial.endswith('\\'):
                            partial = partial[:-1]
                        nesting = nesting + line.count('(') - line.count(')')
                        if nesting == 0:
                            process_macro(in_keyword, partial)
                            partial = ''
                            in_keyword = ''
                    else:
                        for keyword in keywords:
                            if keyword in line:
                                nesting = line.count('(') - line.count(')')
                                if nesting > 0:
                                    # Macro spans multiple lines; accumulate
                                    partial = line.strip()
                                    if partial.endswith('\\'):
                                        partial = partial[:-1]
                                    in_keyword = keyword
                                else:
                                    process_macro(keyword, line.strip())
                                break

            if in_keyword:
                # EOF reached mid-macro; process what we have
                process_macro(in_keyword, partial)

        if extravalues:
            for k, v in list(extravalues.items()):
                if v:
                    if v.startswith('$') or v.startswith('@') or v.startswith('%'):
                        # Unresolved substitution - drop it
                        del extravalues[k]
                    else:
                        extravalues[k] = v.strip('"\'').rstrip('()')

        if unmapped:
            outlines.append('# NOTE: the following prog dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmapped))))

        RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data)

        for handler in handlers:
            handler.post_process(srctree, libdeps, pcdeps, deps, outlines, inherits, values)

        if inherits:
            values['inherit'] = ' '.join(list(set(inherits)))

        return values
|
||||
|
||||
|
||||
class AutotoolsExtensionHandler(object):
    """Base class for plugins extending autotools file parsing.

    Subclasses override any of the hooks below; the defaults do nothing
    and leave handling to the core parser.
    """

    def process_macro(self, srctree, keyword, value, process_value, libdeps, pcdeps, deps, outlines, inherits, values):
        """Hook called for each macro parsed out of an autotools file.

        Only keywords AutotoolsRecipeHandler already looks for are passed
        in by default - add extra ones via extend_keywords(). Return True
        if the macro is fully handled, False otherwise.
        """
        return False

    def extend_keywords(self, keywords):
        """Add keywords to be recognised by the parser (so that
        process_macro gets called for them)."""
        return

    def process_prog(self, srctree, keyword, value, prog, deps, outlines, inherits, values):
        """Hook called for each AC_PATH_PROG / AC_CHECK_PROG etc. line.

        Return True if the program is fully handled, False otherwise.
        """
        return False

    def post_process(self, srctree, fn, pkg, deps, outlines, inherits, values):
        """Hook for any final adjustments to the collected output."""
        return
|
||||
|
||||
|
||||
class MakefileRecipeHandler(RecipeHandler):
    """Recipe creation handler of last resort for plain-Makefile projects."""

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        """Generate skeleton do_configure/do_compile/do_install tasks for a
        Makefile-only source tree.

        Runs autoscan to get a rough dependency list via the autotools
        handler, and probes 'make -n install' to decide whether an install
        target exists. Always generates the skeleton functions even if no
        Makefile is found.
        """
        if 'buildsystem' in handled:
            return False

        makefile = RecipeHandler.checkfiles(srctree, ['Makefile', 'makefile', 'GNUmakefile'])
        if makefile:
            lines_after.append('# NOTE: this is a Makefile-only piece of software, so we cannot generate much of the')
            lines_after.append('# recipe automatically - you will need to examine the Makefile yourself and ensure')
            lines_after.append('# that the appropriate arguments are passed in.')
            lines_after.append('')

            # Run autoscan to produce configure.scan, then reuse the
            # autotools dependency extraction on it
            scanfile = os.path.join(srctree, 'configure.scan')
            # NOTE(review): skipscan is assigned but never read afterwards
            skipscan = False
            try:
                stdout, stderr = bb.process.run('autoscan', cwd=srctree, shell=True)
            except bb.process.ExecutionError as e:
                skipscan = True
            if scanfile and os.path.exists(scanfile):
                values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, acfile=scanfile)
                classes.extend(values.pop('inherit', '').split())
                for var, value in values.items():
                    if var == 'DEPENDS':
                        lines_before.append('# NOTE: some of these dependencies may be optional, check the Makefile and/or upstream documentation')
                    lines_before.append('%s = "%s"' % (var, value))
                lines_before.append('')
                # Clean up autoscan's droppings
                for f in ['configure.scan', 'autoscan.log']:
                    fp = os.path.join(srctree, f)
                    if os.path.exists(fp):
                        os.remove(fp)

            self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here'])

            func = []
            func.append('# You will almost certainly need to add additional arguments here')
            func.append('oe_runmake')
            self.genfunction(lines_after, 'do_compile', func)

            # Probe whether the Makefile has an install target; a dry-run
            # exit code of 1 is treated as "target exists but would fail"
            installtarget = True
            try:
                stdout, stderr = bb.process.run('make -n install', cwd=srctree, shell=True)
            except bb.process.ExecutionError as e:
                if e.exitcode != 1:
                    installtarget = False
            func = []
            if installtarget:
                func.append('# This is a guess; additional arguments may be required')
                makeargs = ''
                # Only pass DESTDIR if the first 100 lines mention it
                with open(makefile[0], 'r', errors='surrogateescape') as f:
                    for i in range(1, 100):
                        if 'DESTDIR' in f.readline():
                            makeargs += " 'DESTDIR=${D}'"
                            break
                func.append('oe_runmake install%s' % makeargs)
            else:
                func.append('# NOTE: unable to determine what to put here - there is a Makefile but no')
                func.append('# target named "install", so you will need to define this yourself')
            self.genfunction(lines_after, 'do_install', func)

            handled.append('buildsystem')
        else:
            lines_after.append('# NOTE: no Makefile found, unable to determine what needs to be done')
            lines_after.append('')
            self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here'])
            self.genfunction(lines_after, 'do_compile', ['# Specify compilation commands here'])
            self.genfunction(lines_after, 'do_install', ['# Specify install commands here'])
|
||||
|
||||
|
||||
class VersionFileRecipeHandler(RecipeHandler):
    """Set PV from a VERSION/version file in the source tree.

    The file must consist of a single non-empty line containing a valid
    version number; anything else is ignored.
    """
    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        if 'PV' in extravalues:
            return

        candidates = RecipeHandler.checkfiles(srctree, ['VERSION', 'version'])
        found = None
        for candidate in candidates:
            lineno = 0
            with open(candidate, 'r', errors='surrogateescape') as verfile:
                for raw in verfile:
                    text = raw.rstrip().strip('"\'')
                    lineno += 1
                    if not text:
                        continue
                    if lineno > 1:
                        # Content beyond the first line - not a bare version file
                        found = None
                        break
                    if validate_pv(text):
                        found = text
            if found:
                extravalues['PV'] = found
                break
|
||||
|
||||
|
||||
class SpecFileRecipeHandler(RecipeHandler):
    """Extract fallback metadata (PN/PV/SUMMARY/HOMEPAGE/LICENSE) from an
    RPM .spec file shipped with the source, if one is present."""
    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        # Nothing to do if the primary identifiers were already determined
        if 'PV' in extravalues and 'PN' in extravalues:
            return
        filelist = RecipeHandler.checkfiles(srctree, ['*.spec'], recursive=True)
        # spec file tag -> bitbake variable
        valuemap = {'Name': 'PN',
                    'Version': 'PV',
                    'Summary': 'SUMMARY',
                    'Url': 'HOMEPAGE',
                    'License': 'LICENSE'}
        foundvalues = {}
        for fileitem in filelist:
            linecount = 0  # NOTE(review): never incremented or read - dead local
            with open(fileitem, 'r', errors='surrogateescape') as f:
                for line in f:
                    for value, varname in valuemap.items():
                        # First occurrence of each tag wins (across all spec files)
                        if line.startswith(value + ':') and not varname in foundvalues:
                            foundvalues[varname] = line.split(':', 1)[1].strip()
                            break
                    # Stop reading once every tag has been seen
                    if len(foundvalues) == len(valuemap):
                        break
        # Drop values containing unexpanded RPM macros
        for k in list(foundvalues.keys()):
            if '%' in foundvalues[k]:
                del foundvalues[k]
        if 'PV' in foundvalues:
            if not validate_pv(foundvalues['PV']):
                del foundvalues['PV']
        # LICENSE is only ever emitted as an advisory comment, not set directly
        license = foundvalues.pop('LICENSE', None)
        if license:
            liccomment = '# NOTE: spec file indicates the license may be "%s"' % license
            for i, line in enumerate(lines_before):
                if line.startswith('LICENSE ='):
                    lines_before.insert(i, liccomment)
                    break
            else:
                # No LICENSE assignment found - append the note at the end
                lines_before.append(liccomment)
        extravalues.update(foundvalues)
|
||||
|
||||
def register_recipe_handlers(handlers):
    """Register the build-system recipe handlers with recipetool.

    Priorities are set with some gaps so that other plugins can insert
    their own handlers (so avoid changing these numbers).
    """
    registrations = [
        (CmakeRecipeHandler(), 50),
        (AutotoolsRecipeHandler(), 40),
        (SconsRecipeHandler(), 30),
        (QmakeRecipeHandler(), 20),
        (MakefileRecipeHandler(), 10),
        (VersionFileRecipeHandler(), -1),
        (SpecFileRecipeHandler(), -1),
    ]
    for handler, priority in registrations:
        handlers.append((handler, priority))
|
||||
@@ -0,0 +1,747 @@
|
||||
# Recipe creation tool - create build system handler for python
|
||||
#
|
||||
# Copyright (C) 2015 Mentor Graphics Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import ast
import builtins
import codecs
import collections
import email
import glob
import imp
import importlib.machinery
import itertools
import logging
import os
import re
import subprocess
import sys

import setuptools.command.build_py

from recipetool.create import RecipeHandler
|
||||
|
||||
logger = logging.getLogger('recipetool')
|
||||
|
||||
tinfoil = None
|
||||
|
||||
|
||||
def tinfoil_init(instance):
    """Store the tinfoil instance supplied by recipetool for later use
    (e.g. pkgdata lookups in PythonRecipeHandler)."""
    global tinfoil
    tinfoil = instance
|
||||
|
||||
|
||||
class PythonRecipeHandler(RecipeHandler):
    """Recipe handler for Python projects built via setup.py
    (distutils/setuptools)."""

    # Runtime packages every python recipe is assumed to depend on
    base_pkgdeps = ['python3-core']
    # Packages never emitted as runtime dependencies
    excluded_pkgdeps = ['python3-dbg']
    # os.path is provided by python3-core
    assume_provided = ['builtins', 'os.path']
    # Assumes that the host python3 builtin_module_names is sane for target too
    assume_provided = assume_provided + list(sys.builtin_module_names)

    # PKG-INFO / setup.py metadata field -> bitbake variable
    bbvar_map = {
        'Name': 'PN',
        'Version': 'PV',
        'Home-page': 'HOMEPAGE',
        'Summary': 'SUMMARY',
        'Description': 'DESCRIPTION',
        'License': 'LICENSE',
        'Requires': 'RDEPENDS:${PN}',
        'Provides': 'RPROVIDES:${PN}',
        'Obsoletes': 'RREPLACES:${PN}',
    }
    # PN/PV are already set by recipetool core & desc can be extremely long
    excluded_fields = [
        'Description',
    ]
    # setup() keyword (capitalised, underscores mapped to dashes)
    # -> canonical PKG-INFO field name
    setup_parse_map = {
        'Url': 'Home-page',
        'Classifiers': 'Classifier',
        'Description': 'Summary',
    }
    # PKG-INFO field -> 'python3 setup.py --<arg>' query argument
    setuparg_map = {
        'Home-page': 'url',
        'Classifier': 'classifiers',
        'Summary': 'description',
        'Description': 'long-description',
    }
    # Values which are lists, used by the setup.py argument based metadata
    # extraction method, to determine how to process the setup.py output.
    setuparg_list_fields = [
        'Classifier',
        'Requires',
        'Provides',
        'Obsoletes',
        'Platform',
        'Supported-Platform',
    ]
    # Fields whose setup.py query output spans multiple lines
    setuparg_multi_line_values = ['Description']
    # (field, regex, replacement) fix-ups applied to extracted metadata by
    # apply_info_replacements(); a replacement of None drops matching values
    replacements = [
        ('License', r' +$', ''),
        ('License', r'^ +', ''),
        ('License', r' ', '-'),
        ('License', r'^GNU-', ''),
        ('License', r'-[Ll]icen[cs]e(,?-[Vv]ersion)?', ''),
        ('License', r'^UNKNOWN$', ''),

        # Remove currently unhandled version numbers from these variables
        ('Requires', r' *\([^)]*\)', ''),
        ('Provides', r' *\([^)]*\)', ''),
        ('Obsoletes', r' *\([^)]*\)', ''),
        ('Install-requires', r'^([^><= ]+).*', r'\1'),
        ('Extras-require', r'^([^><= ]+).*', r'\1'),
        ('Tests-require', r'^([^><= ]+).*', r'\1'),

        # Remove unhandled dependency on particular features (e.g. foo[PDF])
        ('Install-requires', r'\[[^\]]+\]$', ''),
    ]

    # Trove license classifier string -> bitbake LICENSE value
    classifier_license_map = {
        'License :: OSI Approved :: Academic Free License (AFL)': 'AFL',
        'License :: OSI Approved :: Apache Software License': 'Apache',
        'License :: OSI Approved :: Apple Public Source License': 'APSL',
        'License :: OSI Approved :: Artistic License': 'Artistic',
        'License :: OSI Approved :: Attribution Assurance License': 'AAL',
        'License :: OSI Approved :: BSD License': 'BSD-3-Clause',
        'License :: OSI Approved :: Boost Software License 1.0 (BSL-1.0)': 'BSL-1.0',
        'License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)': 'CECILL-2.1',
        'License :: OSI Approved :: Common Development and Distribution License 1.0 (CDDL-1.0)': 'CDDL-1.0',
        'License :: OSI Approved :: Common Public License': 'CPL',
        'License :: OSI Approved :: Eclipse Public License 1.0 (EPL-1.0)': 'EPL-1.0',
        'License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)': 'EPL-2.0',
        'License :: OSI Approved :: Eiffel Forum License': 'EFL',
        'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0',
        'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1',
        'License :: OSI Approved :: European Union Public Licence 1.2 (EUPL 1.2)': 'EUPL-1.2',
        'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0-only',
        'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0-or-later',
        'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL',
        'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL',
        'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0-only',
        'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0-or-later',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0-only',
        'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0-or-later',
        'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0-only',
        'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0-or-later',
        'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0-only',
        'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0-or-later',
        'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL',
        'License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)': 'HPND',
        'License :: OSI Approved :: IBM Public License': 'IPL',
        'License :: OSI Approved :: ISC License (ISCL)': 'ISC',
        'License :: OSI Approved :: Intel Open Source License': 'Intel',
        'License :: OSI Approved :: Jabber Open Source License': 'Jabber',
        'License :: OSI Approved :: MIT License': 'MIT',
        'License :: OSI Approved :: MIT No Attribution License (MIT-0)': 'MIT-0',
        'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL',
        'License :: OSI Approved :: MirOS License (MirOS)': 'MirOS',
        'License :: OSI Approved :: Motosoto License': 'Motosoto',
        'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0',
        'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1',
        'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)': 'MPL-2.0',
        'License :: OSI Approved :: Nethack General Public License': 'NGPL',
        'License :: OSI Approved :: Nokia Open Source License': 'Nokia',
        'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL',
        'License :: OSI Approved :: Open Software License 3.0 (OSL-3.0)': 'OSL-3.0',
        'License :: OSI Approved :: PostgreSQL License': 'PostgreSQL',
        'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python',
        'License :: OSI Approved :: Python Software Foundation License': 'PSF-2.0',
        'License :: OSI Approved :: Qt Public License (QPL)': 'QPL',
        'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL',
        'License :: OSI Approved :: SIL Open Font License 1.1 (OFL-1.1)': 'OFL-1.1',
        'License :: OSI Approved :: Sleepycat License': 'Sleepycat',
        'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': 'SISSL',
        'License :: OSI Approved :: Sun Public License': 'SPL',
        'License :: OSI Approved :: The Unlicense (Unlicense)': 'Unlicense',
        'License :: OSI Approved :: Universal Permissive License (UPL)': 'UPL-1.0',
        'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA',
        'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0',
        'License :: OSI Approved :: W3C License': 'W3C',
        'License :: OSI Approved :: X.Net License': 'Xnet',
        'License :: OSI Approved :: Zope Public License': 'ZPL',
        'License :: OSI Approved :: zlib/libpng License': 'Zlib',
        'License :: Other/Proprietary License': 'Proprietary',
        'License :: Public Domain': 'PD',
    }

    def __init__(self):
        # No per-instance state; all configuration is class-level
        pass
|
||||
|
||||
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Detect a setup.py based project and populate the recipe skeleton.

    Fills extravalues with metadata mapped via bbvar_map, appends the
    appropriate bbclass (setuptools3/distutils3) to classes, and emits
    PACKAGECONFIG/RDEPENDS suggestions into lines_after. Returns False
    when another handler already claimed the build system or no usable
    setup.py exists.
    """
    if 'buildsystem' in handled:
        return False

    # Check for non-zero size setup.py files
    setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py'])
    for fn in setupfiles:
        if os.path.getsize(fn):
            break
    else:
        # No (non-empty) setup.py - not our build system
        return False

    # setup.py is always parsed to get at certain required information, such as
    # distutils vs setuptools
    #
    # If egg info is available, we use it for both its PKG-INFO metadata
    # and for its requires.txt for install_requires.
    # If PKG-INFO is available but no egg info is, we use that for metadata in preference to
    # the parsed setup.py, but use the install_requires info from the
    # parsed setup.py.

    setupscript = os.path.join(srctree, 'setup.py')
    try:
        setup_info, uses_setuptools, setup_non_literals, extensions = self.parse_setup_py(setupscript)
    except Exception:
        # Parse failures are non-fatal; assume setuptools and carry on
        logger.exception("Failed to parse setup.py")
        setup_info, uses_setuptools, setup_non_literals, extensions = {}, True, [], []

    egginfo = glob.glob(os.path.join(srctree, '*.egg-info'))
    if egginfo:
        # Preferred source: egg-info metadata plus requires.txt
        info = self.get_pkginfo(os.path.join(egginfo[0], 'PKG-INFO'))
        requires_txt = os.path.join(egginfo[0], 'requires.txt')
        if os.path.exists(requires_txt):
            with codecs.open(requires_txt) as f:
                inst_req = []
                extras_req = collections.defaultdict(list)
                current_feature = None
                for line in f.readlines():
                    line = line.rstrip()
                    if not line:
                        continue

                    if line.startswith('['):
                        # A "[feature]" section header starts an extras group.
                        # PACKAGECONFIG must not contain expressions or whitespace,
                        # so sanitise the feature name character by character
                        line = line.replace(" ", "")
                        line = line.replace(':', "")
                        line = line.replace('.', "-dot-")
                        line = line.replace('"', "")
                        line = line.replace('<', "-smaller-")
                        line = line.replace('>', "-bigger-")
                        line = line.replace('_', "-")
                        line = line.replace('(', "")
                        line = line.replace(')', "")
                        line = line.replace('!', "-not-")
                        line = line.replace('=', "-equals-")
                        current_feature = line[1:-1]
                    elif current_feature:
                        extras_req[current_feature].append(line)
                    else:
                        inst_req.append(line)
                info['Install-requires'] = inst_req
                info['Extras-require'] = extras_req
    elif RecipeHandler.checkfiles(srctree, ['PKG-INFO']):
        # PKG-INFO for metadata, but requirements come from parsed setup.py
        info = self.get_pkginfo(os.path.join(srctree, 'PKG-INFO'))

        if setup_info:
            if 'Install-requires' in setup_info:
                info['Install-requires'] = setup_info['Install-requires']
            if 'Extras-require' in setup_info:
                info['Extras-require'] = setup_info['Extras-require']
    else:
        # Fall back to the parsed setup.py, or executing setup.py queries
        if setup_info:
            info = setup_info
        else:
            info = self.get_setup_args_info(setupscript)

    # Grab the license value before applying replacements
    license_str = info.get('License', '').strip()

    self.apply_info_replacements(info)

    if uses_setuptools:
        classes.append('setuptools3')
    else:
        classes.append('distutils3')

    if license_str:
        # Record the raw upstream license string as a comment above LICENSE
        for i, line in enumerate(lines_before):
            if line.startswith('LICENSE = '):
                lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str)
                break

    if 'Classifier' in info:
        # Derive LICENSE from trove classifiers, disambiguating generic
        # Apache/GPL/LGPL entries using the free-form license string
        existing_licenses = info.get('License', '')
        licenses = []
        for classifier in info['Classifier']:
            if classifier in self.classifier_license_map:
                license = self.classifier_license_map[classifier]
                if license == 'Apache' and 'Apache-2.0' in existing_licenses:
                    license = 'Apache-2.0'
                elif license == 'GPL':
                    if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
                        license = 'GPL-2.0'
                    elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
                        license = 'GPL-3.0'
                elif license == 'LGPL':
                    if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
                        license = 'LGPL-2.1'
                    elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
                        license = 'LGPL-2.0'
                    elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
                        license = 'LGPL-3.0'
                licenses.append(license)

        if licenses:
            info['License'] = ' & '.join(licenses)

    # Map PKG-INFO & setup.py fields to bitbake variables
    for field, values in info.items():
        if field in self.excluded_fields:
            continue

        if field not in self.bbvar_map:
            continue

        if isinstance(values, str):
            value = values
        else:
            # List-valued fields are flattened to a space-separated string
            value = ' '.join(str(v) for v in values if v)

        bbvar = self.bbvar_map[field]
        # Values already set by recipetool core win
        if bbvar not in extravalues and value:
            extravalues[bbvar] = value

    mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals)

    extras_req = set()
    if 'Extras-require' in info:
        extras_req = info['Extras-require']
        if extras_req:
            lines_after.append('# The following configs & dependencies are from setuptools extras_require.')
            lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.')
            lines_after.append('# The upstream names may not correspond exactly to bitbake package names.')
            lines_after.append('# The configs are might not correct, since PACKAGECONFIG does not support expressions as may used in requires.txt - they are just replaced by text.')
            lines_after.append('#')
            lines_after.append('# Uncomment this line to enable all the optional features.')
            lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req)))
            for feature, feature_reqs in extras_req.items():
                unmapped_deps.difference_update(feature_reqs)

                feature_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(feature_reqs))
                lines_after.append('PACKAGECONFIG[{}] = ",,,{}"'.format(feature.lower(), ' '.join(feature_req_deps)))

    inst_reqs = set()
    if 'Install-requires' in info:
        if extras_req:
            lines_after.append('')
        inst_reqs = info['Install-requires']
        if inst_reqs:
            unmapped_deps.difference_update(inst_reqs)

            inst_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(inst_reqs))
            lines_after.append('# WARNING: the following rdepends are from setuptools install_requires. These')
            lines_after.append('# upstream names may not correspond exactly to bitbake package names.')
            lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(inst_req_deps)))

    if mapped_deps:
        name = info.get('Name')
        # NOTE(review): name[0] is the first *character* when Name is a
        # string - looks like this expects a list; confirm intent
        if name and name[0] in mapped_deps:
            # Attempt to avoid self-reference
            mapped_deps.remove(name[0])
        mapped_deps -= set(self.excluded_pkgdeps)
        if inst_reqs or extras_req:
            lines_after.append('')
        lines_after.append('# WARNING: the following rdepends are determined through basic analysis of the')
        lines_after.append('# python sources, and might not be 100% accurate.')
        lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(sorted(mapped_deps))))

    # C extensions and assumed-provided modules are not real unmapped deps
    unmapped_deps -= set(extensions)
    unmapped_deps -= set(self.assume_provided)
    if unmapped_deps:
        if mapped_deps:
            lines_after.append('')
        lines_after.append('# WARNING: We were unable to map the following python package/module')
        lines_after.append('# dependencies to the bitbake packages which include them:')
        lines_after.extend('# {}'.format(d) for d in sorted(unmapped_deps))

    handled.append('buildsystem')
|
||||
|
||||
def get_pkginfo(self, pkginfo_fn):
    """Parse an RFC 822 style PKG-INFO file into a dict.

    Fields occurring once map to their string value; repeated fields
    (e.g. Classifier) map to a list of all their values.

    :param pkginfo_fn: path to the PKG-INFO file
    :return: dict of field name -> str or list of str
    """
    # Use a context manager so the file handle is closed promptly;
    # the original passed open() directly and leaked the handle.
    with open(pkginfo_fn, 'r') as f:
        msg = email.message_from_file(f)
    msginfo = {}
    for field in msg.keys():
        values = msg.get_all(field)
        # Collapse single-valued headers to a plain string
        if len(values) == 1:
            msginfo[field] = values[0]
        else:
            msginfo[field] = values
    return msginfo
|
||||
|
||||
def parse_setup_py(self, setupscript='./setup.py'):
    """Statically parse setup.py via the AST.

    Returns a tuple (info, uses_setuptools, non_literals, extensions)
    where info/non_literals are keyed by PKG-INFO style field names.
    """
    with codecs.open(setupscript) as f:
        info, imported_modules, non_literals, extensions = gather_setup_info(f)

    def _to_field_name(arg):
        # Naive mapping of a setup() keyword to a PKG-INFO field name:
        # underscores become dashes, first letter capitalised, then any
        # explicit override from setup_parse_map
        field = arg.replace('_', '-')
        field = field[0].upper() + field[1:]
        return self.setup_parse_map.get(field, field)

    for mapping in (info, non_literals):
        for arg in list(mapping):
            if arg is None:
                continue
            field = _to_field_name(arg)
            if field != arg:
                mapping[field] = mapping.pop(arg)

    uses_setuptools = 'setuptools' in imported_modules
    return info, uses_setuptools, non_literals, extensions
|
||||
|
||||
def get_setup_args_info(self, setupscript='./setup.py'):
    """Extract metadata by executing 'python3 setup.py --<field>' queries.

    Used as a last resort when static parsing yields nothing. Failures
    of individual queries are silently skipped.
    """
    cmd = ['python3', setupscript]
    info = {}
    keys = set(self.bbvar_map.keys())
    keys |= set(self.setuparg_list_fields)
    keys |= set(self.setuparg_multi_line_values)
    # Partition the fields by (is-list-valued, is-multi-line).
    # NOTE(review): groupby over an unsorted set can emit several groups
    # for the same key pair; harmless here since each group is handled
    # identically, but sorting first would be cleaner.
    grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values))
    for index, keys in grouped_keys:
        if index == (True, False):
            # Splitlines output for each arg as a list value
            for key in keys:
                arg = self.setuparg_map.get(key, key.lower())
                try:
                    arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
                except (OSError, subprocess.CalledProcessError):
                    pass
                else:
                    info[key] = [l.rstrip() for l in arg_info.splitlines()]
        elif index == (False, True):
            # Entire output for each arg
            for key in keys:
                arg = self.setuparg_map.get(key, key.lower())
                try:
                    arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
                except (OSError, subprocess.CalledProcessError):
                    pass
                else:
                    info[key] = arg_info
        else:
            # Plain single-line fields can all be queried in one invocation
            info.update(self.get_setup_byline(list(keys), setupscript))
    return info
|
||||
|
||||
def get_setup_byline(self, fields, setupscript='./setup.py'):
    """Query multiple single-line setup.py fields in one invocation.

    Each requested field produces exactly one line of output, so the
    results are matched back to fields by line number.
    """
    info = {}

    cmd = ['python3', setupscript]
    cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields)
    try:
        info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines()
    except (OSError, subprocess.CalledProcessError):
        # Query failed - return whatever we have (an empty dict)
        pass
    else:
        if len(fields) != len(info_lines):
            # A field produced multiple lines; the mapping is unusable
            logger.error('Mismatch between setup.py output lines and number of fields')
            sys.exit(1)

        for lineno, line in enumerate(info_lines):
            line = line.rstrip()
            info[fields[lineno]] = line
    return info
|
||||
|
||||
def apply_info_replacements(self, info):
    """Apply the regex fix-ups from self.replacements to info, in place.

    A replacement of None drops values matching the pattern; string
    values may be deleted entirely, list/dict-of-list values have the
    matching elements filtered out.
    """
    def replace_value(search, replace, value):
        # Returns None to drop the value, otherwise the (possibly
        # unmodified) value.
        if replace is None:
            if re.search(search, value):
                return None
        else:
            new_value = re.sub(search, replace, value)
            if value != new_value:
                return new_value
        return value

    for variable, search, replace in self.replacements:
        if variable not in info:
            continue

        value = info[variable]
        if isinstance(value, str):
            new_value = replace_value(search, replace, value)
            if new_value is None:
                del info[variable]
            elif new_value != value:
                info[variable] = new_value
        elif hasattr(value, 'items'):
            # Dict of lists (e.g. Extras-require): filter each list.
            # BUGFIX: the original compared each element's replacement
            # against the whole container (always unequal), accidentally
            # reducing the condition to "is not None" - made explicit here.
            for dkey, dvalue in list(value.items()):
                new_list = [v for v in (replace_value(search, replace, a) for a in dvalue)
                            if v is not None]
                if dvalue != new_list:
                    value[dkey] = new_list
        else:
            # Plain list of values (e.g. Install-requires)
            new_list = [v for v in (replace_value(search, replace, a) for a in value)
                        if v is not None]
            if value != new_list:
                info[variable] = new_list
|
||||
|
||||
def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals):
    """Determine runtime dependencies by scanning the project's python files.

    Uses setuptools' build_py machinery to resolve package directories,
    then maps scanned imports to providing packages via pkgdata.
    Returns (mapped_deps, unmapped_deps) as sets.
    """
    if 'Package-dir' in setup_info:
        package_dir = setup_info['Package-dir']
    else:
        package_dir = {}

    dist = setuptools.Distribution()

    # Minimal build_py command object - just enough state for
    # get_package_dir() to resolve package names to directories
    class PackageDir(setuptools.command.build_py.build_py):
        def __init__(self, package_dir):
            self.package_dir = package_dir
            self.dist = dist
            super().__init__(self.dist)

    pd = PackageDir(package_dir)
    to_scan = []
    # Only trust the declared modules/packages/scripts if all of them were
    # literal values in setup.py; otherwise scan the whole tree
    if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']):
        if 'Py-modules' in setup_info:
            for module in setup_info['Py-modules']:
                try:
                    package, module = module.rsplit('.', 1)
                except ValueError:
                    # Top-level module with no package part
                    package, module = '.', module
                module_path = os.path.join(pd.get_package_dir(package), module + '.py')
                to_scan.append(module_path)

        if 'Packages' in setup_info:
            for package in setup_info['Packages']:
                to_scan.append(pd.get_package_dir(package))

        if 'Scripts' in setup_info:
            to_scan.extend(setup_info['Scripts'])
    else:
        logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.")

    if not to_scan:
        to_scan = ['.']

    logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan))

    provided_packages = self.parse_pkgdata_for_python_packages()
    scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan])
    mapped_deps, unmapped_deps = set(self.base_pkgdeps), set()
    for dep in scanned_deps:
        mapped = provided_packages.get(dep)
        if mapped:
            logger.debug('Mapped %s to %s' % (dep, mapped))
            mapped_deps.add(mapped)
        else:
            logger.debug('Could not map %s' % dep)
            unmapped_deps.add(dep)
    return mapped_deps, unmapped_deps
|
||||
|
||||
def scan_python_dependencies(self, paths):
    """Run the external 'pythondeps' helper over paths and return the set
    of imported module names that the scanned code does not itself provide.

    If the helper cannot be run, the corresponding information is simply
    omitted (best effort).
    """
    found = set()
    try:
        raw_deps = self.run_command(['pythondeps', '-d'] + paths)
    except (OSError, subprocess.CalledProcessError):
        pass
    else:
        # Output format: "<module>\t<importing file>" per line
        for entry in raw_deps.splitlines():
            dep, filename = entry.rstrip().split('\t', 1)
            if filename.endswith('/setup.py'):
                # setup.py's own imports are build-time only
                continue
            found.add(dep)

    try:
        raw_provides = self.run_command(['pythondeps', '-p'] + paths)
    except (OSError, subprocess.CalledProcessError):
        pass
    else:
        # Modules the source tree itself provides are not dependencies;
        # 'setup' (from setup.py) and blank lines are ignored too
        provided = {entry.rstrip() for entry in raw_provides.splitlines()}
        provided.discard('')
        provided.discard('setup')
        found -= provided

    return found
|
||||
|
||||
def parse_pkgdata_for_python_packages(self):
    """Build a map of python module/package name -> providing binary package,
    by scanning the FILES_INFO entries in the target pkgdata.

    Requires a live tinfoil instance (see tinfoil_init).
    """
    # BUGFIX: the imp module was deprecated since 3.4 and removed in
    # Python 3.12; importlib.machinery.all_suffixes() yields the same
    # list of module file suffixes.
    suffixes = importlib.machinery.all_suffixes()
    pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')

    # Parse python3-dir.bbclass on a copy of the config data to learn the
    # target's site-packages location
    ldata = tinfoil.config_data.createCopy()
    bb.parse.handle('classes-recipe/python3-dir.bbclass', ldata, True)
    python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')

    dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload')
    python_dirs = [python_sitedir + os.sep,
                   os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep,
                   os.path.dirname(python_sitedir) + os.sep]
    packages = {}
    for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)):
        files_info = None
        with open(pkgdatafile, 'r') as f:
            for line in f.readlines():
                field, value = line.split(': ', 1)
                if field.startswith('FILES_INFO'):
                    files_info = ast.literal_eval(value)
                    break
            else:
                # No FILES_INFO in this pkgdata file - skip it
                continue

        for fn in files_info:
            # Only consider files with a python module suffix (.py/.so/...)
            for suffix in suffixes:
                if fn.endswith(suffix):
                    break
            else:
                continue

            if fn.startswith(dynload_dir + os.sep):
                # C extension in lib-dynload: the module name is the part
                # of the basename before the first dot
                if '/.debug/' in fn:
                    continue
                base = os.path.basename(fn)
                provided = base.split('.', 1)[0]
                packages[provided] = os.path.basename(pkgdatafile)
                continue

            for python_dir in python_dirs:
                if fn.startswith(python_dir):
                    relpath = fn[len(python_dir):]
                    relstart, _, relremaining = relpath.partition(os.sep)
                    if relstart.endswith('.egg'):
                        # Strip a leading foo.egg directory component
                        relpath = relremaining
                    base, _ = os.path.splitext(relpath)

                    if '/.debug/' in base:
                        continue
                    if os.path.basename(base) == '__init__':
                        # A package is named by its directory
                        base = os.path.dirname(base)
                    base = base.replace(os.sep + os.sep, os.sep)
                    provided = base.replace(os.sep, '.')
                    packages[provided] = os.path.basename(pkgdatafile)
    return packages
|
||||
|
||||
@classmethod
def run_command(cls, cmd, **popenargs):
    """Run cmd and return its decoded stdout.

    stderr is folded into stdout unless the caller overrides it.
    Logs and re-raises OSError / CalledProcessError on failure.
    """
    if 'stderr' not in popenargs:
        popenargs['stderr'] = subprocess.STDOUT
    try:
        return subprocess.check_output(cmd, **popenargs).decode('utf-8')
    except OSError as exc:
        # BUGFIX: logging uses %-style formatting, so the original '{}'
        # placeholders were never substituted into the message
        logger.error('Unable to run `%s`: %s', ' '.join(cmd), exc)
        raise
    except subprocess.CalledProcessError as exc:
        logger.error('Unable to run `%s`: %s', ' '.join(cmd), exc.output)
        raise
|
||||
|
||||
|
||||
def gather_setup_info(fileobj):
    """Statically analyse a setup.py file object.

    Returns (keywords, imported_modules, non_literals, extensions):
    keywords are the literal setup() arguments, non_literals the
    arguments that could not be evaluated statically, and extensions
    the names of any Extension() modules declared with literal args.
    """
    parsed = ast.parse(fileobj.read(), fileobj.name)
    visitor = SetupScriptVisitor()
    visitor.visit(parsed)

    non_literals, extensions = {}, []
    for key, value in list(visitor.keywords.items()):
        if key == 'ext_modules':
            if isinstance(value, list):
                for ext in value:
                    # Only Extension(...) calls whose arguments are all
                    # literals can be recorded (first arg = module name)
                    if (isinstance(ext, ast.Call) and
                        isinstance(ext.func, ast.Name) and
                        ext.func.id == 'Extension' and
                        not has_non_literals(ext.args)):
                        extensions.append(ext.args[0])
        elif has_non_literals(value):
            # Move non-literal arguments out of the keywords dict
            non_literals[key] = value
            del visitor.keywords[key]

    return visitor.keywords, visitor.imported_modules, non_literals, extensions
|
||||
|
||||
|
||||
class SetupScriptVisitor(ast.NodeVisitor):
    """AST visitor that records imported module names and captures the
    keyword arguments of any top-level setup() call."""

    def __init__(self):
        super().__init__()
        self.keywords = {}
        self.non_literals = []
        self.imported_modules = set()

    def visit_Expr(self, node):
        value = node.value
        is_setup_call = (isinstance(value, ast.Call)
                         and isinstance(value.func, ast.Name)
                         and value.func.id == 'setup')
        if is_setup_call:
            self.visit_setup(value)

    def visit_setup(self, node):
        # Reduce literal arguments to plain Python values; anything left
        # as an AST node is recorded as non-literal
        call = LiteralAstTransform().visit(node)
        self.keywords = call.keywords
        self.non_literals.extend(k for k, v in self.keywords.items()
                                 if has_non_literals(v))

    def visit_Import(self, node):
        self.imported_modules.update(alias.name for alias in node.names)

    def visit_ImportFrom(self, node):
        self.imported_modules.add(node.module)
|
||||
|
||||
|
||||
class LiteralAstTransform(ast.NodeTransformer):
    """Simplify the ast through evaluation of literals.

    visit() returns plain Python values for literal sub-trees (tuples,
    lists, sets, dicts, constants, builtin names) and leaves anything it
    cannot evaluate as an AST node.
    """
    # AST fields that carry no useful information for evaluation.
    excluded_fields = ['ctx']

    def visit(self, node):
        # Values reached through generic_visit may already have been
        # evaluated to plain Python objects; pass those straight through.
        if not isinstance(node, ast.AST):
            return node
        else:
            return ast.NodeTransformer.visit(self, node)

    def generic_visit(self, node):
        try:
            # Fast path: a fully literal sub-tree evaluates in one go.
            return ast.literal_eval(node)
        except ValueError:
            # Partially literal: evaluate each child field in place and
            # return the (still AST) node.
            for field, value in ast.iter_fields(node):
                if field in self.excluded_fields:
                    delattr(node, field)
                if value is None:
                    continue

                if isinstance(value, list):
                    if field in ('keywords', 'kwargs'):
                        # Keyword argument lists become plain dicts.
                        new_value = dict((kw.arg, self.visit(kw.value)) for kw in value)
                    else:
                        new_value = [self.visit(i) for i in value]
                else:
                    new_value = self.visit(value)
                setattr(node, field, new_value)
            return node

    def visit_Name(self, node):
        # Fix: the original called hasattr('__builtins__', node.id), i.e. it
        # probed attributes of the *string* '__builtins__' (always false), so
        # builtin names were never resolved. Use the builtins module instead;
        # the __builtins__ global itself is unsuitable because it is a plain
        # dict in imported modules.
        import builtins
        if hasattr(builtins, node.id):
            return getattr(builtins, node.id)
        else:
            return self.generic_visit(node)

    def visit_Tuple(self, node):
        return tuple(self.visit(v) for v in node.elts)

    def visit_List(self, node):
        return [self.visit(v) for v in node.elts]

    def visit_Set(self, node):
        return set(self.visit(v) for v in node.elts)

    def visit_Dict(self, node):
        keys = (self.visit(k) for k in node.keys)
        values = (self.visit(v) for v in node.values)
        return dict(zip(keys, values))
|
||||
|
||||
|
||||
def has_non_literals(value):
    """Return True when *value* (possibly nested) still contains an
    unevaluated AST node; falsy otherwise."""
    if isinstance(value, ast.AST):
        return True
    if isinstance(value, str):
        # Strings are iterable but can never contain an AST node, so they
        # must be handled before the generic iterable check below.
        return False
    if hasattr(value, 'values'):
        # Mapping-like: only the values can carry AST nodes.
        return any(has_non_literals(v) for v in value.values())
    if hasattr(value, '__iter__'):
        return any(has_non_literals(v) for v in value)
|
||||
|
||||
|
||||
def register_recipe_handlers(handlers):
    """Register the Python recipe handler with recipetool."""
    # Priority 70: must outrank the makefile fallback handler so that
    # setup.py-based trees are claimed by this handler first.
    python_handler = PythonRecipeHandler()
    handlers.append((python_handler, 70))
|
||||
@@ -0,0 +1,89 @@
|
||||
# Recipe creation tool - kernel support plugin
|
||||
#
|
||||
# Copyright (C) 2016 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import re
|
||||
import logging
|
||||
from recipetool.create import RecipeHandler, read_pkgconfig_provides, validate_pv
|
||||
|
||||
logger = logging.getLogger('recipetool')
|
||||
|
||||
tinfoil = None
|
||||
|
||||
def tinfoil_init(instance):
    """Store the tinfoil instance handed over by recipetool so that
    module-level code (KernelRecipeHandler) can query bitbake config data."""
    global tinfoil
    tinfoil = instance
|
||||
|
||||
|
||||
class KernelRecipeHandler(RecipeHandler):
    """Recipe handler for Linux kernel source trees.

    Detects a kernel tree and generates a recipe based on the
    meta-skeleton linux-yocto-custom.bb template.
    """
    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        import bb.process
        if 'buildsystem' in handled:
            # Another handler already claimed this tree.
            return False

        # Heuristic: all of these must exist for srctree to be a kernel tree.
        for tell in ['arch', 'firmware', 'Kbuild', 'Kconfig']:
            if not os.path.exists(os.path.join(srctree, tell)):
                return False

        handled.append('buildsystem')
        # The template below fully replaces whatever the generic code
        # collected, so discard it.
        del lines_after[:]
        del classes[:]
        template = os.path.join(tinfoil.config_data.getVar('COREBASE'), 'meta-skeleton', 'recipes-kernel', 'linux', 'linux-yocto-custom.bb')
        def handle_var(varname, origvalue, op, newlines):
            # bb.utils.edit_metadata() callback: returns
            # (newvalue, op, indent, minbreak) for each variable in varlist.
            if varname in ['SRCREV', 'SRCREV_machine']:
                # Drop the template's explanatory comment block above the var.
                while newlines[-1].startswith('#'):
                    del newlines[-1]
                try:
                    stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree, shell=True)
                except bb.process.ExecutionError as e:
                    # Not a git tree (or git failed): keep the template value.
                    stdout = None
                if stdout:
                    return stdout.strip(), op, 0, True
            elif varname == 'LINUX_VERSION':
                # Recover the kernel version from the top of the Makefile.
                makefile = os.path.join(srctree, 'Makefile')
                if os.path.exists(makefile):
                    kversion = -1
                    kpatchlevel = -1
                    ksublevel = -1
                    kextraversion = ''
                    with open(makefile, 'r', errors='surrogateescape') as f:
                        for i, line in enumerate(f):
                            # The VERSION/PATCHLEVEL/... block is always within
                            # the first few lines; stop scanning early.
                            if i > 10:
                                break
                            if line.startswith('VERSION ='):
                                kversion = int(line.split('=')[1].strip())
                            elif line.startswith('PATCHLEVEL ='):
                                kpatchlevel = int(line.split('=')[1].strip())
                            elif line.startswith('SUBLEVEL ='):
                                ksublevel = int(line.split('=')[1].strip())
                            elif line.startswith('EXTRAVERSION ='):
                                kextraversion = line.split('=')[1].strip()
                    version = ''
                    if kversion > -1 and kpatchlevel > -1:
                        version = '%d.%d' % (kversion, kpatchlevel)
                        if ksublevel > -1:
                            version += '.%d' % ksublevel
                        version += kextraversion
                    if version:
                        return version, op, 0, True
            elif varname == 'SRC_URI':
                # Keep the template value, just strip the comment block above.
                while newlines[-1].startswith('#'):
                    del newlines[-1]
            elif varname == 'COMPATIBLE_MACHINE':
                while newlines[-1].startswith('#'):
                    del newlines[-1]
                # Restrict the recipe to the currently configured MACHINE.
                machine = tinfoil.config_data.getVar('MACHINE')
                return machine, op, 0, True
            # Default: leave the variable untouched.
            return origvalue, op, 0, True
        with open(template, 'r') as f:
            varlist = ['SRCREV', 'SRCREV_machine', 'SRC_URI', 'LINUX_VERSION', 'COMPATIBLE_MACHINE']
            (_, newlines) = bb.utils.edit_metadata(f, varlist, handle_var)
        # The edited template becomes the body of the generated recipe.
        lines_before[:] = [line.rstrip('\n') for line in newlines]

        return True
|
||||
|
||||
def register_recipe_handlers(handlers):
    """Register the kernel recipe handler with recipetool."""
    # Priority 100: a full kernel tree must win over every generic handler.
    kernel_handler = KernelRecipeHandler()
    handlers.append((kernel_handler, 100))
|
||||
@@ -0,0 +1,142 @@
|
||||
# Recipe creation tool - kernel module support plugin
|
||||
#
|
||||
# Copyright (C) 2016 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import re
|
||||
import logging
|
||||
from recipetool.create import RecipeHandler, read_pkgconfig_provides, validate_pv
|
||||
|
||||
logger = logging.getLogger('recipetool')
|
||||
|
||||
tinfoil = None
|
||||
|
||||
def tinfoil_init(instance):
    """Store the tinfoil instance handed over by recipetool for use by
    module-level code in this plugin."""
    global tinfoil
    tinfoil = instance
|
||||
|
||||
|
||||
class KernelModuleRecipeHandler(RecipeHandler):
    """Recipe handler for out-of-tree kernel modules.

    Detects a source tree containing a kernel module (a C file including
    <linux/module.h> next to a Makefile) and emits recipe tweaks around
    module.bbclass.
    """
    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        import bb.process
        if 'buildsystem' in handled:
            # Another handler already claimed this tree.
            return False

        module_inc_re = re.compile(r'^#include\s+<linux/module.h>$')
        makefiles = []
        is_module = False

        # NOTE(review): duplicate initialization — makefiles is already set
        # to [] a few lines above; harmless but redundant.
        makefiles = []

        files = RecipeHandler.checkfiles(srctree, ['*.c', '*.h'], recursive=True, excludedirs=['contrib', 'test', 'examples'])
        if files:
            for cfile in files:
                # Look in same dir or parent for Makefile
                for makefile in [os.path.join(os.path.dirname(cfile), 'Makefile'), os.path.join(os.path.dirname(os.path.dirname(cfile)), 'Makefile')]:
                    if makefile in makefiles:
                        # Already recorded for an earlier source file; stop
                        # the candidate loop (break skips the for-else).
                        break
                    else:
                        if os.path.exists(makefile):
                            makefiles.append(makefile)
                            break
                else:
                    # Neither candidate Makefile exists: this source file
                    # cannot belong to a module build, try the next one.
                    continue
                # Scan the source file itself for the module.h include.
                with open(cfile, 'r', errors='surrogateescape') as f:
                    for line in f:
                        if module_inc_re.match(line.strip()):
                            is_module = True
                            break
                if is_module:
                    break

        if is_module:
            classes.append('module')
            handled.append('buildsystem')
            # module.bbclass and the classes it inherits do most of the hard
            # work, but we need to tweak it slightly depending on what the
            # Makefile does (and there is a range of those)
            # Check the makefile for the appropriate install target
            install_lines = []
            compile_lines = []
            in_install = False
            in_compile = False
            install_target = None
            # Note: 'makefile' here is the last Makefile found in the
            # detection loop above.
            with open(makefile, 'r', errors='surrogateescape') as f:
                for line in f:
                    if line.startswith('install:'):
                        # Only take 'install:' if no install recipe was
                        # collected yet (modules_install takes precedence).
                        if not install_lines:
                            in_install = True
                            install_target = 'install'
                    elif line.startswith('modules_install:'):
                        install_lines = []
                        in_install = True
                        install_target = 'modules_install'
                    elif line.startswith('modules:'):
                        compile_lines = []
                        in_compile = True
                    elif line.startswith(('all:', 'default:')):
                        if not compile_lines:
                            in_compile = True
                    elif line:
                        if line[0] == '\t':
                            # Tab-indented line: part of the current rule body.
                            if in_install:
                                install_lines.append(line)
                            elif in_compile:
                                compile_lines.append(line)
                        elif ':' in line:
                            # A new rule header ends the current rule body.
                            in_install = False
                            in_compile = False

            def check_target(lines, install):
                # Scan a rule body for the kernel dir passed via 'make -C'
                # and (for install rules) manual 'install ... .ko' commands.
                kdirpath = ''
                manual_install = False
                for line in lines:
                    splitline = line.split()
                    if splitline[0] in ['make', 'gmake', '$(MAKE)']:
                        if '-C' in splitline:
                            idx = splitline.index('-C') + 1
                            if idx < len(splitline):
                                kdirpath = splitline[idx]
                                break
                    elif install and splitline[0] == 'install':
                        if '.ko' in line:
                            manual_install = True
                return kdirpath, manual_install

            kdirpath = None
            manual_install = False
            if install_lines:
                kdirpath, manual_install = check_target(install_lines, install=True)
            if compile_lines and not kdirpath:
                kdirpath, _ = check_target(compile_lines, install=False)

            if manual_install or not install_lines:
                # No usable install target: let the kernel build system
                # install the module directly.
                lines_after.append('EXTRA_OEMAKE:append:task-install = " -C ${STAGING_KERNEL_DIR} M=${S}"')
            elif install_target and install_target != 'modules_install':
                lines_after.append('MODULES_INSTALL_TARGET = "install"')

            warnmsg = None
            kdirvar = None
            if kdirpath:
                # If the kernel dir is a make variable reference, pass the
                # staging kernel dir through that variable.
                res = re.match(r'\$\(([^$)]+)\)', kdirpath)
                if res:
                    kdirvar = res.group(1)
                    if kdirvar != 'KERNEL_SRC':
                        lines_after.append('EXTRA_OEMAKE += "%s=${STAGING_KERNEL_DIR}"' % kdirvar)
                elif kdirpath.startswith('/lib/'):
                    warnmsg = 'Kernel path in install makefile is hardcoded - you will need to patch the makefile'
            if not kdirvar and not warnmsg:
                warnmsg = 'Unable to find means of passing kernel path into install makefile - if kernel path is hardcoded you will need to patch the makefile'
            if warnmsg:
                warnmsg += '. Note that the variable KERNEL_SRC will be passed in as the kernel source path.'
                logger.warning(warnmsg)
                # Also record the warning in the generated recipe itself.
                lines_after.append('# %s' % warnmsg)

            return True

        return False
|
||||
|
||||
def register_recipe_handlers(handlers):
    """Register the out-of-tree kernel module handler with recipetool."""
    # Priority 15: run late so more specific buildsystem handlers get a
    # chance to claim the tree first.
    kmod_handler = KernelModuleRecipeHandler()
    handlers.append((kmod_handler, 15))
|
||||
@@ -0,0 +1,310 @@
|
||||
# Copyright (C) 2016 Intel Corporation
|
||||
# Copyright (C) 2020 Savoir-Faire Linux
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Recipe creation tool - npm module support plugin"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import tempfile
|
||||
import bb
|
||||
from bb.fetch2.npm import NpmEnvironment
|
||||
from bb.fetch2.npm import npm_package
|
||||
from bb.fetch2.npmsw import foreach_dependencies
|
||||
from recipetool.create import RecipeHandler
|
||||
from recipetool.create import get_license_md5sums
|
||||
from recipetool.create import guess_license
|
||||
from recipetool.create import split_pkg_licenses
|
||||
logger = logging.getLogger('recipetool')
|
||||
|
||||
TINFOIL = None
|
||||
|
||||
def tinfoil_init(instance):
    """Initialize tinfoil.

    Stores the tinfoil instance handed over by recipetool so that
    NpmRecipeHandler can parse recipes and read configuration data.
    """
    global TINFOIL
    TINFOIL = instance
|
||||
|
||||
class NpmRecipeHandler(RecipeHandler):
    """Class to handle the npm recipe creation.

    Detects a package.json in the source tree, generates an
    npm-shrinkwrap.json via the native npm, fetches the dependency tree and
    fills in PN/PV/SUMMARY/HOMEPAGE/LIC_FILES_CHKSUM/SRC_URI accordingly.
    """

    @staticmethod
    def _get_registry(lines):
        """Get the registry value from the 'npm://registry' url"""
        registry = None

        def _handle_registry(varname, origvalue, op, newlines):
            # edit_metadata() callback: only captures the registry,
            # never modifies SRC_URI.
            nonlocal registry
            if origvalue.startswith("npm://"):
                # Strip the ';...' fetcher parameters and turn the npm://
                # scheme back into plain http://.
                registry = re.sub(r"^npm://", "http://", origvalue.split(";")[0])
            return origvalue, None, 0, True

        bb.utils.edit_metadata(lines, ["SRC_URI"], _handle_registry)

        return registry

    @staticmethod
    def _ensure_npm():
        """Check if the 'npm' command is available in the recipes.

        Builds nodejs-native into the recipe sysroot if needed and returns
        the native bindir containing npm. Exits (code 14) when nodejs
        cannot be provided at all.
        """
        if not TINFOIL.recipes_parsed:
            TINFOIL.parse_recipes()

        try:
            d = TINFOIL.parse_recipe("nodejs-native")
        except bb.providers.NoProvider:
            bb.error("Nothing provides 'nodejs-native' which is required for the build")
            bb.note("You will likely need to add a layer that provides nodejs")
            sys.exit(14)

        bindir = d.getVar("STAGING_BINDIR_NATIVE")
        npmpath = os.path.join(bindir, "npm")

        if not os.path.exists(npmpath):
            # npm not staged yet: build it into the recipe sysroot.
            TINFOIL.build_targets("nodejs-native", "addto_recipe_sysroot")

            if not os.path.exists(npmpath):
                bb.error("Failed to add 'npm' to sysroot")
                sys.exit(14)

        return bindir

    @staticmethod
    def _npm_global_configs(dev):
        """Get the npm global configuration.

        Returns a list of (option, value) pairs; *dev* selects whether
        development dependencies are included.
        """
        configs = []

        if dev:
            configs.append(("also", "development"))
        else:
            configs.append(("only", "production"))

        # Keep npm from rewriting package.json / creating lock files.
        configs.append(("save", "false"))
        configs.append(("package-lock", "false"))
        configs.append(("shrinkwrap", "false"))
        return configs

    def _run_npm_install(self, d, srctree, registry, dev):
        """Run the 'npm install' command without building the addons"""
        configs = self._npm_global_configs(dev)
        # ignore-scripts prevents any addon build from running here.
        configs.append(("ignore-scripts", "true"))

        if registry:
            configs.append(("registry", registry))

        # Start from a clean slate so the install reflects package.json only.
        bb.utils.remove(os.path.join(srctree, "node_modules"), recurse=True)

        env = NpmEnvironment(d, configs=configs)
        env.run("npm install", workdir=srctree)

    def _generate_shrinkwrap(self, d, srctree, dev):
        """Check and generate the 'npm-shrinkwrap.json' file if needed.

        Returns the path of the generated shrinkwrap file.
        """
        configs = self._npm_global_configs(dev)

        env = NpmEnvironment(d, configs=configs)
        env.run("npm shrinkwrap", workdir=srctree)

        return os.path.join(srctree, "npm-shrinkwrap.json")

    def _handle_licenses(self, srctree, shrinkwrap_file, dev):
        """Return the extra license files and the list of packages.

        Returns (licfiles, packages) where *licfiles* is a list of
        srctree-relative fallback license files and *packages* maps
        generated package names to their install directory.
        """
        licfiles = []
        packages = {}

        # Handle the parent package
        packages["${PN}"] = ""

        def _licfiles_append_fallback_readme_files(destdir):
            """Append README files as fallback to license files if a license
            file is missing."""

            fallback = True
            readmes = []
            basedir = os.path.join(srctree, destdir)
            for fn in os.listdir(basedir):
                upper = fn.upper()
                if upper.startswith("README"):
                    fullpath = os.path.join(basedir, fn)
                    readmes.append(fullpath)
                if upper.startswith("COPYING") or "LICENCE" in upper or "LICENSE" in upper:
                    # A real license file exists; no README fallback needed.
                    fallback = False
            if fallback:
                for readme in readmes:
                    licfiles.append(os.path.relpath(readme, srctree))

        # Handle the dependencies
        def _handle_dependency(name, params, destdir):
            # Derive a per-dependency package name suffix from the
            # node_modules nesting of its install directory.
            deptree = destdir.split('node_modules/')
            suffix = "-".join([npm_package(dep) for dep in deptree])
            packages["${PN}" + suffix] = destdir
            _licfiles_append_fallback_readme_files(destdir)

        with open(shrinkwrap_file, "r") as f:
            shrinkwrap = json.load(f)

        foreach_dependencies(shrinkwrap, _handle_dependency, dev)

        return licfiles, packages

    # Handle the peer dependencies
    def _handle_peer_dependency(self, shrinkwrap_file):
        """Check if package has peer dependencies and show warning if it is the case"""
        with open(shrinkwrap_file, "r") as f:
            shrinkwrap = json.load(f)

        # The "" key of "packages" describes the root package itself.
        packages = shrinkwrap.get("packages", {})
        peer_deps = packages.get("", {}).get("peerDependencies", {})

        for peer_dep in peer_deps:
            peer_dep_yocto_name = npm_package(peer_dep)
            # NOTE(review): "dependencie" is a typo in this user-facing
            # warning ("dependency"); left as-is here since a doc pass must
            # not alter runtime strings.
            bb.warn(peer_dep + " is a peer dependencie of the actual package. " +
                    "Please add this peer dependencie to the RDEPENDS variable as %s and generate its recipe with devtool"
                    % peer_dep_yocto_name)

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        """Handle the npm recipe creation.

        Returns True when the tree was recognized as an npm package and the
        recipe values were populated, False otherwise.
        """

        if "buildsystem" in handled:
            return False

        files = RecipeHandler.checkfiles(srctree, ["package.json"])

        if not files:
            return False

        with open(files[0], "r") as f:
            data = json.load(f)

        # name and version are mandatory for a usable recipe.
        if "name" not in data or "version" not in data:
            return False

        extravalues["PN"] = npm_package(data["name"])
        extravalues["PV"] = data["version"]

        if "description" in data:
            extravalues["SUMMARY"] = data["description"]

        if "homepage" in data:
            extravalues["HOMEPAGE"] = data["homepage"]

        dev = bb.utils.to_boolean(str(extravalues.get("NPM_INSTALL_DEV", "0")), False)
        registry = self._get_registry(lines_before)

        bb.note("Checking if npm is available ...")
        # The native npm is used here (and not the host one) to ensure that the
        # npm version is high enough to ensure an efficient dependency tree
        # resolution and avoid issue with the shrinkwrap file format.
        # Moreover the native npm is mandatory for the build.
        bindir = self._ensure_npm()

        d = bb.data.createCopy(TINFOIL.config_data)
        d.prependVar("PATH", bindir + ":")
        d.setVar("S", srctree)

        bb.note("Generating shrinkwrap file ...")
        # To generate the shrinkwrap file the dependencies have to be installed
        # first. During the generation process some files may be updated /
        # deleted. By default devtool tracks the diffs in the srctree and raises
        # errors when finishing the recipe if some diffs are found.
        git_exclude_file = os.path.join(srctree, ".git", "info", "exclude")
        if os.path.exists(git_exclude_file):
            with open(git_exclude_file, "r+") as f:
                lines = f.readlines()
                for line in ["/node_modules/", "/npm-shrinkwrap.json"]:
                    if line not in lines:
                        f.write(line + "\n")

        # Preserve any existing package-lock.json across the install.
        lock_file = os.path.join(srctree, "package-lock.json")
        lock_copy = lock_file + ".copy"
        if os.path.exists(lock_file):
            bb.utils.copyfile(lock_file, lock_copy)

        self._run_npm_install(d, srctree, registry, dev)
        shrinkwrap_file = self._generate_shrinkwrap(d, srctree, dev)

        with open(shrinkwrap_file, "r") as f:
            shrinkwrap = json.load(f)

        if os.path.exists(lock_copy):
            bb.utils.movefile(lock_copy, lock_file)

        # Add the shrinkwrap file as 'extrafiles'
        shrinkwrap_copy = shrinkwrap_file + ".copy"
        bb.utils.copyfile(shrinkwrap_file, shrinkwrap_copy)
        extravalues.setdefault("extrafiles", {})
        extravalues["extrafiles"]["npm-shrinkwrap.json"] = shrinkwrap_copy

        # url_local is used for the fetch below; url_recipe is what goes
        # into the generated recipe's SRC_URI.
        url_local = "npmsw://%s" % shrinkwrap_file
        url_recipe= "npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json"

        if dev:
            url_local += ";dev=1"
            url_recipe += ";dev=1"

        # Add the npmsw url in the SRC_URI of the generated recipe
        def _handle_srcuri(varname, origvalue, op, newlines):
            """Update the version value and add the 'npmsw://' url"""
            value = origvalue.replace("version=" + data["version"], "version=${PV}")
            value = value.replace("version=latest", "version=${PV}")
            values = [line.strip() for line in value.strip('\n').splitlines()]
            # Only add the shrinkwrap url when the package actually has
            # dependencies.
            if "dependencies" in shrinkwrap.get("packages", {}).get("", {}):
                values.append(url_recipe)
            return values, None, 4, False

        (_, newlines) = bb.utils.edit_metadata(lines_before, ["SRC_URI"], _handle_srcuri)
        lines_before[:] = [line.rstrip('\n') for line in newlines]

        # In order to generate correct licence checksums in the recipe the
        # dependencies have to be fetched again using the npmsw url
        bb.note("Fetching npm dependencies ...")
        bb.utils.remove(os.path.join(srctree, "node_modules"), recurse=True)
        fetcher = bb.fetch2.Fetch([url_local], d)
        fetcher.download()
        fetcher.unpack(srctree)

        bb.note("Handling licences ...")
        (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev)

        def _guess_odd_license(licfiles):
            # Match each license file's md5 against recipetool's known
            # license checksums; unknown files are reported as 'Unknown'
            # with a hint for extending licenses.csv.
            import bb

            md5sums = get_license_md5sums(d, linenumbers=True)

            chksums = []
            licenses = []
            for licfile in licfiles:
                f = os.path.join(srctree, licfile)
                md5value = bb.utils.md5_file(f)
                # NOTE(review): 'license' shadows the stdlib builtin module
                # name; harmless inside this local scope.
                (license, beginline, endline, md5) = md5sums.get(md5value,
                    (None, "", "", ""))
                if not license:
                    license = "Unknown"
                    logger.info("Please add the following line for '%s' to a "
                        "'lib/recipetool/licenses.csv' and replace `Unknown`, "
                        "`X`, `Y` and `MD5` with the license, begin line, "
                        "end line and partial MD5 checksum:\n" \
                        "%s,Unknown,X,Y,MD5" % (licfile, md5value))
                chksums.append("file://%s%s%s;md5=%s" % (licfile,
                    ";beginline=%s" % (beginline) if beginline else "",
                    ";endline=%s" % (endline) if endline else "",
                    md5 if md5 else md5value))
                licenses.append((license, licfile, md5value))
            return (licenses, chksums)

        (licenses, extravalues["LIC_FILES_CHKSUM"]) = _guess_odd_license(licfiles)
        split_pkg_licenses([*licenses, *guess_license(srctree, d)], packages, lines_after)

        classes.append("npm")
        handled.append("buildsystem")

        # Check if package has peer dependencies and inform the user
        self._handle_peer_dependency(shrinkwrap_file)

        return True
|
||||
|
||||
def register_recipe_handlers(handlers):
    """Register the npm handler with recipetool (priority 60)."""
    npm_handler = NpmRecipeHandler()
    handlers.append((npm_handler, 60))
|
||||
@@ -0,0 +1,44 @@
|
||||
# Recipe creation tool - edit plugin
|
||||
#
|
||||
# This sub-command edits the recipe and appends for the specified target
|
||||
#
|
||||
# Example: recipetool edit busybox
|
||||
#
|
||||
# Copyright (C) 2018 Mentor Graphics Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import argparse
|
||||
import errno
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import scriptutils
|
||||
|
||||
|
||||
logger = logging.getLogger('recipetool')
|
||||
tinfoil = None
|
||||
|
||||
|
||||
def tinfoil_init(instance):
    """Store the tinfoil instance handed over by recipetool so the 'edit'
    subcommand can resolve recipe files and their bbappends."""
    global tinfoil
    tinfoil = instance
|
||||
|
||||
|
||||
def edit(args):
    """Open the target's recipe and all of its bbappends in the user's
    editor; returns the editor's exit status."""
    import oe.recipeutils

    recipe = tinfoil.get_recipe_file(args.target)
    bbappends = tinfoil.get_file_appends(recipe)

    return scriptutils.run_editor([recipe] + list(bbappends), logger)
|
||||
|
||||
|
||||
def register_commands(subparsers):
    """Register the 'edit' subcommand with recipetool's argument parser."""
    parser = subparsers.add_parser('edit',
                                   help='Edit the recipe and appends for the specified target. This obeys $VISUAL if set, otherwise $EDITOR, otherwise vi.')
    parser.add_argument('target', help='Target recipe/provide to edit')
    # parserecipes=True: recipe parsing is required to resolve the target.
    parser.set_defaults(func=edit, parserecipes=True)
|
||||
@@ -0,0 +1,37 @@
|
||||
0636e73ff0215e8d672dc4c32c317bb3,GPL-2.0-only
|
||||
12f884d2ae1ff87c09e5b7ccc2c4ca7e,GPL-2.0-only
|
||||
18810669f13b87348459e611d31ab760,GPL-2.0-only
|
||||
252890d9eee26aab7b432e8b8a616475,LGPL-2.0-only
|
||||
2d5025d4aa3495befef8f17206a5b0a1,LGPL-2.1-only
|
||||
3214f080875748938ba060314b4f727d,LGPL-2.0-only
|
||||
385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0-only
|
||||
393a5ca445f6965873eca0259a17f833,GPL-2.0-only
|
||||
3b83ef96387f14655fc854ddc3c6bd57,Apache-2.0
|
||||
3bf50002aefd002f49e7bb854063f7e7,LGPL-2.0-only
|
||||
4325afd396febcb659c36b49533135d4,GPL-2.0-only
|
||||
4fbd65380cdd255951079008b364516c,LGPL-2.1-only
|
||||
54c7042be62e169199200bc6477f04d1,BSD-3-Clause
|
||||
55ca817ccb7d5b5b66355690e9abc605,LGPL-2.0-only
|
||||
59530bdf33659b29e73d4adb9f9f6552,GPL-2.0-only
|
||||
5f30f0716dfdd0d91eb439ebec522ec2,LGPL-2.0-only
|
||||
6a6a8e020838b23406c81b19c1d46df6,LGPL-3.0-only
|
||||
751419260aa954499f7abaabaa882bbe,GPL-2.0-only
|
||||
7fbc338309ac38fefcd64b04bb903e34,LGPL-2.1-only
|
||||
8ca43cbc842c2336e835926c2166c28b,GPL-2.0-only
|
||||
94d55d512a9ba36caa9b7df079bae19f,GPL-2.0-only
|
||||
9ac2e7cff1ddaf48b6eab6028f23ef88,GPL-2.0-only
|
||||
9f604d8a4f8e74f4f5140845a21b6674,LGPL-2.0-only
|
||||
a6f89e2100d9b6cdffcea4f398e37343,LGPL-2.1-only
|
||||
b234ee4d69f5fce4486a80fdaf4a4263,GPL-2.0-only
|
||||
bbb461211a33b134d42ed5ee802b37ff,LGPL-2.1-only
|
||||
bfe1f75d606912a4111c90743d6c7325,MPL-1.1-only
|
||||
c93c0550bd3173f4504b2cbd8991e50b,GPL-2.0-only
|
||||
d32239bcb673463ab874e80d47fae504,GPL-3.0-only
|
||||
d7810fab7487fb0aad327b76f1be7cd7,GPL-2.0-only
|
||||
d8045f3b8f929c1cb29a1e3fd737b499,LGPL-2.1-only
|
||||
db979804f025cf55aabec7129cb671ed,LGPL-2.0-only
|
||||
eb723b61539feef013de476e68b5c50a,GPL-2.0-only
|
||||
ebb5c50ab7cab4baeffba14977030c07,GPL-2.0-only
|
||||
f27defe1e96c2e1ecd4e0c9be8967949,GPL-3.0-only
|
||||
fad9b3332be894bab9bc501572864b29,LGPL-2.1-only
|
||||
fbc093901857fcd118f065f900982c24,LGPL-2.1-only
|
||||
|
@@ -0,0 +1,79 @@
|
||||
# Recipe creation tool - newappend plugin
|
||||
#
|
||||
# This sub-command creates a bbappend for the specified target and prints the
|
||||
# path to the bbappend.
|
||||
#
|
||||
# Example: recipetool newappend meta-mylayer busybox
|
||||
#
|
||||
# Copyright (C) 2015 Christopher Larson <kergoth@gmail.com>
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import argparse
|
||||
import errno
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import scriptutils
|
||||
|
||||
|
||||
logger = logging.getLogger('recipetool')
|
||||
tinfoil = None
|
||||
|
||||
|
||||
def tinfoil_init(instance):
    """Store the tinfoil instance handed over by recipetool so the
    'newappend' subcommand can resolve recipe files and configuration."""
    global tinfoil
    tinfoil = instance
|
||||
|
||||
|
||||
def layer(layerpath):
    """argparse type callback: accept *layerpath* only if it looks like a
    layer directory (contains conf/layer.conf); raise otherwise."""
    conf = os.path.join(layerpath, 'conf', 'layer.conf')
    if not os.path.exists(conf):
        raise argparse.ArgumentTypeError('{0!r} must be a path to a valid layer'.format(layerpath))
    return layerpath
|
||||
|
||||
|
||||
def newappend(args):
    """Create an (empty) bbappend for args.target inside args.destlayer and
    print its path (or open it in an editor with --edit).

    Returns 1 on error, the editor exit code with --edit, otherwise None.
    """
    import oe.recipeutils

    recipe_path = tinfoil.get_recipe_file(args.target)

    # Work on a copy of the config data with FILE set so that
    # get_bbappend_path() can compute the layer-relative location.
    rd = tinfoil.config_data.createCopy()
    rd.setVar('FILE', recipe_path)
    append_path, path_ok = oe.recipeutils.get_bbappend_path(rd, args.destlayer, args.wildcard_version)
    if not append_path:
        logger.error('Unable to determine layer directory containing %s', recipe_path)
        return 1

    if not path_ok:
        # Non-fatal: the append is still created, but BBFILES in the
        # destination layer will not pick it up until fixed.
        logger.warning('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.', os.path.join(args.destlayer, 'conf', 'layer.conf'), os.path.dirname(append_path))

    layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()]
    if not os.path.abspath(args.destlayer) in layerdirs:
        logger.warning('Specified layer is not currently enabled in bblayers.conf, you will need to add it before this bbappend will be active')

    if not os.path.exists(append_path):
        # NOTE(review): 'bb' is not imported at the top of this file as shown;
        # presumably it is injected by the recipetool plugin loader — confirm.
        bb.utils.mkdirhier(os.path.dirname(append_path))

    try:
        # touch: create the (empty) bbappend if it does not exist yet.
        open(append_path, 'a').close()
    except (OSError, IOError) as exc:
        logger.critical(str(exc))
        return 1

    if args.edit:
        return scriptutils.run_editor([append_path, recipe_path], logger)
    else:
        print(append_path)
|
||||
|
||||
|
||||
def register_commands(subparsers):
    """Register the 'newappend' subcommand with recipetool's parser."""
    newappend_parser = subparsers.add_parser('newappend',
                                             help='Create a bbappend for the specified target in the specified layer')
    newappend_parser.add_argument('-e', '--edit', help='Edit the new append. This obeys $VISUAL if set, otherwise $EDITOR, otherwise vi.', action='store_true')
    newappend_parser.add_argument('-w', '--wildcard-version', help='Use wildcard to make the bbappend apply to any recipe version', action='store_true')
    # 'destlayer' is validated by the layer() type callback above.
    newappend_parser.add_argument('destlayer', help='Base directory of the destination layer to write the bbappend to', type=layer)
    newappend_parser.add_argument('target', help='Target recipe/provide to append')
    newappend_parser.set_defaults(func=newappend, parserecipes=True)
|
||||
@@ -0,0 +1,65 @@
|
||||
# Recipe creation tool - set variable plugin
|
||||
#
|
||||
# Copyright (C) 2015 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
import glob
|
||||
import fnmatch
|
||||
import re
|
||||
import logging
|
||||
import scriptutils
|
||||
|
||||
logger = logging.getLogger('recipetool')
|
||||
|
||||
tinfoil = None
|
||||
plugins = None
|
||||
|
||||
def tinfoil_init(instance):
    """Store the tinfoil instance handed over by recipetool so setvar()
    can parse recipe files."""
    global tinfoil
    tinfoil = instance
|
||||
|
||||
def setvar(args):
    """Set, update or delete (-D) a variable in a recipe file.

    With --patch the change is printed as a patch instead of being applied.
    Returns 0 on success, 1 on argument or parse errors.
    """
    import oe.recipeutils

    # Work out the new value, validating the mutually exclusive options.
    if args.delete:
        if args.value:
            logger.error('-D/--delete and specifying a value are mutually exclusive')
            return 1
        new_value = None
    elif args.value is None:
        logger.error('You must specify a value if not using -D/--delete')
        return 1
    else:
        new_value = args.value

    varvalues = {args.varname: new_value}

    if args.recipe_only:
        # Touch only the named recipe file itself.
        patch_list = [oe.recipeutils.patch_recipe_file(args.recipefile, varvalues, patch=args.patch)]
    else:
        # Parse the recipe so includes/appends can be patched as well.
        recipe_data = tinfoil.parse_recipe_file(args.recipefile, False)
        if not recipe_data:
            return 1
        patch_list = oe.recipeutils.patch_recipe(recipe_data, args.recipefile, varvalues, patch=args.patch)

    if args.patch:
        for patch in patch_list:
            sys.stdout.writelines(patch)

    return 0
|
||||
|
||||
|
||||
def register_commands(subparsers):
    """Register the 'setvar' subcommand with recipetool's parser."""
    parser = subparsers.add_parser('setvar',
                                   help='Set a variable within a recipe',
                                   description='Adds/updates the value a variable is set to in a recipe')
    parser.add_argument('recipefile', help='Recipe file to update')
    parser.add_argument('varname', help='Variable name to set')
    # 'value' is optional so that -D/--delete can be used without one.
    parser.add_argument('value', nargs='?', help='New value to set the variable to')
    parser.add_argument('--recipe-only', '-r', help='Do not set variable in any include file if present', action='store_true')
    parser.add_argument('--patch', '-p', help='Create a patch to make the change instead of modifying the recipe', action='store_true')
    parser.add_argument('--delete', '-D', help='Delete the specified value instead of setting it', action='store_true')
    parser.set_defaults(func=setvar)
|
||||
@@ -0,0 +1,107 @@
|
||||
# resulttool - Show logs
|
||||
#
|
||||
# Copyright (c) 2019 Garmin International
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
import os
|
||||
import resulttool.resultutils as resultutils
|
||||
|
||||
def show_ptest(result, ptest, logger):
    """Print the stored log of one ptest section.

    Returns 0 when the log was found and printed, 1 otherwise.
    """
    logdata = resultutils.ptestresult_get_log(result, ptest)
    if logdata is None:
        print("ptest '%s' log not found" % ptest)
        return 1
    print(logdata)
    return 0
|
||||
|
||||
def show_reproducible(result, reproducible, logger):
    """Print the diffoscope text of one reproducible-build test.

    Returns 0 when found and printed, 1 when the entry is missing.
    """
    try:
        text = result['reproducible'][reproducible]['diffoscope.text']
    except KeyError:
        print("reproducible '%s' not found" % reproducible)
        return 1
    print(text)
    return 0
|
||||
|
||||
def log(args, logger):
    """Implement the 'resulttool log' subcommand.

    Iterates every test run in args.source and, depending on the flags,
    lists ptest names, dumps per-ptest logs to a directory, prints raw
    ptest/reproducible logs, or shows individual ptest/reproducible logs.
    Returns 1 on the first missing log, otherwise falls through (None).
    """
    results = resultutils.load_resultsdata(args.source)

    for _, run_name, _, r in resultutils.test_run_results(results):
        if args.list_ptest:
            print('\n'.join(sorted(r['ptestresult.sections'].keys())))

        if args.dump_ptest:
            # ltp sections are handled alongside ptest sections; non-ptest
            # sections get an extra subdirectory so names cannot collide.
            for sectname in ['ptestresult.sections', 'ltpposixresult.sections', 'ltpresult.sections']:
                if sectname in r:
                    for name, ptest in r[sectname].items():
                        logdata = resultutils.generic_get_log(sectname, r, name)
                        if logdata is not None:
                            dest_dir = args.dump_ptest
                            if args.prepend_run:
                                # Separate runs so identical test names from
                                # different runs do not overwrite each other.
                                dest_dir = os.path.join(dest_dir, run_name)
                            if not sectname.startswith("ptest"):
                                dest_dir = os.path.join(dest_dir, sectname.split(".")[0])

                            os.makedirs(dest_dir, exist_ok=True)
                            dest = os.path.join(dest_dir, '%s.log' % name)
                            if os.path.exists(dest):
                                print("Overlapping ptest logs found, skipping %s. The '--prepend-run' option would avoid this" % name)
                                continue
                            print(dest)
                            with open(dest, 'w') as f:
                                f.write(logdata)

        if args.raw_ptest:
            found = False
            for sectname in ['ptestresult.rawlogs', 'ltpposixresult.rawlogs', 'ltpresult.rawlogs']:
                rawlog = resultutils.generic_get_rawlogs(sectname, r)
                if rawlog is not None:
                    print(rawlog)
                    found = True
            if not found:
                print('Raw ptest logs not found')
                return 1

        if args.raw_reproducible:
            if 'reproducible.rawlogs' in r:
                print(r['reproducible.rawlogs']['log'])
            else:
                print('Raw reproducible logs not found')
                return 1

        # NOTE(review): show_ptest/show_reproducible return 0 on success, so
        # `not show_ptest(...)` is true after a SUCCESSFUL display and this
        # returns 1 — looks inverted; confirm intended exit-code semantics
        # before changing.
        for ptest in args.ptest:
            if not show_ptest(r, ptest, logger):
                return 1

        for reproducible in args.reproducible:
            if not show_reproducible(r, reproducible, logger):
                return 1
|
||||
|
||||
def register_commands(subparsers):
    """Register subcommands from this plugin"""
    log_parser = subparsers.add_parser('log', help='show logs',
                                       description='show the logs from test results',
                                       group='analysis')
    log_parser.add_argument('source', help='the results file/directory/URL to import')
    log_parser.add_argument('--list-ptest', action='store_true', help='list the ptest test names')
    log_parser.add_argument('--ptest', action='append', default=[], help='show logs for a ptest')
    log_parser.add_argument('--dump-ptest', metavar='DIR',
                            help='Dump all ptest log files to the specified directory.')
    log_parser.add_argument('--reproducible', action='append', default=[],
                            help='show logs for a reproducible test')
    log_parser.add_argument('--prepend-run', action='store_true',
                            help='''Dump ptest results to a subdirectory named after the test run when using --dump-ptest.
Required if more than one test run is present in the result file''')
    # Deprecated spelling kept for compatibility; feeds the same dest.
    log_parser.add_argument('--raw', action='store_true', dest='raw_ptest',
                            help='show raw (ptest) logs. Deprecated. Alias for "--raw-ptest"')
    log_parser.add_argument('--raw-ptest', action='store_true', help='show raw ptest log')
    log_parser.add_argument('--raw-reproducible', action='store_true',
                            help='show raw reproducible build logs')
    log_parser.set_defaults(func=log)
|
||||
|
||||
+235
@@ -0,0 +1,235 @@
|
||||
# test case management tool - manual execution from testopia test cases
|
||||
#
|
||||
# Copyright (c) 2018, Intel Corporation.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import datetime
|
||||
import re
|
||||
import copy
|
||||
from oeqa.core.runner import OETestResultJSONHelper
|
||||
|
||||
|
||||
def load_json_file(f):
    """Read the file at path *f* and return the decoded JSON object."""
    with open(f, "r") as handle:
        contents = handle.read()
    return json.loads(contents)
|
||||
|
||||
def write_json_file(f, json_data):
    """Serialise *json_data* to path *f* as pretty-printed, key-sorted JSON,
    creating any missing parent directories first.
    """
    parent = os.path.dirname(f)
    # os.path.dirname() returns '' for a bare filename and os.makedirs('')
    # raises FileNotFoundError, so only create directories when one exists.
    if parent:
        os.makedirs(parent, exist_ok=True)
    with open(f, 'w') as filedata:
        filedata.write(json.dumps(json_data, sort_keys=True, indent=4))
|
||||
|
||||
class ManualTestRunner(object):
    """Interactive driver for executing manual test cases.

    Loads manual test case JSON files, prompts the operator for the build
    configuration and per-step verdicts, and returns data suitable for
    OETestResultJSONHelper.dump_testresult_file().  Also provides helpers
    to create configuration-option and testcase-selection files.
    """

    def _get_test_module(self, case_file):
        # The test module name is the case file's basename up to the first dot.
        return os.path.basename(case_file).split('.')[0]

    def _get_input(self, config):
        """Prompt until the user enters a value made only of lowercase
        alphanumerics, hyphens and dots; return that value."""
        while True:
            output = input('{} = '.format(config))
            if re.match('^[a-z0-9-.]+$', output):
                break
            print('Only lowercase alphanumeric, hyphen and dot are allowed. Please try again')
        return output

    def _get_available_config_options(self, config_options, test_module, target_config):
        # Return the option dict for target_config under test_module,
        # or None when the module or config is unknown.
        avail_config_options = None
        if test_module in config_options:
            avail_config_options = config_options[test_module].get(target_config)
        return avail_config_options

    def _choose_config_option(self, options):
        """Prompt until the user enters a key present in *options* (keys are
        stringified integer indexes); return the selected value."""
        while True:
            output = input('{} = '.format('Option index number'))
            if output in options:
                break
            print('Only integer index inputs from above available configuration options are allowed. Please try again.')
        return options[output]

    def _get_config(self, config_options, test_module):
        """Build the configuration dict for a manual run.

        Pre-fills LAYERS/STARTTIME/TEST_TYPE/TEST_MODULE, then prompts for
        every remaining key required by resultutils.store_map['manual'],
        offering predefined options from *config_options* when available.
        """
        # Imported lazily: these pull in bitbake/oeqa machinery that is only
        # needed when actually running tests.
        from oeqa.utils.metadata import get_layers
        from oeqa.utils.commands import get_bb_var
        from resulttool.resultutils import store_map

        layers = get_layers(get_bb_var('BBLAYERS'))
        configurations = {}
        configurations['LAYERS'] = layers
        configurations['STARTTIME'] = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
        configurations['TEST_TYPE'] = 'manual'
        configurations['TEST_MODULE'] = test_module

        # Keys required by the manual store map that are not filled in yet.
        extra_config = set(store_map['manual']) - set(configurations)
        for config in sorted(extra_config):
            avail_config_options = self._get_available_config_options(config_options, test_module, config)
            if avail_config_options:
                print('---------------------------------------------')
                print('These are available configuration #%s options:' % config)
                print('---------------------------------------------')
                # Options are keyed by stringified integers; sort numerically.
                for option, _ in sorted(avail_config_options.items(), key=lambda x: int(x[0])):
                    print('%s: %s' % (option, avail_config_options[option]))
                print('Please select configuration option, enter the integer index number.')
                value_conf = self._choose_config_option(avail_config_options)
                print('---------------------------------------------\n')
            else:
                print('---------------------------------------------')
                print('This is configuration #%s. Please provide configuration value(use "None" if not applicable).' % config)
                print('---------------------------------------------')
                value_conf = self._get_input('Configuration Value')
                print('---------------------------------------------\n')
            configurations[config] = value_conf
        return configurations

    def _execute_test_steps(self, case):
        """Display every step of *case*, then prompt for a single verdict for
        the whole case.  Returns {alias: {'status': ..., ['log': ...]}}."""
        test_result = {}
        print('------------------------------------------------------------------------')
        print('Executing test case: %s' % case['test']['@alias'])
        print('------------------------------------------------------------------------')
        print('You have total %s test steps to be executed.' % len(case['test']['execution']))
        print('------------------------------------------------------------------------\n')
        # Steps are keyed by stringified integers; show them in order.
        for step, _ in sorted(case['test']['execution'].items(), key=lambda x: int(x[0])):
            print('Step %s: %s' % (step, case['test']['execution'][step]['action']))
            expected_output = case['test']['execution'][step]['expected_results']
            if expected_output:
                print('Expected output: %s' % expected_output)
        while True:
            done = input('\nPlease provide test results: (P)assed/(F)ailed/(B)locked/(S)kipped? \n').lower()
            result_types = {'p':'PASSED',
                            'f':'FAILED',
                            'b':'BLOCKED',
                            's':'SKIPPED'}
            if done in result_types:
                for r in result_types:
                    if done == r:
                        res = result_types[r]
                        if res == 'FAILED':
                            # Failures get a free-form log note attached.
                            log_input = input('\nPlease enter the error and the description of the log: (Ex:log:211 Error Bitbake)\n')
                            test_result.update({case['test']['@alias']: {'status': '%s' % res, 'log': '%s' % log_input}})
                        else:
                            test_result.update({case['test']['@alias']: {'status': '%s' % res}})
                break
            print('Invalid input!')
        return test_result

    def _get_write_dir(self):
        # Results are written under the build directory's manual log area.
        return os.environ['BUILDDIR'] + '/tmp/log/manual/'

    def run_test(self, case_file, config_options_file, testcase_config_file):
        """Run (optionally a selected subset of) the cases in *case_file*.

        Returns (configurations, result_id, write_dir, test_results) ready
        for dumping with OETestResultJSONHelper.
        """
        test_module = self._get_test_module(case_file)
        cases = load_json_file(case_file)
        config_options = {}
        if config_options_file:
            config_options = load_json_file(config_options_file)
        configurations = self._get_config(config_options, test_module)
        result_id = 'manual_%s_%s' % (test_module, configurations['STARTTIME'])
        test_results = {}
        if testcase_config_file:
            test_case_config = load_json_file(testcase_config_file)
            test_case_to_execute = test_case_config['testcases']
            # Iterate a copy so removing from `cases` while filtering is safe.
            for case in copy.deepcopy(cases):
                if case['test']['@alias'] not in test_case_to_execute:
                    cases.remove(case)

        print('\nTotal number of test cases in this test suite: %s\n' % len(cases))
        for c in cases:
            test_result = self._execute_test_steps(c)
            test_results.update(test_result)
        return configurations, result_id, self._get_write_dir(), test_results

    def _get_true_false_input(self, input_message):
        """Prompt with *input_message* until the user answers yes/no; return
        True for Y/YES, False for N/NO (case-insensitive)."""
        yes_list = ['Y', 'YES']
        no_list = ['N', 'NO']
        while True:
            more_config_option = input(input_message).upper()
            if more_config_option in yes_list or more_config_option in no_list:
                break
            print('Invalid input!')
        if more_config_option in no_list:
            return False
        return True

    def make_config_option_file(self, logger, case_file, config_options_file):
        """Interactively build (or extend) a configuration-options file for
        the test module derived from *case_file* and write it as JSON."""
        config_options = {}
        if config_options_file:
            config_options = load_json_file(config_options_file)
        new_test_module = self._get_test_module(case_file)
        print('Creating configuration options file for test module: %s' % new_test_module)
        new_config_options = {}

        while True:
            config_name = input('\nPlease provide test configuration to create:\n').upper()
            new_config_options[config_name] = {}
            while True:
                config_value = self._get_input('Configuration possible option value')
                # Option keys are 1-based integer indexes.
                config_option_index = len(new_config_options[config_name]) + 1
                new_config_options[config_name][config_option_index] = config_value
                more_config_option = self._get_true_false_input('\nIs there more configuration option input: (Y)es/(N)o\n')
                if not more_config_option:
                    break
            more_config = self._get_true_false_input('\nIs there more configuration to create: (Y)es/(N)o\n')
            if not more_config:
                break

        if new_config_options:
            config_options[new_test_module] = new_config_options
        if not config_options_file:
            config_options_file = os.path.join(self._get_write_dir(), 'manual_config_options.json')
        write_json_file(config_options_file, config_options)
        logger.info('Configuration option file created at %s' % config_options_file)

    def make_testcase_config_file(self, logger, case_file, testcase_config_file):
        """Interactively select cases from *case_file* and write the chosen
        aliases to a testcase-configuration JSON file."""
        if testcase_config_file:
            if os.path.exists(testcase_config_file):
                # Refuse to overwrite an existing selection file.
                print('\nTest configuration file with name %s already exists. Please provide a unique file name' % (testcase_config_file))
                return 0

        if not testcase_config_file:
            testcase_config_file = os.path.join(self._get_write_dir(), "testconfig_new.json")

        testcase_config = {}
        cases = load_json_file(case_file)
        new_test_module = self._get_test_module(case_file)
        new_testcase_config = {}
        new_testcase_config['testcases'] = []

        print('\nAdd testcases for this configuration file:')
        for case in cases:
            print('\n' + case['test']['@alias'])
            add_tc_config = self._get_true_false_input('\nDo you want to add this test case to test configuration : (Y)es/(N)o\n')
            if add_tc_config:
                new_testcase_config['testcases'].append(case['test']['@alias'])
        write_json_file(testcase_config_file, new_testcase_config)
        logger.info('Testcase Configuration file created at %s' % testcase_config_file)
|
||||
|
||||
def manualexecution(args, logger):
    """Entry point for the 'manualexecution' subcommand.

    Either generates one of the helper configuration files, or runs the
    manual test cases and dumps the collected results.  Returns 0.
    """
    runner = ManualTestRunner()
    # File-generation modes short-circuit before any test execution.
    if args.make_config_options_file:
        runner.make_config_option_file(logger, args.file, args.config_options_file)
        return 0
    if args.make_testcase_config_file:
        runner.make_testcase_config_file(logger, args.file, args.testcase_config_file)
        return 0
    configurations, result_id, write_dir, test_results = runner.run_test(args.file, args.config_options_file, args.testcase_config_file)
    helper = OETestResultJSONHelper()
    helper.dump_testresult_file(write_dir, configurations, result_id, test_results)
    return 0
|
||||
|
||||
def register_commands(subparsers):
    """Register subcommands from this plugin"""
    parser = subparsers.add_parser('manualexecution',
                                   help='helper script for results populating during manual test execution.',
                                   description='helper script for results populating during manual test execution. You can find manual test case JSON file in meta/lib/oeqa/manual/',
                                   group='manualexecution')
    parser.add_argument('file', help='specify path to manual test case JSON file.Note: Please use \"\" to encapsulate the file path.')
    parser.add_argument('-c', '--config-options-file', default='',
                        help='the config options file to import and used as available configuration option selection or make config option file')
    parser.add_argument('-m', '--make-config-options-file', action='store_true',
                        help='make the configuration options file based on provided inputs')
    parser.add_argument('-t', '--testcase-config-file', default='',
                        help='the testcase configuration file to enable user to run a selected set of test case or make a testcase configuration file')
    parser.add_argument('-d', '--make-testcase-config-file', action='store_true',
                        help='make the testcase configuration file to run a set of test cases based on user selection')
    parser.set_defaults(func=manualexecution)
|
||||
@@ -0,0 +1,46 @@
|
||||
# resulttool - merge multiple testresults.json files into a file or directory
|
||||
#
|
||||
# Copyright (c) 2019, Intel Corporation.
|
||||
# Copyright (c) 2019, Linux Foundation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import os
|
||||
import json
|
||||
import resulttool.resultutils as resultutils
|
||||
|
||||
def merge(args, logger):
    """Merge test results from args.base_results into args.target_results.

    A URL or existing-directory target is treated as a result store (store
    map); otherwise the merged data is flattened into a single file at the
    target path.  Returns 0.
    """
    configvars = {}
    if not args.not_add_testseries:
        # By default tag merged results with the testseries configuration.
        configvars = resultutils.extra_configvars.copy()
    if args.executed_by:
        configvars['EXECUTED_BY'] = args.executed_by
    if resultutils.is_url(args.target_results) or os.path.isdir(args.target_results):
        # Store-style target: load it, append the base data, save in place.
        results = resultutils.load_resultsdata(args.target_results, configmap=resultutils.store_map, configvars=configvars)
        resultutils.append_resultsdata(results, args.base_results, configmap=resultutils.store_map, configvars=configvars)
        resultutils.save_resultsdata(results, args.target_results)
    else:
        # File target: flatten the base, fold in the target file if present,
        # then write everything back out as one file.
        results = resultutils.load_resultsdata(args.base_results, configmap=resultutils.flatten_map, configvars=configvars)
        if os.path.exists(args.target_results):
            resultutils.append_resultsdata(results, args.target_results, configmap=resultutils.flatten_map, configvars=configvars)
        resultutils.save_resultsdata(results, os.path.dirname(args.target_results), fn=os.path.basename(args.target_results))

    logger.info('Merged results to %s' % os.path.dirname(args.target_results))

    return 0
|
||||
|
||||
def register_commands(subparsers):
    """Register subcommands from this plugin"""
    merge_parser = subparsers.add_parser('merge', help='merge test result files/directories/URLs',
                                         description='merge the results from multiple files/directories/URLs into the target file or directory',
                                         group='setup')
    merge_parser.add_argument('base_results', help='the results file/directory/URL to import')
    merge_parser.add_argument('target_results',
                              help='the target file or directory to merge the base_results with')
    merge_parser.add_argument('-t', '--not-add-testseries', action='store_true',
                              help='do not add testseries configuration to results')
    merge_parser.add_argument('-x', '--executed-by', default='',
                              help='add executed-by configuration to each result file')
    merge_parser.set_defaults(func=merge)
|
||||
@@ -0,0 +1,387 @@
|
||||
# resulttool - regression analysis
|
||||
#
|
||||
# Copyright (c) 2019, Intel Corporation.
|
||||
# Copyright (c) 2019, Linux Foundation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import resulttool.resultutils as resultutils
|
||||
|
||||
from oeqa.utils.git import GitRepo
|
||||
import oeqa.utils.gitarchive as gitarchive
|
||||
|
||||
# Maps a TEST_TYPE value to the configuration key whose contents must match
# between base and target for two results of that type to be comparable.
METADATA_MATCH_TABLE = {
    "oeselftest": "OESELFTEST_METADATA"
}

# Known oe-selftest invocation profiles taken from the autobuilder's
# config.json.  guess_oeselftest_metadata() uses these to reconstruct
# OESELFTEST_METADATA for old results that were stored without it.
OESELFTEST_METADATA_GUESS_TABLE={
    "trigger-build-posttrigger": {
        "run_all_tests": False,
        "run_tests":["buildoptions.SourceMirroring.test_yocto_source_mirror"],
        "skips": None,
        "machine": None,
        "select_tags":None,
        "exclude_tags": None
    },
    "reproducible": {
        "run_all_tests": False,
        "run_tests":["reproducible"],
        "skips": None,
        "machine": None,
        "select_tags":None,
        "exclude_tags": None
    },
    "arch-qemu-quick": {
        "run_all_tests": True,
        "run_tests":None,
        "skips": None,
        "machine": None,
        "select_tags":["machine"],
        "exclude_tags": None
    },
    "arch-qemu-full-x86-or-x86_64": {
        "run_all_tests": True,
        "run_tests":None,
        "skips": None,
        "machine": None,
        "select_tags":["machine", "toolchain-system"],
        "exclude_tags": None
    },
    "arch-qemu-full-others": {
        "run_all_tests": True,
        "run_tests":None,
        "skips": None,
        "machine": None,
        "select_tags":["machine", "toolchain-user"],
        "exclude_tags": None
    },
    "selftest": {
        "run_all_tests": True,
        "run_tests":None,
        "skips": ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror", "reproducible"],
        "machine": None,
        "select_tags":None,
        "exclude_tags": ["machine", "toolchain-system", "toolchain-user"]
    },
    "bringup": {
        "run_all_tests": True,
        "run_tests":None,
        "skips": ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror"],
        "machine": None,
        "select_tags":None,
        "exclude_tags": ["machine", "toolchain-system", "toolchain-user"]
    }
}
|
||||
|
||||
def test_has_at_least_one_matching_tag(test, tag_list):
|
||||
return "oetags" in test and any(oetag in tag_list for oetag in test["oetags"])
|
||||
|
||||
def all_tests_have_at_least_one_matching_tag(results, tag_list):
    """Return True when every non-ptest result in *results* has at least one
    tag from *tag_list* (ptest entries are exempt)."""
    for test_name, test_result in results.items():
        if test_name.startswith("ptestresult"):
            continue
        if not test_has_at_least_one_matching_tag(test_result, tag_list):
            return False
    return True
|
||||
|
||||
def any_test_have_any_matching_tag(results, tag_list):
    """Return True when at least one result in *results* carries a tag from
    *tag_list*."""
    for test in results.values():
        if test_has_at_least_one_matching_tag(test, tag_list):
            return True
    return False
|
||||
|
||||
def have_skipped_test(result, test_prefix):
    """Return True when every test in *result* whose name starts with
    *test_prefix* has status SKIPPED (vacuously True when none match)."""
    for name in result:
        if name.startswith(test_prefix) and result[name]['status'] != "SKIPPED":
            return False
    return True
|
||||
|
||||
def have_all_tests_skipped(result, test_prefixes_list):
    """Return True when, for every prefix in *test_prefixes_list*, all
    matching tests in *result* are SKIPPED."""
    for prefix in test_prefixes_list:
        if not have_skipped_test(result, prefix):
            return False
    return True
|
||||
|
||||
def guess_oeselftest_metadata(results):
    """
    When an oeselftest test result is lacking OESELFTEST_METADATA, we can try to guess it based on results content.
    Check results for specific values (absence/presence of oetags, number and name of executed tests...),
    and if it matches one of known configuration from autobuilder configuration, apply guessed OSELFTEST_METADATA
    to it to allow proper test filtering.
    This guessing process is tightly coupled to config.json in autobuilder. It should trigger less and less,
    as new tests will have OESELFTEST_METADATA properly appended at test reporting time
    """

    # Branch order matters: the more specific fingerprints are tried first
    # (single-mirror run, reproducible-only run) before the tag-based ones.
    if len(results) == 1 and "buildoptions.SourceMirroring.test_yocto_source_mirror" in results:
        return OESELFTEST_METADATA_GUESS_TABLE['trigger-build-posttrigger']
    elif all(result.startswith("reproducible") for result in results):
        return OESELFTEST_METADATA_GUESS_TABLE['reproducible']
    elif all_tests_have_at_least_one_matching_tag(results, ["machine"]):
        return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-quick']
    elif all_tests_have_at_least_one_matching_tag(results, ["machine", "toolchain-system"]):
        return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-full-x86-or-x86_64']
    elif all_tests_have_at_least_one_matching_tag(results, ["machine", "toolchain-user"]):
        return OESELFTEST_METADATA_GUESS_TABLE['arch-qemu-full-others']
    elif not any_test_have_any_matching_tag(results, ["machine", "toolchain-user", "toolchain-system"]):
        # No arch tags at all: distinguish 'selftest' from 'bringup' by which
        # well-known tests were skipped.
        if have_all_tests_skipped(results, ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror", "reproducible"]):
            return OESELFTEST_METADATA_GUESS_TABLE['selftest']
        elif have_all_tests_skipped(results, ["distrodata.Distrodata.test_checkpkg", "buildoptions.SourceMirroring.test_yocto_source_mirror"]):
            return OESELFTEST_METADATA_GUESS_TABLE['bringup']

    # No known profile matched.
    return None
|
||||
|
||||
|
||||
def metadata_matches(base_configuration, target_configuration):
    """
    For passed base and target, check test type. If test type matches one of
    properties described in METADATA_MATCH_TABLE, compare metadata if it is
    present in base. Return true if metadata matches, or if base lacks some
    data (either TEST_TYPE or the corresponding metadata)
    """
    kind = base_configuration.get('TEST_TYPE')
    if kind not in METADATA_MATCH_TABLE:
        # Unknown or absent test type: nothing to compare against.
        return True
    key = METADATA_MATCH_TABLE[kind]
    return base_configuration.get(key) == target_configuration.get(key)
|
||||
|
||||
|
||||
def machine_matches(base_configuration, target_configuration):
    """Return True when both configurations target the same MACHINE."""
    base_machine = base_configuration.get('MACHINE')
    target_machine = target_configuration.get('MACHINE')
    return base_machine == target_machine
|
||||
|
||||
|
||||
def can_be_compared(logger, base, target):
    """
    Some tests are not relevant to be compared, for example some oeselftest
    run with different tests sets or parameters. Return true if tests can be
    compared
    """
    ret = True
    base_configuration = base['configuration']
    target_configuration = target['configuration']

    # Older test results lack proper OESELFTEST_METADATA: if not present, try to guess it based on tests results.
    # NOTE: this mutates the passed-in configuration dicts in place so the
    # guessed metadata is reused on later comparisons of the same result.
    if base_configuration.get('TEST_TYPE') == 'oeselftest' and 'OESELFTEST_METADATA' not in base_configuration:
        guess = guess_oeselftest_metadata(base['result'])
        if guess is None:
            logger.error(f"ERROR: did not manage to guess oeselftest metadata for {base_configuration['STARTTIME']}")
        else:
            logger.debug(f"Enriching {base_configuration['STARTTIME']} with {guess}")
            base_configuration['OESELFTEST_METADATA'] = guess
    if target_configuration.get('TEST_TYPE') == 'oeselftest' and 'OESELFTEST_METADATA' not in target_configuration:
        guess = guess_oeselftest_metadata(target['result'])
        if guess is None:
            logger.error(f"ERROR: did not manage to guess oeselftest metadata for {target_configuration['STARTTIME']}")
        else:
            logger.debug(f"Enriching {target_configuration['STARTTIME']} with {guess}")
            target_configuration['OESELFTEST_METADATA'] = guess

    # Test runs with LTP results in should only be compared with other runs with LTP tests in them
    if base_configuration.get('TEST_TYPE') == 'runtime' and any(result.startswith("ltpresult") for result in base['result']):
        ret = target_configuration.get('TEST_TYPE') == 'runtime' and any(result.startswith("ltpresult") for result in target['result'])

    # All gates must pass: LTP pairing, per-type metadata, and MACHINE.
    return ret and metadata_matches(base_configuration, target_configuration) \
        and machine_matches(base_configuration, target_configuration)
|
||||
|
||||
|
||||
def compare_result(logger, base_name, target_name, base_result, target_result):
    """Compare two test runs and summarise the differences.

    Returns (diff, message): *diff* maps each test whose status changed to
    {'base': ..., 'target': ...}; it is an empty dict when the runs match
    and None when every change is a newly passing test (an improvement).
    *message* is the human-readable report for the comparison.
    """
    base_tests = base_result.get('result')
    target_tests = target_result.get('result')
    result = {}
    if base_tests and target_tests:
        for name in base_tests:
            status_before = base_tests[name].get('status')
            if not status_before:
                logger.error('Failed to retrieved base test case status: %s' % name)
                continue
            # A test missing from the target yields status None, which is
            # recorded as a difference.
            status_after = target_tests.get(name, {}).get('status')
            if status_before != status_after:
                result[name] = {'base': status_before, 'target': status_after}

    if not result:
        resultstring = "Match: %s\n %s" % (base_name, target_name)
        return result, resultstring

    new_pass_count = sum(1 for entry in result.values()
                         if entry['target'] is not None and entry['target'].startswith("PASS"))
    if new_pass_count < len(result):
        # At least one test has a regression status (FAIL, SKIPPED, absent...).
        resultstring = "Regression: %s\n %s\n" % (base_name, target_name)
        for name in sorted(result):
            if not result[name]['target'] or not result[name]['target'].startswith("PASS"):
                resultstring += ' %s: %s -> %s\n' % (name, result[name]['base'], result[name]['target'])
        if new_pass_count > 0:
            resultstring += f' Additionally, {new_pass_count} previously failing test(s) is/are now passing\n'
        return result, resultstring

    # Every difference is a new pass: report an improvement, no diff data.
    resultstring = "Improvement: %s\n %s\n (+%d test(s) passing)" % (base_name, target_name, new_pass_count)
    return None, resultstring
|
||||
|
||||
def get_results(logger, source):
    """Load results from *source* using the regression configuration map."""
    data = resultutils.load_resultsdata(source, configmap=resultutils.regression_map)
    return data
|
||||
|
||||
def regression(args, logger):
    """Entry point for the 'regression' subcommand: compare the base result
    file/directory/URL against the target and print the analysis.

    Returns the status from regression_common() so the subcommand reports a
    proper exit code, matching regression_git() which returns 0 explicitly.
    (Previously the return value was discarded and None was returned.)
    """
    base_results = get_results(logger, args.base_result)
    target_results = get_results(logger, args.target_result)

    return regression_common(args, logger, base_results, target_results)
|
||||
|
||||
# Some test case naming is poor and contains random strings, particularly lttng/babeltrace.
|
||||
# Truncating the test names works since they contain file and line number identifiers
|
||||
# which allows us to match them without the random components.
|
||||
# Some test case naming is poor and contains random strings, particularly lttng/babeltrace.
# Truncating the test names works since they contain file and line number identifiers
# which allows us to match them without the random components.
def fixup_ptest_names(results, logger):
    """Rename noisy ptest case names in *results* (in place) to their stable
    prefixes so base/target runs can be matched."""

    def truncated_name(name):
        # Return the stable prefix for a known-noisy name, or None to keep it.
        if name.startswith(("ptestresult.lttng-tools.", "ptestresult.babeltrace.", "ptestresult.babeltrace2")) and "_-_" in name:
            return name.split("_-_")[0]
        if name.startswith("ptestresult.curl.") and "__" in name:
            return name.split("__")[0]
        if name.startswith("ptestresult.dbus.") and "__" in name:
            return name.split("__")[0]
        if name.startswith("ptestresult.binutils") and "build-st-" in name:
            return name.split(" ")[0]
        if name.startswith("ptestresult.gcc") and "/tmp/runtest." in name:
            return ".".join(name.split(".")[:2])
        return None

    for suite in results:
        for run in results[suite]:
            testdict = results[suite][run]['result']
            # Snapshot the keys since we rename entries while walking them.
            for original in list(testdict.keys()):
                replacement = truncated_name(original)
                if replacement:
                    testdict[replacement] = testdict.pop(original)
|
||||
|
||||
def regression_common(args, logger, base_results, target_results):
    """Core regression analysis shared by the file and git subcommands.

    Optionally filters both result sets by result id, normalises noisy ptest
    names, then pairs comparable base/target configurations and prints the
    sorted match, regression and not-found reports.  Returns 0.
    """
    if args.base_result_id:
        base_results = resultutils.filter_resultsdata(base_results, args.base_result_id)
    if args.target_result_id:
        target_results = resultutils.filter_resultsdata(target_results, args.target_result_id)

    fixup_ptest_names(base_results, logger)
    fixup_ptest_names(target_results, logger)

    matches = []
    regressions = []
    notfound = []

    for a in base_results:
        if a in target_results:
            base = list(base_results[a].keys())
            target = list(target_results[a].keys())
            # We may have multiple base/targets which are for different configurations. Start by
            # removing any pairs which match
            for c in base.copy():
                for b in target.copy():
                    if not can_be_compared(logger, base_results[a][c], target_results[a][b]):
                        continue
                    res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b])
                    if not res:
                        # Identical pair: record it and take both out of play.
                        matches.append(resstr)
                        base.remove(c)
                        target.remove(b)
                        break
            # Should only now see regressions, we may not be able to match multiple pairs directly
            for c in base:
                for b in target:
                    if not can_be_compared(logger, base_results[a][c], target_results[a][b]):
                        continue
                    res, resstr = compare_result(logger, c, b, base_results[a][c], target_results[a][b])
                    if res:
                        regressions.append(resstr)
        else:
            notfound.append("%s not found in target" % a)
    print("\n".join(sorted(matches)))
    print("\n")
    print("\n".join(sorted(regressions)))
    print("\n".join(sorted(notfound)))
    return 0
|
||||
|
||||
def regression_git(args, logger):
    """Entry point for the 'regression-git' subcommand.

    Selects two tagged test revisions from the given git repository (within
    one branch, or across --branch/--branch2), loads their results and runs
    the common regression analysis.  Returns 0 on success, 1 when suitable
    revisions cannot be determined.
    """
    base_results = {}
    target_results = {}

    tag_name = "{branch}/{commit_number}-g{commit}/{tag_number}"
    repo = GitRepo(args.repo)

    revs = gitarchive.get_test_revs(logger, repo, tag_name, branch=args.branch)

    if args.branch2:
        revs2 = gitarchive.get_test_revs(logger, repo, tag_name, branch=args.branch2)
        if not len(revs2):
            logger.error("No revisions found to compare against")
            return 1
        if not len(revs):
            logger.error("No revision to report on found")
            return 1
    else:
        # Single-branch mode needs at least two revisions to compare.
        if len(revs) < 2:
            logger.error("Only %d tester revisions found, unable to generate report" % len(revs))
            return 1

    # Pick revisions
    if args.commit:
        if args.commit_number:
            logger.warning("Ignoring --commit-number as --commit was specified")
        index1 = gitarchive.rev_find(revs, 'commit', args.commit)
    elif args.commit_number:
        index1 = gitarchive.rev_find(revs, 'commit_number', args.commit_number)
    else:
        # Default: the newest revision on the primary branch.
        index1 = len(revs) - 1

    if args.branch2:
        # Append the chosen primary revision to the second branch's list and
        # search for the comparison revision there.
        revs2.append(revs[index1])
        index1 = len(revs2) - 1
        revs = revs2

    if args.commit2:
        if args.commit_number2:
            logger.warning("Ignoring --commit-number2 as --commit2 was specified")
        index2 = gitarchive.rev_find(revs, 'commit', args.commit2)
    elif args.commit_number2:
        index2 = gitarchive.rev_find(revs, 'commit_number', args.commit_number2)
    else:
        if index1 > 0:
            index2 = index1 - 1
            # Find the closest matching commit number for comparision
            # In future we could check the commit is a common ancestor and
            # continue back if not but this good enough for now
            while index2 > 0 and revs[index2].commit_number > revs[index1].commit_number:
                index2 = index2 - 1
        else:
            logger.error("Unable to determine the other commit, use "
                         "--commit2 or --commit-number2 to specify it")
            return 1

    logger.info("Comparing:\n%s\nto\n%s\n" % (revs[index1], revs[index2]))

    base_results = resultutils.git_get_result(repo, revs[index1][2])
    target_results = resultutils.git_get_result(repo, revs[index2][2])

    regression_common(args, logger, base_results, target_results)

    return 0
|
||||
|
||||
def register_commands(subparsers):
    """Register the 'regression' and 'regression-git' subcommands."""

    # File/directory based comparison of two result sets.
    parser = subparsers.add_parser('regression', help='regression file/directory analysis',
                                   description='regression analysis comparing the base set of results to the target results',
                                   group='analysis')
    parser.set_defaults(func=regression)
    parser.add_argument('base_result',
                        help='base result file/directory/URL for the comparison')
    parser.add_argument('target_result',
                        help='target result file/directory/URL to compare with')
    parser.add_argument('-b', '--base-result-id', default='',
                        help='(optional) filter the base results to this result ID')
    parser.add_argument('-t', '--target-result-id', default='',
                        help='(optional) filter the target results to this result ID')

    # Git repository based comparison between stored revisions.
    parser = subparsers.add_parser('regression-git', help='regression git analysis',
                                   description='regression analysis comparing base result set to target '
                                               'result set',
                                   group='analysis')
    parser.set_defaults(func=regression_git)
    parser.add_argument('repo',
                        help='the git repository containing the data')
    parser.add_argument('-b', '--base-result-id', default='',
                        help='(optional) default select regression based on configurations unless base result '
                             'id was provided')
    parser.add_argument('-t', '--target-result-id', default='',
                        help='(optional) default select regression based on configurations unless target result '
                             'id was provided')

    parser.add_argument('--branch', '-B', default='master', help="Branch to find commit in")
    parser.add_argument('--branch2', help="Branch to find comparision revisions in")
    parser.add_argument('--commit', help="Revision to search for")
    parser.add_argument('--commit-number', help="Revision number to search for, redundant if --commit is specified")
    parser.add_argument('--commit2', help="Revision to compare with")
    parser.add_argument('--commit-number2', help="Revision number to compare with, redundant if --commit2 is specified")
|
||||
|
||||
@@ -0,0 +1,312 @@
|
||||
# test result tool - report text based test results
|
||||
#
|
||||
# Copyright (c) 2019, Intel Corporation.
|
||||
# Copyright (c) 2019, Linux Foundation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import os
|
||||
import glob
|
||||
import json
|
||||
import resulttool.resultutils as resultutils
|
||||
from oeqa.utils.git import GitRepo
|
||||
import oeqa.utils.gitarchive as gitarchive
|
||||
|
||||
|
||||
class ResultsTextReport(object):
|
||||
def __init__(self):
|
||||
self.ptests = {}
|
||||
self.ltptests = {}
|
||||
self.ltpposixtests = {}
|
||||
self.result_types = {'passed': ['PASSED', 'passed', 'PASS', 'XFAIL'],
|
||||
'failed': ['FAILED', 'failed', 'FAIL', 'ERROR', 'error', 'UNKNOWN', 'XPASS'],
|
||||
'skipped': ['SKIPPED', 'skipped', 'UNSUPPORTED', 'UNTESTED', 'UNRESOLVED']}
|
||||
|
||||
|
||||
def handle_ptest_result(self, k, status, result, machine):
|
||||
if machine not in self.ptests:
|
||||
self.ptests[machine] = {}
|
||||
|
||||
if k == 'ptestresult.sections':
|
||||
# Ensure tests without any test results still show up on the report
|
||||
for suite in result['ptestresult.sections']:
|
||||
if suite not in self.ptests[machine]:
|
||||
self.ptests[machine][suite] = {
|
||||
'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-',
|
||||
'failed_testcases': [], "testcases": set(),
|
||||
}
|
||||
if 'duration' in result['ptestresult.sections'][suite]:
|
||||
self.ptests[machine][suite]['duration'] = result['ptestresult.sections'][suite]['duration']
|
||||
if 'timeout' in result['ptestresult.sections'][suite]:
|
||||
self.ptests[machine][suite]['duration'] += " T"
|
||||
return True
|
||||
|
||||
# process test result
|
||||
try:
|
||||
_, suite, test = k.split(".", 2)
|
||||
except ValueError:
|
||||
return True
|
||||
|
||||
# Handle 'glib-2.0'
|
||||
if 'ptestresult.sections' in result and suite not in result['ptestresult.sections']:
|
||||
try:
|
||||
_, suite, suite1, test = k.split(".", 3)
|
||||
if suite + "." + suite1 in result['ptestresult.sections']:
|
||||
suite = suite + "." + suite1
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
if suite not in self.ptests[machine]:
|
||||
self.ptests[machine][suite] = {
|
||||
'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-',
|
||||
'failed_testcases': [], "testcases": set(),
|
||||
}
|
||||
|
||||
# do not process duplicate results
|
||||
if test in self.ptests[machine][suite]["testcases"]:
|
||||
print("Warning duplicate ptest result '{}.{}' for {}".format(suite, test, machine))
|
||||
return False
|
||||
|
||||
for tk in self.result_types:
|
||||
if status in self.result_types[tk]:
|
||||
self.ptests[machine][suite][tk] += 1
|
||||
self.ptests[machine][suite]["testcases"].add(test)
|
||||
return True
|
||||
|
||||
def handle_ltptest_result(self, k, status, result, machine):
|
||||
if machine not in self.ltptests:
|
||||
self.ltptests[machine] = {}
|
||||
|
||||
if k == 'ltpresult.sections':
|
||||
# Ensure tests without any test results still show up on the report
|
||||
for suite in result['ltpresult.sections']:
|
||||
if suite not in self.ltptests[machine]:
|
||||
self.ltptests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
|
||||
if 'duration' in result['ltpresult.sections'][suite]:
|
||||
self.ltptests[machine][suite]['duration'] = result['ltpresult.sections'][suite]['duration']
|
||||
if 'timeout' in result['ltpresult.sections'][suite]:
|
||||
self.ltptests[machine][suite]['duration'] += " T"
|
||||
return
|
||||
try:
|
||||
_, suite, test = k.split(".", 2)
|
||||
except ValueError:
|
||||
return
|
||||
# Handle 'glib-2.0'
|
||||
if 'ltpresult.sections' in result and suite not in result['ltpresult.sections']:
|
||||
try:
|
||||
_, suite, suite1, test = k.split(".", 3)
|
||||
if suite + "." + suite1 in result['ltpresult.sections']:
|
||||
suite = suite + "." + suite1
|
||||
except ValueError:
|
||||
pass
|
||||
if suite not in self.ltptests[machine]:
|
||||
self.ltptests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
|
||||
for tk in self.result_types:
|
||||
if status in self.result_types[tk]:
|
||||
self.ltptests[machine][suite][tk] += 1
|
||||
|
||||
def handle_ltpposixtest_result(self, k, status, result, machine):
|
||||
if machine not in self.ltpposixtests:
|
||||
self.ltpposixtests[machine] = {}
|
||||
|
||||
if k == 'ltpposixresult.sections':
|
||||
# Ensure tests without any test results still show up on the report
|
||||
for suite in result['ltpposixresult.sections']:
|
||||
if suite not in self.ltpposixtests[machine]:
|
||||
self.ltpposixtests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
|
||||
if 'duration' in result['ltpposixresult.sections'][suite]:
|
||||
self.ltpposixtests[machine][suite]['duration'] = result['ltpposixresult.sections'][suite]['duration']
|
||||
return
|
||||
try:
|
||||
_, suite, test = k.split(".", 2)
|
||||
except ValueError:
|
||||
return
|
||||
# Handle 'glib-2.0'
|
||||
if 'ltpposixresult.sections' in result and suite not in result['ltpposixresult.sections']:
|
||||
try:
|
||||
_, suite, suite1, test = k.split(".", 3)
|
||||
if suite + "." + suite1 in result['ltpposixresult.sections']:
|
||||
suite = suite + "." + suite1
|
||||
except ValueError:
|
||||
pass
|
||||
if suite not in self.ltpposixtests[machine]:
|
||||
self.ltpposixtests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
|
||||
for tk in self.result_types:
|
||||
if status in self.result_types[tk]:
|
||||
self.ltpposixtests[machine][suite][tk] += 1
|
||||
|
||||
def get_aggregated_test_result(self, logger, testresult, machine):
|
||||
test_count_report = {'passed': 0, 'failed': 0, 'skipped': 0, 'failed_testcases': []}
|
||||
result = testresult.get('result', [])
|
||||
for k in result:
|
||||
test_status = result[k].get('status', [])
|
||||
if k.startswith("ptestresult."):
|
||||
if not self.handle_ptest_result(k, test_status, result, machine):
|
||||
continue
|
||||
elif k.startswith("ltpresult."):
|
||||
self.handle_ltptest_result(k, test_status, result, machine)
|
||||
elif k.startswith("ltpposixresult."):
|
||||
self.handle_ltpposixtest_result(k, test_status, result, machine)
|
||||
|
||||
# process result if it was not skipped by a handler
|
||||
for tk in self.result_types:
|
||||
if test_status in self.result_types[tk]:
|
||||
test_count_report[tk] += 1
|
||||
if test_status in self.result_types['failed']:
|
||||
test_count_report['failed_testcases'].append(k)
|
||||
return test_count_report
|
||||
|
||||
def print_test_report(self, template_file_name, test_count_reports):
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
script_path = os.path.dirname(os.path.realpath(__file__))
|
||||
file_loader = FileSystemLoader(script_path + '/template')
|
||||
env = Environment(loader=file_loader, trim_blocks=True)
|
||||
template = env.get_template(template_file_name)
|
||||
havefailed = False
|
||||
reportvalues = []
|
||||
machines = []
|
||||
cols = ['passed', 'failed', 'skipped']
|
||||
maxlen = {'passed' : 0, 'failed' : 0, 'skipped' : 0, 'result_id': 0, 'testseries' : 0, 'ptest' : 0 ,'ltptest': 0, 'ltpposixtest': 0}
|
||||
for line in test_count_reports:
|
||||
total_tested = line['passed'] + line['failed'] + line['skipped']
|
||||
vals = {}
|
||||
vals['result_id'] = line['result_id']
|
||||
vals['testseries'] = line['testseries']
|
||||
vals['sort'] = line['testseries'] + "_" + line['result_id']
|
||||
vals['failed_testcases'] = line['failed_testcases']
|
||||
for k in cols:
|
||||
vals[k] = "%d (%s%%)" % (line[k], format(line[k] / total_tested * 100, '.0f'))
|
||||
for k in maxlen:
|
||||
if k in vals and len(vals[k]) > maxlen[k]:
|
||||
maxlen[k] = len(vals[k])
|
||||
reportvalues.append(vals)
|
||||
if line['failed_testcases']:
|
||||
havefailed = True
|
||||
if line['machine'] not in machines:
|
||||
machines.append(line['machine'])
|
||||
reporttotalvalues = {}
|
||||
for k in cols:
|
||||
reporttotalvalues[k] = '%s' % sum([line[k] for line in test_count_reports])
|
||||
reporttotalvalues['count'] = '%s' % len(test_count_reports)
|
||||
for (machine, report) in self.ptests.items():
|
||||
for ptest in self.ptests[machine]:
|
||||
if len(ptest) > maxlen['ptest']:
|
||||
maxlen['ptest'] = len(ptest)
|
||||
for (machine, report) in self.ltptests.items():
|
||||
for ltptest in self.ltptests[machine]:
|
||||
if len(ltptest) > maxlen['ltptest']:
|
||||
maxlen['ltptest'] = len(ltptest)
|
||||
for (machine, report) in self.ltpposixtests.items():
|
||||
for ltpposixtest in self.ltpposixtests[machine]:
|
||||
if len(ltpposixtest) > maxlen['ltpposixtest']:
|
||||
maxlen['ltpposixtest'] = len(ltpposixtest)
|
||||
output = template.render(reportvalues=reportvalues,
|
||||
reporttotalvalues=reporttotalvalues,
|
||||
havefailed=havefailed,
|
||||
machines=machines,
|
||||
ptests=self.ptests,
|
||||
ltptests=self.ltptests,
|
||||
ltpposixtests=self.ltpposixtests,
|
||||
maxlen=maxlen)
|
||||
print(output)
|
||||
|
||||
def view_test_report(self, logger, source_dir, branch, commit, tag, use_regression_map, raw_test, selected_test_case_only):
|
||||
def print_selected_testcase_result(testresults, selected_test_case_only):
|
||||
for testsuite in testresults:
|
||||
for resultid in testresults[testsuite]:
|
||||
result = testresults[testsuite][resultid]['result']
|
||||
test_case_result = result.get(selected_test_case_only, {})
|
||||
if test_case_result.get('status'):
|
||||
print('Found selected test case result for %s from %s' % (selected_test_case_only,
|
||||
resultid))
|
||||
print(test_case_result['status'])
|
||||
else:
|
||||
print('Could not find selected test case result for %s from %s' % (selected_test_case_only,
|
||||
resultid))
|
||||
if test_case_result.get('log'):
|
||||
print(test_case_result['log'])
|
||||
test_count_reports = []
|
||||
configmap = resultutils.store_map
|
||||
if use_regression_map:
|
||||
configmap = resultutils.regression_map
|
||||
if commit:
|
||||
if tag:
|
||||
logger.warning("Ignoring --tag as --commit was specified")
|
||||
tag_name = "{branch}/{commit_number}-g{commit}/{tag_number}"
|
||||
repo = GitRepo(source_dir)
|
||||
revs = gitarchive.get_test_revs(logger, repo, tag_name, branch=branch)
|
||||
rev_index = gitarchive.rev_find(revs, 'commit', commit)
|
||||
testresults = resultutils.git_get_result(repo, revs[rev_index][2], configmap=configmap)
|
||||
elif tag:
|
||||
repo = GitRepo(source_dir)
|
||||
testresults = resultutils.git_get_result(repo, [tag], configmap=configmap)
|
||||
else:
|
||||
testresults = resultutils.load_resultsdata(source_dir, configmap=configmap)
|
||||
if raw_test:
|
||||
raw_results = {}
|
||||
for testsuite in testresults:
|
||||
result = testresults[testsuite].get(raw_test, {})
|
||||
if result:
|
||||
raw_results[testsuite] = {raw_test: result}
|
||||
if raw_results:
|
||||
if selected_test_case_only:
|
||||
print_selected_testcase_result(raw_results, selected_test_case_only)
|
||||
else:
|
||||
print(json.dumps(raw_results, sort_keys=True, indent=4))
|
||||
else:
|
||||
print('Could not find raw test result for %s' % raw_test)
|
||||
return 0
|
||||
if selected_test_case_only:
|
||||
print_selected_testcase_result(testresults, selected_test_case_only)
|
||||
return 0
|
||||
for testsuite in testresults:
|
||||
for resultid in testresults[testsuite]:
|
||||
skip = False
|
||||
result = testresults[testsuite][resultid]
|
||||
machine = result['configuration']['MACHINE']
|
||||
|
||||
# Check to see if there is already results for these kinds of tests for the machine
|
||||
for key in result['result'].keys():
|
||||
testtype = str(key).split('.')[0]
|
||||
if ((machine in self.ltptests and testtype == "ltpiresult" and self.ltptests[machine]) or
|
||||
(machine in self.ltpposixtests and testtype == "ltpposixresult" and self.ltpposixtests[machine])):
|
||||
print("Already have test results for %s on %s, skipping %s" %(str(key).split('.')[0], machine, resultid))
|
||||
skip = True
|
||||
break
|
||||
if skip:
|
||||
break
|
||||
|
||||
test_count_report = self.get_aggregated_test_result(logger, result, machine)
|
||||
test_count_report['machine'] = machine
|
||||
test_count_report['testseries'] = result['configuration']['TESTSERIES']
|
||||
test_count_report['result_id'] = resultid
|
||||
test_count_reports.append(test_count_report)
|
||||
self.print_test_report('test_report_full_text.txt', test_count_reports)
|
||||
|
||||
def report(args, logger):
    """Entry point for the 'report' subcommand: print a text summary."""
    # Renamed local so it no longer shadows this function's own name.
    reporter = ResultsTextReport()
    reporter.view_test_report(logger, args.source_dir, args.branch, args.commit,
                              args.tag, args.use_regression_map,
                              args.raw_test_only, args.selected_test_case_only)
    return 0
|
||||
|
||||
def register_commands(subparsers):
    """Register the 'report' subcommand and its options."""
    parser = subparsers.add_parser('report', help='summarise test results',
                                   description='print a text-based summary of the test results',
                                   group='analysis')
    parser.set_defaults(func=report)
    parser.add_argument('source_dir',
                        help='source file/directory/URL that contain the test result files to summarise')
    parser.add_argument('--branch', '-B', default='master', help="Branch to find commit in")
    parser.add_argument('--commit', help="Revision to report")
    parser.add_argument('-t', '--tag', default='',
                        help='source_dir is a git repository, report on the tag specified from that repository')
    parser.add_argument('-m', '--use_regression_map', action='store_true',
                        help='instead of the default "store_map", use the "regression_map" for report')
    parser.add_argument('-r', '--raw_test_only', default='',
                        help='output raw test result only for the user provided test result id')
    parser.add_argument('-s', '--selected_test_case_only', default='',
                        help='output selected test case result for the user provided test case id, if both test '
                             'result id and test case id are provided then output the selected test case result '
                             'from the provided test result id')
|
||||
@@ -0,0 +1,228 @@
|
||||
# resulttool - common library/utility functions
|
||||
#
|
||||
# Copyright (c) 2019, Intel Corporation.
|
||||
# Copyright (c) 2019, Linux Foundation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import os
|
||||
import base64
|
||||
import zlib
|
||||
import json
|
||||
import scriptpath
|
||||
import copy
|
||||
import urllib.request
|
||||
import posixpath
|
||||
scriptpath.add_oe_lib_path()
|
||||
|
||||
# Mappings from TEST_TYPE to the configuration keys whose values are joined
# (with "/") to form the storage path of each result set - see
# append_resultsdata() below. An empty list collapses every result of that
# type into a single bucket.
flatten_map = {
    "oeselftest": [],
    "runtime": [],
    "sdk": [],
    "sdkext": [],
    "manual": []
}
# Keys used when comparing result sets for regressions.
regression_map = {
    "oeselftest": ['TEST_TYPE', 'MACHINE'],
    "runtime": ['TESTSERIES', 'TEST_TYPE', 'IMAGE_BASENAME', 'MACHINE', 'IMAGE_PKGTYPE', 'DISTRO'],
    "sdk": ['TESTSERIES', 'TEST_TYPE', 'IMAGE_BASENAME', 'MACHINE', 'SDKMACHINE'],
    "sdkext": ['TESTSERIES', 'TEST_TYPE', 'IMAGE_BASENAME', 'MACHINE', 'SDKMACHINE'],
    "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE']
}
# Keys used when storing result sets (the default configmap).
store_map = {
    "oeselftest": ['TEST_TYPE'],
    "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'],
    "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
    "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
    "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME']
}
|
||||
|
||||
def is_url(p):
    """
    Helper for determining if the given path is a URL
    """
    # str.startswith accepts a tuple of prefixes - one call, same result.
    return p.startswith(('http://', 'https://'))
|
||||
|
||||
# Configuration variables injected into each result's "configuration" section
# when absent; TESTSERIES gets special handling in append_resultsdata() (it
# defaults to the name of the directory containing the results file).
extra_configvars = {'TESTSERIES': ''}
|
||||
|
||||
#
|
||||
# Load the json file and append the results data into the provided results dict
|
||||
#
|
||||
def append_resultsdata(results, f, configmap=store_map, configvars=extra_configvars):
    """Load results from *f* and merge them into the *results* dict.

    f may be a URL, a path to a json file, or an already-loaded dict of
    results. Each result is filed under a path built by joining the
    configuration values named in configmap[TEST_TYPE].
    Raises ValueError on malformed data or an unknown TEST_TYPE.
    """
    if type(f) is str:
        if is_url(f):
            # Remote results: fetch and use the URL's parent directory name
            # as the default TESTSERIES.
            with urllib.request.urlopen(f) as response:
                data = json.loads(response.read().decode('utf-8'))
            url = urllib.parse.urlparse(f)
            testseries = posixpath.basename(posixpath.dirname(url.path))
        else:
            with open(f, "r") as filedata:
                try:
                    data = json.load(filedata)
                except json.decoder.JSONDecodeError:
                    # Corrupt file: warn and fall through with no entries
                    # (iterating "" below is a no-op).
                    print("Cannot decode {}. Possible corruption. Skipping.".format(f))
                    data = ""
            testseries = os.path.basename(os.path.dirname(f))
    else:
        # NOTE(review): when f is a dict and a result lacks TESTSERIES in its
        # configuration, 'testseries' below is unbound and this raises
        # NameError - callers appear to pass pre-populated data; confirm.
        data = f
    for res in data:
        if "configuration" not in data[res] or "result" not in data[res]:
            raise ValueError("Test results data without configuration or result section?")
        for config in configvars:
            if config == "TESTSERIES" and "TESTSERIES" not in data[res]["configuration"]:
                data[res]["configuration"]["TESTSERIES"] = testseries
                continue
            if config not in data[res]["configuration"]:
                data[res]["configuration"][config] = configvars[config]
        testtype = data[res]["configuration"].get("TEST_TYPE")
        if testtype not in configmap:
            raise ValueError("Unknown test type %s" % testtype)
        # Storage path, e.g. "runtime/poky/qemux86/core-image-sato".
        testpath = "/".join(data[res]["configuration"].get(i) for i in configmap[testtype])
        if testpath not in results:
            results[testpath] = {}
        results[testpath][res] = data[res]
|
||||
|
||||
#
|
||||
# Walk a directory and find/load results data
|
||||
# or load directly from a file
|
||||
#
|
||||
def load_resultsdata(source, configmap=store_map, configvars=extra_configvars):
    """Load results from a URL, a single file, or a directory tree.

    Directories are walked recursively and every "testresults.json" found
    is merged into the returned dict.
    """
    loaded = {}
    # A URL or single file is loaded directly.
    if is_url(source) or os.path.isfile(source):
        append_resultsdata(loaded, source, configmap, configvars)
        return loaded
    # Otherwise treat source as a directory tree.
    for dirpath, _, filenames in os.walk(source):
        for filename in filenames:
            if filename == "testresults.json":
                append_resultsdata(loaded, os.path.join(dirpath, filename),
                                   configmap, configvars)
    return loaded
|
||||
|
||||
def filter_resultsdata(results, resultid):
    """Return the subset of *results* whose result id equals *resultid*.

    results maps testpath -> {result_id: result}; the returned dict keeps
    only the matching result id under each testpath (testpaths with no
    match are dropped entirely).
    """
    newresults = {}
    for r in results:
        for i in results[r]:
            # Fix: this previously compared against the undefined name
            # 'resultsid', raising NameError whenever the loop ran.
            if i == resultid:
                newresults[r] = {}
                newresults[r][i] = results[r][i]
    return newresults
|
||||
|
||||
def strip_ptestresults(results):
    """Return a deep copy of *results* with the bulky ptest logs removed:
    the 'ptestresult.rawlogs' entry and each section's 'log' field.
    The input dict is left untouched."""
    stripped = copy.deepcopy(results)
    for res in stripped:
        if 'result' not in stripped[res]:
            continue
        result = stripped[res]['result']
        result.pop('ptestresult.rawlogs', None)
        for section in result.get('ptestresult.sections', {}).values():
            section.pop('log', None)
    return stripped
|
||||
|
||||
def decode_log(logdata):
    """Decode a stored log value.

    Plain strings pass through unchanged; dicts carrying a "compressed"
    key are base64-decoded then zlib-inflated. Anything else yields None.
    """
    if isinstance(logdata, str):
        return logdata
    if isinstance(logdata, dict) and "compressed" in logdata:
        raw = base64.b64decode(logdata["compressed"].encode("utf-8"))
        return zlib.decompress(raw).decode("utf-8", errors='ignore')
    return None
|
||||
|
||||
def generic_get_log(sectionname, results, section):
    """Return the decoded 'log' of results[sectionname][section], or None
    when any level of that lookup is missing."""
    if sectionname not in results:
        return None
    if section not in results[sectionname]:
        return None
    entry = results[sectionname][section]
    if 'log' not in entry:
        return None
    return decode_log(entry['log'])
|
||||
|
||||
def ptestresult_get_log(results, section):
    # Convenience wrapper: decoded per-section log from 'ptestresult.sections'.
    return generic_get_log('ptestresult.sections', results, section)
|
||||
|
||||
def generic_get_rawlogs(sectname, results):
    """Return the decoded top-level 'log' stored under results[sectname],
    or None if the section or its log is absent."""
    if sectname not in results:
        return None
    section = results[sectname]
    if 'log' not in section:
        return None
    return decode_log(section['log'])
|
||||
|
||||
def ptestresult_get_rawlogs(results):
    # Convenience wrapper: decoded combined raw log from 'ptestresult.rawlogs'.
    return generic_get_rawlogs('ptestresult.rawlogs', results)
|
||||
|
||||
def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False):
    """Write *results* out under *destdir*, one json file per testpath.

    results maps testpath -> {result_id: result}; each testpath becomes a
    subdirectory of destdir containing *fn*. Unless ptestjson is True the
    bulky ptest logs are stripped from the json; when ptestlogs is True
    they are written alongside it as ptest-raw.log / ptest-<section>.log.
    """
    for res in results:
        # An empty testpath writes directly into destdir.
        if res:
            dst = destdir + "/" + res + "/" + fn
        else:
            dst = destdir + "/" + fn
        os.makedirs(os.path.dirname(dst), exist_ok=True)
        resultsout = results[res]
        if not ptestjson:
            # Drop raw/section logs from the json copy to keep it small.
            resultsout = strip_ptestresults(results[res])
        with open(dst, 'w') as f:
            f.write(json.dumps(resultsout, sort_keys=True, indent=4))
        for res2 in results[res]:
            if ptestlogs and 'result' in results[res][res2]:
                seriesresults = results[res][res2]['result']
                rawlogs = ptestresult_get_rawlogs(seriesresults)
                if rawlogs is not None:
                    with open(dst.replace(fn, "ptest-raw.log"), "w+") as f:
                        f.write(rawlogs)
                if 'ptestresult.sections' in seriesresults:
                    for i in seriesresults['ptestresult.sections']:
                        sectionlog = ptestresult_get_log(seriesresults, i)
                        if sectionlog is not None:
                            with open(dst.replace(fn, "ptest-%s.log" % i), "w+") as f:
                                f.write(sectionlog)
|
||||
|
||||
def git_get_result(repo, tags, configmap=store_map):
    """Load all testresults.json files stored under the given git *tags*
    of *repo* and merge them into a single results dict."""
    git_objs = []
    for tag in tags:
        # List every testresults.json blob reachable from this tag.
        files = repo.run_cmd(['ls-tree', "--name-only", "-r", tag]).splitlines()
        git_objs.extend([tag + ':' + f for f in files if f.endswith("testresults.json")])

    def parse_json_stream(data):
        """Parse multiple concatenated JSON objects"""
        # 'git show' of several blobs concatenates them; split on the '}{'
        # boundary lines the pretty-printed json produces.
        objs = []
        json_d = ""
        for line in data.splitlines():
            if line == '}{':
                json_d += '}'
                objs.append(json.loads(json_d))
                json_d = '{'
            else:
                json_d += line
        objs.append(json.loads(json_d))
        return objs

    # Optimize by reading all data with one git command
    results = {}
    for obj in parse_json_stream(repo.run_cmd(['show'] + git_objs + ['--'])):
        append_resultsdata(results, obj, configmap=configmap)

    return results
|
||||
|
||||
def test_run_results(results):
    """
    Generator over all test runs that carry a "result" section.

    Yields one tuple per qualifying run:
        (result json file path, test run name, test run (dict), test run "results" (dict))
    """
    for path, runs in results.items():
        for run_name, test_run in runs.items():
            if 'result' in test_run:
                yield path, run_name, test_run, test_run['result']
|
||||
|
||||
@@ -0,0 +1,104 @@
|
||||
# resulttool - store test results
|
||||
#
|
||||
# Copyright (c) 2019, Intel Corporation.
|
||||
# Copyright (c) 2019, Linux Foundation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import tempfile
|
||||
import os
|
||||
import subprocess
|
||||
import json
|
||||
import shutil
|
||||
import scriptpath
|
||||
scriptpath.add_bitbake_lib_path()
|
||||
scriptpath.add_oe_lib_path()
|
||||
import resulttool.resultutils as resultutils
|
||||
import oeqa.utils.gitarchive as gitarchive
|
||||
|
||||
|
||||
def store(args, logger):
    """Entry point for the 'store' subcommand.

    Reads results from args.source, groups them by the (commit, branch,
    commit_count) of the meta layer they were produced from, and commits
    each group into the git repository at args.git_dir via gitarchive.
    Returns 0 on success, 1 when no results were found (unless
    --allow-empty was given).
    """
    # Results are staged in a scratch directory before being archived.
    tempdir = tempfile.mkdtemp(prefix='testresults.')
    try:
        configvars = resultutils.extra_configvars.copy()
        if args.executed_by:
            configvars['EXECUTED_BY'] = args.executed_by
        if args.extra_test_env:
            configvars['EXTRA_TEST_ENV'] = args.extra_test_env
        results = {}
        logger.info('Reading files from %s' % args.source)
        if resultutils.is_url(args.source) or os.path.isfile(args.source):
            resultutils.append_resultsdata(results, args.source, configvars=configvars)
        else:
            for root, dirs, files in os.walk(args.source):
                for name in files:
                    f = os.path.join(root, name)
                    if name == "testresults.json":
                        resultutils.append_resultsdata(results, f, configvars=configvars)
                    elif args.all:
                        # --all: mirror every non-results file into the staging tree.
                        dst = f.replace(args.source, tempdir + "/")
                        os.makedirs(os.path.dirname(dst), exist_ok=True)
                        shutil.copyfile(f, dst)

        revisions = {}

        if not results and not args.all:
            if args.allow_empty:
                logger.info("No results found to store")
                return 0
            logger.error("No results found to store")
            return 1

        # Find the branch/commit/commit_count and ensure they all match
        for suite in results:
            for result in results[suite]:
                config = results[suite][result]['configuration']['LAYERS']['meta']
                revision = (config['commit'], config['branch'], str(config['commit_count']))
                if revision not in revisions:
                    revisions[revision] = {}
                if suite not in revisions[revision]:
                    revisions[revision][suite] = {}
                revisions[revision][suite][result] = results[suite][result]

        logger.info("Found %d revisions to store" % len(revisions))

        # One git commit (and tag) per distinct source revision.
        for r in revisions:
            results = revisions[r]
            keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]}
            # Clear the staging tree between revisions.
            # NOTE(review): the '!' '-path' './.git/*' exclusion can never
            # match here because find is given the absolute tempdir path, so
            # everything (tempdir itself included) is deleted; the staging
            # tree is recreated by save_resultsdata below - confirm intent.
            subprocess.check_call(["find", tempdir, "!", "-path", "./.git/*", "-delete"])
            resultutils.save_resultsdata(results, tempdir, ptestlogs=True)

            logger.info('Storing test result into git repository %s' % args.git_dir)

            gitarchive.gitarchive(tempdir, args.git_dir, False, False,
                                  "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
                                  False, "{branch}/{commit_count}-g{commit}/{tag_number}",
                                  'Test run #{tag_number} of {branch}:{commit}', '',
                                  [], [], False, keywords, logger)

    finally:
        subprocess.check_call(["rm", "-rf", tempdir])

    return 0
|
||||
|
||||
def register_commands(subparsers):
    """Register the 'store' subcommand and its options."""
    parser = subparsers.add_parser('store', help='store test results into a git repository',
                                   description='takes a results file or directory of results files and stores '
                                               'them into the destination git repository, splitting out the results '
                                               'files as configured',
                                   group='setup')
    parser.set_defaults(func=store)
    parser.add_argument('source',
                        help='source file/directory/URL that contain the test result files to be stored')
    parser.add_argument('git_dir',
                        help='the location of the git repository to store the results in')
    parser.add_argument('-a', '--all', action='store_true',
                        help='include all files, not just testresults.json files')
    parser.add_argument('-e', '--allow-empty', action='store_true',
                        help='don\'t error if no results to store are found')
    parser.add_argument('-x', '--executed-by', default='',
                        help='add executed-by configuration to each result file')
    parser.add_argument('-t', '--extra-test-env', default='',
                        help='add extra test environment data to each result file configuration')
|
||||
@@ -0,0 +1,79 @@
|
||||
==============================================================================================================
|
||||
Test Result Status Summary (Counts/Percentages sorted by testseries, ID)
|
||||
==============================================================================================================
|
||||
--------------------------------------------------------------------------------------------------------------
|
||||
{{ 'Test Series'.ljust(maxlen['testseries']) }} | {{ 'ID'.ljust(maxlen['result_id']) }} | {{ 'Passed'.ljust(maxlen['passed']) }} | {{ 'Failed'.ljust(maxlen['failed']) }} | {{ 'Skipped'.ljust(maxlen['skipped']) }}
|
||||
--------------------------------------------------------------------------------------------------------------
|
||||
{% for report in reportvalues |sort(attribute='sort') %}
|
||||
{{ report.testseries.ljust(maxlen['testseries']) }} | {{ report.result_id.ljust(maxlen['result_id']) }} | {{ (report.passed|string).ljust(maxlen['passed']) }} | {{ (report.failed|string).ljust(maxlen['failed']) }} | {{ (report.skipped|string).ljust(maxlen['skipped']) }}
|
||||
{% endfor %}
|
||||
--------------------------------------------------------------------------------------------------------------
|
||||
{{ 'Total'.ljust(maxlen['testseries']) }} | {{ reporttotalvalues['count'].ljust(maxlen['result_id']) }} | {{ reporttotalvalues['passed'].ljust(maxlen['passed']) }} | {{ reporttotalvalues['failed'].ljust(maxlen['failed']) }} | {{ reporttotalvalues['skipped'].ljust(maxlen['skipped']) }}
|
||||
--------------------------------------------------------------------------------------------------------------
|
||||
|
||||
{% for machine in machines %}
|
||||
{% if ptests[machine] %}
|
||||
==============================================================================================================
|
||||
{{ machine }} PTest Result Summary
|
||||
==============================================================================================================
|
||||
--------------------------------------------------------------------------------------------------------------
|
||||
{{ 'Recipe'.ljust(maxlen['ptest']) }} | {{ 'Passed'.ljust(maxlen['passed']) }} | {{ 'Failed'.ljust(maxlen['failed']) }} | {{ 'Skipped'.ljust(maxlen['skipped']) }} | {{ 'Time(s)'.ljust(10) }}
|
||||
--------------------------------------------------------------------------------------------------------------
|
||||
{% for ptest in ptests[machine] |sort %}
|
||||
{{ ptest.ljust(maxlen['ptest']) }} | {{ (ptests[machine][ptest]['passed']|string).ljust(maxlen['passed']) }} | {{ (ptests[machine][ptest]['failed']|string).ljust(maxlen['failed']) }} | {{ (ptests[machine][ptest]['skipped']|string).ljust(maxlen['skipped']) }} | {{ (ptests[machine][ptest]['duration']|string) }}
|
||||
{% endfor %}
|
||||
--------------------------------------------------------------------------------------------------------------
|
||||
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
{% for machine in machines %}
|
||||
{% if ltptests[machine] %}
|
||||
==============================================================================================================
|
||||
{{ machine }} Ltp Test Result Summary
|
||||
==============================================================================================================
|
||||
--------------------------------------------------------------------------------------------------------------
|
||||
{{ 'Recipe'.ljust(maxlen['ltptest']) }} | {{ 'Passed'.ljust(maxlen['passed']) }} | {{ 'Failed'.ljust(maxlen['failed']) }} | {{ 'Skipped'.ljust(maxlen['skipped']) }} | {{ 'Time(s)'.ljust(10) }}
|
||||
--------------------------------------------------------------------------------------------------------------
|
||||
{% for ltptest in ltptests[machine] |sort %}
|
||||
{{ ltptest.ljust(maxlen['ltptest']) }} | {{ (ltptests[machine][ltptest]['passed']|string).ljust(maxlen['passed']) }} | {{ (ltptests[machine][ltptest]['failed']|string).ljust(maxlen['failed']) }} | {{ (ltptests[machine][ltptest]['skipped']|string).ljust(maxlen['skipped']) }} | {{ (ltptests[machine][ltptest]['duration']|string) }}
|
||||
{% endfor %}
|
||||
--------------------------------------------------------------------------------------------------------------
|
||||
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
{% for machine in machines %}
|
||||
{% if ltpposixtests[machine] %}
|
||||
==============================================================================================================
|
||||
{{ machine }} Ltp Posix Result Summary
|
||||
==============================================================================================================
|
||||
--------------------------------------------------------------------------------------------------------------
|
||||
{{ 'Recipe'.ljust(maxlen['ltpposixtest']) }} | {{ 'Passed'.ljust(maxlen['passed']) }} | {{ 'Failed'.ljust(maxlen['failed']) }} | {{ 'Skipped'.ljust(maxlen['skipped']) }} | {{ 'Time(s)'.ljust(10) }}
|
||||
--------------------------------------------------------------------------------------------------------------
|
||||
{% for ltpposixtest in ltpposixtests[machine] |sort %}
|
||||
{{ ltpposixtest.ljust(maxlen['ltpposixtest']) }} | {{ (ltpposixtests[machine][ltpposixtest]['passed']|string).ljust(maxlen['passed']) }} | {{ (ltpposixtests[machine][ltpposixtest]['failed']|string).ljust(maxlen['failed']) }} | {{ (ltpposixtests[machine][ltpposixtest]['skipped']|string).ljust(maxlen['skipped']) }} | {{ (ltpposixtests[machine][ltpposixtest]['duration']|string) }}
|
||||
{% endfor %}
|
||||
--------------------------------------------------------------------------------------------------------------
|
||||
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
|
||||
==============================================================================================================
|
||||
Failed test cases (sorted by testseries, ID)
|
||||
==============================================================================================================
|
||||
{% if havefailed %}
|
||||
--------------------------------------------------------------------------------------------------------------
|
||||
{% for report in reportvalues |sort(attribute='sort') %}
|
||||
{% if report.failed_testcases %}
|
||||
testseries | result_id : {{ report.testseries }} | {{ report.result_id }}
|
||||
{% for testcase in report.failed_testcases %}
|
||||
{{ testcase }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
--------------------------------------------------------------------------------------------------------------
|
||||
{% else %}
|
||||
There were no test failures
|
||||
{% endif %}
|
||||
@@ -0,0 +1,32 @@
|
||||
# Path utility functions for OE python scripts
|
||||
#
|
||||
# Copyright (C) 2012-2014 Intel Corporation
|
||||
# Copyright (C) 2011 Mentor Graphics Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import sys
|
||||
import os
|
||||
import os.path
|
||||
|
||||
def add_oe_lib_path():
    """Prepend the oe-core 'meta/lib' directory (located relative to this
    script's own position in the tree) to sys.path so that the 'oe' Python
    modules become importable."""
    tree_root = os.path.abspath(os.path.dirname(__file__) + '/../..')
    sys.path.insert(0, tree_root + '/meta/lib')
|
||||
|
||||
def add_bitbake_lib_path():
    """Locate a bitbake checkout and prepend its 'lib' directory to sys.path.

    First looks for a bitbake/ directory alongside this script tree, then
    falls back to scanning $PATH for a bitbake 'bin' directory (identified
    by a sibling lib/bb). Returns the bitbake base directory, or None if
    none could be found.
    """
    tree_root = os.path.abspath(os.path.dirname(__file__) + '/../..')
    found = None
    if os.path.exists(tree_root + '/bitbake/lib/bb'):
        # In-tree checkout next to this repository
        found = tree_root + '/bitbake'
    else:
        # Fall back to finding bitbake/bin via PATH
        for entry in os.environ['PATH'].split(':'):
            if os.path.exists(os.path.join(entry, '../lib/bb')):
                found = os.path.abspath(os.path.join(entry, '..'))
                break

    if found:
        sys.path.insert(0, found + '/lib')
    return found
|
||||
@@ -0,0 +1,282 @@
|
||||
# Script utility functions
|
||||
#
|
||||
# Copyright (C) 2014 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import glob
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import shlex
|
||||
import shutil
|
||||
import string
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import threading
|
||||
import importlib
|
||||
import importlib.machinery
|
||||
import importlib.util
|
||||
|
||||
class KeepAliveStreamHandler(logging.StreamHandler):
    """A StreamHandler that emits a periodic "keepalive" log record whenever
    no other record has been emitted for a while.

    Useful when the log consumer (e.g. a CI system) terminates jobs that
    produce no output for too long.
    """
    def __init__(self, keepalive=True, **kwargs):
        """keepalive: True selects the default interval; otherwise a number
        passed to Condition.wait as the timeout between keepalive messages.
        NOTE(review): Condition.wait takes seconds, so the 5000 default is
        ~83 minutes - confirm that is intentional."""
        super().__init__(**kwargs)
        if keepalive is True:
            keepalive = 5000 # default timeout
        self._timeout = threading.Condition()
        self._stop = False

        # background thread waits on condition, if the condition does not
        # happen emit a keep alive message
        def thread():
            while not self._stop:
                with self._timeout:
                    if not self._timeout.wait(keepalive):
                        # wait() timed out without being notified -> no log
                        # activity for a full interval, so emit a keepalive
                        self.emit(logging.LogRecord("keepalive", logging.INFO,
                                                    None, None, "Keepalive message", None, None))

        # daemon=True so an un-closed handler cannot block interpreter exit
        self._thread = threading.Thread(target=thread, daemon=True)
        self._thread.start()

    def close(self):
        """Stop the keepalive thread, then close the underlying handler."""
        # mark the thread to stop and notify it
        self._stop = True
        with self._timeout:
            self._timeout.notify()
        # wait for it to join
        self._thread.join()
        super().close()

    def emit(self, record):
        """Emit the record and reset the keepalive timer."""
        super().emit(record)
        # trigger timer reset
        with self._timeout:
            self._timeout.notify()
|
||||
|
||||
def logger_create(name, stream=None, keepalive=None):
    """Create a logger with a single stream handler at INFO level.

    name: logger name (passed to logging.getLogger)
    stream: stream for the handler (defaults to stderr via StreamHandler)
    keepalive: if not None, use a KeepAliveStreamHandler with this interval
    """
    log = logging.getLogger(name)
    if keepalive is None:
        handler = logging.StreamHandler(stream=stream)
    else:
        handler = KeepAliveStreamHandler(stream=stream, keepalive=keepalive)
    handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
    log.addHandler(handler)
    log.setLevel(logging.INFO)
    return log
|
||||
|
||||
def logger_setup_color(logger, color='auto'):
    """Turn on colored output for any stream handlers on *logger* that use
    bitbake's BBLogFormatter.

    color: 'always' to force color, 'auto' to enable it only when the
    handler's stream is a tty, anything else to leave color off.
    """
    from bb.msg import BBLogFormatter

    for handler in logger.handlers:
        if not isinstance(handler, logging.StreamHandler):
            continue
        if not isinstance(handler.formatter, BBLogFormatter):
            continue
        if color == 'always' or (color == 'auto' and handler.stream.isatty()):
            handler.formatter.enable_color()
|
||||
|
||||
|
||||
def load_plugins(logger, plugins, pluginpath):
    """Import every not-yet-loaded plugin module from *pluginpath* and append
    it to the *plugins* list, calling its plugin_init(plugins) hook if one
    is defined. Modules already present in *plugins* and __init__ are
    skipped."""

    def _module_name(path):
        # plugin name == file name without directory or extension
        return os.path.splitext(os.path.basename(path))[0]

    def _import(name):
        logger.debug('Loading plugin %s' % name)
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
            return module

    already_loaded = [_module_name(p.__name__) for p in plugins]
    logger.debug('Loading plugins from %s...' % pluginpath)
    for path in glob.glob(os.path.join(pluginpath, '*.py')):
        modname = _module_name(path)
        if modname == '__init__' or modname in already_loaded:
            continue
        plugin = _import(modname)
        if hasattr(plugin, 'plugin_init'):
            plugin.plugin_init(plugins)
        plugins.append(plugin)
|
||||
|
||||
|
||||
def git_convert_standalone_clone(repodir):
    """If specified directory is a git repository, ensure it's a standalone clone"""
    import bb.process
    if os.path.exists(os.path.join(repodir, '.git')):
        # The alternates file only exists for repositories cloned with
        # --shared/-s, which borrow objects from another local repo
        alternatesfile = os.path.join(repodir, '.git', 'objects', 'info', 'alternates')
        if os.path.exists(alternatesfile):
            # This will have been cloned with -s, so we need to convert it so none
            # of the contents is shared
            # 'git repack -a' copies all reachable objects into this repo's
            # own pack, after which the alternates link can be removed
            bb.process.run('git repack -a', cwd=repodir)
            os.remove(alternatesfile)
|
||||
|
||||
def _get_temp_recipe_dir(d):
|
||||
# This is a little bit hacky but we need to find a place where we can put
|
||||
# the recipe so that bitbake can find it. We're going to delete it at the
|
||||
# end so it doesn't really matter where we put it.
|
||||
bbfiles = d.getVar('BBFILES').split()
|
||||
fetchrecipedir = None
|
||||
for pth in bbfiles:
|
||||
if pth.endswith('.bb'):
|
||||
pthdir = os.path.dirname(pth)
|
||||
if os.access(os.path.dirname(os.path.dirname(pthdir)), os.W_OK):
|
||||
fetchrecipedir = pthdir.replace('*', 'recipetool')
|
||||
if pthdir.endswith('workspace/recipes/*'):
|
||||
# Prefer the workspace
|
||||
break
|
||||
return fetchrecipedir
|
||||
|
||||
class FetchUrlFailure(Exception):
    """Raised by fetch_url() when a source URI could not be fetched."""
    def __init__(self, url):
        # Keep the failing URI so error reporting can include it
        self.url = url
    def __str__(self):
        return "Failed to fetch URL %s" % self.url
|
||||
|
||||
def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirrors=False):
    """
    Fetch the specified URL using normal do_fetch and do_unpack tasks, i.e.
    any dependencies that need to be satisfied in order to support the fetch
    operation will be taken care of

    tinfoil: a prepared bb.tinfoil.Tinfoil instance (must be able to parse
        recipes and build targets)
    srcuri: SRC_URI value to fetch
    srcrev: SRCREV value to use for the fetch
    destdir: directory the unpacked source is moved into
    logger: logger for progress/error output
    preserve_tmp: if True, keep the temporary work directory and return its
        path instead of deleting it
    mirrors: if True, leave PREMIRRORS/MIRRORS enabled during the fetch

    Returns a (checksums, tmpdir) tuple where checksums maps checksum names
    to values reported by the fetcher, and tmpdir is the preserved temp
    directory (or None unless preserve_tmp was set).

    Raises FetchUrlFailure if the fetch/unpack build fails; calls
    sys.exit(1) if no writeable recipe location can be found in BBFILES.
    """

    import bb

    checksums = {}
    fetchrecipepn = None

    # We need to put our temp directory under ${BASE_WORKDIR} otherwise
    # we may have problems with the recipe-specific sysroot population
    tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
    bb.utils.mkdirhier(tmpparent)
    tmpdir = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
    try:
        tmpworkdir = os.path.join(tmpdir, 'work')
        logger.debug('fetch_url: temp dir is %s' % tmpdir)

        fetchrecipedir = _get_temp_recipe_dir(tinfoil.config_data)
        if not fetchrecipedir:
            logger.error('Searched BBFILES but unable to find a writeable place to put temporary recipe')
            sys.exit(1)
        fetchrecipe = None
        bb.utils.mkdirhier(fetchrecipedir)
        try:
            # Generate a dummy recipe so we can follow more or less normal paths
            # for do_fetch and do_unpack
            # I'd use tempfile functions here but underscores can be produced by that and those
            # aren't allowed in recipe file names except to separate the version
            rndstring = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(8))
            fetchrecipe = os.path.join(fetchrecipedir, 'tmp-recipetool-%s.bb' % rndstring)
            fetchrecipepn = os.path.splitext(os.path.basename(fetchrecipe))[0]
            logger.debug('Generating initial recipe %s for fetching' % fetchrecipe)
            with open(fetchrecipe, 'w') as f:
                # We don't want to have to specify LIC_FILES_CHKSUM
                f.write('LICENSE = "CLOSED"\n')
                # We don't need the cross-compiler
                f.write('INHIBIT_DEFAULT_DEPS = "1"\n')
                # We don't have the checksums yet so we can't require them
                f.write('BB_STRICT_CHECKSUM = "ignore"\n')
                f.write('SRC_URI = "%s"\n' % srcuri)
                f.write('SRCREV = "%s"\n' % srcrev)
                f.write('PV = "0.0+${SRCPV}"\n')
                f.write('WORKDIR = "%s"\n' % tmpworkdir)
                # Set S out of the way so it doesn't get created under the workdir
                f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc'))
                if not mirrors:
                    # We do not need PREMIRRORS since we are almost certainly
                    # fetching new source rather than something that has already
                    # been fetched. Hence, we disable them by default.
                    # However, we provide an option for users to enable it.
                    f.write('PREMIRRORS = ""\n')
                    f.write('MIRRORS = ""\n')

            logger.info('Fetching %s...' % srcuri)

            # FIXME this is too noisy at the moment

            # Parse recipes so our new recipe gets picked up
            tinfoil.parse_recipes()

            # Collect checksums the fetcher reports as missing so the caller
            # can add them to the real recipe afterwards
            def eventhandler(event):
                if isinstance(event, bb.fetch2.MissingChecksumEvent):
                    checksums.update(event.checksums)
                    return True
                return False

            # Run the fetch + unpack tasks
            res = tinfoil.build_targets(fetchrecipepn,
                                        'do_unpack',
                                        handle_events=True,
                                        extra_events=['bb.fetch2.MissingChecksumEvent'],
                                        event_callback=eventhandler)
            if not res:
                raise FetchUrlFailure(srcuri)

            # Remove unneeded directories
            rd = tinfoil.parse_recipe(fetchrecipepn)
            if rd:
                pathvars = ['T', 'RECIPE_SYSROOT', 'RECIPE_SYSROOT_NATIVE']
                for pathvar in pathvars:
                    path = rd.getVar(pathvar)
                    if os.path.exists(path):
                        shutil.rmtree(path)
        finally:
            # Always remove the temporary recipe; the directory is only
            # removed if we were the ones who created it (i.e. it's empty)
            if fetchrecipe:
                try:
                    os.remove(fetchrecipe)
                except FileNotFoundError:
                    pass
            try:
                os.rmdir(fetchrecipedir)
            except OSError as e:
                import errno
                if e.errno != errno.ENOTEMPTY:
                    raise

        bb.utils.mkdirhier(destdir)
        # Move the unpacked output out of the temporary workdir into destdir
        for fn in os.listdir(tmpworkdir):
            shutil.move(os.path.join(tmpworkdir, fn), destdir)

    finally:
        if not preserve_tmp:
            shutil.rmtree(tmpdir)
            tmpdir = None

    return checksums, tmpdir
|
||||
|
||||
|
||||
def run_editor(fn, logger=None):
    """Open the specified file(s) in the user's preferred text editor.

    fn: a single filename or a list of filenames
    logger: optional logger used to report a failed editor invocation

    The editor is taken from $VISUAL, then $EDITOR, falling back to 'vi'.
    Returns the editor's exit status (0 on success), or 1 if the editor
    exited with an error or could not be launched at all.
    """
    if isinstance(fn, str):
        files = [fn]
    else:
        files = fn

    editor = os.getenv('VISUAL', os.getenv('EDITOR', 'vi'))
    try:
        return subprocess.check_call(shlex.split(editor) + files)
    except (subprocess.CalledProcessError, FileNotFoundError) as exc:
        # Bug fix: previously logger.error() was called unconditionally,
        # raising AttributeError when logger was None (its default) and
        # masking the real failure. Also handle a missing editor binary.
        if logger:
            logger.error("Execution of '%s' failed: %s" % (editor, exc))
        return 1
|
||||
|
||||
def is_src_url(param):
    """
    Check if a parameter is a URL and return True if so
    NOTE: be careful about changing this as it will influence how devtool/recipetool command line handling works
    """
    if not param:
        return False
    if '://' in param:
        return True
    # scp-style git addresses: git@host:path, or user@host/path.git
    return param.startswith('git@') or ('@' in param and param.endswith('.git'))
|
||||
|
||||
def filter_src_subdirs(pth):
    """
    Filter out subdirectories of initial unpacked source trees that we do not care about.
    Used by devtool and recipetool.
    """
    # Entries created by the fetch/unpack machinery, not part of the source
    unwanted = ('git.indirectionsymlink', 'source-date-epoch')
    return [entry for entry in os.listdir(pth) if entry not in unwanted]
|
||||
@@ -0,0 +1,10 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright (c) 2007 Red Hat, Inc.
|
||||
# Copyright (c) 2011 Intel, Inc.
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
class WicError(Exception):
    """Base exception for errors raised by the wic image creation tool."""
    pass
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user