Skip to content

Instantly share code, notes, and snippets.

@kergoth
Last active December 14, 2015 00:39
Show Gist options
  • Save kergoth/5001000 to your computer and use it in GitHub Desktop.
#!/usr/bin/env python
# Usage: bb test-tasks
# Summary: Experiments with bbtasks.py
# Help: See bb test-tasks -h for detailed usage information.
def setup_command_import(command, relpath='../lib'):
    """Set up the import path based on the location of a binary in the PATH.

    Finds every PATH directory containing `command` and prepends the sibling
    directory `relpath` (relative to that PATH entry) to sys.path, so the
    command's private library modules become importable.

    Raises ImportError if `command` is not found anywhere on PATH.
    """
    # os.environ.get rather than os.getenv(...).split: a missing PATH must
    # not raise AttributeError, it should fall through to the ImportError.
    path_dirs = os.environ.get('PATH', '').split(':')
    cmd_paths = [os.path.join(path, relpath)
                 for path in path_dirs
                 if os.path.exists(os.path.join(path, command))]
    if not cmd_paths:
        # Name the command we actually looked for, not a hardcoded 'bb'.
        raise ImportError("Unable to locate %s, please ensure PATH is set correctly." % command)
    sys.path[0:0] = cmd_paths
import logging
import os
import sys
# This call is only needed if this bb subcommand doesn't live in bb/libexec/
setup_command_import('bb', '../libexec')
# bbcmd/bbtasks only become importable once setup_command_import has
# prepended bb's libexec directory to sys.path, so keep this order.
import bbcmd
import bbtasks
# Module-level logger under the 'bb' namespace handled by bbcmd's log setup.
logger = logging.getLogger('bb.test_tasks')
def test_tasks(args):
    """Generate the bitbake task dependency graph and print it in dot format.

    args: parsed argparse namespace from bbcmd.CompleteParser; if
    args.tmpdir or args.topdir is set, also print the corresponding
    bitbake variable after the graph.
    """
    # tinfoil sets up log output for the bitbake loggers, but bb uses
    # a separate namespace at this time
    bbcmd.setup_log_handler(logging.getLogger('bb'))
    # Log output goes to stderr so the dot graph on stdout stays clean.
    tinfoil = bbcmd.Tinfoil(output=sys.stderr)
    # tinfoil.prepare(config_only=True) - if we don't need recipes parsed
    tinfoil.prepare()
    cache_data = tinfoil.cooker.status
    graph = bbtasks.generate_task_graph(cache_data, tinfoil.localdata)
    print(graph.to_dot())
    if args.tmpdir:
        print(tinfoil.localdata.getVar('TMPDIR', True))
    elif args.topdir:
        print(tinfoil.localdata.getVar('TOPDIR', True))
# provide bb completions
def main(arguments):
    """Parse the command line and dispatch to test_tasks.

    Returns None when only shell completions were requested, otherwise
    whatever test_tasks returns.
    """
    parser = bbcmd.CompleteParser()
    parsed = parser.parse_args(arguments)
    # parse_args returns None when it handled a completion request itself.
    if parsed is None:
        return None
    return test_tasks(parsed)
# bbcmd.run_main wraps main() with the bb subcommand entry-point boilerplate.
if __name__ == '__main__':
    bbcmd.run_main(main)
from collections import defaultdict
import bb.providers
class TaskGraph(object):
    """Directed dependency graph of bitbake tasks.

    Nodes are small integer ids allocated by get_taskid for
    (filename, taskname) pairs; edges run from a task to the tasks it
    depends on, optionally labelled with a note string.
    """

    def __init__(self):
        self.task_count = 0               # monotonically increasing id source
        self.nodes = set()                # all known task ids
        self.taskids = {}                 # (filename, task) -> id
        self.depends = defaultdict(set)   # id -> set of ids it depends on
        self.parents = defaultdict(set)   # reverse edges: id -> dependants
        self.node_data = {}               # id -> (filename, task)
        self.notes = {}                   # (sourceid, destid) -> edge label

    def get_taskid(self, filename, task):
        """Return the node id for (filename, task), allocating one if new."""
        key = (filename, task)
        if key not in self.taskids:
            self.task_count += 1
            self.taskids[key] = self.task_count
        taskid = self.taskids[key]
        self.node_data[taskid] = key
        # set.add is idempotent; no membership check needed.
        self.nodes.add(taskid)
        return taskid

    def add_task(self, taskid):
        """Register an externally allocated task id as a node."""
        self.nodes.add(taskid)

    def set_edge_note(self, sourceid, destid, note):
        """Attach a label to the sourceid -> destid edge."""
        self.notes[(sourceid, destid)] = note

    def get_edge_note(self, sourceid, destid):
        """Return the label of the sourceid -> destid edge (KeyError if none)."""
        return self.notes[(sourceid, destid)]

    def add_dependency(self, sourceid, destid, note=None):
        """Record that sourceid depends on destid, optionally labelled."""
        self.depends[sourceid].add(destid)
        self.parents[destid].add(sourceid)
        if note is not None:
            self.notes[(sourceid, destid)] = note

    def task_data(self, taskid):
        """Return the (filename, task) pair for a node id."""
        return self.node_data[taskid]

    def iter_leaves(self):
        """Yield every node with no outgoing dependencies.

        Fix: iterate over all known nodes instead of only the keys of
        self.depends — a node that never appeared as a dependency source
        has no entry in the defaultdict and was previously never yielded.
        .get avoids materialising empty entries in the defaultdict.
        """
        for node in self.nodes:
            if not self.depends.get(node):
                yield node

    def walk_up(self, start=None):
        """Yield start (or every leaf) followed by its ancestors, depth-first.

        NOTE(review): no cycle protection; a cyclic graph recurses forever.
        """
        if start is not None:
            start_nodes = (start,)
        else:
            start_nodes = self.iter_leaves()
        for node in start_nodes:
            yield node
            for parent in self.parents[node]:
                for parent_node in self.walk_up(parent):
                    yield parent_node

    def walk(self, start):
        """Yield start followed by its dependencies, depth-first (no cycle protection)."""
        yield start
        for node in self.depends[start]:
            for depend_node in self.walk(node):
                yield depend_node

    def to_dot(self):
        """Render the graph as a graphviz dot string (requires pydot)."""
        import pydot  # third-party; imported lazily so the rest works without it
        dot_graph = pydot.Dot(graph_type='digraph')
        for sourceid, destids in self.depends.items():
            for destid in destids:
                note = self.notes.get((sourceid, destid))
                dot_graph.add_edge(pydot.Edge(sourceid, destid, label=note))
        return dot_graph.to_string()
def prepare_cache_data(cache_data, cfgdata):
    """Annotate the cooker cache with preferred providers and per-file maps.

    Adds to cache_data: preferred_provide_fn, fn_rprovides, fn_packages,
    fn_packages_dynamic, fn_all_rprovides and preferred_rprovide_fn.
    """
    def pick(filter_func, name, fns):
        # First eligible filename wins; the uniqueness flag is unused.
        eligible, _found_unique = filter_func(fns, name, cfgdata, cache_data)
        return eligible[0]

    # Preferred build-time provider filename for each PROVIDES entry.
    cache_data.preferred_provide_fn = dict(
        (provide, pick(bb.providers.filterProviders, provide, fns))
        for provide, fns in cache_data.providers.items())

    fn_all_rprovides = defaultdict(list)

    # filename -> set of its RPROVIDES entries.
    fn_rprovides = defaultdict(set)
    for rprovide, fns in cache_data.rproviders.items():
        for fn in fns:
            fn_rprovides[fn].add(rprovide)
            fn_all_rprovides[fn].append(rprovide)
    cache_data.fn_rprovides = fn_rprovides

    # filename -> list of packages it builds; packages also count as
    # runtime provides.
    fn_packages = defaultdict(list)
    for package, fns in cache_data.packages.items():
        for fn in fns:
            fn_packages[fn].append(package)
            fn_all_rprovides[fn].append(package)
    cache_data.fn_packages = fn_packages

    # filename -> dynamic package patterns (not folded into rprovides).
    fn_packages_dynamic = defaultdict(list)
    for dynamic, fns in cache_data.packages_dynamic.items():
        for fn in fns:
            fn_packages_dynamic[fn].append(dynamic)
    cache_data.fn_packages_dynamic = fn_packages_dynamic
    cache_data.fn_all_rprovides = fn_all_rprovides

    # Preferred runtime provider filename for each RPROVIDES entry.
    cache_data.preferred_rprovide_fn = dict(
        (rprovide, pick(bb.providers.filterProvidersRunTime, rprovide, fns))
        for rprovide, fns in cache_data.rproviders.items())
def prepare_task_deps(cache_data, filename):
    """Compute per-task dependency tuples for one recipe file.

    Returns (full_depends, full_rdepends): each maps a task name to a set
    of (provide, deptask, origin) tuples, where origin is the task flag
    that produced the entry ('depends', 'rdepends', 'deptask', 'rdeptask'
    or 'recrdeptask').
    """
    full_depends = defaultdict(set)
    full_rdepends = defaultdict(set)
    task_deps = cache_data.task_deps[filename]

    # Recipe-level DEPENDS / RDEPENDS; extended below by task-level flags.
    depends = set(cache_data.deps[filename])
    rdepends = set()
    for package, rdeps in cache_data.rundeps[filename].items():
        rdepends |= set(rdeps)

    # 'depends' flag: explicit "provide:task" inter-task dependencies.
    tdepends = task_deps.get('depends') or {}
    for task, dep_string in tdepends.items():
        for dep in dep_string.split():
            provide, deptask = dep.split(':')
            full_depends[task].add((provide, deptask, 'depends'))
            depends.add(provide)

    # 'rdepends' flag: explicit "rprovide:task" runtime dependencies.
    trdepends = task_deps.get('rdepends') or {}
    for task, rdep_string in trdepends.items():
        for rdep in rdep_string.split():
            rprovide, deptask = rdep.split(':')
            full_rdepends[task].add((rprovide, deptask, 'rdepends'))
            rdepends.add(rprovide)

    # 'deptask': run the named task of every build-time dependency.
    tdeptasks = task_deps.get('deptask') or {}
    for task, deptasks_string in tdeptasks.items():
        for deptask in deptasks_string.split():
            full_depends[task] |= set((dep, deptask, 'deptask')
                                      for dep in depends)

    # 'rdeptask': run the named task of every runtime dependency.
    trdeptasks = task_deps.get('rdeptask') or {}
    for task, rdeptasks_string in trdeptasks.items():
        for deptask in rdeptasks_string.split():
            full_rdepends[task] |= set((rdep, deptask, 'rdeptask')
                                       for rdep in rdepends)

    # 'recrdeptask': recursive runtime dependency tasks, covering both
    # build-time and runtime provides.  Fix: the original looped over
    # tdeptasks again (copy/paste), so this flag was silently ignored and
    # its entries were mislabelled 'deptask'.
    recrdeptasks = task_deps.get('recrdeptask') or {}
    for task, deptasks_string in recrdeptasks.items():
        for deptask in deptasks_string.split():
            full_depends[task] |= set((dep, deptask, 'recrdeptask')
                                      for dep in depends)
            full_rdepends[task] |= set((rdep, deptask, 'recrdeptask')
                                       for rdep in rdepends)
    return full_depends, full_rdepends
def generate_task_graph(cache_data, cfgdata):
    """Build a TaskGraph covering every parsed recipe in the cooker cache.

    Lazily runs prepare_cache_data the first time, then adds one edge per
    intra-recipe ordering dependency and per build-/run-time provider
    dependency returned by prepare_task_deps.
    """
    graph = TaskGraph()
    if not hasattr(cache_data, 'preferred_provide_fn'):
        prepare_cache_data(cache_data, cfgdata)

    for filename in cache_data.file_checksums:
        task_deps = cache_data.task_deps[filename]

        # Intra-recipe ordering dependencies ('deps' flag).
        deps = task_deps.get('deps')
        if deps:
            for taskname, taskdeps in deps.items():
                # Fix: use numeric ids from get_taskid like the loops
                # below; the original passed raw (filename, task) tuples
                # here, mixing two node key spaces in one graph.  Also
                # drop the no-op 'deps'.format(taskname) call.
                sourceid = graph.get_taskid(filename, taskname)
                for taskdep in taskdeps:
                    destid = graph.get_taskid(filename, taskdep)
                    graph.add_dependency(sourceid, destid, 'deps')

        task_depends, task_rdepends = prepare_task_deps(cache_data, filename)
        for task, task_dep_set in task_depends.items():
            sourceid = graph.get_taskid(filename, task)
            for depprovide, deptask, note in task_dep_set:
                dep_filename = cache_data.preferred_provide_fn[depprovide]
                destid = graph.get_taskid(dep_filename, deptask)
                graph.add_dependency(sourceid, destid, note)
        for task, task_rdep_set in task_rdepends.items():
            sourceid = graph.get_taskid(filename, task)
            for deprprovide, deptask, note in task_rdep_set:
                dep_filename = cache_data.preferred_rprovide_fn[deprprovide]
                destid = graph.get_taskid(dep_filename, deptask)
                graph.add_dependency(sourceid, destid, note)
    return graph
# def generate_task_graph(cache_data):
# graph = TaskGraph()
# for filename in cache_data.file_checksums:
# task_depends = defaultdict(set)
# task_rdepends = defaultdict(set)
# deps = task_deps.get('deps')
# if deps:
# for taskname, taskdeps in deps.iteritems():
# sourceid = (filename, taskname)
# for taskdep in taskdeps:
# destid = (filename, taskdep)
# graph.add_dependency(sourceid, destid, 'deps'.format(taskname))
# tdepends = task_deps.get('depends')
# if tdepends:
# for task, dep_string in tdepends.iteritems():
# provide, deptask = dep_string.split(':')
# task_depends[task].add((provide, deptask, 'depends'))
# trdepends = task_deps.get('rdepends')
# if trdepends:
# for task, rdep_string in trdepends.iteritems():
# rprovide, deptask = dep_string.split(':')
# task_rdepends[task].add((rprovide, deptask, 'rdepends'))
# for task, deps in task_depends.iteritems():
# sourceid = graph.get_taskid(filename, task)
# for depprovide, deptask, note in deps:
# dep_filename = cache_data.preferred_provide_fn[depprovide]
# destid = graph.get_taskid(dep_filename, deptask)
# graph.add_dependency(sourceid, destid, note)
# for task, rdeps in task_rdepends.iteritems():
# sourceid = graph.get_taskid(filename, task)
# for deprprovide, deptask, note in deps:
# dep_filename = cache_data.preferred_rprovide_fn[deprprovide]
# destid = graph.get_taskid(dep_filename, deptask)
# graph.add_dependency(sourceid, destid, note)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment