Created June 15, 2016 02:32.
Save mpkocher/2e8af3126877818a721da9c398165619 to your computer and use it in GitHub Desktop.
Get Most Recent Analysis Job from SubreadSet by UUID
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python | |
import os | |
import sys | |
import logging | |
from pbcommand.utils import setup_log | |
try: | |
import pbcommand | |
except ImportError: | |
sys.stderr.write("Please install pbcommand https://github.com/PacificBiosciences/pbcommand\n") | |
raise | |
from pbcommand.cli import (get_default_argparser_with_base_opts, pacbio_args_runner) | |
from pbcommand.services.service_access_layer import ServiceAccessLayer, rqget | |
__version__ = "0.1.0" | |
log = logging.getLogger(__name__) | |
class NotFoundException(Exception):
    """Raised when a requested resource (dataset or job) cannot be found.

    Derives from Exception rather than BaseException (PEP 8): deriving from
    BaseException would let instances escape ``except Exception`` handlers,
    which is reserved for interpreter-level exits like KeyboardInterrupt.
    """
    pass
def or_raise_not_found(result, msg):
    """Return *result* unchanged, unless it is None.

    :param result: value to validate; any non-None value passes through
    :param msg: message for the exception raised on a None result
    :raises NotFoundException: when result is None
    """
    if result is not None:
        return result
    raise NotFoundException(msg)
def get_job_by_subreadset_uuid(sal, uuid):
    """Find the most recent SUCCESSFUL analysis job that used the SubreadSet.

    Looks up the SubreadSet by UUID, logs its import job, then scans all
    analysis jobs newest-first for a SUCCESSFUL job whose entry points
    include this dataset UUID.

    :type sal: ServiceAccessLayer
    :param uuid: SubreadSet UUID
    :returns: the newest SUCCESSFUL analysis job referencing the SubreadSet
    :raises NotFoundException: if the dataset's import job or a matching
        SUCCESSFUL analysis job cannot be found
    """
    ds = sal.get_subreadset_by_id(uuid)
    import_job_id = or_raise_not_found(ds['jobId'], "Unable to find {}".format(uuid))
    # for the Report Metrics
    import_job = or_raise_not_found(sal.get_job_by_id(import_job_id), "Unable to find Import Job {}".format(import_job_id))
    # Single-argument print(...) is valid in both Python 2 and Python 3
    # (fixes the Python-2-only "print x" statement syntax).
    print("Found import job for SubreadSet")
    print("UUID : {}".format(uuid))
    print("Import Job Id : {}".format(ds['jobId']))
    print("context : {}".format(ds['metadataContextId']))
    print("path : {}".format(ds['path']))
    log.info("Import Job for SubreadSet {}".format(uuid))
    log.info(import_job)
    # This is really brutal. Reverse the order and Assume newer Analysis jobs
    # are what the user is interested in.
    all_jobs = sal.get_analysis_jobs()
    all_jobs.reverse()
    log.info("Found {} total analysis jobs".format(len(all_jobs)))
    for job in all_jobs:
        if job.state == "SUCCESSFUL":
            epoints = sal.get_analysis_job_entry_points(job.id)
            for ep in epoints:
                if ep.dataset_uuid == uuid:
                    return job
    raise NotFoundException("Unable to find SUCCESSFUL analysis job for SubreadSet UUID {}".format(uuid))
def get_parser():
    """Build the CLI parser: positional SubreadSet UUID plus host/port options."""
    parser = get_default_argparser_with_base_opts(
        __version__, __doc__, default_level=logging.ERROR)
    parser.add_argument('subreadset_uuid', type=str, help="SubreadSet UUID")
    parser.add_argument('--host', type=str, default="smrtlink-beta",
                        help="SMRT Link Host")
    parser.add_argument('--port', type=int, default=8081,
                        help="SMRT Link Port")
    return parser
def run_main(host, port, subreadset_uuid):
    """Resolve the newest successful analysis job for a SubreadSet and print it.

    :param host: SMRT Link host name
    :param port: SMRT Link port
    :param subreadset_uuid: SubreadSet UUID to search by
    :returns: 0 (exit code) on success
    :raises NotFoundException: if no matching SUCCESSFUL job exists
    """
    sal = ServiceAccessLayer(host, port)
    job = get_job_by_subreadset_uuid(sal, subreadset_uuid)
    # Single-argument print(...) is valid in both Python 2 and Python 3
    # (fixes the Python-2-only "print x" statement syntax).
    print("Most Recent Successful Analysis Job on {}".format(sal))
    print("id : {}".format(job.id))
    print("uuid : {}".format(job.uuid))
    print("name : {}".format(job.name))
    print("created at : {}".format(job.created_at))
    print("path : {}".format(job.path))
    return 0
def args_runner(args):
    """Adapter from the parsed argparse namespace to run_main()."""
    log.info("Raw args {a}".format(a=args))
    host, port, uuid = args.host, args.port, args.subreadset_uuid
    return run_main(host, port, uuid)
def main(argv):
    """CLI entry point: hand argv (minus program name) to the pbcommand runner."""
    parser = get_parser()
    return pacbio_args_runner(
        argv[1:], parser, args_runner, log, setup_log_func=setup_log)
# Script entry point: exit with the status code returned by main().
if __name__ == '__main__':
    sys.exit(main(sys.argv))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.