Last active
December 1, 2023 15:04
-
-
Save kwk/f47d6a8083534639ddf67c1350e3e1e2 to your computer and use it in GitHub Desktop.
OMP GDB Problem
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/../gdb-plugin/python-module/ompd/__init__.py
import sys
import os.path
import traceback

# Entry point when this file is sourced by gdb (gdb -x .../__init__.py):
# make the directory holding the ompd package importable, then load it and
# register the "ompd ..." commands with gdb.
if __name__ == "__main__":
    try:
        # The ompd package lives next to this bootstrap file.
        sys.path.append(os.path.dirname(__file__))
        import ompd

        ompd.main()
        print("OMPD GDB support loaded")
        print("Run 'ompd init' to start debugging")
    except Exception as e:
        traceback.print_exc()
        print("Error: OMPD support could not be loaded", e)
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
$ env OMP_DEBUG=enabled gdb -x /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/../gdb-plugin/python-module/ompd/__init__.py -x /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c.cmd /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/api_tests/Output/test_ompd_get_state.c.tmp | |
GNU gdb (GDB) Fedora Linux 13.2-3.fc38 | |
Copyright (C) 2023 Free Software Foundation, Inc. | |
License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html> | |
This is free software: you are free to change and redistribute it. | |
There is NO WARRANTY, to the extent permitted by law. | |
Type "show copying" and "show warranty" for details. | |
This GDB was configured as "x86_64-redhat-linux-gnu". | |
Type "show configuration" for configuration details. | |
For bug reporting instructions, please see: | |
<https://www.gnu.org/software/gdb/bugs/>. | |
Find the GDB manual and other documentation resources online at: | |
<http://www.gnu.org/software/gdb/documentation/>. | |
For help, type "help". | |
Type "apropos word" to search for commands related to "word"... | |
Reading symbols from /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/api_tests/Output/test_ompd_get_state.c.tmp... | |
OMPD GDB support loaded | |
Run 'ompd init' to start debugging | |
Temporary breakpoint 1 at 0x40116f: file /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c, line 9. | |
This GDB supports auto-downloading debuginfo from the following URLs: | |
<https://debuginfod.fedoraproject.org/> | |
Enable debuginfod for this session? (y or [n]) [answered N; input not from terminal] | |
Debuginfod has been disabled. | |
To make this setting permanent, add 'set debuginfod enabled off' to .gdbinit. | |
[Thread debugging using libthread_db enabled] | |
Using host libthread_db library "/lib64/libthread_db.so.1". | |
Temporary breakpoint 1, main () at /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c:9 | |
9 omp_set_num_threads(2); | |
Temporary breakpoint 2 at 0x7ffff7f8c6a0 | |
OMP_OMPD active | |
Temporary breakpoint 2, 0x00007ffff7f8c6a0 in ompd_dll_locations_valid() () | |
from /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0~pre20231128.ge1f911e40ce6ad-1.fc38.x86_64//usr/lib64/libomp.so | |
Loaded OMPD lib successfully! | |
Breakpoint 3 at 0x4011b0: file /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c, line 11. | |
[New Thread 0x7ffff791db40 (LWP 576885)] | |
Thread 1 "test_ompd_get_s" hit Breakpoint 3, main.omp_outlined_debug__ (.global_tid.=0x7fffffffd560, .bound_tid.=0x7fffffffd558) | |
at /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c:11 | |
11 { printf("Parallel level 1, thread num = %d.\n", omp_get_thread_num()); } | |
Traceback (most recent call last): | |
File "/home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/../gdb-plugin/python-module/ompd/ompd_callbacks.py", line 30, in _sym_addr | |
return int(gdb.parse_and_eval("&" + symbol_name)) | |
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | |
gdb.error: No symbol "ompd_sizeof____kmp_gtid" in current context. | |
Traceback (most recent call last): | |
File "/home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/../gdb-plugin/python-module/ompd/ompd_callbacks.py", line 60, in _read | |
ret_buf.append(int(buf[i])) | |
^^^^^^^^^^^ | |
gdb.MemoryError: Cannot access memory at address 0xffffffffffffffff | |
Fatal signal: Segmentation fault | |
----- Backtrace ----- | |
0x56051cfea8a0 ??? | |
0x56051d1254cd ??? | |
0x56051d125699 ??? | |
0x7fa59c85fb6f ??? | |
0x7fa599640ab9 Py_TYPE | |
/usr/include/python3.11/object.h:133 | |
0x7fa599640ab9 Py_IS_TYPE | |
/usr/include/python3.11/object.h:150 | |
0x7fa599640ab9 PyObject_TypeCheck | |
/usr/include/python3.11/object.h:263 | |
0x7fa599640ab9 _read | |
/home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/gdb-plugin/ompdModule.c:686 | |
0x7fa580267b0e ??? | |
0x7fa58026e426 ??? | |
0x7fa5996411f8 get_thread_handle | |
/home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/gdb-plugin/ompdModule.c:863 | |
0x7fa59cfcdc77 ??? | |
0x7fa59cfb0d92 ??? | |
0x7fa59cfb97c3 ??? | |
0x7fa59cfb57d9 ??? | |
0x7fa59cfcd3e5 ??? | |
0x7fa59cff1066 ??? | |
0x56051d2b3ccf ??? | |
0x56051d025104 ??? | |
0x56051d4256c4 ??? | |
0x56051d125cce ??? | |
0x56051d42428c ??? | |
0x56051d035df8 ??? | |
0x56051d02324a ??? | |
0x56051d20ba39 ??? | |
0x56051d20bae7 ??? | |
0x56051d20de28 ??? | |
0x56051d20e9df ??? | |
0x56051cefba6e ??? | |
0x7fa59c849b49 ??? | |
0x7fa59c849c0a ??? | |
0x56051cf06ea4 ??? | |
0xffffffffffffffff ??? | |
--------------------- | |
A fatal error internal to GDB has been detected, further | |
debugging is not possible. GDB will now terminate. | |
This is a bug, please report it. For instructions, see: | |
<https://www.gnu.org/software/gdb/bugs/>. | |
Segmentation fault (core dumped) |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/gdb-plugin/python-module/ompd/ompd.py | |
import ompdModule | |
import gdb | |
import re | |
import traceback | |
from ompd_address_space import ompd_address_space | |
from ompd_handles import ompd_thread, ompd_task, ompd_parallel | |
from frame_filter import FrameFilter | |
from enum import Enum | |
import sys | |
# Plugin-wide state, populated by "ompd init".
addr_space = None  # ompd_address_space for the debuggee, None until init
ff = None  # FrameFilter instance used by "ompd bt"
icv_map = None  # lazily-built map: ICV name -> (id, scope, scope name)

# OMPD scope identifiers -> human-readable scope names.
ompd_scope_map = {
    1: "global",
    2: "address_space",
    3: "thread",
    4: "parallel",
    5: "implicit_task",
    6: "task",
}

# Set by TempFrameFunctionBp.stop() once execution enters the task function;
# consulted by "ompd step" to decide between "finish" and "si".
in_task_function = False
class ompd(gdb.Command):
    """Parent command: registers the "ompd" prefix so that subcommands
    like "ompd init" and "ompd threads" can hang off it."""

    def __init__(self):
        # True as the last argument makes "ompd" a prefix command.
        super().__init__("ompd", gdb.COMMAND_STATUS, gdb.COMPLETE_NONE, True)
class ompd_init(gdb.Command):
    """Find and initialize ompd library"""

    def __init__(self):
        self.__doc__ = "Find and initialize OMPD library\n usage: ompd init"
        super().__init__("ompd init", gdb.COMMAND_DATA)

    def invoke(self, arg, from_tty):
        """Locate the OMPD DSO advertised by the runtime, dlopen it via
        ompdModule and set up the global address space and frame filter."""
        global addr_space
        global ff
        try:
            # If there is no frame yet the inferior has not started: do so.
            try:
                print(gdb.newest_frame())
            except:
                gdb.execute("start")
            try:
                lib_list = gdb.parse_and_eval("(char**)ompd_dll_locations")
            except gdb.error:
                raise ValueError(
                    "No ompd_dll_locations symbol in execution, make sure to have an OMPD enabled OpenMP runtime"
                )
            # The runtime fills ompd_dll_locations lazily; run until the
            # ompd_dll_locations_valid() notification fires.
            while gdb.parse_and_eval("(char**)ompd_dll_locations") == False:
                gdb.execute("tbreak ompd_dll_locations_valid")
                gdb.execute("continue")
            lib_list = gdb.parse_and_eval("(char**)ompd_dll_locations")
            # Try every advertised location until one loads.
            i = 0
            while lib_list[i]:
                ret = ompdModule.ompd_open(lib_list[i].string())
                if ret == -1:
                    raise ValueError("Handle of OMPD library is not a valid string!")
                if ret == -2:
                    print("ret == -2")
                    pass  # It's ok to fail on dlopen
                if ret == -3:
                    print("ret == -3")
                    pass  # It's ok to fail on dlsym
                if ret < -10:
                    raise ValueError("OMPD error code %i!" % (-10 - ret))
                if ret > 0:
                    print("Loaded OMPD lib successfully!")
                    try:
                        addr_space = ompd_address_space()
                        ff = FrameFilter(addr_space)
                    except:
                        traceback.print_exc()
                    return
                i += 1
            raise ValueError("OMPD library could not be loaded!")
        except:
            traceback.print_exc()
class ompd_threads(gdb.Command):
    """Register thread ids of current context"""

    def __init__(self):
        self.__doc__ = (
            "Provide information on threads of current context.\n usage: ompd threads"
        )
        super().__init__("ompd threads", gdb.COMMAND_STATUS)

    def invoke(self, arg, from_tty):
        """Print the OpenMP threads of the current context (requires init)."""
        global addr_space
        if init_error():
            return
        # True -> verbose listing.
        addr_space.list_threads(True)
def print_parallel_region(curr_parallel, team_size):
    """Helper function for ompd_parallel_region. To print out the details of the parallel region."""
    for omp_thr in range(team_size):
        thread = curr_parallel.get_thread_in_parallel(omp_thr)
        # get_state() returns (state-enum, wait-id).
        state_name = str(addr_space.states[thread.get_state()[0]])
        wait_id = thread.get_state()[1]
        task = curr_parallel.get_task_in_parallel(omp_thr)
        entry_addr = task.get_task_function()
        # Map the task entry address back to a function and a source line.
        sal = gdb.find_pc_line(entry_addr)
        blk = gdb.block_for_pc(entry_addr)
        while blk and not blk.function:
            blk = blk.superblock
        # Thread 0 is the master thread and is labeled as such.
        fmt = (
            "%6d (master) %-37s %ld 0x%lx %-25s %-17s:%d"
            if omp_thr == 0
            else "%6d %-37s %ld 0x%lx %-25s %-17s:%d"
        )
        print(
            fmt
            % (
                omp_thr,
                state_name,
                wait_id,
                entry_addr,
                blk.function.print_name,
                sal.symtab.filename,
                sal.line,
            )
        )
class ompd_parallel_region(gdb.Command):
    """Parallel Region Details"""

    def __init__(self):
        self.__doc__ = "Display the details of the current and enclosing parallel regions.\n usage: ompd parallel"
        super().__init__("ompd parallel", gdb.COMMAND_STATUS)

    def invoke(self, arg, from_tty):
        """Walk from the innermost parallel region outwards, printing a
        per-thread table for each nesting level."""
        global addr_space
        if init_error():
            return
        # Lazily populate the ICV map and the state-name enumeration.
        if addr_space.icv_map is None:
            addr_space.get_icv_map()
        if addr_space.states is None:
            addr_space.enumerate_states()
        thread = addr_space.get_curr_thread()
        parallel_handle = thread.get_current_parallel_handle()
        parallel = ompd_parallel(parallel_handle)
        while parallel_handle is not None and parallel is not None:
            nest_level = ompdModule.call_ompd_get_icv_from_scope(
                parallel_handle,
                addr_space.icv_map["levels-var"][1],
                addr_space.icv_map["levels-var"][0],
            )
            # Nesting level 0 means we have left all parallel regions.
            if nest_level == 0:
                break
            team_size = ompdModule.call_ompd_get_icv_from_scope(
                parallel_handle,
                addr_space.icv_map["team-size-var"][1],
                addr_space.icv_map["team-size-var"][0],
            )
            print("")
            print(
                "Parallel Region: Nesting Level %d: Team Size: %d"
                % (nest_level, team_size)
            )
            print("================================================")
            print("")
            print(
                "OMP Thread Nbr Thread State Wait Id EntryAddr FuncName File:Line"
            )
            print(
                "======================================================================================================"
            )
            print_parallel_region(parallel, team_size)
            # Move one nesting level outwards.
            next_parallel = parallel.get_enclosing_parallel()
            next_parallel_handle = parallel.get_enclosing_parallel_handle()
            parallel = next_parallel
            parallel_handle = next_parallel_handle
class ompd_icvs(gdb.Command):
    """ICVs"""

    def __init__(self):
        self.__doc__ = (
            "Display the values of the Internal Control Variables.\n usage: ompd icvs"
        )
        super().__init__("ompd icvs", gdb.COMMAND_STATUS)

    def invoke(self, arg, from_tty):
        """Print one line per ICV: its name, scope name, and current value."""
        global addr_space
        global ompd_scope_map
        if init_error():
            return
        thread = addr_space.get_curr_thread()
        if addr_space.icv_map is None:
            addr_space.get_icv_map()
        print("ICV Name Scope Value")
        print("===============================================================")
        try:
            for icv_name in addr_space.icv_map:
                icv_id = addr_space.icv_map[icv_name][0]
                scope = addr_space.icv_map[icv_name][1]
                # Pick the handle that matches the ICV's scope; see
                # ompd_scope_map for the numbering. Scopes 1 (global) and
                # 5 (implicit_task) are not queried here.
                if scope == 2:
                    handle = addr_space.addr_space
                elif scope == 3:
                    handle = thread.thread_handle
                elif scope == 4:
                    handle = thread.get_current_parallel_handle()
                elif scope == 6:
                    handle = thread.get_current_task_handle()
                else:
                    raise ValueError("Invalid scope")
                if icv_name in ("nthreads-var", "bind-var"):
                    # These may be list-valued: numeric when a single value
                    # is available, otherwise fall back to the string form.
                    icv_value = ompdModule.call_ompd_get_icv_from_scope(
                        handle, scope, icv_id
                    )
                    if icv_value is None:
                        icv_string = ompdModule.call_ompd_get_icv_string_from_scope(
                            handle, scope, icv_id
                        )
                        print(
                            "%-31s %-26s %s"
                            % (icv_name, ompd_scope_map[scope], icv_string)
                        )
                    else:
                        print(
                            "%-31s %-26s %d"
                            % (icv_name, ompd_scope_map[scope], icv_value)
                        )
                elif icv_name in (
                    "affinity-format-var",
                    "run-sched-var",
                    "tool-libraries-var",
                    "tool-verbose-init-var",
                ):
                    # Inherently string-valued ICVs.
                    icv_string = ompdModule.call_ompd_get_icv_string_from_scope(
                        handle, scope, icv_id
                    )
                    print(
                        "%-31s %-26s %s" % (icv_name, ompd_scope_map[scope], icv_string)
                    )
                else:
                    icv_value = ompdModule.call_ompd_get_icv_from_scope(
                        handle, scope, icv_id
                    )
                    print(
                        "%-31s %-26s %d" % (icv_name, ompd_scope_map[scope], icv_value)
                    )
        except:
            traceback.print_exc()
def curr_thread():
    """Helper function for ompd_step. Returns the thread object for the current thread number."""
    global addr_space
    # Before "ompd init" there is no address space to look threads up in.
    if addr_space is None:
        return None
    return addr_space.threads[int(gdb.selected_thread().num)]
class ompd_test(gdb.Command):
    """Test area"""

    def __init__(self):
        self.__doc__ = "Test functionalities for correctness\n usage: ompd test"
        super().__init__("ompd test", gdb.COMMAND_OBSCURE)

    def invoke(self, arg, from_tty):
        """Smoke-test task-function lookups for the selected thread."""
        global addr_space
        if init_error():
            return
        try:
            # Task function for the current task of the selected thread.
            thr_num = int(gdb.selected_thread().num)
            thr_obj = addr_space.threads[thr_num]
            task_function = thr_obj.get_current_task().get_task_function()
            print("bt value:", int("0x0000000000400b6c", 0))
            print("get_task_function value:", task_function)
            # Task function of the implicit task in the current parallel
            # region for the same thread.
            parallel_obj = thr_obj.get_current_parallel()
            task_in_parallel = parallel_obj.get_task_in_parallel(thr_num)
            print("task_function_in_parallel:", task_in_parallel.get_task_function())
        except:
            print("Task function value not found for this thread")
class ompdtestapi(gdb.Command):
    """To test API's return code"""

    def __init__(self):
        self.__doc__ = "Test OMPD tool Interface APIs.\nUsage: ompdtestapi <api name>"
        super().__init__("ompdtestapi", gdb.COMMAND_OBSCURE)

    def invoke(self, arg, from_tty):
        """Dispatch on the API name given in `arg` and drive the matching
        ompdModule.test_* entry point.

        Fix: an empty `arg` now returns right after printing the usage
        hint; previously it fell through the elif-chain and additionally
        printed "Invalid API.".
        """
        global addr_space
        if init_error():
            print("Error in Initialization.")
            return
        if not arg:
            print("No API provided to test, eg: ompdtestapi ompd_initialize")
            return  # fix: do not fall through to the "Invalid API." branch

        # Local helpers factoring out the handle-fetching boilerplate that
        # was repeated in nearly every branch below.
        def get_addr_handle():
            # OMPD address-space handle for the debuggee.
            return addr_space.addr_space

        def get_thread_handle():
            # Thread handle for the thread currently selected in gdb.
            threadId = gdb.selected_thread().ptid[1]
            return ompdModule.get_thread_handle(threadId, get_addr_handle())

        def get_parallel_handle():
            # Handle of the current thread's innermost parallel region.
            return ompdModule.call_ompd_get_curr_parallel_handle(get_thread_handle())

        def get_task_handle():
            # Handle of the current thread's current task.
            return ompdModule.call_ompd_get_curr_task_handle(get_thread_handle())

        if arg == "ompd_get_thread_handle":
            addr_handle = get_addr_handle()
            threadId = gdb.selected_thread().ptid[1]
            ompdModule.test_ompd_get_thread_handle(addr_handle, threadId)
        elif arg == "ompd_get_curr_parallel_handle":
            ompdModule.test_ompd_get_curr_parallel_handle(get_thread_handle())
        elif arg == "ompd_get_thread_in_parallel":
            ompdModule.test_ompd_get_thread_in_parallel(get_parallel_handle())
        elif arg == "ompd_thread_handle_compare":
            parallel_handle = get_parallel_handle()
            thread_handle1 = ompdModule.call_ompd_get_thread_in_parallel(
                parallel_handle, 1
            )
            thread_handle2 = ompdModule.call_ompd_get_thread_in_parallel(
                parallel_handle, 2
            )
            # Compare a handle with itself, then with a different one.
            ompdModule.test_ompd_thread_handle_compare(thread_handle1, thread_handle1)
            ompdModule.test_ompd_thread_handle_compare(thread_handle1, thread_handle2)
        elif arg == "ompd_get_thread_id":
            ompdModule.test_ompd_get_thread_id(get_thread_handle())
        elif arg == "ompd_rel_thread_handle":
            ompdModule.test_ompd_rel_thread_handle(get_thread_handle())
        elif arg == "ompd_get_enclosing_parallel_handle":
            ompdModule.test_ompd_get_enclosing_parallel_handle(get_parallel_handle())
        elif arg == "ompd_parallel_handle_compare":
            parallel_handle1 = get_parallel_handle()
            parallel_handle2 = ompdModule.call_ompd_get_enclosing_parallel_handle(
                parallel_handle1
            )
            ompdModule.test_ompd_parallel_handle_compare(
                parallel_handle1, parallel_handle1
            )
            ompdModule.test_ompd_parallel_handle_compare(
                parallel_handle1, parallel_handle2
            )
        elif arg == "ompd_rel_parallel_handle":
            ompdModule.test_ompd_rel_parallel_handle(get_parallel_handle())
        elif arg == "ompd_initialize":
            ompdModule.test_ompd_initialize()
        elif arg == "ompd_get_api_version":
            ompdModule.test_ompd_get_api_version()
        elif arg == "ompd_get_version_string":
            ompdModule.test_ompd_get_version_string()
        elif arg == "ompd_finalize":
            ompdModule.test_ompd_finalize()
        elif arg == "ompd_process_initialize":
            ompdModule.call_ompd_initialize()
            ompdModule.test_ompd_process_initialize()
        elif arg == "ompd_device_initialize":
            ompdModule.test_ompd_device_initialize()
        elif arg == "ompd_rel_address_space_handle":
            ompdModule.test_ompd_rel_address_space_handle()
        elif arg == "ompd_get_omp_version":
            ompdModule.test_ompd_get_omp_version(get_addr_handle())
        elif arg == "ompd_get_omp_version_string":
            ompdModule.test_ompd_get_omp_version_string(get_addr_handle())
        elif arg == "ompd_get_curr_task_handle":
            ompdModule.test_ompd_get_curr_task_handle(get_thread_handle())
        elif arg == "ompd_get_task_parallel_handle":
            ompdModule.test_ompd_get_task_parallel_handle(get_task_handle())
        elif arg == "ompd_get_generating_task_handle":
            ompdModule.test_ompd_get_generating_task_handle(get_task_handle())
        elif arg == "ompd_get_scheduling_task_handle":
            ompdModule.test_ompd_get_scheduling_task_handle(get_task_handle())
        elif arg == "ompd_get_task_in_parallel":
            ompdModule.test_ompd_get_task_in_parallel(get_parallel_handle())
        elif arg == "ompd_rel_task_handle":
            ompdModule.test_ompd_rel_task_handle(get_task_handle())
        elif arg == "ompd_task_handle_compare":
            task_handle1 = get_task_handle()
            task_handle2 = ompdModule.call_ompd_get_generating_task_handle(task_handle1)
            # Compare in both argument orders.
            ompdModule.test_ompd_task_handle_compare(task_handle1, task_handle2)
            ompdModule.test_ompd_task_handle_compare(task_handle2, task_handle1)
        elif arg == "ompd_get_task_function":
            ompdModule.test_ompd_get_task_function(get_task_handle())
        elif arg == "ompd_get_task_frame":
            ompdModule.test_ompd_get_task_frame(get_task_handle())
        elif arg == "ompd_get_state":
            ompdModule.test_ompd_get_state(get_thread_handle())
        elif arg == "ompd_get_display_control_vars":
            ompdModule.test_ompd_get_display_control_vars(get_addr_handle())
        elif arg == "ompd_rel_display_control_vars":
            ompdModule.test_ompd_rel_display_control_vars()
        elif arg == "ompd_enumerate_icvs":
            ompdModule.test_ompd_enumerate_icvs(get_addr_handle())
        elif arg == "ompd_get_icv_from_scope":
            # Exercise the call with every kind of handle; derive the
            # parallel and task handles from a single thread handle, as
            # the original did.
            thread_handle = get_thread_handle()
            parallel_handle = ompdModule.call_ompd_get_curr_parallel_handle(
                thread_handle
            )
            task_handle = ompdModule.call_ompd_get_curr_task_handle(thread_handle)
            ompdModule.test_ompd_get_icv_from_scope_with_addr_handle(get_addr_handle())
            ompdModule.test_ompd_get_icv_from_scope_with_thread_handle(thread_handle)
            ompdModule.test_ompd_get_icv_from_scope_with_parallel_handle(
                parallel_handle
            )
            ompdModule.test_ompd_get_icv_from_scope_with_task_handle(task_handle)
        elif arg == "ompd_get_icv_string_from_scope":
            ompdModule.test_ompd_get_icv_string_from_scope(get_addr_handle())
        elif arg == "ompd_get_tool_data":
            ompdModule.test_ompd_get_tool_data()
        elif arg == "ompd_enumerate_states":
            ompdModule.test_ompd_enumerate_states()
        else:
            print("Invalid API.")
class ompd_bt(gdb.Command):
    """Turn filter for 'bt' on/off for output to only contain frames relevant to the application or all frames."""

    def __init__(self):
        self.__doc__ = 'Turn filter for "bt" output on or off. Specify "on continued" option to trace worker threads back to master threads.\n usage: ompd bt on|on continued|off'
        super().__init__("ompd bt", gdb.COMMAND_STACK)

    def invoke(self, arg, from_tty):
        """Enable/disable the OMPD frame filter ("on", "on continued", "off")."""
        global ff
        global addr_space
        global icv_map
        global ompd_scope_map
        if init_error():
            return
        if icv_map is None:
            # Build the module-level ICV map once; the filter relies on it.
            icv_map = {}
            current = 0
            more = 1
            while more > 0:
                current, next_icv, next_scope, more = (
                    ompdModule.call_ompd_enumerate_icvs(addr_space.addr_space, current)
                )
                icv_map[next_icv] = (current, next_scope, ompd_scope_map[next_scope])
            print('Initialized ICV map successfully for filtering "bt".')
        arg_list = gdb.string_to_argv(arg)
        usage = 'When calling "ompd bt", you must either specify "on", "on continued" or "off". Check "help ompd".'
        if len(arg_list) == 0:
            print(usage)
        elif len(arg_list) == 1 and arg_list[0] == "on":
            addr_space.list_threads(False)
            ff.set_switch(True)
            ff.set_switch_continue(False)
        elif arg_list[0] == "on" and arg_list[1] == "continued":
            # Trace worker threads back through their master threads.
            ff.set_switch(True)
            ff.set_switch_continue(True)
        elif len(arg_list) == 1 and arg_list[0] == "off":
            ff.set_switch(False)
            ff.set_switch_continue(False)
        else:
            print(usage)
# TODO: remove | |
class ompd_taskframes(gdb.Command):
    """Prints task handles for relevant task frames. Meant for debugging."""

    def __init__(self):
        self.__doc__ = "Prints list of tasks.\nUsage: ompd taskframes"
        super().__init__("ompd taskframes", gdb.COMMAND_STACK)

    def invoke(self, arg, from_tty):
        """Dump stack pointers of all gdb frames, then walk the OMPD task
        chain of the selected thread printing enter/exit frame addresses."""
        global addr_space
        if init_error():
            return
        # First, the raw gdb stack: one sp per frame, newest first.
        frame = gdb.newest_frame()
        while frame:
            print(frame.read_register("sp"))
            frame = frame.older()
        # Resolve the current task handle, if this gdb thread is known.
        task_handle = None
        if addr_space.threads and addr_space.threads.get(gdb.selected_thread().num):
            thread_handle = curr_thread().thread_handle
            task_handle = ompdModule.call_ompd_get_curr_task_handle(thread_handle)
        if not task_handle:
            return None
        prev_frames = None
        try:
            while True:
                frames_with_flags = ompdModule.call_ompd_get_task_frame(task_handle)
                # Keep only the enter/exit addresses (indices 0 and 3).
                frames = (frames_with_flags[0], frames_with_flags[3])
                # Stop once the frame pair repeats (end of the chain).
                if prev_frames == frames:
                    break
                if not isinstance(frames, tuple):
                    break
                (enter_frame, exit_frame) = frames
                print(hex(enter_frame), hex(exit_frame))
                task_handle = ompdModule.call_ompd_get_scheduling_task_handle(
                    task_handle
                )
                prev_frames = frames
                if not task_handle:
                    break
        except:
            traceback.print_exc()
def print_and_exec(string):
    """Helper function for ompd_step. Executes the given command in GDB and prints it."""
    # Echo first so the user sees what is about to run.
    print(string)
    gdb.execute(string)
class TempFrameFunctionBp(gdb.Breakpoint):
    """Helper class for ompd_step. Defines stop function for breakpoint on frame function."""

    def stop(self):
        # Record that execution reached the task function, then disarm.
        global in_task_function
        in_task_function = True
        self.enabled = False
        # Implicit None return: gdb does not actually stop here.
class ompd_step(gdb.Command):
    """Executes 'step' and skips frames irrelevant to the application / the ones without debug information."""

    def __init__(self):
        self.__doc__ = 'Executes "step" and skips runtime frames as much as possible.'
        super().__init__("ompd step", gdb.COMMAND_STACK)

    class TaskBeginBp(gdb.Breakpoint):
        """Helper class. Defines stop function for breakpoint ompd_bp_task_begin."""

        def stop(self):
            # Plant a temporary breakpoint on the new task's entry
            # function so TempFrameFunctionBp can flag when we get there.
            # Never actually stop at ompd_bp_task_begin itself.
            try:
                code_line = curr_thread().get_current_task().get_task_function()
                frame_fct_bp = TempFrameFunctionBp(
                    ("*%i" % code_line), temporary=True, internal=True
                )
                frame_fct_bp.thread = self.thread
                return False
            except:
                return False

    def invoke(self, arg, from_tty):
        """Step, then keep finishing/stepping until a frame with source
        info is reached (i.e. skip runtime-internal frames)."""
        global in_task_function
        if init_error():
            return
        tbp = self.TaskBeginBp("ompd_bp_task_begin", temporary=True, internal=True)
        tbp.thread = int(gdb.selected_thread().num)
        print_and_exec("step")
        while gdb.selected_frame().find_sal().symtab is None:
            if not in_task_function:
                # Still inside the runtime: unwind out of it.
                print_and_exec("finish")
            else:
                # Inside the task function but without line info yet.
                print_and_exec("si")
def init_error():
    """Return True (after printing a hint) when 'ompd init' has not been
    run yet or no inferior thread is selected; False when ready."""
    global addr_space
    initialized = (
        gdb.selected_thread() is not None
        and addr_space is not None
        and bool(addr_space)
    )
    if initialized:
        return False
    print("Run 'ompd init' before running any of the ompd commands")
    return True
def main():
    """Register every ompd GDB command by instantiating its command class.

    Instantiation order matches the original registration order; each
    constructor registers itself with GDB via gdb.Command.__init__.
    """
    for command_cls in (
        ompd,
        ompd_init,
        ompd_threads,
        ompd_icvs,
        ompd_parallel_region,
        ompd_test,
        ompdtestapi,
        ompd_taskframes,
        ompd_bt,
        ompd_step,
    ):
        command_cls()
if __name__ == "__main__":
    # Entry point when the plugin is loaded directly via `gdb -x __init__.py`:
    # register all ompd commands. On any failure, print the traceback instead
    # of letting the exception abort GDB's script loading (hence the
    # deliberate bare except).
    try:
        main()
    except:
        traceback.print_exc()
# NOTE: test code using: | |
# OMP_NUM_THREADS=... gdb a.out -x ../../projects/gdb_plugin/gdb-ompd/__init__.py | |
# ompd init | |
# ompd threads |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
-- Testing: 1 of 73764 tests, 1 workers -- | |
FAIL: ompd-test :: api_tests/test_ompd_get_state.c (1 of 1) | |
******************** TEST 'ompd-test :: api_tests/test_ompd_get_state.c' FAILED ******************** | |
Exit Code: -11 | |
Command Output (stdout): | |
-- | |
# RUN: at line 1 | |
/home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/./bin/clang -fopenmp -g -gdwarf-4 /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c -o /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/api_tests/Output/test_ompd_get_state.c.tmp -I /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test -I /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/runtime/src -L /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/runtime/src 2>&1 | tee /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/api_tests/Output/test_ompd_get_state.c.tmp.compile | |
# executed command: /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/./bin/clang -fopenmp -g -gdwarf-4 /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c -o /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/api_tests/Output/test_ompd_get_state.c.tmp -I /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test -I /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/runtime/src -L /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/runtime/src | |
# executed command: tee /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/api_tests/Output/test_ompd_get_state.c.tmp.compile | |
# RUN: at line 2 | |
env OMP_DEBUG=enabled gdb -x /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/../gdb-plugin/python-module/ompd/__init__.py -x /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c.cmd /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/api_tests/Output/test_ompd_get_state.c.tmp 2>&1 | tee /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/api_tests/Output/test_ompd_get_state.c.tmp.out | /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/./bin/FileCheck --dump-input=always /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c | |
# executed command: env OMP_DEBUG=enabled gdb -x /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/../gdb-plugin/python-module/ompd/__init__.py -x /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c.cmd /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/api_tests/Output/test_ompd_get_state.c.tmp | |
# note: command had no output on stdout or stderr | |
# error: command failed with exit status: -11 | |
# executed command: tee /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/api_tests/Output/test_ompd_get_state.c.tmp.out | |
# executed command: /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/./bin/FileCheck --dump-input=always /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c | |
# .---command stderr------------ | |
# | | |
# | Input file: <stdin> | |
# | Check file: /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c | |
# | | |
# | -dump-input=help explains the following input dump. | |
# | | |
# | Input was: | |
# | <<<<<< | |
# | 1: gdb: warning: Couldn't determine a path for the index cache directory. | |
# | 2: GNU gdb (GDB) Fedora Linux 13.2-3.fc38 | |
# | 3: Copyright (C) 2023 Free Software Foundation, Inc. | |
# | 4: License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html> | |
# | 5: This is free software: you are free to change and redistribute it. | |
# | 6: There is NO WARRANTY, to the extent permitted by law. | |
# | 7: Type "show copying" and "show warranty" for details. | |
# | 8: This GDB was configured as "x86_64-redhat-linux-gnu". | |
# | 9: Type "show configuration" for configuration details. | |
# | 10: For bug reporting instructions, please see: | |
# | 11: <https://www.gnu.org/software/gdb/bugs/>. | |
# | 12: Find the GDB manual and other documentation resources online at: | |
# | 13: <http://www.gnu.org/software/gdb/documentation/>. | |
# | 14: | |
# | 15: For help, type "help". | |
# | 16: Type "apropos word" to search for commands related to "word"... | |
# | 17: Reading symbols from /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/api_tests/Output/test_ompd_get_state.c.tmp... | |
# | 18: OMPD GDB support loaded | |
# | 19: Run 'ompd init' to start debugging | |
# | 20: Temporary breakpoint 1 at 0x40116f: file /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c, line 9. | |
# | 21: [Thread debugging using libthread_db enabled] | |
# | 22: Using host libthread_db library "/lib64/libthread_db.so.1". | |
# | 23: | |
# | 24: Temporary breakpoint 1, main () | |
# | 25: at /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c:9 | |
# | 26: 9 omp_set_num_threads(2); | |
# | 27: Temporary breakpoint 2 at 0x7ffff7f8c6a0: file /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/runtime/src/ompd-specific.cpp, line 121. | |
# | 28: OMP_OMPD active | |
# | 29: | |
# | 30: Temporary breakpoint 2, ompd_dll_locations_valid () | |
# | 31: at /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/runtime/src/ompd-specific.cpp:121 | |
# | 32: 121 } | |
# | 33: Loaded OMPD lib successfully! | |
# | 34: Breakpoint 3 at 0x4011b0: file /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c, line 11. | |
# | 35: [New Thread 0x7ffff791db40 (LWP 578128)] | |
# | 36: | |
# | 37: Thread 1 "test_ompd_get_s" hit Breakpoint 3, main.omp_outlined_debug__ ( | |
# | 38: .global_tid.=0x7fffffffe310, .bound_tid.=0x7fffffffe308) | |
# | 39: at /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c:11 | |
# | 40: 11 { printf("Parallel level 1, thread num = %d.\n", omp_get_thread_num()); } | |
# | 41: Traceback (most recent call last): | |
# | 42: File "/home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/../gdb-plugin/python-module/ompd/ompd_callbacks.py", line 30, in _sym_addr | |
# | 43: return int(gdb.parse_and_eval("&" + symbol_name)) | |
# | 44: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | |
# | 45: gdb.error: Missing ELF symbol "ompd_sizeof____kmp_gtid". | |
# | 46: Traceback (most recent call last): | |
# | 47: File "/home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/llvm-18.0.0.src/redhat-linux-build/runtimes/runtimes-bins/openmp/libompd/test/../gdb-plugin/python-module/ompd/ompd_callbacks.py", line 60, in _read | |
# | 48: ret_buf.append(int(buf[i])) | |
# | 49: ^^^^^^^^^^^ | |
# | 50: gdb.MemoryError: Cannot access memory at address 0xffffffffffffffff | |
# | 51: | |
# | 52: | |
# | 53: Fatal signal: Segmentation fault | |
# | 54: ----- Backtrace ----- | |
# | 55: 0x5617282f58a0 ??? | |
# | 56: 0x5617284304cd ??? | |
# | 57: 0x561728430699 ??? | |
# | 58: 0x7f1a8c45fb6f ??? | |
# | 59: 0x7f1a8801fab9 Py_TYPE | |
# | 60: /usr/include/python3.11/object.h:133 | |
# | 61: 0x7f1a8801fab9 Py_IS_TYPE | |
# | 62: /usr/include/python3.11/object.h:150 | |
# | 63: 0x7f1a8801fab9 PyObject_TypeCheck | |
# | 64: /usr/include/python3.11/object.h:263 | |
# | 65: 0x7f1a8801fab9 _read | |
# | 66: /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/gdb-plugin/ompdModule.c:686 | |
# | 67: 0x7f1a78231b0e _ZN5TType7getSizeEPm | |
# | 68: /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/src/TargetValue.cpp:74 | |
# | 69: 0x7f1a78238426 _ZN6TValue8castBaseEPKc | |
# | 70: /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/src/TargetValue.cpp:320 | |
# | 71: 0x7f1a78238426 ompd_get_thread_handle | |
# | 72: /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/src/omp-debug.cpp:719 | |
# | 73: 0x7f1a880201f8 get_thread_handle | |
# | 74: /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/gdb-plugin/ompdModule.c:863 | |
# | 75: 0x7f1a8cbcdc77 ??? | |
# | 76: 0x7f1a8cbb0d92 ??? | |
# | 77: 0x7f1a8cbb97c3 ??? | |
# | 78: 0x7f1a8cbb57d9 ??? | |
# | 79: 0x7f1a8cbcd3e5 ??? | |
# | 80: 0x7f1a8cbf1066 ??? | |
# | 81: 0x5617285beccf ??? | |
# | 82: 0x561728330104 ??? | |
# | 83: 0x5617287306c4 ??? | |
# | 84: 0x561728430cce ??? | |
# | 85: 0x56172872f28c ??? | |
# | 86: 0x561728340df8 ??? | |
# | 87: 0x56172832e24a ??? | |
# | 88: 0x561728516a39 ??? | |
# | 89: 0x561728516ae7 ??? | |
# | 90: 0x561728518e28 ??? | |
# | 91: 0x5617285199df ??? | |
# | 92: 0x561728206a6e ??? | |
# | 93: 0x7f1a8c449b49 ??? | |
# | 94: 0x7f1a8c449c0a ??? | |
# | 95: 0x561728211ea4 ??? | |
# | 96: 0xffffffffffffffff ??? | |
# | 97: --------------------- | |
# | 98: A fatal error internal to GDB has been detected, further | |
# | 99: debugging is not possible. GDB will now terminate. | |
# | 100: | |
# | 101: This is a bug, please report it. For instructions, see: | |
# | 102: <https://www.gnu.org/software/gdb/bugs/>. | |
# | 103: | |
# | >>>>>> | |
# `----------------------------- | |
-- | |
******************** | |
******************** | |
Failed Tests (1): | |
ompd-test :: api_tests/test_ompd_get_state.c | |
Testing Time: 1.53s | |
Total Discovered Tests: 69378 | |
Excluded: 69377 (100.00%) | |
Failed : 1 (0.00%) |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c | |
// RUN: %gdb-compile 2>&1 | tee %t.compile | |
// RUN: %gdb-test -x %s.cmd %t 2>&1 | tee %t.out | FileCheck %s | |
#include <omp.h> | |
#include <stdio.h> | |
int main() { /* minimal OMPD fixture: spawn a 2-thread parallel region */
  omp_set_num_threads(2); /* fixed team size keeps the test deterministic */
#pragma omp parallel
  { printf("Parallel level 1, thread num = %d.\n", omp_get_thread_num()); } /* NOTE(review): the .cmd script breaks at test_ompd_get_state.c:11 — presumably this line; keep line numbering stable */
  return 0;
}
// CHECK-NOT: Failed | |
// CHECK-NOT: Skip |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# /home/fedora/src/llvm-rpms/llvm-big-merge/BUILD/openmp/libompd/test/api_tests/test_ompd_get_state.c.cmd | |
ompd init | |
b test_ompd_get_state.c:11 | |
c | |
ompdtestapi ompd_get_state |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment