Commit 9674d6e8 authored by Ankur Bansal

lrdp_V2: Parse the refcount of dmabuf and fix the time alive of dmabuf


Parse the refcount for each dmabuf and correct the Time Alive
reported for each dmabuf.
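For reference, a minimal sketch (not the committed code) of how the two values are obtained with the ramdump helpers this parser already uses; the helper name dmabuf_refcount_and_age is hypothetical, and the symbol and field names mirror the diff below:

def dmabuf_refcount_and_age(ramdump, dma_buf_addr):
    # Refcount: f_count of the struct file backing this dma_buf.
    file_offset = ramdump.field_offset('struct dma_buf', 'file')
    f_count_offset = ramdump.field_offset('struct file', 'f_count')
    file = ramdump.read_word(dma_buf_addr + file_offset)
    f_count = ramdump.read_u64(file + f_count_offset)

    # Time alive: seconds from the shadow timekeeper minus the buffer's
    # creation time; dma_buf.ktime is converted from nanoseconds to seconds,
    # as in the diff (the committed code additionally offsets into the
    # embedded timespec before reading).
    time_alive = 0
    timekeeper = ramdump.address_of('shadow_timekeeper')
    ktime_offset = ramdump.field_offset('struct dma_buf', 'ktime')
    stime_offset = ramdump.field_offset('struct timekeeper', 'ktime_sec')
    if timekeeper is not None and ktime_offset is not None:
        stime = ramdump.read_word(timekeeper + stime_offset)
        ctime = ramdump.read_word(dma_buf_addr + ktime_offset) // 1000000000
        time_alive = stime - ctime
    return f_count, time_alive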

Also sort the slab summary info based on total slab usage.
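A small sketch of that ordering change (the row contents here are made up; only the itemgetter(5) sort key mirrors the diff):

import operator

# Hypothetical rows: the last element stands in for a cache's total slab usage.
slab_summary = {
    'kmalloc-128': ['kmalloc-128', 32, 128, 4096, 1024, 524288],
    'dentry':      ['dentry', 21, 192, 42000, 2000, 8064000],
}
# Rank caches by total usage (descending), as the updated parser does.
sorted_summary = sorted(slab_summary.values(),
                        key=operator.itemgetter(5), reverse=True)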

Change-Id: I7c573d8a9c70786a60156a122f412e0e884c0cb5
Signed-off-by: Ankur Bansal <ankban@codeaurora.org>
parent e9a51f85
@@ -62,15 +62,17 @@ def ion_buffer_info(self, ramdump, ion_info):
list_node_offset = ramdump.field_offset('struct dma_buf', 'list_node')
size_offset = ramdump.field_offset('struct dma_buf', 'size')
file_offset = ramdump.field_offset('struct dma_buf', 'file')
f_count_offset = ramdump.field_offset('struct file', 'f_count')
name_offset = ramdump.field_offset('struct dma_buf', 'name')
exp_name_offset = ramdump.field_offset('struct dma_buf', 'exp_name')
ion_info.write("{0:40} {1:15} {2:10} {3:10} {4:10} {5:20}\n".format(
'File_addr', 'Name', 'Size', 'Exp', 'Heap', 'Size in KB'))
ion_info.write("{0:40} {1:4} {2:15} {3:10} {4:10} {5:10} {6:20}\n".format(
'File_addr', 'REF', 'Name', 'Size', 'Exp', 'Heap', 'Size in KB'))
dma_buf_info = []
while (head != db_list):
dma_buf_addr = head - list_node_offset
size = ramdump.read_word(dma_buf_addr + size_offset)
file = ramdump.read_word(dma_buf_addr + file_offset)
f_count = ramdump.read_u64(file + f_count_offset)
exp_name = ramdump.read_word(dma_buf_addr + exp_name_offset)
exp_name = ramdump.read_cstring(exp_name, 48)
ionheap_name = ""
@@ -81,7 +83,8 @@ def ion_buffer_info(self, ramdump, ion_info):
ionheap_name = ramdump.read_cstring(ionheap_name_addr, TASK_NAME_LENGTH)
name = ramdump.read_word(dma_buf_addr + name_offset)
name = ramdump.read_cstring(name, 48)
dma_buf_info.append([file, name, hex(size), exp_name, ionheap_name, bytes_to_KB(size)])
dma_buf_info.append([file, f_count, name, hex(size), exp_name,
ionheap_name, bytes_to_KB(size)])
head = ramdump.read_word(head)
next_node = ramdump.read_word(head + next_offset)
if next_node == 0:
@@ -92,6 +95,7 @@ def ion_buffer_info(self, ramdump, ion_info):
dma_buf_addr = head - list_node_offset
size = ramdump.read_word(dma_buf_addr + size_offset)
file = ramdump.read_word(dma_buf_addr + file_offset)
f_count = ramdump.read_u64(file + f_count_offset)
exp_name = ramdump.read_word(dma_buf_addr + exp_name_offset)
exp_name = ramdump.read_cstring(exp_name, 48)
ionheap_name = ""
@@ -102,69 +106,58 @@ def ion_buffer_info(self, ramdump, ion_info):
ionheap_name = ramdump.read_cstring(ionheap_name_addr, TASK_NAME_LENGTH)
name = ramdump.read_word(dma_buf_addr + name_offset)
name = ramdump.read_cstring(name, 48)
dma_buf_info.append([file, name, hex(size), exp_name, ionheap_name, bytes_to_KB(size)])
dma_buf_info.append([file, f_count, name, hex(size), exp_name,
ionheap_name, bytes_to_KB(size)])
head = ramdump.read_word(head + prev_offset)
prev_node = ramdump.read_word(head + prev_offset)
if prev_node == 0:
break
break
dma_buf_info = sorted(dma_buf_info, key=lambda l: l[5], reverse=True)
dma_buf_info = sorted(dma_buf_info, key=lambda l: l[6], reverse=True)
for item in dma_buf_info:
str = "v.v (struct file *)0x{0:x}\t {1:15} {2:10} {3:10} {4:10} ({5} KB)\n".\
format(item[0], item[1], item[2], item[3], item[4], item[5] )
str = "v.v (struct file *)0x{0:x}\t {1:2} {2:15} {3:10} {4:10} {" \
"5:10} ({6} KB)\n".format(item[0], item[1], item[2], item[3],
item[4], item[5], item[6])
ion_info.write(str)
def get_bufs(task, bufs, ion_info, ramdump):
def get_bufs(self, task, bufs, ion_info, ramdump):
t_size = 0
dma_buf_fops = ramdump.address_of('dma_buf_fops')
if dma_buf_fops is None:
ion_info.write("NOTE: 'dma_buf_fops' not found for file information\n")
return 0
timekeeper = ramdump.address_of('shadow_timekeeper')
if timekeeper is None:
ion_info.write("NOTE: 'timekeeper' not found for timing information\n")
return 0
files_offset = ramdump.field_offset('struct task_struct', 'files')
fdt_offset = ramdump.field_offset('struct files_struct', 'fdt')
fd_offset = ramdump.field_offset('struct fdtable', 'fd')
max_fds_offset = ramdump.field_offset('struct fdtable', 'max_fds')
f_op_offset = ramdump.field_offset('struct file', 'f_op')
private_data_offset = ramdump.field_offset('struct file', 'private_data')
size_offset = ramdump.field_offset('struct dma_buf', 'size')
name_offset = ramdump.field_offset('struct dma_buf', 'name')
stime_offset = ramdump.field_offset('struct timekeeper', 'xtime_sec')
if task is None:
return 0
files = ramdump.read_pointer(task + files_offset)
files = ramdump.read_pointer(task + self.files_offset)
if files is None:
return 0
fdt = ramdump.read_pointer(files + fdt_offset)
fdt = ramdump.read_pointer(files + self.fdt_offset)
if fdt is None:
return 0
fd = ramdump.read_pointer(fdt + fd_offset)
max_fds = ramdump.read_halfword(fdt + max_fds_offset)
stime = ramdump.read_word(timekeeper + stime_offset)
ctime_offset = ramdump.field_offset('struct dma_buf', 'ctime')
fd = ramdump.read_pointer(fdt + self.fd_offset)
max_fds = ramdump.read_halfword(fdt + self.max_fds_offset)
stime = ramdump.read_word(self.timekeeper + self.stime_offset)
ctime_offset = ramdump.field_offset('struct dma_buf', 'ktime')
if ctime_offset is not None:
ctime_offset += ramdump.field_offset('struct timespec', 'tv_sec')
for i in range(max_fds):
file = ramdump.read_pointer(fd + i*8)
if (file == 0):
continue
f_op = ramdump.read_pointer(file + f_op_offset)
f_op = ramdump.read_pointer(file + self.f_op_offset)
if (f_op != dma_buf_fops):
continue
dmabuf = ramdump.read_pointer(file + private_data_offset)
size = ramdump.read_word(dmabuf + size_offset)
dmabuf = ramdump.read_pointer(file + self.private_data_offset)
size = ramdump.read_word(dmabuf + self.size_offset)
time = 0
if ctime_offset is not None:
ctime = ramdump.read_word(dmabuf + ctime_offset)
ctime = ctime / 1000000000
time = stime - ctime
name = ramdump.read_word(dmabuf + name_offset)
name = ramdump.read_word(dmabuf + self.name_offset)
name = ramdump.read_cstring(name, 48)
item = [name, hex(size), bytes_to_KB(size), time]
@@ -175,10 +168,10 @@ def get_bufs(task, bufs, ion_info, ramdump):
return t_size
def get_proc_bufs(task, bufs, ion_info, ramdump):
size = 0;
def get_proc_bufs(self, task, bufs, ion_info, ramdump):
size = 0
for curr in ramdump.for_each_thread(task):
size += get_bufs(curr, bufs, ion_info, ramdump)
size += get_bufs(self, curr, bufs, ion_info, ramdump)
return size
@@ -190,7 +183,7 @@ def ion_proc_info(self, ramdump, ion_info):
dma_procs = []
for task in ramdump.for_each_process():
bufs = []
size = get_proc_bufs(task, bufs, ion_info, ramdump)
size = get_proc_bufs(self, task, bufs, ion_info, ramdump)
if (size == 0):
continue
comm = ramdump.read_cstring(task + comm_offset)
@@ -525,6 +518,24 @@ def find_rb_root(self, ramdump, ion_dev_rb_root, ion_info):
@register_parser('--print-ionbuffer', 'Print ion buffer', optional=True)
class DumpIonBuffer(RamParser):
def __init__(self, *args):
super(DumpIonBuffer, self).__init__(*args)
self.timekeeper = self.ramdump.address_of('shadow_timekeeper')
self.files_offset = self.ramdump.field_offset(
'struct task_struct', 'files')
self.fdt_offset = self.ramdump.field_offset(
'struct files_struct', 'fdt')
self.fd_offset = self.ramdump.field_offset('struct fdtable', 'fd')
self.max_fds_offset = self.ramdump.field_offset(
'struct fdtable', 'max_fds')
self.f_op_offset = self.ramdump.field_offset('struct file', 'f_op')
self.private_data_offset = self.ramdump.field_offset('struct file',
'private_data')
self.size_offset = self.ramdump.field_offset('struct dma_buf', 'size')
self.name_offset = self.ramdump.field_offset('struct dma_buf', 'name')
self.stime_offset = self.ramdump.field_offset('struct timekeeper',
'ktime_sec')
def parse(self):
with self.ramdump.open_file('ionbuffer.txt') as ion_info:
if (self.ramdump.kernel_version < (3, 18, 0)):
......
@@ -138,7 +138,7 @@ class Slabinfo_summary(RamParser):
num_slabs, slab_size]
nCounter += 1
sorted_summary = sorted(slab_summary.values(),
key=operator.itemgetter(4), reverse=True)
key=operator.itemgetter(5), reverse=True)
for val in sorted_summary:
slab_out.write(format_string.format(
val[0], val[1], val[2], val[3], val[4],
......