Diffstat (limited to 'startop/scripts/trace_analyzer/lib')
-rw-r--r--  startop/scripts/trace_analyzer/lib/trace2db.py       | 355
-rwxr-xr-x  startop/scripts/trace_analyzer/lib/trace2db_test.py  | 222
2 files changed, 577 insertions, 0 deletions
diff --git a/startop/scripts/trace_analyzer/lib/trace2db.py b/startop/scripts/trace_analyzer/lib/trace2db.py
new file mode 100644
index 000000000000..42a33aff046d
--- /dev/null
+++ b/startop/scripts/trace_analyzer/lib/trace2db.py
@@ -0,0 +1,355 @@
+#!/usr/bin/python3
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+import sys
+
+from sqlalchemy import create_engine
+from sqlalchemy import Column, Date, Integer, Float, String, ForeignKey
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import relationship
+
+from sqlalchemy.orm import sessionmaker
+
+import sqlalchemy
+
+from typing import Optional, Tuple
+
+_DEBUG = False  # print SQL statements to the console
+_FLUSH_LIMIT = 10000  # how many lines are parsed before flushing buffered rows to the DB
+
+Base = declarative_base()
+
+class RawFtraceEntry(Base):
+ __tablename__ = 'raw_ftrace_entries'
+
+ id = Column(Integer, primary_key=True)
+ task_name = Column(String, nullable=True) # <...> -> None.
+ task_pid = Column(String, nullable=False)
+ tgid = Column(Integer, nullable=True) # ----- -> None.
+ cpu = Column(Integer, nullable=False)
+ timestamp = Column(Float, nullable=False)
+ function = Column(String, nullable=False)
+ function_args = Column(String, nullable=False)
+
+ # 1:1 relation with MmFilemapAddToPageCache.
+ mm_filemap_add_to_page_cache = relationship("MmFilemapAddToPageCache",
+ back_populates="raw_ftrace_entry")
+
+ @staticmethod
+ def parse_dict(line):
+ # ' <...>-5521 (-----) [003] ...1 17148.446877: tracing_mark_write: trace_event_clock_sync: parent_ts=17148.447266'
+ m = re.match(r'\s*(.*)-(\d+)\s+\(([^\)]+)\)\s+\[(\d+)\]\s+([\w.]{4})\s+(\d+[.]\d+):\s+(\w+):\s+(.*)', line)
+ if not m:
+ return None
+
+ groups = m.groups()
+ # groups example:
+ # ('<...>',
+ # '5521',
+ # '-----',
+ # '003',
+ # '...1',
+ # '17148.446877',
+ # 'tracing_mark_write',
+ # 'trace_event_clock_sync: parent_ts=17148.447266')
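+ #
+ # Illustrative only: for the example line above, the dict returned below would be
+ # {'task_name': None, 'task_pid': 5521, 'tgid': None, 'cpu': 3,
+ #  'timestamp': 17148.446877, 'function': 'tracing_mark_write',
+ #  'function_args': 'trace_event_clock_sync: parent_ts=17148.447266'}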
+ task_name = groups[0]
+ if task_name == '<...>':
+ task_name = None
+
+ task_pid = int(groups[1])
+ tgid = groups[2]
+ if tgid == '-----':
+ tgid = None
+
+ cpu = int(groups[3])
+ # irq_flags = groups[4]
+ timestamp = float(groups[5])
+ function = groups[6]
+ function_args = groups[7]
+
+ return {'task_name': task_name, 'task_pid': task_pid, 'tgid': tgid, 'cpu': cpu, 'timestamp': timestamp, 'function': function, 'function_args': function_args}
+
+class SchedSwitch(Base):
+ __tablename__ = 'sched_switches'
+
+ id = Column(Integer, ForeignKey('raw_ftrace_entries.id'), primary_key=True)
+
+ prev_comm = Column(String, nullable=False)
+ prev_pid = Column(Integer, nullable=False)
+ prev_prio = Column(Integer, nullable=False)
+ prev_state = Column(String, nullable=False)
+
+ next_comm = Column(String, nullable=False)
+ next_pid = Column(Integer, nullable=False)
+ next_prio = Column(Integer, nullable=False)
+
+ @staticmethod
+ def parse_dict(function_args, id = None):
+ # 'prev_comm=kworker/u16:5 prev_pid=13971 prev_prio=120 prev_state=S ==> next_comm=swapper/4 next_pid=0 next_prio=120'
+ m = re.match(r"prev_comm=(.*) prev_pid=(\d+) prev_prio=(\d+) prev_state=(.*) ==> next_comm=(.*) next_pid=(\d+) next_prio=(\d+) ?", function_args)
+ if not m:
+ return None
+
+ groups = m.groups()
+ # ('kworker/u16:5', '13971', '120', 'S', 'swapper/4', '0', '120')
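+ #
+ # Illustrative only: with id=1 this builds
+ # {'id': 1, 'prev_comm': 'kworker/u16:5', 'prev_pid': 13971, 'prev_prio': 120,
+ #  'prev_state': 'S', 'next_comm': 'swapper/4', 'next_pid': 0, 'next_prio': 120}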
+ d = {}
+ if id is not None:
+ d['id'] = id
+ d['prev_comm'] = groups[0]
+ d['prev_pid'] = int(groups[1])
+ d['prev_prio'] = int(groups[2])
+ d['prev_state'] = groups[3]
+ d['next_comm'] = groups[4]
+ d['next_pid'] = int(groups[5])
+ d['next_prio'] = int(groups[6])
+
+ return d
+
+class SchedBlockedReason(Base):
+ __tablename__ = 'sched_blocked_reasons'
+
+ id = Column(Integer, ForeignKey('raw_ftrace_entries.id'), primary_key=True)
+
+ pid = Column(Integer, nullable=False)
+ iowait = Column(Integer, nullable=False)
+ caller = Column(String, nullable=False)
+
+ @staticmethod
+ def parse_dict(function_args, id = None):
+ # 'pid=2289 iowait=1 caller=wait_on_page_bit_common+0x2a8/0x5f8'
+ m = re.match(r"pid=(\d+) iowait=(\d+) caller=(.*) ?", function_args)
+ if not m:
+ return None
+
+ groups = m.groups()
+ # ('2289', '1', 'wait_on_page_bit_common+0x2a8/0x5f8')
+ d = {}
+ if id is not None:
+ d['id'] = id
+ d['pid'] = int(groups[0])
+ d['iowait'] = int(groups[1])
+ d['caller'] = groups[2]
+
+ return d
+
+class MmFilemapAddToPageCache(Base):
+ __tablename__ = 'mm_filemap_add_to_page_caches'
+
+ id = Column(Integer, ForeignKey('raw_ftrace_entries.id'), primary_key=True)
+
+ dev = Column(Integer, nullable=False) # decoded from ${major}:${minor} syntax.
+ dev_major = Column(Integer, nullable=False) # original ${major} value.
+ dev_minor = Column(Integer, nullable=False) # original ${minor} value.
+
+ ino = Column(Integer, nullable=False) # decoded from hex to base 10
+ page = Column(Integer, nullable=False) # decoded from hex to base 10
+
+ pfn = Column(Integer, nullable=False)
+ ofs = Column(Integer, nullable=False)
+
+ # 1:1 relation with RawFtraceEntry.
+ raw_ftrace_entry = relationship("RawFtraceEntry", uselist=False)
+
+ @staticmethod
+ def parse_dict(function_args, id = None):
+ # dev 253:6 ino b2c7 page=00000000ec787cd9 pfn=1478539 ofs=4096
+ m = re.match(r"dev (\d+):(\d+) ino ([0-9a-fA-F]+) page=([0-9a-fA-F]+) pfn=(\d+) ofs=(\d+)", function_args)
+ if not m:
+ return None
+
+ groups = m.groups()
+ # ('253', '6', 'b2c7', '00000000ec787cd9', '1478539', '4096')
+ d = {}
+ if id is not None:
+ d['id'] = id
+
+ device_major = d['dev_major'] = int(groups[0])
+ device_minor = d['dev_minor'] = int(groups[1])
+ d['dev'] = device_major << 8 | device_minor
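+ # e.g. dev 253:6 packs to (253 << 8) | 6 == 64774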
+ d['ino'] = int(groups[2], 16)
+ d['page'] = int(groups[3], 16)
+ d['pfn'] = int(groups[4])
+ d['ofs'] = int(groups[5])
+
+ return d
+
+class Trace2Db:
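+ """
+ Loads an ftrace or systrace text capture into a sqlite database via SQLAlchemy.
+
+ Minimal usage sketch (illustrative only; the file names are hypothetical):
+ t2d = Trace2Db('trace.db')
+ t2d.set_raw_ftrace_entry_filter(lambda e: e['function'] == 'mm_filemap_add_to_page_cache')
+ count = t2d.parse_file_into_db('trace.ftrace')
+ rows = t2d.session.query(MmFilemapAddToPageCache).all()
+ """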
+ def __init__(self, db_filename: str):
+ (s, e) = self._init_sqlalchemy(db_filename)
+ self._session = s
+ self._engine = e
+ self._raw_ftrace_entry_filter = lambda x: True
+
+ def set_raw_ftrace_entry_filter(self, flt):
+ """
+ Install a filter function that takes the dict produced by
+ RawFtraceEntry.parse_dict and returns a bool.
+
+ If the filter returns False, that RawFtraceEntry (and any rows derived
+ from it) is skipped and not added to the database.
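+
+ Example (illustrative): keep only sched_switch entries:
+ t2d.set_raw_ftrace_entry_filter(lambda e: e['function'] == 'sched_switch')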
+ """
+ self._raw_ftrace_entry_filter = flt
+
+ @staticmethod
+ def _init_sqlalchemy(db_filename: str) -> Tuple[object, object]:
+ global _DEBUG
+ engine = create_engine('sqlite:///' + db_filename, echo=_DEBUG)
+
+ # CREATE ... (tables)
+ Base.metadata.create_all(engine)
+
+ Session = sessionmaker(bind=engine)
+ session = Session()
+ return (session, engine)
+
+ def parse_file_into_db(self, filename: str, limit: Optional[int] = None):
+ """
+ Parse the ftrace/systrace at 'filename',
+ inserting the values into the current sqlite database.
+
+ :return: number of RawFtraceEntry inserted.
+ """
+ return parse_file(filename, self._session, self._engine, self._raw_ftrace_entry_filter, limit)
+
+ def parse_file_buf_into_db(self, file_buf, limit: Optional[int] = None):
+ """
+ Parse the ftrace/systrace contents of the file-like object 'file_buf',
+ inserting the values into the current sqlite database.
+
+ :return: number of RawFtraceEntry inserted.
+ """
+ return parse_file_buf(file_buf, self._session, self._engine, self._raw_ftrace_entry_filter, limit)
+
+
+ @property
+ def session(self):
+ return self._session
+
+def insert_pending_entries(engine, kls, lst):
+ if len(lst) > 0:
+ # With an empty list, SQLAlchemy would generate an empty INSERT statement,
+ # which violates the first NOT NULL constraint, so only insert when non-empty.
+ try:
+ # Performance-sensitive parsing according to:
+ # https://docs.sqlalchemy.org/en/13/faq/performance.html#i-m-inserting-400-000-rows-with-the-orm-and-it-s-really-slow
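+ # (passing a list of dicts to a Core table insert lets the DBAPI batch the
+ # rows via executemany(), which is much faster than per-object ORM inserts)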
+ engine.execute(kls.__table__.insert(), lst)
+ lst.clear()
+ except sqlalchemy.exc.IntegrityError as err:
+ # possibly violating some SQL constraint, print data here.
+ print(err)
+ print(lst)
+ raise
+
+def parse_file(filename: str, *args, **kwargs) -> int:
+ # use explicit encoding to avoid UnicodeDecodeError.
+ with open(filename, encoding="ISO-8859-1") as f:
+ return parse_file_buf(f, *args, **kwargs)
+
+def parse_file_buf(filebuf, session, engine, raw_ftrace_entry_filter, limit=None) -> int:
+ global _FLUSH_LIMIT
+ count = 0
+ # count and id are not equal, because count still increases for invalid lines.
+ id = 0
+
+ pending_entries = []
+ pending_sched_switch = []
+ pending_sched_blocked_reasons = []
+ pending_mm_filemap_add_to_pagecaches = []
+
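+ # Drain each pending list into its table; insert_pending_entries clears a
+ # list after its bulk insert succeeds.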
+ def insert_all_pending_entries():
+ insert_pending_entries(engine, RawFtraceEntry, pending_entries)
+ insert_pending_entries(engine, SchedSwitch, pending_sched_switch)
+ insert_pending_entries(engine, SchedBlockedReason, pending_sched_blocked_reasons)
+ insert_pending_entries(engine, MmFilemapAddToPageCache, pending_mm_filemap_add_to_pagecaches)
+
+ # For trace.html files produced by systrace, the actual ftrace data is inside
+ # the second <script class="trace-data"> block; parsing_trace_data counts how
+ # many such blocks have been seen (2 == we have reached the ftrace block).
+ parsing_trace_data = 0
+ parsing_systrace_file = False
+
+ f = filebuf
+ for l in f:
+ if parsing_trace_data == 0 and l == "<!DOCTYPE html>\n":
+ parsing_systrace_file = True
+ continue
+ if parsing_trace_data != 2 and parsing_systrace_file:
+ if l == ' <script class="trace-data" type="application/text">\n':
+ parsing_trace_data = parsing_trace_data + 1
+ continue
+
+ if parsing_systrace_file and parsing_trace_data != 2:
+ continue
+ elif parsing_systrace_file and parsing_trace_data == 2 and l == " </script>\n":
+ # the rest of this file is just random html
+ break
+
+ # now parsing the ftrace data.
+ if len(l) > 1 and l[0] == '#':
+ continue
+
+ count = count + 1
+
+ if limit and count >= limit:
+ break
+
+ raw_ftrace_entry = RawFtraceEntry.parse_dict(l)
+ if not raw_ftrace_entry:
+ print("WARNING: Failed to parse raw ftrace entry: " + l)
+ continue
+
+ if not raw_ftrace_entry_filter(raw_ftrace_entry):
+ # Skip raw ftrace entries that don't match the filter.
+ # This is an optimization for when Trace2Db is used programmatically,
+ # to avoid building an overly large database.
+ continue
+
+ pending_entries.append(raw_ftrace_entry)
+ id = id + 1
+
+ if raw_ftrace_entry['function'] == 'sched_switch':
+ sched_switch = SchedSwitch.parse_dict(raw_ftrace_entry['function_args'], id)
+
+ if not sched_switch:
+ print("WARNING: Failed to parse sched_switch: " + l)
+ else:
+ pending_sched_switch.append(sched_switch)
+
+ elif raw_ftrace_entry['function'] == 'sched_blocked_reason':
+ sbr = SchedBlockedReason.parse_dict(raw_ftrace_entry['function_args'], id)
+
+ if not sbr:
+ print("WARNING: Failed to parse sched_blocked_reason: " + l)
+ else:
+ pending_sched_blocked_reasons.append(sbr)
+
+ elif raw_ftrace_entry['function'] == 'mm_filemap_add_to_page_cache':
+ d = MmFilemapAddToPageCache.parse_dict(raw_ftrace_entry['function_args'],
+ id)
+ if not d:
+ print("WARNING: Failed to parse mm_filemap_add_to_page_cache: " + l)
+ else:
+ pending_mm_filemap_add_to_pagecaches.append(d)
+
+ # Parsed rows are buffered in Python memory; flush them to the underlying
+ # SQL database every _FLUSH_LIMIT lines.
+ if count % _FLUSH_LIMIT == 0:
+ insert_all_pending_entries()
+
+ insert_all_pending_entries()
+
+ # Ensure underlying database commits changes from memory to disk.
+ session.commit()
+
+ return count
diff --git a/startop/scripts/trace_analyzer/lib/trace2db_test.py b/startop/scripts/trace_analyzer/lib/trace2db_test.py
new file mode 100755
index 000000000000..3b326f000a7d
--- /dev/null
+++ b/startop/scripts/trace_analyzer/lib/trace2db_test.py
@@ -0,0 +1,222 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+Unit tests for the trace2db module.
+
+Install:
+ $> sudo apt-get install python3-pytest ## OR
+ $> pip install -U pytest
+See also https://docs.pytest.org/en/latest/getting-started.html
+
+Usage:
+ $> ./trace2db_test.py
+ $> pytest trace2db_test.py
+ $> python -m pytest trace2db_test.py
+
+See also https://docs.pytest.org/en/latest/usage.html
+"""
+
+# global imports
+import io
+from copy import deepcopy
+
+# pip imports
+import pytest
+
+# local imports
+from trace2db import *
+
+# This pretty-prints the raw dictionary of the sqlalchemy object if it fails.
+class EqualsSqlAlchemyObject:
+ # For convenience in writing shorter tests, 'ignore_fields' specifies which
+ # fields to ignore when doing the comparison.
+ def __init__(self_, self, ignore_fields=[]):
+ self_.self = self
+ self_.ignore_fields = ignore_fields
+
+ # Do a field-by-field comparison; SQLAlchemy does not implement __eq__
+ # itself, so we have to do it ourselves.
+ def __eq__(self_, other):
+ if isinstance(other, EqualsSqlAlchemyObject):
+ other = other.self
+
+ self = self_.self
+
+ classes_match = isinstance(other, self.__class__)
+ a, b = deepcopy(self.__dict__), deepcopy(other.__dict__)
+
+ # Compare our attributes for equality, ignoring SQLAlchemy's internal state.
+
+ a.pop('_sa_instance_state', None)
+ b.pop('_sa_instance_state', None)
+
+ for f in self_.ignore_fields:
+ a.pop(f, None)
+ b.pop(f, None)
+
+ attrs_match = (a == b)
+ return classes_match and attrs_match
+
+ def __repr__(self):
+ return repr(self.self.__dict__)
+
+
+def assert_eq_ignore_id(left, right):
+ # This pretty-prints the raw dictionary of the sqlalchemy object if it fails.
+ # It does field-by-field comparison, but ignores the 'id' field.
+ assert EqualsSqlAlchemyObject(left, ignore_fields=['id']) == EqualsSqlAlchemyObject(right)
+
+def parse_trace_file_to_db(*contents):
+ """
+ Make a temporary in-memory sqlite3 database by parsing the string contents as a trace.
+
+ :return: Trace2Db instance
+ """
+ buf = io.StringIO()
+
+ for c in contents:
+ buf.write(c)
+ buf.write("\n")
+
+ buf.seek(0)
+
+ t2d = Trace2Db(":memory:")
+ t2d.parse_file_buf_into_db(buf)
+
+ buf.close()
+
+ return t2d
+
+def test_ftrace_mm_filemap_add_to_pagecache():
+ test_contents = """
+MediaStoreImpor-27212 (27176) [000] .... 16136.595194: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=0000000060e990c7 pfn=677646 ofs=159744
+MediaStoreImpor-27212 (27176) [000] .... 16136.595920: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=0000000048e2e156 pfn=677645 ofs=126976
+MediaStoreImpor-27212 (27176) [000] .... 16136.597793: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=0000000051eabfb2 pfn=677644 ofs=122880
+MediaStoreImpor-27212 (27176) [000] .... 16136.597815: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=00000000ce7cd606 pfn=677643 ofs=131072
+MediaStoreImpor-27212 (27176) [000] .... 16136.603732: mm_filemap_add_to_page_cache: dev 253:6 ino 1 page=000000008ffd3030 pfn=730119 ofs=186482688
+MediaStoreImpor-27212 (27176) [000] .... 16136.604126: mm_filemap_add_to_page_cache: dev 253:6 ino b1d8 page=0000000098d4d2e2 pfn=829676 ofs=0
+ <...>-27197 (-----) [002] .... 16136.613471: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=00000000aca88a97 pfn=743346 ofs=241664
+ <...>-27197 (-----) [002] .... 16136.615979: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=00000000351f2bc1 pfn=777799 ofs=106496
+ <...>-27224 (-----) [006] .... 16137.400090: mm_filemap_add_to_page_cache: dev 253:6 ino 712d page=000000006ff7ffdb pfn=754861 ofs=0
+ <...>-1396 (-----) [000] .... 16137.451660: mm_filemap_add_to_page_cache: dev 253:6 ino 1 page=00000000ba0cbb34 pfn=769173 ofs=187191296
+ <...>-1396 (-----) [000] .... 16137.453020: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=00000000f6ef038e pfn=820291 ofs=0
+ <...>-1396 (-----) [000] .... 16137.453067: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=0000000083ebc446 pfn=956463 ofs=4096
+ <...>-1396 (-----) [000] .... 16137.453101: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=000000009dc2cd25 pfn=822813 ofs=8192
+ <...>-1396 (-----) [000] .... 16137.453113: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=00000000a11167fb pfn=928650 ofs=12288
+ <...>-1396 (-----) [000] .... 16137.453126: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=00000000c1c3311b pfn=621110 ofs=16384
+ <...>-1396 (-----) [000] .... 16137.453139: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=000000009aa78342 pfn=689370 ofs=20480
+ <...>-1396 (-----) [000] .... 16137.453151: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=0000000082cddcd6 pfn=755584 ofs=24576
+ <...>-1396 (-----) [000] .... 16137.453162: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=00000000b0249bc7 pfn=691431 ofs=28672
+ <...>-1396 (-----) [000] .... 16137.453183: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=000000006a776ff0 pfn=795084 ofs=32768
+ <...>-1396 (-----) [000] .... 16137.453203: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=000000001a4918a7 pfn=806998 ofs=36864
+ <...>-2578 (-----) [002] .... 16137.561871: mm_filemap_add_to_page_cache: dev 253:6 ino 1 page=00000000d65af9d2 pfn=719246 ofs=187015168
+ <...>-2578 (-----) [002] .... 16137.562846: mm_filemap_add_to_page_cache: dev 253:6 ino b25a page=000000002f6ba74f pfn=864982 ofs=0
+ <...>-2578 (-----) [000] .... 16138.104500: mm_filemap_add_to_page_cache: dev 253:6 ino 1 page=00000000f888d0f6 pfn=805812 ofs=192794624
+ <...>-2578 (-----) [000] .... 16138.105836: mm_filemap_add_to_page_cache: dev 253:6 ino b7dd page=000000003749523b pfn=977196 ofs=0
+ <...>-27215 (-----) [001] .... 16138.256881: mm_filemap_add_to_page_cache: dev 253:6 ino 758f page=000000001b375de1 pfn=755928 ofs=0
+ <...>-27215 (-----) [001] .... 16138.257526: mm_filemap_add_to_page_cache: dev 253:6 ino 7591 page=000000004e039481 pfn=841534 ofs=0
+ NonUserFacing6-5246 ( 1322) [005] .... 16138.356491: mm_filemap_add_to_page_cache: dev 253:6 ino 1 page=00000000d65af9d2 pfn=719246 ofs=161890304
+ NonUserFacing6-5246 ( 1322) [005] .... 16138.357538: mm_filemap_add_to_page_cache: dev 253:6 ino 9a64 page=000000002f6ba74f pfn=864982 ofs=0
+ NonUserFacing6-5246 ( 1322) [005] .... 16138.357581: mm_filemap_add_to_page_cache: dev 253:6 ino 9a64 page=000000006e0f8322 pfn=797894 ofs=4096
+ <...>-27197 (-----) [005] .... 16140.143224: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=00000000a42527c6 pfn=1076669 ofs=32768
+ """
+
+ t2d = parse_trace_file_to_db(test_contents)
+ session = t2d.session
+
+ first_row = session.query(MmFilemapAddToPageCache).order_by(MmFilemapAddToPageCache.id).first()
+
+ #dev 253:6 ino 7580 page=0000000060e990c7 pfn=677646 ofs=159744
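+ # (dev=64774 is the packed form of 253:6, i.e. (253 << 8) | 6)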
+ assert_eq_ignore_id(MmFilemapAddToPageCache(dev=64774, dev_major=253, dev_minor=6,
+ ino=0x7580, page=0x0000000060e990c7, pfn=677646, ofs=159744), first_row)
+
+ second_to_last_row = session.query(MmFilemapAddToPageCache).filter(MmFilemapAddToPageCache.page.in_([0x000000006e0f8322])).first()
+
+ # dev 253:6 ino 9a64 page=000000006e0f8322 pfn=797894 ofs=4096
+ assert_eq_ignore_id(MmFilemapAddToPageCache(dev=64774, dev_major=253, dev_minor=6,
+ ino=0x9a64, page=0x000000006e0f8322, pfn=797894, ofs=4096), second_to_last_row)
+
+def test_systrace_mm_filemap_add_to_pagecache():
+ test_contents = """
+<!DOCTYPE html>
+<html>
+<head i18n-values="dir:textdirection;">
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+<meta charset="utf-8"/>
+<title>Android System Trace</title>
+ <script class="trace-data" type="application/text">
+PROCESS DUMP
+USER PID PPID VSZ RSS WCHAN PC S NAME COMM
+root 1 0 62148 5976 0 0 S init [init]
+root 2 0 0 0 0 0 S [kthreadd] [kthreadd]
+ </script>
+
+ <script class="trace-data" type="application/text">
+MediaStoreImpor-27212 (27176) [000] .... 16136.595194: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=0000000060e990c7 pfn=677646 ofs=159744
+NonUserFacing6-5246 ( 1322) [005] .... 16138.357581: mm_filemap_add_to_page_cache: dev 253:6 ino 9a64 page=000000006e0f8322 pfn=797894 ofs=4096
+ </script>
+
+ <script class="trace-data" type="application/text">
+{"traceEvents": [{"category": "process_argv", "name": "process_argv", "args": {"argv": ["/mnt/ssd3/workspace/master/external/chromium-trace/systrace.py", "-t", "5", "pagecache"]}, "pid": 160383, "ts": 1037300940509.7991, "tid": 139628672526080, "ph": "M"}, {"category": "python", "name": "clock_sync", "args": {"issue_ts": 1037307346185.212, "sync_id": "9a7e4fe3-89ad-441f-8226-8fe533fe973e"}, "pid": 160383, "ts": 1037307351643.906, "tid": 139628726089536, "ph": "c"}], "metadata": {"clock-domain": "SYSTRACE"}}
+ </script>
+<!-- END TRACE -->
+ """
+
+ t2d = parse_trace_file_to_db(test_contents)
+ session = t2d.session
+
+ first_row = session.query(MmFilemapAddToPageCache).order_by(MmFilemapAddToPageCache.id).first()
+
+ #dev 253:6 ino 7580 page=0000000060e990c7 pfn=677646 ofs=159744
+ assert_eq_ignore_id(MmFilemapAddToPageCache(dev=64774, dev_major=253, dev_minor=6,
+ ino=0x7580, page=0x0000000060e990c7, pfn=677646, ofs=159744), first_row)
+
+ second_to_last_row = session.query(MmFilemapAddToPageCache).filter(MmFilemapAddToPageCache.page.in_([0x000000006e0f8322])).first()
+
+ # dev 253:6 ino 9a64 page=000000006e0f8322 pfn=797894 ofs=4096
+ assert_eq_ignore_id(MmFilemapAddToPageCache(dev=64774, dev_major=253, dev_minor=6,
+ ino=0x9a64, page=0x000000006e0f8322, pfn=797894, ofs=4096), second_to_last_row)
+
+def test_timestamp_filter():
+ test_contents = """
+ MediaStoreImpor-27212 (27176) [000] .... 16136.595194: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=0000000060e990c7 pfn=677646 ofs=159744
+ NonUserFacing6-5246 ( 1322) [005] .... 16139.357581: mm_filemap_add_to_page_cache: dev 253:6 ino 9a64 page=000000006e0f8322 pfn=797894 ofs=4096
+ MediaStoreImpor-27212 (27176) [000] .... 16136.604126: mm_filemap_add_to_page_cache: dev 253:6 ino b1d8 page=0000000098d4d2e2 pfn=829676 ofs=0
+ """
+
+ t2d = parse_trace_file_to_db(test_contents)
+ session = t2d.session
+
+ end_time = 16137.0
+
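+ # Join through the 1:1 raw_ftrace_entry relationship so rows can be filtered
+ # by the raw entry's timestamp.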
+ results = session.query(MmFilemapAddToPageCache).join(
+ MmFilemapAddToPageCache.raw_ftrace_entry).filter(
+ RawFtraceEntry.timestamp <= end_time).order_by(
+ MmFilemapAddToPageCache.id).all()
+
+ assert len(results) == 2
+ assert_eq_ignore_id(
+ MmFilemapAddToPageCache(dev=64774, dev_major=253, dev_minor=6,
+ ino=0x7580, page=0x0000000060e990c7, pfn=677646,
+ ofs=159744), results[0])
+ assert_eq_ignore_id(
+ MmFilemapAddToPageCache(dev=64774, dev_major=253, dev_minor=6,
+ ino=0xb1d8, page=0x0000000098d4d2e2, pfn=829676,
+ ofs=0), results[1])
+
+
+if __name__ == '__main__':
+ pytest.main()