# file_storage.py revision a1401311d1ab56c4ed0a474bd38c108f75cb0cd9
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""This module handles file-backed storage of the core classes.

The storage is logically organized as follows:
Storage -> N Archives -> 1 Symbol index
                         N Snapshots -> 1 Mmaps dump.
                                     -> 0/1 Native heap dump.

Where an "archive" is essentially a collection of snapshots taken for a given
app at a given point in time.
"""

import datetime
import json
import os

from memory_inspector.core import memory_map
from memory_inspector.core import native_heap
from memory_inspector.core import symbol
from memory_inspector.data import serialization


class Storage(object):
  """The root of the file-backed storage.

  Maps to a directory on disk: settings files live directly inside it and
  each archive is a sub-directory."""

  # Per-name settings file, e.g. settings-adb.json.
  _SETTINGS_FILE = 'settings-%s.json'

  def __init__(self, root_path):
    """Creates a file-backed storage. Files will be placed in |root_path|."""
    self._root = root_path
    if not os.path.exists(self._root):
      os.makedirs(self._root)

  def LoadSettings(self, name):
    """Loads a key-value dict from the /settings-name.json file.

    This is for backend and device settings (e.g., symbols path, adb path).
    Returns an empty dict if the settings file does not exist."""
    file_path = os.path.join(self._root, Storage._SETTINGS_FILE % name)
    if not os.path.exists(file_path):
      return {}
    with open(file_path) as f:
      return json.load(f)

  def StoreSettings(self, name, settings):
    """Stores a key-value dict into the /settings-name.json file.

    Passing an empty dict deletes the settings file altogether."""
    assert isinstance(settings, dict)
    file_path = os.path.join(self._root, Storage._SETTINGS_FILE % name)
    if not settings:
      if os.path.exists(file_path):
        os.unlink(file_path)
      return
    with open(file_path, 'w') as f:
      # Note: json.dump returns None; nothing meaningful to return here.
      json.dump(settings, f)

  def ListArchives(self):
    """Lists archives (sorted by name).

    Each of them is a sub-folder inside the |root_path|."""
    return sorted(
        [name for name in os.listdir(self._root)
         if os.path.isdir(os.path.join(self._root, name))])

  def OpenArchive(self, archive_name, create=False):
    """Returns an instance of |Archive|.

    If |create| is True the archive folder is created when missing; otherwise
    the |Archive| constructor asserts on a non-existent path."""
    archive_path = os.path.join(self._root, archive_name)
    if not os.path.exists(archive_path) and create:
      os.makedirs(archive_path)
    return Archive(archive_name, archive_path)

  def DeleteArchive(self, archive_name):
    """Deletes the archive (removing its folder).

    Archives contain only flat files (no sub-directories), hence the
    unlink-all-then-rmdir strategy."""
    archive_path = os.path.join(self._root, archive_name)
    for f in os.listdir(archive_path):
      os.unlink(os.path.join(archive_path, f))
    os.rmdir(archive_path)


class Archive(object):
  """A collection of snapshots, each one holding one memory dump (per kind).

  On disk an archive is a folder holding one symbol db (syms.json), one empty
  .snapshot marker file per snapshot, and per-snapshot -mmap.json /
  -nheap.json dumps keyed by the snapshot's timestamp string."""

  _MMAP_EXT = '-mmap.json'
  _NHEAP_EXT = '-nheap.json'
  _SNAP_EXT = '.snapshot'
  _SYM_FILE = 'syms.json'
  # Timestamp format used for snapshot file names. Deliberately avoids ':'
  # (illegal in Windows file names) and includes microseconds for uniqueness.
  _TIME_FMT = '%Y-%m-%d_%H-%M-%S-%f'

  def __init__(self, name, path):
    assert os.path.isdir(path)
    self._name = name
    self._path = path
    # Timestamp string of the snapshot being written, set by StartNewSnapshot.
    self._cur_snapshot = None

  def StoreSymbols(self, symbols):
    """Stores the symbol db (one per the overall archive)."""
    assert isinstance(symbols, symbol.Symbols)
    file_path = os.path.join(self._path, Archive._SYM_FILE)
    with open(file_path, 'w') as f:
      json.dump(symbols, f, cls=serialization.Encoder)

  def HasSymbols(self):
    return os.path.exists(os.path.join(self._path, Archive._SYM_FILE))

  def LoadSymbols(self):
    assert self.HasSymbols()
    file_path = os.path.join(self._path, Archive._SYM_FILE)
    with open(file_path) as f:
      return json.load(f, cls=serialization.SymbolsDecoder)

  def StartNewSnapshot(self):
    """Creates a snapshot marker (an empty file) named after the current time.

    The marker is named e.g. 2014-01-01_02-03-04-000000.snapshot (see
    _TIME_FMT). Subsequent Store* calls attach their dumps to this snapshot.
    Returns the snapshot's timestamp as a datetime.datetime."""
    self._cur_snapshot = Archive._TimestampToStr(datetime.datetime.now())
    file_path = os.path.join(self._path,
                             self._cur_snapshot + Archive._SNAP_EXT)
    assert not os.path.exists(file_path)
    open(file_path, 'w').close()
    return datetime.datetime.strptime(self._cur_snapshot, Archive._TIME_FMT)

  def ListSnapshots(self):
    """Returns a sorted list of timestamps (datetime.datetime instances)."""
    file_names = sorted(
        [name[:-(len(Archive._SNAP_EXT))] for name in os.listdir(self._path)
         if name.endswith(Archive._SNAP_EXT)])
    timestamps = [datetime.datetime.strptime(x, Archive._TIME_FMT)
                  for x in file_names]
    return timestamps

  def StoreMemMaps(self, mmaps):
    """Stores the memory maps dump for the current snapshot."""
    assert isinstance(mmaps, memory_map.Map)
    assert self._cur_snapshot, 'Must call StartNewSnapshot first'
    file_path = os.path.join(self._path, self._cur_snapshot + Archive._MMAP_EXT)
    with open(file_path, 'w') as f:
      json.dump(mmaps, f, cls=serialization.Encoder)

  def HasMemMaps(self, timestamp):
    return self._HasSnapshotFile(timestamp, Archive._MMAP_EXT)

  def LoadMemMaps(self, timestamp):
    """Loads the memory maps dump for the snapshot at |timestamp|."""
    assert self.HasMemMaps(timestamp)
    snapshot_name = Archive._TimestampToStr(timestamp)
    file_path = os.path.join(self._path, snapshot_name + Archive._MMAP_EXT)
    with open(file_path) as f:
      return json.load(f, cls=serialization.MmapDecoder)

  def StoreNativeHeap(self, nheap):
    """Stores the native heap dump for the current snapshot."""
    assert isinstance(nheap, native_heap.NativeHeap)
    assert self._cur_snapshot, 'Must call StartNewSnapshot first'
    file_path = os.path.join(self._path,
                             self._cur_snapshot + Archive._NHEAP_EXT)
    with open(file_path, 'w') as f:
      json.dump(nheap, f, cls=serialization.Encoder)

  def HasNativeHeap(self, timestamp):
    return self._HasSnapshotFile(timestamp, Archive._NHEAP_EXT)

  def LoadNativeHeap(self, timestamp):
    """Loads the native heap dump for the snapshot at |timestamp|."""
    assert self.HasNativeHeap(timestamp)
    snapshot_name = Archive._TimestampToStr(timestamp)
    file_path = os.path.join(self._path, snapshot_name + Archive._NHEAP_EXT)
    with open(file_path) as f:
      return json.load(f, cls=serialization.NativeHeapDecoder)

  def _HasSnapshotFile(self, timestamp, ext):
    """Returns True if the snapshot at |timestamp| has a dump with |ext|."""
    name = Archive._TimestampToStr(timestamp)
    return os.path.exists(os.path.join(self._path, name + ext))

  @staticmethod
  def _TimestampToStr(timestamp):
    """Converts a datetime to the canonical snapshot file-name prefix."""
    return timestamp.strftime(Archive._TIME_FMT)