Lines Matching refs:it

 52    for (BacktraceMap::const_iterator it = iters.first; it != iters.second; ++it) {
 54                           static_cast<uint32_t>(it->start),
 55                           static_cast<uint32_t>(it->end),
 56                           (it->flags & PROT_READ) ? 'r' : '-',
 57                           (it->flags & PROT_WRITE) ? 'w' : '-',
 58                           (it->flags & PROT_EXEC) ? 'x' : '-', it->name.c_str());
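These hits are a /proc/self/maps-style renderer: each PROT_* bit becomes 'r', 'w', or 'x', with '-' for a cleared bit. A minimal standalone sketch of the same rendering, assuming a hypothetical MapEntry struct in place of a BacktraceMap entry (struct and sample values are illustrative only):

  #include <cstdint>
  #include <cstdio>
  #include <sys/mman.h>

  // Hypothetical stand-in for one BacktraceMap entry.
  struct MapEntry {
    uint32_t start;
    uint32_t end;
    int flags;  // PROT_* bits
    const char* name;
  };

  int main() {
    MapEntry e{0x70000000, 0x70004000, PROT_READ | PROT_EXEC, "example.oat"};
    // Same layout as a /proc/self/maps line: range, permissions, name.
    std::printf("0x%08x-0x%08x %c%c%c %s\n",
                e.start, e.end,
                (e.flags & PROT_READ) ? 'r' : '-',
                (e.flags & PROT_WRITE) ? 'w' : '-',
                (e.flags & PROT_EXEC) ? 'x' : '-',
                e.name);
    return 0;
  }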
 65    for (auto it = mem_maps.begin(); it != mem_maps.end(); ++it) {
 66      void* base = it->first;
 67      MemMap* map = it->second;
 83  // To not interfere with image position, take the image's address and only place it below. Current
102  // Function is standalone so it can be tested somewhat in mem_map_test.cc.
162    for (BacktraceMap::const_iterator it = map->begin(); it != map->end(); ++it) {
163      if ((begin >= it->start && begin < it->end)  // start of new within old
164          && (end > it->start && end <= it->end)) {  // end of new within old
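Lines 163-164 test containment: the new range [begin, end) counts as inside an existing map only if both its start and its end fall within that map. A minimal sketch of the predicate, assuming half-open ranges (the function name is illustrative):

  #include <cassert>
  #include <cstdint>

  // True iff [begin, end) lies entirely inside [start, stop). Given
  // begin < end and start < stop, the two clauses reduce to
  // begin >= start && end <= stop.
  bool ContainedWithin(uintptr_t begin, uintptr_t end,
                       uintptr_t start, uintptr_t stop) {
    return (begin >= start && begin < stop)  // start of new within old
        && (end > start && end <= stop);     // end of new within old
  }

  int main() {
    assert(ContainedWithin(0x2000, 0x3000, 0x1000, 0x4000));   // strictly inside
    assert(ContainedWithin(0x1000, 0x4000, 0x1000, 0x4000));   // exact match counts
    assert(!ContainedWithin(0x0000, 0x3000, 0x1000, 0x4000));  // starts before old
    return 0;
  }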
186    for (BacktraceMap::const_iterator it = map->begin(); it != map->end(); ++it) {
187      if ((begin >= it->start && begin < it->end)  // start of new within old
188          || (end > it->start && end < it->end)  // end of new within old
189          || (begin <= it->start && end > it->end)) {  // start/end of new includes all of old
191        map_info << std::make_pair(it, map->end());
195                                  static_cast<uintptr_t>(it->start), static_cast<uintptr_t>(it->end),
196                                  it->name.c_str(),
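Lines 187-189 enumerate overlap three ways: the new range starts inside an old one, ends inside it, or swallows it whole. One boundary case slips through as written (begin below it->start with end exactly equal to it->end satisfies none of the three clauses), whereas the canonical half-open test begin < stop && end > start catches every overlap. A self-contained sketch contrasting the two:

  #include <cassert>
  #include <cstdint>

  // Mirrors the three clauses above; stop plays the role of it->end.
  bool OverlapsThreeWay(uintptr_t begin, uintptr_t end,
                        uintptr_t start, uintptr_t stop) {
    return (begin >= start && begin < stop)  // start of new within old
        || (end > start && end < stop)       // end of new within old
        || (begin <= start && end > stop);   // new includes all of old
  }

  // Canonical overlap test for half-open intervals.
  bool Overlaps(uintptr_t begin, uintptr_t end,
                uintptr_t start, uintptr_t stop) {
    return begin < stop && end > start;
  }

  int main() {
    assert(OverlapsThreeWay(0x3000, 0x5000, 0x4000, 0x6000));  // starts inside old
    // New range starts below the old one and ends exactly at its end: a real
    // overlap the three clauses miss but the canonical test reports.
    assert(!OverlapsThreeWay(0x0000, 0x4000, 0x1000, 0x4000));
    assert(Overlaps(0x0000, 0x4000, 0x1000, 0x4000));
    return 0;
  }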
267    // Since we didn't use MAP_FIXED the kernel may have mapped it somewhere not in the low
296    // reuse means it is okay that it overlaps an existing page mapping.
389    // reuse means it is okay that it overlaps an existing page mapping.
468    // before it is returned to the system.
483    // Remove it from maps_.
487    for (auto it = maps_->lower_bound(base_begin_), end = maps_->end();
488         it != end && it->first == base_begin_; ++it) {
489      if (it->second == this) {
491        maps_->erase(it);
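Lines 487-491 are the standard idiom for deleting one specific (key, value) pair from a multimap: lower_bound lands on the first entry with the key, the loop condition stops once the key changes, and erase removes only the entry whose value matches. A self-contained sketch using a plain std::multimap (the listing's maps_ maps base addresses to MemMap*; the values here are arbitrary):

  #include <cassert>
  #include <map>

  int main() {
    std::multimap<void*, int> maps;
    int a = 0, b = 0;
    void* key = &a;
    maps.emplace(key, 10);
    maps.emplace(key, 20);  // same key, different value
    maps.emplace(&b, 30);

    // Erase only the (key, 20) entry: scan the equal-key run and stop at the
    // first entry whose mapped value matches.
    for (auto it = maps.lower_bound(key), end = maps.end();
         it != end && it->first == key; ++it) {
      if (it->second == 20) {
        maps.erase(it);
        break;  // erase invalidates it, and we are done anyway
      }
    }
    assert(maps.count(key) == 1);
    return 0;
  }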
511    // Add it to maps_.
601    // it to msync() as it only accepts page-aligned base address, and exclude the higher-end
668    for (auto it = mem_maps.begin(), maps_end = mem_maps.end(); it != maps_end;) {
669      MemMap* map = it->second;
670      void* base = it->first;
673        ++it;
681        while (it != maps_end &&
682            it->second->GetProtect() == map->GetProtect() &&
683            it->second->GetName() == map->GetName() &&
684            (it->second->BaseBegin() == end || num_gaps < kMaxGaps)) {
685          if (it->second->BaseBegin() != end) {
692              reinterpret_cast<uintptr_t>(it->second->BaseBegin()) - reinterpret_cast<uintptr_t>(end);
698          CHECK_ALIGNED(it->second->BaseSize(), kPageSize);
700          size += it->second->BaseSize();
701          end = it->second->BaseEnd();
702          ++it;
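The loop at 668-702 drives the terse map dump: starting from one map, it keeps absorbing successors whose protection and name match, tolerating address gaps until a kMaxGaps budget is spent, and accumulates the combined size and end address. A sketch of the same run-building logic over a flat vector, assuming hypothetical Region entries sorted by address:

  #include <cstdio>
  #include <vector>

  // Hypothetical flat stand-in for a mapping; the listing iterates a multimap
  // of base address -> MemMap* instead.
  struct Region {
    unsigned begin;
    unsigned end;
    int prot;
  };

  int main() {
    const std::vector<Region> regions = {
        {0x1000, 0x2000, 3}, {0x2000, 0x3000, 3},  // adjacent, same prot
        {0x5000, 0x6000, 3},                       // gap before this one
        {0x7000, 0x8000, 1}};                      // different prot: new run
    const size_t kMaxGaps = 1;  // gap budget assumed for this sketch

    for (size_t i = 0; i < regions.size();) {
      const Region& first = regions[i];
      unsigned end = first.end;
      size_t num = 1, num_gaps = 0;
      ++i;
      // Extend the run while attributes match and the next region is either
      // flush against the current end or within the remaining gap budget.
      while (i < regions.size() && regions[i].prot == first.prot &&
             (regions[i].begin == end || num_gaps < kMaxGaps)) {
        if (regions[i].begin != end) {
          ++num_gaps;
        }
        end = regions[i].end;
        ++num;
        ++i;
      }
      std::printf("0x%x-0x%x prot=%d (%zu regions, %zu gaps)\n",
                  first.begin, end, first.prot, num, num_gaps);
    }
    return 0;
  }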
714    for (auto it = maps_->lower_bound(base_begin), end = maps_->end();
715         it != end && it->first == base_begin; ++it) {
716      if (it->second == map) {
727    for (auto it = maps_->lower_bound(address), end = maps_->end();
728         it != end && it->first == address; ++it) {
729      MemMap* map = it->second;
810    auto it = maps_->upper_bound(reinterpret_cast<void*>(ptr));
811    if (it != maps_->begin()) {
812      auto before_it = it;
818    while (it != maps_->end()) {
820      size_t delta = reinterpret_cast<uintptr_t>(it->first) - ptr;
826      ptr = reinterpret_cast<uintptr_t>(it->second->BaseEnd());
828      ++it;
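Lines 810-828 search the reservation table for a free hole: upper_bound positions just past the candidate pointer, the preceding entry bumps the pointer out of any map it already sits inside, and the loop then advances past each reservation until the gap before the next one is large enough. A sketch over a plain std::map, assuming a hypothetical base -> end reservation table and made-up addresses (the listing stores MemMap* values and reads BaseEnd() instead):

  #include <cstdint>
  #include <cstdio>
  #include <map>

  int main() {
    // Hypothetical non-overlapping reservations: base address -> end address.
    std::map<uintptr_t, uintptr_t> reserved = {
        {0x10000, 0x20000}, {0x20000, 0x28000}, {0x40000, 0x50000}};
    const uintptr_t size = 0x8000;  // bytes needed
    uintptr_t ptr = 0x12000;        // candidate start address

    auto it = reserved.upper_bound(ptr);
    if (it != reserved.begin()) {
      auto before_it = it;
      --before_it;
      if (before_it->second > ptr) {
        ptr = before_it->second;  // ptr was inside the previous reservation
      }
    }
    while (it != reserved.end()) {
      uintptr_t delta = it->first - ptr;
      if (delta >= size) {
        break;  // the hole [ptr, it->first) is big enough
      }
      ptr = it->second;  // skip past this reservation
      ++it;
    }
    std::printf("hole found at 0x%lx\n", static_cast<unsigned long>(ptr));  // 0x28000
    return 0;
  }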