// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/trace_event/heap_profiler_allocation_context.h"

#include <algorithm>
#include <cstring>

#include "base/hash.h"
#include "base/macros.h"
namespace base {
namespace trace_event {

bool operator<(const StackFrame& lhs, const StackFrame& rhs) {
  return lhs.value < rhs.value;
}

bool operator==(const StackFrame& lhs, const StackFrame& rhs) {
  return lhs.value == rhs.value;
}

bool operator!=(const StackFrame& lhs, const StackFrame& rhs) {
  return !(lhs == rhs);
}

Backtrace::Backtrace() : frame_count(0) {}

bool operator==(const Backtrace& lhs, const Backtrace& rhs) {
  if (lhs.frame_count != rhs.frame_count)
    return false;
  return std::equal(lhs.frames, lhs.frames + lhs.frame_count, rhs.frames);
}

bool operator!=(const Backtrace& lhs, const Backtrace& rhs) {
  return !(lhs == rhs);
}

AllocationContext::AllocationContext() : type_name(nullptr) {}

AllocationContext::AllocationContext(const Backtrace& backtrace,
                                     const char* type_name)
    : backtrace(backtrace), type_name(type_name) {}

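// |type_name| is compared by pointer rather than by content; this assumes the
// name points to a string literal (or an otherwise interned string), so that
// pointer equality implies string equality.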
bool operator==(const AllocationContext& lhs, const AllocationContext& rhs) {
  return (lhs.backtrace == rhs.backtrace) && (lhs.type_name == rhs.type_name);
}

bool operator!=(const AllocationContext& lhs, const AllocationContext& rhs) {
  return !(lhs == rhs);
}

}  // namespace trace_event
}  // namespace base

namespace BASE_HASH_NAMESPACE {
using base::trace_event::AllocationContext;
using base::trace_event::Backtrace;
using base::trace_event::StackFrame;

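// Hashing a StackFrame reduces to hashing the raw pointer stored in |value|.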
size_t hash<StackFrame>::operator()(const StackFrame& frame) const {
  return hash<const void*>()(frame.value);
}

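// Only the first |frame_count| frames contribute to the hash, so the unused
// trailing slots of |frames| do not affect the result.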
size_t hash<Backtrace>::operator()(const Backtrace& backtrace) const {
  const void* values[Backtrace::kMaxFrameCount];
  for (size_t i = 0; i != backtrace.frame_count; ++i) {
    values[i] = backtrace.frames[i].value;
  }
  return base::SuperFastHash(
      reinterpret_cast<const char*>(values),
      static_cast<int>(backtrace.frame_count * sizeof(*values)));
}

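// Like operator== above, this hashes |type_name| by pointer rather than by
// content, so contexts that compare equal always hash to the same value.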
size_t hash<AllocationContext>::operator()(
    const AllocationContext& ctx) const {
  size_t backtrace_hash = hash<Backtrace>()(ctx.backtrace);

  // Multiplicative hash from [Knuth 1998]. It works best if |size_t| is 32
  // bits, because the magic number is a prime very close to 2^32 / golden
  // ratio, but it still permutes keys bijectively on 64-bit architectures
  // because the magic number is coprime to 2^64.
  size_t type_hash = reinterpret_cast<size_t>(ctx.type_name) * 2654435761;

  // Multiply one side to break the commutativity of +. Multiplication with a
  // number coprime to |numeric_limits<size_t>::max() + 1| is bijective, so
  // randomness is preserved.
  return (backtrace_hash * 3) + type_hash;
}

}  // BASE_HASH_NAMESPACE