#!/usr/bin/python2.4
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Tests exercising chromiumsync and SyncDataModel."""

import unittest

import autofill_specifics_pb2
import chromiumsync
import sync_pb2
import theme_specifics_pb2

class SyncDataModelTest(unittest.TestCase):
  def setUp(self):
    self.model = chromiumsync.SyncDataModel()

  def AddToModel(self, proto):
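    """Adds an entry directly to the model's entry map, bypassing commits."""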
    self.model._entries[proto.id_string] = proto

  def GetChangesFromTimestamp(self, requested_types, timestamp):
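    """Builds a legacy from_timestamp GetUpdates request for requested_types
    and returns the model's (version, changes, remaining) response.
    """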
    message = sync_pb2.GetUpdatesMessage()
    message.from_timestamp = timestamp
    for data_type in requested_types:
      message.requested_types.Extensions[
        chromiumsync.SYNC_TYPE_TO_EXTENSION[data_type]].SetInParent()
    return self.model.GetChanges(chromiumsync.UpdateSieve(message))

  def testPermanentItemSpecs(self):
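    """Permanent item specs must declare parents first and cover every type."""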
    specs = chromiumsync.SyncDataModel._PERMANENT_ITEM_SPECS

    declared_specs = set(['0'])
    for spec in specs:
      self.assertTrue(spec.parent_tag in declared_specs, 'parent tags must '
                      'be declared before use')
      declared_specs.add(spec.tag)

    unique_datatypes = set([x.sync_type for x in specs])
    self.assertEqual(unique_datatypes, set(chromiumsync.ALL_TYPES),
                     'Every sync datatype should have a permanent folder '
                     'associated with it')

  def testSaveEntry(self):
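    """_SaveEntry should bump the version and store a copy of the entry."""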
    proto = sync_pb2.SyncEntity()
    proto.id_string = 'abcd'
    proto.version = 0
    self.assertFalse(self.model._ItemExists(proto.id_string))
    self.model._SaveEntry(proto)
    self.assertEqual(1, proto.version)
    self.assertTrue(self.model._ItemExists(proto.id_string))
    self.model._SaveEntry(proto)
    self.assertEqual(2, proto.version)
    proto.version = 0
    self.assertTrue(self.model._ItemExists(proto.id_string))
    self.assertEqual(2, self.model._entries[proto.id_string].version)

  def testWritePosition(self):
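    """_WritePosition should place new and existing items relative to the
    parent and predecessor, covering first, middle, last, and reparenting.
    """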
    def MakeProto(id_string, parent, position):
      proto = sync_pb2.SyncEntity()
      proto.id_string = id_string
      proto.position_in_parent = position
      proto.parent_id_string = parent
      self.AddToModel(proto)

    MakeProto('a', 'X', 1000)
    MakeProto('b', 'X', 1800)
    MakeProto('c', 'X', 2600)
    MakeProto('a1', 'Z', 1007)
    MakeProto('a2', 'Z', 1807)
    MakeProto('a3', 'Z', 2607)
    MakeProto('s', 'Y', 10000)

    def AssertPositionResult(my_id, parent_id, prev_id, expected_position):
      entry = sync_pb2.SyncEntity()
      entry.id_string = my_id
      self.model._WritePosition(entry, parent_id, prev_id)
      self.assertEqual(expected_position, entry.position_in_parent)
      self.assertEqual(parent_id, entry.parent_id_string)
      self.assertFalse(entry.HasField('insert_after_item_id'))

    AssertPositionResult('new', 'new_parent', '', 0)
    AssertPositionResult('new', 'Y', '', 10000 - (2 ** 20))
    AssertPositionResult('new', 'Y', 's', 10000 + (2 ** 20))
    AssertPositionResult('s', 'Y', '', 10000)
    AssertPositionResult('s', 'Y', 's', 10000)
    AssertPositionResult('a1', 'Z', '', 1007)

    AssertPositionResult('new', 'X', '', 1000 - (2 ** 20))
    AssertPositionResult('new', 'X', 'a', 1100)
    AssertPositionResult('new', 'X', 'b', 1900)
    AssertPositionResult('new', 'X', 'c', 2600 + (2 ** 20))

    AssertPositionResult('a1', 'X', '', 1000 - (2 ** 20))
    AssertPositionResult('a1', 'X', 'a', 1100)
    AssertPositionResult('a1', 'X', 'b', 1900)
    AssertPositionResult('a1', 'X', 'c', 2600 + (2 ** 20))

    AssertPositionResult('a', 'X', '', 1000)
    AssertPositionResult('a', 'X', 'b', 1900)
    AssertPositionResult('a', 'X', 'c', 2600 + (2 ** 20))

    AssertPositionResult('b', 'X', '', 1000 - (2 ** 20))
    AssertPositionResult('b', 'X', 'a', 1800)
    AssertPositionResult('b', 'X', 'c', 2600 + (2 ** 20))

    AssertPositionResult('c', 'X', '', 1000 - (2 ** 20))
    AssertPositionResult('c', 'X', 'a', 1100)
    AssertPositionResult('c', 'X', 'b', 2600)

  def testCreatePermanentItems(self):
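    """Creating permanent items for all types should populate the model."""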
    self.model._CreatePermanentItems(chromiumsync.ALL_TYPES)
    self.assertEqual(len(chromiumsync.ALL_TYPES) + 2,
                     len(self.model._entries))

  def ExpectedPermanentItemCount(self, sync_type):
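    """Returns how many permanent items a GetUpdates for sync_type creates."""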
    if sync_type == chromiumsync.BOOKMARK:
      return 4
    elif sync_type == chromiumsync.TOP_LEVEL:
      return 1
    else:
      return 2

  def testGetChangesFromTimestampZeroForEachType(self):
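    """A first GetUpdates per type should return only that type's permanent
    items, and repeating it should not create duplicates.
    """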
    all_types = chromiumsync.ALL_TYPES[1:]
    for sync_type in all_types:
      self.model = chromiumsync.SyncDataModel()
      request_types = [sync_type]

      version, changes, remaining = (
          self.GetChangesFromTimestamp(request_types, 0))

      expected_count = self.ExpectedPermanentItemCount(sync_type)
      self.assertEqual(expected_count, version)
      self.assertEqual(expected_count, len(changes))
      self.assertEqual('google_chrome', changes[0].server_defined_unique_tag)
      for change in changes:
        self.assertTrue(change.HasField('server_defined_unique_tag'))
        self.assertEqual(change.version, change.sync_timestamp)
        self.assertTrue(change.version <= version)

      # Test idempotence: another GetUpdates from ts=0 shouldn't recreate.
      version, changes, remaining = (
          self.GetChangesFromTimestamp(request_types, 0))
      self.assertEqual(expected_count, version)
      self.assertEqual(expected_count, len(changes))
      self.assertEqual(0, remaining)

      # Doing a wider GetUpdates from timestamp zero shouldn't recreate either.
      new_version, changes, remaining = (
          self.GetChangesFromTimestamp(all_types, 0))
      self.assertEqual(len(chromiumsync.SyncDataModel._PERMANENT_ITEM_SPECS),
                       new_version)
      self.assertEqual(new_version, len(changes))
      self.assertEqual(0, remaining)
      version, changes, remaining = (
          self.GetChangesFromTimestamp(request_types, 0))
      self.assertEqual(new_version, version)
      self.assertEqual(expected_count, len(changes))
      self.assertEqual(0, remaining)

  def testBatchSize(self):
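    """GetUpdates should page results in _BATCH_SIZE chunks; deleted items
    are returned as tombstones without changing the batch counts.
    """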
    for sync_type in chromiumsync.ALL_TYPES[1:]:
      specifics = chromiumsync.GetDefaultEntitySpecifics(sync_type)
      self.model = chromiumsync.SyncDataModel()
      request_types = [sync_type]

      for i in range(self.model._BATCH_SIZE*3):
        entry = sync_pb2.SyncEntity()
        entry.id_string = 'batch test %d' % i
        entry.specifics.CopyFrom(specifics)
        self.model._SaveEntry(entry)
      last_bit = self.ExpectedPermanentItemCount(sync_type)
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, 0))
      self.assertEqual(self.model._BATCH_SIZE, version)
      self.assertEqual(self.model._BATCH_SIZE*2 + last_bit, changes_remaining)
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, version))
      self.assertEqual(self.model._BATCH_SIZE*2, version)
      self.assertEqual(self.model._BATCH_SIZE + last_bit, changes_remaining)
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, version))
      self.assertEqual(self.model._BATCH_SIZE*3, version)
      self.assertEqual(last_bit, changes_remaining)
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, version))
      self.assertEqual(self.model._BATCH_SIZE*3 + last_bit, version)
      self.assertEqual(0, changes_remaining)

      # Now delete a third of the items.
      for i in xrange(self.model._BATCH_SIZE*3 - 1, 0, -3):
        entry = sync_pb2.SyncEntity()
        entry.id_string = 'batch test %d' % i
        entry.deleted = True
        self.model._SaveEntry(entry)

      # The batch counts shouldn't change.
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, 0))
      self.assertEqual(self.model._BATCH_SIZE, len(changes))
      self.assertEqual(self.model._BATCH_SIZE*2 + last_bit, changes_remaining)
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, version))
      self.assertEqual(self.model._BATCH_SIZE, len(changes))
      self.assertEqual(self.model._BATCH_SIZE + last_bit, changes_remaining)
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, version))
      self.assertEqual(self.model._BATCH_SIZE, len(changes))
      self.assertEqual(last_bit, changes_remaining)
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, version))
      self.assertEqual(last_bit, len(changes))
      self.assertEqual(self.model._BATCH_SIZE*4 + last_bit, version)
      self.assertEqual(0, changes_remaining)

  def testCommitEachDataType(self):
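    """Commits new and updated items for each data type, checking server IDs,
    originator fields, ordering, and defensive copying.
    """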
    for sync_type in chromiumsync.ALL_TYPES[1:]:
      specifics = chromiumsync.GetDefaultEntitySpecifics(sync_type)
      self.model = chromiumsync.SyncDataModel()
      my_cache_guid = '112358132134'
      parent = 'foobar'
      commit_session = {}

      # Start with a GetUpdates from timestamp 0, to populate permanent items.
      original_version, original_changes, changes_remaining = (
          self.GetChangesFromTimestamp([sync_type], 0))

      def DoCommit(original=None, id_string='', name=None, parent=None,
                   prev=None):
        proto = sync_pb2.SyncEntity()
        if original is not None:
          proto.version = original.version
          proto.id_string = original.id_string
          proto.parent_id_string = original.parent_id_string
          proto.name = original.name
        else:
          proto.id_string = id_string
          proto.version = 0
        proto.specifics.CopyFrom(specifics)
        if name is not None:
          proto.name = name
        if parent:
          proto.parent_id_string = parent.id_string
        if prev:
          proto.insert_after_item_id = prev.id_string
        else:
          proto.insert_after_item_id = ''
        proto.folder = True
        proto.deleted = False
        result = self.model.CommitEntry(proto, my_cache_guid, commit_session)
        self.assertTrue(result)
        return (proto, result)

      # Commit a new item.
      proto1, result1 = DoCommit(name='namae', id_string='Foo',
                                 parent=original_changes[-1])
      # Commit an item whose parent is another item (referenced via the
      # pre-commit ID).
      proto2, result2 = DoCommit(name='Secondo', id_string='Bar',
                                 parent=proto1)
      # Commit a sibling of the second item.
      proto3, result3 = DoCommit(name='Third!', id_string='Baz',
                                 parent=proto1, prev=proto2)

      self.assertEqual(3, len(commit_session))
      for p, r in [(proto1, result1), (proto2, result2), (proto3, result3)]:
        self.assertNotEqual(r.id_string, p.id_string)
        self.assertEqual(r.originator_client_item_id, p.id_string)
        self.assertEqual(r.originator_cache_guid, my_cache_guid)
        self.assertTrue(r is not self.model._entries[r.id_string],
                        "Commit result didn't make a defensive copy.")
        self.assertTrue(p is not self.model._entries[r.id_string],
                        "Commit result didn't make a defensive copy.")
        self.assertEqual(commit_session.get(p.id_string), r.id_string)
        self.assertTrue(r.version > original_version)
      self.assertEqual(result1.parent_id_string, proto1.parent_id_string)
      self.assertEqual(result2.parent_id_string, result1.id_string)
      version, changes, remaining = (
          self.GetChangesFromTimestamp([sync_type], original_version))
      self.assertEqual(3, len(changes))
      self.assertEqual(0, remaining)
      self.assertEqual(original_version + 3, version)
      self.assertEqual([result1, result2, result3], changes)
      for c in changes:
        self.assertTrue(c is not self.model._entries[c.id_string],
                        "GetChanges didn't make a defensive copy.")
      self.assertTrue(result2.position_in_parent < result3.position_in_parent)
      self.assertEqual(0, result2.position_in_parent)

      # Now update the items so that the second item is the parent of the
      # first, with the first sandwiched between two new items (4 and 5).
      # Do this in a new commit session, meaning we'll reference items from
      # the first batch by their post-commit, server IDs.
      commit_session = {}
      old_cache_guid = my_cache_guid
      my_cache_guid = 'A different GUID'
      proto2b, result2b = DoCommit(original=result2,
                                   parent=original_changes[-1])
      proto4, result4 = DoCommit(id_string='ID4', name='Four',
                                 parent=result2, prev=None)
      proto1b, result1b = DoCommit(original=result1,
                                   parent=result2, prev=proto4)
      proto5, result5 = DoCommit(id_string='ID5', name='Five', parent=result2,
                                 prev=result1)

      self.assertEqual(2, len(commit_session), 'Only new items in second '
                       'batch should be in the session')
      for p, r, original in [(proto2b, result2b, proto2),
                             (proto4, result4, proto4),
                             (proto1b, result1b, proto1),
                             (proto5, result5, proto5)]:
        self.assertEqual(r.originator_client_item_id, original.id_string)
        if original is not p:
          self.assertEqual(r.id_string, p.id_string,
                           'Ids should be stable after first commit')
          self.assertEqual(r.originator_cache_guid, old_cache_guid)
        else:
          self.assertNotEqual(r.id_string, p.id_string)
          self.assertEqual(r.originator_cache_guid, my_cache_guid)
          self.assertEqual(commit_session.get(p.id_string), r.id_string)
        self.assertTrue(r is not self.model._entries[r.id_string],
                        "Commit result didn't make a defensive copy.")
        self.assertTrue(p is not self.model._entries[r.id_string],
                        "Commit didn't make a defensive copy.")
        self.assertTrue(r.version > p.version)
      version, changes, remaining = (
          self.GetChangesFromTimestamp([sync_type], original_version))
      self.assertEqual(5, len(changes))
      self.assertEqual(0, remaining)
      self.assertEqual(original_version + 7, version)
      self.assertEqual([result3, result2b, result4, result1b, result5], changes)
      for c in changes:
        self.assertTrue(c is not self.model._entries[c.id_string],
                        "GetChanges didn't make a defensive copy.")
      self.assertTrue(result4.parent_id_string ==
                      result1b.parent_id_string ==
                      result5.parent_id_string ==
                      result2b.id_string)
      self.assertTrue(result4.position_in_parent <
                      result1b.position_in_parent <
                      result5.position_in_parent)

  def testUpdateSieve(self):
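    """Exercises UpdateSieve with legacy from_timestamp requests, progress
    marker tokens, empty tokens, and migration timestamp tokens.
    """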
    # from_timestamp, legacy mode
    autofill = autofill_specifics_pb2.autofill
    theme = theme_specifics_pb2.theme
    msg = sync_pb2.GetUpdatesMessage()
    msg.from_timestamp = 15412
    msg.requested_types.Extensions[autofill].SetInParent()
    msg.requested_types.Extensions[theme].SetInParent()

    sieve = chromiumsync.UpdateSieve(msg)
    self.assertEqual(sieve._state,
        {chromiumsync.TOP_LEVEL: 15412,
         chromiumsync.AUTOFILL: 15412,
         chromiumsync.THEME: 15412})

    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(15412, response)
    self.assertEqual(0, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))

    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(15413, response)
    self.assertEqual(0, len(response.new_progress_marker))
    self.assertTrue(response.HasField('new_timestamp'))
    self.assertEqual(15413, response.new_timestamp)

    # Existing tokens
    msg = sync_pb2.GetUpdatesMessage()
    marker = msg.from_progress_marker.add()
    marker.data_type_id = autofill.number
    marker.token = '15412'
    marker = msg.from_progress_marker.add()
    marker.data_type_id = theme.number
    marker.token = '15413'
    sieve = chromiumsync.UpdateSieve(msg)
    self.assertEqual(sieve._state,
        {chromiumsync.TOP_LEVEL: 15412,
         chromiumsync.AUTOFILL: 15412,
         chromiumsync.THEME: 15413})

    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(15413, response)
    self.assertEqual(1, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))
    marker = response.new_progress_marker[0]
    self.assertEqual(marker.data_type_id, autofill.number)
    self.assertEqual(marker.token, '15413')
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))

    # Empty tokens indicating from timestamp = 0
    msg = sync_pb2.GetUpdatesMessage()
    marker = msg.from_progress_marker.add()
    marker.data_type_id = autofill.number
    marker.token = '412'
    marker = msg.from_progress_marker.add()
    marker.data_type_id = theme.number
    marker.token = ''
    sieve = chromiumsync.UpdateSieve(msg)
    self.assertEqual(sieve._state,
        {chromiumsync.TOP_LEVEL: 0,
         chromiumsync.AUTOFILL: 412,
         chromiumsync.THEME: 0})
    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(1, response)
    self.assertEqual(1, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))
    marker = response.new_progress_marker[0]
    self.assertEqual(marker.data_type_id, theme.number)
    self.assertEqual(marker.token, '1')
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))

    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(412, response)
    self.assertEqual(1, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))
    marker = response.new_progress_marker[0]
    self.assertEqual(marker.data_type_id, theme.number)
    self.assertEqual(marker.token, '412')
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))

    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(413, response)
    self.assertEqual(2, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))
    marker = response.new_progress_marker[0]
    self.assertEqual(marker.data_type_id, theme.number)
    self.assertEqual(marker.token, '413')
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))
    marker = response.new_progress_marker[1]
    self.assertEqual(marker.data_type_id, autofill.number)
    self.assertEqual(marker.token, '413')
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))

    # Migration token timestamps (client gives timestamp, server returns token)
    msg = sync_pb2.GetUpdatesMessage()
    marker = msg.from_progress_marker.add()
    marker.data_type_id = autofill.number
    marker.timestamp_token_for_migration = 15213
    marker = msg.from_progress_marker.add()
    marker.data_type_id = theme.number
    marker.timestamp_token_for_migration = 15211
    sieve = chromiumsync.UpdateSieve(msg)
    self.assertEqual(sieve._state,
        {chromiumsync.TOP_LEVEL: 15211,
         chromiumsync.AUTOFILL: 15213,
         chromiumsync.THEME: 15211})
    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(16000, response)  # There were updates
    self.assertEqual(2, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))
    marker = response.new_progress_marker[0]
    self.assertEqual(marker.data_type_id, theme.number)
    self.assertEqual(marker.token, '16000')
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))
    marker = response.new_progress_marker[1]
    self.assertEqual(marker.data_type_id, autofill.number)
    self.assertEqual(marker.token, '16000')
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))

    msg = sync_pb2.GetUpdatesMessage()
    marker = msg.from_progress_marker.add()
    marker.data_type_id = autofill.number
    marker.timestamp_token_for_migration = 3000
    marker = msg.from_progress_marker.add()
    marker.data_type_id = theme.number
    marker.timestamp_token_for_migration = 3000
    sieve = chromiumsync.UpdateSieve(msg)
    self.assertEqual(sieve._state,
        {chromiumsync.TOP_LEVEL: 3000,
         chromiumsync.AUTOFILL: 3000,
         chromiumsync.THEME: 3000})
    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(3000, response)  # Already up to date
    self.assertEqual(2, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))
    marker = response.new_progress_marker[0]
    self.assertEqual(marker.data_type_id, theme.number)
    self.assertEqual(marker.token, '3000')
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))
    marker = response.new_progress_marker[1]
    self.assertEqual(marker.data_type_id, autofill.number)
    self.assertEqual(marker.token, '3000')
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))


if __name__ == '__main__':
  unittest.main()