otBase.py revision 273a90074ac209d67b5e2cb8ea510cd6c2b10272
from __future__ import print_function, division
from fontTools.misc.py23 import *
from .DefaultTable import DefaultTable
import struct

class OverflowErrorRecord(object):
	def __init__(self, overflowTuple):
		self.tableType = overflowTuple[0]
		self.LookupListIndex = overflowTuple[1]
		self.SubTableIndex = overflowTuple[2]
		self.itemName = overflowTuple[3]
		self.itemIndex = overflowTuple[4]

	def __repr__(self):
		return str((self.tableType, "LookupIndex:", self.LookupListIndex, "SubTableIndex:", self.SubTableIndex, "ItemName:", self.itemName, "ItemIndex:", self.itemIndex))

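# OTLOffsetOverflowError is raised by OTTableWriter.getData() when a subtable
# offset does not fit in 16 bits; its value is an OverflowErrorRecord that
# locates the offending item, so that calling code can try to restructure the
# table and compile it again.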
class OTLOffsetOverflowError(Exception):
	def __init__(self, overflowErrorRecord):
		self.value = overflowErrorRecord

	def __str__(self):
		return repr(self.value)


class BaseTTXConverter(DefaultTable):

	"""Generic base class for TTX table converters. It functions as an
	adapter between the TTX (ttLib actually) table model and the model
	we use for OpenType tables, which is necessarily subtly different.
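
	For example (an illustrative sketch; assumes "font" is a TTFont that
	contains the table):

		table = font["GPOS"]         # a BaseTTXConverter subclass instance
		data = table.compile(font)   # binary table data, built via OTTableWriter
		table.decompile(data, font)  # parsed back through OTTableReader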
	"""

	def decompile(self, data, font):
		from . import otTables
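		# Debug aid: change the "True" below to "False" to collect cachingStats
		# (sub-reader counts per offset); they are printed after decompiling.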
		cachingStats = None if True else {}
		class GlobalState(object):
			def __init__(self, tableType, cachingStats):
				self.tableType = tableType
				self.cachingStats = cachingStats
		globalState = GlobalState(tableType=self.tableTag,
					  cachingStats=cachingStats)
		reader = OTTableReader(data, globalState)
		tableClass = getattr(otTables, self.tableTag)
		self.table = tableClass()
		self.table.decompile(reader, font)
		if cachingStats:
			stats = sorted([(v, k) for k, v in cachingStats.items()])
			stats.reverse()
			print("cachingStats for ", self.tableTag)
			for v, k in stats:
				if v < 2:
					break
				print(v, k)
			print("---", len(stats))

	def compile(self, font):
		""" Create a top-level OTTableWriter for the GPOS/GSUB table.
			Call the compile method for the table
				for each 'converter' record in the table converter list
					call converter's write method for each item in the value.
						- For simple items, the write method adds a string to the
						writer's self.items list.
						- For Struct/Table/Subtable items, it first adds a new writer to
						the writer's self.items, then calls the item's compile method.
						This creates a tree of writers, rooted at the GSUB/GPOS writer, with
						each writer representing a table, and the writer.items list containing
						the child data strings and writers.
			call the getAllData method
				call _doneWriting, which removes duplicates
				call _gatherTables. This traverses the tables, adding unique occurrences to a flat list of tables
				Traverse the flat list of tables, calling getDataLength on each to update their position
				Traverse the flat list of tables again, calling getData on each to get the data in the table, now that
				positions and offsets are known.

				If a lookup subtable overflows an offset, we have to start all over.
		"""
		class GlobalState(object):
			def __init__(self, tableType):
				self.tableType = tableType
		globalState = GlobalState(tableType=self.tableTag)
		writer = OTTableWriter(globalState)
		writer.parent = None
		self.table.compile(writer, font)
		return writer.getAllData()

	def toXML(self, writer, font):
		self.table.toXML2(writer, font)

	def fromXML(self, name, attrs, content, font):
		from . import otTables
		if not hasattr(self, "table"):
			tableClass = getattr(otTables, self.tableTag)
			self.table = tableClass()
		self.table.fromXML(name, attrs, content, font)


class OTTableReader(object):

	"""Helper class to retrieve data from an OpenType table."""

	__slots__ = ('data', 'offset', 'pos', 'globalState', 'localState')

	def __init__(self, data, globalState={}, localState=None, offset=0):
		self.data = data
		self.offset = offset
		self.pos = offset
		self.globalState = globalState
		self.localState = localState

	def getSubReader(self, offset):
		offset = self.offset + offset
		cachingStats = self.globalState.cachingStats
		if cachingStats is not None:
			cachingStats[offset] = cachingStats.get(offset, 0) + 1
		return self.__class__(self.data, self.globalState, self.localState, offset)

	def readUShort(self):
		pos = self.pos
		newpos = pos + 2
		value, = struct.unpack(">H", self.data[pos:newpos])
		self.pos = newpos
		return value

	def readShort(self):
		pos = self.pos
		newpos = pos + 2
		value, = struct.unpack(">h", self.data[pos:newpos])
		self.pos = newpos
		return value

	def readLong(self):
		pos = self.pos
		newpos = pos + 4
		value, = struct.unpack(">l", self.data[pos:newpos])
		self.pos = newpos
		return value

	def readUInt24(self):
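		# 3-byte unsigned value: prepend a zero byte so that it can be
		# unpacked with a 4-byte format.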
		pos = self.pos
		newpos = pos + 3
		value, = struct.unpack(">l", b'\0'+self.data[pos:newpos])
		self.pos = newpos
		return value

	def readULong(self):
		pos = self.pos
		newpos = pos + 4
		value, = struct.unpack(">L", self.data[pos:newpos])
		self.pos = newpos
		return value

	def readTag(self):
		pos = self.pos
		newpos = pos + 4
		value = Tag(self.data[pos:newpos])
		assert len(value) == 4
		self.pos = newpos
		return value

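	# localState carries propagated values (e.g. a FeatureTag or a propagated
	# count) down into sub-readers; __setitem__ copies it on write, so values
	# set on one reader do not leak back into its parent or siblings.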
	def __setitem__(self, name, value):
		state = self.localState.copy() if self.localState else dict()
		state[name] = value
		self.localState = state

	def __getitem__(self, name):
		return self.localState[name]


class OTTableWriter(object):

	"""Helper class to gather and assemble data for OpenType tables."""

	def __init__(self, globalState, localState=None):
		self.items = []
		self.pos = None
		self.globalState = globalState
		self.localState = localState

	def __setitem__(self, name, value):
		state = self.localState.copy() if self.localState else dict()
		state[name] = value
		self.localState = state

	def __getitem__(self, name):
		return self.localState[name]

	# assembler interface

	def getAllData(self):
		"""Assemble all data, including all subtables."""
		self._doneWriting()
		tables, extTables = self._gatherTables()
		tables.reverse()
		extTables.reverse()
		# Gather all data in two passes: the absolute positions of all
		# subtables are needed before the actual data can be assembled.
		pos = 0
		for table in tables:
			table.pos = pos
			pos = pos + table.getDataLength()

		for table in extTables:
			table.pos = pos
			pos = pos + table.getDataLength()

		data = []
		for table in tables:
			tableData = table.getData()
			data.append(tableData)

		for table in extTables:
			tableData = table.getData()
			data.append(tableData)

		return bytesjoin(data)

	def getDataLength(self):
		"""Return the length of this table in bytes, without subtables."""
		l = 0
		for item in self.items:
			if hasattr(item, "getData") or hasattr(item, "getCountData"):
				if item.longOffset:
					l = l + 4  # sizeof(ULong)
				else:
					l = l + 2  # sizeof(UShort)
			else:
				l = l + len(item)
		return l

	def getData(self):
		"""Assemble the data for this writer/table, without subtables."""
		items = list(self.items)  # make a shallow copy
		pos = self.pos
		numItems = len(items)
		for i in range(numItems):
			item = items[i]

			if hasattr(item, "getData"):
				if item.longOffset:
					items[i] = packULong(item.pos - pos)
				else:
					try:
						items[i] = packUShort(item.pos - pos)
					except AssertionError:
						# provide data to fix the overflow problem.
						# If the overflow is to a lookup, or from a lookup to a subtable,
						# just report the current item.
						if self.name in ['LookupList', 'Lookup']:
							overflowErrorRecord = self.getOverflowErrorRecord(item)
						else:
							# overflow is within a subtable. Life is more complicated.
							# If we split the subtable just before the current item, we may still suffer overflow.
							# This is because duplicate table merging is done only within an Extension subtable tree;
							# when we split the subtable in two, some items may no longer be duplicates.
							# Get the worst case by adding up all the item lengths in a depth-first traversal,
							# and then report the first item that overflows a short.
							def getDeepItemLength(table):
								if hasattr(table, "getDataLength"):
									length = 0
									for item in table.items:
										length = length + getDeepItemLength(item)
								else:
									length = len(table)
								return length

							length = self.getDataLength()
							if hasattr(self, "sortCoverageLast") and item.name == "Coverage":
								# Coverage is first in the item list, but last in the table list.
								# The original overflow is really in the item list. Skip the Coverage
								# table in the following test.
								items = items[i+1:]

							for j in range(len(items)):
								item = items[j]
								length = length + getDeepItemLength(item)
								if length > 65535:
									break
						overflowErrorRecord = self.getOverflowErrorRecord(item)

						raise OTLOffsetOverflowError(overflowErrorRecord)

		return bytesjoin(items)

	def __hash__(self):
		# only works after self._doneWriting() has been called
		return hash(self.items)

	def __ne__(self, other):
		return not self.__eq__(other)
	def __eq__(self, other):
		if type(self) != type(other):
			return NotImplemented
		return self.items == other.items

	def _doneWriting(self, internedTables=None):
		# Convert CountData references to data string items
		# collapse duplicate table references to a unique entry
		# "tables" are OTTableWriter objects.

		# For Extension Lookup types, we can
		# eliminate duplicates only within the tree under the Extension Lookup,
		# as offsets may exceed 64K even between Extension LookupTable subtables.
		if internedTables is None:
			internedTables = {}
		items = self.items
		iRange = list(range(len(items)))

		if hasattr(self, "Extension"):
			newTree = 1
		else:
			newTree = 0
		for i in iRange:
			item = items[i]
			if hasattr(item, "getCountData"):
				items[i] = item.getCountData()
			elif hasattr(item, "getData"):
				if newTree:
					item._doneWriting()
				else:
					item._doneWriting(internedTables)
					if item in internedTables:
						items[i] = item = internedTables[item]
					else:
						internedTables[item] = item
		self.items = tuple(items)

	def _gatherTables(self, tables=None, extTables=None, done=None):
		# Convert table references in self.items tree to a flat
		# list of tables in depth-first traversal order.
		# "tables" are OTTableWriter objects.
		# We do the traversal in reverse order at each level, in order to
		# resolve duplicate references to be the last reference in the list of tables.
		# For extension lookups, duplicate references can be merged only within the
		# writer tree under the extension lookup.
		if tables is None: # init call for first time.
			tables = []
			extTables = []
			done = {}

		done[self] = 1

		numItems = len(self.items)
		iRange = list(range(numItems))
		iRange.reverse()

		if hasattr(self, "Extension"):
			appendExtensions = 1
		else:
			appendExtensions = 0

		# add Coverage table if it is sorted last.
		sortCoverageLast = 0
		if hasattr(self, "sortCoverageLast"):
			# Find coverage table
			for i in range(numItems):
				item = self.items[i]
				if hasattr(item, "name") and (item.name == "Coverage"):
					sortCoverageLast = 1
					break
			if item not in done:
				item._gatherTables(tables, extTables, done)
			else:
				index = max(item.parent.keys())
				item.parent[index + 1] = self

		saveItem = None
		for i in iRange:
			item = self.items[i]
			if not hasattr(item, "getData"):
				continue

			if sortCoverageLast and (i==1) and item.name == 'Coverage':
				# we've already 'gathered' it above
				continue

			if appendExtensions:
				assert extTables is not None, "Program or XML editing error. Extension subtables cannot contain extension subtables"
				newDone = {}
				item._gatherTables(extTables, None, newDone)

			elif item not in done:
				item._gatherTables(tables, extTables, done)
			else:
				index = max(item.parent.keys())
				item.parent[index + 1] = self

		tables.append(self)
		return tables, extTables

	# interface for gathering data, as used by table.compile()

	def getSubWriter(self):
		subwriter = self.__class__(self.globalState, self.localState)
		subwriter.parent = {0:self} # because some subtables have identical values, we discard
									# the duplicates under the getAllData method. Hence some
									# subtable writers can have more than one parent writer.
		return subwriter

	def writeUShort(self, value):
		assert 0 <= value < 0x10000
		self.items.append(struct.pack(">H", value))

	def writeShort(self, value):
		self.items.append(struct.pack(">h", value))

	def writeUInt24(self, value):
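		# pack as a 4-byte unsigned value, then drop the high byte to keep 3 bytes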
		assert 0 <= value < 0x1000000
		b = struct.pack(">L", value)
		self.items.append(b[1:])

	def writeLong(self, value):
		self.items.append(struct.pack(">l", value))

	def writeULong(self, value):
		self.items.append(struct.pack(">L", value))

	def writeTag(self, tag):
		tag = Tag(tag).tobytes()
		assert len(tag) == 4
		self.items.append(tag)

	def writeSubTable(self, subWriter):
		self.items.append(subWriter)

	def writeCountReference(self, table, name):
		ref = CountReference(table, name)
		self.items.append(ref)
		return ref

	def writeStruct(self, format, values):
		data = struct.pack(*(format,) + values)
		self.items.append(data)

	def writeData(self, data):
		self.items.append(data)

	def getOverflowErrorRecord(self, item):
		LookupListIndex = SubTableIndex = itemName = itemIndex = None
		if self.name == 'LookupList':
			LookupListIndex = item.repeatIndex
		elif self.name == 'Lookup':
			LookupListIndex = self.repeatIndex
			SubTableIndex = item.repeatIndex
		else:
			itemName = item.name
			if hasattr(item, 'repeatIndex'):
				itemIndex = item.repeatIndex
			if self.name == 'SubTable':
				LookupListIndex = self.parent[0].repeatIndex
				SubTableIndex = self.repeatIndex
			elif self.name == 'ExtSubTable':
				LookupListIndex = self.parent[0].parent[0].repeatIndex
				SubTableIndex = self.parent[0].repeatIndex
			else: # who knows how far below the SubTable level we are! Climb back up to the nearest subtable.
				itemName = ".".join([self.name, item.name])
				p1 = self.parent[0]
				while p1 and p1.name not in ['ExtSubTable', 'SubTable']:
					itemName = ".".join([p1.name, itemName])
					p1 = p1.parent[0]
				if p1:
					if p1.name == 'ExtSubTable':
						LookupListIndex = self.parent[0].parent[0].repeatIndex
						SubTableIndex = self.parent[0].repeatIndex
					else:
						LookupListIndex = self.parent[0].repeatIndex
						SubTableIndex = self.repeatIndex

		return OverflowErrorRecord( (self.globalState.tableType, LookupListIndex, SubTableIndex, itemName, itemIndex) )


class CountReference(object):
	"""A reference to a Count value, not a count of references."""
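	# A CountReference is appended to a writer's item list in place of the
	# actual count value (see OTTableWriter.writeCountReference); the count is
	# filled in later via setValue() once the array length is known, and is
	# packed by getCountData() when the data is assembled.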
	def __init__(self, table, name):
		self.table = table
		self.name = name
	def setValue(self, value):
		table = self.table
		name = self.name
		if table[name] is None:
			table[name] = value
		else:
			assert table[name] == value, (name, table[name], value)
	def getCountData(self):
		return packUShort(self.table[self.name])


def packUShort(value):
	assert 0 <= value < 0x10000, value
	return struct.pack(">H", value)


def packULong(value):
	assert 0 <= value < 0x100000000, value
	return struct.pack(">L", value)


class BaseTable(object):

	"""Generic base class for all OpenType (sub)tables."""

	def __init__(self):
		self.compileStatus = 0 # 0 means table was created
							# 1 means the table.read() function was called by a table which is subject
							# to delayed decompilation
							# 2 means that it was subject to delayed decompilation, and
							# has been decompiled

		self.recurse = 0

	def __getattr__(self, attr):
		# we get here only when the table does not have the attribute.
		# This method override exists so that we can try to de-compile
		# a table which is subject to delayed decompilation, and then try
		# to get the value again after decompilation.
		self.recurse += 1
		if self.recurse > 2:
			# shouldn't ever get here - we should only get to two levels of recursion.
			# this guards against self.decompile NOT setting compileStatus to other than 1.
			raise AttributeError(attr)
		if self.compileStatus == 1:
			self.ensureDecompiled()
			val = getattr(self, attr)
			self.recurse -= 1
			return val

		raise AttributeError(attr)

	def getConverters(self):
		return self.converters

	def getConverterByName(self, name):
		return self.convertersByName[name]

	def decompile(self, reader, font):
		self.compileStatus = 2 # table has been decompiled.
		self.readFormat(reader)
		table = {}
		self.__rawTable = table  # for debugging
		converters = self.getConverters()
		for conv in converters:
			if conv.name == "SubTable":
				conv = conv.getConverter(reader.globalState.tableType,
						table["LookupType"])
			if conv.name == "ExtSubTable":
				conv = conv.getConverter(reader.globalState.tableType,
						table["ExtensionLookupType"])
			if conv.name == "FeatureParams":
				conv = conv.getConverter(reader["FeatureTag"])
			if conv.repeat:
				l = []
				if conv.repeat in table:
					countValue = table[conv.repeat]
				else:
					# conv.repeat is a propagated count
					countValue = reader[conv.repeat]
				for i in range(countValue + conv.aux):
					l.append(conv.read(reader, font, table))
				table[conv.name] = l
			else:
				if conv.aux and not eval(conv.aux, None, table):
					continue
				table[conv.name] = conv.read(reader, font, table)
				if conv.isPropagated:
					reader[conv.name] = table[conv.name]

		self.postRead(table, font)

		del self.__rawTable  # succeeded, get rid of debugging info

	def ensureDecompiled(self):
		if self.compileStatus != 1:
			return
		self.decompile(self.reader, self.font)
		del self.reader, self.font

	def compile(self, writer, font):
		self.ensureDecompiled()
		table = self.preWrite(font)

		if hasattr(self, 'sortCoverageLast'):
			writer.sortCoverageLast = 1

		self.writeFormat(writer)
		for conv in self.getConverters():
			value = table.get(conv.name)
			if conv.repeat:
				if value is None:
					value = []
				countValue = len(value) - conv.aux
				if conv.repeat in table:
					ref = table[conv.repeat]
					table[conv.repeat] = None
					ref.setValue(countValue)
				else:
					# conv.repeat is a propagated count
					writer[conv.repeat].setValue(countValue)
				for i in range(len(value)):
					conv.write(writer, font, table, value[i], i)
			elif conv.isCount:
				# Special-case Count values.
				# Assumption: a Count field will *always* precede
				# the actual array(s).
				# We need a default value, as it may be set later by a nested
				# table. We will later store it here.
				# We add a reference: by the time the data is assembled
				# the Count value will be filled in.
				ref = writer.writeCountReference(table, conv.name)
				if conv.isPropagated:
					table[conv.name] = None
					writer[conv.name] = ref
				else:
					table[conv.name] = ref
			else:
				if conv.aux and not eval(conv.aux, None, table):
					continue
				conv.write(writer, font, table, value)
				if conv.isPropagated:
					writer[conv.name] = value

	def readFormat(self, reader):
		pass

	def writeFormat(self, writer):
		pass

	def postRead(self, table, font):
		self.__dict__.update(table)

	def preWrite(self, font):
		return self.__dict__.copy()

	def toXML(self, xmlWriter, font, attrs=None):
		tableName = self.__class__.__name__
		if attrs is None:
			attrs = []
		if hasattr(self, "Format"):
			attrs = attrs + [("Format", self.Format)]
		xmlWriter.begintag(tableName, attrs)
		xmlWriter.newline()
		self.toXML2(xmlWriter, font)
		xmlWriter.endtag(tableName)
		xmlWriter.newline()

	def toXML2(self, xmlWriter, font):
		# Simpler variant of toXML, *only* for the top level tables (like GPOS, GSUB).
		# This is because in TTX our parent writes our main tag, and in otBase.py we
		# do it ourselves. I think I'm getting schizophrenic...
		for conv in self.getConverters():
			if conv.repeat:
				value = getattr(self, conv.name)
				for i in range(len(value)):
					item = value[i]
					conv.xmlWrite(xmlWriter, font, item, conv.name,
							[("index", i)])
			else:
				if conv.aux and not eval(conv.aux, None, vars(self)):
					continue
				value = getattr(self, conv.name)
				conv.xmlWrite(xmlWriter, font, value, conv.name, [])

	def fromXML(self, name, attrs, content, font):
		try:
			conv = self.getConverterByName(name)
		except KeyError:
			raise    # XXX on KeyError, raise nice error
		value = conv.xmlRead(attrs, content, font)
		if conv.repeat:
			seq = getattr(self, conv.name, None)
			if seq is None:
				seq = []
				setattr(self, conv.name, seq)
			seq.append(value)
		else:
			setattr(self, conv.name, value)

	def __ne__(self, other):
		return not self.__eq__(other)
	def __eq__(self, other):
		if type(self) != type(other):
			return NotImplemented

		self.ensureDecompiled()
		other.ensureDecompiled()

		return self.__dict__ == other.__dict__


class FormatSwitchingBaseTable(BaseTable):

	"""Minor specialization of BaseTable, for tables that have multiple
	formats, e.g. CoverageFormat1 vs. CoverageFormat2."""

	def getConverters(self):
		return self.converters[self.Format]

	def getConverterByName(self, name):
		return self.convertersByName[self.Format][name]

	def readFormat(self, reader):
		self.Format = reader.readUShort()
		assert self.Format != 0, (self, reader.pos, len(reader.data))

	def writeFormat(self, writer):
		writer.writeUShort(self.Format)


#
# Support for ValueRecords
#
# This data type is so different from all other OpenType data types that
# it requires quite a bit of code for itself. It even has special support
# in OTTableReader and OTTableWriter...
#

valueRecordFormat = [
#	Mask	 Name            isDevice  signed
	(0x0001, "XPlacement",   0,        1),
	(0x0002, "YPlacement",   0,        1),
	(0x0004, "XAdvance",     0,        1),
	(0x0008, "YAdvance",     0,        1),
	(0x0010, "XPlaDevice",   1,        0),
	(0x0020, "YPlaDevice",   1,        0),
	(0x0040, "XAdvDevice",   1,        0),
	(0x0080, "YAdvDevice",   1,        0),
# 	reserved:
	(0x0100, "Reserved1",    0,        0),
	(0x0200, "Reserved2",    0,        0),
	(0x0400, "Reserved3",    0,        0),
	(0x0800, "Reserved4",    0,        0),
	(0x1000, "Reserved5",    0,        0),
	(0x2000, "Reserved6",    0,        0),
	(0x4000, "Reserved7",    0,        0),
	(0x8000, "Reserved8",    0,        0),
]

def _buildDict():
	d = {}
	for mask, name, isDevice, signed in valueRecordFormat:
		d[name] = mask, isDevice, signed
	return d

valueRecordFormatDict = _buildDict()


class ValueRecordFactory(object):

	"""Given a format code, this object converts ValueRecords."""
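	# Illustrative use (the masks come from valueRecordFormat above):
	#     factory = ValueRecordFactory(0x0001 | 0x0004)  # XPlacement + XAdvance
	#     record = factory.readValueRecord(reader, font)
	#     factory.writeValueRecord(writer, font, record)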

	def __init__(self, valueFormat):
		format = []
		for mask, name, isDevice, signed in valueRecordFormat:
			if valueFormat & mask:
				format.append((name, isDevice, signed))
		self.format = format

	def readValueRecord(self, reader, font):
		format = self.format
		if not format:
			return None
		valueRecord = ValueRecord()
		for name, isDevice, signed in format:
			if signed:
				value = reader.readShort()
			else:
				value = reader.readUShort()
			if isDevice:
				if value:
					from . import otTables
					subReader = reader.getSubReader(value)
					value = getattr(otTables, name)()
					value.decompile(subReader, font)
				else:
					value = None
			setattr(valueRecord, name, value)
		return valueRecord

	def writeValueRecord(self, writer, font, valueRecord):
		for name, isDevice, signed in self.format:
			value = getattr(valueRecord, name, 0)
			if isDevice:
				if value:
					subWriter = writer.getSubWriter()
					writer.writeSubTable(subWriter)
					value.compile(subWriter, font)
				else:
					writer.writeUShort(0)
			elif signed:
				writer.writeShort(value)
			else:
				writer.writeUShort(value)


class ValueRecord(object):

	# see ValueRecordFactory

	def getFormat(self):
		format = 0
		for name in self.__dict__.keys():
			format = format | valueRecordFormatDict[name][0]
		return format

	def toXML(self, xmlWriter, font, valueName, attrs=None):
		if attrs is None:
			simpleItems = []
		else:
			simpleItems = list(attrs)
		for mask, name, isDevice, signed in valueRecordFormat[:4]:  # "simple" values
			if hasattr(self, name):
				simpleItems.append((name, getattr(self, name)))
		deviceItems = []
		for mask, name, isDevice, signed in valueRecordFormat[4:8]:  # device records
			if hasattr(self, name):
				device = getattr(self, name)
				if device is not None:
					deviceItems.append((name, device))
		if deviceItems:
			xmlWriter.begintag(valueName, simpleItems)
			xmlWriter.newline()
			for name, deviceRecord in deviceItems:
				if deviceRecord is not None:
					deviceRecord.toXML(xmlWriter, font)
			xmlWriter.endtag(valueName)
			xmlWriter.newline()
		else:
			xmlWriter.simpletag(valueName, simpleItems)
			xmlWriter.newline()

	def fromXML(self, name, attrs, content, font):
		from . import otTables
		for k, v in attrs.items():
			setattr(self, k, int(v))
		for element in content:
			if not isinstance(element, tuple):
				continue
			name, attrs, content = element
			value = getattr(otTables, name)()
			for elem2 in content:
				if not isinstance(elem2, tuple):
					continue
				name2, attrs2, content2 = elem2
				value.fromXML(name2, attrs2, content2, font)
			setattr(self, name, value)

	def __ne__(self, other):
		return not self.__eq__(other)
	def __eq__(self, other):
		if type(self) != type(other):
			return NotImplemented
		return self.__dict__ == other.__dict__
853