Skip to content

Commit

Permalink
cache packed sizes of transactions so they don't have to be repacked often
Browse files Browse the repository at this point in the history
  • Loading branch information
forrestv committed Oct 28, 2012
1 parent e245e38 commit 1b0c9e5
Show file tree
Hide file tree
Showing 3 changed files with 36 additions and 21 deletions.
18 changes: 9 additions & 9 deletions p2pool/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -388,7 +388,7 @@ def generate_transaction(cls, tracker, share_data, block_target, desired_timesta
break
else:
if known_txs is not None:
this_size = len(bitcoin_data.tx_type.pack(known_txs[tx_hash]))
this_size = bitcoin_data.tx_type.packed_size(known_txs[tx_hash])
if new_transaction_size + this_size > 50000: # only allow 50 kB of new txns/share
break
new_transaction_size += this_size
Expand Down Expand Up @@ -555,14 +555,14 @@ def should_punish_reason(self, previous_block, bits, tracker, known_txs):
other_txs = self._get_other_txs(tracker, known_txs)
if other_txs is None:
return True, 'not all txs present'

all_txs_size = sum(len(bitcoin_data.tx_type.pack(tx)) for tx in other_txs)
if all_txs_size > 1000000:
return True, 'txs over block size limit'

new_txs_size = sum(len(bitcoin_data.tx_type.pack(known_txs[tx_hash])) for tx_hash in self.share_info['new_transaction_hashes'])
if new_txs_size > 50000:
return True, 'new txs over limit'
else:
all_txs_size = sum(bitcoin_data.tx_type.packed_size(tx) for tx in other_txs)
if all_txs_size > 1000000:
return True, 'txs over block size limit'
new_txs_size = sum(bitcoin_data.tx_type.packed_size(known_txs[tx_hash]) for tx_hash in self.share_info['new_transaction_hashes'])
if new_txs_size > 50000:
return True, 'new txs over limit'

return False, None

Expand Down
16 changes: 8 additions & 8 deletions p2pool/p2p.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,16 +182,16 @@ def update_remote_view_of_my_mining_txs(before, after):
added = set(after) - set(before)
removed = set(before) - set(after)
if added:
self.remote_remembered_txs_size += sum(len(bitcoin_data.tx_type.pack(after[x])) for x in added)
self.remote_remembered_txs_size += sum(bitcoin_data.tx_type.packed_size(after[x]) for x in added)
assert self.remote_remembered_txs_size <= self.max_remembered_txs_size
fragment(self.send_remember_tx, tx_hashes=[x for x in added if x in self.remote_tx_hashes], txs=[after[x] for x in added if x not in self.remote_tx_hashes])
if removed:
self.send_forget_tx(tx_hashes=list(removed))
self.remote_remembered_txs_size -= sum(len(bitcoin_data.tx_type.pack(before[x])) for x in removed)
self.remote_remembered_txs_size -= sum(bitcoin_data.tx_type.packed_size(before[x]) for x in removed)
watch_id2 = self.node.mining_txs_var.transitioned.watch(update_remote_view_of_my_mining_txs)
self.connection_lost_event.watch(lambda: self.node.mining_txs_var.transitioned.unwatch(watch_id2))

self.remote_remembered_txs_size += sum(len(bitcoin_data.tx_type.pack(x)) for x in self.node.mining_txs_var.value.values())
self.remote_remembered_txs_size += sum(bitcoin_data.tx_type.packed_size(x) for x in self.node.mining_txs_var.value.values())
assert self.remote_remembered_txs_size <= self.max_remembered_txs_size
fragment(self.send_remember_tx, tx_hashes=[], txs=self.node.mining_txs_var.value.values())

Expand Down Expand Up @@ -270,7 +270,7 @@ def sendShares(self, shares, tracker, known_txs, include_txs_with=[]):

hashes_to_send = [x for x in tx_hashes if x not in self.node.mining_txs_var.value and x in known_txs]

new_remote_remembered_txs_size = self.remote_remembered_txs_size + sum(len(bitcoin_data.tx_type.pack(known_txs[x])) for x in hashes_to_send)
new_remote_remembered_txs_size = self.remote_remembered_txs_size + sum(bitcoin_data.tx_type.packed_size(known_txs[x]) for x in hashes_to_send)
if new_remote_remembered_txs_size > self.max_remembered_txs_size:
raise ValueError('shares have too many txs')
self.remote_remembered_txs_size = new_remote_remembered_txs_size
Expand All @@ -282,7 +282,7 @@ def sendShares(self, shares, tracker, known_txs, include_txs_with=[]):
if self.other_version >= 8:
res = self.send_forget_tx(tx_hashes=hashes_to_send)

self.remote_remembered_txs_size -= sum(len(bitcoin_data.tx_type.pack(known_txs[x])) for x in hashes_to_send)
self.remote_remembered_txs_size -= sum(bitcoin_data.tx_type.packed_size(known_txs[x]) for x in hashes_to_send)

return res

Expand Down Expand Up @@ -359,7 +359,7 @@ def handle_remember_tx(self, tx_hashes, txs):
return

self.remembered_txs[tx_hash] = tx
self.remembered_txs_size += len(bitcoin_data.tx_type.pack(tx))
self.remembered_txs_size += bitcoin_data.tx_type.packed_size(tx)
new_known_txs = dict(self.node.known_txs_var.value)
warned = False
for tx in txs:
Expand All @@ -374,7 +374,7 @@ def handle_remember_tx(self, tx_hashes, txs):
warned = True

self.remembered_txs[tx_hash] = tx
self.remembered_txs_size += len(bitcoin_data.tx_type.pack(tx))
self.remembered_txs_size += bitcoin_data.tx_type.packed_size(tx)
new_known_txs[tx_hash] = tx
self.node.known_txs_var.set(new_known_txs)
if self.remembered_txs_size >= self.max_remembered_txs_size:
Expand All @@ -384,7 +384,7 @@ def handle_remember_tx(self, tx_hashes, txs):
])
def handle_forget_tx(self, tx_hashes):
for tx_hash in tx_hashes:
self.remembered_txs_size -= len(bitcoin_data.tx_type.pack(self.remembered_txs[tx_hash]))
self.remembered_txs_size -= bitcoin_data.tx_type.packed_size(self.remembered_txs[tx_hash])
assert self.remembered_txs_size >= 0
del self.remembered_txs[tx_hash]

Expand Down
23 changes: 19 additions & 4 deletions p2pool/util/pack.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,19 @@ def pack(self, obj):
raise AssertionError((self._unpack(data), obj))

return data

def packed_size(self, obj):
if hasattr(obj, '_packed_size') and obj._packed_size is not None:
type_obj, packed_size = obj._packed_size
if type_obj is self:
return packed_size

packed_size = len(self.pack(obj))

if hasattr(obj, '_packed_size'):
obj._packed_size = self, packed_size

return packed_size

class VarIntType(Type):
def read(self, file):
Expand Down Expand Up @@ -228,22 +241,24 @@ def write(self, file, item):

def get_record(fields):
fields = tuple(sorted(fields))
if 'keys' in fields:
if 'keys' in fields or '_packed_size' in fields:
raise ValueError()
if fields not in _record_types:
class _Record(object):
__slots__ = fields
__slots__ = fields + ('_packed_size',)
def __init__(self):
self._packed_size = None
def __repr__(self):
return repr(dict(self))
def __getitem__(self, key):
return getattr(self, key)
def __setitem__(self, key, value):
setattr(self, key, value)
#def __iter__(self):
# for field in self.__slots__:
# for field in fields:
# yield field, getattr(self, field)
def keys(self):
return self.__slots__
return fields
def get(self, key, default=None):
return getattr(self, key, default)
def __eq__(self, other):
Expand Down

0 comments on commit 1b0c9e5

Please sign in to comment.