Commit

Able to parse over LS
tvorogme committed Feb 25, 2024
1 parent 230007c commit 7806d6f
Showing 2 changed files with 73 additions and 39 deletions.
2 changes: 1 addition & 1 deletion setup.py
@@ -31,7 +31,7 @@ def finalize_options(self):

 setup(
     name="tonpy" if not IS_DEV else "tonpy-dev",
-    version="0.0.0.1.2b0" if not IS_DEV else "0.0.0.3.2c1",
+    version="0.0.0.1.2b0" if not IS_DEV else "0.0.0.3.3a1",
     author="Disintar LLP",
     author_email="andrey@head-labs.com",
     description="Types / API for TON blockchain",
110 changes: 72 additions & 38 deletions src/tonpy/blockscanner/blockscanner.py
@@ -43,48 +43,78 @@ def get_mega_libs():
 def process_block(block, lc):
     block_txs = {}
 
-    r: CellSlice = block['account_blocks'].begin_parse()
-    del block['account_blocks']
-
-    if r.refs > 0:
-        account_blocks = VmDict(256, False, cell_root=r.load_ref(),
-                                aug=SkipCryptoCurrency())
-    else:
-        account_blocks = []
-
-    for i in account_blocks:
-        account, data = i
-        data = data.data
-        # acc_trans#5
-        assert data.load_uint(4) == 5
-        me = data.load_uint(256)
-
-        # account_addr:bits256
-        assert me == account
-
-        # state_update:^(HASH_UPDATE Account)
-        data.skip_refs(1, True)
-
-        transactions = VmDict(64, False, cell_root=data, aug=SkipCryptoCurrency())
-
-        for t in transactions:
-            lt, txdata = t
-            tx = txdata.data.load_ref()
-            tx_tlb = Transaction()
-            tx_tlb = tx_tlb.cell_unpack(tx, True)
-
-            account_address = int(tx_tlb.account_addr, 2)
-            assert account_address == me
-
-            if account_address not in block_txs:
-                block_txs[account_address] = []
-
-            block_txs[account_address].append({
-                'tx': tx,
-                'lt': tx_tlb.lt,
-                'now': tx_tlb.now,
-                'is_tock': tx_tlb.description.is_tock if hasattr(tx_tlb.description, 'is_tock') else False
-            })
+    if block['account_blocks'] is not None:
+        r: CellSlice = block['account_blocks'].begin_parse()
+        del block['account_blocks']
+
+        if r.refs > 0:
+            account_blocks = VmDict(256, False, cell_root=r.load_ref(),
+                                    aug=SkipCryptoCurrency())
+        else:
+            account_blocks = []
+
+        for i in account_blocks:
+            account, data = i
+            data = data.data
+            # acc_trans#5
+            assert data.load_uint(4) == 5
+            me = data.load_uint(256)
+
+            # account_addr:bits256
+            assert me == account
+
+            # state_update:^(HASH_UPDATE Account)
+            data.skip_refs(1, True)
+
+            transactions = VmDict(64, False, cell_root=data, aug=SkipCryptoCurrency())
+
+            for t in transactions:
+                lt, txdata = t
+                tx = txdata.data.load_ref()
+                tx_tlb = Transaction()
+                tx_tlb = tx_tlb.cell_unpack(tx, True)
+
+                account_address = int(tx_tlb.account_addr, 2)
+                assert account_address == me
+
+                if account_address not in block_txs:
+                    block_txs[account_address] = []
+
+                block_txs[account_address].append({
+                    'tx': tx,
+                    'lt': tx_tlb.lt,
+                    'now': tx_tlb.now,
+                    'is_tock': tx_tlb.description.is_tock if hasattr(tx_tlb.description, 'is_tock') else False
+                })
+    else:
+        ready = False
+
+        account_address = None
+        lt = None
+
+        while not ready:
+            answer = lc.list_block_transactions_ext(block['block_id'], 256,
+                                                     account_address=account_address,
+                                                     lt=lt)
+            ready = not answer.incomplete
+
+            for tx in answer.transactions:
+                tx_tlb = Transaction()
+                tx_tlb = tx_tlb.cell_unpack(tx, True)
+
+                account_address = int(tx_tlb.account_addr, 2)
+
+                if account_address not in block_txs:
+                    block_txs[account_address] = []
+
+                block_txs[account_address].append({
+                    'tx': tx,
+                    'lt': tx_tlb.lt,
+                    'now': tx_tlb.now,
+                    'is_tock': tx_tlb.description.is_tock if hasattr(tx_tlb.description, 'is_tock') else False
+                })
+
+                lt = tx_tlb.lt
 
     total_block_txs = []
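
The new else-branch runs when parse_txs_over_ls is enabled: process_shard then passes account_blocks as None (see the hunk below), and process_block fetches the block's transactions from the lite server instead of decoding the account_blocks dictionary. Read in isolation, the pagination pattern is roughly the following sketch. It assumes a connected LiteClient lc and a block_id as used above, reuses only calls visible in this diff (list_block_transactions_ext, Transaction.cell_unpack), and the helper name and import path are illustrative assumptions, not part of the commit.

    # Sketch: page through one block's transactions over a lite server.
    # The (account_address, lt) pair of the last transaction seen acts as the
    # resume cursor; answer.incomplete tells us whether more batches remain.
    from tonpy.autogen.block import Transaction  # TLB type used above; import path assumed

    def fetch_block_txs_over_ls(lc, block_id, batch_size=256):
        txs = []
        account_address = None  # cursor: account of the last seen transaction
        lt = None               # cursor: logical time of the last seen transaction
        ready = False

        while not ready:
            answer = lc.list_block_transactions_ext(block_id, batch_size,
                                                    account_address=account_address,
                                                    lt=lt)
            ready = not answer.incomplete

            for tx in answer.transactions:
                tx_tlb = Transaction()
                tx_tlb = tx_tlb.cell_unpack(tx, True)

                # Advance the cursor so the next request resumes after this tx
                account_address = int(tx_tlb.account_addr, 2)
                lt = tx_tlb.lt

                txs.append(tx)

        return txs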

@@ -140,7 +170,8 @@ def load_process_shard(shards_chunk,
                        known_shards,
                        stop_shards,
                        lcparams,
-                       loglevel):
+                       loglevel,
+                       parse_txs_over_ls=False):
     answer = []
 
     lcparams = json.loads(lcparams)
@@ -206,7 +237,7 @@ def process_shard(x, prev_data=None, lc=None):
             'prev_block_left': left_shard,
             'prev_block_right': right_shard,
             'master': block_info.master_ref.master.seq_no,
-            'account_blocks': block_extra.account_blocks
+            'account_blocks': block_extra.account_blocks if not parse_txs_over_ls else None
         }, *prev_data]
 
         if loglevel > 1:
@@ -278,7 +309,8 @@ def __init__(self,
                  raw_process: Callable = None,
                  chunk_size: int = 1000,
                  out_queue: Queue = None,
-                 only_mc_blocks: bool = False):
+                 only_mc_blocks: bool = False,
+                 parse_txs_over_ls: bool = False):
         """
         :param lcparams: Params for LiteClient
@@ -300,6 +332,7 @@ def __init__(self,
         self.nproc = nproc
         self.chunk_size = chunk_size
         self.out_queue = out_queue
+        self.parse_txs_over_ls = parse_txs_over_ls
 
         self.known_key_blocks = {}
         self.mega_libs = get_mega_libs()
@@ -359,7 +392,7 @@ def load_process_shard(self, known_shards, stop_shards):
         with Pool(p) as pool:
             results = pool.imap_unordered(
                 load_process_shard(known_shards=known_shards, stop_shards=stop_shards, lcparams=self.lcparams,
-                                   loglevel=self.loglevel),
+                                   loglevel=self.loglevel, parse_txs_over_ls=self.parse_txs_over_ls),
                 known_shards_chunks)
 
             for result in results:
@@ -573,7 +606,8 @@ def raw_process(chunk):
         chunk_size=2,
         raw_process=raw_process,
         out_queue=outq,
-        only_mc_blocks=True
+        only_mc_blocks=True,
+        parse_txs_over_ls=True
     )
 
     scanner.start()
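
Usage note (a sketch, not part of this commit), assuming the scanner delivers processed chunks into out_queue: the queue can be drained with standard Queue calls, and the layout of each item depends entirely on what raw_process returns.

    # Sketch: drain the scanner's output queue after (or while) it runs.
    # queue.Empty is also the exception multiprocessing.Queue raises on a get() timeout.
    from queue import Empty

    def drain(outq, timeout=1.0):
        results = []
        while True:
            try:
                results.append(outq.get(timeout=timeout))
            except Empty:
                break
        return results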
