34 commits
8d079ed  introduce logged_based (Dec 27, 2021)
4b609b1  Get used with logged_group (Dec 29, 2021)
a63d17d  Provide logged group on the rest of code base (Dec 29, 2021)
4c3ffad  Merge remote-tracking branch 'origin/develop' into 422_introduce_logg… (Dec 29, 2021)
b4eda35  Add Airdropper logged_group (Dec 29, 2021)
8275f07  Spit and polish (Dec 29, 2021)
b3ecaae  Spit and polish (Dec 29, 2021)
c8d51ff  Spit and polish (Dec 29, 2021)
e5854f4  Spit and polish (Dec 29, 2021)
1c36d89  Spit and polish (Dec 29, 2021)
bea6cfa  Spit and polish (Dec 29, 2021)
465754e  Spit and polish (Dec 29, 2021)
f7a055a  rollback proxy.py (Dec 29, 2021)
c345177  spit and polish (Dec 29, 2021)
ab79eb0  Remove log_level param from airdropper (Dec 29, 2021)
76f914e  Spit and polish (Dec 29, 2021)
c5cbdba  Fix indexer base init error (Dec 29, 2021)
ee87cb7  Fix indexer base init error (Dec 29, 2021)
1218d78  Reduce Airdropper level (Dec 30, 2021)
523e44f  roll back get_measurements prefix (Dec 30, 2021)
dcdf481  Get logged-groups package from different place (Dec 30, 2021)
13242b0  install git to resolve python dependency (Dec 30, 2021)
b32b936  Spit on Dockerfile and polish (Dec 30, 2021)
35a51b7  Fix get_measurements errors parsig command (Dec 30, 2021)
5372136  Merge remote-tracking branch 'origin/develop' into 422_introduce_logg… (Dec 30, 2021)
125c56e  Polish after merge (Dec 30, 2021)
3f759ac  Fix (Dec 30, 2021)
3862077  fix (Dec 30, 2021)
a7b1de2  fix (Dec 30, 2021)
ed83fe6  Merge branch 'develop' into 422_introduce_logged_groups (Jan 10, 2022)
440ba11  fix (Jan 10, 2022)
7989afb  fix (Jan 10, 2022)
ccdd299  spit and polish (Jan 10, 2022)
0b81aca  fix errors (Jan 11, 2022)
Merge remote-tracking branch 'origin/develop' into 422_introduce_logged_groups

# Conflicts:
#	.dockerignore
#	proxy/common_neon/solana_interactor.py
#	proxy/common_neon/transaction_sender.py
#	proxy/indexer/airdropper.py
#	proxy/indexer/indexer.py
#	proxy/indexer/indexer_base.py
#	proxy/plugin/solana_rest_api.py
rozhkovdmitrii committed Dec 29, 2021
commit 4c3ffad5ef309cda03f9762263638f04c62a55b7
3 changes: 3 additions & 0 deletions .dockerignore
@@ -6,6 +6,9 @@
!requirements.txt
!setup.py
!README.md
!run-test-faucet.sh
!run-faucet.sh
!run-airdropper.sh
!log_cfg.json

# Ignore __pycache__ directory
40 changes: 26 additions & 14 deletions proxy/common_neon/solana_interactor.py
@@ -15,14 +15,21 @@
from solana.transaction import Transaction
from urllib.parse import urlparse
from itertools import zip_longest

from logged_groups import logged_group


from .costs import update_transaction_cost
from .utils import get_from_dict
from ..environment import EVM_LOADER_ID, CONFIRMATION_CHECK_DELAY, LOG_SENDING_SOLANA_TRANSACTION, RETRY_ON_FAIL

from typing import Any, List, NamedTuple, Union, cast


class AccountInfo(NamedTuple):
tag: int
lamports: int
owner: PublicKey


@logged_group("Proxy")
class SolanaInteractor:
@@ -32,7 +39,7 @@ def __init__(self, signer, client: SolanaClient) -> None:

def _send_rpc_batch_request(self, method: str, params_list: List[Any]) -> List[RPCResponse]:
request_data = []
for params in params_list:
for params in params_list:
request_id = next(self.client._provider._request_counter) + 1
request = {"jsonrpc": "2.0", "id": request_id, "method": method, "params": params}
request_data.append(request)
@@ -48,7 +55,7 @@ def _send_rpc_batch_request(self, method: str, params_list: List[Any]) -> List[RPCResponse]:
raise Exception("Invalid RPC response: request {} response {}", request, response)

return response_data


def get_operator_key(self):
return self.signer.public_key()
@@ -86,10 +93,10 @@ def get_multiple_accounts_info(self, accounts: List[PublicKey]) -> List[AccountInfo]:
"dataSlice": { "offset": 0, "length": 16 }
}
result = self.client._provider.make_request("getMultipleAccounts", list(map(str, accounts)), options)
logger.debug("\n{}".format(json.dumps(result, indent=4, sort_keys=True)))
self.debug("\n{}".format(json.dumps(result, indent=4, sort_keys=True)))

if result['result']['value'] is None:
logger.debug("Can't get information about {}".format(accounts))
self.debug("Can't get information about {}".format(accounts))
return None

accounts_info = []
@@ -167,7 +174,7 @@ def send_multiple_transactions_unconfirmed(self, transactions: List[Transaction]
for transaction in transactions:
transaction.recent_blockhash = blockhash
transaction.sign(self.signer)

base64_transaction = base64.b64encode(transaction.serialize()).decode("utf-8")
request.append((base64_transaction, {"skipPreflight": skip_preflight, "encoding": "base64", "preflightCommitment": "confirmed"}))

@@ -197,14 +204,19 @@ def confirm_multiple_transactions(self, signatures: List[Union[str, bytes]]):
TIMEOUT = 30 # 30 seconds pylint: disable=invalid-name
elapsed_time = 0
while elapsed_time < TIMEOUT:
self.debug('confirm_transaction for %s', tx_sig)
resp = self.client.get_signature_statuses([tx_sig])
self.debug('confirm_transaction: %s', resp)
if resp["result"]:
status = resp['result']['value'][0]
if status and (status['confirmationStatus'] == 'finalized' or \
status['confirmationStatus'] == 'confirmed' and status['confirmations'] >= confirmations):
return
response = self.client.get_signature_statuses(signatures)
self.debug('confirm_transactions: %s', response)
if response['result'] is None:
continue

for status in response['result']['value']:
if status is None:
break
if status['confirmationStatus'] == 'processed':
break
else:
return

time.sleep(CONFIRMATION_CHECK_DELAY)
elapsed_time += CONFIRMATION_CHECK_DELAY

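The pattern visible above (the @logged_group("Proxy") decorator on SolanaInteractor, with logger.debug(...) calls becoming self.debug(...)) comes from the logged-groups package that this branch pulls in. The sketch below is an illustrative stand-in, not the package's real implementation, showing why a decorated class gains self.debug/self.info/self.error without creating its own logger.

import logging

def logged_group(group_name: str):
    """Illustrative stand-in for logged_groups.logged_group (class form only)."""
    def decorator(cls):
        logger = logging.getLogger(f"{group_name}.{cls.__name__}")
        # Bind thin wrappers so instances log through the shared group logger.
        cls.debug = lambda self, msg, *args: logger.debug(msg, *args)
        cls.info = lambda self, msg, *args: logger.info(msg, *args)
        cls.warning = lambda self, msg, *args: logger.warning(msg, *args)
        cls.error = lambda self, msg, *args: logger.error(msg, *args)
        return cls
    return decorator

@logged_group("Proxy")
class Example:
    def do_work(self):
        self.debug("work started")  # same call shape as SolanaInteractor's self.debug(...)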
34 changes: 4 additions & 30 deletions proxy/common_neon/transaction_sender.py
@@ -136,7 +136,7 @@ def create_account_with_seed(self, seed, storage_size):
account = accountWithSeed(self.sender.get_operator_key(), seed)

if self.sender.get_sol_balance(account) == 0:
minimum_balance = self.sender.get_rent_exempt_balance_for_size(storage_size)
minimum_balance = self.sender.get_multiple_rent_exempt_balances_for_size([storage_size])[0]
self.debug("Minimum balance required for account {}".format(minimum_balance))

trx = Transaction()
@@ -156,8 +156,7 @@ def create_multiple_accounts_with_seed(self, seeds: List[bytes], sizes: List[int
for account_key, account_info, seed, minimum_balance, storage_size in zip(accounts, accounts_info, seeds, minimum_balances, sizes):
if account_info is None:
self.debug("Minimum balance required for account {}".format(minimum_balance))

trx.add(self.instruction.create_account_with_seed_trx(account, seed, minimum_balance, storage_size))
trx.add(self.instruction.create_account_with_seed_trx(account_key, seed, minimum_balance, storage_size))
else:
if account_info.lamports < minimum_balance:
raise Exception("insufficient balance")
@@ -359,20 +358,7 @@ def call_signed_with_holder_combined(self):
return self.call_continue()


def create_accounts_for_trx(self):
length = len(self.create_acc_trx.instructions)
if length == 0:
return
self.debug(f"Create account for trx: {length}")
precall_txs = Transaction()
precall_txs.add(self.create_acc_trx)
result = self.sender.send_measured_transaction(precall_txs, self.eth_trx, 'CreateAccountsForTrx')
if check_for_errors(result):
raise Exception("Failed to create account for trx")
self.create_acc_trx = Transaction()


def write_trx_to_holder_account(self):
def make_write_to_holder_account_trx(self) -> List[Transaction]:
self.debug('write_trx_to_holder_account')
msg = self.eth_trx.signature() + len(self.eth_trx.unsigned_msg()).to_bytes(8, byteorder="little") + self.eth_trx.unsigned_msg()

@@ -385,19 +371,7 @@ def write_trx_to_holder_account(self):
trxs.append(trx)
offset += len(part)

while len(trxs) > 0:
receipts = {}
for trx in trxs:
receipts[self.sender.send_transaction_unconfirmed(trx)] = trx

self.debug("receipts %s", receipts)
for rcpt, trx in receipts.items():
try:
self.sender.collect_result(rcpt, eth_trx=self.eth_trx, reason='WriteHolder')
except Exception as err:
self.debug("collect_result exception: {}".format(str(err)))
else:
trxs.remove(trx)
return trxs


def call_continue(self):
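The transaction_sender.py hunk above turns write_trx_to_holder_account, which chunked the message and sent/retried the WRITE transactions itself, into make_write_to_holder_account_trx, which only builds the transaction list and returns it to the caller. Below is a minimal sketch of that chunk-and-build loop; the chunk size and the make_write_instruction builder are assumptions standing in for code elided from this diff.

from typing import Callable, List

HOLDER_WRITE_CHUNK = 900  # assumed chunk size; the real value lives elsewhere in the sender

def make_write_to_holder_account_trx(msg: bytes,
                                     make_write_instruction: Callable[[int, bytes], object]) -> List[object]:
    """Split msg into parts and build one WRITE transaction per part (sketch only)."""
    trxs = []
    offset = 0
    while offset < len(msg):
        part = msg[offset:offset + HOLDER_WRITE_CHUNK]
        trxs.append(make_write_instruction(offset, part))  # hypothetical builder
        offset += len(part)
    return trxs  # the caller decides how to send and confirm these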
23 changes: 11 additions & 12 deletions proxy/indexer/airdropper.py
@@ -32,16 +32,6 @@ def __init__(self,
start_slot = 0):
IndexerBase.__init__(self, solana_url, evm_loader_id, log_level, start_slot)

self.setLevel(logging.DEBUG)
self.info(f"""Running indexer with params:
solana_url: {solana_url},
evm_loader_id: {evm_loader_id},
log_level: {log_level},
faucet_url: {faucet_url},
wrapper_whitelist: {wrapper_whitelist},
price update interval: {price_upd_interval},
NEON decimals: {neon_decimals}""")

# collection of eth-address-to-create-accout-trx mappings
# for every addresses that was already funded with airdrop
self.airdrop_ready = SQLDict(tablename="airdrop_ready")
@@ -172,17 +162,26 @@ def process_receipts(self):
if trx['transaction']['message']['instructions'] is not None:
self.process_trx_airdropper_mode(trx)


@logged_group("Airdropper")
def run_airdropper(solana_url,
evm_loader_id,
faucet_url = '',
wrapper_whitelist = 'ANY',
log_level = 'INFO',
price_update_interval = 60,
neon_decimals = 9,
start_slot = 0):
start_slot = 0, *, logger):
logging.basicConfig(format='%(asctime)s - pid:%(process)d [%(levelname)-.1s] %(funcName)s:%(lineno)d - %(message)s')

logger.info(f"""Running indexer with params:
solana_url: {solana_url},
evm_loader_id: {evm_loader_id},
log_level: {log_level},
faucet_url: {faucet_url},
wrapper_whitelist: {wrapper_whitelist},
price update interval: {price_update_interval},
NEON decimals: {neon_decimals},
Start slot: {start_slot}""")
airdropper = Airdropper(solana_url,
evm_loader_id,
faucet_url,
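run_airdropper is now decorated with @logged_group("Airdropper") and takes a keyword-only logger parameter; the decorator injects that logger, so callers never pass it. A usage sketch follows, with placeholder argument values (the URLs and loader id are assumptions, not real endpoints).

# Placeholder values for illustration only.
run_airdropper(
    "http://solana:8899",            # solana_url (placeholder)
    "EvmLoaderId11111111111111111",  # evm_loader_id (placeholder)
    faucet_url="http://faucet:3333",
    wrapper_whitelist="ANY",
    log_level="INFO",
    price_update_interval=60,
    neon_decimals=9,
    start_slot=0,
)  # no logger= argument: @logged_group("Airdropper") supplies it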
23 changes: 13 additions & 10 deletions proxy/indexer/indexer.py
@@ -53,11 +53,7 @@ def __init__(self,
solana_url,
evm_loader_id,
log_level = 'INFO'):
IndexerBase.__init__(self, solana_url, evm_loader_id, log_level)
self.info(f"""Running indexer with params:
solana_url: {solana_url},
evm_loader_id: {evm_loader_id},
log_level: {log_level}""")
IndexerBase.__init__(self, solana_url, evm_loader_id, log_level, 0)

self.canceller = Canceller()
self.logs_db = LogDB()
@@ -175,7 +171,7 @@ def process_receipts(self):
del continue_table[storage_account]
else:
self.error("Storage not found")
self.error(eth_signature, "unknown")
self.error(f"{eth_signature} unknown")
# raise

del holder_table[write_account]
@@ -449,7 +445,7 @@ def submit_transaction(self, trx_struct):
}
self.blocks_by_hash[block_hash] = slot

self.debug(trx_struct.eth_signature + " " + status)
self.debug(f"{trx_struct.eth_signature} {status}")


def submit_transaction_part(self, trx_struct):
@@ -501,9 +497,16 @@ def get_block(self, slot):

return (slot, block_hash)


def run_indexer(solana_url, evm_loader_id, log_level='DEBUG'):
indexer = Indexer(solana_url, evm_loader_id, log_level)
@logged_group("Indexer")
def run_indexer(solana_url, evm_loader_id, log_level = 'DEBUG', *, logger):
logger.info(f"""Running indexer with params:
solana_url: {solana_url},
evm_loader_id: {evm_loader_id},
log_level: {log_level}""")

indexer = Indexer(solana_url,
evm_loader_id,
log_level)
indexer.run()


4 changes: 3 additions & 1 deletion proxy/indexer/indexer_base.py
@@ -24,7 +24,9 @@ class IndexerBase:
def __init__(self,
solana_url,
evm_loader_id,
log_level):
log_level,
start_slot):
logger.setLevel(log_levels.get(log_level, logging.INFO))

self.evm_loader_id = evm_loader_id
self.client = Client(solana_url)
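IndexerBase.__init__ now also takes start_slot and sets its logger level with log_levels.get(log_level, logging.INFO); the log_levels mapping itself lies outside this hunk. A plausible sketch of that lookup is below, given as an assumption rather than the file's actual dict.

import logging

# Assumed name-to-constant mapping; the real dict in indexer_base.py is not shown here.
log_levels = {
    "DEBUG": logging.DEBUG,
    "INFO": logging.INFO,
    "WARNING": logging.WARNING,
    "ERROR": logging.ERROR,
}

logger = logging.getLogger("Indexer")
logger.setLevel(log_levels.get("NOTALEVEL", logging.INFO))  # unknown names fall back to INFO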
9 changes: 7 additions & 2 deletions proxy/plugin/solana_rest_api.py
@@ -16,6 +16,8 @@
import threading
import traceback
import unittest
import time

from logged_groups import logged_group

from ..common.utils import build_http_response
@@ -617,7 +619,7 @@ def handle_request(self, request: HttpParser) -> None:
b'Access-Control-Max-Age': b'86400'
})))
return

start_time = time.time()
self.debug('<<< %s 0x%x %s', threading.get_ident(), id(self.model), request.body.decode('utf8'))
response = None

@@ -638,7 +640,10 @@ def handle_request(self, request: HttpParser) -> None:
traceback.print_exc()
response = {'jsonrpc': '2.0', 'error': {'code': -32000, 'message': str(err)}}

self.debug(f">>> {threading.get_ident()} 0x{id(self.model)} {json.dumps(response)} {request['method'] if 'method' in request else '---'}")
resp_time_ms = (time.time() - start_time)*1000 # convert this into milliseconds
self.debug('>>> %s 0x%0x %s %s resp_time_ms= %s', threading.get_ident(), id(self.model), json.dumps(response),
request.get('method', '---'),
resp_time_ms)

self.client.queue(memoryview(build_http_response(
httpStatusCodes.OK, body=json.dumps(response).encode('utf8'),
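The handle_request hunk records start_time before dispatching, computes the elapsed time in milliseconds, and logs it with %-style arguments so the string interpolation is deferred to the logging framework. A standalone sketch of the same timing pattern follows; the handler body and logger name here are assumptions.

import json
import logging
import time

logger = logging.getLogger("Proxy.RestAPI")  # assumed logger name

def timed_handle(request: dict) -> dict:
    """Time a request handler and log its latency in milliseconds (sketch)."""
    start_time = time.time()
    response = {"jsonrpc": "2.0", "result": "0x0"}  # placeholder work
    resp_time_ms = (time.time() - start_time) * 1000
    # %-style args: interpolation happens only if the DEBUG level is enabled.
    logger.debug(">>> %s %s resp_time_ms= %s",
                 request.get("method", "---"), json.dumps(response), resp_time_ms)
    return response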