diff --git a/crawlers/deploy/arbitrum-one-state.service b/crawlers/deploy/arbitrum-one-state.service new file mode 100644 index 00000000..78155110 --- /dev/null +++ b/crawlers/deploy/arbitrum-one-state.service @@ -0,0 +1,11 @@ +[Unit] +Description=Execute Arbitrum one state crawler +After=network.target + +[Service] +Type=oneshot +WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl +EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env +ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli crawl-jobs --moonstream-token "${MOONSTREAM_PUBLIC_QUERIES_DATA_ACCESS_TOKEN}" --blockchain arbitrum_one +CPUWeight=60 +SyslogIdentifier=arbitrum-one-state \ No newline at end of file diff --git a/crawlers/deploy/arbitrum-one-state.timer b/crawlers/deploy/arbitrum-one-state.timer new file mode 100644 index 00000000..956b741a --- /dev/null +++ b/crawlers/deploy/arbitrum-one-state.timer @@ -0,0 +1,9 @@ +[Unit] +Description=Execute Arbitrum one state crawler each 5m + +[Timer] +OnBootSec=15s +OnUnitActiveSec=5m + +[Install] +WantedBy=timers.target diff --git a/crawlers/deploy/arbitrum-sepolia-state.service b/crawlers/deploy/arbitrum-sepolia-state.service new file mode 100644 index 00000000..d6a96a1d --- /dev/null +++ b/crawlers/deploy/arbitrum-sepolia-state.service @@ -0,0 +1,11 @@ +[Unit] +Description=Execute Arbitrum Sepolia state crawler +After=network.target + +[Service] +Type=oneshot +WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl +EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env +ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli crawl-jobs --moonstream-token "${MOONSTREAM_PUBLIC_QUERIES_DATA_ACCESS_TOKEN}" --blockchain arbitrum_sepolia +CPUWeight=60 +SyslogIdentifier=arbitrum-sepolia-state \ No newline at end of file diff --git a/crawlers/deploy/arbitrum-sepolia-state.timer b/crawlers/deploy/arbitrum-sepolia-state.timer new file mode 100644 index 00000000..5ce33711 --- /dev/null +++ 
b/crawlers/deploy/arbitrum-sepolia-state.timer @@ -0,0 +1,9 @@ +[Unit] +Description=Execute Arbitrum Sepolia state crawler each 5m + +[Timer] +OnBootSec=15s +OnUnitActiveSec=5m + +[Install] +WantedBy=timers.target diff --git a/crawlers/deploy/deploy-state.bash b/crawlers/deploy/deploy-state.bash index 8fd85545..371a21e5 100755 --- a/crawlers/deploy/deploy-state.bash +++ b/crawlers/deploy/deploy-state.bash @@ -32,6 +32,10 @@ ETHEREUM_STATE_CLEAN_TIMER_FILE="ethereum-state-clean.timer" ETHEREUM_METADATA_SERVICE_FILE="ethereum-metadata.service" ETHEREUM_METADATA_TIMER_FILE="ethereum-metadata.timer" +# Ethereum Sepolia +ETHEREUM_SEPOLIA_STATE_SERVICE_FILE="ethereum-sepolia-state.service" +ETHEREUM_SEPOLIA_STATE_TIMER_FILE="ethereum-sepolia-state.timer" + # Polygon service files POLYGON_STATE_SERVICE_FILE="polygon-state.service" POLYGON_STATE_TIMER_FILE="polygon-state.timer" @@ -46,6 +50,13 @@ ZKSYNC_ERA_STATE_TIMER_FILE="zksync-era-state.timer" ZKSYNC_ERA_STATE_CLEAN_SERVICE_FILE="zksync-era-state-clean.service" ZKSYNC_ERA_STATE_CLEAN_TIMER_FILE="zksync-era-state-clean.timer" +# Arbitrum one +ARBITRUM_ONE_STATE_SERVICE_FILE="arbitrum-one-state.service" +ARBITRUM_ONE_STATE_TIMER_FILE="arbitrum-one-state.timer" + +# Arbitrum Sepolia +ARBITRUM_SEPOLIA_STATE_SERVICE_FILE="arbitrum-sepolia-state.service" +ARBITRUM_SEPOLIA_STATE_TIMER_FILE="arbitrum-sepolia-state.timer" # Xai XAI_STATE_SERVICE_FILE="xai-state.service" @@ -66,10 +77,16 @@ XAI_SEPOLIA_METADATA_TIMER_FILE="xai-sepolia-metadata.timer" # Game7 GAME7_METADATA_SERVICE_FILE="game7-metadata.service" GAME7_METADATA_TIMER_FILE="game7-metadata.timer" +GAME7_STATE_SERVICE_FILE="game7-state.service" +GAME7_STATE_TIMER_FILE="game7-state.timer" +GAME7_STATE_CLEAN_SERVICE_FILE="game7-state-clean.service" +GAME7_STATE_CLEAN_TIMER_FILE="game7-state-clean.timer" # Game7 testnet GAME7_TESTNET_METADATA_SERVICE_FILE="game7-testnet-metadata.service" GAME7_TESTNET_METADATA_TIMER_FILE="game7-testnet-metadata.timer" 
+GAME7_TESTNET_STATE_SERVICE_FILE="game7-testnet-state.service" +GAME7_TESTNET_STATE_TIMER_FILE="game7-testnet-state.timer" set -eu @@ -140,6 +157,21 @@ cp "${SCRIPT_DIR}/${ETHEREUM_METADATA_TIMER_FILE}" "/home/ubuntu/.config/systemd XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ETHEREUM_METADATA_TIMER_FILE}" + +# Ethereum Sepolia + +echo +echo +echo -e "${PREFIX_INFO} Replacing existing Ethereum Sepolia state service and timer with: ${ETHEREUM_SEPOLIA_STATE_SERVICE_FILE}, ${ETHEREUM_SEPOLIA_STATE_TIMER_FILE}" +chmod 644 "${SCRIPT_DIR}/${ETHEREUM_SEPOLIA_STATE_SERVICE_FILE}" "${SCRIPT_DIR}/${ETHEREUM_SEPOLIA_STATE_TIMER_FILE}" +cp "${SCRIPT_DIR}/${ETHEREUM_SEPOLIA_STATE_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ETHEREUM_SEPOLIA_STATE_SERVICE_FILE}" +cp "${SCRIPT_DIR}/${ETHEREUM_SEPOLIA_STATE_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${ETHEREUM_SEPOLIA_STATE_TIMER_FILE}" +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ETHEREUM_SEPOLIA_STATE_TIMER_FILE}" + + +# Polygon + echo echo echo -e "${PREFIX_INFO} Replacing existing Polygon state service and timer with: ${POLYGON_STATE_SERVICE_FILE}, ${POLYGON_STATE_TIMER_FILE}" @@ -185,6 +217,31 @@ cp "${SCRIPT_DIR}/${ZKSYNC_ERA_STATE_CLEAN_TIMER_FILE}" "/home/ubuntu/.config/sy XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ZKSYNC_ERA_STATE_CLEAN_TIMER_FILE}" +# Arbitrum one + +echo +echo +echo -e "${PREFIX_INFO} Replacing existing Arbitrum one state service and timer with: ${ARBITRUM_ONE_STATE_SERVICE_FILE}, ${ARBITRUM_ONE_STATE_TIMER_FILE}" +chmod 644 "${SCRIPT_DIR}/${ARBITRUM_ONE_STATE_SERVICE_FILE}" "${SCRIPT_DIR}/${ARBITRUM_ONE_STATE_TIMER_FILE}" +cp "${SCRIPT_DIR}/${ARBITRUM_ONE_STATE_SERVICE_FILE}" 
"/home/ubuntu/.config/systemd/user/${ARBITRUM_ONE_STATE_SERVICE_FILE}" +cp "${SCRIPT_DIR}/${ARBITRUM_ONE_STATE_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_ONE_STATE_TIMER_FILE}" +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_ONE_STATE_TIMER_FILE}" + + +# Arbitrum Sepolia + +echo +echo +echo -e "${PREFIX_INFO} Replacing existing Arbitrum Sepolia state service and timer with: ${ARBITRUM_SEPOLIA_STATE_SERVICE_FILE}, ${ARBITRUM_SEPOLIA_STATE_TIMER_FILE}" +chmod 644 "${SCRIPT_DIR}/${ARBITRUM_SEPOLIA_STATE_SERVICE_FILE}" "${SCRIPT_DIR}/${ARBITRUM_SEPOLIA_STATE_TIMER_FILE}" +cp "${SCRIPT_DIR}/${ARBITRUM_SEPOLIA_STATE_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_SEPOLIA_STATE_SERVICE_FILE}" +cp "${SCRIPT_DIR}/${ARBITRUM_SEPOLIA_STATE_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_SEPOLIA_STATE_TIMER_FILE}" +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_SEPOLIA_STATE_TIMER_FILE}" + + +# Xai echo echo @@ -240,6 +297,8 @@ cp "${SCRIPT_DIR}/${XAI_SEPOLIA_METADATA_TIMER_FILE}" "/home/ubuntu/.config/syst XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XAI_SEPOLIA_METADATA_TIMER_FILE}" +# Game7 + echo echo echo -e "${PREFIX_INFO} Replacing existing Game7 metadata service and timer with: ${GAME7_METADATA_SERVICE_FILE}, ${GAME7_METADATA_TIMER_FILE}" @@ -249,6 +308,26 @@ cp "${SCRIPT_DIR}/${GAME7_METADATA_TIMER_FILE}" "/home/ubuntu/.config/systemd/us XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${GAME7_METADATA_TIMER_FILE}" +echo +echo +echo -e "${PREFIX_INFO} Replacing existing Game7 state service and timer with: ${GAME7_STATE_SERVICE_FILE}, ${GAME7_STATE_TIMER_FILE}" 
+chmod 644 "${SCRIPT_DIR}/${GAME7_STATE_SERVICE_FILE}" "${SCRIPT_DIR}/${GAME7_STATE_TIMER_FILE}" +cp "${SCRIPT_DIR}/${GAME7_STATE_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${GAME7_STATE_SERVICE_FILE}" +cp "${SCRIPT_DIR}/${GAME7_STATE_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${GAME7_STATE_TIMER_FILE}" +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${GAME7_STATE_TIMER_FILE}" + +echo +echo +echo -e "${PREFIX_INFO} Replacing existing Game7 state clean service and timer with: ${GAME7_STATE_CLEAN_SERVICE_FILE}, ${GAME7_STATE_CLEAN_TIMER_FILE}" +chmod 644 "${SCRIPT_DIR}/${GAME7_STATE_CLEAN_SERVICE_FILE}" "${SCRIPT_DIR}/${GAME7_STATE_CLEAN_TIMER_FILE}" +cp "${SCRIPT_DIR}/${GAME7_STATE_CLEAN_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${GAME7_STATE_CLEAN_SERVICE_FILE}" +cp "${SCRIPT_DIR}/${GAME7_STATE_CLEAN_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${GAME7_STATE_CLEAN_TIMER_FILE}" +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${GAME7_STATE_CLEAN_TIMER_FILE}" + +# Game7 testnet + echo echo echo -e "${PREFIX_INFO} Replacing existing Game7 testnet metadata service and timer with: ${GAME7_TESTNET_METADATA_SERVICE_FILE}, ${GAME7_TESTNET_METADATA_TIMER_FILE}" @@ -257,3 +336,12 @@ cp "${SCRIPT_DIR}/${GAME7_TESTNET_METADATA_SERVICE_FILE}" "/home/ubuntu/.config/ cp "${SCRIPT_DIR}/${GAME7_TESTNET_METADATA_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${GAME7_TESTNET_METADATA_TIMER_FILE}" XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${GAME7_TESTNET_METADATA_TIMER_FILE}" + +echo +echo +echo -e "${PREFIX_INFO} Replacing existing Game7 testnet state service and timer with: ${GAME7_TESTNET_STATE_SERVICE_FILE}, ${GAME7_TESTNET_STATE_TIMER_FILE}" +chmod 644 
"${SCRIPT_DIR}/${GAME7_TESTNET_STATE_SERVICE_FILE}" "${SCRIPT_DIR}/${GAME7_TESTNET_STATE_TIMER_FILE}" +cp "${SCRIPT_DIR}/${GAME7_TESTNET_STATE_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${GAME7_TESTNET_STATE_SERVICE_FILE}" +cp "${SCRIPT_DIR}/${GAME7_TESTNET_STATE_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${GAME7_TESTNET_STATE_TIMER_FILE}" +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${GAME7_TESTNET_STATE_TIMER_FILE}" diff --git a/crawlers/deploy/deploy.bash b/crawlers/deploy/deploy.bash index 51b2c5f0..11dc09b6 100755 --- a/crawlers/deploy/deploy.bash +++ b/crawlers/deploy/deploy.bash @@ -47,6 +47,10 @@ ETHEREUM_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="ethereum-historical-crawl-tra ETHEREUM_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE="ethereum-historical-crawl-events.service" ETHEREUM_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="ethereum-historical-crawl-events.timer" +## Sepolia services files +SEPOLIA_STATE_SERVICE_FILE="ethereum-sepolia-state.service" +SEPOLIA_STATE_TIMER_FILE="ethereum-sepolia-state.timer" + # Polygon service files POLYGON_SYNCHRONIZE_SERVICE="polygon-synchronize.service" POLYGON_MISSING_SERVICE_FILE="polygon-missing.service" @@ -124,12 +128,16 @@ ARBITRUM_ONE_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE="arbitrum-one-historical ARBITRUM_ONE_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="arbitrum-one-historical-crawl-transactions.timer" ARBITRUM_ONE_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE="arbitrum-one-historical-crawl-events.service" ARBITRUM_ONE_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="arbitrum-one-historical-crawl-events.timer" +ARBITRUM_ONE_STATE_SERVICE_FILE="arbitrum-one-state.service" +ARBITRUM_ONE_STATE_TIMER_FILE="arbitrum-one-state.timer" # Arbitrum Sepolia ARBITRUM_SEPOLIA_MISSING_SERVICE_FILE="arbitrum-sepolia-missing.service" ARBITRUM_SEPOLIA_MISSING_TIMER_FILE="arbitrum-sepolia-missing.timer" 
ARBITRUM_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE="arbitrum-sepolia-moonworm-crawler.service" ARBITRUM_SEPOLIA_SYNCHRONIZE_SERVICE="arbitrum-sepolia-synchronize.service" +ARBITRUM_SEPOLIA_STATE_SERVICE_FILE="arbitrum-sepolia-state.service" +ARBITRUM_SEPOLIA_STATE_TIMER_FILE="arbitrum-sepolia-state.timer" # Xai XAI_MISSING_SERVICE_FILE="xai-missing.service" @@ -220,10 +228,16 @@ MANTLE_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="mantle-sepolia-historic # Game7 GAME7_METADATA_SERVICE_FILE="game7-metadata.service" GAME7_METADATA_TIMER_FILE="game7-metadata.timer" +GAME7_STATE_SERVICE_FILE="game7-state.service" +GAME7_STATE_TIMER_FILE="game7-state.timer" +GAME7_STATE_CLEAN_SERVICE_FILE="game7-state-clean.service" +GAME7_STATE_CLEAN_TIMER_FILE="game7-state-clean.timer" # Game7 testnet GAME7_TESTNET_METADATA_SERVICE_FILE="game7-testnet-metadata.service" GAME7_TESTNET_METADATA_TIMER_FILE="game7-testnet-metadata.timer" +GAME7_TESTNET_STATE_SERVICE_FILE="game7-testnet-state.service" +GAME7_TESTNET_STATE_TIMER_FILE="game7-testnet-state.timer" set -eu @@ -354,6 +368,20 @@ XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ETHEREUM_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}" +## Sepolia + +echo +echo +echo -e "${PREFIX_INFO} Replacing existing Sepolia state service and timer with: ${SEPOLIA_STATE_SERVICE_FILE}, ${SEPOLIA_STATE_TIMER_FILE}" +chmod 644 "${SCRIPT_DIR}/${SEPOLIA_STATE_SERVICE_FILE}" "${SCRIPT_DIR}/${SEPOLIA_STATE_TIMER_FILE}" +cp "${SCRIPT_DIR}/${SEPOLIA_STATE_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${SEPOLIA_STATE_SERVICE_FILE}" +cp "${SCRIPT_DIR}/${SEPOLIA_STATE_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${SEPOLIA_STATE_TIMER_FILE}" +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${SEPOLIA_STATE_TIMER_FILE}" + + +## Polygon + echo echo echo -e "${PREFIX_INFO} Replacing existing 
Polygon block with transactions syncronizer service definition with ${POLYGON_SYNCHRONIZE_SERVICE}" @@ -709,6 +737,14 @@ cp "${SCRIPT_DIR}/${ARBITRUM_ONE_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}" "/home/ubu XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_ONE_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}" +echo +echo +echo -e "${PREFIX_INFO} Replacing existing Arbitrum one state service and timer with: ${ARBITRUM_ONE_STATE_SERVICE_FILE}, ${ARBITRUM_ONE_STATE_TIMER_FILE}" +chmod 644 "${SCRIPT_DIR}/${ARBITRUM_ONE_STATE_SERVICE_FILE}" "${SCRIPT_DIR}/${ARBITRUM_ONE_STATE_TIMER_FILE}" +cp "${SCRIPT_DIR}/${ARBITRUM_ONE_STATE_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_ONE_STATE_SERVICE_FILE}" +cp "${SCRIPT_DIR}/${ARBITRUM_ONE_STATE_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_ONE_STATE_TIMER_FILE}" +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_ONE_STATE_TIMER_FILE}" # Arbitrum Sepolia echo @@ -736,6 +772,15 @@ cp "${SCRIPT_DIR}/${ARBITRUM_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubun XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}" +echo +echo +echo -e "${PREFIX_INFO} Replacing existing Arbitrum Sepolia state service and timer with: ${ARBITRUM_SEPOLIA_STATE_SERVICE_FILE}, ${ARBITRUM_SEPOLIA_STATE_TIMER_FILE}" +chmod 644 "${SCRIPT_DIR}/${ARBITRUM_SEPOLIA_STATE_SERVICE_FILE}" "${SCRIPT_DIR}/${ARBITRUM_SEPOLIA_STATE_TIMER_FILE}" +cp "${SCRIPT_DIR}/${ARBITRUM_SEPOLIA_STATE_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_SEPOLIA_STATE_SERVICE_FILE}" +cp "${SCRIPT_DIR}/${ARBITRUM_SEPOLIA_STATE_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_SEPOLIA_STATE_TIMER_FILE}" +XDG_RUNTIME_DIR="/run/user/1000" 
systemctl --user daemon-reload +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_SEPOLIA_STATE_TIMER_FILE}" + # Xai echo echo @@ -1129,6 +1174,25 @@ cp "${SCRIPT_DIR}/${GAME7_METADATA_TIMER_FILE}" "/home/ubuntu/.config/systemd/us XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${GAME7_METADATA_TIMER_FILE}" +echo +echo +echo -e "${PREFIX_INFO} Replacing existing Game7 state service and timer with: ${GAME7_STATE_SERVICE_FILE}, ${GAME7_STATE_TIMER_FILE}" +chmod 644 "${SCRIPT_DIR}/${GAME7_STATE_SERVICE_FILE}" "${SCRIPT_DIR}/${GAME7_STATE_TIMER_FILE}" +cp "${SCRIPT_DIR}/${GAME7_STATE_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${GAME7_STATE_SERVICE_FILE}" +cp "${SCRIPT_DIR}/${GAME7_STATE_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${GAME7_STATE_TIMER_FILE}" +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${GAME7_STATE_TIMER_FILE}" + +echo +echo +echo -e "${PREFIX_INFO} Replacing existing Game7 state clean service and timer with: ${GAME7_STATE_CLEAN_SERVICE_FILE}, ${GAME7_STATE_CLEAN_TIMER_FILE}" +chmod 644 "${SCRIPT_DIR}/${GAME7_STATE_CLEAN_SERVICE_FILE}" "${SCRIPT_DIR}/${GAME7_STATE_CLEAN_TIMER_FILE}" +cp "${SCRIPT_DIR}/${GAME7_STATE_CLEAN_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${GAME7_STATE_CLEAN_SERVICE_FILE}" +cp "${SCRIPT_DIR}/${GAME7_STATE_CLEAN_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${GAME7_STATE_CLEAN_TIMER_FILE}" +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${GAME7_STATE_CLEAN_TIMER_FILE}" + + # Game7 testnet echo echo @@ -1138,3 +1202,12 @@ cp "${SCRIPT_DIR}/${GAME7_TESTNET_METADATA_SERVICE_FILE}" "/home/ubuntu/.config/ cp "${SCRIPT_DIR}/${GAME7_TESTNET_METADATA_TIMER_FILE}" 
"/home/ubuntu/.config/systemd/user/${GAME7_TESTNET_METADATA_TIMER_FILE}" XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${GAME7_TESTNET_METADATA_TIMER_FILE}" + +echo +echo +echo -e "${PREFIX_INFO} Replacing existing Game7 testnet state service and timer with: ${GAME7_TESTNET_STATE_SERVICE_FILE}, ${GAME7_TESTNET_STATE_TIMER_FILE}" +chmod 644 "${SCRIPT_DIR}/${GAME7_TESTNET_STATE_SERVICE_FILE}" "${SCRIPT_DIR}/${GAME7_TESTNET_STATE_TIMER_FILE}" +cp "${SCRIPT_DIR}/${GAME7_TESTNET_STATE_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${GAME7_TESTNET_STATE_SERVICE_FILE}" +cp "${SCRIPT_DIR}/${GAME7_TESTNET_STATE_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${GAME7_TESTNET_STATE_TIMER_FILE}" +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload +XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${GAME7_TESTNET_STATE_TIMER_FILE}" diff --git a/crawlers/deploy/ethereum-state.timer b/crawlers/deploy/ethereum-state.timer index 7db2aa54..9dec56d4 100644 --- a/crawlers/deploy/ethereum-state.timer +++ b/crawlers/deploy/ethereum-state.timer @@ -1,9 +1,9 @@ [Unit] -Description=Execute Ethereum state crawler each 10m +Description=Execute Ethereum state crawler each 5m [Timer] OnBootSec=15s -OnUnitActiveSec=10m +OnUnitActiveSec=5m [Install] WantedBy=timers.target diff --git a/crawlers/deploy/game7-state-clean.service b/crawlers/deploy/game7-state-clean.service new file mode 100644 index 00000000..da1e8b99 --- /dev/null +++ b/crawlers/deploy/game7-state-clean.service @@ -0,0 +1,11 @@ +[Unit]
+Description=Execute Game7 state clean labels crawler +After=network.target + +[Service] +Type=oneshot +WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl +EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env +ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli clean-state-labels --blockchain game7 -N 10000 +CPUWeight=60 +SyslogIdentifier=game7-state-clean diff --git a/crawlers/deploy/game7-state-clean.timer b/crawlers/deploy/game7-state-clean.timer new file mode 100644 index 00000000..0e4b53e9 --- /dev/null +++ b/crawlers/deploy/game7-state-clean.timer @@ -0,0 +1,9 @@ +[Unit] +Description=Execute Game7 state clean labels crawler each 5m + +[Timer] +OnBootSec=50s +OnUnitActiveSec=5m + +[Install] +WantedBy=timers.target diff --git a/crawlers/deploy/game7-state.service b/crawlers/deploy/game7-state.service new file mode 100644 index 00000000..da19d38c --- /dev/null +++ b/crawlers/deploy/game7-state.service @@ -0,0 +1,11 @@ +[Unit] +Description=Execute Game7 state crawler +After=network.target + +[Service] +Type=oneshot +WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl +EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env +ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli crawl-jobs --moonstream-token "${MOONSTREAM_PUBLIC_QUERIES_DATA_ACCESS_TOKEN}" --blockchain game7 +CPUWeight=60 +SyslogIdentifier=game7-state \ No newline at end of file diff --git a/crawlers/deploy/game7-state.timer b/crawlers/deploy/game7-state.timer new file mode 100644 index 00000000..f1a7e2dc --- /dev/null +++ b/crawlers/deploy/game7-state.timer @@ -0,0 +1,9 @@ +[Unit] +Description=Execute Game7 state crawler each 5m + +[Timer] +OnBootSec=15s +OnUnitActiveSec=5m + +[Install] +WantedBy=timers.target diff --git a/crawlers/deploy/game7-testnet-state.service b/crawlers/deploy/game7-testnet-state.service new file mode 100644 index 00000000..98855fa3 --- /dev/null +++ b/crawlers/deploy/game7-testnet-state.service @@ 
-0,0 +1,11 @@ +[Unit] +Description=Execute Game7 testnet state crawler +After=network.target + +[Service] +Type=oneshot +WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl +EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env +ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli crawl-jobs --moonstream-token "${MOONSTREAM_PUBLIC_QUERIES_DATA_ACCESS_TOKEN}" --blockchain game7_testnet +CPUWeight=60 +SyslogIdentifier=game7-testnet-state \ No newline at end of file diff --git a/crawlers/deploy/game7-testnet-state.timer b/crawlers/deploy/game7-testnet-state.timer new file mode 100644 index 00000000..9324115e --- /dev/null +++ b/crawlers/deploy/game7-testnet-state.timer @@ -0,0 +1,9 @@ +[Unit] +Description=Execute Game7 testnet state crawler each 5m + +[Timer] +OnBootSec=15s +OnUnitActiveSec=5m + +[Install] +WantedBy=timers.target diff --git a/crawlers/deploy/ethereum-sepolia-state.service b/crawlers/deploy/ethereum-sepolia-state.service new file mode 100644 index 00000000..fb2b426c --- /dev/null +++ b/crawlers/deploy/ethereum-sepolia-state.service @@ -0,0 +1,11 @@ +[Unit] +Description=Execute Ethereum Sepolia state crawler +After=network.target + +[Service] +Type=oneshot +WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl +EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env +ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli crawl-jobs --moonstream-token "${MOONSTREAM_PUBLIC_QUERIES_DATA_ACCESS_TOKEN}" --blockchain sepolia +CPUWeight=60 +SyslogIdentifier=ethereum-sepolia-state \ No newline at end of file diff --git a/crawlers/deploy/ethereum-sepolia-state.timer b/crawlers/deploy/ethereum-sepolia-state.timer new file mode 100644 index 00000000..210368d3 --- /dev/null +++ b/crawlers/deploy/ethereum-sepolia-state.timer @@ -0,0 +1,9 @@ +[Unit] +Description=Execute Ethereum Sepolia state crawler each 5m + +[Timer] +OnBootSec=15s +OnUnitActiveSec=5m + +[Install] +WantedBy=timers.target diff --git a/crawlers/mooncrawl/mooncrawl/actions.py
b/crawlers/mooncrawl/mooncrawl/actions.py index c9728e93..bc2e4f12 100644 --- a/crawlers/mooncrawl/mooncrawl/actions.py +++ b/crawlers/mooncrawl/mooncrawl/actions.py @@ -16,7 +16,15 @@ Moonstream, MoonstreamQueryResultUrl, ) +from sqlalchemy import text, TextClause +from moonstreamtypes.blockchain import ( + AvailableBlockchainType, + get_block_model, + get_label_model, + get_transaction_model, +) +from .data import QueryDataUpdate from .middleware import MoonstreamHTTPException from .settings import ( bugout_client as bc, @@ -34,6 +42,12 @@ class EntityCollectionNotFoundException(Exception): """ +class QueryTextClauseException(Exception): + """ + Raised when query can't be transformed to TextClause + """ + + def push_data_to_bucket( data: Any, key: str, bucket: str, metadata: Dict[str, Any] = {} ) -> None: @@ -134,7 +148,7 @@ def recive_S3_data_from_query( if_modified_since = if_modified_since_datetime.strftime("%a, %d %b %Y %H:%M:%S GMT") time.sleep(2) - if custom_body: + if custom_body or query_params: headers = { "Authorization": f"Bearer {token}", "Content-Type": "application/json", @@ -232,6 +246,75 @@ def get_customer_db_uri( raise MoonstreamHTTPException(status_code=500, internal_error=e) +def resolve_table_names(request_data: QueryDataUpdate) -> Dict[str, str]: + """ + Determines the table names based on the blockchain and labels version. + Returns an empty dictionary if blockchain is not provided. 
+ """ + if not request_data.blockchain: + return {"labels_table": "ethereum_labels"} + + if request_data.blockchain not in [i.value for i in AvailableBlockchainType]: + logger.error(f"Unknown blockchain {request_data.blockchain}") + raise MoonstreamHTTPException(status_code=403, detail="Unknown blockchain") + + blockchain = AvailableBlockchainType(request_data.blockchain) + labels_version = 2 + + if request_data.customer_id is not None and request_data.instance_id is not None: + labels_version = 3 + + logger.debug("Resolved labels version %s for blockchain %s", labels_version, blockchain) + + tables = { + "labels_table": get_label_model(blockchain, labels_version).__tablename__, + } + + if labels_version != 3: + tables.update( + { + "transactions_table": get_transaction_model(blockchain).__tablename__, + "blocks_table": get_block_model(blockchain).__tablename__, + } + ) + + return tables + + +def prepare_query( + requested_query: str, tables: Dict[str, str], query_id: str +) -> TextClause: + """ + Prepares the SQL query by replacing placeholders with actual table names.
+ """ + # Check and replace placeholders only if they exist in the query + if "__labels_table__" in requested_query: + requested_query = requested_query.replace( + "__labels_table__", tables.get("labels_table", "ethereum_labels") + ) + + if "__transactions_table__" in requested_query and "transactions_table" in tables: + requested_query = requested_query.replace( + "__transactions_table__", tables["transactions_table"] + ) + + if "__blocks_table__" in requested_query and "blocks_table" in tables: + requested_query = requested_query.replace( + "__blocks_table__", tables["blocks_table"] + ) + + # Check if it can transform to TextClause + try: + query = text(requested_query) + except Exception as e: + logger.error( + f"Can't parse query {query_id} to TextClause in drones /query_update endpoint, error: {e}" + ) + raise QueryTextClauseException( + f"Can't parse query {query_id} to TextClause in drones /query_update endpoint, error: {e}" + ) + + return query ## DB V3 diff --git a/crawlers/mooncrawl/mooncrawl/api.py b/crawlers/mooncrawl/mooncrawl/api.py index 5db35304..25bac945 100644 --- a/crawlers/mooncrawl/mooncrawl/api.py +++ b/crawlers/mooncrawl/mooncrawl/api.py @@ -13,8 +13,6 @@ from bugout.data import BugoutJournalEntity, BugoutResource from fastapi import BackgroundTasks, FastAPI from fastapi.middleware.cors import CORSMiddleware -from moonstreamtypes.blockchain import AvailableBlockchainType, get_block_model, get_label_model, get_transaction_model -from sqlalchemy import text from . 
import data from .actions import ( @@ -22,6 +20,9 @@ generate_s3_access_links, get_entity_subscription_collection_id, query_parameter_hash, + prepare_query, + resolve_table_names, + QueryTextClauseException, ) from .middleware import MoonstreamHTTPException from .settings import ( @@ -225,49 +226,20 @@ async def queries_data_update_handler( logger.error(f"Unhandled query execute exception, error: {e}") raise MoonstreamHTTPException(status_code=500) - requested_query = request_data.query + # Resolve table names based on the request data default ethereum + tables = resolve_table_names(request_data) - version = 2 - - if request_data.customer_id and request_data.instance_id: - version = 3 - - blockchain_table = "polygon_labels" - if request_data.blockchain: - if request_data.blockchain not in [i.value for i in AvailableBlockchainType]: - logger.error(f"Unknown blockchain {request_data.blockchain}") - raise MoonstreamHTTPException(status_code=403, detail="Unknown blockchain") - - blockchain = AvailableBlockchainType(request_data.blockchain) - - blockchain_table = get_label_model(blockchain, version).__tablename__ - requested_query = requested_query.replace( - "__labels_table__", - blockchain_table - ) - if version == 2: - ( - requested_query.replace( - "__transactions_table__", - get_transaction_model(blockchain).__tablename__, - ) - .replace( - "__blocks_table__", - get_block_model(blockchain).__tablename__, - ) - - ) - - # Check if it can transform to TextClause + # Prepare the query with the resolved table names try: - query = text(requested_query) + query = prepare_query(request_data.query, tables, query_id) + except QueryTextClauseException as e: + logger.error(f"Error preparing query for query id: {query_id}, error: {e}") + raise MoonstreamHTTPException(status_code=500, detail="Error preparing query") except Exception as e: - logger.error( - f"Can't parse query {query_id} to TextClause in drones /query_update endpoint, error: {e}" - ) - raise 
MoonstreamHTTPException(status_code=500, detail="Can't parse query") + logger.error(f"Error preparing query for query id: {query_id}, error: {e}") + raise MoonstreamHTTPException(status_code=500, detail="Error preparing query") - # Get requried keys for query + # Get required keys for query expected_query_parameters = query._bindparams.keys() # request.params validations @@ -302,9 +274,9 @@ async def queries_data_update_handler( params_hash=params_hash, customer_id=request_data.customer_id, instance_id=request_data.instance_id, - blockchain_table=blockchain_table, + blockchain_table=tables["labels_table"], + # Add any additional parameters needed for the task ) - except Exception as e: logger.error(f"Unhandled query execute exception, error: {e}") raise MoonstreamHTTPException(status_code=500) diff --git a/crawlers/mooncrawl/mooncrawl/blockchain.py b/crawlers/mooncrawl/mooncrawl/blockchain.py index f1d7c7ca..25aece96 100644 --- a/crawlers/mooncrawl/mooncrawl/blockchain.py +++ b/crawlers/mooncrawl/mooncrawl/blockchain.py @@ -46,6 +46,7 @@ MOONSTREAM_NODE_GAME7_ORBIT_ARBITRUM_SEPOLIA_A_EXTERNAL_URI, MOONSTREAM_NODE_IMX_ZKEVM_A_EXTERNAL_URI, MOONSTREAM_NODE_GAME7_TESTNET_A_EXTERNAL_URI, + MOONSTREAM_NODE_GAME7_A_EXTERNAL_URI, MOONSTREAM_NODE_SEPOLIA_A_EXTERNAL_URI, WEB3_CLIENT_REQUEST_TIMEOUT_SECONDS, ) @@ -85,6 +86,7 @@ class BlockCrawlError(Exception): AvailableBlockchainType.GAME7_ORBIT_ARBITRUM_SEPOLIA: MOONSTREAM_NODE_GAME7_ORBIT_ARBITRUM_SEPOLIA_A_EXTERNAL_URI, AvailableBlockchainType.IMX_ZKEVM: MOONSTREAM_NODE_IMX_ZKEVM_A_EXTERNAL_URI, AvailableBlockchainType.GAME7_TESTNET: MOONSTREAM_NODE_GAME7_TESTNET_A_EXTERNAL_URI, + AvailableBlockchainType.GAME7: MOONSTREAM_NODE_GAME7_A_EXTERNAL_URI, AvailableBlockchainType.SEPOLIA: MOONSTREAM_NODE_SEPOLIA_A_EXTERNAL_URI, } diff --git a/crawlers/mooncrawl/mooncrawl/data.py b/crawlers/mooncrawl/mooncrawl/data.py index 020fa7f9..28649cff 100644 --- a/crawlers/mooncrawl/mooncrawl/data.py +++ 
b/crawlers/mooncrawl/mooncrawl/data.py @@ -70,3 +70,6 @@ class ViewTasks(BaseModel): name: str outputs: List[Dict[str, Any]] address: str + customer_id: Optional[str] = None + instance_id: Optional[str] = None + v3: Optional[bool] = False diff --git a/crawlers/mooncrawl/mooncrawl/settings.py b/crawlers/mooncrawl/mooncrawl/settings.py index ceba9fc1..ca16fb7b 100644 --- a/crawlers/mooncrawl/mooncrawl/settings.py +++ b/crawlers/mooncrawl/mooncrawl/settings.py @@ -254,6 +254,12 @@ "MOONSTREAM_NODE_GAME7_TESTNET_A_EXTERNAL_URI env variable is not set" ) +MOONSTREAM_NODE_GAME7_A_EXTERNAL_URI = os.environ.get( + "MOONSTREAM_NODE_GAME7_A_EXTERNAL_URI", "" +) +if MOONSTREAM_NODE_GAME7_A_EXTERNAL_URI == "": + raise Exception("MOONSTREAM_NODE_GAME7_A_EXTERNAL_URI env variable is not set") + MOONSTREAM_NODE_SEPOLIA_A_EXTERNAL_URI = os.environ.get( "MOONSTREAM_NODE_SEPOLIA_A_EXTERNAL_URI", "" @@ -394,6 +400,8 @@ AvailableBlockchainType.BLAST: "0xcA11bde05977b3631167028862bE2a173976CA11", AvailableBlockchainType.MANTLE: "0xcA11bde05977b3631167028862bE2a173976CA11", AvailableBlockchainType.MANTLE_SEPOLIA: "0xcA11bde05977b3631167028862bE2a173976CA11", + AvailableBlockchainType.GAME7_TESTNET: "0xcA11bde05977b3631167028862bE2a173976CA11", + AvailableBlockchainType.GAME7: "0x1422d8aC9b5E102E6EbA56F0949a2377AB3D8CE9", } diff --git a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py index 696a5394..8bbfe366 100644 --- a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py +++ b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py @@ -8,7 +8,9 @@ from concurrent.futures._base import TimeoutError from pprint import pprint from typing import Any, Dict, List, Optional +import requests from uuid import UUID +from web3 import Web3 from moonstream.client import Moonstream # type: ignore from moonstreamtypes.blockchain import AvailableBlockchainType @@ -17,7 +19,7 @@ from ..actions import recive_S3_data_from_query, get_all_entries_from_search from 
..blockchain import connect from ..data import ViewTasks -from ..db import PrePing_SessionLocal +from ..db import PrePing_SessionLocal, create_moonstream_engine, sessionmaker from ..settings import ( bugout_client as bc, INFURA_PROJECT_ID, @@ -25,6 +27,7 @@ multicall_contracts, MOONSTREAM_ADMIN_ACCESS_TOKEN, MOONSTREAM_STATE_CRAWLER_JOURNAL_ID, + MOONSTREAM_DB_V3_CONTROLLER_API, ) from .db import clean_labels, commit_session, view_call_to_label from .Multicall2_interface import Contract as Multicall2 @@ -37,7 +40,26 @@ client = Moonstream() -def execute_query(query: Dict[str, Any], token: str): +def request_connection_string( + customer_id: str, + instance_id: int, + token: str, + user: str = "seer", # token with write access +) -> str: + """ + Request connection string from the Moonstream API. + """ + response = requests.get( + f"{MOONSTREAM_DB_V3_CONTROLLER_API}/customers/{customer_id}/instances/{instance_id}/creds/{user}/url", + headers={"Authorization": f"Bearer {token}"}, + ) + + response.raise_for_status() + + return response.text.replace('"', "") + + +def execute_query(query: Dict[str, Any], token: str) -> Any: """ Query task example: @@ -55,6 +77,8 @@ def execute_query(query: Dict[str, Any], token: str): """ + print(f"Executing query: {query}") + # get the query url query_url = query["query_url"] @@ -65,6 +89,11 @@ def execute_query(query: Dict[str, Any], token: str): params = query["params"] body = {"params": params} + query_params = dict() + + if query.get("customer_id") and query.get("instance_id"): + query_params["customer_id"] = query["customer_id"] + query_params["instance_id"] = query["instance_id"] if blockchain: body["blockchain"] = blockchain @@ -76,6 +105,8 @@ def execute_query(query: Dict[str, Any], token: str): token=token, query_name=query_url, custom_body=body, + params=params, + query_params=query_params, ) # extract the keys as a list @@ -100,136 +131,142 @@ def execute_query(query: Dict[str, Any], token: str): return result -def 
make_multicall( - multicall_method: Any, - calls: List[Any], - block_timestamp: int, - block_number: str = "latest", -) -> Any: +def encode_calls(calls: List[Dict[str, Any]]) -> List[tuple]: + """Encodes the call data for multicall.""" multicall_calls = [] - for call in calls: try: - multicall_calls.append( - ( - call["address"], - call["method"].encode_data(call["inputs"]).hex(), - ) - ) + encoded_data = call["method"].encode_data(call["inputs"]).hex() + multicall_calls.append((call["address"], encoded_data)) except Exception as e: logger.error( - f'Error encoding data for method {call["method"].name} call: {call}' + f'Error encoding data for method {call["method"].name} call: {call}. Error: {e}' ) + return multicall_calls - multicall_result = multicall_method(False, calls=multicall_calls).call( - block_identifier=block_number - ) - results = [] +def perform_multicall( + multicall_method: Any, multicall_calls: List[tuple], block_identifier: str +) -> Any: + """Performs the multicall and returns the result.""" + return multicall_method(False, calls=multicall_calls).call(block_identifier=block_identifier) - # Handle the case with not successful calls + +def process_multicall_result( + calls: List[Dict[str, Any]], + multicall_result: Any, + multicall_calls: List[tuple], + block_timestamp: int, + block_number: str, + block_hash: Optional[str], +) -> List[Dict[str, Any]]: + """Processes the multicall result and decodes the data.""" + results = [] for index, encoded_data in enumerate(multicall_result): + call = calls[index] try: - if encoded_data[0]: - results.append( - { - "result": calls[index]["method"].decode_data(encoded_data[1]), - "hash": calls[index]["hash"], - "method": calls[index]["method"], - "address": calls[index]["address"], - "name": calls[index]["method"].name, - "inputs": calls[index]["inputs"], - "call_data": multicall_calls[index][1], - "block_number": block_number, - "block_timestamp": block_timestamp, - "status": encoded_data[0], - } - ) - else: - 
results.append( - { - "result": calls[index]["method"].decode_data(encoded_data[1]), - "hash": calls[index]["hash"], - "method": calls[index]["method"], - "address": calls[index]["address"], - "name": calls[index]["method"].name, - "inputs": calls[index]["inputs"], - "call_data": multicall_calls[index][1], - "block_number": block_number, - "block_timestamp": block_timestamp, - "status": encoded_data[0], - } - ) + result_data = call["method"].decode_data(encoded_data[1]) + result = { + "result": result_data, + "hash": call["hash"], + "method": call["method"], + "address": call["address"], + "name": call["method"].name, + "inputs": call["inputs"], + "call_data": multicall_calls[index][1], + "block_number": block_number, + "block_timestamp": block_timestamp, + "block_hash": block_hash, + "status": encoded_data[0], + "v3": call.get("v3", False), + "customer_id": call.get("customer_id"), + "instance_id": call.get("instance_id"), + } + results.append(result) except Exception as e: - results.append( - { - "result": str(encoded_data[1]), - "hash": calls[index]["hash"], - "method": calls[index]["method"], - "address": calls[index]["address"], - "name": calls[index]["method"].name, - "inputs": calls[index]["inputs"], - "call_data": multicall_calls[index][1], - "block_number": block_number, - "block_timestamp": block_timestamp, - "status": encoded_data[0], - "error": str(e), - } - ) - + result = { + "result": str(encoded_data[1]), + "hash": call["hash"], + "method": call["method"], + "address": call["address"], + "name": call["method"].name, + "inputs": call["inputs"], + "call_data": multicall_calls[index][1], + "block_number": block_number, + "block_timestamp": block_timestamp, + "block_hash": block_hash, + "status": encoded_data[0], + "error": str(e), + "v3": call.get("v3", False), + "customer_id": call.get("customer_id"), + "instance_id": call.get("instance_id"), + } + results.append(result) logger.error( - f"Error decoding data for for method {call['method'].name} call 
{calls[index]}: {e}." + f"Error decoding data for method {call['method'].name} call {call}: {e}." ) - # data is not decoded, return the encoded data logger.error(f"Encoded data: {encoded_data}") + return results + +def make_multicall( + multicall_method: Any, + calls: List[Dict[str, Any]], + block_timestamp: int, + block_number: str = "latest", + block_hash: Optional[str] = None, +) -> List[Dict[str, Any]]: + """Makes a multicall to the blockchain and processes the results.""" + multicall_calls = encode_calls(calls) + # breakpoint() + multicall_result = perform_multicall( + multicall_method, multicall_calls, block_number + ) + results = process_multicall_result( + calls, + multicall_result, + multicall_calls, + block_timestamp, + block_number, + block_hash, + ) return results -def crawl_calls_level( - web3_client, - db_session, - calls, - responces, - contracts_ABIs, - interfaces, - batch_size, - multicall_method, - block_number, - blockchain_type, - block_timestamp, - max_batch_size=3000, - min_batch_size=4, -): +def generate_calls_of_level( + calls: List[Dict[str, Any]], + responses: Dict[str, Any], + contracts_ABIs: Dict[str, Any], + interfaces: Dict[str, Any], +) -> List[Dict[str, Any]]: + """Generates the calls for the current level.""" calls_of_level = [] - for call in calls: - if call["generated_hash"] in responces: + if call["generated_hash"] in responses: continue parameters = [] - for input in call["inputs"]: - if type(input["value"]) in (str, int): - if input["value"] not in responces: + if isinstance(input["value"], (str, int)): + if input["value"] not in responses: parameters.append([input["value"]]) else: - if input["value"] in contracts_ABIs[call["address"]] and ( - contracts_ABIs[call["address"]][input["value"]]["name"] + if ( + input["value"] in contracts_ABIs[call["address"]] + and contracts_ABIs[call["address"]][input["value"]]["name"] == "totalSupply" - ): # hack for totalSupply TODO(Andrey): need add propper support for response parsing + ): + 
# Hack for totalSupply parameters.append( - list(range(1, responces[input["value"]][0][0] + 1)) + list(range(1, responses[input["value"]][0][0] + 1)) ) else: - parameters.append(responces[input["value"]]) - elif type(input["value"]) == list: + parameters.append(responses[input["value"]]) + elif isinstance(input["value"], list): parameters.append(input["value"]) else: - raise - + raise Exception("Unknown input value type") for call_parameters in itertools.product(*parameters): - # hack for tuples product - if len(call_parameters) == 1 and type(call_parameters[0]) == tuple: + if len(call_parameters) == 1 and isinstance(call_parameters[0], tuple): call_parameters = call_parameters[0] calls_of_level.append( { @@ -239,21 +276,76 @@ def crawl_calls_level( ), "hash": call["generated_hash"], "inputs": call_parameters, + "v3": call.get("v3", False), + "customer_id": call.get("customer_id"), + "instance_id": call.get("instance_id"), } ) + return calls_of_level + + +def process_results( + make_multicall_result: List[Dict[str, Any]], + db_sessions: Dict[Any, Any], + responses: Dict[str, Any], + blockchain_type: Any, +) -> int: + """Processes the results and adds them to the appropriate database sessions.""" + add_to_session_count = 0 + sessions_to_commit = set() + for result in make_multicall_result: + v3 = result.get("v3", False) + if v3: + customer_id = result.get("customer_id") + instance_id = result.get("instance_id") + db_session = db_sessions.get((customer_id, instance_id)) + else: + db_session = db_sessions.get("v2") + if db_session is None: + logger.error(f"No db_session found for result {result}") + continue + db_view = view_call_to_label(blockchain_type, result, v3) + db_session.add(db_view) + sessions_to_commit.add(db_session) + add_to_session_count += 1 + if result["hash"] not in responses: + responses[result["hash"]] = [] + responses[result["hash"]].append(result["result"]) + # Commit all sessions + for session in sessions_to_commit: + commit_session(session) + 
logger.info(f"{add_to_session_count} labels committed to database.") + return add_to_session_count - retry = 0 +def crawl_calls_level( + web3_client: Web3, + db_sessions: Dict[Any, Any], + calls: List[Dict[str, Any]], + responses: Dict[str, Any], + contracts_ABIs: Dict[str, Any], + interfaces: Dict[str, Any], + batch_size: int, + multicall_method: Any, + block_number: str, + blockchain_type: Any, + block_timestamp: int, + max_batch_size: int = 3000, + min_batch_size: int = 4, + block_hash: Optional[str] = None, +) -> int: + """Crawls calls at a specific level.""" + calls_of_level = generate_calls_of_level( + calls, responses, contracts_ABIs, interfaces + ) + retry = 0 while len(calls_of_level) > 0: make_multicall_result = [] try: call_chunk = calls_of_level[:batch_size] - logger.info( f"Calling multicall2 with {len(call_chunk)} calls at block {block_number}" ) - - # 1 thead with timeout for hung multicall calls with ThreadPoolExecutor(max_workers=1) as executor: future = executor.submit( make_multicall, @@ -261,251 +353,354 @@ def crawl_calls_level( call_chunk, block_timestamp, block_number, + block_hash, ) make_multicall_result = future.result(timeout=20) retry = 0 calls_of_level = calls_of_level[batch_size:] - logger.info(f"lenght of task left {len(calls_of_level)}.") + logger.info(f"Length of tasks left: {len(calls_of_level)}.") batch_size = min(batch_size * 2, max_batch_size) - except ValueError as e: # missing trie node + except ValueError as e: logger.error(f"ValueError: {e}, retrying") retry += 1 if "missing trie node" in str(e): time.sleep(4) if retry > 5: - raise (e) + raise e batch_size = max(batch_size // 4, min_batch_size) - except TimeoutError as e: # timeout + except TimeoutError as e: logger.error(f"TimeoutError: {e}, retrying") retry += 1 if retry > 5: - raise (e) + raise e batch_size = max(batch_size // 3, min_batch_size) except Exception as e: logger.error(f"Exception: {e}") - raise (e) + raise e time.sleep(2) logger.debug(f"Retry: {retry}") - # 
results parsing and writing to database - add_to_session_count = 0 - for result in make_multicall_result: - db_view = view_call_to_label(blockchain_type, result) - db_session.add(db_view) - add_to_session_count += 1 - - if result["hash"] not in responces: - responces[result["hash"]] = [] - responces[result["hash"]].append(result["result"]) - commit_session(db_session) - logger.info(f"{add_to_session_count} labels commit to database.") - + process_results(make_multicall_result, db_sessions, responses, blockchain_type) return batch_size -def parse_jobs( - jobs: List[Any], - blockchain_type: AvailableBlockchainType, +def connect_to_web3( + blockchain_type: Any, web3_provider_uri: Optional[str], - block_number: Optional[int], - batch_size: int, - moonstream_token: str, - web3_uri: Optional[str] = None, -): - """ - Parse jobs from list and generate web3 interfaces for each contract. - """ - - contracts_ABIs: Dict[str, Any] = {} - contracts_methods: Dict[str, Any] = {} - calls: Dict[int, Any] = {0: []} - responces: Dict[str, Any] = {} - + web3_uri: Optional[str], +) -> Web3: + """Connects to the Web3 client.""" if web3_provider_uri is not None: try: logger.info( f"Connecting to blockchain: {blockchain_type} with custom provider!" ) - web3_client = connect( blockchain_type=blockchain_type, web3_uri=web3_provider_uri ) except Exception as e: logger.error( - f"Web3 connection to custom provider {web3_provider_uri} failed error: {e}" + f"Web3 connection to custom provider {web3_provider_uri} failed. 
Error: {e}" ) - raise (e) + raise e else: - logger.info(f"Connecting to blockchain: {blockchain_type} with Node balancer.") + logger.info(f"Connecting to blockchain: {blockchain_type} with node balancer.") web3_client = _retry_connect_web3( blockchain_type=blockchain_type, web3_uri=web3_uri ) - logger.info(f"Crawler started connected to blockchain: {blockchain_type}") + return web3_client + +def get_block_info(web3_client: Web3, block_number: Optional[int]) -> tuple: + """Retrieves block information.""" if block_number is None: block_number = web3_client.eth.get_block("latest").number # type: ignore - logger.info(f"Current block number: {block_number}") - - block_timestamp = web3_client.eth.get_block(block_number).timestamp # type: ignore - - multicaller = Multicall2( - web3_client, web3_client.toChecksumAddress(multicall_contracts[blockchain_type]) - ) - - multicall_method = multicaller.tryAggregate - - def recursive_unpack(method_abi: Any, level: int = 0) -> Any: - """ - Generate tree of calls for crawling - """ - have_subcalls = False - - ### we add queryAPI to that tree - - if method_abi["type"] == "queryAPI": - # make queryAPI call - - responce = execute_query(method_abi, token=moonstream_token) - - # generate hash for queryAPI call - - generated_hash = hashlib.md5( - json.dumps( - method_abi, - sort_keys=True, - indent=4, - separators=(",", ": "), - ).encode("utf-8") - ).hexdigest() - - # add responce to responces - - responces[generated_hash] = responce - - return generated_hash - - abi = { - "inputs": [], - "outputs": method_abi["outputs"], - "name": method_abi["name"], - "type": "function", - "stateMutability": "view", - } - - for input in method_abi["inputs"]: - if type(input["value"]) in (int, list): - abi["inputs"].append(input) - - elif type(input["value"]) == str: - abi["inputs"].append(input) - - elif type(input["value"]) == dict: - if input["value"]["type"] == "function": - hash_link = recursive_unpack(input["value"], level + 1) - # replace 
defenition by hash pointing to the result of the recursive_unpack - input["value"] = hash_link - have_subcalls = True - elif input["value"]["type"] == "queryAPI": - input["value"] = recursive_unpack(input["value"], level + 1) - have_subcalls = True - abi["inputs"].append(input) - abi["address"] = method_abi["address"] + block = web3_client.eth.get_block(block_number) # type: ignore + block_timestamp = block.timestamp # type: ignore + block_hash = block.hash.hex() # type: ignore + return block_number, block_timestamp, block_hash + + +def recursive_unpack( + method_abi: Any, + level: int, + calls: Dict[int, List[Any]], + contracts_methods: Dict[str, Any], + contracts_ABIs: Dict[str, Any], + responses: Dict[str, Any], + moonstream_token: str, + v3: bool, + customer_id: Optional[str] = None, + instance_id: Optional[str] = None, +) -> str: + """Recursively unpacks method ABIs to generate a tree of calls.""" + have_subcalls = False + if method_abi["type"] == "queryAPI": + # Make queryAPI call + response = execute_query(method_abi, token=moonstream_token) + # Generate hash for queryAPI call generated_hash = hashlib.md5( - json.dumps(abi, sort_keys=True, indent=4, separators=(",", ": ")).encode( - "utf-8" - ) + json.dumps( + method_abi, + sort_keys=True, + indent=4, + separators=(",", ": "), + ).encode("utf-8") ).hexdigest() + # Add response to responses + responses[generated_hash] = response + return generated_hash - abi["generated_hash"] = generated_hash - if have_subcalls: - level += 1 - if not calls.get(level): - calls[level] = [] - calls[level].append(abi) - else: - level = 0 - - if not calls.get(level): - calls[level] = [] - calls[level].append(abi) + abi = { + "inputs": [], + "outputs": method_abi["outputs"], + "name": method_abi["name"], + "type": "function", + "stateMutability": "view", + "v3": v3, + "customer_id": customer_id, + "instance_id": instance_id, + } - if not contracts_methods.get(job["address"]): - contracts_methods[job["address"]] = [] - if 
generated_hash not in contracts_methods[job["address"]]: - contracts_methods[job["address"]].append(generated_hash) - if not contracts_ABIs.get(job["address"]): - contracts_ABIs[job["address"]] = {} - contracts_ABIs[job["address"]][generated_hash] = abi + for input in method_abi["inputs"]: + if isinstance(input["value"], (int, list, str)): + abi["inputs"].append(input) + elif isinstance(input["value"], dict): + if input["value"]["type"] in ["function", "queryAPI"]: + hash_link = recursive_unpack( + input["value"], + level + 1, + calls, + contracts_methods, + contracts_ABIs, + responses, + moonstream_token, + v3, + customer_id, + instance_id, + ) + input["value"] = hash_link + have_subcalls = True + abi["inputs"].append(input) + + abi["address"] = method_abi["address"] + generated_hash = hashlib.md5( + json.dumps(abi, sort_keys=True, indent=4, separators=(",", ": ")).encode( + "utf-8" + ) + ).hexdigest() + abi["generated_hash"] = generated_hash - return generated_hash + if have_subcalls: + level += 1 + calls.setdefault(level, []).append(abi) + else: + level = 0 + calls.setdefault(level, []).append(abi) - for job in jobs: - if job["address"] not in contracts_ABIs: - contracts_ABIs[job["address"]] = [] + contracts_methods.setdefault(method_abi["address"], []) + if generated_hash not in contracts_methods[method_abi["address"]]: + contracts_methods[method_abi["address"]].append(generated_hash) + contracts_ABIs.setdefault(method_abi["address"], {}) + contracts_ABIs[method_abi["address"]][generated_hash] = abi - recursive_unpack(job, 0) + return generated_hash - # generate contracts interfaces +def build_interfaces( + contracts_ABIs: Dict[str, Any], contracts_methods: Dict[str, Any], web3_client: Web3 +) -> Dict[str, Any]: + """Builds contract interfaces with deduplication of ABIs.""" interfaces = {} - for contract_address in contracts_ABIs: - # collect abis for each contract - abis = [] - + # Use a dictionary to deduplicate ABIs by function signature + unique_abis = {} 
for method_hash in contracts_methods[contract_address]: - abis.append(contracts_ABIs[contract_address][method_hash]) + abi = contracts_ABIs[contract_address][method_hash] + # Create a unique key based on name and input types + if abi["name"] not in unique_abis: + unique_abis[abi["name"]] = abi - # generate interface interfaces[contract_address] = web3_client.eth.contract( - address=web3_client.toChecksumAddress(contract_address), abi=abis + address=web3_client.toChecksumAddress(contract_address), + abi=list(unique_abis.values()) ) + return interfaces + + +def process_address_field(job: Dict[str, Any], moonstream_token: str) -> List[str]: + """Processes the address field of a job and returns a list of addresses.""" + if isinstance(job["address"], str): + return [Web3.toChecksumAddress(job["address"])] + elif isinstance(job["address"], list): + return [ + Web3.toChecksumAddress(address) for address in job["address"] + ] # manual job multiplication + elif isinstance(job["address"], dict): + if job["address"].get("type") == "queryAPI": + # QueryAPI job multiplication + addresses = execute_query(job["address"], token=moonstream_token) + checsum_addresses = [] + for address in addresses: + try: + checsum_addresses.append(Web3.toChecksumAddress(address)) + except Exception as e: + logger.error(f"Invalid address: {address}") + continue + return checsum_addresses + else: + raise ValueError(f"Invalid address type: {type(job['address'])}") + else: + raise ValueError(f"Invalid address type: {type(job['address'])}") - # reverse call_tree - call_tree_levels = sorted(calls.keys(), reverse=True)[:-1] - db_session = PrePing_SessionLocal() +def parse_jobs( + jobs: List[Any], + blockchain_type: Any, + web3_provider_uri: Optional[str], + block_number: Optional[int], + batch_size: int, + moonstream_token: str, + web3_uri: Optional[str] = None, + customer_db_uri: Optional[str] = None, +): + """ + Parses jobs from a list and generates web3 interfaces for each contract. 
+ """ + contracts_ABIs: Dict[str, Any] = {} + contracts_methods: Dict[str, Any] = {} + calls: Dict[int, List[Any]] = {0: []} + responses: Dict[str, Any] = {} + db_sessions: Dict[Any, Any] = {} - # run crawling of levels + web3_client = connect_to_web3(blockchain_type, web3_provider_uri, web3_uri) + block_number, block_timestamp, block_hash = get_block_info( + web3_client, block_number + ) + + multicaller = Multicall2( + web3_client, web3_client.toChecksumAddress(multicall_contracts[blockchain_type]) + ) + multicall_method = multicaller.tryAggregate + + # All sessions are stored in the dictionary db_sessions + # Under one try block try: - # initial call of level 0 all call without subcalls directly moved there + # Process jobs and create session + + for job in jobs: + + ### process address field + ### Handle case when 1 job represents multiple contracts + addresses = process_address_field(job, moonstream_token) + + for address in addresses[1:]: + new_job = job.copy() + new_job["address"] = address + jobs.append(new_job) + + job["address"] = addresses[0] + + v3 = job.get("v3", False) + customer_id = job.get("customer_id") + instance_id = job.get("instance_id") + + ### DB sessions + if customer_db_uri is not None: + if v3 and (customer_id, instance_id) not in db_sessions: + # Create session + engine = create_moonstream_engine(customer_db_uri, 2, 100000) + session = sessionmaker(bind=engine) + try: + db_sessions[(customer_id, instance_id)] = session() + except Exception as e: + logger.error(f"Connection to {engine} failed: {e}") + continue + else: + if "v2" not in db_sessions: + engine = create_moonstream_engine(customer_db_uri, 2, 100000) + db_sessions["v2"] = sessionmaker(bind=engine)() + elif v3: + if (customer_id, instance_id) not in db_sessions: + # Create session + # Assume fetch_connection_string fetches the connection string + connection_string = request_connection_string( + customer_id=customer_id, + instance_id=instance_id, + token=moonstream_token, + ) + 
engine = create_moonstream_engine(connection_string, 2, 100000) + session = sessionmaker(bind=engine) + try: + db_sessions[(customer_id, instance_id)] = session() + except Exception as e: + logger.error(f"Connection to {engine} failed: {e}") + continue + else: + if "v2" not in db_sessions: + db_sessions["v2"] = PrePing_SessionLocal() + + if job["address"] not in contracts_ABIs: + contracts_ABIs[job["address"]] = {} + + recursive_unpack( + job, + 0, + calls, + contracts_methods, + contracts_ABIs, + responses, + moonstream_token, + v3, + customer_id, + instance_id, + ) + interfaces = build_interfaces(contracts_ABIs, contracts_methods, web3_client) + + call_tree_levels = sorted(calls.keys(), reverse=True)[:-1] + logger.info(f"Crawl level: 0. Jobs amount: {len(calls[0])}") - logger.info(f"call_tree_levels: {call_tree_levels}") + logger.info(f"Call tree levels: {call_tree_levels}") batch_size = crawl_calls_level( - web3_client, - db_session, - calls[0], - responces, - contracts_ABIs, - interfaces, - batch_size, - multicall_method, - block_number, - blockchain_type, - block_timestamp, + web3_client=web3_client, + db_sessions=db_sessions, + calls=calls[0], + responses=responses, + contracts_ABIs=contracts_ABIs, + interfaces=interfaces, + batch_size=batch_size, + multicall_method=multicall_method, + block_number=block_number, # type: ignore + blockchain_type=blockchain_type, + block_timestamp=block_timestamp, + block_hash=block_hash, ) for level in call_tree_levels: logger.info(f"Crawl level: {level}. 
Jobs amount: {len(calls[level])}") - batch_size = crawl_calls_level( - web3_client, - db_session, - calls[level], - responces, - contracts_ABIs, - interfaces, - batch_size, - multicall_method, - block_number, - blockchain_type, - block_timestamp, + web3_client=web3_client, + db_sessions=db_sessions, + calls=calls[level], + responses=responses, + contracts_ABIs=contracts_ABIs, + interfaces=interfaces, + batch_size=batch_size, + multicall_method=multicall_method, + block_number=block_number, # type: ignore + blockchain_type=blockchain_type, + block_timestamp=block_timestamp, + block_hash=block_hash, ) - finally: - db_session.close() + # Close all sessions + for session in db_sessions.values(): + try: + session.close() + except Exception as e: + logger.error(f"Failed to close session: {e}") def handle_crawl(args: argparse.Namespace) -> None: @@ -576,6 +771,7 @@ def handle_crawl(args: argparse.Namespace) -> None: args.batch_size, args.moonstream_token, args.web3_uri, + args.customer_db_uri, ) @@ -762,6 +958,11 @@ def main() -> None: default=500, help="Size of chunks wich send to Multicall2 contract.", ) + view_state_crawler_parser.add_argument( + "--customer-db-uri", + type=str, + help="URI for the customer database", + ) view_state_crawler_parser.set_defaults(func=handle_crawl) view_state_migration_parser = subparsers.add_parser( diff --git a/crawlers/mooncrawl/mooncrawl/state_crawler/db.py b/crawlers/mooncrawl/mooncrawl/state_crawler/db.py index 3682fdc7..79b529cc 100644 --- a/crawlers/mooncrawl/mooncrawl/state_crawler/db.py +++ b/crawlers/mooncrawl/mooncrawl/state_crawler/db.py @@ -1,6 +1,8 @@ import json import logging from typing import Any, Dict +from hexbytes import HexBytes + from moonstreamtypes.blockchain import AvailableBlockchainType, get_label_model from sqlalchemy.orm import Session @@ -14,13 +16,15 @@ def view_call_to_label( blockchain_type: AvailableBlockchainType, call: Dict[str, Any], + v3: bool = False, label_name=VIEW_STATE_CRAWLER_LABEL, ): """ 
Creates a label model. """ - label_model = get_label_model(blockchain_type) + version = 3 if v3 else 2 + label_model = get_label_model(blockchain_type, version=version) sanityzed_label_data = json.loads( json.dumps( @@ -35,14 +39,35 @@ def view_call_to_label( ).replace(r"\u0000", "") ) - label = label_model( - label=label_name, - label_data=sanityzed_label_data, - address=call["address"], - block_number=call["block_number"], - transaction_hash=None, - block_timestamp=call["block_timestamp"], - ) + if v3: + + del sanityzed_label_data["type"] + del sanityzed_label_data["name"] + + ## add zero transaction hash + + label = label_model( + label=label_name, + label_name=call["name"], + label_type="view", + label_data=sanityzed_label_data, + ### bytea + address=HexBytes(call["address"]), + block_number=call["block_number"], + block_timestamp=call["block_timestamp"], + block_hash=call["block_hash"], + ) + + else: + + label = label_model( + label=label_name, + label_data=sanityzed_label_data, + address=call["address"], + block_number=call["block_number"], + transaction_hash=None, + block_timestamp=call["block_timestamp"], + ) return label diff --git a/crawlers/mooncrawl/mooncrawl/version.py b/crawlers/mooncrawl/mooncrawl/version.py index 1a4a3d66..0ee15e40 100644 --- a/crawlers/mooncrawl/mooncrawl/version.py +++ b/crawlers/mooncrawl/mooncrawl/version.py @@ -2,4 +2,4 @@ Moonstream crawlers version. """ -MOONCRAWL_VERSION = "0.5.3" +MOONCRAWL_VERSION = "0.5.4"