diff --git a/.github/workflows/pull-request.yaml b/.github/workflows/pull-request.yaml index 5b3495e..4a7eb23 100644 --- a/.github/workflows/pull-request.yaml +++ b/.github/workflows/pull-request.yaml @@ -18,5 +18,5 @@ jobs: - name: Format Lint & Types run: | black --check ./ - pylint gas_station_event_indexer.py - mypy --ignore-missing-imports --strict gas_station_event_indexer.py + pylint *.py + mypy --strict *.py diff --git a/Makefile b/Makefile index a63298c..bbcd0c5 100644 --- a/Makefile +++ b/Makefile @@ -29,6 +29,10 @@ clean: run: @$(PYTHON) gas_station_event_indexer.py +## check: Format code, lint and type-check +check: + black ./ && pylint *.py && mypy --strict *.py + help: @echo "Makefile for Python project with venv" @echo diff --git a/config.toml b/config.toml index a059615..6f2382e 100644 --- a/config.toml +++ b/config.toml @@ -1,3 +1,4 @@ network = "testnet" # gas station contract account id contract_id = "canhazgas.testnet" +log_level = "info" diff --git a/gas_station_event_indexer.py b/gas_station_event_indexer.py index 17c32e2..febb370 100644 --- a/gas_station_event_indexer.py +++ b/gas_station_event_indexer.py @@ -9,9 +9,20 @@ import requests import toml from dataclasses_json import DataClassJsonMixin -from near_lake_framework import near_primitives, LakeConfig, streamer, Network +from near_lake_framework import ( + near_primitives, + LakeConfig, + streamer, + Network, + utils as nlf_util, +) + +from logger import set_logger REQUEST_TIMEOUT = 10 +LOG_LEVEL = "info" + +logging = set_logger(__name__, LOG_LEVEL) ParsedLog = dict[str, Any] @@ -43,12 +54,14 @@ def send_to_service(self) -> None: url = "localhost:3030/send_funding_and_user_signed_txns" try: response = requests.post(url, json=payload, timeout=REQUEST_TIMEOUT) - if response.status_code not in {200, 201}: - print(f"Error: calling {url}: {response.text}") + message = f"{url}: {response.text}" + if response.status_code in {200, 201}: + logging.info("Response from %s", message) else: - 
print(f"Response from {url}: {response.text}") + logging.error("Error: calling %s", message) + except requests.RequestException as e: - print(f"HTTP Request failed: {str(e)}") + logging.error("HTTP Request failed: %s", str(e)) @@ -72,38 +85,6 @@ def from_toml(config_path: str = "config.toml") -> Config: return Config(**config_dict) -def fetch_latest_block( - network: Network = Network.MAINNET, -) -> near_primitives.BlockHeight: - """ - Define the RPC endpoint for the NEAR network - """ - url = f"https://rpc.{network}.near.org" - - # Define the payload for fetching the latest block - payload = json.dumps( - { - "jsonrpc": "2.0", - "id": "dontcare", - "method": "block", - "params": {"finality": "final"}, - } - ) - - # Define the headers for the HTTP request - headers = {"Content-Type": "application/json"} - - # Send the HTTP request to the NEAR RPC endpoint - response = requests.request( - "POST", url, headers=headers, data=payload, timeout=REQUEST_TIMEOUT - ) - - # Parse the JSON response to get the latest final block height - latest_final_block: int = response.json()["result"]["header"]["height"] - - return latest_final_block - - # Event format json example: # { # "standard": "x-gas-station", @@ -130,9 +111,8 @@ def extract_relevant_log( try: parsed_log: ParsedLog = json.loads(log[len(log_key) :]) except json.JSONDecodeError: - print( - f"Receipt ID: `{receipt_id}`\n" - f"Error during parsing logs from JSON string to dict" + logging.error( + "Receipt ID: %s\nError parsing logs from JSON string to dict", receipt_id ) return None @@ -163,7 +143,7 @@ def process_log(log: str, receipt: near_primitives.Receipt) -> bool: if parsed_log is None: return False - print(json.dumps(parsed_log, indent=4)) + logging.info("processed log: %s", json.dumps(parsed_log, indent=4)) return process_receipt_if_gas_station_contract(receipt, parsed_log) @@ -176,17 +156,17 @@ def process_receipt_if_gas_station_contract( try: event_data = EventData.from_dict(parsed_log["data"]) if 
not event_data.validate(): - print(f"Error: Invalid event data: {event_data}") + logging.error("Invalid event data: %s", event_data) return False - print(json.dumps(event_data, indent=4)) + logging.debug(json.dumps(event_data, indent=4)) event_data.send_to_service() return True except json.JSONDecodeError: - print( - f"Receipt ID: `{receipt.receipt_id}`\n" - "Error during parsing event data from JSON string to dict" + logging.error( + "Receipt ID: %s\nError parsing event data from JSON string to dict", + receipt.receipt_id, ) return False @@ -199,7 +179,7 @@ async def handle_streamer_message( async def main() -> None: - latest_final_block = fetch_latest_block(network=CONFIG.network) + latest_final_block = nlf_util.fetch_latest_block(network=CONFIG.network) lake_config = LakeConfig( network=CONFIG.network, start_block_height=latest_final_block, @@ -207,7 +187,9 @@ async def main() -> None: aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"], aws_secret_key=os.environ["AWS_SECRET_ACCESS_KEY"], ) - print(f"Latest final block: {latest_final_block} on network: {CONFIG.network}") + logging.info( + "Latest final block: %s on network: %s", latest_final_block, CONFIG.network.name + ) _stream_handle, streamer_messages_queue = streamer(lake_config) while True: diff --git a/logger.py b/logger.py new file mode 100644 index 0000000..b26a5d1 --- /dev/null +++ b/logger.py @@ -0,0 +1,37 @@ +import logging + + +class IgnoreFilter(logging.Filter): + """ + Customized Log filter that ignores a given message text. 
+ """ + + def __init__(self, ignored_message: str, name: str = "") -> None: + super().__init__(name) + self.ignored_message = ignored_message + self.enabled = True + + def filter(self, record: logging.LogRecord) -> bool: + if not self.enabled: + return True + return self.ignored_message not in record.getMessage() + + def set_ignored_message(self, message: str) -> None: + """Set a new message pattern to ignore.""" + self.ignored_message = message + + def toggle_filter(self, enable: bool) -> None: + """Enable or disable the filter.""" + self.enabled = enable + + def get_ignored_message(self) -> str: + """Get the current ignored message pattern.""" + return self.ignored_message + + +def set_logger(name: str, level: int | str) -> logging.Logger: + logging.basicConfig(level=level.upper() if isinstance(level, str) else level) + logging.getLogger("near_lake_framework").setLevel(logging.INFO) + missing_shard_filter = IgnoreFilter("doesn't exist") + logging.getLogger().addFilter(missing_shard_filter) + return logging.getLogger(name) diff --git a/requirements.txt b/requirements.txt index 2e6132e..8411101 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ asyncio>=3.4.3 -near-lake-framework>=0.0.8 +near-lake-framework>=0.1.0 requests>=2.32.0 toml>=0.10.2