Add Logger #10

Merged (4 commits) on May 30, 2024

Changes from all commits
4 changes: 2 additions & 2 deletions .github/workflows/pull-request.yaml
@@ -18,5 +18,5 @@ jobs:
      - name: Format Lint & Types
        run: |
          black --check ./
-         pylint gas_station_event_indexer.py
-         mypy --ignore-missing-imports --strict gas_station_event_indexer.py
+         pylint *.py
+         mypy --strict *.py
4 changes: 4 additions & 0 deletions Makefile
@@ -29,6 +29,10 @@ clean:
run:
	@$(PYTHON) gas_station_event_indexer.py

+## check: Format code, lint and type-check
+check:
+	black ./ && pylint *.py && mypy --strict *.py

help:
	@echo "Makefile for Python project with venv"
	@echo
1 change: 1 addition & 0 deletions config.toml
@@ -1,3 +1,4 @@
network = "testnet"
# gas station contract account id
contract_id = "canhazgas.testnet"
+log_level = "info"
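
Aside (illustrative, not part of this diff): for the new log_level key to survive the Config(**config_dict) call in from_toml (visible in the next file's diff), the Config dataclass presumably carries a matching field. A minimal sketch of that shape, with the field types assumed rather than taken from the repo:

from dataclasses import dataclass

import toml


@dataclass
class Config:
    network: str      # assumed; the indexer appears to use a Network enum downstream
    contract_id: str
    log_level: str    # assumed field backing the new key above

    @staticmethod
    def from_toml(config_path: str = "config.toml") -> "Config":
        config_dict = toml.load(config_path)
        return Config(**config_dict)


print(Config.from_toml().log_level)  # -> "info" given the config.toml above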
78 changes: 30 additions & 48 deletions gas_station_event_indexer.py
@@ -9,9 +9,20 @@
import requests
import toml
from dataclasses_json import DataClassJsonMixin
-from near_lake_framework import near_primitives, LakeConfig, streamer, Network
+from near_lake_framework import (
+    near_primitives,
+    LakeConfig,
+    streamer,
+    Network,
+    utils as nlf_util,
+)

+from logger import set_logger

REQUEST_TIMEOUT = 10
+LOG_LEVEL = "info"

+logging = set_logger(__name__, LOG_LEVEL)
ParsedLog = dict[str, Any]


@@ -43,12 +54,14 @@ def send_to_service(self) -> None:
url = "localhost:3030/send_funding_and_user_signed_txns"
try:
response = requests.post(url, json=payload, timeout=REQUEST_TIMEOUT)
if response.status_code not in {200, 201}:
print(f"Error: calling {url}: {response.text}")
message = f"{url}: {response.text}"
if response.status_code in {200, 201}:
logging.info("Response from %s", message)
else:
print(f"Response from {url}: {response.text}")
logging.error("Error: calling %s", message)

except requests.RequestException as e:
print(f"HTTP Request failed: {str(e)}")
logging.error("HTTP Request failed: %s", {str(e)})


@dataclass
@@ -72,38 +85,6 @@ def from_toml(config_path: str = "config.toml") -> Config:
return Config(**config_dict)


-def fetch_latest_block(
-    network: Network = Network.MAINNET,
-) -> near_primitives.BlockHeight:
-    """
-    Define the RPC endpoint for the NEAR network
-    """
-    url = f"https://rpc.{network}.near.org"
-
-    # Define the payload for fetching the latest block
-    payload = json.dumps(
-        {
-            "jsonrpc": "2.0",
-            "id": "dontcare",
-            "method": "block",
-            "params": {"finality": "final"},
-        }
-    )
-
-    # Define the headers for the HTTP request
-    headers = {"Content-Type": "application/json"}
-
-    # Send the HTTP request to the NEAR RPC endpoint
-    response = requests.request(
-        "POST", url, headers=headers, data=payload, timeout=REQUEST_TIMEOUT
-    )
-
-    # Parse the JSON response to get the latest final block height
-    latest_final_block: int = response.json()["result"]["header"]["height"]
-
-    return latest_final_block


# Event format json example:
# {
# "standard": "x-gas-station",
@@ -130,9 +111,8 @@ def extract_relevant_log(
    try:
        parsed_log: ParsedLog = json.loads(log[len(log_key) :])
    except json.JSONDecodeError:
-        print(
-            f"Receipt ID: `{receipt_id}`\n"
-            f"Error during parsing logs from JSON string to dict"
+        logging.error(
+            "Receipt ID: %s\nError parsing logs from JSON string to dict", receipt_id
        )
        return None

@@ -163,7 +143,7 @@ def process_log(log: str, receipt: near_primitives.Receipt) -> bool:
    if parsed_log is None:
        return False

-    print(json.dumps(parsed_log, indent=4))
+    logging.info("processed log: %s", json.dumps(parsed_log, indent=4))
    return process_receipt_if_gas_station_contract(receipt, parsed_log)


@@ -176,17 +156,17 @@ def process_receipt_if_gas_station_contract(
    try:
        event_data = EventData.from_dict(parsed_log["data"])
        if not event_data.validate():
-            print(f"Error: Invalid event data: {event_data}")
+            logging.error("Invalid event data: %s", event_data)
            return False

-        print(json.dumps(event_data, indent=4))
+        logging.debug(event_data.to_json(indent=4))  # dataclass isn't json.dumps-serializable
        event_data.send_to_service()
        return True

    except json.JSONDecodeError:
-        print(
-            f"Receipt ID: `{receipt.receipt_id}`\n"
-            "Error during parsing event data from JSON string to dict"
+        logging.error(
+            "Receipt ID: %s\nError parsing event data from JSON string to dict",
+            receipt.receipt_id,
        )
        return False

@@ -199,15 +179,17 @@ async def handle_streamer_message(


async def main() -> None:
-    latest_final_block = fetch_latest_block(network=CONFIG.network)
+    latest_final_block = nlf_util.fetch_latest_block(network=CONFIG.network)
    lake_config = LakeConfig(
        network=CONFIG.network,
        start_block_height=latest_final_block,
        # These fields must be set!
        aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
        aws_secret_key=os.environ["AWS_SECRET_ACCESS_KEY"],
    )
-    print(f"Latest final block: {latest_final_block} on network: {CONFIG.network}")
+    logging.info(
+        "Latest final block: %s on network: %s", latest_final_block, CONFIG.network.name
+    )

    _stream_handle, streamer_messages_queue = streamer(lake_config)
    while True:
37 changes: 37 additions & 0 deletions logger.py
@@ -0,0 +1,37 @@
import logging


class IgnoreFilter(logging.Filter):
    """
    Customized log filter that ignores a given message text.
    """

    def __init__(self, ignored_message: str, name: str = "") -> None:
        super().__init__(name)
        self.ignored_message = ignored_message
        self.enabled = True

    def filter(self, record: logging.LogRecord) -> bool:
        if not self.enabled:
            return True
        return self.ignored_message not in record.getMessage()

    def set_ignored_message(self, message: str) -> None:
        """Set a new message pattern to ignore."""
        self.ignored_message = message

    def toggle_filter(self, enable: bool) -> None:
        """Enable or disable the filter."""
        self.enabled = enable

    def get_ignored_message(self) -> str:
        """Get the current ignored message pattern."""
        return self.ignored_message


def set_logger(name: str, level: int | str) -> logging.Logger:
    # Normalize "info" -> "INFO": logging rejects lowercase level names.
    if isinstance(level, str):
        level = level.upper()
    logging.basicConfig(level=level)
    logging.getLogger("near_lake_framework").setLevel(logging.INFO)
    missing_shard_filter = IgnoreFilter("doesn't exist")
[Inline review thread on the IgnoreFilter("doesn't exist") line]

Collaborator:
Is the missing shard data something that gets backfilled in future data from near lake, or skipped?

@bh2smith (Collaborator, Author) on May 30, 2024:
My understanding is that there was no data on the shard corresponding to the block. I think this filter can be removed if we downgrade this log to debug and set the lake log level to info. The filter is only a temporary solution.

@bh2smith (Collaborator, Author):
Created a PR here: frolvanya/near-lake-framework-py#16
We can leave this open and wait for the next version bump (if it's approved), then remove the whole filter class.

    logging.getLogger().addFilter(missing_shard_filter)
    return logging.getLogger(name)
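
For illustration only (not part of the PR): a minimal, self-contained sketch of how the IgnoreFilter added here behaves, including the runtime toggle that would let it be switched off once the upstream fix discussed in the thread above lands. The logger name and log messages are invented for the demo:

import logging

from logger import IgnoreFilter

logging.basicConfig(level=logging.INFO)
demo = logging.getLogger("filter-demo")  # hypothetical logger for the demo

shard_filter = IgnoreFilter("doesn't exist")
demo.addFilter(shard_filter)

demo.info("Shard 3 doesn't exist")      # dropped: the message matches the filter
demo.info("Processed block 119373157")  # emitted normally

# The thread above treats the filter as a stopgap; it can be disabled at
# runtime without detaching it, e.g. after the upstream log is downgraded:
shard_filter.toggle_filter(False)
demo.info("Shard 3 doesn't exist")      # now emitted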
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,5 +1,5 @@
asyncio>=3.4.3
-near-lake-framework>=0.0.8
+near-lake-framework>=0.1.0
requests>=2.32.0
toml>=0.10.2

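A side note on this bump (illustrative): near-lake-framework 0.1.0 is what lets the hand-rolled fetch_latest_block RPC helper be deleted from gas_station_event_indexer.py in favor of the framework's own utility. Mirroring the (apparently synchronous) call in main() above, with the network value assumed:

from near_lake_framework import Network, utils as nlf_util

# Replaces the deleted local helper that POSTed to https://rpc.<network>.near.org;
# the call shape mirrors main()'s usage after this PR.
latest_final_block = nlf_util.fetch_latest_block(network=Network.TESTNET)
print(f"Latest final block: {latest_final_block}")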