Skip to content

Commit

Permalink
add pylint and fix issues
Browse files Browse the repository at this point in the history
  • Loading branch information
bh2smith committed May 29, 2024
1 parent 1112826 commit bd5d9d2
Show file tree
Hide file tree
Showing 5 changed files with 42 additions and 18 deletions.
18 changes: 18 additions & 0 deletions .github/workflows/pylint.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Lint the Python sources with pylint on every push.
name: Pylint

on: [push]

jobs:
  build:
    runs-on: ubuntu-latest
    name: Pylint
    steps:
      # The v1 actions run on a deprecated Node runtime; v4/v5 are the
      # maintained releases and take the same inputs used here.
      - uses: actions/checkout@v4
      - name: Set up Python 3.9
        uses: actions/setup-python@v5
        with:
          # Quote the version: an unquoted 3.10 would parse as the float 3.1.
          python-version: "3.9"
      - name: Install Dependencies
        run: pip install pylint
      - name: Run PyLint
        run: pylint ./src
2 changes: 2 additions & 0 deletions .pylintrc
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
[MASTER]

# Docstring checks are disabled project-wide; message en/disabling belongs in
# the [MESSAGES CONTROL] section, not [MASTER].
[MESSAGES CONTROL]
disable=missing-class-docstring,missing-module-docstring,missing-function-docstring
8 changes: 3 additions & 5 deletions src/near_lake_framework/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,14 @@
import itertools
from enum import Enum
from typing import Optional
from dataclasses import dataclass

from aiobotocore.session import get_session # type: ignore

from near_lake_framework import near_primitives
from near_lake_framework import s3_fetchers


from dataclasses import dataclass


class Network(Enum):
MAINNET = "mainnet"
TESTNET = "testnet"
Expand Down Expand Up @@ -43,7 +41,7 @@ class LakeConfig:
start_block_height: near_primitives.BlockHeight
blocks_preload_pool_size: int = 200

def __init__(
def __init__( # pylint: disable=too-many-arguments
self,
network: Network,
aws_access_key_id: str,
Expand Down Expand Up @@ -86,7 +84,7 @@ async def start(config: LakeConfig, streamer_messages_queue: asyncio.Queue):
await asyncio.sleep(2)
continue

print("Received {} blocks from S3".format(len(block_heights_prefixes)))
print(f"Received {len(block_heights_prefixes)} blocks from S3")

pending_block_heights = iter(block_heights_prefixes)
streamer_messages_futures = []
Expand Down
11 changes: 6 additions & 5 deletions src/near_lake_framework/near_primitives.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,16 +10,17 @@


class BlockHeightField(mm.fields.Integer):
    """
    Block Height is an unsigned 64-bit integer, so it needs to be serialized
    as a string and deserialized to an integer type in Python.

    JSON numbers are IEEE-754 doubles and cannot represent every u64 exactly;
    round-tripping through a string avoids silent precision loss.
    """

    def __init__(self, *args, **kwargs):
        # Force marshmallow to (de)serialize this integer via its string form.
        # No `return` here: __init__ must return None.
        super().__init__(*args, **kwargs, as_string=True)


@dataclass
class BlockHeader(DataClassJsonMixin):
class BlockHeader(DataClassJsonMixin): # pylint: disable=too-many-instance-attributes
epoch_id: CryptoHash
next_epoch_id: CryptoHash
hash: CryptoHash
Expand Down Expand Up @@ -63,7 +64,7 @@ class BlockHeader(DataClassJsonMixin):


@dataclass
class ChunkHeader(DataClassJsonMixin):
class ChunkHeader(DataClassJsonMixin): # pylint: disable=too-many-instance-attributes
chunk_hash: CryptoHash
prev_block_hash: CryptoHash
outcome_root: CryptoHash
Expand Down
21 changes: 13 additions & 8 deletions src/near_lake_framework/s3_fetchers.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import logging
from typing import List
import traceback
from botocore.exceptions import ClientError
from botocore.exceptions import ClientError, EndpointConnectionError

from near_lake_framework import near_primitives

Expand All @@ -17,7 +17,7 @@ async def list_blocks(
Bucket=s3_bucket_name,
Delimiter="/",
MaxKeys=number_of_blocks_requested,
StartAfter="{:012d}".format(start_from_block_height),
StartAfter=f"{start_from_block_height:012d}",
RequestPayer="requester",
)

Expand All @@ -32,7 +32,7 @@ async def fetch_streamer_message(
) -> near_primitives.StreamerMessage:
response = await s3_client.get_object(
Bucket=s3_bucket_name,
Key="{:012d}/block.json".format(block_height),
Key=f"{block_height:012d}/block.json",
RequestPayer="requester",
)

Expand All @@ -57,7 +57,7 @@ async def fetch_shard_or_retry(
shard_id: int,
) -> near_primitives.IndexerShard:
while True:
shard_key = "{:012d}/shard_{}.json".format(block_height, shard_id)
shard_key = f"{block_height:012d}/shard_{shard_id}.json"
try:
response = await s3_client.get_object(
Bucket=s3_bucket_name,
Expand All @@ -71,10 +71,15 @@ async def fetch_shard_or_retry(
return near_primitives.IndexerShard.from_json(body)
except ClientError as e:
if e.response["Error"]["Code"] == "NoSuchKey":
logging.warning(
"Failed to fetch shard {} - does not exist".format(shard_key)
)
logging.warning("Failed to fetch shard %s - doesn't exist", shard_key)
else:
traceback.print_exc()
except Exception:
except EndpointConnectionError as e:
logging.error("EndpointConnectionError while fetching shard %s: %s", shard_key, e)
traceback.print_exc()
except asyncio.TimeoutError as e:
logging.error("TimeoutError while fetching shard %s: %s", shard_key, e)
traceback.print_exc()
except Exception as e: # pylint: disable=broad-exception-caught
logging.error("Unexpected error while fetching shard %s: %s", shard_key, e)
traceback.print_exc()

0 comments on commit bd5d9d2

Please sign in to comment.