chore(deps): bump rebdhuhn #142

Closed · wants to merge 6 commits
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -1,18 +1,18 @@
 # to update all repo revisions just run: pre-commit autoupdate
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v5.0.0
     hooks:
       - id: check-yaml
       - id: end-of-file-fixer
       - id: trailing-whitespace
   - repo: https://github.com/psf/black
-    rev: 23.9.1
+    rev: 24.10.0
     hooks:
       - id: black
         language_version: python3
   - repo: https://github.com/pycqa/isort
-    rev: 5.12.0
+    rev: 5.13.2
     hooks:
       - id: isort
         name: isort (python)
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -19,7 +19,7 @@ classifiers = [
 ]
 dependencies = [
     "ebdamame>=0.4.1",
-    "rebdhuhn>=0.9.0",
+    "rebdhuhn>=0.14.3",
     "cattrs",
     "click",
     "pydantic-settings"
2 changes: 1 addition & 1 deletion requirements.txt
@@ -48,7 +48,7 @@ python-docx==1.1.2
     # via ebdamame
 python-dotenv==1.0.1
     # via pydantic-settings
-rebdhuhn==0.14.2
+rebdhuhn==0.14.3
     # via
     #   ebd-toolchain (pyproject.toml)
     #   ebdamame
120 changes: 42 additions & 78 deletions src/ebd_toolchain/main.py
@@ -34,7 +34,7 @@
 from ebdamame.docxtableconverter import DocxTableConverter
 from pydantic import Field
 from pydantic_settings import BaseSettings, SettingsConfigDict
-from rebdhuhn.graph_conversion import convert_empty_table_to_graph, convert_table_to_graph
+from rebdhuhn.graph_conversion import convert_table_to_graph
 from rebdhuhn.graphviz import convert_dot_to_svg_kroki, convert_graph_to_dot
 from rebdhuhn.kroki import DotToSvgConverter, Kroki, KrokiDotBadRequestError, KrokiPlantUmlBadRequestError
 from rebdhuhn.models.ebd_graph import EbdGraph
@@ -82,16 +82,7 @@ def _dump_svg(svg_path: Path, ebd_graph: EbdGraph, converter: DotToSvgConverter) -> None:

 def _dump_json(json_path: Path, ebd_table: EbdTable | EbdTableMetaData) -> None:
     with open(json_path, "w+", encoding="utf-8") as json_file:
-        if isinstance(ebd_table, EbdTableMetaData):
-            json.dump(
-                cattrs.unstructure(EbdTable(metadata=ebd_table, rows=[])),
-                json_file,
-                ensure_ascii=False,
-                indent=2,
-                sort_keys=True,
-            )
-        else:
-            json.dump(cattrs.unstructure(ebd_table), json_file, ensure_ascii=False, indent=2, sort_keys=True)
+        json.dump(cattrs.unstructure(ebd_table), json_file, ensure_ascii=False, indent=2, sort_keys=True)


 @click.command()
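Note on the hunk above: the isinstance branch in _dump_json becomes dead code because callers now always pass an EbdTable; table-less sections arrive pre-wrapped with rows=[] (see the next hunk). A minimal sketch of what the unified dump produces for such an empty table — the metadata field values are illustrative, and the rebdhuhn.models.ebd_table import path is assumed to mirror the rebdhuhn.models.ebd_graph import in the diff:

import json

import cattrs
from rebdhuhn.models.ebd_table import EbdTable, EbdTableMetaData

# Illustrative metadata; in main.py these values come from the scraped .docx chapter.
metadata = EbdTableMetaData(
    ebd_code="E_0123",
    chapter="GPKE",
    section="6.1.2: Beispielprozess",
    ebd_name="Beispiel-EBD",
    role="N/A",
    remark="Es ist kein EBD vorhanden.",
)
empty_table = EbdTable(metadata=metadata, rows=[])  # a table-less section
# The same call _dump_json now makes, minus the file handling:
print(json.dumps(cattrs.unstructure(empty_table), ensure_ascii=False, indent=2, sort_keys=True))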
@@ -153,59 +144,29 @@ def handle_known_error(error: Exception, ebd_key: str) -> None:
             continue
         assert ebd_kapitel is not None
         assert ebd_kapitel.subsection_title is not None
-        if isinstance(docx_tables, EbdNoTableSection):
-            ebd_meta_data = EbdTableMetaData(
-                ebd_code=ebd_key,
-                ebd_name=ebd_kapitel.subsection_title,
-                chapter=ebd_kapitel.chapter_title,  # type:ignore[arg-type]
-                # pylint:disable=line-too-long
-                section=f"{ebd_kapitel.chapter}.{ebd_kapitel.section}.{ebd_kapitel.subsection}: {ebd_kapitel.section_title}",
-                role="N/A",
-                remark=docx_tables.remark,
-            )
-            if "json" in export_types:
-                json_path = output_path / Path(f"{ebd_key}.json")
-                _dump_json(json_path, ebd_meta_data)
-                click.secho(f"💾 Successfully exported '{ebd_key}.json' to {json_path.absolute()}")
-            try:
-                ebd_graph = convert_empty_table_to_graph(ebd_meta_data)
-            except (
-                EbdCrossReferenceNotSupportedError,
-                EndeInWrongColumnError,
-                OutcomeCodeAmbiguousError,
-            ) as known_issue:
-                handle_known_error(known_issue, ebd_key)
-                continue
-            except Exception as unknown_error:  # pylint:disable=broad-except
-                click.secho(f"Error while graphing {ebd_key}: {str(unknown_error)}; Skip!", fg="red")
-                continue
-            try:
-                if "dot" in export_types:
-                    dot_path = output_path / Path(f"{ebd_key}.dot")
-                    _dump_dot(dot_path, ebd_graph)
-                    click.secho(f"💾 Successfully exported '{ebd_key}.dot' to {dot_path.absolute()}")
-                if "svg" in export_types:
-                    svg_path = output_path / Path(f"{ebd_key}.svg")
-                    _dump_svg(svg_path, ebd_graph, kroki_client)
-                    click.secho(f"💾 Successfully exported '{ebd_key}.svg' to {svg_path.absolute()}")
-            except (PathsNotGreaterThanOneError, KrokiDotBadRequestError) as known_issue:
-                handle_known_error(known_issue, ebd_key)
-            except AssertionError as assertion_error:
-                # e.g. AssertionError: If indegree > 1, the number of paths should always be greater than 1 too.
-                click.secho(str(assertion_error), fg="red")
-            # both the SVG and dot path require graphviz to work, hence the common error handling block
-            continue
         try:
-            assert not isinstance(docx_tables, EbdNoTableSection)
-            converter = DocxTableConverter(
-                docx_tables,
-                ebd_key=ebd_key,
-                ebd_name=ebd_kapitel.subsection_title,
-                chapter=ebd_kapitel.chapter_title,  # type:ignore[arg-type]
-                # pylint:disable=line-too-long
-                section=f"{ebd_kapitel.chapter}.{ebd_kapitel.section}.{ebd_kapitel.subsection}: {ebd_kapitel.section_title}",
-            )
-            ebd_table = converter.convert_docx_tables_to_ebd_table()
+            if isinstance(docx_tables, EbdNoTableSection):
+                ebd_meta_data = EbdTableMetaData(
+                    ebd_code=ebd_key,
+                    ebd_name=ebd_kapitel.subsection_title,
+                    chapter=ebd_kapitel.chapter_title,  # type:ignore[arg-type]
+                    # pylint:disable=line-too-long
+                    section=f"{ebd_kapitel.chapter}.{ebd_kapitel.section}.{ebd_kapitel.subsection}: {ebd_kapitel.section_title}",
+                    role="N/A",
+                    remark=docx_tables.remark,
+                )
+                ebd_table = EbdTable(metadata=ebd_meta_data, rows=[])
+
+            else:
+                converter = DocxTableConverter(
+                    docx_tables,
+                    ebd_key=ebd_key,
+                    ebd_name=ebd_kapitel.subsection_title,
+                    chapter=ebd_kapitel.chapter_title,  # type:ignore[arg-type]
+                    # pylint:disable=line-too-long
+                    section=f"{ebd_kapitel.chapter}.{ebd_kapitel.section}.{ebd_kapitel.subsection}: {ebd_kapitel.section_title}",
+                )
+                ebd_table = converter.convert_docx_tables_to_ebd_table()
         except Exception as scraping_error:  # pylint:disable=broad-except
             click.secho(f"Error while scraping {ebd_key}: {str(scraping_error)}; Skip!", fg="red")
             continue
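This hunk is the heart of the PR: the separate early-exit path for table-less sections disappears (along with the removed convert_empty_table_to_graph import), and both cases are normalized to an EbdTable before any export happens, so every later step works on one type. A condensed sketch of that pattern as a hypothetical helper — the function name, the trimmed argument list, and the EbdNoTableSection import path are illustrative, not part of the PR:

from ebdamame import EbdNoTableSection  # import path assumed
from ebdamame.docxtableconverter import DocxTableConverter
from rebdhuhn.models.ebd_table import EbdTable, EbdTableMetaData


def to_ebd_table(docx_tables, ebd_key: str, metadata: EbdTableMetaData) -> EbdTable:
    """Hypothetical helper mirroring the diff: every section yields an EbdTable."""
    if isinstance(docx_tables, EbdNoTableSection):
        # no decision table in the .docx: keep the metadata, leave the rows empty
        return EbdTable(metadata=metadata, rows=[])
    converter = DocxTableConverter(
        docx_tables,
        ebd_key=ebd_key,
        ebd_name=metadata.ebd_name,
        chapter=metadata.chapter,
        section=metadata.section,
    )
    return converter.convert_docx_tables_to_ebd_table()

Downstream, the unchanged graphing code calls convert_table_to_graph(ebd_table) for both cases, so rebdhuhn >= 0.14.3 evidently accepts tables with empty rows there as well.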
@@ -222,21 +183,24 @@ def handle_known_error(error: Exception, ebd_key: str) -> None:
             click.secho(f"Error while graphing {ebd_key}: {str(unknown_error)}; Skip!", fg="red")
             continue
         if "puml" in export_types:
-            try:
-                puml_path = output_path / Path(f"{ebd_key}.puml")
-                _dump_puml(puml_path, ebd_graph)
-                click.secho(f"💾 Successfully exported '{ebd_key}.puml' to {puml_path.absolute()}")
-            except AssertionError as assertion_error:
-                # https://github.com/Hochfrequenz/rebdhuhn/issues/35
-                click.secho(str(assertion_error), fg="red")
-            except (
-                NotExactlyTwoOutgoingEdgesError,
-                GraphTooComplexForPlantumlError,
-                KrokiPlantUmlBadRequestError,
-            ) as known_issue:
-                handle_known_error(known_issue, ebd_key)
-            except Exception as general_error:  # pylint:disable=broad-exception-caught
-                click.secho(f"Error while exporting {ebd_key} as UML: {str(general_error)}; Skip!", fg="yellow")
+            if not any(ebd_table.rows):
+                click.secho(f"EBD {ebd_key} has no ebd table; Skip puml creation!", fg="yellow")
+            else:
+                try:
+                    puml_path = output_path / Path(f"{ebd_key}.puml")
+                    _dump_puml(puml_path, ebd_graph)
+                    click.secho(f"💾 Successfully exported '{ebd_key}.puml' to {puml_path.absolute()}")
+                except AssertionError as assertion_error:
+                    # https://github.com/Hochfrequenz/rebdhuhn/issues/35
+                    click.secho(str(assertion_error), fg="red")
+                except (
+                    NotExactlyTwoOutgoingEdgesError,
+                    GraphTooComplexForPlantumlError,
+                    KrokiPlantUmlBadRequestError,
+                ) as known_issue:
+                    handle_known_error(known_issue, ebd_key)
+                except Exception as general_error:  # pylint:disable=broad-exception-caught
+                    click.secho(f"Error while exporting {ebd_key} as UML: {str(general_error)}; Skip!", fg="yellow")

         try:
             if "dot" in export_types: