From 4f57b75a977b080390b1c9ab6a0ff25cb9f4fc3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?C=C3=A9dric=20Traizet?= Date: Thu, 17 Oct 2024 16:01:52 +0200 Subject: [PATCH 01/13] Revert "doc: update for master" This reverts commit 1abe1ebb852e8b4e75224d515ea165391613ed6c. --- docs/source/exploring_the_field/3d_products.rst | 6 +++--- docs/source/getting_started.rst | 10 +++++----- docs/source/howto.rst | 2 +- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/source/exploring_the_field/3d_products.rst b/docs/source/exploring_the_field/3d_products.rst index 3d0bbd86..9ef0007d 100644 --- a/docs/source/exploring_the_field/3d_products.rst +++ b/docs/source/exploring_the_field/3d_products.rst @@ -9,7 +9,7 @@ These two products can be visualized with `QGIS ` .. |dsm| image:: ../images/dsm.png :width: 100% -.. |color| image:: ../images/clr.png +.. |clr| image:: ../images/clr.png :width: 100% .. |dsmclr| image:: ../images/dsm_clr.png :width: 100% @@ -17,7 +17,7 @@ These two products can be visualized with `QGIS ` :width: 100% +--------------+-------------+-------------+-------------------+ -| dsm.tif | color.tif | `QGIS`_ Mix | cloudcompare | +| dsm.tif | clr.tif | `QGIS`_ Mix | cloudcompare | +--------------+-------------+-------------+-------------------+ -| |dsm| | |color| | |dsmclr| | |pc| | +| |dsm| | |clr| | |dsmclr| | |pc| | +--------------+-------------+-------------+-------------------+ diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst index 05aa3fdb..75ee25d2 100644 --- a/docs/source/getting_started.rst +++ b/docs/source/getting_started.rst @@ -73,19 +73,19 @@ Getting Started * Go to the ``data_gizeh/outresults/`` output directory to get a :term:`DSM` and color image associated. -Open the ``dsm.tif`` DSM and ``color.tif`` color image in `QGIS`_ software. +Open the ``dsm.tif`` DSM and ``clr.tif`` color image in `QGIS`_ software. .. |dsm| image:: images/dsm.png :width: 100% -.. |color| image:: images/clr.png +.. |clr| image:: images/clr.png :width: 100% -.. |dsmcolor| image:: images/dsm_clr.png +.. |dsmclr| image:: images/dsm_clr.png :width: 100% +--------------+-------------+-------------+ -| dsm.tif | color.tif | `QGIS`_ Mix | +| dsm.tif | clr.tif | `QGIS`_ Mix | +--------------+-------------+-------------+ -| |dsm| | |color| | |dsmcolor| | +| |dsm| | |clr| | |dsmclr| | +--------------+-------------+-------------+ .. _`QGIS`: https://www.qgis.org/ diff --git a/docs/source/howto.rst b/docs/source/howto.rst index 39665249..079cf43e 100644 --- a/docs/source/howto.rst +++ b/docs/source/howto.rst @@ -128,7 +128,7 @@ Convert RGB image to panchromatic image CARS only uses panchromatic images for processing. -If you have a multi-spectral image, you'll need to extract a single band to use, or convert it to a panchromatic image before using it with CARS. +If you have a multi-spectral image, you'll need to convert it to a panchromatic image before using it with CARS. The line below use `"Grayscale Using Luminance" `_ expression with `OTB BandMath `_ From a47c1f63ba14be8db655732806bd2be8cfe67ec5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?C=C3=A9dric=20Traizet?= Date: Thu, 17 Oct 2024 16:01:58 +0200 Subject: [PATCH 02/13] Revert "feat: use only one dockerfile, for production and development" This reverts commit 1f25356aa2445220eb1fb91f09f480092a79afbd. 
--- Dockerfile | 19 ++++-------- Dockerfile.local | 38 ++++++++++++++++++++++++ Makefile | 10 ------- docs/source/contributing_the_project.rst | 4 +-- 4 files changed, 45 insertions(+), 26 deletions(-) create mode 100644 Dockerfile.local diff --git a/Dockerfile b/Dockerfile index 76110150..b797acc4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -16,25 +16,16 @@ RUN apt-get update && apt-get install --no-install-recommends -y --quiet \ && rm -rf /var/lib/apt/lists/* # copy and install cars with mccnn plugin capabilities installed (but not configured by default) -WORKDIR /app +WORKDIR /cars +COPY . /cars/ - -# Create a virtual environment -RUN python3 -m venv /app/venv +# Install fiona and rasterio with gdal / proj from otb +RUN make clean && make install-gdal # source venv/bin/activate in docker mode -ENV VIRTUAL_ENV='/app/venv' +ENV VIRTUAL_ENV='/cars/venv' ENV PATH="$VIRTUAL_ENV/bin:$PATH" - -# Copy only necessary files for installation -COPY . /app/cars - -# Install fiona and rasterio with gdal / proj from otb -WORKDIR /app/cars -RUN CARS_VENV=$VIRTUAL_ENV make clean && CARS_VENV=$VIRTUAL_ENV make install-gdal-dev - - # hadolint ignore=DL3013,SC2102 RUN python -m pip cache purge diff --git a/Dockerfile.local b/Dockerfile.local new file mode 100644 index 00000000..97bcfd5c --- /dev/null +++ b/Dockerfile.local @@ -0,0 +1,38 @@ +# Use the same base image +# hadolint ignore=DL3007 +FROM orfeotoolbox/otb:latest +LABEL maintainer="CNES" + +# Install dependencies +# hadolint ignore=DL3008 +RUN apt-get update && apt-get install --no-install-recommends -y --quiet \ + git \ + libpython3.8 \ + python3.8-dev \ + python3.8-venv \ + python3.8 \ + python3-pip \ + python3-numpy \ + python3-virtualenv \ + make \ + && rm -rf /var/lib/apt/lists/* + +# Set up working directory +WORKDIR /app + +# Create a virtual environment +RUN python3 -m venv /app/venv + +# Activate the virtual environment +ENV VIRTUAL_ENV='/app/venv' +ENV PATH="$VIRTUAL_ENV/bin:$PATH" + +# Copy only necessary files for installation +COPY . 
/app/cars + +WORKDIR /app/cars +# Install CARS using make +RUN make clean && make install-dev + +# launch cars +CMD ["/bin/bash"] diff --git a/Makefile b/Makefile index 3fb68874..4b757ffd 100644 --- a/Makefile +++ b/Makefile @@ -86,15 +86,6 @@ install-gdal: install-deps-gdal ## install cars (not editable) with dev, docs, n @echo "CARS ${CARS_VERSION} installed in dev mode in virtualenv ${CARS_VENV}" @echo "CARS venv usage: source ${CARS_VENV}/bin/activate; cars -h" -.PHONY: install-gdal-dev -install-gdal-dev: install-deps-gdal ## install cars dev (editable) with dev, docs, notebook dependencies - @test -f ${CARS_VENV}/bin/cars || ${CARS_VENV}/bin/pip install .[dev,docs,notebook,pandora_mccnn] - @test -f .git/hooks/pre-commit || echo " Install pre-commit hook" - @test -f .git/hooks/pre-commit || ${CARS_VENV}/bin/pre-commit install -t pre-commit - @test -f .git/hooks/pre-push || ${CARS_VENV}/bin/pre-commit install -t pre-push - @echo "CARS ${CARS_VERSION} installed in dev mode in virtualenv ${CARS_VENV}" - @echo "CARS venv usage: source ${CARS_VENV}/bin/activate; cars -h" - .PHONY: install-pandora-mccnn install-pandora-mccnn: install-deps ## install cars (not editable) with dev, docs, notebook dependencies @test -f ${CARS_VENV}/bin/cars || ${CARS_VENV}/bin/pip install .[dev,docs,notebook,pandora_mccnn] @@ -235,7 +226,6 @@ clean: clean-venv clean-build clean-precommit clean-pyc clean-test clean-docs cl .PHONY: clean-venv clean-venv: @echo "+ $@" - @echo ${CARS_VENV} @rm -rf ${CARS_VENV} .PHONY: clean-build diff --git a/docs/source/contributing_the_project.rst b/docs/source/contributing_the_project.rst index 0fdfd2c2..f951c1b5 100644 --- a/docs/source/contributing_the_project.rst +++ b/docs/source/contributing_the_project.rst @@ -54,8 +54,8 @@ To setup a development environment with docker, run the following command: .. code-block:: console - docker build -t cars-dev -f Dockerfile . - docker run -it -v "$(pwd)":/app/cars --entrypoint=/bin/bash cars-dev + docker build -t cars-dev -f Dockerfile.local . + docker run -it -v "$(pwd)":/app/cars -w /app/cars cars-dev /bin/bash You're ready to use CARS, all files in the current directory are mounted in the container. From 8202b386f859e2f3aea1d767ac89ea1ce2f19dd5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?C=C3=A9dric=20Traizet?= Date: Thu, 17 Oct 2024 16:02:06 +0200 Subject: [PATCH 03/13] =?UTF-8?q?Revert=20"Chaine=20low=20cost:=20sans=20v?= =?UTF-8?q?alidation=20crois=C3=A9e"?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This reverts commit c77d9ee1e5b982013b0fbe03a27da699ec20b353. 
--- .../dense_matching/census_mccnn_sgm.py | 6 -- .../loaders/config_census_sgm.json | 4 ++ .../dense_matching/loaders/config_mccnn.json | 4 ++ .../dense_matching/loaders/pandora_loader.py | 15 ----- docs/source/usage.rst | 6 -- .../dense_matching/test_pandora_loader.py | 63 ------------------- tests/test_end2end.py | 26 -------- 7 files changed, 8 insertions(+), 116 deletions(-) diff --git a/cars/applications/dense_matching/census_mccnn_sgm.py b/cars/applications/dense_matching/census_mccnn_sgm.py index 947af58b..a274fbc1 100644 --- a/cars/applications/dense_matching/census_mccnn_sgm.py +++ b/cars/applications/dense_matching/census_mccnn_sgm.py @@ -108,7 +108,6 @@ def __init__(self, conf=None): self.disp_range_propagation_filter_size = self.used_config[ "disp_range_propagation_filter_size" ] - self.use_cross_validation = self.used_config["use_cross_validation"] # Saving files self.save_intermediate_data = self.used_config["save_intermediate_data"] @@ -172,9 +171,6 @@ def check_conf(self, conf): overloaded_conf["perf_ambiguity_threshold"] = conf.get( "perf_ambiguity_threshold", 0.6 ) - overloaded_conf["use_cross_validation"] = conf.get( - "use_cross_validation", False - ) # Margins computation parameters overloaded_conf["use_global_disp_range"] = conf.get( "use_global_disp_range", False @@ -217,7 +213,6 @@ def check_conf(self, conf): perf_eta_max_ambiguity=overloaded_conf["perf_eta_max_ambiguity"], perf_eta_max_risk=overloaded_conf["perf_eta_max_risk"], perf_eta_step=overloaded_conf["perf_eta_step"], - use_cross_validation=overloaded_conf["use_cross_validation"], ) overloaded_conf["loader"] = loader overloaded_conf["loader_conf"] = loader_conf @@ -242,7 +237,6 @@ def check_conf(self, conf): "perf_eta_max_risk": float, "perf_eta_step": float, "perf_ambiguity_threshold": float, - "use_cross_validation": bool, "use_global_disp_range": bool, "local_disp_grid_step": int, "disp_range_propagation_filter_size": And( diff --git a/cars/applications/dense_matching/loaders/config_census_sgm.json b/cars/applications/dense_matching/loaders/config_census_sgm.json index e5bf070b..f9ac772a 100644 --- a/cars/applications/dense_matching/loaders/config_census_sgm.json +++ b/cars/applications/dense_matching/loaders/config_census_sgm.json @@ -26,6 +26,10 @@ "filter": { "filter_method": "median", "filter_size": 3 + }, + "validation": { + "validation_method": "cross_checking_accurate", + "cross_checking_threshold": 1.0 } } } \ No newline at end of file diff --git a/cars/applications/dense_matching/loaders/config_mccnn.json b/cars/applications/dense_matching/loaders/config_mccnn.json index 6ab394a5..14a8605a 100644 --- a/cars/applications/dense_matching/loaders/config_mccnn.json +++ b/cars/applications/dense_matching/loaders/config_mccnn.json @@ -23,6 +23,10 @@ "filter" : { "filter_method": "median", "filter_size": 3 + }, + "validation" : { + "validation_method": "cross_checking_accurate", + "cross_checking_threshold": 1 } } } \ No newline at end of file diff --git a/cars/applications/dense_matching/loaders/pandora_loader.py b/cars/applications/dense_matching/loaders/pandora_loader.py index 09442bf6..2e3442b9 100644 --- a/cars/applications/dense_matching/loaders/pandora_loader.py +++ b/cars/applications/dense_matching/loaders/pandora_loader.py @@ -58,7 +58,6 @@ def __init__( # noqa: C901 perf_eta_max_ambiguity=0.99, perf_eta_max_risk=0.25, perf_eta_step=0.04, - use_cross_validation=False, ): """ Init function of PandoraLoader @@ -71,11 +70,8 @@ def __init__( # noqa: C901 :type conf: dict :param method_name: name 
of method to use :param performance_map_conf: true if generate performance maps - :param use_cross_validation: true to add crossvalidation """ - if method_name is None: - method_name = "census_sgm" self.pandora_config = None @@ -145,13 +141,6 @@ def __init__( # noqa: C901 "confidence_method": "interval_bounds", } } - # Cross validation - cross_validation_conf = { - "validation": { - "validation_method": "cross_checking_accurate", - "cross_checking_threshold": 1.0, - } - } confidences = {} if generate_performance_map: @@ -186,10 +175,6 @@ def __init__( # noqa: C901 conf["pipeline"], confidences ) - # update with cross validation - if use_cross_validation and "validation" not in conf["pipeline"]: - conf["pipeline"].update(cross_validation_conf) - if generate_confidence_intervals: # To ensure the consistency between the disparity map # and the intervals, the median filter for intervals diff --git a/docs/source/usage.rst b/docs/source/usage.rst index 7f7c7a2f..355feef4 100644 --- a/docs/source/usage.rst +++ b/docs/source/usage.rst @@ -1115,12 +1115,6 @@ The structure follows this organisation: - should be > 0 - 300 - No - * - use_cross_validation - - Add cross validation step - - bool - - - - false - - No See `Pandora documentation `_ for more information. diff --git a/tests/applications/dense_matching/test_pandora_loader.py b/tests/applications/dense_matching/test_pandora_loader.py index b5a0e9b2..4037a776 100644 --- a/tests/applications/dense_matching/test_pandora_loader.py +++ b/tests/applications/dense_matching/test_pandora_loader.py @@ -25,8 +25,6 @@ # Standard imports # Third party imports -import copy - import pytest # CARS imports @@ -97,67 +95,6 @@ def test_configure_pandora_config(): assert corr_config["pipeline"]["optimization"]["penalty"]["P2"] == 24 -@pytest.mark.unit_tests -def test_configure_cross_validation(): - """ - Test configure pandora correlator cross validation - """ - - pandora_config = { - "input": {"nodata_left": "NaN", "nodata_right": "NaN"}, - "pipeline": { - "right_disp_map": {"method": "accurate"}, - "matching_cost": { - "matching_cost_method": "census", - "window_size": 5, - "subpix": 1, - }, - "optimization": { - "optimization_method": "sgm", - "penalty": { - "P1": 8, - "P2": 24, - "p2_method": "constant", - "penalty_method": "sgm_penalty", - }, - "overcounting": False, - "min_cost_paths": False, - }, - "disparity": { - "disparity_method": "wta", - "invalid_disparity": "NaN", - }, - "refinement": {"refinement_method": "vfit"}, - "filter": {"filter_method": "median", "filter_size": 3}, - }, - } - - # test 1, validation as input already - conf_with_validation = copy.deepcopy(pandora_config) - conf_with_validation["pipeline"].update( - {"validation": {"validation_method": "cross_checking"}} - ) - pandora_loader = PandoraLoader( - conf=conf_with_validation, use_cross_validation=False - ) - corr_config = pandora_loader.get_conf() - assert "validation" in corr_config["pipeline"] - - # test 2: no validation as input, add it - pandora_loader = PandoraLoader( - conf=copy.deepcopy(pandora_config), use_cross_validation=True - ) - corr_config = pandora_loader.get_conf() - assert "validation" in corr_config["pipeline"] - - # test 3: no validation as input, do not add it - pandora_loader = PandoraLoader( - conf=copy.deepcopy(pandora_config), use_cross_validation=False - ) - corr_config = pandora_loader.get_conf() - assert "validation" not in corr_config["pipeline"] - - @pytest.mark.unit_tests def test_overload_pandora_conf_with_confidence(): """ diff --git 
a/tests/test_end2end.py b/tests/test_end2end.py index 01a83115..d66841ea 100644 --- a/tests/test_end2end.py +++ b/tests/test_end2end.py @@ -93,7 +93,6 @@ def test_end2end_gizeh_rectangle_epi_image_performance_map(): "grid_generation": {"method": "epipolar", "epi_step": 30}, "dense_matching": { "method": "census_sgm", - "use_cross_validation": True, "use_global_disp_range": True, }, "point_cloud_rasterization": { @@ -633,7 +632,6 @@ def test_end2end_ventoux_unique(): }, "dense_matching": { "method": "census_sgm", - "use_cross_validation": True, "use_global_disp_range": False, "loader_conf": { "input": {}, @@ -1031,7 +1029,6 @@ def test_end2end_ventoux_unique(): dense_dsm_applications = { "dense_matching": { "method": "census_sgm", - "use_cross_validation": True, "use_global_disp_range": False, }, "point_cloud_outliers_removing.1": { @@ -1105,12 +1102,6 @@ def test_end2end_ventoux_unique_split_epsg_4326(): "max_ram_per_worker": 1000, }, ) - input_config_pc["applications"] = { - "dense_matching": { - "method": "census_sgm", - "use_cross_validation": True, - }, - } pc_pipeline = sensor_to_dense_dsm.SensorToDenseDsmPipeline( input_config_pc ) @@ -1326,7 +1317,6 @@ def test_end2end_ventoux_unique_split(): }, "dense_matching": { "method": "census_sgm", - "use_cross_validation": True, "use_global_disp_range": False, "save_intermediate_data": True, "generate_confidence_intervals": False, @@ -2232,7 +2222,6 @@ def test_end2end_use_epipolar_a_priori(): }, "dense_matching": { "method": "census_sgm", - "use_cross_validation": True, "use_global_disp_range": False, }, } @@ -2453,10 +2442,6 @@ def test_end2end_ventoux_full_output_no_elevation(): "disparity_margin": 0.25, "save_intermediate_data": True, }, - "dense_matching": { - "method": "census_sgm", - "use_cross_validation": True, - }, } advanced_config = {"save_intermediate_data": True} @@ -2855,7 +2840,6 @@ def test_end2end_ventoux_with_color(): }, "dense_matching": { "method": "census_sgm", - "use_cross_validation": True, "loader": "pandora", "save_intermediate_data": True, "use_global_disp_range": False, @@ -3115,7 +3099,6 @@ def test_end2end_ventoux_with_classif(): }, "dense_matching": { "method": "census_sgm", - "use_cross_validation": True, "loader": "pandora", "save_intermediate_data": True, "use_global_disp_range": False, @@ -3291,7 +3274,6 @@ def test_compute_dsm_with_roi_ventoux(): "resampling": {"method": "bicubic", "strip_height": 80}, "dense_matching": { "method": "census_sgm", - "use_cross_validation": True, "use_global_disp_range": False, }, "sparse_matching": { @@ -3453,7 +3435,6 @@ def test_compute_dsm_with_snap_to_img1(): }, "dense_matching": { "method": "census_sgm", - "use_cross_validation": True, "use_global_disp_range": False, }, "triangulation": { @@ -3573,7 +3554,6 @@ def test_end2end_quality_stats(): }, "dense_matching": { "method": "census_sgm", - "use_cross_validation": True, "use_global_disp_range": False, }, "point_cloud_outliers_removing.1": { @@ -3867,7 +3847,6 @@ def test_end2end_ventoux_egm96_geoid(): }, "dense_matching": { "method": "census_sgm", - "use_cross_validation": True, "use_global_disp_range": False, }, "triangulation": {"method": "line_of_sight_intersection"}, @@ -4000,7 +3979,6 @@ def test_end2end_ventoux_egm96_geoid(): }, "dense_matching": { "method": "census_sgm", - "use_cross_validation": True, "use_global_disp_range": False, }, "triangulation": {"method": "line_of_sight_intersection"}, @@ -4091,7 +4069,6 @@ def test_end2end_ventoux_egm96_geoid(): }, "dense_matching": { "method": "census_sgm", - 
"use_cross_validation": True, "use_global_disp_range": False, }, "triangulation": {"method": "line_of_sight_intersection"}, @@ -4235,7 +4212,6 @@ def test_end2end_paca_with_mask(): }, "dense_matching": { "method": "census_sgm", - "use_cross_validation": True, "use_global_disp_range": False, }, "dense_matches_filling.2": { @@ -4347,7 +4323,6 @@ def test_end2end_disparity_filling(): dense_dsm_applications = { "dense_matching": { "method": "census_sgm", - "use_cross_validation": True, "min_epi_tile_size": 100, "save_intermediate_data": True, "use_global_disp_range": False, @@ -4480,7 +4455,6 @@ def test_end2end_disparity_filling_with_zeros(): dense_dsm_applications = { "dense_matching": { "method": "census_sgm", - "use_cross_validation": True, "save_intermediate_data": True, "use_global_disp_range": True, }, From b67e2b18f64aab2b492331f60e24e8204c9d00df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?C=C3=A9dric=20Traizet?= Date: Thu, 17 Oct 2024 16:02:29 +0200 Subject: [PATCH 04/13] Revert "feat(docker): create docker dev environment" This reverts commit 2abc142e6cb0001e4e33940852015e09a6571394. --- Dockerfile.local | 38 ------------------------ docs/source/contributing_the_project.rst | 23 +++----------- 2 files changed, 4 insertions(+), 57 deletions(-) delete mode 100644 Dockerfile.local diff --git a/Dockerfile.local b/Dockerfile.local deleted file mode 100644 index 97bcfd5c..00000000 --- a/Dockerfile.local +++ /dev/null @@ -1,38 +0,0 @@ -# Use the same base image -# hadolint ignore=DL3007 -FROM orfeotoolbox/otb:latest -LABEL maintainer="CNES" - -# Install dependencies -# hadolint ignore=DL3008 -RUN apt-get update && apt-get install --no-install-recommends -y --quiet \ - git \ - libpython3.8 \ - python3.8-dev \ - python3.8-venv \ - python3.8 \ - python3-pip \ - python3-numpy \ - python3-virtualenv \ - make \ - && rm -rf /var/lib/apt/lists/* - -# Set up working directory -WORKDIR /app - -# Create a virtual environment -RUN python3 -m venv /app/venv - -# Activate the virtual environment -ENV VIRTUAL_ENV='/app/venv' -ENV PATH="$VIRTUAL_ENV/bin:$PATH" - -# Copy only necessary files for installation -COPY . /app/cars - -WORKDIR /app/cars -# Install CARS using make -RUN make clean && make install-dev - -# launch cars -CMD ["/bin/bash"] diff --git a/docs/source/contributing_the_project.rst b/docs/source/contributing_the_project.rst index f951c1b5..2242e1db 100644 --- a/docs/source/contributing_the_project.rst +++ b/docs/source/contributing_the_project.rst @@ -46,21 +46,6 @@ Particularly, it uses the following pip editable install: With this pip install mode, source code modifications directly impacts ``cars`` command line. - -Setting up a development environment with docker -================================================ - -To setup a development environment with docker, run the following command: - -.. code-block:: console - - docker build -t cars-dev -f Dockerfile.local . - docker run -it -v "$(pwd)":/app/cars -w /app/cars cars-dev /bin/bash - -You're ready to use CARS, all files in the current directory are mounted in the container. - - - Coding guide ============ @@ -70,8 +55,8 @@ Here are some rules to apply when developing a new functionality: * **Test**: Each new functionality shall have a corresponding test in its module's test file. This test shall, if possible, check the function's outputs and the corresponding degraded cases. * **Documentation**: All functions shall be documented (object, parameters, return values). 
* **Use type hints**: Use the type hints provided by the `typing` python module. -* **Use doctype**: Follow sphinx default doctype for automatic API. -* **Quality code**: Correct project quality code errors with pre-commit automatic workflow (see below). +* **Use doctype**: Follow sphinx default doctype for automatic API +* **Quality code**: Correct project quality code errors with pre-commit automatic workflow (see below) * **Factorization**: Factorize the code as much as possible. The command line tools shall only include the main workflow and rely on the cars python modules. * **Be careful with user interface upgrade:** If major modifications of the user interface or of the tool's behaviour are done, update the user documentation (and the notebooks if necessary). * **Logging and no print**: The usage of the `print()` function is forbidden: use the `logging` python standard module instead. @@ -122,7 +107,7 @@ Jupyter notebooks CARS contains notebooks in tutorials directory. -To generate a `Jupyter kernel `_ with CARS installation, use: +To generate a Jupyter kernel with CARS installation, use: .. code-block:: console @@ -195,7 +180,7 @@ If necessary, Black doesn’t reformat blocks that start with "# fmt: off" and e Flake8 ------ -`Flake8`_ is a command-line utility for enforcing style consistency across Python projects. By default it includes lint checks provided by the `PyFlakes project `_ , PEP-0008 inspired style checks provided by the `PyCodeStyle project `_ , and McCabe complexity checking provided by the `McCabe project `_. It will also run third-party extensions if they are found and installed. +`Flake8`_ is a command-line utility for enforcing style consistency across Python projects. By default it includes lint checks provided by the PyFlakes project, PEP-0008 inspired style checks provided by the PyCodeStyle project, and McCabe complexity checking provided by the McCabe project. It will also run third-party extensions if they are found and installed. CARS ``flake8`` configuration is done in `setup.cfg `_ From 33057e7727b238171194f497459ff1c0f922ca04 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?C=C3=A9dric=20Traizet?= Date: Thu, 17 Oct 2024 16:02:37 +0200 Subject: [PATCH 05/13] Revert "fix(doc): update documentation" This reverts commit 2bd83246692e20cf7c603e8bc860705d72745bba. --- .../exploring_the_field/3d_products.rst | 10 +- .../from_satellite_images_to_dsm.rst | 4 +- .../masks_and_classifications_usage.rst | 5 +- docs/source/getting_started.rst | 28 +- docs/source/howto.rst | 25 +- docs/source/software_design/orchestrator.rst | 4 +- docs/source/software_design/plugin.rst | 12 +- docs/source/troubleshooting_and_faqs.rst | 2 +- docs/source/usage.rst | 507 +++++++++--------- tutorials/data_gizeh.tar.bz2.md5sum | 2 +- tutorials/data_gizeh_small.tar.bz2.md5sum | 2 +- 11 files changed, 278 insertions(+), 323 deletions(-) diff --git a/docs/source/exploring_the_field/3d_products.rst b/docs/source/exploring_the_field/3d_products.rst index 9ef0007d..7127ad0d 100644 --- a/docs/source/exploring_the_field/3d_products.rst +++ b/docs/source/exploring_the_field/3d_products.rst @@ -2,14 +2,14 @@ =========== | CARS produces a geotiff file named ``dsm.tif`` that contains the Digital Surface Model in the required cartographic projection and the ground sampling distance defined by the user. -| If the user provides an additional input image, an ortho-image ``clr.tif`` is also produced. The latter is stackable to the DSM (See :ref:`getting_started`). 
-| If the user saves points clouds as `laz format `_ (point_cloud_fusion, point_cloud_outliers_removing, point_cloud_rasterization), the points clouds are saved in laz compressed format with colors or graylevel image. +| If the user provides an additional input image, an ortho-image ``color.tif`` is also produced. The latter is stackable to the DSM (See :ref:`getting_started`). +| If the user saves points clouds as laz format (point_cloud_fusion, point_cloud_outliers_removing, point_cloud_rasterization), the points clouds are saved in laz compressed format with colors or graylevel image. These two products can be visualized with `QGIS `_ for example. .. |dsm| image:: ../images/dsm.png :width: 100% -.. |clr| image:: ../images/clr.png +.. |color| image:: ../images/clr.png :width: 100% .. |dsmclr| image:: ../images/dsm_clr.png :width: 100% @@ -17,7 +17,7 @@ These two products can be visualized with `QGIS ` :width: 100% +--------------+-------------+-------------+-------------------+ -| dsm.tif | clr.tif | `QGIS`_ Mix | cloudcompare | +| dsm.tif | color.tif | `QGIS`_ Mix | cloudcompare | +--------------+-------------+-------------+-------------------+ -| |dsm| | |clr| | |dsmclr| | |pc| | +| |dsm| | |color| | |dsmclr| | |pc| | +--------------+-------------+-------------+-------------------+ diff --git a/docs/source/exploring_the_field/from_satellite_images_to_dsm.rst b/docs/source/exploring_the_field/from_satellite_images_to_dsm.rst index 54010143..ee84ec4b 100644 --- a/docs/source/exploring_the_field/from_satellite_images_to_dsm.rst +++ b/docs/source/exploring_the_field/from_satellite_images_to_dsm.rst @@ -64,8 +64,7 @@ Generate a DSM step by step +--------------------------+---------------------------------------------+ | For each point in one image, the software searches the corresponding point in the other image. -| The color of the pixels (grayscale) in the image :ref:`below` corresponds to the shift value. Some pixels do not have a match, which are represented as transparent pixels in the image. These matching errors can occur due to various reasons such as moving objects, shadows, occlusions, or areas with insufficient texture. -| The transparent pixels indicate areas where the matching algorithm couldn't find a reliable correspondence between the two images, highlighting regions of uncertainty in the matching process. +| The color of the pixels (grayscale) in the image :ref:`below` corresponds to the shift value. Some pixels do not have a match (matching error due to moving objects, shadows etc.). .. _matching: @@ -82,7 +81,6 @@ Generate a DSM step by step | The displacements obtained are transformed into positions in both images. | This allows to deduce lines of sight. The intersection of these lines gives a point in space: longitude, latitude, altitude (see :ref:`below`). -| A line of sight is an imaginary straight line from the camera's perspective through a specific point in the image, extending into 3D space. It represents all possible 3D positions that could have produced that image point. .. _triangulation: diff --git a/docs/source/exploring_the_field/masks_and_classifications_usage.rst b/docs/source/exploring_the_field/masks_and_classifications_usage.rst index 2f98dcd8..fcf0a3b3 100644 --- a/docs/source/exploring_the_field/masks_and_classifications_usage.rst +++ b/docs/source/exploring_the_field/masks_and_classifications_usage.rst @@ -2,9 +2,8 @@ Mask and Classification Usage ============================= | Photogrammetry is a technique that cannot reproduce altitude on water. 
This technique also has difficulties for moving elements or in shaded areas. -| For this reason, it is possible to mask out areas or apply ad hoc processing to aid the matching stage (see :ref:`mask_and_classification_usage`). +| For this reason, it is possible to mask out areas or apply ad hoc processing to aid the matching stage. -.. _mask_and_classification_usage: Masks ----- @@ -23,4 +22,4 @@ Classification | Please, see the section :ref:`convert_image_to_binary_image` to make a multiband binary image with 1 bit per band. | All non-zeros values of the classification image will be considered as invalid data. -| The classification can be used in each application by band name list selection parameter. See application ``classification`` parameter :ref:`configuration`. +| The classification can be used in each application by band name list selection parameter. See application ``classification`` parameter :ref:`configuration`.. diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst index 75ee25d2..6bc4d9b3 100644 --- a/docs/source/getting_started.rst +++ b/docs/source/getting_started.rst @@ -14,18 +14,6 @@ Getting Started pip install cars -* Alternatively, you can use the provided Dockerfile to build a Docker image for CARS: - -.. code-block:: console - - # Clone the CARS repository - git clone https://github.com/CNES/cars.git - cd cars - - # Build the Docker image - docker build -t cars:latest . - - * Get and extract data samples from CARS repository: .. code-block:: console @@ -73,19 +61,19 @@ Getting Started * Go to the ``data_gizeh/outresults/`` output directory to get a :term:`DSM` and color image associated. -Open the ``dsm.tif`` DSM and ``clr.tif`` color image in `QGIS`_ software. +Open the ``dsm.tif`` DSM and ``color.tif`` color image in `QGIS`_ software. .. |dsm| image:: images/dsm.png :width: 100% -.. |clr| image:: images/clr.png +.. |color| image:: images/clr.png :width: 100% -.. |dsmclr| image:: images/dsm_clr.png +.. |dsmcolor| image:: images/dsm_clr.png :width: 100% -+--------------+-------------+-------------+ -| dsm.tif | clr.tif | `QGIS`_ Mix | -+--------------+-------------+-------------+ -| |dsm| | |clr| | |dsmclr| | -+--------------+-------------+-------------+ ++--------------+-----------------+---------------+ +| dsm.tif | color.tif | `QGIS`_ Mix | ++--------------+-----------------+---------------+ +| |dsm| | |color| | |dsmcolor| | ++--------------+-----------------+---------------+ .. _`QGIS`: https://www.qgis.org/ diff --git a/docs/source/howto.rst b/docs/source/howto.rst index 079cf43e..1c73097e 100644 --- a/docs/source/howto.rst +++ b/docs/source/howto.rst @@ -26,7 +26,7 @@ Example files are available here: https://intelligence.airbus.com/imagery/sample Maxar WorldView example files ----------------------------- -| Example files are available on AWS S3 through the SpaceNet challenge here: `s3://spacenet-dataset/Hosted-Datasets/MVS_dataset/WV3/PAN/` +| Example files are available on AWS S3 through the SpaceNet challenge here: s3://spacenet-dataset/Hosted-Datasets/MVS_dataset/WV3/PAN/. | You need to install `aws-cli `_: .. code-block:: console @@ -104,7 +104,6 @@ For example, if you want to monitor the computation of a CARS run: .. _make_a_simple_pan_sharpening: - Make a simple pan sharpening ---------------------------- @@ -122,24 +121,6 @@ It can be recommended to apply a P+XS pansharpening with `OTB`_. .. 
_`OTB`: https://www.orfeo-toolbox.org/CookBook-8.0/C++/UserGuide.html#image-data-representation - -Convert RGB image to panchromatic image --------------------------------------- - -CARS only uses panchromatic images for processing. - -If you have a multi-spectral image, you'll need to convert it to a panchromatic image before using it with CARS. - -The line below use `"Grayscale Using Luminance" `_ expression with `OTB BandMath `_ - - -.. code-block:: console - - otbcli_BandMath -il image.tif -out image_panchromatic.tif -exp "(0.2126 * im1b1 + 0.7152 * im1b2 + 0.0722 * im1b3)" - - - - .. _make_a_water_mask: Make a water mask @@ -160,7 +141,7 @@ See next section to apply a gdal_translate to convert the mask with 1bit image s Convert image to binary image ----------------------------- -To translate single image or multiband image with several nbits per band to 1bit per band, it can be recommended to use `gdal_translate `_ as follows: +To translate single image or multiband image with several nbits per band to 1bit per band, it can be recommended to use gdal_translate as follows: .. code-block:: console @@ -190,7 +171,7 @@ Post process output Merge Laz files --------------- -CARS generates several `laz files `_ corresponding to the tiles processed. +CARS generates several laz files corresponding to the tiles processed. Merge can be done with `laszip`_. To merge them: diff --git a/docs/source/software_design/orchestrator.rst b/docs/source/software_design/orchestrator.rst index 16aa5006..76afc599 100644 --- a/docs/source/software_design/orchestrator.rst +++ b/docs/source/software_design/orchestrator.rst @@ -7,7 +7,7 @@ Goals ----- The *orchestrator* is the central element of CARS concepts. -Its role is to ensure the communication between the *computing technology*, the *applications* and the *CarsDatasets*. +Its role is to ensure the communication between the computing technology,the *applications* and *CarsDatasets*. Details ------- @@ -73,7 +73,7 @@ The cluster is the component which allows to realize the calculations. * `start_tasks` to compute each task that have been declared. * `future_iterator`: iterate over the `future` objects -There are already 3 plugins, each one representing a mode: +There are already 4 plugins, each one representing a mode: * *dask* diff --git a/docs/source/software_design/plugin.rst b/docs/source/software_design/plugin.rst index 2df24507..76608c08 100644 --- a/docs/source/software_design/plugin.rst +++ b/docs/source/software_design/plugin.rst @@ -6,7 +6,7 @@ Plugin Geometry plugin ^^^^^^^^^^^^^^^^^ -Geometry plugins aim to enable the use of different geometry libraries, typically `libGEO `_ or `Shareloc `_ to perform CARS geometric operations which require the interpretation of the geometric models of the pairs to process. +Geometry plugins aim to enable the use of different geometry libraries, typically libGEO or Shareloc to perform CARS geometric operations which require the interpretation of the geometric models of the pairs to process. Those operation are: * The epipolar grids computation @@ -44,10 +44,10 @@ For example, if the AbstractGeometry object is defined in file `cars_geometry_pl ) Mandatory methods -++++++++++++++++++ + Currently, the `AbstractGeometry` class requires the implementation of the following mandatory methods and properties: -* `conf_schema` which specify the user inputs json schema required by the geometric library. +* `conf_schema` which specify the user inputs json schema required by the geometric library .. 
code-block:: python @@ -104,7 +104,7 @@ Currently, the `AbstractGeometry` class requires the implementation of the follo :return: the long/lat/height numpy array in output of the triangulation """ -* `generate_epipolar_grids` which generates the left and right epipolar grids from the images of the pair and their geometrical models. +* `generate_epipolar_grids` which generates the left and right epipolar grids from the images of the pair and their geometrical models .. code-block:: python @@ -137,7 +137,7 @@ Currently, the `AbstractGeometry` class requires the implementation of the follo - the disparity to altitude ratio as a float """ -* `direct_loc` which performs direct localization operations. +* `direct_loc` which performs direct localization operations .. code-block:: python @@ -170,4 +170,4 @@ Available methods Some methods are available in the `AbstractGeometry` class that might be useful for any geometry plugin which would only perform the triangulation using sensor coordinates. CARS' API only provides as inputs of the geometry plugin triangulation method the epipolar coordinates for each image of the pair. Thus the `matches_to_sensor_coords` method enables any plugin to convert those coordinates into the corresponding sensor ones. -`AbstractGeometry` implements the method `image_envelope`. It computes the ground footprint of an image in sensor geometry by projecting its four corners using the direct localization method. This method can be overloaded by any geometry plugin if necessary. +`AbstractGeometry` implements the method `image_envelope`. It computes the ground footprint of an image in sensor geometry by projecting its four corners using the direct localization method. This method can be overloaded by any geometry plugin if necessary. \ No newline at end of file diff --git a/docs/source/troubleshooting_and_faqs.rst b/docs/source/troubleshooting_and_faqs.rst index 070077f2..347f254e 100644 --- a/docs/source/troubleshooting_and_faqs.rst +++ b/docs/source/troubleshooting_and_faqs.rst @@ -64,7 +64,7 @@ Output data How to generate output files overview ? --------------------------------------- -Considering bulky files, it can be recommended to generate an overview file with `GDAL`_ before opening it with `QGIS `_: +Considering bulky files, it can be recommended to generate an overview file with `GDAL`_ before opening it with QGIS: .. code-block:: console diff --git a/docs/source/usage.rst b/docs/source/usage.rst index 355feef4..bceba942 100644 --- a/docs/source/usage.rst +++ b/docs/source/usage.rst @@ -49,7 +49,8 @@ Note that ``cars-starter`` script can be used to instantiate this configuration --full Fill all default values --check Check inputs -Finally, an output ``used_conf.json`` file will be created on the output directory. This file contains all the parameters used during execution and can be used as an input configuration file to re-run cars. +Finally, an output ``used_conf.json`` file will be created on the output directory. This file contains all the execution used +parameters and can be used as an input configuration file to re-run cars. .. _configuration: @@ -79,10 +80,10 @@ The structure follows this organisation: .. tab:: Inputs - Inputs depends on the pipeline used by CARS. CARS can be entered with Sensor Images or Point Clouds: + Inputs depends on the pipeline used by CARS. CARS can be entered with Sensor Images or Depth Maps: - * Sensor Images: used in "sensors_to_dense_dsm", "sensors_to_sparse_dsm", "sensors_to_dense_point_clouds" pipelines. 
- * Point Clouds: used in "dense_point_clouds_to_dense_dsm" pipeline. + * Sensor Images: used in "sensors_to_dense_dsm", "sensors_to_sparse_dsm", "sensors_to_dense_depth_maps" pipelines. + * Depth Maps: used in "dense_depth_maps_to_dense_dsm" pipeline. .. tabs:: @@ -96,29 +97,15 @@ The structure follows this organisation: +============================+=====================================================================+=======================+======================+==========+ | *sensor* | Stereo sensor images | See next section | No | Yes | +----------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ - | *pairing* | Association of image to create pairs | list of *sensor* | No | Yes | - +----------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ - | *epsg* | EPSG code | int, should be > 0 | None | No | + | *pairing* | Association of image to create pairs | list of *sensor* | No | Yes (*) | +----------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ | *initial_elevation* | Path to SRTM tiles (see :ref:`plugins` section for details) | string | None | No | | | If not provided, internal dem is generated with sparse matches | | | | +----------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ - | *use_endogenous_elevation* | Use endogenous eleveation intead of provided initial_elevation | bool | False | No | - | | when endogenous elevation is available | | | | - | | If no initial_elevation, endogenous elevation is always used | | | | - +----------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ | *roi* | ROI: Vector file path or GeoJson | string, dict | None | No | +----------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ - | *debug_with_roi* | Use ROI with the tiling of the entire image | Boolean | False | No | - +----------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ - | *check_inputs* | Check inputs consistency (to be deprecated and changed) | Boolean | False | No | - +----------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ - | *use_epipolar_a_priori* | Active epipolar a priori | bool | False | Yes | - +----------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ - | *epipolar_a_priori* | Provide epipolar a priori information (see section below) | dict | | No | - +----------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ - | *terrain_a_priori* | Provide terrain a priori information (see section below) | dict | | No | - +----------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ + + (*) `pairing` is required If there 
are more than two sensors (see pairing section below) **Sensor** @@ -189,6 +176,8 @@ The structure follows this organisation: } } + This attribute is required when there are more than two input sensor images. If only two images ares provided, the pairing can be deduced by cars, considering the first image defined as the left image and second image as right image. + **Initial elevation** The attribute contains all informations about initial elevation: dem path, geoid and default altitude @@ -196,16 +185,14 @@ The structure follows this organisation: +-----------------------+--------------------------------+--------+----------------------+----------------------------+ | Name | Description | Type | Default value | Required | +=======================+================================+========+======================+============================+ - | *dem_path* | Path to DEM tiles | string | None | No | + | *dem* | Path to DEM tiles | string | None | No | +-----------------------+--------------------------------+--------+----------------------+----------------------------+ | *geoid* | Geoid path | string | Cars internal geoid | No | +-----------------------+--------------------------------+--------+----------------------+----------------------------+ - | *default_alt* | Default altitude | int | 0 | No | - +-----------------------+--------------------------------+--------+----------------------+----------------------------+ If no DEM path is provided, an internal dem is generated with sparse matches. If no geoid is provided, the default cars geoid is used (egm96). - `default_alt` is the default height above ellipsoid when there is no DEM available (no coverage for some points or pixels with no_data in the DEM tiles) + When there is no DEM data available, a default height above ellipsoid of 0 is used (no coverage for some points or pixels with no_data in the DEM tiles) Initial elevation can be provided as a dictionary with a field for each parameter, for example: @@ -215,9 +202,8 @@ The structure follows this organisation: { "inputs": { "initial_elevation": { - "dem_path": "/path/to/srtm.tif", - "geoid": "/path/to/geoid.tif", - "default_alt": 30 + "dem": "/path/to/srtm.tif", + "geoid": "/path/to/geoid.tif" } } } @@ -237,74 +223,28 @@ The structure follows this organisation: Elevation management is tightly linked to the geometry plugin used. See :ref:`plugins` section for details - **Epipolar a priori** - - The epipolar is usefull to accelerate the preliminary steps of the grid correction and the disparity range evaluation, - particularly for the sensor_to_full_resolution_dsm pipeline. - The epipolar_a_priori data dict is produced during low or full resolution dsm pipeline. - However, the epipolar_a_priori should be not activated for the sensor_to_low_resolution_dsm. - So, the sensor_to_low_resolution_dsm pipeline produces a refined_conf_full_res.json in the outdir - that contains the epipolar_a_priori information for each sensor image pairs. - The epipolar_a_priori is also saved in the used_conf.json with the sensor_to_full_resolution_dsm pipeline. 
- - For each sensor images, the epipolar a priori are filled as following: - - +-----------------------+-------------------------------------------------------------+--------+----------------+----------------------------------+ - | Name | Description | Type | Default value | Required | - +=======================+=============================================================+========+================+==================================+ - | *grid_correction* | The grid correction coefficients | list | | if use_epipolar_a_priori is True | - +-----------------------+-------------------------------------------------------------+--------+----------------+----------------------------------+ - | *disparity_range* | The disparity range [disp_min, disp_max] | list | | if use_epipolar_a_priori is True | - +-----------------------+-------------------------------------------------------------+--------+----------------+----------------------------------+ + .. tab:: Depth Maps inputs - .. note:: - - The grid correction coefficients are based on bilinear model with 6 parameters [x1,x2,x3,y1,y2,y3]. - The None value produces no grid correction (equivalent to parameters [0,0,0,0,0,0]). - - - **Terrain a priori** - The terrain a priori is used at the same time that epipolar a priori. - If use_epipolar_a_priori is activated, epipolar_a_priori and terrain_a_priori must be provided. - The terrain_a_priori data dict is produced during low or full resolution dsm pipeline. - - The terrain a priori is initially populated with DEM information. - - +----------------+-------------------------------------------------------------+--------+----------------+----------------------------------+ - | Name | Description | Type | Default value | Required | - +================+=============================================================+========+================+==================================+ - | *dem_median* | DEM generated with median function | str | | if use_epipolar_a_priori is True | - +----------------+-------------------------------------------------------------+--------+----------------+----------------------------------+ - | *dem_min* | DEM generated with min function | str | | if use_epipolar_a_priori is True | - +----------------+-------------------------------------------------------------+--------+----------------+----------------------------------+ - | *dem_max* | DEM generated with max function | str | | if use_epipolar_a_priori is True | - +----------------+-------------------------------------------------------------+--------+----------------+----------------------------------+ - + +-------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ + | Name | Description | Type | Default value | Required | + +=========================+=====================================================================+=======================+======================+==========+ + | *depth_maps* | Depth maps to rasterize | dict | No | Yes | + +-------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ + | *roi* | Region Of Interest: Vector file path or GeoJson | string, dict | None | No | + +-------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ - .. 
tab:: Point Clouds inputs - +-------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ - | Name | Description | Type | Default value | Required | - +=========================+=====================================================================+=======================+======================+==========+ - | *point_clouds* | Point Clouds to rasterize | dict | No | Yes | - +-------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ - | *epsg* | EPSG code to use for DSM | int, should be > 0 | None | No | - +-------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ - | *roi* | Region Of Interest: Vector file path or GeoJson | string, dict | None | No | - +-------------------------+---------------------------------------------------------------------+-----------------------+----------------------+----------+ + **Depth Maps** - - **Point Clouds** - - For each point cloud, give a particular name (what you want): + For each depth map, give a particular name (what you want): .. code-block:: json { - "point_clouds": { - "my_name_for_this_point_cloud": + "depth_maps": { + "my_name_for_this_depth_map": { "x" : "path_to_x.tif", "y" : "path_to_y.tif", @@ -316,40 +256,39 @@ The structure follows this organisation: "confidence": { "confidence_name1": "path_to_confidence1.tif", "confidence_name2": "path_to_confidence2.tif", - "confidence_parformance_map": "path_to_performance_map.tif", - } - "epsg": "point_cloud_epsg" + }, + "performance_map": "path_to_performance_map.tif", + "epsg": "depth_map_epsg" } - }, - "epsg": 32644 + } } - These input files can be generated with the sensors_to_dense_point_clouds pipeline, or sensors_to_dense_dsm pipeline activating the saving of point clouds in `triangulation` application. + These input files can be generated with the `sensors_to_dense_depth_maps` pipeline, or `sensors_to_dense_dsm` pipeline activating the saving of depth_map using `save_intermediate_data` in the `triangulation` application. .. note:: - To generate confidence maps and performance map, parameters `generate_performance_map` and `save_disparity_map` of `dense_matching` application must be activated in sensors_to_dense_point_clouds pipeline. The output performance map is `epi_confidence_performance_map.tif`. Then the parameter `save_confidence` of `point_cloud_rasterization` should be activated in dense_point_clouds_to_dense_dsm pipeline to save the performance map. + To generate confidence maps and performance map, parameters `generate_performance_map` and `save_intermediate_data` of `dense_matching` application must be activated in `sensors_to_dense_depth_maps` pipeline. The output performance map is `performance_map.tif`. Then the parameter `save_confidence` of `point_cloud_rasterization` should be activated in dense_depth_maps_to_dense_dsm pipeline to save the performance map. 
+------------------+-------------------------------------------------------------------+----------------+---------------+----------+ | Name | Description | Type | Default value | Required | +==================+===================================================================+================+===============+==========+ - | *x* | Path to the x coordinates of point cloud | string | | Yes | + | *x* | Path to the x coordinates of depth map | string | | Yes | +------------------+-------------------------------------------------------------------+----------------+---------------+----------+ - | *y* | Path to the y coordinates of point cloud | string | | Yes | + | *y* | Path to the y coordinates of depth map | string | | Yes | +------------------+-------------------------------------------------------------------+----------------+---------------+----------+ - | *z* | Path to the z coordinates of point cloud | string | | Yes | + | *z* | Path to the z coordinates of depth map | string | | Yes | +------------------+-------------------------------------------------------------------+----------------+---------------+----------+ - | *color* | Color of point cloud | string | | Yes | + | *color* | Color of depth map | string | | Yes | +------------------+-------------------------------------------------------------------+----------------+---------------+----------+ - | *mask* | Validity mask of point cloud : 0 values are considered valid data | string | | No | + | *mask* | Validity mask of depth map : 0 values are considered valid data | string | | No | +------------------+-------------------------------------------------------------------+----------------+---------------+----------+ - | *classification* | Classification of point cloud | string | | No | + | *classification* | Classification of depth map | string | | No | +------------------+-------------------------------------------------------------------+----------------+---------------+----------+ - | *filling* | Filling map of point cloud | string | | No | + | *filling* | Filling map of depth map | string | | No | +------------------+-------------------------------------------------------------------+----------------+---------------+----------+ - | *confidence* | Dict of paths to the confidences of point cloud | dict | | No | + | *confidence* | Dict of paths to the confidences of depth map | dict | | No | +------------------+-------------------------------------------------------------------+----------------+---------------+----------+ - | *epsg* | Epsg code of point cloud | int | 4326 | No | + | *epsg* | Epsg code of depth map | int | 4326 | No | +------------------+-------------------------------------------------------------------+----------------+---------------+----------+ **Region Of Interest (ROI)** @@ -389,7 +328,7 @@ The structure follows this organisation: } } - If the *debug_with_roi* parameter is enabled, the tiling of the entire image is kept but only the tiles intersecting + If the *debug_with_roi* advanced parameter (see dedicated tab) is enabled, the tiling of the entire image is kept but only the tiles intersecting the ROI are computed. 
MultiPolygon feature is only useful if the parameter *debug_with_roi* is activated, otherwise the total footprint of the @@ -443,7 +382,10 @@ The structure follows this organisation: } } }, - "debug_with_roi": true, + } + "advanced": + { + "debug_with_roi": true } } @@ -611,7 +553,7 @@ The structure follows this organisation: +----------------+-----------------------+--------+------------------------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------+ | Name | Description | Type | Default value | Available values | Required | +================+=======================+========+====================================+======================================================================================================================================================================================================+==========+ - | *pipeline* | The pipeline to use | str | "sensors_to_dense_dsm_no_merging" | "sensors_to_dense_dsm", "sensors_to_sparse_dsm", "sensors_to_dense_point_clouds", "dense_point_clouds_to_dense_dsm", "sensors_to_dense_dsm_no_merging", "dense_point_clouds_to_dense_dsm_no_merging" | False | + | *pipeline* | The pipeline to use | str | "sensors_to_dense_dsm_no_merging" | "sensors_to_dense_dsm", "sensors_to_sparse_dsm", "sensors_to_dense_depth_maps", "dense_depth_maps_to_dense_dsm", "sensors_to_dense_dsm_no_merging", "dense_depth_maps_to_dense_dsm_no_merging" | False | +----------------+-----------------------+--------+------------------------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------+ @@ -702,9 +644,9 @@ The structure follows this organisation: 8. Rasterize: Project these altitudes on a regular grid as well as the associated color. - .. tab:: Sensor to Dense Point Clouds + .. tab:: Sensor to Dense Depth Maps - **Name**: "sensors_to_dense_point_clouds" + **Name**: "sensors_to_dense_depth_maps" **Description** @@ -724,9 +666,9 @@ The structure follows this organisation: 8. Triangule the matches and get for each pixel of the reference image a latitude, longitude and altitude coordinate. - .. tab:: Dense Point Clouds to Dense DSM + .. tab:: Dense Dense Depth Maps to Dense DSM - **Name**: "dense_point_clouds_to_dense_dsm" + **Name**: "dense_depth_maps_to_dense_dsm" **Description** @@ -735,13 +677,13 @@ The structure follows this organisation: :align: center - 1. Merge points clouds coming from each stereo pairs. + 1. Merge depth maps coming from each stereo pairs. 2. Filter the resulting 3D points cloud via two consecutive filters: the first removes the small groups of 3D points, the second filters the points which have the most scattered neighbors. 3. Rasterize: Project these altitudes on a regular grid as well as the associated color. - .. tab:: Dense Point Clouds to Dense DSM no merging + .. 
tab:: Dense Depth Maps to Dense DSM no merging - **Name**: "dense_point_clouds_to_dense_dsm_no_merging" + **Name**: "dense_depth_maps_to_dense_dsm_no_merging" **Description** @@ -803,15 +745,15 @@ The structure follows this organisation: **Configuration** - +-----------------+-----------------------------------------------+---------+-----------------------------------+---------------+----------+ - | Name | Description | Type | Available values | Default value | Required | - +=================+===============================================+=========+===================================+===============+==========+ - | method | Method for grid generation | string | "epipolar" | epipolar | No | - +-----------------+-----------------------------------------------+---------+-----------------------------------+---------------+----------+ - | epi_step | Step of the deformation grid in nb. of pixels | int | should be > 0 | 30 | No | - +-----------------+-----------------------------------------------+---------+-----------------------------------+---------------+----------+ - | save_grids | Save the generated grids | boolean | | false | No | - +-----------------+-----------------------------------------------+---------+-----------------------------------+---------------+----------+ + +-------------------------+-----------------------------------------------+---------+-----------------------------------+---------------+----------+ + | Name | Description | Type | Available values | Default value | Required | + +=========================+===============================================+=========+===================================+===============+==========+ + | method | Method for grid generation | string | "epipolar" | epipolar | No | + +-------------------------+-----------------------------------------------+---------+-----------------------------------+---------------+----------+ + | epi_step | Step of the deformation grid in nb. 
of pixels | int | should be > 0 | 30 | No | + +-------------------------+-----------------------------------------------+---------+-----------------------------------+---------------+----------+ + | save_intermediate_data | Save the generated grids | boolean | | false | No | + +-------------------------+-----------------------------------------------+---------+-----------------------------------+---------------+----------+ **Example** @@ -834,19 +776,17 @@ The structure follows this organisation: **Configuration** - +---------------------+--------------------------------------------------------+---------+-----------------+---------------+----------+ - | Name | Description | Type | Available value | Default value | Required | - +=====================+========================================================+=========+=================+===============+==========+ - | method | Method for resampling | string | "bicubic" | "bicubic" | No | - +---------------------+--------------------------------------------------------+---------+-----------------+---------------+----------+ - | strip_height | Height of strip (only when tiling is done by strip) | int | should be > 0 | 60 | No | - +---------------------+--------------------------------------------------------+---------+-----------------+---------------+----------+ - | step | Horizontal step for resampling inside a strip | int | should be > 0 | 500 | No | - +---------------------+--------------------------------------------------------+---------+-----------------+---------------+----------+ - | save_epipolar_image | Save the generated images in output folder | boolean | | false | No | - +---------------------+--------------------------------------------------------+---------+-----------------+---------------+----------+ - | save_epipolar_color | Save the generated images (only if color is available) | boolean | | false | No | - +---------------------+--------------------------------------------------------+---------+-----------------+---------------+----------+ + +------------------------+--------------------------------------------------------+---------+-----------------+---------------+----------+ + | Name | Description | Type | Available value | Default value | Required | + +========================+========================================================+=========+=================+===============+==========+ + | method | Method for resampling | string | "bicubic" | "bicubic" | No | + +------------------------+--------------------------------------------------------+---------+-----------------+---------------+----------+ + | strip_height | Height of strip (only when tiling is done by strip) | int | should be > 0 | 60 | No | + +------------------------+--------------------------------------------------------+---------+-----------------+---------------+----------+ + | step | Horizontal step for resampling inside a strip | int | should be > 0 | 500 | No | + +------------------------+--------------------------------------------------------+---------+-----------------+---------------+----------+ + | save_intermediate_data | Save epipolar images and color | boolean | | false | No | + +------------------------+--------------------------------------------------------+---------+-----------------+---------------+----------+ **Example** @@ -906,7 +846,7 @@ The structure follows this organisation: 
+--------------------------------------+------------------------------------------------------------------------------------------------+-------------+------------------------+---------------+----------+ | matches_filter_dev_factor | Factor of deviation of isolation of matches to compute threshold of outliers | int, float | should be > 0 | 3.0 | No | +--------------------------------------+------------------------------------------------------------------------------------------------+-------------+------------------------+---------------+----------+ - | save_matches | Save matches in epipolar geometry (4 first columns) and sensor geometry (4 last columns) | boolean | | false | No | + | save_intermediate_data | Save matches in epipolar geometry (4 first columns) and sensor geometry (4 last columns) | boolean | | false | No | +--------------------------------------+------------------------------------------------------------------------------------------------+-------------+------------------------+---------------+----------+ | strip_margin | Margin to use on strip | int | should be > 0 | 10 | No | +--------------------------------------+------------------------------------------------------------------------------------------------+-------------+------------------------+---------------+----------+ @@ -947,6 +887,8 @@ The structure follows this organisation: * min * max + The DEMs are generated in the application dump directory + **Configuration** +---------------------------------+------------------------------------------------------------+------------+-----------------+---------------+----------+ @@ -1091,7 +1033,7 @@ The structure follows this organisation: - - 0.6 - No - * - save_disparity_map + * - save_intermediate_data - Save disparity map and disparity confidence - boolean - @@ -1134,7 +1076,7 @@ The structure follows this organisation: * Disparity range can be global (same disparity range used for each tile), or local (disparity range is estimated for each tile with dem min/max). * When user activate the generation of performance map, this map transits until being rasterized. Performance map is managed as a confidence map. - * To save the confidence in the sensors_to_dense_point_clouds pipeline, the save_disparity_map parameter should be activated. + * To save the confidence in the sensors_to_dense_point_clouds pipeline, the save_intermediate_data parameter should be activated. .. 
tab:: Dense matches filling @@ -1153,7 +1095,7 @@ The structure follows this organisation: +=====================================+=================================+=========+=========================+====================+==========+ | method | Method for holes detection | string | "plane", "zero_padding" | "plane" | No | +-------------------------------------+---------------------------------+---------+-------------------------+--------------------+----------+ - | save_disparity_map | Save disparity map | boolean | | False | No | + | save_intermediate_data | Save disparity map | boolean | | False | No | +-------------------------------------+---------------------------------+---------+-------------------------+--------------------+----------+ @@ -1219,12 +1161,12 @@ The structure follows this organisation: "dense_matches_filling.1": { "method": "plane", "classification": ["water"], - "save_disparity_map": true + "save_intermediate_data": true }, "dense_matches_filling.2": { "method": "zero_padding", "classification": ["cloud", "snow"], - "save_disparity_map": true + "save_intermediate_data": true } }, @@ -1239,15 +1181,15 @@ The structure follows this organisation: **Configuration** - +-------------------+--------------------------------------------------------------------------------------------------------------------+---------+--------------------------------------+------------------------------+----------+ - | Name | Description | Type | Available values | Default value | Required | - +===================+====================================================================================================================+=========+======================================+==============================+==========+ - | method | Method for triangulation | string | "line_of_sight_intersection" | "line_of_sight_intersection" | No | - +-------------------+--------------------------------------------------------------------------------------------------------------------+---------+--------------------------------------+------------------------------+----------+ - | snap_to_img1 | If all pairs share the same left image, modify lines of sights of secondary images to cross those of the ref image | boolean | | false | No | - +-------------------+--------------------------------------------------------------------------------------------------------------------+---------+--------------------------------------+------------------------------+----------+ - | save_points_cloud | Save points cloud | boolean | | false | No | - +-------------------+--------------------------------------------------------------------------------------------------------------------+---------+--------------------------------------+------------------------------+----------+ + +------------------------+--------------------------------------------------------------------------------------------------------------------+---------+--------------------------------------+------------------------------+----------+ + | Name | Description | Type | Available values | Default value | Required | + +========================+====================================================================================================================+=========+======================================+==============================+==========+ + | method | Method for triangulation | string | "line_of_sight_intersection" | "line_of_sight_intersection" | No | + 
+------------------------+--------------------------------------------------------------------------------------------------------------------+---------+--------------------------------------+------------------------------+----------+ + | snap_to_img1 | If all pairs share the same left image, modify lines of sights of secondary images to cross those of the ref image | boolean | | false | No | + +------------------------+--------------------------------------------------------------------------------------------------------------------+---------+--------------------------------------+------------------------------+----------+ + | save_intermediate_data | Save depth map | boolean | | false | No | + +------------------------+--------------------------------------------------------------------------------------------------------------------+---------+--------------------------------------+------------------------------+----------+ **Example** @@ -1256,7 +1198,7 @@ The structure follows this organisation: "applications": { "triangulation": { "method": "line_of_sight_intersection", - "use_geoid_alt": true + "snap_to_img1": true } }, @@ -1272,17 +1214,15 @@ The structure follows this organisation: **Configuration** - +------------------------------+-----------------------------------------+---------+----------------------------+----------------------------+----------+ - | Name | Description | Type | Available value | Default value | Required | - +==============================+=========================================+=========+============================+============================+==========+ - | method | Method for fusion | string | "mapping_to_terrain_tiles" | "mapping_to_terrain_tiles" | No | - +------------------------------+-----------------------------------------+---------+----------------------------+----------------------------+----------+ - | save_points_cloud_as_laz | Save points clouds as laz format | boolean | | false | No | - +------------------------------+-----------------------------------------+---------+----------------------------+----------------------------+----------+ - | save_points_cloud_as_csv | Save points clouds as csv format | boolean | | false | No | - +------------------------------+-----------------------------------------+---------+----------------------------+----------------------------+----------+ - | save_points_cloud_by_pair | Enable points cloud saving by pair | boolean | | false | No | - +------------------------------+-----------------------------------------+---------+----------------------------+----------------------------+----------+ + +------------------------------+------------------------------------------+---------+----------------------------+----------------------------+----------+ + | Name | Description | Type | Available value | Default value | Required | + +==============================+==========================================+=========+============================+============================+==========+ + | method | Method for fusion | string | "mapping_to_terrain_tiles" | "mapping_to_terrain_tiles" | No | + +------------------------------+------------------------------------------+---------+----------------------------+----------------------------+----------+ + | save_intermediate_data | Save points clouds as laz and csv format | boolean | | false | No | + +------------------------------+------------------------------------------+---------+----------------------------+----------------------------+----------+ + | save_by_pair | Enable 
points cloud saving by pair | boolean | | false | No | + +------------------------------+------------------------------------------+---------+----------------------------+----------------------------+----------+ **Example** @@ -1292,16 +1232,15 @@ The structure follows this organisation: "applications": { "point_cloud_fusion": { "method": "mapping_to_terrain_tiles", - "save_points_cloud_as_laz": true, - "save_points_cloud_as_csv": true, - "save_points_cloud_by_pair": true, + "save_intermediate_data": true, + "save_by_pair": true, } }, .. note:: - When `save_points_cloud_as_laz` is activated, multiple Laz files are saved, corresponding to each processed terrain tiles. + When `save_intermediate_data` is activated, multiple Laz and csv files are saved, corresponding to each processed terrain tiles. Please, see the section :ref:`merge_laz_files` to merge them into one single file. - `save_points_cloud_by_pair` parameter enables saving by input pair. The csv/laz name aggregates row, col and corresponding pair key. + `save_by_pair` parameter enables saving by input pair. The csv/laz name aggregates row, col and corresponding pair key. .. tab:: Point Cloud outliers removing @@ -1318,11 +1257,9 @@ The structure follows this organisation: +==============================+==========================================+=========+===================================+===============+==========+ | method | Method for point cloud outliers removing | string | "statistical", "small_components" | "statistical" | No | +------------------------------+------------------------------------------+---------+-----------------------------------+---------------+----------+ - | save_points_cloud_as_laz | Save points clouds as laz format | boolean | | false | No | - +------------------------------+------------------------------------------+---------+-----------------------------------+---------------+----------+ - | save_points_cloud_as_csv | Save points clouds as csv format | boolean | | false | No | + | save_intermediate_data | Save points clouds as laz and csv format | boolean | | false | No | +------------------------------+------------------------------------------+---------+-----------------------------------+---------------+----------+ - | save_points_cloud_by_pair | Enable points cloud saving by pair | boolean | | false | No | + | save_by_pair | Enable points cloud saving by pair | boolean | | false | No | +------------------------------+------------------------------------------+---------+-----------------------------------+---------------+----------+ If method is *statistical*: @@ -1374,14 +1311,13 @@ The structure follows this organisation: "point_cloud_outliers_removing.1": { "method": "small_components", "on_ground_margin": 10, - "save_points_cloud_as_laz": true, - "save_points_cloud_as_csv": false + "save_intermediate_data": true }, "point_cloud_outliers_removing.2": { "method": "statistical", "k": 10, - "save_points_cloud_as_laz": true, - "save_points_cloud_by_pair": true, + "save_intermediate_data": true, + "save_by_pair": true, } }, @@ -1429,12 +1365,6 @@ The structure follows this organisation: - - None - No - * - resolution - - Altitude grid step (dsm) - - float - - should be > 0 - - 0.5 - - No * - dsm_no_data - - int @@ -1461,67 +1391,8 @@ The structure follows this organisation: - - 255 - - * - save_color - - Save color ortho-image - - boolean - - - - true - - No - * - save_stats - - - - boolean - - - - false - - No - * - save_mask - - Save mask raster - - boolean - - - - false - - No - * - save_classif 
- - Save classification mask raster - boolean - - false - No - * - save_dsm - - Save dsm - - boolean - - - - true - - No - * - save_confidence - - Save all the disparity confidence - - boolean - - - - false - - No - * - save_intervals - - | Save the propagated height confidence intervals - | Confidence disparity intervals must have been - | computed during the dense matching step. - - boolean - - - - false - - No - * - save_source_pc - - Save mask with data source - - boolean - - - - false - - No - * - save_filling - - Save mask with filling information - - boolean - - - - false - - No - * - compute_all - - | Compute all layers even - | if one or more layers - | are not saved (color - | , dsm, msk..) + * - save_intermediate_data + - Save all layers from input point cloud in application `dump_dir` - boolean - - false @@ -1538,30 +1409,148 @@ The structure follows this organisation: } }, + .. tab:: Advanced parameters + + + +----------------------------+-------------------------------------------------------------------------+-----------------------+----------------------+----------+ + | Name | Description | Type | Default value | Required | + +============================+=========================================================================+=======================+======================+==========+ + | *save_intermediate_data* | Save intermediate data for all applications | bool | False | Yes | + +----------------------------+-------------------------------------------------------------------------+-----------------------+----------------------+----------+ + | *use_epipolar_a_priori* | Activate epipolar a priori | bool | False | Yes | + +----------------------------+-------------------------------------------------------------------------+-----------------------+----------------------+----------+ + | *epipolar_a_priori* | Provide epipolar a priori information (see section below) | dict | | No | + +----------------------------+-------------------------------------------------------------------------+-----------------------+----------------------+----------+ + | *terrain_a_priori* | Provide terrain a priori information (see section below) | dict | | No | + +----------------------------+-------------------------------------------------------------------------+-----------------------+----------------------+----------+ + | *debug_with_roi* | Use input ROI with the tiling of the entire image (see Inputs section) | Boolean | False | No | + +----------------------------+-------------------------------------------------------------------------+-----------------------+----------------------+----------+ + + **Save intermediate data** + + The `save_intermediate_data` flag can be used to activate and deactivate the saving of the possible output of applications. + + It is set in the `advanced` category and can be overloaded in each application separately. It defaults to false, meaning that no intermediate product is saved. Intermediate data are saved in the `dump_dir` folder found in the cars output directory, with a subfolder corresponding to each application. + + For example, setting `save_intermediate_data` to `true` in `advanced` and to `false` in `application/point_cloud_rasterization` will activate product saving in all applications except `point_cloud_rasterization`. Conversely, setting it to `false` in `advanced` and to `true` in `application/point_cloud_rasterization` will only save rasterization outputs. + + Intermediate data refers to all files that are not part of an output product. 
Files that compose an output product will not be found in the application dump directory. For example, if `dsm` is requested as output product, the `dsm.tif` file and all activated dsm auxiliary files will not be found in the `rasterization` dump directory. This directory will still contain the files generated by the `rasterization` application that are not part of the `dsm` product. + + + **Epipolar a priori** + + The epipolar is usefull to accelerate the preliminary steps of the grid correction and the disparity range evaluation, + particularly for the sensor_to_full_resolution_dsm pipeline. + The epipolar_a_priori data dict is produced during low or full resolution dsm pipeline. + However, the epipolar_a_priori should be not activated for the sensor_to_low_resolution_dsm. + So, the sensor_to_low_resolution_dsm pipeline produces a refined_conf_full_res.json in the outdir + that contains the epipolar_a_priori information for each sensor image pair. + The epipolar_a_priori is also saved in the used_conf.json with the sensor_to_full_resolution_dsm pipeline. + + For each pair of sensor images, the epipolar a priori is filled as follows: + + +-----------------------+-------------------------------------------------------------+--------+----------------+----------------------------------+ + | Name | Description | Type | Default value | Required | + +=======================+=============================================================+========+================+==================================+ + | *grid_correction* | The grid correction coefficients | list | | if use_epipolar_a_priori is True | + +-----------------------+-------------------------------------------------------------+--------+----------------+----------------------------------+ + | *disparity_range* | The disparity range [disp_min, disp_max] | list | | if use_epipolar_a_priori is True | + +-----------------------+-------------------------------------------------------------+--------+----------------+----------------------------------+ + + .. note:: + + The grid correction coefficients are based on a bilinear model with 6 parameters [x1,x2,x3,y1,y2,y3]. + The None value produces no grid correction (equivalent to parameters [0,0,0,0,0,0]). + + + **Terrain a priori** + + The terrain a priori is used at the same time as the epipolar a priori. + If use_epipolar_a_priori is activated, epipolar_a_priori and terrain_a_priori must be provided. + The terrain_a_priori data dict is produced during low or full resolution dsm pipeline. + + The terrain a priori is initially populated with DEM information. 
+ + +----------------+-------------------------------------------------------------+--------+----------------+----------------------------------+ + | Name | Description | Type | Default value | Required | + +================+=============================================================+========+================+==================================+ + | *dem_median* | DEM generated with median function | str | | if use_epipolar_a_priori is True | + +----------------+-------------------------------------------------------------+--------+----------------+----------------------------------+ + | *dem_min* | DEM generated with min function | str | | if use_epipolar_a_priori is True | + +----------------+-------------------------------------------------------------+--------+----------------+----------------------------------+ + | *dem_max* | DEM generated with max function | str | | if use_epipolar_a_priori is True | + +----------------+-------------------------------------------------------------+--------+----------------+----------------------------------+ + + + **Example** + + .. code-block:: json + + "advanced": { + "save_intermediate_data": true + } + }, + .. tab:: Outputs - +----------------+-------------------------------------------------------------+----------------+----------------+----------+ - | Name | Description | Type | Default value | Required | - +================+=============================================================+================+================+==========+ - | out_dir | Output folder where results are stored | string | No | No | - +----------------+-------------------------------------------------------------+----------------+----------------+----------+ - | dsm_basename | base name for dsm | string | "dsm.tif" | No | - +----------------+-------------------------------------------------------------+----------------+----------------+----------+ - | geoid | output geoid | bool or string | False | No | - +----------------+-------------------------------------------------------------+----------------+----------------+----------+ - | color_basename | base name for ortho-image | string | "color.tif | No | - +----------------+-------------------------------------------------------------+----------------+----------------+----------+ - | info_basename | base name for file containing information about computation | string | "content.json" | No | - +----------------+-------------------------------------------------------------+----------------+----------------+----------+ + + +------------------+-------------------------------------------------------------+--------------------+----------------------+----------+ + | Name | Description | Type | Default value | Required | + +==================+=============================================================+====================+======================+==========+ + | *directory* | Output folder where results are stored | string | No | No | + +------------------+-------------------------------------------------------------+--------------------+----------------------+----------+ + | *product_level* | Output requested products (dsm, point_cloud, depth_map) | list or string | "dsm" | No | + +------------------+-------------------------------------------------------------+--------------------+----------------------+----------+ + | *resolution* | Output DSM grid strp (only for dsm product level) | float | 0.5 | No | + 
+------------------+-------------------------------------------------------------+--------------------+----------------------+----------+ + | *auxiliary* | Selection of additional files in products | dict | See below | No | + +------------------+-------------------------------------------------------------+--------------------+----------------------+----------+ + | *epsg* | EPSG code | int, should be > 0 | None | No | + +------------------+-------------------------------------------------------------+--------------------+----------------------+----------+ + | *geoid* | Output geoid | bool or string | False | No | + +------------------+-------------------------------------------------------------+--------------------+----------------------+----------+ + | *save_by_pair* | save output point clouds by pair | bool | False | No | + +------------------+-------------------------------------------------------------+--------------------+----------------------+----------+ + **Output contents** The output directory, defined on the configuration file (see previous section) contains at the end of the computation: - * the dsm - * color image (if *color image* has been given) - * information json file containing: used parameters, information and numerical results related to computation, step by step and pair by pair. - * subfolder for each defined pair which can contains intermediate data + * the required product levels (`depth_map`, `dsm` and/or `point_cloud`) + * the dump directory (`dump_dir`) containing intermediate data for all applications + * metadata json file containing: used parameters, information and numerical results related to computation, step by step and pair by pair. + * logs folder containing Cars log and profiling information + + + **Output products** + + The `product_level` attribute defines which product should be produced by cars. There are three available product type: `depth_map`, `point_cloud` and `dsm`. A single product can be requested by setting the parameter as string, several products can be requested by providing a list. 
For `depth_map` and `dsm`, additional auxiliary files can be produced with the product by setting the `auxiliary` dictionary attribute, which contains the following attributes: + + +-----------------------+-------------------------------------------------------------+--------+----------------+-----------+ + | Name | Description | Type | Default value | Required | + +=======================+=============================================================+========+================+===========+ + | *color* | Save output color (dsm/depth_map) | bool | True | No | + +-----------------------+-------------------------------------------------------------+--------+----------------+-----------+ + | *mask* | Save output mask (dsm/depth_map) | bool | False | No | + +-----------------------+-------------------------------------------------------------+--------+----------------+-----------+ + | *classification* | Save output classification (dsm/depth_map) | bool | False | No | + +-----------------------+-------------------------------------------------------------+--------+----------------+-----------+ + | *performance_map* | Save output performance map (dsm) | bool | False | No | + +-----------------------+-------------------------------------------------------------+--------+----------------+-----------+ + | *contributing_pair* | Save output contributing pair (dsm) | bool | False | No | + +-----------------------+-------------------------------------------------------------+--------+----------------+-----------+ + | *filling* | Save output filling (dsm) | bool | False | No | + +-----------------------+-------------------------------------------------------------+--------+----------------+-----------+ + + Note that not all rasters associated with the DSM that cars can produce are available in the output product auxiliary data. For example, confidence intervals are not part of the output product but can be found in the rasterization `dump_dir` if `generate_confidence_intervals` is activated in the `dense_matching` application (to compute the confidence) and `save_intermediate_data` is activated in the `rasterization` application configuration (to write it on disk). + + **Point cloud output** + + The point cloud output product consists of a collection of laz files, each containing a tile of the point cloud. If the `save_by_pair` option is set, laz files will be produced for each sensor pair defined in the input pairing. + + The point cloud found in the product is the highest level point cloud produced by cars. For example, if outlier removing and point cloud denoising are deactivated, the point cloud will correspond to the output of point cloud fusion. If only the first application of outlier removing is activated, this will be the output point cloud. **Geoid** @@ -1610,7 +1599,7 @@ This section describes optional plugins possibilities of CARS. 
}, "pairing": [["one", "two"]], "initial_elevation": { - "dem_path": "path/to/srtm_file.tif" + "dem": "path/to/srtm_file.tif" }, }, "geometry_plugin": "SharelocGeometry", diff --git a/tutorials/data_gizeh.tar.bz2.md5sum b/tutorials/data_gizeh.tar.bz2.md5sum index a25ce71a..0d148d7e 100644 --- a/tutorials/data_gizeh.tar.bz2.md5sum +++ b/tutorials/data_gizeh.tar.bz2.md5sum @@ -1 +1 @@ -54e915c71c1f0f9da4ce09c102988127 data_gizeh.tar.bz2 +6f2fc0d739cdc0296bd0c3d03c7d9be4 data_gizeh.tar.bz2 diff --git a/tutorials/data_gizeh_small.tar.bz2.md5sum b/tutorials/data_gizeh_small.tar.bz2.md5sum index e51fa2fc..85f3205a 100644 --- a/tutorials/data_gizeh_small.tar.bz2.md5sum +++ b/tutorials/data_gizeh_small.tar.bz2.md5sum @@ -1 +1 @@ -a09e89fc44d7b4f69f180a3b687e8682 data_gizeh_small.tar.bz2 +6c3d85d319c54db80ac3005084edd918 data_gizeh_small.tar.bz2 From a945b0d9fdc3b8fa5cf7f0e22840464628fea804 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?C=C3=A9dric=20Traizet?= Date: Thu, 17 Oct 2024 16:33:36 +0200 Subject: [PATCH 06/13] doc: integration of Datafalk documentation contribution (2bd8324) after content conflicts with other modifications --- docs/source/contributing_the_project.rst | 8 +++---- .../exploring_the_field/3d_products.rst | 2 +- .../from_satellite_images_to_dsm.rst | 5 ++-- .../masks_and_classifications_usage.rst | 5 ++-- docs/source/getting_started.rst | 11 +++++++++ docs/source/howto.rst | 23 +++++++++++++++---- docs/source/software_design/orchestrator.rst | 4 ++-- docs/source/software_design/plugin.rst | 12 ++++++---- docs/source/troubleshooting_and_faqs.rst | 2 +- docs/source/usage.rst | 5 ++-- 10 files changed, 53 insertions(+), 24 deletions(-) diff --git a/docs/source/contributing_the_project.rst b/docs/source/contributing_the_project.rst index 2242e1db..9901247e 100644 --- a/docs/source/contributing_the_project.rst +++ b/docs/source/contributing_the_project.rst @@ -55,8 +55,8 @@ Here are some rules to apply when developing a new functionality: * **Test**: Each new functionality shall have a corresponding test in its module's test file. This test shall, if possible, check the function's outputs and the corresponding degraded cases. * **Documentation**: All functions shall be documented (object, parameters, return values). * **Use type hints**: Use the type hints provided by the `typing` python module. -* **Use doctype**: Follow sphinx default doctype for automatic API -* **Quality code**: Correct project quality code errors with pre-commit automatic workflow (see below) +* **Use doctype**: Follow sphinx default doctype for automatic API. +* **Quality code**: Correct project quality code errors with pre-commit automatic workflow (see below). * **Factorization**: Factorize the code as much as possible. The command line tools shall only include the main workflow and rely on the cars python modules. * **Be careful with user interface upgrade:** If major modifications of the user interface or of the tool's behaviour are done, update the user documentation (and the notebooks if necessary). * **Logging and no print**: The usage of the `print()` function is forbidden: use the `logging` python standard module instead. @@ -107,7 +107,7 @@ Jupyter notebooks CARS contains notebooks in tutorials directory. -To generate a Jupyter kernel with CARS installation, use: +To generate a `Jupyter kernel `_ with CARS installation, use: .. 
code-block:: console @@ -180,7 +180,7 @@ If necessary, Black doesn’t reformat blocks that start with "# fmt: off" and e Flake8 ------ -`Flake8`_ is a command-line utility for enforcing style consistency across Python projects. By default it includes lint checks provided by the PyFlakes project, PEP-0008 inspired style checks provided by the PyCodeStyle project, and McCabe complexity checking provided by the McCabe project. It will also run third-party extensions if they are found and installed. +`Flake8`_ is a command-line utility for enforcing style consistency across Python projects. By default it includes lint checks provided by the `PyFlakes project `_ , PEP-0008 inspired style checks provided by the `PyCodeStyle project `_ , and McCabe complexity checking provided by the `McCabe project `_. It will also run third-party extensions if they are found and installed. CARS ``flake8`` configuration is done in `setup.cfg `_ diff --git a/docs/source/exploring_the_field/3d_products.rst b/docs/source/exploring_the_field/3d_products.rst index 7127ad0d..a0ef296f 100644 --- a/docs/source/exploring_the_field/3d_products.rst +++ b/docs/source/exploring_the_field/3d_products.rst @@ -3,7 +3,7 @@ | CARS produces a geotiff file named ``dsm.tif`` that contains the Digital Surface Model in the required cartographic projection and the ground sampling distance defined by the user. | If the user provides an additional input image, an ortho-image ``color.tif`` is also produced. The latter is stackable to the DSM (See :ref:`getting_started`). -| If the user saves points clouds as laz format (point_cloud_fusion, point_cloud_outliers_removing, point_cloud_rasterization), the points clouds are saved in laz compressed format with colors or graylevel image. +| If the user saves points clouds as `laz format `_ (point_cloud_fusion, point_cloud_outliers_removing, point_cloud_rasterization), the points clouds are saved in laz compressed format with colors or graylevel image. These two products can be visualized with `QGIS `_ for example. diff --git a/docs/source/exploring_the_field/from_satellite_images_to_dsm.rst b/docs/source/exploring_the_field/from_satellite_images_to_dsm.rst index ee84ec4b..0ca349d5 100644 --- a/docs/source/exploring_the_field/from_satellite_images_to_dsm.rst +++ b/docs/source/exploring_the_field/from_satellite_images_to_dsm.rst @@ -64,8 +64,8 @@ Generate a DSM step by step +--------------------------+---------------------------------------------+ | For each point in one image, the software searches the corresponding point in the other image. -| The color of the pixels (grayscale) in the image :ref:`below` corresponds to the shift value. Some pixels do not have a match (matching error due to moving objects, shadows etc.). - +| The color of the pixels (grayscale) in the image :ref:`below` corresponds to the shift value. Some pixels do not have a match, which are represented as transparent pixels in the image. These matching errors can occur due to various reasons such as moving objects, shadows, occlusions, or areas with insufficient texture. +| The transparent pixels indicate areas where the matching algorithm couldn't find a reliable correspondence between the two images, highlighting regions of uncertainty in the matching process. .. _matching: @@ -81,6 +81,7 @@ Generate a DSM step by step | The displacements obtained are transformed into positions in both images. | This allows to deduce lines of sight. 
The intersection of these lines gives a point in space: longitude, latitude, altitude (see :ref:`below`). +| A line of sight is an imaginary straight line from the camera's perspective through a specific point in the image, extending into 3D space. It represents all possible 3D positions that could have produced that image point. .. _triangulation: diff --git a/docs/source/exploring_the_field/masks_and_classifications_usage.rst b/docs/source/exploring_the_field/masks_and_classifications_usage.rst index fcf0a3b3..a82f6bfd 100644 --- a/docs/source/exploring_the_field/masks_and_classifications_usage.rst +++ b/docs/source/exploring_the_field/masks_and_classifications_usage.rst @@ -2,8 +2,9 @@ Mask and Classification Usage ============================= | Photogrammetry is a technique that cannot reproduce altitude on water. This technique also has difficulties for moving elements or in shaded areas. -| For this reason, it is possible to mask out areas or apply ad hoc processing to aid the matching stage. +| For this reason, it is possible to mask out areas or apply ad hoc processing to aid the matching stage (see :ref:`mask_and_classification_usage`). +.. _mask_and_classification_usage: Masks ----- @@ -22,4 +23,4 @@ Classification | Please, see the section :ref:`convert_image_to_binary_image` to make a multiband binary image with 1 bit per band. | All non-zeros values of the classification image will be considered as invalid data. -| The classification can be used in each application by band name list selection parameter. See application ``classification`` parameter :ref:`configuration`.. +| The classification can be used in each application by band name list selection parameter. See application ``classification`` parameter :ref:`configuration`. \ No newline at end of file diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst index 6bc4d9b3..60c554bd 100644 --- a/docs/source/getting_started.rst +++ b/docs/source/getting_started.rst @@ -14,6 +14,17 @@ Getting Started pip install cars +* Alternatively, you can use the provided Dockerfile to build a Docker image for CARS: + +.. code-block:: console + + # Clone the CARS repository + git clone https://github.com/CNES/cars.git + cd cars + + # Build the Docker image + docker build -t cars:latest . + * Get and extract data samples from CARS repository: .. code-block:: console diff --git a/docs/source/howto.rst b/docs/source/howto.rst index 1c73097e..82fbe308 100644 --- a/docs/source/howto.rst +++ b/docs/source/howto.rst @@ -26,7 +26,7 @@ Example files are available here: https://intelligence.airbus.com/imagery/sample Maxar WorldView example files ----------------------------- -| Example files are available on AWS S3 through the SpaceNet challenge here: s3://spacenet-dataset/Hosted-Datasets/MVS_dataset/WV3/PAN/. +| Example files are available on AWS S3 through the SpaceNet challenge here: `s3://spacenet-dataset/Hosted-Datasets/MVS_dataset/WV3/PAN/` | You need to install `aws-cli `_: .. code-block:: console @@ -104,6 +104,7 @@ For example, if you want to monitor the computation of a CARS run: .. _make_a_simple_pan_sharpening: + Make a simple pan sharpening ---------------------------- @@ -121,6 +122,21 @@ It can be recommended to apply a P+XS pansharpening with `OTB`_. .. _`OTB`: https://www.orfeo-toolbox.org/CookBook-8.0/C++/UserGuide.html#image-data-representation +Convert RGB image to panchromatic image +-------------------------------------- + +CARS only uses panchromatic images for processing. 
+ +If you have a multi-spectral image, you'll need to convert it to a panchromatic image before using it with CARS. + +The line below use `"Grayscale Using Luminance" `_ expression with `OTB BandMath `_ + + +.. code-block:: console + + otbcli_BandMath -il image.tif -out image_panchromatic.tif -exp "(0.2126 * im1b1 + 0.7152 * im1b2 + 0.0722 * im1b3)" + + .. _make_a_water_mask: Make a water mask @@ -141,7 +157,7 @@ See next section to apply a gdal_translate to convert the mask with 1bit image s Convert image to binary image ----------------------------- -To translate single image or multiband image with several nbits per band to 1bit per band, it can be recommended to use gdal_translate as follows: +To translate single image or multiband image with several nbits per band to 1bit per band, it can be recommended to use `gdal_translate `_ as follows: .. code-block:: console @@ -171,8 +187,7 @@ Post process output Merge Laz files --------------- -CARS generates several laz files corresponding to the tiles processed. -Merge can be done with `laszip`_. +CARS generates several `laz files `_ corresponding to the tiles processed. To merge them: diff --git a/docs/source/software_design/orchestrator.rst b/docs/source/software_design/orchestrator.rst index 76afc599..16aa5006 100644 --- a/docs/source/software_design/orchestrator.rst +++ b/docs/source/software_design/orchestrator.rst @@ -7,7 +7,7 @@ Goals ----- The *orchestrator* is the central element of CARS concepts. -Its role is to ensure the communication between the computing technology,the *applications* and *CarsDatasets*. +Its role is to ensure the communication between the *computing technology*, the *applications* and the *CarsDatasets*. Details ------- @@ -73,7 +73,7 @@ The cluster is the component which allows to realize the calculations. * `start_tasks` to compute each task that have been declared. * `future_iterator`: iterate over the `future` objects -There are already 4 plugins, each one representing a mode: +There are already 3 plugins, each one representing a mode: * *dask* diff --git a/docs/source/software_design/plugin.rst b/docs/source/software_design/plugin.rst index 76608c08..521221fd 100644 --- a/docs/source/software_design/plugin.rst +++ b/docs/source/software_design/plugin.rst @@ -6,8 +6,9 @@ Plugin Geometry plugin ^^^^^^^^^^^^^^^^^ -Geometry plugins aim to enable the use of different geometry libraries, typically libGEO or Shareloc to perform CARS geometric operations which require the interpretation of the geometric models of the pairs to process. -Those operation are: +Geometry plugins aim to enable the use of different geometry libraries, typically `libGEO `_ or `Shareloc `_ to perform CARS geometric operations which require the interpretation of the geometric models of the pairs to process. + +Those operations are: * The epipolar grids computation * The direct localization operation @@ -44,10 +45,11 @@ For example, if the AbstractGeometry object is defined in file `cars_geometry_pl ) Mandatory methods +++++++++++++++++++ Currently, the `AbstractGeometry` class requires the implementation of the following mandatory methods and properties: -* `conf_schema` which specify the user inputs json schema required by the geometric library +* `conf_schema` which specify the user input json schema required by the geometric library. .. 
code-block:: python @@ -104,7 +106,7 @@ Currently, the `AbstractGeometry` class requires the implementation of the follo :return: the long/lat/height numpy array in output of the triangulation """ -* `generate_epipolar_grids` which generates the left and right epipolar grids from the images of the pair and their geometrical models +* `generate_epipolar_grids` which generates the left and right epipolar grids from the images of the pair and their geometrical models. .. code-block:: python @@ -137,7 +139,7 @@ Currently, the `AbstractGeometry` class requires the implementation of the follo - the disparity to altitude ratio as a float """ -* `direct_loc` which performs direct localization operations +* `direct_loc` which performs direct localization operations. .. code-block:: python diff --git a/docs/source/troubleshooting_and_faqs.rst b/docs/source/troubleshooting_and_faqs.rst index 347f254e..070077f2 100644 --- a/docs/source/troubleshooting_and_faqs.rst +++ b/docs/source/troubleshooting_and_faqs.rst @@ -64,7 +64,7 @@ Output data How to generate output files overview ? --------------------------------------- -Considering bulky files, it can be recommended to generate an overview file with `GDAL`_ before opening it with QGIS: +Considering bulky files, it can be recommended to generate an overview file with `GDAL`_ before opening it with `QGIS `_: .. code-block:: console diff --git a/docs/source/usage.rst b/docs/source/usage.rst index bceba942..f188ea30 100644 --- a/docs/source/usage.rst +++ b/docs/source/usage.rst @@ -49,8 +49,7 @@ Note that ``cars-starter`` script can be used to instantiate this configuration --full Fill all default values --check Check inputs -Finally, an output ``used_conf.json`` file will be created on the output directory. This file contains all the execution used -parameters and can be used as an input configuration file to re-run cars. +Finally, an output ``used_conf.json`` file will be created on the output directory. This file contains all the parameters used during execution and can be used as an input configuration file to re-run cars. .. _configuration: @@ -1440,7 +1439,7 @@ The structure follows this organisation: **Epipolar a priori** - The epipolar is usefull to accelerate the preliminary steps of the grid correction and the disparity range evaluation, + The epipolar a priori is useful to accelerate the preliminary steps of the grid correction and the disparity range evaluation, particularly for the sensor_to_full_resolution_dsm pipeline. The epipolar_a_priori data dict is produced during low or full resolution dsm pipeline. However, the epipolar_a_priori should be not activated for the sensor_to_low_resolution_dsm. 
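For reference, here is a minimal configuration sketch showing how the advanced a priori parameters documented above could fit together. The pair key ``one_two`` is assumed to follow the ``pairing`` example used elsewhere in this documentation, and every value is an illustrative placeholder rather than the output of a real run:

.. code-block:: json

    "advanced": {
        "use_epipolar_a_priori": true,
        "epipolar_a_priori": {
            "one_two": {
                "grid_correction": [0, 0, 0, 0, 0, 0],
                "disparity_range": [-20, 15]
            }
        },
        "terrain_a_priori": {
            "dem_median": "path/to/dem_median.tif",
            "dem_min": "path/to/dem_min.tif",
            "dem_max": "path/to/dem_max.tif"
        }
    }

In practice these values are not written by hand: they are produced by a previous CARS run and can be copied from its ``used_conf.json``.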
From fab6f09b5957e4cf5547c629f4f9f19644f2d930 Mon Sep 17 00:00:00 2001 From: Datafalk Date: Thu, 26 Sep 2024 16:49:10 +0200 Subject: [PATCH 07/13] feat(docker): create docker dev environment --- Dockerfile.local | 38 ++++++++++++++++++++++++ docs/source/contributing_the_project.rst | 15 ++++++++++ 2 files changed, 53 insertions(+) create mode 100644 Dockerfile.local diff --git a/Dockerfile.local b/Dockerfile.local new file mode 100644 index 00000000..97bcfd5c --- /dev/null +++ b/Dockerfile.local @@ -0,0 +1,38 @@ +# Use the same base image +# hadolint ignore=DL3007 +FROM orfeotoolbox/otb:latest +LABEL maintainer="CNES" + +# Install dependencies +# hadolint ignore=DL3008 +RUN apt-get update && apt-get install --no-install-recommends -y --quiet \ + git \ + libpython3.8 \ + python3.8-dev \ + python3.8-venv \ + python3.8 \ + python3-pip \ + python3-numpy \ + python3-virtualenv \ + make \ + && rm -rf /var/lib/apt/lists/* + +# Set up working directory +WORKDIR /app + +# Create a virtual environment +RUN python3 -m venv /app/venv + +# Activate the virtual environment +ENV VIRTUAL_ENV='/app/venv' +ENV PATH="$VIRTUAL_ENV/bin:$PATH" + +# Copy only necessary files for installation +COPY . /app/cars + +WORKDIR /app/cars +# Install CARS using make +RUN make clean && make install-dev + +# launch cars +CMD ["/bin/bash"] diff --git a/docs/source/contributing_the_project.rst b/docs/source/contributing_the_project.rst index 9901247e..f951c1b5 100644 --- a/docs/source/contributing_the_project.rst +++ b/docs/source/contributing_the_project.rst @@ -46,6 +46,21 @@ Particularly, it uses the following pip editable install: With this pip install mode, source code modifications directly impacts ``cars`` command line. + +Setting up a development environment with docker +================================================ + +To setup a development environment with docker, run the following command: + +.. code-block:: console + + docker build -t cars-dev -f Dockerfile.local . + docker run -it -v "$(pwd)":/app/cars -w /app/cars cars-dev /bin/bash + +You're ready to use CARS, all files in the current directory are mounted in the container. 
+ + + Coding guide ============ From a83218d0301f690a6ee7326f4d518ce03525e88d Mon Sep 17 00:00:00 2001 From: Steux Yoann Date: Wed, 2 Oct 2024 14:25:11 +0000 Subject: [PATCH 08/13] =?UTF-8?q?Chaine=20low=20cost:=20sans=20validation?= =?UTF-8?q?=20crois=C3=A9e?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../dense_matching/census_mccnn_sgm.py | 6 ++ .../loaders/config_census_sgm.json | 4 -- .../dense_matching/loaders/config_mccnn.json | 4 -- .../dense_matching/loaders/pandora_loader.py | 15 +++++ docs/source/usage.rst | 6 ++ .../dense_matching/test_pandora_loader.py | 63 +++++++++++++++++++ tests/test_end2end.py | 26 ++++++++ 7 files changed, 116 insertions(+), 8 deletions(-) diff --git a/cars/applications/dense_matching/census_mccnn_sgm.py b/cars/applications/dense_matching/census_mccnn_sgm.py index a274fbc1..947af58b 100644 --- a/cars/applications/dense_matching/census_mccnn_sgm.py +++ b/cars/applications/dense_matching/census_mccnn_sgm.py @@ -108,6 +108,7 @@ def __init__(self, conf=None): self.disp_range_propagation_filter_size = self.used_config[ "disp_range_propagation_filter_size" ] + self.use_cross_validation = self.used_config["use_cross_validation"] # Saving files self.save_intermediate_data = self.used_config["save_intermediate_data"] @@ -171,6 +172,9 @@ def check_conf(self, conf): overloaded_conf["perf_ambiguity_threshold"] = conf.get( "perf_ambiguity_threshold", 0.6 ) + overloaded_conf["use_cross_validation"] = conf.get( + "use_cross_validation", False + ) # Margins computation parameters overloaded_conf["use_global_disp_range"] = conf.get( "use_global_disp_range", False @@ -213,6 +217,7 @@ def check_conf(self, conf): perf_eta_max_ambiguity=overloaded_conf["perf_eta_max_ambiguity"], perf_eta_max_risk=overloaded_conf["perf_eta_max_risk"], perf_eta_step=overloaded_conf["perf_eta_step"], + use_cross_validation=overloaded_conf["use_cross_validation"], ) overloaded_conf["loader"] = loader overloaded_conf["loader_conf"] = loader_conf @@ -237,6 +242,7 @@ def check_conf(self, conf): "perf_eta_max_risk": float, "perf_eta_step": float, "perf_ambiguity_threshold": float, + "use_cross_validation": bool, "use_global_disp_range": bool, "local_disp_grid_step": int, "disp_range_propagation_filter_size": And( diff --git a/cars/applications/dense_matching/loaders/config_census_sgm.json b/cars/applications/dense_matching/loaders/config_census_sgm.json index f9ac772a..e5bf070b 100644 --- a/cars/applications/dense_matching/loaders/config_census_sgm.json +++ b/cars/applications/dense_matching/loaders/config_census_sgm.json @@ -26,10 +26,6 @@ "filter": { "filter_method": "median", "filter_size": 3 - }, - "validation": { - "validation_method": "cross_checking_accurate", - "cross_checking_threshold": 1.0 } } } \ No newline at end of file diff --git a/cars/applications/dense_matching/loaders/config_mccnn.json b/cars/applications/dense_matching/loaders/config_mccnn.json index 14a8605a..6ab394a5 100644 --- a/cars/applications/dense_matching/loaders/config_mccnn.json +++ b/cars/applications/dense_matching/loaders/config_mccnn.json @@ -23,10 +23,6 @@ "filter" : { "filter_method": "median", "filter_size": 3 - }, - "validation" : { - "validation_method": "cross_checking_accurate", - "cross_checking_threshold": 1 } } } \ No newline at end of file diff --git a/cars/applications/dense_matching/loaders/pandora_loader.py b/cars/applications/dense_matching/loaders/pandora_loader.py index 2e3442b9..09442bf6 100644 --- 
a/cars/applications/dense_matching/loaders/pandora_loader.py +++ b/cars/applications/dense_matching/loaders/pandora_loader.py @@ -58,6 +58,7 @@ def __init__( # noqa: C901 perf_eta_max_ambiguity=0.99, perf_eta_max_risk=0.25, perf_eta_step=0.04, + use_cross_validation=False, ): """ Init function of PandoraLoader @@ -70,8 +71,11 @@ def __init__( # noqa: C901 :type conf: dict :param method_name: name of method to use :param performance_map_conf: true if generate performance maps + :param use_cross_validation: true to add crossvalidation """ + if method_name is None: + method_name = "census_sgm" self.pandora_config = None @@ -141,6 +145,13 @@ def __init__( # noqa: C901 "confidence_method": "interval_bounds", } } + # Cross validation + cross_validation_conf = { + "validation": { + "validation_method": "cross_checking_accurate", + "cross_checking_threshold": 1.0, + } + } confidences = {} if generate_performance_map: @@ -175,6 +186,10 @@ def __init__( # noqa: C901 conf["pipeline"], confidences ) + # update with cross validation + if use_cross_validation and "validation" not in conf["pipeline"]: + conf["pipeline"].update(cross_validation_conf) + if generate_confidence_intervals: # To ensure the consistency between the disparity map # and the intervals, the median filter for intervals diff --git a/docs/source/usage.rst b/docs/source/usage.rst index f188ea30..d7313419 100644 --- a/docs/source/usage.rst +++ b/docs/source/usage.rst @@ -1056,6 +1056,12 @@ The structure follows this organisation: - should be > 0 - 300 - No + * - use_cross_validation + - Add cross validation step + - bool + - + - false + - No See `Pandora documentation `_ for more information. diff --git a/tests/applications/dense_matching/test_pandora_loader.py b/tests/applications/dense_matching/test_pandora_loader.py index 4037a776..b5a0e9b2 100644 --- a/tests/applications/dense_matching/test_pandora_loader.py +++ b/tests/applications/dense_matching/test_pandora_loader.py @@ -25,6 +25,8 @@ # Standard imports # Third party imports +import copy + import pytest # CARS imports @@ -95,6 +97,67 @@ def test_configure_pandora_config(): assert corr_config["pipeline"]["optimization"]["penalty"]["P2"] == 24 +@pytest.mark.unit_tests +def test_configure_cross_validation(): + """ + Test configure pandora correlator cross validation + """ + + pandora_config = { + "input": {"nodata_left": "NaN", "nodata_right": "NaN"}, + "pipeline": { + "right_disp_map": {"method": "accurate"}, + "matching_cost": { + "matching_cost_method": "census", + "window_size": 5, + "subpix": 1, + }, + "optimization": { + "optimization_method": "sgm", + "penalty": { + "P1": 8, + "P2": 24, + "p2_method": "constant", + "penalty_method": "sgm_penalty", + }, + "overcounting": False, + "min_cost_paths": False, + }, + "disparity": { + "disparity_method": "wta", + "invalid_disparity": "NaN", + }, + "refinement": {"refinement_method": "vfit"}, + "filter": {"filter_method": "median", "filter_size": 3}, + }, + } + + # test 1, validation as input already + conf_with_validation = copy.deepcopy(pandora_config) + conf_with_validation["pipeline"].update( + {"validation": {"validation_method": "cross_checking"}} + ) + pandora_loader = PandoraLoader( + conf=conf_with_validation, use_cross_validation=False + ) + corr_config = pandora_loader.get_conf() + assert "validation" in corr_config["pipeline"] + + # test 2: no validation as input, add it + pandora_loader = PandoraLoader( + conf=copy.deepcopy(pandora_config), use_cross_validation=True + ) + corr_config = pandora_loader.get_conf() + assert 
"validation" in corr_config["pipeline"] + + # test 3: no validation as input, do not add it + pandora_loader = PandoraLoader( + conf=copy.deepcopy(pandora_config), use_cross_validation=False + ) + corr_config = pandora_loader.get_conf() + assert "validation" not in corr_config["pipeline"] + + @pytest.mark.unit_tests def test_overload_pandora_conf_with_confidence(): """ diff --git a/tests/test_end2end.py b/tests/test_end2end.py index d66841ea..01a83115 100644 --- a/tests/test_end2end.py +++ b/tests/test_end2end.py @@ -93,6 +93,7 @@ def test_end2end_gizeh_rectangle_epi_image_performance_map(): "grid_generation": {"method": "epipolar", "epi_step": 30}, "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "use_global_disp_range": True, }, "point_cloud_rasterization": { @@ -632,6 +633,7 @@ def test_end2end_ventoux_unique(): }, "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "use_global_disp_range": False, "loader_conf": { "input": {}, @@ -1029,6 +1031,7 @@ def test_end2end_ventoux_unique(): dense_dsm_applications = { "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "use_global_disp_range": False, }, "point_cloud_outliers_removing.1": { @@ -1102,6 +1105,12 @@ def test_end2end_ventoux_unique_split_epsg_4326(): "max_ram_per_worker": 1000, }, ) + input_config_pc["applications"] = { + "dense_matching": { + "method": "census_sgm", + "use_cross_validation": True, + }, + } pc_pipeline = sensor_to_dense_dsm.SensorToDenseDsmPipeline( input_config_pc ) @@ -1317,6 +1326,7 @@ def test_end2end_ventoux_unique_split(): }, "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "use_global_disp_range": False, "save_intermediate_data": True, "generate_confidence_intervals": False, @@ -2222,6 +2232,7 @@ def test_end2end_use_epipolar_a_priori(): }, "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "use_global_disp_range": False, }, } @@ -2442,6 +2453,10 @@ def test_end2end_ventoux_full_output_no_elevation(): "disparity_margin": 0.25, "save_intermediate_data": True, }, + "dense_matching": { + "method": "census_sgm", + "use_cross_validation": True, + }, } advanced_config = {"save_intermediate_data": True} @@ -2840,6 +2855,7 @@ def test_end2end_ventoux_with_color(): }, "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "loader": "pandora", "save_intermediate_data": True, "use_global_disp_range": False, @@ -3099,6 +3115,7 @@ def test_end2end_ventoux_with_classif(): }, "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "loader": "pandora", "save_intermediate_data": True, "use_global_disp_range": False, @@ -3274,6 +3291,7 @@ def test_compute_dsm_with_roi_ventoux(): "resampling": {"method": "bicubic", "strip_height": 80}, "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "use_global_disp_range": False, }, "sparse_matching": { @@ -3435,6 +3453,7 @@ def test_compute_dsm_with_snap_to_img1(): }, "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "use_global_disp_range": False, }, "triangulation": { @@ -3554,6 +3573,7 @@ def test_end2end_quality_stats(): }, "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "use_global_disp_range": False, }, "point_cloud_outliers_removing.1": { @@ -3847,6 +3867,7 @@ def test_end2end_ventoux_egm96_geoid(): }, "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "use_global_disp_range": False, }, "triangulation": {"method": 
"line_of_sight_intersection"}, @@ -3979,6 +4000,7 @@ def test_end2end_ventoux_egm96_geoid(): }, "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "use_global_disp_range": False, }, "triangulation": {"method": "line_of_sight_intersection"}, @@ -4069,6 +4091,7 @@ def test_end2end_ventoux_egm96_geoid(): }, "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "use_global_disp_range": False, }, "triangulation": {"method": "line_of_sight_intersection"}, @@ -4212,6 +4235,7 @@ def test_end2end_paca_with_mask(): }, "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "use_global_disp_range": False, }, "dense_matches_filling.2": { @@ -4323,6 +4347,7 @@ def test_end2end_disparity_filling(): dense_dsm_applications = { "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "min_epi_tile_size": 100, "save_intermediate_data": True, "use_global_disp_range": False, @@ -4455,6 +4480,7 @@ def test_end2end_disparity_filling_with_zeros(): dense_dsm_applications = { "dense_matching": { "method": "census_sgm", + "use_cross_validation": True, "save_intermediate_data": True, "use_global_disp_range": True, }, From b08336805d7a64a6a33c944f87dbd5bea1229302 Mon Sep 17 00:00:00 2001 From: steuxyo Date: Thu, 10 Oct 2024 17:09:18 +0200 Subject: [PATCH 09/13] feat: use only one dockerfile, for production and development --- Dockerfile | 19 ++++++++---- Dockerfile.local | 38 ------------------------ Makefile | 10 +++++++ docs/source/contributing_the_project.rst | 4 +-- 4 files changed, 26 insertions(+), 45 deletions(-) delete mode 100644 Dockerfile.local diff --git a/Dockerfile b/Dockerfile index b797acc4..76110150 100644 --- a/Dockerfile +++ b/Dockerfile @@ -16,16 +16,25 @@ RUN apt-get update && apt-get install --no-install-recommends -y --quiet \ && rm -rf /var/lib/apt/lists/* # copy and install cars with mccnn plugin capabilities installed (but not configured by default) -WORKDIR /cars -COPY . /cars/ +WORKDIR /app -# Install fiona and rasterio with gdal / proj from otb -RUN make clean && make install-gdal + +# Create a virtual environment +RUN python3 -m venv /app/venv # source venv/bin/activate in docker mode -ENV VIRTUAL_ENV='/cars/venv' +ENV VIRTUAL_ENV='/app/venv' ENV PATH="$VIRTUAL_ENV/bin:$PATH" + +# Copy only necessary files for installation +COPY . /app/cars + +# Install fiona and rasterio with gdal / proj from otb +WORKDIR /app/cars +RUN CARS_VENV=$VIRTUAL_ENV make clean && CARS_VENV=$VIRTUAL_ENV make install-gdal-dev + + # hadolint ignore=DL3013,SC2102 RUN python -m pip cache purge diff --git a/Dockerfile.local b/Dockerfile.local deleted file mode 100644 index 97bcfd5c..00000000 --- a/Dockerfile.local +++ /dev/null @@ -1,38 +0,0 @@ -# Use the same base image -# hadolint ignore=DL3007 -FROM orfeotoolbox/otb:latest -LABEL maintainer="CNES" - -# Install dependencies -# hadolint ignore=DL3008 -RUN apt-get update && apt-get install --no-install-recommends -y --quiet \ - git \ - libpython3.8 \ - python3.8-dev \ - python3.8-venv \ - python3.8 \ - python3-pip \ - python3-numpy \ - python3-virtualenv \ - make \ - && rm -rf /var/lib/apt/lists/* - -# Set up working directory -WORKDIR /app - -# Create a virtual environment -RUN python3 -m venv /app/venv - -# Activate the virtual environment -ENV VIRTUAL_ENV='/app/venv' -ENV PATH="$VIRTUAL_ENV/bin:$PATH" - -# Copy only necessary files for installation -COPY . 
/app/cars - -WORKDIR /app/cars -# Install CARS using make -RUN make clean && make install-dev - -# launch cars -CMD ["/bin/bash"] diff --git a/Makefile b/Makefile index 4b757ffd..3fb68874 100644 --- a/Makefile +++ b/Makefile @@ -86,6 +86,15 @@ install-gdal: install-deps-gdal ## install cars (not editable) with dev, docs, n @echo "CARS ${CARS_VERSION} installed in dev mode in virtualenv ${CARS_VENV}" @echo "CARS venv usage: source ${CARS_VENV}/bin/activate; cars -h" +.PHONY: install-gdal-dev +install-gdal-dev: install-deps-gdal ## install cars dev (editable) with dev, docs, notebook dependencies + @test -f ${CARS_VENV}/bin/cars || ${CARS_VENV}/bin/pip install .[dev,docs,notebook,pandora_mccnn] + @test -f .git/hooks/pre-commit || echo " Install pre-commit hook" + @test -f .git/hooks/pre-commit || ${CARS_VENV}/bin/pre-commit install -t pre-commit + @test -f .git/hooks/pre-push || ${CARS_VENV}/bin/pre-commit install -t pre-push + @echo "CARS ${CARS_VERSION} installed in dev mode in virtualenv ${CARS_VENV}" + @echo "CARS venv usage: source ${CARS_VENV}/bin/activate; cars -h" + .PHONY: install-pandora-mccnn install-pandora-mccnn: install-deps ## install cars (not editable) with dev, docs, notebook dependencies @test -f ${CARS_VENV}/bin/cars || ${CARS_VENV}/bin/pip install .[dev,docs,notebook,pandora_mccnn] @@ -226,6 +235,7 @@ clean: clean-venv clean-build clean-precommit clean-pyc clean-test clean-docs cl .PHONY: clean-venv clean-venv: @echo "+ $@" + @echo ${CARS_VENV} @rm -rf ${CARS_VENV} .PHONY: clean-build diff --git a/docs/source/contributing_the_project.rst b/docs/source/contributing_the_project.rst index f951c1b5..0fdfd2c2 100644 --- a/docs/source/contributing_the_project.rst +++ b/docs/source/contributing_the_project.rst @@ -54,8 +54,8 @@ To setup a development environment with docker, run the following command: .. code-block:: console - docker build -t cars-dev -f Dockerfile.local . - docker run -it -v "$(pwd)":/app/cars -w /app/cars cars-dev /bin/bash + docker build -t cars-dev -f Dockerfile . + docker run -it -v "$(pwd)":/app/cars --entrypoint=/bin/bash cars-dev You're ready to use CARS, all files in the current directory are mounted in the container. From d3fd1cfed52f7c911f1b0a97cff7e0061ec38812 Mon Sep 17 00:00:00 2001 From: steuxyo Date: Thu, 10 Oct 2024 17:42:49 +0200 Subject: [PATCH 10/13] doc: update for master --- docs/source/exploring_the_field/3d_products.rst | 2 +- docs/source/getting_started.rst | 10 +++++----- docs/source/howto.rst | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/source/exploring_the_field/3d_products.rst b/docs/source/exploring_the_field/3d_products.rst index a0ef296f..f0951874 100644 --- a/docs/source/exploring_the_field/3d_products.rst +++ b/docs/source/exploring_the_field/3d_products.rst @@ -19,5 +19,5 @@ These two products can be visualized with `QGIS ` +--------------+-------------+-------------+-------------------+ | dsm.tif | color.tif | `QGIS`_ Mix | cloudcompare | +--------------+-------------+-------------+-------------------+ -| |dsm| | |color| | |dsmclr| | |pc| | +| |dsm| | |color| | |dsmclr| | |pc| | +--------------+-------------+-------------+-------------------+ diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst index 60c554bd..bf83a1b2 100644 --- a/docs/source/getting_started.rst +++ b/docs/source/getting_started.rst @@ -81,10 +81,10 @@ Open the ``dsm.tif`` DSM and ``color.tif`` color image in `QGIS`_ software. .. 
|dsmcolor| image:: images/dsm_clr.png :width: 100% -+--------------+-----------------+---------------+ -| dsm.tif | color.tif | `QGIS`_ Mix | -+--------------+-----------------+---------------+ -| |dsm| | |color| | |dsmcolor| | -+--------------+-----------------+---------------+ ++--------------+-------------+-------------+ +| dsm.tif | color.tif | `QGIS`_ Mix | ++--------------+-------------+-------------+ +| |dsm| | |color| | |dsmcolor| | ++--------------+-------------+-------------+ .. _`QGIS`: https://www.qgis.org/ diff --git a/docs/source/howto.rst b/docs/source/howto.rst index 82fbe308..c81ca083 100644 --- a/docs/source/howto.rst +++ b/docs/source/howto.rst @@ -127,7 +127,7 @@ Convert RGB image to panchromatic image CARS only uses panchromatic images for processing. -If you have a multi-spectral image, you'll need to convert it to a panchromatic image before using it with CARS. +If you have a multi-spectral image, you'll need to extract a single band to use, or convert it to a panchromatic image before using it with CARS. The line below use `"Grayscale Using Luminance" `_ expression with `OTB BandMath `_ From 0fd071c43925c849056e919a3caab4cde31fa61b Mon Sep 17 00:00:00 2001 From: David Youssefi Date: Thu, 17 Oct 2024 12:05:31 +0000 Subject: [PATCH 11/13] fix: Malformed table + blank line --- docs/source/exploring_the_field/3d_products.rst | 2 +- .../exploring_the_field/masks_and_classifications_usage.rst | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/source/exploring_the_field/3d_products.rst b/docs/source/exploring_the_field/3d_products.rst index f0951874..9e4af312 100644 --- a/docs/source/exploring_the_field/3d_products.rst +++ b/docs/source/exploring_the_field/3d_products.rst @@ -19,5 +19,5 @@ These two products can be visualized with `QGIS ` +--------------+-------------+-------------+-------------------+ | dsm.tif | color.tif | `QGIS`_ Mix | cloudcompare | +--------------+-------------+-------------+-------------------+ -| |dsm| | |color| | |dsmclr| | |pc| | +| |dsm| | |color| | |dsmclr| | |pc| | +--------------+-------------+-------------+-------------------+ diff --git a/docs/source/exploring_the_field/masks_and_classifications_usage.rst b/docs/source/exploring_the_field/masks_and_classifications_usage.rst index a82f6bfd..86849df1 100644 --- a/docs/source/exploring_the_field/masks_and_classifications_usage.rst +++ b/docs/source/exploring_the_field/masks_and_classifications_usage.rst @@ -5,6 +5,7 @@ Mask and Classification Usage | For this reason, it is possible to mask out areas or apply ad hoc processing to aid the matching stage (see :ref:`mask_and_classification_usage`). .. _mask_and_classification_usage: + Masks ----- From 9e153d32ca230f25e7906aae01e0cf895a3919f1 Mon Sep 17 00:00:00 2001 From: David Youssefi Date: Thu, 17 Oct 2024 12:21:46 +0000 Subject: [PATCH 12/13] fix: duplicate label + Title underline too short --- .../exploring_the_field/masks_and_classifications_usage.rst | 4 +--- docs/source/howto.rst | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/docs/source/exploring_the_field/masks_and_classifications_usage.rst b/docs/source/exploring_the_field/masks_and_classifications_usage.rst index 86849df1..da8f87bc 100644 --- a/docs/source/exploring_the_field/masks_and_classifications_usage.rst +++ b/docs/source/exploring_the_field/masks_and_classifications_usage.rst @@ -2,9 +2,7 @@ Mask and Classification Usage ============================= | Photogrammetry is a technique that cannot reproduce altitude on water. 
This technique also has difficulties for moving elements or in shaded areas. -| For this reason, it is possible to mask out areas or apply ad hoc processing to aid the matching stage (see :ref:`mask_and_classification_usage`). - -.. _mask_and_classification_usage: +| For this reason, it is possible to mask out areas or apply ad hoc processing to aid the matching stage. Masks ----- diff --git a/docs/source/howto.rst b/docs/source/howto.rst index c81ca083..fdc85bbf 100644 --- a/docs/source/howto.rst +++ b/docs/source/howto.rst @@ -123,7 +123,7 @@ It can be recommended to apply a P+XS pansharpening with `OTB`_. .. _`OTB`: https://www.orfeo-toolbox.org/CookBook-8.0/C++/UserGuide.html#image-data-representation Convert RGB image to panchromatic image --------------------------------------- +--------------------------------------- CARS only uses panchromatic images for processing. From bf0da20f7076b0490f33342fe555a812a8071bfc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?C=C3=A9dric=20Traizet?= Date: Thu, 17 Oct 2024 17:26:50 +0200 Subject: [PATCH 13/13] doc: remove section about otbcli_DownloadSRTMTiles which have been removed in OTB 8.0 because SRTM download requires authentication --- .../source/exploring_the_field/from_satellite_images_to_dsm.rst | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/source/exploring_the_field/from_satellite_images_to_dsm.rst b/docs/source/exploring_the_field/from_satellite_images_to_dsm.rst index 0ca349d5..68bb589d 100644 --- a/docs/source/exploring_the_field/from_satellite_images_to_dsm.rst +++ b/docs/source/exploring_the_field/from_satellite_images_to_dsm.rst @@ -115,8 +115,6 @@ Initial Input Digital Elevation Model For now, CARS uses an initial input Digital Elevation Model (:term:`DEM`) which is integrated in the stereo-rectification to minimize the disparity intervals to explore. Any geotiff file can be used. -For example, the `SRTM `_ data corresponding to the processed zone can be used through `otbcli_DownloadSRTMTiles `_. - The parameter is ``initial_elevation`` as seen in :ref:`configuration`.
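
Usage note: the ``use_cross_validation`` option added in patch 08 is exposed to users through the ``dense_matching`` application configuration (see the ``usage.rst`` change; it defaults to ``false``). A minimal, hypothetical configuration fragment enabling it might look like the sketch below; only the ``applications`` block is shown, and the surrounding input/output sections of a full CARS configuration are assumed and omitted.

.. code-block:: json

    {
        "applications": {
            "dense_matching": {
                "method": "census_sgm",
                "use_cross_validation": true
            }
        }
    }

When the flag is set, ``PandoraLoader`` injects a ``validation`` step (``cross_checking_accurate`` with a ``cross_checking_threshold`` of 1.0) into the Pandora pipeline, unless the loader configuration passed in already defines its own ``validation`` entry.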