diff --git a/o/ODM/ODM-2.8.7/.dockerignore b/o/ODM/ODM-2.8.7/.dockerignore
deleted file mode 100644
index b4797a80..00000000
--- a/o/ODM/ODM-2.8.7/.dockerignore
+++ /dev/null
@@ -1,26 +0,0 @@
-Dockerfile
-tests/test_data
-SuperBuild/build
-SuperBuild/download
-SuperBuild/install
-SuperBuild/src
-build
-opensfm
-odm_orthophoto
-odm_texturing
-odm_meshing
-odm_georeferencing
-images_resize
-.git
-
-/contrib
-/docs
-/hooks
-/img
-/license
-/tests
-tests.sh
-settings.yml
-code_of_conduct.md
-configure_18_04.sh
-index.html
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/.github/workflows/publish-docker-and-wsl.yaml b/o/ODM/ODM-2.8.7/.github/workflows/publish-docker-and-wsl.yaml
deleted file mode 100644
index 112d3f7a..00000000
--- a/o/ODM/ODM-2.8.7/.github/workflows/publish-docker-and-wsl.yaml
+++ /dev/null
@@ -1,97 +0,0 @@
-name: Publish Docker and WSL Images
-
-on:
- push:
- branches:
- - master
- tags:
- - v*
-
-jobs:
- build:
- runs-on: self-hosted
- timeout-minutes: 2880
- steps:
- - name: Checkout
- uses: actions/checkout@v2
- - name: Set Swap Space
- uses: pierotofy/set-swap-space@master
- with:
- swap-size-gb: 12
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v1
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v1
- with:
- config-inline: |
- [worker.oci]
- max-parallelism = 1
- - name: Login to DockerHub
- uses: docker/login-action@v1
- with:
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
- # Use the repository information of the checked-out code to format docker tags
- - name: Docker meta
- id: docker_meta
- uses: crazy-max/ghaction-docker-meta@v1
- with:
- images: opendronemap/odm
- tag-semver: |
- {{version}}
- - name: Build and push Docker image
- id: docker_build
- uses: docker/build-push-action@v2
- with:
- file: ./portable.Dockerfile
- platforms: linux/amd64,linux/arm64
- push: true
- tags: |
- ${{ steps.docker_meta.outputs.tags }}
- opendronemap/odm:latest
- - name: Export WSL image
- id: wsl_export
- run: |
- docker pull opendronemap/odm
-        docker export $(docker create opendronemap/odm) --output odm-wsl-rootfs-amd64.tar
-        gzip odm-wsl-rootfs-amd64.tar
- echo ::set-output name=amd64-rootfs::"odm-wsl-rootfs-amd64.tar.gz"
- # Convert tag into a GitHub Release if we're building a tag
- - name: Create Release
-      if: startsWith(github.ref, 'refs/tags/')
- id: create_release
- uses: actions/create-release@v1
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
- tag_name: ${{ github.ref }}
- release_name: Release ${{ github.ref }}
- draft: false
- prerelease: false
- # Upload the WSL image to the new Release if we're building a tag
- - name: Upload amd64 Release Asset
-      if: startsWith(github.ref, 'refs/tags/')
- id: upload-amd64-wsl-rootfs
- uses: actions/upload-release-asset@v1
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
-        upload_url: ${{ steps.create_release.outputs.upload_url }} # This pulls from the CREATE RELEASE step above, referencing its ID to get its outputs object, which includes an `upload_url`. See this blog post for more info: https://jasonet.co/posts/new-features-of-github-actions/#passing-data-to-future-steps
- asset_path: ./${{ steps.wsl_export.outputs.amd64-rootfs }}
- asset_name: ${{ steps.wsl_export.outputs.amd64-rootfs }}
- asset_content_type: application/gzip
- # Always archive the WSL rootfs
- - name: Upload amd64 Artifact
- uses: actions/upload-artifact@v2
- with:
- name: wsl-rootfs
- path: ${{ steps.wsl_export.outputs.amd64-rootfs }}
- - name: Docker image digest and WSL rootfs download URL
- run: |
- echo "Docker image digest: ${{ steps.docker_build.outputs.digest }}"
- echo "WSL AMD64 rootfs URL: ${{ steps.upload-amd64-wsl-rootfs.browser_download_url }}"
- # Trigger NodeODM build
- - name: Dispatch NodeODM Build Event
- id: nodeodm_dispatch
- run: |
- curl -X POST -u "${{secrets.PAT_USERNAME}}:${{secrets.PAT_TOKEN}}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" https://api.github.com/repos/OpenDroneMap/NodeODM/actions/workflows/publish-docker.yaml/dispatches --data '{"ref": "master"}'
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/.github/workflows/publish-docker-gpu.yaml b/o/ODM/ODM-2.8.7/.github/workflows/publish-docker-gpu.yaml
deleted file mode 100644
index 3ef72f89..00000000
--- a/o/ODM/ODM-2.8.7/.github/workflows/publish-docker-gpu.yaml
+++ /dev/null
@@ -1,41 +0,0 @@
-name: Publish Docker GPU Images
-
-on:
- push:
- branches:
- - master
- tags:
- - v*
-
-jobs:
- build:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v2
- - name: Set Swap Space
- uses: pierotofy/set-swap-space@master
- with:
- swap-size-gb: 12
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v1
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v1
- - name: Login to DockerHub
- uses: docker/login-action@v1
- with:
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
- - name: Build and push Docker image
- id: docker_build
- uses: docker/build-push-action@v2
- with:
- file: ./gpu.Dockerfile
- platforms: linux/amd64
- push: true
- tags: opendronemap/odm:gpu
- # Trigger NodeODM build
- - name: Dispatch NodeODM Build Event
- id: nodeodm_dispatch
- run: |
- curl -X POST -u "${{secrets.PAT_USERNAME}}:${{secrets.PAT_TOKEN}}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" https://api.github.com/repos/OpenDroneMap/NodeODM/actions/workflows/publish-docker-gpu.yaml/dispatches --data '{"ref": "master"}'
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/.github/workflows/publish-snap.yml b/o/ODM/ODM-2.8.7/.github/workflows/publish-snap.yml
deleted file mode 100644
index cd923a81..00000000
--- a/o/ODM/ODM-2.8.7/.github/workflows/publish-snap.yml
+++ /dev/null
@@ -1,56 +0,0 @@
-# name: Publish Snap
-
-# on:
-# push:
-# branches:
-# - master
-# tags:
-# - v**
-
-# jobs:
-# build-and-release:
-# runs-on: ubuntu-latest
-# strategy:
-# matrix:
-# architecture:
-# - amd64
-# steps:
-# - name: Checkout
-# uses: actions/checkout@v2
-# - name: Set Swap Space
-# uses: pierotofy/set-swap-space@master
-# with:
-# swap-size-gb: 12
-# - name: Build
-# id: build
-# uses: diddlesnaps/snapcraft-multiarch-action@v1
-# with:
-# architecture: ${{ matrix.architecture }}
-# - name: Review
-# uses: diddlesnaps/snapcraft-review-tools-action@v1
-# with:
-# snap: ${{ steps.build.outputs.snap }}
-# isClassic: 'false'
-# - name: Publish unstable builds to Edge
-# if: github.ref == 'refs/heads/master'
-# uses: snapcore/action-publish@v1
-# with:
-# store_login: ${{ secrets.STORE_LOGIN }}
-# snap: ${{ steps.build.outputs.snap }}
-# release: edge
-# - name: Publish tagged prerelease builds to Beta
-# # These are identified by having a hyphen in the tag name, e.g.: v1.0.0-beta1
-# if: startsWith(github.ref, 'refs/tags/v') && contains(github.ref, '-')
-# uses: snapcore/action-publish@v1
-# with:
-# store_login: ${{ secrets.STORE_LOGIN }}
-# snap: ${{ steps.build.outputs.snap }}
-# release: beta
-# - name: Publish tagged stable or release-candidate builds to Candidate
-# # These are identified by NOT having a hyphen in the tag name, OR having "-RC" or "-rc" in the tag name.
-# if: startsWith(github.ref, 'refs/tags/v1') && ( ( ! contains(github.ref, '-') ) || contains(github.ref, '-RC') || contains(github.ref, '-rc') )
-# uses: snapcore/action-publish@v1
-# with:
-# store_login: ${{ secrets.STORE_LOGIN }}
-# snap: ${{ steps.build.outputs.snap }}
-# release: candidate
diff --git a/o/ODM/ODM-2.8.7/.github/workflows/publish-windows.yml b/o/ODM/ODM-2.8.7/.github/workflows/publish-windows.yml
deleted file mode 100644
index 9e3f00cb..00000000
--- a/o/ODM/ODM-2.8.7/.github/workflows/publish-windows.yml
+++ /dev/null
@@ -1,56 +0,0 @@
-name: Publish Windows Setup
-
-on:
- push:
- branches:
- - master
- tags:
- - v*
-
-jobs:
- build:
- runs-on: windows-2019
- steps:
- - name: Checkout
- uses: actions/checkout@v2
- - name: Setup Python
- uses: actions/setup-python@v2
- with:
- python-version: '3.8.1'
- architecture: 'x64'
- - uses: Jimver/cuda-toolkit@v0.2.4
- id: cuda-toolkit
- with:
- cuda: '11.4.0'
- - name: Extract code signing cert
- id: code_sign
- uses: timheuer/base64-to-file@v1
- with:
- fileName: 'comodo.pfx'
- encodedString: ${{ secrets.CODE_SIGNING_CERT }}
- - name: Install venv
- run: |
- python -m pip install virtualenv
- - name: Build sources
- run: |
- python configure.py build
- - name: Create setup
- env:
- CODE_SIGN_CERT_PATH: ${{ steps.code_sign.outputs.filePath }}
- run: |
- python configure.py dist --code-sign-cert-path $env:CODE_SIGN_CERT_PATH
- - name: Upload Setup File
- uses: actions/upload-artifact@v2
- with:
- name: Setup
- path: dist\*.exe
- - name: Upload Setup to Release
- uses: svenstaro/upload-release-action@v2
- if: startsWith(github.ref, 'refs/tags/')
- with:
- repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: dist\*.exe
- file_glob: true
- tag: ${{ github.ref }}
- overwrite: true
-
diff --git a/o/ODM/ODM-2.8.7/.github/workflows/test-build-prs.yaml b/o/ODM/ODM-2.8.7/.github/workflows/test-build-prs.yaml
deleted file mode 100644
index 4caa9bf6..00000000
--- a/o/ODM/ODM-2.8.7/.github/workflows/test-build-prs.yaml
+++ /dev/null
@@ -1,70 +0,0 @@
-name: Build PRs
-
-on:
- pull_request:
-
-jobs:
- docker:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v2
- - name: Set Swap Space
- uses: pierotofy/set-swap-space@master
- with:
- swap-size-gb: 12
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v1
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v1
- - name: Build
- uses: docker/build-push-action@v2
- with:
- file: ./portable.Dockerfile
- platforms: linux/amd64
- push: false
-
- # snapcraft:
- # runs-on: ubuntu-latest
- # strategy:
- # matrix:
- # architecture:
- # - amd64
- # steps:
- # - name: Checkout
- # uses: actions/checkout@v2
- # - name: Set Swap Space
- # uses: pierotofy/set-swap-space@master
- # with:
- # swap-size-gb: 12
- # - name: Build
- # id: build
- # uses: diddlesnaps/snapcraft-multiarch-action@v1
- # with:
- # architecture: ${{ matrix.architecture }}
- # - name: Review
- # uses: diddlesnaps/snapcraft-review-tools-action@v1
- # with:
- # snap: ${{ steps.build.outputs.snap }}
- # isClassic: 'false'
-
- windows:
- runs-on: windows-2019
- steps:
- - name: Checkout
- uses: actions/checkout@v2
- - name: Setup Python
- uses: actions/setup-python@v2
- with:
- python-version: '3.8.1'
- architecture: 'x64'
- - name: Setup Visual C++
- uses: ilammy/msvc-dev-cmd@v1
- with:
- arch: x64
- - name: Install venv
- run: |
- python -m pip install virtualenv
- - name: Build sources
- run: |
- python configure.py build
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/.gitignore b/o/ODM/ODM-2.8.7/.gitignore
deleted file mode 100644
index 7f4f29ac..00000000
--- a/o/ODM/ODM-2.8.7/.gitignore
+++ /dev/null
@@ -1,29 +0,0 @@
-*~
-bin/
-include/
-lib/
-logs/
-share/
-
-
-src/
-
-!modules/*
-
-download/
-
-SuperBuild/build/
-SuperBuild/install/
-build/
-
-cmvs.tar.gz
-parallel.tar.bz2
-LAStools.zip
-pcl.tar.gz
-ceres-solver.tar.gz
-*.pyc
-opencv.zip
-settings.yaml
-.setupdevenv
-__pycache__
-*.snap
diff --git a/o/ODM/ODM-2.8.7/.gitmodules b/o/ODM/ODM-2.8.7/.gitmodules
deleted file mode 100644
index e69de29b..00000000
diff --git a/o/ODM/ODM-2.8.7/CNAME b/o/ODM/ODM-2.8.7/CNAME
deleted file mode 100644
index ed563e1e..00000000
--- a/o/ODM/ODM-2.8.7/CNAME
+++ /dev/null
@@ -1 +0,0 @@
-opendronemap.org
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/CONTRIBUTING.md b/o/ODM/ODM-2.8.7/CONTRIBUTING.md
deleted file mode 100644
index c16f4460..00000000
--- a/o/ODM/ODM-2.8.7/CONTRIBUTING.md
+++ /dev/null
@@ -1 +0,0 @@
-See https://github.com/OpenDroneMap/documents/blob/master/CONTRIBUTING.md
diff --git a/o/ODM/ODM-2.8.7/Dockerfile b/o/ODM/ODM-2.8.7/Dockerfile
deleted file mode 100644
index b928dbd0..00000000
--- a/o/ODM/ODM-2.8.7/Dockerfile
+++ /dev/null
@@ -1,48 +0,0 @@
-FROM ubuntu:21.04 AS builder
-
-# Env variables
-ENV DEBIAN_FRONTEND=noninteractive \
- PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python3.9/dist-packages:/code/SuperBuild/install/lib/python3.8/dist-packages:/code/SuperBuild/install/bin/opensfm" \
- LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/code/SuperBuild/install/lib"
-
-# Prepare directories
-WORKDIR /code
-
-# Copy everything
-COPY . ./
-
-# Run the build
-RUN bash configure.sh install
-
-# Clean Superbuild
-RUN bash configure.sh clean
-
-### END Builder
-
-### Use a second image for the final asset to reduce the number and
-# size of the layers.
-FROM ubuntu:21.04
-
-# Env variables
-ENV DEBIAN_FRONTEND=noninteractive \
- PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python3.9:/code/SuperBuild/install/lib/python3.8/dist-packages:/code/SuperBuild/install/bin/opensfm" \
- LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/code/SuperBuild/install/lib"
-
-WORKDIR /code
-
-# Copy everything we built from the builder
-COPY --from=builder /code /code
-
-# Copy the Python libraries installed via pip from the builder
-COPY --from=builder /usr/local /usr/local
-
-# Install shared libraries that we depend on via APT, but *not*
-# the -dev packages to save space!
-# Also run a smoke test on ODM and OpenSfM
-RUN bash configure.sh installruntimedepsonly \
- && apt-get clean \
- && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \
- && bash run.sh --help \
- && bash -c "eval $(python3 /code/opendm/context.py) && python3 -c 'from opensfm import io, pymap'"
-# Entry point
-ENTRYPOINT ["python3", "/code/run.py"]
diff --git a/o/ODM/ODM-2.8.7/LICENSE b/o/ODM/ODM-2.8.7/LICENSE
deleted file mode 100644
index 29ebfa54..00000000
--- a/o/ODM/ODM-2.8.7/LICENSE
+++ /dev/null
@@ -1,661 +0,0 @@
- GNU AFFERO GENERAL PUBLIC LICENSE
- Version 3, 19 November 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The GNU Affero General Public License is a free, copyleft license for
-software and other kinds of works, specifically designed to ensure
-cooperation with the community in the case of network server software.
-
- The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works. By contrast,
-our General Public Licenses are intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
- Developers that use our General Public Licenses protect your rights
-with two steps: (1) assert copyright on the software, and (2) offer
-you this License which gives you legal permission to copy, distribute
-and/or modify the software.
-
- A secondary benefit of defending all users' freedom is that
-improvements made in alternate versions of the program, if they
-receive widespread use, become available for other developers to
-incorporate. Many developers of free software are heartened and
-encouraged by the resulting cooperation. However, in the case of
-software used on network servers, this result may fail to come about.
-The GNU General Public License permits making a modified version and
-letting the public access it on a server without ever releasing its
-source code to the public.
-
- The GNU Affero General Public License is designed specifically to
-ensure that, in such cases, the modified source code becomes available
-to the community. It requires the operator of a network server to
-provide the source code of the modified version running there to the
-users of that server. Therefore, public use of a modified version, on
-a publicly accessible server, gives the public access to the source
-code of the modified version.
-
- An older license, called the Affero General Public License and
-published by Affero, was designed to accomplish similar goals. This is
-a different license, not a version of the Affero GPL, but Affero has
-released a new version of the Affero GPL which permits relicensing under
-this license.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- TERMS AND CONDITIONS
-
- 0. Definitions.
-
- "This License" refers to version 3 of the GNU Affero General Public License.
-
- "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
- "The Program" refers to any copyrightable work licensed under this
-License. Each licensee is addressed as "you". "Licensees" and
-"recipients" may be individuals or organizations.
-
- To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy. The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
- A "covered work" means either the unmodified Program or a work based
-on the Program.
-
- To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy. Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
- To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies. Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
- An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License. If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
- 1. Source Code.
-
- The "source code" for a work means the preferred form of the work
-for making modifications to it. "Object code" means any non-source
-form of a work.
-
- A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
- The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form. A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
- The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities. However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work. For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
- The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
- The Corresponding Source for a work in source code form is that
-same work.
-
- 2. Basic Permissions.
-
- All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met. This License explicitly affirms your unlimited
-permission to run the unmodified Program. The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work. This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
- You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force. You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright. Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
- Conveying under any other circumstances is permitted solely under
-the conditions stated below. Sublicensing is not allowed; section 10
-makes it unnecessary.
-
- 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
- No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
- When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
- 4. Conveying Verbatim Copies.
-
- You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
- You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
- 5. Conveying Modified Source Versions.
-
- You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
- a) The work must carry prominent notices stating that you modified
- it, and giving a relevant date.
-
- b) The work must carry prominent notices stating that it is
- released under this License and any conditions added under section
- 7. This requirement modifies the requirement in section 4 to
- "keep intact all notices".
-
- c) You must license the entire work, as a whole, under this
- License to anyone who comes into possession of a copy. This
- License will therefore apply, along with any applicable section 7
- additional terms, to the whole of the work, and all its parts,
- regardless of how they are packaged. This License gives no
- permission to license the work in any other way, but it does not
- invalidate such permission if you have separately received it.
-
- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your
- work need not make them do so.
-
- A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
- 6. Conveying Non-Source Forms.
-
- You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
- a) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by the
- Corresponding Source fixed on a durable physical medium
- customarily used for software interchange.
-
- b) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by a
- written offer, valid for at least three years and valid for as
- long as you offer spare parts or customer support for that product
- model, to give anyone who possesses the object code either (1) a
- copy of the Corresponding Source for all the software in the
- product that is covered by this License, on a durable physical
- medium customarily used for software interchange, for a price no
- more than your reasonable cost of physically performing this
- conveying of source, or (2) access to copy the
- Corresponding Source from a network server at no charge.
-
- c) Convey individual copies of the object code with a copy of the
- written offer to provide the Corresponding Source. This
- alternative is allowed only occasionally and noncommercially, and
- only if you received the object code with such an offer, in accord
- with subsection 6b.
-
- d) Convey the object code by offering access from a designated
- place (gratis or for a charge), and offer equivalent access to the
- Corresponding Source in the same way through the same place at no
- further charge. You need not require recipients to copy the
- Corresponding Source along with the object code. If the place to
- copy the object code is a network server, the Corresponding Source
- may be on a different server (operated by you or a third party)
- that supports equivalent copying facilities, provided you maintain
- clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is
- available for as long as needed to satisfy these requirements.
-
- e) Convey the object code using peer-to-peer transmission, provided
- you inform other peers where the object code and Corresponding
- Source of the work are being offered to the general public at no
- charge under subsection 6d.
-
- A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
- A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling. In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage. For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product. A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
- "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source. The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
- If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information. But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
- The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed. Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
- Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
- 7. Additional Terms.
-
- "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law. If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
- When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it. (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.) You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
- Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
- a) Disclaiming warranty or limiting liability differently from the
- terms of sections 15 and 16 of this License; or
-
- b) Requiring preservation of specified reasonable legal notices or
- author attributions in that material or in the Appropriate Legal
- Notices displayed by works containing it; or
-
- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in
- reasonable ways as different from the original version; or
-
- d) Limiting the use for publicity purposes of names of licensors or
- authors of the material; or
-
- e) Declining to grant rights under trademark law for use of some
- trade names, trademarks, or service marks; or
-
- f) Requiring indemnification of licensors and authors of that
- material by anyone who conveys the material (or modified versions of
- it) with contractual assumptions of liability to the recipient, for
- any liability that these contractual assumptions directly impose on
- those licensors and authors.
-
- All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10. If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term. If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
- If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
- Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
- 8. Termination.
-
- You may not propagate or modify a covered work except as expressly
-provided under this License. Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
- However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
- Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
- Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License. If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
- 9. Acceptance Not Required for Having Copies.
-
- You are not required to accept this License in order to receive or
-run a copy of the Program. Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance. However,
-nothing other than this License grants you permission to propagate or
-modify any covered work. These actions infringe copyright if you do
-not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
- 10. Automatic Licensing of Downstream Recipients.
-
- Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License. You are not responsible
-for enforcing compliance by third parties with this License.
-
- An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
- You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License. For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
- 11. Patents.
-
- A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based. The
-work thus licensed is called the contributor's "contributor version".
-
- A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version. For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
-this License.
-
- Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
- In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement). To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
- If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients. "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
- If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
- A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License. You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
- Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
- 12. No Surrender of Others' Freedom.
-
- If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all. For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
-
- 13. Remote Network Interaction; Use with the GNU General Public License.
-
- Notwithstanding any other provision of this License, if you modify the
-Program, your modified version must prominently offer all users
-interacting with it remotely through a computer network (if your version
-supports such interaction) an opportunity to receive the Corresponding
-Source of your version by providing access to the Corresponding Source
-from a network server at no charge, through some standard or customary
-means of facilitating copying of software. This Corresponding Source
-shall include the Corresponding Source for any work covered by version 3
-of the GNU General Public License that is incorporated pursuant to the
-following paragraph.
-
- Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU General Public License into a single
-combined work, and to convey the resulting work. The terms of this
-License will continue to apply to the part which is the covered work,
-but the work with which it is combined will remain governed by version
-3 of the GNU General Public License.
-
- 14. Revised Versions of this License.
-
- The Free Software Foundation may publish revised and/or new versions of
-the GNU Affero General Public License from time to time. Such new versions
-will be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU Affero General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation. If the Program does not specify a version number of the
-GNU Affero General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
- If the Program specifies that a proxy can decide which future
-versions of the GNU Affero General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
-
- Later license versions may give you additional or different
-permissions. However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
-
- 15. Disclaimer of Warranty.
-
- THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
- 16. Limitation of Liability.
-
- IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
- 17. Interpretation of Sections 15 and 16.
-
- If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) <year> <name of author>
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU Affero General Public License as published
- by the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU Affero General Public License for more details.
-
- You should have received a copy of the GNU Affero General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
- If your software can interact with users remotely through a computer
-network, you should also make sure that it provides a way for users to
-get its source. For example, if your program is a web application, its
-interface could display a "Source" link that leads users to an archive
-of the code. There are many ways you could offer source, and different
-solutions will be better for different programs; see section 13 for the
-specific requirements.
-
- You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU AGPL, see
-<https://www.gnu.org/licenses/>.
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/README.md b/o/ODM/ODM-2.8.7/README.md
deleted file mode 100644
index 9093b07e..00000000
--- a/o/ODM/ODM-2.8.7/README.md
+++ /dev/null
@@ -1,293 +0,0 @@
-
-
-An open source command line toolkit for processing aerial drone imagery. ODM turns simple 2D images into:
-
-* Classified Point Clouds
-* 3D Textured Models
-* Georeferenced Orthorectified Imagery
-* Georeferenced Digital Elevation Models
-
-
-
-The application is available for Windows, Mac and Linux and it works from the command line, making it ideal for power users, scripts and integration with other software.
-
-If you would rather not type commands in a shell and are looking for a friendly user interface, check out [WebODM](https://github.com/OpenDroneMap/WebODM).
-
-## Quickstart
-
-The easiest way to run ODM is via docker. To install docker, see [docs.docker.com](https://docs.docker.com). Once you have docker installed and [working](https://docs.docker.com/get-started/#test-docker-installation), you can run ODM by placing some images (JPEGs or TIFFs) in a folder named “images” (for example `C:\Users\youruser\datasets\project\images` or `/home/youruser/datasets/project/images`) and running the following from a Command Prompt / Terminal:
-
-```bash
-# Windows
-docker run -ti --rm -v c:/Users/youruser/datasets:/datasets opendronemap/odm --project-path /datasets project
-
-# Mac/Linux
-docker run -ti --rm -v /home/youruser/datasets:/datasets opendronemap/odm --project-path /datasets project
-```
-
-You can pass [additional parameters](https://docs.opendronemap.org/arguments/) by appending them to the command:
-
-```bash
-docker run -ti --rm -v /datasets:/datasets opendronemap/odm --project-path /datasets project [--additional --parameters --here]
-```
-
-For example, to generate a DSM (`--dsm`) and increase the orthophoto resolution (`--orthophoto-resolution 2`):
-
-```bash
-docker run -ti --rm -v /datasets:/datasets opendronemap/odm --project-path /datasets project --dsm --orthophoto-resolution 2
-```
-
-## Viewing Results
-
-When the process finishes, the results will be organized as follows:
-
- |-- images/
- |-- img-1234.jpg
- |-- ...
- |-- opensfm/
- |-- see mapillary/opensfm repository for more info
- |-- odm_meshing/
- |-- odm_mesh.ply # A 3D mesh
- |-- odm_texturing/
- |-- odm_textured_model.obj # Textured mesh
- |-- odm_textured_model_geo.obj # Georeferenced textured mesh
- |-- odm_georeferencing/
- |-- odm_georeferenced_model.laz # LAZ format point cloud
- |-- odm_orthophoto/
- |-- odm_orthophoto.tif # Orthophoto GeoTiff
-
-You can use the following free and open source software to open the files generated in ODM:
- * .tif (GeoTIFF): [QGIS](http://www.qgis.org/)
- * .laz (Compressed LAS): [CloudCompare](https://www.cloudcompare.org/)
- * .obj (Wavefront OBJ), .ply (Stanford Triangle Format): [MeshLab](http://www.meshlab.net/)
-
-**Note!** Opening the .tif files generated by ODM in programs such as Photoshop or GIMP might not work (they are GeoTIFFs, not plain TIFFs). Use [QGIS](http://www.qgis.org/) instead.
-
-## API
-
-ODM can be made accessible from a network via [NodeODM](https://github.com/OpenDroneMap/NodeODM).
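-
-For example, a minimal way to start a NodeODM node with Docker (a sketch, assuming NodeODM's default port of 3000; see the NodeODM repository for authoritative instructions):
-
-```bash
-# Start a NodeODM node and expose its API/web interface on port 3000 (assumed default)
-docker run -ti --rm -p 3000:3000 opendronemap/nodeodm
-```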
-
-## Documentation
-
-See http://docs.opendronemap.org for tutorials and more guides.
-
-## Forum
-
-We have a vibrant [community forum](https://community.opendronemap.org/). You can [search it](https://community.opendronemap.org/search?expanded=true) for issues you might be having with ODM and you can post questions there. We encourage users of ODM to participate in the forum and to engage with fellow drone mapping users.
-
-## Windows Setup
-
-ODM can be installed natively on Windows. Just download the latest setup from the [releases](https://github.com/OpenDroneMap/ODM/releases) page. After opening the ODM Console you can process datasets by typing:
-
-```bash
-run C:\Users\youruser\datasets\project [--additional --parameters --here]
-```
-
-## Snap Package
-
-ODM is now available as a Snap Package from the Snap Store. To install it, you may use the Snap Store (itself available as a Snap Package) or the command line:
-
-```bash
-sudo snap install opendronemap
-```
-
-To run, you will need a terminal window into which you can type:
-
-```bash
-opendronemap
-
-# or
-
-snap run opendronemap
-
-# or
-
-/snap/bin/opendronemap
-```
-
-Snap packages will be kept up-to-date automatically, so you don't need to update ODM manually.
-
-## GPU Acceleration
-
-ODM has support for doing SIFT feature extraction on a GPU, which is about 2x faster than the CPU on a typical consumer laptop. To use this feature, you need to use the `opendronemap/odm:gpu` docker image instead of `opendronemap/odm` and you need to pass the `--gpus all` flag:
-
-```
-docker run -ti --rm -v c:/Users/youruser/datasets:/datasets --gpus all opendronemap/odm:gpu --project-path /datasets project
-```
-
-When you run ODM, if the GPU is recognized, in the first few lines of output you should see:
-
-```
-[INFO] Writing exif overrides
-[INFO] Maximum photo dimensions: 4000px
-[INFO] Found GPU device: Intel(R) OpenCL HD Graphics
-[INFO] Using GPU for extracting SIFT features
-```
-
-The SIFT GPU implementation is CUDA-based, so it should work with most NVIDIA graphics cards of the GTX 9xx generation or newer.
-
-If you have an NVIDIA card, you can test that docker is recognizing the GPU by running:
-
-```
-docker run --rm --gpus all nvidia/cuda:10.0-base nvidia-smi
-```
-
-If you see an output that looks like this:
-
-```
-Fri Jul 24 18:51:55 2020
-+-----------------------------------------------------------------------------+
-| NVIDIA-SMI 440.82 Driver Version: 440.82 CUDA Version: 10.2 |
-|-------------------------------+----------------------+----------------------+
-| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |
-| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |
-```
-
-You're in good shape!
-
-See https://github.com/NVIDIA/nvidia-docker and https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html#docker for information on docker/NVIDIA setup.
-
-## WSL or WSL2 Install
-
-Note: This requires that you have installed WSL already by following [the instructions on Microsoft's Website](https://docs.microsoft.com/en-us/windows/wsl/install-win10).
-
-You can run ODM via WSL or WSL2 by downloading the `rootfs.tar.gz` file from [the releases page on GitHub](https://github.com/OpenDroneMap/ODM/releases). Once you have the file saved to your `Downloads` folder in Windows, open a PowerShell or CMD window by right-clicking the Flag Menu (bottom left by default) and selecting "Windows PowerShell", or alternatively by using the [Windows Terminal from the Windows Store](https://www.microsoft.com/store/productId/9N0DX20HK701).
-
-Inside a PowerShell window, or Windows Terminal running PowerShell, type the following:
-
-```powershell
-# PowerShell
-wsl.exe --import ODM $env:APPDATA\ODM C:\path\to\your\Downloads\rootfs.tar.gz
-```
-
-Alternatively, if you're using `CMD.exe` or the `CMD` support in Windows Terminal, type:
-
-```cmd
-# CMD
-wsl.exe --import ODM %APPDATA%\ODM C:\path\to\your\Downloads\rootfs.tar.gz
-```
-
-In either case, make sure you replace `C:\path\to\your\Downloads\rootfs.tar.gz` with the actual path to your `rootfs.tar.gz` file.
-
-This will save a new hard disk image to your Windows `AppData` folder at `C:\Users\username\AppData\Roaming\ODM` (where `username` is your Windows username), and will set up a new WSL "distro" called `ODM`.
-
-You may start the ODM distro by using the relevant option in the Windows Terminal (from the Windows Store) or by executing `wsl.exe -d ODM` in a PowerShell or CMD window.
-
-ODM is installed to the distro's `/code` directory. You may execute it with:
-
-```bash
-/code/run.sh
-```
-
-### Updating ODM in WSL
-
-The easiest way to update the installation of ODM is to download the new `rootfs.tar.gz` file and import it as another distro. You may then unregister the original instance the same way you delete ODM from WSL (see next heading).
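-
-For example, in PowerShell (a sketch; `ODM-new` is a hypothetical name for the updated distro):
-
-```powershell
-# Import the updated rootfs under a new distro name (hypothetical: ODM-new)
-wsl.exe --import ODM-new $env:APPDATA\ODM-new C:\path\to\your\Downloads\rootfs.tar.gz
-# After verifying the new distro works, unregister the old one
-wsl.exe --unregister ODM
-```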
-
-### Deleting an ODM WSL instance
-
-```cmd
-wsl.exe --unregister ODM
-```
-
-Finally, you'll want to delete the files by using your Windows File Manager (Explorer) to navigate to `%APPDATA%`, find the `ODM` directory, and delete it by dragging it to the recycle bin. To permanently delete it, empty the recycle bin.
-
-If you installed to a different directory by changing the `--import` command, you must use that directory name to delete the correct files. This is likely the case if you have multiple ODM installations or are updating an existing installation.
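-
-Alternatively, a sketch of the same cleanup from PowerShell (adjust the path if you imported to a different directory):
-
-```powershell
-# Permanently delete the ODM distro's disk image folder (skips the recycle bin)
-Remove-Item -Recurse -Force "$env:APPDATA\ODM"
-```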
-
-## Native Install (Ubuntu 21.04)
-
-You can run ODM natively on Ubuntu 21.04 (although we don't recommend it):
-
-1. Download the source from [here](https://github.com/OpenDroneMap/ODM/archive/master.zip)
-2. Run `bash configure.sh install`
-3. Download a sample dataset from [here](https://github.com/OpenDroneMap/odm_data_aukerman/archive/master.zip) (about 550MB) and extract it to `/datasets/aukerman`
-4. Run `./run.sh --project-path /datasets odm_data_aukerman`
-
-### Updating a native installation
-
-When updating to a newer version of ODM, it is recommended that you run
-
- bash configure.sh reinstall
-
-to ensure all the dependent packages and modules get updated.
-
-### Build From Source
-
-If you want to rebuild your own Docker image (for example, after changing the source code), you can run the following from the ODM folder:
-
-```bash
-docker build -t my_odm_image --no-cache .
-```
-
-When building your own Docker image, if image size is important to you, use the `--squash` flag, like so:
-
-```bash
-docker build --squash -t my_odm_image .
-```
-
-This will clean up intermediate steps in the Docker build process, resulting in a significantly smaller image (about half the size).
-
-Experimental features must be enabled in Docker to use the `--squash` flag. To enable them, insert the following into the file `/etc/docker/daemon.json`:
-
-```json
-{
- "experimental": true
-}
-```
-
-After this, you must restart the Docker daemon.
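-
-On systemd-based Linux distributions this is typically:
-
-```bash
-sudo systemctl restart docker
-```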
-
-## Developers
-
-Help improve our software! We welcome contributions from everyone, whether to add new features, improve speed, fix existing bugs or add support for more cameras. Check our [code of conduct](https://github.com/OpenDroneMap/documents/blob/master/CONDUCT.md), the [contributing guidelines](https://github.com/OpenDroneMap/documents/blob/master/CONTRIBUTING.md) and [how decisions are made](https://github.com/OpenDroneMap/documents/blob/master/GOVERNANCE.md#how-decisions-are-made).
-
-For Linux users, the easiest way to modify the software is to make sure Docker is installed, clone the repository, and then run from a shell:
-
-```bash
-$ DATA=/path/to/datasets ./start-dev-env.sh
-```
-
-Where `/path/to/datasets` is a directory where you can place test datasets (it can also point to an empty directory if you don't have test datasets).
-
-Run configure to set up the required third-party libraries:
-
-```bash
-(odmdev) [user:/code] master+* ± bash configure.sh reinstall
-```
-
-You can now make changes to the ODM source. When you are ready to test the changes you can simply invoke:
-
-```bash
-(odmdev) [user:/code] master+* ± ./run.sh --project-path /datasets mydataset
-```
-
-If you have questions, join the developers' chat at https://community.opendronemap.org/c/developers-chat/21
-
-When contributing:
-
-1. Try to keep commits clean and simple
-2. Submit a pull request with detailed changes and test results
-3. Have fun!
-
-### Troubleshooting
-The dev environment makes use of `opendronemap/nodeodm` by default. You may want to run
-`docker pull opendronemap/nodeodm` before running `./start-dev-env.sh` to avoid using an old cached version.
-
-To make a clean build, remove `~/.odm-dev-home` and `ODM/.setupdevenv`.
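-
-For example (a sketch, assuming the repository is checked out at `./ODM`):
-
-```bash
-rm -rf ~/.odm-dev-home
-rm -rf ODM/.setupdevenv
-```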
-
-## Credits
-
-ODM makes use of [several libraries](https://github.com/OpenDroneMap/ODM/blob/master/snap/snapcraft.yaml#L36) and other awesome open source projects to perform its tasks. Among them we'd like to highlight:
-
- - [OpenSfM](https://github.com/mapillary/OpenSfM)
- - [OpenMVS](https://github.com/cdcseacave/openMVS/)
- - [PDAL](https://github.com/PDAL/PDAL)
- - [Entwine](https://entwine.io/)
- - [MVS Texturing](https://github.com/nmoehrle/mvs-texturing)
- - [GRASS GIS](https://grass.osgeo.org/)
- - [GDAL](https://gdal.org/)
- - [PoissonRecon](https://github.com/mkazhdan/PoissonRecon)
-
-
-## Citation
-
-> *OpenDroneMap Authors* ODM - A command line toolkit to generate maps, point clouds, 3D models and DEMs from drone, balloon or kite images. **OpenDroneMap/ODM GitHub Page** 2020; [https://github.com/OpenDroneMap/ODM](https://github.com/OpenDroneMap/ODM)
-
-## Trademark
-
-See [Trademark Guidelines](https://github.com/OpenDroneMap/documents/blob/master/TRADEMARK.md)
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/CMakeLists.txt b/o/ODM/ODM-2.8.7/SuperBuild/CMakeLists.txt
deleted file mode 100644
index 3c167127..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/CMakeLists.txt
+++ /dev/null
@@ -1,219 +0,0 @@
-cmake_minimum_required(VERSION 3.1)
-
-project(ODM-SuperBuild)
-
-if (NOT CMAKE_BUILD_TYPE)
- message(STATUS "No build type selected, default to Release")
- set(CMAKE_BUILD_TYPE "Release")
-endif()
-
-# Setup SuperBuild root location
-set(SB_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR})
-
-################################
-# Setup SuperBuild directories #
-################################
-
-# Setup location where source tar-balls are downloaded
-set(SB_DOWNLOAD_DIR "${SB_ROOT_DIR}/download"
- CACHE PATH "Location where source tar-balls are (to be) downloaded.")
-mark_as_advanced(SB_DOWNLOAD_DIR)
-
-message(STATUS "SuperBuild files will be downloaded to: ${SB_DOWNLOAD_DIR}")
-
-
-# Setup location where source tar-balls are extracted
-set(SB_SOURCE_DIR "${SB_ROOT_DIR}/src"
- CACHE PATH "Location where source tar-balls are (will be) extracted.")
-mark_as_advanced(SB_SOURCE_DIR)
-set(SB_BUILD_DIR "${SB_ROOT_DIR}/build")
-
-message(STATUS "SuperBuild source files will be extracted to: ${SB_SOURCE_DIR}")
-
-
-# Setup location where the SuperBuild output is installed
-set(SB_INSTALL_DIR "${SB_ROOT_DIR}/install"
- CACHE PATH "Location where the SuperBuild output is (will be) installed.")
-mark_as_advanced(SB_INSTALL_DIR)
-
-message(STATUS "SuperBuild source files will be installed to: ${SB_INSTALL_DIR}")
-
-
-# Setup location where binary files are located
-set(SB_BINARY_DIR "${SB_ROOT_DIR}/build"
- CACHE PATH "Location where files are (will be) located.")
-mark_as_advanced(SB_BINARY_DIR)
-
-message(STATUS "SuperBuild binary files will be located to: ${SB_BINARY_DIR}")
-
-if (WIN32)
- if (NOT DEFINED CMAKE_TOOLCHAIN_FILE)
- message(FATAL_ERROR "CMAKE_TOOLCHAIN_FILE not set. You need to set it to the path of vcpkg.cmake")
- endif()
- get_filename_component(CMAKE_TOOLCHAIN_DIR ${CMAKE_TOOLCHAIN_FILE} DIRECTORY)
- get_filename_component(VCPKG_ROOT "${CMAKE_TOOLCHAIN_DIR}/../../" ABSOLUTE)
- set(WIN32_CMAKE_ARGS "-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}")
- set(PYTHON_HOME "${SB_ROOT_DIR}/../venv")
- set(PYTHON_EXE_PATH "${PYTHON_HOME}/Scripts/python")
-
- # Use the GDAL version that comes with pip
- set(GDAL_ROOT "${PYTHON_HOME}/Lib/site-packages/osgeo")
- set(GDAL_LIBRARY "${GDAL_ROOT}/lib/gdal_i.lib")
- set(GDAL_INCLUDE_DIR "${GDAL_ROOT}/include/gdal")
-
- # Also download missing headers :/
- if (NOT EXISTS "${GDAL_INCLUDE_DIR}/ogrsf_frmts.h")
- file(DOWNLOAD "https://raw.githubusercontent.com/OSGeo/gdal/release/3.2/gdal/ogr/ogrsf_frmts/ogrsf_frmts.h" "${GDAL_INCLUDE_DIR}/ogrsf_frmts.h")
- endif()
-
- message("Copying VCPKG DLLs...")
- file(GLOB COPY_DLLS "${VCPKG_ROOT}/installed/x64-windows/bin/*.dll")
- file(COPY ${COPY_DLLS} DESTINATION "${SB_INSTALL_DIR}/bin")
-
- message("Copying CUDA DLLs...")
- file(GLOB CUDA_DLLS "$ENV{CUDA_PATH}/bin/cudart64*.dll")
- file(COPY ${CUDA_DLLS} DESTINATION "${SB_INSTALL_DIR}/bin")
-
- set(WIN32_GDAL_ARGS -DGDAL_FOUND=TRUE -DGDAL_LIBRARY=${GDAL_LIBRARY} -DGDAL_INCLUDE_DIR=${GDAL_INCLUDE_DIR})
-else()
- set(PYTHON_EXE_PATH "/usr/bin/python3")
-endif()
-
-# Path to additional CMake modules
-set(CMAKE_MODULE_PATH ${SB_ROOT_DIR}/cmake)
-
-include(ExternalProject)
-include(ExternalProject-Setup)
-
-#########################################
-# Download and install third party libs #
-#########################################
-
-# ---------------------------------------------------------------------------------------------
-# Open Source Computer Vision (OpenCV)
-#
-set(ODM_OpenCV_Version 2.4.11)
-option(ODM_BUILD_OpenCV "Force to build OpenCV library" OFF)
-
-SETUP_EXTERNAL_PROJECT(OpenCV ${ODM_OpenCV_Version} ${ODM_BUILD_OpenCV})
-
-
-# ---------------------------------------------------------------------------------------------
-# Point Cloud Library (PCL)
-#
-set(ODM_PCL_Version 1.8.0)
-option(ODM_BUILD_PCL "Force to build PCL library" OFF)
-
-SETUP_EXTERNAL_PROJECT(PCL ${ODM_PCL_Version} ${ODM_BUILD_PCL})
-
-
-# ---------------------------------------------------------------------------------------------
-# Google Flags library (GFlags)
-#
-set(ODM_GFlags_Version 2.1.2)
-option(ODM_BUILD_GFlags "Force to build GFlags library" OFF)
-
-SETUP_EXTERNAL_PROJECT(GFlags ${ODM_GFlags_Version} ${ODM_BUILD_GFlags})
-
-
-# ---------------------------------------------------------------------------------------------
-# Ceres Solver
-#
-set(ODM_Ceres_Version 2.0.0)
-option(ODM_BUILD_Ceres "Force to build Ceres library" OFF)
-
-SETUP_EXTERNAL_PROJECT(Ceres ${ODM_Ceres_Version} ${ODM_BUILD_Ceres})
-
-
-# ---------------------------------------------------------------------------------------------
-# Hexer
-#
-SETUP_EXTERNAL_PROJECT(Hexer 1.4 ON)
-
-# ---------------------------------------------------------------------------------------------
-# Open Structure from Motion (OpenSfM)
-#
-
-set(custom_libs OpenSfM
- LASzip
- PDAL
- Untwine
- Entwine
- MvsTexturing
- OpenMVS
- FPCFilter
- PyPopsift
- Obj2Tiles
-)
-
-externalproject_add(mve
- GIT_REPOSITORY https://github.com/OpenDroneMap/mve.git
- GIT_TAG 262
- UPDATE_COMMAND ""
- SOURCE_DIR ${SB_SOURCE_DIR}/mve
- CMAKE_ARGS ${WIN32_CMAKE_ARGS}
- BUILD_IN_SOURCE 1
- INSTALL_COMMAND ""
-)
-
-foreach(lib ${custom_libs})
- SETUP_EXTERNAL_PROJECT_CUSTOM(${lib})
-endforeach()
-
-include(ProcessorCount)
-ProcessorCount(nproc)
-
-if (WIN32)
- set (POISSON_BUILD_CMD ${CMAKE_MAKE_PROGRAM} ${SB_SOURCE_DIR}/PoissonRecon/PoissonRecon.vcxproj /p:configuration=${CMAKE_BUILD_TYPE} /p:Platform=x64 /p:PlatformToolset=${CMAKE_VS_PLATFORM_TOOLSET} /p:WindowsTargetPlatformVersion=${CMAKE_VS_WINDOWS_TARGET_PLATFORM_VERSION})
- set (POISSON_BIN_PATH "x64/${CMAKE_BUILD_TYPE}/PoissonRecon.exe")
-else()
- set (POISSON_BUILD_CMD make -j${nproc} poissonrecon)
- set (POISSON_BIN_PATH "Linux/PoissonRecon")
-endif()
-externalproject_add(poissonrecon
- GIT_REPOSITORY https://github.com/OpenDroneMap/PoissonRecon.git
- GIT_TAG 272
- PREFIX ${SB_BINARY_DIR}/PoissonRecon
- SOURCE_DIR ${SB_SOURCE_DIR}/PoissonRecon
- UPDATE_COMMAND ""
- CONFIGURE_COMMAND ""
- BUILD_IN_SOURCE 1
- BUILD_COMMAND ${POISSON_BUILD_CMD}
- INSTALL_COMMAND ${CMAKE_COMMAND} -E copy ${SB_SOURCE_DIR}/PoissonRecon/Bin/${POISSON_BIN_PATH} ${SB_INSTALL_DIR}/bin
-)
-
-externalproject_add(dem2mesh
- GIT_REPOSITORY https://github.com/OpenDroneMap/dem2mesh.git
- GIT_TAG master
- PREFIX ${SB_BINARY_DIR}/dem2mesh
- SOURCE_DIR ${SB_SOURCE_DIR}/dem2mesh
- CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
- ${WIN32_GDAL_ARGS}
-)
-
-externalproject_add(dem2points
- GIT_REPOSITORY https://github.com/OpenDroneMap/dem2points.git
- GIT_TAG master
- PREFIX ${SB_BINARY_DIR}/dem2points
- SOURCE_DIR ${SB_SOURCE_DIR}/dem2points
- CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
- ${WIN32_GDAL_ARGS}
-)
-
-externalproject_add(odm_orthophoto
- DEPENDS pcl opencv
- GIT_REPOSITORY https://github.com/OpenDroneMap/odm_orthophoto.git
- GIT_TAG main
- PREFIX ${SB_BINARY_DIR}/odm_orthophoto
- SOURCE_DIR ${SB_SOURCE_DIR}/odm_orthophoto
- CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
- ${WIN32_CMAKE_ARGS} ${WIN32_GDAL_ARGS}
-)
-
-externalproject_add(lastools
- GIT_REPOSITORY https://github.com/OpenDroneMap/LAStools.git
- GIT_TAG 250
- PREFIX ${SB_BINARY_DIR}/lastools
- SOURCE_DIR ${SB_SOURCE_DIR}/lastools
- CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
-)
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-Ceres.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-Ceres.cmake
deleted file mode 100644
index 947e7b91..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-Ceres.cmake
+++ /dev/null
@@ -1,33 +0,0 @@
-set(_proj_name ceres)
-set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
-
-ExternalProject_Add(${_proj_name}
- DEPENDS gflags
- PREFIX ${_SB_BINARY_DIR}
- TMP_DIR ${_SB_BINARY_DIR}/tmp
- STAMP_DIR ${_SB_BINARY_DIR}/stamp
- #--Download step--------------
- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
- URL http://ceres-solver.org/ceres-solver-2.0.0.tar.gz
- #--Update/Patch step----------
- UPDATE_COMMAND ""
- #--Configure step-------------
- SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
- CMAKE_ARGS
- -DCMAKE_C_FLAGS=-fPIC
- -DCMAKE_CXX_FLAGS=-fPIC
- -DBUILD_EXAMPLES=OFF
- -DBUILD_TESTING=OFF
- -DMINIGLOG=ON
- -DMINIGLOG_MAX_LOG_LEVEL=-100
- -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
- ${WIN32_CMAKE_ARGS}
- #--Build step-----------------
- BINARY_DIR ${_SB_BINARY_DIR}
- #--Install step---------------
- INSTALL_DIR ${SB_INSTALL_DIR}
- #--Output logging-------------
- LOG_DOWNLOAD OFF
- LOG_CONFIGURE OFF
- LOG_BUILD OFF
-)
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-Entwine.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-Entwine.cmake
deleted file mode 100644
index 5d52e879..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-Entwine.cmake
+++ /dev/null
@@ -1,36 +0,0 @@
-set(_proj_name entwine)
-set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
-
-if (NOT WIN32)
- set(EXTRA_CMAKE_ARGS -DCMAKE_CXX_FLAGS=-isystem\ ${SB_SOURCE_DIR}/pdal)
-endif()
-
-ExternalProject_Add(${_proj_name}
- DEPENDS pdal
- PREFIX ${_SB_BINARY_DIR}
- TMP_DIR ${_SB_BINARY_DIR}/tmp
- STAMP_DIR ${_SB_BINARY_DIR}/stamp
- #--Download step--------------
- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
- GIT_REPOSITORY https://github.com/OpenDroneMap/entwine/
- GIT_TAG 285
- #--Update/Patch step----------
- UPDATE_COMMAND ""
- #--Configure step-------------
- SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
- CMAKE_ARGS
- ${EXTRA_CMAKE_ARGS}
- -DADDITIONAL_LINK_DIRECTORIES_PATHS=${SB_INSTALL_DIR}/lib
- -DWITH_TESTS=OFF
- -DWITH_ZSTD=OFF
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
- -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
- #--Build step-----------------
- BINARY_DIR ${_SB_BINARY_DIR}
- #--Install step---------------
- INSTALL_DIR ${SB_INSTALL_DIR}
- #--Output logging-------------
- LOG_DOWNLOAD OFF
- LOG_CONFIGURE OFF
- LOG_BUILD OFF
-)
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-FPCFilter.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-FPCFilter.cmake
deleted file mode 100644
index 78f5c398..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-FPCFilter.cmake
+++ /dev/null
@@ -1,27 +0,0 @@
-set(_proj_name fpcfilter)
-set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
-
-ExternalProject_Add(${_proj_name}
- PREFIX ${_SB_BINARY_DIR}
- TMP_DIR ${_SB_BINARY_DIR}/tmp
- STAMP_DIR ${_SB_BINARY_DIR}/stamp
- #--Download step--------------
- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
- GIT_REPOSITORY https://github.com/OpenDroneMap/FPCFilter
- GIT_TAG main
- #--Update/Patch step----------
- UPDATE_COMMAND ""
- #--Configure step-------------
- SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
- CMAKE_ARGS
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
- -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
- #--Build step-----------------
- BINARY_DIR ${_SB_BINARY_DIR}
- #--Install step---------------
- INSTALL_DIR ${SB_INSTALL_DIR}
- #--Output logging-------------
- LOG_DOWNLOAD OFF
- LOG_CONFIGURE OFF
- LOG_BUILD OFF
-)
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-GFlags.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-GFlags.cmake
deleted file mode 100644
index 2dc8f33c..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-GFlags.cmake
+++ /dev/null
@@ -1,28 +0,0 @@
-set(_proj_name gflags)
-set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
-
-ExternalProject_Add(${_proj_name}
- PREFIX ${_SB_BINARY_DIR}
- TMP_DIR ${_SB_BINARY_DIR}/tmp
- STAMP_DIR ${_SB_BINARY_DIR}/stamp
- #--Download step--------------
- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
- GIT_REPOSITORY https://github.com/gflags/gflags
- GIT_TAG 28f50e0fed19872e0fd50dd23ce2ee8cd759338e
- #--Update/Patch step----------
- UPDATE_COMMAND ""
- #--Configure step-------------
- SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
- CMAKE_ARGS
- -DCMAKE_POSITION_INDEPENDENT_CODE=ON
- -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
- -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
- #--Build step-----------------
- BINARY_DIR ${_SB_BINARY_DIR}
- #--Install step---------------
- INSTALL_DIR ${SB_INSTALL_DIR}
- #--Output logging-------------
- LOG_DOWNLOAD OFF
- LOG_CONFIGURE OFF
- LOG_BUILD OFF
-)
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-Hexer.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-Hexer.cmake
deleted file mode 100644
index ddff150b..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-Hexer.cmake
+++ /dev/null
@@ -1,27 +0,0 @@
-set(_proj_name hexer)
-set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
-
-ExternalProject_Add(${_proj_name}
- DEPENDS
- PREFIX ${_SB_BINARY_DIR}
- TMP_DIR ${_SB_BINARY_DIR}/tmp
- STAMP_DIR ${_SB_BINARY_DIR}/stamp
- #--Download step--------------
- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
- URL https://github.com/hobu/hexer/archive/bc748fc16b51c562f68f6641574b7af4244adfa2.tar.gz
- #--Update/Patch step----------
- UPDATE_COMMAND ""
- #--Configure step-------------
- SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
- CMAKE_ARGS
- -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
- ${WIN32_GDAL_ARGS}
- #--Build step-----------------
- BINARY_DIR ${_SB_BINARY_DIR}
- #--Install step---------------
- INSTALL_DIR ${SB_INSTALL_DIR}
- #--Output logging-------------
- LOG_DOWNLOAD OFF
- LOG_CONFIGURE OFF
- LOG_BUILD OFF
-)
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-LASzip.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-LASzip.cmake
deleted file mode 100644
index be5cc94d..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-LASzip.cmake
+++ /dev/null
@@ -1,29 +0,0 @@
-set(_proj_name laszip)
-set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
-
-ExternalProject_Add(${_proj_name}
- DEPENDS
- PREFIX ${_SB_BINARY_DIR}
- TMP_DIR ${_SB_BINARY_DIR}/tmp
- STAMP_DIR ${_SB_BINARY_DIR}/stamp
- #--Download step--------------
- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name}
- URL https://github.com/LASzip/LASzip/archive/0069c42307183c49744f1eb170f7032a8cf6a9db.zip
- #--Update/Patch step----------
- UPDATE_COMMAND ""
- #--Configure step-------------
- SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
- CMAKE_ARGS
- -DBUILD_SHARED_LIBS=ON
- -DBUILD_STATIC_LIBS=OFF
- -DCMAKE_INSTALL_PREFIX=${SB_INSTALL_DIR}
- -DCMAKE_INSTALL_LIBDIR=lib
- #--Build step-----------------
- BINARY_DIR ${_SB_BINARY_DIR}
- #--Install step---------------
- INSTALL_DIR ${SB_INSTALL_DIR}
- #--Output logging-------------
- LOG_DOWNLOAD OFF
- LOG_CONFIGURE OFF
- LOG_BUILD OFF
-)
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-MvsTexturing.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-MvsTexturing.cmake
deleted file mode 100644
index 4f8828f3..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-MvsTexturing.cmake
+++ /dev/null
@@ -1,30 +0,0 @@
-set(_proj_name mvstexturing)
-set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
-
-ExternalProject_Add(${_proj_name}
- DEPENDS mve
- PREFIX ${_SB_BINARY_DIR}
- TMP_DIR ${_SB_BINARY_DIR}/tmp
- STAMP_DIR ${_SB_BINARY_DIR}/stamp
- #--Download step--------------
- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name}
- GIT_REPOSITORY https://github.com/OpenDroneMap/mvs-texturing
- GIT_TAG 287
- #--Update/Patch step----------
- UPDATE_COMMAND ""
- #--Configure step-------------
- SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
- CMAKE_ARGS
- -DRESEARCH=OFF
- -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
- -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
- ${WIN32_CMAKE_ARGS}
- #--Build step-----------------
- BINARY_DIR ${_SB_BINARY_DIR}
- #--Install step---------------
- INSTALL_DIR ${SB_INSTALL_DIR}
- #--Output logging-------------
- LOG_DOWNLOAD OFF
- LOG_CONFIGURE OFF
- LOG_BUILD OFF
-)
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-Obj2Tiles.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-Obj2Tiles.cmake
deleted file mode 100644
index 53b86913..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-Obj2Tiles.cmake
+++ /dev/null
@@ -1,33 +0,0 @@
-set(_proj_name obj2tiles)
-set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
-
-set(OBJ2TILES_VERSION v1.0.7)
-set(OBJ2TILES_EXT "")
-
-set(OBJ2TILES_ARCH "Linux64")
-if (WIN32)
- set(OBJ2TILES_ARCH "Win64")
- set(OBJ2TILES_EXT ".exe")
-elseif(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "aarch64")
- set(OBJ2TILES_ARCH "LinuxArm")
-endif()
-
-
-ExternalProject_Add(${_proj_name}
- PREFIX ${_SB_BINARY_DIR}
- TMP_DIR ${_SB_BINARY_DIR}/tmp
- STAMP_DIR ${_SB_BINARY_DIR}/stamp
- #--Download step--------------
- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
- URL https://github.com/OpenDroneMap/Obj2Tiles/releases/download/${OBJ2TILES_VERSION}/Obj2Tiles-${OBJ2TILES_ARCH}.zip
- SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
- UPDATE_COMMAND ""
- CONFIGURE_COMMAND ""
- BUILD_IN_SOURCE 1
- BUILD_COMMAND ""
- INSTALL_COMMAND ${CMAKE_COMMAND} -E copy ${SB_SOURCE_DIR}/${_proj_name}/Obj2Tiles${OBJ2TILES_EXT} ${SB_INSTALL_DIR}/bin
- #--Output logging-------------
- LOG_DOWNLOAD OFF
- LOG_CONFIGURE OFF
- LOG_BUILD OFF
-)
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-OpenCV.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-OpenCV.cmake
deleted file mode 100644
index d40d0b42..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-OpenCV.cmake
+++ /dev/null
@@ -1,72 +0,0 @@
-set(_proj_name opencv)
-set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
-
-if (WIN32)
- set(WIN32_CMAKE_EXTRA_ARGS -DPYTHON3_NUMPY_INCLUDE_DIRS=${PYTHON_HOME}/lib/site-packages/numpy/core/include
- -DPYTHON3_PACKAGES_PATH=${PYTHON_HOME}/lib/site-packages
- -DPYTHON3_EXECUTABLE=${PYTHON_EXE_PATH}
- -DWITH_MSMF=OFF
- -DOPENCV_LIB_INSTALL_PATH=${SB_INSTALL_DIR}/lib
- -DOPENCV_BIN_INSTALL_PATH=${SB_INSTALL_DIR}/bin)
-endif()
-
-ExternalProject_Add(${_proj_name}
- PREFIX ${_SB_BINARY_DIR}
- TMP_DIR ${_SB_BINARY_DIR}/tmp
- STAMP_DIR ${_SB_BINARY_DIR}/stamp
- #--Download step--------------
- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
- URL https://github.com/opencv/opencv/archive/4.5.0.zip
- #--Update/Patch step----------
- UPDATE_COMMAND ""
- #--Configure step-------------
- SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
- CMAKE_ARGS
- -DBUILD_opencv_core=ON
- -DBUILD_opencv_imgproc=ON
- -DBUILD_opencv_highgui=ON
- -DBUILD_opencv_video=ON
- -DBUILD_opencv_ml=ON
- -DBUILD_opencv_features2d=ON
- -DBUILD_opencv_calib3d=ON
- -DBUILD_opencv_contrib=ON
- -DBUILD_opencv_flann=ON
- -DBUILD_opencv_objdetect=ON
- -DBUILD_opencv_photo=ON
- -DBUILD_opencv_legacy=ON
- -DBUILD_opencv_python3=ON
- -DWITH_FFMPEG=OFF
- -DWITH_CUDA=OFF
- -DWITH_GTK=OFF
- -DWITH_VTK=OFF
- -DWITH_EIGEN=OFF
- -DWITH_OPENNI=OFF
- -DBUILD_EXAMPLES=OFF
- -DBUILD_TESTS=OFF
- -DBUILD_PERF_TESTS=OFF
- -DBUILD_DOCS=OFF
- -DBUILD_opencv_apps=OFF
- -DBUILD_opencv_gpu=OFF
- -DBUILD_opencv_videostab=OFF
- -DBUILD_opencv_nonfree=OFF
- -DBUILD_opencv_stitching=OFF
- -DBUILD_opencv_world=OFF
- -DBUILD_opencv_superres=OFF
- -DBUILD_opencv_java=OFF
- -DBUILD_opencv_ocl=OFF
- -DBUILD_opencv_ts=OFF
- -DBUILD_opencv_xfeatures2d=ON
- -DOPENCV_ALLOCATOR_STATS_COUNTER_TYPE=int64_t
- -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
- -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
- ${WIN32_CMAKE_ARGS}
- ${WIN32_CMAKE_EXTRA_ARGS}
- #--Build step-----------------
- BINARY_DIR ${_SB_BINARY_DIR}
- #--Install step---------------
- INSTALL_DIR ${SB_INSTALL_DIR}
- #--Output logging-------------
- LOG_DOWNLOAD OFF
- LOG_CONFIGURE OFF
- LOG_BUILD OFF
-)
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-OpenMVS.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-OpenMVS.cmake
deleted file mode 100644
index 97fd0ca4..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-OpenMVS.cmake
+++ /dev/null
@@ -1,78 +0,0 @@
-set(_proj_name openmvs)
-set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
-
-externalproject_add(vcg
- GIT_REPOSITORY https://github.com/OpenDroneMap/VCG.git
- GIT_TAG 285
- UPDATE_COMMAND ""
- SOURCE_DIR ${SB_SOURCE_DIR}/vcg
- CONFIGURE_COMMAND ""
- BUILD_IN_SOURCE 1
- BUILD_COMMAND ""
- INSTALL_COMMAND ""
-)
-
-externalproject_add(eigen34
- GIT_REPOSITORY https://gitlab.com/libeigen/eigen.git
- GIT_TAG 3.4
- UPDATE_COMMAND ""
- SOURCE_DIR ${SB_SOURCE_DIR}/eigen34
- CONFIGURE_COMMAND ""
- BUILD_IN_SOURCE 1
- BUILD_COMMAND ""
- INSTALL_COMMAND ""
-)
-
-SET(ARM64_CMAKE_ARGS "")
-if(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "aarch64" )
- SET(ARM64_CMAKE_ARGS -DOpenMVS_USE_SSE=OFF)
-endif()
-
-SET(GPU_CMAKE_ARGS "")
-if(UNIX)
- if (EXISTS "/usr/local/cuda/lib64/stubs")
- SET(GPU_CMAKE_ARGS -DCMAKE_LIBRARY_PATH=/usr/local/cuda/lib64/stubs)
- endif()
-endif()
-
-if(WIN32)
- # On Windows systems without NVIDIA GPUs, OpenMVS will not launch
- # unless a CUDA DLL is available; we download a dummy DLL
- # generated with https://github.com/ykhwong/dummy-dll-generator that is
- # loaded UNLESS the real CUDA DLL is available, since it will
- # be loaded before our dummy DLL.
- file(DOWNLOAD "https://github.com/OpenDroneMap/windows-deps/releases/download/2.5.0/nvcuda_dummy.dll" "${SB_INSTALL_DIR}/bin/nvcuda.dll")
-endif()
-
-ExternalProject_Add(${_proj_name}
- DEPENDS ceres opencv vcg eigen34
- PREFIX ${_SB_BINARY_DIR}
- TMP_DIR ${_SB_BINARY_DIR}/tmp
- STAMP_DIR ${_SB_BINARY_DIR}/stamp
- #--Download step--------------
- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
- GIT_REPOSITORY https://github.com/OpenDroneMap/openMVS
- GIT_TAG 287
- #--Update/Patch step----------
- UPDATE_COMMAND ""
- #--Configure step-------------
- SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
- CMAKE_ARGS
- -DOpenCV_DIR=${SB_INSTALL_DIR}/lib/cmake/opencv4
- -DVCG_ROOT=${SB_SOURCE_DIR}/vcg
- -DEIGEN3_INCLUDE_DIR=${SB_SOURCE_DIR}/eigen34/
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
- -DCMAKE_INSTALL_PREFIX=${SB_INSTALL_DIR}
- -DOpenMVS_MAX_CUDA_COMPATIBILITY=ON
- ${GPU_CMAKE_ARGS}
- ${WIN32_CMAKE_ARGS}
- ${ARM64_CMAKE_ARGS}
- #--Build step-----------------
- BINARY_DIR ${_SB_BINARY_DIR}
- #--Install step---------------
- INSTALL_DIR ${SB_INSTALL_DIR}
- #--Output logging-------------
- LOG_DOWNLOAD OFF
- LOG_CONFIGURE OFF
- LOG_BUILD OFF
-)
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-OpenSfM.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-OpenSfM.cmake
deleted file mode 100644
index 88fb4d44..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-OpenSfM.cmake
+++ /dev/null
@@ -1,43 +0,0 @@
-set(_proj_name opensfm)
-set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
-include(ProcessorCount)
-ProcessorCount(nproc)
-
-if(WIN32)
- set(OpenCV_DIR "${SB_INSTALL_DIR}/x64/vc16/lib")
- set(BUILD_CMD ${CMAKE_COMMAND} --build "${SB_BUILD_DIR}/opensfm" --config "${CMAKE_BUILD_TYPE}")
-else()
- set(OpenCV_DIR "${SB_INSTALL_DIR}/lib/cmake/opencv4")
- set(BUILD_CMD make "-j${nproc}")
-endif()
-
-ExternalProject_Add(${_proj_name}
- DEPENDS ceres opencv gflags
- PREFIX ${_SB_BINARY_DIR}
- TMP_DIR ${_SB_BINARY_DIR}/tmp
- STAMP_DIR ${_SB_BINARY_DIR}/stamp
- #--Download step--------------
- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
- GIT_REPOSITORY https://github.com/OpenDroneMap/OpenSfM/
- GIT_TAG 287
- #--Update/Patch step----------
- UPDATE_COMMAND git submodule update --init --recursive
- #--Configure step-------------
- SOURCE_DIR ${SB_INSTALL_DIR}/bin/${_proj_name}
- CONFIGURE_COMMAND ${CMAKE_COMMAND} <SOURCE_DIR>/${_proj_name}/src
- -DCERES_ROOT_DIR=${SB_INSTALL_DIR}
- -DOpenCV_DIR=${OpenCV_DIR}
- -DADDITIONAL_INCLUDE_DIRS=${SB_INSTALL_DIR}/include
- -DOPENSFM_BUILD_TESTS=off
- -DPYTHON_EXECUTABLE=${PYTHON_EXE_PATH}
- ${WIN32_CMAKE_ARGS}
- BUILD_COMMAND ${BUILD_CMD}
- #--Build step-----------------
- BINARY_DIR ${_SB_BINARY_DIR}
- #--Install step---------------
- INSTALL_COMMAND ""
- #--Output logging-------------
- LOG_DOWNLOAD OFF
- LOG_CONFIGURE OFF
- LOG_BUILD OFF
-)
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-PCL.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-PCL.cmake
deleted file mode 100644
index c7322e2d..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-PCL.cmake
+++ /dev/null
@@ -1,55 +0,0 @@
-set(_proj_name pcl)
-set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
-
-ExternalProject_Add(${_proj_name}
- PREFIX ${_SB_BINARY_DIR}
- TMP_DIR ${_SB_BINARY_DIR}/tmp
- STAMP_DIR ${_SB_BINARY_DIR}/stamp
- #--Download step--------------
- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
- URL https://github.com/PointCloudLibrary/pcl/archive/refs/tags/pcl-1.11.1.zip
- #--Update/Patch step----------
- UPDATE_COMMAND ""
- #--Configure step-------------
- SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
- CMAKE_ARGS
- -DBUILD_features=OFF
- -DBUILD_filters=OFF
- -DBUILD_geometry=OFF
- -DBUILD_keypoints=OFF
- -DBUILD_outofcore=OFF
- -DBUILD_people=OFF
- -DBUILD_recognition=OFF
- -DBUILD_registration=OFF
- -DBUILD_sample_consensus=OFF
- -DBUILD_segmentation=OFF
- -DBUILD_features=OFF
- -DBUILD_surface_on_nurbs=OFF
- -DBUILD_tools=OFF
- -DBUILD_tracking=OFF
- -DBUILD_visualization=OFF
- -DWITH_OPENGL=OFF
- -DWITH_VTK=OFF
- -DWITH_QT=OFF
- -DBUILD_OPENNI=OFF
- -DBUILD_OPENNI2=OFF
- -DWITH_OPENNI=OFF
- -DWITH_OPENNI2=OFF
- -DWITH_FZAPI=OFF
- -DWITH_LIBUSB=OFF
- -DWITH_PCAP=OFF
- -DWITH_PXCAPI=OFF
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
- -DPCL_VERBOSITY_LEVEL=Error
- -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
- -DPCL_BUILD_WITH_FLANN_DYNAMIC_LINKING_WIN32=ON
- ${WIN32_CMAKE_ARGS}
- #--Build step-----------------
- BINARY_DIR ${_SB_BINARY_DIR}
- #--Install step---------------
- INSTALL_DIR ${SB_INSTALL_DIR}
- #--Output logging-------------
- LOG_DOWNLOAD OFF
- LOG_CONFIGURE OFF
- LOG_BUILD OFF
-)
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-PDAL.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-PDAL.cmake
deleted file mode 100644
index 634c9899..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-PDAL.cmake
+++ /dev/null
@@ -1,60 +0,0 @@
-set(_proj_name pdal)
-set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
-
-if (WIN32)
-set(LASZIP_LIB "${SB_INSTALL_DIR}/lib/laszip.lib")
-else()
-set(LASZIP_LIB "${SB_INSTALL_DIR}/lib/liblaszip.so")
-endif()
-
-ExternalProject_Add(${_proj_name}
- DEPENDS hexer laszip
- PREFIX ${_SB_BINARY_DIR}
- TMP_DIR ${_SB_BINARY_DIR}/tmp
- STAMP_DIR ${_SB_BINARY_DIR}/stamp
- #--Download step--------------
- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
- URL https://github.com/PDAL/PDAL/archive/refs/tags/2.3RC1.zip
- #--Update/Patch step----------
- UPDATE_COMMAND ""
- #--Configure step-------------
- SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
- CMAKE_ARGS
- -DBUILD_PGPOINTCLOUD_TESTS=OFF
- -DBUILD_PLUGIN_PGPOINTCLOUD=OFF
- -DBUILD_PLUGIN_CPD=OFF
- -DBUILD_PLUGIN_GREYHOUND=OFF
- -DBUILD_PLUGIN_HEXBIN=ON
- -DBUILD_PLUGIN_ICEBRIDGE=OFF
- -DBUILD_PLUGIN_MRSID=OFF
- -DBUILD_PLUGIN_NITF=OFF
- -DBUILD_PLUGIN_OCI=OFF
- -DBUILD_PLUGIN_P2G=OFF
- -DBUILD_PLUGIN_SQLITE=OFF
- -DBUILD_PLUGIN_RIVLIB=OFF
- -DBUILD_PLUGIN_PYTHON=OFF
- -DWITH_ZSTD=OFF
- -DENABLE_CTEST=OFF
- -DWITH_APPS=ON
- -DWITH_LAZPERF=OFF
- -DWITH_GEOTIFF=ON
- -DWITH_LASZIP=ON
- -DLASZIP_FOUND=TRUE
- -DLASZIP_LIBRARIES=${LASZIP_LIB}
- -DLASZIP_VERSION=3.1.1
- -DLASZIP_INCLUDE_DIR=${SB_INSTALL_DIR}/include
- -DLASZIP_LIBRARY=${LASZIP_LIB}
- -DWITH_TESTS=OFF
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
- -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
- ${WIN32_CMAKE_ARGS}
- ${WIN32_GDAL_ARGS}
- #--Build step-----------------
- BINARY_DIR ${_SB_BINARY_DIR}
- #--Install step---------------
- INSTALL_DIR ${SB_INSTALL_DIR}
- #--Output logging-------------
- LOG_DOWNLOAD OFF
- LOG_CONFIGURE OFF
- LOG_BUILD OFF
-)
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-PyPopsift.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-PyPopsift.cmake
deleted file mode 100644
index 1e1d6fe1..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-PyPopsift.cmake
+++ /dev/null
@@ -1,36 +0,0 @@
-set(_SB_BINARY_DIR "${SB_BINARY_DIR}/pypopsift")
-
-# Pypopsift
-find_package(CUDA 7.0)
-
-if(CUDA_FOUND)
- ExternalProject_Add(pypopsift
- DEPENDS opensfm
- PREFIX ${_SB_BINARY_DIR}
- TMP_DIR ${_SB_BINARY_DIR}/tmp
- STAMP_DIR ${_SB_BINARY_DIR}/stamp
- #--Download step--------------
- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
- GIT_REPOSITORY https://github.com/OpenDroneMap/pypopsift
- GIT_TAG 281
- #--Update/Patch step----------
- UPDATE_COMMAND ""
- #--Configure step-------------
- SOURCE_DIR ${SB_SOURCE_DIR}/pypopsift
- CMAKE_ARGS
- -DOUTPUT_DIR=${SB_INSTALL_DIR}/bin/opensfm/opensfm
- -DCMAKE_INSTALL_PREFIX=${SB_INSTALL_DIR}
- ${WIN32_CMAKE_ARGS}
- ${ARM64_CMAKE_ARGS}
- #--Build step-----------------
- BINARY_DIR ${_SB_BINARY_DIR}
- #--Install step---------------
- INSTALL_DIR ${SB_INSTALL_DIR}
- #--Output logging-------------
- LOG_DOWNLOAD OFF
- LOG_CONFIGURE OFF
- LOG_BUILD OFF
- )
-else()
- message(WARNING "Could not find CUDA >= 7.0")
-endif()
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-Untwine.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-Untwine.cmake
deleted file mode 100644
index 9d2102ac..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/External-Untwine.cmake
+++ /dev/null
@@ -1,29 +0,0 @@
-set(_proj_name untwine)
-set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
-
-ExternalProject_Add(${_proj_name}
- DEPENDS pdal
- PREFIX ${_SB_BINARY_DIR}
- TMP_DIR ${_SB_BINARY_DIR}/tmp
- STAMP_DIR ${_SB_BINARY_DIR}/stamp
- #--Download step--------------
- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
- GIT_REPOSITORY https://github.com/OpenDroneMap/untwine/
- GIT_TAG 285
- #--Update/Patch step----------
- UPDATE_COMMAND ""
- #--Configure step-------------
- SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
- CMAKE_ARGS
- -DPDAL_DIR=${SB_INSTALL_DIR}/lib/cmake/PDAL
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
- -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
- #--Build step-----------------
- BINARY_DIR ${_SB_BINARY_DIR}
- #--Install step---------------
- INSTALL_DIR ${SB_INSTALL_DIR}
- #--Output logging-------------
- LOG_DOWNLOAD OFF
- LOG_CONFIGURE OFF
- LOG_BUILD OFF
-)
diff --git a/o/ODM/ODM-2.8.7/SuperBuild/cmake/ExternalProject-Setup.cmake b/o/ODM/ODM-2.8.7/SuperBuild/cmake/ExternalProject-Setup.cmake
deleted file mode 100644
index eea0b533..00000000
--- a/o/ODM/ODM-2.8.7/SuperBuild/cmake/ExternalProject-Setup.cmake
+++ /dev/null
@@ -1,27 +0,0 @@
-set(ADD_INTERNAL_LIB_MSG "--- Adding internal version")
-set(FORCE_BUILD_LIB_MSG "force build ${ADD_INTERNAL_LIB_MSG}")
-
-macro(SETUP_EXTERNAL_PROJECT name version force_build)
-
- if(NOT ${force_build})
-
- find_package(${name} ${version} EXACT QUIET)
-
- if(${${name}_FOUND})
- message(STATUS "${name} ${${name}_VERSION} found")
- set(${name}_DIR ${${name}_DIR})
- else()
- message(STATUS "${name} ${version} not found ${ADD_INTERNAL_LIB_MSG}")
- include(External-${name})
- endif()
- else()
- message(STATUS "${name} ${version} ${FORCE_BUILD_LIB_MSG}")
- include(External-${name})
- endif()
-
-endmacro()
-
-macro(SETUP_EXTERNAL_PROJECT_CUSTOM name)
- message(STATUS "${name} ${FORCE_BUILD_LIB_MSG}")
- include(External-${name})
-endmacro()
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/VERSION b/o/ODM/ODM-2.8.7/VERSION
deleted file mode 100644
index bcd0f91f..00000000
--- a/o/ODM/ODM-2.8.7/VERSION
+++ /dev/null
@@ -1 +0,0 @@
-2.8.7
diff --git a/o/ODM/ODM-2.8.7/code_of_conduct.md b/o/ODM/ODM-2.8.7/code_of_conduct.md
deleted file mode 100644
index 99501c7c..00000000
--- a/o/ODM/ODM-2.8.7/code_of_conduct.md
+++ /dev/null
@@ -1 +0,0 @@
-See https://github.com/OpenDroneMap/documents/blob/master/CONDUCT.md
diff --git a/o/ODM/ODM-2.8.7/configure.py b/o/ODM/ODM-2.8.7/configure.py
deleted file mode 100644
index e2c99b71..00000000
--- a/o/ODM/ODM-2.8.7/configure.py
+++ /dev/null
@@ -1,210 +0,0 @@
-import sys, platform
-if sys.platform != 'win32':
- print("This script is for Windows only! Use configure.sh instead.")
- exit(1)
-if sys.version_info.major != 3 or sys.version_info.minor != 8:
- print("You need to use Python 3.8.x (due to the requirements.txt). You are using %s instead." % platform.python_version())
- exit(1)
-
-import argparse
-import subprocess
-import os
-import stat
-import urllib.request
-import shutil
-import zipfile
-
-from venv import EnvBuilder
-
-parser = argparse.ArgumentParser(description='ODM Windows Configure Script')
-parser.add_argument('action',
- type=str,
- choices=["build", "clean", "dist", "vcpkg_export"],
- help='Action: %(choices)s')
-parser.add_argument('--build-vcpkg',
- action='store_true',
- help='Build VCPKG environment from scratch instead of downloading prebuilt one.')
-parser.add_argument('--vcpkg-archive-url',
- type=str,
- default='https://github.com/OpenDroneMap/windows-deps/releases/download/2.5.0/vcpkg-export-250.zip',
- required=False,
- help='Path to VCPKG export archive')
-parser.add_argument('--code-sign-cert-path',
- type=str,
- default='',
- required=False,
- help='Path to pfx code signing certificate')
-
-args = parser.parse_args()
-
-def run(cmd, cwd=os.getcwd()):
- env = os.environ.copy()
- print(cmd)
- p = subprocess.Popen(cmd, shell=True, env=env, cwd=cwd)
- retcode = p.wait()
- if retcode != 0:
- raise Exception("Command returned %s" % retcode)
-
-# https://izziswift.com/shutil-rmtree-fails-on-windows-with-access-is-denied/
-def rmtree(top):
- for root, dirs, files in os.walk(top, topdown=False):
- for name in files:
- filename = os.path.join(root, name)
- os.chmod(filename, stat.S_IWUSR)
- os.remove(filename)
- for name in dirs:
- os.rmdir(os.path.join(root, name))
- os.rmdir(top)
-
-def vcpkg_requirements():
- with open("vcpkg-requirements.txt") as f:
- pckgs = list(filter(lambda l: len(l) > 0, map(str.strip, f.read().split("\n"))))
- return pckgs
-
-def build():
- # Create python virtual env
- if not os.path.isdir("venv"):
- print("Creating virtual env --> venv/")
- ebuilder = EnvBuilder(with_pip=True)
- ebuilder.create("venv")
-
- run("venv\\Scripts\\pip install --ignore-installed -r requirements.txt")
-
- # Download / build VCPKG environment
- if not os.path.isdir("vcpkg"):
- if args.build_vcpkg:
- print("TODO")
- # git clone vcpkg repo
- # bootstrap
- # install requirements
-
- else:
- if not os.path.exists("vcpkg-env.zip"):
- print("Downloading %s" % args.vcpkg_archive_url)
- with urllib.request.urlopen(args.vcpkg_archive_url) as response, open( "vcpkg-env.zip", 'wb') as out_file:
- shutil.copyfileobj(response, out_file)
- if not os.path.exists("vcpkg"):
- print("Extracting vcpkg-env.zip --> vcpkg/")
- with zipfile.ZipFile("vcpkg-env.zip") as z:
- top_dir = z.namelist()[0]
- z.extractall(".")
-
- if os.path.exists(top_dir):
- os.rename(top_dir, "vcpkg")
- else:
- print("Warning! Something looks wrong in the VCPKG archive... check the vcpkg/ directory.")
- safe_remove("vcpkg-env.zip")
-
- if not os.path.exists(os.path.join("SuperBuild", "build")) or not os.path.exists(os.path.join("SuperBuild", "install")):
- print("Compiling SuperBuild")
-
- build_dir = os.path.join("SuperBuild", "build")
- if not os.path.isdir(build_dir):
- os.mkdir(build_dir)
-
- toolchain_file = os.path.join(os.getcwd(), "vcpkg", "scripts", "buildsystems", "vcpkg.cmake")
- run("cmake .. -DCMAKE_TOOLCHAIN_FILE=\"%s\"" % toolchain_file, cwd=build_dir)
- run("cmake --build . --config Release", cwd=build_dir)
-
-def vcpkg_export():
- if not os.path.exists("vcpkg"):
- print("vcpkg directory does not exist. Did you build the environment?")
- exit(1)
-
- pkgs = vcpkg_requirements()
- out = "vcpkg-export-%s" % odm_version().replace(".", "")
- run("vcpkg\\vcpkg export %s --output=%s --zip" % (" ".join(pkgs), out))
-
-def odm_version():
- with open("VERSION") as f:
- return f.read().split("\n")[0].strip()
-
-def safe_remove(path):
- if os.path.isdir(path):
- rmtree(path)
- elif os.path.isfile(path):
- os.remove(path)
-
-def clean():
- safe_remove("vcpkg-download.zip")
- safe_remove("vcpkg")
- safe_remove("venv")
- safe_remove(os.path.join("SuperBuild", "build"))
- safe_remove(os.path.join("SuperBuild", "download"))
- safe_remove(os.path.join("SuperBuild", "src"))
- safe_remove(os.path.join("SuperBuild", "install"))
-
-def dist():
- if not os.path.exists("SuperBuild\\download"):
- print("You need to run configure.py build before you can run dist")
- exit(1)
-
- # Download VC++ runtime
- vcredist_path = os.path.join("SuperBuild", "download", "vc_redist.x64.zip")
- if not os.path.isfile(vcredist_path):
- vcredist_url = "https://github.com/OpenDroneMap/windows-deps/releases/download/2.5.0/VC_redist.x64.zip"
- print("Downloading %s" % vcredist_url)
- with urllib.request.urlopen(vcredist_url) as response, open(vcredist_path, 'wb') as out_file:
- shutil.copyfileobj(response, out_file)
-
- print("Extracting --> vc_redist.x64.exe")
- with zipfile.ZipFile(vcredist_path) as z:
- z.extractall(os.path.join("SuperBuild", "download"))
-
- # Download portable python
- if not os.path.isdir("python38"):
- pythonzip_path = os.path.join("SuperBuild", "download", "python38.zip")
- python_url = "https://github.com/OpenDroneMap/windows-deps/releases/download/2.5.0/python-3.8.1-embed-amd64-less-pth.zip"
- if not os.path.exists(pythonzip_path):
- print("Downloading %s" % python_url)
- with urllib.request.urlopen(python_url) as response, open( pythonzip_path, 'wb') as out_file:
- shutil.copyfileobj(response, out_file)
-
- os.mkdir("python38")
-
- print("Extracting --> python38/")
- with zipfile.ZipFile(pythonzip_path) as z:
- z.extractall("python38")
-
- # Download signtool
- signtool_path = os.path.join("SuperBuild", "download", "signtool.exe")
- signtool_url = "https://github.com/OpenDroneMap/windows-deps/releases/download/2.5.0/signtool.exe"
- if not os.path.exists(signtool_path):
- print("Downloading %s" % signtool_url)
- with urllib.request.urlopen(signtool_url) as response, open(signtool_path, 'wb') as out_file:
- shutil.copyfileobj(response, out_file)
-
- # Download innosetup
- if not os.path.isdir("innosetup"):
- innosetupzip_path = os.path.join("SuperBuild", "download", "innosetup.zip")
- innosetup_url = "https://github.com/OpenDroneMap/windows-deps/releases/download/2.5.0/innosetup-portable-win32-6.0.5-3.zip"
- if not os.path.exists(innosetupzip_path):
- print("Downloading %s" % innosetup_url)
- with urllib.request.urlopen(innosetup_url) as response, open(innosetupzip_path, 'wb') as out_file:
- shutil.copyfileobj(response, out_file)
-
- os.mkdir("innosetup")
-
- print("Extracting --> innosetup/")
- with zipfile.ZipFile(innosetupzip_path) as z:
- z.extractall("innosetup")
-
- # Run
- cs_flags = ""
- if args.code_sign_cert_path:
- cs_flags = '"/Ssigntool=%s sign /f %s /fd SHA1 /t http://timestamp.sectigo.com $f"' % (signtool_path, args.code_sign_cert_path)
- run("innosetup\\iscc /Qp " + cs_flags + " \"innosetup.iss\"")
-
- print("Done! Setup created in dist/")
-
-if args.action == 'build':
- build()
-elif args.action == 'vcpkg_export':
- vcpkg_export()
-elif args.action == 'dist':
- dist()
-elif args.action == 'clean':
- clean()
-else:
- parser.print_help()
- exit(1)
diff --git a/o/ODM/ODM-2.8.7/configure.sh b/o/ODM/ODM-2.8.7/configure.sh
deleted file mode 100755
index 2648f7a8..00000000
--- a/o/ODM/ODM-2.8.7/configure.sh
+++ /dev/null
@@ -1,217 +0,0 @@
-#!/bin/bash
-
-# Ensure the DEBIAN_FRONTEND environment variable is set for apt-get calls
-APT_GET="env DEBIAN_FRONTEND=noninteractive $(command -v apt-get)"
-
-check_version(){
- UBUNTU_VERSION=$(lsb_release -r)
- case "$UBUNTU_VERSION" in
- *"20.04"*|*"21.04"*)
- echo "Ubuntu: $UBUNTU_VERSION, good!"
- ;;
- *"18.04"*|*"16.04"*)
- echo "ODM 2.1 has upgraded to Ubuntu 21.04, but you're on $UBUNTU_VERSION"
- echo "* The last version of ODM that supports Ubuntu 16.04 is v1.0.2."
- echo "* The last version of ODM that supports Ubuntu 18.04 is v2.0.0."
- echo "We recommend you to upgrade, or better yet, use docker."
- exit 1
- ;;
- *)
- echo "You are not on Ubuntu 21.04 (detected: $UBUNTU_VERSION)"
- echo "It might be possible to run ODM on a newer version of Ubuntu, however, you cannot rely on this script."
- exit 1
- ;;
- esac
-}
-
-if [[ $2 =~ ^[0-9]+$ ]] ; then
- processes=$2
-else
- processes=$(nproc)
-fi
-
-ensure_prereqs() {
- export DEBIAN_FRONTEND=noninteractive
-
- if ! command -v sudo &> /dev/null; then
- echo "Installing sudo"
- $APT_GET update
- $APT_GET install -y -qq --no-install-recommends sudo
- else
- sudo $APT_GET update
- fi
-
- if ! command -v lsb_release &> /dev/null; then
- echo "Installing lsb_release"
- sudo $APT_GET install -y -qq --no-install-recommends lsb-release
- fi
-
- if ! command -v pkg-config &> /dev/null; then
- echo "Installing pkg-config"
- sudo $APT_GET install -y -qq --no-install-recommends pkg-config
- fi
-
- echo "Installing tzdata"
- sudo $APT_GET install -y -qq tzdata
-
- UBUNTU_VERSION=$(lsb_release -r)
- if [[ "$UBUNTU_VERSION" == *"20.04"* ]]; then
- echo "Enabling PPA for Ubuntu GIS"
- sudo $APT_GET install -y -qq --no-install-recommends software-properties-common
- sudo add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable
- sudo $APT_GET update
- fi
-
- echo "Installing Python PIP"
- sudo $APT_GET install -y -qq --no-install-recommends \
- python3-pip \
- python3-setuptools
- sudo pip3 install -U pip
- sudo pip3 install -U shyaml
-}
-
-# Save all dependencies in snapcraft.yaml to maintain a single source of truth.
-# Maintaining multiple lists will otherwise be painful.
-installdepsfromsnapcraft() {
- section="$2"
- case "$1" in
- build) key=build-packages; ;;
- runtime) key=stage-packages; ;;
- *) key=build-packages; ;; # shouldn't be needed, but it's here just in case
- esac
-
- UBUNTU_VERSION=$(lsb_release -r)
- SNAPCRAFT_FILE="snapcraft.yaml"
- if [[ "$UBUNTU_VERSION" == *"21.04"* ]]; then
- SNAPCRAFT_FILE="snapcraft21.yaml"
- fi
-
- cat snap/$SNAPCRAFT_FILE | \
- shyaml get-values-0 parts.$section.$key | \
- xargs -0 sudo $APT_GET install -y -qq --no-install-recommends
-}
-
-installruntimedepsonly() {
- echo "Installing runtime dependencies"
- ensure_prereqs
- check_version
-
- echo "Installing Required Requisites"
- installdepsfromsnapcraft runtime prereqs
- echo "Installing OpenCV Dependencies"
- installdepsfromsnapcraft runtime opencv
- echo "Installing OpenSfM Dependencies"
- installdepsfromsnapcraft runtime opensfm
- echo "Installing OpenMVS Dependencies"
- installdepsfromsnapcraft runtime openmvs
-}
-
-installreqs() {
- cd /code
-
- ## Set up library paths
- export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$RUNPATH/SuperBuild/install/lib
-
- ## Before installing
- echo "Updating the system"
- ensure_prereqs
- check_version
-
- echo "Installing Required Requisites"
- installdepsfromsnapcraft build prereqs
- echo "Installing OpenCV Dependencies"
- installdepsfromsnapcraft build opencv
- echo "Installing OpenSfM Dependencies"
- installdepsfromsnapcraft build opensfm
- echo "Installing OpenMVS Dependencies"
- installdepsfromsnapcraft build openmvs
-
- set -e
- pip install --ignore-installed -r requirements.txt
- #if [ ! -z "$GPU_INSTALL" ]; then
- #fi
- set +e
-}
-
-install() {
- installreqs
-
- if [ ! -z "$PORTABLE_INSTALL" ]; then
- echo "Replacing g++ and gcc with our scripts for portability..."
- if [ ! -e /usr/bin/gcc_real ]; then
- sudo mv -v /usr/bin/gcc /usr/bin/gcc_real
- sudo cp -v ./docker/gcc /usr/bin/gcc
- fi
- if [ ! -e /usr/bin/g++_real ]; then
- sudo mv -v /usr/bin/g++ /usr/bin/g++_real
- sudo cp -v ./docker/g++ /usr/bin/g++
- fi
- fi
-
- set -eo pipefail
-
- echo "Compiling SuperBuild"
- cd ${RUNPATH}/SuperBuild
- mkdir -p build && cd build
- cmake .. && make -j$processes
-
- echo "Configuration Finished"
-}
-
-uninstall() {
- check_version
-
- echo "Removing SuperBuild and build directories"
- cd ${RUNPATH}/SuperBuild
- rm -rfv build src download install
- cd ../
- rm -rfv build
-}
-
-reinstall() {
- check_version
-
- echo "Reinstalling ODM modules"
- uninstall
- install
-}
-
-clean() {
- rm -rf \
- ${RUNPATH}/SuperBuild/build \
- ${RUNPATH}/SuperBuild/download \
- ${RUNPATH}/SuperBuild/src
-
- # find in /code and delete static libraries and intermediate object files
- find ${RUNPATH} -type f -name "*.a" -delete -or -type f -name "*.o" -delete
-}
-
-usage() {
- echo "Usage:"
- echo "bash configure.sh [nproc]"
- echo "Subcommands:"
- echo " install"
- echo " Installs all dependencies and modules for running OpenDroneMap"
- echo " installruntimedepsonly"
- echo " Installs *only* the runtime libraries (used by docker builds). To build from source, use the 'install' command."
- echo " reinstall"
- echo " Removes SuperBuild and build modules, then re-installs them. Note this does not update OpenDroneMap to the latest version. "
- echo " uninstall"
- echo " Removes SuperBuild and build modules. Does not uninstall dependencies"
- echo " installreqs"
- echo " Only installs the requirements (does not build SuperBuild)"
- echo " clean"
- echo " Cleans the SuperBuild directory by removing temporary files. "
- echo " help"
- echo " Displays this message"
- echo "[nproc] is an optional argument that can set the number of processes for the make -j tag. By default it uses $(nproc)"
-}
-
-if [[ $1 =~ ^(install|installruntimedepsonly|reinstall|uninstall|installreqs|clean)$ ]]; then
- RUNPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
- "$1"
-else
- echo "Invalid instructions." >&2
- usage
- exit 1
-fi
diff --git a/o/ODM/ODM-2.8.7/console.bat b/o/ODM/ODM-2.8.7/console.bat
deleted file mode 100644
index d879be31..00000000
--- a/o/ODM/ODM-2.8.7/console.bat
+++ /dev/null
@@ -1,8 +0,0 @@
-@echo off
-
-setlocal
-call win32env.bat
-
-start "ODM Console" cmd /k "echo ____________________________ && echo / ____ _____ __ __ \ && echo ^| / __ \ ^| __ \ ^| \/ ^| ^| && echo ^| ^| ^| ^| ^| ^| ^| ^| ^| ^| \ / ^| ^| && echo ^| ^| ^| ^| ^| ^| ^| ^| ^| ^| ^|\/^| ^| ^| && echo ^| ^| ^|__^| ^| ^| ^|__^| ^| ^| ^| ^| ^| ^| && echo ^| \____/ ^|_____/ ^|_^| ^|_^| ^| && echo \____________________________/ && @echo off && FOR /F %%i in (VERSION) do echo version: %%i && @echo on && echo. && run --help
-
-endlocal
diff --git a/o/ODM/ODM-2.8.7/contrib/blender/README.md b/o/ODM/ODM-2.8.7/contrib/blender/README.md
deleted file mode 100644
index 74b93008..00000000
--- a/o/ODM/ODM-2.8.7/contrib/blender/README.md
+++ /dev/null
@@ -1,41 +0,0 @@
-# Blender scripts
-# odm_photo
-Renders photos from ODM-generated textured models.
-Currently can produce 360 panoramic photos and 360 3D panoramic (VR) photos.
-NB: the default resolution for 360 photos is 6000x3000 (maximum supported by Facebook).
-
-## Requirements
-* Blender
-* ExifTool (must be on your PATH)
-
-## Usage
-To generate a 360 panoramic photo:
-
- blender -b photo_360.blend --python odm_photo.py --
-
-Output is `/odm_photo/odm_photo_360.jpg`.
-
-To generate a 360 3D panoramic photo:
-
- blender -b photo_vr.blend --python odm_photo.py --
-
-Output is `/odm_photo/odm_photo_vr_L.jpg` and `/odm_photo/odm_photo_vr_R.jpg`.
-
-**NB: argument order matters!**
-
-# odm_video
-Renders videos from ODM-generated textured models.
-Currently can produce 360 panoramic videos.
-NB: the default resolution is 4096x2048 (maximum supported by Facebook).
-
-## Requirements
-* Blender
-* Python 2.7 (must be on your PATH)
-* Spatial Media Metadata Injector (https://github.com/google/spatial-media/tree/master/spatialmedia) (place in `spatialmedia` subdirectory)
-
-## Usage
-To generate a 360 panoramic video:
-
- blender -b photo_360.blend --python odm_video.py --
-
-Output is `/odm_video/odm_video_360.mp4`.
diff --git a/o/ODM/ODM-2.8.7/contrib/blender/common.py b/o/ODM/ODM-2.8.7/contrib/blender/common.py
deleted file mode 100644
index e3774982..00000000
--- a/o/ODM/ODM-2.8.7/contrib/blender/common.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import bpy
-import materials_utils
-
-def loadMesh(file):
-
- bpy.utils.register_module('materials_utils')
-
- bpy.ops.import_scene.obj(filepath=file,
- axis_forward='Y',
- axis_up='Z')
-
- bpy.ops.xps_tools.convert_to_cycles_all()
-
- model = bpy.data.objects[-1]
- minX = float('inf')
- maxX = float('-inf')
- minY = float('inf')
- maxY = float('-inf')
- minZ = float('inf')
- maxZ = float('-inf')
- for coord in model.bound_box:
- x = coord[0]
- y = coord[1]
- z = coord[2]
- minX = min(x, minX)
- maxX = max(x, maxX)
- minY = min(y, minY)
- maxY = max(y, maxY)
- minZ = min(z, minZ)
- maxZ = max(z, maxZ)
-
- model.location[2] += (maxZ - minZ)/2
-
- surfaceShaderType = 'ShaderNodeEmission'
- surfaceShaderName = 'Emission'
-
- for m in bpy.data.materials:
- nt = m.node_tree
- nt.nodes.remove(nt.nodes['Color Mult'])
- nt.nodes.remove(nt.nodes['Diffuse BSDF'])
- nt.nodes.new(surfaceShaderType)
- nt.links.new(nt.nodes['Material Output'].inputs[0],
- nt.nodes[surfaceShaderName].outputs[0])
- nt.links.new(nt.nodes[surfaceShaderName].inputs[0],
- nt.nodes['Diffuse Texture'].outputs[0])
diff --git a/o/ODM/ODM-2.8.7/contrib/blender/odm_photo.py b/o/ODM/ODM-2.8.7/contrib/blender/odm_photo.py
deleted file mode 100644
index b63cdcf3..00000000
--- a/o/ODM/ODM-2.8.7/contrib/blender/odm_photo.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env python
-
-# Renders a photo.
-# ExifTool must be on your PATH.
-# To generate a 360 panoramic photo:
-# blender -b photo_360.blend --python odm_photo.py --
-# To generate a 360 3D panoramic photo:
-# blender -b photo_vr.blend --python odm_photo.py --
-# NB: argument order matters!
-
-import sys
-import bpy
-import subprocess
-from common import loadMesh
-
-
-def main():
-
- if len(sys.argv) < 5 or sys.argv[-2] != '--':
- sys.exit('Please provide the ODM project path.')
-
- projectHome = sys.argv[-1]
-
- loadMesh(projectHome +
- '/odm_texturing/odm_textured_model_geo.obj')
-
- blendName = bpy.path.display_name_from_filepath(bpy.data.filepath)
- fileName = projectHome + '/odm_photo/odm_' + blendName
- render = bpy.data.scenes['Scene'].render
- render.filepath = fileName
- bpy.ops.render.render(write_still=True)
-
- width = render.resolution_x
- height = render.resolution_y
- if(render.use_multiview):
- writeExif(fileName+render.views[0].file_suffix+'.jpg', width, height)
- writeExif(fileName+render.views[1].file_suffix+'.jpg', width, height)
- else:
- writeExif(fileName+'.jpg', width, height)
-
-
-def writeExif(fileName, width, height):
- w = str(width)
- h = str(height)
-
- subprocess.run(['exiftool',
- '-overwrite_original',
- '-CroppedAreaImageWidthPixels=' + w,
- '-CroppedAreaImageHeightPixels=' + h,
- '-FullPanoWidthPixels=' + w,
- '-FullPanoHeightPixels=' + h,
- '-CroppedAreaLeftPixels=0',
- '-CroppedAreaTopPixels=0',
- '-ProjectionType=equirectangular',
- '-UsePanoramaViewer=True',
- '-PoseHeadingDegrees=0',
- '-LargestValidInteriorRectLeft=0',
- '-LargestValidInteriorRectTop=0',
- '-LargestValidInteriorRectWidth=' + w,
- '-LargestValidInteriorRectHeight=' + h,
- fileName])
-
-
-if __name__ == '__main__':
- main()
diff --git a/o/ODM/ODM-2.8.7/contrib/blender/odm_video.py b/o/ODM/ODM-2.8.7/contrib/blender/odm_video.py
deleted file mode 100644
index ef58648e..00000000
--- a/o/ODM/ODM-2.8.7/contrib/blender/odm_video.py
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env python
-
-# Renders a video.
-# To generate a 360 panoramic video:
-# blender -b photo_360.blend --python odm_video.py --
-
-import sys
-import subprocess
-import os
-import bpy
-from common import loadMesh
-
-
-def main():
-
- if len(sys.argv) < 7 or sys.argv[-4] != '--':
- sys.exit('Please provide the ODM project path, camera waypoints (xyz format), and number of frames.')
-
- projectHome = sys.argv[-3]
- waypointFile = sys.argv[-2]
- numFrames = int(sys.argv[-1])
-
- loadMesh(projectHome +
- '/odm_texturing/odm_textured_model_geo.obj')
-
- waypoints = loadWaypoints(waypointFile)
- numWaypoints = len(waypoints)
-
- scene = bpy.data.scenes['Scene']
-
- # create path thru waypoints
- curve = bpy.data.curves.new(name='CameraPath', type='CURVE')
- curve.dimensions = '3D'
- curve.twist_mode = 'Z_UP'
- nurbs = curve.splines.new('NURBS')
- nurbs.points.add(numWaypoints-1)
- weight = 1
- for i in range(numWaypoints):
- nurbs.points[i].co[0] = waypoints[i][0]
- nurbs.points[i].co[1] = waypoints[i][1]
- nurbs.points[i].co[2] = waypoints[i][2]
- nurbs.points[i].co[3] = weight
- nurbs.use_endpoint_u = True
- path = bpy.data.objects.new(name='CameraPath', object_data=curve)
- scene.objects.link(path)
-
- camera = bpy.data.objects['Camera']
- camera.location[0] = 0
- camera.location[1] = 0
- camera.location[2] = 0
- followPath = camera.constraints.new(type='FOLLOW_PATH')
- followPath.name = 'CameraFollowPath'
- followPath.target = path
- followPath.use_curve_follow = True
- animateContext = bpy.context.copy()
- animateContext['constraint'] = followPath
- bpy.ops.constraint.followpath_path_animate(animateContext,
- constraint='CameraFollowPath',
- frame_start=0,
- length=numFrames)
-
- blendName = bpy.path.display_name_from_filepath(bpy.data.filepath)
- fileName = projectHome + '/odm_video/odm_' + blendName.replace('photo', 'video')
- scene.frame_start = 0
- scene.frame_end = numFrames
- render = scene.render
- render.filepath = fileName + '.mp4'
- render.image_settings.file_format = 'FFMPEG'
- if(render.use_multiview):
- render.image_settings.stereo_3d_format.display_mode = 'TOPBOTTOM'
- render.image_settings.views_format = 'STEREO_3D'
- render.views[0].file_suffix = ''
- format3d = 'top-bottom'
- else:
- width = render.resolution_x
- height = render.resolution_y
- format3d = 'none'
- render.resolution_x = 4096
- render.resolution_y = 2048
-
- render.ffmpeg.audio_codec = 'AAC'
- render.ffmpeg.codec = 'H264'
- render.ffmpeg.format = 'MPEG4'
- render.ffmpeg.video_bitrate = 45000
- bpy.ops.render.render(animation=True)
-
- writeMetadata(fileName+'.mp4', format3d)
-
-
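-# The waypoint file is plain text with one "x y z" triple per line, in the
-# reconstruction's local coordinates. A hypothetical three-point path:
-#
-#   0.0   0.0  40.0
-#   50.0  0.0  40.0
-#   50.0  50.0 40.0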
-def loadWaypoints(filename):
- waypoints = []
- with open(filename) as f:
- for line in f:
- xyz = line.split()
- waypoints.append((float(xyz[0]), float(xyz[1]), float(xyz[2])))
- return waypoints
-
-
-def writeMetadata(filename, format3d):
- subprocess.run(['python',
- 'spatialmedia',
- '-i',
- '--stereo='+format3d,
- filename,
- filename+'.injected'])
- # check metadata injector was successful
- if os.path.exists(filename+'.injected'):
- os.remove(filename)
- os.rename(filename+'.injected', filename)
-
-
-if __name__ == '__main__':
- main()
diff --git a/o/ODM/ODM-2.8.7/contrib/blender/photo_360.blend b/o/ODM/ODM-2.8.7/contrib/blender/photo_360.blend
deleted file mode 100644
index 6e276947..00000000
Binary files a/o/ODM/ODM-2.8.7/contrib/blender/photo_360.blend and /dev/null differ
diff --git a/o/ODM/ODM-2.8.7/contrib/blender/photo_vr.blend b/o/ODM/ODM-2.8.7/contrib/blender/photo_vr.blend
deleted file mode 100644
index 00f8e7ff..00000000
Binary files a/o/ODM/ODM-2.8.7/contrib/blender/photo_vr.blend and /dev/null differ
diff --git a/o/ODM/ODM-2.8.7/contrib/grass/README.md b/o/ODM/ODM-2.8.7/contrib/grass/README.md
deleted file mode 100644
index 9700f8ad..00000000
--- a/o/ODM/ODM-2.8.7/contrib/grass/README.md
+++ /dev/null
@@ -1,16 +0,0 @@
-# GRASS scripts
-# odm_grass
-Generates contour and textured relief maps.
-
-## Requirements
-* Compile and install GRASS 7 or higher, https://grasswiki.osgeo.org/wiki/Compile_and_Install
-* Environment variables:
- * PYTHONHOME set to the location of Python
- * PYTHONPATH set to the location of GRASS Python libs
- * PATH includes GRASS bin and lib directories
- * GISBASE set to the location of GRASS
-
-## Usage
-    python odm_grass.py <project-path>
-
-Output is written to `<project>/odm_georeferencing/odm_contour.shp` and `<project>/odm_orthophoto/odm_relief.tif`.
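-
-A minimal sketch of a launcher that sets these variables from Python before invoking the script (the GISBASE path and the project path are placeholders; adjust them to your GRASS installation and dataset):
-
-```
-import os
-import subprocess
-
-env = os.environ.copy()
-env["GISBASE"] = "/usr/local/grass78"  # location of GRASS (placeholder)
-env["PYTHONPATH"] = os.path.join(env["GISBASE"], "etc", "python")  # GRASS Python libs
-env["PATH"] = os.pathsep.join([os.path.join(env["GISBASE"], "bin"),
-                               os.path.join(env["GISBASE"], "lib"),
-                               env["PATH"]])
-subprocess.run(["python", "odm_grass.py", "/path/to/project"], env=env, check=True)
-```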
diff --git a/o/ODM/ODM-2.8.7/contrib/grass/odm_grass.py b/o/ODM/ODM-2.8.7/contrib/grass/odm_grass.py
deleted file mode 100644
index 78bb6113..00000000
--- a/o/ODM/ODM-2.8.7/contrib/grass/odm_grass.py
+++ /dev/null
@@ -1,149 +0,0 @@
-#!/usr/bin/env python
-
-# To run, set the following env variables:
-# PYTHONHOME location of Python
-# PYTHONPATH location of GRASS Python libs
-# PATH include GRASS bin and lib
-# GISBASE location of GRASS
-
-import os
-import sys
-import grass.script as gscript
-import grass.script.core
-import grass.script.setup
-
-rsurfName = 'odm_rsurf'
-contourName = 'odm_contour'
-orthophotoName = 'odm_orthophoto'
-reliefName = 'odm_relief'
-shadedReliefName = reliefName + '_shaded'
-
-overwrite = True
-
-
-def main():
- if len(sys.argv) < 2:
- sys.exit('Please provide the ODM project path.')
-
- projectHome = sys.argv[1]
-
- gisdb = projectHome+'/grassdata'
- location = 'odm'
- gisrc = gscript.setup.init(os.environ['GISBASE'], gisdb, location)
-
- # get srs and initial extents
- with open(projectHome+'/odm_georeferencing/coords.txt') as f:
- srs = f.readline().split()
- mean = f.readline().split()
- meanX = float(mean[0])
- meanY = float(mean[1])
- minX = float('inf')
- maxX = float('-inf')
- minY = float('inf')
- maxY = float('-inf')
- for line in f:
- xy = line.split()
- x = float(xy[0])
- y = float(xy[1])
- minX = min(x, minX)
- maxX = max(x, maxX)
- minY = min(y, minY)
- maxY = max(y, maxY)
-
- datum = srs[0]
- proj = srs[1]
- zone = srs[2]
- gscript.core.create_location(gisdb, location, datum=datum,
- proj4='+proj='+proj+' +zone='+zone,
- overwrite=overwrite)
-
- n = meanY + maxY
- s = meanY + minY
- e = meanX + maxX
- w = meanX + minX
- gscript.run_command('g.region', flags='s', n=n, s=s, e=e, w=w, res=0.01,
- res3=0.01, overwrite=overwrite)
-
- contour(projectHome)
- relief(projectHome)
-
- os.remove(gisrc)
-
-
-def contour(projectHome):
-    """
-    Creates a contour map based on the ODM project DEM model.
-    """
-    print('Creating contour map')
-
- step = 0.25
-
- gscript.run_command('r.in.gdal', flags='o',
- input=projectHome+'/odm_georeferencing/odm_georeferencing_model_dem.tif',
- output=rsurfName, memory=2047,
- overwrite=overwrite)
-
- gscript.run_command('r.contour', input=rsurfName, output=contourName,
- step=step, overwrite=overwrite)
-
- gscript.run_command('v.out.ogr', input=contourName,
- output=projectHome +
- '/odm_georeferencing/odm_contour.shp',
- overwrite=overwrite)
-
-
-def relief(projectHome):
-    """
-    Creates a textured relief map in GeoTIFF format.
-    NB: this is an RGBA raster and so is readable by image software.
-    """
-    print('Creating relief map')
-
- gscript.run_command('r.in.gdal', flags='o',
- input=projectHome+'/odm_orthophoto/odm_orthophoto.tif',
- output=orthophotoName, memory=2047,
- overwrite=overwrite)
-
- gscript.run_command('r.composite', red=orthophotoName+'.red',
- green=orthophotoName+'.green',
- blue=orthophotoName+'.blue',
- output=orthophotoName+'.rgb',
- overwrite=overwrite)
-
- gscript.run_command('r.relief', input=rsurfName, output=reliefName,
- overwrite=overwrite)
-
- gscript.run_command('r.shade', shade=reliefName,
- color=orthophotoName+'.rgb', output=shadedReliefName,
- overwrite=overwrite)
-
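-    # In r.mapcalc, the r#, g#, b# operators read the red/green/blue color
-    # components of the shaded relief raster; cells where the orthophoto is
-    # null are set to 0 (nodata)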
- calc = ';'.join([
- '$shadedRelief.red = ' +
- 'if(isnull($orthophoto.red), 0, r#$shadedRelief)',
- '$shadedRelief.green = ' +
- 'if(isnull($orthophoto.green), 0, g#$shadedRelief)',
- '$shadedRelief.blue = ' +
- 'if(isnull($orthophoto.blue), 0, b#$shadedRelief)',
- '$shadedRelief.alpha = ' +
- 'if(isnull($orthophoto.alpha), 0, 255)'
- ])
- gscript.mapcalc(calc, shadedRelief=shadedReliefName,
- orthophoto=orthophotoName, overwrite=overwrite)
-
- gscript.run_command('i.group', group=shadedReliefName+'.group',
- input=shadedReliefName+'.red,' +
- shadedReliefName+'.green,' +
- shadedReliefName+'.blue,' +
- shadedReliefName+'.alpha')
-
- gscript.run_command('r.out.gdal', flags='cm',
- input=shadedReliefName+'.group',
- output=projectHome+'/odm_orthophoto/odm_relief.tif',
- format='GTiff', type='Byte',
- createopt='TILED=yes,COMPRESS=DEFLATE,PREDICTOR=2,' +
- 'BLOCKXSIZE=512,BLOCKYSIZE=512',
- nodata=0, overwrite=overwrite)
-
-
-if __name__ == '__main__':
- main()
diff --git a/o/ODM/ODM-2.8.7/contrib/mergepreview/README.md b/o/ODM/ODM-2.8.7/contrib/mergepreview/README.md
deleted file mode 100644
index 6255750d..00000000
--- a/o/ODM/ODM-2.8.7/contrib/mergepreview/README.md
+++ /dev/null
@@ -1,29 +0,0 @@
-# Merge Preview
-
-Quickly projects drone images on a map by using georeferencing, camera angles and a global DTM. The images are then merged using ODM's split-merge algorithms.
-
-Quality is obviously limited: the script works only with nadir images and requires gimbal/camera angle information (not all drones provide it).
-
-Usage:
-
-```
-# Install DDB (required for geoprojection)
-
-curl -fsSL https://get.dronedb.app -o get-ddb.sh
-sh get-ddb.sh
-
-# Run
-
-python3 mergepreview.py images/*.JPG --size 25%
-```
-
-## Example
-
-
-
-
-[Sheffield Park](https://community.opendronemap.org/t/sheffield-park-1/58) images processed with this script.
-
-## Disclaimer
-
-This script is highly experimental. We welcome contributions to improve it.
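-
-For reference, the per-image pipeline reduces to the sketch below, condensed from mergepreview.py (shown below); the function names and parameters match that script:
-
-```
-from opendm import orthophoto
-from opendm.cutline import compute_cutline
-
-def preview_merge(projected_tifs, bounds_files, output_file):
-    all_orthos_and_ortho_cuts = []
-    for tif, bounds in zip(projected_tifs, bounds_files):
-        # Compute a cutline for each geoprojected image
-        cutline = tif.replace(".tif", "_cutline.gpkg")
-        compute_cutline(tif, bounds, cutline, 4, scale=1)
-        # Cut along the cutline, then feather the edges for blending
-        cut = tif.replace(".tif", "_cut.tif")
-        orthophoto.compute_mask_raster(tif, cutline, cut,
-                                       blend_distance=20, only_max_coords_feature=True)
-        feathered = tif.replace(".tif", "_feathered.tif")
-        orthophoto.feather_raster(tif, feathered, blend_distance=20)
-        all_orthos_and_ortho_cuts.append([feathered, cut])
-    orthophoto.merge(all_orthos_and_ortho_cuts, output_file,
-                     {'TILED': 'YES', 'COMPRESS': 'LZW'})
-```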
diff --git a/o/ODM/ODM-2.8.7/contrib/mergepreview/mergepreview.py b/o/ODM/ODM-2.8.7/contrib/mergepreview/mergepreview.py
deleted file mode 100644
index 1e05e98c..00000000
--- a/o/ODM/ODM-2.8.7/contrib/mergepreview/mergepreview.py
+++ /dev/null
@@ -1,126 +0,0 @@
-import argparse
-import sys
-sys.path.append("../../")
-
-import os
-from opendm import orthophoto
-from opendm.cutline import compute_cutline
-import glob
-from opendm.system import run
-from opendm import log
-import shutil
-
-
-parser = argparse.ArgumentParser(description='Quick Merge Preview')
-parser.add_argument('input',
- metavar='',
- nargs='+',
- help='Path to input images or image folder')
-parser.add_argument('--size', '-s',
- metavar='',
- type=str,
- help='Size in percentage terms',
- default='25%')
-parser.add_argument('--force', '-f',
- action='store_true',
- default=False,
- help="Force remove existing directories")
-
-args = parser.parse_args()
-
-try:
-    log.ODM_INFO("Checking for DDB...")
-    run("ddb --version")
-except:
-    log.ODM_ERROR("ddb is not installed. Install it first: https://docs.dronedb.app")
-    exit(1)
-
-if len(args.input) == 1 and os.path.isdir(args.input[0]):
-    input_images = []
-    for ext in ["JPG", "jpg", "JPEG", "jpeg", "TIF", "tif", "TIFF", "tiff"]:
-        input_images += glob.glob(os.path.join(args.input[0], "*.%s" % ext))
-else:
- input_images = args.input
-
-log.ODM_INFO("Processing %s images" % len(input_images))
-
-if len(input_images) == 0:
- log.ODM_ERROR("No images")
- exit(1)
-
-cwd_path = os.path.dirname(input_images[0])
-tmp_path = os.path.join(cwd_path, "tmp")
-if os.path.isdir(tmp_path):
- if args.force:
- log.ODM_INFO("Removing previous directory %s" % tmp_path)
- shutil.rmtree(tmp_path)
- else:
- log.ODM_ERROR("%s exists. Pass --force to override." % tmp_path)
- exit(1)
-
-os.makedirs(tmp_path)
-
-for f in input_images:
- name, _ = os.path.splitext(os.path.basename(f))
- geojson = os.path.join(tmp_path, "%s.geojson" % name)
- gpkg = os.path.join(tmp_path, "%s.gpkg" % name)
-
- run("ddb geoproj \"%s\" \"%s\" -s \"%s\"" % (tmp_path, f, args.size))
-
- # Bounds (GPKG)
- run("ddb info --format geojson --geometry polygon \"%s\" > \"%s\"" % (f, geojson))
- run("ogr2ogr \"%s\" \"%s\"" % (gpkg, geojson))
-
-log.ODM_INFO("Computing cutlines")
-
-projected_images = glob.glob(os.path.join(tmp_path, "*.tif"))
-all_orthos_and_ortho_cuts = []
-
-for f in projected_images:
- name, _ = os.path.splitext(os.path.basename(f))
- cutline_file = os.path.join(tmp_path, "%s_cutline.gpkg" % name)
- bounds_file_path = os.path.join(tmp_path, "%s.gpkg" % name)
-
- compute_cutline(f,
- bounds_file_path,
- cutline_file,
- 4,
- scale=1)
-
- cut_raster = os.path.join(tmp_path, "%s_cut.tif" % name)
- orthophoto.compute_mask_raster(f, cutline_file,
- cut_raster,
- blend_distance=20, only_max_coords_feature=True)
-
- feathered_raster = os.path.join(tmp_path, "%s_feathered.tif" % name)
-
- orthophoto.feather_raster(f, feathered_raster,
- blend_distance=20
- )
-
- all_orthos_and_ortho_cuts.append([feathered_raster, cut_raster])
-
-log.ODM_INFO("Merging...")
-
-if len(all_orthos_and_ortho_cuts) > 1:
- # TODO: histogram matching via rasterio
- # currently parts have different color tones
- output_file = os.path.join(cwd_path, 'mergepreview.tif')
-
- if os.path.isfile(output_file):
- os.remove(output_file)
-
- orthophoto.merge(all_orthos_and_ortho_cuts, output_file, {
- 'TILED': 'YES',
- 'COMPRESS': 'LZW',
- 'PREDICTOR': '2',
- 'BIGTIFF': 'IF_SAFER',
- 'BLOCKXSIZE': 512,
- 'BLOCKYSIZE': 512
- })
-
-
- log.ODM_INFO("Wrote %s" % output_file)
- shutil.rmtree(tmp_path)
-else:
- log.ODM_ERROR("Error: no orthos found to merge")
- exit(1)
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/contrib/ndvi/README.md b/o/ODM/ODM-2.8.7/contrib/ndvi/README.md
deleted file mode 100644
index 2b5df267..00000000
--- a/o/ODM/ODM-2.8.7/contrib/ndvi/README.md
+++ /dev/null
@@ -1,31 +0,0 @@
-# NDVI
-
-This script produces an NDVI raster from a CIR orthophoto (odm_orthophoto.tif in your project).
-
-## Requirements
-* python-gdal package from apt
-* numpy python package (included in ODM build)
-
-## Usage
-```
-ndvi.py [-h] [--overwrite] orthophoto N N out
-
-positional arguments:
-  orthophoto       The CIR orthophoto. Must be a GeoTiff.
-  N                NIR band number
-  N                Vis band number
-  out              The output file. Also must be in GeoTiff format
-
-optional arguments:
-  -h, --help       show this help message and exit
-  --overwrite, -o  Will overwrite output file if it exists.
-```
-
-**Argument order matters! NIR first, then VIS**
-
-## Examples:
-Use the [Seneca](https://github.com/OpenDroneMap/odm_data_seneca) dataset for a good working CIR. The band order for that set is NIR-G-B, so you will want to use bands 1 and 2 for this script. After running ODM, the command goes as follows:
-
-`python ndvi.py /path/to/odm_orthophoto.tif 1 2 /path/to/ndvi.tif`
-
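-As a minimal sketch of the computation the script performs (not part of the tool):
-
-```
-import numpy
-numpy.seterr(divide='ignore', invalid='ignore')
-nir = numpy.array([[100., 50.], [0., 80.]])   # toy NIR band
-vis = numpy.array([[ 20., 50.], [0., 40.]])   # toy visible band
-mask = numpy.not_equal(nir + vis, 0.0)        # mask out divide-by-zero cells
-ndvi = numpy.choose(mask, (-1.0, numpy.true_divide(nir - vis, nir + vis)))
-# ndvi -> [[ 0.667  0.   ]
-#          [-1.     0.333]]
-```
-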
-The output in QGIS (with a spectral pseudocolor): 
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/contrib/ndvi/agricultural_indices.py b/o/ODM/ODM-2.8.7/contrib/ndvi/agricultural_indices.py
deleted file mode 100755
index 91c6c636..00000000
--- a/o/ODM/ODM-2.8.7/contrib/ndvi/agricultural_indices.py
+++ /dev/null
@@ -1,112 +0,0 @@
-#!/usr/bin/env python3
-# A script to calculate agricultural indices
-# NDVI - Normalized Difference Vegetation Index - (NIR−RED)/(NIR + RED)
-# NDRE - Normalized Difference Red Edge - (NIR−RE)/(NIR + RE)
-# GNDVI - Green NDVI - (NIR−GREEN)/(NIR + GREEN)
-# https://support.micasense.com/hc/en-us/articles/226531127-Creating-agricultural-indices-NDVI-NDRE-in-QGIS-
-# requires python-gdal
-
-import numpy
-import argparse
-import os.path
-try:
-    from osgeo import gdal
-    from osgeo import osr
-except ImportError:
-    raise ImportError("You need to install python-gdal: "
-                      "run `sudo apt-get install libgdal-dev`, "
-                      "check the GDAL version with `gdal-config --version`, "
-                      "then install the matching version with `pip3 install GDAL==2.4.0`")
-
-
-def parse_args():
-    argument_parser = argparse.ArgumentParser('Create a GeoTIFF with NDVI, NDRE and '
-                                              'GNDVI agricultural indices from a multispectral orthophoto')
-
- argument_parser.add_argument("orthophoto", metavar="",
- type=argparse.FileType('r'),
- help="The CIR orthophoto. Must be a GeoTiff.")
- argument_parser.add_argument("-red", type=int,
- help="Red band number")
- argument_parser.add_argument("-green", type=int,
- help="Green band number")
- argument_parser.add_argument("-blue", type=int,
- help="Blue band number")
- argument_parser.add_argument("-re", type=int,
- help="RedEdge band number")
- argument_parser.add_argument("-nir", type=int,
- help="NIR band number")
- argument_parser.add_argument("out", metavar="",
- type=argparse.FileType('w'),
- help="The output file.")
- argument_parser.add_argument("--overwrite", "-o",
- action='store_true',
- default=False,
- help="Will overwrite output file if it exists. ")
- return argument_parser.parse_args()
-
-
-if __name__ == "__main__":
-
- # Suppress/hide warning when dividing by zero
- numpy.seterr(divide='ignore', invalid='ignore')
-
- rootdir = os.path.dirname(os.path.abspath(__file__))
-
- # Parse args
- args = parse_args()
-
- if not args.overwrite and os.path.isfile(os.path.join(rootdir, args.out.name)):
- print("File exists, rename or use -o to overwrite.")
- exit()
-
- # import raster
- print("Reading file")
- raster = gdal.Open(args.orthophoto.name)
- orthophoto = raster.ReadAsArray()
-
- # parse out bands
- print("Reading rasters")
- red_matrix=orthophoto[args.red-1].astype(float)
- green_matrix=orthophoto[args.green-1].astype(float)
- blue_matrix=orthophoto[args.blue-1].astype(float)
- re_matrix=orthophoto[args.re-1].astype(float)
- nir_matrix=orthophoto[args.nir-1].astype(float)
-
- outfile = args.out
-
- # NDVI
- print("Computing NDVI")
- #ndvi = calc_ndvi(nir_matrix, red_matrix)
- ndvi = (nir_matrix.astype(float) - red_matrix.astype(float)) / (nir_matrix + red_matrix)
- # NDRE
- print("Computing NDRE")
- #ndre = calc_ndre(nir_matrix, re_matrix)
- ndre = (nir_matrix.astype(float) - re_matrix.astype(float)) / (nir_matrix + re_matrix)
-
- # GNDVI
- print("Computing GNDVI")
- #gndvi = calc_gndvi(nir_matrix, green_matrix)
- gndvi = (nir_matrix.astype(float) - green_matrix.astype(float)) / (nir_matrix + green_matrix)
-
- print("Saving Files")
- # export raster
-
- for name, matrix in zip(['ndvi', 'ndre', 'gndvi' ] ,[ndvi,ndre,gndvi] ):
- print(name)
- out_driver = gdal.GetDriverByName('GTiff')\
- .Create(name+'_'+outfile.name, int(ndvi.shape[1]), int(ndvi.shape[0]), 1, gdal.GDT_Float32)
- outband = out_driver.GetRasterBand(1)
- outband.SetDescription(name.capitalize())
- outband.WriteArray(matrix)
- outcrs = osr.SpatialReference()
- outcrs.ImportFromWkt(raster.GetProjectionRef())
- out_driver.SetProjection(outcrs.ExportToWkt())
- out_driver.SetGeoTransform(raster.GetGeoTransform())
- outband.FlushCache()
-
-
diff --git a/o/ODM/ODM-2.8.7/contrib/ndvi/ndvi.py b/o/ODM/ODM-2.8.7/contrib/ndvi/ndvi.py
deleted file mode 100644
index ab457f2d..00000000
--- a/o/ODM/ODM-2.8.7/contrib/ndvi/ndvi.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# A script to calculate the NDVI from a color-infrared orthophoto.
-# requires python-gdal
-
-import numpy
-import argparse
-import os.path
-try:
-    from osgeo import gdal
-    from osgeo import osr
-except ImportError:
-    raise ImportError("You need to install python-gdal. run `apt-get install python-gdal`")
-
-
-def parse_args():
- p = argparse.ArgumentParser("A script that calculates the NDVI of a CIR orthophoto")
-
- p.add_argument("orthophoto", metavar="",
- type=argparse.FileType('r'),
- help="The CIR orthophoto. Must be a GeoTiff.")
- p.add_argument("nir", metavar="N", type=int,
- help="NIR band number")
- p.add_argument("vis", metavar="N", type=int,
- help="Vis band number")
- p.add_argument("out", metavar="",
- type=argparse.FileType('w'),
- help="The output file. Also must be in GeoTiff format")
- p.add_argument("--overwrite", "-o",
- action='store_true',
- default=False,
- help="Will overwrite output file if it exists. ")
- return p.parse_args()
-
-
-def calc_ndvi(nir, vis):
-    """
-    Calculates the NDVI of an orthophoto using nir and vis bands.
-    :param nir: An array containing the nir band
-    :param vis: An array containing the vis band
-    :return: An array that will be exported as a tif
-    """
-
-    # Take the orthophoto and do nir - vis / nir + vis
-    # for each cell, calculate ndvi (masking out where divide by 0)
-    mask = numpy.not_equal((nir + vis), 0.0)
-    return numpy.choose(mask, (-1.0, numpy.true_divide(numpy.subtract(nir, vis), numpy.add(nir, vis))))
-
-
-if __name__ == "__main__":
-
- rootdir = os.path.dirname(os.path.abspath(__file__))
-
- # Parse args
- args = parse_args()
-
- if not args.overwrite and os.path.isfile(os.path.join(rootdir, args.out.name)):
- print("File exists, rename or use -o to overwrite.")
- exit()
-
- # import raster
- raster = gdal.Open(args.orthophoto.name)
- orthophoto = raster.ReadAsArray()
- # parse out bands
- nirb = orthophoto[args.nir - 1].astype(float)
- visb = orthophoto[args.vis - 1].astype(float)
-
- outfile = args.out
-
- # Do ndvi calc
- ndvi = calc_ndvi(nirb, visb)
-
- # export raster
- out_driver = gdal.GetDriverByName('GTiff')\
- .Create(outfile.name, int(ndvi.shape[1]), int(ndvi.shape[0]), 1, gdal.GDT_Float32)
- outband = out_driver.GetRasterBand(1)
- outband.WriteArray(ndvi)
- outcrs = osr.SpatialReference()
- outcrs.ImportFromWkt(raster.GetProjectionRef())
- out_driver.SetProjection(outcrs.ExportToWkt())
- out_driver.SetGeoTransform(raster.GetGeoTransform())
- outband.FlushCache()
diff --git a/o/ODM/ODM-2.8.7/contrib/ndvi/rename_sentera_agx710_multispectral_tif.py b/o/ODM/ODM-2.8.7/contrib/ndvi/rename_sentera_agx710_multispectral_tif.py
deleted file mode 100644
index d9415b17..00000000
--- a/o/ODM/ODM-2.8.7/contrib/ndvi/rename_sentera_agx710_multispectral_tif.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python3
-# A script to rename the bands of a Sentera AGX710 multispectral GeoTIFF in place.
-# requires python-gdal
-
-import argparse
-import sys
-try:
-    from osgeo import gdal
-except ImportError:
-    raise ImportError("You need to install python-gdal: "
-                      "run `sudo apt-get install libgdal-dev`, "
-                      "check the GDAL version with `gdal-config --version`, "
-                      "then install the matching version with `pip3 install GDAL==2.4.0`")
-
-def parse_args():
-    """ Parse arguments """
-    argument_parser = argparse.ArgumentParser(
-        "A script that renames, in place, the layers of a Sentera AGX710 GeoTIFF orthophoto.")
-    argument_parser.add_argument("orthophoto", metavar="",
-                                 type=argparse.FileType('r'),
-                                 help="The input orthophoto. Must be a GeoTiff.")
-    return argument_parser.parse_args()
-
-
-def rename_sentera_agx710_layers(name):
-    """ Only rename GeoTIFFs built from Sentera AGX710 images with ODM """
-    if raster.RasterCount != 7:
-        raise ValueError(F'File {name} does not have the 7 layers of a regular '
-                         'GeoTIFF built from Sentera AGX710 images with ODM')
-
-    if 'RedGreenBlue' in raster.GetRasterBand(1).GetDescription() and \
-       'RedEdgeGarbageNIR' in raster.GetRasterBand(2).GetDescription():
-
-        print("Sentera AGX710 Geotiff file has been detected. Layer names are:")
-        print("RedGreenBlue for Band 1\nRedEdgeGarbageNIR for Band 2"
-              "\nNone for Band 3\nNone for Band 4\nNone for Band 5\nNone for Band 6")
-        print("\nAfter renaming, bands will be:")
-        print("Red for Band 1\nGreen for Band 2\nBlue for Band 3\n"
-              "RedEdge for Band 4\nGarbage for Band 5\nNIR for Band 6")
-
-        answer = input(
-            "Are you sure you want to rename the layers of the input file? [yes/no] ")
-        if answer == 'yes':
-            raster.GetRasterBand(1).SetDescription('Red')
-            raster.GetRasterBand(2).SetDescription('Green')
-            raster.GetRasterBand(3).SetDescription('Blue')
-            raster.GetRasterBand(4).SetDescription('RedEdge')
-            raster.GetRasterBand(5).SetDescription('Garbage')
-            raster.GetRasterBand(6).SetDescription('NIR')
-            # raster.GetRasterBand(7).SetDescription('Alpha')
-        else:
-            print("No renaming")
-    else:
-        print(F'No need for band renaming in {name}')
-        sys.exit()
-
-
-if __name__ == "__main__":
-
- # Parse args
- args = parse_args()
-
- # import raster
- raster = gdal.Open(args.orthophoto.name, gdal.GA_Update)
-
- # Rename layers
- rename_sentera_agx710_layers(args.orthophoto.name)
-
- # de-reference the datasets, which triggers gdal to save
- raster = None
diff --git a/o/ODM/ODM-2.8.7/contrib/orthorectify/README.md b/o/ODM/ODM-2.8.7/contrib/orthorectify/README.md
deleted file mode 100644
index f239793b..00000000
--- a/o/ODM/ODM-2.8.7/contrib/orthorectify/README.md
+++ /dev/null
@@ -1,69 +0,0 @@
-# Orthorectification Tool
-
-
-
-This tool is capable of orthorectifying individual images (or all images) from an existing ODM reconstruction.
-
-
-
-## Usage
-
-After running a reconstruction using ODM:
-
-```
-docker run -ti --rm -v /home/youruser/datasets:/datasets opendronemap/odm --project-path /datasets project
-```
-
-You can run the orthorectification module by running:
-
-```
-docker run -ti --rm -v /home/youruser/datasets:/datasets --entrypoint /code/contrib/orthorectify/run.sh opendronemap/odm /datasets/project
-```
-
-This will start the orthorectification process for all images in the dataset. See additional flags you can pass at the end of the command above:
-
-```
-usage: orthorectify.py [-h] [--dem DEM] [--no-alpha]
-                       [--interpolation {nearest,bilinear}]
-                       [--outdir OUTDIR] [--image-list IMAGE_LIST]
-                       [--images IMAGES] [--threads THREADS]
-                       [--skip-visibility-test]
-                       dataset
-
-Orthorectification Tool
-
-positional arguments:
- dataset Path to ODM dataset
-
-optional arguments:
-  -h, --help            show this help message and exit
-  --dem DEM             Absolute path to DEM to use to
-                        orthorectify images. Default:
-                        odm_dem/dsm.tif
-  --no-alpha            Don't output an alpha channel
-  --interpolation {nearest,bilinear}
-                        Type of interpolation to use to sample
-                        pixel values. Default: bilinear
-  --outdir OUTDIR       Output directory where to store results.
-                        Default: orthorectified
-  --image-list IMAGE_LIST
-                        Path to file that contains the list of
-                        image filenames to orthorectify. By
-                        default all images in a dataset are
-                        processed. Default: img_list.txt
-  --images IMAGES       Comma-separated list of filenames to
-                        rectify. Use as an alternative to --image-
-                        list. Default: process all images.
-  --threads THREADS     Number of CPU processes to use. Default:
-                        the number of CPUs on the machine
-  --skip-visibility-test
-                        Skip visibility testing (faster but leaves
-                        artifacts due to relief displacement)
-```
-
-## Roadmap
-
-Help us improve this module! We could add:
-
- - [ ] GPU support for faster processing
- - [ ] Merging of multiple orthorectified images (blending, filtering, seam leveling)
- - [ ] Faster visibility test
- - [ ] Different methods for orthorectification (direct)
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/contrib/orthorectify/orthorectify.py b/o/ODM/ODM-2.8.7/contrib/orthorectify/orthorectify.py
deleted file mode 100755
index 548fc1e0..00000000
--- a/o/ODM/ODM-2.8.7/contrib/orthorectify/orthorectify.py
+++ /dev/null
@@ -1,392 +0,0 @@
-#!/usr/bin/env python3
-# Author: Piero Toffanin
-# License: AGPLv3
-
-import os
-import sys
-sys.path.insert(0, os.path.join("..", "..", os.path.dirname(__file__)))
-
-from math import sqrt
-import rasterio
-import numpy as np
-import numpy.ma as ma
-import multiprocessing
-import argparse
-import functools
-from skimage.draw import line
-from opensfm import dataset
-
-default_dem_path = "odm_dem/dsm.tif"
-default_outdir = "orthorectified"
-default_image_list = "img_list.txt"
-
-parser = argparse.ArgumentParser(description='Orthorectification Tool')
-parser.add_argument('dataset',
- type=str,
- help='Path to ODM dataset')
-parser.add_argument('--dem',
- type=str,
- default=default_dem_path,
- help='Absolute path to DEM to use to orthorectify images. Default: %(default)s')
-parser.add_argument('--no-alpha',
-                    action='store_true',
-                    help="Don't output an alpha channel")
-parser.add_argument('--interpolation',
-                    type=str,
-                    choices=('nearest', 'bilinear'),
-                    default='bilinear',
-                    help="Type of interpolation to use to sample pixel values. Default: %(default)s")
-parser.add_argument('--outdir',
- type=str,
- default=default_outdir,
- help="Output directory where to store results. Default: %(default)s")
-parser.add_argument('--image-list',
- type=str,
- default=default_image_list,
- help="Path to file that contains the list of image filenames to orthorectify. By default all images in a dataset are processed. Default: %(default)s")
-parser.add_argument('--images',
- type=str,
- default="",
- help="Comma-separated list of filenames to rectify. Use as an alternative to --image-list. Default: process all images.")
-parser.add_argument('--threads',
- type=int,
- default=multiprocessing.cpu_count(),
- help="Number of CPU processes to use. Default: %(default)s")
-parser.add_argument('--skip-visibility-test',
-                    action='store_true',
-                    help="Skip visibility testing (faster but leaves artifacts due to relief displacement)")
-args = parser.parse_args()
-
-dataset_path = args.dataset
-dem_path = os.path.join(dataset_path, default_dem_path) if args.dem == default_dem_path else args.dem
-interpolation = args.interpolation
-with_alpha = not args.no_alpha
-image_list = os.path.join(dataset_path, default_image_list) if args.image_list == default_image_list else args.image_list
-
-cwd_path = os.path.join(dataset_path, default_outdir) if args.outdir == default_outdir else args.outdir
-
-if not os.path.exists(cwd_path):
- os.makedirs(cwd_path)
-
-target_images = [] # all
-
-if args.images:
- target_images = list(map(str.strip, args.images.split(",")))
- print("Processing %s images" % len(target_images))
-elif args.image_list:
- with open(image_list) as f:
- target_images = list(filter(lambda filename: filename != '', map(str.strip, f.read().split("\n"))))
- print("Processing %s images" % len(target_images))
-
-if not os.path.exists(dem_path):
- print("Whoops! %s does not exist. Provide a path to a valid DEM" % dem_path)
- exit(1)
-
-
-def bilinear_interpolate(im, x, y):
- x = np.asarray(x)
- y = np.asarray(y)
-
- x0 = np.floor(x).astype(int)
- x1 = x0 + 1
- y0 = np.floor(y).astype(int)
- y1 = y0 + 1
-
- x0 = np.clip(x0, 0, im.shape[1]-1)
- x1 = np.clip(x1, 0, im.shape[1]-1)
- y0 = np.clip(y0, 0, im.shape[0]-1)
- y1 = np.clip(y1, 0, im.shape[0]-1)
-
- Ia = im[ y0, x0 ]
- Ib = im[ y1, x0 ]
- Ic = im[ y0, x1 ]
- Id = im[ y1, x1 ]
-
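-    # Each corner sample is weighted by the area of the rectangle opposite to
-    # it, so the four weights sum to 1 for points inside the pixel grid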
- wa = (x1-x) * (y1-y)
- wb = (x1-x) * (y-y0)
- wc = (x-x0) * (y1-y)
- wd = (x-x0) * (y-y0)
-
- return wa*Ia + wb*Ib + wc*Ic + wd*Id
-
-# Read DEM
-print("Reading DEM: %s" % dem_path)
-with rasterio.open(dem_path) as dem_raster:
- dem = dem_raster.read()[0]
- dem_has_nodata = dem_raster.profile.get('nodata') is not None
-
- if dem_has_nodata:
- m = ma.array(dem, mask=dem==dem_raster.nodata)
- dem_min_value = m.min()
- dem_max_value = m.max()
- else:
- dem_min_value = dem.min()
- dem_max_value = dem.max()
-
- print("DEM Minimum: %s" % dem_min_value)
- print("DEM Maximum: %s" % dem_max_value)
-
- h, w = dem.shape
-
- crs = dem_raster.profile.get('crs')
- dem_offset_x, dem_offset_y = (0, 0)
-
- if crs:
- print("DEM has a CRS: %s" % str(crs))
-
- # Read coords.txt
- coords_file = os.path.join(dataset_path, "odm_georeferencing", "coords.txt")
- if not os.path.exists(coords_file):
- print("Whoops! Cannot find %s (we need that!)" % coords_file)
- exit(1)
-
- with open(coords_file) as f:
- l = f.readline() # discard
-
- # second line is a northing/easting offset
- l = f.readline().rstrip()
- dem_offset_x, dem_offset_y = map(float, l.split(" "))
-
- print("DEM offset: (%s, %s)" % (dem_offset_x, dem_offset_y))
-
- print("DEM dimensions: %sx%s pixels" % (w, h))
-
- # Read reconstruction
- udata = dataset.UndistortedDataSet(dataset.DataSet(os.path.join(dataset_path, "opensfm")), undistorted_data_path=os.path.join(dataset_path, "opensfm", "undistorted"))
- reconstructions = udata.load_undistorted_reconstruction()
- if len(reconstructions) == 0:
- raise Exception("No reconstructions available")
-
- max_workers = args.threads
- print("Using %s threads" % max_workers)
-
- reconstruction = reconstructions[0]
- for shot in reconstruction.shots.values():
- if len(target_images) == 0 or shot.id in target_images:
-
- print("Processing %s..." % shot.id)
- shot_image = udata.load_undistorted_image(shot.id)
-
- r = shot.pose.get_rotation_matrix()
- Xs, Ys, Zs = shot.pose.get_origin()
- cam_grid_y, cam_grid_x = dem_raster.index(Xs + dem_offset_x, Ys + dem_offset_y)
-
- a1 = r[0][0]
- b1 = r[0][1]
- c1 = r[0][2]
- a2 = r[1][0]
- b2 = r[1][1]
- c2 = r[1][2]
- a3 = r[2][0]
- b3 = r[2][1]
- c3 = r[2][2]
-
- if not args.skip_visibility_test:
- distance_map = np.full((h, w), np.nan)
-
- for j in range(0, h):
- for i in range(0, w):
- distance_map[j][i] = sqrt((cam_grid_x - i) ** 2 + (cam_grid_y - j) ** 2)
- distance_map[distance_map==0] = 1e-7
-
- print("Camera pose: (%f, %f, %f)" % (Xs, Ys, Zs))
-
- img_h, img_w, num_bands = shot_image.shape
- half_img_w = (img_w - 1) / 2.0
- half_img_h = (img_h - 1) / 2.0
- print("Image dimensions: %sx%s pixels" % (img_w, img_h))
- f = shot.camera.focal * max(img_h, img_w)
- has_nodata = dem_raster.profile.get('nodata') is not None
-
- def process_pixels(step):
- imgout = np.full((num_bands, dem_bbox_h, dem_bbox_w), np.nan)
-
- minx = dem_bbox_w
- miny = dem_bbox_h
- maxx = 0
- maxy = 0
-
- for j in range(dem_bbox_miny, dem_bbox_maxy + 1):
- if j % max_workers == step:
- im_j = j - dem_bbox_miny
-
- for i in range(dem_bbox_minx, dem_bbox_maxx + 1):
- im_i = i - dem_bbox_minx
-
- # World coordinates
- Za = dem[j][i]
-
- # Skip nodata
- if has_nodata and Za == dem_raster.nodata:
- continue
-
- Xa, Ya = dem_raster.xy(j, i)
-
- # Remove offset (our cameras don't have the geographic offset)
- Xa -= dem_offset_x
- Ya -= dem_offset_y
-
- # Colinearity function http://web.pdx.edu/~jduh/courses/geog493f14/Week03.pdf
- dx = (Xa - Xs)
- dy = (Ya - Ys)
- dz = (Za - Zs)
-
- den = a3 * dx + b3 * dy + c3 * dz
- x = half_img_w - (f * (a1 * dx + b1 * dy + c1 * dz) / den)
- y = half_img_h - (f * (a2 * dx + b2 * dy + c2 * dz) / den)
-
- if x >= 0 and y >= 0 and x <= img_w - 1 and y <= img_h - 1:
- # Visibility test
- if not args.skip_visibility_test:
- check_dem_points = np.column_stack(line(i, j, cam_grid_x, cam_grid_y))
- check_dem_points = check_dem_points[np.all(np.logical_and(np.array([0, 0]) <= check_dem_points, check_dem_points < [w, h]), axis=1)]
-
- visible = True
- for p in check_dem_points:
- ray_z = Zs + (distance_map[p[1]][p[0]] / distance_map[j][i]) * dz
- if ray_z > dem_max_value:
- break
-
- if dem[p[1]][p[0]] > ray_z:
- visible = False
- break
- if not visible:
- continue
-
- if interpolation == 'bilinear':
- xi = img_w - 1 - x
- yi = img_h - 1 - y
- values = bilinear_interpolate(shot_image, xi, yi)
- else:
- # nearest
- xi = img_w - 1 - int(round(x))
- yi = img_h - 1 - int(round(y))
- values = shot_image[yi][xi]
-
- # We don't consider all zero values (pure black)
- # to be valid sample values. This will sometimes miss
- # valid sample values.
-
- if not np.all(values == 0):
- minx = min(minx, im_i)
- miny = min(miny, im_j)
- maxx = max(maxx, im_i)
- maxy = max(maxy, im_j)
-
- for b in range(num_bands):
- imgout[b][im_j][im_i] = values[b]
-
- # for b in range(num_bands):
- # minx = min(minx, im_i)
- # miny = min(miny, im_j)
- # maxx = max(maxx, im_i)
- # maxy = max(maxy, im_j)
- # imgout[b][im_j][im_i] = 255
- return (imgout, (minx, miny, maxx, maxy))
-
- # Compute bounding box of image coverage
- # assuming a flat plane at Z = min Z
- # (Otherwise we have to scan the entire DEM)
- # The Xa,Ya equations are just derived from the colinearity equations
- # solving for Xa and Ya instead of x,y
- def dem_coordinates(cpx, cpy):
- """
- :param cpx principal point X (image coordinates)
- :param cpy principal point Y (image coordinates)
- """
- Za = dem_min_value
- m = (a3*b1*cpy - a1*b3*cpy - (a3*b2 - a2*b3)*cpx - (a2*b1 - a1*b2)*f)
- Xa = dem_offset_x + (m*Xs + (b3*c1*cpy - b1*c3*cpy - (b3*c2 - b2*c3)*cpx - (b2*c1 - b1*c2)*f)*Za - (b3*c1*cpy - b1*c3*cpy - (b3*c2 - b2*c3)*cpx - (b2*c1 - b1*c2)*f)*Zs)/m
- Ya = dem_offset_y + (m*Ys - (a3*c1*cpy - a1*c3*cpy - (a3*c2 - a2*c3)*cpx - (a2*c1 - a1*c2)*f)*Za + (a3*c1*cpy - a1*c3*cpy - (a3*c2 - a2*c3)*cpx - (a2*c1 - a1*c2)*f)*Zs)/m
-
- y, x = dem_raster.index(Xa, Ya)
- return (x, y)
-
- dem_ul = dem_coordinates(-(img_w - 1) / 2.0, -(img_h - 1) / 2.0)
- dem_ur = dem_coordinates((img_w - 1) / 2.0, -(img_h - 1) / 2.0)
- dem_lr = dem_coordinates((img_w - 1) / 2.0, (img_h - 1) / 2.0)
- dem_ll = dem_coordinates(-(img_w - 1) / 2.0, (img_h - 1) / 2.0)
- dem_bbox = [dem_ul, dem_ur, dem_lr, dem_ll]
- dem_bbox_x = np.array(list(map(lambda xy: xy[0], dem_bbox)))
- dem_bbox_y = np.array(list(map(lambda xy: xy[1], dem_bbox)))
-
- dem_bbox_minx = min(w - 1, max(0, dem_bbox_x.min()))
- dem_bbox_miny = min(h - 1, max(0, dem_bbox_y.min()))
- dem_bbox_maxx = min(w - 1, max(0, dem_bbox_x.max()))
- dem_bbox_maxy = min(h - 1, max(0, dem_bbox_y.max()))
-
- dem_bbox_w = 1 + dem_bbox_maxx - dem_bbox_minx
- dem_bbox_h = 1 + dem_bbox_maxy - dem_bbox_miny
-
- print("Iterating over DEM box: [(%s, %s), (%s, %s)] (%sx%s pixels)" % (dem_bbox_minx, dem_bbox_miny, dem_bbox_maxx, dem_bbox_maxy, dem_bbox_w, dem_bbox_h))
-
- if max_workers > 1:
- with multiprocessing.Pool(max_workers) as p:
- results = p.map(process_pixels, range(max_workers))
- else:
- results = [process_pixels(0)]
-
- results = list(filter(lambda r: r[1][0] <= r[1][2] and r[1][1] <= r[1][3], results))
-
- # Merge image
- imgout, _ = results[0]
-
- for j in range(dem_bbox_miny, dem_bbox_maxy + 1):
- im_j = j - dem_bbox_miny
- resimg, _ = results[j % max_workers]
- for b in range(num_bands):
- imgout[b][im_j] = resimg[b][im_j]
-
- # Merge bounds
- minx = dem_bbox_w
- miny = dem_bbox_h
- maxx = 0
- maxy = 0
-
- for _, bounds in results:
- minx = min(bounds[0], minx)
- miny = min(bounds[1], miny)
- maxx = max(bounds[2], maxx)
- maxy = max(bounds[3], maxy)
-
- print("Output bounds: (%s, %s), (%s, %s) pixels" % (minx, miny, maxx, maxy))
- if minx <= maxx and miny <= maxy:
- imgout = imgout[:,miny:maxy+1,minx:maxx+1]
-
- if with_alpha:
- alpha = np.zeros((imgout.shape[1], imgout.shape[2]), dtype=np.uint8)
-
- # Set all not-NaN indices to 255
- alpha[~np.isnan(imgout[0])] = 255
-
- # Cast
- imgout = imgout.astype(shot_image.dtype)
-
- dem_transform = dem_raster.profile['transform']
- offset_x, offset_y = dem_raster.xy(dem_bbox_miny + miny, dem_bbox_minx + minx, offset='ul')
-
- profile = {
- 'driver': 'GTiff',
- 'width': imgout.shape[2],
- 'height': imgout.shape[1],
- 'count': num_bands + 1 if with_alpha else num_bands,
- 'dtype': imgout.dtype.name,
- 'transform': rasterio.transform.Affine(dem_transform[0], dem_transform[1], offset_x,
- dem_transform[3], dem_transform[4], offset_y),
- 'nodata': None,
- 'crs': crs
- }
-
- outfile = os.path.join(cwd_path, shot.id)
- if not outfile.endswith(".tif"):
- outfile = outfile + ".tif"
-
- with rasterio.open(outfile, 'w', BIGTIFF="IF_SAFER", **profile) as wout:
- for b in range(num_bands):
- wout.write(imgout[b], b + 1)
- if with_alpha:
- wout.write(alpha, num_bands + 1)
-
- print("Wrote %s" % outfile)
- else:
- print("Cannot orthorectify image (is the image inside the DEM bounds?)")
diff --git a/o/ODM/ODM-2.8.7/contrib/orthorectify/run.sh b/o/ODM/ODM-2.8.7/contrib/orthorectify/run.sh
deleted file mode 100755
index beececcc..00000000
--- a/o/ODM/ODM-2.8.7/contrib/orthorectify/run.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-PYTHONPATH=$PYTHONPATH:/code/SuperBuild/install/bin/opensfm python3 orthorectify.py "$@"
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/contrib/pc2dem/README.md b/o/ODM/ODM-2.8.7/contrib/pc2dem/README.md
deleted file mode 100644
index b338dceb..00000000
--- a/o/ODM/ODM-2.8.7/contrib/pc2dem/README.md
+++ /dev/null
@@ -1,44 +0,0 @@
-# Point Cloud To DEM
-
-Convert point clouds (LAS, LAZ, PLY, and any other format compatible with [PDAL](https://pdal.io/stages/readers.html)) to GeoTIFF elevation models.
-
-
-
-This tool includes methods to perform efficient and scalable gapfill interpolation and is the same method used by ODM's processing pipeline. It is offered here as a standalone module for processing individual point clouds.
-
-## Usage
-
-```
-docker run -ti --rm -v /home/youruser/folder_with_point_cloud:/input --entrypoint /code/contrib/pc2dem/pc2dem.py opendronemap/odm /input/point_cloud.las [flags]
-```
-
-The result (`dsm.tif` or `dtm.tif`) will be stored in the same folder as the input point cloud. See additional `flags` you can pass at the end of the command above:
-
-```
-usage: pc2dem.py [-h] [--type {dsm,dtm}] [--resolution RESOLUTION]
- [--gapfill-steps GAPFILL_STEPS]
- point_cloud
-
-Generate DEMs from point clouds using ODM's algorithm.
-
-positional arguments:
- point_cloud Path to point cloud file (.las, .laz,
- .ply)
-
-optional arguments:
- -h, --help show this help message and exit
- --type {dsm,dtm} Type of DEM. Default: dsm
- --resolution RESOLUTION
- Resolution in m/px. Default: 0.05
- --gapfill-steps GAPFILL_STEPS
- Number of steps used to fill areas with
- gaps. Set to 0 to disable gap filling.
- Starting with a radius equal to the output
- resolution, N different DEMs are generated
- with progressively bigger radius using the
- inverse distance weighted (IDW) algorithm
- and merged together. Remaining gaps are
- then merged using nearest neighbor
- interpolation. Default: 3
-
-```
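-
-To make the gapfill progression concrete, this is how the radii are derived from the defaults (a sketch mirroring the logic in pc2dem.py):
-
-```
-resolution, gapfill_steps = 0.05, 3
-radius_steps = [resolution / 2.0]
-for _ in range(gapfill_steps - 1):
-    radius_steps.append(radius_steps[-1] * 2)  # double the radius each step
-print(radius_steps)  # [0.025, 0.05, 0.1]
-```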
diff --git a/o/ODM/ODM-2.8.7/contrib/pc2dem/pc2dem.py b/o/ODM/ODM-2.8.7/contrib/pc2dem/pc2dem.py
deleted file mode 100755
index 0441565b..00000000
--- a/o/ODM/ODM-2.8.7/contrib/pc2dem/pc2dem.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python3
-# Author: Piero Toffanin
-# License: AGPLv3
-
-import os
-import sys
-sys.path.insert(0, os.path.join("..", "..", os.path.dirname(__file__)))
-
-import argparse
-import multiprocessing
-from opendm.dem import commands
-
-parser = argparse.ArgumentParser(description='Generate DEMs from point clouds using ODM\'s algorithm.')
-parser.add_argument('point_cloud',
- type=str,
- help='Path to point cloud file (.las, .laz, .ply)')
-parser.add_argument('--type',
- type=str,
- choices=("dsm", "dtm"),
- default="dsm",
- help="Type of DEM. Default: %(default)s")
-parser.add_argument('--resolution',
- type=float,
- default=0.05,
- help='Resolution in m/px. Default: %(default)s')
-parser.add_argument('--gapfill-steps',
- default=3,
- type=int,
- help='Number of steps used to fill areas with gaps. Set to 0 to disable gap filling. '
- 'Starting with a radius equal to the output resolution, N different DEMs are generated with '
- 'progressively bigger radius using the inverse distance weighted (IDW) algorithm '
- 'and merged together. Remaining gaps are then merged using nearest neighbor interpolation. '
- 'Default: %(default)s')
-args = parser.parse_args()
-
-if not os.path.exists(args.point_cloud):
- print("%s does not exist" % args.point_cloud)
- exit(1)
-
-outdir = os.path.dirname(args.point_cloud)
-
-radius_steps = [args.resolution / 2.0]
-for _ in range(args.gapfill_steps - 1):
- radius_steps.append(radius_steps[-1] * 2) # 2 is arbitrary, maybe there's a better value?
-
-commands.create_dem(args.point_cloud,
- args.type,
- output_type='idw' if args.type == 'dtm' else 'max',
- radiuses=list(map(str, radius_steps)),
- gapfill=args.gapfill_steps > 0,
- outdir=outdir,
- resolution=args.resolution,
- decimation=1,
- verbose=True,
- max_workers=multiprocessing.cpu_count(),
- keep_unfilled_copy=False
- )
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/contrib/resize/README.md b/o/ODM/ODM-2.8.7/contrib/resize/README.md
deleted file mode 100644
index 1b277c49..00000000
--- a/o/ODM/ODM-2.8.7/contrib/resize/README.md
+++ /dev/null
@@ -1,16 +0,0 @@
-# Resize
-
-Resize a dataset (and optional GCP file).
-
-Resizes images while preserving EXIF data; the EXIF width and height attributes are updated accordingly. ODM GCP files are scaled as well.
-
-Usage:
-
-```
-pip install -r requirements.txt
-python3 resize.py -i images/ -o resized/ 25%
-python3 resize.py -i images/1.JPG -o resized.JPG 25%
-python3 resize.py -i gcp_list.txt -o resized_gcp_list.txt 25%
-```
-
-Originally forked from https://github.com/pierotofy/exifimageresize
diff --git a/o/ODM/ODM-2.8.7/contrib/resize/requirements.txt b/o/ODM/ODM-2.8.7/contrib/resize/requirements.txt
deleted file mode 100644
index 92e99bef..00000000
--- a/o/ODM/ODM-2.8.7/contrib/resize/requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Pillow==8.0.1
-piexif==1.1.2
diff --git a/o/ODM/ODM-2.8.7/contrib/resize/resize.py b/o/ODM/ODM-2.8.7/contrib/resize/resize.py
deleted file mode 100644
index bed6ec1b..00000000
--- a/o/ODM/ODM-2.8.7/contrib/resize/resize.py
+++ /dev/null
@@ -1,169 +0,0 @@
-import argparse
-import os
-import glob
-import shutil
-from PIL import Image
-import piexif
-import multiprocessing
-from multiprocessing.pool import ThreadPool
-import sys
-sys.path.append("../../")
-from opendm.gcp import GCPFile
-
-parser = argparse.ArgumentParser(description='Exif Image Resize')
-parser.add_argument('--input', '-i',
- metavar='',
- required=True,
- help='Path to input image/GCP or image folder')
-parser.add_argument('--output', '-o',
- metavar='',
- required=True,
- help='Path to output image/GCP or image folder')
-parser.add_argument('--force', '-f',
- action='store_true',
- default=False,
- help='Overwrite results')
-parser.add_argument('amount',
- metavar='',
- type=str,
- help='Pixel of largest side or percentage to resize images by')
-args = parser.parse_args()
-
-def die(msg):
- print(msg)
- exit(1)
-
-class nonloc:
- errors = 0
-
-def resize_image(image_path, out_path, resize_to, out_path_is_file=False):
- """
- :param image_path: path to the image
- :param out_path: path to the output directory or file
- :param resize_to: percentage ("perc%") or pixels
- """
- try:
- im = Image.open(image_path)
- path, ext = os.path.splitext(image_path)
- if out_path_is_file:
- resized_image_path = out_path
- else:
- resized_image_path = os.path.join(out_path, os.path.basename(image_path))
-
- width, height = im.size
- max_side = max(width, height)
-
- if isinstance(resize_to, str) and resize_to.endswith("%"):
- ratio = float(resize_to[:-1]) / 100.0
- else:
- ratio = float(resize_to) / float(max_side)
-
- resized_width = int(width * ratio)
- resized_height = int(height * ratio)
-
- im.thumbnail((resized_width, resized_height), Image.LANCZOS)
-
- driver = ext[1:].upper()
- if driver == 'JPG':
- driver = 'JPEG'
-
- if 'exif' in im.info:
- exif_dict = piexif.load(im.info['exif'])
- exif_dict['Exif'][piexif.ExifIFD.PixelXDimension] = resized_width
- exif_dict['Exif'][piexif.ExifIFD.PixelYDimension] = resized_height
- im.save(resized_image_path, driver, exif=piexif.dump(exif_dict), quality=100)
- else:
- im.save(resized_image_path, driver, quality=100)
-
- im.close()
-
- print("{} ({}x{}) --> {} ({}x{})".format(image_path, width, height, resized_image_path, resized_width, resized_height))
- except (IOError, ValueError) as e:
- print("Error: Cannot resize {}: {}.".format(image_path, str(e)))
- nonloc.errors += 1
-
-def resize_gcp(gcp_path, out_path, resize_to, out_path_is_file=False):
- """
- :param gcp_path: path to the GCP
- :param out_path: path to the output directory or file
- :param resize_to: percentage ("perc%") or pixels
- """
- try:
- if out_path_is_file:
- resized_gcp_path = out_path
- else:
- resized_gcp_path = os.path.join(out_path, os.path.basename(gcp_path))
-
- if resize_to.endswith("%"):
- ratio = float(resize_to[:-1]) / 100.0
- else:
- ratio = resize_to
-
- gcp = GCPFile(gcp_path)
- if gcp.entries_count() > 0:
- gcp.make_resized_copy(resized_gcp_path, ratio)
- else:
- raise ValueError("No GCP entries")
-
- print("{} --> {}".format(gcp_path, resized_gcp_path))
- except (IOError, ValueError) as e:
- print("Error: Cannot resize {}: {}.".format(gcp_path, str(e)))
- nonloc.errors += 1
-
-if not args.amount.endswith("%"):
- args.amount = float(args.amount)
- if args.amount <= 0:
- die("Invalid amount")
-else:
- try:
- if float(args.amount[:-1]) <= 0:
- die("Invalid amount")
- except:
- die("Invalid amount")
-
-
-files = []
-gcps = []
-
-if os.path.isdir(args.input):
- for ext in ["JPG", "JPEG", "PNG", "TIFF", "TIF"]:
- files += glob.glob("{}/*.{}".format(args.input, ext))
- files += glob.glob("{}/*.{}".format(args.input, ext.lower()))
- gcps = glob.glob("{}/*.txt".format(args.input))
-elif os.path.exists(args.input):
- _, ext = os.path.splitext(args.input)
- if ext.lower() == ".txt":
- gcps = [args.input]
- else:
- files = [args.input]
-else:
- die("{} does not exist".format(args.input))
-
-create_dir = len(files) > 1 or args.output.endswith("/") or len(gcps) > 1
-
-if create_dir and os.path.isdir(args.output):
- if not args.force:
- die("{} exists, pass --force to overwrite results".format(args.output))
- else:
- shutil.rmtree(args.output)
-elif not create_dir and os.path.isfile(args.output):
- if not args.force:
- die("{} exists, pass --force to overwrite results".format(args.output))
- else:
- os.remove(args.output)
-
-if create_dir:
- os.makedirs(args.output)
-
-pool = ThreadPool(processes=multiprocessing.cpu_count())
-
-def resize(file):
- _, ext = os.path.splitext(file)
- if ext.lower() == ".txt":
- return resize_gcp(file, args.output, args.amount, not create_dir)
- else:
- return resize_image(file, args.output, args.amount, not create_dir)
-pool.map(resize, files + gcps)
-
-print("Process completed, {} errors.".format(nonloc.errors))
-
diff --git a/o/ODM/ODM-2.8.7/contrib/shell/odm_exif2utm.sh b/o/ODM/ODM-2.8.7/contrib/shell/odm_exif2utm.sh
deleted file mode 100755
index 35453432..00000000
--- a/o/ODM/ODM-2.8.7/contrib/shell/odm_exif2utm.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-#GPL2 jmenezes ODM extract exif lon/lat project to utm with proj4; 2017-05-28
-# line 23 tab bugfix 2017-07-11
-# apt-get install exiftool geotiff-bin
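-# Example (from inside the images directory, UTM zone 33 north):
-#   bash ../odm_exif2utm.sh 33 N > camera_wgs84utm.txt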
-if [ $# -lt 2 ]; then
- echo "run inside /images/ directory" 1>&2
- echo $(basename $0)" zone [S|N] > camera_wgs84utm.txt" 1>&2
- exit 1
-fi
-Z=$1
-case $2 in
-s|S) printf "EPSG:327%02d\n" $Z; H=south
-;;
-n|N) printf "EPSG:326%02d\n" $Z; H=north
-;;
-*) echo "hemisphere must be S or N" 1>&2; exit 1
-;;
-esac
-for i in *.{jpg,JPG,tif,TIF}; do
- exiftool $i | grep GPS | grep Position | \
- awk -F \: -v img=$i '{ print $2","img }' | tr -d [:blank:] | \
- sed s/deg/d/g | tr \, \\t | awk '{ print $2,$1,$3 }' | \
- proj -f "%.3f" +proj=utm +zone=$Z +$H +ellps=WGS84
-done | sed s/\ /\\t/g
-exit 0
diff --git a/o/ODM/ODM-2.8.7/contrib/visveg/readme.md b/o/ODM/ODM-2.8.7/contrib/visveg/readme.md
deleted file mode 100644
index 1d883be4..00000000
--- a/o/ODM/ODM-2.8.7/contrib/visveg/readme.md
+++ /dev/null
@@ -1,31 +0,0 @@
-# Visible Vegetation Indexes
-
-This script produces a Vegetation Index raster from an RGB orthophoto (odm_orthophoto.tif in your project).
-
-## Requirements
-* rasterio (pip install rasterio)
-* numpy python package (included in ODM build)
-
-## Usage
-```
-vegind.py orthophoto index
-
-positional arguments:
-  orthophoto   The RGB orthophoto. Must be a GeoTiff.
-  index        Index identifier. Allowed values: ngrdi, tgi, vari
-```
-Output is written next to the input file, with the index name appended as a suffix.
-
-## Examples
-
-`python vegind.py /path/to/odm_orthophoto.tif tgi`
-
-Orthophoto of Koniaków grass field and forest in QGIS: 
-The Triangular Greenness Index output in QGIS (with a spectral pseudocolor): 
-Visible Atmospheric Resistant Index: 
-Normalized green-red difference index: 
-
-## Bibliography
-
-1. Hunt, E. Raymond, et al. "A Visible Band Index for Remote Sensing Leaf Chlorophyll Content At the Canopy Scale." ITC journal 21(2013): 103-112. doi: 10.1016/j.jag.2012.07.020
-(https://doi.org/10.1016/j.jag.2012.07.020)
diff --git a/o/ODM/ODM-2.8.7/contrib/visveg/vegind.py b/o/ODM/ODM-2.8.7/contrib/visveg/vegind.py
deleted file mode 100644
index 42dcb62d..00000000
--- a/o/ODM/ODM-2.8.7/contrib/visveg/vegind.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-import rasterio, os, sys
-import numpy as np
-
-class bcolors:
- OKBLUE = '\033[94m'
- OKGREEN = '\033[92m'
- WARNING = '\033[93m'
- FAIL = '\033[91m'
- ENDC = '\033[0m'
- BOLD = '\033[1m'
- UNDERLINE = '\033[4m'
-
-try:
-    file = sys.argv[1]
-    typ = sys.argv[2]
-    (fileRoot, fileExt) = os.path.splitext(file)
-    outFileName = fileRoot + "_" + typ + fileExt
-    if typ not in ['vari', 'tgi', 'ngrdi']:
-        raise IndexError
-except (TypeError, IndexError, NameError):
-    print(bcolors.FAIL + 'Arguments messed up. Check arguments order and index name' + bcolors.ENDC)
-    print('Usage: ./vegind.py orto index')
-    print('    orto - filepath to RGB orthophoto')
-    print('    index - Vegetation Index')
-    print(bcolors.OKGREEN + 'Available indexes: vari, ngrdi, tgi' + bcolors.ENDC)
-    sys.exit()
-
-
-def calcNgrdi(red, green):
- """
- Normalized green red difference index
- Tucker,C.J.,1979.
- Red and photographic infrared linear combinations for monitoring vegetation.
- Remote Sensing of Environment 8, 127–150
- :param red: red visible channel
- :param green: green visible channel
- :return: ngrdi index array
- """
- mask = np.not_equal(np.add(red,green), 0.0)
- return np.choose(mask, (-9999.0, np.true_divide(
- np.subtract(green,red),
- np.add(red,green))))
-
-def calcVari(red,green,blue):
- """
- Calculates Visible Atmospheric Resistant Index
- Gitelson, A.A., Kaufman, Y.J., Stark, R., Rundquist, D., 2002.
- Novel algorithms for remote estimation of vegetation fraction.
- Remote Sensing of Environment 80, 76–87.
- :param red: red visible channel
- :param green: green visible channel
- :param blue: blue visible channel
- :return: vari index array, that will be saved to tiff
- """
- mask = np.not_equal(np.subtract(np.add(green,red),blue), 0.0)
- return np.choose(mask, (-9999.0, np.true_divide(np.subtract(green,red),np.subtract(np.add(green,red),blue))))
-
-def calcTgi(red, green, blue):
-    """
-    Calculates Triangular Greenness Index
-    Hunt, E. Raymond Jr.; Doraiswamy, Paul C.; McMurtrey, James E.; Daughtry, Craig S.T.; Perry, Eileen M.; and Akhmedov, Bakhyt,
-    A visible band index for remote sensing leaf chlorophyll content at the canopy scale (2013).
-    Publications from USDA-ARS / UNL Faculty. Paper 1156.
-    http://digitalcommons.unl.edu/usdaarsfacpub/1156
-    :param red: red channel
-    :param green: green channel
-    :param blue: blue channel
-    :return: tgi index array, that will be saved to tiff
-    """
-    # TGI = green - 0.39*red - 0.61*blue; the original three-argument
-    # np.subtract call misused the third argument as numpy's out= parameter
-    mask = np.not_equal(green - red + blue - 255.0, 0.0)
-    return np.choose(mask, (-9999.0, np.subtract(np.subtract(green, np.multiply(0.39, red)), np.multiply(0.61, blue))))
-
-try:
- with rasterio.Env():
- ds = rasterio.open(file)
- profile = ds.profile
- profile.update(dtype=rasterio.float32, count=1, nodata=-9999)
- red = np.float32(ds.read(1))
- green = np.float32(ds.read(2))
- blue = np.float32(ds.read(3))
- np.seterr(divide='ignore', invalid='ignore')
- if typ == 'ngrdi':
- indeks = calcNgrdi(red,green)
- elif typ == 'vari':
- indeks = calcVari(red, green, blue)
- elif typ == 'tgi':
- indeks = calcTgi(red, green, blue)
-
- with rasterio.open(outFileName, 'w', BIGTIFF="IF_SAFER", **profile) as dst:
- dst.write(indeks.astype(rasterio.float32), 1)
-except rasterio.errors.RasterioIOError:
-    print(bcolors.FAIL + 'Orthophoto file not found or access denied' + bcolors.ENDC)
-    sys.exit()
diff --git a/o/ODM/ODM-2.8.7/docker/README b/o/ODM/ODM-2.8.7/docker/README
deleted file mode 100644
index 6843dac5..00000000
--- a/o/ODM/ODM-2.8.7/docker/README
+++ /dev/null
@@ -1,3 +0,0 @@
-The g++ and gcc scripts in this directory are used to replace the real g++ and gcc programs so that compilation across all projects (including dependencies) uses the -march=nehalem flag, which allows us to build a docker image compatible with most Intel-based CPUs.
-
-Without the -march=nehalem flag, a docker image will contain binaries that are optimized for the machine that built the image, and will not run on older machines.
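-
-For example, an invocation such as "g++ -march=native -O2 file.cpp" is rewritten by the wrapper to "/usr/bin/g++_real -march=nehalem -O2 file.cpp" (or -march=armv8-a on aarch64).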
diff --git a/o/ODM/ODM-2.8.7/docker/g++ b/o/ODM/ODM-2.8.7/docker/g++
deleted file mode 100755
index 33670a25..00000000
--- a/o/ODM/ODM-2.8.7/docker/g++
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-
-declare -a args
-
-for i in "$@"
-do
- if [[ "$i" != -march* ]]; then
- args+=("$i")
- fi
-done
-
-ARCH=nehalem
-if [[ $(uname -m) == "aarch64" ]]; then
- ARCH=armv8-a
-fi
-
-/usr/bin/g++_real -march=$ARCH "${args[@]}"
diff --git a/o/ODM/ODM-2.8.7/docker/gcc b/o/ODM/ODM-2.8.7/docker/gcc
deleted file mode 100755
index 80183720..00000000
--- a/o/ODM/ODM-2.8.7/docker/gcc
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-
-declare -a args
-
-for i in "$@"
-do
- if [[ "$i" != -march* ]]; then
- args+=("$i")
- fi
-done
-
-ARCH=nehalem
-if [[ $(uname -m) == "aarch64" ]]; then
- ARCH=armv8-a
-fi
-
-/usr/bin/gcc_real -march=$ARCH "${args[@]}"
diff --git a/o/ODM/ODM-2.8.7/docs/issue_template.md b/o/ODM/ODM-2.8.7/docs/issue_template.md
deleted file mode 100644
index 07c7ee94..00000000
--- a/o/ODM/ODM-2.8.7/docs/issue_template.md
+++ /dev/null
@@ -1,26 +0,0 @@
-****************************************
-PLEASE REMOVE THIS NOTE AFTER READING IT!
-
-First of all, thank you for taking the time to report an issue.
-
-Before you continue, make sure you are in the right place. Please open an issue only to report faults and bugs. For questions and discussion please open a topic on http://community.opendronemap.org/c/opendronemap.
-
-Please use the format below to report bugs and faults.
-****************************************
-
-### How did you install ODM? (Docker, installer, natively, ...)?
-
-[Type answer here]
-
-### What is the problem?
-
-[Type answer here]
-
-### What should be the expected behavior? If this is a feature request, please describe in detail the changes you think should be made to the code, citing files and lines where changes should be made, if possible.
-
-[Type answer here]
-
-### How can we reproduce this? What steps did you do to trigger the problem? If this is an issue with processing a dataset, YOU MUST include a copy of your dataset uploaded on Google Drive or Dropbox (otherwise we cannot reproduce this).
-
-[Type answer here]
-
diff --git a/o/ODM/ODM-2.8.7/gpu.Dockerfile b/o/ODM/ODM-2.8.7/gpu.Dockerfile
deleted file mode 100644
index b7e6d7ec..00000000
--- a/o/ODM/ODM-2.8.7/gpu.Dockerfile
+++ /dev/null
@@ -1,49 +0,0 @@
-FROM nvidia/cuda:11.2.0-devel-ubuntu20.04 AS builder
-
-# Env variables
-ENV DEBIAN_FRONTEND=noninteractive \
- PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python3.9/dist-packages:/code/SuperBuild/install/lib/python3.8/dist-packages:/code/SuperBuild/install/bin/opensfm" \
- LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/code/SuperBuild/install/lib"
-
-# Prepare directories
-WORKDIR /code
-
-# Copy everything
-COPY . ./
-
-# Run the build
-RUN PORTABLE_INSTALL=YES GPU_INSTALL=YES bash configure.sh install
-
-# Clean Superbuild
-RUN bash configure.sh clean
-
-### END Builder
-
-### Use a second image for the final asset to reduce the number and
-# size of the layers.
-FROM nvidia/cuda:11.2.0-runtime-ubuntu20.04
-#FROM nvidia/cuda:11.2.0-devel-ubuntu20.04
-
-# Env variables
-ENV DEBIAN_FRONTEND=noninteractive \
- PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python3.9/dist-packages:/code/SuperBuild/install/lib/python3.8/dist-packages:/code/SuperBuild/install/bin/opensfm" \
- LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/code/SuperBuild/install/lib"
-
-WORKDIR /code
-
-# Copy everything we built from the builder
-COPY --from=builder /code /code
-
-# Copy the Python libraries installed via pip from the builder
-COPY --from=builder /usr/local /usr/local
-
-# Install shared libraries that we depend on via APT, but *not*
-# the -dev packages to save space!
-# Also run a smoke test on ODM and OpenSfM
-RUN bash configure.sh installruntimedepsonly \
- && apt-get clean \
- && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \
- && bash run.sh --help \
- && bash -c "eval $(python3 /code/opendm/context.py) && python3 -c 'from opensfm import io, pymap'"
-# Entry point
-ENTRYPOINT ["python3", "/code/run.py"]
diff --git a/o/ODM/ODM-2.8.7/innosetup.iss b/o/ODM/ODM-2.8.7/innosetup.iss
deleted file mode 100644
index 945e1fee..00000000
--- a/o/ODM/ODM-2.8.7/innosetup.iss
+++ /dev/null
@@ -1,151 +0,0 @@
-; Script generated by the Inno Setup Script Wizard.
-; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES!
-
-#define MyAppName "ODM"
-#define VerFile FileOpen("VERSION")
-#define MyAppVersion FileRead(VerFile)
-#expr FileClose(VerFile)
-#undef VerFile
-#define MyAppPublisher "OpenDroneMap"
-#define MyAppURL "https://opendronemap.org"
-
-[Setup]
-; NOTE: The value of AppId uniquely identifies this application.
-; Do not use the same AppId value in installers for other applications.
-; (To generate a new GUID, click Tools | Generate GUID inside the IDE.)
-AppId={{443998BA-9F8F-4A69-9A96-0D8FBC8C6393}
-AppName={#MyAppName}
-AppVersion={#MyAppVersion}
-AppPublisher={#MyAppPublisher}
-AppPublisherURL={#MyAppURL}
-AppSupportURL={#MyAppURL}
-AppUpdatesURL={#MyAppURL}
-DefaultDirName=C:\ODM
-DefaultGroupName={#MyAppName}
-AllowNoIcons=yes
-LicenseFile=LICENSE
-OutputDir=dist
-OutputBaseFilename=ODM_Setup_{#MyAppVersion}
-Compression=lzma
-SolidCompression=yes
-ArchitecturesAllowed=x64
-SignTool=signtool
-PrivilegesRequired=lowest
-UsePreviousAppDir=no
-;SetupIconFile=setup.ico
-
-[Languages]
-Name: "english"; MessagesFile: "compiler:Default.isl"
-
-[Files]
-Source: "contrib\*"; DestDir: "{app}\contrib"; Flags: ignoreversion recursesubdirs createallsubdirs
-Source: "licenses\*"; DestDir: "{app}\licenses"; Flags: ignoreversion recursesubdirs createallsubdirs
-Source: "opendm\*"; DestDir: "{app}\opendm"; Excludes: "__pycache__"; Flags: ignoreversion recursesubdirs createallsubdirs
-Source: "stages\*"; DestDir: "{app}\stages"; Excludes: "__pycache__"; Flags: ignoreversion recursesubdirs createallsubdirs
-Source: "SuperBuild\install\bin\*"; DestDir: "{app}\SuperBuild\install\bin"; Excludes: "__pycache__"; Flags: ignoreversion recursesubdirs createallsubdirs
-Source: "venv\*"; DestDir: "{app}\venv"; Excludes: "__pycache__,pyvenv.cfg"; Flags: ignoreversion recursesubdirs createallsubdirs
-Source: "python38\*"; DestDir: "{app}\python38"; Excludes: "__pycache__"; Flags: ignoreversion recursesubdirs createallsubdirs
-Source: "console.bat"; DestDir: "{app}"; Flags: ignoreversion
-Source: "VERSION"; DestDir: "{app}"; Flags: ignoreversion
-Source: "LICENSE"; DestDir: "{app}"; Flags: ignoreversion
-Source: "run.bat"; DestDir: "{app}"; Flags: ignoreversion
-Source: "run.py"; DestDir: "{app}"; Flags: ignoreversion
-Source: "settings.yaml"; DestDir: "{app}"; Flags: ignoreversion
-Source: "win32env.bat"; DestDir: "{app}"; Flags: ignoreversion
-Source: "winrun.bat"; DestDir: "{app}"; Flags: ignoreversion
-Source: "SuperBuild\download\vc_redist.x64.exe"; DestDir: {tmp}; Flags: dontcopy
-
-[Icons]
-Name: "{group}\ODM Console"; Filename: "{app}\console.bat"; WorkingDir: "{app}"
-Name: "{userdesktop}\ODM Console"; Filename: "{app}\console.bat"; WorkingDir: "{app}"; Tasks: desktopicon
-
-[Tasks]
-Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked
-
-[Run]
-Filename: "{tmp}\vc_redist.x64.exe"; StatusMsg: "Installing Visual C++ Redistributable Packages for Visual Studio 2019"; Parameters: "/quiet"; Check: VC2019RedistNeedsInstall ; Flags: waituntilterminated
-Filename: "{app}\console.bat"; Description: {cm:LaunchProgram,ODM Console}; Flags: nowait postinstall skipifsilent
-
-[Code]
-function VC2019RedistNeedsInstall: Boolean;
-var
- Version: String;
-begin
- if RegQueryStringValue(HKEY_LOCAL_MACHINE,
- 'SOFTWARE\Microsoft\VisualStudio\14.0\VC\Runtimes\x64', 'Version', Version) then
- begin
- // Is the installed version at least 14.14?
- Log('VC Redist Version check : found ' + Version);
- Result := (CompareStr(Version, 'v14.14.26429.03')<0);
- end
- else
- begin
- // Not even an old version installed
- Result := True;
- end;
- if (Result) then
- begin
- ExtractTemporaryFile('vc_redist.x64.exe');
- end;
-end;
-
-function GetUninstallString(): String;
-var
- sUnInstPath: String;
- sUnInstallString: String;
-begin
- sUnInstPath := ExpandConstant('Software\Microsoft\Windows\CurrentVersion\Uninstall\{#emit SetupSetting("AppId")}_is1');
- sUnInstallString := '';
- if not RegQueryStringValue(HKLM, sUnInstPath, 'UninstallString', sUnInstallString) then
- RegQueryStringValue(HKCU, sUnInstPath, 'UninstallString', sUnInstallString);
- Result := sUnInstallString;
-end;
-
-function IsUpgrade(): Boolean;
-begin
- Result := (GetUninstallString() <> '');
-end;
-
-function UnInstallOldVersion(): Integer;
-var
- sUnInstallString: String;
- iResultCode: Integer;
-begin
-{ Return Values: }
-{ 1 - uninstall string is empty }
-{ 2 - error executing the UnInstallString }
-{ 3 - successfully executed the UnInstallString }
-
- { default return value }
- Result := 0;
-
- { get the uninstall string of the old app }
- sUnInstallString := GetUninstallString();
- if sUnInstallString <> '' then begin
- sUnInstallString := RemoveQuotes(sUnInstallString);
- if Exec(sUnInstallString, '/SILENT /NORESTART /SUPPRESSMSGBOXES','', SW_HIDE, ewWaitUntilTerminated, iResultCode) then
- Result := 3
- else
- Result := 2;
- end else
- Result := 1;
-end;
-
-procedure CurStepChanged(CurStep: TSetupStep);
-begin
- if (CurStep=ssInstall) then
- begin
- if (IsUpgrade()) then
- begin
- UnInstallOldVersion();
- end;
- end;
-end;
-
-[UninstallDelete]
-Type: filesandordirs; Name: "{app}\SuperBuild"
-Type: filesandordirs; Name: "{app}\contrib"
-Type: filesandordirs; Name: "{app}\licenses"
-Type: filesandordirs; Name: "{app}\opendm"
-Type: filesandordirs; Name: "{app}\stages"
-Type: filesandordirs; Name: "{app}\venv"
diff --git a/o/ODM/ODM-2.8.7/licenses/libext_copyright.txt b/o/ODM/ODM-2.8.7/licenses/libext_copyright.txt
deleted file mode 100644
index 60cc18d5..00000000
--- a/o/ODM/ODM-2.8.7/licenses/libext_copyright.txt
+++ /dev/null
@@ -1,199 +0,0 @@
-This package was downloaded from
-http://xorg.freedesktop.org/releases/individual/lib/
-
-Copyright 1986, 1987, 1988, 1989, 1994, 1998 The Open Group
-
-Permission to use, copy, modify, distribute, and sell this software and its
-documentation for any purpose is hereby granted without fee, provided that
-the above copyright notice appear in all copies and that both that
-copyright notice and this permission notice appear in supporting
-documentation.
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-OPEN GROUP BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
-AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-Except as contained in this notice, the name of The Open Group shall not be
-used in advertising or otherwise to promote the sale, use or other dealings
-in this Software without prior written authorization from The Open Group.
-
-Copyright (c) 1996 Digital Equipment Corporation, Maynard, Massachusetts.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software.
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-DIGITAL EQUIPMENT CORPORATION BE LIABLE FOR ANY CLAIM, DAMAGES, INCLUDING,
-BUT NOT LIMITED TO CONSEQUENTIAL OR INCIDENTAL DAMAGES, OR OTHER LIABILITY,
-WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-Except as contained in this notice, the name of Digital Equipment Corporation
-shall not be used in advertising or otherwise to promote the sale, use or other
-dealings in this Software without prior written authorization from Digital
-Equipment Corporation.
-
-Copyright (c) 1997 by Silicon Graphics Computer Systems, Inc.
-Permission to use, copy, modify, and distribute this
-software and its documentation for any purpose and without
-fee is hereby granted, provided that the above copyright
-notice appear in all copies and that both that copyright
-notice and this permission notice appear in supporting
-documentation, and that the name of Silicon Graphics not be
-used in advertising or publicity pertaining to distribution
-of the software without specific prior written permission.
-Silicon Graphics makes no representation about the suitability
-of this software for any purpose. It is provided "as is"
-without any express or implied warranty.
-SILICON GRAPHICS DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
-SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
-AND FITNESS FOR A PARTICULAR PURPOSE. IN NO EVENT SHALL SILICON
-GRAPHICS BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL
-DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
-DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH
-THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-Copyright 1992 Network Computing Devices
-
-Permission to use, copy, modify, distribute, and sell this software and its
-documentation for any purpose is hereby granted without fee, provided that
-the above copyright notice appear in all copies and that both that
-copyright notice and this permission notice appear in supporting
-documentation, and that the name of NCD. not be used in advertising or
-publicity pertaining to distribution of the software without specific,
-written prior permission. NCD. makes no representations about the
-suitability of this software for any purpose. It is provided "as is"
-without express or implied warranty.
-
-NCD. DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL NCD.
-BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
-OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
-CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-Copyright 1991,1993 by Digital Equipment Corporation, Maynard, Massachusetts,
-and Olivetti Research Limited, Cambridge, England.
-
- All Rights Reserved
-
-Permission to use, copy, modify, and distribute this software and its
-documentation for any purpose and without fee is hereby granted,
-provided that the above copyright notice appear in all copies and that
-both that copyright notice and this permission notice appear in
-supporting documentation, and that the names of Digital or Olivetti
-not be used in advertising or publicity pertaining to distribution of the
-software without specific, written prior permission.
-
-DIGITAL AND OLIVETTI DISCLAIM ALL WARRANTIES WITH REGARD TO THIS
-SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS, IN NO EVENT SHALL THEY BE LIABLE FOR ANY SPECIAL, INDIRECT OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF
-USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
-
-Copyright 1986, 1987, 1988 by Hewlett-Packard Corporation
-
-Permission to use, copy, modify, and distribute this
-software and its documentation for any purpose and without
-fee is hereby granted, provided that the above copyright
-notice appear in all copies and that both that copyright
-notice and this permission notice appear in supporting
-documentation, and that the name of Hewlett-Packard not be used in
-advertising or publicity pertaining to distribution of the
-software without specific, written prior permission.
-
-Hewlett-Packard makes no representations about the
-suitability of this software for any purpose. It is provided
-"as is" without express or implied warranty.
-
-This software is not subject to any license of the American
-Telephone and Telegraph Company or of the Regents of the
-University of California.
-
-Copyright (c) 1994, 1995 Hewlett-Packard Company
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be included
-in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL HEWLETT-PACKARD COMPANY BE LIABLE FOR ANY CLAIM,
-DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-Except as contained in this notice, the name of the Hewlett-Packard
-Company shall not be used in advertising or otherwise to promote the
-sale, use or other dealings in this Software without prior written
-authorization from the Hewlett-Packard Company.
-
-Copyright Digital Equipment Corporation, 1996
-
-Permission to use, copy, modify, distribute, and sell this
-documentation for any purpose is hereby granted without fee,
-provided that the above copyright notice and this permission
-notice appear in all copies. Digital Equipment Corporation
-makes no representations about the suitability for any purpose
-of the information in this document. This documentation is
-provided ``as is'' without express or implied warranty.
-
-Copyright (c) 1999, 2005, 2006, Oracle and/or its affiliates. All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice (including the next
-paragraph) shall be included in all copies or substantial portions of the
-Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
-Copyright (c) 1989 X Consortium, Inc. and Digital Equipment Corporation.
-Copyright (c) 1992 X Consortium, Inc. and Intergraph Corporation.
-Copyright (c) 1993 X Consortium, Inc. and Silicon Graphics, Inc.
-Copyright (c) 1994, 1995 X Consortium, Inc. and Hewlett-Packard Company.
-
-Permission to use, copy, modify, and distribute this documentation for
-any purpose and without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-Digital Equipment Corporation, Intergraph Corporation, Silicon
-Graphics, Hewlett-Packard, and the X Consortium make no
-representations about the suitability for any purpose of the
-information in this document. This documentation is provided ``as is''
-without express or implied warranty.
diff --git a/o/ODM/ODM-2.8.7/licenses/libx11_copyright.txt b/o/ODM/ODM-2.8.7/licenses/libx11_copyright.txt
deleted file mode 100644
index 7ca010b0..00000000
--- a/o/ODM/ODM-2.8.7/licenses/libx11_copyright.txt
+++ /dev/null
@@ -1,944 +0,0 @@
-This package was downloaded from
-http://xorg.freedesktop.org/releases/individual/lib/
-
-The following is the 'standard copyright' agreed upon by most contributors,
-and is currently the canonical license preferred by the X.Org Foundation.
-This is a slight variant of the common MIT license form published by the
-Open Source Initiative at http://www.opensource.org/licenses/mit-license.php
-
-Copyright holders of new code should use this license statement where
-possible, and insert their name to this list. Please sort by surname
-for people, and by the full name for other entities (e.g. Juliusz
-Chroboczek sorts before Intel Corporation sorts before Daniel Stone).
-
-See each individual source file or directory for the license that applies
-to that file.
-
-Copyright (C) 2003-2006,2008 Jamey Sharp, Josh Triplett
-Copyright © 2009 Red Hat, Inc.
-Copyright 1990-1992,1999,2000,2004,2009,2010 Oracle and/or its affiliates.
-All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice (including the next
-paragraph) shall be included in all copies or substantial portions of the
-Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
- ----------------------------------------------------------------------
-
-The following licenses are 'legacy' - usually MIT/X11 licenses with the name
-of the copyright holder(s) in the license statement:
-
-Copyright 1984-1994, 1998 The Open Group
-
-Permission to use, copy, modify, distribute, and sell this software and its
-documentation for any purpose is hereby granted without fee, provided that
-the above copyright notice appear in all copies and that both that
-copyright notice and this permission notice appear in supporting
-documentation.
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-OPEN GROUP BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
-AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-Except as contained in this notice, the name of The Open Group shall not be
-used in advertising or otherwise to promote the sale, use or other dealings
-in this Software without prior written authorization from The Open Group.
-
-X Window System is a trademark of The Open Group.
-
- ----------------------------------------
-
-Copyright 1985, 1986, 1987, 1988, 1989, 1990, 1991, 1994, 1996 X Consortium
-Copyright 2000 The XFree86 Project, Inc.
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be included
-in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR
-OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
-ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-OTHER DEALINGS IN THE SOFTWARE.
-
-Except as contained in this notice, the name of the X Consortium shall
-not be used in advertising or otherwise to promote the sale, use or
-other dealings in this Software without prior written authorization
-from the X Consortium.
-
-Copyright 1985, 1986, 1987, 1988, 1989, 1990, 1991 by
-Digital Equipment Corporation
-
-Portions Copyright 1990, 1991 by Tektronix, Inc.
-
-Permission to use, copy, modify and distribute this documentation for
-any purpose and without fee is hereby granted, provided that the above
-copyright notice appears in all copies and that both that copyright notice
-and this permission notice appear in all copies, and that the names of
-Digital and Tektronix not be used in in advertising or publicity pertaining
-to this documentation without specific, written prior permission.
-Digital and Tektronix makes no representations about the suitability
-of this documentation for any purpose.
-It is provided ``as is'' without express or implied warranty.
-
- ----------------------------------------
-
-Copyright (c) 1999-2000 Free Software Foundation, Inc.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-FREE SOFTWARE FOUNDATION BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-Except as contained in this notice, the name of the Free Software Foundation
-shall not be used in advertising or otherwise to promote the sale, use or
-other dealings in this Software without prior written authorization from the
-Free Software Foundation.
-
- ----------------------------------------
-
-Code and supporting documentation (c) Copyright 1990 1991 Tektronix, Inc.
- All Rights Reserved
-
-This file is a component of an X Window System-specific implementation
-of Xcms based on the TekColor Color Management System. TekColor is a
-trademark of Tektronix, Inc. The term "TekHVC" designates a particular
-color space that is the subject of U.S. Patent No. 4,985,853 (equivalent
-foreign patents pending). Permission is hereby granted to use, copy,
-modify, sell, and otherwise distribute this software and its
-documentation for any purpose and without fee, provided that:
-
-1. This copyright, permission, and disclaimer notice is reproduced in
- all copies of this software and any modification thereof and in
- supporting documentation;
-2. Any color-handling application which displays TekHVC color
- coordinates identifies these as TekHVC color coordinates in any
- interface that displays these coordinates and in any associated
- documentation;
-3. The term "TekHVC" is always used, and is only used, in association
- with the mathematical derivations of the TekHVC Color Space,
- including those provided in this file and any equivalent pathways and
- mathematical derivations, regardless of digital (e.g., floating point
- or integer) representation.
-
-Tektronix makes no representation about the suitability of this software
-for any purpose. It is provided "as is" and with all faults.
-
-TEKTRONIX DISCLAIMS ALL WARRANTIES APPLICABLE TO THIS SOFTWARE,
-INCLUDING THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-PARTICULAR PURPOSE. IN NO EVENT SHALL TEKTRONIX BE LIABLE FOR ANY
-SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
-RESULTING FROM LOSS OF USE, DATA, OR PROFITS, WHETHER IN AN ACTION OF
-CONTRACT, NEGLIGENCE, OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
-CONNECTION WITH THE USE OR THE PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-(c) Copyright 1995 FUJITSU LIMITED
-This is source code modified by FUJITSU LIMITED under the Joint
-Development Agreement for the CDE/Motif PST.
-
- ----------------------------------------
-
-Copyright 1992 by Oki Technosystems Laboratory, Inc.
-Copyright 1992 by Fuji Xerox Co., Ltd.
-
-Permission to use, copy, modify, distribute, and sell this software
-and its documentation for any purpose is hereby granted without fee,
-provided that the above copyright notice appear in all copies and
-that both that copyright notice and this permission notice appear
-in supporting documentation, and that the name of Oki Technosystems
-Laboratory and Fuji Xerox not be used in advertising or publicity
-pertaining to distribution of the software without specific, written
-prior permission.
-Oki Technosystems Laboratory and Fuji Xerox make no representations
-about the suitability of this software for any purpose. It is provided
-"as is" without express or implied warranty.
-
-OKI TECHNOSYSTEMS LABORATORY AND FUJI XEROX DISCLAIM ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL OKI TECHNOSYSTEMS
-LABORATORY AND FUJI XEROX BE LIABLE FOR ANY SPECIAL, INDIRECT OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE
-OR PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright 1990, 1991, 1992, 1993, 1994 by FUJITSU LIMITED
-
-Permission to use, copy, modify, distribute, and sell this software
-and its documentation for any purpose is hereby granted without fee,
-provided that the above copyright notice appear in all copies and
-that both that copyright notice and this permission notice appear
-in supporting documentation, and that the name of FUJITSU LIMITED
-not be used in advertising or publicity pertaining to distribution
-of the software without specific, written prior permission.
-FUJITSU LIMITED makes no representations about the suitability of
-this software for any purpose.
-It is provided "as is" without express or implied warranty.
-
-FUJITSU LIMITED DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
-INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
-EVENT SHALL FUJITSU LIMITED BE LIABLE FOR ANY SPECIAL, INDIRECT OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF
-USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-
-Copyright (c) 1995 David E. Wexelblat. All rights reserved
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be included
-in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL DAVID E. WEXELBLAT BE LIABLE FOR ANY CLAIM, DAMAGES OR
-OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
-ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-OTHER DEALINGS IN THE SOFTWARE.
-
-Except as contained in this notice, the name of David E. Wexelblat shall
-not be used in advertising or otherwise to promote the sale, use or
-other dealings in this Software without prior written authorization
-from David E. Wexelblat.
-
- ----------------------------------------
-
-Copyright 1990, 1991 by OMRON Corporation
-
-Permission to use, copy, modify, distribute, and sell this software and its
-documentation for any purpose is hereby granted without fee, provided that
-the above copyright notice appear in all copies and that both that
-copyright notice and this permission notice appear in supporting
-documentation, and that the name OMRON not be used in
-advertising or publicity pertaining to distribution of the software without
-specific, written prior permission. OMRON makes no representations
-about the suitability of this software for any purpose. It is provided
-"as is" without express or implied warranty.
-
-OMRON DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
-INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
-EVENT SHALL OMRON BE LIABLE FOR ANY SPECIAL, INDIRECT OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
-DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-TORTUOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright 1985, 1986, 1987, 1988, 1989, 1990, 1991 by
-Digital Equipment Corporation
-
-Portions Copyright 1990, 1991 by Tektronix, Inc
-
-Rewritten for X.org by Chris Lee
-
-Permission to use, copy, modify, distribute, and sell this documentation
-for any purpose and without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-Chris Lee makes no representations about the suitability for any purpose
-of the information in this document. It is provided \`\`as-is'' without
-express or implied warranty.
-
- ----------------------------------------
-
-Copyright 1993 by Digital Equipment Corporation, Maynard, Massachusetts,
-Copyright 1994 by FUJITSU LIMITED
-Copyright 1994 by Sony Corporation
-
- All Rights Reserved
-
-Permission to use, copy, modify, and distribute this software and its
-documentation for any purpose and without fee is hereby granted,
-provided that the above copyright notice appear in all copies and that
-both that copyright notice and this permission notice appear in
-supporting documentation, and that the names of Digital, FUJITSU
-LIMITED and Sony Corporation not be used in advertising or publicity
-pertaining to distribution of the software without specific, written
-prior permission.
-
-DIGITAL, FUJITSU LIMITED AND SONY CORPORATION DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL DIGITAL, FUJITSU LIMITED
-AND SONY CORPORATION BE LIABLE FOR ANY SPECIAL, INDIRECT OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF
-USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-
-Copyright 1991 by the Open Software Foundation
-
-Permission to use, copy, modify, distribute, and sell this software and its
-documentation for any purpose is hereby granted without fee, provided that
-the above copyright notice appear in all copies and that both that
-copyright notice and this permission notice appear in supporting
-documentation, and that the name of Open Software Foundation
-not be used in advertising or publicity pertaining to distribution of the
-software without specific, written prior permission. Open Software
-Foundation makes no representations about the suitability of this
-software for any purpose. It is provided "as is" without express or
-implied warranty.
-
-OPEN SOFTWARE FOUNDATION DISCLAIMS ALL WARRANTIES WITH REGARD TO
-THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS, IN NO EVENT SHALL OPEN SOFTWARE FOUNDATIONN BE
-LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright 1990, 1991, 1992,1993, 1994 by FUJITSU LIMITED
-Copyright 1993, 1994 by Sony Corporation
-
-Permission to use, copy, modify, distribute, and sell this software and
-its documentation for any purpose is hereby granted without fee, provided
-that the above copyright notice appear in all copies and that both that
-copyright notice and this permission notice appear in supporting
-documentation, and that the name of FUJITSU LIMITED and Sony Corporation
-not be used in advertising or publicity pertaining to distribution of the
-software without specific, written prior permission. FUJITSU LIMITED and
-Sony Corporation makes no representations about the suitability of this
-software for any purpose. It is provided "as is" without express or
-implied warranty.
-
-FUJITSU LIMITED AND SONY CORPORATION DISCLAIMS ALL WARRANTIES WITH REGARD
-TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS, IN NO EVENT SHALL FUJITSU LIMITED OR SONY CORPORATION BE LIABLE
-FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
-RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright (c) 1993, 1995 by Silicon Graphics Computer Systems, Inc.
-
-Permission to use, copy, modify, and distribute this
-software and its documentation for any purpose and without
-fee is hereby granted, provided that the above copyright
-notice appear in all copies and that both that copyright
-notice and this permission notice appear in supporting
-documentation, and that the name of Silicon Graphics not be
-used in advertising or publicity pertaining to distribution
-of the software without specific prior written permission.
-Silicon Graphics makes no representation about the suitability
-of this software for any purpose. It is provided "as is"
-without any express or implied warranty.
-
-SILICON GRAPHICS DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
-SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
-AND FITNESS FOR A PARTICULAR PURPOSE. IN NO EVENT SHALL SILICON
-GRAPHICS BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL
-DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
-DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH
-THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright 1991, 1992, 1993, 1994 by FUJITSU LIMITED
-Copyright 1993 by Digital Equipment Corporation
-
-Permission to use, copy, modify, distribute, and sell this software
-and its documentation for any purpose is hereby granted without fee,
-provided that the above copyright notice appear in all copies and that
-both that copyright notice and this permission notice appear in
-supporting documentation, and that the name of FUJITSU LIMITED and
-Digital Equipment Corporation not be used in advertising or publicity
-pertaining to distribution of the software without specific, written
-prior permission. FUJITSU LIMITED and Digital Equipment Corporation
-makes no representations about the suitability of this software for
-any purpose. It is provided "as is" without express or implied
-warranty.
-
-FUJITSU LIMITED AND DIGITAL EQUIPMENT CORPORATION DISCLAIM ALL
-WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL
-FUJITSU LIMITED AND DIGITAL EQUIPMENT CORPORATION BE LIABLE FOR
-ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
-IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
-ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
-THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright 1992, 1993 by FUJITSU LIMITED
-Copyright 1993 by Fujitsu Open Systems Solutions, Inc.
-Copyright 1994 by Sony Corporation
-
-Permission to use, copy, modify, distribute and sell this software
-and its documentation for any purpose is hereby granted without fee,
-provided that the above copyright notice appear in all copies and
-that both that copyright notice and this permission notice appear
-in supporting documentation, and that the name of FUJITSU LIMITED,
-Fujitsu Open Systems Solutions, Inc. and Sony Corporation not be
-used in advertising or publicity pertaining to distribution of the
-software without specific, written prior permission.
-FUJITSU LIMITED, Fujitsu Open Systems Solutions, Inc. and
-Sony Corporation make no representations about the suitability of
-this software for any purpose. It is provided "as is" without
-express or implied warranty.
-
-FUJITSU LIMITED, FUJITSU OPEN SYSTEMS SOLUTIONS, INC. AND SONY
-CORPORATION DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
-INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS,
-IN NO EVENT SHALL FUJITSU OPEN SYSTEMS SOLUTIONS, INC., FUJITSU LIMITED
-AND SONY CORPORATION BE LIABLE FOR ANY SPECIAL, INDIRECT OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE
-OR PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright 1987, 1988, 1990, 1993 by Digital Equipment Corporation,
-Maynard, Massachusetts,
-
- All Rights Reserved
-
-Permission to use, copy, modify, and distribute this software and its
-documentation for any purpose and without fee is hereby granted,
-provided that the above copyright notice appear in all copies and that
-both that copyright notice and this permission notice appear in
-supporting documentation, and that the name of Digital not be
-used in advertising or publicity pertaining to distribution of the
-software without specific, written prior permission.
-
-DIGITAL DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
-ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL
-DIGITAL BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
-ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
-ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
-SOFTWARE.
-
- ----------------------------------------
-
-Copyright 1993 by SunSoft, Inc.
-Copyright 1999-2000 by Bruno Haible
-
-Permission to use, copy, modify, distribute, and sell this software
-and its documentation for any purpose is hereby granted without fee,
-provided that the above copyright notice appear in all copies and
-that both that copyright notice and this permission notice appear
-in supporting documentation, and that the names of SunSoft, Inc. and
-Bruno Haible not be used in advertising or publicity pertaining to
-distribution of the software without specific, written prior
-permission. SunSoft, Inc. and Bruno Haible make no representations
-about the suitability of this software for any purpose. It is
-provided "as is" without express or implied warranty.
-
-SunSoft Inc. AND Bruno Haible DISCLAIM ALL WARRANTIES WITH REGARD
-TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
-AND FITNESS, IN NO EVENT SHALL SunSoft, Inc. OR Bruno Haible BE LIABLE
-FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
-OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright 1991 by the Open Software Foundation
-Copyright 1993 by the TOSHIBA Corp.
-
-Permission to use, copy, modify, distribute, and sell this software and its
-documentation for any purpose is hereby granted without fee, provided that
-the above copyright notice appear in all copies and that both that
-copyright notice and this permission notice appear in supporting
-documentation, and that the names of Open Software Foundation and TOSHIBA
-not be used in advertising or publicity pertaining to distribution of the
-software without specific, written prior permission. Open Software
-Foundation and TOSHIBA make no representations about the suitability of this
-software for any purpose. It is provided "as is" without express or
-implied warranty.
-
-OPEN SOFTWARE FOUNDATION AND TOSHIBA DISCLAIM ALL WARRANTIES WITH REGARD TO
-THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS, IN NO EVENT SHALL OPEN SOFTWARE FOUNDATIONN OR TOSHIBA BE
-LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright 1988 by Wyse Technology, Inc., San Jose, Ca.,
-
- All Rights Reserved
-
-Permission to use, copy, modify, and distribute this software and its
-documentation for any purpose and without fee is hereby granted,
-provided that the above copyright notice appear in all copies and that
-both that copyright notice and this permission notice appear in
-supporting documentation, and that the name Wyse not be
-used in advertising or publicity pertaining to distribution of the
-software without specific, written prior permission.
-
-WYSE DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
-ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL
-DIGITAL BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
-ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
-ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
-SOFTWARE.
-
- ----------------------------------------
-
-
-Copyright 1991 by the Open Software Foundation
-Copyright 1993, 1994 by the Sony Corporation
-
-Permission to use, copy, modify, distribute, and sell this software and its
-documentation for any purpose is hereby granted without fee, provided that
-the above copyright notice appear in all copies and that both that
-copyright notice and this permission notice appear in supporting
-documentation, and that the names of Open Software Foundation and
-Sony Corporation not be used in advertising or publicity pertaining to
-distribution of the software without specific, written prior permission.
-Open Software Foundation and Sony Corporation make no
-representations about the suitability of this software for any purpose.
-It is provided "as is" without express or implied warranty.
-
-OPEN SOFTWARE FOUNDATION AND SONY CORPORATION DISCLAIM ALL
-WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL OPEN
-SOFTWARE FOUNDATIONN OR SONY CORPORATION BE LIABLE FOR ANY SPECIAL,
-INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright 1992, 1993 by FUJITSU LIMITED
-Copyright 1993 by Fujitsu Open Systems Solutions, Inc.
-
-Permission to use, copy, modify, distribute and sell this software
-and its documentation for any purpose is hereby granted without fee,
-provided that the above copyright notice appear in all copies and
-that both that copyright notice and this permission notice appear
-in supporting documentation, and that the name of FUJITSU LIMITED and
-Fujitsu Open Systems Solutions, Inc. not be used in advertising or
-publicity pertaining to distribution of the software without specific,
-written prior permission.
-FUJITSU LIMITED and Fujitsu Open Systems Solutions, Inc. makes no
-representations about the suitability of this software for any purpose.
-It is provided "as is" without express or implied warranty.
-
-FUJITSU LIMITED AND FUJITSU OPEN SYSTEMS SOLUTIONS, INC. DISCLAIMS ALL
-WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES
-OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL FUJITSU OPEN SYSTEMS
-SOLUTIONS, INC. AND FUJITSU LIMITED BE LIABLE FOR ANY SPECIAL, INDIRECT
-OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF
-USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
-OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright 1993, 1994 by Sony Corporation
-
-Permission to use, copy, modify, distribute, and sell this software
-and its documentation for any purpose is hereby granted without fee,
-provided that the above copyright notice appear in all copies and
-that both that copyright notice and this permission notice appear
-in supporting documentation, and that the name of Sony Corporation
-not be used in advertising or publicity pertaining to distribution
-of the software without specific, written prior permission.
-Sony Corporation makes no representations about the suitability of
-this software for any purpose. It is provided "as is" without
-express or implied warranty.
-
-SONY CORPORATION DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
-INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
-EVENT SHALL SONY CORPORATION BE LIABLE FOR ANY SPECIAL, INDIRECT OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF
-USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright 1986, 1998 The Open Group
-Copyright (c) 2000 The XFree86 Project, Inc.
-
-Permission to use, copy, modify, distribute, and sell this software and its
-documentation for any purpose is hereby granted without fee, provided that
-the above copyright notice appear in all copies and that both that
-copyright notice and this permission notice appear in supporting
-documentation.
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-X CONSORTIUM OR THE XFREE86 PROJECT BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
-Except as contained in this notice, the name of the X Consortium or of the
-XFree86 Project shall not be used in advertising or otherwise to promote the
-sale, use or other dealings in this Software without prior written
-authorization from the X Consortium and the XFree86 Project.
-
- ----------------------------------------
-
-Copyright 1990, 1991 by OMRON Corporation, NTT Software Corporation,
- and Nippon Telegraph and Telephone Corporation
-Copyright 1991 by the Open Software Foundation
-Copyright 1993 by the FUJITSU LIMITED
-
-Permission to use, copy, modify, distribute, and sell this software and its
-documentation for any purpose is hereby granted without fee, provided that
-the above copyright notice appear in all copies and that both that
-copyright notice and this permission notice appear in supporting
-documentation, and that the names of OMRON, NTT Software, NTT, and
-Open Software Foundation not be used in advertising or publicity
-pertaining to distribution of the software without specific,
-written prior permission. OMRON, NTT Software, NTT, and Open Software
-Foundation make no representations about the suitability of this
-software for any purpose. It is provided "as is" without express or
-implied warranty.
-
-OMRON, NTT SOFTWARE, NTT, AND OPEN SOFTWARE FOUNDATION
-DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
-ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT
-SHALL OMRON, NTT SOFTWARE, NTT, OR OPEN SOFTWARE FOUNDATION BE
-LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright 1988 by Wyse Technology, Inc., San Jose, Ca,
-Copyright 1987 by Digital Equipment Corporation, Maynard, Massachusetts,
-
- All Rights Reserved
-
-Permission to use, copy, modify, and distribute this software and its
-documentation for any purpose and without fee is hereby granted,
-provided that the above copyright notice appear in all copies and that
-both that copyright notice and this permission notice appear in
-supporting documentation, and that the name Digital not be
-used in advertising or publicity pertaining to distribution of the
-software without specific, written prior permission.
-
-DIGITAL AND WYSE DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
-INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
-EVENT SHALL DIGITAL OR WYSE BE LIABLE FOR ANY SPECIAL, INDIRECT OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF
-USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-
-Copyright 1991, 1992 by Fuji Xerox Co., Ltd.
-Copyright 1992, 1993, 1994 by FUJITSU LIMITED
-
-Permission to use, copy, modify, distribute, and sell this software
-and its documentation for any purpose is hereby granted without fee,
-provided that the above copyright notice appear in all copies and
-that both that copyright notice and this permission notice appear
-in supporting documentation, and that the name of Fuji Xerox,
-FUJITSU LIMITED not be used in advertising or publicity pertaining
-to distribution of the software without specific, written prior
-permission. Fuji Xerox, FUJITSU LIMITED make no representations
-about the suitability of this software for any purpose.
-It is provided "as is" without express or implied warranty.
-
-FUJI XEROX, FUJITSU LIMITED DISCLAIM ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL FUJI XEROX,
-FUJITSU LIMITED BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL
-DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA
-OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright 2006 Josh Triplett
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be included
-in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR
-OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
-ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-OTHER DEALINGS IN THE SOFTWARE.
-
- ----------------------------------------
-
-(c) Copyright 1996 by Sebastien Marineau and Holger Veit
-
-
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-HOLGER VEIT BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
-OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
-Except as contained in this notice, the name of Sebastien Marineau or Holger Veit
-shall not be used in advertising or otherwise to promote the sale, use or other
-dealings in this Software without prior written authorization from Holger Veit or
-Sebastien Marineau.
-
- ----------------------------------------
-
-Copyright 1990, 1991 by OMRON Corporation, NTT Software Corporation,
- and Nippon Telegraph and Telephone Corporation
-Copyright 1991 by the Open Software Foundation
-Copyright 1993 by the TOSHIBA Corp.
-Copyright 1993, 1994 by Sony Corporation
-Copyright 1993, 1994 by the FUJITSU LIMITED
-
-Permission to use, copy, modify, distribute, and sell this software and its
-documentation for any purpose is hereby granted without fee, provided that
-the above copyright notice appear in all copies and that both that
-copyright notice and this permission notice appear in supporting
-documentation, and that the names of OMRON, NTT Software, NTT, Open
-Software Foundation, and Sony Corporation not be used in advertising
-or publicity pertaining to distribution of the software without specific,
-written prior permission. OMRON, NTT Software, NTT, Open Software
-Foundation, and Sony Corporation make no representations about the
-suitability of this software for any purpose. It is provided "as is"
-without express or implied warranty.
-
-OMRON, NTT SOFTWARE, NTT, OPEN SOFTWARE FOUNDATION, AND SONY
-CORPORATION DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
-ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT
-SHALL OMRON, NTT SOFTWARE, NTT, OPEN SOFTWARE FOUNDATION, OR SONY
-CORPORATION BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
-ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
-IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
-OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright 2000 by Bruno Haible
-
-Permission to use, copy, modify, distribute, and sell this software
-and its documentation for any purpose is hereby granted without fee,
-provided that the above copyright notice appear in all copies and
-that both that copyright notice and this permission notice appear
-in supporting documentation, and that the name of Bruno Haible not
-be used in advertising or publicity pertaining to distribution of the
-software without specific, written prior permission. Bruno Haible
-makes no representations about the suitability of this software for
-any purpose. It is provided "as is" without express or implied
-warranty.
-
-Bruno Haible DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
-INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
-NO EVENT SHALL Bruno Haible BE LIABLE FOR ANY SPECIAL, INDIRECT OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE
-OR PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright © 2003 Keith Packard
-
-Permission to use, copy, modify, distribute, and sell this software and its
-documentation for any purpose is hereby granted without fee, provided that
-the above copyright notice appear in all copies and that both that
-copyright notice and this permission notice appear in supporting
-documentation, and that the name of Keith Packard not be used in
-advertising or publicity pertaining to distribution of the software without
-specific, written prior permission. Keith Packard makes no
-representations about the suitability of this software for any purpose. It
-is provided "as is" without express or implied warranty.
-
-KEITH PACKARD DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
-INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
-EVENT SHALL KEITH PACKARD BE LIABLE FOR ANY SPECIAL, INDIRECT OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
-DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
-
- ----------------------------------------
-
-Copyright (c) 2007-2009, Troy D. Hanson
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
-IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
-OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
- ----------------------------------------
-
-Copyright 1992, 1993 by TOSHIBA Corp.
-
-Permission to use, copy, modify, and distribute this software and its
-documentation for any purpose and without fee is hereby granted, provided
-that the above copyright notice appear in all copies and that both that
-copyright notice and this permission notice appear in supporting
-documentation, and that the name of TOSHIBA not be used in advertising
-or publicity pertaining to distribution of the software without specific,
-written prior permission. TOSHIBA make no representations about the
-suitability of this software for any purpose. It is provided "as is"
-without express or implied warranty.
-
-TOSHIBA DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
-ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL
-TOSHIBA BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
-ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
-ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
-SOFTWARE.
-
-
- ----------------------------------------
-
-Copyright IBM Corporation 1993
-
-All Rights Reserved
-
-License to use, copy, modify, and distribute this software and its
-documentation for any purpose and without fee is hereby granted,
-provided that the above copyright notice appear in all copies and that
-both that copyright notice and this permission notice appear in
-supporting documentation, and that the name of IBM not be
-used in advertising or publicity pertaining to distribution of the
-software without specific, written prior permission.
-
-IBM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
-ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS, AND
-NONINFRINGEMENT OF THIRD PARTY RIGHTS, IN NO EVENT SHALL
-IBM BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
-ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
-ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
-SOFTWARE.
-
- ----------------------------------------
-
-Copyright 1990, 1991 by OMRON Corporation, NTT Software Corporation,
- and Nippon Telegraph and Telephone Corporation
-
-Permission to use, copy, modify, distribute, and sell this software and its
-documentation for any purpose is hereby granted without fee, provided that
-the above copyright notice appear in all copies and that both that
-copyright notice and this permission notice appear in supporting
-documentation, and that the names of OMRON, NTT Software, and NTT
-not be used in advertising or publicity pertaining to distribution of the
-software without specific, written prior permission. OMRON, NTT Software,
-and NTT make no representations about the suitability of this
-software for any purpose. It is provided "as is" without express or
-implied warranty.
-
-OMRON, NTT SOFTWARE, AND NTT, DISCLAIM ALL WARRANTIES WITH REGARD
-TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
-AND FITNESS, IN NO EVENT SHALL OMRON, NTT SOFTWARE, OR NTT, BE
-LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/o/ODM/ODM-2.8.7/licenses/license.md b/o/ODM/ODM-2.8.7/licenses/license.md
deleted file mode 100644
index 11357853..00000000
--- a/o/ODM/ODM-2.8.7/licenses/license.md
+++ /dev/null
@@ -1,24 +0,0 @@
-Licensing for portions of OpenDroneMap is as follows:
-* ImageMagick - Apache 2.0 - http://www.imagemagick.org/script/license.php
-* Jhead - None - http://www.sentex.net/~mwandel/jhead/
-* libjpeg - GPLv2 - http://sourceforge.net/projects/libjpeg/
-* Boost - Boost Software License, Version 1.0 - http://www.boost.org/LICENSE_1_0.txt
-* libgsl0 - GPL - http://www.gnu.org/software/gsl/
-* liblapack - Modified BSD - http://www.netlib.org/lapack/LICENSE.txt
-* Flann - BSD2 - http://opensource.org/licenses/bsd-license.php
-* libzip - BSD - http://www.nih.at/libzip/LICENSE.html
-* libcv - BSD - http://opencv.org/license.html
-* libcvaux - BSD - http://opencv.org/license.html
-* bundler - GPLv3 - http://www.gnu.org/copyleft/gpl.html
-* parallel - GPLv3 - http://www.gnu.org/copyleft/gpl.html
-* PoissonRecon - BSD - http://www.cs.jhu.edu/~misha/Code/PoissonRecon/license.txt
-* vlfeat - BSD - http://www.vlfeat.org/license.html
-* graclus - GPLv3 - http://www.gnu.org/copyleft/gpl.html
-* PROJ.4 - MIT - http://trac.osgeo.org/proj/wiki/WikiStart#License
-* PCL - BSD - http://pointclouds.org
- * Flann - BSD2 - http://opensource.org/licenses/bsd-license.php
- * Eigen - MPL2 - http://www.mozilla.org/MPL/2.0
- * Qhull - http://www.qhull.org/COPYING.txt
-* libext - https://github.com/OpenDroneMap/OpenDroneMap/blob/gh-pages/licenses/libext_copyright.txt
-* libx11 - https://github.com/OpenDroneMap/OpenDroneMap/blob/gh-pages/licenses/libx11_copyright.txt
-* MVS Texturing - BSD - https://github.com/nmoehrle/mvs-texturing/blob/master/LICENSE.txt
diff --git a/o/ODM/ODM-2.8.7/odm_docker_readme.txt b/o/ODM/ODM-2.8.7/odm_docker_readme.txt
deleted file mode 100644
index b5f4a595..00000000
--- a/o/ODM/ODM-2.8.7/odm_docker_readme.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-#ODM 0.3.1 under Debian 8.10 jessie - 2018-02-27 by yjmenezes
-#https://github.com/OpenDroneMap/OpenDroneMap/wiki/Docker
-#git clone https://github.com/OpenDroneMap/OpenDroneMap.git
-git clone https://github.com/yjmenezes/OpenDroneMap.git
-cd OpenDroneMap
-# list images
-docker images
-#remove old my_odm_image if necessary
-docker rmi my_odm_image
-#build a fresh one using instructions from ./Dockerfile
-docker build -t my_odm_image .
-#run tests with supplied image set.
-#Mapped host directories for output. -v host_path:container_path
-cd tests/test_data/
-sudo rm -r odm_* opensfm
-docker run -it --rm \
- -v $(pwd)/gcp_list.txt:/code/gcp_list.txt \
- -v $(pwd)/images:/code/images \
- -v $(pwd)/opensfm:/code/opensfm \
- -v $(pwd)/odm_meshing:/code/odm_meshing \
- -v $(pwd)/odm_georeferencing:/code/odm_georeferencing \
- -v $(pwd)/odm_orthophoto:/code/odm_orthophoto \
- -v $(pwd)/odm_texturing:/code/odm_texturing \
- my_odm_image --mesh-size 100000
-
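The readme above relies on `-v host:container` bind mounts so that each stage's outputs survive the `--rm`'d container. For illustration, a minimal sketch that assembles the same `docker run` invocation from Python; the `my_odm_image` tag and the stage directory names come from the readme itself, while the working-directory layout is an assumption:

```python
import os
import subprocess

# Rebuild the readme's `docker run` command: one bind mount per output stage,
# plus the GCP file and the input images.
stages = ["opensfm", "odm_meshing", "odm_georeferencing",
          "odm_orthophoto", "odm_texturing"]
cwd = os.getcwd()  # expected to be tests/test_data, as in the readme

cmd = ["docker", "run", "-it", "--rm",
       "-v", "%s/gcp_list.txt:/code/gcp_list.txt" % cwd,
       "-v", "%s/images:/code/images" % cwd]
for stage in stages:
    cmd += ["-v", "%s/%s:/code/%s" % (cwd, stage, stage)]
cmd += ["my_odm_image", "--mesh-size", "100000"]

subprocess.check_call(cmd)
```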
diff --git a/o/ODM/ODM-2.8.7/opendm/__init__.py b/o/ODM/ODM-2.8.7/opendm/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/o/ODM/ODM-2.8.7/opendm/boundary.py b/o/ODM/ODM-2.8.7/opendm/boundary.py
deleted file mode 100644
index 86ea5ae2..00000000
--- a/o/ODM/ODM-2.8.7/opendm/boundary.py
+++ /dev/null
@@ -1,121 +0,0 @@
-import fiona
-import fiona.crs
-import os
-import io
-import json
-from opendm import system
-from pyproj import CRS
-from opendm.location import transformer
-from opendm.utils import double_quote
-from osgeo import ogr
-from opendm.shots import get_origin
-
-def compute_boundary_from_shots(reconstruction_json, buffer=0, reconstruction_offset=(0, 0)):
- if not os.path.isfile(reconstruction_json):
- raise IOError(reconstruction_json + " does not exist.")
-
- with open(reconstruction_json) as f:
- data = json.load(f)
- reconstruction = data[0]
-
- mp = ogr.Geometry(ogr.wkbMultiPoint)
-
- for shot_image in reconstruction['shots']:
- shot = reconstruction['shots'][shot_image]
- if shot.get('gps_dop', 999999) < 999999:
- camera = reconstruction['cameras'][shot['camera']]
-
- p = ogr.Geometry(ogr.wkbPoint)
- origin = get_origin(shot)
-
- p.AddPoint_2D(origin[0] + reconstruction_offset[0], origin[1] + reconstruction_offset[1])
- mp.AddGeometry(p)
-
- if mp.GetGeometryCount() < 3:
- return None
-
- convexhull = mp.ConvexHull()
- boundary = convexhull.Buffer(buffer)
-
- return load_boundary(boundary.ExportToJson())
-
-def load_boundary(boundary_json, reproject_to_proj4=None):
- if not isinstance(boundary_json, str):
- boundary_json = json.dumps(boundary_json)
-
- with fiona.open(io.BytesIO(boundary_json.encode('utf-8')), 'r') as src:
- if len(src) != 1:
- raise IOError("Boundary must have a single polygon (found: %s)" % len(src))
-
- geom = src[0]['geometry']
-
- if geom['type'] != 'Polygon':
- raise IOError("Boundary must have a polygon feature (found: %s)" % geom['type'])
-
- rings = geom['coordinates']
-
- if len(rings) == 0:
- raise IOError("Boundary geometry has no rings")
-
- coords = rings[0]
- if len(coords) == 0:
- raise IOError("Boundary geometry has no coordinates")
-
- dimensions = len(coords[0])
-
- if reproject_to_proj4 is not None:
- t = transformer(CRS.from_proj4(fiona.crs.to_string(src.crs)),
- CRS.from_proj4(reproject_to_proj4))
- coords = [t.TransformPoint(*c)[:dimensions] for c in coords]
-
- return coords
-
-def boundary_offset(boundary, reconstruction_offset):
- if boundary is None or reconstruction_offset is None:
- return boundary
-
- res = []
- dims = len(boundary[0])
- for c in boundary:
- if dims == 2:
- res.append((c[0] - reconstruction_offset[0], c[1] - reconstruction_offset[1]))
- else:
- res.append((c[0] - reconstruction_offset[0], c[1] - reconstruction_offset[1], c[2]))
-
- return res
-
-def as_polygon(boundary):
- if boundary is None:
- return None
-
- return "POLYGON((" + ", ".join([" ".join(map(str, c)) for c in boundary]) + "))"
-
-def as_geojson(boundary):
- return '{"type":"FeatureCollection","features":[{"type":"Feature","properties":{},"geometry":{"type":"Polygon","coordinates":[%s]}}]}' % str(list(map(list, boundary)))
-
-def export_to_bounds_files(boundary, proj4, bounds_json_file, bounds_gpkg_file):
- with open(bounds_json_file, "w") as f:
- f.write(json.dumps({
- "type": "FeatureCollection",
- "name": "bounds",
- "features": [{
- "type": "Feature",
- "properties": {},
- "geometry": {
- "type": "Polygon",
- "coordinates": [boundary]
- }
- }]
- }))
-
- if os.path.isfile(bounds_gpkg_file):
- os.remove(bounds_gpkg_file)
-
- kwargs = {
- 'proj4': proj4,
- 'input': double_quote(bounds_json_file),
- 'output': double_quote(bounds_gpkg_file)
- }
-
- system.run('ogr2ogr -overwrite -f GPKG -a_srs "{proj4}" {output} {input}'.format(**kwargs))
-
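A note on usage: `load_boundary` accepts either a GeoJSON string or an already-parsed dict, insists on exactly one Polygon feature, and returns only the outer ring as a plain coordinate list, which `boundary_offset` and `as_polygon` then operate on. A minimal sketch, assuming the `opendm` package (with its fiona/GDAL dependencies) is importable; the coordinates are invented:

```python
from opendm.boundary import load_boundary, boundary_offset, as_polygon

# A single square polygon as a GeoJSON FeatureCollection (made-up coordinates)
boundary_geojson = {
    "type": "FeatureCollection",
    "features": [{
        "type": "Feature",
        "properties": {},
        "geometry": {
            "type": "Polygon",
            "coordinates": [[[0, 0], [10, 0], [10, 10], [0, 10], [0, 0]]],
        },
    }],
}

coords = load_boundary(boundary_geojson)  # outer ring only
local = boundary_offset(coords, (5, 5))   # shift into the reconstruction frame
print(as_polygon(local))                  # POLYGON((-5 -5, 5 -5, 5 5, -5 5, -5 -5))
```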
diff --git a/o/ODM/ODM-2.8.7/opendm/camera.py b/o/ODM/ODM-2.8.7/opendm/camera.py
deleted file mode 100644
index ee78caff..00000000
--- a/o/ODM/ODM-2.8.7/opendm/camera.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import os, json
-from opendm import log
-
-def get_cameras_from_opensfm(reconstruction_file):
- """
- Extract the cameras from OpenSfM's reconstruction.json
- """
- if os.path.exists(reconstruction_file):
- with open(reconstruction_file, 'r') as fin:
- reconstructions = json.loads(fin.read())
-
- result = {}
- for recon in reconstructions:
- if 'cameras' in recon:
- for camera_id in recon['cameras']:
- # Strip "v2" from OpenSfM camera IDs
- new_camera_id = camera_id
- if new_camera_id.startswith("v2 "):
- new_camera_id = new_camera_id[3:]
-
- result[new_camera_id] = recon['cameras'][camera_id]
-
- # Remove "_prior" keys
- keys = list(result[new_camera_id].keys())
- for k in keys:
- if k.endswith('_prior'):
- result[new_camera_id].pop(k)
- return result
- else:
- raise RuntimeError("%s does not exist." % reconstruction_file)
-
-
-def get_opensfm_camera_models(cameras):
- """
- Convert cameras to a format OpenSfM can understand
- (opposite of get_cameras_from_opensfm)
- """
- if isinstance(cameras, dict):
- result = {}
- for camera_id in cameras:
- # Quick check on IDs
- if len(camera_id.split(" ")) < 6:
- raise RuntimeError("Invalid cameraID: %s" % camera_id)
-
- # Add "v2" to camera ID
- if not camera_id.startswith("v2 "):
- osfm_camera_id = "v2 " + camera_id
- else:
- osfm_camera_id = camera_id
-
- # Add "_prior" keys
- camera = cameras[camera_id]
- prior_fields = ["focal","focal_x","focal_y","c_x","c_y","k1","k2","p1","p2","k3"]
- valid_fields = ["id","width","height","projection_type"] + prior_fields + [f + "_prior" for f in prior_fields]
-
- keys = list(camera.keys())
- for param in keys:
- param_prior = param + "_prior"
- if param in prior_fields and not param_prior in camera:
- camera[param_prior] = camera[param]
-
- # Remove invalid keys
- keys = list(camera.keys())
- for k in keys:
- if not k in valid_fields:
- camera.pop(k)
-                    log.ODM_WARNING("Invalid camera key ignored: %s" % k)
-
- result[osfm_camera_id] = camera
- return result
- else:
- raise RuntimeError("Invalid cameras format: %s. Expected dict." % str(cameras))
diff --git a/o/ODM/ODM-2.8.7/opendm/cogeo.py b/o/ODM/ODM-2.8.7/opendm/cogeo.py
deleted file mode 100644
index 289562e6..00000000
--- a/o/ODM/ODM-2.8.7/opendm/cogeo.py
+++ /dev/null
@@ -1,66 +0,0 @@
-import os
-import shutil
-from opendm import system
-from opendm.concurrency import get_max_memory
-from opendm import io
-from opendm import log
-
-def convert_to_cogeo(src_path, blocksize=256, max_workers=1, compression="DEFLATE"):
- """
- Guarantee that the .tif passed as an argument is a Cloud Optimized GeoTIFF (cogeo)
- The file is destructively converted into a cogeo.
- If the file cannot be converted, the function does not change the file
- :param src_path: path to GeoTIFF
- :return: True on success
- """
-
- if not os.path.isfile(src_path):
-        log.ODM_WARNING("Cannot convert to cogeo: %s (file does not exist)" % src_path)
- return False
-
- log.ODM_INFO("Optimizing %s as Cloud Optimized GeoTIFF" % src_path)
-
- tmpfile = io.related_file_path(src_path, postfix='_cogeo')
- swapfile = io.related_file_path(src_path, postfix='_cogeo_swap')
-
- kwargs = {
- 'threads': max_workers if max_workers else 'ALL_CPUS',
- 'blocksize': blocksize,
- 'max_memory': get_max_memory(),
- 'src_path': src_path,
- 'tmpfile': tmpfile,
- 'compress': compression,
- 'predictor': '2' if compression in ['LZW', 'DEFLATE'] else '1',
- }
-
- try:
- system.run("gdal_translate "
- "-of COG "
- "-co NUM_THREADS={threads} "
- "-co BLOCKSIZE={blocksize} "
- "-co COMPRESS={compress} "
- "-co PREDICTOR={predictor} "
- "-co BIGTIFF=IF_SAFER "
- "-co RESAMPLING=NEAREST "
- "--config GDAL_CACHEMAX {max_memory}% "
- "--config GDAL_NUM_THREADS {threads} "
- "\"{src_path}\" \"{tmpfile}\" ".format(**kwargs))
- except Exception as e:
- log.ODM_WARNING("Cannot create Cloud Optimized GeoTIFF: %s" % str(e))
-
- if os.path.isfile(tmpfile):
- shutil.move(src_path, swapfile) # Move to swap location
-
- try:
- shutil.move(tmpfile, src_path)
- except IOError as e:
- log.ODM_WARNING("Cannot move %s to %s: %s" % (tmpfile, src_path, str(e)))
- shutil.move(swapfile, src_path) # Attempt to restore
-
- if os.path.isfile(swapfile):
- os.remove(swapfile)
-
- return True
- else:
- return False
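The conversion is an in-place swap: the source GeoTIFF is moved aside, `gdal_translate -of COG` writes the optimized copy, and the new file only takes over the original path if that step succeeds, otherwise the original is kept and `False` is returned. A minimal usage sketch; the filename is illustrative and the `COG` output driver is assumed available (GDAL 3.1 or newer):

```python
from opendm.cogeo import convert_to_cogeo

# Destructively rewrites the GeoTIFF as a Cloud Optimized GeoTIFF
if convert_to_cogeo("odm_orthophoto.tif", blocksize=512, compression="LZW"):
    print("COG conversion succeeded")
```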
diff --git a/o/ODM/ODM-2.8.7/opendm/concurrency.py b/o/ODM/ODM-2.8.7/opendm/concurrency.py
deleted file mode 100644
index 3cb62488..00000000
--- a/o/ODM/ODM-2.8.7/opendm/concurrency.py
+++ /dev/null
@@ -1,100 +0,0 @@
-from psutil import virtual_memory
-import os
-import sys
-try:
- import Queue as queue
-except ImportError:
- import queue
-import threading
-import time
-from opendm import log
-
-def get_max_memory(minimum = 5, use_at_most = 0.5):
- """
-    :param minimum: minimum value to return (the return value will never be lower than this)
-    :param use_at_most: use at most this fraction of the available memory. 0.5 = use at most 50% of available memory
-    :return: percentage of memory to use (75 = 75%)
- """
- return max(minimum, (100 - virtual_memory().percent) * use_at_most)
-
-def get_max_memory_mb(minimum = 100, use_at_most = 0.5):
- """
-    :param minimum: minimum value to return (the return value will never be lower than this)
-    :param use_at_most: use at most this fraction of the available memory. 0.5 = use at most 50% of available memory
-    :return: amount of memory to use, in megabytes
- """
- return max(minimum, (virtual_memory().available / 1024 / 1024) * use_at_most)
-
-def parallel_map(func, items, max_workers=1, single_thread_fallback=True):
- """
-    Our own implementation of parallel processing,
-    which handles CTRL+C gracefully and reverts to
-    single-threaded processing in case of errors.
-    :param func: function to execute on each object
-    :param items: list of objects
- """
- global error
- error = None
-
- def process_one(q):
- func(q)
-
- def worker():
- global error
-
- while True:
- (num, q) = pq.get()
- if q is None or error is not None:
- pq.task_done()
- break
-
- try:
- process_one(q)
- except Exception as e:
- error = e
- finally:
- pq.task_done()
-
- if max_workers > 1:
- use_single_thread = False
- pq = queue.PriorityQueue()
- threads = []
- for i in range(max_workers):
- t = threading.Thread(target=worker)
- t.start()
- threads.append(t)
-
- i = 1
- for t in items:
- pq.put((i, t.copy()))
- i += 1
-
- def stop_workers():
- for i in range(len(threads)):
- pq.put((-1, None))
- for t in threads:
- t.join()
-
- # block until all tasks are done
- try:
- while pq.unfinished_tasks > 0:
- time.sleep(0.5)
- except KeyboardInterrupt:
- print("CTRL+C terminating...")
- stop_workers()
- sys.exit(1)
-
- stop_workers()
-
- if error is not None and single_thread_fallback:
- # Try to reprocess using a single thread
- # in case this was a memory error
- log.ODM_WARNING("Failed to run process in parallel, retrying with a single thread...")
- use_single_thread = True
- else:
- use_single_thread = True
-
- if use_single_thread:
- # Boring, single thread processing
- for q in items:
- process_one(q)
\ No newline at end of file
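`parallel_map` numbers each item into a `PriorityQueue` that a pool of worker threads drains; on the first exception it can re-run every item on a single thread, on the theory that the failure was memory pressure. One practical detail visible above: items are enqueued via `t.copy()`, so they must be copyable objects such as dicts. A hedged usage sketch with invented file names:

```python
from opendm.concurrency import parallel_map

def resize(task):
    # Stand-in for real work; an exception here would trigger the
    # single-threaded fallback pass over all items.
    print("resizing %s to %d px" % (task["path"], task["size"]))

tasks = [{"path": "IMG_%04d.JPG" % i, "size": 2048} for i in range(8)]
parallel_map(resize, tasks, max_workers=4)
```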
diff --git a/o/ODM/ODM-2.8.7/opendm/config.py b/o/ODM/ODM-2.8.7/opendm/config.py
deleted file mode 100755
index edef6f82..00000000
--- a/o/ODM/ODM-2.8.7/opendm/config.py
+++ /dev/null
@@ -1,830 +0,0 @@
-import argparse
-import json
-from opendm import context
-from opendm import io
-from opendm import log
-from appsettings import SettingsParser
-from pyodm import Node, exceptions
-import os
-import sys
-
-# parse arguments
-processopts = ['dataset', 'split', 'merge', 'opensfm', 'openmvs', 'odm_filterpoints',
- 'odm_meshing', 'mvs_texturing', 'odm_georeferencing',
- 'odm_dem', 'odm_orthophoto', 'odm_report', 'odm_postprocess']
-
-with open(os.path.join(context.root_path, 'VERSION')) as version_file:
- __version__ = version_file.read().strip()
-
-
-def path_or_json_string(string):
- try:
- return io.path_or_json_string_to_dict(string)
- except ValueError as e:
- raise argparse.ArgumentTypeError("{0}".format(str(e)))
-
-# Django URL validation regex
-def url_string(string):
- import re
- regex = re.compile(
- r'^(?:http|ftp)s?://' # http:// or https://
- r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.?)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
- r'localhost|' #localhost...
- r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
- r'(?::\d+)?' # optional port
- r'(?:/?|[/?]\S+)$', re.IGNORECASE)
-
- if re.match(regex, string) is None:
- raise argparse.ArgumentTypeError("%s is not a valid URL. The URL must be in the format: http(s)://host[:port]/[?token=]" % string)
- return string
-
-class RerunFrom(argparse.Action):
- def __call__(self, parser, namespace, values, option_string=None):
- setattr(namespace, self.dest, processopts[processopts.index(values):])
- setattr(namespace, self.dest + '_is_set', True)
-
-class StoreTrue(argparse.Action):
- def __call__(self, parser, namespace, values, option_string=None):
- setattr(namespace, self.dest, True)
- setattr(namespace, self.dest + '_is_set', True)
-
-class StoreValue(argparse.Action):
- def __call__(self, parser, namespace, values, option_string=None):
- setattr(namespace, self.dest, values)
- setattr(namespace, self.dest + '_is_set', True)
-
-args = None
-
-def config(argv=None, parser=None):
- global args
-
- if args is not None and argv is None:
- return args
-
- if sys.platform == 'win32':
- usage_bin = 'run'
- else:
- usage_bin = 'run.sh'
-
- if parser is None:
- parser = SettingsParser(description='ODM is a command line toolkit to generate maps, point clouds, 3D models and DEMs from drone, balloon or kite images.',
-                                usage='%s [options] <dataset name>' % usage_bin,
- yaml_file=open(context.settings_path))
-
- parser.add_argument('--project-path',
- metavar='',
- action=StoreValue,
- help='Path to the project folder. Your project folder should contain subfolders for each dataset. Each dataset should have an "images" folder.')
- parser.add_argument('name',
- metavar='',
- action=StoreValue,
- type=str,
- default='code',
- nargs='?',
- help='Name of dataset (i.e subfolder name within project folder). Default: %(default)s')
-
- parser.add_argument('--resize-to',
- metavar='',
- action=StoreValue,
- default=2048,
- type=int,
- help='Legacy option (use --feature-quality instead). Resizes images by the largest side for feature extraction purposes only. '
- 'Set to -1 to disable. This does not affect the final orthophoto '
- 'resolution quality and will not resize the original images. Default: %(default)s')
-
- parser.add_argument('--end-with', '-e',
- metavar='',
- action=StoreValue,
- default='odm_postprocess',
- choices=processopts,
- help='End processing at this stage. Can be one of: %(choices)s. Default: %(default)s')
-
- rerun = parser.add_mutually_exclusive_group()
-
- rerun.add_argument('--rerun', '-r',
- metavar='',
- action=StoreValue,
- choices=processopts,
- help=('Rerun this stage only and stop. Can be one of: %(choices)s. Default: %(default)s'))
-
- rerun.add_argument('--rerun-all',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Permanently delete all previous results and rerun the processing pipeline.')
-
- rerun.add_argument('--rerun-from',
- action=RerunFrom,
- metavar='',
- choices=processopts,
- help=('Rerun processing from this stage. Can be one of: %(choices)s. Default: %(default)s'))
-
- parser.add_argument('--min-num-features',
- metavar='',
- action=StoreValue,
- default=10000,
- type=int,
- help=('Minimum number of features to extract per image. '
- 'More features can be useful for finding more matches between images, '
- 'potentially allowing the reconstruction of areas with little overlap or insufficient features. '
- 'More features also slow down processing. Default: %(default)s'))
-
- parser.add_argument('--feature-type',
- metavar='',
- action=StoreValue,
- default='sift',
- choices=['akaze', 'hahog', 'orb', 'sift'],
- help=('Choose the algorithm for extracting keypoints and computing descriptors. '
- 'Can be one of: %(choices)s. Default: '
- '%(default)s'))
-
- parser.add_argument('--feature-quality',
- metavar='',
- action=StoreValue,
- default='high',
- choices=['ultra', 'high', 'medium', 'low', 'lowest'],
- help=('Set feature extraction quality. Higher quality generates better features, but requires more memory and takes longer. '
- 'Can be one of: %(choices)s. Default: '
- '%(default)s'))
-
- parser.add_argument('--matcher-type',
- metavar='',
- action=StoreValue,
- default='flann',
- choices=['bow', 'bruteforce', 'flann'],
-                        help=('Matcher algorithm, Fast Library for Approximate Nearest Neighbors or Bag of Words. FLANN is slower, but more stable. BOW is faster, but can sometimes miss valid matches. BRUTEFORCE is very slow but robust. '
- 'Can be one of: %(choices)s. Default: '
- '%(default)s'))
-
- parser.add_argument('--matcher-neighbors',
- metavar='',
- action=StoreValue,
- default=0,
- type=int,
- help='Perform image matching with the nearest images based on GPS exif data. Set to 0 to match by triangulation. Default: %(default)s')
-
- parser.add_argument('--use-fixed-camera-params',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Turn off camera parameter optimization during bundle adjustment. This can be sometimes useful for improving results that exhibit doming/bowling or when images are taken with a rolling shutter camera. Default: %(default)s')
-
- parser.add_argument('--cameras',
- default='',
- metavar='',
- action=StoreValue,
- type=path_or_json_string,
- help='Use the camera parameters computed from '
- 'another dataset instead of calculating them. '
- 'Can be specified either as path to a cameras.json file or as a '
- 'JSON string representing the contents of a '
- 'cameras.json file. Default: %(default)s')
-
- parser.add_argument('--camera-lens',
- metavar='',
- action=StoreValue,
- default='auto',
- choices=['auto', 'perspective', 'brown', 'fisheye', 'spherical', 'equirectangular', 'dual'],
- help=('Set a camera projection type. Manually setting a value '
- 'can help improve geometric undistortion. By default the application '
- 'tries to determine a lens type from the images metadata. Can be one of: %(choices)s. Default: '
- '%(default)s'))
-
- parser.add_argument('--radiometric-calibration',
- metavar='',
- action=StoreValue,
- default='none',
- choices=['none', 'camera', 'camera+sun'],
- help=('Set the radiometric calibration to perform on images. '
- 'When processing multispectral and thermal images you should set this option '
- 'to obtain reflectance/temperature values (otherwise you will get digital number values). '
- '[camera] applies black level, vignetting, row gradient gain/exposure compensation (if appropriate EXIF tags are found) and computes absolute temperature values. '
- '[camera+sun] is experimental, applies all the corrections of [camera], plus compensates for spectral radiance registered via a downwelling light sensor (DLS) taking in consideration the angle of the sun. '
- 'Can be one of: %(choices)s. Default: '
- '%(default)s'))
-
- parser.add_argument('--max-concurrency',
- metavar='',
- action=StoreValue,
- default=context.num_cores,
- type=int,
- help=('The maximum number of processes to use in various '
-                              'stages. Peak memory requirement is ~1GB per '
-                              'thread at 2 megapixel image resolution. Default: %(default)s'))
-
- parser.add_argument('--depthmap-resolution',
- metavar='',
- action=StoreValue,
- type=float,
- default=640,
- help=('Controls the density of the point cloud by setting the resolution of the depthmap images. Higher values take longer to compute '
-                              'but produce denser point clouds. Overrides the value calculated by --pc-quality. '
- 'Default: %(default)s'))
-
- parser.add_argument('--use-hybrid-bundle-adjustment',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Run local bundle adjustment for every image added to the reconstruction and a global '
- 'adjustment every 100 images. Speeds up reconstruction for very large datasets. Default: %(default)s')
-
- parser.add_argument('--sfm-algorithm',
- metavar='',
- action=StoreValue,
- default='incremental',
- choices=['incremental', 'triangulation', 'planar'],
- help=('Choose the structure from motion algorithm. For aerial datasets, if camera GPS positions and angles are available, triangulation can generate better results. For planar scenes captured at fixed altitude with nadir-only images, planar can be much faster. '
- 'Can be one of: %(choices)s. Default: '
- '%(default)s'))
-
- parser.add_argument('--use-3dmesh',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Use a full 3D mesh to compute the orthophoto instead of a 2.5D mesh. This option is a bit faster and provides similar results in planar areas. Default: %(default)s')
-
- parser.add_argument('--skip-3dmodel',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Skip generation of a full 3D model. This can save time if you only need 2D results such as orthophotos and DEMs. Default: %(default)s')
-
- parser.add_argument('--skip-report',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Skip generation of PDF report. This can save time if you don\'t need a report. Default: %(default)s')
-
- parser.add_argument('--skip-orthophoto',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Skip generation of the orthophoto. This can save time if you only need 3D results or DEMs. Default: %(default)s')
-
- parser.add_argument('--ignore-gsd',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Ignore Ground Sampling Distance (GSD). GSD '
- 'caps the maximum resolution of image outputs and '
- 'resizes images when necessary, resulting in faster processing and '
- 'lower memory usage. Since GSD is an estimate, sometimes ignoring it can result in slightly better image output quality. Default: %(default)s')
-
- parser.add_argument('--no-gpu',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Do not use GPU acceleration, even if it\'s available. Default: %(default)s')
-
- parser.add_argument('--mesh-size',
- metavar='',
- action=StoreValue,
- default=200000,
- type=int,
- help=('The maximum vertex count of the output mesh. '
- 'Default: %(default)s'))
-
- parser.add_argument('--mesh-octree-depth',
- metavar='',
- action=StoreValue,
- default=11,
- type=int,
- help=('Octree depth used in the mesh reconstruction, '
- 'increase to get more vertices, recommended '
- 'values are 8-12. Default: %(default)s'))
-
- parser.add_argument('--fast-orthophoto',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Skips dense reconstruction and 3D model generation. '
- 'It generates an orthophoto directly from the sparse reconstruction. '
- 'If you just need an orthophoto and do not need a full 3D model, turn on this option. Default: %(default)s')
-
- parser.add_argument('--crop',
- metavar='',
- action=StoreValue,
- default=3,
- type=float,
- help=('Automatically crop image outputs by creating a smooth buffer '
- 'around the dataset boundaries, shrunk by N meters. '
- 'Use 0 to disable cropping. '
- 'Default: %(default)s'))
-
- parser.add_argument('--boundary',
- default='',
- metavar='',
- action=StoreValue,
- type=path_or_json_string,
- help='GeoJSON polygon limiting the area of the reconstruction. '
- 'Can be specified either as path to a GeoJSON file or as a '
- 'JSON string representing the contents of a '
- 'GeoJSON file. Default: %(default)s')
-
- parser.add_argument('--auto-boundary',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Automatically set a boundary using camera shot locations to limit the area of the reconstruction. '
- 'This can help remove far away background artifacts (sky, background landscapes, etc.). See also --boundary. '
- 'Default: %(default)s')
-
- parser.add_argument('--pc-quality',
- metavar='',
- action=StoreValue,
- default='medium',
- choices=['ultra', 'high', 'medium', 'low', 'lowest'],
-                        help=('Set point cloud quality. Higher quality generates better, denser point clouds, but requires more memory and takes longer. Each step up in quality increases processing time roughly by a factor of 4x. '
- 'Can be one of: %(choices)s. Default: '
- '%(default)s'))
-
- parser.add_argument('--pc-classify',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Classify the point cloud outputs using a Simple Morphological Filter. '
- 'You can control the behavior of this option by tweaking the --dem-* parameters. '
- 'Default: '
- '%(default)s')
-
- parser.add_argument('--pc-csv',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Export the georeferenced point cloud in CSV format. Default: %(default)s')
-
- parser.add_argument('--pc-las',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Export the georeferenced point cloud in LAS format. Default: %(default)s')
-
- parser.add_argument('--pc-ept',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Export the georeferenced point cloud in Entwine Point Tile (EPT) format. Default: %(default)s')
-
- parser.add_argument('--pc-copc',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Save the georeferenced point cloud in Cloud Optimized Point Cloud (COPC) format. Default: %(default)s')
-
- parser.add_argument('--pc-filter',
- metavar='',
- action=StoreValue,
- type=float,
- default=2.5,
- help='Filters the point cloud by removing points that deviate more than N standard deviations from the local mean. Set to 0 to disable filtering. '
- 'Default: %(default)s')
-
- parser.add_argument('--pc-sample',
- metavar='',
- action=StoreValue,
- type=float,
- default=0,
- help='Filters the point cloud by keeping only a single point around a radius N (in meters). This can be useful to limit the output resolution of the point cloud and remove duplicate points. Set to 0 to disable sampling. '
- 'Default: %(default)s')
-
- parser.add_argument('--pc-tile',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Reduce the memory usage needed for depthmap fusion by splitting large scenes into tiles. Turn this on if your machine doesn\'t have much RAM and/or you\'ve set --pc-quality to high or ultra. Experimental. '
- 'Default: %(default)s')
-
- parser.add_argument('--pc-geometric',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Improve the accuracy of the point cloud by computing geometrically consistent depthmaps. This increases processing time, but can improve results in urban scenes. '
- 'Default: %(default)s')
-
- parser.add_argument('--smrf-scalar',
- metavar='',
- action=StoreValue,
- type=float,
- default=1.25,
- help='Simple Morphological Filter elevation scalar parameter. '
- 'Default: %(default)s')
-
- parser.add_argument('--smrf-slope',
- metavar='',
- action=StoreValue,
- type=float,
- default=0.15,
- help='Simple Morphological Filter slope parameter (rise over run). '
- 'Default: %(default)s')
-
- parser.add_argument('--smrf-threshold',
- metavar='',
- action=StoreValue,
- type=float,
- default=0.5,
- help='Simple Morphological Filter elevation threshold parameter (meters). '
- 'Default: %(default)s')
-
- parser.add_argument('--smrf-window',
- metavar='',
- action=StoreValue,
- type=float,
- default=18.0,
- help='Simple Morphological Filter window radius parameter (meters). '
- 'Default: %(default)s')
-
- parser.add_argument('--texturing-data-term',
- metavar='',
- action=StoreValue,
- default='gmi',
- choices=['gmi', 'area'],
- help=('When texturing the 3D mesh, for each triangle, choose to prioritize images with sharp features (gmi) or those that cover the largest area (area). Default: %(default)s'))
-
- parser.add_argument('--texturing-outlier-removal-type',
- metavar='',
- action=StoreValue,
- default='gauss_clamping',
- choices=['none', 'gauss_clamping', 'gauss_damping'],
- help=('Type of photometric outlier removal method. Can be one of: %(choices)s. Default: %(default)s'))
-
- parser.add_argument('--texturing-skip-global-seam-leveling',
- action=StoreTrue,
- nargs=0,
- default=False,
- help=('Skip normalization of colors across all images. Useful when processing radiometric data. Default: %(default)s'))
-
- parser.add_argument('--texturing-skip-local-seam-leveling',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Skip the blending of colors near seams. Default: %(default)s')
-
- parser.add_argument('--texturing-keep-unseen-faces',
- action=StoreTrue,
- nargs=0,
- default=False,
- help=('Keep faces in the mesh that are not seen in any camera. '
- 'Default: %(default)s'))
-
- parser.add_argument('--texturing-tone-mapping',
- metavar='',
- action=StoreValue,
- choices=['none', 'gamma'],
- default='none',
- help='Turn on gamma tone mapping or none for no tone '
- 'mapping. Can be one of %(choices)s. '
- 'Default: %(default)s ')
-
- parser.add_argument('--gcp',
- metavar='',
- action=StoreValue,
- default=None,
- help=('Path to the file containing the ground control '
- 'points used for georeferencing. '
- 'The file needs to '
- 'use the following format: \n'
-                              'EPSG:<code> or <+proj definition>\n'
- 'geo_x geo_y geo_z im_x im_y image_name [gcp_name] [extra1] [extra2]\n'
- 'Default: %(default)s'))
-
- parser.add_argument('--geo',
- metavar='',
- action=StoreValue,
- default=None,
- help=('Path to the image geolocation file containing the camera center coordinates used for georeferencing. '
- 'Note that omega/phi/kappa are currently not supported (you can set them to 0). '
- 'The file needs to '
- 'use the following format: \n'
-                              'EPSG:<code> or <+proj definition>\n'
- 'image_name geo_x geo_y geo_z [omega (degrees)] [phi (degrees)] [kappa (degrees)] [horz accuracy (meters)] [vert accuracy (meters)]\n'
- 'Default: %(default)s'))
-
- parser.add_argument('--use-exif',
- action=StoreTrue,
- nargs=0,
- default=False,
- help=('Use this tag if you have a GCP File but '
- 'want to use the EXIF information for georeferencing instead. Default: %(default)s'))
-
- parser.add_argument('--dtm',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Use this tag to build a DTM (Digital Terrain Model, ground only) using a simple '
- 'morphological filter. Check the --dem* and --smrf* parameters for finer tuning. Default: %(default)s')
-
- parser.add_argument('--dsm',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Use this tag to build a DSM (Digital Surface Model, ground + objects) using a progressive '
- 'morphological filter. Check the --dem* parameters for finer tuning. Default: %(default)s')
-
- parser.add_argument('--dem-gapfill-steps',
- metavar='',
- action=StoreValue,
- default=3,
- type=int,
- help='Number of steps used to fill areas with gaps. Set to 0 to disable gap filling. '
- 'Starting with a radius equal to the output resolution, N different DEMs are generated with '
- 'progressively bigger radius using the inverse distance weighted (IDW) algorithm '
- 'and merged together. Remaining gaps are then merged using nearest neighbor interpolation. '
- 'Default: %(default)s')
-
- parser.add_argument('--dem-resolution',
- metavar='',
- action=StoreValue,
- type=float,
- default=5,
- help='DSM/DTM resolution in cm / pixel. Note that this value is capped to 2x the ground sampling distance (GSD) estimate. To remove the cap, check --ignore-gsd also.'
- ' Default: %(default)s')
-
- parser.add_argument('--dem-decimation',
- metavar='',
- action=StoreValue,
- default=1,
- type=int,
- help='Decimate the points before generating the DEM. 1 is no decimation (full quality). '
- '100 decimates ~99%% of the points. Useful for speeding up generation of DEM results in very large datasets. Default: %(default)s')
-
- parser.add_argument('--dem-euclidean-map',
- action=StoreTrue,
- nargs=0,
- default=False,
-                        help='Computes a euclidean raster map for each DEM. '
- 'The map reports the distance from each cell to the nearest '
- 'NODATA value (before any hole filling takes place). '
- 'This can be useful to isolate the areas that have been filled. '
- 'Default: '
- '%(default)s')
-
- parser.add_argument('--orthophoto-resolution',
-                        metavar='<float > 0.0>',
- action=StoreValue,
- default=5,
- type=float,
- help=('Orthophoto resolution in cm / pixel. Note that this value is capped by a ground sampling distance (GSD) estimate. To remove the cap, check --ignore-gsd also. '
- 'Default: %(default)s'))
-
- parser.add_argument('--orthophoto-no-tiled',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Set this parameter if you want a striped GeoTIFF. '
- 'Default: %(default)s')
-
- parser.add_argument('--orthophoto-png',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Set this parameter if you want to generate a PNG rendering of the orthophoto. '
- 'Default: %(default)s')
-
- parser.add_argument('--orthophoto-kmz',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Set this parameter if you want to generate a Google Earth (KMZ) rendering of the orthophoto. '
- 'Default: %(default)s')
-
-
- parser.add_argument('--orthophoto-compression',
- metavar='',
- action=StoreValue,
- type=str,
- choices=['JPEG', 'LZW', 'PACKBITS', 'DEFLATE', 'LZMA', 'NONE'],
- default='DEFLATE',
- help='Set the compression to use for orthophotos. Can be one of: %(choices)s. Default: %(default)s')
-
- parser.add_argument('--orthophoto-cutline',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Generates a polygon around the cropping area '
- 'that cuts the orthophoto around the edges of features. This polygon '
- 'can be useful for stitching seamless mosaics with multiple overlapping orthophotos. '
- 'Default: '
- '%(default)s')
-
- parser.add_argument('--tiles',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Generate static tiles for orthophotos and DEMs that are '
- 'suitable for viewers like Leaflet or OpenLayers. '
- 'Default: %(default)s')
-
- parser.add_argument('--3d-tiles',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Generate OGC 3D Tiles outputs. Default: %(default)s')
-
- parser.add_argument('--rolling-shutter',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Turn on rolling shutter correction. If the camera '
- 'has a rolling shutter and the images were taken in motion, you can turn on this option '
- 'to improve the accuracy of the results. See also --rolling-shutter-readout. '
- 'Default: %(default)s')
-
- parser.add_argument('--rolling-shutter-readout',
- type=float,
- action=StoreValue,
- metavar='',
- default=0,
- help='Override the rolling shutter readout time for your camera sensor (in milliseconds), instead of using the rolling shutter readout database. '
- 'Note that not all cameras are present in the database. Set to 0 to use the database value. '
- 'Default: %(default)s')
-
- parser.add_argument('--build-overviews',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Build orthophoto overviews for faster display in programs such as QGIS. Default: %(default)s')
-
- parser.add_argument('--cog',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Create Cloud-Optimized GeoTIFFs instead of normal GeoTIFFs. Default: %(default)s')
-
-
- parser.add_argument('--verbose', '-v',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Print additional messages to the console. '
- 'Default: %(default)s')
-
- parser.add_argument('--copy-to',
- metavar='',
- action=StoreValue,
- help='Copy output results to this folder after processing.')
-
- parser.add_argument('--time',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Generates a benchmark file with runtime info. '
- 'Default: %(default)s')
-
- parser.add_argument('--debug',
- action=StoreTrue,
- nargs=0,
- default=False,
- help='Print debug messages. Default: %(default)s')
-
- parser.add_argument('--version',
- action='version',
- version='ODM {0}'.format(__version__),
- help='Displays version number and exits. ')
-
- parser.add_argument('--split',
- type=int,
- action=StoreValue,
- default=999999,
- metavar='',
- help='Average number of images per submodel. When '
- 'splitting a large dataset into smaller '
- 'submodels, images are grouped into clusters. '
- 'This value regulates the number of images that '
- 'each cluster should have on average. Default: %(default)s')
-
- parser.add_argument('--split-overlap',
- type=float,
- action=StoreValue,
- metavar='',
- default=150,
- help='Radius of the overlap between submodels. '
- 'After grouping images into clusters, images '
- 'that are closer than this radius to a cluster '
- 'are added to the cluster. This is done to ensure '
- 'that neighboring submodels overlap. Default: %(default)s')
-
- parser.add_argument('--split-image-groups',
- metavar='',
- action=StoreValue,
- default=None,
- help=('Path to the image groups file that controls how images should be split into groups. '
- 'The file needs to use the following format: \n'
- 'image_name group_name\n'
- 'Default: %(default)s'))
- # parser.add_argument('--split-multitracks',
- # action=StoreTrue,
- # nargs=0,
- # default=False,
- # help='Split multi-track reconstructions.')
-
- parser.add_argument('--sm-cluster',
- metavar='',
- action=StoreValue,
- type=url_string,
- default=None,
- help='URL to a ClusterODM instance '
- 'for distributing a split-merge workflow on '
- 'multiple nodes in parallel. '
- 'Default: %(default)s')
-
- parser.add_argument('--merge',
- metavar='',
- action=StoreValue,
- default='all',
- choices=['all', 'pointcloud', 'orthophoto', 'dem'],
- help=('Choose what to merge in the merge step in a split dataset. '
- 'By default all available outputs are merged. '
- 'Options: %(choices)s. Default: '
- '%(default)s'))
-
- parser.add_argument('--force-gps',
- action=StoreTrue,
- nargs=0,
- default=False,
-                        help=('Use images\' GPS exif data for reconstruction, even if there are GCPs present. '
- 'This flag is useful if you have high precision GPS measurements. '
- 'If there are no GCPs, this flag does nothing. Default: %(default)s'))
-
- parser.add_argument('--gps-accuracy',
- type=float,
- action=StoreValue,
- metavar='',
- default=10,
- help='Set a value in meters for the GPS Dilution of Precision (DOP) '
- 'information for all images. If your images are tagged '
- 'with high precision GPS information (RTK), this value will be automatically '
- 'set accordingly. You can use this option to manually set it in case the reconstruction '
- 'fails. Lowering this option can sometimes help control bowling-effects over large areas. Default: %(default)s')
-
- parser.add_argument('--optimize-disk-space',
- action=StoreTrue,
- nargs=0,
- default=False,
- help=('Delete heavy intermediate files to optimize disk space usage. This '
- 'affects the ability to restart the pipeline from an intermediate stage, '
- 'but allows datasets to be processed on machines that don\'t have sufficient '
- 'disk space available. Default: %(default)s'))
-
- parser.add_argument('--pc-rectify',
- action=StoreTrue,
- nargs=0,
- default=False,
- help=('Perform ground rectification on the point cloud. This means that wrongly classified ground '
- 'points will be re-classified and gaps will be filled. Useful for generating DTMs. '
- 'Default: %(default)s'))
-
- parser.add_argument('--primary-band',
- metavar='',
- action=StoreValue,
- default="auto",
- type=str,
- help=('When processing multispectral datasets, you can specify the name of the primary band that will be used for reconstruction. '
- 'It\'s recommended to choose a band which has sharp details and is in focus. '
- 'Default: %(default)s'))
-
- parser.add_argument('--skip-band-alignment',
- action=StoreTrue,
- nargs=0,
- default=False,
- help=('When processing multispectral datasets, ODM will automatically align the images for each band. '
- 'If the images have been postprocessed and are already aligned, use this option. '
- 'Default: %(default)s'))
-
- args = parser.parse_args(argv)
-
- # check that the project path setting has been set properly
- if not args.project_path:
- log.ODM_ERROR('You need to set the project path in the '
- 'settings.yaml file before you can run ODM, '
-                      'or use `--project-path <path>`. Run `python3 '
- 'run.py --help` for more information. ')
- sys.exit(1)
-
- if args.fast_orthophoto:
- log.ODM_INFO('Fast orthophoto is turned on, automatically setting --skip-3dmodel')
- args.skip_3dmodel = True
-
- if args.pc_rectify and not args.pc_classify:
- log.ODM_INFO("Ground rectify is turned on, automatically turning on point cloud classification")
- args.pc_classify = True
-
- if args.dtm and not args.pc_classify:
- log.ODM_INFO("DTM is turned on, automatically turning on point cloud classification")
- args.pc_classify = True
-
- if args.skip_3dmodel and args.use_3dmesh:
- log.ODM_WARNING('--skip-3dmodel is set, but so is --use-3dmesh. --skip-3dmodel will be ignored.')
- args.skip_3dmodel = False
-
- if args.orthophoto_cutline and not args.crop:
- log.ODM_WARNING("--orthophoto-cutline is set, but --crop is not. --crop will be set to 0.01")
- args.crop = 0.01
-
- if args.sm_cluster:
- try:
- Node.from_url(args.sm_cluster).info()
- except exceptions.NodeConnectionError as e:
- log.ODM_ERROR("Cluster node seems to be offline: %s" % str(e))
- sys.exit(1)
-
- return args
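`config()` parses `argv` once and caches the namespace in the module-level `args`, so later calls without an explicit `argv` are free, and the post-parse checks encode option interactions (`--dtm` implies `--pc-classify`, `--fast-orthophoto` implies `--skip-3dmodel`, and so on). A sketch of programmatic use; it assumes a full ODM checkout so that `settings.yaml` and `VERSION` resolve:

```python
from opendm import config

args = config.config(["--project-path", "/datasets", "--dtm", "mydataset"])
print(args.name)         # "mydataset"
print(args.pc_classify)  # True, implied by --dtm

# argv=None returns the namespace cached by the first parse
assert config.config() is args
```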
diff --git a/o/ODM/ODM-2.8.7/opendm/context.py b/o/ODM/ODM-2.8.7/opendm/context.py
deleted file mode 100644
index 293d6cb5..00000000
--- a/o/ODM/ODM-2.8.7/opendm/context.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import os
-import sys
-import multiprocessing
-
-# Define some needed locations
-current_path = os.path.abspath(os.path.dirname(__file__))
-root_path, _ = os.path.split(current_path)
-
-superbuild_path = os.path.join(root_path, 'SuperBuild')
-superbuild_bin_path = os.path.join(superbuild_path, 'install', 'bin')
-
-# add opencv,opensfm to python path
-python_packages_paths = [os.path.join(superbuild_path, p) for p in [
- 'install/lib/python3.9/dist-packages',
- 'install/lib/python3.8/dist-packages',
- 'install/lib/python3/dist-packages',
- 'install/bin/opensfm'
-]]
-for p in python_packages_paths:
- sys.path.append(p)
-
-
-# define opensfm path
-opensfm_path = os.path.join(superbuild_bin_path, "opensfm")
-
-poisson_recon_path = os.path.join(superbuild_bin_path, 'PoissonRecon')
-dem2mesh_path = os.path.join(superbuild_bin_path, 'dem2mesh')
-dem2points_path = os.path.join(superbuild_bin_path, 'dem2points')
-
-# define mvstex path
-mvstex_path = os.path.join(superbuild_bin_path, "texrecon")
-
-# openmvs paths
-omvs_densify_path = os.path.join(superbuild_bin_path, "OpenMVS", "DensifyPointCloud")
-omvs_reconstructmesh_path = os.path.join(superbuild_bin_path, "OpenMVS", "ReconstructMesh")
-
-fpcfilter_path = os.path.join(superbuild_bin_path, "FPCFilter")
-
-odm_orthophoto_path = os.path.join(superbuild_bin_path, "odm_orthophoto")
-settings_path = os.path.join(root_path, 'settings.yaml')
-
-# Define supported image extensions
-supported_extensions = {'.jpg','.jpeg','.png', '.tif', '.tiff', '.bmp'}
-
-# Define the number of cores
-num_cores = multiprocessing.cpu_count()
-
-
-# Print python paths if invoked as a script
-if __name__ == "__main__":
- print("export PYTHONPATH=" + ":".join(python_packages_paths))
\ No newline at end of file
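Everything in `context` is computed at import time from the module's own location, which is why it can double as a script that prints the `PYTHONPATH` needed to reach the SuperBuild's bundled Python packages. A small sketch of the values downstream modules rely on, assuming the ODM source tree layout:

```python
from opendm import context

print(context.superbuild_bin_path)              # <root>/SuperBuild/install/bin
print(context.settings_path)                    # <root>/settings.yaml
print(context.num_cores)                        # default for --max-concurrency
print(".tiff" in context.supported_extensions)  # True
```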
diff --git a/o/ODM/ODM-2.8.7/opendm/cropper.py b/o/ODM/ODM-2.8.7/opendm/cropper.py
deleted file mode 100644
index 54ccb9ba..00000000
--- a/o/ODM/ODM-2.8.7/opendm/cropper.py
+++ /dev/null
@@ -1,269 +0,0 @@
-from opendm import context
-from opendm.system import run
-from opendm import log
-from opendm.point_cloud import export_summary_json
-from osgeo import ogr
-import json, os
-from opendm.concurrency import get_max_memory
-from opendm.utils import double_quote
-
-class Cropper:
- def __init__(self, storage_dir, files_prefix = "crop"):
- self.storage_dir = storage_dir
- self.files_prefix = files_prefix
-
- def path(self, suffix):
- """
-        @return a path inside storage_dir, prefixed with files_prefix
- """
- return os.path.join(self.storage_dir, '{}.{}'.format(self.files_prefix, suffix))
-
- @staticmethod
- def crop(gpkg_path, geotiff_path, gdal_options, keep_original=True, warp_options=[]):
- if not os.path.exists(gpkg_path) or not os.path.exists(geotiff_path):
- log.ODM_WARNING("Either {} or {} does not exist, will skip cropping.".format(gpkg_path, geotiff_path))
- return geotiff_path
-
- log.ODM_INFO("Cropping %s" % geotiff_path)
-
- # Rename original file
- # path/to/odm_orthophoto.tif --> path/to/odm_orthophoto.original.tif
-
- path, filename = os.path.split(geotiff_path)
- # path = path/to
- # filename = odm_orthophoto.tif
-
- basename, ext = os.path.splitext(filename)
- # basename = odm_orthophoto
- # ext = .tif
-
- original_geotiff = os.path.join(path, "{}.original{}".format(basename, ext))
- os.replace(geotiff_path, original_geotiff)
-
- try:
- kwargs = {
- 'gpkg_path': double_quote(gpkg_path),
- 'geotiffInput': double_quote(original_geotiff),
- 'geotiffOutput': double_quote(geotiff_path),
- 'options': ' '.join(map(lambda k: '-co {}={}'.format(k, gdal_options[k]), gdal_options)),
- 'warpOptions': ' '.join(warp_options),
- 'max_memory': get_max_memory()
- }
-
- run('gdalwarp -cutline {gpkg_path} '
- '-crop_to_cutline '
- '{options} '
- '{warpOptions} '
- '{geotiffInput} '
- '{geotiffOutput} '
- '--config GDAL_CACHEMAX {max_memory}%'.format(**kwargs))
-
- if not keep_original:
- os.remove(original_geotiff)
-
- except Exception as e:
- log.ODM_WARNING('Something went wrong while cropping: {}'.format(e))
-
- # Revert rename
- os.replace(original_geotiff, geotiff_path)
-
- return geotiff_path
-
- @staticmethod
- def merge_bounds(input_bound_files, output_bounds, buffer_distance = 0):
- """
- Merge multiple bound files into a single bound computed from the convex hull
- of all bounds (minus a buffer distance in meters)
- """
- geomcol = ogr.Geometry(ogr.wkbGeometryCollection)
-
- driver = ogr.GetDriverByName('GPKG')
- srs = None
-
- for input_bound_file in input_bound_files:
-            ds = driver.Open(input_bound_file, 0) # read-only
-
- layer = ds.GetLayer()
- srs = layer.GetSpatialRef()
-
- # Collect all Geometry
- for feature in layer:
- geomcol.AddGeometry(feature.GetGeometryRef())
-
- ds = None
-
- # Calculate convex hull
- convexhull = geomcol.ConvexHull()
-
- # If buffer distance is specified
- # Create two buffers, one shrunk by
- # N + 3 and then that buffer expanded by 3
- # so that we get smooth corners. \m/
- BUFFER_SMOOTH_DISTANCE = 3
-
- if buffer_distance > 0:
- convexhull = convexhull.Buffer(-(buffer_distance + BUFFER_SMOOTH_DISTANCE))
- convexhull = convexhull.Buffer(BUFFER_SMOOTH_DISTANCE)
-
- # Save to a new file
- if os.path.exists(output_bounds):
- driver.DeleteDataSource(output_bounds)
-
- out_ds = driver.CreateDataSource(output_bounds)
- layer = out_ds.CreateLayer("convexhull", srs=srs, geom_type=ogr.wkbPolygon)
-
- feature_def = layer.GetLayerDefn()
- feature = ogr.Feature(feature_def)
- feature.SetGeometry(convexhull)
- layer.CreateFeature(feature)
- feature = None
-
- # Save and close output data source
- out_ds = None
-
- def create_bounds_geojson(self, pointcloud_path, buffer_distance = 0, decimation_step=40):
- """
- Compute a buffered polygon around the data extents (not just a bounding box)
- of the given point cloud.
-
- @return filename to GeoJSON containing the polygon
- """
- if not os.path.exists(pointcloud_path):
- log.ODM_WARNING('Point cloud does not exist, cannot generate bounds {}'.format(pointcloud_path))
- return ''
-
- # Do decimation prior to extracting boundary information
- decimated_pointcloud_path = self.path('decimated.las')
-
- run("pdal translate -i \"{}\" "
- "-o \"{}\" "
- "decimation "
- "--filters.decimation.step={} ".format(pointcloud_path, decimated_pointcloud_path, decimation_step))
-
- if not os.path.exists(decimated_pointcloud_path):
- log.ODM_WARNING('Could not decimate point cloud, thus cannot generate GPKG bounds {}'.format(decimated_pointcloud_path))
- return ''
-
- # Use PDAL to dump boundary information
- # then read the information back
-
- boundary_file_path = self.path('boundary.json')
-
- run('pdal info --boundary --filters.hexbin.edge_size=1 --filters.hexbin.threshold=0 "{0}" > "{1}"'.format(decimated_pointcloud_path, boundary_file_path))
-
- pc_geojson_boundary_feature = None
-
- with open(boundary_file_path, 'r') as f:
- json_f = json.loads(f.read())
- pc_geojson_boundary_feature = json_f['boundary']['boundary_json']
-
- if pc_geojson_boundary_feature is None: raise RuntimeError("Could not determine point cloud boundaries")
-
- # Write bounds to GeoJSON
- tmp_bounds_geojson_path = self.path('tmp-bounds.geojson')
- with open(tmp_bounds_geojson_path, "w") as f:
- f.write(json.dumps({
- "type": "FeatureCollection",
- "features": [{
- "type": "Feature",
- "geometry": pc_geojson_boundary_feature
- }]
- }))
-
- # Create a convex hull around the boundary
- # as to encompass the entire area (no holes)
- driver = ogr.GetDriverByName('GeoJSON')
- ds = driver.Open(tmp_bounds_geojson_path, 0) # read-only
- layer = ds.GetLayer()
-
- # Collect all Geometry
- geomcol = ogr.Geometry(ogr.wkbGeometryCollection)
- for feature in layer:
- geomcol.AddGeometry(feature.GetGeometryRef())
-
- # Calculate convex hull
- convexhull = geomcol.ConvexHull()
-
- # If buffer distance is specified
- # Create two buffers, one shrunk by
- # N + 3 and then that buffer expanded by 3
- # so that we get smooth corners. \m/
- BUFFER_SMOOTH_DISTANCE = 3
-
- if buffer_distance > 0:
- # For small areas, check that buffering doesn't obliterate
- # our hull
- tmp = convexhull.Buffer(-(buffer_distance + BUFFER_SMOOTH_DISTANCE))
- tmp = tmp.Buffer(BUFFER_SMOOTH_DISTANCE)
- if tmp.Area() > 0:
- convexhull = tmp
- else:
- log.ODM_WARNING("Very small crop area detected, we will not smooth it.")
-
- # Save to a new file
- bounds_geojson_path = self.path('bounds.geojson')
- if os.path.exists(bounds_geojson_path):
- os.remove(bounds_geojson_path)
-
- out_ds = driver.CreateDataSource(bounds_geojson_path)
- layer = out_ds.CreateLayer("convexhull", geom_type=ogr.wkbPolygon)
-
- feature_def = layer.GetLayerDefn()
- feature = ogr.Feature(feature_def)
- feature.SetGeometry(convexhull)
- layer.CreateFeature(feature)
- feature = None
-
- # Save and close data sources
- out_ds = ds = None
-
- # Remove decimated point cloud
- if os.path.exists(decimated_pointcloud_path):
- os.remove(decimated_pointcloud_path)
-
- # Remove tmp bounds
- if os.path.exists(tmp_bounds_geojson_path):
- os.remove(tmp_bounds_geojson_path)
-
- return bounds_geojson_path
-
-
- def create_bounds_gpkg(self, pointcloud_path, buffer_distance = 0, decimation_step=40):
- """
- Compute a buffered polygon around the data extents (not just a bounding box)
- of the given point cloud.
-
- @return filename to Geopackage containing the polygon
- """
- if not os.path.exists(pointcloud_path):
- log.ODM_WARNING('Point cloud does not exist, cannot generate GPKG bounds {}'.format(pointcloud_path))
- return ''
-
- bounds_geojson_path = self.create_bounds_geojson(pointcloud_path, buffer_distance, decimation_step)
-
- summary_file_path = os.path.join(self.storage_dir, '{}.summary.json'.format(self.files_prefix))
- export_summary_json(pointcloud_path, summary_file_path)
-
- pc_proj4 = None
- with open(summary_file_path, 'r') as f:
- json_f = json.loads(f.read())
- pc_proj4 = json_f['summary']['srs']['proj4']
-
- if pc_proj4 is None: raise RuntimeError("Could not determine point cloud proj4 declaration")
-
- bounds_gpkg_path = os.path.join(self.storage_dir, '{}.bounds.gpkg'.format(self.files_prefix))
-
- if os.path.isfile(bounds_gpkg_path):
- os.remove(bounds_gpkg_path)
-
- # Convert bounds to GPKG
- kwargs = {
- 'input': double_quote(bounds_geojson_path),
- 'output': double_quote(bounds_gpkg_path),
- 'proj4': pc_proj4
- }
-
- run('ogr2ogr -overwrite -f GPKG -a_srs "{proj4}" {output} {input}'.format(**kwargs))
-
- return bounds_gpkg_path
-
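
A minimal usage sketch of the Cropper class above, assuming an ODM runtime where GDAL, PDAL and the opendm package are importable; the project paths are hypothetical:

from opendm.cropper import Cropper

# Hypothetical project layout; both inputs are normally produced by the pipeline.
cropper = Cropper('/project/odm_georeferencing', files_prefix='crop')

# Derive a buffered crop boundary (GPKG) from the georeferenced point cloud.
bounds_gpkg = cropper.create_bounds_gpkg(
    '/project/odm_georeferencing/odm_georeferenced_model.laz', buffer_distance=3)

# Crop the orthophoto in place, keeping the uncropped original alongside it.
Cropper.crop(bounds_gpkg, '/project/odm_orthophoto/odm_orthophoto.tif',
             {'COMPRESS': 'DEFLATE', 'TILED': 'YES'}, keep_original=True)
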
diff --git a/o/ODM/ODM-2.8.7/opendm/cutline.py b/o/ODM/ODM-2.8.7/opendm/cutline.py
deleted file mode 100644
index e3f8bf8b..00000000
--- a/o/ODM/ODM-2.8.7/opendm/cutline.py
+++ /dev/null
@@ -1,188 +0,0 @@
-import os
-import shutil
-import rasterio
-import fiona
-import numpy as np
-import math
-import sys
-from opendm import log
-from opendm import io
-from opendm import concurrency
-from opendm import get_image_size
-from opendm import system
-
-from skimage.feature import canny
-from skimage.draw import line
-from skimage.graph import route_through_array
-import shapely
-from shapely.geometry import LineString, mapping, shape
-from shapely.ops import polygonize, unary_union
-
-if sys.platform == 'win32':
- # Temporary fix for: ValueError: GEOSGeom_createLinearRing_r returned a NULL pointer
- # https://github.com/Toblerity/Shapely/issues/1005
- shapely.speedups.disable()
-
-def write_raster(data, file):
- profile = {
- 'driver': 'GTiff',
- 'width': data.shape[1],
- 'height': data.shape[0],
- 'count': 1,
- 'dtype': 'float32',
- 'transform': None,
- 'nodata': None,
- 'crs': None
- }
-
- with rasterio.open(file, 'w', BIGTIFF="IF_SAFER", **profile) as wout:
- wout.write(data, 1)
-
-def compute_cutline(orthophoto_file, crop_area_file, destination, max_concurrency=1, scale=1):
- if io.file_exists(orthophoto_file) and io.file_exists(crop_area_file):
- log.ODM_INFO("Computing cutline")
-
- scale = max(0.0001, min(1, scale))
- scaled_orthophoto = None
- if scale < 1:
- log.ODM_INFO("Scaling orthophoto to %s%% to compute cutline" % (scale * 100))
-
- scaled_orthophoto = io.related_file_path(orthophoto_file, postfix=".scaled")
- # Scale orthophoto before computing cutline
- system.run("gdal_translate -outsize {}% 0 "
- "-co NUM_THREADS={} "
- "--config GDAL_CACHEMAX {}% "
- '"{}" "{}"'.format(
- scale * 100,
- max_concurrency,
- concurrency.get_max_memory(),
- orthophoto_file,
- scaled_orthophoto
- ))
-
- orthophoto_file = scaled_orthophoto
-
- # open raster
- f = rasterio.open(orthophoto_file)
- rast = f.read(1) # First band only
- height, width = rast.shape
- number_lines = int(max(8, math.ceil(min(width, height) / 256.0)))
- line_hor_offset = int(width / number_lines)
- line_ver_offset = int(height / number_lines)
-
- if line_hor_offset <= 2 or line_ver_offset <= 2:
- log.ODM_WARNING("Cannot compute cutline, orthophoto is too small (%sx%spx)" % (width, height))
- return
-
- crop_f = fiona.open(crop_area_file, 'r')
- if len(crop_f) == 0:
- log.ODM_WARNING("Crop area is empty, cannot compute cutline")
- return
-
- crop_poly = shape(crop_f[1]['geometry'])
- crop_f.close()
-
- linestrings = []
-
- # Compute canny edges on first band
- edges = canny(rast)
-
- def compute_linestrings(direction):
- log.ODM_INFO("Computing %s cutlines" % direction)
- # Initialize cost map
- cost_map = np.full((height, width), 1, dtype=np.float32)
-
- # Write edges to cost map
- cost_map[edges] = 0 # Low cost
-
- # Write "barrier, floor is lava" costs
- if direction == 'vertical':
- lines = [((i, 0), (i, height - 1)) for i in range(line_hor_offset, width - line_hor_offset, line_hor_offset)]
- points = []
- pad_x = int(line_hor_offset / 2.0)
- for i in range(0, len(lines)):
- a,b = lines[i]
- points.append(((a[0] - pad_x , a[1]), (b[0] - pad_x, b[1])))
- a,b = lines[-1]
- points.append(((a[0] + pad_x , a[1]), (b[0] + pad_x, b[1])))
- else:
- lines = [((0, j), (width - 1, j)) for j in range(line_ver_offset, height - line_ver_offset, line_ver_offset)]
- points = []
- pad_y = int(line_ver_offset / 2.0)
- for i in range(0, len(lines)):
- a,b = lines[i]
- points.append(((a[0] , a[1] - pad_y), (b[0], b[1] - pad_y)))
- a,b = lines[-1]
- points.append(((a[0] , a[1] + pad_y), (b[0], b[1] + pad_y)))
-
- for a, b in lines:
- rr,cc = line(*a, *b)
- cost_map[cc, rr] = 9999 # Lava
-
- # Calculate route
- for a, b in points:
- line_coords, cost = route_through_array(cost_map, (a[1], a[0]), (b[1], b[0]), fully_connected=True, geometric=True)
-
- # Convert to geographic
- geo_line_coords = [f.xy(*c) for c in line_coords]
-
- # Simplify
- ls = LineString(geo_line_coords)
- linestrings.append(ls.simplify(0.05, preserve_topology=False))
-
- compute_linestrings('vertical')
- compute_linestrings('horizontal')
-
-
- # Generate polygons and keep only those inside the crop area
- log.ODM_INFO("Generating polygons... this could take a bit.")
- polygons = []
- for p in polygonize(unary_union(linestrings)):
- if crop_poly.contains(p):
- polygons.append(p)
-
- # This should never happen
- if len(polygons) == 0:
- log.ODM_WARNING("No polygons, cannot compute cutline")
- return
-
- log.ODM_INFO("Merging polygons")
- cutline_polygons = unary_union(polygons)
- if not hasattr(cutline_polygons, '__getitem__'):
- cutline_polygons = [cutline_polygons]
-
- largest_cutline = cutline_polygons[0]
- max_area = largest_cutline.area
- for p in cutline_polygons:
- if p.area > max_area:
- max_area = p.area
- largest_cutline = p
-
- log.ODM_INFO("Largest cutline found: %s m^2" % max_area)
-
- meta = {
- 'crs': {'init': str(f.crs).lower() },
- 'driver': 'GPKG',
- 'schema': {
- 'properties': {},
- 'geometry': 'Polygon'
- }
- }
-
- # Remove previous
- if os.path.exists(destination):
- os.remove(destination)
-
- with fiona.open(destination, 'w', **meta) as sink:
- sink.write({
- 'geometry': mapping(largest_cutline),
- 'properties': {}
- })
- f.close()
- log.ODM_INFO("Wrote %s" % destination)
-
- # Cleanup
- if scaled_orthophoto is not None and os.path.exists(scaled_orthophoto):
- os.remove(scaled_orthophoto)
- else:
- log.ODM_WARNING("We've been asked to compute cutline, but either %s or %s is missing. Skipping..." % (orthophoto_file, crop_area_file))
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/__init__.py b/o/ODM/ODM-2.8.7/opendm/dem/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/commands.py b/o/ODM/ODM-2.8.7/opendm/dem/commands.py
deleted file mode 100755
index aa096e8b..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/commands.py
+++ /dev/null
@@ -1,354 +0,0 @@
-import os
-import sys
-import rasterio
-import numpy
-import math
-import time
-import shutil
-from opendm.system import run
-from opendm import point_cloud
-from opendm import io
-from opendm import system
-from opendm.concurrency import get_max_memory, parallel_map
-from scipy import ndimage
-from datetime import datetime
-from opendm.vendor.gdal_fillnodata import main as gdal_fillnodata
-from opendm import log
-try:
- import Queue as queue
-except ImportError:
- import queue
-import threading
-
-from .ground_rectification.rectify import run_rectification
-from . import pdal
-
-try:
- # GDAL >= 3.3
- from osgeo_utils.gdal_proximity import main as gdal_proximity
-except ModuleNotFoundError:
- # GDAL <= 3.2
- try:
- from osgeo.utils.gdal_proximity import main as gdal_proximity
- except:
- pass
-
-def classify(lasFile, scalar, slope, threshold, window, verbose=False):
- start = datetime.now()
-
- try:
- pdal.run_pdaltranslate_smrf(lasFile, lasFile, scalar, slope, threshold, window, verbose)
- except:
- log.ODM_WARNING("Error creating classified file %s" % lasFile)
-
- log.ODM_INFO('Created %s in %s' % (lasFile, datetime.now() - start))
- return lasFile
-
-def rectify(lasFile, debug=False, reclassify_threshold=5, min_area=750, min_points=500):
- start = datetime.now()
-
- try:
- # Currently there is no Python 2 lib that supports reading and writing LAZ, so we do the conversion manually until ODM is migrated to Python 3
- # When migration is done, we can move to pylas and avoid using PDAL for conversion
- tempLasFile = os.path.join(os.path.dirname(lasFile), 'tmp.las')
-
- # Convert LAZ to LAS
- cmd = [
- 'pdal',
- 'translate',
- '-i %s' % lasFile,
- '-o %s' % tempLasFile
- ]
- system.run(' '.join(cmd))
-
- log.ODM_INFO("Rectifying {} using with [reclassify threshold: {}, min area: {}, min points: {}]".format(lasFile, reclassify_threshold, min_area, min_points))
- run_rectification(
- input=tempLasFile, output=tempLasFile, debug=debug, \
- reclassify_plan='median', reclassify_threshold=reclassify_threshold, \
- extend_plan='surrounding', extend_grid_distance=5, \
- min_area=min_area, min_points=min_points)
-
- # Convert LAS to LAZ
- cmd = [
- 'pdal',
- 'translate',
- '-i %s' % tempLasFile,
- '-o %s' % lasFile
- ]
- system.run(' '.join(cmd))
- os.remove(tempLasFile)
-
- except Exception as e:
- raise Exception("Error rectifying ground in file %s: %s" % (lasFile, str(e)))
-
- log.ODM_INFO('Created %s in %s' % (lasFile, datetime.now() - start))
- return lasFile
-
-error = None
-
-def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56'], gapfill=True,
- outdir='', resolution=0.1, max_workers=1, max_tile_size=4096,
- verbose=False, decimation=None, keep_unfilled_copy=False,
- apply_smoothing=True):
- """ Create DEM from multiple radii, and optionally gapfill """
-
- global error
- error = None
-
- start = datetime.now()
-
- if not os.path.exists(outdir):
- log.ODM_INFO("Creating %s" % outdir)
- os.mkdir(outdir)
-
- extent = point_cloud.get_extent(input_point_cloud)
- log.ODM_INFO("Point cloud bounds are [minx: %s, maxx: %s] [miny: %s, maxy: %s]" % (extent['minx'], extent['maxx'], extent['miny'], extent['maxy']))
- ext_width = extent['maxx'] - extent['minx']
- ext_height = extent['maxy'] - extent['miny']
-
- w, h = (int(math.ceil(ext_width / float(resolution))),
- int(math.ceil(ext_height / float(resolution))))
-
- # Set a floor, no matter the resolution parameter
- # (sometimes a wrongly estimated scale of the model can cause the resolution
- # to be set unrealistically low, causing errors)
- RES_FLOOR = 64
- if w < RES_FLOOR and h < RES_FLOOR:
- prev_w, prev_h = w, h
-
- if w >= h:
- w, h = (RES_FLOOR, int(math.ceil(ext_height / ext_width * RES_FLOOR)))
- else:
- w, h = (int(math.ceil(ext_width / ext_height * RES_FLOOR)), RES_FLOOR)
-
- floor_ratio = prev_w / float(w)
- resolution *= floor_ratio
- radiuses = [str(float(r) * floor_ratio) for r in radiuses]
-
- log.ODM_WARNING("Really low resolution DEM requested %s will set floor at %s pixels. Resolution changed to %s. The scale of this reconstruction might be off." % ((prev_w, prev_h), RES_FLOOR, resolution))
-
- final_dem_pixels = w * h
-
- num_splits = int(max(1, math.ceil(math.log(math.ceil(final_dem_pixels / float(max_tile_size * max_tile_size)))/math.log(2))))
- num_tiles = num_splits * num_splits
- log.ODM_INFO("DEM resolution is %s, max tile size is %s, will split DEM generation into %s tiles" % ((h, w), max_tile_size, num_tiles))
-
- tile_bounds_width = ext_width / float(num_splits)
- tile_bounds_height = ext_height / float(num_splits)
-
- tiles = []
-
- for r in radiuses:
- minx = extent['minx']
-
- for x in range(num_splits):
- miny = extent['miny']
- if x == num_splits - 1:
- maxx = extent['maxx']
- else:
- maxx = minx + tile_bounds_width
-
- for y in range(num_splits):
- if y == num_splits - 1:
- maxy = extent['maxy']
- else:
- maxy = miny + tile_bounds_height
-
- filename = os.path.join(os.path.abspath(outdir), '%s_r%s_x%s_y%s.tif' % (dem_type, r, x, y))
-
- tiles.append({
- 'radius': r,
- 'bounds': {
- 'minx': minx,
- 'maxx': maxx,
- 'miny': miny,
- 'maxy': maxy
- },
- 'filename': filename
- })
-
- miny = maxy
- minx = maxx
-
- # Sort tiles by increasing radius
- tiles.sort(key=lambda t: float(t['radius']), reverse=True)
-
- def process_tile(q):
- log.ODM_INFO("Generating %s (%s, radius: %s, resolution: %s)" % (q['filename'], output_type, q['radius'], resolution))
-
- d = pdal.json_gdal_base(q['filename'], output_type, q['radius'], resolution, q['bounds'])
-
- if dem_type == 'dtm':
- d = pdal.json_add_classification_filter(d, 2)
-
- if decimation is not None:
- d = pdal.json_add_decimation_filter(d, decimation)
-
- pdal.json_add_readers(d, [input_point_cloud])
- pdal.run_pipeline(d, verbose=verbose)
-
- parallel_map(process_tile, tiles, max_workers)
-
- output_file = "%s.tif" % dem_type
- output_path = os.path.abspath(os.path.join(outdir, output_file))
-
- # Verify tile results
- for t in tiles:
- if not os.path.exists(t['filename']):
- raise Exception("Error creating %s, %s failed to be created" % (output_file, t['filename']))
-
- # Create virtual raster
- tiles_vrt_path = os.path.abspath(os.path.join(outdir, "tiles.vrt"))
- tiles_file_list = os.path.abspath(os.path.join(outdir, "tiles_list.txt"))
- with open(tiles_file_list, 'w') as f:
- for t in tiles:
- f.write(t['filename'] + '\n')
-
- run('gdalbuildvrt -input_file_list "%s" "%s" ' % (tiles_file_list, tiles_vrt_path))
-
- merged_vrt_path = os.path.abspath(os.path.join(outdir, "merged.vrt"))
- geotiff_tmp_path = os.path.abspath(os.path.join(outdir, 'tiles.tmp.tif'))
- geotiff_small_path = os.path.abspath(os.path.join(outdir, 'tiles.small.tif'))
- geotiff_small_filled_path = os.path.abspath(os.path.join(outdir, 'tiles.small_filled.tif'))
- geotiff_path = os.path.abspath(os.path.join(outdir, 'tiles.tif'))
-
- # Build GeoTIFF
- kwargs = {
- 'max_memory': get_max_memory(),
- 'threads': max_workers if max_workers else 'ALL_CPUS',
- 'tiles_vrt': tiles_vrt_path,
- 'merged_vrt': merged_vrt_path,
- 'geotiff': geotiff_path,
- 'geotiff_tmp': geotiff_tmp_path,
- 'geotiff_small': geotiff_small_path,
- 'geotiff_small_filled': geotiff_small_filled_path
- }
-
- if gapfill:
- # Sometimes, for some reason gdal_fillnodata.py
- # behaves strangely when reading data directly from a .VRT
- # so we need to convert to GeoTIFF first.
- run('gdal_translate '
- '-co NUM_THREADS={threads} '
- '-co BIGTIFF=IF_SAFER '
- '--config GDAL_CACHEMAX {max_memory}% '
- '"{tiles_vrt}" "{geotiff_tmp}"'.format(**kwargs))
-
- # Scale to 10% size
- run('gdal_translate '
- '-co NUM_THREADS={threads} '
- '-co BIGTIFF=IF_SAFER '
- '--config GDAL_CACHEMAX {max_memory}% '
- '-outsize 10% 0 '
- '"{geotiff_tmp}" "{geotiff_small}"'.format(**kwargs))
-
- # Fill scaled
- gdal_fillnodata(['.',
- '-co', 'NUM_THREADS=%s' % kwargs['threads'],
- '-co', 'BIGTIFF=IF_SAFER',
- '--config', 'GDAL_CACHEMAX', str(kwargs['max_memory']) + '%', # the GDAL config key is GDAL_CACHEMAX
- '-b', '1',
- '-of', 'GTiff',
- kwargs['geotiff_small'], kwargs['geotiff_small_filled']])
-
- # Merge filled scaled DEM with unfilled DEM using bilinear interpolation
- run('gdalbuildvrt -resolution highest -r bilinear "%s" "%s" "%s"' % (merged_vrt_path, geotiff_small_filled_path, geotiff_tmp_path))
- run('gdal_translate '
- '-co NUM_THREADS={threads} '
- '-co TILED=YES '
- '-co BIGTIFF=IF_SAFER '
- '-co COMPRESS=DEFLATE '
- '--config GDAL_CACHEMAX {max_memory}% '
- '"{merged_vrt}" "{geotiff}"'.format(**kwargs))
- else:
- run('gdal_translate '
- '-co NUM_THREADS={threads} '
- '-co TILED=YES '
- '-co BIGTIFF=IF_SAFER '
- '-co COMPRESS=DEFLATE '
- '--config GDAL_CACHEMAX {max_memory}% '
- '"{tiles_vrt}" "{geotiff}"'.format(**kwargs))
-
- if apply_smoothing:
- median_smoothing(geotiff_path, output_path)
- os.remove(geotiff_path)
- else:
- os.replace(geotiff_path, output_path)
-
- if os.path.exists(geotiff_tmp_path):
- if not keep_unfilled_copy:
- os.remove(geotiff_tmp_path)
- else:
- os.replace(geotiff_tmp_path, io.related_file_path(output_path, postfix=".unfilled"))
-
- for cleanup_file in [tiles_vrt_path, tiles_file_list, merged_vrt_path, geotiff_small_path, geotiff_small_filled_path]:
- if os.path.exists(cleanup_file): os.remove(cleanup_file)
- for t in tiles:
- if os.path.exists(t['filename']): os.remove(t['filename'])
-
- log.ODM_INFO('Completed %s in %s' % (output_file, datetime.now() - start))
-
-
-def compute_euclidean_map(geotiff_path, output_path, overwrite=False):
- if not os.path.exists(geotiff_path):
- log.ODM_WARNING("Cannot compute euclidean map (file does not exist: %s)" % geotiff_path)
- return
-
- nodata = -9999
- with rasterio.open(geotiff_path) as f:
- nodata = f.nodatavals[0]
-
- if not os.path.exists(output_path) or overwrite:
- log.ODM_INFO("Computing euclidean distance: %s" % output_path)
-
- if gdal_proximity is not None:
- try:
- gdal_proximity(['gdal_proximity.py', geotiff_path, output_path, '-values', str(nodata)])
- except Exception as e:
- log.ODM_WARNING("Cannot compute euclidean distance: %s" % str(e))
-
- if os.path.exists(output_path):
- return output_path
- else:
- log.ODM_WARNING("Cannot compute euclidean distance file: %s" % output_path)
- else:
- log.ODM_WARNING("Cannot compute euclidean map, gdal_proximity is missing")
-
- else:
- log.ODM_INFO("Found a euclidean distance map: %s" % output_path)
- return output_path
-
-
-def median_smoothing(geotiff_path, output_path, smoothing_iterations=1):
- """ Apply median smoothing """
- start = datetime.now()
-
- if not os.path.exists(geotiff_path):
- raise Exception('File %s does not exist!' % geotiff_path)
-
- log.ODM_INFO('Starting smoothing...')
-
- with rasterio.open(geotiff_path) as img:
- nodata = img.nodatavals[0]
- dtype = img.dtypes[0]
- arr = img.read()[0]
-
- nodata_locs = arr == nodata
-
- # Median filter (careful: changing the filter size might require tweaking
- # the lines below). There's another numpy function that takes care of
- # these edge cases, but it's slower.
- for i in range(smoothing_iterations):
- log.ODM_INFO("Smoothing iteration %s" % str(i + 1))
- arr = ndimage.median_filter(arr, size=9, output=dtype, mode='nearest')
-
- # Median filter leaves a bunch of zeros in nodata areas
- arr[nodata_locs] = nodata
-
- # write output
- with rasterio.open(output_path, 'w', BIGTIFF="IF_SAFER", **img.profile) as imgout:
- imgout.write(arr, 1)
-
- log.ODM_INFO('Completed smoothing to create %s in %s' % (output_path, datetime.now() - start))
-
- return output_path
\ No newline at end of file
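
A quick standalone check of the tile-splitting arithmetic used by create_dem above, handy for estimating how many tiles a given extent and resolution will produce (same formula, no ODM imports):

import math

def dem_tile_count(ext_width, ext_height, resolution, max_tile_size=4096):
    # Pixel dimensions of the final DEM at the requested resolution
    w = int(math.ceil(ext_width / float(resolution)))
    h = int(math.ceil(ext_height / float(resolution)))
    final_dem_pixels = w * h

    # Same split computation as create_dem
    num_splits = int(max(1, math.ceil(math.log(math.ceil(
        final_dem_pixels / float(max_tile_size * max_tile_size))) / math.log(2))))
    return num_splits * num_splits

# A 500m x 500m extent at 5cm/pixel is 10000x10000 px -> 9 tiles
print(dem_tile_count(500, 500, 0.05))
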
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/__init__.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/__init__.py
deleted file mode 100755
index e69de29b..00000000
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/bounds/__init__.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/bounds/__init__.py
deleted file mode 100755
index e69de29b..00000000
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/bounds/types.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/bounds/types.py
deleted file mode 100755
index da6a79ab..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/bounds/types.py
+++ /dev/null
@@ -1,84 +0,0 @@
-import numpy as np
-from scipy.spatial import Delaunay
-from ..point_cloud import PointCloud
-
-EPSILON = 0.00001
-
-class PolyBounds(object):
- def __init__(self, points):
- self.__points = points
- self.__delaunay = Delaunay(points)
- [x_min, y_min] = np.amin(points, axis=0)
- [x_max, y_max] = np.amax(points, axis=0)
- self._corners = (x_min, x_max, y_min, y_max)
-
- def keep_points_inside(self, point_cloud):
- """Return a new point cloud with the points from the given cloud that are inside the bounds"""
- mask = self.calculate_mask(point_cloud)
- return point_cloud[mask]
-
- def percentage_of_points_inside(self, points):
- if isinstance(points, PointCloud):
- points = points.get_xy()
- mask = self.calculate_mask(points)
- return np.count_nonzero(mask) * 100 / points.shape[0]
-
- def calculate_mask(self, points):
- """Calculate the mask that would filter out the points outside the bounds"""
- if isinstance(points, PointCloud):
- points = points.get_xy()
- return self.__delaunay.find_simplex(points) >= 0
-
- def center(self):
- (x_min, x_max, y_min, y_max) = self._corners
- return ((x_min + x_max) / 2, (y_min + y_max) / 2)
-
- def corners(self):
- return self._corners
-
-class BoxBounds(object):
- def __init__(self, x_min, x_max, y_min, y_max):
- self._corners = (x_min, x_max, y_min, y_max)
-
- def keep_points_inside(self, point_cloud):
- """Return a new point cloud with the points from the given cloud that are inside the bounds"""
- mask = self.calculate_mask(point_cloud)
- return point_cloud[mask]
-
- def percentage_of_points_inside(self, points):
- if isinstance(points, PointCloud):
- points = points.get_xy()
- mask = self.calculate_mask(points)
- return np.count_nonzero(mask) * 100 / points.shape[0]
-
- def calculate_mask(self, points):
- """Calculate the mask that would filter out the points outside the bounds"""
- if isinstance(points, PointCloud):
- points = points.get_xy()
- (x_min, x_max, y_min, y_max) = self._corners
- min = np.array([x_min, y_min])
- max = np.array([x_max, y_max])
-
- return np.all(np.logical_and(min <= points, points <= max), axis=1)
-
- def center(self):
- (x_min, x_max, y_min, y_max) = self._corners
- return ((x_min + x_max) / 2, (y_min + y_max) / 2)
-
- def corners(self):
- return self._corners
-
- def area(self):
- (x_min, x_max, y_min, y_max) = self._corners
- return (x_max - x_min) * (y_max - y_min)
-
- def divide_by_point(self, point):
- """Divide the box into four boxes, marked by the point. It is assumed that the point is inside the box"""
- [x_point, y_point] = point
- (x_min, x_max, y_min, y_max) = self._corners
- return [
- BoxBounds(x_min, x_point, y_min, y_point),
- BoxBounds(x_point + EPSILON, x_max, y_min, y_point),
- BoxBounds(x_min, x_point, y_point + EPSILON, y_max),
- BoxBounds(x_point + EPSILON, x_max, y_point + EPSILON, y_max)
- ]
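
A standalone illustration (not ODM code) of the inside-test PolyBounds relies on: Delaunay.find_simplex returns -1 for points outside the triangulated hull, so ">= 0" doubles as a point-in-polygon mask.

import numpy as np
from scipy.spatial import Delaunay

square = np.array([[0, 0], [0, 10], [10, 0], [10, 10]], dtype=float)
tri = Delaunay(square)

pts = np.array([[5, 5], [11, 5], [-1, -1]], dtype=float)
print(tri.find_simplex(pts) >= 0)  # [ True False False]
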
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/bounds/utils.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/bounds/utils.py
deleted file mode 100755
index 78c7300b..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/bounds/utils.py
+++ /dev/null
@@ -1,16 +0,0 @@
-import numpy as np
-from scipy.spatial import ConvexHull
-from .types import BoxBounds, PolyBounds
-
-def calculate_convex_hull_bounds(points):
- hull = ConvexHull(points)
- return PolyBounds(points[hull.vertices])
-
-def box_from_point_and_size(center, width, height):
- return BoxBounds(center[0] - width / 2, center[0] + width / 2, center[1] - height / 2, center[1] + height / 2)
-
-def box_from_cloud(point_cloud):
- xy = point_cloud.get_xy()
- [x_min, y_min] = np.amin(xy, axis=0)
- [x_max, y_max] = np.amax(xy, axis=0)
- return BoxBounds(x_min, x_max, y_min, y_max)
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/extra_dimensions/__init__.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/extra_dimensions/__init__.py
deleted file mode 100755
index e69de29b..00000000
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/extra_dimensions/dimension.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/extra_dimensions/dimension.py
deleted file mode 100755
index 261bc5c0..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/extra_dimensions/dimension.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import numpy as np
-from abc import ABCMeta, abstractmethod
-
-class Dimension(object):
- __metaclass__ = ABCMeta
-
- def __init__(self):
- super(Dimension, self).__init__()
-
- @abstractmethod
- def assign(self, *point_clouds, **kwargs):
- "Assign a value to the points on the partition"
-
- @abstractmethod
- def assign_default(self, point_cloud):
- "Assign a default value"
-
- @abstractmethod
- def get_name(self):
- "Return the name of the dimension"
-
- @abstractmethod
- def get_las_type(self):
- "Return the type of the values stored"
-
- def _set_values(self, point_cloud, values):
- point_cloud.add_dimension(self, values)
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/extra_dimensions/distance_dimension.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/extra_dimensions/distance_dimension.py
deleted file mode 100755
index 33b56835..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/extra_dimensions/distance_dimension.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import numpy as np
-from sklearn.linear_model import RANSACRegressor
-from .dimension import Dimension
-
-class DistanceDimension(Dimension):
- """Assign each point the distance to the estimated ground"""
-
- def __init__(self):
- super(DistanceDimension, self).__init__()
-
- def assign_default(self, point_cloud):
- default = np.full(point_cloud.len(), -1)
- super(DistanceDimension, self)._set_values(point_cloud, default)
-
- def assign(self, *point_clouds, **kwargs):
- for point_cloud in point_clouds:
- xy = point_cloud.get_xy()
-
- # Calculate RANSAC model
- model = RANSACRegressor().fit(xy, point_cloud.get_z())
-
- # Calculate angle between estimated plane and XY plane
- angle = self.__calculate_angle(model)
- if angle >= 45:
- # If the angle is higher than 45 degrees, then don't calculate the difference, since it will probably be way off
- diff = np.full(point_cloud.len(), 0)
- else:
- predicted = model.predict(xy)
- diff = point_cloud.get_z() - predicted
- # Ignore the diff when the diff is below the ground
- diff[diff < 0] = 0
- super(DistanceDimension, self)._set_values(point_cloud, diff)
-
- def get_name(self):
- return 'distance_to_ground'
-
- def get_las_type(self):
- return 10
-
- def __calculate_angle(self, model):
- "Calculate the angle between the estimated plane and the XY plane"
- a = model.estimator_.coef_[0]
- b = model.estimator_.coef_[1]
- angle = np.arccos(1 / np.sqrt(a ** 2 + b ** 2 + 1))
- return np.degrees(angle)
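
A quick numeric check of the angle formula in __calculate_angle: for a plane z = ax + by + c the normal is (a, b, -1), so its angle to the XY plane is arccos(1/sqrt(a^2 + b^2 + 1)). A 45-degree ramp along x gives a = 1, b = 0:

import numpy as np

def plane_angle_degrees(a, b):
    # Angle between the plane z = a*x + b*y + c and the XY plane
    return np.degrees(np.arccos(1.0 / np.sqrt(a ** 2 + b ** 2 + 1)))

print(plane_angle_degrees(1.0, 0.0))  # 45.0 (rises 1m per 1m in x)
print(plane_angle_degrees(0.0, 0.0))  # 0.0 (flat plane)
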
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/extra_dimensions/extended_dimension.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/extra_dimensions/extended_dimension.py
deleted file mode 100755
index 741d041b..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/extra_dimensions/extended_dimension.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import numpy as np
-from .dimension import Dimension
-
-class ExtendedDimension(Dimension):
- """Whether the point was added or was already on the original point cloud"""
-
- def __init__(self):
- super(ExtendedDimension, self).__init__()
-
- def assign_default(self, point_cloud):
- default = np.full(point_cloud.len(), 0, dtype=np.uint16)
- super(ExtendedDimension, self)._set_values(point_cloud, default)
-
- def assign(self, *point_clouds, **kwargs):
- for point_cloud in point_clouds:
- added = np.full(point_cloud.len(), 1, dtype=np.uint16)
- super(ExtendedDimension, self)._set_values(point_cloud, added)
-
- def get_name(self):
- return 'extended'
-
- def get_las_type(self):
- return 3
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/extra_dimensions/partition_dimension.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/extra_dimensions/partition_dimension.py
deleted file mode 100755
index 8d39866b..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/extra_dimensions/partition_dimension.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import numpy as np
-from .dimension import Dimension
-
-class PartitionDimension(Dimension):
- """Group points by partition"""
-
- def __init__(self, name):
- super(PartitionDimension, self).__init__()
- self.counter = 1
- self.name = name
-
- def assign_default(self, point_cloud):
- default = np.full(point_cloud.len(), 0)
- super(PartitionDimension, self)._set_values(point_cloud, default)
-
- def assign(self, *point_clouds, **kwargs):
- for point_cloud in point_clouds:
- super(PartitionDimension, self)._set_values(point_cloud, np.full(point_cloud.len(), self.counter))
- self.counter += 1
-
- def get_name(self):
- return self.name
-
- def get_las_type(self):
- return 5
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/grid/__init__.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/grid/__init__.py
deleted file mode 100755
index e69de29b..00000000
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/grid/builder.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/grid/builder.py
deleted file mode 100755
index be141a43..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/grid/builder.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import numpy as np
-from sklearn.neighbors import BallTree
-
-EPSILON = 0.00001
-
-def build_grid(bounds, point_cloud, distance):
- """First, a 2D grid is built with a distance of 'distance' between points, inside the given bounds.
- Then, only points that don't have a point cloud neighbor closer than 'distance' are left. The rest are filtered out."""
-
- # Generate a grid of 2D points inside the bounds, with a distance of 'distance' between them
- grid = __build_grid(bounds, distance)
-
- # Filter out grid points outside the bounds (makes sense if bounds are not squared)
- grid_inside = bounds.keep_points_inside(grid)
-
- # Filter out the grid points that have a neighbor closer than 'distance' from the given point cloud
- return __calculate_lonely_points(grid_inside, point_cloud, distance)
-
-def __build_grid(bounds, distance):
- x_min, x_max, y_min, y_max = bounds.corners()
- grid = [[x, y] for x in np.arange(x_min, x_max + distance, distance) for y in np.arange(y_min, y_max + distance, distance)]
- return np.array(grid)
-
-def __calculate_lonely_points(grid, point_cloud, distance):
- # Generate BallTree for point cloud
- ball_tree = BallTree(point_cloud.get_xy(), metric='manhattan')
-
- # Calculate for each of the points in the grid, the amount of neighbors in the original ground cloud
- count = ball_tree.query_radius(grid, distance - EPSILON, count_only=True)
-
- # Return only the points in the grid that don't have a neighbor
- return grid[count == 0]
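
A self-contained sketch of the "lonely points" filter above: only grid points with no cloud neighbor within 'distance' survive. Synthetic data, same BallTree call:

import numpy as np
from sklearn.neighbors import BallTree

cloud = np.array([[0.0, 0.0], [1.0, 1.0]])  # existing ground points
grid = np.array([[0.2, 0.2], [5.0, 5.0]])   # candidate grid points
distance = 1.0

tree = BallTree(cloud, metric='manhattan')
count = tree.query_radius(grid, r=distance - 0.00001, count_only=True)
print(grid[count == 0])  # only [5., 5.] survives; [0.2, 0.2] has a neighbor
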
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/io/__init__.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/io/__init__.py
deleted file mode 100755
index e69de29b..00000000
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/io/las_io.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/io/las_io.py
deleted file mode 100755
index 49ce0361..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/io/las_io.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# TODO: Move to pylas when project migrates to python3
-
-from laspy.file import File
-from laspy.header import Header
-import numpy as np
-from ..point_cloud import PointCloud
-
-def read_cloud(point_cloud_path):
- # Open point cloud and read its properties
- las_file = File(point_cloud_path, mode='r')
- header = (las_file.header.copy(), las_file.header.scale, las_file.header.offset, las_file.header.evlrs, las_file.header.vlrs)
- [x_scale, y_scale, z_scale] = las_file.header.scale
- [x_offset, y_offset, z_offset] = las_file.header.offset
-
- # Calculate the real coordinates
- x = las_file.X * x_scale + x_offset
- y = las_file.Y * y_scale + y_offset
- z = las_file.Z * z_scale + z_offset
-
- cloud = PointCloud.with_dimensions(x, y, z, las_file.Classification, las_file.red, las_file.green, las_file.blue)
-
- # Close the file
- las_file.close()
-
- # Return the result
- return header, cloud
-
-def write_cloud(header, point_cloud, output_point_cloud_path, write_extra_dimensions=False):
- (h, scale, offset, evlrs, vlrs) = header
-
- # Open output file
- output_las_file = File(output_point_cloud_path, mode='w', header=h, evlrs=evlrs, vlrs=vlrs)
-
- if write_extra_dimensions:
- # Create new dimensions
- for name, dimension in point_cloud.extra_dimensions_metadata.items():
- output_las_file.define_new_dimension(name=name, data_type=dimension.get_las_type(), description="Dimension added by Ground Extend")
-
- # Assign dimension values
- for dimension_name, values in point_cloud.extra_dimensions.items():
- setattr(output_las_file, dimension_name, values)
-
- # Adapt points to scale and offset
- [x_scale, y_scale, z_scale] = scale
- [x_offset, y_offset, z_offset] = offset
- [x, y] = np.hsplit(point_cloud.xy, 2)
- output_las_file.X = (x.ravel() - x_offset) / x_scale
- output_las_file.Y = (y.ravel() - y_offset) / y_scale
- output_las_file.Z = (point_cloud.z - z_offset) / z_scale
-
- # Set color
- [red, green, blue] = np.hsplit(point_cloud.rgb, 3)
- output_las_file.red = red.ravel()
- output_las_file.green = green.ravel()
- output_las_file.blue = blue.ravel()
-
- # Set classification
- output_las_file.Classification = point_cloud.classification.astype(np.uint8)
-
- # Set header
- output_las_file.header.scale = scale
- output_las_file.header.offset = offset
-
- # Close files
- output_las_file.close()
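
LAS stores coordinates as scaled integers; read_cloud above reconstructs real coordinates as X * scale + offset and write_cloud inverts it. A tiny round-trip check in plain numpy (no laspy needed):

import numpy as np

scale, offset = 0.001, 100000.0      # a typical millimeter scale
raw = np.array([1234567, 1234568])   # stored integer values
real = raw * scale + offset          # what read_cloud computes
back = np.round((real - offset) / scale).astype(np.int64)
assert (back == raw).all()
print(real)  # [101234.567 101234.568]
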
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/__init__.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/__init__.py
deleted file mode 100755
index e69de29b..00000000
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/one_partition.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/one_partition.py
deleted file mode 100755
index cfa73703..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/one_partition.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from .partition_plan import PartitionPlan, Partition
-from ..bounds.utils import box_from_cloud
-
-class OnePartition(PartitionPlan):
- """This partition plan does nothing. It returns all the cloud points in one partition."""
-
- def __init__(self, point_cloud):
- super(OnePartition, self).__init__()
- self.point_cloud = point_cloud
-
- def execute(self, **kwargs):
- bounds = box_from_cloud(self.point_cloud)
- return [Partition(self.point_cloud, bounds=bounds)]
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/partition_plan.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/partition_plan.py
deleted file mode 100755
index ba7961c9..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/partition_plan.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from abc import ABCMeta, abstractmethod
-
-class PartitionPlan(object):
- """We want to partition the ground in different areas. There are many ways to do so, and each of them will be a different partition plan."""
- __metaclass__ = ABCMeta
-
- def __init__(self):
- super(PartitionPlan, self).__init__()
-
- @abstractmethod
- def execute(self):
- """This method is expected to return a list of Partition instances"""
-
-class Partition:
- def __init__(self, point_cloud, **kwargs):
- self.point_cloud = point_cloud
- self.bounds = kwargs['bounds']
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/quad_partitions.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/quad_partitions.py
deleted file mode 100755
index f0cc71ab..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/quad_partitions.py
+++ /dev/null
@@ -1,59 +0,0 @@
-import numpy as np
-from abc import abstractmethod
-from ..bounds.utils import box_from_cloud
-from .partition_plan import PartitionPlan, Partition
-
-class QuadPartitions(PartitionPlan):
- """This partition plan starts with one big partition that includes the whole point cloud. It then divides it into four partitions, based on some criteria.
- Each of these partitions is then divided into four other partitions and so on. The algorithm has two possible stopping criteria:
- if subdividing a partition would imply that one of the new partitions contains fewer than a given number of points, or that one of the new partitions has an area smaller than the given size,
- then the partition is not divided."""
-
- def __init__(self, point_cloud):
- super(QuadPartitions, self).__init__()
- self.point_cloud = point_cloud
-
- @abstractmethod
- def choose_divide_point(self, point_cloud, bounding_box):
- """Given a point cloud and a bounding box, calculate the point that will be used to divide the partition by four"""
-
- def execute(self, **kwargs):
- initial_bounding_box = box_from_cloud(self.point_cloud)
- return self._divide_until(self.point_cloud, initial_bounding_box, kwargs['min_points'], kwargs['min_area'])
-
- def _divide_until(self, point_cloud, bounding_box, min_points, min_area):
- dividing_point = self.choose_divide_point(point_cloud, bounding_box)
- new_boxes = bounding_box.divide_by_point(dividing_point)
-
- for new_box in new_boxes:
- if new_box.area() < min_area:
- return [Partition(point_cloud, bounds=bounding_box)] # If by dividing, I break the minimum area threshold, don't do it
-
- subdivisions = []
-
- for new_box in new_boxes:
- mask = new_box.calculate_mask(point_cloud)
- if np.count_nonzero(mask) < min_points:
- return [Partition(point_cloud, bounds=bounding_box)] # If by dividing, I break the minimum amount of points in a zone, don't do it
-
- subdivisions += self._divide_until(point_cloud[mask], new_box, min_points, min_area)
-
- return subdivisions
-
-class UniformPartitions(QuadPartitions):
- """This kind of partitioner takes the current bounding box, and divides it by four uniform partitions"""
-
- def __init__(self, point_cloud):
- super(UniformPartitions, self).__init__(point_cloud)
-
- def choose_divide_point(self, point_cloud, bounding_box):
- return bounding_box.center()
-
-class MedianPartitions(QuadPartitions):
- """This kind of partitioner takes the current point cloud, and divides it by the median, so that all four new partitions have the same amount of points"""
-
- def __init__(self, point_cloud):
- super(MedianPartitions, self).__init__(point_cloud)
-
- def choose_divide_point(self, point_cloud, bounding_box):
- return np.median(point_cloud.get_xy(), axis=0)
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/selector.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/selector.py
deleted file mode 100755
index 20acd2cd..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/selector.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from .one_partition import OnePartition
-from .quad_partitions import UniformPartitions, MedianPartitions
-from .surrounding_partitions import SurroundingPartitions
-
-
-def select_partition_plan(name, point_cloud):
- if name == 'one':
- return OnePartition(point_cloud)
- elif name == 'uniform':
- return UniformPartitions(point_cloud)
- elif name == 'median':
- return MedianPartitions(point_cloud)
- elif name == 'surrounding':
- return SurroundingPartitions(point_cloud)
- else:
- raise Exception('Incorrect partition name.')
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/surrounding_partitions.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/surrounding_partitions.py
deleted file mode 100755
index 483cb2aa..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/partition/surrounding_partitions.py
+++ /dev/null
@@ -1,110 +0,0 @@
-from sklearn.cluster import DBSCAN
-from sklearn.neighbors import BallTree
-import numpy as np
-import math
-
-from ..bounds.utils import box_from_cloud, calculate_convex_hull_bounds
-from ..bounds.types import BoxBounds
-from ..grid.builder import build_grid
-from ..point_cloud import PointCloud
-from .partition_plan import PartitionPlan, Partition
-
-DEFAULT_DISTANCE = 5
-MIN_PERCENTAGE_OF_POINTS_IN_CONVEX_HULL = 90
-EPSILON = 0.0001
-
-class SurroundingPartitions(PartitionPlan):
-
- def __init__(self, point_cloud):
- super(SurroundingPartitions, self).__init__()
- self.point_cloud = point_cloud
- self.chebyshev_ball_tree = BallTree(point_cloud.xy, metric='chebyshev')
- self.manhattan_ball_tree = BallTree(point_cloud.xy, metric='manhattan')
-
- def execute(self, **kwargs):
- distance = kwargs['distance'] if 'distance' in kwargs else DEFAULT_DISTANCE
- bounds = kwargs['bounds'] if 'bounds' in kwargs else box_from_cloud(self.point_cloud)
- min_points = kwargs['min_points']
- min_area = kwargs['min_area']
-
- result = ExecutionResult(self.point_cloud.len())
- grid = build_grid(bounds, self.point_cloud, distance)
-
- if grid.shape[0] >= 1:
- db = DBSCAN(eps=distance + EPSILON, min_samples=1, metric='manhattan', n_jobs=-1).fit(grid)
- clusters = set(db.labels_)
-
- for cluster in clusters:
- cluster_members = grid[db.labels_ == cluster]
- point_cloud_neighbors, point_cloud_neighbors_mask = self.__find_cluster_neighbors(cluster_members, distance)
-
- if self.__is_cluster_surrounded(cluster_members, point_cloud_neighbors):
- result.add_cluster_partition(cluster_members, point_cloud_neighbors, point_cloud_neighbors_mask)
- else:
- point_cloud_neighbors, point_cloud_neighbors_mask, bounding_box = self.__find_points_for_non_surrounded_cluster(bounds, cluster_members, distance, min_area, min_points)
- result.add_zone_partition(cluster_members, point_cloud_neighbors, point_cloud_neighbors_mask, bounding_box)
-
- return result.build_result(self.point_cloud)
-
- def __find_points_for_non_surrounded_cluster(self, bounds, cluster_members, distance, min_area, min_points):
- (center_x, center_y) = bounds.center()
-
- [x_min, y_min] = np.amin(cluster_members, axis=0)
- [x_max, y_max] = np.amax(cluster_members, axis=0)
-
- x = [x_min - distance, x_max + distance]
- y = [y_min - distance, y_max + distance]
-
- # Find the indices of the corner closest to the center of the point cloud
- closest_x_idx = np.argmin(np.abs(x - center_x))
- closest_y_idx = np.argmin(np.abs(y - center_y))
-
- # Calculate the direction to where the box should grow
- x_dir = -1 if closest_x_idx == 0 else 1
- y_dir = -1 if closest_y_idx == 0 else 1
-
- bounding_box = BoxBounds(x[0], x[1], y[0], y[1])
- while bounding_box.area() < min_area:
- x[closest_x_idx] += distance * x_dir
- y[closest_y_idx] += distance * y_dir
- bounding_box = BoxBounds(x[0], x[1], y[0], y[1])
-
- mask = bounding_box.calculate_mask(self.point_cloud)
- while np.count_nonzero(mask) < min_points: # count points inside; len(mask) is the whole cloud size
- x[closest_x_idx] += distance * x_dir
- y[closest_y_idx] += distance * y_dir
- bounding_box = BoxBounds(x[0], x[1], y[0], y[1])
- mask = bounding_box.calculate_mask(self.point_cloud)
-
- return self.point_cloud[mask], mask, bounding_box
-
- def __is_cluster_surrounded(self, cluster_members, point_cloud_neighbors):
- convex_hull = calculate_convex_hull_bounds(point_cloud_neighbors.get_xy())
- ratio = convex_hull.percentage_of_points_inside(cluster_members)
- return ratio > MIN_PERCENTAGE_OF_POINTS_IN_CONVEX_HULL
-
- def __find_cluster_neighbors(self, cluster_members, distance):
- mask_per_point = self.manhattan_ball_tree.query_radius(cluster_members, distance * 3)
- all_neighbor_mask = np.concatenate(mask_per_point)
- point_cloud_neighbors = self.point_cloud[all_neighbor_mask]
- return point_cloud_neighbors, all_neighbor_mask
-
-class ExecutionResult:
- def __init__(self, cloud_size):
- self.partitions = [ ]
- self.marked_as_neighbors = np.zeros(cloud_size, dtype=bool)
-
- def add_cluster_partition(self, cluster_members, point_cloud_neighbors, point_cloud_neighbors_mask):
- convex_hull = calculate_convex_hull_bounds(np.concatenate((point_cloud_neighbors.get_xy(), cluster_members)))
- self.marked_as_neighbors[point_cloud_neighbors_mask] = True
- self.partitions.append(Partition(point_cloud_neighbors, bounds=convex_hull))
-
- def add_zone_partition(self, cluster_members, point_cloud_neighbors, point_cloud_neighbors_mask, bounding_box):
- self.marked_as_neighbors[point_cloud_neighbors_mask] = True
- self.partitions.append(Partition(point_cloud_neighbors, bounds=bounding_box))
-
- def build_result(self, whole_point_cloud):
- remaining_cloud = whole_point_cloud[~self.marked_as_neighbors]
- new_bounds = box_from_cloud(remaining_cloud)
- self.partitions.insert(0, Partition(remaining_cloud, bounds=new_bounds))
- return self.partitions
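
A toy version (not ODM code) of the clustering step above: with min_samples=1 every grid point belongs to some cluster, and points closer than 'distance' (manhattan) merge into one:

import numpy as np
from sklearn.cluster import DBSCAN

grid = np.array([[0, 0], [0, 1], [10, 10]], dtype=float)
distance = 1.0

db = DBSCAN(eps=distance + 0.0001, min_samples=1, metric='manhattan').fit(grid)
print(db.labels_)  # [0 0 1]: the two nearby points cluster, the far one is alone
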
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/point_cloud.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/point_cloud.py
deleted file mode 100755
index a39bdf76..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/point_cloud.py
+++ /dev/null
@@ -1,101 +0,0 @@
-import numpy as np
-from numpy.lib.recfunctions import append_fields
-
-class PointCloud:
- """Representation of a 3D point cloud"""
- def __init__(self, xy, z, classification, rgb, indices, extra_dimensions, extra_dimensions_metadata):
- self.xy = xy
- self.z = z
- self.classification = classification
- self.rgb = rgb
- self.indices = indices
- self.extra_dimensions = extra_dimensions
- self.extra_dimensions_metadata = extra_dimensions_metadata
-
- @staticmethod
- def with_dimensions(x, y, z, classification, red, green, blue, indices=None):
- xy = np.column_stack((x, y))
- rgb = np.column_stack((red, green, blue))
- indices = indices if indices is not None else np.arange(0, len(x))
- return PointCloud(xy, z, classification, rgb, indices, { }, { })
-
- @staticmethod
- def with_xy(xy):
- [x, y] = np.hsplit(xy, 2)
- empty = np.empty(xy.shape[0])
- return PointCloud.with_dimensions(x.ravel(), y.ravel(), empty, np.empty(xy.shape[0], dtype=np.uint8), empty, empty, empty)
-
- def __getitem__(self, mask):
- masked_dimensions = { name: values[mask] for name, values in self.extra_dimensions.items() }
- return PointCloud(self.xy[mask], self.z[mask], self.classification[mask], self.rgb[mask], self.indices[mask], masked_dimensions, self.extra_dimensions_metadata)
-
- def concatenate(self, other_cloud):
- for name, dimension in self.extra_dimensions_metadata.items():
- if name not in other_cloud.extra_dimensions:
- dimension.assign_default(other_cloud)
- for name, dimension in other_cloud.extra_dimensions_metadata.items():
- if name not in self.extra_dimensions:
- dimension.assign_default(self)
- new_indices = np.arange(len(self.indices), len(self.indices) + len(other_cloud.indices))
- self.xy = np.concatenate((self.xy, other_cloud.xy))
- self.z = np.concatenate((self.z, other_cloud.z))
- self.classification = np.concatenate((self.classification, other_cloud.classification))
- self.rgb = np.concatenate((self.rgb, other_cloud.rgb))
- self.indices = np.concatenate((self.indices, new_indices))
- self.extra_dimensions = { name: np.concatenate((values, other_cloud.extra_dimensions[name])) for name, values in self.extra_dimensions.items() }
-
- def update(self, other_cloud):
- for name, dimension in self.extra_dimensions_metadata.items():
- if name not in other_cloud.extra_dimensions:
- dimension.assign_default(other_cloud)
- for name, dimension in other_cloud.extra_dimensions_metadata.items():
- if name not in self.extra_dimensions:
- dimension.assign_default(self)
- self.xy[other_cloud.indices] = other_cloud.xy
- self.z[other_cloud.indices] = other_cloud.z
- self.classification[other_cloud.indices] = other_cloud.classification
- self.rgb[other_cloud.indices] = other_cloud.rgb
- for name, values in self.extra_dimensions.items():
- values[other_cloud.indices] = other_cloud.extra_dimensions[name]
-
- def add_dimension(self, dimension, values):
- self.extra_dimensions[dimension.get_name()] = values
- self.extra_dimensions_metadata[dimension.get_name()] = dimension
-
- def get_xy(self):
- return self.xy
-
- def get_z(self):
- return self.z
-
- def len(self):
- return len(self.z)
-
- def get_extra_dimension_values(self, name):
- return self.extra_dimensions[name]
-
- def get_bounding_box(self):
- [x_min, y_min] = np.amin(self.xy, axis=0)
- [x_max, y_max] = np.amax(self.xy, axis=0)
- z_min = min(self.z)
- z_max = max(self.z)
- return BoundingBox3D(x_min, x_max, y_min, y_max, z_min, z_max)
-
-
-class BoundingBox3D:
- def __init__(self, x_min, x_max, y_min, y_max, z_min, z_max):
- self.x_min = x_min
- self.x_max = x_max
- self.y_min = y_min
- self.y_max = y_max
- self.z_min = z_min
- self.z_max = z_max
-
- def keep_points_inside(self, point_cloud):
- min = np.array([self.x_min, self.y_min, self.z_min])
- max = np.array([self.x_max, self.y_max, self.z_max])
-
- arr = np.column_stack((point_cloud.get_xy(), point_cloud.get_z()))
- mask = np.all(np.logical_and(min <= arr, arr <= max), axis=1)
-
- return point_cloud[mask]
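
The indices array is what lets a filtered sub-cloud write its (possibly modified) values back into the parent cloud via update(). A minimal numpy analog of that round trip:

import numpy as np

z = np.array([10.0, 20.0, 30.0, 40.0])
indices = np.arange(len(z))

mask = z > 15           # take a sub-cloud, remembering parent positions
sub_z = z[mask]
sub_idx = indices[mask]

sub_z += 1.0            # modify the sub-cloud...
z[sub_idx] = sub_z      # ...and write back, as PointCloud.update does
print(z)                # [10. 21. 31. 41.]
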
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/rectify.py b/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/rectify.py
deleted file mode 100755
index 235da49f..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/ground_rectification/rectify.py
+++ /dev/null
@@ -1,154 +0,0 @@
-import argparse
-import numpy as np
-from os import path
-from sklearn.neighbors import BallTree
-from sklearn.linear_model import RANSACRegressor
-from .extra_dimensions.distance_dimension import DistanceDimension
-from .extra_dimensions.partition_dimension import PartitionDimension
-from .extra_dimensions.extended_dimension import ExtendedDimension
-from .grid.builder import build_grid
-from .bounds.utils import calculate_convex_hull_bounds
-from .io.las_io import read_cloud, write_cloud
-from .partition.selector import select_partition_plan
-from .point_cloud import PointCloud
-
-EPSILON = 0.00001
-
-def run_rectification(**kwargs):
- header, point_cloud = read_cloud(kwargs['input'])
-
- if 'reclassify_plan' in kwargs and kwargs['reclassify_plan'] is not None:
- point_cloud = reclassify_cloud(point_cloud, kwargs['reclassify_plan'], kwargs['reclassify_threshold'], kwargs['min_points'], kwargs['min_area'])
-
- if 'extend_plan' in kwargs and kwargs['extend_plan'] is not None:
- point_cloud = extend_cloud(point_cloud, kwargs['extend_plan'], kwargs['extend_grid_distance'], kwargs['min_points'], kwargs['min_area'])
-
- write_cloud(header, point_cloud, kwargs['output'], kwargs['debug'])
-
-def reclassify_cloud(point_cloud, plan, threshold, min_points, min_area):
- # Get only ground
- ground_cloud = point_cloud[point_cloud.classification == 2]
-
- # Get the partition plan, according to the specified criteria
- partition_plan = select_partition_plan(plan, ground_cloud)
-
- # Execute the partition plan, and get all the partitions
- partitions = [result for result in partition_plan.execute(min_points=min_points, min_area=min_area)]
-
- # Add 'distance to ground' and 'partition number' dimensions to the cloud
- for dimension in [DistanceDimension(), PartitionDimension('reclassify_partition')]:
-
- # Calculate new dimension for partition
- for partition in partitions:
- dimension.assign(partition.point_cloud)
-
- # Update new data to the original point cloud
- point_cloud.update(partition.point_cloud)
-
- # Calculate the points that need to be reclassified
- mask = point_cloud.get_extra_dimension_values('distance_to_ground') > threshold
-
- # Reclassify them as 'unclassified'
- point_cloud.classification[mask] = 1
-
- return point_cloud
-
-def extend_cloud(point_cloud, plan, distance, min_points, min_area):
- # Get only ground
- ground_cloud = point_cloud[point_cloud.classification == 2]
-
- # Read the bounds file
- bounds = calculate_convex_hull_bounds(ground_cloud.get_xy())
-
- # Generate a grid of 2D points inside the bounds, with a distance of 'distance' between them
- grid_2d = build_grid(bounds, ground_cloud, distance)
-
- # Create a new point cloud
- grid_3d = PointCloud.with_xy(grid_2d)
-
- # Get the partition plan, according to the specified criteria
- partition_plan = select_partition_plan(plan, ground_cloud)
-
- # Execute the partition plan, and get all the partitions
- partitions = partition_plan.execute(distance=distance, min_points=min_points, min_area=min_area, bounds=bounds)
-
- # Create dimensions
- partition_dimension = PartitionDimension('extend_partition')
- extended_dimension = ExtendedDimension()
-
- for partition in partitions:
- # Keep the grid point that are inside the partition
- grid_inside = partition.bounds.keep_points_inside(grid_3d)
-
- if grid_inside.len() > 0:
- # In each partition, calculate the altitude of the grid points
- new_points = __calculate_new_points(grid_inside, partition.point_cloud)
-
- # Assign the dimension values
- partition_dimension.assign(new_points, partition.point_cloud)
- extended_dimension.assign(new_points)
-
- # Update the original 3d grid with the new calculated points
- grid_3d.update(new_points)
-
- else:
- # Assign the original points the correct partition
- partition_dimension.assign(partition.point_cloud)
-
- # Update new information to the original point cloud
- point_cloud.update(partition.point_cloud)
-
-
- # Calculate the bounding box of the original cloud
- bbox = point_cloud.get_bounding_box()
-
- # Remove points that might have ended up outside the bbox
- grid_3d = bbox.keep_points_inside(grid_3d)
-
- # Add the new grid points to the original cloud
- point_cloud.concatenate(grid_3d)
-
- # Add the new points to the original point cloud
- return point_cloud
-
-def __calculate_new_points(grid_points_inside, partition_point_cloud):
- # Fit a RANSAC regression model to the partition's ground points
- model = RANSACRegressor().fit(partition_point_cloud.get_xy(), partition_point_cloud.get_z())
-
- # With the ransac model, calculate the altitude for each grid point
- grid_points_altitude = model.predict(grid_points_inside.get_xy())
-
- # Calculate color for new points
- [avg_red, avg_green, avg_blue] = np.mean(partition_point_cloud.rgb, axis=0)
- red = np.full(grid_points_inside.len(), avg_red)
- green = np.full(grid_points_inside.len(), avg_green)
- blue = np.full(grid_points_inside.len(), avg_blue)
-
- # Classify all new points as ground
- classification = np.full(grid_points_inside.len(), 2, dtype=np.uint8)
-
- # Split xy into columns
- [x, y] = np.hsplit(grid_points_inside.get_xy(), 2)
-
- # Return point cloud
- return PointCloud.with_dimensions(x.ravel(), y.ravel(), grid_points_altitude, classification, red, green, blue, grid_points_inside.indices)
-
-if __name__ == '__main__':
- parser = argparse.ArgumentParser(description='This script takes a pre-classified point cloud, and then it re-classifies wrongly classified ground points as non-ground points and finally adds ground points where needed.')
- parser.add_argument('input', type=str, help='The path where to find the pre-classified point cloud.')
- parser.add_argument('output', type=str, help='The path where to save the rectified point cloud.')
- parser.add_argument('--reclassify_plan', type=str, help='The partition plan to use for reclassification. Must be one of (one, uniform, median, surrounding)')
- parser.add_argument('--reclassify_threshold', type=float, help='Every point with a distance to the estimated ground that is higher than the threshold will be reclassified as non-ground', default=5)
- parser.add_argument('--extend_plan', type=str, help='The partition plan to use for extending the ground. Must be one of (one, uniform, median, surrounding)')
- parser.add_argument('--extend_grid_distance', type=float, help='The distance between points on the grid that will be added to the point cloud.', default=5)
- parser.add_argument('--min_area', type=int, help='Some partition plans need a minimum area as a stopping criteria.', default=750)
- parser.add_argument('--min_points', type=int, help='Some partition plans need a minimum number of points as a stopping criteria.', default=500)
-
- args = parser.parse_args()
-
- if args.reclassify_plan is None and args.extend_plan is None:
- raise Exception("Please set a reclassifying or extension plan. Otherwise there is nothing for me to do.")
-
- run(input=args.input, reclassify_plan=args.reclassify_plan, reclassify_threshold=args.reclassify_threshold, \
- extend_plan=args.extend_plan, extend_grid_distance=args.extend_grid_distance, \
- output=args.output, min_points=args.min_points, min_area=args.min_area, debug=False)
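For context, the thresholding step in reclassify_cloud() boils down to the following standalone sketch (hypothetical values; `classification` and `distance_to_ground` stand in for the PointCloud dimensions above):

```python
import numpy as np

# LAS classification codes: 2 = ground, 1 = unclassified
classification = np.array([2, 2, 2, 1], dtype=np.uint8)
distance_to_ground = np.array([0.2, 7.5, 0.9, 0.0])

threshold = 5.0
mask = distance_to_ground > threshold  # same test as reclassify_cloud()
classification[mask] = 1               # demote points too far from the estimated ground
print(classification)                  # -> [2 1 2 1]
```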
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/merge.py b/o/ODM/ODM-2.8.7/opendm/dem/merge.py
deleted file mode 100644
index fdeef93a..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/merge.py
+++ /dev/null
@@ -1,175 +0,0 @@
-import math
-import numpy as np
-from scipy import ndimage
-import rasterio
-from rasterio.transform import Affine, rowcol
-from opendm import system
-from opendm.dem.commands import compute_euclidean_map
-from opendm import log
-from opendm import io
-import os
-
-def euclidean_merge_dems(input_dems, output_dem, creation_options={}, euclidean_map_source=None):
- """
- Based on https://github.com/mapbox/rio-merge-rgba
- and ideas from Anna Petrasova;
- implementation by Piero Toffanin.
-
- Computes a merged DEM by computing/using a map of euclidean
- distances to NODATA cells for each DEM, then blending overlapping
- DEM cells via a weighted average based on those distances.
- """
- inputs = []
- bounds = None
- precision = 7
-
- existing_dems = []
- for dem in input_dems:
- if not io.file_exists(dem):
- log.ODM_WARNING("%s does not exist. Will skip from merged DEM." % dem)
- continue
- existing_dems.append(dem)
-
- if len(existing_dems) == 0:
- log.ODM_WARNING("No input DEMs, skipping euclidean merge.")
- return
-
- with rasterio.open(existing_dems[0]) as first:
- src_nodata = first.nodatavals[0]
- res = first.res
- dtype = first.dtypes[0]
- profile = first.profile
-
- for dem in existing_dems:
- eumap = compute_euclidean_map(dem, io.related_file_path(dem, postfix=".euclideand", replace_base=euclidean_map_source), overwrite=False)
- if eumap and io.file_exists(eumap):
- inputs.append((dem, eumap))
-
- log.ODM_INFO("%s valid DEM rasters to merge" % len(inputs))
-
- sources = [(rasterio.open(d), rasterio.open(e)) for d,e in inputs]
-
- # Extent from option or extent of all inputs.
- if bounds:
- dst_w, dst_s, dst_e, dst_n = bounds
- else:
- # scan input files.
- # while we're at it, validate assumptions about inputs
- xs = []
- ys = []
- for src_d, src_e in sources:
- left, bottom, right, top = src_d.bounds
- xs.extend([left, right])
- ys.extend([bottom, top])
- if src_d.profile["count"] != 1 or src_e.profile["count"] != 1:
- raise ValueError("Inputs must be 1-band rasters")
- dst_w, dst_s, dst_e, dst_n = min(xs), min(ys), max(xs), max(ys)
- log.ODM_INFO("Output bounds: %r %r %r %r" % (dst_w, dst_s, dst_e, dst_n))
-
- output_transform = Affine.translation(dst_w, dst_n)
- output_transform *= Affine.scale(res[0], -res[1])
-
- # Compute output array shape. We guarantee it will cover the output
- # bounds completely.
- output_width = int(math.ceil((dst_e - dst_w) / res[0]))
- output_height = int(math.ceil((dst_n - dst_s) / res[1]))
-
- # Adjust bounds to fit.
- dst_e, dst_s = output_transform * (output_width, output_height)
- log.ODM_INFO("Output width: %d, height: %d" % (output_width, output_height))
- log.ODM_INFO("Adjusted bounds: %r %r %r %r" % (dst_w, dst_s, dst_e, dst_n))
-
- profile["transform"] = output_transform
- profile["height"] = output_height
- profile["width"] = output_width
- profile["tiled"] = creation_options.get('TILED', 'YES') == 'YES'
- profile["blockxsize"] = creation_options.get('BLOCKXSIZE', 512)
- profile["blockysize"] = creation_options.get('BLOCKYSIZE', 512)
- profile["compress"] = creation_options.get('COMPRESS', 'LZW')
- profile["nodata"] = src_nodata
-
- # Creation opts
- profile.update(creation_options)
-
- # create destination file
- with rasterio.open(output_dem, "w", **profile) as dstrast:
-
- for idx, dst_window in dstrast.block_windows():
-
- left, bottom, right, top = dstrast.window_bounds(dst_window)
-
- blocksize = dst_window.width
- dst_rows, dst_cols = (dst_window.height, dst_window.width)
-
- # initialize array destined for the block
- dst_count = first.count
- dst_shape = (dst_count, dst_rows, dst_cols)
-
- dstarr = np.zeros(dst_shape, dtype=dtype)
- distsum = np.zeros(dst_shape, dtype=dtype)
- small_distance = 0.001953125
-
- for src_d, src_e in sources:
- # The full_cover behavior is problematic here as it includes
- # extra pixels along the bottom right when the sources are
- # slightly misaligned
- #
- # src_window = get_window(left, bottom, right, top,
- # src.transform, precision=precision)
- #
- # With rio merge this just adds an extra row, but when the
- # imprecision occurs at each block, you get artifacts
-
- nodata = src_d.nodatavals[0]
-
- # Alternative, custom get_window using rounding
- src_window_d = tuple(zip(rowcol(
- src_d.transform, left, top, op=round, precision=precision
- ), rowcol(
- src_d.transform, right, bottom, op=round, precision=precision
- )))
-
- src_window_e = tuple(zip(rowcol(
- src_e.transform, left, top, op=round, precision=precision
- ), rowcol(
- src_e.transform, right, bottom, op=round, precision=precision
- )))
-
- temp_d = np.zeros(dst_shape, dtype=dtype)
- temp_d = src_d.read(
- out=temp_d, window=src_window_d, boundless=True, masked=False
- )
-
- temp_e = np.zeros(dst_shape, dtype=dtype)
- temp_e = src_e.read(
- out=temp_e, window=src_window_e, boundless=True, masked=False
- )
-
- # Set NODATA areas in the euclidean map to a very low value
- # so that:
- # - Areas with overlap prioritize DEM layers' cells that
- # are far away from NODATA areas
- # - Areas that have no overlap are included in the final result
- # even if they are very close to a NODATA cell
- temp_e[temp_e==0] = small_distance
- temp_e[temp_d==nodata] = 0
-
- np.multiply(temp_d, temp_e, out=temp_d)
- np.add(dstarr, temp_d, out=dstarr)
- np.add(distsum, temp_e, out=distsum)
-
- np.divide(dstarr, distsum, out=dstarr, where=distsum[0] != 0.0)
-
- # Perform nearest neighbor interpolation on areas where two or more rasters overlap
- # but where both rasters have only interpolated data. This prevents the creation
- # of artifacts that average areas of interpolation.
- indices = ndimage.distance_transform_edt(np.logical_and(distsum < 1, distsum > small_distance),
- return_distances=False,
- return_indices=True)
- dstarr = dstarr[tuple(indices)]
-
- dstarr[dstarr == 0.0] = src_nodata
-
- dstrast.write(dstarr, window=dst_window)
-
- return output_dem
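The per-block blend above is easier to see on a toy pair of rasters. This sketch (made-up values) reproduces the weighting, where each DEM cell counts proportionally to its euclidean distance from the nearest NODATA cell:

```python
import numpy as np

dem_a  = np.array([[10.0, 10.0], [10.0, 10.0]])
dem_b  = np.array([[12.0, 12.0], [12.0, 12.0]])
dist_a = np.array([[3.0, 1.0], [3.0, 1.0]])  # distance to NODATA: larger = more trusted
dist_b = np.array([[1.0, 3.0], [1.0, 3.0]])

blended = (dem_a * dist_a + dem_b * dist_b) / (dist_a + dist_b)
print(blended)  # -> [[10.5 11.5]
                #     [10.5 11.5]]: each cell leans toward the DEM farther from its NODATA edge
```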
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/pdal.py b/o/ODM/ODM-2.8.7/opendm/dem/pdal.py
deleted file mode 100644
index fb3edf31..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/pdal.py
+++ /dev/null
@@ -1,201 +0,0 @@
-#!/usr/bin/env python
-################################################################################
-# lidar2dems - utilities for creating DEMs from LiDAR data
-#
-# AUTHOR: Matthew Hanson, matt.a.hanson@gmail.com
-#
-# Copyright (C) 2015 Applied Geosolutions LLC, oss@appliedgeosolutions.com
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-# * Redistributions of source code must retain the above copyright notice, this
-# list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
-# and/or other materials provided with the distribution.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-################################################################################
-
-# Library functions for creating DEMs from Lidar data
-
-import os
-import sys
-import json as jsonlib
-import tempfile
-from opendm import system
-from opendm import log
-from opendm.utils import double_quote
-
-from datetime import datetime
-
-
-""" JSON Functions """
-
-
-def json_base():
- """ Create initial JSON for PDAL pipeline """
- return {'pipeline': []}
-
-
-def json_gdal_base(filename, output_type, radius, resolution=1, bounds=None):
- """ Create initial JSON for PDAL pipeline containing a Writer element """
- json = json_base()
-
- d = {
- 'type': 'writers.gdal',
- 'resolution': resolution,
- 'radius': radius,
- 'filename': filename,
- 'output_type': output_type,
- 'data_type': 'float'
- }
-
- if bounds is not None:
- d['bounds'] = "([%s,%s],[%s,%s])" % (bounds['minx'], bounds['maxx'], bounds['miny'], bounds['maxy'])
-
- json['pipeline'].insert(0, d)
-
- return json
-
-
-def json_las_base(fout):
- """ Create initial JSON for writing to a LAS file """
- json = json_base()
- json['pipeline'].insert(0, {
- 'type': 'writers.las',
- 'filename': fout
- })
- return json
-
-
-def json_add_decimation_filter(json, step):
- """ Add decimation Filter element and return """
- json['pipeline'].insert(0, {
- 'type': 'filters.decimation',
- 'step': step
- })
- return json
-
-
-def json_add_classification_filter(json, classification, equality="equals"):
- """ Add classification Filter element and return """
- limits = 'Classification[{0}:{0}]'.format(classification)
- if equality == 'max':
- limits = 'Classification[:{0}]'.format(classification)
-
- json['pipeline'].insert(0, {
- 'type': 'filters.range',
- 'limits': limits
- })
- return json
-
-
-def is_ply_file(filename):
- _, ext = os.path.splitext(filename)
- return ext.lower() == '.ply'
-
-
-def json_add_reader(json, filename):
- """ Add Reader Element and return """
- reader_type = 'readers.las' # default
- if is_ply_file(filename):
- reader_type = 'readers.ply'
-
- json['pipeline'].insert(0, {
- 'type': reader_type,
- 'filename': os.path.abspath(filename)
- })
- return json
-
-
-def json_add_readers(json, filenames):
- """ Add merge Filter element and readers to a Writer element and return Filter element """
- for f in filenames:
- json_add_reader(json, f)
-
- if len(filenames) > 1:
- json['pipeline'].insert(0, {
- 'type': 'filters.merge'
- })
-
- return json
-
-
-def json_print(json):
- """ Pretty print JSON """
- log.ODM_DEBUG(jsonlib.dumps(json, indent=4, separators=(',', ': ')))
-
-
-""" Run PDAL commands """
-
-def run_pipeline(json, verbose=False):
- """ Run PDAL Pipeline with provided JSON """
- if verbose:
- json_print(json)
-
- # write to temp file
- f, jsonfile = tempfile.mkstemp(suffix='.json')
- if verbose:
- log.ODM_INFO('Pipeline file: %s' % jsonfile)
- os.write(f, jsonlib.dumps(json).encode('utf8'))
- os.close(f)
-
- cmd = [
- 'pdal',
- 'pipeline',
- '-i %s' % double_quote(jsonfile)
- ]
- if verbose or sys.platform == 'win32':
- system.run(' '.join(cmd))
- else:
- system.run(' '.join(cmd) + ' > /dev/null 2>&1')
- os.remove(jsonfile)
-
-
-def run_pdaltranslate_smrf(fin, fout, scalar, slope, threshold, window, verbose=False):
- """ Run PDAL translate """
- cmd = [
- 'pdal',
- 'translate',
- '-i %s' % fin,
- '-o %s' % fout,
- 'smrf',
- '--filters.smrf.scalar=%s' % scalar,
- '--filters.smrf.slope=%s' % slope,
- '--filters.smrf.threshold=%s' % threshold,
- '--filters.smrf.window=%s' % window,
- ]
-
- if verbose:
- log.ODM_INFO(' '.join(cmd))
-
- system.run(' '.join(cmd))
-
-def merge_point_clouds(input_files, output_file, verbose=False):
- if len(input_files) == 0:
- log.ODM_WARNING("Cannot merge point clouds, no point clouds to merge.")
- return
-
- cmd = [
- 'pdal',
- 'merge',
- ' '.join(map(double_quote, input_files + [output_file])),
- ]
-
- if verbose:
- log.ODM_INFO(' '.join(cmd))
-
- system.run(' '.join(cmd))
-
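Since each json_add_* helper inserts at the head of the pipeline, stages are composed writer-first. A hypothetical DSM pipeline (file names assumed) would be assembled and run like this:

```python
# writer first, reader last: the resulting pipeline order is [reader, range filter, writer]
json = json_gdal_base("dsm.tif", "max", radius=0.56, resolution=0.1)
json = json_add_classification_filter(json, 2)  # keep only ground-classified points
json = json_add_reader(json, "pointcloud.laz")
run_pipeline(json, verbose=True)
```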
diff --git a/o/ODM/ODM-2.8.7/opendm/dem/utils.py b/o/ODM/ODM-2.8.7/opendm/dem/utils.py
deleted file mode 100644
index 9fb383a9..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dem/utils.py
+++ /dev/null
@@ -1,10 +0,0 @@
-
-def get_dem_vars(args):
- return {
- 'TILED': 'YES',
- 'COMPRESS': 'DEFLATE',
- 'BLOCKXSIZE': 512,
- 'BLOCKYSIZE': 512,
- 'BIGTIFF': 'IF_SAFER',
- 'NUM_THREADS': args.max_concurrency,
- }
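get_dem_vars() only reads max_concurrency from args, so a hedged sketch of its output can use a stand-in namespace:

```python
from types import SimpleNamespace

print(get_dem_vars(SimpleNamespace(max_concurrency=8)))
# -> {'TILED': 'YES', 'COMPRESS': 'DEFLATE', 'BLOCKXSIZE': 512,
#     'BLOCKYSIZE': 512, 'BIGTIFF': 'IF_SAFER', 'NUM_THREADS': 8}
```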
diff --git a/o/ODM/ODM-2.8.7/opendm/dls.py b/o/ODM/ODM-2.8.7/opendm/dls.py
deleted file mode 100644
index 7e0bf980..00000000
--- a/o/ODM/ODM-2.8.7/opendm/dls.py
+++ /dev/null
@@ -1,145 +0,0 @@
-"""
-MicaSense Downwelling Light Sensor Utilities
-
-Copyright 2017 MicaSense, Inc.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in the
-Software without restriction, including without limitation the rights to use,
-copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
-Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
-
-
-import numpy as np
-# for DLS correction, we need the sun position at the time the image was taken
-# this can be computed using the pysolar package (ver 0.6)
-# https://pypi.python.org/pypi/Pysolar/0.6
-# we import multiple times with checking here because the case of Pysolar is
-# different depending on the python version :(
-import imp
-
-havePysolar = False
-
-try:
- import pysolar.solar as pysolar
- havePysolar = True
-except ImportError:
- try:
- import Pysolar.solar as pysolar
- havePysolar = True
- except ImportError:
- pass
-finally:
- if not havePysolar:
- print("Unable to import pysolar")
-
-def fresnel(phi):
- return __multilayer_transmission(phi, n=[1.000277,1.6,1.38])
-
-# define functions to compute the DLS-Sun angle:
-def __fresnel_transmission(phi, n1=1.000277, n2=1.38, polarization=[.5, .5]):
- """compute fresnel transmission between media with refractive indices n1 and n2"""
- # computes the reflection and transmittance
- # for incidence angles phi for transition from medium
- # with refractive index n1 to n2
- # teflon e.g. n2=1.38
- # polycarbonate n2=1.6
- # polarization=[.5,.5] - unpolarized light
- # polarization=[1.,0] - s-polarized light - perpendicular to plane of incidence
- # polarization=[0,1.] - p-polarized light - parallel to plane of incidence
- f1 = np.cos(phi)
- f2 = np.sqrt(1-(n1/n2*np.sin(phi))**2)
- Rs = ((n1*f1-n2*f2)/(n1*f1+n2*f2))**2
- Rp = ((n1*f2-n2*f1)/(n1*f2+n2*f1))**2
- T = 1.-polarization[0]*Rs-polarization[1]*Rp
- if T > 1: T = 0.
- if T < 0: T = 0.
- if np.isnan(T): T = 0.
- return T
-
-def __multilayer_transmission(phi, n, polarization=[.5, .5]):
- T = 1.0
- phi_eff = np.copy(phi)
- for i in range(0,len(n)-1):
- n1 = n[i]
- n2 = n[i+1]
- phi_eff = np.arcsin(np.sin(phi_eff)/n1)
- T *= __fresnel_transmission(phi_eff, n1, n2, polarization=polarization)
- return T
-
-# get the position of the sun in North-East-Down (NED) coordinate system
-def ned_from_pysolar(sunAzimuth, sunAltitude):
- """Convert pysolar coordinates to NED coordinates."""
- elements = (
- np.cos(sunAzimuth) * np.cos(sunAltitude),
- np.sin(sunAzimuth) * np.cos(sunAltitude),
- -np.sin(sunAltitude),
- )
- return np.array(elements).transpose()
-
-# get the sensor orientation in North-East-Down coordinates
-# pose is a yaw/pitch/roll tuple of angles measured for the DLS
-# ori is the 3D orientation vector of the DLS in body coordinates (typically [0,0,-1])
-def get_orientation(pose, ori):
- """Generate an orientation vector from yaw/pitch/roll angles in radians."""
- yaw, pitch, roll = pose
- c1 = np.cos(-yaw)
- s1 = np.sin(-yaw)
- c2 = np.cos(-pitch)
- s2 = np.sin(-pitch)
- c3 = np.cos(-roll)
- s3 = np.sin(-roll)
- Ryaw = np.array([[c1, s1, 0], [-s1, c1, 0], [0, 0, 1]])
- Rpitch = np.array([[c2, 0, -s2], [0, 1, 0], [s2, 0, c2]])
- Rroll = np.array([[1, 0, 0], [0, c3, s3], [0, -s3, c3]])
- R = np.dot(Ryaw, np.dot(Rpitch, Rroll))
- n = np.dot(R, ori)
- return n
-
-# from the current position (lat,lon,alt) tuple
-# and time (UTC), as well as the sensor orientation (yaw,pitch,roll) tuple
-# compute a sensor sun angle - this is needed as the actual sun irradiance
-# (for clear skies) is related to the measured irradiance by:
-
-# I_measured = I_direct * cos (sun_sensor_angle) + I_diffuse
-# For clear sky, I_direct/I_diffuse ~ 6 and we can simplify this to
-# I_measured = I_direct * (cos (sun_sensor_angle) + 1/6)
-
-def compute_sun_angle(
- position,
- pose,
- utc_datetime,
- sensor_orientation,
-):
- """ compute the sun angle using pysolar functions"""
- altitude = 0
- azimuth = 0
- import warnings
- with warnings.catch_warnings(): # Ignore pysolar leap seconds offset warning
- warnings.simplefilter("ignore")
- try:
- altitude = pysolar.get_altitude(position[0], position[1], utc_datetime)
- azimuth = pysolar.get_azimuth(position[0], position[1], utc_datetime)
- except AttributeError: # catch 0.6 version of pysolar required for python 2.7 support
- altitude = pysolar.GetAltitude(position[0], position[1], utc_datetime)
- azimuth = 180-pysolar.GetAzimuth(position[0], position[1], utc_datetime)
- sunAltitude = np.radians(np.array(altitude))
- sunAzimuth = np.radians(np.array(azimuth))
- sunAzimuth = sunAzimuth % (2 * np.pi)  # wrap to range [0, 2*pi)
- nSun = ned_from_pysolar(sunAzimuth, sunAltitude)
- nSensor = np.array(get_orientation(pose, sensor_orientation))
- angle = np.arccos(np.dot(nSun, nSensor))
- return nSun, nSensor, angle, sunAltitude, sunAzimuth
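A quick numeric check (assumed values) of the clear-sky relation quoted in the comments, I_measured ≈ I_direct · (cos(sun_sensor_angle) + 1/6):

```python
import numpy as np

I_direct = 1000.0            # hypothetical direct irradiance, W/m^2
angle = np.radians(30.0)     # sun-sensor angle from compute_sun_angle()
I_measured = I_direct * (np.cos(angle) + 1.0 / 6.0)
print(round(I_measured, 1))  # -> 1032.7
```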
diff --git a/o/ODM/ODM-2.8.7/opendm/entwine.py b/o/ODM/ODM-2.8.7/opendm/entwine.py
deleted file mode 100644
index e430d867..00000000
--- a/o/ODM/ODM-2.8.7/opendm/entwine.py
+++ /dev/null
@@ -1,86 +0,0 @@
-import os
-import sys
-import shutil
-from opendm.utils import double_quote
-from opendm import io
-from opendm import log
-from opendm import system
-from opendm import concurrency
-
-
-def build(input_point_cloud_files, output_path, max_concurrency=8, rerun=False):
- num_files = len(input_point_cloud_files)
- if num_files == 0:
- log.ODM_WARNING("No input point cloud files to process")
- return
-
- tmpdir = io.related_file_path(output_path, postfix="-tmp")
-
- def dir_cleanup():
- if io.dir_exists(output_path):
- log.ODM_WARNING("Removing previous EPT directory: %s" % output_path)
- shutil.rmtree(output_path)
-
- if io.dir_exists(tmpdir):
- log.ODM_WARNING("Removing previous EPT temp directory: %s" % tmpdir)
- shutil.rmtree(tmpdir)
-
- if rerun:
- dir_cleanup()
-
- # Attempt with entwine (faster, more memory hungry)
- try:
- build_entwine(input_point_cloud_files, tmpdir, output_path, max_concurrency=max_concurrency)
- except Exception as e:
- log.ODM_WARNING("Cannot build EPT using entwine (%s), attempting with untwine..." % str(e))
- dir_cleanup()
- build_untwine(input_point_cloud_files, tmpdir, output_path, max_concurrency=max_concurrency)
-
- if os.path.exists(tmpdir):
- shutil.rmtree(tmpdir)
-
-
-def build_entwine(input_point_cloud_files, tmpdir, output_path, max_concurrency=8, reproject=None):
- kwargs = {
- 'threads': max_concurrency,
- 'tmpdir': tmpdir,
- 'all_inputs': "-i " + " ".join(map(double_quote, input_point_cloud_files)),
- 'outputdir': output_path,
- 'reproject': (" -r %s " % reproject) if reproject is not None else ""
- }
-
- system.run('entwine build --threads {threads} --tmp "{tmpdir}" {all_inputs} -o "{outputdir}" {reproject}'.format(**kwargs))
-
-def build_untwine(input_point_cloud_files, tmpdir, output_path, max_concurrency=8, rerun=False):
- kwargs = {
- # 'threads': max_concurrency,
- 'tmpdir': tmpdir,
- 'files': "--files " + " ".join(map(double_quote, input_point_cloud_files)),
- 'outputdir': output_path
- }
-
- # Run untwine
- system.run('untwine --temp_dir "{tmpdir}" {files} --output_dir "{outputdir}"'.format(**kwargs))
-
-def build_copc(input_point_cloud_files, output_file):
- if len(input_point_cloud_files) == 0:
- logger.ODM_WARNING("Cannot build COPC, no input files")
- return
-
- base_path, ext = os.path.splitext(output_file)
- tmpdir = io.related_file_path(base_path, postfix="-tmp")
- if os.path.exists(tmpdir):
- log.ODM_WARNING("Removing previous directory %s" % tmpdir)
- shutil.rmtree(tmpdir)
-
- kwargs = {
- 'tmpdir': tmpdir,
- 'files': "--files " + " ".join(map(double_quote, input_point_cloud_files)),
- 'output': output_file
- }
-
- # Run untwine
- system.run('untwine --temp_dir "{tmpdir}" {files} -o "{output}" --single_file'.format(**kwargs))
-
- if os.path.exists(tmpdir):
- shutil.rmtree(tmpdir)
\ No newline at end of file
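A hedged usage sketch of build() with assumed file names: it tries entwine first and falls back to untwine on failure, cleaning up its temp directory either way:

```python
build(["odm_georeferenced_model.laz", "extra_tile.laz"],
      "entwine_pointcloud", max_concurrency=4, rerun=True)
```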
diff --git a/o/ODM/ODM-2.8.7/opendm/gcp.py b/o/ODM/ODM-2.8.7/opendm/gcp.py
deleted file mode 100644
index 184a49eb..00000000
--- a/o/ODM/ODM-2.8.7/opendm/gcp.py
+++ /dev/null
@@ -1,212 +0,0 @@
-import glob
-import os
-from opendm import log
-from opendm import location
-from pyproj import CRS
-
-class GCPFile:
- def __init__(self, gcp_path):
- self.gcp_path = gcp_path
- self.entries = []
- self.raw_srs = ""
- self.srs = None
- self.read()
-
- def read(self):
- if self.exists():
- with open(self.gcp_path, 'r') as f:
- contents = f.read().strip()
-
- # Strip eventual BOM characters
- contents = contents.replace('\ufeff', '')
-
- lines = list(map(str.strip, contents.split('\n')))
- if lines:
- self.raw_srs = lines[0] # SRS
- self.srs = location.parse_srs_header(self.raw_srs)
-
- for line in lines[1:]:
- if line != "" and line[0] != "#":
- parts = line.split()
- if len(parts) >= 6:
- self.entries.append(line)
- else:
- log.ODM_WARNING("Malformed GCP line: %s" % line)
-
- def iter_entries(self):
- for entry in self.entries:
- yield self.parse_entry(entry)
-
- def parse_entry(self, entry):
- if entry:
- parts = entry.split()
- x, y, z, px, py, filename = parts[:6]
- extras = " ".join(parts[6:])
- return GCPEntry(float(x), float(y), float(z), float(px), float(py), filename, extras)
-
- def get_entry(self, n):
- if n < self.entries_count():
- return self.parse_entry(self.entries[n])
-
- def entries_count(self):
- return len(self.entries)
-
- def exists(self):
- return bool(self.gcp_path and os.path.exists(self.gcp_path))
-
- def make_resized_copy(self, gcp_file_output, ratio):
- """
- Creates a new resized GCP file from an existing GCP file. Any existing output file is overwritten.
- :param gcp_file_output output path of new GCP file
- :param ratio scale GCP coordinates by this value
- :return path to new GCP file
- """
- output = [self.raw_srs]
-
- for entry in self.iter_entries():
- entry.px *= ratio
- entry.py *= ratio
- output.append(str(entry))
-
- with open(gcp_file_output, 'w') as f:
- f.write('\n'.join(output) + '\n')
-
- return gcp_file_output
-
- def wgs84_utm_zone(self):
- """
- Finds the UTM zone that the first GCP point falls into
- :return utm zone string valid for a coordinates header
- """
- if self.entries_count() > 0:
- entry = self.get_entry(0)
- longlat = CRS.from_epsg("4326")
- lon, lat = location.transform2(self.srs, longlat, entry.x, entry.y)
- utm_zone, hemisphere = location.get_utm_zone_and_hemisphere_from(lon, lat)
- return "WGS84 UTM %s%s" % (utm_zone, hemisphere)
-
- def create_utm_copy(self, gcp_file_output, filenames=None, rejected_entries=None, include_extras=True):
- """
- Creates a new GCP file from an existing GCP file
- by optionally including only filenames and reprojecting each point to
- a UTM CRS. Rejected entries can be recorded by passing a list object to
- rejected_entries.
- """
- if os.path.exists(gcp_file_output):
- os.remove(gcp_file_output)
-
- output = [self.wgs84_utm_zone()]
- target_srs = location.parse_srs_header(output[0])
- transformer = location.transformer(self.srs, target_srs)
-
- for entry in self.iter_entries():
- if filenames is None or entry.filename in filenames:
- entry.x, entry.y, entry.z = transformer.TransformPoint(entry.x, entry.y, entry.z)
- if not include_extras:
- entry.extras = ''
- output.append(str(entry))
- elif isinstance(rejected_entries, list):
- rejected_entries.append(entry)
-
- with open(gcp_file_output, 'w') as f:
- f.write('\n'.join(output) + '\n')
-
- return gcp_file_output
-
- def make_filtered_copy(self, gcp_file_output, images_dir, min_images=3):
- """
- Creates a new GCP file from an existing GCP file that includes
- only the points referencing images present in the images_dir directory.
- If fewer than min_images images are referenced, no GCP copy is created.
- :return gcp_file_output if successful, None if no output file was created.
- """
- if not self.exists() or not os.path.exists(images_dir):
- return None
-
- if os.path.exists(gcp_file_output):
- os.remove(gcp_file_output)
-
- files = list(map(os.path.basename, glob.glob(os.path.join(images_dir, "*"))))
-
- output = [self.raw_srs]
- files_found = 0
-
- for entry in self.iter_entries():
- if entry.filename in files:
- output.append(str(entry))
- files_found += 1
-
- if files_found >= min_images:
- with open(gcp_file_output, 'w') as f:
- f.write('\n'.join(output) + '\n')
-
- return gcp_file_output
-
- def make_micmac_copy(self, output_dir, precisionxy=1, precisionz=1, utm_zone = None):
- """
- Convert this GCP file to a format compatible with MicMac.
- :param output_dir directory where to save the two MicMac GCP files. The directory must exist.
- :param utm_zone UTM zone to use for output coordinates (UTM string, PROJ4 or EPSG definition).
- If one is not specified, the nearest UTM zone will be selected.
- :param precisionxy horizontal precision of GCP measurements in meters.
- :param precisionz vertical precision of GCP measurements in meters.
- """
- if not os.path.isdir(output_dir):
- raise IOError("{} does not exist.".format(output_dir))
- if not isinstance(precisionxy, float) and not isinstance(precisionxy, int):
- raise AssertionError("precisionxy must be a number")
- if not isinstance(precisionz, float) and not isinstance(precisionz, int):
- raise AssertionError("precisionz must be a number")
-
- gcp_3d_file = os.path.join(output_dir, '3d_gcp.txt')
- gcp_2d_file = os.path.join(output_dir, '2d_gcp.txt')
-
- if os.path.exists(gcp_3d_file):
- os.remove(gcp_3d_file)
- if os.path.exists(gcp_2d_file):
- os.remove(gcp_2d_file)
-
- if utm_zone is None:
- utm_zone = self.wgs84_utm_zone()
-
- target_srs = location.parse_srs_header(utm_zone)
- transformer = location.transformer(self.srs, target_srs)
-
- gcps = {}
- for entry in self.iter_entries():
- utm_x, utm_y, utm_z = transformer.TransformPoint(entry.x, entry.y, entry.z)
- k = "{} {} {}".format(utm_x, utm_y, utm_z)
- if k not in gcps:
- gcps[k] = [entry]
- else:
- gcps[k].append(entry)
-
-
- with open(gcp_3d_file, 'w') as f3:
- with open(gcp_2d_file, 'w') as f2:
- gcp_n = 1
- for k in gcps:
- f3.write("GCP{} {} {} {}\n".format(gcp_n, k, precisionxy, precisionz))
-
- for entry in gcps[k]:
- f2.write("GCP{} {} {} {}\n".format(gcp_n, entry.filename, entry.px, entry.py))
-
- gcp_n += 1
-
- return (gcp_3d_file, gcp_2d_file)
-
-class GCPEntry:
- def __init__(self, x, y, z, px, py, filename, extras=""):
- self.x = x
- self.y = y
- self.z = z
- self.px = px
- self.py = py
- self.filename = filename
- self.extras = extras
-
- def __str__(self):
- return "{} {} {} {} {} {} {}".format(self.x, self.y, self.z,
- self.px, self.py,
- self.filename,
- self.extras).rstrip()
\ No newline at end of file
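The format GCPFile expects is one SRS header line followed by `x y z px py filename [extras]` rows. A minimal sketch with made-up coordinates (assuming the opendm package is importable):

```python
gcp_text = """WGS84 UTM 32N
386100.1 5045478.9 132.3 1024.0 768.0 DJI_0001.JPG gcp1
386120.7 5045500.2 131.8 2048.0 512.0 DJI_0002.JPG gcp2
"""
with open("gcp_list.txt", "w") as f:
    f.write(gcp_text)

gcp = GCPFile("gcp_list.txt")
print(gcp.entries_count())        # -> 2
print(gcp.get_entry(0).filename)  # -> DJI_0001.JPG
```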
diff --git a/o/ODM/ODM-2.8.7/opendm/geo.py b/o/ODM/ODM-2.8.7/opendm/geo.py
deleted file mode 100644
index 3e826f84..00000000
--- a/o/ODM/ODM-2.8.7/opendm/geo.py
+++ /dev/null
@@ -1,83 +0,0 @@
-import os
-import math
-from opendm import log
-from opendm import location
-from pyproj import CRS
-
-class GeoFile:
- def __init__(self, geo_path):
- self.geo_path = geo_path
- self.entries = {}
- self.srs = None
-
- with open(self.geo_path, 'r') as f:
- contents = f.read().strip()
-
- lines = list(map(str.strip, contents.split('\n')))
- if lines:
- self.raw_srs = lines[0] # SRS
- self.srs = location.parse_srs_header(self.raw_srs)
- longlat = CRS.from_epsg("4326")
-
- for line in lines[1:]:
- if line != "" and line[0] != "#":
- parts = line.split()
- if len(parts) >= 3:
- i = 3
- filename = parts[0]
- x, y = [float(p) for p in parts[1:3]]
- z = float(parts[3]) if len(parts) >= 4 else None
-
- # Always convert coordinates to WGS84
- if z is not None:
- x, y, z = location.transform3(self.srs, longlat, x, y, z)
- else:
- x, y = location.transform2(self.srs, longlat, x, y)
-
- yaw = pitch = roll = None
-
- if len(parts) >= 7:
- yaw, pitch, roll = [float(p) for p in parts[4:7]]
- if math.isnan(yaw) or math.isnan(pitch) or math.isnan(roll):
- yaw = pitch = roll = None
- i = 7
-
- horizontal_accuracy = vertical_accuracy = None
- if len(parts) >= 9:
- horizontal_accuracy, vertical_accuracy = [float(p) for p in parts[7:9]]
- i = 9
-
- extras = " ".join(parts[i:])
- self.entries[filename] = GeoEntry(filename, x, y, z,
- yaw, pitch, roll,
- horizontal_accuracy, vertical_accuracy,
- extras)
- else:
- log.ODM_WARNING("Malformed geo line: %s" % line)
-
- def get_entry(self, filename):
- return self.entries.get(filename)
-
-
-class GeoEntry:
- def __init__(self, filename, x, y, z, yaw=None, pitch=None, roll=None, horizontal_accuracy=None, vertical_accuracy=None, extras=None):
- self.filename = filename
- self.x = x
- self.y = y
- self.z = z
- self.yaw = yaw
- self.pitch = pitch
- self.roll = roll
- self.horizontal_accuracy = horizontal_accuracy
- self.vertical_accuracy = vertical_accuracy
- self.extras = extras
-
- def __str__(self):
- return "{} ({} {} {}) ({} {} {}) ({} {}) {}".format(self.filename,
- self.x, self.y, self.z,
- self.yaw, self.pitch, self.roll,
- self.horizontal_accuracy, self.vertical_accuracy,
- self.extras).rstrip()
-
- def position_string(self):
- return "{} {} {}".format(self.x, self.y, self.z)
diff --git a/o/ODM/ODM-2.8.7/opendm/get_image_size.py b/o/ODM/ODM-2.8.7/opendm/get_image_size.py
deleted file mode 100644
index 94b60a52..00000000
--- a/o/ODM/ODM-2.8.7/opendm/get_image_size.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from PIL import Image
-import cv2
-
-from opendm import log
-
-Image.MAX_IMAGE_PIXELS = None
-
-def get_image_size(file_path, fallback_on_error=True):
- """
- Return (width, height) for a given img file
- """
- try:
- with Image.open(file_path) as img:
- width, height = img.size
- except Exception as e:
- if fallback_on_error:
- log.ODM_WARNING("Cannot read %s with PIL, fallback to cv2: %s" % (file_path, str(e)))
- img = cv2.imread(file_path)
- width = img.shape[1]
- height = img.shape[0]
- else:
- raise e
-
- return (width, height)
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/opendm/gpu.py b/o/ODM/ODM-2.8.7/opendm/gpu.py
deleted file mode 100644
index c92cad12..00000000
--- a/o/ODM/ODM-2.8.7/opendm/gpu.py
+++ /dev/null
@@ -1,95 +0,0 @@
-import os
-import sys
-import shutil
-import ctypes
-from opendm import log
-from repoze.lru import lru_cache
-
-def gpu_disabled_by_user_env():
- return bool(os.environ.get('ODM_NO_GPU'))
-
-@lru_cache(maxsize=None)
-def has_popsift_and_can_handle_texsize(width, height):
- # We first check that we have the required compute capabilities
- # As we do not support compute capabilities less than 3.5
- try:
- compute_major, compute_minor = get_cuda_compute_version(0)
- if compute_major < 3 or (compute_major == 3 and compute_minor < 5):
- # Not supported
- log.ODM_WARNING("CUDA compute platform is not supported (detected: %s.%s but we need at least 3.5)" % (compute_major, compute_minor))
- return False
- except Exception as e:
- log.ODM_WARNING("Cannot use GPU for feature extraction: %s" % str(e))
- return False
-
- try:
- from opensfm import pypopsift
- fits = pypopsift.fits_texture(int(width * 1.02), int(height * 1.02))
- if not fits:
- log.ODM_WARNING("Image size (%sx%spx) would not fit in GPU memory, falling back to CPU" % (width, height))
- return fits
- except (ModuleNotFoundError, ImportError):
- return False
- except Exception as e:
- log.ODM_WARNING(str(e))
- return False
-
-@lru_cache(maxsize=None)
-def get_cuda_compute_version(device_id = 0):
- cuda_lib = "libcuda.so"
- if sys.platform == 'win32':
- cuda_lib = os.path.join(os.environ.get('SYSTEMROOT'), 'system32', 'nvcuda.dll')
- if not os.path.isfile(cuda_lib):
- cuda_lib = "nvcuda.dll"
-
- nvcuda = ctypes.cdll.LoadLibrary(cuda_lib)
-
- nvcuda.cuInit.argtypes = (ctypes.c_uint32, )
- nvcuda.cuInit.restype = ctypes.c_int32
-
- if nvcuda.cuInit(0) != 0:
- raise Exception("Cannot initialize CUDA")
-
- nvcuda.cuDeviceGetCount.argtypes = (ctypes.POINTER(ctypes.c_int32), )
- nvcuda.cuDeviceGetCount.restype = ctypes.c_int32
-
- device_count = ctypes.c_int32()
- if nvcuda.cuDeviceGetCount(ctypes.byref(device_count)) != 0:
- raise Exception("Cannot get device count")
-
- if device_count.value == 0:
- raise Exception("No devices")
-
- nvcuda.cuDeviceComputeCapability.argtypes = (ctypes.POINTER(ctypes.c_int32), ctypes.POINTER(ctypes.c_int32), ctypes.c_int32)
- nvcuda.cuDeviceComputeCapability.restype = ctypes.c_int32
- compute_major = ctypes.c_int32()
- compute_minor = ctypes.c_int32()
-
- if nvcuda.cuDeviceComputeCapability(ctypes.byref(compute_major), ctypes.byref(compute_minor), device_id) != 0:
- raise Exception("Cannot get CUDA compute version")
-
- return (compute_major.value, compute_minor.value)
-
-def has_gpu(args):
- if gpu_disabled_by_user_env():
- log.ODM_INFO("Disabling GPU features (ODM_NO_GPU is set)")
- return False
- if args.no_gpu:
- log.ODM_INFO("Disabling GPU features (--no-gpu is set)")
- return False
-
- if sys.platform == 'win32':
- nvcuda_path = os.path.join(os.environ.get('SYSTEMROOT'), 'system32', 'nvcuda.dll')
- if os.path.isfile(nvcuda_path):
- log.ODM_INFO("CUDA drivers detected")
- return True
- else:
- log.ODM_INFO("No CUDA drivers detected, using CPU")
- return False
- else:
- if shutil.which('nvidia-smi') is not None:
- log.ODM_INFO("nvidia-smi detected")
- return True
- else:
- log.ODM_INFO("nvidia-smi not found in PATH, using CPU")
- return False
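A hedged usage sketch tying the helpers above together; has_popsift_and_can_handle_texsize() needs a CUDA device with compute capability >= 3.5 plus the pypopsift module, and returns False otherwise:

```python
if not gpu_disabled_by_user_env() and has_popsift_and_can_handle_texsize(6000, 4000):
    print("Using popsift (GPU) feature extraction")
else:
    print("Falling back to CPU feature extraction")
```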
diff --git a/o/ODM/ODM-2.8.7/opendm/gsd.py b/o/ODM/ODM-2.8.7/opendm/gsd.py
deleted file mode 100644
index b954fde3..00000000
--- a/o/ODM/ODM-2.8.7/opendm/gsd.py
+++ /dev/null
@@ -1,176 +0,0 @@
-import os
-import json
-import numpy as np
-import math
-from repoze.lru import lru_cache
-from opendm import log
-from opendm.shots import get_origin
-
-def rounded_gsd(reconstruction_json, default_value=None, ndigits=0, ignore_gsd=False):
- """
- :param reconstruction_json path to OpenSfM's reconstruction.json
- :return GSD value rounded. If GSD cannot be computed, or ignore_gsd is set, it returns a default value.
- """
- if ignore_gsd:
- return default_value
-
- gsd = opensfm_reconstruction_average_gsd(reconstruction_json)
-
- if gsd is not None:
- return round(gsd, ndigits)
- else:
- return default_value
-
-
-def image_max_size(photos, target_resolution, reconstruction_json, gsd_error_estimate = 0.5, ignore_gsd=False, has_gcp=False):
- """
- :param photos images database
- :param target_resolution resolution the user wants to have in cm / pixel
- :param reconstruction_json path to OpenSfM's reconstruction.json
- :param gsd_error_estimate percentage of estimated error in the GSD calculation to set an upper bound on resolution.
- :param ignore_gsd if set to True, simply return the largest side of the largest image in the images database.
- :return A dimension in pixels calculated by taking the image_scale_factor and applying it to the size of the largest image.
- Returned value is never higher than the size of the largest side of the largest image.
- """
- max_width = 0
- max_height = 0
- if ignore_gsd:
- isf = 1.0
- else:
- isf = image_scale_factor(target_resolution, reconstruction_json, gsd_error_estimate, has_gcp=has_gcp)
-
- for p in photos:
- max_width = max(p.width, max_width)
- max_height = max(p.height, max_height)
-
- return int(math.ceil(max(max_width, max_height) * isf))
-
-def image_scale_factor(target_resolution, reconstruction_json, gsd_error_estimate = 0.5, has_gcp=False):
- """
- :param target_resolution resolution the user wants to have in cm / pixel
- :param reconstruction_json path to OpenSfM's reconstruction.json
- :param gsd_error_estimate percentage of estimated error in the GSD calculation to set an upper bound on resolution.
- :return A down-scale (<= 1) value to apply to images to achieve the target resolution by comparing the current GSD of the reconstruction.
- If a GSD cannot be computed, it just returns 1. Returned scale values are never higher than 1.
- """
- gsd = opensfm_reconstruction_average_gsd(reconstruction_json, use_all_shots=has_gcp)
-
- if gsd is not None and target_resolution > 0:
- gsd = gsd * (1 + gsd_error_estimate)
- return min(1, gsd / target_resolution)
- else:
- return 1
-
-
-def cap_resolution(resolution, reconstruction_json, gsd_error_estimate = 0.1, gsd_scaling = 1.0, ignore_gsd=False,
- ignore_resolution=False, has_gcp=False):
- """
- :param resolution resolution in cm / pixel
- :param reconstruction_json path to OpenSfM's reconstruction.json
- :param gsd_error_estimate percentage of estimated error in the GSD calculation to set an upper bound on resolution.
- :param gsd_scaling scaling of estimated GSD.
- :param ignore_gsd when set to True, forces the function to just return resolution.
- :param ignore_resolution when set to True, forces the function to return a value based on GSD.
- :return The max value between resolution and the GSD computed from the reconstruction.
- If a GSD cannot be computed, or ignore_gsd is set to True, it just returns resolution. Units are in cm / pixel.
- """
- if ignore_gsd:
- return resolution
-
- gsd = opensfm_reconstruction_average_gsd(reconstruction_json, use_all_shots=has_gcp or ignore_resolution)
-
- if gsd is not None:
- gsd = gsd * (1 - gsd_error_estimate) * gsd_scaling
- if gsd > resolution or ignore_resolution:
- log.ODM_WARNING('Maximum resolution set to {} * (GSD - {}%) '
- '({:.2f} cm / pixel, requested resolution was {:.2f} cm / pixel)'
- .format(gsd_scaling, gsd_error_estimate * 100, gsd, resolution))
- return gsd
- else:
- return resolution
- else:
- log.ODM_WARNING('Cannot calculate GSD, using requested resolution of {:.2f}'.format(resolution))
- return resolution
-
-
-@lru_cache(maxsize=None)
-def opensfm_reconstruction_average_gsd(reconstruction_json, use_all_shots=False):
- """
- Computes the average Ground Sampling Distance of an OpenSfM reconstruction.
- :param reconstruction_json path to OpenSfM's reconstruction.json
- :return Ground Sampling Distance value (cm / pixel) or None if
- a GSD estimate cannot be computed
- """
- if not os.path.isfile(reconstruction_json):
- raise IOError(reconstruction_json + " does not exist.")
-
- with open(reconstruction_json) as f:
- data = json.load(f)
-
- # Calculate median height from sparse reconstruction
- reconstruction = data[0]
- point_heights = []
-
- for pointId in reconstruction['points']:
- point = reconstruction['points'][pointId]
- point_heights.append(point['coordinates'][2])
-
- ground_height = np.median(point_heights)
-
- gsds = []
- for shotImage in reconstruction['shots']:
- shot = reconstruction['shots'][shotImage]
- if use_all_shots or shot.get('gps_dop', 999999) < 999999:
- camera = reconstruction['cameras'][shot['camera']]
- shot_origin = get_origin(shot)
- shot_height = shot_origin[2]
- focal_ratio = camera.get('focal', camera.get('focal_x'))
- if not focal_ratio:
- log.ODM_WARNING("Cannot parse focal values from %s. This is likely an unsupported camera model." % reconstruction_json)
- return None
-
- gsds.append(calculate_gsd_from_focal_ratio(focal_ratio,
- shot_height - ground_height,
- camera['width']))
-
- if len(gsds) > 0:
- mean = np.mean(gsds)
- if mean < 0:
- log.ODM_WARNING("Negative GSD estimated, this might indicate a flipped Z-axis.")
- return abs(mean)
-
- return None
-
-
-def calculate_gsd(sensor_width, flight_height, focal_length, image_width):
- """
- :param sensor_width in millimeters
- :param flight_height in meters
- :param focal_length in millimeters
- :param image_width in pixels
- :return Ground Sampling Distance
-
- >>> round(calculate_gsd(13.2, 100, 8.8, 5472), 2)
- 2.74
- >>> calculate_gsd(13.2, 100, 0, 2000)
- >>> calculate_gsd(13.2, 100, 8.8, 0)
- """
- if sensor_width != 0:
- return calculate_gsd_from_focal_ratio(focal_length / sensor_width,
- flight_height,
- image_width)
- else:
- return None
-
-
-def calculate_gsd_from_focal_ratio(focal_ratio, flight_height, image_width):
- """
- :param focal_ratio focal length (mm) / sensor_width (mm)
- :param flight_height in meters
- :param image_width in pixels
- :return Ground Sampling Distance
- """
- if focal_ratio == 0 or image_width == 0:
- return None
-
- return ((flight_height * 100) / image_width) / focal_ratio
\ No newline at end of file
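Arithmetic check of calculate_gsd_from_focal_ratio() using the doctest values from calculate_gsd() above (13.2 mm sensor, 100 m flight height, 8.8 mm focal length, 5472 px image width):

```python
focal_ratio = 8.8 / 13.2                  # ~0.667
gsd = ((100 * 100) / 5472) / focal_ratio  # cm / pixel
print(round(gsd, 2))                      # -> 2.74
```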
diff --git a/o/ODM/ODM-2.8.7/opendm/io.py b/o/ODM/ODM-2.8.7/opendm/io.py
deleted file mode 100644
index b8be1af0..00000000
--- a/o/ODM/ODM-2.8.7/opendm/io.py
+++ /dev/null
@@ -1,87 +0,0 @@
-import os
-import shutil, errno
-import json
-
-def absolute_path_file(path_file):
- return os.path.abspath(path_file)
-
-
-def extract_path_from_file(file):
- path_file = os.path.abspath(os.path.dirname(file))
- path, file = os.path.split(path_file)
- return path
-
-
-def join_paths(*args):
- return os.path.join(*args)
-
-
-def file_exists(path_file):
- return os.path.isfile(path_file)
-
-
-def dir_exists(dirname):
- return os.path.isdir(dirname)
-
-
-def copy(src, dst):
- try:
- shutil.copytree(src, dst)
- except OSError as e:
- if e.errno == errno.ENOTDIR:
- shutil.copy(src, dst)
- else: raise
-
-def rename_file(src, dst):
- try:
- os.rename(src, dst)
- return True
- except OSError as e:
- if e.errno == errno.ENOENT:
- return False
- else:
- raise
-
-
-# find a file in the root directory
-def find(filename, folder):
- for root, dirs, files in os.walk(folder):
- return '/'.join((root, filename)) if filename in files else None
-
-
-def related_file_path(input_file_path, prefix="", postfix="", replace_base=None):
- """
- For example: related_file_path("/path/to/file.ext", "a.", ".b")
- --> "/path/to/a.file.b.ext"
- """
- path, filename = os.path.split(input_file_path)
-
- # path = path/to
- # filename = file.ext
-
- basename, ext = os.path.splitext(filename)
- # basename = file
- # ext = .ext
-
- if replace_base is not None:
- basename = replace_base
-
- return os.path.join(path, "{}{}{}{}".format(prefix, basename, postfix, ext))
-
-def path_or_json_string_to_dict(string):
- if string == "":
- return {}
-
- if string.startswith("[") or string.startswith("{"):
- try:
- return json.loads(string)
- except:
- raise ValueError("{0} is not a valid JSON string.".format(string))
- elif file_exists(string):
- try:
- with open(string, 'r') as f:
- return json.loads(f.read())
- except:
- raise ValueError("{0} is not a valid JSON file.".format(string))
- else:
- raise ValueError("{0} is not a valid JSON file or string.".format(string))
diff --git a/o/ODM/ODM-2.8.7/opendm/location.py b/o/ODM/ODM-2.8.7/opendm/location.py
deleted file mode 100644
index 46e9b550..00000000
--- a/o/ODM/ODM-2.8.7/opendm/location.py
+++ /dev/null
@@ -1,158 +0,0 @@
-import math
-from opendm import log
-from pyproj import Proj, Transformer, CRS
-from osgeo import osr
-
-def extract_utm_coords(photos, images_path, output_coords_file):
- """
- Create a coordinate file containing the GPS positions of all cameras
- to be used later in the ODM toolchain for automatic georeferencing
- :param photos ([ODM_Photo]) list of photos
- :param images_path (str) path to dataset images
- :param output_coords_file (str) path to output coordinates file
- :return None
- """
- if len(photos) == 0:
- raise Exception("No input images, cannot create coordinates file of GPS positions")
-
- utm_zone = None
- hemisphere = None
- coords = []
- reference_photo = None
- for photo in photos:
- if photo.latitude is None or photo.longitude is None:
- log.ODM_WARNING("GPS position not available for %s" % photo.filename)
- continue
-
- if utm_zone is None:
- utm_zone, hemisphere = get_utm_zone_and_hemisphere_from(photo.longitude, photo.latitude)
-
- try:
- alt = photo.altitude if photo.altitude is not None else 0
- coord = convert_to_utm(photo.longitude, photo.latitude, alt, utm_zone, hemisphere)
- except:
- raise Exception("Failed to convert GPS position to UTM for %s" % photo.filename)
-
- coords.append(coord)
-
- if utm_zone is None:
- raise Exception("No images seem to have GPS information")
-
- # Calculate average
- dx = 0.0
- dy = 0.0
- num = float(len(coords))
- for coord in coords:
- dx += coord[0] / num
- dy += coord[1] / num
-
- dx = int(math.floor(dx))
- dy = int(math.floor(dy))
-
- # Open output file
- with open(output_coords_file, "w") as f:
- f.write("WGS84 UTM %s%s\n" % (utm_zone, hemisphere))
- f.write("%s %s\n" % (dx, dy))
- for coord in coords:
- f.write("%s %s %s\n" % (coord[0] - dx, coord[1] - dy, coord[2]))
-
-def transform2(from_srs, to_srs, x, y):
- return transformer(from_srs, to_srs).TransformPoint(x, y, 0)[:2]
-
-def transform3(from_srs, to_srs, x, y, z):
- return transformer(from_srs, to_srs).TransformPoint(x, y, z)
-
-def proj_srs_convert(srs):
- """
- Convert a Proj SRS object to osr SRS object
- """
- res = osr.SpatialReference()
- epsg = srs.to_epsg()
-
- if epsg:
- res.ImportFromEPSG(epsg)
- else:
- proj4 = srs.to_proj4()
- res.ImportFromProj4(proj4)
-
- res.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
-
- return res
-
-def transformer(from_srs, to_srs):
- src = proj_srs_convert(from_srs)
- tgt = proj_srs_convert(to_srs)
- return osr.CoordinateTransformation(src, tgt)
-
-def get_utm_zone_and_hemisphere_from(lon, lat):
- """
- Calculate the UTM zone and hemisphere that a longitude/latitude pair falls on
- :param lon longitude
- :param lat latitude
- :return [utm_zone, hemisphere]
- """
- utm_zone = (int(math.floor((lon + 180.0)/6.0)) % 60) + 1
- hemisphere = 'S' if lat < 0 else 'N'
- return [utm_zone, hemisphere]
-
-def convert_to_utm(lon, lat, alt, utm_zone, hemisphere):
- """
- Convert longitude, latitude and elevation values to UTM
- :param lon longitude
- :param lat latitude
- :param alt altitude
- :param utm_zone UTM zone number
- :param hemisphere one of 'N' or 'S'
- :return [x,y,z] UTM coordinates
- """
- if hemisphere == 'N':
- p = Proj(proj='utm',zone=utm_zone,ellps='WGS84', preserve_units=True)
- else:
- p = Proj(proj='utm',zone=utm_zone,ellps='WGS84', preserve_units=True, south=True)
-
- x,y = p(lon, lat)
- return [x, y, alt]
-
-def parse_srs_header(header):
- """
- Parse a header coming from GCP or coordinate file
- :param header (str) line
- :return Proj object
- """
- log.ODM_INFO('Parsing SRS header: %s' % header)
- header = header.strip()
- ref = header.split(' ')
-
- try:
- if ref[0] == 'WGS84' and ref[1] == 'UTM':
- datum = ref[0]
- utm_pole = (ref[2][len(ref[2]) - 1]).upper()
- utm_zone = int(ref[2][:len(ref[2]) - 1])
-
- proj_args = {
- 'zone': utm_zone,
- 'datum': datum
- }
-
- proj4 = '+proj=utm +zone={zone} +datum={datum} +units=m +no_defs=True'
- if utm_pole == 'S':
- proj4 += ' +south=True'
-
- srs = CRS.from_proj4(proj4.format(**proj_args))
- elif '+proj' in header:
- srs = CRS.from_proj4(header.strip('\''))
- elif header.lower().startswith("epsg:"):
- srs = CRS.from_epsg(header.lower()[5:])
- else:
- raise RuntimeError('Could not parse coordinates. Bad SRS supplied: %s' % header)
- except RuntimeError as e:
- log.ODM_ERROR('Uh oh! There seems to be a problem with your coordinates/GCP file.\n\n'
- 'The line: %s\n\n'
- 'Is not valid. Projections that are valid include:\n'
- ' - EPSG:*****\n'
- ' - WGS84 UTM **(N|S)\n'
- ' - Any valid proj4 string (for example, +proj=utm +zone=32 +north +ellps=WGS84 +datum=WGS84 +units=m +no_defs)\n\n'
- 'Modify your input and try again.' % header)
- raise RuntimeError(e)
-
- return srs
\ No newline at end of file
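Spot check of get_utm_zone_and_hemisphere_from() with a known coordinate (Rome, lon 12.49 / lat 41.89, falls in UTM zone 33, northern hemisphere):

```python
print(get_utm_zone_and_hemisphere_from(12.49, 41.89))  # -> [33, 'N']
```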
diff --git a/o/ODM/ODM-2.8.7/opendm/log.py b/o/ODM/ODM-2.8.7/opendm/log.py
deleted file mode 100644
index 01b3b118..00000000
--- a/o/ODM/ODM-2.8.7/opendm/log.py
+++ /dev/null
@@ -1,157 +0,0 @@
-import sys
-import threading
-import os
-import json
-import datetime
-import dateutil.parser
-import shutil
-import multiprocessing
-
-from opendm.loghelpers import double_quote, args_to_dict
-from opendm.vmem import virtual_memory
-
-if sys.platform == 'win32':
- # No colors on Windows, sorry!
- HEADER = ''
- OKBLUE = ''
- OKGREEN = ''
- DEFAULT = ''
- WARNING = ''
- FAIL = ''
- ENDC = ''
-else:
- HEADER = '\033[95m'
- OKBLUE = '\033[94m'
- OKGREEN = '\033[92m'
- DEFAULT = '\033[39m'
- WARNING = '\033[93m'
- FAIL = '\033[91m'
- ENDC = '\033[0m'
-
-lock = threading.Lock()
-
-def odm_version():
- with open(os.path.join(os.path.dirname(__file__), "..", "VERSION")) as f:
- return f.read().split("\n")[0].strip()
-
-def memory():
- mem = virtual_memory()
- return {
- 'total': round(mem.total / 1024 / 1024),
- 'available': round(mem.available / 1024 / 1024)
- }
-
-class ODMLogger:
- def __init__(self):
- self.show_debug = False
- self.json = None
- self.json_output_file = None
- self.start_time = datetime.datetime.now()
-
- def log(self, startc, msg, level_name):
- level = ("[" + level_name + "]").ljust(9)
- with lock:
- print("%s%s %s%s" % (startc, level, msg, ENDC))
- sys.stdout.flush()
- if self.json is not None:
- self.json['stages'][-1]['messages'].append({
- 'message': msg,
- 'type': level_name.lower()
- })
-
- def init_json_output(self, output_files, args):
- self.json_output_files = output_files
- self.json_output_file = output_files[0]
- self.json = {}
- self.json['odmVersion'] = odm_version()
- self.json['memory'] = memory()
- self.json['cpus'] = multiprocessing.cpu_count()
- self.json['images'] = -1
- self.json['options'] = args_to_dict(args)
- self.json['startTime'] = self.start_time.isoformat()
- self.json['stages'] = []
- self.json['processes'] = []
- self.json['success'] = False
-
- def log_json_stage_run(self, name, start_time):
- if self.json is not None:
- self.json['stages'].append({
- 'name': name,
- 'startTime': start_time.isoformat(),
- 'messages': [],
- })
-
- def log_json_images(self, count):
- if self.json is not None:
- self.json['images'] = count
-
- def log_json_stage_error(self, error, exit_code, stack_trace = ""):
- if self.json is not None:
- self.json['error'] = {
- 'code': exit_code,
- 'message': error
- }
- self.json['stackTrace'] = list(map(str.strip, stack_trace.split("\n")))
- self._log_json_end_time()
-
- def log_json_success(self):
- if self.json is not None:
- self.json['success'] = True
- self._log_json_end_time()
-
- def log_json_process(self, cmd, exit_code, output = []):
- if self.json is not None:
- d = {
- 'command': cmd,
- 'exitCode': exit_code,
- }
- if output:
- d['output'] = output
-
- self.json['processes'].append(d)
-
- def _log_json_end_time(self):
- if self.json is not None:
- end_time = datetime.datetime.now()
- self.json['endTime'] = end_time.isoformat()
- self.json['totalTime'] = round((end_time - self.start_time).total_seconds(), 2)
-
- if self.json['stages']:
- last_stage = self.json['stages'][-1]
- last_stage['endTime'] = end_time.isoformat()
- start_time = dateutil.parser.isoparse(last_stage['startTime'])
- last_stage['totalTime'] = round((end_time - start_time).total_seconds(), 2)
-
- def info(self, msg):
- self.log(DEFAULT, msg, "INFO")
-
- def warning(self, msg):
- self.log(WARNING, msg, "WARNING")
-
- def error(self, msg):
- self.log(FAIL, msg, "ERROR")
-
- def exception(self, msg):
- self.log(FAIL, msg, "EXCEPTION")
-
- def debug(self, msg):
- if self.show_debug:
- self.log(OKGREEN, msg, "DEBUG")
-
- def close(self):
- if self.json is not None and self.json_output_file is not None:
- try:
- with open(self.json_output_file, 'w') as f:
- f.write(json.dumps(self.json, indent=4))
- for f in self.json_output_files[1:]:
- shutil.copy(self.json_output_file, f)
- except Exception as e:
- print("Cannot write log.json: %s" % str(e))
-
-logger = ODMLogger()
-
-ODM_INFO = logger.info
-ODM_WARNING = logger.warning
-ODM_ERROR = logger.error
-ODM_EXCEPTION = logger.exception
-ODM_DEBUG = logger.debug
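A minimal usage sketch of the module-level aliases; messages also land in the JSON report when init_json_output() has been called:

```python
ODM_INFO("Processing started")
ODM_WARNING("Available memory: %s MB" % memory()['available'])
```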
diff --git a/o/ODM/ODM-2.8.7/opendm/loghelpers.py b/o/ODM/ODM-2.8.7/opendm/loghelpers.py
deleted file mode 100644
index 283816fa..00000000
--- a/o/ODM/ODM-2.8.7/opendm/loghelpers.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from shlex import _find_unsafe
-
-def double_quote(s):
- """Return a shell-escaped version of the string *s*."""
- if not s:
- return '""'
- if _find_unsafe(s) is None:
- return s
-
- # use double quotes, and prefix double quotes with a \
- # the string $"b is then quoted as "$\"b"
- return '"' + s.replace('"', '\\\"') + '"'
-
-def args_to_dict(args):
- args_dict = vars(args)
- result = {}
- for k in sorted(args_dict.keys()):
- # Skip _is_set keys
- if k.endswith("_is_set"):
- continue
-
- # Don't leak token
- if k == 'sm_cluster' and args_dict[k] is not None:
- result[k] = True
- else:
- result[k] = args_dict[k]
-
- return result
\ No newline at end of file
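double_quote() only quotes when shlex flags an unsafe character, escaping embedded double quotes; for example:

```python
print(double_quote("file.laz"))     # -> file.laz (no quoting needed)
print(double_quote("my file.laz"))  # -> "my file.laz"
print(double_quote('say "hi"'))     # -> "say \"hi\""
```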
diff --git a/o/ODM/ODM-2.8.7/opendm/mesh.py b/o/ODM/ODM-2.8.7/opendm/mesh.py
deleted file mode 100644
index f0c524a6..00000000
--- a/o/ODM/ODM-2.8.7/opendm/mesh.py
+++ /dev/null
@@ -1,216 +0,0 @@
-from __future__ import absolute_import
-import os, shutil, sys, struct, random, math, platform
-from opendm.dem import commands
-from opendm import system
-from opendm import log
-from opendm import context
-from opendm import concurrency
-from scipy import signal
-import numpy as np
-
-def create_25dmesh(inPointCloud, outMesh, dsm_radius=0.07, dsm_resolution=0.05, depth=8, samples=1, maxVertexCount=100000, verbose=False, available_cores=None, method='gridded', smooth_dsm=True):
- # Create DSM from point cloud
-
- # Create temporary directory
- mesh_directory = os.path.dirname(outMesh)
- tmp_directory = os.path.join(mesh_directory, 'tmp')
- if os.path.exists(tmp_directory):
- shutil.rmtree(tmp_directory)
- os.mkdir(tmp_directory)
- log.ODM_INFO('Created temporary directory: %s' % tmp_directory)
-
- radius_steps = [dsm_radius]
- for _ in range(2):
- radius_steps.append(radius_steps[-1] * 2) # 2 is arbitrary
-
- log.ODM_INFO('Creating DSM for 2.5D mesh')
-
- commands.create_dem(
- inPointCloud,
- 'mesh_dsm',
- output_type='max',
- radiuses=list(map(str, radius_steps)),
- gapfill=True,
- outdir=tmp_directory,
- resolution=dsm_resolution,
- verbose=verbose,
- max_workers=available_cores,
- apply_smoothing=smooth_dsm
- )
-
- if method == 'gridded':
- mesh = dem_to_mesh_gridded(os.path.join(tmp_directory, 'mesh_dsm.tif'), outMesh, maxVertexCount, verbose, maxConcurrency=max(1, available_cores))
- elif method == 'poisson':
- dsm_points = dem_to_points(os.path.join(tmp_directory, 'mesh_dsm.tif'), os.path.join(tmp_directory, 'dsm_points.ply'), verbose)
- mesh = screened_poisson_reconstruction(dsm_points, outMesh, depth=depth,
- samples=samples,
- maxVertexCount=maxVertexCount,
- threads=max(1, available_cores - 1), # poissonrecon can get stuck on some machines if --threads == all cores
- verbose=verbose)
- else:
- raise ValueError('Not a valid method: ' + method)
-
- # Cleanup tmp
- if os.path.exists(tmp_directory):
- shutil.rmtree(tmp_directory)
-
- return mesh
-
-
-def dem_to_points(inGeotiff, outPointCloud, verbose=False):
- log.ODM_INFO('Sampling points from DSM: %s' % inGeotiff)
-
- kwargs = {
- 'bin': context.dem2points_path,
- 'outfile': outPointCloud,
- 'infile': inGeotiff,
- 'verbose': '-verbose' if verbose else ''
- }
-
- system.run('"{bin}" -inputFile "{infile}" '
- '-outputFile "{outfile}" '
- '-skirtHeightThreshold 1.5 '
- '-skirtIncrements 0.2 '
- '-skirtHeightCap 100 '
- ' {verbose} '.format(**kwargs))
-
- return outPointCloud
-
-
-def dem_to_mesh_gridded(inGeotiff, outMesh, maxVertexCount, verbose=False, maxConcurrency=1):
- log.ODM_INFO('Creating mesh from DSM: %s' % inGeotiff)
-
- mesh_path, mesh_filename = os.path.split(outMesh)
- # mesh_path = path/to
- # mesh_filename = odm_mesh.ply
-
- basename, ext = os.path.splitext(mesh_filename)
- # basename = odm_mesh
- # ext = .ply
-
- outMeshDirty = os.path.join(mesh_path, "{}.dirty{}".format(basename, ext))
-
- # This should work without issues most of the time,
- # but just in case, we lower maxConcurrency and retry if it fails.
- while True:
- try:
- kwargs = {
- 'bin': context.dem2mesh_path,
- 'outfile': outMeshDirty,
- 'infile': inGeotiff,
- 'maxVertexCount': maxVertexCount,
- 'maxConcurrency': maxConcurrency,
- 'verbose': '-verbose' if verbose else ''
- }
- system.run('"{bin}" -inputFile "{infile}" '
- '-outputFile "{outfile}" '
- '-maxTileLength 2000 '
- '-maxVertexCount {maxVertexCount} '
- '-maxConcurrency {maxConcurrency} '
- '-edgeSwapThreshold 0.15 '
- ' {verbose} '.format(**kwargs))
- break
- except Exception as e:
- maxConcurrency = math.floor(maxConcurrency / 2)
- if maxConcurrency >= 1:
- log.ODM_WARNING("dem2mesh failed, retrying with lower concurrency (%s) in case this is a memory issue" % maxConcurrency)
- else:
- raise e
-
-
- # Cleanup and reduce vertex count if necessary
- # (as dem2mesh cannot guarantee that we'll have the target vertex count)
- cleanupArgs = {
- 'reconstructmesh': context.omvs_reconstructmesh_path,
- 'outfile': outMesh,
- 'infile': outMeshDirty,
- 'max_faces': maxVertexCount * 2
- }
-
- system.run('"{reconstructmesh}" -i "{infile}" '
- '-o "{outfile}" '
- '--remove-spikes 0 --remove-spurious 0 --smooth 0 '
- '--target-face-num {max_faces} -v 0'.format(**cleanupArgs))
-
- # Delete intermediate results
- os.remove(outMeshDirty)
-
- return outMesh
-
-
-def screened_poisson_reconstruction(inPointCloud, outMesh, depth = 8, samples = 1, maxVertexCount=100000, pointWeight=4, threads=context.num_cores, verbose=False):
-
- mesh_path, mesh_filename = os.path.split(outMesh)
- # mesh_path = path/to
- # mesh_filename = odm_mesh.ply
-
- basename, ext = os.path.splitext(mesh_filename)
- # basename = odm_mesh
- # ext = .ply
-
- outMeshDirty = os.path.join(mesh_path, "{}.dirty{}".format(basename, ext))
- if os.path.isfile(outMeshDirty):
- os.remove(outMeshDirty)
-
- # PoissonRecon has some kind of a race condition on ppc64le; running single-threaded helps
- if platform.machine() == 'ppc64le':
- log.ODM_WARNING("ppc64le platform detected, forcing single-threaded operation for PoissonRecon")
- threads = 1
-
- while True:
- poissonReconArgs = {
- 'bin': context.poisson_recon_path,
- 'outfile': outMeshDirty,
- 'infile': inPointCloud,
- 'depth': depth,
- 'samples': samples,
- 'pointWeight': pointWeight,
- 'threads': int(threads),
- 'verbose': '--verbose' if verbose else ''
- }
-
- # Run PoissonRecon
- try:
- system.run('"{bin}" --in "{infile}" '
- '--out "{outfile}" '
- '--depth {depth} '
- '--pointWeight {pointWeight} '
- '--samplesPerNode {samples} '
- '--threads {threads} '
- '--bType 2 '
- '--linearFit '
- '{verbose}'.format(**poissonReconArgs))
- except Exception as e:
- log.ODM_WARNING(str(e))
-
- if os.path.isfile(outMeshDirty):
- break # Done!
- else:
-
- # PoissonRecon will sometimes fail due to race conditions
- # on certain machines, especially on Windows
- if threads // 2 < 1:
- break
- else:
- log.ODM_WARNING("PoissonRecon failed with %s threads, let's retry with %s..." % (threads, threads // 2))
- threads //= 2
-
-
- # Cleanup and reduce vertex count if necessary
- cleanupArgs = {
- 'reconstructmesh': context.omvs_reconstructmesh_path,
- 'outfile': outMesh,
- 'infile':outMeshDirty,
- 'max_faces': maxVertexCount * 2
- }
-
- system.run('"{reconstructmesh}" -i "{infile}" '
- '-o "{outfile}" '
- '--remove-spikes 0 --remove-spurious 20 --smooth 0 '
- '--target-face-num {max_faces} -v 0'.format(**cleanupArgs))
-
- # Delete intermediate results
- os.remove(outMeshDirty)
-
- return outMesh
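
Both dem_to_mesh_gridded and screened_poisson_reconstruction wrap their external binary in the same degrade-and-retry loop: run, and on failure halve the concurrency until the command either succeeds or runs out of headroom. The pattern in isolation (run_once is a stand-in for the system.run call; names are illustrative):

    import math

    def run_with_fallback(run_once, concurrency):
        """Retry an external command, halving concurrency after each failure."""
        while True:
            try:
                run_once(concurrency)
                return concurrency
            except Exception:
                concurrency = math.floor(concurrency / 2)
                if concurrency < 1:
                    raise  # out of retries; surface the last error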
diff --git a/o/ODM/ODM-2.8.7/opendm/multispectral.py b/o/ODM/ODM-2.8.7/opendm/multispectral.py
deleted file mode 100644
index c10f0264..00000000
--- a/o/ODM/ODM-2.8.7/opendm/multispectral.py
+++ /dev/null
@@ -1,574 +0,0 @@
-import math
-import re
-import cv2
-import os
-from opendm import dls
-import numpy as np
-from opendm import log
-from opendm.concurrency import parallel_map
-from opensfm.io import imread
-
-from skimage import exposure
-from skimage.morphology import disk
-from skimage.filters import rank, gaussian
-
-# Loosely based on https://github.com/micasense/imageprocessing/blob/master/micasense/utils.py
-
-def dn_to_radiance(photo, image):
- """
- Convert Digital Number values to Radiance values
- :param photo: ODM_Photo
- :param image: numpy array containing image data
- :return: numpy array with radiance image values
- """
-
- image = image.astype("float32")
- if len(image.shape) != 3:
- raise ValueError("Image should have shape length of 3 (got: %s)" % len(image.shape))
-
- # Thermal (this should never happen, but just in case..)
- if photo.is_thermal():
- return image
-
- # All others
- a1, a2, a3 = photo.get_radiometric_calibration()
- dark_level = photo.get_dark_level()
-
- exposure_time = photo.exposure_time
- gain = photo.get_gain()
- photometric_exp = photo.get_photometric_exposure()
-
- if a1 is None and photometric_exp is None:
- log.ODM_WARNING("Cannot perform radiometric calibration, no FNumber/Exposure Time or Radiometric Calibration EXIF tags found in %s. Using Digital Number." % photo.filename)
- return image
-
- if a1 is None and photometric_exp is not None:
- a1 = photometric_exp
-
- V, x, y = vignette_map(photo)
- if x is None:
- x, y = np.meshgrid(np.arange(photo.width), np.arange(photo.height))
-
- if dark_level is not None:
- image -= dark_level
-
- # Normalize DN to 0 - 1.0
- bit_depth_max = photo.get_bit_depth_max()
- if bit_depth_max:
- image /= bit_depth_max
-
- if V is not None:
- # vignette correction
- V = np.repeat(V[:, :, np.newaxis], image.shape[2], axis=2)
- image *= V
-
- if exposure_time and a2 is not None and a3 is not None:
- # row gradient correction
- R = 1.0 / (1.0 + a2 * y / exposure_time - a3 * y)
- R = np.repeat(R[:, :, np.newaxis], image.shape[2], axis=2)
- image *= R
-
- # Floor any negative radiances to zero (can happen due to noise around blackLevel)
- if dark_level is not None:
- image[image < 0] = 0
-
- # apply the radiometric calibration - i.e. scale by the gain-exposure product and
- # multiply with the radiometric calibration coefficient
-
- if gain is not None and exposure_time is not None:
- image /= (gain * exposure_time)
-
- image *= a1
-
- return image
-
-def vignette_map(photo):
- x_vc, y_vc = photo.get_vignetting_center()
- polynomial = photo.get_vignetting_polynomial()
-
- if x_vc and polynomial:
- # append 1., so that we can call with numpy polyval
- polynomial.append(1.0)
- vignette_poly = np.array(polynomial)
-
- # perform vignette correction
- # get coordinate grid across image
- x, y = np.meshgrid(np.arange(photo.width), np.arange(photo.height))
-
- # meshgrid returns transposed arrays
- # x = x.T
- # y = y.T
-
- # compute matrix of distances from image center
- r = np.hypot((x - x_vc), (y - y_vc))
-
- # compute the vignette polynomial for each distance - we divide by the polynomial so that the
- # corrected image is image_corrected = image_original * vignetteCorrection
-
- vignette = 1.0 / np.polyval(vignette_poly, r)
- return vignette, x, y
-
- return None, None, None
-
-def dn_to_reflectance(photo, image, use_sun_sensor=True):
- radiance = dn_to_radiance(photo, image)
- irradiance = compute_irradiance(photo, use_sun_sensor=use_sun_sensor)
- return radiance * math.pi / irradiance
-
-def compute_irradiance(photo, use_sun_sensor=True):
- # Thermal (this should never happen, but just in case..)
- if photo.is_thermal():
- return 1.0
-
- # Some cameras (Micasense) store the value (nice! just return)
- hirradiance = photo.get_horizontal_irradiance()
- if hirradiance is not None:
- return hirradiance
-
- # TODO: support for calibration panels
-
- if use_sun_sensor and photo.get_sun_sensor():
- # Estimate it
- dls_orientation_vector = np.array([0,0,-1])
- sun_vector_ned, sensor_vector_ned, sun_sensor_angle, \
- solar_elevation, solar_azimuth = dls.compute_sun_angle([photo.latitude, photo.longitude],
- photo.get_dls_pose(),
- photo.get_utc_time(),
- dls_orientation_vector)
-
- angular_correction = dls.fresnel(sun_sensor_angle)
-
- # TODO: support for direct and scattered irradiance
-
- direct_to_diffuse_ratio = 6.0 # Assumption, clear skies
- spectral_irradiance = photo.get_sun_sensor()
-
- percent_diffuse = 1.0 / direct_to_diffuse_ratio
- sensor_irradiance = spectral_irradiance / angular_correction
-
- # Find direct irradiance in the plane normal to the sun
- untilted_direct_irr = sensor_irradiance / (percent_diffuse + np.cos(sun_sensor_angle))
- direct_irradiance = untilted_direct_irr
- scattered_irradiance = untilted_direct_irr * percent_diffuse
-
- # compute irradiance on the ground using the solar altitude angle
- horizontal_irradiance = direct_irradiance * np.sin(solar_elevation) + scattered_irradiance
- return horizontal_irradiance
- elif use_sun_sensor:
- log.ODM_WARNING("No sun sensor values found for %s" % photo.filename)
-
- return 1.0
-
-def get_photos_by_band(multi_camera, user_band_name):
- band_name = get_primary_band_name(multi_camera, user_band_name)
-
- for band in multi_camera:
- if band['name'] == band_name:
- return band['photos']
-
-
-def get_primary_band_name(multi_camera, user_band_name):
- if len(multi_camera) < 1:
- raise Exception("Invalid multi_camera list")
-
- # multi_camera is already sorted by band_index
- if user_band_name == "auto":
- return multi_camera[0]['name']
-
- for band in multi_camera:
- if band['name'].lower() == user_band_name.lower():
- return band['name']
-
- band_name_fallback = multi_camera[0]['name']
-
- log.ODM_WARNING("Cannot find band name \"%s\", will use \"%s\" instead" % (user_band_name, band_name_fallback))
- return band_name_fallback
-
-
-def compute_band_maps(multi_camera, primary_band):
- """
- Computes maps of:
- - { photo filename --> associated primary band photo } (s2p)
- - { primary band filename --> list of associated secondary band photos } (p2s)
- by looking at capture UUID, capture time or filenames as a fallback
- """
- band_name = get_primary_band_name(multi_camera, primary_band)
- primary_band_photos = None
- for band in multi_camera:
- if band['name'] == band_name:
- primary_band_photos = band['photos']
- break
-
- # Try using capture time as the grouping factor
- try:
- unique_id_map = {}
- s2p = {}
- p2s = {}
-
- for p in primary_band_photos:
- uuid = p.get_capture_id()
- if uuid is None:
- raise Exception("Cannot use capture time (no information in %s)" % p.filename)
-
- # Should be unique across primary band
- if unique_id_map.get(uuid) is not None:
- raise Exception("Unreliable UUID/capture time detected (duplicate)")
-
- unique_id_map[uuid] = p
-
- for band in multi_camera:
- photos = band['photos']
-
- for p in photos:
- uuid = p.get_capture_id()
- if uuid is None:
- raise Exception("Cannot use UUID/capture time (no information in %s)" % p.filename)
-
- # Should match the primary band
- if unique_id_map.get(uuid) is None:
- raise Exception("Unreliable UUID/capture time detected (no primary band match)")
-
- s2p[p.filename] = unique_id_map[uuid]
-
- if band['name'] != band_name:
- p2s.setdefault(unique_id_map[uuid].filename, []).append(p)
-
- return s2p, p2s
- except Exception as e:
- # Fallback on filename conventions
- log.ODM_WARNING("%s, will use filenames instead" % str(e))
-
- filename_map = {}
- s2p = {}
- p2s = {}
- file_regex = re.compile(r"^(.+)[-_]\w+(\.[A-Za-z]{3,4})$")
-
- for p in primary_band_photos:
- filename_without_band = re.sub(file_regex, "\\1\\2", p.filename)
-
- # Quick check
- if filename_without_band == p.filename:
- raise Exception("Cannot match bands by filename on %s, make sure to name your files [filename]_band[.ext] uniformly." % p.filename)
-
- filename_map[filename_without_band] = p
-
- for band in multi_camera:
- photos = band['photos']
-
- for p in photos:
- filename_without_band = re.sub(file_regex, "\\1\\2", p.filename)
-
- # Quick check
- if filename_without_band == p.filename:
- raise Exception("Cannot match bands by filename on %s, make sure to name your files [filename]_band[.ext] uniformly." % p.filename)
-
- s2p[p.filename] = filename_map[filename_without_band]
-
- if band['name'] != band_name:
- p2s.setdefault(filename_map[filename_without_band].filename, []).append(p)
-
- return s2p, p2s
-
-def compute_alignment_matrices(multi_camera, primary_band_name, images_path, s2p, p2s, max_concurrency=1, max_samples=30):
- log.ODM_INFO("Computing band alignment")
-
- alignment_info = {}
-
- # For each secondary band
- for band in multi_camera:
- if band['name'] != primary_band_name:
- matrices = []
-
- def parallel_compute_homography(p):
- try:
- if len(matrices) >= max_samples:
- # log.ODM_INFO("Got enough samples for %s (%s)" % (band['name'], max_samples))
- return
-
- # Find good matrix candidates for alignment
-
- primary_band_photo = s2p.get(p['filename'])
- if primary_band_photo is None:
- log.ODM_WARNING("Cannot find primary band photo for %s" % p['filename'])
- return
-
- warp_matrix, dimension, algo = compute_homography(os.path.join(images_path, p['filename']),
- os.path.join(images_path, primary_band_photo.filename))
-
- if warp_matrix is not None:
- log.ODM_INFO("%s --> %s good match" % (p['filename'], primary_band_photo.filename))
-
- matrices.append({
- 'warp_matrix': warp_matrix,
- 'eigvals': np.linalg.eigvals(warp_matrix),
- 'dimension': dimension,
- 'algo': algo
- })
- else:
- log.ODM_INFO("%s --> %s cannot be matched" % (p['filename'], primary_band_photo.filename))
- except Exception as e:
- log.ODM_WARNING("Failed to compute homography for %s: %s" % (p['filename'], str(e)))
-
- parallel_map(parallel_compute_homography, [{'filename': p.filename} for p in band['photos']], max_concurrency, single_thread_fallback=False)
-
- # Find the matrix that has the most common eigvals
- # among all matrices. That should be the "best" alignment.
- for m1 in matrices:
- acc = np.array([0.0,0.0,0.0])
- e = m1['eigvals']
-
- for m2 in matrices:
- acc += abs(e - m2['eigvals'])
-
- m1['score'] = acc.sum()
-
- # Sort
- matrices.sort(key=lambda x: x['score'], reverse=False)
-
- if len(matrices) > 0:
- alignment_info[band['name']] = matrices[0]
- log.ODM_INFO("%s band will be aligned using warp matrix %s (score: %s)" % (band['name'], matrices[0]['warp_matrix'], matrices[0]['score']))
- else:
- log.ODM_WARNING("Cannot find alignment matrix for band %s, The band might end up misaligned!" % band['name'])
-
- return alignment_info
-
-def compute_homography(image_filename, align_image_filename):
- try:
- # Convert images to grayscale if needed
- image = imread(image_filename, unchanged=True, anydepth=True)
- if image.shape[2] == 3:
- image_gray = to_8bit(cv2.cvtColor(image, cv2.COLOR_BGR2GRAY))
- else:
- image_gray = to_8bit(image[:,:,0])
-
- max_dim = max(image_gray.shape)
- if max_dim <= 320:
- log.ODM_WARNING("Small image for band alignment (%sx%s), this might be tough to compute." % (image_gray.shape[1], image_gray.shape[0]))
-
- align_image = imread(align_image_filename, unchanged=True, anydepth=True)
- if align_image.shape[2] == 3:
- align_image_gray = to_8bit(cv2.cvtColor(align_image, cv2.COLOR_BGR2GRAY))
- else:
- align_image_gray = to_8bit(align_image[:,:,0])
-
- def compute_using(algorithm):
- try:
- h = algorithm(image_gray, align_image_gray)
- except Exception as e:
- log.ODM_WARNING("Cannot compute homography: %s" % str(e))
- return None, (None, None)
-
- if h is None:
- return None, (None, None)
-
- det = np.linalg.det(h)
-
- # Check #1 homography's determinant will not be close to zero
- if abs(det) < 0.25:
- return None, (None, None)
-
- # Check #2 the ratio of the first-to-last singular value is sane (not too high)
- svd = np.linalg.svd(h, compute_uv=False)
- if svd[-1] == 0:
- return None, (None, None)
-
- ratio = svd[0] / svd[-1]
- if ratio > 100000:
- return None, (None, None)
-
- return h, (align_image_gray.shape[1], align_image_gray.shape[0])
-
- warp_matrix = None
- dimension = None
- algo = None
-
- if max_dim > 320:
- algo = 'feat'
- result = compute_using(find_features_homography)
-
- if result[0] is None:
- algo = 'ecc'
- log.ODM_INFO("Can't use features matching, will use ECC (this might take a bit)")
- result = compute_using(find_ecc_homography)
- if result[0] is None:
- algo = None
-
- else: # ECC only for low resolution images
- algo = 'ecc'
- log.ODM_INFO("Using ECC (this might take a bit)")
- result = compute_using(find_ecc_homography)
- if result[0] is None:
- algo = None
-
- warp_matrix, dimension = result
- return warp_matrix, dimension, algo
-
- except Exception as e:
- log.ODM_WARNING("Compute homography: %s" % str(e))
- return None, (None, None), None
-
-def find_ecc_homography(image_gray, align_image_gray, number_of_iterations=1000, termination_eps=1e-8, start_eps=1e-4):
- pyramid_levels = 0
- h,w = image_gray.shape
- min_dim = min(h, w)
-
- while min_dim > 300:
- min_dim /= 2.0
- pyramid_levels += 1
-
- log.ODM_INFO("Pyramid levels: %s" % pyramid_levels)
-
- # Quick check on size
- if align_image_gray.shape[0] != image_gray.shape[0]:
- align_image_gray = to_8bit(align_image_gray)
- image_gray = to_8bit(image_gray)
-
- fx = align_image_gray.shape[1]/image_gray.shape[1]
- fy = align_image_gray.shape[0]/image_gray.shape[0]
-
- image_gray = cv2.resize(image_gray, None,
- fx=fx,
- fy=fy,
- interpolation=(cv2.INTER_AREA if (fx < 1.0 and fy < 1.0) else cv2.INTER_LANCZOS4))
-
- # Build pyramids
- image_gray_pyr = [image_gray]
- align_image_pyr = [align_image_gray]
-
- for level in range(pyramid_levels):
- image_gray_pyr[0] = to_8bit(image_gray_pyr[0], force_normalize=True)
- image_gray_pyr.insert(0, cv2.resize(image_gray_pyr[0], None, fx=1/2, fy=1/2,
- interpolation=cv2.INTER_AREA))
- align_image_pyr[0] = to_8bit(align_image_pyr[0], force_normalize=True)
- align_image_pyr.insert(0, cv2.resize(align_image_pyr[0], None, fx=1/2, fy=1/2,
- interpolation=cv2.INTER_AREA))
-
- # Define the motion model, scale the initial warp matrix to smallest level
- warp_matrix = np.eye(3, 3, dtype=np.float32)
- warp_matrix = warp_matrix * np.array([[1,1,2],[1,1,2],[0.5,0.5,1]], dtype=np.float32)**(1-(pyramid_levels+1))
-
- for level in range(pyramid_levels+1):
- ig = gradient(gaussian(image_gray_pyr[level]))
- aig = gradient(gaussian(align_image_pyr[level]))
-
- if level == pyramid_levels and pyramid_levels == 0:
- eps = termination_eps
- else:
- eps = start_eps - ((start_eps - termination_eps) / (pyramid_levels)) * level
-
- # Define termination criteria
- criteria = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT,
- number_of_iterations, eps)
-
- try:
- log.ODM_INFO("Computing ECC pyramid level %s" % level)
- _, warp_matrix = cv2.findTransformECC(ig, aig, warp_matrix, cv2.MOTION_HOMOGRAPHY, criteria, inputMask=None, gaussFiltSize=9)
- except Exception as e:
- if level != pyramid_levels:
- log.ODM_INFO("Could not compute ECC warp_matrix at pyramid level %s, resetting matrix" % level)
- warp_matrix = np.eye(3, 3, dtype=np.float32)
- warp_matrix = warp_matrix * np.array([[1,1,2],[1,1,2],[0.5,0.5,1]], dtype=np.float32)**(1-(pyramid_levels+1))
- else:
- raise e
-
- if level != pyramid_levels:
- warp_matrix = warp_matrix * np.array([[1,1,2],[1,1,2],[0.5,0.5,1]], dtype=np.float32)
-
- return warp_matrix
-
-
-def find_features_homography(image_gray, align_image_gray, feature_retention=0.7, min_match_count=10):
-
- # Detect SIFT features and compute descriptors.
- detector = cv2.SIFT_create(edgeThreshold=10, contrastThreshold=0.1)
- kp_image, desc_image = detector.detectAndCompute(image_gray, None)
- kp_align_image, desc_align_image = detector.detectAndCompute(align_image_gray, None)
-
- # Match
- FLANN_INDEX_KDTREE = 1
- index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)
- search_params = dict(checks=50)
-
- flann = cv2.FlannBasedMatcher(index_params, search_params)
- try:
- matches = flann.knnMatch(desc_image, desc_align_image, k=2)
- except Exception:
- return None
-
- # Filter good matches following Lowe's ratio test
- good_matches = []
- for m, n in matches:
- if m.distance < feature_retention * n.distance:
- good_matches.append(m)
-
- matches = good_matches
-
- if len(matches) < min_match_count:
- return None
-
- # Debug
- # imMatches = cv2.drawMatches(im1, kp_image, im2, kp_align_image, matches, None)
- # cv2.imwrite("matches.jpg", imMatches)
-
- # Extract location of good matches
- points_image = np.zeros((len(matches), 2), dtype=np.float32)
- points_align_image = np.zeros((len(matches), 2), dtype=np.float32)
-
- for i, match in enumerate(matches):
- points_image[i, :] = kp_image[match.queryIdx].pt
- points_align_image[i, :] = kp_align_image[match.trainIdx].pt
-
- # Find homography
- h, _ = cv2.findHomography(points_image, points_align_image, cv2.RANSAC)
- return h
-
-def gradient(im, ksize=5):
- im = local_normalize(im)
- grad_x = cv2.Sobel(im,cv2.CV_32F,1,0,ksize=ksize)
- grad_y = cv2.Sobel(im,cv2.CV_32F,0,1,ksize=ksize)
- grad = cv2.addWeighted(np.absolute(grad_x), 0.5, np.absolute(grad_y), 0.5, 0)
- return grad
-
-def local_normalize(im):
- height, _ = im.shape
- disksize = int(height/5)
- if disksize % 2 == 0:
- disksize = disksize + 1
- selem = disk(disksize)
- im = rank.equalize(im, selem=selem)
- return im
-
-
-def align_image(image, warp_matrix, dimension):
- if warp_matrix.shape == (3, 3):
- return cv2.warpPerspective(image, warp_matrix, dimension)
- else:
- return cv2.warpAffine(image, warp_matrix, dimension)
-
-
-def to_8bit(image, force_normalize=False):
- if not force_normalize and image.dtype == np.uint8:
- return image
-
- # Convert to 8bit
- try:
- data_range = np.iinfo(image.dtype)
- min_value = 0
- value_range = float(data_range.max) - float(data_range.min)
- except ValueError:
- # For floats use the actual range of the image values
- min_value = float(image.min())
- value_range = float(image.max()) - min_value
-
- image = image.astype(np.float32)
- image -= min_value
- image *= 255.0 / value_range
- np.around(image, out=image)
- image[image > 255] = 255
- image[image < 0] = 0
- image = image.astype(np.uint8)
-
- return image
-
-
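
For reference, the vignette model used by vignette_map() boils down to evaluating a polynomial of each pixel's distance from the vignetting center and dividing it out. A self-contained numpy sketch (the center and coefficients are made-up; real values come from the photo's EXIF tags):

    import numpy as np

    h, w = 960, 1280
    x_vc, y_vc = w / 2.0, h / 2.0       # hypothetical vignetting center
    poly = [1e-12, -1e-9, 1e-6, 1.0]    # hypothetical coefficients, trailing 1.0 appended

    x, y = np.meshgrid(np.arange(w), np.arange(h))
    r = np.hypot(x - x_vc, y - y_vc)    # distance of each pixel from the center
    V = 1.0 / np.polyval(poly, r)       # same correction as vignette_map()

    image = np.random.rand(h, w).astype(np.float32)
    corrected = image * V               # image_corrected = image_original * V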
diff --git a/o/ODM/ODM-2.8.7/opendm/nvm.py b/o/ODM/ODM-2.8.7/opendm/nvm.py
deleted file mode 100644
index d3c07d99..00000000
--- a/o/ODM/ODM-2.8.7/opendm/nvm.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import os
-from opendm import log
-
-def replace_nvm_images(src_nvm_file, img_map, dst_nvm_file):
- """
- Create a new NVM file from an existing NVM file
- replacing the image references based on img_map
- where img_map is a dict { "old_image" --> "new_image" } (filename only).
- The function does not write the points information (they are discarded)
- """
-
- with open(src_nvm_file) as f:
- lines = list(map(str.strip, f.read().split("\n")))
-
- # Quick check
- if len(lines) < 3 or lines[0] != "NVM_V3" or lines[1].strip() != "":
- raise Exception("%s does not seem to be a valid NVM file" % src_nvm_file)
-
- num_images = int(lines[2])
- entries = []
-
- for l in lines[3:3+num_images]:
- image_path, *p = l.split(" ")
-
- dir_name = os.path.dirname(image_path)
- file_name = os.path.basename(image_path)
-
- new_filename = img_map.get(file_name)
- if new_filename is not None:
- entries.append("%s %s" % (os.path.join(dir_name, new_filename), " ".join(p)))
- else:
- log.ODM_WARNING("Cannot find %s in image map for %s" % (file_name, dst_nvm_file))
-
- if num_images != len(entries):
- raise Exception("Cannot write %s, not all band images have been matched" % dst_nvm_file)
-
- with open(dst_nvm_file, "w") as f:
- f.write("NVM_V3\n\n%s\n" % len(entries))
- f.write("\n".join(entries))
- f.write("\n\n0\n0\n\n0")
-
\ No newline at end of file
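
A usage sketch for replace_nvm_images (paths and filenames are invented; within ODM this rewrite lets a reconstruction computed on the primary band be reused for secondary bands):

    from opendm.nvm import replace_nvm_images

    img_map = {
        "IMG_0001_RGB.TIF": "IMG_0001_NIR.TIF",
        "IMG_0002_RGB.TIF": "IMG_0002_NIR.TIF",
    }
    replace_nvm_images("project/reconstruction.nvm", img_map,
                       "project/reconstruction_nir.nvm")
    # Raises if any camera entry has no mapping, since a partially
    # rewritten NVM would leave bands unmatched downstream.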
diff --git a/o/ODM/ODM-2.8.7/opendm/ogctiles.py b/o/ODM/ODM-2.8.7/opendm/ogctiles.py
deleted file mode 100644
index de408981..00000000
--- a/o/ODM/ODM-2.8.7/opendm/ogctiles.py
+++ /dev/null
@@ -1,135 +0,0 @@
-import os
-import sys
-import shutil
-import json
-import math
-from opendm.utils import double_quote
-from opendm import io
-from opendm import log
-from opendm import system
-from opendm.entwine import build_entwine
-import fiona
-from shapely.geometry import shape
-
-def build_textured_model(input_obj, output_path, reference_lla = None, model_bounds_file=None, rerun=False):
- if not os.path.isfile(input_obj):
- log.ODM_WARNING("No input OBJ file to process")
- return
-
- if rerun and io.dir_exists(output_path):
- log.ODM_WARNING("Removing previous 3D tiles directory: %s" % output_path)
- shutil.rmtree(output_path)
-
- log.ODM_INFO("Generating OGC 3D Tiles textured model")
- lat = lon = alt = 0
-
- # Read reference_lla.json (if provided)
- if reference_lla is not None and os.path.isfile(reference_lla):
- try:
- with open(reference_lla) as f:
- reference_lla = json.loads(f.read())
- lat = reference_lla['latitude']
- lon = reference_lla['longitude']
- alt = reference_lla['altitude']
- except Exception as e:
- log.ODM_WARNING("Cannot read %s: %s" % (reference_lla, str(e)))
-
- # Read model bounds (if provided)
- divisions = 1 # default
- DIV_THRESHOLD = 10000 # m^2 (this is somewhat arbitrary)
-
- if model_bounds_file is not None and os.path.isfile(model_bounds_file):
- try:
- with fiona.open(model_bounds_file, 'r') as f:
- if len(f) == 1:
- poly = shape(f[1]['geometry'])
- area = poly.area
- log.ODM_INFO("Approximate area: %s m^2" % round(area, 2))
-
- if area < DIV_THRESHOLD:
- divisions = 0
- else:
- divisions = math.ceil(math.log((area / DIV_THRESHOLD), 4))
- else:
- log.ODM_WARNING("Invalid boundary file: %s" % model_bounds_file)
- except Exception as e:
- log.ODM_WARNING("Cannot read %s: %s" % (model_bounds_file, str(e)))
-
- try:
- kwargs = {
- 'input': input_obj,
- 'output': output_path,
- 'divisions': divisions,
- 'lat': lat,
- 'lon': lon,
- 'alt': alt,
- }
- system.run('Obj2Tiles "{input}" "{output}" --divisions {divisions} '.format(**kwargs))
-
- except Exception as e:
- log.ODM_WARNING("Cannot build 3D tiles textured model: %s" % str(e))
-
-def build_pointcloud(input_pointcloud, output_path, max_concurrency, rerun=False):
- if not os.path.isfile(input_pointcloud):
- log.ODM_WARNING("No input point cloud file to process")
- return
-
- if rerun and io.dir_exists(output_path):
- log.ODM_WARNING("Removing previous 3D tiles directory: %s" % output_path)
- shutil.rmtree(output_path)
-
- log.ODM_INFO("Generating OGC 3D Tiles point cloud")
-
- try:
- if not os.path.isdir(output_path):
- os.mkdir(output_path)
-
- tmpdir = os.path.join(output_path, "tmp")
- entwine_output = os.path.join(output_path, "entwine")
-
- build_entwine([input_pointcloud], tmpdir, entwine_output, max_concurrency, "EPSG:4978")
-
- kwargs = {
- 'input': entwine_output,
- 'output': output_path,
- }
- system.run('entwine convert -i "{input}" -o "{output}"'.format(**kwargs))
-
- for d in [tmpdir, entwine_output]:
- if os.path.isdir(d):
- shutil.rmtree(d)
- except Exception as e:
- log.ODM_WARNING("Cannot build 3D tiles point cloud: %s" % str(e))
-
-
-def build_3dtiles(args, tree, reconstruction, rerun=False):
- tiles_output_path = tree.ogc_tiles
- model_output_path = os.path.join(tiles_output_path, "model")
- pointcloud_output_path = os.path.join(tiles_output_path, "pointcloud")
-
- if rerun and os.path.exists(tiles_output_path):
- shutil.rmtree(tiles_output_path)
-
- if not os.path.isdir(tiles_output_path):
- os.mkdir(tiles_output_path)
-
- # Model
-
- if not os.path.isdir(model_output_path) or rerun:
- reference_lla = os.path.join(tree.opensfm, "reference_lla.json")
- model_bounds_file = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')
-
- input_obj = os.path.join(tree.odm_texturing, tree.odm_textured_model_obj)
- if not os.path.isfile(input_obj):
- input_obj = os.path.join(tree.odm_25dtexturing, tree.odm_textured_model_obj)
-
- build_textured_model(input_obj, model_output_path, reference_lla, model_bounds_file, rerun)
- else:
- log.ODM_WARNING("OGC 3D Tiles model %s already generated" % model_output_path)
-
- # Point cloud
-
- if not os.path.isdir(pointcloud_output_path) or rerun:
- build_pointcloud(tree.odm_georeferencing_model_laz, pointcloud_output_path, args.max_concurrency, rerun)
- else:
- log.ODM_WARNING("OGC 3D Tiles model %s already generated" % model_output_path)
\ No newline at end of file
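
The division count passed to Obj2Tiles grows logarithmically: zero below the 10,000 m² threshold, then one extra level for every factor-of-4 increase in area. A quick check of the formula in isolation:

    import math

    DIV_THRESHOLD = 10000  # m^2, same constant as above

    def divisions_for(area):
        if area < DIV_THRESHOLD:
            return 0
        return math.ceil(math.log(area / DIV_THRESHOLD, 4))

    divisions_for(5000)     # 0 (below threshold)
    divisions_for(40000)    # 1 (4x threshold)
    divisions_for(200000)   # 3 (20x threshold; ceil(log4(20)) = 3)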
diff --git a/o/ODM/ODM-2.8.7/opendm/orthophoto.py b/o/ODM/ODM-2.8.7/opendm/orthophoto.py
deleted file mode 100644
index d2d30bbc..00000000
--- a/o/ODM/ODM-2.8.7/opendm/orthophoto.py
+++ /dev/null
@@ -1,329 +0,0 @@
-import os
-from opendm import log
-from opendm import system
-from opendm.cropper import Cropper
-from opendm.concurrency import get_max_memory
-import math
-import numpy as np
-import rasterio
-import fiona
-from edt import edt
-from rasterio.transform import Affine, rowcol
-from rasterio.mask import mask
-from opendm import io
-from opendm.tiles.tiler import generate_orthophoto_tiles
-from opendm.cogeo import convert_to_cogeo
-from osgeo import gdal
-
-
-def get_orthophoto_vars(args):
- return {
- 'TILED': 'NO' if args.orthophoto_no_tiled else 'YES',
- 'COMPRESS': args.orthophoto_compression,
- 'PREDICTOR': '2' if args.orthophoto_compression in ['LZW', 'DEFLATE'] else '1',
- 'BIGTIFF': 'IF_SAFER',
- 'BLOCKXSIZE': 512,
- 'BLOCKYSIZE': 512,
- 'NUM_THREADS': args.max_concurrency
- }
-
-def build_overviews(orthophoto_file):
- log.ODM_INFO("Building Overviews")
- kwargs = {'orthophoto': orthophoto_file}
-
- # Run gdaladdo
- system.run('gdaladdo -r average '
- '--config BIGTIFF_OVERVIEW IF_SAFER '
- '--config COMPRESS_OVERVIEW JPEG '
- '{orthophoto} 2 4 8 16'.format(**kwargs))
-
-def generate_png(orthophoto_file, output_file=None, outsize=None):
- if output_file is None:
- base, ext = os.path.splitext(orthophoto_file)
- output_file = base + '.png'
-
- # See if we need to select top three bands
- bandparam = ""
-
- gtif = gdal.Open(orthophoto_file)
- if gtif.RasterCount > 4:
- bands = []
- for idx in range(1, gtif.RasterCount+1):
- bands.append(gtif.GetRasterBand(idx).GetColorInterpretation())
- bands = dict(zip(bands, range(1, len(bands)+1)))
-
- try:
- red = bands.get(gdal.GCI_RedBand)
- green = bands.get(gdal.GCI_GreenBand)
- blue = bands.get(gdal.GCI_BlueBand)
- bandparam = "-b %s -b %s -b %s -a_nodata 0" % (red, green, blue)
- except Exception:
- bandparam = "-b 1 -b 2 -b 3 -a_nodata 0"
- gtif = None
-
- osparam = ""
- if outsize is not None:
- osparam = "-outsize %s 0" % outsize
-
- system.run('gdal_translate -of png "%s" "%s" %s %s '
- '--config GDAL_CACHEMAX %s%% ' % (orthophoto_file, output_file, osparam, bandparam, get_max_memory()))
-
-def generate_kmz(orthophoto_file, output_file=None, outsize=None):
- if output_file is None:
- base, ext = os.path.splitext(orthophoto_file)
- output_file = base + '.kmz'
-
- # See if we need to select top three bands
- bandparam = ""
- gtif = gdal.Open(orthophoto_file)
- if gtif.RasterCount > 4:
- bandparam = "-b 1 -b 2 -b 3 -a_nodata 0"
-
- system.run('gdal_translate -of KMLSUPEROVERLAY -co FORMAT=JPEG "%s" "%s" %s '
- '--config GDAL_CACHEMAX %s%% ' % (orthophoto_file, output_file, bandparam, get_max_memory()))
-
-def post_orthophoto_steps(args, bounds_file_path, orthophoto_file, orthophoto_tiles_dir):
- if args.crop > 0 or args.boundary:
- Cropper.crop(bounds_file_path, orthophoto_file, get_orthophoto_vars(args), keep_original=not args.optimize_disk_space, warp_options=['-dstalpha'])
-
- if args.build_overviews and not args.cog:
- build_overviews(orthophoto_file)
-
- if args.orthophoto_png:
- generate_png(orthophoto_file)
-
- if args.orthophoto_kmz:
- generate_kmz(orthophoto_file)
-
- if args.tiles:
- generate_orthophoto_tiles(orthophoto_file, orthophoto_tiles_dir, args.max_concurrency)
-
- if args.cog:
- convert_to_cogeo(orthophoto_file, max_workers=args.max_concurrency, compression=args.orthophoto_compression)
-
-def compute_mask_raster(input_raster, vector_mask, output_raster, blend_distance=20, only_max_coords_feature=False):
- if not os.path.exists(input_raster):
- log.ODM_WARNING("Cannot mask raster, %s does not exist" % input_raster)
- return
-
- if not os.path.exists(vector_mask):
- log.ODM_WARNING("Cannot mask raster, %s does not exist" % vector_mask)
- return
-
- log.ODM_INFO("Computing mask raster: %s" % output_raster)
-
- with rasterio.open(input_raster, 'r') as rast:
- with fiona.open(vector_mask) as src:
- burn_features = src
-
- if only_max_coords_feature:
- max_coords_count = 0
- max_coords_feature = None
- for feature in src:
- if feature is not None:
- # No complex shapes
- if len(feature['geometry']['coordinates'][0]) > max_coords_count:
- max_coords_count = len(feature['geometry']['coordinates'][0])
- max_coords_feature = feature
- if max_coords_feature is not None:
- burn_features = [max_coords_feature]
-
- shapes = [feature["geometry"] for feature in burn_features]
- out_image, out_transform = mask(rast, shapes, nodata=0)
-
- if blend_distance > 0:
- if out_image.shape[0] >= 4:
- # alpha_band = rast.dataset_mask()
- alpha_band = out_image[-1]
- dist_t = edt(alpha_band, black_border=True, parallel=0)
- dist_t[dist_t <= blend_distance] /= blend_distance
- dist_t[dist_t > blend_distance] = 1
- np.multiply(alpha_band, dist_t, out=alpha_band, casting="unsafe")
- else:
- log.ODM_WARNING("%s does not have an alpha band, cannot blend cutline!" % input_raster)
-
- with rasterio.open(output_raster, 'w', BIGTIFF="IF_SAFER", **rast.profile) as dst:
- dst.colorinterp = rast.colorinterp
- dst.write(out_image)
-
- return output_raster
-
-def feather_raster(input_raster, output_raster, blend_distance=20):
- if not os.path.exists(input_raster):
- log.ODM_WARNING("Cannot feather raster, %s does not exist" % input_raster)
- return
-
- log.ODM_INFO("Computing feather raster: %s" % output_raster)
-
- with rasterio.open(input_raster, 'r') as rast:
- out_image = rast.read()
- if blend_distance > 0:
- if out_image.shape[0] >= 4:
- alpha_band = out_image[-1]
- dist_t = edt(alpha_band, black_border=True, parallel=0)
- dist_t[dist_t <= blend_distance] /= blend_distance
- dist_t[dist_t > blend_distance] = 1
- np.multiply(alpha_band, dist_t, out=alpha_band, casting="unsafe")
- else:
- log.ODM_WARNING("%s does not have an alpha band, cannot feather raster!" % input_raster)
-
- with rasterio.open(output_raster, 'w', BIGTIFF="IF_SAFER", **rast.profile) as dst:
- dst.colorinterp = rast.colorinterp
- dst.write(out_image)
-
- return output_raster
-
-def merge(input_ortho_and_ortho_cuts, output_orthophoto, orthophoto_vars={}):
- """
- Based on https://github.com/mapbox/rio-merge-rgba/
- Merge orthophotos around cutlines using a blend buffer.
- """
- inputs = []
- bounds=None
- precision=7
-
- for o, c in input_ortho_and_ortho_cuts:
- if not io.file_exists(o):
- log.ODM_WARNING("%s does not exist. Will skip from merged orthophoto." % o)
- continue
- if not io.file_exists(c):
- log.ODM_WARNING("%s does not exist. Will skip from merged orthophoto." % c)
- continue
- inputs.append((o, c))
-
- if len(inputs) == 0:
- log.ODM_WARNING("No input orthophotos, skipping merge.")
- return
-
- with rasterio.open(inputs[0][0]) as first:
- res = first.res
- dtype = first.dtypes[0]
- profile = first.profile
- num_bands = first.meta['count'] - 1 # minus alpha
- colorinterp = first.colorinterp
-
- log.ODM_INFO("%s valid orthophoto rasters to merge" % len(inputs))
- sources = [(rasterio.open(o), rasterio.open(c)) for o,c in inputs]
-
- # scan input files.
- # while we're at it, validate assumptions about inputs
- xs = []
- ys = []
- for src, _ in sources:
- left, bottom, right, top = src.bounds
- xs.extend([left, right])
- ys.extend([bottom, top])
- if src.profile["count"] < 4:
- raise ValueError("Inputs must be at least 4-band rasters")
- dst_w, dst_s, dst_e, dst_n = min(xs), min(ys), max(xs), max(ys)
- log.ODM_INFO("Output bounds: %r %r %r %r" % (dst_w, dst_s, dst_e, dst_n))
-
- output_transform = Affine.translation(dst_w, dst_n)
- output_transform *= Affine.scale(res[0], -res[1])
-
- # Compute output array shape. We guarantee it will cover the output
- # bounds completely.
- output_width = int(math.ceil((dst_e - dst_w) / res[0]))
- output_height = int(math.ceil((dst_n - dst_s) / res[1]))
-
- # Adjust bounds to fit.
- dst_e, dst_s = output_transform * (output_width, output_height)
- log.ODM_INFO("Output width: %d, height: %d" % (output_width, output_height))
- log.ODM_INFO("Adjusted bounds: %r %r %r %r" % (dst_w, dst_s, dst_e, dst_n))
-
- profile["transform"] = output_transform
- profile["height"] = output_height
- profile["width"] = output_width
- profile["tiled"] = orthophoto_vars.get('TILED', 'YES') == 'YES'
- profile["blockxsize"] = orthophoto_vars.get('BLOCKXSIZE', 512)
- profile["blockysize"] = orthophoto_vars.get('BLOCKYSIZE', 512)
- profile["compress"] = orthophoto_vars.get('COMPRESS', 'LZW')
- profile["predictor"] = orthophoto_vars.get('PREDICTOR', '2')
- profile["bigtiff"] = orthophoto_vars.get('BIGTIFF', 'IF_SAFER')
- profile.update()
-
- # create destination file
- with rasterio.open(output_orthophoto, "w", **profile) as dstrast:
- dstrast.colorinterp = colorinterp
- for idx, dst_window in dstrast.block_windows():
- left, bottom, right, top = dstrast.window_bounds(dst_window)
-
- dst_rows, dst_cols = (dst_window.height, dst_window.width)
-
- # initialize array destined for the block
- dst_count = first.count
- dst_shape = (dst_count, dst_rows, dst_cols)
-
- dstarr = np.zeros(dst_shape, dtype=dtype)
-
- # First pass, write all rasters naively without blending
- for src, _ in sources:
- src_window = tuple(zip(rowcol(
- src.transform, left, top, op=round, precision=precision
- ), rowcol(
- src.transform, right, bottom, op=round, precision=precision
- )))
-
- temp = np.zeros(dst_shape, dtype=dtype)
- temp = src.read(
- out=temp, window=src_window, boundless=True, masked=False
- )
-
- # pixels without data yet are available to write
- write_region = np.logical_and(
- (dstarr[-1] == 0), (temp[-1] != 0) # 0 is nodata
- )
- np.copyto(dstarr, temp, where=write_region)
-
- # stop early if the destination block is fully covered
- if np.count_nonzero(dstarr[-1]) == dst_rows * dst_cols:
- break
-
- # Second pass, write all feathered rasters
- # blending the edges
- for src, _ in sources:
- src_window = tuple(zip(rowcol(
- src.transform, left, top, op=round, precision=precision
- ), rowcol(
- src.transform, right, bottom, op=round, precision=precision
- )))
-
- temp = np.zeros(dst_shape, dtype=dtype)
- temp = src.read(
- out=temp, window=src_window, boundless=True, masked=False
- )
-
- where = temp[-1] != 0
- for b in range(0, num_bands):
- blended = temp[-1] / 255.0 * temp[b] + (1 - temp[-1] / 255.0) * dstarr[b]
- np.copyto(dstarr[b], blended, casting='unsafe', where=where)
- dstarr[-1][where] = 255.0
-
- # stop early if the destination block is fully covered
- if np.count_nonzero(dstarr[-1]) == dst_rows * dst_cols:
- break
-
- # Third pass, write cut rasters
- # blending the cutlines
- for _, cut in sources:
- src_window = tuple(zip(rowcol(
- cut.transform, left, top, op=round, precision=precision
- ), rowcol(
- cut.transform, right, bottom, op=round, precision=precision
- )))
-
- temp = np.zeros(dst_shape, dtype=dtype)
- temp = cut.read(
- out=temp, window=src_window, boundless=True, masked=False
- )
-
- # For each band, average alpha values between
- # destination raster and cut raster
- for b in range(0, num_bands):
- blended = temp[-1] / 255.0 * temp[b] + (1 - temp[-1] / 255.0) * dstarr[b]
- np.copyto(dstarr[b], blended, casting='unsafe', where=temp[-1]!=0)
-
- dstrast.write(dstarr, window=dst_window)
-
- return output_orthophoto
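
The cutline blending used by compute_mask_raster and feather_raster is a Euclidean distance transform of the alpha band, clamped at blend_distance and applied as a multiplier, so opacity ramps linearly from 0 at the edge to full value blend_distance pixels in. A minimal standalone sketch using the same edt package (the alpha array is synthetic):

    import numpy as np
    from edt import edt

    blend_distance = 20
    alpha = np.zeros((100, 100), dtype=np.uint8)
    alpha[10:90, 10:90] = 255                # synthetic opaque region

    # Pixel distance from the nearest transparent/border pixel
    dist_t = edt(alpha, black_border=True, parallel=0)

    dist_t[dist_t <= blend_distance] /= blend_distance  # linear ramp near edges
    dist_t[dist_t > blend_distance] = 1                 # full opacity inside
    feathered = alpha.astype(np.float32) * dist_t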
diff --git a/o/ODM/ODM-2.8.7/opendm/osfm.py b/o/ODM/ODM-2.8.7/opendm/osfm.py
deleted file mode 100644
index ed8ad8fb..00000000
--- a/o/ODM/ODM-2.8.7/opendm/osfm.py
+++ /dev/null
@@ -1,772 +0,0 @@
-"""
-OpenSfM related utils
-"""
-
-import os, shutil, sys, json, argparse, copy
-import yaml
-import numpy as np
-import pyproj
-from pyproj import CRS
-from opendm import io
-from opendm import log
-from opendm import system
-from opendm import context
-from opendm import camera
-from opendm import location
-from opendm.photo import find_largest_photo_dim, find_largest_photo
-from opensfm.large import metadataset
-from opensfm.large import tools
-from opensfm.actions import undistort
-from opensfm.dataset import DataSet
-from opensfm.types import Reconstruction
-from opensfm import report
-from opendm.multispectral import get_photos_by_band
-from opendm.gpu import has_popsift_and_can_handle_texsize, has_gpu
-from opensfm import multiview, exif
-from opensfm.actions.export_geocoords import _transform
-
-class OSFMContext:
- def __init__(self, opensfm_project_path):
- self.opensfm_project_path = opensfm_project_path
-
- def run(self, command):
- osfm_bin = os.path.join(context.opensfm_path, 'bin', 'opensfm')
- system.run('"%s" %s "%s"' %
- (osfm_bin, command, self.opensfm_project_path))
-
- def is_reconstruction_done(self):
- tracks_file = os.path.join(self.opensfm_project_path, 'tracks.csv')
- reconstruction_file = os.path.join(self.opensfm_project_path, 'reconstruction.json')
-
- return io.file_exists(tracks_file) and io.file_exists(reconstruction_file)
-
- def create_tracks(self, rerun=False):
- tracks_file = os.path.join(self.opensfm_project_path, 'tracks.csv')
- rs_file = self.path('rs_done.txt')
-
- if not io.file_exists(tracks_file) or rerun:
- self.run('create_tracks')
- else:
- log.ODM_WARNING('Found a valid OpenSfM tracks file in: %s' % tracks_file)
-
- def reconstruct(self, rolling_shutter_correct=False, rerun=False):
- reconstruction_file = os.path.join(self.opensfm_project_path, 'reconstruction.json')
- if not io.file_exists(reconstruction_file) or rerun:
- self.run('reconstruct')
- self.check_merge_partial_reconstructions()
- else:
- log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' % reconstruction_file)
-
- # Check that a reconstruction file has been created
- if not self.reconstructed():
- raise system.ExitException("The program could not process this dataset using the current settings. "
- "Check that the images have enough overlap, "
- "that there are enough recognizable features "
- "and that the images are in focus. "
- "You could also try to increase the --min-num-features parameter."
- "The program will now exit.")
-
- if rolling_shutter_correct:
- rs_file = self.path('rs_done.txt')
-
- if not io.file_exists(rs_file) or rerun:
- self.run('rs_correct')
-
- log.ODM_INFO("Re-running the reconstruction pipeline")
-
- self.match_features(True)
- self.create_tracks(True)
- self.reconstruct(rolling_shutter_correct=False, rerun=True)
-
- self.touch(rs_file)
- else:
- log.ODM_WARNING("Rolling shutter correction already applied")
-
- def check_merge_partial_reconstructions(self):
- if self.reconstructed():
- data = DataSet(self.opensfm_project_path)
- reconstructions = data.load_reconstruction()
- tracks_manager = data.load_tracks_manager()
-
- if len(reconstructions) > 1:
- log.ODM_WARNING("Multiple reconstructions detected (%s), this might be an indicator that some areas did not have sufficient overlap" % len(reconstructions))
- log.ODM_INFO("Attempting merge")
-
- merged = Reconstruction()
- merged.set_reference(reconstructions[0].reference)
-
- for ix_r, rec in enumerate(reconstructions):
- if merged.reference != rec.reference:
- # Should never happen
- continue
-
- log.ODM_INFO("Merging reconstruction %s" % ix_r)
-
- for camera in rec.cameras.values():
- merged.add_camera(camera)
-
- for point in rec.points.values():
- try:
- new_point = merged.create_point(point.id, point.coordinates)
- new_point.color = point.color
- except RuntimeError as e:
- log.ODM_WARNING("Cannot merge shot id %s (%s)" % (shot.id, str(e)))
- continue
-
- for shot in rec.shots.values():
- merged.add_shot(shot)
- try:
- obsdict = tracks_manager.get_shot_observations(shot.id)
- except RuntimeError:
- log.ODM_WARNING("Shot id %s missing from tracks_manager!" % shot.id)
- continue
- for track_id, obs in obsdict.items():
- if track_id in merged.points:
- merged.add_observation(shot.id, track_id, obs)
-
- data.save_reconstruction([merged])
-
- def setup(self, args, images_path, reconstruction, append_config = [], rerun=False):
- """
- Setup a OpenSfM project
- """
- if rerun and io.dir_exists(self.opensfm_project_path):
- shutil.rmtree(self.opensfm_project_path)
-
- if not io.dir_exists(self.opensfm_project_path):
- system.mkdir_p(self.opensfm_project_path)
-
- list_path = os.path.join(self.opensfm_project_path, 'image_list.txt')
- if not io.file_exists(list_path) or rerun:
-
- if reconstruction.multi_camera:
- photos = get_photos_by_band(reconstruction.multi_camera, args.primary_band)
- if len(photos) < 1:
- raise Exception("Not enough images in selected band %s" % args.primary_band.lower())
- log.ODM_INFO("Reconstruction will use %s images from %s band" % (len(photos), args.primary_band.lower()))
- else:
- photos = reconstruction.photos
-
- # create file list
- num_zero_alt = 0
- has_alt = True
- has_gps = False
- with open(list_path, 'w') as fout:
- for photo in photos:
- if photo.altitude is None:
- has_alt = False
- elif photo.altitude == 0:
- num_zero_alt += 1
- if photo.latitude is not None and photo.longitude is not None:
- has_gps = True
-
- fout.write('%s\n' % os.path.join(images_path, photo.filename))
-
- # check 0 altitude images percentage when has_alt is True
- if has_alt and num_zero_alt / len(photos) > 0.05:
- log.ODM_WARNING("More than 5% of images have zero altitude, this might be an indicator that the images have no altitude information")
- has_alt = False
-
- # check for image_groups.txt (split-merge)
- image_groups_file = os.path.join(args.project_path, "image_groups.txt")
- if 'split_image_groups_is_set' in args:
- image_groups_file = os.path.abspath(args.split_image_groups)
-
- if io.file_exists(image_groups_file):
- dst_groups_file = os.path.join(self.opensfm_project_path, "image_groups.txt")
- io.copy(image_groups_file, dst_groups_file)
- log.ODM_INFO("Copied %s to %s" % (image_groups_file, dst_groups_file))
-
- # check for cameras
- if args.cameras:
- try:
- camera_overrides = camera.get_opensfm_camera_models(args.cameras)
- with open(os.path.join(self.opensfm_project_path, "camera_models_overrides.json"), 'w') as f:
- f.write(json.dumps(camera_overrides))
- log.ODM_INFO("Wrote camera_models_overrides.json to OpenSfM directory")
- except Exception as e:
- log.ODM_WARNING("Cannot set camera_models_overrides.json: %s" % str(e))
-
- # Check image masks
- masks = []
- for p in photos:
- if p.mask is not None:
- masks.append((p.filename, os.path.join(images_path, p.mask)))
-
- if masks:
- log.ODM_INFO("Found %s image masks" % len(masks))
- with open(os.path.join(self.opensfm_project_path, "mask_list.txt"), 'w') as f:
- for fname, mask in masks:
- f.write("{} {}\n".format(fname, mask))
-
- # Compute feature_process_size
- feature_process_size = 2048 # default
-
- if ('resize_to_is_set' in args) and args.resize_to > 0:
- # Legacy
- log.ODM_WARNING("Legacy option --resize-to (this might be removed in a future version). Use --feature-quality instead.")
- feature_process_size = int(args.resize_to)
- else:
- feature_quality_scale = {
- 'ultra': 1,
- 'high': 0.5,
- 'medium': 0.25,
- 'low': 0.125,
- 'lowest': 0.0625,
- }
-
- max_dim = find_largest_photo_dim(photos)
-
- if max_dim > 0:
- log.ODM_INFO("Maximum photo dimensions: %spx" % str(max_dim))
- feature_process_size = int(max_dim * feature_quality_scale[args.feature_quality])
- log.ODM_INFO("Photo dimensions for feature extraction: %ipx" % feature_process_size)
- else:
- log.ODM_WARNING("Cannot compute max image dimensions, going with defaults")
-
- # create config file for OpenSfM
- if args.matcher_neighbors > 0:
- matcher_graph_rounds = 0
- matcher_neighbors = args.matcher_neighbors
- else:
- matcher_graph_rounds = 50
- matcher_neighbors = 0
-
- config = [
- "use_exif_size: no",
- "flann_algorithm: KDTREE", # more stable, faster than KMEANS
- "feature_process_size: %s" % feature_process_size,
- "feature_min_frames: %s" % args.min_num_features,
- "processes: %s" % args.max_concurrency,
- "matching_gps_neighbors: %s" % matcher_neighbors,
- "matching_gps_distance: 0",
- "matching_graph_rounds: %s" % matcher_graph_rounds,
- "optimize_camera_parameters: %s" % ('no' if args.use_fixed_camera_params else 'yes'),
- "reconstruction_algorithm: %s" % (args.sfm_algorithm),
- "undistorted_image_format: tif",
- "bundle_outlier_filtering_type: AUTO",
- "sift_peak_threshold: 0.066",
- "align_orientation_prior: vertical",
- "triangulation_type: ROBUST",
- "retriangulation_ratio: 2",
- ]
-
- if args.camera_lens != 'auto':
- config.append("camera_projection_type: %s" % args.camera_lens.upper())
-
- matcher_type = args.matcher_type
- feature_type = args.feature_type.upper()
-
- osfm_matchers = {
- "bow": "WORDS",
- "flann": "FLANN",
- "bruteforce": "BRUTEFORCE"
- }
-
- if not has_gps and 'matcher_type_is_set' not in args:
- log.ODM_INFO("No GPS information, using BOW matching by default (you can override this by setting --matcher-type explicitly)")
- matcher_type = "bow"
-
- if matcher_type == "bow":
- # Cannot use anything other than HAHOG with BOW
- if feature_type != "HAHOG":
- log.ODM_WARNING("Using BOW matching, will use HAHOG feature type, not SIFT")
- feature_type = "HAHOG"
-
- config.append("matcher_type: %s" % osfm_matchers[matcher_type])
-
- # GPU acceleration?
- if has_gpu(args):
- max_photo = find_largest_photo(photos)
- w, h = max_photo.width, max_photo.height
- if w > h:
- h = int((h / w) * feature_process_size)
- w = int(feature_process_size)
- else:
- w = int((w / h) * feature_process_size)
- h = int(feature_process_size)
-
- if has_popsift_and_can_handle_texsize(w, h) and feature_type == "SIFT":
- log.ODM_INFO("Using GPU for extracting SIFT features")
- feature_type = "SIFT_GPU"
- self.gpu_sift_feature_extraction = True
-
- config.append("feature_type: %s" % feature_type)
-
- if has_alt:
- log.ODM_INFO("Altitude data detected, enabling it for GPS alignment")
- config.append("use_altitude_tag: yes")
-
- gcp_path = reconstruction.gcp.gcp_path
- if has_alt or gcp_path:
- config.append("align_method: auto")
- else:
- config.append("align_method: orientation_prior")
-
- if args.use_hybrid_bundle_adjustment:
- log.ODM_INFO("Enabling hybrid bundle adjustment")
- config.append("bundle_interval: 100") # Bundle after adding 'bundle_interval' cameras
- config.append("bundle_new_points_ratio: 1.2") # Bundle when (new points) / (bundled points) > bundle_new_points_ratio
- config.append("local_bundle_radius: 1") # Max image graph distance for images to be included in local bundle adjustment
- else:
- config.append("local_bundle_radius: 0")
-
- if gcp_path:
- config.append("bundle_use_gcp: yes")
- if not args.force_gps:
- config.append("bundle_use_gps: no")
- else:
- config.append("bundle_compensate_gps_bias: yes")
-
- io.copy(gcp_path, self.path("gcp_list.txt"))
-
- config = config + append_config
-
- # write config file
- log.ODM_INFO(config)
- config_filename = self.get_config_file_path()
- with open(config_filename, 'w') as fout:
- fout.write("\n".join(config))
-
- # We impose our own reference_lla
- if reconstruction.is_georeferenced():
- self.write_reference_lla(reconstruction.georef.utm_east_offset, reconstruction.georef.utm_north_offset, reconstruction.georef.proj4())
- else:
- log.ODM_WARNING("%s already exists, not rerunning OpenSfM setup" % list_path)
-
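
For setup() above, the feature quality presets map to a processing size by scaling the largest photo dimension; each step down from 'ultra' halves the size. A worked example (assuming the halving progression and a hypothetical 8000 px photo):

    max_dim = 8000  # hypothetical largest photo dimension, in pixels
    for level, quality in enumerate(['ultra', 'high', 'medium', 'low', 'lowest']):
        print(quality, int(max_dim * 0.5 ** level))
    # ultra 8000, high 4000, medium 2000, low 1000, lowest 500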
- def get_config_file_path(self):
- return os.path.join(self.opensfm_project_path, 'config.yaml')
-
- def reconstructed(self):
- if not io.file_exists(self.path("reconstruction.json")):
- return False
-
- with open(self.path("reconstruction.json"), 'r') as f:
- return f.readline().strip() != "[]"
-
- def extract_metadata(self, rerun=False):
- metadata_dir = self.path("exif")
- if not io.dir_exists(metadata_dir) or rerun:
- self.run('extract_metadata')
-
- def photos_to_metadata(self, photos, rolling_shutter, rolling_shutter_readout, rerun=False):
- metadata_dir = self.path("exif")
-
- if io.dir_exists(metadata_dir) and not rerun:
- log.ODM_WARNING("%s already exists, not rerunning photo to metadata" % metadata_dir)
- return
-
- if io.dir_exists(metadata_dir):
- shutil.rmtree(metadata_dir)
-
- os.makedirs(metadata_dir, exist_ok=True)
-
- camera_models = {}
- data = DataSet(self.opensfm_project_path)
-
- for p in photos:
- d = p.to_opensfm_exif(rolling_shutter, rolling_shutter_readout)
- with open(os.path.join(metadata_dir, "%s.exif" % p.filename), 'w') as f:
- f.write(json.dumps(d, indent=4))
-
- camera_id = p.camera_id()
- if camera_id not in camera_models:
- camera = exif.camera_from_exif_metadata(d, data)
- camera_models[camera_id] = camera
-
- # Override any camera specified in the camera models overrides file.
- if data.camera_models_overrides_exists():
- overrides = data.load_camera_models_overrides()
- if "all" in overrides:
- for key in camera_models:
- camera_models[key] = copy.copy(overrides["all"])
- camera_models[key].id = key
- else:
- for key, value in overrides.items():
- camera_models[key] = value
- data.save_camera_models(camera_models)
-
- def is_feature_matching_done(self):
- features_dir = self.path("features")
- matches_dir = self.path("matches")
-
- return io.dir_exists(features_dir) and io.dir_exists(matches_dir)
-
- def feature_matching(self, rerun=False):
- features_dir = self.path("features")
-
- if not io.dir_exists(features_dir) or rerun:
- try:
- self.run('detect_features')
- except system.SubprocessException as e:
- # Sometimes feature extraction by GPU can fail
- # for various reasons, so before giving up
- # we try to fallback to CPU
- if hasattr(self, 'gpu_sift_feature_extraction'):
- log.ODM_WARNING("GPU SIFT extraction failed, maybe the graphics card is not supported? Attempting fallback to CPU")
- self.update_config({'feature_type': "SIFT"})
- self.run('detect_features')
- else:
- raise e
- else:
- log.ODM_WARNING('Detect features already done: %s exists' % features_dir)
-
- self.match_features(rerun)
-
- def match_features(self, rerun=False):
- matches_dir = self.path("matches")
- if not io.dir_exists(matches_dir) or rerun:
- self.run('match_features')
- else:
- log.ODM_WARNING('Match features already done: %s exists' % matches_dir)
-
- def align_reconstructions(self, rerun):
- alignment_file = self.path('alignment_done.txt')
- if not io.file_exists(alignment_file) or rerun:
- log.ODM_INFO("Aligning submodels...")
- meta_data = metadataset.MetaDataSet(self.opensfm_project_path)
- reconstruction_shots = tools.load_reconstruction_shots(meta_data)
- transformations = tools.align_reconstructions(reconstruction_shots,
- tools.partial_reconstruction_name,
- False)
- tools.apply_transformations(transformations)
-
- self.touch(alignment_file)
- else:
- log.ODM_WARNING('Found an alignment done flag file in: %s' % alignment_file)
-
- def touch(self, file):
- with open(file, 'w') as fout:
- fout.write("Done!\n")
-
- def path(self, *paths):
- return os.path.join(self.opensfm_project_path, *paths)
-
- def extract_cameras(self, output, rerun=False):
- if not os.path.exists(output) or rerun:
- try:
- reconstruction_file = self.path("reconstruction.json")
- with open(output, 'w') as fout:
- fout.write(json.dumps(camera.get_cameras_from_opensfm(reconstruction_file), indent=4))
- except Exception as e:
- log.ODM_WARNING("Cannot export cameras to %s. %s." % (output, str(e)))
- else:
- log.ODM_INFO("Already extracted cameras")
-
- def convert_and_undistort(self, rerun=False, imageFilter=None, image_list=None, runId="nominal"):
- log.ODM_INFO("Undistorting %s ..." % self.opensfm_project_path)
- done_flag_file = self.path("undistorted", "%s_done.txt" % runId)
-
- if not io.file_exists(done_flag_file) or rerun:
- ds = DataSet(self.opensfm_project_path)
-
- if image_list is not None:
- ds._set_image_list(image_list)
-
- undistort.run_dataset(ds, "reconstruction.json",
- 0, None, "undistorted", imageFilter)
-
- self.touch(done_flag_file)
- else:
- log.ODM_WARNING("Already undistorted (%s)" % runId)
-
- def restore_reconstruction_backup(self):
- if os.path.exists(self.recon_backup_file()):
- # This time export the actual reconstruction.json
- # (containing only the primary band)
- if os.path.exists(self.recon_file()):
- os.remove(self.recon_file())
- os.replace(self.recon_backup_file(), self.recon_file())
- log.ODM_INFO("Restored reconstruction.json")
-
- def backup_reconstruction(self):
- if os.path.exists(self.recon_backup_file()):
- os.remove(self.recon_backup_file())
-
- log.ODM_INFO("Backing up reconstruction")
- shutil.copyfile(self.recon_file(), self.recon_backup_file())
-
- def recon_backup_file(self):
- return self.path("reconstruction.backup.json")
-
- def recon_file(self):
- return self.path("reconstruction.json")
-
- def add_shots_to_reconstruction(self, p2s):
- with open(self.recon_file()) as f:
- reconstruction = json.loads(f.read())
-
- # Augment reconstruction.json
- for recon in reconstruction:
- shots = recon['shots']
- sids = list(shots)
-
- for shot_id in sids:
- secondary_photos = p2s.get(shot_id)
- if secondary_photos is None:
- log.ODM_WARNING("Cannot find secondary photos for %s" % shot_id)
- continue
-
- for p in secondary_photos:
- shots[p.filename] = shots[shot_id]
-
- with open(self.recon_file(), 'w') as f:
- f.write(json.dumps(reconstruction))
-
-
- def update_config(self, cfg_dict):
- cfg_file = self.get_config_file_path()
- log.ODM_INFO("Updating %s" % cfg_file)
- if os.path.exists(cfg_file):
- try:
- with open(cfg_file) as fin:
- cfg = yaml.safe_load(fin)
- for k, v in cfg_dict.items():
- cfg[k] = v
- log.ODM_INFO("%s: %s" % (k, v))
- with open(cfg_file, 'w') as fout:
- fout.write(yaml.dump(cfg, default_flow_style=False))
- except Exception as e:
- log.ODM_WARNING("Cannot update configuration file %s: %s" % (cfg_file, str(e)))
- else:
- log.ODM_WARNING("Tried to update configuration, but %s does not exist." % cfg_file)
-
- def export_stats(self, rerun=False):
- log.ODM_INFO("Export reconstruction stats")
- stats_path = self.path("stats", "stats.json")
- if not os.path.exists(stats_path) or rerun:
- self.run("compute_statistics --diagram_max_points 100000")
- else:
- log.ODM_WARNING("Found existing reconstruction stats %s" % stats_path)
-
- def export_report(self, report_path, odm_stats, rerun=False):
- log.ODM_INFO("Exporting report to %s" % report_path)
-
- osfm_report_path = self.path("stats", "report.pdf")
- if not os.path.exists(report_path) or rerun:
- data = DataSet(self.opensfm_project_path)
- pdf_report = report.Report(data, odm_stats)
- pdf_report.generate_report()
- pdf_report.save_report("report.pdf")
-
- if os.path.exists(osfm_report_path):
- shutil.move(osfm_report_path, report_path)
- else:
- log.ODM_WARNING("Report could not be generated")
- else:
- log.ODM_WARNING("Report %s already exported" % report_path)
-
- def write_reference_lla(self, offset_x, offset_y, proj4):
- reference_lla = self.path("reference_lla.json")
-
- longlat = CRS.from_epsg("4326")
- lon, lat = location.transform2(CRS.from_proj4(proj4), longlat, offset_x, offset_y)
-
- with open(reference_lla, 'w') as f:
- f.write(json.dumps({
- 'latitude': lat,
- 'longitude': lon,
- 'altitude': 0.0
- }, indent=4))
-
- log.ODM_INFO("Wrote reference_lla.json")
-
- def ground_control_points(self, proj4):
- """
- Load ground control point information.
- """
- gcp_stats_file = self.path("stats", "ground_control_points.json")
-
- if not io.file_exists(gcp_stats_file):
- return []
-
- gcps_stats = {}
- try:
- with open(gcp_stats_file) as f:
- gcps_stats = json.loads(f.read())
- except:
- log.ODM_INFO("Cannot parse %s" % gcp_stats_file)
-
- if not gcps_stats:
- return []
-
- ds = DataSet(self.opensfm_project_path)
- reference = ds.load_reference()
- projection = pyproj.Proj(proj4)
-
- result = []
- for gcp in gcps_stats:
- geocoords = _transform(gcp['coordinates'], reference, projection)
- result.append({
- 'id': gcp['id'],
- 'observations': gcp['observations'],
- 'coordinates': geocoords,
- 'error': gcp['error']
- })
-
- return result
-
-
- def name(self):
- return os.path.basename(os.path.abspath(self.path("..")))
-
-def get_submodel_argv(args, submodels_path = None, submodel_name = None):
- """
- Gets argv for a submodel starting from the args passed to the application startup.
- Additionally, if submodels_path and submodel_name are passed, the function
- handles the --project-path value and project name detection / override.
- When both are set to None, --project-path and the project name are always removed.
-
- :return the same as argv, but removing references to --split,
- setting/replacing --project-path and name
- removing --rerun-from, --rerun, --rerun-all, --sm-cluster
- removing --pc-las, --pc-csv, --pc-ept, --tiles flags (processing these is wasteful)
- adding --orthophoto-cutline
- adding --dem-euclidean-map
- adding --skip-3dmodel (split-merge does not support 3D model merging)
- tweaking --crop if necessary (DEM merging makes assumptions about the area of DEMs and their euclidean maps that require cropping. If cropping is skipped, this leads to errors.)
- removing --gcp (the GCP path if specified is always "gcp_list.txt")
- reading the contents of --cameras
- reading the contents of --boundary
- """
- assure_always = ['orthophoto_cutline', 'dem_euclidean_map', 'skip_3dmodel', 'skip_report']
- remove_always = ['split', 'split_overlap', 'rerun_from', 'rerun', 'gcp', 'end_with', 'sm_cluster', 'rerun_all', 'pc_csv', 'pc_las', 'pc_ept', 'tiles', 'copy_to', 'cog']
- read_json_always = ['cameras', 'boundary']
-
- argv = sys.argv
-
- # Startup script (/path/to/run.py)
- startup_script = argv[0]
-
- # On Windows, make sure we always invoke the "run.bat" file
- if sys.platform == 'win32':
- startup_script_dir = os.path.dirname(startup_script)
- startup_script = os.path.join(startup_script_dir, "run")
-
- result = [startup_script]
-
- args_dict = vars(args).copy()
- set_keys = [k[:-len("_is_set")] for k in args_dict.keys() if k.endswith("_is_set")]
-
- # Handle project name and project path (special case)
- if "name" in set_keys:
- del args_dict["name"]
- set_keys.remove("name")
-
- if "project_path" in set_keys:
- del args_dict["project_path"]
- set_keys.remove("project_path")
-
- # Remove parameters
- set_keys = [k for k in set_keys if k not in remove_always]
-
- # Assure parameters
- for k in assure_always:
- if k not in set_keys:
- set_keys.append(k)
- args_dict[k] = True
-
- # Read JSON always
- for k in read_json_always:
- if k in set_keys:
- try:
- if isinstance(args_dict[k], str):
- args_dict[k] = io.path_or_json_string_to_dict(args_dict[k])
- if isinstance(args_dict[k], dict):
- args_dict[k] = json.dumps(args_dict[k])
- except ValueError as e:
- log.ODM_WARNING("Cannot parse/read JSON: {}".format(str(e)))
-
- # Handle crop (cannot be zero for split/merge)
- if "crop" in set_keys:
- crop_value = float(args_dict["crop"])
- if crop_value == 0:
- crop_value = 0.015625
- args_dict["crop"] = crop_value
-
- # Populate result
- for k in set_keys:
- result.append("--%s" % k.replace("_", "-"))
-
- # No second value for booleans
- if isinstance(args_dict[k], bool) and args_dict[k]:
- continue
-
- result.append(str(args_dict[k]))
-
- if submodels_path:
- result.append("--project-path")
- result.append(submodels_path)
-
- if submodel_name:
- result.append(submodel_name)
-
- return result
-
-def get_submodel_args_dict(args):
- submodel_argv = get_submodel_argv(args)
- result = {}
-
- i = 0
- while i < len(submodel_argv):
- arg = submodel_argv[i]
- next_arg = None if i == len(submodel_argv) - 1 else submodel_argv[i + 1]
-
- if next_arg and arg.startswith("--"):
- if next_arg.startswith("--"):
- result[arg[2:]] = True
- else:
- result[arg[2:]] = next_arg
- i += 1
- elif arg.startswith("--"):
- result[arg[2:]] = True
- i += 1
-
- return result
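-
- # e.g. (hypothetical) ["run.py", "--crop", "0.015625", "--skip-3dmodel"]
- # parses to {"crop": "0.015625", "skip-3dmodel": True}; the leading
- # startup script entry carries no "--" prefix and is skipped.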
-
-
-def get_submodel_paths(submodels_path, *paths):
- """
- :return Existing paths for all submodels
- """
- result = []
- if not os.path.exists(submodels_path):
- return result
-
- for f in os.listdir(submodels_path):
- if f.startswith('submodel'):
- p = os.path.join(submodels_path, f, *paths)
- if os.path.exists(p):
- result.append(p)
- else:
- log.ODM_WARNING("Missing %s from submodel %s" % (p, f))
-
- return result
-
-def get_all_submodel_paths(submodels_path, *all_paths):
- """
- :return Existing, multiple paths for all submodels as a nested list (all or nothing for each submodel);
- if a single file is missing from a submodel, no files are returned for that submodel.
-
- (e.g. get_all_submodel_paths("path/", "odm_orthophoto.tif", "dem.tif")) -->
- [["path/submodel_0000/odm_orthophoto.tif", "path/submodel_0000/dem.tif"],
- ["path/submodel_0001/odm_orthophoto.tif", "path/submodel_0001/dem.tif"]]
- """
- result = []
- if not os.path.exists(submodels_path):
- return result
-
- for f in os.listdir(submodels_path):
- if f.startswith('submodel'):
- all_found = True
-
- for ap in all_paths:
- p = os.path.join(submodels_path, f, ap)
- if not os.path.exists(p):
- log.ODM_WARNING("Missing %s from submodel %s" % (p, f))
- all_found = False
-
- if all_found:
- result.append([os.path.join(submodels_path, f, ap) for ap in all_paths])
-
- return result
diff --git a/o/ODM/ODM-2.8.7/opendm/photo.py b/o/ODM/ODM-2.8.7/opendm/photo.py
deleted file mode 100644
index 5451c76d..00000000
--- a/o/ODM/ODM-2.8.7/opendm/photo.py
+++ /dev/null
@@ -1,856 +0,0 @@
-import logging
-import re
-import os
-import math
-
-import exifread
-import numpy as np
-from six import string_types
-from datetime import datetime, timedelta, timezone
-import pytz
-
-from opendm import io
-from opendm import log
-from opendm import system
-from opendm.rollingshutter import get_rolling_shutter_readout
-import xmltodict as x2d
-from opendm import get_image_size
-from xml.parsers.expat import ExpatError
-from opensfm.sensors import sensor_data
-from opensfm.geo import ecef_from_lla
-
-projections = ['perspective', 'fisheye', 'brown', 'dual', 'equirectangular', 'spherical']
-
-def find_largest_photo_dims(photos):
- max_mp = 0
- max_dims = None
-
- for p in photos:
- if p.width is None or p.height is None:
- continue
- mp = p.width * p.height
- if mp > max_mp:
- max_mp = mp
- max_dims = (p.width, p.height)
-
- return max_dims
-
-def find_largest_photo_dim(photos):
- max_dim = 0
- for p in photos:
- if p.width is None:
- continue
- max_dim = max(max_dim, max(p.width, p.height))
-
- return max_dim
-
-def find_largest_photo(photos):
- max_p = None
- max_area = 0
- for p in photos:
- if p.width is None:
- continue
- area = p.width * p.height
-
- if area > max_area:
- max_area = area
- max_p = p
-
- return max_p
-
-def get_mm_per_unit(resolution_unit):
- """Length of a resolution unit in millimeters.
-
- Uses the values from the EXIF specs in
- https://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/EXIF.html
-
- Args:
- resolution_unit: the resolution unit value given in the EXIF
- """
- if resolution_unit == 2: # inch
- return 25.4
- elif resolution_unit == 3: # cm
- return 10
- elif resolution_unit == 4: # mm
- return 1
- elif resolution_unit == 5: # um
- return 0.001
- else:
- log.ODM_WARNING("Unknown EXIF resolution unit value: {}".format(resolution_unit))
- return None
-
-class PhotoCorruptedException(Exception):
- pass
-
-class GPSRefMock:
- def __init__(self, ref):
- self.values = [ref]
-
-
-class ODM_Photo:
- """ODMPhoto - a class for ODMPhotos"""
-
- def __init__(self, path_file):
- self.filename = os.path.basename(path_file)
- self.mask = None
-
- # Standard tags (virtually all photos have these)
- self.width = None
- self.height = None
- self.camera_make = ''
- self.camera_model = ''
- self.orientation = 1
-
- # Geo tags
- self.latitude = None
- self.longitude = None
- self.altitude = None
-
- # Multi-band fields
- self.band_name = 'RGB'
- self.band_index = 0
- self.capture_uuid = None # DJI only
-
- # Multi-spectral fields
- self.fnumber = None
- self.radiometric_calibration = None
- self.black_level = None
-
- # Capture info
- self.exposure_time = None
- self.iso_speed = None
- self.bits_per_sample = None
- self.vignetting_center = None
- self.vignetting_polynomial = None
- self.spectral_irradiance = None
- self.horizontal_irradiance = None
- self.irradiance_scale_to_si = None
- self.utc_time = None
-
- # OPK angles
- self.yaw = None
- self.pitch = None
- self.roll = None
- self.omega = None
- self.phi = None
- self.kappa = None
-
- # DLS
- self.sun_sensor = None
- self.dls_yaw = None
- self.dls_pitch = None
- self.dls_roll = None
-
- # Aircraft speed
- self.speed_x = None
- self.speed_y = None
- self.speed_z = None
-
- # self.center_wavelength = None
- # self.bandwidth = None
-
- # RTK
- self.gps_xy_stddev = None # Dilution of Precision X/Y
- self.gps_z_stddev = None # Dilution of Precision Z
-
- # Misc SFM
- self.camera_projection = 'brown'
- self.focal_ratio = 0.85
-
- # parse values from metadata
- self.parse_exif_values(path_file)
-
- # print log message
- log.ODM_DEBUG('Loaded {}'.format(self))
-
-
- def __str__(self):
- return '{} | camera: {} {} | dimensions: {} x {} | lat: {} | lon: {} | alt: {} | band: {} ({})'.format(
- self.filename, self.camera_make, self.camera_model, self.width, self.height,
- self.latitude, self.longitude, self.altitude, self.band_name, self.band_index)
-
- def set_mask(self, mask):
- self.mask = mask
-
- def update_with_geo_entry(self, geo_entry):
- self.latitude = geo_entry.y
- self.longitude = geo_entry.x
- self.altitude = geo_entry.z
- if geo_entry.yaw is not None and geo_entry.pitch is not None and geo_entry.roll is not None:
- self.yaw = geo_entry.yaw
- self.pitch = geo_entry.pitch
- self.roll = geo_entry.roll
- self.dls_yaw = geo_entry.yaw
- self.dls_pitch = geo_entry.pitch
- self.dls_roll = geo_entry.roll
- self.gps_xy_stddev = geo_entry.horizontal_accuracy
- self.gps_z_stddev = geo_entry.vertical_accuracy
-
- def parse_exif_values(self, _path_file):
- # Disable exifread log
- logging.getLogger('exifread').setLevel(logging.CRITICAL)
-
- try:
- self.width, self.height = get_image_size.get_image_size(_path_file)
- except Exception as e:
- raise PhotoCorruptedException(str(e))
-
- tags = {}
- xtags = {}
-
- with open(_path_file, 'rb') as f:
- tags = exifread.process_file(f, details=True, extract_thumbnail=False)
- try:
- if 'Image Make' in tags:
- try:
- self.camera_make = tags['Image Make'].values
- self.camera_make = self.camera_make.strip()
- except UnicodeDecodeError:
- log.ODM_WARNING("EXIF Image Make might be corrupted")
- self.camera_make = "unknown"
- if 'Image Model' in tags:
- try:
- self.camera_model = tags['Image Model'].values
- self.camera_model = self.camera_model.strip()
- except UnicodeDecodeError:
- log.ODM_WARNING("EXIF Image Model might be corrupted")
- self.camera_model = "unknown"
- if 'GPS GPSAltitude' in tags:
- self.altitude = self.float_value(tags['GPS GPSAltitude'])
- if 'GPS GPSAltitudeRef' in tags and self.int_value(tags['GPS GPSAltitudeRef']) is not None and self.int_value(tags['GPS GPSAltitudeRef']) > 0:
- self.altitude *= -1
- if 'GPS GPSLatitude' in tags and 'GPS GPSLatitudeRef' in tags:
- self.latitude = self.dms_to_decimal(tags['GPS GPSLatitude'], tags['GPS GPSLatitudeRef'])
- elif 'GPS GPSLatitude' in tags:
- log.ODM_WARNING("GPS position for %s might be incorrect, GPSLatitudeRef tag is missing (assuming N)" % self.filename)
- self.latitude = self.dms_to_decimal(tags['GPS GPSLatitude'], GPSRefMock('N'))
- if 'GPS GPSLongitude' in tags and 'GPS GPSLongitudeRef' in tags:
- self.longitude = self.dms_to_decimal(tags['GPS GPSLongitude'], tags['GPS GPSLongitudeRef'])
- elif 'GPS GPSLongitude' in tags:
- log.ODM_WARNING("GPS position for %s might be incorrect, GPSLongitudeRef tag is missing (assuming E)" % self.filename)
- self.longitude = self.dms_to_decimal(tags['GPS GPSLongitude'], GPSRefMock('E'))
- if 'Image Orientation' in tags:
- self.orientation = self.int_value(tags['Image Orientation'])
- except (IndexError, ValueError) as e:
- log.ODM_WARNING("Cannot read basic EXIF tags for %s: %s" % (self.filename, str(e)))
-
- try:
- if 'Image Tag 0xC61A' in tags:
- self.black_level = self.list_values(tags['Image Tag 0xC61A'])
- elif 'BlackLevel' in tags:
- self.black_level = self.list_values(tags['BlackLevel'])
-
- if 'EXIF ExposureTime' in tags:
- self.exposure_time = self.float_value(tags['EXIF ExposureTime'])
-
- if 'EXIF FNumber' in tags:
- self.fnumber = self.float_value(tags['EXIF FNumber'])
-
- if 'EXIF ISOSpeed' in tags:
- self.iso_speed = self.int_value(tags['EXIF ISOSpeed'])
- elif 'EXIF PhotographicSensitivity' in tags:
- self.iso_speed = self.int_value(tags['EXIF PhotographicSensitivity'])
- elif 'EXIF ISOSpeedRatings' in tags:
- self.iso_speed = self.int_value(tags['EXIF ISOSpeedRatings'])
-
-
- if 'Image BitsPerSample' in tags:
- self.bits_per_sample = self.int_value(tags['Image BitsPerSample'])
- if 'EXIF DateTimeOriginal' in tags:
- str_time = tags['EXIF DateTimeOriginal'].values
- utc_time = datetime.strptime(str_time, "%Y:%m:%d %H:%M:%S")
- subsec = 0
- if 'EXIF SubSecTime' in tags:
- subsec = self.int_value(tags['EXIF SubSecTime'])
- negative = 1.0
- if subsec < 0:
- negative = -1.0
- subsec *= -1.0
- subsec = float('0.{}'.format(int(subsec)))
- subsec *= negative
- ms = subsec * 1e3
- utc_time += timedelta(milliseconds = ms)
- timezone = pytz.timezone('UTC')
- epoch = timezone.localize(datetime.utcfromtimestamp(0))
- self.utc_time = (timezone.localize(utc_time) - epoch).total_seconds() * 1000.0
-
- if 'MakerNote SpeedX' in tags and \
- 'MakerNote SpeedY' in tags and \
- 'MakerNote SpeedZ' in tags:
- self.speed_x = self.float_value(tags['MakerNote SpeedX'])
- self.speed_y = self.float_value(tags['MakerNote SpeedY'])
- self.speed_z = self.float_value(tags['MakerNote SpeedZ'])
-
- except Exception as e:
- log.ODM_WARNING("Cannot read extended EXIF tags for %s: %s" % (self.filename, str(e)))
-
- # Warn if GPS coordinates are suspiciously wrong
- if self.latitude is not None and self.latitude == 0 and \
- self.longitude is not None and self.longitude == 0:
- log.ODM_WARNING("%s has GPS position (0,0), possibly corrupted" % self.filename)
-
-
- # Extract XMP tags
- f.seek(0)
- xmp = self.get_xmp(f)
-
- for xtags in xmp:
- try:
- band_name = self.get_xmp_tag(xtags, ['Camera:BandName', '@Camera:BandName'])
- if band_name is not None:
- self.band_name = band_name.replace(" ", "")
-
- self.set_attr_from_xmp_tag('band_index', xtags, [
- 'DLS:SensorId', # Micasense RedEdge
- '@Camera:RigCameraIndex', # Parrot Sequoia, Sentera 21244-00_3.2MP-GS-0001
- 'Camera:RigCameraIndex', # MicaSense Altum
- ])
-
- self.set_attr_from_xmp_tag('radiometric_calibration', xtags, [
- 'MicaSense:RadiometricCalibration',
- ])
-
- self.set_attr_from_xmp_tag('vignetting_center', xtags, [
- 'Camera:VignettingCenter',
- 'Sentera:VignettingCenter',
- ])
-
- self.set_attr_from_xmp_tag('vignetting_polynomial', xtags, [
- 'Camera:VignettingPolynomial',
- 'Sentera:VignettingPolynomial',
- ])
-
- self.set_attr_from_xmp_tag('horizontal_irradiance', xtags, [
- 'Camera:HorizontalIrradiance'
- ], float)
-
- self.set_attr_from_xmp_tag('irradiance_scale_to_si', xtags, [
- 'Camera:IrradianceScaleToSIUnits'
- ], float)
-
- self.set_attr_from_xmp_tag('sun_sensor', xtags, [
- 'Camera:SunSensor',
- ], float)
-
- self.set_attr_from_xmp_tag('spectral_irradiance', xtags, [
- 'Camera:SpectralIrradiance',
- 'Camera:Irradiance',
- ], float)
-
- self.set_attr_from_xmp_tag('capture_uuid', xtags, [
- '@drone-dji:CaptureUUID', # DJI
- 'MicaSense:CaptureId', # MicaSense Altum
- '@Camera:ImageUniqueID', # sentera 6x
- ])
-
- # Camera make / model for some cameras is stored in the XMP
- if self.camera_make == '':
- self.set_attr_from_xmp_tag('camera_make', xtags, [
- '@tiff:Make'
- ])
- if self.camera_model == '':
- self.set_attr_from_xmp_tag('camera_model', xtags, [
- '@tiff:Model'
- ])
-
- # DJI GPS tags
- self.set_attr_from_xmp_tag('longitude', xtags, [
- '@drone-dji:Longitude'
- ], float)
- self.set_attr_from_xmp_tag('latitude', xtags, [
- '@drone-dji:Latitude'
- ], float)
- self.set_attr_from_xmp_tag('altitude', xtags, [
- '@drone-dji:AbsoluteAltitude'
- ], float)
-
- # Phantom 4 RTK
- if '@drone-dji:RtkStdLon' in xtags:
- y = float(self.get_xmp_tag(xtags, '@drone-dji:RtkStdLon'))
- x = float(self.get_xmp_tag(xtags, '@drone-dji:RtkStdLat'))
- self.gps_xy_stddev = max(x, y)
-
- if '@drone-dji:RtkStdHgt' in xtags:
- self.gps_z_stddev = float(self.get_xmp_tag(xtags, '@drone-dji:RtkStdHgt'))
- else:
- self.set_attr_from_xmp_tag('gps_xy_stddev', xtags, [
- '@Camera:GPSXYAccuracy',
- 'GPSXYAccuracy'
- ], float)
- self.set_attr_from_xmp_tag('gps_z_stddev', xtags, [
- '@Camera:GPSZAccuracy',
- 'GPSZAccuracy'
- ], float)
-
- # DJI Speed tags
- if '@drone-dji:FlightXSpeed' in xtags and \
- '@drone-dji:FlightYSpeed' in xtags and \
- '@drone-dji:FlightZSpeed' in xtags:
- self.set_attr_from_xmp_tag('speed_x', xtags, [
- '@drone-dji:FlightXSpeed'
- ], float)
- self.set_attr_from_xmp_tag('speed_y', xtags, [
- '@drone-dji:FlightYSpeed',
- ], float)
- self.set_attr_from_xmp_tag('speed_z', xtags, [
- '@drone-dji:FlightZSpeed',
- ], float)
-
- # Account for over-estimation
- if self.gps_xy_stddev is not None:
- self.gps_xy_stddev *= 2.0
- if self.gps_z_stddev is not None:
- self.gps_z_stddev *= 2.0
-
- if 'DLS:Yaw' in xtags:
- self.set_attr_from_xmp_tag('dls_yaw', xtags, ['DLS:Yaw'], float)
- self.set_attr_from_xmp_tag('dls_pitch', xtags, ['DLS:Pitch'], float)
- self.set_attr_from_xmp_tag('dls_roll', xtags, ['DLS:Roll'], float)
-
- camera_projection = self.get_xmp_tag(xtags, ['@Camera:ModelType', 'Camera:ModelType'])
- if camera_projection is not None:
- camera_projection = camera_projection.lower()
- if camera_projection in projections:
- self.camera_projection = camera_projection
-
- # OPK
- self.set_attr_from_xmp_tag('yaw', xtags, ['@drone-dji:FlightYawDegree', '@Camera:Yaw', 'Camera:Yaw'], float)
- self.set_attr_from_xmp_tag('pitch', xtags, ['@drone-dji:GimbalPitchDegree', '@Camera:Pitch', 'Camera:Pitch'], float)
- self.set_attr_from_xmp_tag('roll', xtags, ['@drone-dji:GimbalRollDegree', '@Camera:Roll', 'Camera:Roll'], float)
-
- # Normalize YPR conventions (assuming nadir camera)
- # Yaw: 0 --> top of image points north
- # Yaw: 90 --> top of image points east
- # Yaw: 270 --> top of image points west
- # Pitch: 0 --> nadir camera
- # Pitch: 90 --> camera is looking forward
- # Roll: 0 (assuming gimbal)
- if self.has_ypr():
- if self.camera_make.lower() in ['dji', 'hasselblad']:
- self.pitch = 90 + self.pitch
-
- if self.camera_make.lower() == 'sensefly':
- self.roll *= -1
-
- except Exception as e:
- log.ODM_WARNING("Cannot read XMP tags for %s: %s" % (self.filename, str(e)))
-
- # self.set_attr_from_xmp_tag('center_wavelength', xtags, [
- # 'Camera:CentralWavelength'
- # ], float)
-
- # self.set_attr_from_xmp_tag('bandwidth', xtags, [
- # 'Camera:WavelengthFWHM'
- # ], float)
-
- # Sanitize band name since we use it in folder paths
- self.band_name = re.sub('[^A-Za-z0-9]+', '', self.band_name)
-
- self.compute_focal(tags, xtags)
- self.compute_opk()
-
- def compute_focal(self, tags, xtags):
- try:
- self.focal_ratio = self.extract_focal(self.camera_make, self.camera_model, tags, xtags)
- except (IndexError, ValueError) as e:
- log.ODM_WARNING("Cannot extract focal ratio for %s: %s" % (self.filename, str(e)))
-
- def extract_focal(self, make, model, tags, xtags):
- if make != "unknown":
- # remove duplicate 'make' information in 'model'
- model = model.replace(make, "")
-
- sensor_string = (make.strip() + " " + model.strip()).strip().lower()
-
- sensor_width = None
- if ("EXIF FocalPlaneResolutionUnit" in tags and "EXIF FocalPlaneXResolution" in tags):
- resolution_unit = self.float_value(tags["EXIF FocalPlaneResolutionUnit"])
- mm_per_unit = get_mm_per_unit(resolution_unit)
- if mm_per_unit:
- pixels_per_unit = self.float_value(tags["EXIF FocalPlaneXResolution"])
- if pixels_per_unit <= 0 and "EXIF FocalPlaneYResolution" in tags:
- pixels_per_unit = self.float_value(tags["EXIF FocalPlaneYResolution"])
-
- if pixels_per_unit > 0 and self.width is not None:
- units_per_pixel = 1 / pixels_per_unit
- sensor_width = self.width * units_per_pixel * mm_per_unit
-
- focal_35 = None
- focal = None
- if "EXIF FocalLengthIn35mmFilm" in tags:
- focal_35 = self.float_value(tags["EXIF FocalLengthIn35mmFilm"])
- if "EXIF FocalLength" in tags:
- focal = self.float_value(tags["EXIF FocalLength"])
- if focal is None and "@aux:Lens" in xtags:
- lens = self.get_xmp_tag(xtags, ["@aux:Lens"])
- matches = re.search(r'([\d\.]+)mm', str(lens))
- if matches:
- focal = float(matches.group(1))
-
- if focal_35 is not None and focal_35 > 0:
- focal_ratio = focal_35 / 36.0 # 35mm film produces 36x24mm pictures.
- else:
- if not sensor_width:
- sensor_width = sensor_data().get(sensor_string, None)
- if sensor_width and focal:
- focal_ratio = focal / sensor_width
- else:
- focal_ratio = 0.85
-
- return focal_ratio
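-
- # Worked example (hypothetical EXIF): FocalLengthIn35mmFilm = 24 gives
- # focal_ratio = 24 / 36 = 0.6667; alternatively FocalLength = 8.8 (mm)
- # with a 13.2mm-wide sensor gives 8.8 / 13.2 = 0.6667.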
-
- def set_attr_from_xmp_tag(self, attr, xmp_tags, tags, cast=None):
- v = self.get_xmp_tag(xmp_tags, tags)
- if v is not None:
- if cast is None:
- setattr(self, attr, v)
- else:
- # Handle fractions
- if (cast == float or cast == int) and "/" in v:
- v = self.try_parse_fraction(v)
- setattr(self, attr, cast(v))
-
- def get_xmp_tag(self, xmp_tags, tags):
- if isinstance(tags, str):
- tags = [tags]
-
- for tag in tags:
- if tag in xmp_tags:
- t = xmp_tags[tag]
-
- if isinstance(t, string_types):
- return str(t)
- elif isinstance(t, dict):
- items = t.get('rdf:Seq', {}).get('rdf:li', {})
- if items:
- if isinstance(items, string_types):
- return items
- return " ".join(items)
- elif isinstance(t, int) or isinstance(t, float):
- return t
-
-
- # From https://github.com/mapillary/OpenSfM/blob/master/opensfm/exif.py
- def get_xmp(self, file):
- img_bytes = file.read()
- xmp_start = img_bytes.find(b'<x:xmpmeta')
- xmp_end = img_bytes.find(b'</x:xmpmeta')
-
- if xmp_start < xmp_end:
- xmp_str = img_bytes[xmp_start:xmp_end + 12].decode('utf8')
- try:
- xdict = x2d.parse(xmp_str)
- except ExpatError:
- log.ODM_WARNING("Cannot parse XMP for %s" % self.filename)
- return []
- xdict = xdict.get('x:xmpmeta', {})
- xdict = xdict.get('rdf:RDF', {})
- xdict = xdict.get('rdf:Description', {})
- if isinstance(xdict, list):
- return xdict
- else:
- return [xdict]
- else:
- return []
-
- def dms_to_decimal(self, dms, sign):
- """Converts degree/minute/second EXIF coordinates to decimal degrees"""
- degrees, minutes, seconds = self.float_values(dms)
-
- if degrees is not None and minutes is not None and seconds is not None:
- return (-1 if sign.values[0] in 'SWsw' else 1) * (
- degrees +
- minutes / 60 +
- seconds / 3600
- )
-
- def float_values(self, tag):
- if isinstance(tag.values, list):
- result = []
- for v in tag.values:
- if isinstance(v, int):
- result.append(float(v))
- elif v.den != 0:
- result.append(float(v.num) / float(v.den))
- else:
- result.append(None)
- return result
- else:
- return [float(tag.values)]
-
- def float_value(self, tag):
- v = self.float_values(tag)
- if len(v) > 0:
- return v[0]
-
- def int_values(self, tag):
- if isinstance(tag.values, list):
- return [int(v) for v in tag.values]
- else:
- return [int(tag.values)]
-
- def int_value(self, tag):
- v = self.int_values(tag)
- if len(v) > 0:
- return v[0]
-
- def list_values(self, tag):
- return " ".join(map(str, tag.values))
-
- def try_parse_fraction(self, val):
- parts = val.split("/")
- if len(parts) == 2:
- try:
- num, den = map(float, parts)
- return num / den if den != 0 else val
- except ValueError:
- pass
- return val
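-
- # e.g. try_parse_fraction("10/4") == 2.5; malformed input such as
- # "10/0" or "abc" is returned unchanged.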
-
- def get_radiometric_calibration(self):
- if isinstance(self.radiometric_calibration, str):
- parts = self.radiometric_calibration.split(" ")
- if len(parts) == 3:
- return list(map(float, parts))
-
- return [None, None, None]
-
- def get_dark_level(self):
- if self.black_level:
- levels = np.array([float(v) for v in self.black_level.split(" ")])
- return levels.mean()
-
- def get_gain(self):
- #(gain = ISO/100)
- if self.iso_speed:
- return self.iso_speed / 100.0
-
- def get_vignetting_center(self):
- if self.vignetting_center:
- parts = self.vignetting_center.split(" ")
- if len(parts) == 2:
- return list(map(float, parts))
- return [None, None]
-
- def get_vignetting_polynomial(self):
- if self.vignetting_polynomial:
- parts = self.vignetting_polynomial.split(" ")
- if len(parts) > 0:
- coeffs = list(map(float, parts))
-
- # Different camera vendors seem to use different ordering for the coefficients
- if self.camera_make != "Sentera":
- coeffs.reverse()
- return coeffs
-
- def get_utc_time(self):
- if self.utc_time:
- return datetime.fromtimestamp(self.utc_time / 1000, timezone.utc)
-
- def get_photometric_exposure(self):
- # H ~= (exposure_time) / (f_number^2)
- if self.fnumber is not None and self.exposure_time is not None and self.exposure_time > 0 and self.fnumber > 0:
- return self.exposure_time / (self.fnumber * self.fnumber)
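-
- # e.g. a 1/1000s exposure at f/2.8 yields H = 0.001 / 7.84 ~= 1.28e-4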
-
- def get_horizontal_irradiance(self):
- if self.horizontal_irradiance is not None:
- scale = 1.0 # Assumed
- if self.irradiance_scale_to_si is not None:
- scale = self.irradiance_scale_to_si
-
- return self.horizontal_irradiance * scale
-
- def get_sun_sensor(self):
- if self.sun_sensor is not None:
- # TODO: Presence of XMP:SunSensorExposureTime
- # and XMP:SunSensorSensitivity might
- # require additional logic. If these two tags are present,
- # then sun_sensor is not in physical units?
- return self.sun_sensor / 65535.0 # normalize uint16 (is this correct?)
- elif self.spectral_irradiance is not None:
- scale = 1.0 # Assumed
- if self.irradiance_scale_to_si is not None:
- scale = self.irradiance_scale_to_si
-
- return self.spectral_irradiance * scale
-
- def get_dls_pose(self):
- if self.dls_yaw is not None:
- return [self.dls_yaw, self.dls_pitch, self.dls_roll]
- return [0.0, 0.0, 0.0]
-
- def get_bit_depth_max(self):
- if self.bits_per_sample:
- return float(2 ** self.bits_per_sample)
-
- return None
-
- def get_capture_id(self):
- # Use capture UUID first, capture time as fallback
- if self.capture_uuid is not None:
- return self.capture_uuid
-
- return self.get_utc_time()
-
- def get_gps_dop(self):
- val = -9999
- if self.gps_xy_stddev is not None:
- val = self.gps_xy_stddev
- if self.gps_z_stddev is not None:
- val = max(val, self.gps_z_stddev)
- if val > 0:
- return val
-
- return None
-
- def override_gps_dop(self, dop):
- self.gps_xy_stddev = self.gps_z_stddev = dop
-
- def override_camera_projection(self, camera_projection):
- if camera_projection in projections:
- self.camera_projection = camera_projection
-
- def is_thermal(self):
- # Added to support the DJI Mavic 2 Enterprise Advanced (M2EA) thermal sensor
- if self.camera_make == "DJI":
- return self.camera_model == "MAVIC2-ENTERPRISE-ADVANCED" and self.width == 640 and self.height == 512
- return self.band_name.upper() in ["LWIR"] # TODO: more?
-
- def camera_id(self):
- return " ".join(
- [
- "v2",
- self.camera_make.strip(),
- self.camera_model.strip(),
- str(int(self.width)),
- str(int(self.height)),
- self.camera_projection,
- str(float(self.focal_ratio))[:6],
- ]
- ).lower()
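-
- # e.g. "v2 dji fc6310 5472 3648 brown 0.6666" (hypothetical values)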
-
- def to_opensfm_exif(self, rolling_shutter = False, rolling_shutter_readout = 0):
- capture_time = 0.0
- if self.utc_time is not None:
- capture_time = self.utc_time / 1000.0
-
- gps = {}
- has_gps = self.latitude is not None and self.longitude is not None
- if has_gps:
- gps['latitude'] = self.latitude
- gps['longitude'] = self.longitude
- if self.altitude is not None:
- gps['altitude'] = self.altitude
- else:
- gps['altitude'] = 0.0
-
- dop = self.get_gps_dop()
- if dop is None:
- dop = 10.0 # Default
-
- gps['dop'] = dop
-
- d = {
- "make": self.camera_make,
- "model": self.camera_model,
- "width": self.width,
- "height": self.height,
- "projection_type": self.camera_projection,
- "focal_ratio": self.focal_ratio,
- "orientation": self.orientation,
- "capture_time": capture_time,
- "gps": gps,
- "camera": self.camera_id()
- }
-
- if self.has_opk():
- d['opk'] = {
- 'omega': self.omega,
- 'phi': self.phi,
- 'kappa': self.kappa
- }
-
- # Speed is not useful without GPS
- if self.has_speed() and has_gps:
- d['speed'] = [self.speed_y, self.speed_x, self.speed_z]
-
- if rolling_shutter:
- d['rolling_shutter'] = get_rolling_shutter_readout(self.camera_make, self.camera_model, rolling_shutter_readout)
-
- return d
-
- def has_ypr(self):
- return self.yaw is not None and \
- self.pitch is not None and \
- self.roll is not None
-
- def has_opk(self):
- return self.omega is not None and \
- self.phi is not None and \
- self.kappa is not None
-
- def has_speed(self):
- return self.speed_x is not None and \
- self.speed_y is not None and \
- self.speed_z is not None
-
- def has_geo(self):
- return self.latitude is not None and \
- self.longitude is not None
-
- def compute_opk(self):
- if self.has_ypr() and self.has_geo():
- y, p, r = math.radians(self.yaw), math.radians(self.pitch), math.radians(self.roll)
-
- # Ref: New Calibration and Computing Method for Direct
- # Georeferencing of Image and Scanner Data Using the
- # Position and Angular Data of an Hybrid Inertial Navigation System
- # by Manfred Bäumker
-
- # YPR rotation matrix
- cnb = np.array([[ math.cos(y) * math.cos(p), math.cos(y) * math.sin(p) * math.sin(r) - math.sin(y) * math.cos(r), math.cos(y) * math.sin(p) * math.cos(r) + math.sin(y) * math.sin(r)],
- [ math.sin(y) * math.cos(p), math.sin(y) * math.sin(p) * math.sin(r) + math.cos(y) * math.cos(r), math.sin(y) * math.sin(p) * math.cos(r) - math.cos(y) * math.sin(r)],
- [ -math.sin(p), math.cos(p) * math.sin(r), math.cos(p) * math.cos(r)],
- ])
-
- # Convert between image and body coordinates
- # Top of image pixels point to flying direction
- # and camera is looking down.
- # We might need to change this if we want different
- # camera mount orientations (e.g. backward or sideways)
-
- # (Swap X/Y, flip Z)
- cbb = np.array([[0, 1, 0],
- [1, 0, 0],
- [0, 0, -1]])
-
- delta = 1e-7
-
- alt = self.altitude if self.altitude is not None else 0.0
- p1 = np.array(ecef_from_lla(self.latitude + delta, self.longitude, alt))
- p2 = np.array(ecef_from_lla(self.latitude - delta, self.longitude, alt))
- xnp = p1 - p2
- m = np.linalg.norm(xnp)
-
- if m == 0:
- log.ODM_WARNING("Cannot compute OPK angles, divider = 0")
- return
-
- # Unit vector pointing north
- xnp /= m
-
- znp = np.array([0, 0, -1]).T
- ynp = np.cross(znp, xnp)
-
- cen = np.array([xnp, ynp, znp]).T
-
- # OPK rotation matrix
- ceb = cen.dot(cnb).dot(cbb)
-
- self.omega = math.degrees(math.atan2(-ceb[1][2], ceb[2][2]))
- self.phi = math.degrees(math.asin(ceb[0][2]))
- self.kappa = math.degrees(math.atan2(-ceb[0][1], ceb[0][0]))
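-
- # In matrix form: ceb = cen . cnb . cbb, where cnb is the yaw/pitch/roll
- # rotation, cbb maps image axes into the body frame, and cen maps the
- # local level frame into Earth-fixed coordinates. Omega/phi/kappa are
- # then read back from R = ceb as:
- # omega = atan2(-R[1][2], R[2][2]), phi = asin(R[0][2]),
- # kappa = atan2(-R[0][1], R[0][0])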
diff --git a/o/ODM/ODM-2.8.7/opendm/point_cloud.py b/o/ODM/ODM-2.8.7/opendm/point_cloud.py
deleted file mode 100644
index 481ef33a..00000000
--- a/o/ODM/ODM-2.8.7/opendm/point_cloud.py
+++ /dev/null
@@ -1,294 +0,0 @@
-import os, sys, shutil, tempfile, math, json
-from opendm import system
-from opendm import log
-from opendm import context
-from opendm.system import run
-from opendm import entwine
-from opendm import io
-from opendm.concurrency import parallel_map
-from opendm.utils import double_quote
-from opendm.boundary import as_polygon, as_geojson
-from opendm.dem.pdal import run_pipeline
-
-def ply_info(input_ply):
- if not os.path.exists(input_ply):
- raise IOError("%s does not exist" % input_ply)
-
- # Read PLY header, check if point cloud has normals
- has_normals = False
- has_views = False
- vertex_count = 0
-
- with open(input_ply, 'r', errors='ignore') as f:
- line = f.readline().strip().lower()
- i = 0
- while line != "end_header":
- line = f.readline().strip().lower()
- props = line.split(" ")
- if len(props) == 3:
- if props[0] == "property" and props[2] in ["nx", "normalx", "normal_x"]:
- has_normals = True
- if props[0] == "property" and props[2] in ["views"]:
- has_views = True
- elif props[0] == "element" and props[1] == "vertex":
- vertex_count = int(props[2])
- i += 1
- if i > 100:
- raise IOError("Cannot find end_header field. Invalid PLY?")
-
-
- return {
- 'has_normals': has_normals,
- 'vertex_count': vertex_count,
- 'has_views': has_views,
- 'header_lines': i + 1
- }
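-
- # A header this parser accepts looks like (values hypothetical):
- #
- # ply
- # format binary_little_endian 1.0
- # element vertex 1500000
- # property float x
- # property float y
- # property float z
- # property float nx <- any of nx/normalx/normal_x sets has_normals
- # end_header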
-
-
-def split(input_point_cloud, outdir, filename_template, capacity, dims=None):
- log.ODM_INFO("Splitting point cloud filtering in chunks of {} vertices".format(capacity))
-
- if not os.path.exists(input_point_cloud):
- log.ODM_ERROR("{} does not exist, cannot split point cloud. The program will now exit.".format(input_point_cloud))
- sys.exit(1)
-
- if not os.path.exists(outdir):
- system.mkdir_p(outdir)
-
- if len(os.listdir(outdir)) != 0:
- log.ODM_ERROR("%s already contains some files. The program will now exit.".format(outdir))
- sys.exit(1)
-
- cmd = 'pdal split -i "%s" -o "%s" --capacity %s ' % (input_point_cloud, os.path.join(outdir, filename_template), capacity)
-
- if filename_template.endswith(".ply"):
- cmd += ("--writers.ply.sized_types=false "
- "--writers.ply.storage_mode=\"little endian\" ")
- if dims is not None:
- cmd += '--writers.ply.dims="%s"' % dims
- system.run(cmd)
-
- return [os.path.join(outdir, f) for f in os.listdir(outdir)]
-
-
-def filter(input_point_cloud, output_point_cloud, standard_deviation=2.5, meank=16, sample_radius=0, boundary=None, verbose=False, max_concurrency=1):
- """
- Filters a point cloud
- """
- if not os.path.exists(input_point_cloud):
- log.ODM_ERROR("{} does not exist. The program will now exit.".format(input_point_cloud))
- sys.exit(1)
-
- args = [
- '--input "%s"' % input_point_cloud,
- '--output "%s"' % output_point_cloud,
- '--concurrency %s' % max_concurrency,
- '--verbose' if verbose else '',
- ]
-
- if sample_radius > 0:
- log.ODM_INFO("Sampling points around a %sm radius" % sample_radius)
- args.append('--radius %s' % sample_radius)
-
- if standard_deviation > 0 and meank > 0:
- log.ODM_INFO("Filtering {} (statistical, meanK {}, standard deviation {})".format(input_point_cloud, meank, standard_deviation))
- args.append('--meank %s' % meank)
- args.append('--std %s' % standard_deviation)
-
- if boundary is not None:
- log.ODM_INFO("Boundary {}".format(boundary))
- fd, boundary_json_file = tempfile.mkstemp(suffix='.boundary.json')
- os.close(fd)
- with open(boundary_json_file, 'w') as f:
- f.write(as_geojson(boundary))
- args.append('--boundary "%s"' % boundary_json_file)
-
- system.run('"%s" %s' % (context.fpcfilter_path, " ".join(args)))
-
- if not os.path.exists(output_point_cloud):
- log.ODM_WARNING("{} not found, filtering has failed.".format(output_point_cloud))
-
-def export_info_json(pointcloud_path, info_file_path):
- system.run('pdal info --dimensions "X,Y,Z" "{0}" > "{1}"'.format(pointcloud_path, info_file_path))
-
-
-def export_summary_json(pointcloud_path, summary_file_path):
- system.run('pdal info --summary "{0}" > "{1}"'.format(pointcloud_path, summary_file_path))
-
-def get_extent(input_point_cloud):
- fd, json_file = tempfile.mkstemp(suffix='.json')
- os.close(fd)
-
- # Get point cloud extent
- fallback = False
-
- # We know PLY files do not have --summary support
- if input_point_cloud.lower().endswith(".ply"):
- fallback = True
- run('pdal info "{0}" > "{1}"'.format(input_point_cloud, json_file))
-
- try:
- if not fallback:
- run('pdal info --summary "{0}" > "{1}"'.format(input_point_cloud, json_file))
- except:
- fallback = True
- run('pdal info "{0}" > "{1}"'.format(input_point_cloud, json_file))
-
- bounds = {}
- with open(json_file, 'r') as f:
- result = json.loads(f.read())
-
- if not fallback:
- summary = result.get('summary')
- if summary is None: raise Exception("Cannot compute summary for %s (summary key missing)" % input_point_cloud)
- bounds = summary.get('bounds')
- else:
- stats = result.get('stats')
- if stats is None: raise Exception("Cannot compute bounds for %s (stats key missing)" % input_point_cloud)
- bbox = stats.get('bbox')
- if bbox is None: raise Exception("Cannot compute bounds for %s (bbox key missing)" % input_point_cloud)
- native = bbox.get('native')
- if native is None: raise Exception("Cannot compute bounds for %s (native key missing)" % input_point_cloud)
- bounds = native.get('bbox')
-
- if bounds is None: raise Exception("Cannot compute bounds for %s (bounds key missing)" % input_point_cloud)
-
- if bounds.get('maxx', None) is None or \
- bounds.get('minx', None) is None or \
- bounds.get('maxy', None) is None or \
- bounds.get('miny', None) is None or \
- bounds.get('maxz', None) is None or \
- bounds.get('minz', None) is None:
- raise Exception("Cannot compute bounds for %s (invalid keys) %s" % (input_point_cloud, str(bounds)))
-
- os.remove(json_file)
- return bounds
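-
- # The two JSON shapes consumed above, sketched with the keys that matter:
- #
- # --summary: {"summary": {"bounds": {"minx": ..., "maxx": ..., ...}}}
- # fallback: {"stats": {"bbox": {"native": {"bbox": {"minx": ..., ...}}}}}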
-
-
-def merge(input_point_cloud_files, output_file, rerun=False):
- num_files = len(input_point_cloud_files)
- if num_files == 0:
- log.ODM_WARNING("No input point cloud files to process")
- return
-
- if io.file_exists(output_file):
- log.ODM_WARNING("Removing previous point cloud: %s" % output_file)
- os.remove(output_file)
-
- kwargs = {
- 'all_inputs': " ".join(map(double_quote, input_point_cloud_files)),
- 'output': output_file
- }
-
- system.run('lasmerge -i {all_inputs} -o "{output}"'.format(**kwargs))
-
-
-def fast_merge_ply(input_point_cloud_files, output_file):
- # Assumes that all input files share the same header/content format
- # as the merge is a naive byte stream copy
-
- num_files = len(input_point_cloud_files)
- if num_files == 0:
- log.ODM_WARNING("No input point cloud files to process")
- return
-
- if io.file_exists(output_file):
- log.ODM_WARNING("Removing previous point cloud: %s" % output_file)
- os.remove(output_file)
-
- vertex_count = sum([ply_info(pcf)['vertex_count'] for pcf in input_point_cloud_files])
- master_file = input_point_cloud_files[0]
- with open(output_file, "wb") as out:
- with open(master_file, "r", errors="ignore") as fhead:
- # Copy header
- line = fhead.readline()
- out.write(line.encode('utf8'))
-
- i = 0
- while line.strip().lower() != "end_header":
- line = fhead.readline()
-
- # Intercept element vertex field
- if line.lower().startswith("element vertex "):
- out.write(("element vertex %s\n" % vertex_count).encode('utf8'))
- else:
- out.write(line.encode('utf8'))
-
- i += 1
- if i > 100:
- raise IOError("Cannot find end_header field. Invalid PLY?")
-
- for ipc in input_point_cloud_files:
- i = 0
- with open(ipc, "rb") as fin:
- # Skip header
- line = fin.readline()
- while line.strip().lower() != b"end_header":
- line = fin.readline()
-
- i += 1
- if i > 100:
- raise IOError("Cannot find end_header field. Invalid PLY?")
-
- # Write fields
- out.write(fin.read())
-
- return output_file
-
-
-def merge_ply(input_point_cloud_files, output_file, dims=None):
- num_files = len(input_point_cloud_files)
- if num_files == 0:
- log.ODM_WARNING("No input point cloud files to process")
- return
-
- cmd = [
- 'pdal',
- 'merge',
- '--writers.ply.sized_types=false',
- '--writers.ply.storage_mode="little endian"',
- ('--writers.ply.dims="%s"' % dims) if dims is not None else '',
- ' '.join(map(double_quote, input_point_cloud_files + [output_file])),
- ]
-
- system.run(' '.join(cmd))
-
-def post_point_cloud_steps(args, tree, rerun=False):
- # XYZ point cloud output
- if args.pc_csv:
- log.ODM_INFO("Creating CSV file (XYZ format)")
-
- if not io.file_exists(tree.odm_georeferencing_xyz_file) or rerun:
- system.run("pdal translate -i \"{}\" "
- "-o \"{}\" "
- "--writers.text.format=csv "
- "--writers.text.order=\"X,Y,Z\" "
- "--writers.text.keep_unspecified=false ".format(
- tree.odm_georeferencing_model_laz,
- tree.odm_georeferencing_xyz_file))
- else:
- log.ODM_WARNING("Found existing CSV file %s" % tree.odm_georeferencing_xyz_file)
-
- # LAS point cloud output
- if args.pc_las:
- log.ODM_INFO("Creating LAS file")
-
- if not io.file_exists(tree.odm_georeferencing_model_las) or rerun:
- system.run("pdal translate -i \"{}\" "
- "-o \"{}\" ".format(
- tree.odm_georeferencing_model_laz,
- tree.odm_georeferencing_model_las))
- else:
- log.ODM_WARNING("Found existing LAS file %s" % tree.odm_georeferencing_xyz_file)
-
- # EPT point cloud output
- if args.pc_ept:
- log.ODM_INFO("Creating Entwine Point Tile output")
- entwine.build([tree.odm_georeferencing_model_laz], tree.entwine_pointcloud, max_concurrency=args.max_concurrency, rerun=rerun)
-
- # COPC point clouds
- if args.pc_copc:
- log.ODM_INFO("Creating Cloud Optimized Point Cloud (COPC)")
-
- copc_output = io.related_file_path(tree.odm_georeferencing_model_laz, postfix=".copc")
- entwine.build_copc([tree.odm_georeferencing_model_laz], copc_output)
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/opendm/progress.py b/o/ODM/ODM-2.8.7/opendm/progress.py
deleted file mode 100644
index 264db82d..00000000
--- a/o/ODM/ODM-2.8.7/opendm/progress.py
+++ /dev/null
@@ -1,40 +0,0 @@
-import socket
-import os
-from opendm import log
-
-PROGRESS_BROADCAST_PORT = 6367 # "ODMR" spelled on a phone keypad
-try:
- sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-except:
- log.ODM_WARNING("Cannot create UDP socket, progress reporting will be disabled.")
- sock = None
-
-class Broadcaster:
- def __init__(self, port):
- self.port = port
- self.project_name = ""
- self.pid = os.getpid()
-
- def set_project_name(self, project_name):
- self.project_name = project_name
-
- def send_update(self, global_progress):
- """
- Update any listener on the pipeline progress (in percentage terms)
- """
- if not sock:
- return
-
- UDP_IP = "127.0.0.1"
-
- if global_progress > 100:
- log.ODM_WARNING("Global progress is > 100 (%s), please contact the developers." % global_progress)
- global_progress = 100
-
- try:
- sock.sendto("PGUP/{}/{}/{}".format(self.pid, self.project_name, float(global_progress)).encode('utf8'),
- (UDP_IP, self.port))
- except Exception as e:
- log.ODM_WARNING("Failed to broadcast progress update on UDP port %s (%s)" % (str(self.port), str(e)))
-
-progressbc = Broadcaster(PROGRESS_BROADCAST_PORT)
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/opendm/pseudogeo.py b/o/ODM/ODM-2.8.7/opendm/pseudogeo.py
deleted file mode 100644
index 5d706fef..00000000
--- a/o/ODM/ODM-2.8.7/opendm/pseudogeo.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from osgeo import osr
-from osgeo import gdal
-from osgeo.gdalconst import GA_Update
-from opendm import io
-from opendm import log
-
-def get_pseudogeo_utm():
- return '+proj=utm +zone=30 +ellps=WGS84 +datum=WGS84 +units=m +no_defs'
-
-def get_pseudogeo_scale():
- return 0.1 # Arbitrarily chosen
-
-def add_pseudo_georeferencing(geotiff):
- if not io.file_exists(geotiff):
- log.ODM_WARNING("Cannot add pseudo georeferencing, %s does not exist" % geotiff)
- return
-
- try:
- log.ODM_INFO("Adding pseudo georeferencing (raster should show up at the equator) to %s" % geotiff)
-
- dst_ds = gdal.Open(geotiff, GA_Update)
- srs = osr.SpatialReference()
- srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
- srs.ImportFromProj4(get_pseudogeo_utm())
- dst_ds.SetProjection( srs.ExportToWkt() )
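- # GDAL geotransform layout: [origin_x, pixel_width, row_rotation,
- # origin_y, column_rotation, pixel_height]; pixel_height is negative
- # for north-up rasters, so rows run south from the (0, 0) origin.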
- dst_ds.SetGeoTransform( [ 0.0, get_pseudogeo_scale(), 0.0, 0.0, 0.0, -get_pseudogeo_scale() ] )
- dst_ds = None
-
- except Exception as e:
- log.ODM_WARNING("Cannot add pseudo georeferencing to %s (%s), skipping..." % (geotiff, str(e)))
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/opendm/remote.py b/o/ODM/ODM-2.8.7/opendm/remote.py
deleted file mode 100644
index 1c614282..00000000
--- a/o/ODM/ODM-2.8.7/opendm/remote.py
+++ /dev/null
@@ -1,515 +0,0 @@
-import time
-import datetime
-import os
-import sys
-import threading
-import signal
-import zipfile
-import glob
-from opendm import log
-from opendm import system
-from opendm import config
-from pyodm import Node, exceptions
-from pyodm.utils import AtomicCounter
-from pyodm.types import TaskStatus
-from opendm.osfm import OSFMContext, get_submodel_args_dict, get_submodel_argv
-from opendm.utils import double_quote
-
-try:
- import queue
-except ImportError:
- import Queue as queue
-
-class LocalRemoteExecutor:
- """
- A class for performing OpenSfM reconstructions and full ODM pipeline executions
- using a mix of local and remote processing. Tasks are executed locally (one at a
- time) and remotely (until the node runs out of available processing slots). This
- allows us to use the processing power of the current machine as well as to
- offload tasks to a network node.
- """
- def __init__(self, nodeUrl, rolling_shutter = False, rerun = False):
- self.node = Node.from_url(nodeUrl)
- self.params = {
- 'tasks': [],
- 'threads': [],
- 'rolling_shutter': rolling_shutter,
- 'rerun': rerun
- }
- self.node_online = True
-
- log.ODM_INFO("LRE: Initializing using cluster node %s:%s" % (self.node.host, self.node.port))
- try:
- info = self.node.info()
- log.ODM_INFO("LRE: Node is online and running %s version %s" % (info.engine, info.engine_version))
- except exceptions.NodeConnectionError:
- log.ODM_WARNING("LRE: The node seems to be offline! We'll still process the dataset, but it's going to run entirely locally.")
- self.node_online = False
- except Exception as e:
- raise system.ExitException("LRE: An unexpected problem happened while opening the node connection: %s" % str(e))
-
- def set_projects(self, paths):
- self.project_paths = paths
-
- def run_reconstruction(self):
- self.run(ReconstructionTask)
-
- def run_toolchain(self):
- self.run(ToolchainTask)
-
- def run(self, taskClass):
- if not self.project_paths:
- return
-
- # Shared variables across threads
- class nonloc:
- error = None
- local_processing = False
- max_remote_tasks = None
-
- calculate_task_limit_lock = threading.Lock()
- finished_tasks = AtomicCounter(0)
- remote_running_tasks = AtomicCounter(0)
-
- # Create queue
- q = queue.Queue()
- for pp in self.project_paths:
- log.ODM_INFO("LRE: Adding to queue %s" % pp)
- q.put(taskClass(pp, self.node, self.params))
-
- def remove_task_safe(task):
- try:
- removed = task.remove()
- except exceptions.OdmError:
- removed = False
- return removed
-
- def cleanup_remote_tasks():
- if self.params['tasks']:
- log.ODM_WARNING("LRE: Attempting to cleanup remote tasks")
- else:
- log.ODM_INFO("LRE: No remote tasks left to cleanup")
-
- for task in self.params['tasks']:
- log.ODM_INFO("LRE: Removing remote task %s... %s" % (task.uuid, 'OK' if remove_task_safe(task) else 'NO'))
-
- def handle_result(task, local, error = None, partial=False):
- def cleanup_remote():
- if not partial and task.remote_task:
- log.ODM_INFO("LRE: Cleaning up remote task (%s)... %s" % (task.remote_task.uuid, 'OK' if remove_task_safe(task.remote_task) else 'NO'))
- self.params['tasks'].remove(task.remote_task)
- task.remote_task = None
-
- if error:
- log.ODM_WARNING("LRE: %s failed with: %s" % (task, str(error)))
-
- # Special case in which the error is caused by a SIGTERM signal
- # this means a local processing was terminated either by CTRL+C or
- # by canceling the task.
- if str(error) == "Child was terminated by signal 15":
- system.exit_gracefully()
-
- task_limit_reached = isinstance(error, NodeTaskLimitReachedException)
- if task_limit_reached:
- # Estimate the maximum number of tasks based on how many tasks
- # are currently running
- with calculate_task_limit_lock:
- if nonloc.max_remote_tasks is None:
- node_task_limit = 0
- for t in self.params['tasks']:
- try:
- info = t.info(with_output=-3)
- if info.status == TaskStatus.RUNNING and info.processing_time >= 0 and len(info.output) >= 3:
- node_task_limit += 1
- except exceptions.OdmError:
- pass
-
- nonloc.max_remote_tasks = max(1, node_task_limit)
- log.ODM_INFO("LRE: Node task limit reached. Setting max remote tasks to %s" % node_task_limit)
-
-
- # Retry, but only if the error is not related to a task failure
- if task.retries < task.max_retries and not isinstance(error, exceptions.TaskFailedError):
- # Put task back in queue
- # Don't increment the retry counter if this task simply reached the task
- # limit count.
- if not task_limit_reached:
- task.retries += 1
- task.wait_until = datetime.datetime.now() + datetime.timedelta(seconds=task.retries * task.retry_timeout)
- cleanup_remote()
- q.task_done()
-
- log.ODM_INFO("LRE: Re-queueing %s (retries: %s)" % (task, task.retries))
- q.put(task)
- if not local: remote_running_tasks.increment(-1)
- return
- else:
- nonloc.error = error
- finished_tasks.increment()
- if not local: remote_running_tasks.increment(-1)
- else:
- if not partial:
- log.ODM_INFO("LRE: %s finished successfully" % task)
- finished_tasks.increment()
- if not local: remote_running_tasks.increment(-1)
-
- cleanup_remote()
- if not partial: q.task_done()
-
- def local_worker():
- while True:
- # Block until a new queue item is available
- task = q.get()
-
- if task is None or nonloc.error is not None:
- q.task_done()
- break
-
- # Process local
- try:
- nonloc.local_processing = True
- task.process(True, handle_result)
- except Exception as e:
- handle_result(task, True, e)
- finally:
- nonloc.local_processing = False
-
-
- def remote_worker():
- while True:
- # Block until a new queue item is available
- task = q.get()
-
- if task is None or nonloc.error is not None:
- q.task_done()
- break
-
- # Yield to local processing
- if not nonloc.local_processing:
- log.ODM_INFO("LRE: Yielding to local processing, sending %s back to the queue" % task)
- q.put(task)
- q.task_done()
- time.sleep(0.05)
- continue
-
- # If we've found an estimate of the limit on the maximum number of tasks
- # a node can process, we block until some tasks have completed
- if nonloc.max_remote_tasks is not None and remote_running_tasks.value >= nonloc.max_remote_tasks:
- q.put(task)
- q.task_done()
- time.sleep(2)
- continue
-
- # Process remote
- try:
- remote_running_tasks.increment()
- task.process(False, handle_result)
- except Exception as e:
- handle_result(task, False, e)
-
- # Create queue thread
- local_thread = threading.Thread(target=local_worker)
- if self.node_online:
- remote_thread = threading.Thread(target=remote_worker)
-
- system.add_cleanup_callback(cleanup_remote_tasks)
-
- # Start workers
- local_thread.start()
- if self.node_online:
- remote_thread.start()
-
- # block until all tasks are done (or CTRL+C)
- try:
- while finished_tasks.value < len(self.project_paths) and nonloc.error is None:
- time.sleep(0.5)
- except KeyboardInterrupt:
- log.ODM_WARNING("LRE: CTRL+C")
- system.exit_gracefully()
-
- # stop workers
- q.put(None)
- if self.node_online:
- q.put(None)
-
- # Wait for queue thread
- local_thread.join()
- if self.node_online:
- remote_thread.join()
-
- # Wait for all remaining threads
- for thrds in self.params['threads']:
- thrds.join()
-
- system.remove_cleanup_callback(cleanup_remote_tasks)
- cleanup_remote_tasks()
-
- if nonloc.error is not None:
- # Try not to leak access token
- if isinstance(nonloc.error, exceptions.NodeConnectionError):
- raise exceptions.NodeConnectionError("A connection error happened. Check the connection to the processing node and try again.")
- else:
- raise nonloc.error
-
-
-class NodeTaskLimitReachedException(Exception):
- pass
-
-class Task:
- def __init__(self, project_path, node, params, max_retries=5, retry_timeout=10):
- self.project_path = project_path
- self.node = node
- self.params = params
- self.wait_until = datetime.datetime.now() # Don't run this task until a certain time
- self.max_retries = max_retries
- self.retries = 0
- self.retry_timeout = retry_timeout
- self.remote_task = None
-
- def process(self, local, done):
- def handle_result(error = None, partial=False):
- done(self, local, error, partial)
-
- log.ODM_INFO("LRE: About to process %s %s" % (self, 'locally' if local else 'remotely'))
-
- if local:
- self._process_local(handle_result) # Block until complete
- else:
- now = datetime.datetime.now()
- if self.wait_until > now:
- wait_for = (self.wait_until - now).seconds + 1
- log.ODM_INFO("LRE: Waiting %s seconds before processing %s" % (wait_for, self))
- time.sleep(wait_for)
-
- # TODO: we could consider uploading multiple tasks
- # in parallel. But since we are using the same node
- # perhaps this wouldn't be a big speedup.
- self._process_remote(handle_result) # Block until upload is complete
-
- def path(self, *paths):
- return os.path.join(self.project_path, *paths)
-
- def touch(self, file):
- with open(file, 'w') as fout:
- fout.write("Done!\n")
-
- def create_seed_payload(self, paths, touch_files=[]):
- paths = filter(os.path.exists, map(lambda p: self.path(p), paths))
- outfile = self.path("seed.zip")
-
- with zipfile.ZipFile(outfile, "w", compression=zipfile.ZIP_DEFLATED, allowZip64=True) as zf:
- for p in paths:
- if os.path.isdir(p):
- for root, _, filenames in os.walk(p):
- for filename in filenames:
- filename = os.path.join(root, filename)
- filename = os.path.normpath(filename)
- zf.write(filename, os.path.relpath(filename, self.project_path))
- else:
- zf.write(p, os.path.relpath(p, self.project_path))
-
- for tf in touch_files:
- zf.writestr(tf, "")
-
- return outfile
-
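- # Illustrative example (mirrors ReconstructionTask.process_remote below):
- # create_seed_payload(["opensfm/exif", "opensfm/camera_models.json"],
- #                     touch_files=["opensfm/split_merge_stop_at_reconstruction.txt"])
- # writes <project_path>/seed.zip with project-relative paths inside.
-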
- def _process_local(self, done):
- try:
- self.process_local()
- done()
- except Exception as e:
- done(e)
-
- def _process_remote(self, done):
- try:
- self.process_remote(done)
- done(error=None, partial=True) # Upload is completed, but processing is not (partial)
- except Exception as e:
- done(e)
-
- def execute_remote_task(self, done, seed_files=[], seed_touch_files=[], outputs=[]):
- """
- Run a task by creating a seed file with all files in seed_files, optionally
- creating empty marker files (for flag checks) specified in seed_touch_files,
- and downloading the results specified in outputs once processing completes.
- """
- seed_file = self.create_seed_payload(seed_files, touch_files=seed_touch_files)
-
- # Find all images
- images = glob.glob(self.path("images/**"))
-
- # Add GCP (optional)
- if os.path.exists(self.path("gcp_list.txt")):
- images.append(self.path("gcp_list.txt"))
-
- # Add GEO (optional)
- if os.path.exists(self.path("geo.txt")):
- images.append(self.path("geo.txt"))
-
- # Add seed file
- images.append(seed_file)
-
- class nonloc:
- last_update = 0
-
- def print_progress(percentage):
- if (time.time() - nonloc.last_update >= 2) or int(percentage) == 100:
- log.ODM_INFO("LRE: Upload of %s at [%s%%]" % (self, int(percentage)))
- nonloc.last_update = time.time()
-
- # Upload task
- task = self.node.create_task(images,
- get_submodel_args_dict(config.config()),
- progress_callback=print_progress,
- skip_post_processing=True,
- outputs=outputs)
- self.remote_task = task
-
- # Cleanup seed file
- os.remove(seed_file)
-
- # Keep track of tasks for cleanup
- self.params['tasks'].append(task)
-
- # Check status
- info = task.info()
- if info.status in [TaskStatus.RUNNING, TaskStatus.COMPLETED]:
- def monitor():
- class nonloc:
- status_callback_calls = 0
- last_update = 0
-
- def status_callback(info):
- # If a task switches from RUNNING to QUEUED, then we need to
- # stop the process and re-add the task to the queue.
- if info.status == TaskStatus.QUEUED:
- log.ODM_WARNING("LRE: %s (%s) turned from RUNNING to QUEUED. Re-adding to back of the queue." % (self, task.uuid))
- raise NodeTaskLimitReachedException("Delayed task limit reached")
- elif info.status == TaskStatus.RUNNING:
- # Print a status message once in a while
- nonloc.status_callback_calls += 1
- if nonloc.status_callback_calls > 30:
- log.ODM_INFO("LRE: %s (%s) is still running" % (self, task.uuid))
- nonloc.status_callback_calls = 0
- try:
- def print_progress(percentage):
- if (time.time() - nonloc.last_update >= 2) or int(percentage) == 100:
- log.ODM_INFO("LRE: Download of %s at [%s%%]" % (self, int(percentage)))
- nonloc.last_update = time.time()
-
- task.wait_for_completion(status_callback=status_callback)
- log.ODM_INFO("LRE: Downloading assets for %s" % self)
- task.download_assets(self.project_path, progress_callback=print_progress)
- log.ODM_INFO("LRE: Downloaded and extracted assets for %s" % self)
- done()
- except exceptions.TaskFailedError as e:
- # Try to get output
- try:
- output_lines = task.output()
-
- # Save to file
- error_log_path = self.path("error.log")
- with open(error_log_path, 'w') as f:
- f.write('\n'.join(output_lines) + '\n')
-
- msg = "(%s) failed with task output: %s\nFull log saved at %s" % (task.uuid, "\n".join(output_lines[-10:]), error_log_path)
- done(exceptions.TaskFailedError(msg))
- except Exception:
- log.ODM_WARNING("LRE: Could not retrieve task output for %s (%s)" % (self, task.uuid))
- done(e)
- except Exception as e:
- done(e)
-
- # Launch monitor thread and return
- t = threading.Thread(target=monitor)
- self.params['threads'].append(t)
- t.start()
- elif info.status == TaskStatus.QUEUED:
- raise NodeTaskLimitReachedException("Task limit reached")
- else:
- raise Exception("Could not send task to node, task status is %s" % str(info.status))
-
-
- def process_local(self):
- raise NotImplementedError()
-
- def process_remote(self, done):
- raise NotImplementedError()
-
- def __str__(self):
- return os.path.basename(self.project_path)
-
-
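-# Illustrative sketch, not part of ODM: a minimal Task subclass. Subclasses only
-# implement process_local()/process_remote(); retries, queue re-insertion and
-# cleanup are handled by the executor above. The name EchoTask is hypothetical.
-class EchoTask(Task):
- def process_local(self):
- log.ODM_INFO("Processing %s on this machine" % self)
-
- def process_remote(self, done):
- # Ship no seed files and request only the report directory back
- self.execute_remote_task(done, seed_files=[], seed_touch_files=[],
- outputs=["odm_report"])
-
-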
-class ReconstructionTask(Task):
- def process_local(self):
- octx = OSFMContext(self.path("opensfm"))
- log.ODM_INFO("==================================")
- log.ODM_INFO("Local Reconstruction %s" % octx.name())
- log.ODM_INFO("==================================")
- octx.feature_matching(self.params['rerun'])
- octx.create_tracks(self.params['rerun'])
- octx.reconstruct(self.params['rolling_shutter'], self.params['rerun'])
-
- def process_remote(self, done):
- octx = OSFMContext(self.path("opensfm"))
- if not octx.is_feature_matching_done() or not octx.is_reconstruction_done() or self.params['rerun']:
- self.execute_remote_task(done, seed_files=["opensfm/exif",
- "opensfm/camera_models.json",
- "opensfm/reference_lla.json"],
- seed_touch_files=["opensfm/split_merge_stop_at_reconstruction.txt"],
- outputs=["opensfm/matches", "opensfm/features",
- "opensfm/reconstruction.json",
- "opensfm/tracks.csv",
- "cameras.json"])
- else:
- log.ODM_INFO("Already processed feature matching and reconstruction for %s" % octx.name())
- done()
-
-class ToolchainTask(Task):
- def process_local(self):
- completed_file = self.path("toolchain_completed.txt")
- submodel_name = os.path.basename(self.project_path)
-
- if not os.path.exists(completed_file) or self.params['rerun']:
- log.ODM_INFO("=============================")
- log.ODM_INFO("Local Toolchain %s" % self)
- log.ODM_INFO("=============================")
-
- submodels_path = os.path.abspath(self.path(".."))
- argv = get_submodel_argv(config.config(), submodels_path, submodel_name)
-
- # Re-run the ODM toolchain on the submodel
- system.run(" ".join(map(double_quote, map(str, argv))), env_vars=os.environ.copy())
-
- # This will only get executed if the command above succeeds
- self.touch(completed_file)
- else:
- log.ODM_INFO("Already processed toolchain for %s" % submodel_name)
-
- def process_remote(self, done):
- completed_file = self.path("toolchain_completed.txt")
- submodel_name = os.path.basename(self.project_path)
-
- def handle_result(error = None):
- # Mark task as completed if no error
- if error is None:
- self.touch(completed_file)
- done(error=error)
-
- if not os.path.exists(completed_file) or self.params['rerun']:
- self.execute_remote_task(handle_result, seed_files=["opensfm/camera_models.json",
- "opensfm/reference_lla.json",
- "opensfm/reconstruction.json",
- "opensfm/tracks.csv"],
- seed_touch_files=["opensfm/features/empty",
- "opensfm/matches/empty",
- "opensfm/exif/empty"],
- outputs=["odm_orthophoto/cutline.gpkg",
- "odm_orthophoto/odm_orthophoto_cut.tif",
- "odm_orthophoto/odm_orthophoto_feathered.tif",
- "odm_dem",
- "odm_report",
- "odm_georeferencing"])
- else:
- log.ODM_INFO("Already processed toolchain for %s" % submodel_name)
- handle_result()
diff --git a/o/ODM/ODM-2.8.7/opendm/report/dsm_gradient.png b/o/ODM/ODM-2.8.7/opendm/report/dsm_gradient.png
deleted file mode 100644
index 710ed1dd..00000000
Binary files a/o/ODM/ODM-2.8.7/opendm/report/dsm_gradient.png and /dev/null differ
diff --git a/o/ODM/ODM-2.8.7/opendm/report/overlap_color_map.txt b/o/ODM/ODM-2.8.7/opendm/report/overlap_color_map.txt
deleted file mode 100644
index 924fc9b0..00000000
--- a/o/ODM/ODM-2.8.7/opendm/report/overlap_color_map.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-# QGIS Generated Color Map Export File
-2,215,25,28,255,2
-3,246,201,5,255,3
-4,117,188,39,255,4
-5,26,150,65,255,5+
-inf,26,150,65,255,> 5
-nv 0 0 0 0
diff --git a/o/ODM/ODM-2.8.7/opendm/report/overlap_diagram_legend.png b/o/ODM/ODM-2.8.7/opendm/report/overlap_diagram_legend.png
deleted file mode 100644
index 6136e438..00000000
Binary files a/o/ODM/ODM-2.8.7/opendm/report/overlap_diagram_legend.png and /dev/null differ
diff --git a/o/ODM/ODM-2.8.7/opendm/rollingshutter.py b/o/ODM/ODM-2.8.7/opendm/rollingshutter.py
deleted file mode 100644
index 6417406a..00000000
--- a/o/ODM/ODM-2.8.7/opendm/rollingshutter.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from opendm import log
-
-# Make Model (lowercase) --> readout time (ms)
-RS_DATABASE = {
- 'dji phantom vision fc200': 74, # Phantom 2
-
- 'dji fc300s': 33, # Phantom 3 Advanced
- 'dji fc300c': 33, # Phantom 3 Standard
- 'dji fc300x': 33, # Phantom 3 Professional
-
- 'dji fc330': 33, # Phantom 4
- 'dji fc6310': 33, # Phantom 4 Professional
-
- 'dji fc7203': 20, # Mavic Mini v1
- 'dji fc3170': 27, # DJI Mavic Air 2
-
- 'dji fc350': 30, # Inspire 1
-
- 'gopro hero4 black': 30, # GoPro Hero 4 Black
- 'gopro hero8 black': 17, # GoPro Hero 8 Black
-
- 'teracube teracube one': 32 # TeraCube TeraCube_One TR1907Q Mobile Phone
-
- # Help us add more!
- # See: https://github.com/OpenDroneMap/RSCalibration for instructions
-}
-DEFAULT_RS_READOUT = 30 # Just a guess
-
-def make_model_key(make, model):
- return ("%s %s" % (make.strip(), model.strip())).lower().strip()
-
-warn_db_missing = {}
-info_db_found = {}
-
-def get_rolling_shutter_readout(make, model, override_value=0):
- global warn_db_missing
- global info_db_found
-
- if override_value > 0:
- return override_value
-
- key = make_model_key(make, model)
- if key in RS_DATABASE:
- if not key in info_db_found:
- log.ODM_INFO("Rolling shutter profile for \"%s %s\" selected, using %sms as --rolling-shutter-readout." % (make, model, RS_DATABASE[key]))
- info_db_found[key] = True
- return float(RS_DATABASE[key])
- else:
- # Warn once
- if not key in warn_db_missing:
- log.ODM_WARNING("Rolling shutter readout time for \"%s %s\" is not in our database, using default of %sms which might be incorrect. Use --rolling-shutter-readout to set an actual value (see https://github.com/OpenDroneMap/RSCalibration for instructions on how to calculate this value)" % (make, model, DEFAULT_RS_READOUT))
- warn_db_missing[key] = True
- return float(DEFAULT_RS_READOUT)
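-
-# Illustrative usage (not called by ODM itself):
-# get_rolling_shutter_readout("DJI", "FC330") # 33.0, database hit
-# get_rolling_shutter_readout("Acme", "Cam") # 30.0, default + one-time warning
-# get_rolling_shutter_readout("Acme", "Cam", override_value=25) # 25, explicit override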
diff --git a/o/ODM/ODM-2.8.7/opendm/shots.py b/o/ODM/ODM-2.8.7/opendm/shots.py
deleted file mode 100644
index 0440257e..00000000
--- a/o/ODM/ODM-2.8.7/opendm/shots.py
+++ /dev/null
@@ -1,143 +0,0 @@
-import os, json
-from opendm import log
-from opendm.pseudogeo import get_pseudogeo_utm, get_pseudogeo_scale
-from opendm.location import transformer
-from pyproj import CRS
-from osgeo import gdal
-import numpy as np
-import cv2
-
-def get_rotation_matrix(rotation):
- """Get rotation as a 3x3 matrix."""
- return cv2.Rodrigues(rotation)[0]
-
-def matrix_to_rotation(rotation_matrix):
- R = np.array(rotation_matrix, dtype=float)
- # if not np.isclose(np.linalg.det(R), 1):
- # raise ValueError("Determinant != 1")
- # if not np.allclose(np.linalg.inv(R), R.T):
- # raise ValueError("Not orthogonal")
- return cv2.Rodrigues(R)[0].ravel()
-
-def get_origin(shot):
- """The origin of the pose in world coordinates."""
- return -get_rotation_matrix(np.array(shot['rotation'])).T.dot(np.array(shot['translation']))
-
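-# Sanity check (illustrative): for a pose x_cam = R x_world + t, the camera
-# origin in world coordinates is -R^T t, which is what get_origin() computes.
-# With a 90-degree rotation about Z and a unit translation along X:
-# get_origin({'rotation': [0, 0, np.pi / 2], 'translation': [1, 0, 0]})
-# returns approximately [0, 1, 0].
-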
-def get_geojson_shots_from_opensfm(reconstruction_file, utm_srs=None, utm_offset=None, pseudo_geotiff=None):
- """
- Extract shots from OpenSfM's reconstruction.json
- """
- pseudo_geocoords = None
-
- if pseudo_geotiff is not None and os.path.exists(pseudo_geotiff):
- # pseudogeo transform
- utm_srs = get_pseudogeo_utm()
-
- # the pseudo-georeferencing CRS UL corner is at 0,0
- # but our shot coordinates aren't, so we need to offset them
- raster = gdal.Open(pseudo_geotiff)
- ulx, xres, _, uly, _, yres = raster.GetGeoTransform()
- lrx = ulx + (raster.RasterXSize * xres)
- lry = uly + (raster.RasterYSize * yres)
-
- pseudo_geocoords = np.array([[1.0 / get_pseudogeo_scale() ** 2, 0, 0, ulx + lrx / 2.0],
- [0, 1.0 / get_pseudogeo_scale() ** 2, 0, uly + lry / 2.0],
- [0, 0, 1, 0],
- [0, 0, 0, 1]])
- raster = None
- pseudo = True
-
- # Couldn't get a SRS?
- if utm_srs is None:
- return None
-
- crstrans = transformer(CRS.from_proj4(utm_srs), CRS.from_epsg("4326"))
-
- if os.path.exists(reconstruction_file):
- with open(reconstruction_file, 'r') as fin:
- reconstructions = json.loads(fin.read())
-
- feats = []
- added_shots = {}
- for recon in reconstructions:
- cameras = recon.get('cameras', {})
-
- for filename in recon.get('shots', {}):
- shot = recon['shots'][filename]
- cam = shot.get('camera')
- if (not cam in cameras) or (filename in added_shots):
- continue
-
- cam = cameras[cam]
- if pseudo_geocoords is not None:
- Rs, T = pseudo_geocoords[:3, :3], pseudo_geocoords[:3, 3]
- Rs1 = np.linalg.inv(Rs)
- origin = get_origin(shot)
-
- # Translation
- utm_coords = np.dot(Rs, origin) + T
- trans_coords = crstrans.TransformPoint(utm_coords[0], utm_coords[1], utm_coords[2])
-
- # Rotation
- rotation_matrix = get_rotation_matrix(np.array(shot['rotation']))
- rotation = matrix_to_rotation(np.dot(rotation_matrix, Rs1))
-
- translation = origin
- else:
- rotation = shot['rotation']
-
- # Just add UTM offset
- origin = get_origin(shot)
-
- utm_coords = [origin[0] + utm_offset[0],
- origin[1] + utm_offset[1],
- origin[2]]
- translation = utm_coords
- trans_coords = crstrans.TransformPoint(utm_coords[0], utm_coords[1], utm_coords[2])
-
- feats.append({
- 'type': 'Feature',
- 'properties': {
- 'filename': filename,
- 'focal': cam.get('focal', cam.get('focal_x')), # Focal ratio = focal length (mm) / max(sensor_width, sensor_height) (mm)
- 'width': cam.get('width', 0),
- 'height': cam.get('height', 0),
- 'translation': list(translation),
- 'rotation': list(rotation)
- },
- 'geometry':{
- 'type': 'Point',
- 'coordinates': list(trans_coords)
- }
- })
-
- added_shots[filename] = True
-
- return {
- 'type': 'FeatureCollection',
- 'features': feats
- }
- else:
- raise RuntimeError("%s does not exist." % reconstruction_file)
-
-def merge_geojson_shots(geojson_shots_files, output_geojson_file):
- result = {}
- added_files = {}
- for shot_file in geojson_shots_files:
- with open(shot_file, "r") as f:
- shots = json.loads(f.read())
-
- if len(result) == 0:
- for feat in shots.get('features', []):
- added_files[feat['properties']['filename']] = True
-
- # Use first file as base
- result = shots
- else:
- # Append features if filename not already added
- for feat in shots.get('features', []):
- if not feat['properties']['filename'] in added_files:
- result['features'].append(feat)
-
- with open(output_geojson_file, "w") as f:
- f.write(json.dumps(result))
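-
-# Illustrative usage (paths are hypothetical): merge per-submodel shot files,
-# keeping only the first occurrence of each image filename:
-# merge_geojson_shots(["submodel_0000/odm_report/shots.geojson",
-#                      "submodel_0001/odm_report/shots.geojson"],
-#                     "merged_shots.geojson")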
diff --git a/o/ODM/ODM-2.8.7/opendm/system.py b/o/ODM/ODM-2.8.7/opendm/system.py
deleted file mode 100644
index bf5f279a..00000000
--- a/o/ODM/ODM-2.8.7/opendm/system.py
+++ /dev/null
@@ -1,143 +0,0 @@
-import os
-import errno
-import json
-import datetime
-import sys
-import subprocess
-import string
-import signal
-import io
-from collections import deque
-
-from opendm import context
-from opendm import log
-
-class SubprocessException(Exception):
- def __init__(self, msg, errorCode):
- super().__init__(msg)
- self.errorCode = errorCode
-
-class ExitException(Exception):
- pass
-
-def get_ccd_widths():
- """Return the CCD Width of the camera listed in the JSON defs file."""
- with open(context.ccd_widths_path) as f:
- sensor_data = json.loads(f.read())
- return dict(zip(map(str.lower, sensor_data.keys()), sensor_data.values()))
-
-running_subprocesses = []
-cleanup_callbacks = []
-
-def add_cleanup_callback(func):
- global cleanup_callbacks
- cleanup_callbacks.append(func)
-
-def remove_cleanup_callback(func):
- global cleanup_callbacks
-
- try:
- cleanup_callbacks.remove(func)
- except ValueError as e:
- log.ODM_EXCEPTION("Tried to remove %s from cleanup_callbacks but got: %s" % (str(func), str(e)))
-
-def exit_gracefully():
- global running_subprocesses
- global cleanup_callbacks
-
- log.ODM_WARNING("Caught TERM/INT signal, attempting to exit gracefully...")
-
- for cb in cleanup_callbacks:
- cb()
-
- for sp in running_subprocesses:
- log.ODM_WARNING("Sending TERM signal to PID %s..." % sp.pid)
- if sys.platform == 'win32':
- os.kill(sp.pid, signal.CTRL_C_EVENT)
- else:
- os.killpg(os.getpgid(sp.pid), signal.SIGTERM)
-
- os._exit(1)
-
-def sighandler(signum, frame):
- exit_gracefully()
-
-signal.signal(signal.SIGINT, sighandler)
-signal.signal(signal.SIGTERM, sighandler)
-
-def run(cmd, env_paths=[context.superbuild_bin_path], env_vars={}, packages_paths=context.python_packages_paths):
- """Run a system command"""
- global running_subprocesses
-
- log.ODM_INFO('running %s' % cmd)
- env = os.environ.copy()
-
- sep = ":"
- if sys.platform == 'win32':
- sep = ";"
-
- if len(env_paths) > 0:
- env["PATH"] = env["PATH"] + sep + sep.join(env_paths)
-
- if len(packages_paths) > 0:
- env["PYTHONPATH"] = env.get("PYTHONPATH", "") + sep + sep.join(packages_paths)
-
- for k in env_vars:
- env[k] = str(env_vars[k])
-
- p = subprocess.Popen(cmd, shell=True, env=env, start_new_session=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- running_subprocesses.append(p)
- lines = deque()
- for line in io.TextIOWrapper(p.stdout):
- print(line, end="")
-
- lines.append(line.strip())
- if len(lines) == 11:
- lines.popleft()
-
- retcode = p.wait()
-
- log.logger.log_json_process(cmd, retcode, list(lines))
-
- running_subprocesses.remove(p)
- if retcode < 0:
- raise SubprocessException("Child was terminated by signal {}".format(-retcode), -retcode)
- elif retcode > 0:
- raise SubprocessException("Child returned {}".format(retcode), retcode)
-
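-# Illustrative only (the command is a placeholder): run() inherits the current
-# environment, appends the SuperBuild binaries to PATH and raises
-# SubprocessException on a non-zero exit code.
-# try:
-#     run("odm_orthophoto --help", env_vars={"LC_ALL": "C"})
-# except SubprocessException as e:
-#     log.ODM_WARNING("Command failed with exit code %s" % e.errorCode)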
-
-def now():
- """Return the current time"""
- return datetime.datetime.now().strftime('%a %b %d %H:%M:%S %Z %Y')
-
-
-def now_raw():
- return datetime.datetime.now()
-
-
-def benchmark(start, benchmarking_file, process):
- """
- runs a benchmark with a start datetime object
- :return: the running time (delta)
- """
- # Write to benchmark file
- delta = (datetime.datetime.now() - start).total_seconds()
- with open(benchmarking_file, 'a') as b:
- b.write('%s runtime: %s seconds\n' % (process, delta))
-
-def mkdir_p(path):
- """Make a directory including parent directories.
- """
- try:
- os.makedirs(path)
- except os.error as exc:
- if exc.errno != errno.EEXIST or not os.path.isdir(path):
- raise
-
-# Python2 shutil.which
-def which(program):
- path=os.getenv('PATH')
- for p in path.split(os.path.pathsep):
- p=os.path.join(p,program)
- if os.path.exists(p) and os.access(p,os.X_OK):
- return p
diff --git a/o/ODM/ODM-2.8.7/opendm/thermal.py b/o/ODM/ODM-2.8.7/opendm/thermal.py
deleted file mode 100644
index c64ad223..00000000
--- a/o/ODM/ODM-2.8.7/opendm/thermal.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from opendm import log
-from opendm.thermal_tools import dji_unpack
-import cv2
-
-def resize_to_match(image, match_photo = None):
- """
- Resize images to match the dimension of another photo
- :param image numpy array containing image data to resize
- :param match_photo ODM_Photo whose dimensions should be used for resize
- :return numpy array with resized image data
- """
- if match_photo is not None:
- h, w, _ = image.shape
- if w != match_photo.width or h != match_photo.height:
- image = cv2.resize(image, None,
- fx=match_photo.width/w,
- fy=match_photo.height/h,
- interpolation=cv2.INTER_LANCZOS4)
- return image
-
-def dn_to_temperature(photo, image, dataset_tree):
- """
- Convert Digital Number values to temperature (C) values
- :param photo ODM_Photo
- :param image numpy array containing image data
- :param dataset_tree path to original source image to read data using PIL for DJI thermal photos
- :return numpy array with temperature (C) image values
- """
-
- # Handle thermal bands
- if photo.is_thermal():
- # Every camera stores thermal information differently
- # The following will work for MicaSense Altum cameras
- # but not necessarily for others
- if photo.camera_make == "MicaSense" and photo.camera_model == "Altum":
- image = image.astype("float32")
- image -= (273.15 * 100.0) # Offset: the DNs are in centi-Kelvin
- image *= 0.01 # Scale centi-degrees to degrees Celsius
- return image
- elif photo.camera_make == "DJI" and photo.camera_model == "MAVIC2-ENTERPRISE-ADVANCED":
- image = dji_unpack.extract_temperatures_dji(photo, image, dataset_tree)
- image = image.astype("float32")
- return image
- else:
- log.ODM_WARNING("Unsupported camera [%s %s], thermal band will have digital numbers." % (photo.camera_make, photo.camera_model))
- else:
- image = image.astype("float32")
- log.ODM_WARNING("Tried to radiometrically calibrate a non-thermal image with temperature values (%s)" % photo.filename)
- return image
-
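-# Worked example (not executed by ODM): Altum pixels store centi-Kelvin, so a
-# DN of 29815 becomes (29815 - 27315) * 0.01 = 25.0 degrees Celsius, matching
-# the subtract/scale sequence above.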
diff --git a/o/ODM/ODM-2.8.7/opendm/thermal_tools/__init__.py b/o/ODM/ODM-2.8.7/opendm/thermal_tools/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/o/ODM/ODM-2.8.7/opendm/thermal_tools/dji_unpack.py b/o/ODM/ODM-2.8.7/opendm/thermal_tools/dji_unpack.py
deleted file mode 100644
index 4794ab41..00000000
--- a/o/ODM/ODM-2.8.7/opendm/thermal_tools/dji_unpack.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from PIL import Image
-import numpy as np
-from opendm import system
-from opendm import log
-
-from opendm.thermal_tools.thermal_utils import sensor_vals_to_temp
-
-def extract_temperatures_dji(photo, image, dataset_tree):
- """Extracts the DJI-encoded thermal image as 2D floating-point numpy array with temperatures in degC.
- The raw sensor values are obtained using the sample binaries provided in the official Thermal SDK by DJI.
- The executable file is run and generates a 16 bit unsigned RAW image with Little Endian byte order.
- Link to DJI Forum post: https://forum.dji.com/forum.php?mod=redirect&goto=findpost&ptid=230321&pid=2389016
- """
- # Hardcoded metadata for mean of values
- # This is added to support the possibility of extracting RJPEG from DJI M2EA
- meta = {
- "Emissivity": 0.95,
- "ObjectDistance": 50, #This is mean value of flights for better results. Need to be changed later, or improved by bypassing options from task broker
- "AtmosphericTemperature": 20,
- "ReflectedApparentTemperature": 30,
- "IRWindowTemperature": 20,
- "IRWindowTransmission": 1,
- "RelativeHumidity": 40,
- "PlanckR1": 21106.77,
- "PlanckB": 1501,
- "PlanckF": 1,
- "PlanckO": -7340,
- "PlanckR2": 0.012545258,
- }
-
- if photo.camera_model == "MAVIC2-ENTERPRISE-ADVANCED":
- # Adding support for MAVIC2-ENTERPRISE-ADVANCED Camera images
- im = Image.open(f"{dataset_tree}/{photo.filename}")
- # concatenate APP3 chunks of data
- a = im.applist[3][1]
- for i in range(4, 14):
- a += im.applist[i][1]
- # create image from bytes
- try:
- img = Image.frombytes("I;16L", (640, 512), a)
- except ValueError as e:
- log.ODM_ERROR("Error during extracting temperature values for file %s : %s" % photo.filename, e)
- else:
- log.ODM_DEBUG("Only DJI M2EA currently supported, please wait for new updates")
- return image
- # Extract raw sensor values from generated image into numpy array
- raw_sensor_np = np.array(img)
- ## extracting the temperatures from thermal images
- thermal_np = sensor_vals_to_temp(raw_sensor_np, **meta)
- return thermal_np
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/opendm/thermal_tools/flir_unpack.py b/o/ODM/ODM-2.8.7/opendm/thermal_tools/flir_unpack.py
deleted file mode 100644
index 903d34d9..00000000
--- a/o/ODM/ODM-2.8.7/opendm/thermal_tools/flir_unpack.py
+++ /dev/null
@@ -1,271 +0,0 @@
-"""
-THIS IS WIP, DON'T USE THIS FILE, IT IS HERE FOR FURTHER IMPROVEMENT
-Tools for extracting thermal data from FLIR images.
-Derived from https://bitbucket.org/nimmerwoner/flyr/src/master/
-"""
-
-import os
-from io import BufferedIOBase, BytesIO
-from typing import BinaryIO, Dict, Optional, Tuple, Union
-
-import numpy as np
-from PIL import Image
-
-# Constants
-SEGMENT_SEP = b"\xff"
-APP1_MARKER = b"\xe1"
-MAGIC_FLIR_DEF = b"FLIR\x00"
-
-CHUNK_APP1_BYTES_COUNT = len(APP1_MARKER)
-CHUNK_LENGTH_BYTES_COUNT = 2
-CHUNK_MAGIC_BYTES_COUNT = len(MAGIC_FLIR_DEF)
-CHUNK_SKIP_BYTES_COUNT = 1
-CHUNK_NUM_BYTES_COUNT = 1
-CHUNK_TOT_BYTES_COUNT = 1
-CHUNK_PARTIAL_METADATA_LENGTH = CHUNK_APP1_BYTES_COUNT + CHUNK_LENGTH_BYTES_COUNT + CHUNK_MAGIC_BYTES_COUNT
-CHUNK_METADATA_LENGTH = (
- CHUNK_PARTIAL_METADATA_LENGTH + CHUNK_SKIP_BYTES_COUNT + CHUNK_NUM_BYTES_COUNT + CHUNK_TOT_BYTES_COUNT
-)
-
-
-def unpack(path_or_stream: Union[str, BinaryIO]) -> np.ndarray:
- """Unpacks the FLIR image, meaning that it will return the thermal data embedded in the image.
-
- Parameters
- ----------
- path_or_stream : Union[str, BinaryIO]
- Either a path (string) to a FLIR file, or a byte stream such as
- BytesIO or file opened as `open(file_path, "rb")`.
-
- Returns
- -------
- FlyrThermogram
- When successful, a FlyrThermogram object containing thermogram data.
- """
- if isinstance(path_or_stream, str) and os.path.isfile(path_or_stream):
- with open(path_or_stream, "rb") as flirh:
- return unpack(flirh)
- elif isinstance(path_or_stream, BufferedIOBase):
- stream = path_or_stream
- flir_app1_stream = extract_flir_app1(stream)
- flir_records = parse_flir_app1(flir_app1_stream)
- raw_np = parse_thermal(flir_app1_stream, flir_records)
-
- return raw_np
- else:
- raise ValueError("Incorrect input")
-
-
-def extract_flir_app1(stream: BinaryIO) -> BinaryIO:
- """Extracts the FLIR APP1 bytes.
-
- Parameters
- ---------
- stream : BinaryIO
- A full bytes stream of a JPEG file, expected to be a FLIR file.
-
- Raises
- ------
- ValueError
- When the file is invalid in one of the following ways, a
- ValueError is raised.
-
- * File is not a JPEG
- * A FLIR chunk number occurs more than once
- * The total chunks count is inconsistent over multiple chunks
- * No APP1 segments are successfully parsed
-
- Returns
- -------
- BinaryIO
- A bytes stream of the APP1 FLIR segments
- """
- # Check JPEG-ness
- _ = stream.read(2)
-
- chunks_count: Optional[int] = None
- chunks: Dict[int, bytes] = {}
- while True:
- b = stream.read(1)
- if b == b"":
- break
-
- if b != SEGMENT_SEP:
- continue
-
- parsed_chunk = parse_flir_chunk(stream, chunks_count)
- if not parsed_chunk:
- continue
-
- chunks_count, chunk_num, chunk = parsed_chunk
- chunk_exists = chunks.get(chunk_num, None) is not None
- if chunk_exists:
- raise ValueError("Invalid FLIR: duplicate chunk number")
- chunks[chunk_num] = chunk
-
- # Encountered all chunks, break out of loop to process found metadata
- if chunk_num == chunks_count:
- break
-
- if chunks_count is None:
- raise ValueError("Invalid FLIR: no metadata encountered")
-
- flir_app1_bytes = b""
- for chunk_num in range(chunks_count + 1):
- flir_app1_bytes += chunks[chunk_num]
-
- flir_app1_stream = BytesIO(flir_app1_bytes)
- flir_app1_stream.seek(0)
- return flir_app1_stream
-
-
-def parse_flir_chunk(stream: BinaryIO, chunks_count: Optional[int]) -> Optional[Tuple[int, int, bytes]]:
- """Parse flir chunk."""
- # Parse the chunk header. Headers are as follows (definition with example):
- #
- # \xff\xe1FLIR\x00\x01
- # \xff\xe1\xff\xfeFLIR\x00\x01\x01\x0b
- #
- # Meaning: Exif APP1, 65534 long, FLIR chunk 1 out of 12
- marker = stream.read(CHUNK_APP1_BYTES_COUNT)
-
- length_bytes = stream.read(CHUNK_LENGTH_BYTES_COUNT)
- length = int.from_bytes(length_bytes, "big")
- length -= CHUNK_METADATA_LENGTH
- magic_flir = stream.read(CHUNK_MAGIC_BYTES_COUNT)
-
- if not (marker == APP1_MARKER and magic_flir == MAGIC_FLIR_DEF):
- # Seek back to just after byte b and continue searching for chunks
- stream.seek(-len(marker) - len(length_bytes) - len(magic_flir), 1)
- return None
-
- stream.seek(1, 1) # skip 1 byte, unsure what it is for
-
- chunk_num = int.from_bytes(stream.read(CHUNK_NUM_BYTES_COUNT), "big")
- chunks_tot = int.from_bytes(stream.read(CHUNK_TOT_BYTES_COUNT), "big")
-
- # Remember total chunks to verify metadata consistency
- if chunks_count is None:
- chunks_count = chunks_tot
-
- if ( # Check whether chunk metadata is consistent
- chunks_tot is None or chunk_num < 0 or chunk_num > chunks_tot or chunks_tot != chunks_count
- ):
- raise ValueError(f"Invalid FLIR: inconsistent total chunks, should be 0 or greater, but is {chunks_tot}")
-
- return chunks_tot, chunk_num, stream.read(length + 1)
-
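-# Illustrative: the header for "FLIR chunk 1 out of 12, 65534 bytes long" from
-# the comment above would serialize (after the 0xFF separator already consumed
-# by extract_flir_app1) as:
-# b"\xe1" + (65534).to_bytes(2, "big") + b"FLIR\x00" + b"\x01" + b"\x01" + b"\x0b"
-# i.e. marker, length, magic, skip byte, chunk number, total chunks, in the
-# exact order parse_flir_chunk() reads them.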
-
-def parse_thermal(stream: BinaryIO, records: Dict[int, Tuple[int, int, int, int]]) -> np.ndarray:
- """Parse thermal."""
- RECORD_IDX_RAW_DATA = 1
- raw_data_md = records[RECORD_IDX_RAW_DATA]
- _, _, raw_data = parse_raw_data(stream, raw_data_md)
- return raw_data
-
-
-def parse_flir_app1(stream: BinaryIO) -> Dict[int, Tuple[int, int, int, int]]:
- """Parse flir app1."""
- # 0x00 - string[4] file format ID = "FFF\0"
- # 0x04 - string[16] file creator: seen "\0","MTX IR\0","CAMCTRL\0"
- # 0x14 - int32u file format version = 100
- # 0x18 - int32u offset to record directory
- # 0x1c - int32u number of entries in record directory
- # 0x20 - int32u next free index ID = 2
- # 0x24 - int16u swap pattern = 0 (?)
- # 0x28 - int16u[7] spares
- # 0x34 - int32u[2] reserved
- # 0x3c - int32u checksum
-
- # 1. Read the 4-byte file format ID (should equal AFF\0 or FFF\0; not verified here)
- _ = stream.read(4)
-
- # 2. Read FLIR record directory metadata (ref 3)
- stream.seek(16, 1)
- _ = int.from_bytes(stream.read(4), "big")
- record_dir_offset = int.from_bytes(stream.read(4), "big")
- record_dir_entries_count = int.from_bytes(stream.read(4), "big")
- stream.seek(28, 1)
- _ = int.from_bytes(stream.read(4), "big")
-
- # 3. Read record directory (which is a FLIR record entry repeated
- # `record_dir_entries_count` times)
- stream.seek(record_dir_offset)
- record_dir_stream = BytesIO(stream.read(32 * record_dir_entries_count))
-
- # First parse the record metadata
- record_details: Dict[int, Tuple[int, int, int, int]] = {}
- for record_nr in range(record_dir_entries_count):
- record_dir_stream.seek(0)
- details = parse_flir_record_metadata(stream, record_nr)
- if details:
- record_details[details[1]] = details
-
- # Then parse the actual records
- # for (entry_idx, type, offset, length) in record_details:
- # parse_record = record_parsers[type]
- # stream.seek(offset)
- # record = BytesIO(stream.read(length + 36)) # + 36 needed to find end
- # parse_record(record, offset, length)
-
- return record_details
-
-
-def parse_flir_record_metadata(stream: BinaryIO, record_nr: int) -> Optional[Tuple[int, int, int, int]]:
- """Parse flir record metadata."""
- # FLIR record entry (ref 3):
- # 0x00 - int16u record type
- # 0x02 - int16u record subtype: RawData 1=BE, 2=LE, 3=PNG; 1 for other record types
- # 0x04 - int32u record version: seen 0x64,0x66,0x67,0x68,0x6f,0x104
- # 0x08 - int32u index id = 1
- # 0x0c - int32u record offset from start of FLIR data
- # 0x10 - int32u record length
- # 0x14 - int32u parent = 0 (?)
- # 0x18 - int32u object number = 0 (?)
- # 0x1c - int32u checksum: 0 for no checksum
- entry = 32 * record_nr
- stream.seek(entry)
- record_type = int.from_bytes(stream.read(2), "big")
- if record_type < 1:
- return None
-
- _ = int.from_bytes(stream.read(2), "big")
- _ = int.from_bytes(stream.read(4), "big")
- _ = int.from_bytes(stream.read(4), "big")
- record_offset = int.from_bytes(stream.read(4), "big")
- record_length = int.from_bytes(stream.read(4), "big")
- _ = int.from_bytes(stream.read(4), "big")
- _ = int.from_bytes(stream.read(4), "big")
- _ = int.from_bytes(stream.read(4), "big")
- return (entry, record_type, record_offset, record_length)
-
-
-def parse_raw_data(stream: BinaryIO, metadata: Tuple[int, int, int, int]):
- """Parse raw data."""
- (_, _, offset, length) = metadata
- stream.seek(offset)
-
- stream.seek(2, 1)
- width = int.from_bytes(stream.read(2), "little")
- height = int.from_bytes(stream.read(2), "little")
-
- stream.seek(offset + 32)
-
- # Read the bytes with the raw thermal data and decode using PIL
- thermal_bytes = stream.read(length)
- thermal_stream = BytesIO(thermal_bytes)
- thermal_img = Image.open(thermal_stream)
- thermal_np = np.array(thermal_img)
-
- # Check shape
- if thermal_np.shape != (height, width):
- msg = "Invalid FLIR: metadata's width and height don't match thermal data's actual width\
- and height ({} vs ({}, {}))"
- msg = msg.format(thermal_np.shape, height, width)
- raise ValueError(msg)
-
- # FLIR PNG data is in the wrong byte order, fix that
- fix_byte_order = np.vectorize(lambda x: (x >> 8) + ((x & 0x00FF) << 8))
- thermal_np = fix_byte_order(thermal_np)
-
- return width, height, thermal_np
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/opendm/thermal_tools/thermal_utils.py b/o/ODM/ODM-2.8.7/opendm/thermal_tools/thermal_utils.py
deleted file mode 100644
index 6dbfdf5f..00000000
--- a/o/ODM/ODM-2.8.7/opendm/thermal_tools/thermal_utils.py
+++ /dev/null
@@ -1,139 +0,0 @@
-"""Thermal Image manipulation utilities."""
-"""Based on https://github.com/detecttechnologies/thermal_base"""
-import numpy as np
-
-def sensor_vals_to_temp(
- raw,
- Emissivity=1.0,
- ObjectDistance=1,
- AtmosphericTemperature=20,
- ReflectedApparentTemperature=20,
- IRWindowTemperature=20,
- IRWindowTransmission=1,
- RelativeHumidity=50,
- PlanckR1=21106.77,
- PlanckB=1501,
- PlanckF=1,
- PlanckO=-7340,
- PlanckR2=0.012545258,
- **kwargs,):
- """Convert raw values from the thermographic sensor sensor to temperatures in °C. Tested for Flir and DJI cams."""
- # this calculation has been ported to python from https://github.com/gtatters/Thermimage/blob/master/R/raw2temp.R
- # a detailed explanation of what is going on here can be found there
-
- # constants
- ATA1 = 0.006569
- ATA2 = 0.01262
- ATB1 = -0.002276
- ATB2 = -0.00667
- ATX = 1.9
-
- # transmission through window (calibrated)
- emiss_wind = 1 - IRWindowTransmission
- refl_wind = 0
-
- # transmission through the air
- h2o = (RelativeHumidity / 100) * np.exp(
- 1.5587
- + 0.06939 * (AtmosphericTemperature)
- - 0.00027816 * (AtmosphericTemperature) ** 2
- + 0.00000068455 * (AtmosphericTemperature) ** 3
- )
- tau1 = ATX * np.exp(-np.sqrt(ObjectDistance / 2) * (ATA1 + ATB1 * np.sqrt(h2o))) + (1 - ATX) * np.exp(
- -np.sqrt(ObjectDistance / 2) * (ATA2 + ATB2 * np.sqrt(h2o))
- )
- tau2 = ATX * np.exp(-np.sqrt(ObjectDistance / 2) * (ATA1 + ATB1 * np.sqrt(h2o))) + (1 - ATX) * np.exp(
- -np.sqrt(ObjectDistance / 2) * (ATA2 + ATB2 * np.sqrt(h2o))
- )
- # radiance from the environment
- raw_refl1 = PlanckR1 / (PlanckR2 * (np.exp(PlanckB / (ReflectedApparentTemperature + 273.15)) - PlanckF)) - PlanckO
-
- # Reflected component
- raw_refl1_attn = (1 - Emissivity) / Emissivity * raw_refl1
-
- # Emission from atmosphere 1
- raw_atm1 = (
- PlanckR1 / (PlanckR2 * (np.exp(PlanckB / (AtmosphericTemperature + 273.15)) - PlanckF)) - PlanckO
- )
-
- # attenuation for atmospheric 1 emission
- raw_atm1_attn = (1 - tau1) / Emissivity / tau1 * raw_atm1
-
- # Emission from window due to its own temp
- raw_wind = (
- PlanckR1 / (PlanckR2 * (np.exp(PlanckB / (IRWindowTemperature + 273.15)) - PlanckF)) - PlanckO
- )
- # Component due to window emissivity
- raw_wind_attn = (
- emiss_wind / Emissivity / tau1 / IRWindowTransmission * raw_wind
- )
- # Reflection from window due to external objects
- raw_refl2 = (
- PlanckR1 / (PlanckR2 * (np.exp(PlanckB / (ReflectedApparentTemperature + 273.15)) - PlanckF)) - PlanckO
- )
- # component due to window reflectivity
- raw_refl2_attn = (
- refl_wind / Emissivity / tau1 / IRWindowTransmission * raw_refl2
- )
- # Emission from atmosphere 2
- raw_atm2 = (
- PlanckR1 / (PlanckR2 * (np.exp(PlanckB / (AtmosphericTemperature + 273.15)) - PlanckF)) - PlanckO
- )
- # attenuation for atmospheric 2 emission
- raw_atm2_attn = (
- (1 - tau2) / Emissivity / tau1 / IRWindowTransmission / tau2 * raw_atm2
- )
-
- raw_obj = (
- raw / Emissivity / tau1 / IRWindowTransmission / tau2
- - raw_atm1_attn
- - raw_atm2_attn
- - raw_wind_attn
- - raw_refl1_attn
- - raw_refl2_attn
- )
- val_to_log = PlanckR1 / (PlanckR2 * (raw_obj + PlanckO)) + PlanckF
- if any(val_to_log.ravel() < 0):
- raise Exception("Image seems to be corrupted")
- # temperature from radiance
- return PlanckB / np.log(val_to_log) - 273.15
-
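-# Illustrative usage with the DJI M2EA-style constants from dji_unpack.py
-# (the raw value is an arbitrary example, not calibration data):
-# raw = np.full((512, 640), 18000, dtype=np.uint16)
-# temps = sensor_vals_to_temp(raw, Emissivity=0.95, ObjectDistance=50,
-#                             RelativeHumidity=40)
-# temps is a float array of per-pixel temperatures in degrees Celsius.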
-
-def parse_from_exif_str(temp_str):
- """String to float parser."""
- # we assume degrees celsius for temperature, metres for length
- if isinstance(temp_str, str):
- return float(temp_str.split()[0])
- return float(temp_str)
-
-
-def normalize_temp_matrix(thermal_np):
- """Normalize a temperature matrix to the 0-255 uint8 image range."""
- num = thermal_np - np.amin(thermal_np)
- den = np.amax(thermal_np) - np.amin(thermal_np)
- thermal_np = num / den
- return thermal_np
-
-def clip_temp_to_roi(thermal_np, thermal_roi_values):
- """
- Given an RoI within a temperature matrix, this function clips the temperature values in the entire thermal.
-
- Image temperature values above and below the max/min temperatures within the RoI are clipped to said max/min.
-
- Args:
- thermal_np (np.ndarray): Floating point array containing the temperature matrix.
- thermal_roi_values (np.ndarray / list): Any iterable containing the temperature values within the RoI.
-
- Returns:
- np.ndarray: The clipped temperature matrix.
- """
- maximum = np.amax(thermal_roi_values)
- minimum = np.amin(thermal_roi_values)
- thermal_np[thermal_np > maximum] = maximum
- thermal_np[thermal_np < minimum] = minimum
- return thermal_np
-
-
-def scale_with_roi(thermal_np, thermal_roi_values):
- """Alias for clip_temp_to_roi, to be deprecated in the future."""
- return clip_temp_to_roi(thermal_np, thermal_roi_values)
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/opendm/tiles/color_relief.txt b/o/ODM/ODM-2.8.7/opendm/tiles/color_relief.txt
deleted file mode 100644
index 12f25ce0..00000000
--- a/o/ODM/ODM-2.8.7/opendm/tiles/color_relief.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-0% 68 1 84
-10% 72 36 117
-20% 64 67 135
-30% 52 95 141
-40% 41 120 142
-50% 32 144 141
-60% 34 168 132
-70% 67 191 112
-80% 122 210 81
-90% 188 223 39
-100% 253 231 37
-nv 0 0 0 0
\ No newline at end of file
diff --git a/o/ODM/ODM-2.8.7/opendm/tiles/gdal2tiles.py b/o/ODM/ODM-2.8.7/opendm/tiles/gdal2tiles.py
deleted file mode 100644
index 82e32604..00000000
--- a/o/ODM/ODM-2.8.7/opendm/tiles/gdal2tiles.py
+++ /dev/null
@@ -1,2949 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# ******************************************************************************
-# $Id$
-#
-# Project: Google Summer of Code 2007, 2008 (http://code.google.com/soc/)
-# Support: BRGM (http://www.brgm.fr)
-# Purpose: Convert a raster into TMS (Tile Map Service) tiles in a directory.
-# - generate Google Earth metadata (KML SuperOverlay)
-# - generate simple HTML viewer based on Google Maps and OpenLayers
-# - support of global tiles (Spherical Mercator) for compatibility
-# with interactive web maps a la Google Maps
-# Author: Klokan Petr Pridal, klokan at klokan dot cz
-# Web: http://www.klokan.cz/projects/gdal2tiles/
-# GUI: http://www.maptiler.org/
-#
-###############################################################################
-# Copyright (c) 2008, Klokan Petr Pridal
-# Copyright (c) 2010-2013, Even Rouault
-#
-# Permission is hereby granted, free of charge, to any person obtaining a
-# copy of this software and associated documentation files (the "Software"),
-# to deal in the Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish, distribute, sublicense,
-# and/or sell copies of the Software, and to permit persons to whom the
-# Software is furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included
-# in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-# DEALINGS IN THE SOFTWARE.
-# ******************************************************************************
-
-from __future__ import print_function, division
-
-import math
-from multiprocessing import Pipe, Pool, Process, Manager
-import os
-import tempfile
-import shutil
-import sys
-from uuid import uuid4
-from xml.etree import ElementTree
-
-from osgeo import gdal
-from osgeo import osr
-
-try:
- from PIL import Image
- import numpy
- import osgeo.gdal_array as gdalarray
-except Exception:
- # 'antialias' resampling is not available
- pass
-
-__version__ = "$Id$"
-
-resampling_list = ('average', 'near', 'bilinear', 'cubic', 'cubicspline', 'lanczos', 'antialias')
-profile_list = ('mercator', 'geodetic', 'raster')
-webviewer_list = ('all', 'google', 'openlayers', 'leaflet', 'none')
-
-# =============================================================================
-# =============================================================================
-# =============================================================================
-
-__doc__globalmaptiles = """
-globalmaptiles.py
-
-Global Map Tiles as defined in Tile Map Service (TMS) Profiles
-==============================================================
-
-Functions necessary for generation of global tiles used on the web.
-It contains classes implementing coordinate conversions for:
-
- - GlobalMercator (based on EPSG:3857)
- for Google Maps, Yahoo Maps, Bing Maps compatible tiles
- - GlobalGeodetic (based on EPSG:4326)
- for OpenLayers Base Map and Google Earth compatible tiles
-
-More info at:
-
-http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification
-http://wiki.osgeo.org/wiki/WMS_Tiling_Client_Recommendation
-http://msdn.microsoft.com/en-us/library/bb259689.aspx
-http://code.google.com/apis/maps/documentation/overlays.html#Google_Maps_Coordinates
-
-Created by Klokan Petr Pridal on 2008-07-03.
-Google Summer of Code 2008, project GDAL2Tiles for OSGEO.
-
-In case you use this class in your product, translate it to another language
-or find it useful for your project please let me know.
-My email: klokan at klokan dot cz.
-I would like to know where it was used.
-
-Class is available under the open-source GDAL license (www.gdal.org).
-"""
-
-MAXZOOMLEVEL = 32
-
-
-class GlobalMercator(object):
- r"""
- TMS Global Mercator Profile
- ---------------------------
-
- Functions necessary for generation of tiles in Spherical Mercator projection,
- EPSG:3857.
-
- Such tiles are compatible with Google Maps, Bing Maps, Yahoo Maps,
- UK Ordnance Survey OpenSpace API, ...
- and you can overlay them on top of base maps of those web mapping applications.
-
- Pixel and tile coordinates are in TMS notation (origin [0,0] in bottom-left).
-
- What coordinate conversions do we need for TMS Global Mercator tiles::
-
- LatLon <-> Meters <-> Pixels <-> Tile
-
- WGS84 coordinates Spherical Mercator Pixels in pyramid Tiles in pyramid
- lat/lon XY in meters XY pixels Z zoom XYZ from TMS
- EPSG:4326 EPSG:3857
- .----. --------- -- TMS
- / \ <-> | | <-> /----/ <-> Google
- \ / | | /--------/ QuadTree
- ----- --------- /------------/
- KML, public WebMapService Web Clients TileMapService
-
- What is the coordinate extent of Earth in EPSG:3857?
-
- [-20037508.342789244, -20037508.342789244, 20037508.342789244, 20037508.342789244]
- Constant 20037508.342789244 comes from the circumference of the Earth in meters,
- which is 40 thousand kilometers, the coordinate origin is in the middle of extent.
- In fact you can calculate the constant as: 2 * math.pi * 6378137 / 2.0
- $ echo 180 85 | gdaltransform -s_srs EPSG:4326 -t_srs EPSG:3857
- Polar areas with abs(latitude) bigger than 85.05112878 are clipped off.
-
- What are zoom level constants (pixels/meter) for pyramid with EPSG:3857?
-
- whole region is on top of pyramid (zoom=0) covered by 256x256 pixels tile,
- every lower zoom level resolution is always divided by two
- initialResolution = 20037508.342789244 * 2 / 256 = 156543.03392804062
-
- What is the difference between TMS and Google Maps/QuadTree tile name convention?
-
- The tile raster itself is the same (equal extent, projection, pixel size),
- there is just different identification of the same raster tile.
- Tiles in TMS are counted from [0,0] in the bottom-left corner, id is XYZ.
- Google placed the origin [0,0] to the top-left corner, reference is XYZ.
- Microsoft is referencing tiles by a QuadTree name, defined on the website:
- http://msdn2.microsoft.com/en-us/library/bb259689.aspx
-
- The lat/lon coordinates are using WGS84 datum, yes?
-
- Yes, all lat/lon we are mentioning should use WGS84 Geodetic Datum.
- Well, the web clients like Google Maps are projecting those coordinates by
- Spherical Mercator, so in fact lat/lon coordinates on sphere are treated as if
- they were on the WGS84 ellipsoid.
-
- From MSDN documentation:
- To simplify the calculations, we use the spherical form of projection, not
- the ellipsoidal form. Since the projection is used only for map display,
- and not for displaying numeric coordinates, we don't need the extra precision
- of an ellipsoidal projection. The spherical projection causes approximately
- 0.33 percent scale distortion in the Y direction, which is not visually
- noticeable.
-
- How do I create a raster in EPSG:3857 and convert coordinates with PROJ.4?
-
- You can use standard GIS tools like gdalwarp, cs2cs or gdaltransform.
- All of these tools support -t_srs 'epsg:3857'.
-
- For other GIS programs check the exact definition of the projection:
- More info at http://spatialreference.org/ref/user/google-projection/
- The same projection is designated as EPSG:3857. WKT definition is in the
- official EPSG database.
-
- Proj4 Text:
- +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0
- +k=1.0 +units=m +nadgrids=@null +no_defs
-
- Human readable WKT format of EPSG:3857:
- PROJCS["Google Maps Global Mercator",
- GEOGCS["WGS 84",
- DATUM["WGS_1984",
- SPHEROID["WGS 84",6378137,298.257223563,
- AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich",0],
- UNIT["degree",0.0174532925199433],
- AUTHORITY["EPSG","4326"]],
- PROJECTION["Mercator_1SP"],
- PARAMETER["central_meridian",0],
- PARAMETER["scale_factor",1],
- PARAMETER["false_easting",0],
- PARAMETER["false_northing",0],
- UNIT["metre",1,
- AUTHORITY["EPSG","9001"]]]
- """
-
- def __init__(self, tileSize=256):
- "Initialize the TMS Global Mercator pyramid"
- self.tileSize = tileSize
- self.initialResolution = 2 * math.pi * 6378137 / self.tileSize
- # 156543.03392804062 for tileSize 256 pixels
- self.originShift = 2 * math.pi * 6378137 / 2.0
- # 20037508.342789244
-
- def LatLonToMeters(self, lat, lon):
- "Converts given lat/lon in WGS84 Datum to XY in Spherical Mercator EPSG:3857"
-
- mx = lon * self.originShift / 180.0
- my = math.log(math.tan((90 + lat) * math.pi / 360.0)) / (math.pi / 180.0)
-
- my = my * self.originShift / 180.0
- return mx, my
-
- def MetersToLatLon(self, mx, my):
- "Converts XY point from Spherical Mercator EPSG:3857 to lat/lon in WGS84 Datum"
-
- lon = (mx / self.originShift) * 180.0
- lat = (my / self.originShift) * 180.0
-
- lat = 180 / math.pi * (2 * math.atan(math.exp(lat * math.pi / 180.0)) - math.pi / 2.0)
- return lat, lon
-
- def PixelsToMeters(self, px, py, zoom):
- "Converts pixel coordinates in given zoom level of pyramid to EPSG:3857"
-
- res = self.Resolution(zoom)
- mx = px * res - self.originShift
- my = py * res - self.originShift
- return mx, my
-
- def MetersToPixels(self, mx, my, zoom):
- "Converts EPSG:3857 to pyramid pixel coordinates in given zoom level"
-
- res = self.Resolution(zoom)
- px = (mx + self.originShift) / res
- py = (my + self.originShift) / res
- return px, py
-
- def PixelsToTile(self, px, py):
- "Returns a tile covering region in given pixel coordinates"
-
- tx = int(math.ceil(px / float(self.tileSize)) - 1)
- ty = int(math.ceil(py / float(self.tileSize)) - 1)
- return tx, ty
-
- def PixelsToRaster(self, px, py, zoom):
- "Move the origin of pixel coordinates to top-left corner"
-
- mapSize = self.tileSize << zoom
- return px, mapSize - py
-
- def MetersToTile(self, mx, my, zoom):
- "Returns tile for given mercator coordinates"
-
- px, py = self.MetersToPixels(mx, my, zoom)
- return self.PixelsToTile(px, py)
-
- def TileBounds(self, tx, ty, zoom):
- "Returns bounds of the given tile in EPSG:3857 coordinates"
-
- minx, miny = self.PixelsToMeters(tx*self.tileSize, ty*self.tileSize, zoom)
- maxx, maxy = self.PixelsToMeters((tx+1)*self.tileSize, (ty+1)*self.tileSize, zoom)
- return (minx, miny, maxx, maxy)
-
- def TileLatLonBounds(self, tx, ty, zoom):
- "Returns bounds of the given tile in latitude/longitude using WGS84 datum"
-
- bounds = self.TileBounds(tx, ty, zoom)
- minLat, minLon = self.MetersToLatLon(bounds[0], bounds[1])
- maxLat, maxLon = self.MetersToLatLon(bounds[2], bounds[3])
-
- return (minLat, minLon, maxLat, maxLon)
-
- def Resolution(self, zoom):
- "Resolution (meters/pixel) for given zoom level (measured at Equator)"
-
- # return (2 * math.pi * 6378137) / (self.tileSize * 2**zoom)
- return self.initialResolution / (2**zoom)
-
- def ZoomForPixelSize(self, pixelSize):
- "Maximal scaledown zoom of the pyramid closest to the pixelSize."
-
- for i in range(MAXZOOMLEVEL):
- if pixelSize > self.Resolution(i):
- if i != 0:
- return i-1
- else:
- return 0 # We don't want to scale up
-
- def GoogleTile(self, tx, ty, zoom):
- "Converts TMS tile coordinates to Google Tile coordinates"
-
- # coordinate origin is moved from bottom-left to top-left corner of the extent
- return tx, (2**zoom - 1) - ty
-
- def QuadTree(self, tx, ty, zoom):
- "Converts TMS tile coordinates to Microsoft QuadTree"
-
- quadKey = ""
- ty = (2**zoom - 1) - ty
- for i in range(zoom, 0, -1):
- digit = 0
- mask = 1 << (i-1)
- if (tx & mask) != 0:
- digit += 1
- if (ty & mask) != 0:
- digit += 2
- quadKey += str(digit)
-
- return quadKey
-
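-# Worked example (illustrative): at zoom 0 a single 256 px tile spans the
-# globe, so Resolution(0) == 156543.03392804062 m/px and every zoom halves it.
-# gm = GlobalMercator()
-# mx, my = gm.LatLonToMeters(50.08, 14.43) # roughly (1.61e6, 6.46e6) metres
-# tx, ty = gm.MetersToTile(mx, my, 10) # TMS tile indices at zoom 10
-# gm.QuadTree(tx, ty, 10) # 10-character Bing-style quadkey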
-
-class GlobalGeodetic(object):
- r"""
- TMS Global Geodetic Profile
- ---------------------------
-
- Functions necessary for generation of global tiles in Plate Carre projection,
- EPSG:4326, "unprojected profile".
-
- Such tiles are compatible with Google Earth (as any other EPSG:4326 rasters)
- and you can overlay the tiles on top of OpenLayers base map.
-
- Pixel and tile coordinates are in TMS notation (origin [0,0] in bottom-left).
-
- What coordinate conversions do we need for TMS Global Geodetic tiles?
-
- Global Geodetic tiles are using geodetic coordinates (latitude,longitude)
- directly as planar coordinates XY (it is also called Unprojected or Plate
- Carre). We need only scaling to pixel pyramid and cutting to tiles.
- Pyramid has on top level two tiles, so it is not square but rectangle.
- Area [-180,-90,180,90] is scaled to 512x256 pixels.
- TMS has coordinate origin (for pixels and tiles) in bottom-left corner.
- Rasters are in EPSG:4326 and therefore are compatible with Google Earth.
-
- LatLon <-> Pixels <-> Tiles
-
- WGS84 coordinates Pixels in pyramid Tiles in pyramid
- lat/lon XY pixels Z zoom XYZ from TMS
- EPSG:4326
- .----. ----
- / \ <-> /--------/ <-> TMS
- \ / /--------------/
- ----- /--------------------/
- WMS, KML Web Clients, Google Earth TileMapService
- """
-
- def __init__(self, tmscompatible, tileSize=256):
- self.tileSize = tileSize
- if tmscompatible is not None:
- # Defaults the resolution factor to 0.703125 (2 tiles @ level 0)
- # Adheres to the OSGeo TMS spec
- # http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification#global-geodetic
- self.resFact = 180.0 / self.tileSize
- else:
- # Defaults the resolution factor to 1.40625 (1 tile @ level 0)
- # Adheres to the OpenLayers, MapProxy, etc. default resolution for WMTS
- self.resFact = 360.0 / self.tileSize
-
- def LonLatToPixels(self, lon, lat, zoom):
- "Converts lon/lat to pixel coordinates in given zoom of the EPSG:4326 pyramid"
-
- res = self.resFact / 2**zoom
- px = (180 + lon) / res
- py = (90 + lat) / res
- return px, py
-
- def PixelsToTile(self, px, py):
- "Returns coordinates of the tile covering region in pixel coordinates"
-
- tx = int(math.ceil(px / float(self.tileSize)) - 1)
- ty = int(math.ceil(py / float(self.tileSize)) - 1)
- return tx, ty
-
- def LonLatToTile(self, lon, lat, zoom):
- "Returns the tile for zoom which covers given lon/lat coordinates"
-
- px, py = self.LonLatToPixels(lon, lat, zoom)
- return self.PixelsToTile(px, py)
-
- def Resolution(self, zoom):
- "Resolution (arc/pixel) for given zoom level (measured at Equator)"
-
- return self.resFact / 2**zoom
-
- def ZoomForPixelSize(self, pixelSize):
- "Maximal scaledown zoom of the pyramid closest to the pixelSize."
-
- for i in range(MAXZOOMLEVEL):
- if pixelSize > self.Resolution(i):
- if i != 0:
- return i-1
- else:
- return 0 # We don't want to scale up
-
- def TileBounds(self, tx, ty, zoom):
- "Returns bounds of the given tile"
- res = self.resFact / 2**zoom
- return (
- tx*self.tileSize*res - 180,
- ty*self.tileSize*res - 90,
- (tx+1)*self.tileSize*res - 180,
- (ty+1)*self.tileSize*res - 90
- )
-
- def TileLatLonBounds(self, tx, ty, zoom):
- "Returns bounds of the given tile in the SWNE form"
- b = self.TileBounds(tx, ty, zoom)
- return (b[1], b[0], b[3], b[2])
-
-
-class Zoomify(object):
- """
- Tiles compatible with the Zoomify viewer
- ----------------------------------------
- """
-
- def __init__(self, width, height, tilesize=256, tileformat='jpg'):
- """Initialization of the Zoomify tile tree"""
-
- self.tilesize = tilesize
- self.tileformat = tileformat
- imagesize = (width, height)
- tiles = (math.ceil(width / tilesize), math.ceil(height / tilesize))
-
- # Size (in tiles) for each tier of pyramid.
- self.tierSizeInTiles = []
- self.tierSizeInTiles.append(tiles)
-
- # Image size in pixels for each pyramid tier
- self.tierImageSize = []
- self.tierImageSize.append(imagesize)
-
- while (imagesize[0] > tilesize or imagesize[1] > tilesize):
- imagesize = (math.floor(imagesize[0] / 2), math.floor(imagesize[1] / 2))
- tiles = (math.ceil(imagesize[0] / tilesize), math.ceil(imagesize[1] / tilesize))
- self.tierSizeInTiles.append(tiles)
- self.tierImageSize.append(imagesize)
-
- self.tierSizeInTiles.reverse()
- self.tierImageSize.reverse()
-
- # Depth of the Zoomify pyramid, number of tiers (zoom levels)
- self.numberOfTiers = len(self.tierSizeInTiles)
-
- # Number of tiles up to the given tier of pyramid.
- self.tileCountUpToTier = [0]
- for i in range(1, self.numberOfTiers+1):
- self.tileCountUpToTier.append(
- self.tierSizeInTiles[i-1][0] * self.tierSizeInTiles[i-1][1] +
- self.tileCountUpToTier[i-1]
- )
-
- def tilefilename(self, x, y, z):
- """Returns filename for tile with given coordinates"""
-
- tileIndex = x + y * self.tierSizeInTiles[z][0] + self.tileCountUpToTier[z]
- return os.path.join("TileGroup%.0f" % math.floor(tileIndex / 256),
- "%s-%s-%s.%s" % (z, x, y, self.tileformat))
-
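-# Illustrative: a 1024x1024 source with 256 px tiles yields tiers of 1x1, 2x2
-# and 4x4 tiles, tileCountUpToTier == [0, 1, 5, 21], and the top tier's tile
-# (0, 0, 0) maps to TileGroup0/"0-0-0.jpg" via tilefilename().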
-
-class GDALError(Exception):
- pass
-
-import os
-main_pid = os.getpid()
-
-def exit_with_error(message, details=""):
- # Message printing and exit code kept from the way it worked using the OptionParser (in case
- # someone parses the error output)
- sys.stderr.write("Usage: gdal2tiles.py [options] input_file [output]\n\n")
- sys.stderr.write("gdal2tiles.py: error: %s\n" % message)
- if details:
- sys.stderr.write("\n\n%s\n" % details)
-
- import signal
- os.kill(main_pid, signal.SIGINT)
- sys.exit(2)
-
-
-def generate_kml(tx, ty, tz, tileext, tilesize, tileswne, options, children=None, **args):
- """
- Template for the KML. Returns filled string.
- """
- if not children:
- children = []
-
- args['tx'], args['ty'], args['tz'] = tx, ty, tz
- args['tileformat'] = tileext
- if 'tilesize' not in args:
- args['tilesize'] = tilesize
-
- if 'minlodpixels' not in args:
- args['minlodpixels'] = int(args['tilesize'] / 2)
- if 'maxlodpixels' not in args:
- args['maxlodpixels'] = int(args['tilesize'] * 8)
- if children == []:
- args['maxlodpixels'] = -1
-
- if tx is None:
- tilekml = False
- args['title'] = options.title
- else:
- tilekml = True
- args['title'] = "%d/%d/%d.kml" % (tz, tx, ty)
- args['south'], args['west'], args['north'], args['east'] = tileswne(tx, ty, tz)
-
- if tx == 0:
- args['drawOrder'] = 2 * tz + 1
- elif tx is not None:
- args['drawOrder'] = 2 * tz
- else:
- args['drawOrder'] = 0
-
- url = options.url
- if not url:
- if tilekml:
- url = "../../"
- else:
- url = ""
-
- s = """
-
-
- %(title)s
-
- """ % args
- if tilekml:
- s += """
-
-
- %(north).14f
- %(south).14f
- %(east).14f
- %(west).14f
-
-
- %(minlodpixels)d
- %(maxlodpixels)d
-
-
-
- %(drawOrder)d
-
- %(ty)d.%(tileformat)s
-
-
- %(north).14f
- %(south).14f
- %(east).14f
- %(west).14f
-
-
-""" % args
-
- for cx, cy, cz in children:
- csouth, cwest, cnorth, ceast = tileswne(cx, cy, cz)
- s += """
-
- %d/%d/%d.%s
-
-
- %.14f
- %.14f
- %.14f
- %.14f
-
-
- %d
- -1
-
-
-
- %s%d/%d/%d.kml
- onRegion
-
-
-
- """ % (cz, cx, cy, args['tileformat'], cnorth, csouth, ceast, cwest,
- args['minlodpixels'], url, cz, cx, cy)
-
- s += """
-
- """
- return s
-
-
-def scale_query_to_tile(dsquery, dstile, tiledriver, options, tilefilename=''):
- """Scales down query dataset to the tile dataset"""
-
- querysize = dsquery.RasterXSize
- tilesize = dstile.RasterXSize
- tilebands = dstile.RasterCount
-
- if options.resampling == 'average':
-
- # Function: gdal.RegenerateOverview()
- for i in range(1, tilebands+1):
- # Black border around NODATA
- res = gdal.RegenerateOverview(dsquery.GetRasterBand(i), dstile.GetRasterBand(i),
- 'average')
- if res != 0:
- exit_with_error("RegenerateOverview() failed on %s, error %d" % (
- tilefilename, res))
-
- elif options.resampling == 'antialias':
-
- # Scaling by PIL (Python Imaging Library) - improved Lanczos
- array = numpy.zeros((querysize, querysize, tilebands), numpy.uint8)
- for i in range(tilebands):
- array[:, :, i] = gdalarray.BandReadAsArray(dsquery.GetRasterBand(i+1),
- 0, 0, querysize, querysize)
- im = Image.fromarray(array, 'RGBA') # Always four bands
- im1 = im.resize((tilesize, tilesize), Image.ANTIALIAS)
- if os.path.exists(tilefilename):
- im0 = Image.open(tilefilename)
- im1 = Image.composite(im1, im0, im1)
- im1.save(tilefilename, tiledriver)
-
- else:
-
- if options.resampling == 'near':
- gdal_resampling = gdal.GRA_NearestNeighbour
-
- elif options.resampling == 'bilinear':
- gdal_resampling = gdal.GRA_Bilinear
-
- elif options.resampling == 'cubic':
- gdal_resampling = gdal.GRA_Cubic
-
- elif options.resampling == 'cubicspline':
- gdal_resampling = gdal.GRA_CubicSpline
-
- elif options.resampling == 'lanczos':
- gdal_resampling = gdal.GRA_Lanczos
-
-        # Other algorithms are implemented by gdal.ReprojectImage(); the two geotransforms
-        # below map query pixels onto tile pixels, so the "reprojection" is a pure rescale.
- dsquery.SetGeoTransform((0.0, tilesize / float(querysize), 0.0, 0.0, 0.0,
- tilesize / float(querysize)))
- dstile.SetGeoTransform((0.0, 1.0, 0.0, 0.0, 0.0, 1.0))
-
- res = gdal.ReprojectImage(dsquery, dstile, None, None, gdal_resampling)
- if res != 0:
- exit_with_error("ReprojectImage() failed on %s, error %d" % (tilefilename, res))
-
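-# Editor's sketch (not part of the original file): the core of the 'average'
-# branch above, shrinking a 1024x1024 in-memory query band into a 256x256 tile
-# band with gdal.RegenerateOverview().
-def _example_average_downsample():
-    mem = gdal.GetDriverByName('MEM')
-    dsquery = mem.Create('', 1024, 1024, 1)
-    dstile = mem.Create('', 256, 256, 1)
-    dsquery.GetRasterBand(1).Fill(128)
-    gdal.RegenerateOverview(dsquery.GetRasterBand(1), dstile.GetRasterBand(1), 'average')
-    return dstile
-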
-
-def setup_no_data_values(input_dataset, options):
- """
- Extract the NODATA values from the dataset or use the passed arguments as override if any
- """
- in_nodata = []
- if options.srcnodata:
- nds = list(map(float, options.srcnodata.split(',')))
- if len(nds) < input_dataset.RasterCount:
- in_nodata = (nds * input_dataset.RasterCount)[:input_dataset.RasterCount]
- else:
- in_nodata = nds
- else:
- for i in range(1, input_dataset.RasterCount+1):
- raster_no_data = input_dataset.GetRasterBand(i).GetNoDataValue()
- if raster_no_data is not None:
- in_nodata.append(raster_no_data)
-
- if options.verbose:
- print("NODATA: %s" % in_nodata)
-
- return in_nodata
-
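-# Editor's sketch (not part of the original file): how a short --srcnodata list
-# is recycled across bands by the logic above (one value, three-band raster).
-_nds, _count = [0.0], 3
-assert (_nds * _count)[:_count] == [0.0, 0.0, 0.0]
-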
-
-def setup_input_srs(input_dataset, options):
- """
- Determines and returns the Input Spatial Reference System (SRS) as an osr object and as a
- WKT representation
-
-    The SRS passed on the command line takes priority; if none is given, it is
-    extracted from the input dataset
- """
-
- input_srs = None
- input_srs_wkt = None
-
- if options.s_srs:
- input_srs = osr.SpatialReference()
- input_srs.SetFromUserInput(options.s_srs)
- input_srs_wkt = input_srs.ExportToWkt()
- else:
- input_srs_wkt = input_dataset.GetProjection()
- if not input_srs_wkt and input_dataset.GetGCPCount() != 0:
- input_srs_wkt = input_dataset.GetGCPProjection()
- if input_srs_wkt:
- input_srs = osr.SpatialReference()
- input_srs.ImportFromWkt(input_srs_wkt)
-
- return input_srs, input_srs_wkt
-
-
-def setup_output_srs(input_srs, options):
- """
- Setup the desired SRS (based on options)
- """
- output_srs = osr.SpatialReference()
-
- if options.profile == 'mercator':
- output_srs.ImportFromEPSG(3857)
- elif options.profile == 'geodetic':
- output_srs.ImportFromEPSG(4326)
- else:
- output_srs = input_srs
-
- return output_srs
-
-
-def has_georeference(dataset):
- return (dataset.GetGeoTransform() != (0.0, 1.0, 0.0, 0.0, 0.0, 1.0) or
- dataset.GetGCPCount() != 0)
-
-
-def reproject_dataset(from_dataset, from_srs, to_srs, options=None):
- """
- Returns the input dataset in the expected "destination" SRS.
- If the dataset is already in the correct SRS, returns it unmodified
- """
- if not from_srs or not to_srs:
- raise GDALError("from and to SRS must be defined to reproject the dataset")
-
- if (from_srs.ExportToProj4() != to_srs.ExportToProj4()) or (from_dataset.GetGCPCount() != 0):
- to_dataset = gdal.AutoCreateWarpedVRT(from_dataset,
- from_srs.ExportToWkt(), to_srs.ExportToWkt())
-
- if options and options.verbose:
- print("Warping of the raster by AutoCreateWarpedVRT (result saved into 'tiles.vrt')")
- to_dataset.GetDriver().CreateCopy("tiles.vrt", to_dataset)
-
- return to_dataset
- else:
- return from_dataset
-
-
-def add_gdal_warp_options_to_string(vrt_string, warp_options):
- if not warp_options:
- return vrt_string
-
- vrt_root = ElementTree.fromstring(vrt_string)
- options = vrt_root.find("GDALWarpOptions")
-
- if options is None:
- return vrt_string
-
- for key, value in warp_options.items():
- tb = ElementTree.TreeBuilder()
- tb.start("Option", {"name": key})
- tb.data(value)
- tb.end("Option")
- elem = tb.close()
- options.insert(0, elem)
-
- return ElementTree.tostring(vrt_root).decode()
-
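-# Editor's sketch (not part of the original file): effect of the helper above on
-# a minimal VRT snippet -- the option lands inside GDALWarpOptions.
-_vrt = "<VRTDataset><GDALWarpOptions></GDALWarpOptions></VRTDataset>"
-_patched = add_gdal_warp_options_to_string(_vrt, {"INIT_DEST": "NO_DATA"})
-# -> '<VRTDataset><GDALWarpOptions><Option name="INIT_DEST">NO_DATA</Option>...'
-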
-
-def update_no_data_values(warped_vrt_dataset, nodata_values, options=None):
- """
-    Takes a list of NODATA values and forces them onto the warped VRT dataset passed in
- """
- # TODO: gbataille - Seems that I forgot tests there
- if nodata_values != []:
- temp_file = gettempfilename('-gdal2tiles.vrt')
- warped_vrt_dataset.GetDriver().CreateCopy(temp_file, warped_vrt_dataset)
- with open(temp_file, 'r') as f:
- vrt_string = f.read()
-
- vrt_string = add_gdal_warp_options_to_string(
- vrt_string, {"INIT_DEST": "NO_DATA", "UNIFIED_SRC_NODATA": "YES"})
-
-# TODO: gbataille - check the need for this replacement. Seems to work without
-#             # replace BandMapping tag for NODATA bands....
-#             for i in range(len(nodata_values)):
-#                 s = s.replace(
-#                     '<BandMapping src="%i" dst="%i"/>' % ((i+1), (i+1)),
-#                     """
-# <BandMapping src="%i" dst="%i">
-#   <SrcNoDataReal>%i</SrcNoDataReal>
-#   <SrcNoDataImag>0</SrcNoDataImag>
-#   <DstNoDataReal>%i</DstNoDataReal>
-#   <DstNoDataImag>0</DstNoDataImag>
-# </BandMapping>
-#                     """ % ((i+1), (i+1), nodata_values[i], nodata_values[i]))
-
- # save the corrected VRT
- with open(temp_file, 'w') as f:
- f.write(vrt_string)
-
- corrected_dataset = gdal.Open(temp_file)
- os.unlink(temp_file)
-
- # set NODATA_VALUE metadata
- corrected_dataset.SetMetadataItem(
- 'NODATA_VALUES', ' '.join([str(i) for i in nodata_values]))
-
- if options and options.verbose:
- print("Modified warping result saved into 'tiles1.vrt'")
-            # TODO: gbataille - test replacing that with a gdal write of the dataset
-            # (more accurately what's used, even if it should be the same)
- with open("tiles1.vrt", "w") as f:
- f.write(vrt_string)
-
- return corrected_dataset
-
-
-def add_alpha_band_to_string_vrt(vrt_string):
-    # TODO: gbataille - Old code speaks of this being equivalent to gdalwarp -dstalpha
- # To be checked
-
- vrt_root = ElementTree.fromstring(vrt_string)
-
- index = 0
- nb_bands = 0
- for subelem in list(vrt_root):
- if subelem.tag == "VRTRasterBand":
- nb_bands += 1
- color_node = subelem.find("./ColorInterp")
- if color_node is not None and color_node.text == "Alpha":
- raise Exception("Alpha band already present")
- else:
- if nb_bands:
- # This means that we are one element after the Band definitions
- break
-
- index += 1
-
- tb = ElementTree.TreeBuilder()
- tb.start("VRTRasterBand",
- {'dataType': "Byte", "band": str(nb_bands + 1), "subClass": "VRTWarpedRasterBand"})
- tb.start("ColorInterp", {})
- tb.data("Alpha")
- tb.end("ColorInterp")
- tb.end("VRTRasterBand")
- elem = tb.close()
-
- vrt_root.insert(index, elem)
-
- warp_options = vrt_root.find(".//GDALWarpOptions")
- tb = ElementTree.TreeBuilder()
- tb.start("DstAlphaBand", {})
- tb.data(str(nb_bands + 1))
- tb.end("DstAlphaBand")
- elem = tb.close()
- warp_options.append(elem)
-
- # TODO: gbataille - this is a GDALWarpOptions. Why put it in a specific place?
- tb = ElementTree.TreeBuilder()
- tb.start("Option", {"name": "INIT_DEST"})
- tb.data("0")
- tb.end("Option")
- elem = tb.close()
- warp_options.append(elem)
-
- return ElementTree.tostring(vrt_root).decode()
-
-
-def update_alpha_value_for_non_alpha_inputs(warped_vrt_dataset, options=None):
- """
-    Handles datasets with 1 or 3 bands, i.e. without an alpha channel, in the case where
-    the NODATA value has not been forced by options
- """
- if warped_vrt_dataset.RasterCount in [1, 3]:
- tempfilename = gettempfilename('-gdal2tiles.vrt')
- warped_vrt_dataset.GetDriver().CreateCopy(tempfilename, warped_vrt_dataset)
- with open(tempfilename) as f:
- orig_data = f.read()
- alpha_data = add_alpha_band_to_string_vrt(orig_data)
- with open(tempfilename, 'w') as f:
- f.write(alpha_data)
-
- warped_vrt_dataset = gdal.Open(tempfilename)
- os.unlink(tempfilename)
-
- if options and options.verbose:
- print("Modified -dstalpha warping result saved into 'tiles1.vrt'")
-            # TODO: gbataille - test replacing that with a gdal write of the dataset
-            # (more accurately what's used, even if it should be the same)
- with open("tiles1.vrt", "w") as f:
- f.write(alpha_data)
-
- return warped_vrt_dataset
-
-
-def nb_data_bands(dataset):
- """
- Return the number of data (non-alpha) bands of a gdal dataset
- """
- alphaband = dataset.GetRasterBand(1).GetMaskBand()
- if ((alphaband.GetMaskFlags() & gdal.GMF_ALPHA) or
- dataset.RasterCount == 4 or
- dataset.RasterCount == 2):
- return dataset.RasterCount - 1
- else:
- return dataset.RasterCount
-
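-# Editor's sketch (not part of the original file): a 4-band in-memory dataset is
-# treated as RGB plus alpha by the rule above, so it reports three data bands.
-assert nb_data_bands(gdal.GetDriverByName('MEM').Create('', 64, 64, 4)) == 3
-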
-
-def gettempfilename(suffix):
- """Returns a temporary filename"""
- if '_' in os.environ:
-        # tempfile.mktemp() crashes on some Wine versions (particularly the one shipped with Ubuntu 12.04)
- if os.environ['_'].find('wine') >= 0:
- tmpdir = '.'
- if 'TMP' in os.environ:
- tmpdir = os.environ['TMP']
- import time
- import random
- random.seed(time.time())
- random_part = 'file%d' % random.randint(0, 1000000000)
- return os.path.join(tmpdir, random_part + suffix)
-
- return tempfile.mktemp(suffix)
-
-
-def create_base_tile(tile_job_info, tile_detail, queue=None):
- gdal.AllRegister()
-
- dataBandsCount = tile_job_info.nb_data_bands
- output = tile_job_info.output_file_path
- tileext = tile_job_info.tile_extension
- tilesize = tile_job_info.tile_size
- options = tile_job_info.options
-
- tilebands = dataBandsCount + 1
- ds = gdal.Open(tile_job_info.src_file, gdal.GA_ReadOnly)
- mem_drv = gdal.GetDriverByName('MEM')
- out_drv = gdal.GetDriverByName(tile_job_info.tile_driver)
- alphaband = ds.GetRasterBand(1).GetMaskBand()
-
- tx = tile_detail.tx
- ty = tile_detail.ty
- tz = tile_detail.tz
- rx = tile_detail.rx
- ry = tile_detail.ry
- rxsize = tile_detail.rxsize
- rysize = tile_detail.rysize
- wx = tile_detail.wx
- wy = tile_detail.wy
- wxsize = tile_detail.wxsize
- wysize = tile_detail.wysize
- querysize = tile_detail.querysize
-
- # Tile dataset in memory
- tilefilename = os.path.join(
- output, str(tz), str(tx), "%s.%s" % (ty, tileext))
- dstile = mem_drv.Create('', tilesize, tilesize, tilebands)
-
- data = alpha = None
-
- if options.verbose:
- print("\tReadRaster Extent: ",
- (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize))
-
-    # The query is done with 'nearest neighbour', but the window can be bigger than the tile size.
-    # We scale the query down to the tile size with the supplied algorithm.
-
- if rxsize != 0 and rysize != 0 and wxsize != 0 and wysize != 0:
- data = ds.ReadRaster(rx, ry, rxsize, rysize, wxsize, wysize,
- band_list=list(range(1, dataBandsCount+1)))
- alpha = alphaband.ReadRaster(rx, ry, rxsize, rysize, wxsize, wysize)
-
- # The tile in memory is a transparent file by default. Write pixel values into it if
- # any
- if data:
- if tilesize == querysize:
- # Use the ReadRaster result directly in tiles ('nearest neighbour' query)
- dstile.WriteRaster(wx, wy, wxsize, wysize, data,
- band_list=list(range(1, dataBandsCount+1)))
- dstile.WriteRaster(wx, wy, wxsize, wysize, alpha, band_list=[tilebands])
-
-            # Note: For source drivers based on wavelet compression (JPEG2000, ECW,
-            # MrSID) the ReadRaster function returns a high-quality raster (not ugly
-            # nearest neighbour)
-            # TODO: Use 'near' directly for wavelet files
- else:
- # Big ReadRaster query in memory scaled to the tilesize - all but 'near'
- # algo
- dsquery = mem_drv.Create('', querysize, querysize, tilebands)
- # TODO: fill the null value in case a tile without alpha is produced (now
- # only png tiles are supported)
- dsquery.WriteRaster(wx, wy, wxsize, wysize, data,
- band_list=list(range(1, dataBandsCount+1)))
- dsquery.WriteRaster(wx, wy, wxsize, wysize, alpha, band_list=[tilebands])
-
- scale_query_to_tile(dsquery, dstile, tile_job_info.tile_driver, options,
- tilefilename=tilefilename)
- del dsquery
-
- # Force freeing the memory to make sure the C++ destructor is called and the memory as well as
- # the file locks are released
- del ds
- del data
-
- if options.resampling != 'antialias':
- # Write a copy of tile to png/jpg
- out_drv.CreateCopy(tilefilename, dstile, strict=0)
-
- del dstile
-
- # Create a KML file for this tile.
- if tile_job_info.kml:
- kmlfilename = os.path.join(output, str(tz), str(tx), '%d.kml' % ty)
- if not options.resume or not os.path.exists(kmlfilename):
- with open(kmlfilename, 'wb') as f:
- f.write(generate_kml(
- tx, ty, tz, tile_job_info.tile_extension, tile_job_info.tile_size,
- tile_job_info.tile_swne, tile_job_info.options
- ).encode('utf-8'))
-
- if queue:
- queue.put("tile %s %s %s" % (tx, ty, tz))
-
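-# Editor's sketch (not part of the original file): driving create_base_tile()
-# for a single 256x256 tile. TileJobInfo and TileDetail are defined further
-# below; "tiles.vrt", "out" and the zeroed coordinates are placeholder values.
-def _example_single_tile():
-    options, _ = optparse_init().parse_args(["input.tif"])
-    job = TileJobInfo(src_file="tiles.vrt", nb_data_bands=3, output_file_path="out",
-                      tile_extension="png", tile_driver="PNG", tile_size=256,
-                      kml=False, options=options)
-    detail = TileDetail(tx=0, ty=0, tz=0, rx=0, ry=0, rxsize=256, rysize=256,
-                        wx=0, wy=0, wxsize=256, wysize=256, querysize=256)
-    create_base_tile(job, detail)
-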
-
-def create_overview_tiles(tile_job_info, output_folder, options):
- """Generation of the overview tiles (higher in the pyramid) based on existing tiles"""
- mem_driver = gdal.GetDriverByName('MEM')
- tile_driver = tile_job_info.tile_driver
- out_driver = gdal.GetDriverByName(tile_driver)
-
- tilebands = tile_job_info.nb_data_bands + 1
-
-    # Usage of existing tiles: each overview tile is generated from four underlying tiles.
-
- tcount = 0
- for tz in range(tile_job_info.tmaxz - 1, tile_job_info.tminz - 1, -1):
- tminx, tminy, tmaxx, tmaxy = tile_job_info.tminmax[tz]
- tcount += (1 + abs(tmaxx-tminx)) * (1 + abs(tmaxy-tminy))
-
- ti = 0
-
- if tcount == 0:
- return
-
- if not options.quiet:
- print("Generating Overview Tiles:")
-
- progress_bar = ProgressBar(tcount)
- progress_bar.start()
-
- for tz in range(tile_job_info.tmaxz - 1, tile_job_info.tminz - 1, -1):
- tminx, tminy, tmaxx, tmaxy = tile_job_info.tminmax[tz]
- for ty in range(tmaxy, tminy - 1, -1):
- for tx in range(tminx, tmaxx + 1):
-
- ti += 1
- tilefilename = os.path.join(output_folder,
- str(tz),
- str(tx),
- "%s.%s" % (ty, tile_job_info.tile_extension))
-
- if options.verbose:
- print(ti, '/', tcount, tilefilename)
-
- if options.resume and os.path.exists(tilefilename):
- if options.verbose:
- print("Tile generation skipped because of --resume")
- else:
- progress_bar.log_progress()
- continue
-
- # Create directories for the tile
- if not os.path.exists(os.path.dirname(tilefilename)):
- os.makedirs(os.path.dirname(tilefilename))
-
- dsquery = mem_driver.Create('', 2 * tile_job_info.tile_size,
- 2 * tile_job_info.tile_size, tilebands)
- # TODO: fill the null value
- dstile = mem_driver.Create('', tile_job_info.tile_size, tile_job_info.tile_size,
- tilebands)
-
- # TODO: Implement more clever walking on the tiles with cache functionality
- # probably walk should start with reading of four tiles from top left corner
- # Hilbert curve
-
- children = []
- # Read the tiles and write them to query window
- for y in range(2 * ty, 2 * ty + 2):
- for x in range(2 * tx, 2 * tx + 2):
- minx, miny, maxx, maxy = tile_job_info.tminmax[tz + 1]
- if x >= minx and x <= maxx and y >= miny and y <= maxy:
- dsquerytile = gdal.Open(
- os.path.join(output_folder, str(tz + 1), str(x),
- "%s.%s" % (y, tile_job_info.tile_extension)),
- gdal.GA_ReadOnly)
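-                        # Position the child tile inside the 2x-size query
-                        # window: TMS rows grow northwards, so the upper child
-                        # row (odd y) maps to pixel offset 0 at the top.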
- if (ty == 0 and y == 1) or (ty != 0 and (y % (2 * ty)) != 0):
- tileposy = 0
- else:
- tileposy = tile_job_info.tile_size
- if tx:
- tileposx = x % (2 * tx) * tile_job_info.tile_size
- elif tx == 0 and x == 1:
- tileposx = tile_job_info.tile_size
- else:
- tileposx = 0
- dsquery.WriteRaster(
- tileposx, tileposy, tile_job_info.tile_size,
- tile_job_info.tile_size,
- dsquerytile.ReadRaster(0, 0,
- tile_job_info.tile_size,
- tile_job_info.tile_size),
- band_list=list(range(1, tilebands + 1)))
- children.append([x, y, tz + 1])
-
- scale_query_to_tile(dsquery, dstile, tile_driver, options,
- tilefilename=tilefilename)
- if options.resampling != 'antialias':
- # Write a copy of tile to png/jpg
- out_driver.CreateCopy(tilefilename, dstile, strict=0)
-
- if options.verbose:
- print("\tbuild from zoom", tz + 1,
- " tiles:", (2 * tx, 2 * ty), (2 * tx + 1, 2 * ty),
- (2 * tx, 2 * ty + 1), (2 * tx + 1, 2 * ty + 1))
-
- # Create a KML file for this tile.
- if tile_job_info.kml:
- with open(os.path.join(
- output_folder,
- '%d/%d/%d.kml' % (tz, tx, ty)
- ), 'wb') as f:
- f.write(generate_kml(
- tx, ty, tz, tile_job_info.tile_extension, tile_job_info.tile_size,
- get_tile_swne(tile_job_info, options), options, children
- ).encode('utf-8'))
-
- if not options.verbose and not options.quiet:
- progress_bar.log_progress()
-
-
-def optparse_init():
- """Prepare the option parser for input (argv)"""
-
- from optparse import OptionParser, OptionGroup
- usage = "Usage: %prog [options] input_file [output]"
- p = OptionParser(usage, version="%prog " + __version__)
- p.add_option("-p", "--profile", dest='profile',
- type='choice', choices=profile_list,
- help=("Tile cutting profile (%s) - default 'mercator' "
- "(Google Maps compatible)" % ",".join(profile_list)))
- p.add_option("-r", "--resampling", dest="resampling",
- type='choice', choices=resampling_list,
- help="Resampling method (%s) - default 'average'" % ",".join(resampling_list))
- p.add_option('-s', '--s_srs', dest="s_srs", metavar="SRS",
- help="The spatial reference system used for the source input data")
- p.add_option('-z', '--zoom', dest="zoom",
- help="Zoom levels to render (format:'2-5' or '10').")
- p.add_option('-e', '--resume', dest="resume", action="store_true",
- help="Resume mode. Generate only missing files.")
- p.add_option('-a', '--srcnodata', dest="srcnodata", metavar="NODATA",
- help="NODATA transparency value to assign to the input data")
- p.add_option('-d', '--tmscompatible', dest="tmscompatible", action="store_true",
- help=("When using the geodetic profile, specifies the base resolution "
- "as 0.703125 or 2 tiles at zoom level 0."))
- p.add_option("-v", "--verbose",
- action="store_true", dest="verbose",
- help="Print status messages to stdout")
- p.add_option("-q", "--quiet",
- action="store_true", dest="quiet",
- help="Disable messages and status to stdout")
- p.add_option("--processes",
- dest="nb_processes",
- type='int',
- help="Number of processes to use for tiling")
-
- # KML options
- g = OptionGroup(p, "KML (Google Earth) options",
- "Options for generated Google Earth SuperOverlay metadata")
- g.add_option("-k", "--force-kml", dest='kml', action="store_true",
- help=("Generate KML for Google Earth - default for 'geodetic' profile and "
- "'raster' in EPSG:4326. For a dataset with different projection use "
- "with caution!"))
- g.add_option("-n", "--no-kml", dest='kml', action="store_false",
- help="Avoid automatic generation of KML files for EPSG:4326")
- g.add_option("-u", "--url", dest='url',
- help="URL address where the generated tiles are going to be published")
- p.add_option_group(g)
-
- # HTML options
- g = OptionGroup(p, "Web viewer options",
- "Options for generated HTML viewers a la Google Maps")
- g.add_option("-w", "--webviewer", dest='webviewer', type='choice', choices=webviewer_list,
- help="Web viewer to generate (%s) - default 'all'" % ",".join(webviewer_list))
- g.add_option("-t", "--title", dest='title',
- help="Title of the map")
- g.add_option("-c", "--copyright", dest='copyright',
- help="Copyright for the map")
- g.add_option("-g", "--googlekey", dest='googlekey',
- help="Google Maps API key from http://code.google.com/apis/maps/signup.html")
- g.add_option("-b", "--bingkey", dest='bingkey',
- help="Bing Maps API key from https://www.bingmapsportal.com/")
- p.add_option_group(g)
-
-    p.set_defaults(verbose=False, profile="mercator", kml=False, url='',
-                   webviewer='all', copyright='', resampling='average', resume=False,
-                   googlekey='INSERT_YOUR_KEY_HERE', bingkey='INSERT_YOUR_KEY_HERE',
-                   nb_processes=1)
-
- return p
-
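-# Editor's sketch (not part of the original file): exercising the parser built
-# above; "input.tif" and "tiles" stand in for a real raster and output folder.
-_parser = optparse_init()
-_opts, _args = _parser.parse_args(["-z", "10-14", "-w", "openlayers", "input.tif", "tiles"])
-# _opts.zoom == "10-14", _opts.webviewer == "openlayers",
-# _opts.resampling == "average" (default), _args == ["input.tif", "tiles"]
-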
-
-def process_args(argv):
- parser = optparse_init()
- options, args = parser.parse_args(args=argv)
-
- # Args should be either an input file OR an input file and an output folder
- if (len(args) == 0):
- exit_with_error("You need to specify at least an input file as argument to the script")
- if (len(args) > 2):
- exit_with_error("Processing of several input files is not supported.",
- "Please first use a tool like gdal_vrtmerge.py or gdal_merge.py on the "
- "files: gdal_vrtmerge.py -o merged.vrt %s" % " ".join(args))
-
- input_file = args[0]
- if not os.path.isfile(input_file):
- exit_with_error("The provided input file %s does not exist or is not a file" % input_file)
-
- if len(args) == 2:
- output_folder = args[1]
- else:
- output_folder = os.path.basename(input_file)
-
- options = options_post_processing(options, input_file, output_folder)
-
- return input_file, output_folder, options
-
-
-def options_post_processing(options, input_file, output_folder):
- if not options.title:
- options.title = os.path.basename(input_file)
-
- if options.url and not options.url.endswith('/'):
- options.url += '/'
- if options.url:
- out_path = output_folder
- if out_path.endswith("/"):
- out_path = out_path[:-1]
- options.url += os.path.basename(out_path) + '/'
-
- # Supported options
- if options.resampling == 'average':
- try:
- if gdal.RegenerateOverview:
- pass
- except Exception:
- exit_with_error("'average' resampling algorithm is not available.",
- "Please use -r 'near' argument or upgrade to newer version of GDAL.")
-
- elif options.resampling == 'antialias':
- try:
- if numpy: # pylint:disable=W0125
- pass
- except Exception:
- exit_with_error("'antialias' resampling algorithm is not available.",
- "Install PIL (Python Imaging Library) and numpy.")
-
- try:
- os.path.basename(input_file).encode('ascii')
- except UnicodeEncodeError:
- full_ascii = False
- else:
- full_ascii = True
-
- # LC_CTYPE check
- if not full_ascii and 'UTF-8' not in os.environ.get("LC_CTYPE", ""):
- if not options.quiet:
- print("\nWARNING: "
- "You are running gdal2tiles.py with a LC_CTYPE environment variable that is "
- "not UTF-8 compatible, and your input file contains non-ascii characters. "
- "The generated sample googlemaps, openlayers or "
- "leaflet files might contain some invalid characters as a result\n")
-
- # Output the results
- if options.verbose:
- print("Options:", options)
- print("Input:", input_file)
- print("Output:", output_folder)
- print("Cache: %s MB" % (gdal.GetCacheMax() / 1024 / 1024))
- print('')
-
- return options
-
-
-class TileDetail(object):
- tx = 0
- ty = 0
- tz = 0
- rx = 0
- ry = 0
- rxsize = 0
- rysize = 0
- wx = 0
- wy = 0
- wxsize = 0
- wysize = 0
- querysize = 0
-
- def __init__(self, **kwargs):
- for key in kwargs:
- if hasattr(self, key):
- setattr(self, key, kwargs[key])
-
- def __unicode__(self):
- return "TileDetail %s\n%s\n%s\n" % (self.tx, self.ty, self.tz)
-
- def __str__(self):
- return "TileDetail %s\n%s\n%s\n" % (self.tx, self.ty, self.tz)
-
- def __repr__(self):
- return "TileDetail %s\n%s\n%s\n" % (self.tx, self.ty, self.tz)
-
-
-class TileJobInfo(object):
- """
- Plain object to hold tile job configuration for a dataset
- """
- src_file = ""
- nb_data_bands = 0
- output_file_path = ""
- tile_extension = ""
- tile_size = 0
- tile_driver = None
- kml = False
- tminmax = []
- tminz = 0
- tmaxz = 0
- in_srs_wkt = 0
- out_geo_trans = []
- ominy = 0
- is_epsg_4326 = False
- options = None
-
- def __init__(self, **kwargs):
- for key in kwargs:
- if hasattr(self, key):
- setattr(self, key, kwargs[key])
-
- def __unicode__(self):
- return "TileJobInfo %s\n" % (self.src_file)
-
- def __str__(self):
- return "TileJobInfo %s\n" % (self.src_file)
-
- def __repr__(self):
- return "TileJobInfo %s\n" % (self.src_file)
-
-
-class Gdal2TilesError(Exception):
- pass
-
-
-class GDAL2Tiles(object):
-
- def __init__(self, input_file, output_folder, options):
- """Constructor function - initialization"""
- self.out_drv = None
- self.mem_drv = None
- self.warped_input_dataset = None
- self.out_srs = None
- self.nativezoom = None
- self.tminmax = None
- self.tsize = None
- self.mercator = None
- self.geodetic = None
- self.alphaband = None
- self.dataBandsCount = None
- self.out_gt = None
- self.tileswne = None
- self.swne = None
- self.ominx = None
- self.omaxx = None
- self.omaxy = None
- self.ominy = None
-
- self.input_file = None
- self.output_folder = None
-
- # Tile format
- self.tilesize = 256
- self.tiledriver = 'PNG'
- self.tileext = 'png'
- self.tmp_dir = tempfile.mkdtemp()
- self.tmp_vrt_filename = os.path.join(self.tmp_dir, str(uuid4()) + '.vrt')
-
- # Should we read bigger window of the input raster and scale it down?
- # Note: Modified later by open_input()
- # Not for 'near' resampling
- # Not for Wavelet based drivers (JPEG2000, ECW, MrSID)
- # Not for 'raster' profile
- self.scaledquery = True
-        # How big the query window should be for scaling down
-        # Later on reset according to the chosen resampling algorithm
- self.querysize = 4 * self.tilesize
-
- # Should we use Read on the input file for generating overview tiles?
- # Note: Modified later by open_input()
- # Otherwise the overview tiles are generated from existing underlying tiles
- self.overviewquery = False
-
- self.input_file = input_file
- self.output_folder = output_folder
- self.options = options
-
- if self.options.resampling == 'near':
- self.querysize = self.tilesize
-
- elif self.options.resampling == 'bilinear':
- self.querysize = self.tilesize * 2
-
- # User specified zoom levels
- self.tminz = None
- self.tmaxz = None
- if self.options.zoom:
- minmax = self.options.zoom.split('-', 1)
- minmax.extend([''])
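-            # Padding makes a bare "10" parse as zoom_min="10", zoom_max=""
-            # (a single zoom level).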
- zoom_min, zoom_max = minmax[:2]
- self.tminz = int(zoom_min)
- if zoom_max:
- self.tmaxz = int(zoom_max)
- else:
- self.tmaxz = int(zoom_min)
-
- # KML generation
- self.kml = self.options.kml
-
- # -------------------------------------------------------------------------
- def open_input(self):
- """Initialization of the input raster, reprojection if necessary"""
- gdal.AllRegister()
-
- self.out_drv = gdal.GetDriverByName(self.tiledriver)
- self.mem_drv = gdal.GetDriverByName('MEM')
-
- if not self.out_drv:
- raise Exception("The '%s' driver was not found, is it available in this GDAL build?",
- self.tiledriver)
- if not self.mem_drv:
- raise Exception("The 'MEM' driver was not found, is it available in this GDAL build?")
-
- # Open the input file
-
- if self.input_file:
- input_dataset = gdal.Open(self.input_file, gdal.GA_ReadOnly)
- else:
- raise Exception("No input file was specified")
-
-        if not input_dataset:
-            # Note: GDAL prints the ERROR message too
-            exit_with_error("It is not possible to open the input file '%s'." % self.input_file)
-
-        if self.options.verbose:
-            print("Input file:",
-                  "( %sP x %sL - %s bands)" % (input_dataset.RasterXSize,
-                                               input_dataset.RasterYSize,
-                                               input_dataset.RasterCount))
-
- # Read metadata from the input file
- if input_dataset.RasterCount == 0:
- exit_with_error("Input file '%s' has no raster band" % self.input_file)
-
- if input_dataset.GetRasterBand(1).GetRasterColorTable():
- exit_with_error(
- "Please convert this file to RGB/RGBA and run gdal2tiles on the result.",
- "From paletted file you can create RGBA file (temp.vrt) by:\n"
- "gdal_translate -of vrt -expand rgba %s temp.vrt\n"
- "then run:\n"
- "gdal2tiles temp.vrt" % self.input_file
- )
-
- in_nodata = setup_no_data_values(input_dataset, self.options)
-
- if self.options.verbose:
- print("Preprocessed file:",
- "( %sP x %sL - %s bands)" % (input_dataset.RasterXSize,
- input_dataset.RasterYSize,
- input_dataset.RasterCount))
-
- in_srs, self.in_srs_wkt = setup_input_srs(input_dataset, self.options)
-
- self.out_srs = setup_output_srs(in_srs, self.options)
-
- # If input and output reference systems are different, we reproject the input dataset into
- # the output reference system for easier manipulation
-
- self.warped_input_dataset = None
-
- if self.options.profile in ('mercator', 'geodetic'):
-
- if not in_srs:
- exit_with_error(
- "Input file has unknown SRS.",
- "Use --s_srs ESPG:xyz (or similar) to provide source reference system.")
-
- if not has_georeference(input_dataset):
- exit_with_error(
- "There is no georeference - neither affine transformation (worldfile) "
- "nor GCPs. You can generate only 'raster' profile tiles.",
- "Either gdal2tiles with parameter -p 'raster' or use another GIS "
- "software for georeference e.g. gdal_transform -gcp / -a_ullr / -a_srs"
- )
-
- if ((in_srs.ExportToProj4() != self.out_srs.ExportToProj4()) or
- (input_dataset.GetGCPCount() != 0)):
- self.warped_input_dataset = reproject_dataset(
- input_dataset, in_srs, self.out_srs)
-
- if in_nodata:
- self.warped_input_dataset = update_no_data_values(
- self.warped_input_dataset, in_nodata, options=self.options)
- else:
- self.warped_input_dataset = update_alpha_value_for_non_alpha_inputs(
- self.warped_input_dataset, options=self.options)
-
- if self.warped_input_dataset and self.options.verbose:
- print("Projected file:", "tiles.vrt", "( %sP x %sL - %s bands)" % (
- self.warped_input_dataset.RasterXSize,
- self.warped_input_dataset.RasterYSize,
- self.warped_input_dataset.RasterCount))
-
- if not self.warped_input_dataset:
- self.warped_input_dataset = input_dataset
-
- self.warped_input_dataset.GetDriver().CreateCopy(self.tmp_vrt_filename,
- self.warped_input_dataset)
-
- # Get alpha band (either directly or from NODATA value)
- self.alphaband = self.warped_input_dataset.GetRasterBand(1).GetMaskBand()
- self.dataBandsCount = nb_data_bands(self.warped_input_dataset)
-
- # KML test
- self.isepsg4326 = False
- srs4326 = osr.SpatialReference()
- srs4326.ImportFromEPSG(4326)
- if self.out_srs and srs4326.ExportToProj4() == self.out_srs.ExportToProj4():
- self.kml = True
- self.isepsg4326 = True
- if self.options.verbose:
- print("KML autotest OK!")
-
- # Read the georeference
- self.out_gt = self.warped_input_dataset.GetGeoTransform()
-
- # Test the size of the pixel
-
- # Report error in case rotation/skew is in geotransform (possible only in 'raster' profile)
- if (self.out_gt[2], self.out_gt[4]) != (0, 0):
- exit_with_error("Georeference of the raster contains rotation or skew. "
- "Such raster is not supported. Please use gdalwarp first.")
-
- # Here we expect: pixel is square, no rotation on the raster
-
- # Output Bounds - coordinates in the output SRS
- self.ominx = self.out_gt[0]
- self.omaxx = self.out_gt[0] + self.warped_input_dataset.RasterXSize * self.out_gt[1]
- self.omaxy = self.out_gt[3]
- self.ominy = self.out_gt[3] - self.warped_input_dataset.RasterYSize * self.out_gt[1]
- # Note: maybe round(x, 14) to avoid the gdal_translate behaviour, when 0 becomes -1e-15
-
- if self.options.verbose:
- print("Bounds (output srs):", round(self.ominx, 13), self.ominy, self.omaxx, self.omaxy)
-
- # Calculating ranges for tiles in different zoom levels
- if self.options.profile == 'mercator':
-
- self.mercator = GlobalMercator()
-
- # Function which generates SWNE in LatLong for given tile
- self.tileswne = self.mercator.TileLatLonBounds
-
- # Generate table with min max tile coordinates for all zoomlevels
- self.tminmax = list(range(0, 32))
- for tz in range(0, 32):
- tminx, tminy = self.mercator.MetersToTile(self.ominx, self.ominy, tz)
- tmaxx, tmaxy = self.mercator.MetersToTile(self.omaxx, self.omaxy, tz)
- # crop tiles extending world limits (+-180,+-90)
- tminx, tminy = max(0, tminx), max(0, tminy)
- tmaxx, tmaxy = min(2**tz-1, tmaxx), min(2**tz-1, tmaxy)
- self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy)
-
- # TODO: Maps crossing 180E (Alaska?)
-
- # Get the minimal zoom level (map covers area equivalent to one tile)
- if self.tminz is None:
- self.tminz = self.mercator.ZoomForPixelSize(
- self.out_gt[1] *
- max(self.warped_input_dataset.RasterXSize,
- self.warped_input_dataset.RasterYSize) /
- float(self.tilesize))
-
-            # Get the maximal zoom level
-            # (closest zoom level at or above the native resolution of the raster)
- if self.tmaxz is None:
- self.tmaxz = self.mercator.ZoomForPixelSize(self.out_gt[1])
-
- if self.options.verbose:
- print("Bounds (latlong):",
- self.mercator.MetersToLatLon(self.ominx, self.ominy),
- self.mercator.MetersToLatLon(self.omaxx, self.omaxy))
- print('MinZoomLevel:', self.tminz)
- print("MaxZoomLevel:",
- self.tmaxz,
- "(",
- self.mercator.Resolution(self.tmaxz),
- ")")
-
- if self.options.profile == 'geodetic':
-
- self.geodetic = GlobalGeodetic(self.options.tmscompatible)
-
- # Function which generates SWNE in LatLong for given tile
- self.tileswne = self.geodetic.TileLatLonBounds
-
- # Generate table with min max tile coordinates for all zoomlevels
- self.tminmax = list(range(0, 32))
- for tz in range(0, 32):
- tminx, tminy = self.geodetic.LonLatToTile(self.ominx, self.ominy, tz)
- tmaxx, tmaxy = self.geodetic.LonLatToTile(self.omaxx, self.omaxy, tz)
- # crop tiles extending world limits (+-180,+-90)
- tminx, tminy = max(0, tminx), max(0, tminy)
- tmaxx, tmaxy = min(2**(tz+1)-1, tmaxx), min(2**tz-1, tmaxy)
- self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy)
-
- # TODO: Maps crossing 180E (Alaska?)
-
-            # Get the minimal zoom level (map covers area equivalent to one tile)
- if self.tminz is None:
- self.tminz = self.geodetic.ZoomForPixelSize(
- self.out_gt[1] *
- max(self.warped_input_dataset.RasterXSize,
- self.warped_input_dataset.RasterYSize) /
- float(self.tilesize))
-
-            # Get the maximal zoom level
-            # (closest zoom level at or above the native resolution of the raster)
- if self.tmaxz is None:
- self.tmaxz = self.geodetic.ZoomForPixelSize(self.out_gt[1])
-
- if self.options.verbose:
- print("Bounds (latlong):", self.ominx, self.ominy, self.omaxx, self.omaxy)
-
- if self.options.profile == 'raster':
-
- def log2(x):
- return math.log10(x) / math.log10(2)
-
- self.nativezoom = int(
- max(math.ceil(log2(self.warped_input_dataset.RasterXSize/float(self.tilesize))),
- math.ceil(log2(self.warped_input_dataset.RasterYSize/float(self.tilesize)))))
-
- if self.options.verbose:
- print("Native zoom of the raster:", self.nativezoom)
-
- # Get the minimal zoom level (whole raster in one tile)
- if self.tminz is None:
- self.tminz = 0
-
- # Get the maximal zoom level (native resolution of the raster)
- if self.tmaxz is None:
- self.tmaxz = self.nativezoom
-
- # Generate table with min max tile coordinates for all zoomlevels
- self.tminmax = list(range(0, self.tmaxz+1))
- self.tsize = list(range(0, self.tmaxz+1))
- for tz in range(0, self.tmaxz+1):
- tsize = 2.0**(self.nativezoom-tz)*self.tilesize
- tminx, tminy = 0, 0
- tmaxx = int(math.ceil(self.warped_input_dataset.RasterXSize / tsize)) - 1
- tmaxy = int(math.ceil(self.warped_input_dataset.RasterYSize / tsize)) - 1
- self.tsize[tz] = math.ceil(tsize)
- self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy)
-
- # Function which generates SWNE in LatLong for given tile
- if self.kml and self.in_srs_wkt:
- ct = osr.CoordinateTransformation(in_srs, srs4326)
-
- def rastertileswne(x, y, z):
- pixelsizex = (2**(self.tmaxz-z) * self.out_gt[1]) # X-pixel size in level
- west = self.out_gt[0] + x*self.tilesize*pixelsizex
- east = west + self.tilesize*pixelsizex
- south = self.ominy + y*self.tilesize*pixelsizex
- north = south + self.tilesize*pixelsizex
- if not self.isepsg4326:
- # Transformation to EPSG:4326 (WGS84 datum)
- west, south = ct.TransformPoint(west, south)[:2]
- east, north = ct.TransformPoint(east, north)[:2]
- return south, west, north, east
-
- self.tileswne = rastertileswne
- else:
- self.tileswne = lambda x, y, z: (0, 0, 0, 0) # noqa
-
- def generate_metadata(self):
- """
- Generation of main metadata files and HTML viewers (metadata related to particular
- tiles are generated during the tile processing).
- """
-
- if not os.path.exists(self.output_folder):
- os.makedirs(self.output_folder)
-
- if self.options.profile == 'mercator':
-
- south, west = self.mercator.MetersToLatLon(self.ominx, self.ominy)
- north, east = self.mercator.MetersToLatLon(self.omaxx, self.omaxy)
- south, west = max(-85.05112878, south), max(-180.0, west)
- north, east = min(85.05112878, north), min(180.0, east)
- self.swne = (south, west, north, east)
-
- # Generate googlemaps.html
- if self.options.webviewer in ('all', 'google') and self.options.profile == 'mercator':
- if (not self.options.resume or not
- os.path.exists(os.path.join(self.output_folder, 'googlemaps.html'))):
- with open(os.path.join(self.output_folder, 'googlemaps.html'), 'wb') as f:
- f.write(self.generate_googlemaps().encode('utf-8'))
-
- # Generate openlayers.html
- if self.options.webviewer in ('all', 'openlayers'):
- if (not self.options.resume or not
- os.path.exists(os.path.join(self.output_folder, 'openlayers.html'))):
- with open(os.path.join(self.output_folder, 'openlayers.html'), 'wb') as f:
- f.write(self.generate_openlayers().encode('utf-8'))
-
- # Generate leaflet.html
- if self.options.webviewer in ('all', 'leaflet'):
- if (not self.options.resume or not
- os.path.exists(os.path.join(self.output_folder, 'leaflet.html'))):
- with open(os.path.join(self.output_folder, 'leaflet.html'), 'wb') as f:
- f.write(self.generate_leaflet().encode('utf-8'))
-
- elif self.options.profile == 'geodetic':
-
- west, south = self.ominx, self.ominy
- east, north = self.omaxx, self.omaxy
- south, west = max(-90.0, south), max(-180.0, west)
- north, east = min(90.0, north), min(180.0, east)
- self.swne = (south, west, north, east)
-
- # Generate openlayers.html
- if self.options.webviewer in ('all', 'openlayers'):
- if (not self.options.resume or not
- os.path.exists(os.path.join(self.output_folder, 'openlayers.html'))):
- with open(os.path.join(self.output_folder, 'openlayers.html'), 'wb') as f:
- f.write(self.generate_openlayers().encode('utf-8'))
-
- elif self.options.profile == 'raster':
-
- west, south = self.ominx, self.ominy
- east, north = self.omaxx, self.omaxy
-
- self.swne = (south, west, north, east)
-
- # Generate openlayers.html
- if self.options.webviewer in ('all', 'openlayers'):
- if (not self.options.resume or not
- os.path.exists(os.path.join(self.output_folder, 'openlayers.html'))):
- with open(os.path.join(self.output_folder, 'openlayers.html'), 'wb') as f:
- f.write(self.generate_openlayers().encode('utf-8'))
-
- # Generate tilemapresource.xml.
- if not self.options.resume or not os.path.exists(os.path.join(self.output_folder, 'tilemapresource.xml')):
- with open(os.path.join(self.output_folder, 'tilemapresource.xml'), 'wb') as f:
- f.write(self.generate_tilemapresource().encode('utf-8'))
-
- if self.kml:
- # TODO: Maybe problem for not automatically generated tminz
- # The root KML should contain links to all tiles in the tminz level
- children = []
- xmin, ymin, xmax, ymax = self.tminmax[self.tminz]
- for x in range(xmin, xmax+1):
- for y in range(ymin, ymax+1):
- children.append([x, y, self.tminz])
-            # Generate Root KML
-            if (not self.options.resume or not
-                    os.path.exists(os.path.join(self.output_folder, 'doc.kml'))):
-                with open(os.path.join(self.output_folder, 'doc.kml'), 'wb') as f:
-                    f.write(generate_kml(
-                        None, None, None, self.tileext, self.tilesize, self.tileswne,
-                        self.options, children
-                    ).encode('utf-8'))
-
- def generate_base_tiles(self):
- """
- Generation of the base tiles (the lowest in the pyramid) directly from the input raster
- """
-
- if not self.options.quiet:
- print("Generating Base Tiles:")
-
- if self.options.verbose:
- print('')
- print("Tiles generated from the max zoom level:")
- print("----------------------------------------")
- print('')
-
- # Set the bounds
- tminx, tminy, tmaxx, tmaxy = self.tminmax[self.tmaxz]
-
- ds = self.warped_input_dataset
- tilebands = self.dataBandsCount + 1
- querysize = self.querysize
-
- if self.options.verbose:
- print("dataBandsCount: ", self.dataBandsCount)
- print("tilebands: ", tilebands)
-
- tcount = (1+abs(tmaxx-tminx)) * (1+abs(tmaxy-tminy))
- ti = 0
-
- tile_details = []
-
- tz = self.tmaxz
- for ty in range(tmaxy, tminy-1, -1):
- for tx in range(tminx, tmaxx+1):
-
- ti += 1
- tilefilename = os.path.join(
- self.output_folder, str(tz), str(tx), "%s.%s" % (ty, self.tileext))
- if self.options.verbose:
- print(ti, '/', tcount, tilefilename)
-
- if self.options.resume and os.path.exists(tilefilename):
- if self.options.verbose:
- print("Tile generation skipped because of --resume")
- continue
-
- # Create directories for the tile
- if not os.path.exists(os.path.dirname(tilefilename)):
- os.makedirs(os.path.dirname(tilefilename))
-
- if self.options.profile == 'mercator':
- # Tile bounds in EPSG:3857
- b = self.mercator.TileBounds(tx, ty, tz)
- elif self.options.profile == 'geodetic':
- b = self.geodetic.TileBounds(tx, ty, tz)
-
- # Don't scale up by nearest neighbour, better change the querysize
- # to the native resolution (and return smaller query tile) for scaling
-
- if self.options.profile in ('mercator', 'geodetic'):
- rb, wb = self.geo_query(ds, b[0], b[3], b[2], b[1])
-
- # Pixel size in the raster covering query geo extent
- nativesize = wb[0] + wb[2]
- if self.options.verbose:
- print("\tNative Extent (querysize", nativesize, "): ", rb, wb)
-
- # Tile bounds in raster coordinates for ReadRaster query
- rb, wb = self.geo_query(ds, b[0], b[3], b[2], b[1], querysize=querysize)
-
- rx, ry, rxsize, rysize = rb
- wx, wy, wxsize, wysize = wb
-
- else: # 'raster' profile:
-
- tsize = int(self.tsize[tz]) # tilesize in raster coordinates for actual zoom
- xsize = self.warped_input_dataset.RasterXSize # size of the raster in pixels
- ysize = self.warped_input_dataset.RasterYSize
- if tz >= self.nativezoom:
- querysize = self.tilesize
-
-                    rx = tx * tsize
- rxsize = 0
- if tx == tmaxx:
- rxsize = xsize % tsize
- if rxsize == 0:
- rxsize = tsize
-
- rysize = 0
- if ty == tmaxy:
- rysize = ysize % tsize
- if rysize == 0:
- rysize = tsize
- ry = ysize - (ty * tsize) - rysize
-
- wx, wy = 0, 0
- wxsize = int(rxsize/float(tsize) * self.tilesize)
- wysize = int(rysize/float(tsize) * self.tilesize)
- if wysize != self.tilesize:
- wy = self.tilesize - wysize
-
- # Read the source raster if anything is going inside the tile as per the computed
- # geo_query
- tile_details.append(
- TileDetail(
- tx=tx, ty=ty, tz=tz, rx=rx, ry=ry, rxsize=rxsize, rysize=rysize, wx=wx,
- wy=wy, wxsize=wxsize, wysize=wysize, querysize=querysize,
- )
- )
-
- conf = TileJobInfo(
- src_file=self.tmp_vrt_filename,
- nb_data_bands=self.dataBandsCount,
- output_file_path=self.output_folder,
- tile_extension=self.tileext,
- tile_driver=self.tiledriver,
- tile_size=self.tilesize,
- kml=self.kml,
- tminmax=self.tminmax,
- tminz=self.tminz,
- tmaxz=self.tmaxz,
- in_srs_wkt=self.in_srs_wkt,
- out_geo_trans=self.out_gt,
- ominy=self.ominy,
- is_epsg_4326=self.isepsg4326,
- options=self.options,
- )
-
- return conf, tile_details
-
- def geo_query(self, ds, ulx, uly, lrx, lry, querysize=0):
- """
- For given dataset and query in cartographic coordinates returns parameters for ReadRaster()
- in raster coordinates and x/y shifts (for border tiles). If the querysize is not given, the
- extent is returned in the native resolution of dataset ds.
-
- raises Gdal2TilesError if the dataset does not contain anything inside this geo_query
- """
- geotran = ds.GetGeoTransform()
- rx = int((ulx - geotran[0]) / geotran[1] + 0.001)
- ry = int((uly - geotran[3]) / geotran[5] + 0.001)
- rxsize = int((lrx - ulx) / geotran[1] + 0.5)
- rysize = int((lry - uly) / geotran[5] + 0.5)
-
- if not querysize:
- wxsize, wysize = rxsize, rysize
- else:
- wxsize, wysize = querysize, querysize
-
- # Coordinates should not go out of the bounds of the raster
- wx = 0
- if rx < 0:
- rxshift = abs(rx)
- wx = int(wxsize * (float(rxshift) / rxsize))
- wxsize = wxsize - wx
- rxsize = rxsize - int(rxsize * (float(rxshift) / rxsize))
- rx = 0
- if rx+rxsize > ds.RasterXSize:
- wxsize = int(wxsize * (float(ds.RasterXSize - rx) / rxsize))
- rxsize = ds.RasterXSize - rx
-
- wy = 0
- if ry < 0:
- ryshift = abs(ry)
- wy = int(wysize * (float(ryshift) / rysize))
- wysize = wysize - wy
- rysize = rysize - int(rysize * (float(ryshift) / rysize))
- ry = 0
- if ry+rysize > ds.RasterYSize:
- wysize = int(wysize * (float(ds.RasterYSize - ry) / rysize))
- rysize = ds.RasterYSize - ry
-
- return (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize)
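-
-    # Editor's sketch (not part of the original file): geo_query() numbers for a
-    # 10 m/pixel raster with geotransform (0, 10, 0, 1000, 0, -10):
-    #   geo_query(ds, ulx=0, uly=1000, lrx=2560, lry=-1560, querysize=0)
-    #   -> (rx=0, ry=0, rxsize=256, rysize=256), (wx=0, wy=0, wxsize=256, wysize=256)
-    # since (2560 - 0) / 10 = 256 pixels and (-1560 - 1000) / -10 = 256 lines.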
-
- def generate_tilemapresource(self):
- """
- Template for tilemapresource.xml. Returns filled string. Expected variables:
- title, north, south, east, west, isepsg4326, projection, publishurl,
- zoompixels, tilesize, tileformat, profile
- """
-
- args = {}
- args['title'] = self.options.title
- args['south'], args['west'], args['north'], args['east'] = self.swne
- args['tilesize'] = self.tilesize
- args['tileformat'] = self.tileext
- args['publishurl'] = self.options.url
- args['profile'] = self.options.profile
-
- if self.options.profile == 'mercator':
- args['srs'] = "EPSG:3857"
- elif self.options.profile == 'geodetic':
- args['srs'] = "EPSG:4326"
- elif self.options.s_srs:
- args['srs'] = self.options.s_srs
- elif self.out_srs:
- args['srs'] = self.out_srs.ExportToWkt()
- else:
- args['srs'] = ""
-
- s = """
-
- %(title)s
-
- %(srs)s
-
-
-
-
-""" % args # noqa
- for z in range(self.tminz, self.tmaxz+1):
- if self.options.profile == 'raster':
- s += """ \n""" % (
- args['publishurl'], z, (2**(self.nativezoom-z) * self.out_gt[1]), z)
- elif self.options.profile == 'mercator':
- s += """ \n""" % (
- args['publishurl'], z, 156543.0339/2**z, z)
- elif self.options.profile == 'geodetic':
- s += """ \n""" % (
- args['publishurl'], z, 0.703125/2**z, z)
- s += """
-
- """
- return s
-
- def generate_googlemaps(self):
- """
- Template for googlemaps.html implementing Overlay of tiles for 'mercator' profile.
- It returns filled string. Expected variables:
- title, googlemapskey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat,
- publishurl
- """
- args = {}
- args['title'] = self.options.title
- args['googlemapskey'] = self.options.googlekey
- args['south'], args['west'], args['north'], args['east'] = self.swne
- args['minzoom'] = self.tminz
- args['maxzoom'] = self.tmaxz
- args['tilesize'] = self.tilesize
- args['tileformat'] = self.tileext
- args['publishurl'] = self.options.url
- args['copyright'] = self.options.copyright
-
- s = r"""
-
-
- %(title)s
-
-
-
-
-
-
-
-
-
-
-
-
- """ % args # noqa
-
- return s
-
- def generate_leaflet(self):
- """
- Template for leaflet.html implementing overlay of tiles for 'mercator' profile.
- It returns filled string. Expected variables:
- title, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl
- """
-
- args = {}
- args['title'] = self.options.title.replace('"', '\\"')
- args['htmltitle'] = self.options.title
- args['south'], args['west'], args['north'], args['east'] = self.swne
-        args['centerlat'] = (args['north'] + args['south']) / 2.
-        args['centerlon'] = (args['west'] + args['east']) / 2.
- args['minzoom'] = self.tminz
- args['maxzoom'] = self.tmaxz
- args['beginzoom'] = self.tmaxz
- args['tilesize'] = self.tilesize # not used
- args['tileformat'] = self.tileext
- args['publishurl'] = self.options.url # not used
- args['copyright'] = self.options.copyright.replace('"', '\\"')
-
- s = """
-
-
-
-
- %(htmltitle)s
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- """ % args # noqa
-
- return s
-
- def generate_openlayers(self):
- """
- Template for openlayers.html implementing overlay of available Spherical Mercator layers.
-
- It returns filled string. Expected variables:
- title, bingkey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl
- """
-
- args = {}
- args['title'] = self.options.title
- args['bingkey'] = self.options.bingkey
- args['south'], args['west'], args['north'], args['east'] = self.swne
- args['minzoom'] = self.tminz
- args['maxzoom'] = self.tmaxz
- args['tilesize'] = self.tilesize
- args['tileformat'] = self.tileext
- args['publishurl'] = self.options.url
- args['copyright'] = self.options.copyright
- if self.options.tmscompatible:
- args['tmsoffset'] = "-1"
- else:
- args['tmsoffset'] = ""
- if self.options.profile == 'raster':
- args['rasterzoomlevels'] = self.tmaxz+1
- args['rastermaxresolution'] = 2**(self.nativezoom) * self.out_gt[1]
-
- s = r"""
-
- %(title)s
-
- """ % args # noqa
-
- if self.options.profile == 'mercator':
- s += """
-
- """ % args
-
- s += """
-
-
-
-
-
-
-
-
-
-