commit 36e083eee0c9840601565fee08bac8c41c19bf93
Author: gabriel venberg
Date:   Tue Mar 2 14:07:44 2021 -0600

    initial commit. basic heatmap generator. still have to make many options configurable.

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..2540e81
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,146 @@
+*.png
+
+# Created by https://www.toptal.com/developers/gitignore/api/python
+# Edit at https://www.toptal.com/developers/gitignore?templates=python
+
+### Python ###
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+pytestdebug.log
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+doc/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+pythonenv*
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# profiling data
+.prof
+
+# End of https://www.toptal.com/developers/gitignore/api/python
diff --git a/outputHightMap.py b/outputHightMap.py
new file mode 100755
index 0000000..0625a11
--- /dev/null
+++ b/outputHightMap.py
@@ -0,0 +1,111 @@
+#! /usr/bin/env python3
+#command line arguments:
+# --help, -h, outputs usage of the program
+# -x, -y, set the width and height of the output image
+# --output, -o, name of output file. if there are multiple input files, there will be a number prepended to this.
+# after all command line arguments, file or files (space separated) to process.
+
+import numpy as np
+import sys, argparse, laspy, logging
+import seaborn as sns; sns.set_theme()
+import matplotlib.pyplot as plt
+from PIL import Image
+
+logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
+
+imgX=100
+imgY=100
+
+#TODO: make it iterate over multiple files.
+inFile = sys.argv[1]
+lasFile = laspy.file.File(inFile, mode = "r")
+#import each dimension scaled.
+x = lasFile.x
+y = lasFile.y
+z = lasFile.z
+maxes = np.array(lasFile.header.max)*np.array(lasFile.header.scale)
+mins = np.array(lasFile.header.min)*np.array(lasFile.header.scale)
+logging.debug(f'max values is {maxes}')
+logging.debug(f'min values is {mins}')
+intensity = lasFile.intensity
+
+#dimension that will be z (top down) dimension in final heatmap. TODO: auto detect this based on dimension with least variance.
+zDim=0
+xDim=1
+yDim=2
+
+points = np.stack((x,y,z,intensity), axis=-1)
+#points should now look like
+#[[x,y,z,intensity]
+# [x,y,z,intensity]
+# ...
+# [x,y,z,intensity]
+# [x,y,z,intensity]]
+
+logging.debug(f'points is\n{points}')
+
+xRange = maxes[xDim]-mins[xDim]
+yRange = maxes[yDim]-mins[yDim]
+zRange = maxes[zDim]-mins[zDim]
+def sort(array):
+    #sort by zDim column, first to last.
+    logging.debug(f'zDim sliced points is\n{array[:,zDim]}')
+    #the [::-1] reverses the resulting array, so that sortedPoints will be from biggest to smallest.
+    ind = np.argsort(array[:,zDim])[::-1]
+    sortedPoints = array[ind]
+    logging.debug(f'sortedPoints is\n{sortedPoints}')
+    return sortedPoints
+
+sortedPoints = sort(points)
+
+imageArray = np.zeros((imgX, imgY))
+
+def scale(array, xRange, yRange, maxX, maxY):
+    logging.debug(f'xRange is {xRange} and yRange is {yRange}')
+    xScale = maxX/xRange
+    yScale = maxY/yRange
+
+    scaledArray = sortedPoints[:, 0:3]
+    scaledArray[:,xDim]=scaledArray[:,xDim]-mins[xDim]
+    scaledArray[:,xDim]=scaledArray[:,xDim]*xScale
+    logging.debug(f'xmin in scaledArray is {scaledArray[:,xDim].min()}')
+    logging.debug(f'xmax in scaledArray is {scaledArray[:,xDim].max()}')
+
+    scaledArray[:,yDim]=scaledArray[:,yDim]-mins[yDim]
+    scaledArray[:,yDim]=scaledArray[:,yDim]*yScale
+    logging.debug(f'ymin in scaledArray is {scaledArray[:,yDim].min()}')
+    logging.debug(f'ymax in scaledArray is {scaledArray[:,yDim].max()}')
+    logging.debug(f'scaledArray is\n{scaledArray}')
+    return scaledArray
+
+scaledArray = scale(points, xRange, yRange, imgX, imgY)
+
+def isInxyRange(xMin, xMax, yMin, yMax, xVal, yVal):
+    return (xMin<=xVal) and (xVal<xMax) and (yMin<=yVal) and (yVal<yMax)
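The listing is cut off here: the hunk header promises 111 lines of outputHightMap.py, but the remaining steps that bin the scaled points into imageArray and write out the heatmap are not shown. The sketch below is only a guess at what that missing rasterization and output step might look like, not the author's code from this commit: the helper name rasterize, the "highest point per cell wins" rule, and the output filename heatmap.png are assumptions, while imgX, imgY, xDim, yDim, zDim, scaledArray, imageArray, sns.heatmap, and plt come from the file above.

#hypothetical continuation of outputHightMap.py; not part of the commit.
def rasterize(scaledArray, imgX, imgY, xDim, yDim, zDim):
    #start from an empty image and track which cells have already been filled.
    image = np.zeros((imgX, imgY))
    filled = np.zeros((imgX, imgY), dtype=bool)
    for point in scaledArray:
        #clamp the scaled coordinates to valid pixel indices.
        px = min(max(int(point[xDim]), 0), imgX-1)
        py = min(max(int(point[yDim]), 0), imgY-1)
        #scaledArray is ordered from highest to lowest zDim value, so the first
        #point that lands in a cell is the highest one; keep only that value.
        if not filled[px, py]:
            image[px, py] = point[zDim]
            filled[px, py] = True
    return image

imageArray = rasterize(scaledArray, imgX, imgY, xDim, yDim, zDim)
sns.heatmap(imageArray)
plt.savefig('heatmap.png')

Because the points are sorted before binning, each pixel keeps the highest point that falls inside it, which fits the top-down height-map intent of the script; the actual file may instead average points per cell or plot intensity rather than elevation.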