Bathymetry generator
commit 3d9eec1ef7
11 changed files with 248391 additions and 0 deletions

.gitignore (vendored, new file, 152 additions)
@@ -0,0 +1,152 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintainted in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

bathymetry/.gitignore (vendored, new file, 3 additions)
@@ -0,0 +1,3 @@
/1_scad
/2_stl
/3_pd

(unnamed file, new file, 6 additions)
@@ -0,0 +1,6 @@
translate([150, 0, 0])
mirror([1, 0, 0])
rotate([90, 0, 0])
translate([0, 0, -10])
linear_extrude(20)
polygon({});

bathymetry/0_data/MNT_COTIER_BAIE_SJL_TANDEM_20m_WGS84_NM_ZNEG.glz (new file, 247958 additions)
File diff suppressed because it is too large.

bathymetry/0_data/artha_coords.csv (new file, 2 additions)
@@ -0,0 +1,2 @@
lat,lon
43.398489,-1.672400

bathymetry/0_data/base.scad (new file, 4 additions)
@@ -0,0 +1,4 @@
rotate([90, 0, 0])
translate([0, 0, -10])
linear_extrude(20)
polygon({});
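
base.scad is a template rather than a complete OpenSCAD model: the {} inside polygon({}); is a placeholder that the generator fills with a 2D point list before handing the result to openscad. A minimal sketch of that substitution, assuming it is run from the bathymetry/ directory and using an illustrative triangle instead of the real profile data:

import numpy as np

# Fill the polygon({}) placeholder the same way generate/__main__.py does:
# str.format with a point array rendered by np.array2string.
points = np.array([[0.0, 0.0], [10.0, 0.0], [10.0, 5.0]])  # illustrative only

with open('0_data/base.scad') as f:
    template = f.read()

scad_source = template.format(
    np.array2string(points, threshold=np.inf, separator=',')
)
print(scad_source)  # ...linear_extrude(20) / polygon([[ 0., 0.], [10., 0.], [10., 5.]]);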

bathymetry/0_data/bloc0.csv (new file, 8 additions)
@@ -0,0 +1,8 @@
x,z
-4,-0.5
11.7,-0.5
11.7,2.5
9.25,8.2
0,8.2
0,2.5
-4,2.5

bathymetry/0_data/bloc1.csv (new file, 5 additions)
@@ -0,0 +1,5 @@
x,z
9.25,-0.5
9.25,4.75
16.70,4.75
16.6,-0.5
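
Each bloc CSV lists the vertices of a cross-section polygon as x,z pairs (these are the shapes plotted later under the label 'Caisson'). A minimal sketch of how the generator loads and re-indexes them, assuming it is run from the bathymetry/ directory and using L0 = 120 from config.ini:

import pandas as pd

# Load a bloc cross-section as the generator does (x becomes the index),
# then flip it into profile coordinates, matching `bloc.index = L0 - bloc.index`.
bloc0 = pd.read_csv('0_data/bloc0.csv', index_col='x')
bloc0.index = 120 - bloc0.index
print(bloc0)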

bathymetry/0_data/points.csv (new file, 3 additions)
@@ -0,0 +1,3 @@
lat,lon
43.398133,-1.672285
43.400464,-1.67302

bathymetry/config.ini (new file, 34 additions)
@@ -0,0 +1,34 @@
[main]
logging = INFO
plot = False
N = 1024
L0 = 120
L1 = 30
dir = 350
bathy_max=-15

[data]
root = 0_data
bathy = MNT_COTIER_BAIE_SJL_TANDEM_20m_WGS84_NM_ZNEG.glz
artha = artha_coords.csv
base_scad = base.scad
points = points.csv
blocs = bloc0.csv,bloc1.csv

[scad]
root = 1_scad
bathy = bathy.scad
bloc0 = bloc0.scad
bloc1 = bloc1.scad
rubble = rub.scad

[stl]
root = 2_stl
bathy = bathy.stl
bloc0 = bloc0.stl
bloc1 = bloc1.stl
rubble = rub.stl

[pandas]
root = 3_pd
file = data.hdf
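
The section roots mirror the pipeline stages: 0_data holds the inputs, while 1_scad, 2_stl and 3_pd (the folders ignored in bathymetry/.gitignore) receive the generated OpenSCAD sources, STL meshes and the HDF5 store. A minimal sketch of how the generator reads this file with configparser, assuming it is run from the bathymetry/ directory and shown here only for the comma-separated blocs entry:

import configparser
import pathlib

# Read config.ini as generate/__main__.py does and resolve the bloc CSV paths.
config = configparser.ConfigParser()
config.read('config.ini')

data_root = pathlib.Path(config['data']['root'])  # 0_data
bloc_paths = [data_root / name for name in config['data']['blocs'].split(',')]
print(bloc_paths)  # [PosixPath('0_data/bloc0.csv'), PosixPath('0_data/bloc1.csv')]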

bathymetry/generate/__main__.py (new file, 216 additions)
@@ -0,0 +1,216 @@
from pprint import pp
import logging
import configparser
import pathlib
import subprocess
from time import time

import pandas as pd
import numpy as np
from scipy.interpolate import griddata

_t0 = time()
config = configparser.ConfigParser()
config.read('config.ini')

if config['main']['plot'] == 'True': import matplotlib.pyplot as plt

logging.basicConfig(level=config['main']['logging'])
log = logging.getLogger('bathymetry')


# --- Initialization
log.info('Initialization')
folders = {
    'data': pathlib.Path(config['data']['root']),
    'scad': pathlib.Path(config['scad']['root']),
    'stl': pathlib.Path(config['stl']['root']),
    'pandas': pathlib.Path(config['pandas']['root']),
}
for path in folders.values():
    path.mkdir(exist_ok=True)

bathy = pd.read_csv(
    folders['data'].joinpath(config['data']['bathy']),
    sep=' ',
    names=('lon', 'lat', 'z'),
)
artha = pd.read_csv(
    folders['data'].joinpath(config['data']['artha']),
)

earth_radius = 6371e3
L0 = float(config['main']['L0'])
L1 = float(config['main']['L1'])
direction = float(config['main']['dir'])

artha = artha.append(pd.DataFrame([
    {
        'lat': artha.lat.at[0] \
            + np.cos(direction*np.pi/180) * L0/earth_radius * 180/np.pi,
        'lon': artha.lon.at[0] \
            + np.sin(direction*np.pi/180) * L0/earth_radius * 180/np.pi \
            / np.cos(artha.lat.at[0] * np.pi/180),
    },
    {
        'lat': artha.lat.at[0] \
            - np.cos(direction*np.pi/180) * L1/earth_radius * 180/np.pi,
        'lon': artha.lon.at[0] \
            - np.sin(direction*np.pi/180) * L1/earth_radius * 180/np.pi \
            / np.cos(artha.lat.at[0] * np.pi/180),
    },
], index=[-1, 1]))

# --- Interpolation
log.info('Interpolating data')
N = int(config['main']['N'])
line = pd.DataFrame({
    'x': np.linspace(0, L0+L1, N),
    'lat': np.linspace(artha.lat.at[-1], artha.lat.at[1], N),
    'lon': np.linspace(artha.lon.at[-1], artha.lon.at[1], N),
})
line.set_index('x', inplace=True)

line['z'] = griddata(
    bathy[['lon','lat']],
    bathy.z,
    line[['lon','lat']],
    method='linear',
)

# --- Adding blocs
log.info('Adding blocs')
b = config['data']['blocs'].split(',')
blocs = pd.Series([
    pd.read_csv(
        folders['data'].joinpath(path),
        index_col='x',
    ) for path in b
])
for bloc in blocs:
    bloc.index = L0 - bloc.index


#line.z = all_blocs.z.fillna(line.z)

lim = float(config['main']['bathy_max'])
bathy_line = line.z.clip(upper=lim)
rubble_line = line.z[line.z > lim]

data_dict = {}
for i in range(blocs.size):
    data_dict[f'bloc{i}'] = blocs.iat[i].reset_index().values
for name, data in (
    ('bathy', bathy_line),
    ('rubble', rubble_line),
):
    data_dict[name] = np.concatenate((
        data.reset_index().values,
        [[data.index.max(), data.min()],
         [data.index.min(),data.min()]]
    ))

# --- Generating SCAD

with open(folders['data'].joinpath(config['data']['base_scad'])) as bsf:
    base_scad = bsf.read()
for (name, data) in data_dict.items():
    log.info(f'Generating {name}')
    log.info('\tGenerating SCAD file')
    scad_file = folders['scad'].joinpath(config['scad'][name])
    with open(scad_file, 'w') as osf:
        osf.write(base_scad.format(
            np.array2string(
                data,
                threshold=np.inf,
                separator=','
            )
        ))
    log.info('\tGenerating STL file')
    subprocess.run(('openscad', scad_file, '-o',
                    folders['stl'].joinpath(config['stl'][name])),
                   check=True,
                   capture_output=True)


# --- Saving pandas
log.info('Saving Pandas')

with pd.HDFStore(
    folders['pandas'].joinpath(config['pandas']['file']),
    mode='w',
    complib='blosc',
) as hdf:
    for name, data in (
        ('bathy', bathy_line),
        ('rubble', rubble_line),
    ):
        hdf.put(name, data)
    for i, bloc in blocs.items():
        hdf.put(f'bloc{i}', bloc)


_t1 = time()
log.info(f'Program ended successfully after {_t1-_t0:.2f}s')

# --- Plotting

if config['main']['plot'] == 'True':
    log.info('Plotting data')
    flt = (
        ((bathy.lon-artha.lon.at[0]).abs() < 0.002) & \
        ((bathy.lat-artha.lat.at[0]).abs() < 0.002)
    )

    fig, ax = plt.subplots()
    ax.scatter(
        bathy.lon[flt],
        bathy.lat[flt],
        c=bathy.z[flt],
        marker='1',
        lw=1,
    )
    ax.scatter(
        artha.lon,
        artha.lat,
        color='k',
        marker='+',
        lw=1,
    )
    ax.set(
        aspect='equal',
    )

    fig, ax = plt.subplots()
    ax.plot(
        bathy_line.index,
        bathy_line,
        color='k',
        lw=1,
        zorder=10,
        label='Bathymetry',
    )
    ax.plot(
        rubble_line.index,
        rubble_line,
        color='r',
        lw=1,
        zorder=11,
        label='Rubble',
    )
    blocs.apply(lambda bloc: ax.fill_between(
        bloc.index,
        bloc.z,
        color='k',
        zorder=9,
        alpha=.1,
        label='Caisson',
    ))
    ax.set(
        aspect='equal',
        xlim=(bathy_line.index.min(), bathy_line.index.max()),
    )
    ax.grid()
    fig.legend()

    plt.show(block=True)
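
Two details of the script are worth spelling out. The rows appended to artha under index -1 and 1 are the two ends of the survey profile: the script moves L0 metres along the bearing dir (and L1 metres the opposite way) from the Artha point, converting metres to degrees with a small-displacement, spherical-Earth approximation (delta_lat = d*cos(theta)/R * 180/pi, delta_lon = d*sin(theta)/(R*cos(lat)) * 180/pi). In the resulting profile coordinate x, the index -1 endpoint sits at x = 0, the Artha point at x = L0 and the index 1 endpoint at x = L0 + L1, which is how the bloc cross-sections, re-indexed as L0 - x, end up positioned around the Artha point. A standalone sketch of the endpoint calculation, using the values from artha_coords.csv and config.ini:

import numpy as np

# Reproduce the endpoint offsets computed in generate/__main__.py.
R = 6371e3                               # earth_radius, metres
lat0, lon0 = 43.398489, -1.672400        # 0_data/artha_coords.csv
L0, L1, direction = 120.0, 30.0, 350.0   # config.ini [main]

theta = np.deg2rad(direction)

def offset(distance):
    """Shift (lat0, lon0) by `distance` metres along bearing `direction`."""
    dlat = np.cos(theta) * distance / R * 180 / np.pi
    dlon = np.sin(theta) * distance / R * 180 / np.pi / np.cos(np.deg2rad(lat0))
    return lat0 + dlat, lon0 + dlon

print(offset(L0))    # profile start (index -1 in the script, x = 0)
print(offset(-L1))   # profile end   (index  1 in the script, x = L0 + L1)

Note that DataFrame.append, used here to add those two rows, was removed in pandas 2.0; on a newer pandas the equivalent construction is pd.concat.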