Updated mat file conversion: layered, layered vectors...
parent fd555e413e
commit 65c8ac67cd
2 changed files with 152 additions and 44 deletions
@@ -9,6 +9,8 @@ from multiprocessing.pool import ThreadPool
 import numpy as np
 import scipy.io as sio
 
+from .read_mat import ReadSwash
+
 parser = argparse.ArgumentParser(description="Convert swash output to numpy")
 parser.add_argument("-v", "--verbose", action="count", default=0)
 parser.add_argument("-c", "--config", default="config.ini")
@@ -26,48 +28,16 @@ inp = pathlib.Path(config.get("post", "inp"))
 inp.mkdir(parents=True, exist_ok=True)
 
 log.info(f"Reading swash output from '{sws_out}'")
-raw_tsec = sio.loadmat(sws_out.joinpath("tsec.mat"))
-i = np.fromiter(
-    (k[5:] for k in raw_tsec.keys() if re.compile(r"^Tsec_").match(k)), dtype="U10"
-)
-t = np.fromiter((raw_tsec[f"Tsec_{k}"][0, 0] * 10**3 for k in i), dtype=np.uintc)
-np.save(inp.joinpath("t"), t)
-del raw_tsec
-raw_xp = sio.loadmat(sws_out.joinpath("xp.mat"), variable_names="Xp")
-x = raw_xp["Xp"][0]
-np.save(inp.joinpath("x"), x)
-del raw_xp
-
-raw_botl = sio.loadmat(sws_out.joinpath("botl.mat"), variable_names="Botlev")
-botl = raw_botl["Botlev"][0]
-np.save(inp.joinpath("botl"), botl)
-del raw_botl, botl
-
-raw_watl = sio.loadmat(sws_out.joinpath("watl.mat"))
-watl = np.asarray(
-    [raw_watl[i0][0] for i0 in np.char.add("Watlev_", i)], dtype=np.single
-)
-np.save(inp.joinpath("watl"), watl)
-del raw_watl, watl
-
-raw_vel = sio.loadmat(sws_out.joinpath("vel.mat"))
-vel_x = np.asarray([raw_vel[i0][0] for i0 in np.char.add("vel_x_", i)], dtype=np.single)
-np.save(inp.joinpath("vel_x"), vel_x)
-del raw_vel, vel_x
-
-raw_zk = sio.loadmat(sws_out.joinpath("zk.mat"))
-n_zk = (len(raw_zk.keys()) - 3) // t.size
-zk = np.asarray(
-    [
-        raw_zk[i0][0]
-        for i0 in np.char.add(
-            np.char.replace("zki_", "i", np.arange(n_zk).astype("U1"), count=1)[
-                None, :
-            ],
-            i[:, None],
-        ).reshape(-1)
-    ],
-    dtype=np.single,
-).reshape((t.size, n_zk, x.size))
-np.save(inp.joinpath("zk"), zk)
-del raw_zk, zk
+swr = ReadSwash(sws_out, inp)
+swr.save("t")
+swr.save("x")
+swr.save("c", "botl", "Botlev")
+swr.save("s", "watl", "Watlev")
+swr.save("s", "press", "Press")
+swr.save("v", "vel", "vel")
+swr.save("sk", "zk", "zk")
+swr.save("sk", "nhprsk", "Nprs_k")
+swr.save("sk", "pressk", "Pres_k")
+swr.save("sk", "vz", "w")
+swr.save("vk", "velk", "vel_k")
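Note (illustrative, not part of the commit): the conversion script now delegates all of the .mat-to-.npy work to the new ReadSwash class in swash/processing/read_mat.py, shown below. A minimal, self-contained sketch of how that interface is driven; the paths and the absolute import path are assumptions (the script itself uses a relative import), and the shape comments are inferred from the reader implementations:

# Hypothetical usage sketch of the new ReadSwash interface (paths are placeholders).
import pathlib

import numpy as np

from swash.processing.read_mat import ReadSwash

sws_out = pathlib.Path("swash_output")   # directory containing the SWASH .mat files
inp = pathlib.Path("inp")                # destination for the converted .npy arrays
inp.mkdir(parents=True, exist_ok=True)

swr = ReadSwash(sws_out, inp)
swr.save("t")                        # read_t: time steps; must run before the readers below
swr.save("x")                        # read_x: grid positions; sets the grid size
swr.save("c", "botl", "Botlev")      # read_const: constant field, shape (n_x,)
swr.save("s", "watl", "Watlev")      # read_scalar: scalar time series, shape (n_t, n_x)
swr.save("v", "vel", "vel")          # read_vector: x/y components, each (n_t, n_x)
swr.save("sk", "zk", "zk")           # read_scalar_lay: layered scalar, (n_t, n_layers, n_x)
swr.save("vk", "velk", "vel_k")      # read_vector_lay: layered x/y components

watl = np.load(inp.joinpath("watl.npy"))   # converted arrays come back as plain .npy files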
swash/processing/read_mat.py (new file, 138 lines)
@@ -0,0 +1,138 @@
+import logging
+import subprocess
+import tempfile
+
+import numpy as np
+import scipy.io as sio
+
+log = logging.getLogger("read_mat")
+
+
+class ReadSwash:
+    def __init__(self, root, out):
+        self._root = root
+        self._out = out
+        self._n_x = None
+        self._n_t = None
+        self._i = None
+        self._t = None
+        self._x = None
+
+    def read_raw(self, var, var_names=None):
+        res = sio.loadmat(self._root.joinpath(f"{var}.mat"), variable_names=var_names)
+        res.pop("__header__")
+        res.pop("__version__")
+        res.pop("__globals__")
+        return res
+
+    def read_t(self):
+        raw_t = self.read_raw("tsec")
+        self._i = np.char.lstrip(np.fromiter(raw_t.keys(), dtype="U15"), "Tsec_")
+        self._t = np.fromiter(
+            (raw_t[k][0, 0] * 10**3 for k in np.char.add("Tsec_", self._i)),
+            dtype=np.uintc,
+        )
+        self._n_t = self._t.size
+        return self.t
+
+    def read_x(self):
+        self._x = self.read_const("xp", "Xp")
+        self._n_x = self._x.size
+        return self.x
+
+    def read_scalar(self, var, var_name):
+        raw = self.read_raw(var)
+        return np.asarray(
+            [raw[i0][0] for i0 in np.char.add(f"{var_name}_", self._i)], dtype=np.single
+        )
+
+    def read_const(self, var, var_name):
+        raw = self.read_raw(var, var_name)
+        return raw[var_name][0]
+
+    def read_vector(self, var, var_name):
+        raw = self.read_raw(var)
+        return (
+            np.asarray(
+                [raw[i0][0] for i0 in np.char.add(f"{var_name}_x_", self._i)],
+                dtype=np.single,
+            ),
+            np.asarray(
+                [raw[i0][0] for i0 in np.char.add(f"{var_name}_y_", self._i)],
+                dtype=np.single,
+            ),
+        )
+
+    def read_scalar_lay(self, var, var_name):
+        raw = self.read_raw(var)
+        n = len(raw.keys()) // self._n_t
+        ra = (n,) if f"{var_name}0_{self._i[0]}" in raw.keys() else (1, n + 1)
+        return np.asarray(
+            [
+                raw[i0][0]
+                for i0 in np.char.add(
+                    np.char.replace(f"{var_name}[]_", "[]", np.arange(*ra).astype("U1"))[
+                        None, :
+                    ],
+                    self._i[:, None],
+                ).reshape(-1)
+            ],
+            dtype=np.single,
+        ).reshape((self._n_t, n, self._n_x))
+
+    def read_vector_lay(self, var, var_name):
+        raw = self.read_raw(var)
+        n = len(raw.keys()) // (self._n_t * 2)
+        ra = (n,) if f"{var_name}0_x_{self._i[0]}" in raw.keys() else (1, n + 1)
+        return (
+            np.asarray(
+                [
+                    raw[i0][0]
+                    for i0 in np.char.add(
+                        np.char.replace(
+                            f"{var_name}[]_x_", "[]", np.arange(*ra).astype("U1")
+                        )[None, :],
+                        self._i[:, None],
+                    ).reshape(-1)
+                ],
+                dtype=np.single,
+            ).reshape((self._n_t, n, self._n_x)),
+            np.asarray(
+                [
+                    raw[i0][0]
+                    for i0 in np.char.add(
+                        np.char.replace(
+                            f"{var_name}[]_y_", "[]", np.arange(*ra).astype("U1")
+                        )[None, :],
+                        self._i[:, None],
+                    ).reshape(-1)
+                ],
+                dtype=np.single,
+            ).reshape((self._n_t, n, self._n_x)),
+        )
+
+    def save(self, t, var=None, var_name=None):
+        fct = {
+            "t": self.read_t,
+            "x": self.read_x,
+            "s": self.read_scalar,
+            "c": self.read_const,
+            "v": self.read_vector,
+            "sk": self.read_scalar_lay,
+            "vk": self.read_vector_lay,
+        }
+        if t in ("x", "t"):
+            log.info(f"Converting {t}")
+            np.save(self._out.joinpath(t), fct[t]())
+        else:
+            log.info(f"Converting {var}")
+            np.save(self._out.joinpath(var), fct[t](var, var_name))
+
+    @property
+    def t(self):
+        return self._t
+
+    @property
+    def x(self):
+        return self._x
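Note (illustrative, not part of the commit): the layered readers read_scalar_lay and read_vector_lay rebuild the per-layer MATLAB variable names by broadcasting np.char.replace (layer index) against np.char.add (time-step suffix) and then reshape the collected rows into (n_t, n_layers, n_x). A standalone sketch of that key expansion, with an invented variable name, timestamps, and layer count:

# Standalone demo of the key expansion used by read_scalar_lay (values are made up).
import numpy as np

i = np.array(["000.000", "000.500", "001.000"])  # time-step suffixes, as parsed by read_t
n = 2                                            # number of layers found in the .mat file

# "zk[]_" -> ["zk0_", "zk1_"], then broadcast against the time-step suffixes
layers = np.char.replace("zk[]_", "[]", np.arange(n).astype("U1"))
keys = np.char.add(layers[None, :], i[:, None]).reshape(-1)
print(keys)
# ['zk0_000.000' 'zk1_000.000' 'zk0_000.500' 'zk1_000.500' 'zk0_001.000' 'zk1_001.000']
# read_scalar_lay looks up each key in the loaded .mat dict and reshapes the
# resulting n_t * n rows of length n_x into an (n_t, n, n_x) array.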