More efficient ? .dat file reading
This commit is contained in:
parent
2eea373e79
commit
a59bcccde7
2 changed files with 16 additions and 21 deletions
|
@ -16,14 +16,12 @@ class ReadSwash:
|
|||
|
||||
@classmethod
def read_nohead(cls, path):
    """Read a headerless SWASH ``.dat`` file of whitespace-separated floats.

    Parameters
    ----------
    path : pathlib.Path
        File whose contents are float tokens separated by arbitrary runs
        of spaces, tabs and newlines.

    Returns
    -------
    numpy.ndarray
        1-D float64 array of every value in file order.
    """
    log.info(f"Loading '{path}'")
    # str.split() with no argument collapses any whitespace run and drops
    # empty tokens, which is exactly what the previous
    # ``sed 's/\s\+/\n/g;/^$/d'`` pass through a temporary file achieved —
    # so the subprocess, the external sed dependency and the extra disk
    # round-trip are unnecessary.  The legacy line-by-line accumulation
    # loop that followed the early return was unreachable and is removed.
    return np.asarray(path.read_text().split(), dtype=float)
|
||||
|
||||
def read_time(self, path):
    """Load the time stamps in *path* and cache their sorted unique values.

    Stores the result on ``self._t``; duplicates arise because every grid
    point repeats the same stamp, hence the ``np.unique`` reduction.
    """
    stamps = self.read_nohead(path)
    self._t = np.unique(stamps)
|
||||
|
|
|
# Map each SWASH output quantity to the reader that parses its .dat dump.
# NOTE(review): the diff also carried commented-out duplicates of the
# layered keys ("pressk", "nhprsk", "zk", "velk", "vz") — they look like
# leftovers from experimentation; confirm against the repository before
# deleting them for good.
var = {
    #"dep": rsws.read_scalar,
    "botl": rsws.read_const,
    "watl": rsws.read_scalar,
    "pressk": rsws.read_scalar_lay,
    "nhprsk": rsws.read_scalar_lay,
    "zk": rsws.read_scalar_lay,
    "velk": rsws.read_vector_lay,
    "vz": rsws.read_scalar_lay,
    "vel": rsws.read_vector,
}

# Convert every <name>.dat under sws_out into <name>.npy under inp.
# The work is dominated by file I/O, so a thread pool (one worker per
# variable) overlaps the reads despite the GIL.  This replaces the former
# sequential for-loop; the merged diff text had also dropped the closing
# parenthesis of the pool.map(...) call, restored here.
with ThreadPool(len(var)) as pool:
    log.info("Converting all data")
    pool.map(
        lambda item: np.save(
            inp.joinpath(item[0]),
            item[1](sws_out.joinpath(item[0]).with_suffix(".dat")),
        ),
        var.items(),
    )
|
||||
|
|
Loading…
Reference in a new issue