Switched to multithreading

Edgar P. Burkhart 2022-03-15 11:37:59 +01:00
parent d55ae54d32
commit 64e5cac36f
Signed by: edpibu
GPG key ID: 9833D3C5A25BD227
2 changed files with 20 additions and 23 deletions

@@ -31,6 +31,9 @@ class ReadSwash:
             return self.read_nohead(path).reshape((self._n_t, self._n_x))[0, :]
         return self.read_nohead(path).reshape((self._n_t, self._n_x))
 
+    def read_const(self, path):
+        return self.read_scalar(path, const=True)
+
     def read_vector(self, path):
         return self.read_nohead(path).reshape((self._n_t, 2, self._n_x))
 
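The new read_const method wraps read_scalar with const=True, so constant fields such as the bottom level get the same one-argument signature as the other readers. That uniform signature is what lets the post-processing script below map output names directly to bound reader methods. A minimal sketch of the pattern, assuming an rsws instance and a sws_out directory of SWASH .dat files (names taken from the script below):

    # Hypothetical illustration: every reader takes only a path,
    # so a name -> reader mapping can be applied uniformly.
    readers = {
        "watl": rsws.read_scalar,  # time-varying free surface
        "botl": rsws.read_const,   # constant bottom level, same call signature
    }
    data = {name: f(sws_out.joinpath(name).with_suffix(".dat")) for name, f in readers.items()}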

@@ -1,6 +1,7 @@
 import argparse
 import configparser
 import logging
+from multiprocessing import ThreadPool
 import pathlib
 
 import numpy as np
@@ -27,28 +28,21 @@ rsws = ReadSwash()
 rsws.read_time(sws_out.joinpath("tsec.dat"))
 rsws.read_x(sws_out.joinpath("xp.dat"))
 
-inp.mkdir(exist_ok=True)
-log.info(f"Wrinting output in '{inp}'")
-log.info("Reading 'dep'")
-np.save(inp.joinpath("dep"), rsws.read_scalar(sws_out.joinpath("dep.dat")))
-log.info("Reading 'botl'")
-np.save(
-    inp.joinpath("botl"),
-    rsws.read_scalar(sws_out.joinpath("botl.dat"), const=True),
-)
-log.info("Reading 'watl'")
-np.save(inp.joinpath("watl"), rsws.read_scalar(sws_out.joinpath("watl.dat")))
-log.info("Reading 'vel'")
-np.save(inp.joinpath("vel"), rsws.read_vector(sws_out.joinpath("vel.dat")))
-log.info("Reading 'press'")
-np.save(inp.joinpath("press"), rsws.read_scalar(sws_out.joinpath("press.dat")))
-log.info("Reading 'zk'")
-np.save(inp.joinpath("zk"), rsws.read_scalar_lay(sws_out.joinpath("zk.dat")))
-log.info("Reading 'velk'")
-np.save(
-    inp.joinpath("velk"), rsws.read_vector_lay(sws_out.joinpath("velk.dat"))
-)
-log.info(f"Writing npz file in '{inp}'")
+var = {
+    "dep": rsws.read_scalar,
+    "botl": rsws.read_const,
+    "watl": rsws.read_scalar,
+    "vel": rsws.read_vector,
+    "press": rsws.read_scalar,
+    "zk": rsws.read_scalar_lay,
+    "velk": rsws.read_vector_lay,
+}
 
 inp.mkdir(exist_ok=True)
-np.savez_compressed(inp.joinpath("sws"), t=rsws.t, x=rsws.x, **rsws.data)
+with ThreadPool() as pool:
+    pool.map(
+        lambda name, f: np.save(
+            inp.joinpath(name), f(sws_out.joinpath(name).with_suffix(".dat"))
+        ),
+        var.items(),
+    )
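Two details are worth flagging before reusing the committed snippet: in the standard library, ThreadPool lives in multiprocessing.pool rather than in multiprocessing itself, and Pool.map passes each element of var.items() to the callable as a single tuple, so a two-argument lambda would fail. A minimal working sketch of the same idea, assuming the rsws, var, inp and sws_out objects defined above:

    from multiprocessing.pool import ThreadPool  # not exposed by multiprocessing directly

    def save_one(item):
        # item is one ("name", reader) pair taken from var.items()
        name, f = item
        np.save(inp.joinpath(name), f(sws_out.joinpath(name).with_suffix(".dat")))

    with ThreadPool() as pool:
        # map() hands each pair to save_one as a single argument;
        # pool.starmap(lambda name, f: ..., var.items()) would unpack it instead
        pool.map(save_one, var.items())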