【PPSCI Export&Infer No.14】add export and inference function for deepcfd #994

Merged: 9 commits, Sep 26, 2024
20 changes: 19 additions & 1 deletion examples/deepcfd/conf/deepcfd.yaml
@@ -20,7 +20,7 @@ hydra:
subdir: ./

# general settings
mode: train # running mode: train/eval
mode: train # running mode: train/eval/export/infer
seed: 2023
output_dir: ${hydra:run.dir}
log_freq: 20
@@ -60,3 +60,21 @@ EVAL:
pretrained_model_path: null
eval_with_no_grad: true
batch_size: 8

INFER:
pretrained_model_path: "https://paddle-org.bj.bcebos.com/paddlescience/models/deepcfd/deepcfd_pretrained.pdparams"
export_path: ./inference/deepcfd
pdmodel_path: ${INFER.export_path}.pdmodel
pdiparams_path: ${INFER.export_path}.pdiparams
device: gpu
engine: native
precision: fp32
onnx_path: ${INFER.export_path}.onnx
ir_optim: true
min_subgraph_size: 10
gpu_mem: 6000
gpu_id: 0
max_batch_size: 100
num_cpu_threads: 4
total_size: 981
batch_size: 100
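
Note: pdmodel_path, pdiparams_path, and onnx_path are derived from INFER.export_path via OmegaConf interpolation, so only export_path needs to be set or overridden. A minimal sketch (not part of this PR) of how those interpolations resolve, assuming standard Hydra/OmegaConf semantics:

# Sketch: resolving the INFER path interpolations with OmegaConf.
from omegaconf import OmegaConf

cfg = OmegaConf.create(
    {
        "INFER": {
            "export_path": "./inference/deepcfd",
            "pdmodel_path": "${INFER.export_path}.pdmodel",
            "pdiparams_path": "${INFER.export_path}.pdiparams",
        }
    }
)
print(cfg.INFER.pdmodel_path)    # ./inference/deepcfd.pdmodel
print(cfg.INFER.pdiparams_path)  # ./inference/deepcfd.pdiparams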
246 changes: 246 additions & 0 deletions examples/deepcfd/deepcfd.py
@@ -450,12 +450,258 @@ def metric_expr(
predict_and_save_plot(test_x, test_y, 0, solver, PLOT_DIR)


def export(cfg: DictConfig):
model = ppsci.arch.UNetEx(**cfg.MODEL)

solver = ppsci.solver.Solver(
model,
pretrained_model_path=cfg.INFER.pretrained_model_path,
)

from paddle.static import InputSpec

input_spec = [
{
key: InputSpec(
[None, cfg.CHANNEL_SIZE, cfg.X_SIZE, cfg.Y_SIZE], "float32", name=key
)
for key in model.input_keys
},
]

solver.export(input_spec, cfg.INFER.export_path)
print(f"Model has been exported to {cfg.INFER.export_path}")


def predict_and_save_plot_infer(
x: np.ndarray,
y: np.ndarray,
pred_y: np.ndarray,
index: int,
plot_dir: str,
):
"""Make prediction and save visualization of result during inference.

Args:
x (np.ndarray): Input of test dataset.
y (np.ndarray): Ground truth output of test dataset.
pred_y (np.ndarray): Predicted output from inference.
index (int): Index of data to visualize.
plot_dir (str): Directory to save plot.
"""
import os

import matplotlib.pyplot as plt

# Extract ground-truth and predicted values for each channel
u_true = y[index, 0, :, :]
v_true = y[index, 1, :, :]
p_true = y[index, 2, :, :]

u_pred = pred_y[index, 0, :, :]
v_pred = pred_y[index, 1, :, :]
p_pred = pred_y[index, 2, :, :]

# Compute absolute errors
error_u = np.abs(u_true - u_pred)
error_v = np.abs(v_true - v_pred)
error_p = np.abs(p_true - p_pred)

# Compute the min/max of each channel
min_u, max_u = u_true.min(), u_true.max()
min_v, max_v = v_true.min(), v_true.max()
min_p, max_p = p_true.min(), p_true.max()

min_error_u, max_error_u = error_u.min(), error_u.max()
min_error_v, max_error_v = error_v.min(), error_v.max()
min_error_p, max_error_p = error_p.min(), error_p.max()

# Start plotting
plt.figure(figsize=(15, 10))

# Plot Ux
plt.subplot(3, 3, 1)
plt.title("OpenFOAM Ux", fontsize=18)
plt.imshow(
np.transpose(u_true),
cmap="jet",
vmin=min_u,
vmax=max_u,
origin="lower",
extent=[0, 260, 0, 120],
)
plt.colorbar(orientation="horizontal")
plt.ylabel("Ux", fontsize=18)

plt.subplot(3, 3, 2)
plt.title("DeepCFD Ux", fontsize=18)
plt.imshow(
np.transpose(u_pred),
cmap="jet",
vmin=min_u,
vmax=max_u,
origin="lower",
extent=[0, 260, 0, 120],
)
plt.colorbar(orientation="horizontal")

plt.subplot(3, 3, 3)
plt.title("Error Ux", fontsize=18)
plt.imshow(
np.transpose(error_u),
cmap="jet",
vmin=min_error_u,
vmax=max_error_u,
origin="lower",
extent=[0, 260, 0, 120],
)
plt.colorbar(orientation="horizontal")

# Plot Uy
plt.subplot(3, 3, 4)
plt.imshow(
np.transpose(v_true),
cmap="jet",
vmin=min_v,
vmax=max_v,
origin="lower",
extent=[0, 260, 0, 120],
)
plt.colorbar(orientation="horizontal")
plt.ylabel("Uy", fontsize=18)

plt.subplot(3, 3, 5)
plt.imshow(
np.transpose(v_pred),
cmap="jet",
vmin=min_v,
vmax=max_v,
origin="lower",
extent=[0, 260, 0, 120],
)
plt.colorbar(orientation="horizontal")

plt.subplot(3, 3, 6)
plt.imshow(
np.transpose(error_v),
cmap="jet",
vmin=min_error_v,
vmax=max_error_v,
origin="lower",
extent=[0, 260, 0, 120],
)
plt.colorbar(orientation="horizontal")

# Plot p
plt.subplot(3, 3, 7)
plt.imshow(
np.transpose(p_true),
cmap="jet",
vmin=min_p,
vmax=max_p,
origin="lower",
extent=[0, 260, 0, 120],
)
plt.colorbar(orientation="horizontal")
plt.ylabel("p", fontsize=18)

plt.subplot(3, 3, 8)
plt.imshow(
np.transpose(p_pred),
cmap="jet",
vmin=min_p,
vmax=max_p,
origin="lower",
extent=[0, 260, 0, 120],
)
plt.colorbar(orientation="horizontal")

plt.subplot(3, 3, 9)
plt.imshow(
np.transpose(error_p),
cmap="jet",
vmin=min_error_p,
vmax=max_error_p,
origin="lower",
extent=[0, 260, 0, 120],
)
plt.colorbar(orientation="horizontal")

plt.tight_layout()
plt.savefig(os.path.join(plot_dir, f"cfd_{index}.png"), bbox_inches="tight")
plt.close()


def inference(cfg: DictConfig):
from deploy.python_infer import pinn_predictor

with open(cfg.DATAX_PATH, "rb") as file:
x = pickle.load(file)
with open(cfg.DATAY_PATH, "rb") as file:
y = pickle.load(file)

_, test_dataset = split_tensors(x, y, ratio=cfg.SLIPT_RATIO)
test_x, test_y = test_dataset

input_dict = {cfg.MODEL.input_key: test_x}

predictor = pinn_predictor.PINNPredictor(cfg)

output_dict = predictor.predict(input_dict, batch_size=cfg.INFER.batch_size)

actual_output_key = cfg.MODEL.output_key

output_keys = (
actual_output_key
if isinstance(actual_output_key, (list, tuple))
else [actual_output_key]
)
if len(output_keys) != len(output_dict):
raise ValueError(
"The number of output_keys does not match the number of output_dict keys."
)

output_dict = {
origin: value for origin, value in zip(output_keys, output_dict.values())
}

concat_output = output_dict[actual_output_key]

if concat_output.ndim != 4 or concat_output.shape[1] != 3:
raise ValueError(
f"Unexpected shape of '{actual_output_key}': {concat_output.shape}. Expected (batch_size, 3, x_size, y_size)."
)

try:
u_pred = concat_output[:, 0, :, :] # Ux
v_pred = concat_output[:, 1, :, :] # Uy
p_pred = concat_output[:, 2, :, :] # p
except IndexError as e:
print(f"Error in splitting '{actual_output_key}': {e}")
raise

pred_y = np.stack([u_pred, v_pred, p_pred], axis=1)

PLOT_DIR = os.path.join(cfg.output_dir, "infer_visual")
os.makedirs(PLOT_DIR, exist_ok=True)

# Plot results for the first five samples
for index in range(min(5, pred_y.shape[0])):
predict_and_save_plot_infer(test_x, test_y, pred_y, index, PLOT_DIR)

print(f"Inference completed. Results are saved in {PLOT_DIR}")


@hydra.main(version_base=None, config_path="./conf", config_name="deepcfd.yaml")
def main(cfg: DictConfig):
if cfg.mode == "train":
train(cfg)
elif cfg.mode == "eval":
evaluate(cfg)
elif cfg.mode == "export":
export(cfg)
elif cfg.mode == "infer":
inference(cfg)
else:
raise ValueError(f"cfg.mode should in ['train', 'eval'], but got '{cfg.mode}'")
