ppak10 committed on
Commit
c128a53
·
1 Parent(s): f7709b6

Adds scripts

Browse files
scripts/01_generate_views.py ADDED
@@ -0,0 +1,545 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ Generate all views for every unprocessed simulation case.
4
+
5
+ W (the adaptive crop half-width) is derived directly from the laser spot
6
+ radius in the prepin file — no temperature scan needed.
7
+
8
+ Single pass over source timesteps produces:
9
+ • top-down 2D projection (50×50, 1 mm × 1 mm) per field
10
+ • adaptive-crop 3D volume (Z×W×W) per field
11
+
12
+ Outputs written to views/<case>/:
13
+ metadata.json simulation parameters + per-field stats
14
+ cropped.npz all fields (N, Z, W, W), timestep, power,
15
+ velocity, window_cells
16
+ <field>/top_down.gif matplotlib 2D animation (1 mm × 1 mm)
17
+ <field>/cropped.gif PyVista 3D volume render animation
18
+
19
+ Cases that already have cropped.npz are skipped automatically.
20
+ Use --workers to control parallelism (default: cpu_count // 2).
21
+ """
22
+
23
+ import argparse
24
+ import json
25
+ import multiprocessing
26
+ import re
27
+ import shutil
28
+ import zipfile
29
+ from pathlib import Path
30
+ from typing import NamedTuple
31
+
32
+ import imageio
33
+ import matplotlib.animation as animation
34
+ import matplotlib.pyplot as plt
35
+ import numpy as np
36
+ import pyvista as pv
37
+ from tqdm import tqdm
38
+
39
# ── Domain constants (CGS: cm) ────────────────────────────────────────────────
CELL_SIZE_CM = 0.002   # uniform mesh cell size: 0.002 cm = 20 µm
TOP_DOWN_HALF = 25     # fixed 1 mm window → 50 cells → ±25
LIQUIDUS_K = 1697.15   # NOTE(review): unused below — prepin's tl1 is used instead; confirm before removing
T_AMBIENT = 299.15     # ambient temperature (K); also the pad value for temperature crops
BUFFER_CELLS = 5       # safety margin added to the adaptive crop half-width
# Ghost cell counts added by FLOW-3D at array boundaries
GHOST_Z = 2
GHOST_Y = 1
GHOST_X = 1
49
+
50
+
51
+ # ── Field configuration ───────────────────────────────────────────────────────
52
class Field(NamedTuple):
    """Rendering and statistics configuration for one exported field."""

    name: str                 # key inside the npz archives / output folder name
    label: str                # human-readable colorbar / title label
    cmap: str                 # matplotlib colormap name
    project: str              # "max" or "sum" along Z for 2-D top-down
    vmin_fixed: float | None  # fixed lower color limit, or None → use vmin_pct
    vmax_pct: float = 99.9    # percentile defining the upper color limit
    vmin_pct: float = 0.0     # percentile defining the lower limit when vmin_fixed is None
    sparse: bool = False      # percentile over nonzero cells only
61
+
62
+
63
# One entry per exported view; FIELD_MAP provides O(1) lookup by field name.
FIELDS = [
    Field("temperature", "Temperature (K)", "inferno", "max", T_AMBIENT),
    Field("fraction_of_fluid", "Fluid depth (cells)", "Blues", "sum", None, 99.9, 1.0),
    Field("liquid_label", "Liquid region", "plasma", "max", 0.0, 100.0),
    Field("melt_region", "Melt region", "hot", "max", 0.0, 99.9, 0.0, sparse=True),
    Field("pressure", "Pressure (dyne/cm²)", "RdBu_r", "max", None, 99.9, 0.1),
    Field("velocity_magnitude", "Velocity magnitude (cm/s)", "viridis", "max", 0.0, 99.9, 0.0, sparse=True),
    Field("temperature_gradient", "Temperature gradient (raw)", "magma", "max", 0.0, 99.9, 0.0, sparse=True),
    Field("gradient_magnitude", "|∇T| from components (K/cm)", "magma", "max", 0.0, 99.9, 0.0, sparse=True),
]
FIELD_MAP = {f.name: f for f in FIELDS}
74
+
75
+
76
+ # ── Prepin parsing ────────────────────────────────────────────────────────────
77
+
78
def find_prepin(case_dir: Path) -> Path:
    """Locate the FLOW-3D prepin file inside *case_dir*.

    Matches any ``prepin.*`` file. When several are present the
    lexicographically first one is returned, so repeated runs are
    deterministic (``Path.glob`` order is filesystem-dependent).

    Raises:
        FileNotFoundError: if no prepin file exists in *case_dir*.
    """
    matches = sorted(case_dir.glob("prepin.*"))
    if not matches:
        raise FileNotFoundError(f"No prepin file in {case_dir}")
    return matches[0]
83
+
84
+
85
def parse_prepin(text: str) -> dict:
    """Extract simulation parameters from FLOW-3D prepin text.

    Returns a dict with mesh origin, beam start position, scan velocity,
    spot radii, laser power (rounded to whole watts) and characteristic
    temperatures. All length/velocity values are CGS (cm, cm/s).

    Raises:
        ValueError: if an expected parameter is missing from *text* —
            naming the offending pattern instead of the opaque
            AttributeError the previous ``re.search(...).group(1)``
            chain produced on a miss.
    """
    num = r"([\d.eE+-]+)"

    def f(pat: str) -> float:
        m = re.search(pat, text)
        if m is None:
            raise ValueError(f"prepin parameter not found: {pat!r}")
        return float(m.group(1))

    power_raw = f(r"powlbm\(1,1\)\s*=\s*" + num)  # CGS power (erg/s)
    return {
        "px1": f(r"px\(1\)\s*=\s*" + num),                 # cm, mesh X start
        "py1": f(r"py\(1\)\s*=\s*" + num),                 # cm, mesh Y start
        "xb0": f(r"xb0lbm\(1\)\s*=\s*" + num),             # cm
        "yb0": f(r"yb0lbm\(1\)\s*=\s*" + num),             # cm
        "velocity_cms": f(r"utlbm\(1,1\)\s*=\s*" + num),   # cm/s
        "spot_radius": f(r"rflbm\(1\)\s*=\s*" + num),      # cm
        "gauss_radius": f(r"rblbm\(1\)\s*=\s*" + num),     # cm
        "power_w": round(power_raw / 1e7),                 # erg/s → W
        "t_liquidus": f(r"tl1\s*=\s*" + num),
        "t_solidus": f(r"ts1\s*=\s*" + num),
        "t_finish": f(r"twfin\s*=\s*" + num),
    }
101
+
102
+
103
def beam_x_index(t: float, pp: dict) -> int:
    """Cell index of the beam centre along X, measured from the mesh origin (px1).

    The beam moves at constant velocity from xb0; half a cell is
    subtracted so the index refers to a cell centre.
    """
    absolute_x = pp["xb0"] + t * pp["velocity_cms"]
    offset = absolute_x - pp["px1"] - CELL_SIZE_CM / 2
    return round(offset / CELL_SIZE_CM)
107
+
108
+
109
def domain_from_raw(raw: dict, pp: dict) -> tuple[int, int, int, int]:
    """Return (nz, ny, nx, y_center) from the first npz array and prepin.

    FLOW-3D appends ghost cells at the end of each axis (Z: +2, Y: +1,
    X: +1); those are stripped from the reported extents. ``y_center``
    is the beam Y position expressed as a cell index within the mesh.
    """
    _, z_dim, y_dim, x_dim = raw["temperature"].shape  # (1, nz+2, ny+1, nx+1)
    y_center = round((pp["yb0"] - pp["py1"]) / CELL_SIZE_CM)
    return z_dim - GHOST_Z, y_dim - GHOST_Y, x_dim - GHOST_X, y_center
122
+
123
+
124
def crop_half_from_prepin(spot_radius_cm: float) -> int:
    """Derive the crop half-width (in cells) from the laser spot radius.

    Uses 4× the spot radius as a conservative bound for the melt-pool
    half-width in Y, then adds BUFFER_CELLS as a safety margin.
    """
    spot_cells = spot_radius_cm / CELL_SIZE_CM
    return int(np.ceil(spot_cells * 4)) + BUFFER_CELLS
131
+
132
+
133
+ # ── Cropping helpers ──────────────────────────────────────────────────────────
134
+
135
+ def _crop_params(center: int, half: int, size: int) -> tuple[int, int, int, int]:
136
+ """Compute (lo, hi, pad_left, pad_right) for a centered crop of width 2*half.
137
+
138
+ Always guarantees (hi - lo) + pad_left + pad_right == 2 * half, even when
139
+ the window is partially or fully outside [0, size).
140
+ """
141
+ lo = center - half
142
+ hi = center + half
143
+ pl = max(0, -lo); lo = max(0, lo)
144
+ pr = max(0, hi - size); hi = min(size, hi)
145
+ lo = min(lo, hi) # clamp when window is past the end
146
+ pr = 2 * half - pl - (hi - lo) # ensure exact output width
147
+ return lo, hi, pl, pr
148
+
149
+
150
def top_down_2d(proj: np.ndarray, beam_x: int, y_center: int) -> np.ndarray:
    """Crop a (Y, X) projection to the fixed 1 mm × 1 mm beam-following window.

    Output is always (TOP_DOWN_HALF*2, TOP_DOWN_HALF*2); regions outside
    the simulation domain are zero-padded.
    """
    # X axis: the window follows the beam position.
    xlo, xhi, pad_l, pad_r = _crop_params(beam_x, TOP_DOWN_HALF, proj.shape[1])
    window = proj[:, xlo:xhi]
    if pad_l or pad_r:
        window = np.pad(window, ((0, 0), (pad_l, pad_r)), constant_values=0.0)
    # Y axis: the window is centred on the beam Y position.
    ylo, yhi, pad_t, pad_b = _crop_params(y_center, TOP_DOWN_HALF, window.shape[0])
    window = window[ylo:yhi, :]
    if pad_t or pad_b:
        window = np.pad(window, ((pad_t, pad_b), (0, 0)), constant_values=0.0)
    return window
163
+
164
+
165
def crop_3d(arr: np.ndarray, beam_x: int, half: int, y_center: int,
            pad_val: float = 0.0) -> np.ndarray:
    """Crop (Z, Y, X[, C]) to a (Z, 2*half, 2*half[, C]) square window.

    The window follows the beam in X and is centred on *y_center* in Y;
    out-of-domain cells are filled with *pad_val*.
    """
    xlo, xhi, pad_xl, pad_xr = _crop_params(beam_x, half, arr.shape[2])
    ylo, yhi, pad_yt, pad_yb = _crop_params(y_center, half, arr.shape[1])
    window = arr[:, ylo:yhi, xlo:xhi]
    if not (pad_xl or pad_xr or pad_yt or pad_yb):
        return window
    widths = [(0, 0), (pad_yt, pad_yb), (pad_xl, pad_xr)]
    if arr.ndim == 4:
        widths.append((0, 0))  # never pad the trailing component axis
    return np.pad(window, widths, constant_values=pad_val)
176
+
177
+
178
+ # ── Per-timestep extraction ───────────────────────────────────────────────────
179
+
180
def extract(
    raw: dict, beam_x: int, crop_half: int,
    nz: int, ny: int, nx: int, y_center: int,
) -> tuple[dict, dict]:
    """Return (top_down, cropped) dicts from one loaded npz.

    top_down[field]          : (TOP_DOWN_HALF*2, TOP_DOWN_HALF*2)
    cropped[field]           : (nz, W, W) scalars
    cropped['vx_vy_vz']      : (nz, W, W, 3)
    cropped['dtdx_dtdy_dtdz']: (nz, W, W, 3)
    """
    # Drop the leading singleton axis and the trailing ghost cells.
    def slc(k):
        return raw[k][0, :nz, :ny, :nx]

    # Collapse Z for the top-down view: "max" for intensive fields,
    # "sum" turns fraction_of_fluid into a fluid-depth cell count.
    def proj(arr, method):
        return arr.max(axis=0) if method == "max" else arr.sum(axis=0)

    td, cr = {}, {}

    # Scalar fields stored directly in the npz archive.
    for name, method in [
        ("temperature", "max"),
        ("fraction_of_fluid", "sum"),
        ("liquid_label", "max"),
        ("melt_region", "max"),
        ("pressure", "max"),
        ("temperature_gradient", "max"),
    ]:
        arr = slc(name)
        # Temperature pads with ambient so out-of-domain cells look "cold"
        # rather than like absolute zero; everything else pads with 0.
        pad = T_AMBIENT if name == "temperature" else 0.0
        td[name] = top_down_2d(proj(arr, method), beam_x, y_center)
        cr[name] = crop_3d(arr, beam_x, crop_half, y_center, pad_val=pad)

    # Velocity: keep the cropped components and derive the magnitude.
    vel = slc("vx_vy_vz")
    vmag = np.linalg.norm(vel, axis=-1)
    td["velocity_magnitude"] = top_down_2d(proj(vmag, "max"), beam_x, y_center)
    cr["velocity_magnitude"] = crop_3d(vmag, beam_x, crop_half, y_center)
    cr["vx_vy_vz"] = crop_3d(vel, beam_x, crop_half, y_center)

    # Temperature gradient: same treatment as velocity.
    grad = slc("dtdx_dtdy_dtdz")
    gmag = np.linalg.norm(grad, axis=-1)
    td["gradient_magnitude"] = top_down_2d(proj(gmag, "max"), beam_x, y_center)
    cr["gradient_magnitude"] = crop_3d(gmag, beam_x, crop_half, y_center)
    cr["dtdx_dtdy_dtdz"] = crop_3d(grad, beam_x, crop_half, y_center)

    return td, cr
225
+
226
+
227
+ # ── GIF: top-down (matplotlib) ────────────────────────────────────────────────
228
+
229
def make_top_down_gif(
    frames: np.ndarray, field: Field, case_name: str,
    out_path: Path, fps: int, max_frames: int,
) -> None:
    """Write a matplotlib GIF animating the 2-D top-down frames.

    Args:
        frames: (N, H, W) stack of top-down projections.
        field: rendering configuration (cmap, color-limit policy).
        case_name: shown in the plot title.
        out_path: destination ``.gif`` path.
        fps: playback frame rate.
        max_frames: frames are subsampled with an integer stride so at
            most roughly this many end up in the GIF.
    """
    stride = max(1, len(frames) // max_frames)
    gf = frames[::stride]
    # Color limits from percentiles; sparse fields ignore zero cells so
    # the scale isn't dominated by empty background. If a sparse field
    # has no nonzero cells, fall back to the full (all-zero) stack.
    src = gf[gf > 0] if field.sparse else gf
    vmax = float(np.percentile(src if src.size else gf, field.vmax_pct))
    vmin = field.vmin_fixed if field.vmin_fixed is not None \
        else float(np.percentile(src if src.size else gf, field.vmin_pct))

    fig, ax = plt.subplots(figsize=(5, 5))
    im = ax.imshow(gf[0], origin="lower", cmap=field.cmap,
                   vmin=vmin, vmax=vmax, extent=[0, 1, 0, 1])
    ax.set_xlabel("X (mm)"); ax.set_ylabel("Y (mm)")
    ax.set_title(f"{case_name}\n{field.label}")
    plt.colorbar(im, ax=ax, label=field.label)
    plt.tight_layout()

    def _update(i):
        im.set_data(gf[i])
        return [im]

    anim = animation.FuncAnimation(fig, _update, frames=len(gf),
                                   interval=1000 // fps, blit=True)
    anim.save(out_path, writer="pillow", fps=fps)
    # Close this specific figure: a bare plt.close() only closes the
    # *current* figure, which is fragile if other figures exist.
    plt.close(fig)
256
+
257
+
258
+ # ── GIF: cropped 3D volume (PyVista) ─────────────────────────────────────────
259
+
260
def _render_pyvista(vol: np.ndarray, cmap: str, opacity,
                    vmin: float, vmax: float) -> np.ndarray:
    """Volume-render one (Z, Y, X) array off-screen; return an RGB screenshot.

    *opacity* is forwarded to PyVista's ``add_volume`` and may be a preset
    name (e.g. "sigmoid") or an explicit list of control points.
    """
    nz, ny, nx = vol.shape
    # ImageData dimensions are ordered (x, y, z); spacing is the isotropic cell size.
    grid = pv.ImageData(dimensions=(nx, ny, nz),
                        spacing=(CELL_SIZE_CM,) * 3)
    # Transpose to (x, y, z) then Fortran-ravel so X varies fastest,
    # matching VTK's point-data ordering.
    grid.point_data["v"] = vol.transpose(2, 1, 0).ravel(order="F")
    pl = pv.Plotter(off_screen=True, window_size=[600, 500])
    pl.set_background("black")
    pl.add_volume(grid, scalars="v", cmap=cmap, opacity=opacity,
                  clim=[vmin, vmax], show_scalar_bar=False)
    pl.camera_position = "iso"
    img = pl.screenshot(return_img=True)
    pl.close()
    return img
274
+
275
+
276
def make_cropped_gif(
    frames: np.ndarray, field: Field,
    out_path: Path, fps: int, max_frames: int,
) -> None:
    """Write a PyVista volume-render GIF for the cropped 3-D frames.

    Color limits come from percentiles over *all* frames (not just the
    subsampled ones); sparse fields use nonzero cells only. Returns
    without writing anything when a sparse field has no nonzero data.
    """
    stride = max(1, len(frames) // max_frames)
    gf = frames[::stride]
    src = frames[frames > 0] if field.sparse else frames
    if src.size == 0:
        return
    vmax = float(np.percentile(src, field.vmax_pct))
    vmin = field.vmin_fixed if field.vmin_fixed is not None \
        else float(np.percentile(src, field.vmin_pct))

    # For temperature: fade in above liquidus; others: standard sigmoid
    # preset. (The previous version assigned `opacity = field.cmap` and
    # then immediately overwrote it with "sigmoid" — dead store removed.)
    if field.name == "temperature":
        opacity = [0.0, 0.0, 0.05, 0.3, 0.8, 1.0]
    else:
        opacity = "sigmoid"

    rendered = [_render_pyvista(f, field.cmap, opacity, vmin, vmax) for f in gf]
    imageio.mimsave(str(out_path), rendered, fps=fps, loop=0)
298
+
299
+
300
+ # ── Metadata ──────────────────────────────────────────────────────────────────
301
+
302
def build_metadata(
    case_name: str,
    pp: dict,
    npz_files: list[Path],
    W: int,
    nz: int, ny: int, nx: int,
    crop_data: dict[str, np.ndarray],
    timesteps: np.ndarray,
) -> dict:
    """Assemble the per-case metadata.json payload.

    Combines prepin-derived simulation parameters, laser/material info,
    domain geometry, window sizes, and per-field summary statistics over
    the cropped data.

    NOTE(review): *npz_files* is not referenced in the body — presumably
    kept for interface stability; confirm before removing.
    """
    vel_mps = pp["velocity_cms"] / 100.0  # cm/s → m/s

    meta: dict = {
        "case_id": case_name,
        "simulation": {
            "power_w": pp["power_w"],
            "velocity_mps": vel_mps,
            # Scan angle is encoded in the case folder name, e.g. "A15deg".
            "angle_deg": int(re.search(r"A(\d+)deg", case_name).group(1)),
            "finish_time_s": pp["t_finish"],
            "n_timesteps": len(timesteps),
            "t_start_s": float(timesteps[0]),
            "t_end_s": float(timesteps[-1]),
        },
        "laser": {
            "spot_radius_cm": pp["spot_radius"],
            "gauss_radius_cm": pp["gauss_radius"],
            "beam_start_x_cm": pp["xb0"],
            "beam_start_y_cm": pp["yb0"],
        },
        "material": {
            "name": "316L Stainless Steel",
            "t_liquidus_k": pp["t_liquidus"],
            "t_solidus_k": pp["t_solidus"],
            "t_ambient_k": T_AMBIENT,
        },
        "domain": {
            "cell_size_um": CELL_SIZE_CM * 1e4,  # cm → µm
            "nx": nx, "ny": ny, "nz": nz,
            "x_mm": nx * CELL_SIZE_CM * 10,      # cells → mm
            "y_mm": ny * CELL_SIZE_CM * 10,
            "z_mm": nz * CELL_SIZE_CM * 10,
        },
        "windows": {
            "top_down_cells": TOP_DOWN_HALF * 2,
            "top_down_mm": TOP_DOWN_HALF * 2 * CELL_SIZE_CM * 10,
            "crop_cells": W,
            "crop_mm": W * CELL_SIZE_CM * 10,
            "crop_z_cells": nz,
            "crop_z_mm": nz * CELL_SIZE_CM * 10,
        },
        "fields": {},
    }

    # Per-field summary statistics; sparse fields use nonzero cells only
    # so the stats aren't diluted by empty background.
    for field in FIELDS:
        key = field.name
        if key not in crop_data:
            continue
        arr = crop_data[key]
        nz_vals = arr[arr != 0] if field.sparse else arr
        meta["fields"][key] = {
            "shape": list(arr.shape),
            "min": float(arr.min()),
            "max": float(arr.max()),
            "mean": float(np.mean(nz_vals)) if nz_vals.size else 0.0,
            "std": float(np.std(nz_vals)) if nz_vals.size else 0.0,
            "p99": float(np.percentile(nz_vals, 99)) if nz_vals.size else 0.0,
        }

    return meta
370
+
371
+
372
+ # ── Per-case worker ───────────────────────────────────────────────────────────
373
+
374
def process_case(args: tuple) -> str:
    """Process one case end-to-end. Runs in a worker process.

    Steps: (1) silence GPU/VTK stderr noise, (2) unzip the npz archive if
    needed, (3) parse the prepin, (4) extract per-timestep 2-D/3-D crops,
    (5) write cropped.npz + metadata.json, (6) render per-field GIFs.

    Args:
        args: (case_name, repo_root Path, gif_fps, gif_max_frames) —
            packed as a tuple for ``Pool.imap_unordered``.

    Returns:
        The case name, on completion.
    """
    import os
    case_name, root, gif_fps, gif_max_frames = args

    # Redirect stderr → /dev/null in this worker to suppress EGL/libEGL/VTK noise.
    # Worker processes are isolated, so this doesn't affect the main process.
    # Python-level exceptions still propagate back via the pool mechanism.
    _devnull = os.open(os.devnull, os.O_WRONLY)
    os.dup2(_devnull, 2)
    os.close(_devnull)

    # Each worker process needs its own OFF_SCREEN flag
    pv.OFF_SCREEN = True
    try:
        import vtk
        vtk.vtkObject.GlobalWarningDisplayOff()
    except Exception:
        # vtk may be unavailable as a direct import; best-effort only.
        pass

    case_dir = root / "source" / case_name
    npz_dir = case_dir / "flslnk_npz"
    out_dir = root / "views" / case_name
    out_dir.mkdir(parents=True, exist_ok=True)

    # ── unzip flslnk_npz.zip if not already extracted ─────────────────────────
    zip_path = case_dir / "flslnk_npz.zip"
    unzipped = False
    if not npz_dir.exists():
        if not zip_path.exists():
            raise FileNotFoundError(f"Neither flslnk_npz/ nor flslnk_npz.zip found in {case_dir}")
        npz_dir.mkdir()
        with zipfile.ZipFile(zip_path) as zf:
            zf.extractall(npz_dir)
        unzipped = True

    pp = parse_prepin(find_prepin(case_dir).read_text())
    npz_files = sorted(npz_dir.glob("*.npz"))
    crop_half = crop_half_from_prepin(pp["spot_radius"])
    W = crop_half * 2

    # ── determine domain dimensions from first readable timestep ─────────────
    nz = ny = nx = y_center = None
    for _f in npz_files:
        try:
            first_raw = np.load(_f)
            nz, ny, nx, y_center = domain_from_raw(first_raw, pp)
            break
        # NOTE(review): (EOFError, Exception) is redundant — Exception
        # already covers EOFError. Deliberately broad: any corrupt archive
        # is skipped and the next file is tried.
        except (EOFError, Exception):
            continue
    if nz is None:
        raise RuntimeError(f"No readable NPZ files found for {case_name}")

    # ── extraction ────────────────────────────────────────────────────────────
    td_acc: dict[str, list] = {}
    crop_acc: dict[str, list] = {}
    timesteps: list[float] = []
    n_skipped = 0

    for npz_path in tqdm(npz_files, desc=case_name, leave=False, position=1):
        try:
            raw = np.load(npz_path)
            t = float(raw["timestep"][0])
        # Same deliberately broad catch as above: corrupt files are counted
        # and skipped rather than aborting the whole case.
        except (EOFError, Exception):
            n_skipped += 1
            continue
        bx = beam_x_index(t, pp)
        td, cr = extract(raw, bx, crop_half, nz, ny, nx, y_center)
        for k, v in td.items(): td_acc.setdefault(k, []).append(v)
        for k, v in cr.items(): crop_acc.setdefault(k, []).append(v)
        timesteps.append(t)

    if n_skipped:
        tqdm.write(f"  ! {case_name}: skipped {n_skipped} corrupt NPZ files")

    ts = np.array(timesteps, dtype=np.float64)
    crop_arrays = {k: np.array(v, dtype=np.float32) for k, v in crop_acc.items()}

    # ── cropped.npz ───────────────────────────────────────────────────────────
    np.savez_compressed(out_dir / "cropped.npz", **{
        "timestep": ts,
        "power": np.array([pp["power_w"]], dtype=np.int32),
        "velocity": np.array([pp["velocity_cms"] / 100.0], dtype=np.float64),  # cm/s → m/s
        "window_cells": np.array([W], dtype=np.int32),
        **crop_arrays,
    })

    # ── delete extracted npz folder if we unzipped it ────────────────────────
    if unzipped:
        shutil.rmtree(npz_dir)

    # ── metadata.json ─────────────────────────────────────────────────────────
    meta = build_metadata(case_name, pp, npz_files, W, nz, ny, nx, crop_arrays, ts)
    with open(out_dir / "metadata.json", "w") as f:
        json.dump(meta, f, indent=2)

    # ── GIFs ──────────────────────────────────────────────────────────────────
    for field in tqdm(FIELDS, desc=f"{case_name} GIFs", leave=False, position=1):
        fd = out_dir / field.name
        fd.mkdir(exist_ok=True)
        make_top_down_gif(
            np.array(td_acc[field.name], dtype=np.float32),
            field, case_name, fd / "top_down.gif", gif_fps, gif_max_frames,
        )
        make_cropped_gif(
            np.array(crop_acc[field.name], dtype=np.float32),
            field, fd / "cropped.gif", gif_fps, gif_max_frames,
        )

    return case_name
484
+
485
+
486
+ # ── Main ──────────────────────────────────────────────────────────────────────
487
+
488
def main() -> None:
    """CLI entry point: find unprocessed cases and fan them out to workers."""
    parser = argparse.ArgumentParser(
        description="Generate all views for every unprocessed simulation case."
    )
    parser.add_argument(
        "cases", nargs="*",
        help="Case folder names under source/ (default: all unprocessed)",
    )
    parser.add_argument("--gif-fps", type=int, default=20)
    parser.add_argument("--gif-max-frames", type=int, default=200)
    parser.add_argument(
        "--workers", type=int,
        default=max(1, multiprocessing.cpu_count() // 2),
        help="Parallel worker processes (default: cpu_count // 2)",
    )
    args = parser.parse_args()

    # Repository root: this script lives in <root>/scripts/.
    root = Path(__file__).parent.parent

    # Resolve case list — explicit args or all source folders with flslnk_npz/
    if args.cases:
        candidates = [root / "source" / c for c in args.cases]
    else:
        candidates = sorted((root / "source").iterdir())

    # Filter to valid, unprocessed cases
    todo = []
    for case_dir in candidates:
        npz_dir = case_dir / "flslnk_npz"
        zip_path = case_dir / "flslnk_npz.zip"
        cropped = root / "views" / case_dir.name / "cropped.npz"
        if not npz_dir.exists() and not zip_path.exists():
            print(f"  skip {case_dir.name} — no flslnk_npz/ or flslnk_npz.zip")
            continue
        # cropped.npz is the last artifact written per case, so its
        # presence marks the case as done.
        if cropped.exists():
            print(f"  skip {case_dir.name} — already done")
            continue
        todo.append(case_dir.name)

    if not todo:
        print("Nothing to do.")
        return

    print(f"\nProcessing {len(todo)} cases with {args.workers} workers\n")

    worker_args = [(c, root, args.gif_fps, args.gif_max_frames) for c in todo]

    # imap_unordered reports cases as they finish, not in submission order.
    with multiprocessing.Pool(args.workers) as pool:
        for done in tqdm(
            pool.imap_unordered(process_case, worker_args),
            total=len(todo), desc="Cases", position=0,
        ):
            tqdm.write(f"  ✓ {done}")
541
+
542
+
543
if __name__ == "__main__":
    # "spawn" gives each worker a fresh interpreter instead of a fork of
    # the parent. NOTE(review): presumably required so the off-screen
    # PyVista/VTK rendering in workers starts from clean GL state — confirm.
    multiprocessing.set_start_method("spawn", force=True)
    main()
scripts/02_build_hf_dataset.py ADDED
@@ -0,0 +1,316 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ Build HuggingFace dataset from processed simulation views.
4
+
5
+ One config per field (e.g. load_dataset("...", "temperature")).
6
+ One Parquet file per case per config, written to:
7
+ data/<config>/<case_id>.parquet
8
+
9
+ Each row is one timestep. All 3D volumes are padded to (NZ_MAX, W, W)
10
+ along the Z axis; vector fields are padded to (NZ_MAX, W, W, 3).
11
+
12
+ Padding fill values:
13
+ temperature → T_AMBIENT (299.15 K)
14
+ all others → 0.0
15
+
16
+ Run:
17
+ uv run python scripts/02_build_hf_dataset.py
18
+ uv run python scripts/02_build_hf_dataset.py --cases P100W_V0.30mps_A00deg ...
19
+ uv run python scripts/02_build_hf_dataset.py --config temperature
20
+ """
21
+
22
+ import argparse
23
+ import io
24
+ import json
25
+ from pathlib import Path
26
+
27
+ import imageio.v3 as iio
28
+ import numpy as np
29
+ from datasets import Array3D, Array4D, Dataset, Features, Image, Value
30
+ from PIL import Image as PILImage
31
+ from tqdm import tqdm
32
+
33
# ── Constants ─────────────────────────────────────────────────────────────────
NZ_MAX = 50            # every volume is Z-padded up to this many slices
W = 30                 # crop window width in cells — NOTE(review): must match
                       # the window_cells written by 01_generate_views.py; confirm
T_AMBIENT = 299.15     # Z-padding fill value for temperature volumes (K)
CELL_SIZE_CM = 0.002   # 20 µm

# (config name, Z-padding fill value) — one dataset config per scalar field.
SCALAR_CONFIGS = [
    ("temperature", T_AMBIENT),
    ("fraction_of_fluid", 0.0),
    ("liquid_label", 0.0),
    ("melt_region", 0.0),
    ("pressure", 0.0),
    ("temperature_gradient", 0.0),
    ("velocity_magnitude", 0.0),
    ("gradient_magnitude", 0.0),
]

# 3-component vector fields; these get Array4D volumes.
VECTOR_CONFIGS = [
    ("vx_vy_vz", 0.0),
    ("dtdx_dtdy_dtdz", 0.0),
]

ALL_CONFIGS = SCALAR_CONFIGS + VECTOR_CONFIGS
56
+
57
+
58
+ # ── Features ──────────────────────────────────────────────────────────────────
59
+
60
# Columns shared by every config; the per-config schema builders below add
# the "volume" column (Array3D for scalars, Array4D for vectors).
COMMON_FEATURES = {
    "id": Value("string"),            # case identifier
    "top_down_preview": Image(),      # PNG frame taken from the top-down GIF
    "volume_preview": Image(),        # PNG frame taken from the 3-D render GIF
    "timestep_index": Value("int32"),
    "timestep_count": Value("int32"),
    "timestep": Value("float64"),     # simulation time (s)
    "power_w": Value("int32"),
    "velocity_mmps": Value("float64"),
    "angle_deg": Value("int32"),
    "spot_radius_mm": Value("float64"),
    "gauss_radius_mm": Value("float64"),
    "beam_x_mm": Value("float64"),    # beam X position at this timestep
    "nx": Value("int32"),
    "ny": Value("int32"),
    "nz": Value("int32"),
    "mesh_bound_x_mm": Value("float64"),
    "mesh_bound_y_mm": Value("float64"),
    "mesh_bound_z_mm": Value("float64"),
    "mesh_resolution_mm": Value("float64"),
    "material_name": Value("string"),
    "t_liquidus_k": Value("float64"),
    "t_solidus_k": Value("float64"),
}
84
+
85
+
86
def scalar_features() -> Features:
    """Schema for scalar-field configs: common columns plus a (NZ_MAX, W, W) volume."""
    schema = dict(COMMON_FEATURES)
    schema["volume"] = Array3D(shape=(NZ_MAX, W, W), dtype="float32")
    return Features(schema)
88
+
89
+
90
def vector_features() -> Features:
    """Schema for vector-field configs: common columns plus a (NZ_MAX, W, W, 3) volume."""
    schema = dict(COMMON_FEATURES)
    schema["volume"] = Array4D(shape=(NZ_MAX, W, W, 3), dtype="float32")
    return Features(schema)
92
+
93
+
94
+ # ── Padding ───────────────────────────────────────────────────────────────────
95
+
96
def pad_volume(vol: np.ndarray, pad_val: float) -> np.ndarray:
    """Pad (nz, H, W[, C]) → (NZ_MAX, H, W[, C]) along the Z axis.

    The spatial (and optional component) dimensions are taken from *vol*
    itself rather than the module-level ``W`` constant, so a volume whose
    crop window differs from the default still pads to a consistent shape.

    Raises:
        ValueError: if *vol* already has more than NZ_MAX slices (the
            previous version failed with an obscure negative-dimension
            error from ``np.full``).
    """
    nz = vol.shape[0]
    if nz == NZ_MAX:
        return vol
    if nz > NZ_MAX:
        raise ValueError(f"volume has {nz} Z slices, more than NZ_MAX={NZ_MAX}")
    filler = np.full((NZ_MAX - nz, *vol.shape[1:]), pad_val, dtype=np.float32)
    return np.concatenate([vol, filler], axis=0)
106
+
107
+
108
+ # ── GIF frame extraction ──────────────────────────────────────────────────────
109
+
110
# GIF generation stride mirrors scripts/01_generate_views.py
GIF_MAX_FRAMES = 200

# Vector fields have no dedicated GIF directory; fall back to their magnitude.
GIF_FALLBACK = {
    "vx_vy_vz": "velocity_magnitude",
    "dtdx_dtdy_dtdz": "gradient_magnitude",
}

# 1×1 white PNG used as placeholder when no GIF exists.
# Encoded once at import time and reused for every placeholder row.
_placeholder_buf = io.BytesIO()
PILImage.new("RGB", (1, 1), (255, 255, 255)).save(_placeholder_buf, format="PNG")
_PLACEHOLDER_PNG: bytes = _placeholder_buf.getvalue()
123
+
124
+
125
def load_gif_frames(path: Path) -> np.ndarray | None:
    """Read every frame of *path* as a (N, H, W, C) uint8 array, or None if absent."""
    if path.exists():
        return iio.imread(str(path), index=None)  # (N, H, W, C)
    return None
130
+
131
+
132
def frame_to_png(frame: np.ndarray) -> bytes:
    """Encode one (H, W, C) uint8 frame as PNG bytes."""
    with io.BytesIO() as buf:
        PILImage.fromarray(frame).save(buf, format="PNG")
        return buf.getvalue()
137
+
138
+
139
def gif_frame_index(timestep_index: int, timestep_count: int, n_frames: int) -> int:
    """Map a timestep index onto the GIF frame that represents it.

    Mirrors the subsampling stride used when the GIFs were written
    (GIF_MAX_FRAMES), clamping to the last available frame.
    """
    stride = timestep_count // GIF_MAX_FRAMES
    if stride < 1:
        stride = 1
    candidate = timestep_index // stride
    return min(candidate, n_frames - 1)
143
+
144
+
145
def load_case_frames(
    case_dir: Path, field_name: str
) -> tuple[np.ndarray | None, np.ndarray | None]:
    """Load the (top_down, volume) preview GIF frames for one case/field.

    Vector fields have no GIF directory of their own, so they resolve to
    their magnitude field via GIF_FALLBACK.
    """
    gif_dir = case_dir / GIF_FALLBACK.get(field_name, field_name)
    top_down = load_gif_frames(gif_dir / "top_down.gif")
    volume = load_gif_frames(gif_dir / "cropped.gif")
    return top_down, volume
155
+
156
+
157
+ # ── Case builder ──────────────────────────────────────────────────────────────
158
+
159
def build_case_rows(
    case_dir: Path,
    field_name: str,
    pad_val: float,
) -> list[dict]:
    """Build one dataset row per timestep for a single case and field.

    Returns an empty list when the case lacks metadata.json / cropped.npz
    or when *field_name* is absent from the npz archive.
    """
    meta_path = case_dir / "metadata.json"
    npz_path = case_dir / "cropped.npz"

    if not meta_path.exists() or not npz_path.exists():
        return []

    meta = json.loads(meta_path.read_text())
    npz = np.load(npz_path)

    if field_name not in npz:
        return []

    case_id = meta["case_id"]
    sim = meta["simulation"]
    laser = meta["laser"]
    material = meta["material"]
    domain = meta["domain"]

    # Convert metadata units (m/s, cm, µm) to the mm-based row schema.
    power_w = int(sim["power_w"])
    velocity_mmps = sim["velocity_mps"] * 1000.0
    velocity_cms = sim["velocity_mps"] * 100.0
    angle_deg = int(sim["angle_deg"])
    spot_radius_mm = laser["spot_radius_cm"] * 10.0
    gauss_radius_mm = laser["gauss_radius_cm"] * 10.0
    beam_start_x_cm = laser["beam_start_x_cm"]
    nx = int(domain["nx"])
    ny = int(domain["ny"])
    nz = int(domain["nz"])
    mesh_bound_x_mm = domain["x_mm"]
    mesh_bound_y_mm = domain["y_mm"]
    mesh_bound_z_mm = domain["z_mm"]
    mesh_res_mm = domain["cell_size_um"] / 1000.0
    material_name = material["name"]
    t_liquidus_k = material["t_liquidus_k"]
    t_solidus_k = material["t_solidus_k"]

    timesteps = npz["timestep"]    # (N,)
    volumes = npz[field_name]      # (N, nz, W, W[, 3])
    timestep_count = len(timesteps)

    td_frames, vol_frames = load_case_frames(case_dir, field_name)

    rows = []
    for i, t in enumerate(timesteps):
        # Beam X recomputed from start position + constant scan velocity.
        beam_x_mm = (beam_start_x_cm + velocity_cms * float(t)) * 10.0
        vol_padded = pad_volume(volumes[i], pad_val)

        # Previews: nearest GIF frame, or the 1×1 white placeholder.
        if td_frames is not None:
            fi = gif_frame_index(i, timestep_count, len(td_frames))
            td_png = frame_to_png(td_frames[fi])
        else:
            td_png = _PLACEHOLDER_PNG

        if vol_frames is not None:
            fi = gif_frame_index(i, timestep_count, len(vol_frames))
            vol_png = frame_to_png(vol_frames[fi])
        else:
            vol_png = _PLACEHOLDER_PNG

        rows.append({
            "id": case_id,
            "top_down_preview": {"bytes": td_png, "path": None},
            "volume_preview": {"bytes": vol_png, "path": None},
            "timestep_index": i,
            "timestep_count": timestep_count,
            "timestep": float(t),
            "power_w": power_w,
            "velocity_mmps": velocity_mmps,
            "angle_deg": angle_deg,
            "spot_radius_mm": spot_radius_mm,
            "gauss_radius_mm": gauss_radius_mm,
            "beam_x_mm": beam_x_mm,
            "nx": nx,
            "ny": ny,
            "nz": nz,
            "mesh_bound_x_mm": mesh_bound_x_mm,
            "mesh_bound_y_mm": mesh_bound_y_mm,
            "mesh_bound_z_mm": mesh_bound_z_mm,
            "mesh_resolution_mm": mesh_res_mm,
            "material_name": material_name,
            "t_liquidus_k": t_liquidus_k,
            "t_solidus_k": t_solidus_k,
            "volume": vol_padded,
        })

    return rows
250
+
251
+
252
+ # ── Main ──────────────────────────────────────────────────────────────────────
253
+
254
def main() -> None:
    """CLI entry point: write one Parquet file per case per field config."""
    parser = argparse.ArgumentParser(
        description="Build HuggingFace Parquet dataset from simulation views."
    )
    parser.add_argument(
        "cases", nargs="*",
        help="Case folder names under views/ (default: all)",
    )
    parser.add_argument(
        "--config", nargs="*",
        help="Field config names to build (default: all)",
    )
    args = parser.parse_args()

    # Repository root: this script lives in <root>/scripts/.
    root = Path(__file__).parent.parent
    views_dir = root / "views"
    data_dir = root / "data"

    # Resolve case list
    if args.cases:
        case_dirs = [views_dir / c for c in args.cases]
    else:
        case_dirs = sorted(
            d for d in views_dir.iterdir()
            if d.is_dir() and (d / "cropped.npz").exists()
        )

    # Resolve config list
    if args.config:
        configs = [(n, p) for n, p in ALL_CONFIGS if n in args.config]
    else:
        configs = ALL_CONFIGS

    print(f"Cases: {len(case_dirs)}")
    print(f"Configs: {[n for n, _ in configs]}\n")

    for field_name, pad_val in configs:
        # Vector fields carry a trailing component axis → Array4D schema.
        is_vector = field_name in {n for n, _ in VECTOR_CONFIGS}
        features = vector_features() if is_vector else scalar_features()
        out_dir = data_dir / field_name
        out_dir.mkdir(parents=True, exist_ok=True)

        print(f"[{field_name}]")
        for case_dir in tqdm(case_dirs, desc=field_name):
            out_path = out_dir / f"{case_dir.name}.parquet"
            # An existing Parquet file marks the case done (idempotent reruns).
            if out_path.exists():
                tqdm.write(f"  skip {case_dir.name} — already done")
                continue

            rows = build_case_rows(case_dir, field_name, pad_val)
            if not rows:
                tqdm.write(f"  skip {case_dir.name} — no data")
                continue

            ds = Dataset.from_list(rows, features=features)
            ds.to_parquet(str(out_path))
            tqdm.write(f"  ✓ {case_dir.name} ({len(rows)} rows)")

        print()
313
+
314
+
315
if __name__ == "__main__":
    # Sequential build — no multiprocessing needed for Parquet writing.
    main()
scripts/03_rename_keyhole.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env python3
"""
Rename the 6 initial angle-sweep cases in data/ from
P350W_V0.30mps_A{XX}deg → P350W_V0.30mps_A{XX}deg_keyhole.

Updates both the parquet filename and the 'id' column value inside each file.
"""

# Imports grouped per PEP 8: stdlib first, then third-party.
from pathlib import Path

import pyarrow as pa
import pyarrow.parquet as pq

# The six cases of the original angle sweep (350 W, 0.30 m/s).
CASES = [
    "P350W_V0.30mps_A00deg",
    "P350W_V0.30mps_A05deg",
    "P350W_V0.30mps_A10deg",
    "P350W_V0.30mps_A15deg",
    "P350W_V0.30mps_A20deg",
    "P350W_V0.30mps_A25deg",
]

data_dir = Path(__file__).parent.parent / "data"
configs = sorted(d.name for d in data_dir.iterdir() if d.is_dir())

for config in configs:
    for case in CASES:
        old_path = data_dir / config / f"{case}.parquet"
        new_path = data_dir / config / f"{case}_keyhole.parquet"
        if not old_path.exists():
            # Already renamed (or never built) — safe to re-run the script.
            print(f"  skip {config}/{case} — not found")
            continue
        table = pq.read_table(old_path)
        # Rewrite every row's 'id' so it matches the new case name.
        new_id = pa.array([f"{case}_keyhole"] * len(table), type=pa.string())
        table = table.set_column(table.schema.get_field_index("id"), "id", new_id)
        pq.write_table(table, new_path, compression="snappy")
        # Remove the old file only after the new one is fully written.
        old_path.unlink()
        print(f"  ✓ {config}/{case} → {case}_keyhole")