"Python workflow framework for FISPACT."
Project description
Description
The module loads FISPACT JSON output files and converts to Polars dataframes with minor data normalization. This allows efficient data extraction and aggregation. Multiple JSON files can be combined using simple additional identification for different FISPACT runs. So far we use just two-dimensional identification by material and case. The case usually identifies certain neutron flux.
Implemented functionality
export to DuckDB
export to parquet files
Installation
From PyPI
pip install xpypact
As dependency
poetry add xpypact
From source
pip install git+https://github.com/MC-kit/xpypact.git
Examples
from xpypact import FullDataCollector, Inventory
def get_material_id(p: Path) -> int:
    """Derive the material identifier from a FISPACT JSON file path.

    User-defined: implement the mapping appropriate for your file layout.
    """
    ...


def get_case_id(p: Path) -> int:
    """Derive the case identifier (e.g. neutron-flux case) from a FISPACT JSON file path.

    User-defined: implement the mapping appropriate for your file layout.
    """
    ...
# Paths to the FISPACT JSON output files to be combined.
jsons = [path1, path2, ...]
# Map each path to its (material_id, case_id) identification.
material_ids = {p: get_material_id(p) for p in jsons}
# NOTE: fixed — the original used `{c: get_case_id(p) ...}`, but `c` is
# undefined; the key must be the path `p`, matching material_ids above.
case_ids = {p: get_case_id(p) for p in jsons}
collector = FullDataCollector()
if sequential_load:
    # Simple sequential path: parse each JSON and append with its identification.
    for json in jsons:
        inventory = Inventory.from_json(json)
        collector.append(inventory, material_id=material_ids[json], case_id=case_ids[json])
else:  # multithreading is allowed for collector as well
    task_list = ...  # list of tuples[directory, case_id, tasks_sequence]
    threads = 16  # whatever

    def _find_path(arg) -> tuple[int, int, Path]:
        """Resolve an inventory JSON path and derive its identification.

        Args:
            arg: tuple (case_id from the database, directory path, inventory name).

        Returns:
            (material_id, case_id, json_path).

        Raises:
            FindPathError: if the file is missing, the ids cannot be parsed
                from the path, or the parsed case_id contradicts the database.
        """
        _case, path, inventory = arg
        json_path: Path = (Path(path) / inventory).with_suffix(".json")
        if not json_path.exists():
            msg = f"Cannot find file {json_path}"
            raise FindPathError(msg)
        try:
            # material id is encoded in the inventory file name suffix,
            # case id in the parent directory name suffix.
            material_id = int(inventory[_LEN_INVENTORY:])
            case_str = json_path.parent.parts[-1]
            case_id = int(case_str[_LEN_CASE:])
        except (ValueError, IndexError) as x:
            msg = f"Cannot define material_id and case_id from {json_path}"
            raise FindPathError(msg) from x
        if case_id != _case:
            msg = f"Contradicting values of case_id in case path and database: {case_id} != {_case}"
            raise FindPathError(msg)
        return material_id, case_id, json_path

    # Resolve all paths concurrently; existence checks are I/O bound.
    with futures.ThreadPoolExecutor(max_workers=threads) as executor:
        mcp_futures = [
            executor.submit(_find_path, arg)
            for arg in (
                (task_case[0], task_case[1], task)
                for task_case in task_list
                for task in task_case[2].split(",")
                if task.startswith("inventory-")
            )
        ]
        mips = [x.result() for x in futures.as_completed(mcp_futures)]
    mips.sort(key=lambda x: x[0:2])  # sort by material_id, case_id

    def _load_json(arg) -> None:
        """Parse one inventory JSON and append it to the shared collector."""
        collector, material_id, case_id, json_path = arg
        collector.append(from_json(json_path.read_text(encoding="utf8")), material_id, case_id)

    # Collector is documented above as safe for concurrent appends.
    with futures.ThreadPoolExecutor(max_workers=threads) as executor:
        executor.map(_load_json, ((collector, *mip) for mip in mips))
collected = collector.get_result()

# save to parquet files
collected.save_to_parquets(Path.cwd() / "parquets")

# or use DuckDB database
# NOTE: fixed — the original "import from xpypact.dao save" is invalid
# Python syntax; the correct form is a from-import.
from xpypact.dao import save
import duckdb as db

con = db.connect()
save(con, collected)
# Extract the gamma spectrum (boundary g, emission rate) for a single
# material/case/time-step from the timestep_gamma table, ordered by g.
gamma_from_db = con.sql(
"""
select
g, rate
from timestep_gamma
where material_id = 1 and case_id = 54 and time_step_number = 7
order by g
""",
).fetchall()
Contributing
Just follow ordinary practice:
References
Project details
Release history Release notifications | RSS feed
Download files
Download the file for your platform. If you're not sure which to choose, learn more about installing packages.
Source Distribution
xpypact-0.12.2.tar.gz
(25.7 kB
view details)
Built Distribution
xpypact-0.12.2-py3-none-any.whl
(21.9 kB
view details)
File details
Details for the file xpypact-0.12.2.tar.gz
.
File metadata
- Download URL: xpypact-0.12.2.tar.gz
- Upload date:
- Size: 25.7 kB
- Tags: Source
- Uploaded using Trusted Publishing? No
- Uploaded via: poetry/1.8.3 CPython/3.11.9 Linux/6.5.0-1025-azure
File hashes
Algorithm | Hash digest | |
---|---|---|
SHA256 | e670382e474b20522ed18f88961bff67f6fc801032a90a932220488a0d929b7f |
|
MD5 | 8ccba0765a55b9425fafaeb9498c46a7 |
|
BLAKE2b-256 | 224f81504c51f1eb652fe8fbe10f108abdfbebe024eb43bf409eceeb17d24031 |
File details
Details for the file xpypact-0.12.2-py3-none-any.whl
.
File metadata
- Download URL: xpypact-0.12.2-py3-none-any.whl
- Upload date:
- Size: 21.9 kB
- Tags: Python 3
- Uploaded using Trusted Publishing? No
- Uploaded via: poetry/1.8.3 CPython/3.11.9 Linux/6.5.0-1025-azure
File hashes
Algorithm | Hash digest | |
---|---|---|
SHA256 | 73ab61097915162dae2ae8eef3ef881b10559448bf7f4259897c8c43a250b98f |
|
MD5 | 5dad9690dbae8404cf32efd81532b745 |
|
BLAKE2b-256 | 5f2030784e2458a9c21111a89d57347206f7c485410225f449a4d496a44e52e2 |