Add overwrite NBLOCK (#11)
* add append feature

* add docs
akaszynski authored Sep 18, 2023
1 parent a543273 commit f2baff1
Showing 3 changed files with 105 additions and 11 deletions.
56 changes: 56 additions & 0 deletions mapdl_archive/archive.py
@@ -3,6 +3,7 @@
import logging
import os
import pathlib
import shutil

import numpy as np
from pyvista import CellType, UnstructuredGrid
@@ -226,6 +227,61 @@ def plot(self, *args, **kwargs):
kwargs.setdefault("show_edges", True)
return self.grid.plot(*args, **kwargs)

@property
def _nblock_start(self) -> int:
"""Return the start of the node block in the original file."""
return self._raw["nblock_start"]

@property
def _nblock_end(self) -> int:
"""Return the end of the node block in the original file."""
return self._raw["nblock_end"]

def overwrite_nblock(self, filename, node_id, pos, angles=None, sig_digits=13):
"""Write out an archive file to disk while replacing its NBLOCK.

Parameters
----------
filename : str or file handle
Filename to write the new archive to.
node_id : numpy.ndarray
ANSYS node numbers.
pos : numpy.ndarray
Array of node coordinates.
angles : numpy.ndarray, optional
Node angles for each node; written to the NBLOCK when included.
sig_digits : int, default: 13
Number of significant digits to use when writing the nodes. Must be
greater than 0.

Examples
--------
Write a new archive file that overwrites the NBLOCK with random nodes
while reusing the existing node numbers.

>>> import numpy as np
>>> import mapdl_archive
>>> from mapdl_archive import examples
>>> archive = mapdl_archive.Archive(examples.hexarchivefile)
>>> new_nodes = np.random.random(archive.nodes.shape)
>>> archive.overwrite_nblock("new_archive.cdb", archive.nnum, new_nodes)
"""
with open(self._filename, "rb") as src_file, open(filename, "wb") as dest_file:
# Copy the beginning of the file up to _nblock_start
dest_file.write(src_file.read(self._nblock_start))

# Write new nblock
write_nblock(
filename, node_id, pos, angles=angles, mode="a", sig_digits=sig_digits
)

# Copy the rest of the original file
with open(self._filename, "rb") as src_file, open(filename, "ab") as dest_file:
src_file.seek(self._nblock_end)
dest_file.seek(0, io.SEEK_END)

shutil.copyfileobj(src_file, dest_file)


def save_as_archive(
filename,
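The new overwrite_nblock method above splices a replacement NBLOCK into a byte-for-byte copy of the original archive: copy everything before the recorded nblock_start offset, append a freshly formatted node block, then copy everything after nblock_end. A minimal sketch of that splice pattern, with a hypothetical write_new_block callback standing in for write_nblock, could look like this:

import shutil


def splice_block(src_path, dst_path, start, end, write_new_block):
    """Copy src_path to dst_path, replacing bytes [start, end) with new content."""
    with open(src_path, "rb") as src, open(dst_path, "wb") as dst:
        # copy the unchanged prefix up to the start of the block being replaced
        dst.write(src.read(start))

        # write the replacement block (callback must write bytes to dst)
        write_new_block(dst)

        # skip the original block and stream the remainder of the source file
        src.seek(end)
        shutil.copyfileobj(src, dst)

This sketch writes everything through one open destination handle; the committed method instead closes the file after copying the prefix and reopens it in append mode so that the existing write_nblock(..., mode="a") helper can handle the node formatting.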
32 changes: 21 additions & 11 deletions mapdl_archive/cython/_reader.pyx
@@ -94,7 +94,7 @@ def read(filename, read_parameters=False, debug=False, read_eblock=True):
cdef int [::1] nnum = np.empty(0, ctypes.c_int)
cdef double [:, ::1] nodes = np.empty((0, 0))
cdef int d_size[3]
- cdef int f_size, nfields
+ cdef int f_size, nfields, nblock_start, nblock_end, _start

# EBLOCK
cdef int nelem = 0
@@ -314,6 +314,7 @@ def read(filename, read_parameters=False, debug=False, read_eblock=True):

elif first_char == 'N' or first_char == 'n':
ungetc(first_char, cfile); # Put the character back into the stream
_start = ftell(cfile) # used for NBLOCK
fgets(line, sizeof(line), cfile)

if debug:
@@ -328,6 +329,9 @@ def read(filename, read_parameters=False, debug=False, read_eblock=True):
if debug:
print('reading NBLOCK due to ', line.decode())

# Before reading NBLOCK, save where the nblock started
nblock_start = _start

# Get size of NBLOCK
nnodes = int(line[line.rfind(b',') + 1:])
nnum = np.empty(nnodes, dtype=ctypes.c_int)
@@ -343,6 +347,10 @@ def read(filename, read_parameters=False, debug=False, read_eblock=True):
nnodes_read = read_nblock_cfile(cfile, &nnum[0], &nodes[0, 0], nnodes, d_size, f_size)
nodes_read = True

# read final line
fgets(line, sizeof(line), cfile)
nblock_end = ftell(cfile)

if nnodes_read != nnodes:
nnodes = nnodes_read
nodes = nodes[:nnodes]
@@ -465,16 +473,18 @@ def read(filename, read_parameters=False, debug=False, read_eblock=True):

return {
'rnum': np.asarray(rnum),
- 'rdat': rdat,
- 'ekey': np.asarray(elem_type, ctypes.c_int),
- 'nnum': np.asarray(nnum),
- 'nodes': np.asarray(nodes),
- 'elem': np.array(elem[:elem_sz]),
- 'elem_off': np.array(elem_off),
- 'node_comps': node_comps,
- 'elem_comps': elem_comps,
- 'keyopt': keyopt,
- 'parameters': parameters
+ 'rdat': rdat,
+ 'ekey': np.asarray(elem_type, ctypes.c_int),
+ 'nnum': np.asarray(nnum),
+ 'nodes': np.asarray(nodes),
+ 'elem': np.array(elem[:elem_sz]),
+ 'elem_off': np.array(elem_off),
+ 'node_comps': node_comps,
+ 'elem_comps': elem_comps,
+ 'keyopt': keyopt,
+ 'parameters': parameters,
+ 'nblock_start': nblock_start,
+ 'nblock_end': nblock_end,
}


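The reader changes capture the byte range of the NBLOCK while parsing: ftell is taken just before the NBLOCK header line is consumed (saved in _start), taken again after the block's final line, and both offsets are returned as nblock_start and nblock_end. A rough pure-Python equivalent of that offset bookkeeping, illustrative only (the Cython code reads an exact node count via read_nblock_cfile rather than scanning for a terminator), might be:

def find_nblock_span(path):
    """Return (start, end) byte offsets of the NBLOCK section of a CDB file."""
    start = end = None
    with open(path, "rb") as f:
        while True:
            pos = f.tell()  # offset before consuming the next line
            line = f.readline()
            if not line:
                break
            if start is None and line.upper().startswith(b"NBLOCK"):
                start = pos  # block starts at the NBLOCK header line
            elif start is not None and line.strip().endswith(b"-1,"):
                end = f.tell()  # block ends just after the terminating "-1," line
                break
    return start, end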
28 changes: 28 additions & 0 deletions tests/test_archive.py
@@ -131,6 +131,9 @@ def test_archive_init(hex_archive):
assert isinstance(hex_archive._raw, dict)
assert isinstance(hex_archive.grid, pv.UnstructuredGrid)

assert hex_archive._nblock_start > 0
assert hex_archive._nblock_end > hex_archive._nblock_start


def test_parse_vtk(hex_archive):
grid = hex_archive.grid
@@ -545,6 +548,31 @@ def test_etblock():
assert archive.n_elem == 1


def test_overwrite_nblock(tmpdir, hex_archive):
# ensure that we capture the entire NBLOCK
with open(hex_archive._filename, "rb") as fid:
fid.seek(hex_archive._nblock_start)
nblock_txt = fid.read(
hex_archive._nblock_end - hex_archive._nblock_start
).decode()

assert nblock_txt.startswith("NBLOCK")
assert nblock_txt.splitlines()[-1].endswith("-1,")

filename = str(tmpdir.mkdir("tmpdir").join("tmp.cdb"))
nodes = np.random.random(hex_archive.nodes.shape)
hex_archive.overwrite_nblock(
filename,
hex_archive.nnum,
nodes,
)

archive_new = Archive(filename)
assert np.allclose(nodes, archive_new.grid.points)
assert np.allclose(hex_archive.nnum, archive_new.nnum)
assert np.allclose(hex_archive.grid.cells.size, archive_new.grid.cells.size)


class TestPathlibFilename:
def test_pathlib_filename_property(self, pathlib_archive):
assert isinstance(pathlib_archive.pathlib_filename, pathlib.Path)
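The slice checked at the start of test_overwrite_nblock spans the whole node block of the source archive. For orientation, an NBLOCK section of a CDB archive generally has the shape sketched below (illustrative values only; the integer and float field widths come from the format line, and the closing line is the one the test expects to end with "-1,"):

NBLOCK,6,SOLID,         8,         8
(3i9,6e21.13e3)
        1        0        0 0.0000000000000E+000 0.0000000000000E+000 0.0000000000000E+000
        2        0        0 1.0000000000000E+000 0.0000000000000E+000 0.0000000000000E+000
...
N,R5.3,LOC,       -1,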
