Skip to content

Plotting API

yabplot.build_cortical_atlas(nii_path, wb_txt_path, out_dir, include_list=None, exclude_list=None, atlasname='atlas')

builds a custom yabplot cortical atlas from a volumetric NIfTI file.

projects a volumetric NIfTI atlas to standard fsLR32k surfaces using connectome workbench, cleans the medial wall, and applies majority-vote boundary smoothing to remove voxel artifacts.

Parameters:

Name Type Description Default
nii_path str

absolute path to the 3D NIfTI volume of the atlas.

required
wb_txt_path str

absolute path to the text file formatted specifically for connectome workbench.

required
out_dir str

directory where the final .csv map and .txt LUT will be saved.

required
include_list list of str

keywords of regions to strictly include. all other regions are ignored.

None
exclude_list list of str

keywords of regions to strictly exclude. all other regions are kept.

None
atlasname str

prefix name for the output files. default is 'atlas'.

'atlas'

Raises:

Type Description
ValueError

if both include_list and exclude_list are provided.

Source code in yabplot/atlas_builder.py
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
def build_cortical_atlas(nii_path, wb_txt_path, out_dir, include_list=None, exclude_list=None, atlasname='atlas'):
    """
    builds a custom yabplot cortical atlas from a volumetric NIfTI file.

    projects a volumetric NIfTI atlas to standard fsLR32k surfaces using connectome workbench, 
    cleans the medial wall, and applies majority-vote boundary smoothing to remove voxel artifacts.

    parameters
    ----------
    nii_path : str
        absolute path to the 3D NIfTI volume of the atlas.
    wb_txt_path : str
        absolute path to the text file formatted specifically for connectome workbench.
    out_dir : str
        directory where the final .csv map and .txt LUT will be saved.
    include_list : list of str, optional
        keywords of regions to strictly include. all other regions are ignored.
    exclude_list : list of str, optional
        keywords of regions to strictly exclude. all other regions are kept.
    atlasname : str, optional
        prefix name for the output files. default is 'atlas'.

    raises
    ------
    ValueError
        if both include_list and exclude_list are provided.

    notes
    -----
    writes '<atlasname>.csv' (one integer label per fsLR32k vertex, LH then RH)
    and '<atlasname>.txt' (LUT of regions that survived masking/smoothing) into
    out_dir. intermediate workbench files are removed even if a step fails.
    """
    if include_list and exclude_list:
        raise ValueError("please provide either 'include_list' or 'exclude_list', not both.")

    os.makedirs(out_dir, exist_ok=True)

    # define intermediate and output paths
    labeled_nii = os.path.join(out_dir, 'temp_labeled.nii.gz')
    lh_gii = os.path.join(out_dir, 'lh_temp.label.gii')
    rh_gii = os.path.join(out_dir, 'rh_temp.label.gii')
    out_csv = os.path.join(out_dir, f'{atlasname}.csv')
    out_lut = os.path.join(out_dir, f'{atlasname}.txt')

    try:
        # fetch standard fsLR32k surfaces and masks via yabplot data system
        print("fetching standard surfaces...")
        lh_mid, rh_mid = get_surface_paths('midthickness', 'bmesh')
        lh_white, rh_white = get_surface_paths('white', 'bmesh')
        lh_pial, rh_pial = get_surface_paths('pial', 'bmesh')
        lh_mask_path, rh_mask_path = get_surface_paths('nomedialwall', 'label')

        # run wb_command wrappers
        print("running volume-to-surface projection...")
        run_wb_import(nii_path, wb_txt_path, labeled_nii)
        run_wb_projection(labeled_nii, lh_mid, lh_gii, lh_white, lh_pial)
        run_wb_projection(labeled_nii, rh_mid, rh_gii, rh_white, rh_pial)

        # extract LUT and apply include/exclude filtering
        labels_dict = nib.load(lh_gii).labeltable.get_labels_as_dict()
        valid_ids = []
        lut_dict = {}

        for rid, name in labels_dict.items():
            # id 0 / '???' is the workbench "unlabeled" entry, never a region
            if rid == 0 or name == '???':
                continue

            # filter logic (mutually exclusive, validated above)
            if include_list:
                if not any(inc in name for inc in include_list):
                    continue
            elif exclude_list:
                if any(exc in name for exc in exclude_list):
                    continue

            clean_name = name.replace(' ', '_').replace('/', '-')
            # deterministic per-region color WITHOUT mutating the global RNG:
            # RandomState(rid) reproduces exactly what np.random.seed(rid)
            # followed by np.random.randint used to yield.
            r, g, b = np.random.RandomState(rid).randint(50, 255, 3)

            # store the string in the dictionary instead of writing to a file yet
            lut_dict[rid] = f"{rid}  {clean_name}  {r}  {g}  {b}  0"
            valid_ids.append(rid)

        print(f"found {len(valid_ids)} initial cortical regions. mapping and cleaning...")

        # merge LH and RH, then apply masks
        data = np.concatenate([
            nib.load(lh_gii).darrays[0].data.astype(int).flatten(),
            nib.load(rh_gii).darrays[0].data.astype(int).flatten()
        ])

        mask = np.concatenate([
            nib.load(lh_mask_path).darrays[0].data.astype(int).flatten() != 0,
            nib.load(rh_mask_path).darrays[0].data.astype(int).flatten() != 0
        ])

        # zero out medial wall and any region filtered from the LUT
        data[~mask] = 0
        data[~np.isin(data, valid_ids)] = 0

        # build adjacency and run hole-filling
        print("building surface adjacency and filling holes...")
        adj = sp.block_diag((_build_adjacency(lh_mid), _build_adjacency(rh_mid))).tocsr()
        adj.setdiag(1)  # include each vertex's own label in its neighborhood vote
        n_vert = len(data)

        # iteratively fill unlabeled cortex vertices with the majority label of
        # their neighbors (capped at 20 passes to bound worst-case runtime)
        for _ in range(20):
            holes = (data == 0) & mask
            if not np.any(holes):
                break

            unique, inv = np.unique(data, return_inverse=True)
            one_hot = sp.coo_matrix((np.ones(n_vert), (np.arange(n_vert), inv)), shape=(n_vert, len(unique))).tocsr()
            votes = (adj @ one_hot).toarray()

            # never let "unlabeled" win a vote; holes must take a real label
            zero_idx = np.where(unique == 0)[0][0]
            votes[:, zero_idx] = 0

            winner = np.argmax(votes, axis=1)
            fill_vals = unique[winner]
            data[holes] = fill_vals[holes]

        # smooth final boundaries via repeated neighborhood majority vote
        print("smoothing boundaries...")
        for _ in range(10):
            unique, inv = np.unique(data, return_inverse=True)
            one_hot = sp.coo_matrix((np.ones(n_vert), (np.arange(n_vert), inv)), shape=(n_vert, len(unique))).tocsr()
            winner = np.argmax((adj @ one_hot).toarray(), axis=1)
            data = unique[winner]
            data[~mask] = 0  # re-apply the medial-wall mask each pass

        # save the final vertex map
        np.savetxt(out_csv, data, fmt='%i')

        # find out which regions actually survived the smoothing/masking
        # (set gives O(1) membership tests in the loop below)
        surviving_ids = set(np.unique(data).tolist())

        # filter the LUT lines to only include survivors
        final_lines = []
        dropped_count = 0

        for rid, line_str in lut_dict.items():
            if rid in surviving_ids:
                final_lines.append(line_str)
            else:
                region_name = line_str.split()[1]
                print(f"[WARNING] {region_name} (id {rid}) lost during smoothing/masking. dropping from lut.")
                dropped_count += 1

        # write the perfectly clean file (with a POSIX-style trailing newline)
        with open(out_lut, 'w') as f:
            f.write("\n".join(final_lines) + "\n")

        print(f"final polished atlas saved to: {out_dir}")
        print(f"saved {len(final_lines)} regions ({dropped_count} empty regions dropped).")
    finally:
        # cleanup intermediate Workbench files even if a step above failed
        for temp_file in (labeled_nii, lh_gii, rh_gii):
            if os.path.exists(temp_file):
                os.remove(temp_file)

yabplot.build_subcortical_atlas(nii_path, labels_dict, out_dir, include_list=None, exclude_list=None, smooth_i=15, smooth_f=0.6)

extracts 3D subcortical meshes from a volumetric nifti atlas.

uses the marching cubes algorithm to generate 3D surface meshes for specific regions, applies laplacian smoothing to remove voxel artifacts, and saves them as .vtk files.

Parameters:

Name Type Description Default
nii_path str

absolute path to the 3D nifti volume.

required
labels_dict dict

dictionary mapping integer region IDs to string names (e.g., {1: 'thalamus_l'}).

required
out_dir str

directory where the .vtk mesh files will be saved.

required
include_list list of str

keywords of regions to strictly include. all other regions are ignored.

None
exclude_list list of str

keywords of regions to strictly exclude. all other regions are kept.

None
smooth_i int

number of iterations for laplacian mesh smoothing. default is 15.

15
smooth_f float

relaxation factor for laplacian mesh smoothing (0.0 to 1.0). default is 0.6.

0.6

Raises:

Type Description
ValueError

if both include_list and exclude_list are provided.

Source code in yabplot/atlas_builder.py
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
def build_subcortical_atlas(nii_path, labels_dict, out_dir, include_list=None, exclude_list=None,
                            smooth_i=15, smooth_f=0.6):
    """
    extracts 3D subcortical meshes from a volumetric nifti atlas.

    uses the marching cubes algorithm to generate 3D surface meshes for specific 
    regions, applies laplacian smoothing to remove voxel artifacts, and saves them as .vtk files.

    parameters
    ----------
    nii_path : str
        absolute path to the 3D nifti volume.
    labels_dict : dict
        dictionary mapping integer region IDs to string names (e.g., {1: 'thalamus_l'}).
    out_dir : str
        directory where the .vtk mesh files will be saved.
    include_list : list of str, optional
        keywords of regions to strictly include. all other regions are ignored.
    exclude_list : list of str, optional
        keywords of regions to strictly exclude. all other regions are kept.
    smooth_i : int, optional
        number of iterations for laplacian mesh smoothing. default is 15.
    smooth_f : float, optional
        relaxation factor for laplacian mesh smoothing (0.0 to 1.0). default is 0.6.

    raises
    ------
    ValueError
        if both include_list and exclude_list are provided.

    notes
    -----
    region names are sanitized (spaces -> '_', '/' -> '-') before being used as
    filenames, matching the cleaning done by build_cortical_atlas.
    """
    if include_list and exclude_list:
        raise ValueError("please provide either 'include_list' or 'exclude_list', not both.")

    os.makedirs(out_dir, exist_ok=True)

    # apply the include/exclude filters to the provided dictionary
    targets = {}
    for rid, name in labels_dict.items():
        if include_list:
            if not any(inc in name for inc in include_list):
                continue
        elif exclude_list:
            if any(exc in name for exc in exclude_list):
                continue

        targets[rid] = name

    print(f"filtered down to {len(targets)} subcortical regions to extract.")

    # load the nifti volume and its affine matrix (voxel -> world/MNI coords)
    img = nib.load(nii_path)
    data = img.get_fdata()
    affine = img.affine

    # loop through targets, extract meshes, and save
    for rid, name in targets.items():
        # create a binary mask for just this region
        mask = (data == rid).astype(np.uint8)

        # skip if empty (marching_cubes would fail on an all-zero volume);
        # .any() short-circuits instead of summing the whole array
        if not mask.any():
            print(f"[WARNING] {name} is empty in the volume!")
            continue

        print(f"extracting: {name} (id {rid})...")

        # run marching cubes to get raw vertices and faces
        # (normals/values are discarded; pyvista recomputes normals below)
        verts, faces, _, _ = measure.marching_cubes(mask, level=0.5)

        # apply the nifti affine matrix cleanly using nibabel
        verts_mni = nib.affines.apply_affine(affine, verts)

        # format faces for pyvista: [n_points, p1, p2, p3, n_points, p1, p2, p3...]
        faces_pv = np.column_stack((np.full(len(faces), 3), faces)).flatten()

        # create the 3d pyvista mesh
        mesh = pv.PolyData(verts_mni, faces_pv)

        # apply laplacian smoothing to melt away the blocky voxel edges
        mesh = mesh.smooth(n_iter=smooth_i, relaxation_factor=smooth_f)
        mesh.compute_normals(inplace=True)

        # we remove super small structures which would not be visible
        if mesh.n_points < 4 or abs(mesh.volume) < 0.01:
            print(f"[WARNING] {name} is too small to form a 3D mesh (volume: {abs(mesh.volume):.4f} mm³). dropping from atlas.")
            continue

        # sanitize the name so it is a safe, flat filename (a raw '/' would
        # otherwise create/require a subdirectory); mirrors build_cortical_atlas
        clean_name = name.replace(' ', '_').replace('/', '-')

        # save as a vtk file
        out_file = os.path.join(out_dir, f"{clean_name}.vtk")
        mesh.save(out_file)

    print(f"\nsubcortical atlas successfully saved to: {out_dir}")