Citation

BibTeX format

@article{Cheng:2026:10.1016/j.jcp.2025.114567,
author = {Cheng, CW and Huang, J and Zhang, Y and Yang, G and Schönlieb, CB and Aviles-Rivero, AI},
doi = {10.1016/j.jcp.2025.114567},
journal = {Journal of Computational Physics},
title = {Mamba neural operator: Who wins? Transformers vs. state-space models for PDEs},
url = {http://dx.doi.org/10.1016/j.jcp.2025.114567},
volume = {548},
year = {2026}
}

RIS format (EndNote, RefMan)

TY  - JOUR
AB  - Partial differential equations (PDEs) are widely used to model complex physical systems, but solving them efficiently remains a significant challenge. Recently, Transformers have emerged as the preferred architecture for PDEs due to their ability to capture intricate dependencies. However, they struggle with representing continuous dynamics and long-range interactions. To overcome these limitations, we introduce the Mamba Neural Operator (MNO), a novel framework that enhances neural operator-based techniques for solving PDEs. MNO establishes a formal theoretical connection between structured state-space models (SSMs) and neural operators, offering a unified structure that can adapt to diverse architectures, including Transformer-based models. By leveraging the structured design of SSMs, MNO captures long-range dependencies and continuous dynamics more effectively than traditional Transformers. Through extensive analysis, we show that MNO significantly boosts the expressive power and accuracy of neural operators, making it not just a complement but a superior framework for PDE-related tasks, bridging the gap between efficient representation and accurate solution approximation.
AU  - Cheng, CW
AU  - Huang, J
AU  - Zhang, Y
AU  - Yang, G
AU  - Schönlieb, CB
AU  - Aviles-Rivero, AI
DO  - 10.1016/j.jcp.2025.114567
PY  - 2026///
SN  - 0021-9991
TI  - Mamba neural operator: Who wins? Transformers vs. state-space models for PDEs
T2  - Journal of Computational Physics
UR  - http://dx.doi.org/10.1016/j.jcp.2025.114567
VL  - 548
ER  -