BibTeX format

@article{haug2023quantum,
  author  = {Haug, T. and Self, C. N. and Kim, M. S.},
  title   = {Quantum machine learning of large datasets using randomized measurements},
  journal = {Machine Learning: Science and Technology},
  volume  = {4},
  pages   = {1--17},
  year    = {2023},
  issn    = {2632-2153},
  doi     = {10.1088/2632-2153/acb0b4},
}

RIS format (EndNote, RefMan)

TY - JOUR
AB - Quantum computers promise to enhance machine learning for practical applications. Quantum machine learning for real-world data has to handle extensive amounts of high-dimensional data. However, conventional methods for measuring quantum kernels are impractical for large datasets as they scale with the square of the dataset size. Here, we measure quantum kernels using randomized measurements. The quantum computation time scales linearly with dataset size and quadratic for classical post-processing. While our method scales in general exponentially in qubit number, we gain a substantial speed-up when running on intermediate-sized quantum computers. Further, we efficiently encode high-dimensional data into quantum computers with the number of features scaling linearly with the circuit depth. The encoding is characterized by the quantum Fisher information metric and is related to the radial basis function kernel. Our approach is robust to noise via a cost-free error mitigation scheme. We demonstrate the advantages of our methods for noisy quantum computers by classifying images with the IBM quantum computer. To achieve further speedups we distribute the quantum computational tasks between different quantum computers. Our method enables benchmarking of quantum machine learning algorithms with large datasets on currently available quantum computers.
AU - Haug, T.
AU - Self, C. N.
AU - Kim, M. S.
DO - 10.1088/2632-2153/acb0b4
EP - 17
PY - 2023///
SN - 2632-2153
SP - 1
TI - Quantum machine learning of large datasets using randomized measurements
T2 - Machine Learning: Science and Technology
UR - https://doi.org/10.1088/2632-2153/acb0b4
VL - 4
ER -