JACoW is a publisher based in Geneva, Switzerland. It publishes the proceedings of accelerator conferences held around the world, produced by an international collaboration of editors.
@inproceedings{scheinker:napac2022-thxd2,
  author       = {A. Scheinker},
  title        = {{6D Phase Space Diagnostics Based on Adaptive Tuning of the Latent Space of Encoder-Decoder Convolutional Neural Networks}},
  booktitle    = {Proc. NAPAC'22},
% booktitle    = {Proc. 5th Int. Particle Accel. Conf. (NAPAC'22)},
  pages        = {837--841},
  eid          = {THXD2},
  language     = {english},
  keywords     = {controls, solenoid, feedback, network, electron},
  venue        = {Albuquerque, NM, USA},
  series       = {International Particle Accelerator Conference},
  number       = {5},
  publisher    = {JACoW Publishing, Geneva, Switzerland},
  month        = {10},
  year         = {2022},
  issn         = {2673-7000},
  isbn         = {978-3-95450-232-5},
  doi          = {10.18429/JACoW-NAPAC2022-THXD2},
  url          = {https://jacow.org/napac2022/papers/thxd2.pdf},
  abstract     = {{We present a general approach to 6D phase space diagnostics for charged particle beams based on adaptively tuning the low-dimensional latent space of generative encoder-decoder convolutional neural networks (CNN). Our approach first trains the CNN based on supervised learning to learn the correlations and physics constraints within a given accelerator system. The input of the CNN is a high-dimensional collection of 2D phase space projections of the beam at the accelerator entrance together with a vector of accelerator parameters such as magnet and RF settings. The inputs are squeezed down to a low-dimensional latent space from which we generate the output in the form of projections of the beam’s 6D phase space at various accelerator locations. After training, the CNN is applied in an unsupervised adaptive manner by comparing a subset of the output predictions to available measurements, with the error guiding feedback directly in the low-dimensional latent space. We show that our approach is robust to unseen time variation of the input beam and accelerator parameters, and we present a study of the robustness of the method beyond the span of the training data.}},
}
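The abstract describes a two-stage procedure: supervised training of an encoder-decoder network, followed by adaptive tuning of its low-dimensional latent vector against a subset of measurements with the weights frozen. The following Python sketch illustrates only that general idea, not the paper's implementation: the layer sizes, synthetic data, and the use of plain gradient descent on the latent vector (a stand-in for the paper's adaptive feedback) are all illustrative assumptions.

# Minimal sketch of adaptive latent-space tuning, assuming PyTorch.
# All shapes and the synthetic "measurement" are hypothetical; the real
# inputs are 2D phase space projections plus accelerator settings.
import torch
import torch.nn as nn

LATENT_DIM = 8

class EncoderDecoder(nn.Module):
    """Encoder squeezes the inputs into a low-dimensional latent vector;
    decoder generates downstream phase-space projections from it."""
    def __init__(self):
        super().__init__()
        self.encoder = nn.Sequential(nn.Linear(64, 32), nn.ReLU(),
                                     nn.Linear(32, LATENT_DIM))
        self.decoder = nn.Sequential(nn.Linear(LATENT_DIM, 32), nn.ReLU(),
                                     nn.Linear(32, 64))

    def forward(self, x):
        z = self.encoder(x)
        return self.decoder(z), z

model = EncoderDecoder()

# Stage 1: supervised training on simulated (input, projection) pairs.
opt = torch.optim.Adam(model.parameters(), lr=1e-3)
inputs = torch.randn(256, 64)    # stand-in for input projections + settings
targets = torch.randn(256, 64)   # stand-in for downstream 6D projections
for _ in range(200):
    pred, _ = model(inputs)
    loss = nn.functional.mse_loss(pred, targets)
    opt.zero_grad(); loss.backward(); opt.step()

# Stage 2: unsupervised adaptive tuning directly in the latent space.
# The network weights stay frozen; only the latent vector z is adjusted
# so the decoded prediction matches the measured subset of outputs.
for p in model.parameters():
    p.requires_grad_(False)

measurement = torch.randn(1, 64)  # hypothetical measured projections
z = model.encoder(torch.randn(1, 64)).detach().requires_grad_(True)
z_opt = torch.optim.Adam([z], lr=1e-2)
for _ in range(500):
    pred = model.decoder(z)
    # Compare only a subset of predictions to available measurements.
    err = nn.functional.mse_loss(pred[:, :16], measurement[:, :16])
    z_opt.zero_grad(); err.backward(); z_opt.step()

Because only the low-dimensional vector z is updated in stage 2, the trained decoder's learned correlations and physics constraints continue to shape every prediction while the feedback tracks unseen time variation of the beam.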