Add bunch monitor for longitudinal coordinates #504

Draft · wants to merge 6 commits into main
67 changes: 67 additions & 0 deletions tests/test_monitor.py
@@ -52,6 +52,7 @@ def test_constructor(test_context):
#xt.ParticlesMonitor(_context=test_context, start_at_turn=0, stop_at_turn=10, num_particles=42),
#xt.LastTurnsMonitor(_context=test_context, n_last_turns=5, num_particles=42),
xt.BeamPositionMonitor(_context=test_context, stop_at_turn=10),
xt.BunchMonitor(_context=test_context, stop_at_turn=10),
]

# test to_dict / from_dict
@@ -492,3 +493,69 @@ def test_beam_position_monitor(test_context):
expected_x_centroid[18:20] = np.nan
assert_allclose(monitor.x_cen, expected_x_centroid, err_msg="Monitor x centroid does not match expected values")
assert_allclose(monitor.y_cen, -expected_x_centroid, err_msg="Monitor y centroid does not match expected values")

@for_all_test_contexts
def test_bunch_monitor(test_context):

npart = 512 # must be even and >= 512
zeta = np.empty(npart+4) # generate a few more than we record to test "num_particles"
zeta[0:npart:2] = zeta1 = np.linspace(-0.2, 0.2, npart//2) # first bunch centered around 0
zeta[1:npart:2] = zeta2 = -0.5 + zeta1 # second bunch centered around -1/harmonic
zeta[npart:] = np.linspace(2,4,4)

particles = xp.Particles(
p0c=6.5e12,
zeta=zeta,
delta=1e-3*zeta,
_context=test_context,
)

monitor = xt.BunchMonitor(
num_particles=npart,
start_at_turn=0,
stop_at_turn=10,
frev=2.99792458e8,
harmonic=2,
_context=test_context,
)

line = xt.Line([monitor])
line.build_tracker(_context=test_context)

for turn in range(11): # track a few more than we record to test "stop_at_turn"
# Note that indices are re-ordered upon particle loss on CPU contexts,
# so sort before manipulation
if isinstance(test_context, xo.ContextCpu):
particles.sort(interleave_lost_particles=True)

# manipulate particles for testing
particles.zeta[0] += 0.005
particles.delta[0] += 1e-3*0.005
if turn == 8:
particles.state[256:] = 0
if turn == 9:
particles.state[:] = 0

if isinstance(test_context, xo.ContextCpu):
particles.reorganize()

# track and monitor
line.track(particles, num_turns=1)

# Check against expected values
expected_count = np.tile([npart//2, npart//2], 10)
expected_count[16:18] = 128
expected_count[18:20] = 0
assert_equal(monitor.count, expected_count, err_msg="Monitor count does not match expected particle count")

expected_zeta_sum = np.zeros(20)
expected_zeta_sum[0::2] = np.sum(zeta1) + 0.005*np.arange(1, 11)
expected_zeta_sum[1::2] = np.sum(zeta2)
expected_zeta_sum[16] -= np.sum(zeta1[128:])
expected_zeta_sum[17] -= np.sum(zeta2[128:])
expected_zeta_sum[18:] = 0
assert_allclose(monitor.zeta_sum, expected_zeta_sum, err_msg="Monitor zeta sum does not match expected values")
assert_allclose(monitor.delta_sum, 1e-3*expected_zeta_sum, err_msg="Monitor delta sum does not match expected values")

# TODO: test zeta_mean, delta_mean, zeta2_sum, delta2_sum, zeta_std, delta_std, zeta_var, delta_var
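
As a note on the TODO above: the derived quantities could at least be checked for self-consistency against the recorded sums. A minimal sketch (not part of this PR), assuming the property definitions from the monitor's docstring (mean = sum/count, var = mean of squares minus squared mean, std = sqrt(var)) and restricted to slots that still contain particles:

```python
# Hypothetical self-consistency check for the derived properties; analytic
# per-slot expectations for zeta2_sum etc. would still need to be worked out.
mask = monitor.count > 0
for coord in ("zeta", "delta"):
    count = monitor.count[mask]
    mean = getattr(monitor, coord + "_mean")[mask]
    var = getattr(monitor, coord + "_var")[mask]
    assert_allclose(mean, getattr(monitor, coord + "_sum")[mask] / count)
    assert_allclose(var, getattr(monitor, coord + "2_sum")[mask] / count - mean**2)
    assert_allclose(getattr(monitor, coord + "_std")[mask], var**0.5)
```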

1 change: 1 addition & 0 deletions xtrack/monitors/__init__.py
@@ -4,5 +4,6 @@
from .beam_position_monitor import *
from .beam_size_monitor import *
from .beam_profile_monitor import *
from .bunch_monitor import *

monitor_classes = tuple(v for v in globals().values() if isinstance(v, type) and issubclass(v, BeamElement))
71 changes: 71 additions & 0 deletions xtrack/monitors/bunch_monitor.h
@@ -0,0 +1,71 @@
// ##################################
// Bunch Monitor
//
// Author: Philipp Niedermayer, Cristopher Cortes
// Date: 2023-08-14
// Edit: 2024-06-12
// ##################################


#ifndef XTRACK_BUNCH_MONITOR_H
#define XTRACK_BUNCH_MONITOR_H

#if !defined( C_LIGHT )
#define C_LIGHT ( 299792458.0 )
#endif /* !defined( C_LIGHT ) */

/*gpufun*/
void BunchMonitor_track_local_particle(BunchMonitorData el, LocalParticle* part0){

// get parameters
int64_t const start_at_turn = BunchMonitorData_get_start_at_turn(el);
int64_t particle_id_start = BunchMonitorData_get_particle_id_start(el);
int64_t particle_id_stop = particle_id_start + BunchMonitorData_get_num_particles(el);
int64_t const harmonic = BunchMonitorData_get_harmonic(el);
double const frev = BunchMonitorData_get_frev(el);

BunchMonitorRecord record = BunchMonitorData_getp_data(el);

int64_t max_slot = BunchMonitorRecord_len_count(record);

//start_per_particle_block(part0->part)

int64_t particle_id = LocalParticle_get_particle_id(part);
if (particle_id_stop < 0 || (particle_id_start <= particle_id && particle_id < particle_id_stop)){

// zeta is the absolute path length deviation from the reference particle: zeta = (s - beta0*c*t)
// but without limits, i.e. it can exceed the circumference (for coasting beams)
// as the particle falls behind or overtakes the reference particle
double const zeta = LocalParticle_get_zeta(part);
double const at_turn = LocalParticle_get_at_turn(part);
double const beta0 = LocalParticle_get_beta0(part);

// compute sample index
int64_t slot = round( harmonic * ( (at_turn-start_at_turn) - frev * zeta/beta0/C_LIGHT ));
Member:
I'm thinking maybe you could use the circumference line.get_length() instead to simplify the slot calculation.
In principle, the zeta correction is not required if the monitor is only used for bunched beams and h≥1. But I think it's good to keep it to properly handle RF gymnastics like de- and re-bunching.

Contributor (Author):
I agree, maybe it could be useful in the future.


if (slot >= 0 && slot < max_slot){

double const delta = LocalParticle_get_delta(part);

/*gpuglmem*/ double* count = BunchMonitorRecord_getp1_count(record, slot);
atomicAdd(count, 1);

/*gpuglmem*/ double * zeta_sum = BunchMonitorRecord_getp1_zeta_sum(record, slot);
atomicAdd(zeta_sum, zeta);

/*gpuglmem*/ double * delta_sum = BunchMonitorRecord_getp1_delta_sum(record, slot);
atomicAdd(delta_sum, delta);

/*gpuglmem*/ double * zeta2_sum = BunchMonitorRecord_getp1_zeta2_sum(record, slot);
atomicAdd(zeta2_sum, zeta*zeta);

/*gpuglmem*/ double * delta2_sum = BunchMonitorRecord_getp1_delta2_sum(record, slot);
atomicAdd(delta2_sum, delta*delta);
}
}

//end_per_particle_block
}

#endif
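
As a note on the slot formula above: with the test's frev = c / (1 m), the term frev * zeta / (beta0 * c) is essentially zeta in metres, so the second bunch (zeta ≈ -0.5 m) lands in the odd slots. The sketch below, a hypothetical helper not part of this PR, works that arithmetic through and also shows the circumference-based form suggested in the review thread, assuming the circumference equals beta0*c/frev:

```python
import numpy as np

C_LIGHT = 299792458.0

def slot_index(at_turn, zeta, beta0, start_at_turn, harmonic, frev):
    # Same formula as in BunchMonitor_track_local_particle
    return int(np.round(harmonic * ((at_turn - start_at_turn) - frev * zeta / beta0 / C_LIGHT)))

def slot_index_circumference(at_turn, zeta, start_at_turn, harmonic, circumference):
    # Reviewer's suggested simplification: with circumference = beta0*c/frev,
    # both frev and beta0 drop out of the correction term
    return int(np.round(harmonic * ((at_turn - start_at_turn) - zeta / circumference)))

# Values as in test_bunch_monitor: frev = c (i.e. 1 m circumference), harmonic = 2
print(slot_index(at_turn=3, zeta=-0.5, beta0=1.0, start_at_turn=0, harmonic=2, frev=2.99792458e8))  # 7
print(slot_index_circumference(at_turn=3, zeta=-0.5, start_at_turn=0, harmonic=2, circumference=1.0))  # 7
```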

157 changes: 157 additions & 0 deletions xtrack/monitors/bunch_monitor.py
@@ -0,0 +1,157 @@
"""
Bunch Monitor

Author: Philipp Niedermayer, Cristopher Cortes
Date: 2023-08-14
Edit: 2024-06-12
"""

import numpy as np

import xobjects as xo

from ..base_element import BeamElement
from ..beam_elements import Marker
from ..internal_record import RecordIndex
from ..general import _pkg_root

class BunchMonitorRecord(xo.Struct):
count = xo.Float64[:]
zeta_sum = xo.Float64[:]
zeta2_sum = xo.Float64[:]
delta_sum = xo.Float64[:]
delta2_sum = xo.Float64[:]

class BunchMonitor(BeamElement):

_xofields={
'particle_id_start': xo.Int64,
'num_particles': xo.Int64,
'start_at_turn': xo.Int64,
'stop_at_turn': xo.Int64,
'frev': xo.Float64,
'harmonic': xo.Int64,
'_index': RecordIndex,
'data': BunchMonitorRecord,
}

behaves_like_drift = True
allow_loss_refinement = True

properties = [field.name for field in BunchMonitorRecord._fields]

_extra_c_sources = [
_pkg_root.joinpath('headers/atomicadd.h'),
_pkg_root.joinpath('monitors/bunch_monitor.h')
]

def __init__(self, *, particle_id_range=None, particle_id_start=None, num_particles=None,
start_at_turn=None, stop_at_turn=None, frev=None,
harmonic=None, _xobject=None, **kwargs):
"""
Monitor to save the longitudinal bunch position and size (mean and std of zeta) as well as the mean and std of the momentum deviation (delta)


The monitor allows for an arbitrary sampling rate and can thus not only be used to monitor
bunched beams, but also to record coasting beams. Internally, the particle arrival time
is used when determining the record index:

i = harmonic * ( ( at_turn - start_at_turn ) - f_rev * zeta / beta0 / c0 )

where zeta=(s-beta0*c0*t) is the longitudinal coordinate of the particle, beta0 the
relativistic beta factor of the particle, c0 is the speed of light, at_turn is the
current turn number, f_rev is the revolution frequency, and harmonic is the harmonic
number, i.e. the number of samples per revolution.

Note that the index is rounded, i.e. the result array represents data of particles
equally distributed around the reference particle. For example, if the harmonic is 2
(i.e. the sampling frequency is twice the revolution frequency), the first item contains
data from particles in the range zeta/circumference = -0.25 .. 0.25, the second item in
the range -0.75 .. -0.25 and so on.

The monitor mirrors the beam size monitor but is dedicated to the longitudinal coordinates.

The monitor provides the following data:
- `count` Number of particles per slot
- `zeta_mean`, `delta_mean` Centroid, i.e. mean of particle zeta [m] and delta [1]
- `zeta_std`, `delta_std` Standard deviation of particle zeta [m] and delta [1]
- `zeta_var`, `delta_var` Variance of particle zeta [m²] and delta [1] (= std**2)
- `zeta_sum`, `delta_sum` Sum of particle zeta [m] and delta [1] (= mean * count)
- `zeta2_sum`, `delta2_sum` Sum of squared particle zeta [m²] and delta [1] (= (std**2 + mean**2) * count)
each as an array of size:
size = int(( stop_at_turn - start_at_turn ) * harmonic)

Args:
num_particles (int, optional): Number of particles to monitor. Defaults to -1 which means ALL.
particle_id_start (int, optional): First particle id to monitor. Defaults to 0.
particle_id_range (tuple, optional): Range of particle ids to monitor (start, stop). Stop is exclusive.
Defaults to (particle_id_start, particle_id_start+num_particles).
start_at_turn (int): First turn of reference particle (inclusive) at which to monitor.
stop_at_turn (int): Last turn of reference particle (exclusive) at which to monitor.
frev (float): Revolution frequency in Hz of circulating beam (used to relate turn number to sample index).
harmonic (int): Harmonic of the revolution frequency.

"""
if _xobject is not None:
super().__init__(_xobject=_xobject)
else:
# dict parameters
if particle_id_range is None:
if particle_id_start is None:
particle_id_start = 0
if num_particles is None:
num_particles = -1
elif particle_id_start is None and num_particles is None:
particle_id_start = particle_id_range[0]
num_particles = particle_id_range[1] - particle_id_range[0]
else:
raise ValueError("Parameter `particle_id_range` must not be used together with `num_particles` and/or `particle_id_start`")
if start_at_turn is None:
start_at_turn = 0
if stop_at_turn is None:
stop_at_turn = 0
if frev is None:
frev = 1
if harmonic is None:
harmonic = 1

if "data" not in kwargs:
# explicitly init with zeros (instead of size only) to have consistent initial values
size = int(round(( stop_at_turn - start_at_turn ) * harmonic))
kwargs["data"] = {prop: np.zeros(size) for prop in self.properties}

super().__init__(particle_id_start=particle_id_start, num_particles=num_particles,
start_at_turn=start_at_turn, stop_at_turn=stop_at_turn, frev=frev,
harmonic=harmonic, **kwargs)

def __repr__(self):
return (
f"{type(self).__qualname__}(start_at_turn={self.start_at_turn}, stop_at_turn={self.stop_at_turn}, "
f"particle_id_start={self.particle_id_start}, num_particles={self.num_particles}, frev={self.frev}, "
f"harmonic={self.harmonic}) at {hex(id(self))}"
)

def __getattr__(self, attr):
if attr in self.properties:
return getattr(self.data, attr).to_nparray()

if attr in ('zeta_mean', 'delta_mean', 'zeta_cen', 'delta_cen', 'zeta_centroid', 'delta_centroid'):
with np.errstate(invalid='ignore'): # NaN for zero particles is expected behaviour
attri = attr.split('_')[0]
return getattr(self, attri+"_sum") / self.count

if attr in ('zeta_var', 'delta_var'):
with np.errstate(invalid='ignore'): # NaN for zero particles is expected behaviour
# var = mean(x^2) - mean(x)^2
attri = attr.split('_')[0]
return getattr(self, attri+"2_sum") / self.count - getattr(self, attri+"_mean")**2

if attr in ('zeta_std', 'delta_std'):
attri = attr.split('_')[0]
return getattr(self, attri+"_var")**0.5

return getattr(super(), attr)


def get_backtrack_element(self, _context=None, _buffer=None, _offset=None):
return Marker(_context=_context, _buffer=_buffer, _offset=_offset)
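
For reference, a minimal usage sketch of the new element (illustrative parameter values only; the construction mirrors the test above and assumes xpart is available as xp):

```python
import numpy as np
import xpart as xp
import xtrack as xt

# Record two bunch slots per turn (harmonic = 2) over 10 turns
monitor = xt.BunchMonitor(start_at_turn=0, stop_at_turn=10,
                          frev=2.99792458e8, harmonic=2)
line = xt.Line([monitor])
line.build_tracker()

particles = xp.Particles(p0c=6.5e12, zeta=np.linspace(-0.2, 0.2, 100))
line.track(particles, num_turns=10)

# One entry per bunch slot and turn: size = (stop_at_turn - start_at_turn) * harmonic
print(monitor.count)      # particles recorded per slot
print(monitor.zeta_mean)  # longitudinal centroid per slot [m]
print(monitor.zeta_std)   # bunch length per slot [m]
```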