Skip to content

Commit

Permalink
Merge pull request xsuite#2 from xsuite/2023_pp_reference_run
Browse files Browse the repository at this point in the history
2023 pp reference run
  • Loading branch information
andreafornara authored Jul 18, 2023
2 parents fe55e20 + 3c90aa6 commit 9a42939
Show file tree
Hide file tree
Showing 8 changed files with 243 additions and 40 deletions.
19 changes: 19 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -34,3 +34,22 @@ master_study/submission_files/*
# Old analysis
master_study/old/*

master_study/master_jobs/1_build_distr_and_collider/tree_maker.log
master_study/master_jobs/1_build_distr_and_collider/collider/collider.json
master_study/master_jobs/1_build_distr_and_collider/particles/00.parquet
master_study/master_jobs/1_build_distr_and_collider/particles/01.parquet
master_study/master_jobs/1_build_distr_and_collider/particles/02.parquet
master_study/master_jobs/1_build_distr_and_collider/particles/03.parquet
master_study/master_jobs/1_build_distr_and_collider/particles/04.parquet
master_study/master_jobs/1_build_distr_and_collider/particles/05.parquet
master_study/master_jobs/1_build_distr_and_collider/particles/06.parquet
master_study/master_jobs/1_build_distr_and_collider/particles/07.parquet
master_study/master_jobs/1_build_distr_and_collider/particles/08.parquet
master_study/master_jobs/1_build_distr_and_collider/particles/09.parquet
master_study/master_jobs/1_build_distr_and_collider/particles/10.parquet
master_study/master_jobs/1_build_distr_and_collider/particles/11.parquet
master_study/master_jobs/1_build_distr_and_collider/particles/12.parquet
master_study/master_jobs/1_build_distr_and_collider/particles/13.parquet
master_study/master_jobs/1_build_distr_and_collider/particles/14.parquet
master_study/master_jobs/2_configure_and_track/collider.json
master_study/master_jobs/2_configure_and_track/output_particles.parquet
4 changes: 4 additions & 0 deletions make_miniforge.sh
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,10 @@ git clone https://github.com/xsuite/tree_maker.git
python -m pip install -e tree_maker
git clone https://github.com/xsuite/xmask.git
pip install -e xmask
cd xmask
git submodule init
git submodule update
cd ..
cd ..
xsuite-prebuild

Original file line number Diff line number Diff line change
Expand Up @@ -28,38 +28,38 @@ def build_sequence(
""")

mad.input("""
! Slice nominal sequence
myslice: macro = {
slicefactor=4;
myslice_plus: macro = {
if (MBX.4L2->l>0) {
select, flag=makethin, clear;
select, flag=makethin, class=mb, slice=2;
select, flag=makethin, class=mq, slice=2;
select, flag=makethin, class=mqxa, slice=16; !old triplet
select, flag=makethin, class=mqxb, slice=16; !old triplet
select, flag=makethin, class=mqxc, slice=16; !new mqxa (q1,q3)
select, flag=makethin, class=mqxd, slice=16; !new mqxb (q2a,q2b)
select, flag=makethin, class=mqxfa, slice=16; !new (q1,q3 v1.1)
select, flag=makethin, class=mqxfb, slice=16; !new (q2a,q2b v1.1)
select, flag=makethin, class=mq, slice=2*slicefactor;
select, flag=makethin, class=mqxa, slice=16*slicefactor; !old triplet
select, flag=makethin, class=mqxb, slice=16*slicefactor; !old triplet
select, flag=makethin, class=mqxc, slice=16*slicefactor; !new mqxa (q1,q3)
select, flag=makethin, class=mqxd, slice=16*slicefactor; !new mqxb (q2a,q2b)
select, flag=makethin, class=mqxfa, slice=16*slicefactor; !new (q1,q3 v1.1)
select, flag=makethin, class=mqxfb, slice=16*slicefactor; !new (q2a,q2b v1.1)
select, flag=makethin, class=mbxa, slice=4; !new d1
select, flag=makethin, class=mbxf, slice=4; !new d1 (v1.1)
select, flag=makethin, class=mbrd, slice=4; !new d2 (if needed)
select, flag=makethin, class=mqyy, slice=4; !new q4
select, flag=makethin, class=mqyl, slice=4; !new q5
select, flag=makethin, class=mqyy, slice=4*slicefactor; !new q4
select, flag=makethin, class=mqyl, slice=4*slicefactor; !new q5
select, flag=makethin, class=mbh, slice=4; !11T dipoles
select, flag=makethin, pattern=mbx\., slice=4;
select, flag=makethin, pattern=mbrb\., slice=4;
select, flag=makethin, pattern=mbrc\., slice=4;
select, flag=makethin, pattern=mbrs\., slice=4;
select, flag=makethin, pattern=mbh\., slice=4;
select, flag=makethin, pattern=mqwa\., slice=4;
select, flag=makethin, pattern=mqwb\., slice=4;
select, flag=makethin, pattern=mqy\., slice=4;
select, flag=makethin, pattern=mqm\., slice=4;
select, flag=makethin, pattern=mqmc\., slice=4;
select, flag=makethin, pattern=mqml\., slice=4;
select, flag=makethin, pattern=mqtlh\., slice=2;
select, flag=makethin, pattern=mqtli\., slice=2;
select, flag=makethin, pattern=mqwa\., slice=4*slicefactor;
select, flag=makethin, pattern=mqwb\., slice=4*slicefactor;
select, flag=makethin, pattern=mqy\., slice=4*slicefactor;
select, flag=makethin, pattern=mqm\., slice=4*slicefactor;
select, flag=makethin, pattern=mqmc\., slice=4*slicefactor;
select, flag=makethin, pattern=mqml\., slice=4*slicefactor;
select, flag=makethin, pattern=mqtlh\., slice=2*slicefactor;
select, flag=makethin, pattern=mqtli\., slice=2*slicefactor;
select, flag=makethin, pattern=mqt\. , slice=2;
!thin lens
if (version >= 50208) { !
Expand All @@ -78,13 +78,13 @@ def build_sequence(
is_thin=1;
};
exec, myslice;
exec, myslice_plus;
""")

mad.input(f"""
nrj=450;
beam,particle=proton,sequence=lhcb1,energy=nrj,npart=1.15E11,sige=4.5e-4;
beam,particle=proton,sequence=lhcb2,energy=nrj,bv = -1,npart=1.15E11,sige=4.5e-4;
beam,particle=proton,sequence=lhcb1,energy=nrj,npart=1.6E11,sige=4.5e-4;
beam,particle=proton,sequence=lhcb2,energy=nrj,bv = -1,npart=1.6E11,sige=4.5e-4;
""")

if not ignore_cycling:
Expand Down
45 changes: 45 additions & 0 deletions master_study/master_jobs/1_build_distr_and_collider/test_it.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
import xtrack as xt
import numpy as np
import pandas as pd

# Smoke-test script for step 1: load the freshly built collider and inspect
# the optics at the main interaction points.
collider = xt.Multiline.from_json('./collider/collider.json')

collider.build_trackers()


# Set the experiment spectrometer knobs and verify the polarity is +/-1.
collider.vars['on_alice_normalized'] = 1
collider.vars['on_lhcb_normalized'] = 1

assert np.abs(collider.vars['on_alice_normalized']._value) == 1
assert np.abs(collider.vars['on_lhcb_normalized']._value) == 1


# Twiss both beams; beam 2 is reversed so both tables share the same frame.
twiss_b1 = collider['lhcb1'].twiss()
twiss_b2 = collider['lhcb2'].twiss().reverse()

# Surveys are computed as an additional smoke check (results not used below).
survey_b1 = collider['lhcb1'].survey()
survey_b2 = collider['lhcb2'].survey().reverse()

for my_knob in ['on_alice_normalized', 'on_lhcb_normalized']:
    print(f'*****************\nValues for {my_knob} (polarity):')
    print(collider.vars[my_knob]._value)
    print('*****************\n')

# Print a side-by-side (B1 vs B2) optics summary at each IP.
for my_ip in [1, 2, 5, 8]:
    print(f'*****************\nValues for IP{my_ip}:')
    my_df = []
    for my_table, my_beam in zip([twiss_b1, twiss_b2], [1, 2]):
        my_df.append(my_table[
            ['x', 'y', 'px', 'py', 'betx', 'bety', 'alfx', 'alfy'],
            f'ip{my_ip}'].to_pandas())
        my_df[-1].index = [f'B{my_beam}']
    print(pd.concat(my_df, axis=0).transpose())
    print('*****************\n')

# NOTE(review): removed dead trailing lines that re-used the loop variables
# `my_table`/`my_ip` after the loop, built a one-row frame and discarded it
# (leftover notebook-cell residue with no effect in a script).

10 changes: 5 additions & 5 deletions master_study/master_jobs/2_configure_and_track/config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -86,8 +86,8 @@ config_collider:
ip2: 20
ip5: 25
ip8: 20
sigma_z: 0.0761
num_particles_per_bunch: 1.15e11
sigma_z: 0.09 # 0.0761, increased for the TDIS
num_particles_per_bunch: 1.6e11
nemitt_x: 2.2e-6
nemitt_y: 2.2e-6
mask_with_filling_pattern:
Expand All @@ -103,7 +103,7 @@ config_collider:
# constraints:
# max_intensity: 1.8e11
# max_PU: 70

skip_leveling: true
config_lumi_leveling:
ip2:
Expand Down Expand Up @@ -139,8 +139,8 @@ config_collider:
- corr_co_acbcvs5.r2b1

ip8:
luminosity: 2.0e+32
num_colliding_bunches: null # This will be set automatically according to the filling scheme
luminosity: 2.0e+31
num_colliding_bunches: 2112 # hard-coded for this run; normally set automatically from the filling scheme
impose_separation_orthogonal_to_crossing: true
knobs:
- on_sep8h
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -185,20 +185,24 @@ def do_levelling(config_collider, config_bb, n_collisions_ip8, collider, n_colli
)
initial_I = config_bb["num_particles_per_bunch"]
config_bb["num_particles_per_bunch"] = I
else:
initial_I = config_bb["num_particles_per_bunch"]
I = initial_I


# Then level luminosity in IP 2/8 changing the separation
additional_targets_lumi = []
if "constraints" in config_lumi_leveling["ip8"]:
for constraint in config_lumi_leveling["ip8"]["constraints"]:
obs, beam, sign, val, at = constraint.split("_")
target = xt.TargetInequality(obs, sign, float(val), at=at, line=beam, tol=1e-6)
additional_targets_lumi.append(target)
luminosity_leveling(
collider,
config_lumi_leveling=config_lumi_leveling,
config_beambeam=config_bb,
additional_targets_lumi=additional_targets_lumi,
)
# if "constraints" in config_lumi_leveling["ip8"]:
# for constraint in config_lumi_leveling["ip8"]["constraints"]:
# obs, beam, sign, val, at = constraint.split("_")
# target = xt.TargetInequality(obs, sign, float(val), at=at, line=beam, tol=1e-6)
# additional_targets_lumi.append(target)
# luminosity_leveling(
# collider,
# config_lumi_leveling=config_lumi_leveling,
# config_beambeam=config_bb,
# additional_targets_lumi=additional_targets_lumi,
# )

# Update configuration
config_bb["num_particles_per_bunch_after_optimization"] = I
Expand Down
131 changes: 131 additions & 0 deletions master_study/master_jobs/2_configure_and_track/test_it.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,131 @@
# %%
import xtrack as xt
import numpy as np
import pandas as pd

# add current folder to path
# import sys
# sys.path.append(os.getcwd())


import configure_and_track as configure_and_track

# %% Load the collider produced by the configure-and-track step
collider = xt.Multiline.from_json('collider.json')

# %%
collider.build_trackers()

# %% Twiss both beams; beam 2 is reversed into the beam-1 reference frame
# collider.vars['beambeam_scale'] = 0
twiss_b1 = collider['lhcb1'].twiss()
twiss_b2 = collider['lhcb2'].twiss().reverse()

# %% Survey around each IP (kept for geometric cross-checks)
survey_b1 = {}
survey_b2 = {}

for my_ip in [1,2,5,8]:
    print(f'Survey for IP{my_ip}...')
    survey_b1[f'ip{my_ip}'] = collider['lhcb1'].survey(element0=f'ip{my_ip}')
    survey_b2[f'ip{my_ip}'] = collider['lhcb2'].survey(element0=f'ip{my_ip}').reverse()
# collider.vars['beambeam_scale'] = 1

# %% filling scheme computation
config, config_sim, config_collider = configure_and_track.read_configuration()

filling_scheme = (config_collider['config_beambeam']
                    ['mask_with_filling_pattern']
                    ['pattern_fname'])

b1_bunch_to_track = (config_collider['config_beambeam']
                    ['mask_with_filling_pattern']
                    ['i_bunch_b1'])
b2_bunch_to_track = (config_collider['config_beambeam']
                    ['mask_with_filling_pattern']
                    ['i_bunch_b2'])
import fillingpatterns as fp
bb_schedule = fp.FillingPattern.from_json(filling_scheme)
bb_schedule.b1.n_bunches
bb_schedule.b2.n_bunches

bb_schedule.n_coll_ATLAS
bb_schedule.n_coll_LHCb
bb_schedule.n_coll_ALICE

# Build the beam-beam encounter schedule (25 long-range encounters per side)
bb_schedule.compute_beam_beam_schedule(
    n_lr_per_side=25)

# Suggest, for each beam, the bunch with the richest collision pattern
for ii,zz in zip([bb_schedule.b1,bb_schedule.b2],['Beam 1','Beam 2']):
    my_bb_schedule= ii.bb_schedule.sort_values(by=['collides in ATLAS/CMS',
                                                'collides in LHCB',
                                                'collides in ALICE',
                                                '# of LR in ATLAS/CMS',
                                                '# of LR in ALICE',
                                                '# of LR in LHCB',
                                                ], ascending=False)

    print(f'Suggested bunch ID for {zz}: {my_bb_schedule.index[0]}')
# %%
bb_schedule_b1 = bb_schedule.b1.bb_schedule.loc[b1_bunch_to_track]
bb_schedule_b2 = bb_schedule.b2.bb_schedule.loc[b2_bunch_to_track]

print('\nBunch to track in Beam 1:')
print(bb_schedule_b1)
print('\nBunch to track in Beam 2:')
print(bb_schedule_b2)

# %% Compute the luminosity
from xtrack import lumi
# FIX: the original asserted twiss_b1.T_rev0 == twiss_b1.T_rev0 (a tautology
# that can never fail); the intent is clearly to check that both beams share
# the same revolution period before combining their twiss tables.
assert twiss_b1.T_rev0 == twiss_b2.T_rev0

for ii, colliding_bunches in zip(['ip1','ip2','ip5','ip8'],
                                [bb_schedule.n_coll_ATLAS,
                                 bb_schedule.n_coll_ALICE,
                                 bb_schedule.n_coll_ATLAS,
                                 bb_schedule.n_coll_LHCb]):
    aux = lumi.luminosity_from_twiss(
        colliding_bunches,
        config_collider['config_beambeam']['num_particles_per_bunch'],
        ii,
        config_collider['config_beambeam']['nemitt_x'],
        config_collider['config_beambeam']['nemitt_y'],
        config_collider['config_beambeam']['sigma_z'],
        twiss_b1,
        twiss_b2,
        crab=False,
    )

    sigma_tot = 81e-27 # cm^2 — presumably the inelastic pp cross-section; verify
    print(f'Luminosity in {ii}: {aux:.2e} cm^-2 s^-1')
    # compute pile-up from luminosity
    print(f'Pile-up in {ii}: {aux*sigma_tot/colliding_bunches*twiss_b1.T_rev0:.2e}\n')
# %% Print the experiment polarity knobs
for my_ip in ['on_alice_normalized','on_lhcb_normalized']:
    print(f'*****************\nValues for {my_ip} (polarity):')
    print(collider.vars[my_ip]._value)
    print(f'*****************\n')

# %% Side-by-side (B1 vs B2) optics summary at each IP
for my_ip in [1,2,5,8]:
    print(f'*****************\nValues for IP{my_ip}:')
    my_df = []
    for my_table, my_beam in zip([twiss_b1, twiss_b2],[1,2]):
        my_df.append(my_table[
            ['x', 'y', 'px', 'py', 'betx', 'bety', 'alfx', 'alfy'],
            f'ip{my_ip}'].to_pandas())
        my_df[-1].index = [f'B{my_beam}']
    print(pd.concat(my_df, axis=0).transpose())
    print(f'*****************\n')

# %% List independent variables whose name mentions 'beam'
for ii in collider.vars.get_independent_vars():
    if 'beam' in ii:
        print(ii)
# %%
collider.vars['beambeam_scale']._value
# %% Knobs with a single dependant target — presumably only the var itself,
# i.e. knobs that drive nothing; confirm against xdeps semantics.
for ii in config_collider['config_knobs_and_tuning']['knob_settings'].keys():
    if len(collider.vars[ii]._find_dependant_targets())==1:
        print(ii)
# %%
4 changes: 2 additions & 2 deletions master_study/master_jobs/2_configure_and_track/tree_maker.log
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
{
"started": 1687938027145708032,
"completed": 1687938272984875008
"started": 1689604462232194816,
"completed": 1689604681994530048
}

0 comments on commit 9a42939

Please sign in to comment.