Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1,333 changes: 1,220 additions & 113 deletions Cargo.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pineappl/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ float-cmp = { default-features = false, version = "0.9.0" }
git-version = "0.3.5"
itertools = "0.10.1"
lz4_flex = "0.9.2"
ndarray = { features = ["serde"], version = "0.15.4" }
ndarray = { features = ["serde"], version = "0.16.1" }
# TODO: opt out of default features in this crate to match line above
pineappl-v0 = { package = "pineappl", version = "0.8.2" }
rayon = "1.5.1"
Expand Down
2 changes: 1 addition & 1 deletion pineappl/src/evolution.rs
Original file line number Diff line number Diff line change
Expand Up @@ -489,7 +489,7 @@ pub(crate) fn evolve_slice(

Ok((
Array1::from_iter(sub_fk_tables)
.into_shape((1, grid.bwfl().len(), channels0.len()))
.into_shape_with_order((1, grid.bwfl().len(), channels0.len()))
.unwrap(),
channels0
.into_iter()
Expand Down
2 changes: 1 addition & 1 deletion pineappl_capi/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ workspace = true
[dependencies]
pineappl = { path = "../pineappl", version = "=1.3.0" }
itertools = "0.10.1"
ndarray = "0.15.4"
ndarray = "0.16.1"

[features]
capi = []
Expand Down
5 changes: 3 additions & 2 deletions pineappl_cli/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,9 @@ git-version = "0.3.5"
itertools = "0.10.1"
lhapdf = { package = "managed-lhapdf", version = "0.3.4" }
lz4_flex = { optional = true, version = "0.9.2" }
ndarray = "0.15.4"
ndarray-npy = { default-features = false, features = ["npz"], optional = true, version = "0.8.1" }
neopdf = "=0.3.0-alpha2"
ndarray = "0.16.1"
ndarray-npy = { default-features = false, features = ["npz"], optional = true, version = "0.9.0" }
pineappl = { path = "../pineappl", version = "=1.3.0" }
pineappl_applgrid = { optional = true, path = "../pineappl_applgrid", version = "=1.3.0" }
pineappl_fastnlo = { optional = true, path = "../pineappl_fastnlo", version = "=1.3.0" }
Expand Down
10 changes: 6 additions & 4 deletions pineappl_cli/src/convolve.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,10 +58,11 @@ pub struct Opts {
impl Subcommand for Opts {
fn run(&self, cfg: &GlobalConfiguration) -> Result<ExitCode> {
let grid = helpers::read_grid(&self.input)?;
let mut conv_funs_0 = helpers::create_conv_funs(&self.conv_funs[0])?;
let mut conv_funs_0 =
helpers::create_conv_funs_with_backend(&self.conv_funs[0], cfg.pdf_backend)?;
let bins: Vec<_> = self.bins.iter().cloned().flatten().collect();

let results = helpers::convolve_scales(
let results = helpers::convolve_scales_with_backend(
&grid,
&mut conv_funs_0,
&self.conv_funs[0].conv_types,
Expand Down Expand Up @@ -91,8 +92,9 @@ impl Subcommand for Opts {
.iter()
.flat_map(|conv_funs| {
let conv_types = &conv_funs.conv_types;
let mut conv_funs = helpers::create_conv_funs(conv_funs).unwrap();
helpers::convolve(
let mut conv_funs =
helpers::create_conv_funs_with_backend(conv_funs, cfg.pdf_backend).unwrap();
helpers::convolve_with_backend(
&grid,
&mut conv_funs,
conv_types,
Expand Down
189 changes: 188 additions & 1 deletion pineappl_cli/src/helpers.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
use super::pdf_backend::{self, Backend, ForcePositive, PdfBackend, PdfSetBackend};
use super::GlobalConfiguration;
use anyhow::{anyhow, bail, Context, Error, Result};
use itertools::Itertools;
Expand Down Expand Up @@ -64,6 +65,7 @@ impl FromStr for ConvFuns {
}
}

/// Creates convolution functions using LHAPDF backend (legacy).
pub fn create_conv_funs(funs: &ConvFuns) -> Result<Vec<Pdf>> {
Ok(funs
.lhapdf_names
Expand All @@ -82,6 +84,22 @@ pub fn create_conv_funs(funs: &ConvFuns) -> Result<Vec<Pdf>> {
.collect::<Result<_, _>>()?)
}

/// Creates convolution functions using the specified backend.
///
/// One function is created per entry of `funs.lhapdf_names`; a missing member
/// index defaults to member `0` (conventionally the central member).
pub fn create_conv_funs_with_backend(
    funs: &ConvFuns,
    backend: Backend,
) -> Result<Vec<Box<dyn PdfBackend>>> {
    let mut conv_funs = Vec::with_capacity(funs.lhapdf_names.len());

    for (name, member) in funs.lhapdf_names.iter().zip(&funs.members) {
        let fun = pdf_backend::create_pdf(name, member.unwrap_or(0), backend)?;
        conv_funs.push(fun);
    }

    Ok(conv_funs)
}

/// Creates convolution functions for a PDF set using LHAPDF backend (legacy).
pub fn create_conv_funs_for_set(
funs: &ConvFuns,
index_of_set: usize,
Expand Down Expand Up @@ -115,6 +133,30 @@ pub fn create_conv_funs_for_set(
Ok((set, conv_funs))
}

/// Creates convolution functions for a PDF set using the specified backend.
///
/// Returns a tuple of (`PdfSetBackend`, `Vec<Vec<Box<dyn PdfBackend>>>`): the
/// set itself, plus one full list of convolution functions per set member,
/// where the entry at `index_of_set` is replaced by that member.
pub fn create_conv_funs_for_set_with_backend(
    funs: &ConvFuns,
    index_of_set: usize,
    backend: Backend,
) -> Result<(Box<dyn PdfSetBackend>, Vec<Vec<Box<dyn PdfBackend>>>)> {
    let set = pdf_backend::create_pdf_set(&funs.lhapdf_names[index_of_set], backend)?;

    let mut conv_funs = Vec::new();
    for member_pdf in set.mk_pdfs()? {
        // each member gets its own complete list of convolution functions,
        // with only the slot at `index_of_set` swapped for this member
        let mut member_funs = create_conv_funs_with_backend(funs, backend)?;
        member_funs[index_of_set] = member_pdf;
        conv_funs.push(member_funs);
    }

    Ok((set, conv_funs))
}

pub fn read_grid(input: &Path) -> Result<Grid> {
Grid::read(File::open(input).context(format!("unable to open '{}'", input.display()))?)
.context(format!("unable to read '{}'", input.display()))
Expand Down Expand Up @@ -238,6 +280,7 @@ pub enum ConvoluteMode {
Normal,
}

/// Performs convolution with scale variations using LHAPDF backend (legacy).
pub fn convolve_scales(
grid: &Grid,
conv_funs: &mut [Pdf],
Expand Down Expand Up @@ -321,8 +364,125 @@ pub fn convolve_scales(
match mode {
ConvoluteMode::Asymmetry => {
let bin_count = grid.bwfl().len();
assert!((bins.is_empty() || (bins.len() == bin_count)) && (bin_count % 2 == 0));

// calculating the asymmetry for a subset of bins doesn't work
results
.iter()
.skip((bin_count / 2) * scales.len())
.zip(
results
.chunks_exact(scales.len())
.take(bin_count / 2)
.rev()
.flatten(),
)
.map(|(pos, neg)| (pos - neg) / (pos + neg))
.collect()
}
ConvoluteMode::Integrated => {
results
.iter_mut()
.zip(
grid.bwfl()
.normalizations()
.into_iter()
.enumerate()
.filter(|(index, _)| bins.is_empty() || bins.contains(index))
.flat_map(|(_, norm)| iter::repeat(norm).take(scales.len())),
)
.for_each(|(value, norm)| *value *= norm);

results
}
ConvoluteMode::Normal => results,
}
}

/// Performs convolution with scale variations using the backend abstraction.
#[allow(clippy::too_many_arguments)]
pub fn convolve_scales_with_backend(
grid: &Grid,
conv_funs: &mut [Box<dyn PdfBackend>],
conv_types: &[ConvType],
orders: &[(u8, u8)],
bins: &[usize],
channels: &[bool],
scales: &[(f64, f64, f64)],
mode: ConvoluteMode,
cfg: &GlobalConfiguration,
) -> Vec<f64> {
let orders: Vec<_> = grid
.orders()
.iter()
.map(|order| {
orders.is_empty()
|| orders
.iter()
.any(|other| (order.alphas == other.0) && (order.alpha == other.1))
})
.collect();

if cfg.force_positive {
for fun in conv_funs.iter_mut() {
fun.set_force_positive(ForcePositive::ClipNegative);
}
}

// TODO: promote this to an error
assert!(
cfg.use_alphas_from < conv_funs.len(),
"expected `use_alphas_from` to be an integer within `[0, {})`, but got `{}`",
conv_funs.len(),
cfg.use_alphas_from
);

let x_min_max: Vec<_> = conv_funs
.iter_mut()
.map(|fun| (fun.x_min(), fun.x_max()))
.collect();

let mut funs: Vec<_> = conv_funs
.iter()
.zip(&x_min_max)
.map(|(fun, &(x_min, x_max))| {
move |id: i32, x: f64, q2: f64| {
if !cfg.allow_extrapolation && (x < x_min || x > x_max) {
0.0
} else {
fun.xfx_q2(id, x, q2)
}
}
})
.collect();

let xfx: Vec<_> = funs
.iter_mut()
.map(|fun| fun as &mut dyn FnMut(i32, f64, f64) -> f64)
.collect();

let mut alphas_funs: Vec<_> = conv_funs
.iter()
.map(|fun| {
let fun_ref = fun.as_ref();
move |q2: f64| fun_ref.alphas_q2(q2)
})
.collect();

let convolutions: Vec<_> = conv_funs
.iter()
.zip(conv_types)
.map(|(fun, &conv_type)| {
let pid = fun.particle_id();
Conv::new(conv_type, pid)
})
.collect();

let mut cache = ConvolutionCache::new(convolutions, xfx, &mut alphas_funs[cfg.use_alphas_from]);
let mut results = grid.convolve(&mut cache, &orders, bins, channels, scales);

match mode {
ConvoluteMode::Asymmetry => {
let bin_count = grid.bwfl().len();
assert!((bins.is_empty() || (bins.len() == bin_count)) && (bin_count % 2 == 0));

results
Expand Down Expand Up @@ -373,6 +533,7 @@ pub fn scales_vector(grid: &Grid, scales: usize) -> &[(f64, f64, f64)] {
}
}

/// Performs convolution using LHAPDF backend (legacy).
pub fn convolve(
grid: &Grid,
conv_funs: &mut [Pdf],
Expand All @@ -397,6 +558,32 @@ pub fn convolve(
)
}

/// Performs convolution using the backend abstraction.
///
/// Thin wrapper around [`convolve_scales_with_backend`] that expands the
/// integer `scales` choice into the corresponding list of scale-variation
/// factors via [`scales_vector`].
///
/// - `grid`: the grid to convolve
/// - `conv_funs`: one convolution function per convolution of the grid
/// - `conv_types`: the convolution type for each entry of `conv_funs`
/// - `orders`: perturbative orders to include (empty selects all)
/// - `bins`: bin indices to compute (empty selects all)
/// - `channels`: per-channel mask selecting which channels contribute
/// - `scales`: number of scale variations to compute
/// - `mode`: post-processing applied to the raw results
/// - `cfg`: global CLI configuration (alphas source, positivity, …)
#[allow(clippy::too_many_arguments)]
pub fn convolve_with_backend(
    grid: &Grid,
    conv_funs: &mut [Box<dyn PdfBackend>],
    conv_types: &[ConvType],
    orders: &[(u8, u8)],
    bins: &[usize],
    // renamed from `lumis` for consistency with `convolve_scales_with_backend`
    channels: &[bool],
    scales: usize,
    mode: ConvoluteMode,
    cfg: &GlobalConfiguration,
) -> Vec<f64> {
    convolve_scales_with_backend(
        grid,
        conv_funs,
        conv_types,
        orders,
        bins,
        channels,
        scales_vector(grid, scales),
        mode,
        cfg,
    )
}

pub fn convolve_limits(grid: &Grid, bins: &[usize], mode: ConvoluteMode) -> Vec<Vec<(f64, f64)>> {
let limits: Vec<_> = grid
.bwfl()
Expand Down
4 changes: 4 additions & 0 deletions pineappl_cli/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ mod helpers;
mod import;
mod merge;
mod orders;
pub mod pdf_backend;
mod plot;
mod pull;
mod read;
Expand Down Expand Up @@ -41,6 +42,9 @@ pub struct GlobalConfiguration {
/// Choose the PDF/FF set for the strong coupling.
#[arg(default_value = "0", long, value_name = "IDX")]
pub use_alphas_from: usize,
/// Select the PDF interpolation backend: 'lhapdf' or 'neopdf'.
#[arg(default_value = "lhapdf", long, value_name = "BACKEND")]
pub pdf_backend: pdf_backend::Backend,
}

#[enum_dispatch]
Expand Down
Loading
Loading