diff --git a/interpol-rs/Cargo.toml b/interpol-rs/Cargo.toml
index 265c32c..32bc6c6 100644
--- a/interpol-rs/Cargo.toml
+++ b/interpol-rs/Cargo.toml
@@ -11,6 +11,7 @@ derive_builder = "0.10.2"
 lazy_static = "1.4.0"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
+rayon = "1.5.2"
 typetag = "0.1.8"
 
 [lib]
diff --git a/interpol-rs/src/interpol.rs b/interpol-rs/src/interpol.rs
index f3b27e1..4e37d04 100644
--- a/interpol-rs/src/interpol.rs
+++ b/interpol-rs/src/interpol.rs
@@ -1,9 +1,8 @@
-use lazy_static::lazy_static;
-use std::fs::File;
-use std::io::Write;
-use std::{collections::TryReserveError, sync::Mutex};
-
 use crate::mpi_events::{
+    collectives::{
+        mpi_ibcast::MpiIbcastBuilder, mpi_igather::MpiIgatherBuilder,
+        mpi_ireduce::MpiIreduceBuilder, mpi_iscatter::MpiIscatterBuilder,
+    },
     management::{
         mpi_finalize::MpiFinalizeBuilder, mpi_init::MpiInitBuilder,
         mpi_init_thread::MpiInitThreadBuilder,
@@ -13,17 +12,53 @@ use crate::mpi_events::{
         mpi_send::MpiSendBuilder,
     },
     synchronization::{
-        mpi_barrier::MpiBarrierBuilder, mpi_test::MpiTestBuilder, mpi_wait::MpiWaitBuilder,
+        mpi_barrier::MpiBarrierBuilder, mpi_ibarrier::MpiIbarrierBuilder, mpi_test::MpiTestBuilder,
+        mpi_wait::MpiWaitBuilder,
     },
 };
 use crate::types::{MpiComm, MpiRank, MpiReq, MpiTag, Tsc, Usecs};
+use crate::InterpolError;
+use lazy_static::lazy_static;
+use rayon::prelude::*;
+use std::fs::{self, File};
+use std::io::Write;
+use std::sync::Mutex;
+
+static INTERPOL_DIR: &str = "interpol-tmp";
 
 #[repr(transparent)]
 pub struct Trace(Mutex<Vec<Box<dyn Register>>>);
 
 #[typetag::serde(tag = "type")]
 pub trait Register: Send + Sync {
-    fn register(self, events: &mut Vec<Box<dyn Register>>) -> Result<(), TryReserveError>;
+    fn register(
+        self,
+        events: &mut Vec<Box<dyn Register>>,
+    ) -> Result<(), std::collections::TryReserveError>;
+
+    fn tsc(&self) -> Tsc;
+}
+
+#[macro_export]
+macro_rules! impl_register {
+    ($t:ty) => {
+        use crate::interpol::Register;
+        use std::collections::TryReserveError;
+
+        #[typetag::serde]
+        impl Register for $t {
+            fn register(self, events: &mut Vec<Box<dyn Register>>) -> Result<(), TryReserveError> {
+                // Ensure that the program does not panic if allocation fails
+                events.try_reserve_exact(2 * events.len())?;
+                events.push(Box::new(self));
+                Ok(())
+            }
+
+            fn tsc(&self) -> crate::types::Tsc {
+                self.tsc
+            }
+        }
+    };
 }
 
 lazy_static! {
@@ -51,239 +86,249 @@ lazy_static! {
     static ref EVENTS: Trace = Trace(Mutex::new(Vec::new()));
 }
 
-#[derive(Clone, Debug, PartialEq)]
-#[repr(C)]
-#[allow(dead_code)]
-enum MpiCallType {
+#[derive(Debug, PartialEq)]
+#[repr(i8)]
+pub enum MpiCallType {
     Init,
     Initthread,
     Finalize,
     Send,
-    Isend,
     Recv,
+    Isend,
     Irecv,
-    Wait,
     Test,
+    Wait,
     Barrier,
     Ibarrier,
-    Ibcast,
-    Ireduce,
+    Ibcast,
+    Igather,
+    Ireduce,
     Iscatter,
-    Igather
 }
 
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Debug, PartialEq)]
 #[repr(C)]
 pub struct MpiCall {
-    call: MpiCallType,
+    time: Usecs,
     tsc: Tsc,
     duration: Tsc,
-    time: Usecs,
-    nb_bytes: u32,
+    partner_rank: MpiRank,
+    current_rank: MpiRank,
+    nb_bytes_s: u32,
+    nb_bytes_r: u32,
     comm: MpiComm,
     req: MpiReq,
-    current_rank: MpiRank,
-    partner_rank: *mut MpiRank,
     tag: MpiTag,
     required_thread_lvl: i32,
     provided_thread_lvl: i32,
     finished: bool,
+    op_type: i8,
+    kind: MpiCallType,
 }
 
-#[no_mangle]
-pub extern "C" fn register_mpi_call(call: MpiCall) {
-    unsafe
-    {
-        match call.call {
-            MpiCallType::Init=>register_init(call.current_rank, call.tsc, call.time),
-
-            MpiCallType::Initthread=>register_init_thread(call.current_rank, call.tsc, call.time, call.required_thread_lvl, call.provided_thread_lvl),
-
-            MpiCallType::Finalize=>register_finalize(call.current_rank, call.tsc, call.time),
-
-            MpiCallType::Send=>register_send(call.current_rank, *call.partner_rank ,call.nb_bytes , call.comm , call.tag , call.tsc , call.duration),
-
-            MpiCallType::Isend=>register_isend(call.current_rank, *call.partner_rank, call.nb_bytes, call.comm, call.req, call.tag, call.tsc, call.duration),
-
-            MpiCallType::Recv=>register_recv(call.current_rank, *call.partner_rank, call.nb_bytes, call.comm, call.tag, call.tsc, call.duration),
-
-            MpiCallType::Irecv=>register_irecv(call.current_rank, *call.partner_rank, call.nb_bytes, call.comm, call.req, call.tag, call.tsc, call.duration),
-
-            MpiCallType::Wait=>register_wait(call.current_rank, call.req, call.tsc, call.duration),
-
-            MpiCallType::Test=>register_test(call.current_rank, call.req, call.finished, call.tsc, call.duration),
-
-            MpiCallType::Barrier=>register_barrier(call.current_rank, call.comm, call.tsc, call.duration),
-
-            //MpiCallType::Ibarrier=>register_ibarrier(),
-
-            //MpiCallType::Ibcast=>register_ibcast(),
-
-            //MpiCallType::Ireduce=>register_ireduce(),
+/// Serialize the contents of the `Vec` and write them to an output file
+fn serialize(
+    events: &mut Vec<Box<dyn Register>>,
+    current_rank: MpiRank,
+) -> Result<(), InterpolError> {
+    let ser_traces = serde_json::to_string_pretty(events)
+        .expect("failed to serialize vector contents to string");
+    let filename = format!(
+        "{}/rank{}_traces.json",
+        INTERPOL_DIR,
+        current_rank.to_string()
+    );
 
-            //MpiCallType::Iscatter=>register_iscatter(),
+    fs::create_dir_all(INTERPOL_DIR)?;
+    let mut file = File::create(filename.clone())?;
+    write!(file, "{}", ser_traces)?;
+    Ok(())
+}
 
-            //MpiCallType::Igather=>register_igather(),
+#[no_mangle]
+pub extern "C" fn register_mpi_call(mpi_call: MpiCall) {
+    let rank = mpi_call.current_rank;
+    match dispatch(mpi_call) {
+        Ok(_) => (),
+        Err(e) => eprintln!("Rank {}: {e}", rank),
+    }
+}
 
-            _ => ()
+fn dispatch(call: MpiCall) -> Result<(), InterpolError> {
+    match call.kind {
+        MpiCallType::Init => register_init(call.current_rank, call.tsc, call.time),
+        MpiCallType::Initthread => register_init_thread(
+            call.current_rank,
+            call.required_thread_lvl,
+            call.provided_thread_lvl,
+            call.tsc,
+            call.time,
+        ),
+        MpiCallType::Finalize => register_finalize(call.current_rank, call.tsc, call.time),
+        MpiCallType::Send
=> register_send( + call.current_rank, + call.partner_rank, + call.nb_bytes_s, + call.comm, + call.tag, + call.tsc, + call.duration, + ), + MpiCallType::Isend => register_isend( + call.current_rank, + call.partner_rank, + call.nb_bytes_s, + call.comm, + call.req, + call.tag, + call.tsc, + call.duration, + ), + MpiCallType::Recv => register_recv( + call.current_rank, + call.partner_rank, + call.nb_bytes_r, + call.comm, + call.tag, + call.tsc, + call.duration, + ), + MpiCallType::Irecv => register_irecv( + call.current_rank, + call.partner_rank, + call.nb_bytes_r, + call.comm, + call.req, + call.tag, + call.tsc, + call.duration, + ), + MpiCallType::Barrier => { + register_barrier(call.current_rank, call.comm, call.tsc, call.duration) } + MpiCallType::Ibarrier => register_ibarrier( + call.current_rank, + call.comm, + call.req, + call.tsc, + call.duration, + ), + MpiCallType::Test => register_test( + call.current_rank, + call.req, + call.finished, + call.tsc, + call.duration, + ), + MpiCallType::Wait => register_wait(call.current_rank, call.req, call.tsc, call.duration), + MpiCallType::Ibcast => register_ibcast( + call.current_rank, + call.partner_rank, + call.nb_bytes_s, + call.comm, + call.req, + call.tsc, + call.duration, + ), + MpiCallType::Igather => register_igather( + call.current_rank, + call.partner_rank, + call.nb_bytes_s, + call.nb_bytes_r, + call.comm, + call.req, + call.tsc, + call.duration, + ), + MpiCallType::Ireduce => register_ireduce( + call.current_rank, + call.partner_rank, + call.nb_bytes_s, + call.op_type, + call.comm, + call.req, + call.tsc, + call.duration, + ), + MpiCallType::Iscatter => register_iscatter( + call.current_rank, + call.partner_rank, + call.nb_bytes_s, + call.nb_bytes_r, + call.comm, + call.req, + call.tsc, + call.duration, + ), } } /// Registers an `MPI_Init` call into a static vector. -#[no_mangle] -pub fn register_init(current_rank: MpiRank, tsc: Tsc, time: Usecs) { - let init_event = match MpiInitBuilder::default() +fn register_init(current_rank: MpiRank, tsc: Tsc, time: Usecs) -> Result<(), InterpolError> { + let init_event = MpiInitBuilder::default() .current_rank(current_rank) .tsc(tsc) .time(time) - .build() - { - Ok(event) => event, - Err(err) => { - print_err( - current_rank, - "failed to build `MpiInit` event", - format!("{err:#?}").as_str(), - ); - return; - } - }; + .build()?; let mut guard = EVENTS .0 .lock() .expect("failed to take the lock on vector for `MpiInit` event"); - match init_event.register(&mut guard) { - Ok(_) => (), - Err(err) => { - print_err( - current_rank, - "failed to register `MpiInit` event", - format!("{err}").as_str(), - ); - } - } + init_event.register(&mut guard)?; + + Ok(()) } /// Registers an `MPI_Init_thread` call into a static vector. 
-#[no_mangle] -pub fn register_init_thread( +fn register_init_thread( current_rank: MpiRank, - tsc: Tsc, - time: Usecs, required_thread_lvl: i32, provided_thread_lvl: i32, -) { - let init_thread_event = match MpiInitThreadBuilder::default() + tsc: Tsc, + time: Usecs, +) -> Result<(), InterpolError> { + let init_thread_event = MpiInitThreadBuilder::default() .current_rank(current_rank) .required_thread_lvl(required_thread_lvl) .provided_thread_lvl(provided_thread_lvl) .tsc(tsc) .time(time) - .build() - { - Ok(event) => event, - Err(err) => { - print_err( - current_rank, - "failed to build `MpiInitThread` event", - format!("{err:#?}").as_str(), - ); - return; - } - }; + .build()?; let mut guard = EVENTS .0 .lock() .expect("failed to take the lock on vector for `MpiInitThread` event"); - match init_thread_event.register(&mut guard) { - Ok(_) => (), - Err(err) => { - print_err( - current_rank, - "failed to register `MpiInitThread` event", - format!("{err}").as_str(), - ); - } - } + init_thread_event.register(&mut guard)?; + + Ok(()) } /// Registers an `MPI_Finalize` call into a static vector. /// /// As this *should* be the final registered event, the contents of the vector will be sorted with /// every other MPI processes vectors' and then serialized. -#[no_mangle] -pub fn register_finalize(current_rank: MpiRank, tsc: Tsc, time: Usecs) { - let finalize_event = match MpiFinalizeBuilder::default() +fn register_finalize(current_rank: MpiRank, tsc: Tsc, time: Usecs) -> Result<(), InterpolError> { + let finalize_event = MpiFinalizeBuilder::default() .current_rank(current_rank) .tsc(tsc) .time(time) - .build() - { - Ok(event) => event, - Err(err) => { - print_err( - current_rank, - "failed to build `MpiFinalize` event", - format!("{err}").as_str(), - ); - return; - } - }; + .build()?; let mut guard = EVENTS .0 .lock() .expect("failed to take the lock on vector for `MpiFinalize` event"); - match finalize_event.register(&mut guard) { - Ok(_) => (), - Err(err) => { - print_err( - current_rank, - "failed to register `MpiFinalize` event", - format!("{err}").as_str(), - ); - } - } + finalize_event.register(&mut guard)?; - // Serialize the contents of the `Vec` and write them to an output file - let ser_traces = serde_json::to_string_pretty(&*guard) - .expect("failed to serialize vector contents to string"); - let filename = format!("/tmp/rank{}_traces.json", current_rank.to_string()); - let mut file = match File::create(filename.clone()) { - Ok(file) => file, - Err(err) => { - print_err( - current_rank, - format!("failed to create file `{}`", filename).as_str(), - format!("{err}").as_str(), - ); - return; - } - }; - - match write!(file, "{}", ser_traces) { - Ok(_) => (), - Err(err) => { - print_err( - current_rank, - format!("failed to write to file `{}`", filename).as_str(), - format!("{err}").as_str(), - ); - } - }; - - if current_rank != 0 { - return; - } - // TODO: Deserialize every trace files, sort and serialize everything in order. + // Serialize all events of the current rank + serialize(&mut *guard, current_rank)?; + Ok(()) } /// Registers an `MPI_Send` call into a static vector. 
-#[no_mangle] -pub fn register_send( +fn register_send( current_rank: MpiRank, partner_rank: MpiRank, nb_bytes: u32, @@ -291,8 +336,8 @@ pub fn register_send( tag: MpiTag, tsc: Tsc, duration: Tsc, -) { - let send_event = match MpiSendBuilder::default() +) -> Result<(), InterpolError> { + let send_event = MpiSendBuilder::default() .current_rank(current_rank) .partner_rank(partner_rank) .nb_bytes(nb_bytes) @@ -300,39 +345,19 @@ pub fn register_send( .tag(tag) .tsc(tsc) .duration(duration) - .build() - { - Ok(event) => event, - Err(err) => { - print_err( - current_rank, - "failed to build `MpiSend` event", - format!("{err}").as_str(), - ); - return; - } - }; + .build()?; let mut guard = EVENTS .0 .lock() .expect("failed to take the lock on vector for `MpiSend` event"); - match send_event.register(&mut guard) { - Ok(_) => (), - Err(err) => { - print_err( - current_rank, - "failed to register `MpiSend` event", - format!("{err}").as_str(), - ); - return; - } - } + send_event.register(&mut guard)?; + + Ok(()) } /// Registers an `MPI_Recv` call into a static vector. -#[no_mangle] -pub fn register_recv( +fn register_recv( current_rank: MpiRank, partner_rank: MpiRank, nb_bytes: u32, @@ -340,8 +365,8 @@ pub fn register_recv( tag: MpiTag, tsc: Tsc, duration: Tsc, -) { - let recv_event = match MpiRecvBuilder::default() +) -> Result<(), InterpolError> { + let recv_event = MpiRecvBuilder::default() .current_rank(current_rank) .partner_rank(partner_rank) .nb_bytes(nb_bytes) @@ -349,39 +374,19 @@ pub fn register_recv( .tag(tag) .tsc(tsc) .duration(duration) - .build() - { - Ok(event) => event, - Err(err) => { - print_err( - current_rank, - "failed to build `MpiRecv` event", - format!("{err}").as_str(), - ); - return; - } - }; + .build()?; let mut guard = EVENTS .0 .lock() .expect("failed to take the lock on vector for `MpiRecv` event"); - match recv_event.register(&mut guard) { - Ok(_) => (), - Err(err) => { - print_err( - current_rank, - "failed to register `MpiRecv` event", - format!("{err}").as_str(), - ); - return; - } - } + recv_event.register(&mut guard)?; + + Ok(()) } /// Registers an `MPI_Isend` call into a static vector. -#[no_mangle] -pub fn register_isend( +fn register_isend( current_rank: MpiRank, partner_rank: MpiRank, nb_bytes: u32, @@ -390,8 +395,8 @@ pub fn register_isend( tag: MpiTag, tsc: Tsc, duration: Tsc, -) { - let isend_event = match MpiIsendBuilder::default() +) -> Result<(), InterpolError> { + let isend_event = MpiIsendBuilder::default() .current_rank(current_rank) .partner_rank(partner_rank) .nb_bytes(nb_bytes) @@ -400,39 +405,19 @@ pub fn register_isend( .tag(tag) .tsc(tsc) .duration(duration) - .build() - { - Ok(event) => event, - Err(err) => { - print_err( - current_rank, - "failed to build `MpiIsend` event", - format!("{err}").as_str(), - ); - return; - } - }; + .build()?; let mut guard = EVENTS .0 .lock() .expect("failed to take the lock on vector for `MpiIsend` event"); - match isend_event.register(&mut guard) { - Ok(_) => (), - Err(err) => { - print_err( - current_rank, - "failed to register `MpiIsend` event", - format!("{err}").as_str(), - ); - return; - } - } + isend_event.register(&mut guard)?; + + Ok(()) } /// Registers an `MPI_Irecv` call into a static vector. 
-#[no_mangle] -pub fn register_irecv( +fn register_irecv( current_rank: MpiRank, partner_rank: MpiRank, nb_bytes: u32, @@ -441,8 +426,8 @@ pub fn register_irecv( tag: MpiTag, tsc: Tsc, duration: Tsc, -) { - let irecv_event = match MpiIrecvBuilder::default() +) -> Result<(), InterpolError> { + let irecv_event = MpiIrecvBuilder::default() .current_rank(current_rank) .partner_rank(partner_rank) .nb_bytes(nb_bytes) @@ -451,163 +436,263 @@ pub fn register_irecv( .tag(tag) .tsc(tsc) .duration(duration) - .build() - { - Ok(event) => event, - Err(err) => { - print_err( - current_rank, - "failed to build `MpiIrecv` event", - format!("{err}").as_str(), - ); - return; - } - }; + .build()?; let mut guard = EVENTS .0 .lock() .expect("failed to take the lock on vector for `MpiIrecv` event"); - match irecv_event.register(&mut guard) { - Ok(_) => (), - Err(err) => { - print_err( - current_rank, - "failed to register `MpiIrecv` event", - format!("{err}").as_str(), - ); - return; - } - } + irecv_event.register(&mut guard)?; + + Ok(()) } /// Registers an `MPI_Barrier` call into a static vector. -#[no_mangle] -pub fn register_barrier(current_rank: MpiRank, comm: MpiComm, tsc: Tsc, duration: Tsc) { - let barrier_event = match MpiBarrierBuilder::default() +fn register_barrier( + current_rank: MpiRank, + comm: MpiComm, + tsc: Tsc, + duration: Tsc, +) -> Result<(), InterpolError> { + let barrier_event = MpiBarrierBuilder::default() .current_rank(current_rank) .comm(comm) .tsc(tsc) .duration(duration) - .build() - { - Ok(event) => event, - Err(err) => { - print_err( - current_rank, - "failed to build `MpiBarrier` event", - format!("{err}").as_str(), - ); - return; - } - }; + .build()?; let mut guard = EVENTS .0 .lock() .expect("failed to take the lock on vector for `MpiBarrier` event"); - match barrier_event.register(&mut guard) { - Ok(_) => (), - Err(err) => { - print_err( - current_rank, - "failed to register `MpiBarrier` event", - format!("{err}").as_str(), - ); - return; - } - } + barrier_event.register(&mut guard)?; + + Ok(()) +} + +/// Registers an `MPI_Ibarrier` call into a static vector. +fn register_ibarrier( + current_rank: MpiRank, + comm: MpiComm, + req: MpiReq, + tsc: Tsc, + duration: Tsc, +) -> Result<(), InterpolError> { + let ibarrier_event = MpiIbarrierBuilder::default() + .current_rank(current_rank) + .comm(comm) + .req(req) + .tsc(tsc) + .duration(duration) + .build()?; + + let mut guard = EVENTS + .0 + .lock() + .expect("failed to take the lock on vector for `MpiIbarrier` event"); + ibarrier_event.register(&mut guard)?; + + Ok(()) } /// Registers an `MPI_Test` call into a static vector. 
-#[no_mangle]
-pub fn register_test(
+fn register_test(
     current_rank: MpiRank,
     req: MpiReq,
     finished: bool,
     tsc: Tsc,
     duration: Tsc,
-) {
-    let test_event = match MpiTestBuilder::default()
+) -> Result<(), InterpolError> {
+    let test_event = MpiTestBuilder::default()
         .current_rank(current_rank)
         .req(req)
         .finished(finished)
         .tsc(tsc)
         .duration(duration)
-        .build()
-    {
-        Ok(event) => event,
-        Err(err) => {
-            print_err(
-                current_rank,
-                "failed to build `MpiTest` event",
-                format!("{err}").as_str(),
-            );
-            return;
-        }
-    };
+        .build()?;
 
     let mut guard = EVENTS
         .0
         .lock()
         .expect("failed to take the lock on vector for `MpiTest` event");
-    match test_event.register(&mut guard) {
-        Ok(_) => (),
-        Err(err) => {
-            print_err(
-                current_rank,
-                "failed to register `MpiTest` event",
-                format!("{err}").as_str(),
-            );
-            return;
-        }
-    }
+    test_event.register(&mut guard)?;
+
+    Ok(())
 }
 
 /// Registers an `MPI_Wait` call into a static vector.
-#[no_mangle]
-pub fn register_wait(current_rank: MpiRank, req: MpiReq, tsc: Tsc, duration: Tsc) {
-    let wait_event = match MpiWaitBuilder::default()
+fn register_wait(
+    current_rank: MpiRank,
+    req: MpiReq,
+    tsc: Tsc,
+    duration: Tsc,
+) -> Result<(), InterpolError> {
+    let wait_event = MpiWaitBuilder::default()
         .current_rank(current_rank)
         .req(req)
         .tsc(tsc)
         .duration(duration)
-        .build()
-    {
-        Ok(event) => event,
-        Err(err) => {
-            print_err(
-                current_rank,
-                "failed to build `MpiWait` event",
-                format!("{err}").as_str(),
-            );
-            return;
-        }
-    };
+        .build()?;
 
     let mut guard = EVENTS
         .0
         .lock()
         .expect("failed to take the lock on vector for `MpiWait` event");
-    match wait_event.register(&mut guard) {
+    wait_event.register(&mut guard)?;
+
+    Ok(())
+}
+
+fn register_ibcast(
+    current_rank: MpiRank,
+    root_rank: MpiRank,
+    nb_bytes: u32,
+    comm: MpiComm,
+    req: MpiReq,
+    tsc: Tsc,
+    duration: Tsc,
+) -> Result<(), InterpolError> {
+    let ibcast_event = MpiIbcastBuilder::default()
+        .current_rank(current_rank)
+        .root_rank(root_rank)
+        .nb_bytes(nb_bytes)
+        .comm(comm)
+        .req(req)
+        .tsc(tsc)
+        .duration(duration)
+        .build()?;
+
+    let mut guard = EVENTS
+        .0
+        .lock()
+        .expect("failed to take the lock on vector for `MpiIbcast` event");
+    ibcast_event.register(&mut guard)?;
+    Ok(())
+}
+
+fn register_igather(
+    current_rank: MpiRank,
+    root_rank: MpiRank,
+    nb_bytes_send: u32,
+    nb_bytes_recv: u32,
+    comm: MpiComm,
+    req: MpiReq,
+    tsc: Tsc,
+    duration: Tsc,
+) -> Result<(), InterpolError> {
+    let igather_event = MpiIgatherBuilder::default()
+        .current_rank(current_rank)
+        .root_rank(root_rank)
+        .nb_bytes_send(nb_bytes_send)
+        .nb_bytes_recv(nb_bytes_recv)
+        .comm(comm)
+        .req(req)
+        .tsc(tsc)
+        .duration(duration)
+        .build()?;
+
+    let mut guard = EVENTS
+        .0
+        .lock()
+        .expect("failed to take the lock on vector for `MpiIgather` event");
+    igather_event.register(&mut guard)?;
+    Ok(())
+}
+
+fn register_ireduce(
+    current_rank: MpiRank,
+    root_rank: MpiRank,
+    nb_bytes: u32,
+    op_type: i8,
+    comm: MpiComm,
+    req: MpiReq,
+    tsc: Tsc,
+    duration: Tsc,
+) -> Result<(), InterpolError> {
+    let ireduce_event = MpiIreduceBuilder::default()
+        .current_rank(current_rank)
+        .root_rank(root_rank)
+        .nb_bytes(nb_bytes)
+        .op_type(op_type)
+        .comm(comm)
+        .req(req)
+        .tsc(tsc)
+        .duration(duration)
+        .build()?;
+
+    let mut guard = EVENTS
+        .0
+        .lock()
+        .expect("failed to take the lock on vector for `MpiIreduce` event");
+    ireduce_event.register(&mut guard)?;
+    Ok(())
+}
+
+fn register_iscatter(
+    current_rank: MpiRank,
+    root_rank: MpiRank,
+    nb_bytes_send: u32,
+    nb_bytes_recv: u32,
+    comm: MpiComm,
+    req: MpiReq,
+    tsc: Tsc,
+    duration: Tsc,
+) -> Result<(), InterpolError> {
+    let iscatter_event = MpiIscatterBuilder::default()
+        .current_rank(current_rank)
+        .root_rank(root_rank)
+        .nb_bytes_send(nb_bytes_send)
+        .nb_bytes_recv(nb_bytes_recv)
+        .comm(comm)
+        .req(req)
+        .tsc(tsc)
+        .duration(duration)
+        .build()?;
+
+    let mut guard = EVENTS
+        .0
+        .lock()
+        .expect("failed to take the lock on vector for `MpiIscatter` event");
+    iscatter_event.register(&mut guard)?;
+    Ok(())
+}
+
+#[no_mangle]
+pub extern "C" fn sort_all_traces() {
+    let mut all_traces = match deserialize_all_traces() {
+        Ok(t) => t,
+        Err(e) => panic!("{e}"),
+    };
+
+    let start = std::time::Instant::now();
+    all_traces.par_sort_unstable_by_key(|event| event.tsc());
+    let end = start.elapsed();
+    println!("Sort took {end:?}");
+
+    let serialized_traces =
+        serde_json::to_string_pretty(&all_traces).expect("failed to serialize all traces");
+    match write_all_traces(serialized_traces) {
         Ok(_) => (),
-        Err(err) => {
-            print_err(
-                current_rank,
-                "failed to register `MpiWait` event",
-                format!("{err}").as_str(),
-            );
-            return;
-        }
+        Err(e) => eprintln!("{e}"),
     }
 }
 
-fn print_err(rank: MpiRank, err: &str, reason: &str) {
-    eprintln!(
-        "{} {} \n {} {}",
-        format!("\x1b[1;31merror[rank {}]:\x1b[0m", rank.to_string()).as_str(),
-        format!("\x1b[1m{}\x1b[0m", err),
-        "\x1b[34m-->\x1b[0m",
-        reason
-    );
+fn deserialize_all_traces() -> Result<Vec<Box<dyn Register>>, InterpolError> {
+    let mut all_traces = Vec::new();
+
+    for entry in fs::read_dir(INTERPOL_DIR)? {
+        let dir = entry?;
+        let contents = fs::read_to_string(dir.path())?;
+        let mut deserialized: Vec<Box<dyn Register>> =
+            serde_json::from_str(&contents).expect("failed to deserialize trace file contents");
+        all_traces.append(&mut deserialized);
+    }
+
+    Ok(all_traces)
+}
+
+fn write_all_traces(serialized_traces: String) -> Result<(), InterpolError> {
+    let mut file = File::create(format!("{}/{}", INTERPOL_DIR, "interpol_traces.json"))?;
+    write!(file, "{}", serialized_traces)?;
+    Ok(())
 }
diff --git a/interpol-rs/src/lib.rs b/interpol-rs/src/lib.rs
index e069f43..a2a4afa 100644
--- a/interpol-rs/src/lib.rs
+++ b/interpol-rs/src/lib.rs
@@ -3,3 +3,64 @@ pub mod interpol;
 pub mod mpi_events;
 pub mod types;
+
+#[non_exhaustive]
+#[derive(Debug)]
+pub enum InterpolErrorKind {
+    Io,
+    TryReserve,
+    DeriveBuilder,
+}
+
+#[derive(Debug)]
+pub struct InterpolError {
+    kind: InterpolErrorKind,
+    reason: String,
+}
+
+impl std::fmt::Display for InterpolError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        let err_msg = match self.kind {
+            InterpolErrorKind::Io => format!("I/O error: {}", self.reason),
+            InterpolErrorKind::TryReserve => format!("TryReserve error: {}", self.reason),
+            InterpolErrorKind::DeriveBuilder => format!("DeriveBuilder error: {}", self.reason),
+            _ => String::from("Unknown error kind"),
+        };
+
+        write!(f, "{err_msg}")
+    }
+}
+
+impl From<std::io::Error> for InterpolError {
+    fn from(error: std::io::Error) -> Self {
+        InterpolError {
+            kind: InterpolErrorKind::Io,
+            reason: error.to_string(),
+        }
+    }
+}
+
+impl From<std::collections::TryReserveError> for InterpolError {
+    fn from(error: std::collections::TryReserveError) -> Self {
+        InterpolError {
+            kind: InterpolErrorKind::TryReserve,
+            reason: error.to_string(),
+        }
+    }
+}
+
+#[macro_export]
+macro_rules!
impl_builder_error { + ($t:ty) => { + use crate::{InterpolError, InterpolErrorKind}; + + impl From<$t> for InterpolError { + fn from(error: $t) -> Self { + InterpolError { + kind: InterpolErrorKind::DeriveBuilder, + reason: error.to_string(), + } + } + } + }; +} diff --git a/interpol-rs/src/mpi_events/collectives/mod.rs b/interpol-rs/src/mpi_events/collectives/mod.rs new file mode 100644 index 0000000..7a379cb --- /dev/null +++ b/interpol-rs/src/mpi_events/collectives/mod.rs @@ -0,0 +1,4 @@ +pub mod mpi_ibcast; +pub mod mpi_igather; +pub mod mpi_ireduce; +pub mod mpi_iscatter; diff --git a/interpol-rs/src/mpi_events/collectives/mpi_ibcast.rs b/interpol-rs/src/mpi_events/collectives/mpi_ibcast.rs new file mode 100644 index 0000000..819fd2f --- /dev/null +++ b/interpol-rs/src/mpi_events/collectives/mpi_ibcast.rs @@ -0,0 +1,104 @@ +use crate::{impl_builder_error, impl_register}; +use crate::types::{MpiComm, MpiRank, MpiReq, Tsc}; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; + +/// A structure that stores information about `MPI_Ibcast` calls. +/// +/// The information stored are: +/// - the rank of the process making the call to `MPI_Ibcast`; +/// - the rank of the root process making the broadcast; +/// - the number of bytes exchanged; +/// - the identifier of the MPI communicator; +/// - the identifier of the MPI request; +/// - the tag of the communication; +/// - the current value of the Time Stamp counter before the call to `MPI_Ibcast`. +/// - the duration of the call. +#[derive(Builder, Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct MpiIbcast { + current_rank: MpiRank, + root_rank: MpiRank, + nb_bytes: u32, + comm: MpiComm, + req: MpiReq, + tsc: Tsc, + duration: Tsc, +} + +impl MpiIbcast { + /// Creates a new `MpiIbcast` structure from the specified parameters. 
+ pub fn new( + current_rank: MpiRank, + root_rank: MpiRank, + nb_bytes: u32, + comm: MpiComm, + req: MpiReq, + tsc: Tsc, + duration: Tsc, + ) -> Self { + MpiIbcast { + current_rank, + root_rank, + nb_bytes, + comm, + req, + tsc, + duration, + } + } +} + +impl_builder_error!(MpiIbcastBuilderError); +impl_register!(MpiIbcast); + +#[cfg(test)] +mod tests { + use super::*; + const MPI_COMM_WORLD: i32 = 0; + + #[test] + fn builds() { + let ibcast_new = MpiIbcast::new(0, 1, 8, MPI_COMM_WORLD, 7, 1024, 2048); + let ibcast_builder = MpiIbcastBuilder::default() + .current_rank(0) + .root_rank(1) + .nb_bytes(8) + .comm(MPI_COMM_WORLD) + .req(7) + .tsc(1024) + .duration(2048) + .build() + .expect("failed to build `MpiIbcast`"); + + assert_eq!(ibcast_new, ibcast_builder); + } + + #[test] + fn serializes() { + let ibcast = MpiIbcast::new(0, 0, 8, MPI_COMM_WORLD, 7, 1024, 2048); + let json = String::from("{\"current_rank\":0,\"root_rank\":0,\"nb_bytes\":8,\"comm\":0,\"req\":7,\"tsc\":1024,\"duration\":2048}"); + let serialized = serde_json::to_string(&ibcast).expect("failed to serialize `MpiIbcast`"); + + assert_eq!(json, serialized); + } + + #[test] + fn deserializes() { + let ibcast = MpiIbcastBuilder::default() + .current_rank(1) + .root_rank(0) + .nb_bytes(8) + .comm(MPI_COMM_WORLD) + .req(7) + .tsc(1024) + .duration(2048) + .build() + .expect("failed to build `MpiIbcast`"); + let serialized = + serde_json::to_string_pretty(&ibcast).expect("failed to serialize `MpiIbcast`"); + let deserialized: MpiIbcast = + serde_json::from_str(&serialized).expect("failed to deserialize `MpiIbcast`"); + + assert_eq!(ibcast, deserialized); + } +} diff --git a/interpol-rs/src/mpi_events/collectives/mpi_igather.rs b/interpol-rs/src/mpi_events/collectives/mpi_igather.rs new file mode 100644 index 0000000..fe37e31 --- /dev/null +++ b/interpol-rs/src/mpi_events/collectives/mpi_igather.rs @@ -0,0 +1,110 @@ +use crate::{impl_builder_error, impl_register}; +use crate::types::{MpiComm, MpiRank, MpiReq, Tsc}; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; + +/// A structure that stores information about `MPI_Igather` calls. +/// +/// The information stored are: +/// - the rank of the process making the call to `MPI_Igather`; +/// - the rank of the root process making the broadcast; +/// - the number of bytes sent; +/// - the number of bytes received; +/// - the identifier of the MPI communicator; +/// - the identifier of the MPI request; +/// - the tag of the communication; +/// - the current value of the Time Stamp counter before the call to `MPI_Igather`. +/// - the duration of the call. +#[derive(Builder, Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct MpiIgather { + current_rank: MpiRank, + root_rank: MpiRank, + nb_bytes_send: u32, + nb_bytes_recv: u32, + comm: MpiComm, + req: MpiReq, + tsc: Tsc, + duration: Tsc, +} + +impl MpiIgather { + /// Creates a new `MpiIgather` structure from the specified parameters. 
+ pub fn new( + current_rank: MpiRank, + root_rank: MpiRank, + nb_bytes_send: u32, + nb_bytes_recv: u32, + comm: MpiComm, + req: MpiReq, + tsc: Tsc, + duration: Tsc, + ) -> Self { + MpiIgather { + current_rank, + root_rank, + nb_bytes_send, + nb_bytes_recv, + comm, + req, + tsc, + duration, + } + } +} + +impl_builder_error!(MpiIgatherBuilderError); +impl_register!(MpiIgather); + +#[cfg(test)] +mod tests { + use super::*; + const MPI_COMM_WORLD: i32 = 0; + + #[test] + fn builds() { + let igather_new = MpiIgather::new(0, 1, 8, 0, MPI_COMM_WORLD, 7, 1024, 2048); + let igather_builder = MpiIgatherBuilder::default() + .current_rank(0) + .root_rank(1) + .nb_bytes_send(8) + .nb_bytes_recv(0) + .comm(MPI_COMM_WORLD) + .req(7) + .tsc(1024) + .duration(2048) + .build() + .expect("failed to build `MpiIgather`"); + + assert_eq!(igather_new, igather_builder); + } + + #[test] + fn serializes() { + let igather = MpiIgather::new(0, 0, 8, 64, MPI_COMM_WORLD, 7, 1024, 2048); + let json = String::from("{\"current_rank\":0,\"root_rank\":0,\"nb_bytes_send\":8,\"nb_bytes_recv\":64,\"comm\":0,\"req\":7,\"tsc\":1024,\"duration\":2048}"); + let serialized = serde_json::to_string(&igather).expect("failed to serialize `MpiIgather`"); + + assert_eq!(json, serialized); + } + + #[test] + fn deserializes() { + let igather = MpiIgatherBuilder::default() + .current_rank(1) + .root_rank(0) + .nb_bytes_send(64) + .nb_bytes_recv(0) + .comm(MPI_COMM_WORLD) + .req(7) + .tsc(1024) + .duration(2048) + .build() + .expect("failed to build `MpiIgather`"); + let serialized = + serde_json::to_string_pretty(&igather).expect("failed to serialize `MpiIgather`"); + let deserialized: MpiIgather = + serde_json::from_str(&serialized).expect("failed to deserialize `MpiIgather`"); + + assert_eq!(igather, deserialized); + } +} diff --git a/interpol-rs/src/mpi_events/collectives/mpi_ireduce.rs b/interpol-rs/src/mpi_events/collectives/mpi_ireduce.rs new file mode 100644 index 0000000..3a6f962 --- /dev/null +++ b/interpol-rs/src/mpi_events/collectives/mpi_ireduce.rs @@ -0,0 +1,126 @@ +use crate::types::{MpiComm, MpiRank, MpiReq, Tsc}; +use crate::{impl_builder_error, impl_register}; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; + +/// A structure that stores information about `MPI_Ireduce` calls. +/// +/// The information stored are: +/// - the rank of the process making the call to `MPI_Ireduce`; +/// - the rank of the root process making the broadcast; +/// - the number of bytes exchanged; +/// - the type of MPI reduction operation; +/// - the identifier of the MPI communicator; +/// - the identifier of the MPI request; +/// - the tag of the communication; +/// - the current value of the Time Stamp counter before the call to `MPI_Ireduce`. +/// - the duration of the call. +#[derive(Builder, Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct MpiIreduce { + current_rank: MpiRank, + root_rank: MpiRank, + nb_bytes: u32, + op_type: i8, + comm: MpiComm, + req: MpiReq, + tsc: Tsc, + duration: Tsc, +} + +impl MpiIreduce { + /// Creates a new `MpiIreduce` structure from the specified parameters. 
+ pub fn new( + current_rank: MpiRank, + root_rank: MpiRank, + nb_bytes: u32, + op_type: i8, + comm: MpiComm, + req: MpiReq, + tsc: Tsc, + duration: Tsc, + ) -> Self { + MpiIreduce { + current_rank, + root_rank, + nb_bytes, + op_type, + comm, + req, + tsc, + duration, + } + } +} + +impl_builder_error!(MpiIreduceBuilderError); +impl_register!(MpiIreduce); + +#[cfg(test)] +mod tests { + use super::*; + const MPI_COMM_WORLD: i32 = 0; + + enum MpiOpType { + Max, + Sum, + Prod, + } + + #[test] + fn builds() { + let ireduce_new = + MpiIreduce::new(0, 1, 8, MpiOpType::Sum as i8, MPI_COMM_WORLD, 7, 1024, 2048); + let ireduce_builder = MpiIreduceBuilder::default() + .current_rank(0) + .root_rank(1) + .nb_bytes(8) + .op_type(MpiOpType::Sum as i8) + .comm(MPI_COMM_WORLD) + .req(7) + .tsc(1024) + .duration(2048) + .build() + .expect("failed to build `MpiIreduce`"); + + assert_eq!(ireduce_new, ireduce_builder); + } + + #[test] + fn serializes() { + let ireduce = MpiIreduce::new( + 0, + 0, + 8, + MpiOpType::Prod as i8, + MPI_COMM_WORLD, + 7, + 1024, + 2048, + ); + let json = String::from("{\"current_rank\":0,\"root_rank\":0,\"nb_bytes\":8,\"op_type\":2,\"comm\":0,\"req\":7,\"tsc\":1024,\"duration\":2048}"); + let serialized = serde_json::to_string(&ireduce).expect("failed to serialize `MpiIreduce`"); + + assert_eq!(json, serialized); + } + + #[test] + fn deserializes() { + let ireduce = MpiIreduceBuilder::default() + .current_rank(1) + .root_rank(0) + .nb_bytes(8) + .op_type(MpiOpType::Max as i8) + .comm(MPI_COMM_WORLD) + .req(7) + .tsc(1024) + .duration(2048) + .build() + .expect("failed to build `MpiIreduce`"); + let serialized = + serde_json::to_string_pretty(&ireduce).expect("failed to serialize `MpiIreduce`"); + let deserialized: MpiIreduce = + serde_json::from_str(&serialized).expect("failed to deserialize `MpiIreduce`"); + + assert_eq!(ireduce, deserialized); + } +} diff --git a/interpol-rs/src/mpi_events/collectives/mpi_iscatter.rs b/interpol-rs/src/mpi_events/collectives/mpi_iscatter.rs new file mode 100644 index 0000000..4b80f26 --- /dev/null +++ b/interpol-rs/src/mpi_events/collectives/mpi_iscatter.rs @@ -0,0 +1,111 @@ +use crate::{impl_builder_error, impl_register}; +use crate::types::{MpiComm, MpiRank, MpiReq, Tsc}; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; + +/// A structure that stores information about `MPI_Iscatter` calls. +/// +/// The information stored are: +/// - the rank of the process making the call to `MPI_Iscatter`; +/// - the rank of the root process making the broadcast; +/// - the number of bytes sent; +/// - the number of bytes received; +/// - the identifier of the MPI communicator; +/// - the identifier of the MPI request; +/// - the tag of the communication; +/// - the current value of the Time Stamp counter before the call to `MPI_Iscatter`. +/// - the duration of the call. +#[derive(Builder, Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct MpiIscatter { + current_rank: MpiRank, + root_rank: MpiRank, + nb_bytes_send: u32, + nb_bytes_recv: u32, + comm: MpiComm, + req: MpiReq, + tsc: Tsc, + duration: Tsc, +} + +impl MpiIscatter { + /// Creates a new `MpiIscatter` structure from the specified parameters. 
+ pub fn new( + current_rank: MpiRank, + root_rank: MpiRank, + nb_bytes_send: u32, + nb_bytes_recv: u32, + comm: MpiComm, + req: MpiReq, + tsc: Tsc, + duration: Tsc, + ) -> Self { + MpiIscatter { + current_rank, + root_rank, + nb_bytes_send, + nb_bytes_recv, + comm, + req, + tsc, + duration, + } + } +} + +impl_builder_error!(MpiIscatterBuilderError); +impl_register!(MpiIscatter); + +#[cfg(test)] +mod tests { + use super::*; + const MPI_COMM_WORLD: i32 = 0; + + #[test] + fn builds() { + let iscatter_new = MpiIscatter::new(0, 1, 8, 0, MPI_COMM_WORLD, 7, 1024, 2048); + let iscatter_builder = MpiIscatterBuilder::default() + .current_rank(0) + .root_rank(1) + .nb_bytes_send(8) + .nb_bytes_recv(0) + .comm(MPI_COMM_WORLD) + .req(7) + .tsc(1024) + .duration(2048) + .build() + .expect("failed to build `MpiIscatter`"); + + assert_eq!(iscatter_new, iscatter_builder); + } + + #[test] + fn serializes() { + let iscatter = MpiIscatter::new(0, 0, 8, 64, MPI_COMM_WORLD, 7, 1024, 2048); + let json = String::from("{\"current_rank\":0,\"root_rank\":0,\"nb_bytes_send\":8,\"nb_bytes_recv\":64,\"comm\":0,\"req\":7,\"tsc\":1024,\"duration\":2048}"); + let serialized = + serde_json::to_string(&iscatter).expect("failed to serialize `MpiIscatter`"); + + assert_eq!(json, serialized); + } + + #[test] + fn deserializes() { + let iscatter = MpiIscatterBuilder::default() + .current_rank(1) + .root_rank(0) + .nb_bytes_send(64) + .nb_bytes_recv(0) + .comm(MPI_COMM_WORLD) + .req(7) + .tsc(1024) + .duration(2048) + .build() + .expect("failed to build `MpiIscatter`"); + let serialized = + serde_json::to_string_pretty(&iscatter).expect("failed to serialize `MpiIscatter`"); + let deserialized: MpiIscatter = + serde_json::from_str(&serialized).expect("failed to deserialize `MpiIscatter`"); + + assert_eq!(iscatter, deserialized); + } +} diff --git a/interpol-rs/src/mpi_events/management/mpi_finalize.rs b/interpol-rs/src/mpi_events/management/mpi_finalize.rs index 5901177..07a7c66 100644 --- a/interpol-rs/src/mpi_events/management/mpi_finalize.rs +++ b/interpol-rs/src/mpi_events/management/mpi_finalize.rs @@ -1,8 +1,7 @@ -use crate::interpol::Register; use crate::types::{MpiRank, Tsc, Usecs}; +use crate::{impl_builder_error, impl_register}; use derive_builder::Builder; use serde::{Deserialize, Serialize}; -use std::collections::TryReserveError; /// A structure that stores information about `MPI_Finalize` calls. /// @@ -31,15 +30,8 @@ impl MpiFinalize { } } -#[typetag::serde] -impl Register for MpiFinalize { - fn register(self, events: &mut Vec>) -> Result<(), TryReserveError> { - // Ensure that the program does not panic if allocation fails - events.try_reserve_exact(2 * events.len())?; - events.push(Box::new(self)); - Ok(()) - } -} +impl_builder_error!(MpiFinalizeBuilderError); +impl_register!(MpiFinalize); #[cfg(test)] mod tests { diff --git a/interpol-rs/src/mpi_events/management/mpi_init.rs b/interpol-rs/src/mpi_events/management/mpi_init.rs index 67c3f7b..0803c87 100644 --- a/interpol-rs/src/mpi_events/management/mpi_init.rs +++ b/interpol-rs/src/mpi_events/management/mpi_init.rs @@ -1,8 +1,7 @@ -use crate::interpol::Register; use crate::types::{MpiRank, Tsc, Usecs}; +use crate::{impl_builder_error, impl_register}; use derive_builder::Builder; use serde::{Deserialize, Serialize}; -use std::collections::TryReserveError; /// A structure that stores information about `MPI_Init` calls. 
/// @@ -31,15 +30,8 @@ impl MpiInit { } } -#[typetag::serde] -impl Register for MpiInit { - fn register(self, events: &mut Vec>) -> Result<(), TryReserveError> { - // Ensure that the program does not panic if allocation fails - events.try_reserve_exact(2 * events.len())?; - events.push(Box::new(self)); - Ok(()) - } -} +impl_builder_error!(MpiInitBuilderError); +impl_register!(MpiInit); #[cfg(test)] mod tests { diff --git a/interpol-rs/src/mpi_events/management/mpi_init_thread.rs b/interpol-rs/src/mpi_events/management/mpi_init_thread.rs index 578a60e..11e033a 100644 --- a/interpol-rs/src/mpi_events/management/mpi_init_thread.rs +++ b/interpol-rs/src/mpi_events/management/mpi_init_thread.rs @@ -1,8 +1,7 @@ -use crate::interpol::Register; use crate::types::{MpiRank, Tsc, Usecs}; +use crate::{impl_builder_error, impl_register}; use derive_builder::Builder; use serde::{Deserialize, Serialize}; -use std::collections::TryReserveError; /// A structure that stores information about `MPI_Init_thread` calls. /// @@ -42,15 +41,8 @@ impl MpiInitThread { } } -#[typetag::serde] -impl Register for MpiInitThread { - fn register(self, events: &mut Vec>) -> Result<(), TryReserveError> { - // Ensure that the program does not panic if allocation fails - events.try_reserve_exact(2 * events.len())?; - events.push(Box::new(self)); - Ok(()) - } -} +impl_builder_error!(MpiInitThreadBuilderError); +impl_register!(MpiInitThread); #[cfg(test)] mod tests { diff --git a/interpol-rs/src/mpi_events/mod.rs b/interpol-rs/src/mpi_events/mod.rs index b85df9c..5188e19 100644 --- a/interpol-rs/src/mpi_events/mod.rs +++ b/interpol-rs/src/mpi_events/mod.rs @@ -1,4 +1,4 @@ -// pub mod collectives; +pub mod collectives; pub mod management; pub mod point_to_point; pub mod synchronization; diff --git a/interpol-rs/src/mpi_events/point_to_point/mpi_irecv.rs b/interpol-rs/src/mpi_events/point_to_point/mpi_irecv.rs index 61b087e..cbcb7e8 100644 --- a/interpol-rs/src/mpi_events/point_to_point/mpi_irecv.rs +++ b/interpol-rs/src/mpi_events/point_to_point/mpi_irecv.rs @@ -1,8 +1,7 @@ -use crate::interpol::Register; use crate::types::{MpiComm, MpiRank, MpiReq, MpiTag, Tsc}; +use crate::{impl_builder_error, impl_register}; use derive_builder::Builder; use serde::{Deserialize, Serialize}; -use std::collections::TryReserveError; /// A structure that stores information about `MPI_Irecv` calls. /// @@ -53,15 +52,8 @@ impl MpiIrecv { } } -#[typetag::serde] -impl Register for MpiIrecv { - fn register(self, events: &mut Vec>) -> Result<(), TryReserveError> { - // Ensure that the program does not panic if allocation fails - events.try_reserve_exact(2 * events.len())?; - events.push(Box::new(self)); - Ok(()) - } -} +impl_builder_error!(MpiIrecvBuilderError); +impl_register!(MpiIrecv); #[cfg(test)] mod tests { diff --git a/interpol-rs/src/mpi_events/point_to_point/mpi_isend.rs b/interpol-rs/src/mpi_events/point_to_point/mpi_isend.rs index e73e1cf..97d2f09 100644 --- a/interpol-rs/src/mpi_events/point_to_point/mpi_isend.rs +++ b/interpol-rs/src/mpi_events/point_to_point/mpi_isend.rs @@ -1,8 +1,7 @@ -use crate::interpol::Register; use crate::types::{MpiComm, MpiRank, MpiReq, MpiTag, Tsc}; +use crate::{impl_builder_error, impl_register}; use derive_builder::Builder; use serde::{Deserialize, Serialize}; -use std::collections::TryReserveError; /// A structure that stores information about `MPI_Isend` calls. 
/// @@ -53,15 +52,8 @@ impl MpiIsend { } } -#[typetag::serde] -impl Register for MpiIsend { - fn register(self, events: &mut Vec>) -> Result<(), TryReserveError> { - // Ensure that the program does not panic if allocation fails - events.try_reserve_exact(2 * events.len())?; - events.push(Box::new(self)); - Ok(()) - } -} +impl_builder_error!(MpiIsendBuilderError); +impl_register!(MpiIsend); #[cfg(test)] mod tests { diff --git a/interpol-rs/src/mpi_events/point_to_point/mpi_recv.rs b/interpol-rs/src/mpi_events/point_to_point/mpi_recv.rs index 4e3cad9..d5119f5 100644 --- a/interpol-rs/src/mpi_events/point_to_point/mpi_recv.rs +++ b/interpol-rs/src/mpi_events/point_to_point/mpi_recv.rs @@ -1,10 +1,9 @@ use crate::{ - interpol::Register, + impl_builder_error, impl_register, types::{MpiComm, MpiRank, MpiTag, Tsc}, }; use derive_builder::Builder; use serde::{Deserialize, Serialize}; -use std::collections::TryReserveError; /// A structure that stores information about `MPI_Recv` calls. /// @@ -51,15 +50,8 @@ impl MpiRecv { } } -#[typetag::serde] -impl Register for MpiRecv { - fn register(self, events: &mut Vec>) -> Result<(), TryReserveError> { - // Ensure that the program does not panic if allocation fails - events.try_reserve_exact(2 * events.len())?; - events.push(Box::new(self)); - Ok(()) - } -} +impl_builder_error!(MpiRecvBuilderError); +impl_register!(MpiRecv); #[cfg(test)] mod tests { diff --git a/interpol-rs/src/mpi_events/point_to_point/mpi_send.rs b/interpol-rs/src/mpi_events/point_to_point/mpi_send.rs index 09d184b..8a3bedc 100644 --- a/interpol-rs/src/mpi_events/point_to_point/mpi_send.rs +++ b/interpol-rs/src/mpi_events/point_to_point/mpi_send.rs @@ -1,10 +1,9 @@ use crate::{ - interpol::Register, + impl_builder_error, impl_register, types::{MpiComm, MpiRank, MpiTag, Tsc}, }; use derive_builder::Builder; use serde::{Deserialize, Serialize}; -use std::collections::TryReserveError; /// A structure that stores information about `MPI_Send` calls. /// @@ -51,15 +50,8 @@ impl MpiSend { } } -#[typetag::serde] -impl Register for MpiSend { - fn register(self, events: &mut Vec>) -> Result<(), TryReserveError> { - // Ensure that the program does not panic if allocation fails - events.try_reserve_exact(2 * events.len())?; - events.push(Box::new(self)); - Ok(()) - } -} +impl_builder_error!(MpiSendBuilderError); +impl_register!(MpiSend); #[cfg(test)] mod tests { diff --git a/interpol-rs/src/mpi_events/synchronization/mpi_barrier.rs b/interpol-rs/src/mpi_events/synchronization/mpi_barrier.rs index 5c8c405..d3bef70 100644 --- a/interpol-rs/src/mpi_events/synchronization/mpi_barrier.rs +++ b/interpol-rs/src/mpi_events/synchronization/mpi_barrier.rs @@ -1,8 +1,7 @@ -use crate::interpol::Register; use crate::types::{MpiComm, MpiRank, Tsc}; +use crate::{impl_builder_error, impl_register}; use derive_builder::Builder; use serde::{Deserialize, Serialize}; -use std::collections::TryReserveError; /// A structure that stores information about `MPI_Barrier` calls. 
/// @@ -33,15 +32,8 @@ impl MpiBarrier { } } -#[typetag::serde] -impl Register for MpiBarrier { - fn register(self, events: &mut Vec>) -> Result<(), TryReserveError> { - // Ensure that the program does not panic if allocation fails - events.try_reserve_exact(2 * events.len())?; - events.push(Box::new(self)); - Ok(()) - } -} +impl_builder_error!(MpiBarrierBuilderError); +impl_register!(MpiBarrier); #[cfg(test)] mod tests { diff --git a/interpol-rs/src/mpi_events/synchronization/mpi_ibarrier.rs b/interpol-rs/src/mpi_events/synchronization/mpi_ibarrier.rs index b051ef5..9f7aa7d 100644 --- a/interpol-rs/src/mpi_events/synchronization/mpi_ibarrier.rs +++ b/interpol-rs/src/mpi_events/synchronization/mpi_ibarrier.rs @@ -1,8 +1,7 @@ -use crate::interpol::Register; +use crate::{impl_builder_error, impl_register}; use crate::types::{MpiComm, MpiRank, MpiReq, Tsc}; use derive_builder::Builder; use serde::{Deserialize, Serialize}; -use std::collections::TryReserveError; /// A structure that stores information about `MPI_Barrier` calls. /// @@ -36,15 +35,8 @@ impl MpiIbarrier { } } -#[typetag::serde] -impl Register for MpiIbarrier { - fn register(self, events: &mut Vec>) -> Result<(), TryReserveError> { - // Ensure that the program does not panic if allocation fails - events.try_reserve_exact(2 * events.len())?; - events.push(Box::new(self)); - Ok(()) - } -} +impl_builder_error!(MpiIbarrierBuilderError); +impl_register!(MpiIbarrier); #[cfg(test)] mod tests { @@ -68,8 +60,11 @@ mod tests { #[test] fn serializes() { let ibarrier = MpiIbarrier::new(0, 0, 0, 1024, 2048); - let json = String::from("{\"current_rank\":0,\"comm\":0,\"req\":0,\"tsc\":1024,\"duration\":2048}"); - let serialized = serde_json::to_string(&ibarrier).expect("failed to serialize `MpiIbarrier`"); + let json = String::from( + "{\"current_rank\":0,\"comm\":0,\"req\":0,\"tsc\":1024,\"duration\":2048}", + ); + let serialized = + serde_json::to_string(&ibarrier).expect("failed to serialize `MpiIbarrier`"); assert_eq!(json, serialized); } diff --git a/interpol-rs/src/mpi_events/synchronization/mpi_test.rs b/interpol-rs/src/mpi_events/synchronization/mpi_test.rs index 6a1d182..9d9726f 100644 --- a/interpol-rs/src/mpi_events/synchronization/mpi_test.rs +++ b/interpol-rs/src/mpi_events/synchronization/mpi_test.rs @@ -1,8 +1,7 @@ -use crate::interpol::Register; +use crate::{impl_builder_error, impl_register}; use crate::types::{MpiRank, MpiReq, Tsc}; use derive_builder::Builder; use serde::{Deserialize, Serialize}; -use std::collections::TryReserveError; /// A structure that stores information about `MPI_Test` calls. 
/// @@ -42,15 +41,8 @@ impl MpiTest { } } -#[typetag::serde] -impl Register for MpiTest { - fn register(self, events: &mut Vec>) -> Result<(), TryReserveError> { - // Ensure that the program does not panic if allocation fails - events.try_reserve_exact(2 * events.len())?; - events.push(Box::new(self)); - Ok(()) - } -} +impl_builder_error!(MpiTestBuilderError); +impl_register!(MpiTest); #[cfg(test)] mod tests { diff --git a/interpol-rs/src/mpi_events/synchronization/mpi_testall.rs b/interpol-rs/src/mpi_events/synchronization/mpi_testall.rs new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/interpol-rs/src/mpi_events/synchronization/mpi_testall.rs @@ -0,0 +1 @@ + diff --git a/interpol-rs/src/mpi_events/synchronization/mpi_testsome.rs b/interpol-rs/src/mpi_events/synchronization/mpi_testsome.rs new file mode 100644 index 0000000..e69de29 diff --git a/interpol-rs/src/mpi_events/synchronization/mpi_wait.rs b/interpol-rs/src/mpi_events/synchronization/mpi_wait.rs index decd58d..031b4a0 100644 --- a/interpol-rs/src/mpi_events/synchronization/mpi_wait.rs +++ b/interpol-rs/src/mpi_events/synchronization/mpi_wait.rs @@ -1,8 +1,7 @@ -use crate::interpol::Register; +use crate::{impl_builder_error, impl_register}; use crate::types::{MpiRank, MpiReq, Tsc}; use derive_builder::Builder; use serde::{Deserialize, Serialize}; -use std::collections::TryReserveError; /// A structure that stores information about `MPI_Wait` calls. /// @@ -33,15 +32,8 @@ impl MpiWait { } } -#[typetag::serde] -impl Register for MpiWait { - fn register(self, events: &mut Vec>) -> Result<(), TryReserveError> { - // Ensure that the program does not panic if allocation fails - events.try_reserve_exact(2 * events.len())?; - events.push(Box::new(self)); - Ok(()) - } -} +impl_builder_error!(MpiWaitBuilderError); +impl_register!(MpiWait); #[cfg(test)] mod tests { diff --git a/interpol-rs/src/mpi_events/synchronization/mpi_waitall.rs b/interpol-rs/src/mpi_events/synchronization/mpi_waitall.rs new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/interpol-rs/src/mpi_events/synchronization/mpi_waitall.rs @@ -0,0 +1 @@ + diff --git a/interpol-rs/src/mpi_events/synchronization/mpi_waitsome.rs b/interpol-rs/src/mpi_events/synchronization/mpi_waitsome.rs new file mode 100644 index 0000000..e69de29
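// --- Example (not part of the patch above) ---------------------------------
// A minimal sketch of how a future event type would plug into the
// `impl_register!` / `impl_builder_error!` macros introduced by this change.
// The empty `mpi_waitall.rs` / `mpi_testall.rs` placeholder files suggest such
// events are planned; the `MpiWaitall` struct and its `nb_reqs` field below are
// illustrative assumptions, not code from the repository, and the module would
// still need to be declared in `synchronization/mod.rs`.
use derive_builder::Builder;
use serde::{Deserialize, Serialize};

use crate::types::{MpiRank, Tsc};
use crate::{impl_builder_error, impl_register};

/// A structure that stores information about `MPI_Waitall` calls (sketch).
#[derive(Builder, Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MpiWaitall {
    current_rank: MpiRank,
    nb_reqs: u32,
    tsc: Tsc,
    duration: Tsc,
}

// `impl_builder_error!` maps the derive_builder error type into `InterpolError`,
// so a `register_waitall` helper in `interpol.rs` could call
// `MpiWaitallBuilder::default()...build()?` like the other `register_*` functions.
impl_builder_error!(MpiWaitallBuilderError);

// `impl_register!` supplies the `Register` impl (a bounded `try_reserve_exact`
// push into the static `EVENTS` vector) plus the `tsc()` accessor that
// `sort_all_traces` uses as its sort key; it only requires a `tsc` field.
impl_register!(MpiWaitall);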