Cargo fmt

Henrik Tjäder 2020-09-01 14:39:05 +00:00
parent fea6d2facf
commit f151d5871c
11 changed files with 53 additions and 99 deletions
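
The hunks below are mechanical formatting changes: single-expression closures and wrapped call chains collapsed onto one line, multi-line function signatures joined, and surplus blank lines dropped. As a minimal, hypothetical sketch of the before/after shape (not code from this repository), both bindings below compile to the same result:

    // Hypothetical example illustrating the rewrite pattern seen throughout this commit.
    fn main() {
        let names = vec!["init", "idle", "uart0"];

        // Pre-fmt shape: a single-expression closure spread over a block.
        let lens_before = names
            .iter()
            .map(|n| {
                n.len()
            })
            .collect::<Vec<_>>();

        // Post-fmt shape: the same iterator chain with the closure body inlined.
        let lens_after = names.iter().map(|n| n.len()).collect::<Vec<_>>();

        assert_eq!(lens_before, lens_after);
    }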

View file

@@ -25,24 +25,21 @@ impl ops::Deref for Analysis {
 // Assign an `extern` interrupt to each priority level
 pub fn app(analysis: P<analyze::Analysis>, app: &App) -> P<Analysis> {
     let mut interrupts = BTreeMap::new();
     let priorities = app
         .software_tasks
         .values()
-        .filter_map(|task| {
-            Some(task.args.priority)
-        })
+        .filter_map(|task| Some(task.args.priority))
         .chain(analysis.timer_queues.first().map(|tq| tq.priority))
         .collect::<BTreeSet<_>>();
 
     if !priorities.is_empty() {
-        interrupts =
-            priorities
+        interrupts = priorities
             .iter()
             .cloned()
             .rev()
             .zip(app.extern_interrupts.keys().cloned())
             .collect();
     }
 
     P::new(Analysis {
         parent: analysis,

View file

@@ -4,7 +4,6 @@ use proc_macro2::Span;
 use rtic_syntax::{
     analyze::Analysis,
     ast::{App, CustomArg},
 };
 
-
 use syn::{parse, Path};
@@ -51,9 +50,7 @@ pub fn app<'a>(app: &'a App, analysis: &Analysis) -> parse::Result<Extra<'a>> {
     // check that external (device-specific) interrupts are not named after known (Cortex-M)
     // exceptions
-    for name in app
-        .extern_interrupts.keys()
-    {
+    for name in app.extern_interrupts.keys() {
         let name_s = name.to_string();
         match &*name_s {
@@ -83,8 +80,7 @@ pub fn app<'a>(app: &'a App, analysis: &Analysis) -> parse::Result<Extra<'a>> {
         .collect::<HashSet<_>>();
 
     let need = priorities.len();
-    let given = app
-        .extern_interrupts.len();
+    let given = app.extern_interrupts.len();
     if need > given {
         let s = {
             format!(
@@ -131,9 +127,7 @@ pub fn app<'a>(app: &'a App, analysis: &Analysis) -> parse::Result<Extra<'a>> {
             },
 
             "peripherals" => match v {
-                CustomArg::Bool(x) => {
-                    peripherals = if *x { true } else { false }
-                }
+                CustomArg::Bool(x) => peripherals = if *x { true } else { false },
 
                 /*
                 CustomArg::UInt(s) if app.args.cores != 1 => {
@@ -152,13 +146,12 @@ pub fn app<'a>(app: &'a App, analysis: &Analysis) -> parse::Result<Extra<'a>> {
                 }
             }
             */
-
             _ => {
                 return Err(parse::Error::new(
                     k.span(),
                     //if app.args.cores == 1 {
                     "unexpected argument value; this should be a boolean",
                     /*
                     } else {
                         "unexpected argument value; this should be an integer"
                     },

View file

@@ -35,14 +35,11 @@ pub fn app(app: &App, analysis: &Analysis, extra: &Extra) -> TokenStream2 {
     let pre_init_stmts = pre_init::codegen(&app, analysis, extra);
 
-    let (const_app_init, root_init, user_init, call_init) =
-        init::codegen(app, analysis, extra);
+    let (const_app_init, root_init, user_init, call_init) = init::codegen(app, analysis, extra);
 
-    let (const_app_post_init, post_init_stmts) =
-        post_init::codegen(&app, analysis);
+    let (const_app_post_init, post_init_stmts) = post_init::codegen(&app, analysis);
 
-    let (const_app_idle, root_idle, user_idle, call_idle) =
-        idle::codegen(app, analysis, extra);
+    let (const_app_idle, root_idle, user_idle, call_idle) = idle::codegen(app, analysis, extra);
 
     user.push(quote!(
         #user_init
@@ -84,7 +81,6 @@ pub fn app(app: &App, analysis: &Analysis, extra: &Extra) -> TokenStream2 {
         }
     ));
 
-
     let (const_app_resources, mod_resources) = resources::codegen(app, analysis, extra);
 
     let (const_app_hardware_tasks, root_hardware_tasks, user_hardware_tasks) =

View file

@@ -11,15 +11,15 @@ pub fn codegen(analysis: &Analysis) -> Vec<TokenStream2> {
     // type only on some core (e.g. `#[cfg(core = "0")] use some::Type;`)
     //if let Some(types) = analysis.send_types {
-        for ty in &analysis.send_types {
-            stmts.push(quote!(rtic::export::assert_send::<#ty>();));
-        }
+    for ty in &analysis.send_types {
+        stmts.push(quote!(rtic::export::assert_send::<#ty>();));
+    }
     //}
 
     //if let Some(types) = analysis.sync_types {
-        for ty in &analysis.sync_types {
-            stmts.push(quote!(rtic::export::assert_sync::<#ty>();));
-        }
+    for ty in &analysis.sync_types {
+        stmts.push(quote!(rtic::export::assert_sync::<#ty>();));
+    }
     //}
 
     // if the `schedule` API is used in more than one core then we need to check that the

View file

@@ -29,7 +29,6 @@ pub fn codegen(
     let mut user_tasks = vec![];
 
-
     for (name, task) in &app.hardware_tasks {
         let (let_instant, instant) = if app.uses_schedule() {
             let m = extra.monotonic();
@@ -96,8 +95,7 @@ pub fn codegen(
         // `${task}Locals`
         let mut locals_pat = None;
         if !task.locals.is_empty() {
-            let (struct_, pat) =
-                locals::codegen(Context::HardwareTask(name), &task.locals, app);
+            let (struct_, pat) = locals::codegen(Context::HardwareTask(name), &task.locals, app);
             root.push(struct_);
             locals_pat = Some(pat);

View file

@@ -40,17 +40,15 @@ pub fn codegen(
         .late_resources
         .iter()
         .flat_map(|resources| {
-            resources
-                .iter()
-                .map(|name| {
-                    let ty = &app.late_resources[name].ty;
-                    let cfgs = &app.late_resources[name].cfgs;
+            resources.iter().map(|name| {
+                let ty = &app.late_resources[name].ty;
+                let cfgs = &app.late_resources[name].cfgs;
 
                 quote!(
                     #(#cfgs)*
                     pub #name: #ty
                 )
             })
         })
         .collect::<Vec<_>>();

View file

@@ -5,17 +5,14 @@ use rtic_syntax::ast::App;
 use crate::analyze::Analysis;
 
 /// Generates code that runs after `#[init]` returns
-pub fn codegen(
-    app: &App,
-    analysis: &Analysis,
-) -> (Vec<TokenStream2>, Vec<TokenStream2>) {
+pub fn codegen(app: &App, analysis: &Analysis) -> (Vec<TokenStream2>, Vec<TokenStream2>) {
     //#TODO remove
     let const_app = vec![];
     let mut stmts = vec![];
 
     // initialize late resources
     //if let Some(late_resources) = analysis.late_resources {
     //for name in late_resources {
     if analysis.late_resources.len() > 0 {
         // #TODO, check soundness of this, why the wrapping
         // BTreeSet wrapped in a vector

View file

@@ -5,14 +5,7 @@ use rtic_syntax::ast::App;
 use crate::{analyze::Analysis, check::Extra, codegen::util};
 
 /// Generates code that runs before `#[init]`
-pub fn codegen(
-    app: &App,
-    analysis: &Analysis,
-    extra: &Extra,
-) ->
-    // `pre_init_stmts`
-    Vec<TokenStream2>
-{
+pub fn codegen(app: &App, analysis: &Analysis, extra: &Extra) -> Vec<TokenStream2> {
     let mut stmts = vec![];
 
     // disable interrupts -- `init` must run with interrupts disabled

View file

@@ -1,9 +1,6 @@
 use proc_macro2::TokenStream as TokenStream2;
 use quote::quote;
-use rtic_syntax::{
-    analyze::Ownership,
-    ast::App,
-};
+use rtic_syntax::{analyze::Ownership, ast::App};
 
 use crate::{analyze::Analysis, check::Extra, codegen::util};
@@ -28,10 +25,10 @@ pub fn codegen(
     {
         //let loc_attr = None;
         let section = if expr.is_none() {
             util::link_section_uninit(true)
         } else {
             None
         };
         /*
         let (loc_attr, section) = match loc {
             Location::Owned => (
@@ -66,7 +63,6 @@ pub fn codegen(
         }
 
-
         if let Some(Ownership::Contended { ceiling }) = analysis.ownerships.get(name) {
             mod_resources.push(quote!(
                 #[allow(non_camel_case_types)]
                 #(#cfgs)*

View file

@@ -45,7 +45,8 @@ pub fn codegen(
         let device = extra.device;
         let enum_ = util::interrupt_ident();
         let interrupt = &analysis.interrupts.get(&priority);
-        let pend = {quote!(
+        let pend = {
+            quote!(
                 rtic::pend(#device::#enum_::#interrupt);
             )
         };

View file

@@ -44,10 +44,7 @@ pub fn cfg_core(core: Core, cores: u8) -> Option<TokenStream2> {
 /// There may be more than one free queue per task because we need one for each sender core so we
 /// include the sender (e.g. `S0`) in the name
 pub fn fq_ident(task: &Ident) -> Ident {
-    Ident::new(
-        &format!("{}_FQ", task.to_string()),
-        Span::call_site(),
-    )
+    Ident::new(&format!("{}_FQ", task.to_string()), Span::call_site())
 }
 
 /// Generates a `Mutex` implementation
@@ -112,7 +109,7 @@ pub fn instants_ident(task: &Ident) -> Ident {
 pub fn interrupt_ident() -> Ident {
     let span = Span::call_site();
     Ident::new("Interrupt", span)
 }
 
 /// Whether `name` is an exception with configurable priority
@@ -253,10 +250,7 @@ pub fn resources_ident(ctxt: Context, app: &App) -> Ident {
 /// in turn may use more than one ready queue because the queues are SPSC queues so one is needed
 /// per sender core.
 pub fn rq_ident(priority: u8) -> Ident {
-    Ident::new(
-        &format!("P{}_RQ", priority),
-        Span::call_site(),
-    )
+    Ident::new(&format!("P{}_RQ", priority), Span::call_site())
 }
 
 /// Generates an identifier for a "schedule" function
@@ -264,10 +258,7 @@ pub fn rq_ident(priority: u8) -> Ident {
 /// The methods of the `Schedule` structs invoke these functions. As one task may be `schedule`-ed
 /// by different cores we need one "schedule" function per possible task-sender pair
 pub fn schedule_ident(name: &Ident) -> Ident {
-    Ident::new(
-        &format!("schedule_{}", name.to_string()),
-        Span::call_site(),
-    )
+    Ident::new(&format!("schedule_{}", name.to_string()), Span::call_site())
 }
 
 /// Generates an identifier for the `enum` of `schedule`-able tasks
@@ -287,10 +278,7 @@ pub fn spawn_barrier() -> Ident {
 /// The methods of the `Spawn` structs invoke these functions. As one task may be `spawn`-ed by
 /// different cores we need one "spawn" function per possible task-sender pair
 pub fn spawn_ident(name: &Ident) -> Ident {
-    Ident::new(
-        &format!("spawn_{}", name.to_string()),
-        Span::call_site(),
-    )
+    Ident::new(&format!("spawn_{}", name.to_string()), Span::call_site())
 }
 
 /// Generates an identifier for the `enum` of `spawn`-able tasks
@@ -298,10 +286,7 @@ pub fn spawn_ident(name: &Ident) -> Ident {
 /// This identifier needs the same structure as the `RQ` identifier because there's one ready queue
 /// for each of these `T` enums
 pub fn spawn_t_ident(priority: u8) -> Ident {
-    Ident::new(
-        &format!("P{}_T", priority),
-        Span::call_site(),
-    )
+    Ident::new(&format!("P{}_T", priority), Span::call_site())
 }
 
 pub fn suffixed(name: &str) -> Ident {
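
For reference, the ident helpers reformatted in the last file are thin wrappers around `format!` and `Ident::new`. A small standalone sketch of how they behave, assuming the proc-macro2 crate and a hypothetical `main` for demonstration (the helper bodies are taken from the post-fmt side of the diff above):

    // Demonstration only: mirrors fq_ident and rq_ident as shown in the diff.
    use proc_macro2::{Ident, Span};

    fn fq_ident(task: &Ident) -> Ident {
        Ident::new(&format!("{}_FQ", task.to_string()), Span::call_site())
    }

    fn rq_ident(priority: u8) -> Ident {
        Ident::new(&format!("P{}_RQ", priority), Span::call_site())
    }

    fn main() {
        let task = Ident::new("foo", Span::call_site());
        println!("{}", fq_ident(&task)); // prints `foo_FQ`
        println!("{}", rq_ident(2));     // prints `P2_RQ`
    }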