Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
121 changes: 103 additions & 18 deletions pdl-compiler/src/analyzer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -535,13 +535,17 @@ fn scalar_max(width: usize) -> usize {
/// - undeclared test identifier
/// - invalid test identifier
/// - recursive declaration
fn check_decl_identifiers(file: &File, scope: &Scope) -> Result<(), Diagnostics> {
///
/// Returns a copy of the file where the declarations have been
/// reordered by topological sort to remove forward references.
fn check_decl_identifiers(file: &File, scope: &Scope) -> Result<File, Diagnostics> {
enum Mark {
Temporary,
Permanent,
}
#[derive(Default)]
struct Context<'d> {
history: Vec<Decl>,
visited: HashMap<&'d str, Mark>,
}

Expand Down Expand Up @@ -669,6 +673,7 @@ fn check_decl_identifiers(file: &File, scope: &Scope) -> Result<(), Diagnostics>
}

// Done visiting current declaration.
context.history.push(decl.clone());
context.visited.insert(decl_id, Mark::Permanent);
}

Expand All @@ -677,10 +682,12 @@ fn check_decl_identifiers(file: &File, scope: &Scope) -> Result<(), Diagnostics>
let mut context = Default::default();
for decl in &file.declarations {
match &decl.desc {
DeclDesc::Checksum { .. } | DeclDesc::CustomField { .. } | DeclDesc::Enum { .. } => (),
DeclDesc::Packet { .. } | DeclDesc::Struct { .. } | DeclDesc::Group { .. } => {
bfs(decl, &mut context, scope, &mut diagnostics)
}
DeclDesc::Checksum { .. }
| DeclDesc::CustomField { .. }
| DeclDesc::Enum { .. }
| DeclDesc::Packet { .. }
| DeclDesc::Struct { .. }
| DeclDesc::Group { .. } => bfs(decl, &mut context, scope, &mut diagnostics),
DeclDesc::Test { type_id, .. } => match scope.typedef.get(type_id) {
None => diagnostics.push(
Diagnostic::error()
Expand All @@ -701,7 +708,14 @@ fn check_decl_identifiers(file: &File, scope: &Scope) -> Result<(), Diagnostics>
}
}

diagnostics.err_or(())
diagnostics.err_or(File {
version: file.version.clone(),
file: file.file,
comments: file.comments.clone(),
endianness: file.endianness,
declarations: context.history,
max_key: file.max_key,
})
}

/// Check field identifiers.
Expand Down Expand Up @@ -1845,18 +1859,19 @@ fn desugar_flags(file: &mut File) {
/// from the analysis.
pub fn analyze(file: &File) -> Result<File, Diagnostics> {
let scope = Scope::new(file)?;
check_decl_identifiers(file, &scope)?;
check_field_identifiers(file)?;
check_enum_declarations(file)?;
check_size_fields(file)?;
check_fixed_fields(file, &scope)?;
check_payload_fields(file)?;
check_array_fields(file)?;
check_padding_fields(file)?;
check_checksum_fields(file, &scope)?;
check_optional_fields(file)?;
check_group_constraints(file, &scope)?;
let mut file = inline_groups(file)?;
let file = check_decl_identifiers(file, &scope)?;
let scope = Scope::new(&file).unwrap();
check_field_identifiers(&file)?;
check_enum_declarations(&file)?;
check_size_fields(&file)?;
check_fixed_fields(&file, &scope)?;
check_payload_fields(&file)?;
check_array_fields(&file)?;
check_padding_fields(&file)?;
check_checksum_fields(&file, &scope)?;
check_optional_fields(&file)?;
check_group_constraints(&file, &scope)?;
let mut file = inline_groups(&file)?;
desugar_flags(&mut file);
let scope = Scope::new(&file)?;
check_decl_constraints(&file, &scope)?;
Expand Down Expand Up @@ -3218,6 +3233,76 @@ mod test {
);
}

// Verifies that declarations may reference identifiers declared later in
// the file (forward references). Per the updated doc comment on
// check_decl_identifiers, the analyzer reorders declarations by
// topological sort, so each of these snippets must analyze cleanly.
// NOTE(review): `valid!` presumably asserts that analysis produces no
// diagnostics — confirm against the macro definition in this test module.
#[test]
fn test_decl_ordering() {
// Packet field whose type is an enum declared after the packet.
valid!(
r#"
little_endian_packets
packet A {
b: B,
}

enum B : 8 {
X = 0,
Y = 1,
Z = 127,
}
"#
);

// Packet field whose type is a struct declared after the packet.
valid!(
r#"
little_endian_packets
packet A {
b: B,
}

struct B {
a: 7,
b: 9,
}
"#
);

// Packet field whose type is a custom field declared after the packet.
valid!(
r#"
little_endian_packets
packet A {
f: F,
}

custom_field F : 8 "f"
"#
);

// Packet that inlines a group declared after the packet.
valid!(
r#"
little_endian_packets
packet A {
G,
}

group G {
a: 8,
}
"#
);

// Packet whose parent packet is declared after the child.
valid!(
r#"
little_endian_packets
packet A : B {
b: 16,
}

packet B {
a: 8,
_payload_
}
"#
);
}

#[test]
fn test_enum_declaration() {
valid!(
Expand Down
24 changes: 15 additions & 9 deletions pdl-compiler/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -66,8 +66,8 @@ struct Opt {
test_file: Option<String>,

#[argh(positional)]
/// input file.
input_file: String,
/// input files.
input_file: Option<String>,

#[argh(option)]
/// exclude declarations from the generated output.
Expand Down Expand Up @@ -104,9 +104,9 @@ fn filter_declarations(file: ast::File, exclude_declarations: &[String]) -> ast:
}
}

fn generate_backend(opt: &Opt) -> Result<(), String> {
fn generate_backend(opt: &Opt, input_file: &str) -> Result<(), String> {
let mut sources = ast::SourceDatabase::new();
match parser::parse_file(&mut sources, &opt.input_file) {
match parser::parse_file(&mut sources, input_file) {
Ok(file) => {
let file = filter_declarations(file, &opt.exclude_declaration);
let analyzed_file = match analyzer::analyze(&file) {
Expand Down Expand Up @@ -174,7 +174,7 @@ fn generate_backend(opt: &Opt) -> Result<(), String> {
}
}

fn generate_tests(opt: &Opt, test_file: &str) -> Result<(), String> {
fn generate_tests(opt: &Opt, test_file: &str, _input_file: &str) -> Result<(), String> {
match opt.output_format {
OutputFormat::Rust => {
println!("{}", backends::rust::test::generate_tests(test_file)?);
Expand All @@ -199,7 +199,7 @@ fn generate_tests(opt: &Opt, test_file: &str) -> Result<(), String> {
test_file,
std::path::Path::new(output_dir),
package.clone(),
&opt.input_file,
_input_file,
&opt.exclude_declaration,
)
}
Expand All @@ -214,13 +214,19 @@ fn main() -> Result<(), String> {
let opt: Opt = argh::from_env();

if opt.version {
println!("Packet Description Language parser version 1.0");
println!("pdlc {}\nCopyright (C) 2026 Google LLC", env!("CARGO_PKG_VERSION"));
return Ok(());
}

let Some(input_file) = opt.input_file.as_ref() else {
return Err("No input file is specified".to_owned());
};

if let Some(test_file) = opt.test_file.as_ref() {
generate_tests(&opt, test_file)
generate_tests(&opt, test_file, input_file)?
} else {
generate_backend(&opt)
generate_backend(&opt, input_file)?
}

Ok(())
}
Loading