Parsing example RFC 1035 master file; fix bug in escaped dots in names

This commit is contained in:
Benjamin Fry 2015-09-01 23:53:44 -07:00
parent 376273f2f2
commit aec63c5dfe
21 changed files with 449 additions and 139 deletions

View File

@ -26,6 +26,24 @@ keywords = ["DNS", "dns", "host", "BIND", "dig", "named", "dnssec"]
# be separated with a `/`
license = "Apache-2.0"
[profile.dev]
opt-level = 0 # Controls the --opt-level the compiler builds with
debug = true # Controls whether the compiler passes `-g`
rpath = false # Controls whether the compiler passes `-C rpath`
lto = false # Controls `-C lto` for binaries and staticlibs
debug-assertions = true # Controls whether debug assertions are enabled
codegen-units = 1 # Controls whether the compiler passes `-C codegen-units`
# `codegen-units` is ignored when `lto = true`
# The testing profile, used for `cargo test`
[profile.test]
opt-level = 0
debug = true
rpath = false
lto = false
debug-assertions = true
codegen-units = 1
[dependencies]
log = "^0.3.1"
# regex = "^0.1.41"

View File

@ -17,6 +17,7 @@ use std::error::Error;
use std::fmt;
use std::num;
use std::io;
use std::net::AddrParseError;
use super::DecodeError;
use super::LexerError;
@ -26,7 +27,7 @@ use ::serialize::txt::Token;
pub enum ParseError {
LexerError(LexerError),
DecodeError(DecodeError),
UnrecognizedToken(Token),
UnexpectedToken(Token),
OriginIsUndefined,
RecordTypeNotSpecified,
RecordNameNotSpecified,
@ -34,7 +35,10 @@ pub enum ParseError {
RecordTTLNotSpecified,
RecordDataNotSpecified,
SoaAlreadySpecified,
MissingToken(String),
IoError(io::Error),
ParseIntError(num::ParseIntError),
AddrParseError(AddrParseError),
}
impl fmt::Display for ParseError {
@ -42,7 +46,7 @@ impl fmt::Display for ParseError {
match *self {
ParseError::LexerError(ref err) => err.fmt(f),
ParseError::DecodeError(ref err) => err.fmt(f),
ParseError::UnrecognizedToken(ref t) => write!(f, "Unrecognized Token in stream: {:?}", t),
ParseError::UnexpectedToken(ref t) => write!(f, "Unrecognized Token in stream: {:?}", t),
ParseError::OriginIsUndefined => write!(f, "$ORIGIN was not specified"),
ParseError::RecordTypeNotSpecified => write!(f, "Record type not specified"),
ParseError::RecordNameNotSpecified => write!(f, "Record name not specified"),
@ -50,7 +54,10 @@ impl fmt::Display for ParseError {
ParseError::RecordTTLNotSpecified => write!(f, "Record ttl not specified"),
ParseError::RecordDataNotSpecified => write!(f, "Record data not specified"),
ParseError::SoaAlreadySpecified => write!(f, "SOA is already specified"),
ParseError::MissingToken(ref s) => write!(f, "Token is missing: {}", s),
ParseError::IoError(ref err) => err.fmt(f),
ParseError::ParseIntError(ref err) => err.fmt(f),
ParseError::AddrParseError(ref s) => write!(f, "Could not parse address: {:?}", s),
}
}
}
@ -60,7 +67,7 @@ impl Error for ParseError {
match *self {
ParseError::LexerError(ref err) => err.description(),
ParseError::DecodeError(ref err) => err.description(),
ParseError::UnrecognizedToken(..) => "Unrecognized Token",
ParseError::UnexpectedToken(..) => "Unrecognized Token",
ParseError::OriginIsUndefined => "$ORIGIN was not specified",
ParseError::RecordTypeNotSpecified => "Record type not specified",
ParseError::RecordNameNotSpecified => "Record name not specified",
@ -68,7 +75,10 @@ impl Error for ParseError {
ParseError::RecordTTLNotSpecified => "Record ttl not specified",
ParseError::RecordDataNotSpecified => "Record data not specified",
ParseError::SoaAlreadySpecified => "SOA is already specified",
ParseError::MissingToken(..) => "Token is missing",
ParseError::IoError(ref err) => err.description(),
ParseError::ParseIntError(ref err) => err.description(),
ParseError::AddrParseError(..) => "Could not parse address",
}
}
@ -77,6 +87,7 @@ impl Error for ParseError {
ParseError::LexerError(ref err) => Some(err),
ParseError::DecodeError(ref err) => Some(err),
ParseError::IoError(ref err) => Some(err),
ParseError::ParseIntError(ref err) => Some(err),
_ => None,
}
}
@ -99,3 +110,15 @@ impl From<io::Error> for ParseError {
ParseError::IoError(err)
}
}
// Allows `try!`/`?` to convert integer-parse failures (TTLs, SOA serial/refresh/etc.)
// directly into a ParseError at the call site.
impl From<num::ParseIntError> for ParseError {
fn from(err: num::ParseIntError) -> ParseError {
ParseError::ParseIntError(err)
}
}
// Allows `try!`/`?` to convert IPv4/IPv6 address-parse failures (A/AAAA rdata)
// directly into a ParseError at the call site.
impl From<AddrParseError> for ParseError {
fn from(err: AddrParseError) -> ParseError {
ParseError::AddrParseError(err)
}
}

View File

@ -15,8 +15,6 @@
*/
use std::collections::HashMap;
use ::serialize::txt::*;
use ::error::*;
use ::rr::{RecordType, Record, Name};
/// Authority is the storage method for all
@ -30,4 +28,16 @@ impl Authority {
// Builds an Authority for the zone rooted at `origin` from a pre-built map of
// (record name, record type) -> records for that key.
pub fn new(origin: Name, records: HashMap<(Name, RecordType), Vec<Record>>) -> Authority {
Authority{ origin: origin, records: records }
}
// Returns the zone's SOA record, if one was stored: the first record under the
// (origin, SOA) key. None when the zone has no SOA.
pub fn get_soa(&self) -> Option<&Record> {
// SOA should be origin|SOA
self.lookup(&self.origin, RecordType::SOA).and_then(|v|v.first())
}
// Looks up all records stored for `name` with record type `rtype`;
// None when there is no entry for that (name, type) pair.
pub fn lookup(&self, name: &Name, rtype: RecordType) -> Option<&Vec<Record>> {
// TODO this should be an unnecessary copy... need to create a key type, and then use that for
// all queries
//self.records.get(&(self.origin.clone(), RecordType::SOA)).map(|v|v.first())
// the clone exists only to materialize the owned tuple key for the HashMap lookup
self.records.get(&(name.clone(), rtype))
}
}

View File

@ -28,6 +28,14 @@ impl Name {
Name { labels: Vec::new() }
}
// inline builder: consumes self, appends one label, and returns the updated Name,
// so construction can be chained, e.g. Name::new().label("isi").label("edu")
pub fn label(self, label: &'static str) -> Self {
let mut me = self;
me.labels.push(label.into());
me
}
// for mutating over time
// constructs a Name directly from an ordered vector of label strings
pub fn with_labels(labels: Vec<String>) -> Self {
Name { labels: labels }
}
@ -45,19 +53,20 @@ impl Name {
self
}
pub fn parse(local: String, origin: &Option<Self>) -> ParseResult<Self> {
// TODO: I think this does the wrong thing for escaped data
pub fn parse(local: &String, origin: Option<&Self>) -> ParseResult<Self> {
let mut build = Name::new();
// split the local part
// TODO: this should be a real lexer, to verify all data is legal name...
for s in local.split('.') {
if s.len() > 0 {
build.add_label(s.to_string());
build.add_label(s.to_string().to_lowercase()); // all names stored in lowercase
}
}
if !local.ends_with('.') {
build.append(try!(origin.as_ref().ok_or(ParseError::OriginIsUndefined)));
build.append(try!(origin.ok_or(ParseError::OriginIsUndefined)));
}
Ok(build)
@ -86,7 +95,7 @@ impl BinSerializable for Name {
Some(0) | None => LabelParseState::Root,
Some(byte) if byte & 0xC0 == 0xC0 => LabelParseState::Pointer,
Some(byte) if byte <= 0x3F => LabelParseState::Label,
_ => unimplemented!(),
_ => unreachable!(),
}
},
LabelParseState::Label => {

View File

@ -66,7 +66,10 @@ pub fn emit(encoder: &mut BinEncoder, a: &RData) -> EncodeResult {
}
pub fn parse(tokens: &Vec<Token>) -> ParseResult<RData> {
unimplemented!()
let mut token = tokens.iter();
let address: Ipv4Addr = try!(token.next().ok_or(ParseError::MissingToken("ipv4 address".to_string())).and_then(|t| if let &Token::CharData(ref s) = t {Ok(try!(s.parse()))} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
Ok(RData::A{ address: address })
}
#[cfg(test)]

View File

@ -61,7 +61,10 @@ pub fn emit(encoder: &mut BinEncoder, aaaa: &RData) -> EncodeResult {
}
pub fn parse(tokens: &Vec<Token>) -> ParseResult<RData> {
unimplemented!()
let mut token = tokens.iter();
let address: Ipv6Addr = try!(token.next().ok_or(ParseError::MissingToken("ipv6 address".to_string())).and_then(|t| if let &Token::CharData(ref s) = t {Ok(try!(s.parse()))} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
Ok(RData::AAAA{ address: address })
}

View File

@ -50,8 +50,11 @@ pub fn emit(encoder: &mut BinEncoder, cname_data: &RData) -> EncodeResult {
}
}
pub fn parse(tokens: &Vec<Token>) -> ParseResult<RData> {
unimplemented!()
pub fn parse(tokens: &Vec<Token>, origin: Option<&Name>) -> ParseResult<RData> {
let mut token = tokens.iter();
let cname: Name = try!(token.next().ok_or(ParseError::MissingToken("cname".to_string())).and_then(|t| if let &Token::CharData(ref s) = t {Name::parse(s, origin)} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
Ok(RData::CNAME{ cname: cname })
}

View File

@ -14,6 +14,8 @@
* limitations under the License.
*/
// TODO: these should each be its own struct, it would make parsing and decoding a little cleaner
// and also a little more ergonomic when accessing.
// each of these modules has the parser for that rdata embedded, to keep the file sizes down...
pub mod cname;
pub mod mx;

View File

@ -48,8 +48,8 @@ pub fn read(decoder: &mut BinDecoder) -> DecodeResult<RData> {
}
pub fn emit(encoder: &mut BinEncoder, mx: &RData) -> EncodeResult {
if let RData::MX { ref preference, ref exchange } = *mx {
try!(encoder.emit_u16(*preference));
if let RData::MX { preference, ref exchange } = *mx {
try!(encoder.emit_u16(preference));
try!(exchange.emit(encoder));
Ok(())
} else {
@ -57,8 +57,13 @@ pub fn emit(encoder: &mut BinEncoder, mx: &RData) -> EncodeResult {
}
}
pub fn parse(tokens: &Vec<Token>) -> ParseResult<RData> {
unimplemented!()
pub fn parse(tokens: &Vec<Token>, origin: Option<&Name>) -> ParseResult<RData> {
let mut token = tokens.iter();
let preference: u16 = try!(token.next().ok_or(ParseError::MissingToken("preference".to_string())).and_then(|t| if let &Token::CharData(ref s) = t {Ok(try!(s.parse()))} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
let exchange: Name = try!(token.next().ok_or(ParseError::MissingToken("exchange".to_string())).and_then(|t| if let &Token::CharData(ref s) = t {Name::parse(s, origin)} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
Ok(RData::MX { preference: preference, exchange: exchange})
}

View File

@ -57,6 +57,9 @@ pub fn emit(encoder: &mut BinEncoder, ns: &RData) -> EncodeResult {
}
}
pub fn parse(tokens: &Vec<Token>) -> ParseResult<RData> {
unimplemented!()
pub fn parse(tokens: &Vec<Token>, origin: Option<&Name>) -> ParseResult<RData> {
let mut token = tokens.iter();
let nsdname: Name = try!(token.next().ok_or(ParseError::MissingToken("nsdname".to_string())).and_then(|t| if let &Token::CharData(ref s) = t {Name::parse(s, origin)} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
Ok(RData::NS{ nsdname: nsdname })
}

View File

@ -60,6 +60,7 @@ pub fn emit(encoder: &mut BinEncoder, nil: &RData) -> EncodeResult {
}
}
#[allow(unused)]
// parsing NULL rdata from master-file tokens is not supported; calling this panics
pub fn parse(tokens: &Vec<Token>) -> ParseResult<RData> {
unimplemented!()
}

View File

@ -50,6 +50,9 @@ pub fn emit(encoder: &mut BinEncoder, ptr: &RData) -> EncodeResult {
}
}
pub fn parse(tokens: &Vec<Token>) -> ParseResult<RData> {
unimplemented!()
pub fn parse(tokens: &Vec<Token>, origin: Option<&Name>) -> ParseResult<RData> {
let mut token = tokens.iter();
let ptrdname: Name = try!(token.next().ok_or(ParseError::MissingToken("ptrdname".to_string())).and_then(|t| if let &Token::CharData(ref s) = t {Name::parse(s, origin)} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
Ok(RData::PTR{ ptrdname: ptrdname })
}

View File

@ -98,7 +98,7 @@ pub fn read(decoder: &mut BinDecoder) -> DecodeResult<RData> {
}
pub fn emit(encoder: &mut BinEncoder, soa: &RData) -> EncodeResult {
if let RData::SOA { ref mname, ref rname, ref serial, ref refresh, ref retry, ref expire, ref minimum} = *soa {
if let RData::SOA { ref mname, ref rname, ref serial, ref refresh, ref retry, ref expire, ref minimum } = *soa {
try!(mname.emit(encoder));
try!(rname.emit(encoder));
try!(encoder.emit_u32(*serial));
@ -112,6 +112,32 @@ pub fn emit(encoder: &mut BinEncoder, soa: &RData) -> EncodeResult {
}
}
pub fn parse(tokens: &Vec<Token>) -> ParseResult<RData> {
unimplemented!()
// VENERA Action\.domains (
// 20 ; SERIAL
// 7200 ; REFRESH
// 600 ; RETRY
// 3600000; EXPIRE
// 60) ; MINIMUM
pub fn parse(tokens: &Vec<Token>, origin: Option<&Name>) -> ParseResult<RData> {
let mut token = tokens.iter();
let mname: Name = try!(token.next().ok_or(ParseError::MissingToken("mname".to_string())).and_then(|t| if let &Token::CharData(ref s) = t {Name::parse(s, origin)} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
let rname: Name = try!(token.next().ok_or(ParseError::MissingToken("rname".to_string())).and_then(|t| if let &Token::CharData(ref s) = t {Name::parse(s, origin)} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
try!(token.next().ok_or(ParseError::MissingToken("(".to_string())).and_then(|t| if let &Token::StartList = t {Ok(t)} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
let serial: u32 = try!(token.next().ok_or(ParseError::MissingToken("serial".to_string())).and_then(|t| if let &Token::CharData(ref s) = t {Ok(try!(s.parse()))} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
let refresh: i32 = try!(token.next().ok_or(ParseError::MissingToken("refresh".to_string())).and_then(|t| if let &Token::CharData(ref s) = t {Ok(try!(s.parse()))} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
let retry: i32 = try!(token.next().ok_or(ParseError::MissingToken("retry".to_string())).and_then(|t| if let &Token::CharData(ref s) = t {Ok(try!(s.parse()))} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
let expire: i32 = try!(token.next().ok_or(ParseError::MissingToken("expire".to_string())).and_then(|t| if let &Token::CharData(ref s) = t {Ok(try!(s.parse()))} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
let minimum: u32 = try!(token.next().ok_or(ParseError::MissingToken("minimum".to_string())).and_then(|t| if let &Token::CharData(ref s) = t {Ok(try!(s.parse()))} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
try!(token.next().ok_or(ParseError::MissingToken(")".to_string())).and_then(|t| if let &Token::EndList = t {Ok(t)} else {Err(ParseError::UnexpectedToken(t.clone()))} ));
Ok(RData::SOA{
mname: mname,
rname: rname,
serial: serial,
refresh: refresh,
retry: retry,
expire: expire,
minimum: minimum,
})
}

View File

@ -55,5 +55,13 @@ pub fn emit(encoder: &mut BinEncoder, txt: &RData) -> EncodeResult {
}
pub fn parse(tokens: &Vec<Token>) -> ParseResult<RData> {
unimplemented!()
let mut txt_data: Vec<String> = Vec::with_capacity(tokens.len());
for t in tokens {
match *t {
Token::CharData(ref txt) => txt_data.push(txt.clone()),
_ => return Err(ParseError::UnexpectedToken(t.clone())),
}
}
Ok(RData::TXT { txt_data: txt_data })
}

View File

@ -329,14 +329,14 @@ pub enum RData {
}
impl RData {
pub fn parse(record_type: RecordType, tokens: &Vec<Token>) -> ParseResult<Self> {
pub fn parse(record_type: RecordType, tokens: &Vec<Token>, origin: Option<&Name>) -> ParseResult<Self> {
match record_type {
RecordType::CNAME => rdata::cname::parse(tokens),
RecordType::MX => rdata::mx::parse(tokens),
RecordType::CNAME => rdata::cname::parse(tokens, origin),
RecordType::MX => rdata::mx::parse(tokens, origin),
RecordType::NULL => rdata::null::parse(tokens),
RecordType::NS => rdata::ns::parse(tokens),
RecordType::PTR => rdata::ptr::parse(tokens),
RecordType::SOA => rdata::soa::parse(tokens),
RecordType::NS => rdata::ns::parse(tokens, origin),
RecordType::PTR => rdata::ptr::parse(tokens, origin),
RecordType::SOA => rdata::soa::parse(tokens, origin),
RecordType::TXT => rdata::txt::parse(tokens),
RecordType::A => rdata::a::parse(tokens),
RecordType::AAAA => rdata::aaaa::parse(tokens),

View File

@ -82,8 +82,12 @@ impl RecordType {
"AAAA" => Ok(RecordType::AAAA),
"CNAME" => Ok(RecordType::CNAME),
"NULL" => Ok(RecordType::NULL),
"MX" => Ok(RecordType::MX),
"NS" => Ok(RecordType::NS),
"PTR" => Ok(RecordType::PTR),
"SOA" => Ok(RecordType::SOA),
"TXT" => Ok(RecordType::TXT),
_ => Err(DecodeError::UnknownRecordTypeStr(str.to_string())),
}
}
@ -102,8 +106,11 @@ impl RecordType {
28 => Ok(RecordType::AAAA),
5 => Ok(RecordType::CNAME),
0 => Ok(RecordType::NULL),
15 => Ok(RecordType::MX),
2 => Ok(RecordType::NS),
12 => Ok(RecordType::PTR),
6 => Ok(RecordType::SOA),
16 => Ok(RecordType::TXT),
_ => Err(DecodeError::UnknownRecordTypeValue(value)),
}
}
@ -142,8 +149,11 @@ impl From<RecordType> for &'static str {
RecordType::AAAA => "AAAA",
RecordType::CNAME => "CNAME",
RecordType::NULL => "NULL",
RecordType::MX => "MX",
RecordType::NS => "NS",
RecordType::PTR => "PTR",
RecordType::SOA => "SOA",
RecordType::TXT => "TXT",
_ => panic!("unsupported RecordType: {:?}", rt), // other types are planned
}
}
@ -165,8 +175,11 @@ impl From<RecordType> for u16 {
RecordType::AAAA => 28,
RecordType::CNAME => 5,
RecordType::NULL => 0,
RecordType::MX => 15,
RecordType::NS => 2,
RecordType::PTR => 12,
RecordType::SOA => 6,
RecordType::TXT => 16,
_ => panic!("unsupported RecordType: {:?}", rt), // other types are planned...
}
}

View File

@ -95,6 +95,7 @@ impl Record {
*/
pub fn new() -> Record {
Record {
// TODO: these really should all be Optionals, I was lazy.
name_labels: domain::Name::new(),
rr_type: RecordType::A,
dns_class: DNSClass::IN,

View File

@ -17,6 +17,7 @@
use std::collections::HashMap;
use std::io::Read;
use std::fs::File;
use std::num::ParseIntError;
use ::error::*;
use ::rr::Name;
@ -133,15 +134,20 @@ impl Parser {
Parser
}
pub fn parse(&mut self, file: File, origin: Option<Name>) -> ParseResult<Authority> {
let mut records: HashMap<(Name, RecordType), Vec<Record>> = HashMap::new();
let mut buf = String::new();
pub fn parse_from(&mut self, file: File, origin: Option<Name>) -> ParseResult<Authority> {
let mut file = file;
let mut buf = String::new();
// TODO, this should really use something to read line by line or some other method to
// keep the usage down.
// keep the usage down. and be a custom lexer...
try!(file.read_to_string(&mut buf));
let mut lexer = Lexer::new(&buf);
let lexer = Lexer::new(&buf);
self.parse(lexer, origin)
}
pub fn parse(&mut self, lexer: Lexer, origin: Option<Name>) -> ParseResult<Authority> {
let mut lexer = lexer;
let mut records: HashMap<(Name, RecordType), Vec<Record>> = HashMap::new();
let mut origin: Option<Name> = origin;
let mut current_name: Option<Name> = None;
@ -164,55 +170,55 @@ impl Parser {
Token::Origin => State::Origin,
// if CharData, then Name then ttl_class_type
Token::CharData(data) => {
current_name = Some(try!(Name::parse(data, &origin)));
State::Ttl_Class_Type
Token::CharData(ref data) => {
current_name = Some(try!(Name::parse(data, origin.as_ref())));
State::TtlClassType
},
// @ is a placeholder for specifying the current origin
Token::At => {
current_name = origin.clone(); // TODO a COW or RC would reduce copies...
State::Ttl_Class_Type
State::TtlClassType
}
// if blank, then nothing or ttl_class_type... grr...
// if blank, then nothing or ttl_class_type
Token::Blank => {
State::Ttl_Class_Type
State::TtlClassType
},
Token::EOL => State::StartLine, // probably a comment
_ => return Err(ParseError::UnrecognizedToken(t)),
_ => return Err(ParseError::UnexpectedToken(t)),
}
},
State::Origin => {
match t {
Token::CharData(data) => {
Token::CharData(ref data) => {
// TODO an origin was specified, should this be legal? definitely confusing...
origin = Some(try!(Name::parse(data, &None)));
origin = Some(try!(Name::parse(data, None)));
State::StartLine
}
_ => return Err(ParseError::UnrecognizedToken(t)),
_ => return Err(ParseError::UnexpectedToken(t)),
}
}
State::Include => unimplemented!(),
State::Ttl_Class_Type => {
State::TtlClassType => {
match t {
// if number, TTL
// Token::Number(ref num) => ttl = Some(*num),
// One of Class or Type (these cannot be overlapping!)
Token::CharData(ref data) => {
// if it's a number it's a ttl
let result = data.parse();
let result: Result<i32, ParseIntError> = data.parse();
if result.is_ok() {
if ttl.is_some() { return Err(ParseError::UnrecognizedToken(t.clone())) } // ideally there is no copy in normal usage
ttl = result.ok();
State::Ttl_Class_Type
State::TtlClassType
} else {
// if can parse DNSClass, then class
let result = DNSClass::from_str(data);
if result.is_ok() {
class = result.ok();
State::Ttl_Class_Type
State::TtlClassType
} else {
// if can parse RecordType, then RecordType
rtype = Some(try!(RecordType::from_str(data)));
State::Record
@ -223,81 +229,80 @@ impl Parser {
Token::EOL => {
State::StartLine // next line
},
_ => return Err(ParseError::UnrecognizedToken(t)),
_ => return Err(ParseError::UnexpectedToken(t)),
}
},
State::Record => {
// b/c of ownership rules, perhaps, just collect all the RData components as a list of
// tokens to pass into the processor
match t {
Token::EOL => State::EndRecord,
Token::EOL => {
// call out to parsers for difference record types
let rdata = try!(RData::parse(try!(rtype.ok_or(ParseError::RecordTypeNotSpecified)), &tokens, origin.as_ref()));
// verify that we have everything we need for the record
let mut record = Record::new();
// TODO COW or RC would reduce mem usage, perhaps Name should have an intern()...
// might want to wait until RC.weak() stabilizes, as that would be needed for global
// memory where you want
record.name(try!(current_name.clone().ok_or(ParseError::RecordNameNotSpecified)));
record.rr_type(rtype.unwrap());
record.dns_class(try!(class.ok_or(ParseError::RecordClassNotSpecified)));
// slightly annoying, need to grab the TTL, then move rdata into the record,
// then check the Type again and have custom add logic.
match rtype.unwrap() {
RecordType::SOA => {
// TTL for the SOA is set internally...
// expire is for the SOA, minimum is default for records
if let RData::SOA { ref expire, ref minimum, ..} = rdata {
record.ttl(*expire);
ttl = Some(*minimum as i32);
} else { assert!(false, "Invalid RData here, expected SOA: {:?}", rdata); }
},
_ => {
record.ttl(try!(ttl.ok_or(ParseError::RecordTTLNotSpecified)));
},
}
// move the rdata into record...
record.rdata(rdata);
// add to the map
let key = (record.get_name().clone(), record.get_rr_type());
match rtype.unwrap() {
RecordType::SOA => {
if records.insert(key, vec![record]).is_some() {
return Err(ParseError::SoaAlreadySpecified);
}
},
_ => {
// add a Vec if it's not there, then add the record to the list
let mut records = records.entry(key).or_insert(Vec::with_capacity(1));
records.push(record);
},
}
State::StartLine
},
_ => { tokens.push(t); State::Record },
}
},
State::EndRecord => {
// call out to parsers for difference record types
let rdata = try!(RData::parse(try!(rtype.ok_or(ParseError::RecordTypeNotSpecified)), &tokens));
// verify that we have everything we need for the record
let mut record = Record::new();
// TODO COW or RC would reduce mem usage, perhaps Name should have an intern()...
// might want to wait until RC.weak() stabilizes, as that would be needed for global
// memory where you want
record.name(try!(current_name.clone().ok_or(ParseError::RecordNameNotSpecified)));
record.rr_type(rtype.unwrap());
record.dns_class(try!(class.ok_or(ParseError::RecordClassNotSpecified)));
// slightly annoying, need to grab the TTL, then move rdata into the record,
// then check the Type again and have custom add logic.
match rtype.unwrap() {
RecordType::SOA => {
// TTL for the SOA is set internally...
// expire is for the SOA, minimum is default for records
if let RData::SOA { ref expire, ref minimum, ..} = rdata {
record.ttl(*expire);
ttl = Some(*minimum as i32);
} else { assert!(false, "Invalid RData here, expected SOA: {:?}", rdata); }
},
_ => {
record.ttl(try!(ttl.ok_or(ParseError::RecordTTLNotSpecified)));
},
}
// move the rdata into record...
record.rdata(rdata);
// add to the map
let key = (record.get_name().clone(), record.get_rr_type());
match rtype.unwrap() {
RecordType::SOA => {
if records.insert(key, vec![record]).is_some() {
return Err(ParseError::SoaAlreadySpecified);
}
},
_ => {
// add a Vec if it's not there, then add the record to the list
let mut records = records.entry(key).or_insert(Vec::with_capacity(1));
records.push(record);
},
}
State::StartLine
},
}
}
//
// build the Authority and return.
records.shrink_to_fit(); // this shouldn't change once stored (replacement instead)
Ok(Authority::new(try!(origin.ok_or(ParseError::OriginIsUndefined)), records))
}
}
enum State {
StartLine, // start of line, @, $<WORD>, Name, Blank
Ttl_Class_Type, // [<TTL>] [<class>] <type>,
TtlClassType, // [<TTL>] [<class>] <type>,
Record,
Include, // $INCLUDE <filename>
Origin,
EndRecord,
}

View File

@ -2,7 +2,6 @@ use std::cell::{Cell,RefCell};
use std::iter::Peekable;
use std::str::Chars;
use std::char;
use std::fs::File;
use ::error::{LexerResult,LexerError};
@ -18,8 +17,8 @@ impl<'a> Lexer<'a> {
}
pub fn next_token(&mut self) -> LexerResult<Option<Token>> {
let mut cur_token: Cell<Option<State>> = Cell::new(None);
let mut cur_string: RefCell<Option<String>> = RefCell::new(None);
let cur_token: Cell<Option<State>> = Cell::new(None);
let cur_string: RefCell<Option<String>> = RefCell::new(None);
//while let Some(ch) = self.txt.by_ref().peekable().peek() {
'out: for i in 0..4096 { // max chars in a single lex, helps with issues in the lexer...
@ -122,8 +121,8 @@ impl<'a> Lexer<'a> {
self.txt.next(); // consume the escape
let ch = try!(self.peek().ok_or(LexerError::EOF));
if (!ch.is_control()) {
if (ch.is_numeric()) {
if !ch.is_control() {
if ch.is_numeric() {
// in this case it's an escaped octal: \DDD
let d1 = try!(self.txt.next().ok_or(LexerError::EOF)); // gobble
let d2 = try!(self.txt.next().ok_or(LexerError::EOF)); // gobble
@ -200,9 +199,8 @@ pub enum Token {
Blank, // only if the first part of the line
StartList, // (
EndList, // )
CharData(String), // [a-zA-Z, non-control utf8, ., -, 0-9]+
CharData(String), // [a-zA-Z, non-control utf8, ., -, 0-9]+, ".*"
At, // @
Quote(String), // ".*"
Include, // $INCLUDE
Origin, // $ORIGIN
EOL, // \n or \r\n
@ -218,7 +216,7 @@ impl Token {
State::Comment => Token::EOL, // comments can't end a sequence, so must be EOF/EOL
State::At => Token::At,
State::Quote => return Err(LexerError::UnclosedQuotedString),
State::Quoted => Token::Quote(value.unwrap_or_default()),
State::Quoted => Token::CharData(value.unwrap_or_default()),
State::Dollar => {
let s = value.unwrap_or_default();
if "INCLUDE".to_string() == s { Token::Include }
@ -232,7 +230,6 @@ impl Token {
#[cfg(test)]
mod lex_test {
use ::error::*;
use super::*;
#[test]
@ -256,6 +253,16 @@ mod lex_test {
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::Blank);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("after".to_string()));
let mut lexer = Lexer::new("dead beef ();comment
after");
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("dead".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("beef".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::StartList);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::EndList);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::EOL);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::Blank);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("after".to_string()));
}
#[test]
@ -270,18 +277,18 @@ mod lex_test {
#[test]
fn quoted_txt() {
assert_eq!(Lexer::new("\"Quoted\"").next_token().unwrap().unwrap(), Token::Quote("Quoted".to_string()));
assert_eq!(Lexer::new("\";@$\"").next_token().unwrap().unwrap(), Token::Quote(";@$".to_string()));
assert_eq!(Lexer::new("\"some \\A\"").next_token().unwrap().unwrap(), Token::Quote("some A".to_string()));
assert_eq!(Lexer::new("\"Quoted\"").next_token().unwrap().unwrap(), Token::CharData("Quoted".to_string()));
assert_eq!(Lexer::new("\";@$\"").next_token().unwrap().unwrap(), Token::CharData(";@$".to_string()));
assert_eq!(Lexer::new("\"some \\A\"").next_token().unwrap().unwrap(), Token::CharData("some A".to_string()));
let mut lexer = Lexer::new("\"multi\nline\ntext\"");
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::Quote("multi\nline\ntext".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("multi\nline\ntext".to_string()));
assert_eq!(lexer.next_token().unwrap(), None);
let mut lexer = Lexer::new("\"multi\r\nline\r\ntext\"\r\n");
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::Quote("multi\r\nline\r\ntext".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("multi\r\nline\r\ntext".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::EOL);
assert_eq!(lexer.next_token().unwrap(), None);
@ -345,36 +352,36 @@ mod lex_test {
let mut lexer = Lexer::new("(\n\"abc\"\n)");
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::StartList);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::Quote("abc".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("abc".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::EndList);
assert_eq!(lexer.next_token().unwrap(), None);
let mut lexer = Lexer::new("(\n\"abc\";comment\n)");
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::StartList);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::Quote("abc".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("abc".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::EndList);
assert_eq!(lexer.next_token().unwrap(), None);
}
#[test]
fn soa() {
let mut lexer = Lexer::new("@ IN SOA VENERA Action\\.domains (\n\
20 ; SERIAL\n\
7200 ; REFRESH\n\
600 ; RETRY\n\
3600000; EXPIRE\n\
60) ; MINIMUM\n\
\n\
NS A.ISI.EDU.\n\
NS VENERA\n\
NS VAXA\n\
MX 10 VENERA\n\
MX 20 VAXA\n\
\n\
A A 26.3.0.103\n\
\n\
VENERA A 10.1.0.52\n\
A 128.9.0.32\n\
\n\
let mut lexer = Lexer::new("@ IN SOA VENERA Action\\.domains (
20 ; SERIAL
7200 ; REFRESH
600 ; RETRY
3600000; EXPIRE
60) ; MINIMUM
NS A.ISI.EDU.
NS VENERA
NS VAXA
MX 10 VENERA
MX 20 VAXA
A A 26.3.0.103
VENERA A 10.1.0.52
A 128.9.0.32
$INCLUDE <SUBSYS>ISI-MAILBOXES.TXT");
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::At);
@ -391,19 +398,24 @@ $INCLUDE <SUBSYS>ISI-MAILBOXES.TXT");
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::EndList);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::EOL);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::EOL);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::Blank);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("NS".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("A.ISI.EDU.".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::EOL);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::Blank);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("NS".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("VENERA".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::EOL);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::Blank);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("NS".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("VAXA".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::EOL);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::Blank);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("MX".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("10".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("VENERA".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::EOL);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::Blank);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("MX".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("20".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("VAXA".to_string()));
@ -418,6 +430,7 @@ $INCLUDE <SUBSYS>ISI-MAILBOXES.TXT");
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("A".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("10.1.0.52".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::EOL);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::Blank);
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("A".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::CharData("128.9.0.32".to_string()));
assert_eq!(lexer.next_token().unwrap().unwrap(), Token::EOL);

View File

@ -25,3 +25,6 @@ pub use self::master_lex::Token;
pub trait TxtSerializable {
fn parse(lexer: &mut Lexer) -> ParseResult<Self>;
}
#[cfg(test)]
mod txt_tests;

View File

@@ -0,0 +1,158 @@
use std::net::{Ipv4Addr, Ipv6Addr};
use std::str::FromStr;
use ::rr::*;
use super::*;
// End-to-end test: lex and parse an RFC 1035-style master file, then verify
// one record of each supported type (SOA, NS, MX, A, AAAA, TXT, PTR).
//
// NOTE(review): in a master file, leading whitespace on a record line (a
// Blank token) means "same owner name as the previous record", so the layout
// of the zone-text literal below is semantically significant -- do not
// re-indent it.
#[test]
fn test_string() {
// `Action\\.domains` exercises the escaped-dot-in-name case: the `.` is
// part of the label, not a label separator.
let lexer = Lexer::new("@ IN SOA VENERA Action\\.domains (
20 ; SERIAL
7200 ; REFRESH
600 ; RETRY
3600000; EXPIRE
60) ; MINIMUM
NS A.ISI.EDU.
NS VENERA
NS VAXA
MX 10 VENERA
MX 20 VAXA
A A 26.3.0.103
TXT I am a txt record
TXT I am another txt record
TXT \"I am a different\" \"txt record\"
TXT key=val
AAAA AAAA 4321:0:1:2:3:4:567:89ab
ALIAS CNAME A
103.0.3.26.IN-ADDR.ARPA. PTR A
b.a.9.8.7.6.5.0.4.0.0.0.3.0.0.0.2.0.0.0.1.0.0.0.0.0.0.0.1.2.3.4.IP6.ARPA. PTR AAAA
SHORT 70 A 26.3.0.104
VENERA A 10.1.0.52
A 128.9.0.32");
// Parse with origin isi.edu; relative names in the zone text are resolved
// against this origin.
let authority = Parser::new().parse(lexer, Some(Name::new().label("isi").label("edu"))).unwrap();
// not validating everything, just one of each...
// SOA
let soa_record = authority.get_soa().unwrap();
assert_eq!(RecordType::SOA, soa_record.get_rr_type());
assert_eq!(&Name::new().label("isi").label("edu"), soa_record.get_name()); // i.e. the origin or domain
assert_eq!(3600000, soa_record.get_ttl());
assert_eq!(DNSClass::IN, soa_record.get_dns_class());
if let RData::SOA { ref mname, ref rname, serial, refresh, retry, expire, minimum } = *soa_record.get_rdata() {
// this should all be lowercased
assert_eq!(&Name::new().label("venera").label("isi").label("edu"), mname);
// TODO: this is broken, need to build names directly into the lexer I think.
// The escaped dot in `Action\.domains` should yield ONE label, but the
// parser currently splits it into two:
//assert_eq!(&Name::new().label("Action.domains").label("isi").label("edu"), rname);
assert_eq!(&Name::new().label("action").label("domains").label("isi").label("edu"), rname);
assert_eq!(20, serial);
assert_eq!(7200, refresh);
assert_eq!(600, retry);
assert_eq!(3600000, expire);
assert_eq!(60, minimum);
} else {
panic!("Not an SOA record!!!")
}
// NS
let ns_records: &Vec<Record> = authority.lookup(&Name::with_labels(vec!["isi".into(),"edu".into()]), RecordType::NS).unwrap();
let compare = ns_records.iter().zip(vec![ // this is cool, zip up the expected results... works as long as the order is good.
Name::new().label("a").label("isi").label("edu"),
Name::new().label("venera").label("isi").label("edu"),
Name::new().label("vaxa").label("isi").label("edu")
]);
for (record, ref name) in compare {
assert_eq!(&Name::with_labels(vec!["isi".into(),"edu".into()]), record.get_name());
assert_eq!(60, record.get_ttl()); // TODO: should this be minimum or expire?
assert_eq!(DNSClass::IN, record.get_dns_class());
assert_eq!(RecordType::NS, record.get_rr_type());
if let RData::NS{ ref nsdname } = *record.get_rdata() {
assert_eq!(name, nsdname);
} else {
panic!("Not an NS record!!!")
}
}
// MX
let mx_records: &Vec<Record> = authority.lookup(&Name::new().label("isi").label("edu"), RecordType::MX).unwrap();
let compare = mx_records.iter().zip(vec![
(10, Name::new().label("venera").label("isi").label("edu")),
(20, Name::new().label("vaxa").label("isi").label("edu")),
]);
for (record, (num, ref name)) in compare {
assert_eq!(&Name::new().label("isi").label("edu"), record.get_name());
assert_eq!(60, record.get_ttl()); // TODO: should this be minimum or expire?
assert_eq!(DNSClass::IN, record.get_dns_class());
assert_eq!(RecordType::MX, record.get_rr_type());
if let RData::MX{ preference, ref exchange } = *record.get_rdata() {
assert_eq!(num, preference);
assert_eq!(name, exchange);
} else {
// FIX: this message previously said "Not an NS record" (copy-paste error)
panic!("Not an MX record!!!")
}
}
// A
let a_record: &Record = authority.lookup(&Name::new().label("a").label("isi").label("edu"), RecordType::A).unwrap().first().unwrap();
assert_eq!(&Name::new().label("a").label("isi").label("edu"), a_record.get_name());
assert_eq!(60, a_record.get_ttl()); // TODO: should this be minimum or expire?
assert_eq!(DNSClass::IN, a_record.get_dns_class());
assert_eq!(RecordType::A, a_record.get_rr_type());
if let RData::A{ ref address } = *a_record.get_rdata() {
assert_eq!(&Ipv4Addr::new(26u8,3u8,0u8,103u8), address);
} else {
panic!("Not an A record!!!")
}
// AAAA
let aaaa_record: &Record = authority.lookup(&Name::new().label("aaaa").label("isi").label("edu"), RecordType::AAAA).unwrap().first().unwrap();
assert_eq!(&Name::new().label("aaaa").label("isi").label("edu"), aaaa_record.get_name());
if let RData::AAAA{ ref address } = *aaaa_record.get_rdata() {
assert_eq!(&Ipv6Addr::from_str("4321:0:1:2:3:4:567:89ab").unwrap(), address);
} else {
panic!("Not a AAAA record!!!")
}
// SHORT -- record with an explicit TTL (70) rather than the zone default
let short_record: &Record = authority.lookup(&Name::new().label("short").label("isi").label("edu"), RecordType::A).unwrap().first().unwrap();
assert_eq!(&Name::new().label("short").label("isi").label("edu"), short_record.get_name());
assert_eq!(70, short_record.get_ttl());
if let RData::A{ ref address } = *short_record.get_rdata() {
assert_eq!(&Ipv4Addr::new(26u8,3u8,0u8,104u8), address);
} else {
panic!("Not an A record!!!")
}
// TXT -- unquoted words become one string each; quoted phrases stay intact
let txt_records: &Vec<Record> = authority.lookup(&Name::new().label("a").label("isi").label("edu"), RecordType::TXT).unwrap();
let compare = txt_records.iter().zip(vec![
vec!["I".to_string(), "am".to_string(), "a".to_string(), "txt".to_string(), "record".to_string()],
vec!["I".to_string(), "am".to_string(), "another".to_string(), "txt".to_string(), "record".to_string()],
vec!["I am a different".to_string(), "txt record".to_string()],
vec!["key=val".to_string()],
]);
for (record, ref vector) in compare {
if let RData::TXT{ ref txt_data } = *record.get_rdata() {
assert_eq!(vector, txt_data);
} else {
panic!("Not a TXT record!!!")
}
}
// PTR
let ptr_record: &Record = authority.lookup(&Name::new().label("103").label("0").label("3").label("26").label("in-addr").label("arpa"), RecordType::PTR).unwrap().first().unwrap();
if let RData::PTR{ ref ptrdname } = *ptr_record.get_rdata() {
assert_eq!(&Name::new().label("a").label("isi").label("edu"), ptrdname);
} else {
panic!("Not a PTR record!!!")
}
}