Add support for aliased expressions (#153)
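
Adds an `SQLAliasedExpr(Box<ASTNode>, String)` variant to `ASTNode` and a
`Parser::parse_identifier` helper, and teaches `parse_expr_list` to wrap an
expression in `SQLAliasedExpr` when it is followed by the `AS` keyword, so
projections such as `SELECT id AS aliased_id FROM customer` retain the alias.
The crate is also bumped to 0.2.5 and moved to the 2018 edition (`crate::`
import paths, `&dyn Trait`, `..=` range patterns).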

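A minimal usage sketch (not part of this patch; it assumes the re-exports
`sqlparser::dialect::GenericSqlDialect`, `sqlparser::sqlast::ASTNode`, and
`sqlparser::sqlparser::Parser` used by the test suite):

    use sqlparser::dialect::GenericSqlDialect;
    use sqlparser::sqlast::ASTNode;
    use sqlparser::sqlparser::Parser;

    fn main() {
        let dialect = GenericSqlDialect {};
        let sql = "SELECT id AS aliased_id FROM customer".to_string();
        // The aliased projection item is parsed as
        // SQLAliasedExpr(SQLIdentifier("id"), "aliased_id").
        match Parser::parse_sql(&dialect, sql).unwrap() {
            ASTNode::SQLSelect { projection, .. } => {
                println!("{}", projection[0].to_string()); // prints: id AS aliased_id
            }
            other => panic!("expected SQLSelect, got {:?}", other),
        }
    }
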
diff --git a/Cargo.toml b/Cargo.toml
index 5e6dcd2..053517a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "sqlparser"
 description = "Extensible SQL Lexer and Parser with support for ANSI SQL:2011"
-version = "0.2.4"
+version = "0.2.5"
+edition = "2018"
 authors = ["Andy Grove <andygrove73@gmail.com>"]
 homepage = "https://github.com/andygrove/sqlparser-rs"
 documentation = "https://docs.rs/sqlparser/"
diff --git a/src/dialect/ansi_sql.rs b/src/dialect/ansi_sql.rs
index b91fdc6..cd57440 100644
--- a/src/dialect/ansi_sql.rs
+++ b/src/dialect/ansi_sql.rs
@@ -1,6 +1,5 @@
-use dialect::Dialect;
-
-use dialect::keywords::*;
+use crate::dialect::keywords::*;
+use crate::dialect::Dialect;
 
 pub struct AnsiSqlDialect {}
 
diff --git a/src/dialect/generic_sql.rs b/src/dialect/generic_sql.rs
index 0f18b72..15f2acd 100644
--- a/src/dialect/generic_sql.rs
+++ b/src/dialect/generic_sql.rs
@@ -1,6 +1,6 @@
-use dialect::Dialect;
+use crate::dialect::keywords::*;
+use crate::dialect::Dialect;
 
-use dialect::keywords::*;
 pub struct GenericSqlDialect {}
 
 impl Dialect for GenericSqlDialect {
diff --git a/src/dialect/keywords.rs b/src/dialect/keywords.rs
index e468372..ae3935b 100644
--- a/src/dialect/keywords.rs
+++ b/src/dialect/keywords.rs
@@ -6,7 +6,7 @@
     }
 }
 
-/// enumerate all the keywords here for all dialects to support in this project
+// enumerate all the keywords here for all dialects to support in this project
 keyword!(
     ABS,
     ADD,
diff --git a/src/dialect/postgresql.rs b/src/dialect/postgresql.rs
index 66cb51c..fa67573 100644
--- a/src/dialect/postgresql.rs
+++ b/src/dialect/postgresql.rs
@@ -1,6 +1,5 @@
-use dialect::Dialect;
-
-use dialect::keywords::*;
+use crate::dialect::keywords::*;
+use crate::dialect::Dialect;
 
 pub struct PostgreSqlDialect {}
 
diff --git a/src/sqlast/mod.rs b/src/sqlast/mod.rs
index 54b650a..aa5f7f2 100644
--- a/src/sqlast/mod.rs
+++ b/src/sqlast/mod.rs
@@ -30,6 +30,8 @@
 pub enum ASTNode {
     /// Identifier e.g. table name or column name
     SQLIdentifier(String),
+    /// Aliased expression
+    SQLAliasedExpr(Box<ASTNode>, String),
     /// Wildcard e.g. `*`
     SQLWildcard,
     /// Multi part identifier e.g. `myschema.dbo.mytable`
@@ -139,6 +141,9 @@
 impl ToString for ASTNode {
     fn to_string(&self) -> String {
         match self {
+            ASTNode::SQLAliasedExpr(ast, alias) => {
+                format!("{} AS {}", ast.as_ref().to_string(), alias)
+            }
             ASTNode::SQLIdentifier(s) => s.to_string(),
             ASTNode::SQLWildcard => "*".to_string(),
             ASTNode::SQLCompoundIdentifier(s) => s.join("."),
diff --git a/src/sqlparser.rs b/src/sqlparser.rs
index 42a39b0..52fb7b3 100644
--- a/src/sqlparser.rs
+++ b/src/sqlparser.rs
@@ -53,7 +53,7 @@
     }
 
     /// Parse a SQL statement and produce an Abstract Syntax Tree (AST)
-    pub fn parse_sql(dialect: &Dialect, sql: String) -> Result<ASTNode, ParserError> {
+    pub fn parse_sql(dialect: &dyn Dialect, sql: String) -> Result<ASTNode, ParserError> {
         let mut tokenizer = Tokenizer::new(dialect, &sql);
         let tokens = tokenizer.tokenize()?;
         let mut parser = Parser::new(tokens);
@@ -256,6 +256,11 @@
         debug!("parsing infix");
         match self.next_token() {
             Some(tok) => match tok {
+                // Note: `AS` is deliberately not parsed as an infix operator here.
+                // Doing so would have to cover both aliased expressions and
+                // `CAST(expr AS ident)`; aliased expressions are instead handled
+                // in `parse_expr_list`, which wraps the expression in
+                // `ASTNode::SQLAliasedExpr` when it is followed by `AS`.
                 Token::Keyword(ref k) if k == "IS" => {
                     if self.parse_keywords(vec!["NULL"]) {
                         Ok(Some(ASTNode::SQLIsNull(Box::new(expr))))
@@ -342,6 +347,7 @@
         debug!("get_precedence() {:?}", tok);
 
         match tok {
+            // `AS` has no precedence entry; aliases are handled in parse_expr_list instead.
             &Token::Keyword(ref k) if k == "OR" => Ok(5),
             &Token::Keyword(ref k) if k == "AND" => Ok(10),
             &Token::Keyword(ref k) if k == "NOT" => Ok(15),
@@ -1015,6 +1021,14 @@
         }
     }
 
+    pub fn parse_identifier(&mut self) -> Result<String, ParserError> {
+        let identifier = self.parse_compound_identifier(&Token::Period)?;
+        match identifier {
+            ASTNode::SQLCompoundIdentifier(idents) => Ok(idents.join(".")),
+            other => parser_err!(format!("Expecting identifier, found: {:?}", other)),
+        }
+    }
+
     pub fn parse_column_names(&mut self) -> Result<Vec<String>, ParserError> {
         let identifier = self.parse_compound_identifier(&Token::Comma)?;
         match identifier {
@@ -1300,7 +1314,18 @@
     pub fn parse_expr_list(&mut self) -> Result<Vec<ASTNode>, ParserError> {
         let mut expr_list: Vec<ASTNode> = vec![];
         loop {
-            expr_list.push(self.parse_expr(0)?);
+            let expr = self.parse_expr(0)?;
+            match self.peek_token() {
+                Some(Token::Keyword(k)) if k.as_str() == "AS" => {
+                    self.next_token();
+                    expr_list.push(ASTNode::SQLAliasedExpr(
+                        Box::new(expr),
+                        self.parse_identifier()?,
+                    ));
+                }
+                _ => expr_list.push(expr),
+            }
+
             if let Some(t) = self.peek_token() {
                 if t == Token::Comma {
                     self.next_token();
diff --git a/src/sqltokenizer.rs b/src/sqltokenizer.rs
index 5040882..4162407 100644
--- a/src/sqltokenizer.rs
+++ b/src/sqltokenizer.rs
@@ -151,7 +151,7 @@
 
 /// SQL Tokenizer
 pub struct Tokenizer<'a> {
-    dialect: &'a Dialect,
+    dialect: &'a dyn Dialect,
     pub query: String,
     pub line: u64,
     pub col: u64,
@@ -159,7 +159,7 @@
 
 impl<'a> Tokenizer<'a> {
     /// Create a new SQL tokenizer for the specified SQL statement
-    pub fn new(dialect: &'a Dialect, query: &str) -> Self {
+    pub fn new(dialect: &'a dyn Dialect, query: &str) -> Self {
         Self {
             dialect,
             query: query.to_string(),
@@ -278,11 +278,11 @@
                     Ok(Some(Token::DoubleQuotedString(s)))
                 }
                 // numbers
-                '0'...'9' => {
+                '0'..='9' => {
                     let mut s = String::new();
                     while let Some(&ch) = chars.peek() {
                         match ch {
-                            '0'...'9' | '.' => {
+                            '0'..='9' | '.' => {
                                 chars.next(); // consume
                                 s.push(ch);
                             }
@@ -550,5 +550,4 @@
         //println!("------------------------------");
         assert_eq!(expected, actual);
     }
-
 }
diff --git a/tests/sqlparser_generic.rs b/tests/sqlparser_generic.rs
index 5c86797..af4ff71 100644
--- a/tests/sqlparser_generic.rs
+++ b/tests/sqlparser_generic.rs
@@ -509,6 +509,24 @@
 }
 
 #[test]
+fn parse_select_with_alias() {
+    let sql = String::from("SELECT id AS aliased_id FROM customer");
+    let ast = parse_sql(&sql);
+    match ast {
+        ASTNode::SQLSelect { projection, .. } => {
+            assert_eq!(1, projection.len());
+            match &projection[0] {
+                ASTNode::SQLAliasedExpr(_, alias) => {
+                    assert_eq!("aliased_id", alias.as_str());
+                }
+                _ => panic!("expected SQLAliasedExpr in projection"),
+            }
+        }
+        _ => panic!("expected SQLSelect"),
+    }
+}
+
+#[test]
 fn parse_delete_with_semi_colon() {
     let sql: &str = "DELETE FROM 'table';";
 
@@ -676,7 +694,7 @@
     generic_ast
 }
 
-fn parse_sql_with(sql: &str, dialect: &Dialect) -> ASTNode {
+fn parse_sql_with(sql: &str, dialect: &dyn Dialect) -> ASTNode {
     let mut tokenizer = Tokenizer::new(dialect, &sql);
     let tokens = tokenizer.tokenize().unwrap();
     let mut parser = Parser::new(tokens);
diff --git a/tests/sqlparser_postgres.rs b/tests/sqlparser_postgres.rs
index 6b6598c..f1abf16 100644
--- a/tests/sqlparser_postgres.rs
+++ b/tests/sqlparser_postgres.rs
@@ -313,7 +313,8 @@
 
 #[test]
 fn parse_copy_example() {
-    let sql = String::from(r#"COPY public.actor (actor_id, first_name, last_name, last_update, value) FROM stdin;
+    let sql = String::from(
+        r#"COPY public.actor (actor_id, first_name, last_name, last_update, value) FROM stdin;
 1	PENELOPE	GUINESS	2006-02-15 09:34:33 0.11111
 2	NICK	WAHLBERG	2006-02-15 09:34:33 0.22222
 3	ED	CHASE	2006-02-15 09:34:33 0.312323
@@ -332,7 +333,8 @@
 'awe':5 'awe-inspir':4 'barbarella':1 'cat':13 'conquer':16 'dog':18 'feminist':10 'inspir':6 'monasteri':21 'must':15 'stori':7 'streetcar':2
 PHP	₱ USD $
 \N  Some other value
-\\."#);
+\\."#,
+    );
     let ast = parse_sql(&sql);
     println!("{:#?}", ast);
     //assert_eq!(sql, ast.to_string());