diff --git a/Cargo.toml b/Cargo.toml
index 2fe0464b..7ceb7a8e 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "proc-macro2"
-version = "0.2.3" # remember to update html_root_url
+version = "0.3.0" # remember to update html_root_url
 authors = ["Alex Crichton <alex@alexcrichton.com>"]
 license = "MIT/Apache-2.0"
 readme = "README.md"
diff --git a/src/lib.rs b/src/lib.rs
index 1d9f0cfd..54c06e4d 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -24,7 +24,7 @@
 //! [ts]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
 
 // Proc-macro2 types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/proc-macro2/0.2.3")]
+#![doc(html_root_url = "https://docs.rs/proc-macro2/0.3.0")]
 
 #![cfg_attr(feature = "nightly", feature(proc_macro))]
 
@@ -35,8 +35,10 @@ extern crate proc_macro;
 extern crate unicode_xid;
 
 use std::fmt;
-use std::str::FromStr;
 use std::iter::FromIterator;
+use std::marker;
+use std::rc::Rc;
+use std::str::FromStr;
 
 #[macro_use]
 #[cfg(not(feature = "nightly"))]
@@ -49,67 +51,80 @@ mod imp;
 #[cfg(feature = "nightly")]
 mod imp;
 
-#[macro_use]
-mod macros;
-
 #[derive(Clone)]
-pub struct TokenStream(imp::TokenStream);
+pub struct TokenStream {
+    inner: imp::TokenStream,
+    _marker: marker::PhantomData<Rc<()>>,
+}
+
+pub struct LexError {
+    inner: imp::LexError,
+    _marker: marker::PhantomData<Rc<()>>,
+}
+
+impl TokenStream {
+    fn _new(inner: imp::TokenStream) -> TokenStream {
+        TokenStream {
+            inner: inner,
+            _marker: marker::PhantomData,
+        }
+    }
 
-pub struct LexError(imp::LexError);
+    pub fn empty() -> TokenStream {
+        TokenStream::_new(imp::TokenStream::empty())
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.inner.is_empty()
+    }
+}
 
 impl FromStr for TokenStream {
     type Err = LexError;
 
     fn from_str(src: &str) -> Result<TokenStream, LexError> {
-        match src.parse() {
-            Ok(e) => Ok(TokenStream(e)),
-            Err(e) => Err(LexError(e)),
-        }
+        let e = src.parse().map_err(|e| {
+            LexError { inner: e, _marker: marker::PhantomData }
+        })?;
+        Ok(TokenStream::_new(e))
     }
 }
 
 #[cfg(feature = "proc-macro")]
 impl From<proc_macro::TokenStream> for TokenStream {
     fn from(inner: proc_macro::TokenStream) -> TokenStream {
-        TokenStream(inner.into())
+        TokenStream::_new(inner.into())
     }
 }
 
 #[cfg(feature = "proc-macro")]
 impl From<TokenStream> for proc_macro::TokenStream {
     fn from(inner: TokenStream) -> proc_macro::TokenStream {
-        inner.0.into()
+        inner.inner.into()
     }
 }
 
-impl From<TokenTree> for TokenStream {
-    fn from(tree: TokenTree) -> TokenStream {
-        TokenStream(tree.into())
+impl FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
+        TokenStream::_new(streams.into_iter().collect())
     }
 }
 
-impl<T: Into<TokenStream>> FromIterator<T> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = T>>(streams: I) -> Self {
-        TokenStream(streams.into_iter().map(|t| t.into().0).collect())
+impl fmt::Display for TokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
     }
 }
 
-impl IntoIterator for TokenStream {
-    type Item = TokenTree;
-    type IntoIter = TokenTreeIter;
-
-    fn into_iter(self) -> TokenTreeIter {
-        TokenTreeIter(self.0.into_iter())
+impl fmt::Debug for TokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
     }
 }
 
-impl TokenStream {
-    pub fn empty() -> TokenStream {
-        TokenStream(imp::TokenStream::empty())
-    }
-
-    pub fn is_empty(&self) -> bool {
-        self.0.is_empty()
+impl fmt::Debug for LexError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
     }
 }
 
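Aside on the new wrappers above: the `_marker: PhantomData<Rc<()>>` field is zero-sized, but because `Rc` is neither `Send` nor `Sync` it opts each wrapper out of both auto traits, presumably so the nightly-backed and fallback `imp` types can share one deliberately non-thread-safe public contract. A minimal standalone sketch of the pattern (the `Opaque` name is illustrative, not part of this change):

```rust
use std::marker::PhantomData;
use std::rc::Rc;

// Hypothetical stand-in for the wrappers in this diff (`TokenStream`,
// `LexError`, and, further down, `Span`, `Term`, `Literal`).
pub struct Opaque<T> {
    inner: T,
    // Zero-sized, but `Rc<()>` is neither `Send` nor `Sync`, so `Opaque<T>`
    // is not either, regardless of `T`.
    _marker: PhantomData<Rc<()>>,
}

impl<T> Opaque<T> {
    fn _new(inner: T) -> Opaque<T> {
        Opaque {
            inner: inner,
            _marker: PhantomData,
        }
    }

    pub fn get(&self) -> &T {
        &self.inner
    }
}
```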
@@ -153,83 +168,147 @@ pub struct LineColumn {
     pub column: usize,
 }
 
-#[derive(Copy, Clone, PartialEq, Eq)]
-pub struct Span(imp::Span);
+#[derive(Copy, Clone)]
+#[cfg_attr(procmacro2_semver_exempt, derive(PartialEq, Eq))]
+pub struct Span {
+    inner: imp::Span,
+    _marker: marker::PhantomData<Rc<()>>,
+}
 
 impl Span {
+    fn _new(inner: imp::Span) -> Span {
+        Span {
+            inner: inner,
+            _marker: marker::PhantomData,
+        }
+    }
+
     pub fn call_site() -> Span {
-        Span(imp::Span::call_site())
+        Span::_new(imp::Span::call_site())
     }
 
+    #[cfg(procmacro2_semver_exempt)]
     pub fn def_site() -> Span {
-        Span(imp::Span::def_site())
+        Span::_new(imp::Span::def_site())
     }
 
     /// Creates a new span with the same line/column information as `self` but
     /// that resolves symbols as though it were at `other`.
+    #[cfg(procmacro2_semver_exempt)]
     pub fn resolved_at(&self, other: Span) -> Span {
-        Span(self.0.resolved_at(other.0))
+        Span::_new(self.inner.resolved_at(other.inner))
     }
 
     /// Creates a new span with the same name resolution behavior as `self` but
     /// with the line/column information of `other`.
+    #[cfg(procmacro2_semver_exempt)]
     pub fn located_at(&self, other: Span) -> Span {
-        Span(self.0.located_at(other.0))
+        Span::_new(self.inner.located_at(other.inner))
     }
 
     /// This method is only available when the `"nightly"` feature is enabled.
     #[cfg(all(feature = "nightly", feature = "proc-macro"))]
     pub fn unstable(self) -> proc_macro::Span {
-        self.0.unstable()
+        self.inner.unstable()
     }
 
     #[cfg(procmacro2_semver_exempt)]
     pub fn source_file(&self) -> SourceFile {
-        SourceFile(self.0.source_file())
+        SourceFile(self.inner.source_file())
     }
 
     #[cfg(procmacro2_semver_exempt)]
     pub fn start(&self) -> LineColumn {
-        let imp::LineColumn{ line, column } = self.0.start();
+        let imp::LineColumn{ line, column } = self.inner.start();
         LineColumn { line: line, column: column }
     }
 
     #[cfg(procmacro2_semver_exempt)]
     pub fn end(&self) -> LineColumn {
-        let imp::LineColumn{ line, column } = self.0.end();
+        let imp::LineColumn{ line, column } = self.inner.end();
         LineColumn { line: line, column: column }
     }
 
     #[cfg(procmacro2_semver_exempt)]
     pub fn join(&self, other: Span) -> Option<Span> {
-        self.0.join(other.0).map(Span)
+        self.inner.join(other.inner).map(Span::_new)
+    }
+}
+
+impl fmt::Debug for Span {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
     }
 }
 
 #[derive(Clone, Debug)]
-pub struct TokenTree {
-    pub span: Span,
-    pub kind: TokenNode,
+pub enum TokenTree {
+    Group(Group),
+    Term(Term),
+    Op(Op),
+    Literal(Literal),
+}
+
+impl TokenTree {
+    pub fn span(&self) -> Span {
+        match *self {
+            TokenTree::Group(ref t) => t.span(),
+            TokenTree::Term(ref t) => t.span(),
+            TokenTree::Op(ref t) => t.span(),
+            TokenTree::Literal(ref t) => t.span(),
+        }
+    }
+
+    pub fn set_span(&mut self, span: Span) {
+        match *self {
+            TokenTree::Group(ref mut t) => t.set_span(span),
+            TokenTree::Term(ref mut t) => t.set_span(span),
+            TokenTree::Op(ref mut t) => t.set_span(span),
+            TokenTree::Literal(ref mut t) => t.set_span(span),
+        }
+    }
+}
+
+impl From<Group> for TokenTree {
+    fn from(g: Group) -> TokenTree {
+        TokenTree::Group(g)
+    }
+}
+
+impl From<Term> for TokenTree {
+    fn from(g: Term) -> TokenTree {
+        TokenTree::Term(g)
+    }
 }
 
-impl From<TokenNode> for TokenTree {
-    fn from(kind: TokenNode) -> TokenTree {
-        TokenTree { span: Span::def_site(), kind: kind }
+impl From<Op> for TokenTree {
+    fn from(g: Op) -> TokenTree {
+        TokenTree::Op(g)
+    }
+}
+
+impl From<Literal> for TokenTree {
+    fn from(g: Literal) -> TokenTree {
+        TokenTree::Literal(g)
     }
 }
 
 impl fmt::Display for TokenTree {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        TokenStream::from(self.clone()).fmt(f)
+        match *self {
+            TokenTree::Group(ref t) => t.fmt(f),
+            TokenTree::Term(ref t) => t.fmt(f),
+            TokenTree::Op(ref t) => t.fmt(f),
+            TokenTree::Literal(ref t) => t.fmt(f),
+        }
     }
 }
 
 #[derive(Clone, Debug)]
-pub enum TokenNode {
-    Group(Delimiter, TokenStream),
-    Term(Term),
-    Op(char, Spacing),
-    Literal(Literal),
+pub struct Group {
+    delimiter: Delimiter,
+    stream: TokenStream,
+    span: Span,
 }
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
@@ -240,105 +319,279 @@ pub enum Delimiter {
     None,
 }
 
-#[derive(Copy, Clone)]
-pub struct Term(imp::Term);
+impl Group {
+    pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+        Group {
+            delimiter: delimiter,
+            stream: stream,
+            span: Span::call_site(),
+        }
+    }
 
-impl Term {
-    pub fn intern(string: &str) -> Term {
-        Term(imp::Term::intern(string))
+    pub fn delimiter(&self) -> Delimiter {
+        self.delimiter
     }
 
-    pub fn as_str(&self) -> &str {
-        self.0.as_str()
+    pub fn stream(&self) -> TokenStream {
+        self.stream.clone()
+    }
+
+    pub fn span(&self) -> Span {
+        self.span
+    }
+
+    pub fn set_span(&mut self, span: Span) {
+        self.span = span;
+    }
+}
+
+impl fmt::Display for Group {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.stream.fmt(f)
     }
 }
 
+#[derive(Copy, Clone, Debug)]
+pub struct Op {
+    op: char,
+    spacing: Spacing,
+    span: Span,
+}
+
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 pub enum Spacing {
     Alone,
     Joint,
 }
 
+impl Op {
+    pub fn new(op: char, spacing: Spacing) -> Op {
+        Op {
+            op: op,
+            spacing: spacing,
+            span: Span::call_site(),
+        }
+    }
+
+    pub fn op(&self) -> char {
+        self.op
+    }
+
+    pub fn spacing(&self) -> Spacing {
+        self.spacing
+    }
+
+    pub fn span(&self) -> Span {
+        self.span
+    }
+
+    pub fn set_span(&mut self, span: Span) {
+        self.span = span;
+    }
+}
+
+impl fmt::Display for Op {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.op.fmt(f)
+    }
+}
+
+#[derive(Copy, Clone)]
+pub struct Term {
+    inner: imp::Term,
+    span: Span,
+    _marker: marker::PhantomData<Rc<()>>,
+}
+
+impl Term {
+    fn _new(inner: imp::Term, span: Span) -> Term {
+        Term {
+            inner: inner,
+            span: span,
+            _marker: marker::PhantomData,
+        }
+    }
+
+    pub fn new(string: &str, span: Span) -> Term {
+        Term::_new(imp::Term::intern(string), span)
+    }
+
+    pub fn as_str(&self) -> &str {
+        self.inner.as_str()
+    }
+
+    pub fn span(&self) -> Span {
+        self.span
+    }
+
+    pub fn set_span(&mut self, span: Span) {
+        self.span = span;
+    }
+}
+
+impl fmt::Display for Term {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.as_str().fmt(f)
+    }
+}
+
+impl fmt::Debug for Term {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
+    }
+}
+
 #[derive(Clone)]
-pub struct Literal(imp::Literal);
+pub struct Literal {
+    inner: imp::Literal,
+    span: Span,
+    _marker: marker::PhantomData<Rc<()>>,
+}
 
-macro_rules! int_literals {
-    ($($kind:ident,)*) => ($(
-        pub fn $kind(n: $kind) -> Literal {
-            Literal(n.into())
+macro_rules! suffixed_int_literals {
+    ($($name:ident => $kind:ident,)*) => ($(
+        #[allow(unused_comparisons)]
+        pub fn $name(n: $kind) -> Literal {
+            Literal::_new(n.into())
+        }
+    )*)
+}
+
+macro_rules! unsuffixed_int_literals {
+    ($($name:ident => $kind:ident,)*) => ($(
+        #[allow(unused_comparisons)]
+        pub fn $name(n: $kind) -> Literal {
+            Literal::_new(imp::Literal::integer(n as i64))
         }
     )*)
 }
 
 impl Literal {
-    pub fn integer(s: i64) -> Literal {
-        Literal(imp::Literal::integer(s))
+    fn _new(inner: imp::Literal) -> Literal {
+        Literal {
+            inner: inner,
+            span: Span::call_site(),
+            _marker: marker::PhantomData,
+        }
+    }
+
+    suffixed_int_literals! {
+        u8_suffixed => u8,
+        u16_suffixed => u16,
+        u32_suffixed => u32,
+        u64_suffixed => u64,
+        usize_suffixed => usize,
+        i8_suffixed => i8,
+        i16_suffixed => i16,
+        i32_suffixed => i32,
+        i64_suffixed => i64,
+        isize_suffixed => isize,
+    }
+
+    unsuffixed_int_literals! {
+        u8_unsuffixed => u8,
+        u16_unsuffixed => u16,
+        u32_unsuffixed => u32,
+        u64_unsuffixed => u64,
+        usize_unsuffixed => usize,
+        i8_unsuffixed => i8,
+        i16_unsuffixed => i16,
+        i32_unsuffixed => i32,
+        i64_unsuffixed => i64,
+        isize_unsuffixed => isize,
     }
 
-    int_literals! {
-        u8, u16, u32, u64, usize,
-        i8, i16, i32, i64, isize,
+    pub fn f64_unsuffixed(f: f64) -> Literal {
+        assert!(f.is_finite());
+        Literal::_new(imp::Literal::float(f))
     }
 
-    pub fn float(f: f64) -> Literal {
-        Literal(imp::Literal::float(f))
+    pub fn f64_suffixed(f: f64) -> Literal {
+        assert!(f.is_finite());
+        Literal::_new(f.into())
     }
 
-    pub fn f64(f: f64) -> Literal {
-        Literal(f.into())
+    pub fn f32_unsuffixed(f: f32) -> Literal {
+        assert!(f.is_finite());
+        Literal::_new(imp::Literal::float(f as f64))
     }
 
-    pub fn f32(f: f32) -> Literal {
-        Literal(f.into())
+    pub fn f32_suffixed(f: f32) -> Literal {
+        assert!(f.is_finite());
+        Literal::_new(f.into())
     }
 
     pub fn string(string: &str) -> Literal {
-        Literal(string.into())
+        Literal::_new(string.into())
     }
 
     pub fn character(ch: char) -> Literal {
-        Literal(ch.into())
+        Literal::_new(ch.into())
     }
 
     pub fn byte_string(s: &[u8]) -> Literal {
-        Literal(imp::Literal::byte_string(s))
+        Literal::_new(imp::Literal::byte_string(s))
     }
 
-    // =======================================================================
-    // Not present upstream in proc_macro yet
-
-    pub fn byte_char(b: u8) -> Literal {
-        Literal(imp::Literal::byte_char(b))
+    pub fn span(&self) -> Span {
+        self.span
     }
 
-    pub fn doccomment(s: &str) -> Literal {
-        Literal(imp::Literal::doccomment(s))
+    pub fn set_span(&mut self, span: Span) {
+        self.span = span;
     }
+}
 
-    pub fn raw_string(s: &str, pounds: usize) -> Literal {
-        Literal(imp::Literal::raw_string(s, pounds))
+impl fmt::Debug for Literal {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
     }
+}
 
-    pub fn raw_byte_string(s: &str, pounds: usize) -> Literal {
-        Literal(imp::Literal::raw_byte_string(s, pounds))
+impl fmt::Display for Literal {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
     }
 }
 
-pub struct TokenTreeIter(imp::TokenTreeIter);
+pub mod token_stream {
+    use std::fmt;
+    use std::marker;
+    use std::rc::Rc;
 
-impl Iterator for TokenTreeIter {
-    type Item = TokenTree;
+    use imp;
+    use TokenTree;
+    pub use TokenStream;
 
-    fn next(&mut self) -> Option<TokenTree> {
-        self.0.next()
+    pub struct IntoIter {
+        inner: imp::TokenTreeIter,
+        _marker: marker::PhantomData<Rc<()>>,
+    }
+
+    impl Iterator for IntoIter {
+        type Item = TokenTree;
+
+        fn next(&mut self) -> Option<TokenTree> {
+            self.inner.next()
+        }
+    }
+
+    impl fmt::Debug for IntoIter {
+        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+            self.inner.fmt(f)
+        }
     }
-}
 
-forward_fmt!(Debug for LexError);
-forward_fmt!(Debug for Literal);
-forward_fmt!(Debug for Span);
-forward_fmt!(Debug for Term);
-forward_fmt!(Debug for TokenTreeIter);
-forward_fmt!(Debug for TokenStream);
-forward_fmt!(Display for Literal);
-forward_fmt!(Display for TokenStream);
+    impl IntoIterator for TokenStream {
+        type Item = TokenTree;
+        type IntoIter = IntoIter;
+
+        fn into_iter(self) -> IntoIter {
+            IntoIter {
+                inner: self.inner.into_iter(),
+                _marker: marker::PhantomData,
+            }
+        }
+    }
+}
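Taken together, the new public surface in src/lib.rs composes like the following minimal sketch (not part of this change; the `answer = (6 * 7);` tokens are arbitrary). Concrete token types are built with their `new` constructors, converted into the `TokenTree` enum via the `From` impls, and gathered into a `TokenStream` through the new `FromIterator<TokenTree>` impl; spans now live on each `Group`/`Term`/`Op`/`Literal` value and default to `Span::call_site()`, except for `Term::new`, which takes one explicitly.

```rust
extern crate proc_macro2;

use proc_macro2::{Delimiter, Group, Literal, Op, Spacing, Span, Term, TokenStream, TokenTree};

fn main() {
    // Tokens for `(6 * 7)`, collected into a stream for the group below.
    let inner: TokenStream = vec![
        TokenTree::Literal(Literal::u32_unsuffixed(6)),
        TokenTree::Op(Op::new('*', Spacing::Alone)),
        TokenTree::Literal(Literal::u32_unsuffixed(7)),
    ].into_iter().collect();

    // Tokens for `answer = (6 * 7);`.
    let tokens: Vec<TokenTree> = vec![
        Term::new("answer", Span::call_site()).into(),
        Op::new('=', Spacing::Alone).into(),
        Group::new(Delimiter::Parenthesis, inner).into(),
        Op::new(';', Spacing::Alone).into(),
    ];

    let stream: TokenStream = tokens.into_iter().collect();
    assert!(!stream.is_empty());

    // The stream also round-trips through the `Display`/`FromStr` impls above.
    let reparsed: TokenStream = stream.to_string().parse().unwrap();
    println!("{} => {}", stream, reparsed);
}
```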
diff --git a/src/macros.rs b/src/macros.rs
deleted file mode 100644
index 89eb554c..00000000
--- a/src/macros.rs
+++ /dev/null
@@ -1,9 +0,0 @@
-macro_rules! forward_fmt {
-    ($tr:ident for $ty:ident) => {
-        impl ::std::fmt::$tr for $ty {
-            fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
-                ::std::fmt::$tr::fmt(&self.0, f)
-            }
-        }
-    }
-}
diff --git a/src/stable.rs b/src/stable.rs
index cb376a95..d7d35d00 100644
--- a/src/stable.rs
+++ b/src/stable.rs
@@ -1,3 +1,5 @@
+#![allow(dead_code)]
+
 use std::ascii;
 use std::borrow::Borrow;
 use std::cell::RefCell;
@@ -14,7 +16,7 @@ use std::vec;
 use unicode_xid::UnicodeXID;
 use strnom::{Cursor, PResult, skip_whitespace, block_comment, whitespace, word_break};
 
-use {TokenTree, TokenNode, Delimiter, Spacing};
+use {TokenTree, Delimiter, Spacing, Group, Op};
 
 #[derive(Clone, Debug)]
 pub struct TokenStream {
@@ -67,7 +69,7 @@ impl FromStr for TokenStream {
                 if skip_whitespace(input).len() != 0 {
                     Err(LexError)
                 } else {
-                    Ok(output.0)
+                    Ok(output.inner)
                 }
             }
             Err(LexError) => Err(LexError),
@@ -83,32 +85,32 @@ impl fmt::Display for TokenStream {
                 write!(f, " ")?;
             }
             joint = false;
-            match tt.kind {
-                TokenNode::Group(delim, ref stream) => {
-                    let (start, end) = match delim {
+            match *tt {
+                TokenTree::Group(ref tt) => {
+                    let (start, end) = match tt.delimiter() {
                         Delimiter::Parenthesis => ("(", ")"),
                         Delimiter::Brace => ("{", "}"),
                         Delimiter::Bracket => ("[", "]"),
                         Delimiter::None => ("", ""),
                     };
-                    if stream.0.inner.len() == 0 {
+                    if tt.stream().inner.inner.len() == 0 {
                         write!(f, "{} {}", start, end)?
                     } else {
-                        write!(f, "{} {} {}", start, stream, end)?
+                        write!(f, "{} {} {}", start, tt.stream(), end)?
                     }
                 }
-                TokenNode::Term(ref sym) => write!(f, "{}", sym.as_str())?,
-                TokenNode::Op(ch, ref op) => {
-                    write!(f, "{}", ch)?;
-                    match *op {
+                TokenTree::Term(ref tt) => write!(f, "{}", tt.as_str())?,
+                TokenTree::Op(ref tt) => {
+                    write!(f, "{}", tt.op())?;
+                    match tt.spacing() {
                         Spacing::Alone => {}
                         Spacing::Joint => joint = true,
                     }
                 }
-                TokenNode::Literal(ref literal) => {
-                    write!(f, "{}", literal)?;
+                TokenTree::Literal(ref tt) => {
+                    write!(f, "{}", tt)?;
                     // handle comments
-                    if (literal.0).0.starts_with("/") {
+                    if tt.inner.0.starts_with("/") {
                         write!(f, "\n")?;
                     }
                 }
@@ -140,12 +142,12 @@ impl From<TokenTree> for TokenStream {
     }
 }
 
-impl iter::FromIterator<TokenStream> for TokenStream {
-    fn from_iter<I: IntoIterator<Item=TokenStream>>(streams: I) -> Self {
+impl iter::FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item=TokenTree>>(streams: I) -> Self {
         let mut v = Vec::new();
 
-        for stream in streams.into_iter() {
-            v.extend(stream.inner);
+        for token in streams.into_iter() {
+            v.push(token);
         }
 
         TokenStream { inner: v }
@@ -589,64 +591,58 @@ impl From<char> for Literal {
 
 named!(token_stream -> ::TokenStream, map!(
     many0!(token_tree),
-    |trees| ::TokenStream(TokenStream { inner: trees })
+    |trees| ::TokenStream::_new(TokenStream { inner: trees })
 ));
 
 #[cfg(not(procmacro2_semver_exempt))]
 fn token_tree(input: Cursor) -> PResult<TokenTree> {
-    let (input, kind) = token_kind(input)?;
-    Ok((input, TokenTree {
-        span: ::Span(Span {}),
-        kind: kind,
-    }))
+    token_kind(input)
 }
 
 #[cfg(procmacro2_semver_exempt)]
 fn token_tree(input: Cursor) -> PResult<TokenTree> {
     let input = skip_whitespace(input);
     let lo = input.off;
-    let (input, kind) = token_kind(input)?;
+    let (input, mut token) = token_kind(input)?;
     let hi = input.off;
-    Ok((input, TokenTree {
-        span: ::Span(Span {
-            lo: lo,
-            hi: hi,
-        }),
-        kind: kind,
-    }))
+    token.set_span(::Span::_new(Span {
+        lo: lo,
+        hi: hi,
+    }));
+    Ok((input, token))
 }
 
-named!(token_kind -> TokenNode, alt!(
-    map!(delimited, |(d, s)| TokenNode::Group(d, s))
+named!(token_kind -> TokenTree, alt!(
+    map!(group, TokenTree::Group)
     |
-    map!(literal, TokenNode::Literal) // must be before symbol
+    map!(literal, TokenTree::Literal) // must be before symbol
     |
     symbol
     |
-    map!(op, |(op, kind)| TokenNode::Op(op, kind))
+    map!(op, TokenTree::Op)
 ));
 
-named!(delimited -> (Delimiter, ::TokenStream), alt!(
+named!(group -> Group, alt!(
     delimited!(
         punct!("("),
         token_stream,
         punct!(")")
-    ) => { |ts| (Delimiter::Parenthesis, ts) }
+    ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
     |
     delimited!(
         punct!("["),
         token_stream,
         punct!("]")
-    ) => { |ts| (Delimiter::Bracket, ts) }
+    ) => { |ts| Group::new(Delimiter::Bracket, ts) }
     |
     delimited!(
         punct!("{"),
         token_stream,
         punct!("}")
-    ) => { |ts| (Delimiter::Brace, ts) }
+    ) => { |ts| Group::new(Delimiter::Brace, ts) }
 ));
 
-fn symbol(mut input: Cursor) -> PResult<TokenNode> {
+fn symbol(mut input: Cursor) -> PResult<TokenTree> {
     input = skip_whitespace(input);
 
     let mut chars = input.char_indices();
@@ -674,9 +670,9 @@ fn symbol(mut input: Cursor) -> PResult<TokenNode> {
     } else {
         let a = &input.rest[..end];
         if a == "_" {
-            Ok((input.advance(end), TokenNode::Op('_', Spacing::Alone)))
+            Ok((input.advance(end), Op::new('_', Spacing::Alone).into()))
         } else {
-            Ok((input.advance(end), TokenNode::Term(::Term::intern(a))))
+            Ok((input.advance(end), ::Term::new(a, ::Span::call_site()).into()))
         }
     }
 }
@@ -700,7 +696,7 @@ fn literal(input: Cursor) -> PResult<::Literal> {
             let start = input.len() - input_no_ws.len();
             let len = input_no_ws.len() - a.len();
             let end = start + len;
-            Ok((a, ::Literal(Literal(input.rest[start..end].to_string()))))
+            Ok((a, ::Literal::_new(Literal(input.rest[start..end].to_string()))))
         }
         Err(LexError) => Err(LexError),
     }
@@ -1147,7 +1143,7 @@ fn digits(mut input: Cursor) -> PResult<()> {
     }
 }
 
-fn op(input: Cursor) -> PResult<(char, Spacing)> {
+fn op(input: Cursor) -> PResult<Op> {
     let input = skip_whitespace(input);
     match op_char(input) {
         Ok((rest, ch)) => {
@@ -1155,7 +1151,7 @@ fn op(input: Cursor) -> PResult<(char, Spacing)> {
                 Ok(_) => Spacing::Joint,
                 Err(LexError) => Spacing::Alone,
             };
-            Ok((rest, (ch, kind)))
+            Ok((rest, Op::new(ch, kind)))
         }
         Err(LexError) => Err(LexError),
     }
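On the fallback side, consumers now match on `TokenTree` variants instead of `TokenNode`, mirroring the `Display` rewrite above. A short sketch of walking a parsed stream (not part of this change; the input string is arbitrary):

```rust
extern crate proc_macro2;

use proc_macro2::{TokenStream, TokenTree};

fn describe(stream: TokenStream, indent: usize) {
    let pad = " ".repeat(indent);
    for tree in stream {
        match tree {
            TokenTree::Group(g) => {
                println!("{}group {:?}", pad, g.delimiter());
                // `Group::stream()` hands back the nested tokens, so recurse.
                describe(g.stream(), indent + 2);
            }
            TokenTree::Term(t) => println!("{}term `{}`", pad, t.as_str()),
            TokenTree::Op(o) => println!("{}op `{}` ({:?})", pad, o.op(), o.spacing()),
            TokenTree::Literal(l) => println!("{}literal `{}`", pad, l),
        }
    }
}

fn main() {
    let stream: TokenStream = "fn add(a: i32, b: i32) -> i32 { a + b }"
        .parse()
        .expect("valid token stream");
    describe(stream, 0);
}
```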
diff --git a/src/unstable.rs b/src/unstable.rs
index 6f971977..e01e1f09 100644
--- a/src/unstable.rs
+++ b/src/unstable.rs
@@ -1,3 +1,5 @@
+#![allow(dead_code)]
+
 use std::ascii;
 use std::fmt;
 use std::iter;
@@ -5,7 +7,7 @@ use std::str::FromStr;
 
 use proc_macro;
 
-use {TokenTree, TokenNode, Delimiter, Spacing};
+use {TokenTree, Delimiter, Spacing, Group, Op};
 
 #[derive(Clone)]
 pub struct TokenStream(proc_macro::TokenStream);
@@ -49,40 +51,40 @@ impl From<TokenStream> for proc_macro::TokenStream {
 }
 
 impl From<TokenTree> for TokenStream {
-    fn from(tree: TokenTree) -> TokenStream {
-        TokenStream(proc_macro::TokenTree {
-            span: (tree.span.0).0,
-            kind: match tree.kind {
-                TokenNode::Group(delim, s) => {
-                    let delim = match delim {
-                        Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
-                        Delimiter::Bracket => proc_macro::Delimiter::Bracket,
-                        Delimiter::Brace => proc_macro::Delimiter::Brace,
-                        Delimiter::None => proc_macro::Delimiter::None,
-                    };
-                    proc_macro::TokenNode::Group(delim, (s.0).0)
-                }
-                TokenNode::Op(ch, kind) => {
-                    let kind = match kind {
-                        Spacing::Joint => proc_macro::Spacing::Joint,
-                        Spacing::Alone => proc_macro::Spacing::Alone,
-                    };
-                    proc_macro::TokenNode::Op(ch, kind)
-                }
-                TokenNode::Term(s) => {
-                    proc_macro::TokenNode::Term((s.0).0)
-                }
-                TokenNode::Literal(l) => {
-                    proc_macro::TokenNode::Literal((l.0).0)
-                }
-            },
-        }.into())
-    }
-}
-
-impl iter::FromIterator<TokenStream> for TokenStream {
-    fn from_iter<I: IntoIterator<Item=TokenStream>>(streams: I) -> Self {
-        let streams = streams.into_iter().map(|s| s.0);
+    fn from(token: TokenTree) -> TokenStream {
+        let (span, kind) = match token {
+            TokenTree::Group(tt) => {
+                let delim = match tt.delimiter() {
+                    Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
+                    Delimiter::Bracket => proc_macro::Delimiter::Bracket,
+                    Delimiter::Brace => proc_macro::Delimiter::Brace,
+                    Delimiter::None => proc_macro::Delimiter::None,
+                };
+                let span = tt.span();
+                let group = proc_macro::TokenNode::Group(delim, tt.stream.inner.0);
+                (span, group)
+            }
+            TokenTree::Op(tt) => {
+                let kind = match tt.spacing() {
+                    Spacing::Joint => proc_macro::Spacing::Joint,
+                    Spacing::Alone => proc_macro::Spacing::Alone,
+                };
+                (tt.span(), proc_macro::TokenNode::Op(tt.op(), kind))
+            }
+            TokenTree::Term(tt) => {
+                (tt.span(), proc_macro::TokenNode::Term(tt.inner.0))
+            }
+            TokenTree::Literal(tt) => {
+                (tt.span(), proc_macro::TokenNode::Literal(tt.inner.0))
+            }
+        };
+        TokenStream(proc_macro::TokenTree { span: span.inner.0, kind }.into())
+    }
+}
+
+impl iter::FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item=TokenTree>>(streams: I) -> Self {
+        let streams = streams.into_iter().map(TokenStream::from);
         TokenStream(streams.collect::<proc_macro::TokenStream>())
     }
 }
@@ -114,36 +116,38 @@ impl Iterator for TokenTreeIter {
     type Item = TokenTree;
 
     fn next(&mut self) -> Option<TokenTree> {
-        let token = match self.0.next() {
-            Some(n) => n,
-            None => return None,
-        };
-        Some(TokenTree {
-            span: ::Span(Span(token.span)),
-            kind: match token.kind {
-                proc_macro::TokenNode::Group(delim, s) => {
-                    let delim = match delim {
-                        proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
-                        proc_macro::Delimiter::Bracket => Delimiter::Bracket,
-                        proc_macro::Delimiter::Brace => Delimiter::Brace,
-                        proc_macro::Delimiter::None => Delimiter::None,
-                    };
-                    TokenNode::Group(delim, ::TokenStream(TokenStream(s)))
-                }
-                proc_macro::TokenNode::Op(ch, kind) => {
-                    let kind = match kind {
-                        proc_macro::Spacing::Joint => Spacing::Joint,
-                        proc_macro::Spacing::Alone => Spacing::Alone,
-                    };
-                    TokenNode::Op(ch, kind)
-                }
-                proc_macro::TokenNode::Term(s) => {
-                    TokenNode::Term(::Term(Term(s)))
-                }
-                proc_macro::TokenNode::Literal(l) => {
-                    TokenNode::Literal(::Literal(Literal(l)))
-                }
-            },
+        let token = self.0.next()?;
+        let span = ::Span::_new(Span(token.span));
+        Some(match token.kind {
+            proc_macro::TokenNode::Group(delim, s) => {
+                let delim = match delim {
+                    proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
+                    proc_macro::Delimiter::Bracket => Delimiter::Bracket,
+                    proc_macro::Delimiter::Brace => Delimiter::Brace,
+                    proc_macro::Delimiter::None => Delimiter::None,
+                };
+                let stream = ::TokenStream::_new(TokenStream(s));
+                let mut g = Group::new(delim, stream);
+                g.set_span(span);
+                g.into()
+            }
+            proc_macro::TokenNode::Op(ch, kind) => {
+                let kind = match kind {
+                    proc_macro::Spacing::Joint => Spacing::Joint,
+                    proc_macro::Spacing::Alone => Spacing::Alone,
+                };
+                let mut o = Op::new(ch, kind);
+                o.span = span;
+                o.into()
+            }
+            proc_macro::TokenNode::Term(s) => {
+                ::Term::_new(Term(s), span).into()
+            }
+            proc_macro::TokenNode::Literal(l) => {
+                let mut l = ::Literal::_new(Literal(l));
+                l.span = span;
+                l.into()
+            }
         })
     }
 
@@ -158,11 +162,9 @@ impl fmt::Debug for TokenTreeIter {
     }
 }
 
-#[cfg(procmacro2_semver_exempt)]
 #[derive(Clone, PartialEq, Eq)]
 pub struct FileName(String);
 
-#[cfg(procmacro2_semver_exempt)]
 impl fmt::Display for FileName {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.0.fmt(f)
@@ -171,11 +173,9 @@ impl fmt::Display for FileName {
 
 // NOTE: We have to generate our own filename object here because we can't wrap
 // the one provided by proc_macro.
-#[cfg(procmacro2_semver_exempt)]
 #[derive(Clone, PartialEq, Eq)]
 pub struct SourceFile(proc_macro::SourceFile, FileName);
 
-#[cfg(procmacro2_semver_exempt)]
 impl SourceFile {
     fn new(sf: proc_macro::SourceFile) -> Self {
         let filename = FileName(sf.path().to_string());
@@ -192,21 +192,18 @@ impl SourceFile {
     }
 }
 
-#[cfg(procmacro2_semver_exempt)]
 impl AsRef<FileName> for SourceFile {
     fn as_ref(&self) -> &FileName {
         self.path()
     }
 }
 
-#[cfg(procmacro2_semver_exempt)]
 impl fmt::Debug for SourceFile {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.0.fmt(f)
     }
 }
 
-#[cfg(procmacro2_semver_exempt)]
 pub struct LineColumn {
     pub line: usize,
     pub column: usize,
@@ -217,7 +214,7 @@ pub struct Span(proc_macro::Span);
 
 impl From<proc_macro::Span> for ::Span {
     fn from(proc_span: proc_macro::Span) -> ::Span {
-        ::Span(Span(proc_span))
+        ::Span::_new(Span(proc_span))
     }
 }
 
@@ -242,24 +239,20 @@ impl Span {
         self.0
     }
 
-    #[cfg(procmacro2_semver_exempt)]
     pub fn source_file(&self) -> SourceFile {
         SourceFile::new(self.0.source_file())
     }
 
-    #[cfg(procmacro2_semver_exempt)]
     pub fn start(&self) -> LineColumn {
         let proc_macro::LineColumn{ line, column } = self.0.start();
         LineColumn { line, column }
     }
 
-    #[cfg(procmacro2_semver_exempt)]
     pub fn end(&self) -> LineColumn {
         let proc_macro::LineColumn{ line, column } = self.0.end();
         LineColumn { line, column }
     }
 
-    #[cfg(procmacro2_semver_exempt)]
     pub fn join(&self, other: Span) -> Option<Span> {
         self.0.join(other.0).map(Span)
     }
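The `From` conversions kept on the nightly side are what let an actual procedural macro shuttle tokens through proc-macro2. A hedged sketch of a derive that passes its input through unchanged, assuming a crate built with `crate-type = ["proc-macro"]` and the default `proc-macro` feature; the `Noop` name and the no-op body are illustrative only:

```rust
extern crate proc_macro;
extern crate proc_macro2;

use proc_macro2::TokenStream;

#[proc_macro_derive(Noop)]
pub fn noop(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    // Lift into proc-macro2 so the same logic also compiles in fallback mode.
    let input: TokenStream = input.into();

    // Rebuild the stream token by token; a real macro would inspect or
    // rewrite the trees here instead of passing them straight through.
    let output: TokenStream = input.into_iter().collect();

    // Hand the result back to the compiler.
    output.into()
}
```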
diff --git a/tests/test.rs b/tests/test.rs
index 2e481d16..7bd96f0c 100644
--- a/tests/test.rs
+++ b/tests/test.rs
@@ -2,26 +2,19 @@ extern crate proc_macro2;
 
 use std::str;
 
-use proc_macro2::{Term, Literal, TokenStream};
-
-#[cfg(procmacro2_semver_exempt)]
-use proc_macro2::TokenNode;
-
-#[cfg(procmacro2_semver_exempt)]
-#[cfg(not(feature = "nightly"))]
-use proc_macro2::Span;
+use proc_macro2::{Term, Literal, TokenStream, Span};
 
 #[test]
 fn symbols() {
-    assert_eq!(Term::intern("foo").as_str(), "foo");
-    assert_eq!(Term::intern("bar").as_str(), "bar");
+    assert_eq!(Term::new("foo", Span::call_site()).as_str(), "foo");
+    assert_eq!(Term::new("bar", Span::call_site()).as_str(), "bar");
 }
 
 #[test]
 fn literals() {
     assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
     assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
-    assert_eq!(Literal::float(10.0).to_string(), "10.0");
+    assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
 }
 
 #[test]
@@ -78,6 +71,8 @@ fn fail() {
 #[cfg(procmacro2_semver_exempt)]
 #[test]
 fn span_test() {
+    use proc_macro2::TokenTree;
+
     fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
         let ts = p.parse::<TokenStream>().unwrap();
         check_spans_internal(ts, &mut lines);
@@ -91,17 +86,18 @@ fn span_test() {
             if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
                 *lines = rest;
 
-                let start = i.span.start();
+                let start = i.span().start();
                 assert_eq!(start.line, sline, "sline did not match for {}", i);
                 assert_eq!(start.column, scol, "scol did not match for {}", i);
 
-                let end = i.span.end();
+                let end = i.span().end();
                 assert_eq!(end.line, eline, "eline did not match for {}", i);
                 assert_eq!(end.column, ecol, "ecol did not match for {}", i);
 
-                match i.kind {
-                    TokenNode::Group(_, stream) =>
-                        check_spans_internal(stream, lines),
+                match i {
+                    TokenTree::Group(ref g) => {
+                        check_spans_internal(g.stream().clone(), lines);
+                    }
                     _ => {}
                 }
             }
@@ -146,11 +142,11 @@ fn span_join() {
     let source2 =
         "ccc\nddd".parse::<TokenStream>().unwrap().into_iter().collect::<Vec<_>>();
 
-    assert!(source1[0].span.source_file() != source2[0].span.source_file());
-    assert_eq!(source1[0].span.source_file(), source1[1].span.source_file());
+    assert!(source1[0].span().source_file() != source2[0].span().source_file());
+    assert_eq!(source1[0].span().source_file(), source1[1].span().source_file());
 
-    let joined1 = source1[0].span.join(source1[1].span);
-    let joined2 = source1[0].span.join(source2[0].span);
+    let joined1 = source1[0].span().join(source1[1].span());
+    let joined2 = source1[0].span().join(source2[0].span());
     assert!(joined1.is_some());
     assert!(joined2.is_none());
 
@@ -161,7 +157,7 @@ fn span_join() {
     assert_eq!(end.line, 2);
     assert_eq!(end.column, 3);
 
-    assert_eq!(joined1.unwrap().source_file(), source1[0].span.source_file());
+    assert_eq!(joined1.unwrap().source_file(), source1[0].span().source_file());
 }
 
 #[test]
@@ -179,8 +175,8 @@ fn tricky_doc_commaent() {
     let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
     let tokens = stream.into_iter().collect::<Vec<_>>();
     assert!(tokens.len() == 1, "not length 1 -- {:?}", tokens);
-    match tokens[0].kind {
-        proc_macro2::TokenNode::Literal(_) => {}
+    match tokens[0] {
+        proc_macro2::TokenTree::Literal(_) => {}
         _ => panic!("wrong token {:?}", tokens[0]),
     }
 }
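The per-token `span()`/`set_span()` accessors exercised in these tests also make it easy to re-span an entire stream. A minimal sketch (not part of this change) that stamps every token, including the contents of nested groups, with a given span:

```rust
extern crate proc_macro2;

use proc_macro2::{Group, Span, TokenStream, TokenTree};

fn respan(stream: TokenStream, span: Span) -> TokenStream {
    stream
        .into_iter()
        .map(|tree| match tree {
            TokenTree::Group(g) => {
                // A group's own span and its nested tokens are separate, so
                // rebuild the group around a recursively re-spanned stream.
                let mut group = Group::new(g.delimiter(), respan(g.stream(), span));
                group.set_span(span);
                TokenTree::Group(group)
            }
            mut other => {
                other.set_span(span);
                other
            }
        })
        .collect()
}

fn main() {
    let ts: TokenStream = "foo { bar(1); }".parse().unwrap();
    println!("{}", respan(ts, Span::call_site()));
}
```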