Track latest git deps and rust compiler

Tim Kuehn
2016-11-05 15:06:39 -07:00
parent 85fbe411e6
commit 3afcfe6274
3 changed files with 10 additions and 10 deletions

View File

@@ -14,7 +14,7 @@ description = "An RPC framework for Rust with a focus on ease of use."
bincode = "0.6"
byteorder = "0.5"
bytes = { git = "https://github.com/carllerche/bytes" }
-futures = "0.1"
+futures = { git = "https://github.com/alexcrichton/futures-rs" }
lazy_static = "0.2"
log = "0.3"
scoped-pool = "1.0"
@@ -28,6 +28,7 @@ tokio-core = { git = "https://github.com/tokio-rs/tokio-core" }
[replace]
"tokio-core:0.1.0" = { git = "https://github.com/tokio-rs/tokio-core" }
"futures:0.1.3" = { git = "https://github.com/alexcrichton/futures-rs" }
[dev-dependencies]
chrono = "0.2"
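
For context on the pinned futures-rs revision: it exposes the 0.1-style Poll and Async types that the protocol code below is written against, where Poll<T, E> is simply Result<Async<T>, E>, so the ? operator and ordinary Result handling work inside poll-style methods. A minimal sketch of that shape, assuming futures 0.1 semantics; poll_header is an illustrative name, not part of tarpc:

extern crate futures;

use futures::{Async, Poll};
use std::io;

// Illustrative only: report whether an 8-byte header is fully buffered yet.
fn poll_header(buf: &[u8]) -> Poll<u64, io::Error> {
    if buf.len() < 8 {
        // Not enough input yet; the caller should poll again later.
        return Ok(Async::NotReady);
    }
    // A real implementation would decode the header here.
    Ok(Async::Ready(8))
}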

View File

@@ -4,8 +4,7 @@
// This file may not be copied, modified, or distributed except according to those terms.
use bincode::{SizeLimit, serde as bincode};
-use byteorder::BigEndian;
-use bytes::{Buf, MutBuf};
+use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use futures::{Async, Poll};
use serde;
use std::io::{self, Cursor};
@@ -96,14 +95,14 @@ impl<T> easy::Parse for Parser<T>
match self.state {
Id if buf.len() < mem::size_of::<u64>() => return Ok(Async::NotReady),
Id => {
-self.state = Len { id: Cursor::new(&*buf.get_mut()).read_u64::<BigEndian>() };
+self.state = Len { id: Cursor::new(&*buf.get_mut()).read_u64::<BigEndian>()? };
*buf = buf.split_off(mem::size_of::<u64>());
}
Len { .. } if buf.len() < mem::size_of::<u64>() => return Ok(Async::NotReady),
Len { id } => {
self.state = Payload {
id: id,
-len: Cursor::new(&*buf.get_mut()).read_u64::<BigEndian>(),
+len: Cursor::new(&*buf.get_mut()).read_u64::<BigEndian>()?,
};
*buf = buf.split_off(mem::size_of::<u64>());
}
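
The ? operators above are the visible effect of tracking the newer toolchain and reading the header through byteorder: ReadBytesExt::read_u64 returns io::Result<u64>, and the recently stabilized ? operator lets the parse state machine propagate that error instead of handling it inline. A standalone sketch of the same length-prefix read, assuming only the byteorder crate; read_len is an illustrative helper, not tarpc code:

extern crate byteorder;

use byteorder::{BigEndian, ReadBytesExt};
use std::io::{self, Cursor};

// Illustrative only: pull a big-endian u64 length prefix off the front of a buffer.
// read_u64 returns io::Result<u64>, so a truncated buffer surfaces as an Err that
// callers can propagate with ?.
fn read_len(buf: &[u8]) -> io::Result<u64> {
    Cursor::new(buf).read_u64::<BigEndian>()
}

fn main() {
    let framed = [0u8, 0, 0, 0, 0, 0, 0, 5, b'h', b'i'];
    assert_eq!(read_len(&framed).unwrap(), 5);
}
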
@@ -139,8 +138,8 @@ impl<T> easy::Serialize for Serializer<T>
type In = (RequestId, T);
fn serialize(&mut self, (id, message): Self::In, buf: &mut Vec<u8>) {
-buf.write_u64::<BigEndian>(id);
-buf.write_u64::<BigEndian>(bincode::serialized_size(&message));
+buf.write_u64::<BigEndian>(id).unwrap();
+buf.write_u64::<BigEndian>(bincode::serialized_size(&message)).unwrap();
bincode::serialize_into(buf,
&message,
SizeLimit::Infinite)
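
The matching change on the write side is the pair of .unwrap() calls: WriteBytesExt::write_u64 also returns io::Result<()>, and because the sink here is a Vec<u8>, whose Write implementation never returns an error, unwrapping is the usual way to discharge that Result. A minimal sketch, again assuming only byteorder; write_header is an illustrative helper, not tarpc code:

extern crate byteorder;

use byteorder::{BigEndian, WriteBytesExt};

// Illustrative only: prefix a frame with a request id and a payload length.
// write_u64 returns io::Result<()>, but writing into a Vec<u8> cannot fail,
// so unwrap() simply discharges the Result rather than adding an error path.
fn write_header(buf: &mut Vec<u8>, id: u64, len: u64) {
    buf.write_u64::<BigEndian>(id).unwrap();
    buf.write_u64::<BigEndian>(len).unwrap();
}

fn main() {
    let mut buf = Vec::new();
    write_header(&mut buf, 1, 5);
    assert_eq!(buf.len(), 16); // two 8-byte big-endian words
}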

View File

@@ -21,7 +21,7 @@ use syntax::tokenstream::TokenTree;
use syntax::util::small_vector::SmallVector;
fn snake_to_camel(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<MacResult + 'static> {
-let mut parser = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg().clone(), tts.into());
+let mut parser = parse::new_parser_from_tts(cx.parse_sess(), tts.into());
// The `expand_expr` method is called so that any macro calls in the
// parsed expression are expanded.
@@ -69,7 +69,7 @@ fn snake_to_camel(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<MacResul
}
fn impl_snake_to_camel(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<MacResult + 'static> {
-let mut parser = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg().clone(), tts.into());
+let mut parser = parse::new_parser_from_tts(cx.parse_sess(), tts.into());
// The `expand_expr` method is called so that any macro calls in the
// parsed expression are expanded.
@@ -91,7 +91,7 @@ fn impl_snake_to_camel(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<Mac
}
fn ty_snake_to_camel(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<MacResult + 'static> {
-let mut parser = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg().clone(), tts.into());
+let mut parser = parse::new_parser_from_tts(cx.parse_sess(), tts.into());
// The `expand_expr` method is called so that any macro calls in the
// parsed expression are expanded.