To use these templates, you'll need to create a new Anki "note type" with the following fields (a sample card layout follows the list):
- Sound
- Time
- Source
- Image
- Target: line
- Base: line
- Target: line before
- Base: line before
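For illustration only, a card template wired to these fields might look something like the sketch below. The layout is my own guess, not the gist's actual templates:

Front:
    {{Sound}}
    {{Image}}

Back:
    {{Image}}
    {{Target: line before}}<br>
    <b>{{Target: line}}</b>
    <hr>
    {{Base: line before}}<br>
    {{Base: line}}<br>
    <small>{{Source}} {{Time}}</small>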
pub trait Nothing<'a> {
    fn nothing(&'a mut self) -> ();

    // Call a method borrowing `self` for `'a` twice in a row.
    fn twice_nothing(&'a mut self) -> () {
        { self.nothing(); }
        { self.nothing(); }
    }
}
// This works.
pub trait StreamingIterator<'a, T> {
    /// Return either the next item in the sequence, or `None` if all items
    /// have been consumed.
    fn next(&'a mut self) -> Option<T>;
    /// Hey, it compiles.
    fn reduce<S>(&'a mut self, init: S, r: |S,T| -> S) -> S {
        let mut sum = init;
        //streaming_for!(v in self, {
        //    sum = r(sum, v);
        //})
        loop {
            match self.next() {
                Some(v) => { sum = r(sum, v); }
                None => { return sum; }
            }
        }
    }
}
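Today this pattern goes by "lending iterator," and generic associated types can express it directly. A sketch in modern Rust (my naming, not the gist's) of the kind of API the lifetime parameter above is reaching for:

/// A lending ("streaming") iterator: each item may borrow from the
/// iterator itself, so only one item can be alive at a time.
trait LendingIterator {
    type Item<'a> where Self: 'a;
    fn next(&mut self) -> Option<Self::Item<'_>>;
}

/// Overlapping *mutable* windows over a slice -- impossible to hand out
/// with the standard `Iterator`, straightforward with the lending form.
struct WindowsMut<'s, T> {
    slice: &'s mut [T],
    start: usize,
    size: usize,
}

impl<'s, T> LendingIterator for WindowsMut<'s, T> {
    type Item<'a> = &'a mut [T] where Self: 'a;

    fn next(&mut self) -> Option<&mut [T]> {
        let end = self.start + self.size;
        if end > self.slice.len() {
            return None;
        }
        let start = self.start;
        self.start += 1;
        Some(&mut self.slice[start..end])
    }
}

fn main() {
    let mut data = [1, 2, 3, 4];
    let mut windows = WindowsMut { slice: &mut data, start: 0, size: 2 };
    while let Some(w) = windows.next() {
        w[0] += 10; // mutate each overlapping window in turn
    }
    assert_eq!(data, [11, 12, 13, 4]);
}

Because `next` borrows `&mut self`, the borrow checker forces each window to be dropped before the next call, which is exactly the guarantee the `&'a mut self` version above was trying to encode.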
impl<'a, T: Buffer + 'a> Buffer for ChunkBuffer<'a, T> {
    fn fill_buf<'b>(&'b mut self) -> IoResult<&'b [u8]> {
        if self.buffer.as_slice().contains_slice(self.boundary.as_slice()) {
            // Exit 1: Valid data in our local buffer.
            Ok(self.buffer.as_slice())
        } else if self.buffer.len() > 0 {
            // Exit 2: Add some more data to our local buffer so that it's
            // valid (see invariants for top_up).
            self.top_up()
        } else {
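Note that `contains_slice` is not a standard slice method; presumably the gist defines it elsewhere. A naive stand-in (an assumption, not the gist's code) could be:

/// Naive scan for `needle` anywhere inside `haystack` -- a stand-in for
/// the `contains_slice` helper the excerpt assumes.
fn contains_slice(haystack: &[u8], needle: &[u8]) -> bool {
    needle.is_empty() || haystack.windows(needle.len()).any(|w| w == needle)
}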
Some benchmark results, for MemReader and reading 130 tokens as sentences (with lots of String and Vector):

test conll::memreader_overhead         ... bench:    195 ns/iter (+/- 22) = 22394 MB/s
test conll::sentence_reader_iter_bench ... bench: 269884 ns/iter (+/- 40269) = 16 MB/s
test conll::token_from_str_bench       ... bench:     99 ns/iter (+/- 24) = 313 MB/s
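Output in this shape comes from Rust's built-in libtest benchmark harness (run via `cargo bench` on nightly); setting `Bencher::bytes` is what produces the MB/s column. A minimal, hypothetical benchmark in that style (not the actual conll suite):

#![feature(test)] // nightly-only benchmark harness
extern crate test;

use test::Bencher;

// Hypothetical stand-in for the real `token_from_str` benchmark.
#[bench]
fn token_from_str_bench(b: &mut Bencher) {
    let line = "1\tDurant\tdurant\tP\tP\t_\t2\tmod\t_\t_";
    b.iter(|| test::black_box(line).split('\t').count());
    // Report throughput: this drives the "MB/s" figure.
    b.bytes = line.len() as u64;
}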
#!/usr/bin/env ruby
#
# Convert `MElt -tdL` output into *.conllx format. Usage:
#
#   melt2conllx < input.melt > output.conllx
#
# Input should be one line per sentence, formatted like:
#
#   Durant/P/durant le/DET/le trajet/NC/trajet qui/PROREL/qui
#
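Since the script itself is cut off above, here is the core per-token conversion sketched in Rust rather than Ruby; the exact CONLL-X column mapping is my assumption, and the dependency columns are left unset:

// Split each `form/POS/lemma` token and print tab-separated CONLL-X columns.
fn main() {
    let sentence = "Durant/P/durant le/DET/le trajet/NC/trajet qui/PROREL/qui";
    for (i, token) in sentence.split_whitespace().enumerate() {
        // rsplitn avoids trouble if the form itself contains a slash.
        let mut parts = token.rsplitn(3, '/');
        let lemma = parts.next().unwrap_or("_");
        let pos = parts.next().unwrap_or("_");
        let form = parts.next().unwrap_or("_");
        // ID, FORM, LEMMA, CPOSTAG, POSTAG; remaining columns as "_".
        println!("{}\t{}\t{}\t{}\t{}\t_\t_\t_\t_\t_", i + 1, form, lemma, pos, pos);
    }
}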
pub struct Emitter<R, K: Hash + Eq + Clone, V: Copy, MR: MapReduce<R, K, V>> {
    results: HashMap<K, V>
}

impl<R, K: Hash + Eq + Clone, V: Copy, MR: MapReduce<R, K, V>> Emitter<R, K, V, MR> {
    fn new() -> Emitter<R, K, V, MR> {
        Emitter{results: HashMap::with_capacity(25000)}
    }

    #[inline]
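The preview cuts off at the `#[inline]` attribute. As a self-contained illustration of the emitter pattern itself (my code, not the gist's API): map emits `(key, value)` pairs, and the emitter combines values per key, here with a plain HashMap:

use std::collections::HashMap;

fn main() {
    // emit(word, 1) for every word, combining counts per key.
    let mut results: HashMap<&str, u32> = HashMap::with_capacity(25000);
    for word in "to be or not to be".split_whitespace() {
        *results.entry(word).or_insert(0) += 1;
    }
    assert_eq!(results["to"], 2);
    assert_eq!(results["not"], 1);
}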
Updating git repository `https://github.com/reem/rust-unsafe-any.git`
Unable to update https://github.com/reem/rust-unsafe-any.git#75cff194

Caused by:
  failed to clone into: /tmp/cargo_ASPp/.cargo/git/db/rust-unsafe-any-3633b05955fd77c7

Caused by:
  [16] The SSL certificate is invalid
-- Using Lexique 3 from http://lexique.org/ with SQLite 3.
--
-- On the command line, to remove the header and extract the first 10 columns:
--
--     iconv -f ISO-8859-15 -t UTF-8 Lexique380/Bases+Scripts/Lexique380.txt | tail -n+2 | cut -f 1-10 > lexique1-10.txt

-- Set up our original data table. This is pretty raw.
PRAGMA encoding = "UTF-8";
CREATE TABLE lexique (
    ortho TEXT,
    phon TEXT,
-- Generated from Lexique 3. For raw data & Creative Commons License, see: http://lexique.org/ | |
-- Note that this word list has some peculiarities, because it was intended for use by language | |
-- processing software. | |
être | |
avoir | |
je | |
de | |
ne | |
pas | |
le |