Skip to content

Commit 7c71990

Browse files
committed
Updated magnus dependency from 0.6 to 0.7
1 parent c828ffe commit 7c71990

File tree

6 files changed

+26
-26
lines changed

6 files changed

+26
-26
lines changed

Cargo.lock

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

ext/tokenizers/Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ publish = false
1111
crate-type = ["cdylib"]
1212

1313
[dependencies]
14-
magnus = "0.6"
14+
magnus = "0.7"
1515
onig = { version = "6", default-features = false }
1616
serde = { version = "1", features = ["rc", "derive"] }
1717

ext/tokenizers/src/normalizers.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -222,8 +222,8 @@ pub struct RbSequence {}
222222
impl RbSequence {
223223
fn new(normalizers: RArray) -> RbResult<RbNormalizer> {
224224
let mut sequence = Vec::with_capacity(normalizers.len());
225-
for n in normalizers.each() {
226-
let normalizer: &RbNormalizer = TryConvert::try_convert(n?)?;
225+
for n in normalizers.into_iter() {
226+
let normalizer: &RbNormalizer = TryConvert::try_convert(n)?;
227227
match &normalizer.normalizer {
228228
RbNormalizerTypeWrapper::Sequence(inner) => sequence.extend(inner.iter().cloned()),
229229
RbNormalizerTypeWrapper::Single(inner) => sequence.push(inner.clone()),

ext/tokenizers/src/pre_tokenizers.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -258,8 +258,8 @@ pub struct RbSequence {}
258258
impl RbSequence {
259259
fn new(pre_tokenizers: RArray) -> RbResult<RbPreTokenizer> {
260260
let mut sequence = Vec::with_capacity(pre_tokenizers.len());
261-
for n in pre_tokenizers.each() {
262-
let pretokenizer: &RbPreTokenizer = TryConvert::try_convert(n?)?;
261+
for n in pre_tokenizers.into_iter() {
262+
let pretokenizer: &RbPreTokenizer = TryConvert::try_convert(n)?;
263263
match &pretokenizer.pretok {
264264
RbPreTokenizerTypeWrapper::Sequence(inner) => {
265265
sequence.extend(inner.iter().cloned())

ext/tokenizers/src/tokenizer.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -282,12 +282,12 @@ impl RbTokenizer {
282282
add_special_tokens: bool,
283283
) -> RbResult<RArray> {
284284
let input: Vec<tk::EncodeInput> = input
285-
.each()
285+
.into_iter()
286286
.map(|o| {
287287
let input: tk::EncodeInput = if is_pretokenized {
288-
PreTokenizedEncodeInput::try_convert(o?)?.into()
288+
PreTokenizedEncodeInput::try_convert(o)?.into()
289289
} else {
290-
TextEncodeInput::try_convert(o?)?.into()
290+
TextEncodeInput::try_convert(o)?.into()
291291
};
292292
Ok(input)
293293
})

ext/tokenizers/src/trainers.rs

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -110,9 +110,9 @@ impl RbTrainer {
110110
BpeTrainer,
111111
special_tokens,
112112
special_tokens
113-
.each()
113+
.into_iter()
114114
.map(|token| {
115-
if let Ok(content) = String::try_convert(token?) {
115+
if let Ok(content) = String::try_convert(token) {
116116
Ok(RbAddedToken::from(content, Some(true)).get_token())
117117
} else {
118118
todo!()
@@ -197,9 +197,9 @@ impl RbTrainer {
197197
UnigramTrainer,
198198
special_tokens,
199199
special_tokens
200-
.each()
200+
.into_iter()
201201
.map(|token| {
202-
if let Ok(content) = String::try_convert(token?) {
202+
if let Ok(content) = String::try_convert(token) {
203203
Ok(RbAddedToken::from(content, Some(true)).get_token())
204204
} else {
205205
todo!()
@@ -268,9 +268,9 @@ impl RbTrainer {
268268
WordLevelTrainer,
269269
special_tokens,
270270
special_tokens
271-
.each()
271+
.into_iter()
272272
.map(|token| {
273-
if let Ok(content) = String::try_convert(token?) {
273+
if let Ok(content) = String::try_convert(token) {
274274
Ok(RbAddedToken::from(content, Some(true)).get_token())
275275
} else {
276276
todo!()
@@ -322,9 +322,9 @@ impl RbTrainer {
322322
WordPieceTrainer,
323323
@set_special_tokens,
324324
special_tokens
325-
.each()
325+
.into_iter()
326326
.map(|token| {
327-
if let Ok(content) = String::try_convert(token?) {
327+
if let Ok(content) = String::try_convert(token) {
328328
Ok(RbAddedToken::from(content, Some(true)).get_token())
329329
} else {
330330
todo!()
@@ -398,9 +398,9 @@ impl RbBpeTrainer {
398398
if !value.is_nil() {
399399
builder = builder.special_tokens(
400400
RArray::try_convert(value)?
401-
.each()
401+
.into_iter()
402402
.map(|token| {
403-
if let Ok(content) = String::try_convert(token?) {
403+
if let Ok(content) = String::try_convert(token) {
404404
Ok(RbAddedToken::from(content, Some(true)).get_token())
405405
} else {
406406
todo!()
@@ -466,9 +466,9 @@ impl RbUnigramTrainer {
466466
if !value.is_nil() {
467467
builder.special_tokens(
468468
RArray::try_convert(value)?
469-
.each()
469+
.into_iter()
470470
.map(|token| {
471-
if let Ok(content) = String::try_convert(token?) {
471+
if let Ok(content) = String::try_convert(token) {
472472
Ok(RbAddedToken::from(content, Some(true)).get_token())
473473
} else {
474474
todo!()
@@ -540,9 +540,9 @@ impl RbWordLevelTrainer {
540540
if !value.is_nil() {
541541
builder.special_tokens(
542542
RArray::try_convert(value)?
543-
.each()
543+
.into_iter()
544544
.map(|token| {
545-
if let Ok(content) = String::try_convert(token?) {
545+
if let Ok(content) = String::try_convert(token) {
546546
Ok(RbAddedToken::from(content, Some(true)).get_token())
547547
} else {
548548
todo!()
@@ -581,9 +581,9 @@ impl RbWordPieceTrainer {
581581
if !value.is_nil() {
582582
builder = builder.special_tokens(
583583
RArray::try_convert(value)?
584-
.each()
584+
.into_iter()
585585
.map(|token| {
586-
if let Ok(content) = String::try_convert(token?) {
586+
if let Ok(content) = String::try_convert(token) {
587587
Ok(RbAddedToken::from(content, Some(true)).get_token())
588588
} else {
589589
todo!()

0 commit comments

Comments (0)