use rustpython_parser::lexer;

use crate::function::PyFuncArgs;
use crate::obj::objstr;
use crate::pyobject::{PyObjectRef, PyResult};
use crate::vm::VirtualMachine;
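/// `tokenize.tokenize` for this VM. Unlike CPython's version, which consumes
/// a `readline` callable yielding bytes, this takes the entire source as one
/// `str` argument and returns a list of the `Debug` representations of the
/// lexed tokens (a placeholder rather than CPython's `TokenInfo` tuples).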
fn tokenize_tokenize(vm: &VirtualMachine, args: PyFuncArgs) -> PyResult {
    arg_check!(vm, args, required = [(readline, Some(vm.ctx.str_type()))]);
    let source = objstr::borrow_value(readline);

    // Lex the source and render each token's Debug form as a Python str.
    // NOTE: unwrap() panics on a lexical error instead of raising SyntaxError.
    let tokens: Vec<_> = lexer::make_tokenizer(source)
        .map(|st| vm.ctx.new_str(format!("{:?}", st.unwrap().1)))
        .collect();
    Ok(vm.ctx.new_list(tokens))
}
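/// Creates the Python `tokenize` module object, exposing the single
/// `tokenize` function defined above.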
pub fn make_module(vm: &VirtualMachine) -> PyObjectRef {
    let ctx = &vm.ctx;
    py_module!(vm, "tokenize", {
        "tokenize" => ctx.new_function(tokenize_tokenize)
    })
}
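// A minimal usage sketch from Python code running on this VM (assuming the
// module is importable as `tokenize`):
//
//     import tokenize
//     for tok in tokenize.tokenize("x = 1\n"):
//         print(tok)  # debug strings such as Name { .. }, Equal, Number { .. }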