Merged
5 changes: 4 additions & 1 deletion code/ulab.c
@@ -32,7 +32,7 @@
#include "user/user.h"
#include "vector/vectorise.h"

#define ULAB_VERSION 1.0.0
#define ULAB_VERSION 1.1.0
#define xstr(s) str(s)
#define str(s) #s
#if ULAB_NUMPY_COMPATIBILITY
@@ -126,6 +126,9 @@ STATIC const mp_map_elem_t ulab_globals_table[] = {
#if ULAB_CREATE_HAS_CONCATENATE
{ MP_ROM_QSTR(MP_QSTR_concatenate), (mp_obj_t)&create_concatenate_obj },
#endif
#if ULAB_CREATE_HAS_DIAGONAL
{ MP_ROM_QSTR(MP_QSTR_diagonal), (mp_obj_t)&create_diagonal_obj },
#endif
#if ULAB_MAX_DIMS > 1
#if ULAB_CREATE_HAS_EYE
{ MP_ROM_QSTR(MP_QSTR_eye), (mp_obj_t)&create_eye_obj },
1 change: 1 addition & 0 deletions code/ulab.h
@@ -123,6 +123,7 @@
// module constant
#define ULAB_CREATE_HAS_ARANGE (1)
#define ULAB_CREATE_HAS_CONCATENATE (1)
#define ULAB_CREATE_HAS_DIAGONAL (1)
#define ULAB_CREATE_HAS_EYE (1)
#define ULAB_CREATE_HAS_FULL (1)
#define ULAB_CREATE_HAS_LINSPACE (1)
61 changes: 61 additions & 0 deletions code/ulab_create.c
@@ -271,6 +271,67 @@ mp_obj_t create_concatenate(size_t n_args, const mp_obj_t *pos_args, mp_map_t *k
MP_DEFINE_CONST_FUN_OBJ_KW(create_concatenate_obj, 1, create_concatenate);
#endif

#if ULAB_CREATE_HAS_DIAGONAL
//| def diagonal(a: ulab.array, *, offset: int = 0) -> ulab.array:
//| """
//| .. param: a
//| an ndarray
//| .. param: offset
//| Offset of the diagonal from the main diagonal. Can be positive or negative.
//|
//| Return specified diagonals."""
//| ...
//|
mp_obj_t create_diagonal(size_t n_args, const mp_obj_t *pos_args, mp_map_t *kw_args) {
static const mp_arg_t allowed_args[] = {
{ MP_QSTR_, MP_ARG_REQUIRED | MP_ARG_OBJ, { .u_rom_obj = mp_const_none } },
{ MP_QSTR_offset, MP_ARG_KW_ONLY | MP_ARG_INT, { .u_int = 0 } },
};

mp_arg_val_t args[MP_ARRAY_SIZE(allowed_args)];
mp_arg_parse_all(n_args, pos_args, kw_args, MP_ARRAY_SIZE(allowed_args), allowed_args, args);

if(!MP_OBJ_IS_TYPE(args[0].u_obj, &ulab_ndarray_type)) {
mp_raise_TypeError(translate("input must be an ndarray"));
}
ndarray_obj_t *source = MP_OBJ_TO_PTR(args[0].u_obj);
if(source->ndim != 2) {
mp_raise_TypeError(translate("input must be a tensor of rank 2"));
}
int32_t offset = args[1].u_int;
size_t len = 0;
uint8_t *sarray = (uint8_t *)source->array;
if(offset < 0) { // move the pointer "vertically"
sarray -= offset * source->strides[ULAB_MAX_DIMS - 2];
if(-offset < (int32_t)source->shape[ULAB_MAX_DIMS - 2]) {
len = source->shape[ULAB_MAX_DIMS - 1] + offset;
}
} else { // move the pointer "horizontally"
if(offset < (int32_t)source->shape[ULAB_MAX_DIMS - 1]) {
len = source->shape[ULAB_MAX_DIMS - 1] - offset;
}
sarray += offset * source->strides[ULAB_MAX_DIMS - 1];
}

if(len == 0) {
mp_raise_ValueError(translate("offset is too large"));
}

ndarray_obj_t *target = ndarray_new_linear_array(len, source->dtype);
uint8_t *tarray = (uint8_t *)target->array;

for(size_t i=0; i < len; i++) {
memcpy(tarray, sarray, source->itemsize);
sarray += source->strides[ULAB_MAX_DIMS - 2];
sarray += source->strides[ULAB_MAX_DIMS - 1];
tarray += source->itemsize;
}
return MP_OBJ_FROM_PTR(target);
}

MP_DEFINE_CONST_FUN_OBJ_KW(create_diagonal_obj, 1, create_diagonal);
#endif /* ULAB_CREATE_HAS_DIAGONAL */
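
For reference, a minimal usage sketch of the new function from the Python side (not part of the PR): it assumes the plain ulab namespace and the ulab.array constructor named in the docstring above, and the comments show the resulting values rather than the exact printed repr, which depends on the build and dtype.

import ulab

a = ulab.array([[1, 2, 3],
                [4, 5, 6],
                [7, 8, 9]])

ulab.diagonal(a)             # main diagonal            -> 1, 5, 9
ulab.diagonal(a, offset=1)   # one column to the right  -> 2, 6
ulab.diagonal(a, offset=-1)  # one row down             -> 4, 8
ulab.diagonal(a, offset=9)   # no elements left         -> ValueError: offset is too large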

#if ULAB_MAX_DIMS > 1
#if ULAB_CREATE_HAS_EYE
//| def eye(size: int, *, M: Optional[int] = None, k: int = 0, dtype: _DType = ulab.float) -> ulab.array:
5 changes: 5 additions & 0 deletions code/ulab_create.h
@@ -25,6 +25,11 @@ mp_obj_t create_concatenate(size_t , const mp_obj_t *, mp_map_t *);
MP_DECLARE_CONST_FUN_OBJ_KW(create_concatenate_obj);
#endif

#if ULAB_CREATE_HAS_DIAGONAL
mp_obj_t create_diagonal(size_t , const mp_obj_t *, mp_map_t *);
MP_DECLARE_CONST_FUN_OBJ_KW(create_diagonal_obj);
#endif

#if ULAB_MAX_DIMS > 1
#if ULAB_CREATE_HAS_EYE
mp_obj_t create_eye(size_t , const mp_obj_t *, mp_map_t *);
230 changes: 183 additions & 47 deletions docs/manual/extract_pyi.py
@@ -2,86 +2,222 @@
#
# SPDX-License-Identifier: MIT

# Run with 'python tools/extract_pyi.py shared-bindings/ path/to/stub/dir'
# You can also test a specific library in shared-bindings by putting the path
# to that directory instead

import ast
import os
import re
import sys
import astroid
import traceback

top_level = sys.argv[1].strip("/")
stub_directory = sys.argv[2]
import isort
import black


IMPORTS_IGNORE = frozenset({'int', 'float', 'bool', 'str', 'bytes', 'tuple', 'list', 'set', 'dict', 'bytearray', 'slice', 'file', 'buffer', 'range', 'array', 'struct_time'})
IMPORTS_TYPING = frozenset({'Any', 'Optional', 'Union', 'Tuple', 'List', 'Sequence', 'NamedTuple', 'Iterable', 'Iterator', 'Callable', 'AnyStr', 'overload', 'Type'})
IMPORTS_TYPES = frozenset({'TracebackType'})
CPY_TYPING = frozenset({'ReadableBuffer', 'WriteableBuffer', 'AudioSample', 'FrameBuffer'})


def is_typed(node, allow_any=False):
if node is None:
return False
if allow_any:
return True
elif isinstance(node, ast.Name) and node.id == "Any":
return False
elif isinstance(node, ast.Attribute) and type(node.value) == ast.Name \
and node.value.id == "typing" and node.attr == "Any":
return False
return True


def find_stub_issues(tree):
for node in ast.walk(tree):
if isinstance(node, ast.AnnAssign):
if not is_typed(node.annotation):
yield ("WARN", f"Missing attribute type on line {node.lineno}")
if isinstance(node.value, ast.Constant) and node.value.value == Ellipsis:
yield ("WARN", f"Unnecessary Ellipsis assignment (= ...) on line {node.lineno}.")
elif isinstance(node, ast.Assign):
if isinstance(node.value, ast.Constant) and node.value.value == Ellipsis:
yield ("WARN", f"Unnecessary Ellipsis assignment (= ...) on line {node.lineno}.")
elif isinstance(node, ast.arguments):
allargs = list(node.args + node.kwonlyargs)
if sys.version_info >= (3, 8):
allargs.extend(node.posonlyargs)
for arg_node in allargs:
if not is_typed(arg_node.annotation) and (arg_node.arg != "self" and arg_node.arg != "cls"):
yield ("WARN", f"Missing argument type: {arg_node.arg} on line {arg_node.lineno}")
if node.vararg and not is_typed(node.vararg.annotation, allow_any=True):
yield ("WARN", f"Missing argument type: *{node.vararg.arg} on line {node.vararg.lineno}")
if node.kwarg and not is_typed(node.kwarg.annotation, allow_any=True):
yield ("WARN", f"Missing argument type: **{node.kwarg.arg} on line {node.kwarg.lineno}")
elif isinstance(node, ast.FunctionDef):
if not is_typed(node.returns):
yield ("WARN", f"Missing return type: {node.name} on line {node.lineno}")


def extract_imports(tree):
modules = set()
typing = set()
types = set()
cpy_typing = set()

def collect_annotations(anno_tree):
if anno_tree is None:
return
for node in ast.walk(anno_tree):
if isinstance(node, ast.Name):
if node.id in IMPORTS_IGNORE:
continue
elif node.id in IMPORTS_TYPING:
typing.add(node.id)
elif node.id in IMPORTS_TYPES:
types.add(node.id)
elif node.id in CPY_TYPING:
cpy_typing.add(node.id)
elif isinstance(node, ast.Attribute):
if isinstance(node.value, ast.Name):
modules.add(node.value.id)

for node in ast.walk(tree):
if isinstance(node, (ast.AnnAssign, ast.arg)):
collect_annotations(node.annotation)
elif isinstance(node, ast.Assign):
collect_annotations(node.value)
elif isinstance(node, ast.FunctionDef):
collect_annotations(node.returns)
for deco in node.decorator_list:
if isinstance(deco, ast.Name) and (deco.id in IMPORTS_TYPING):
typing.add(deco.id)

return {
"modules": sorted(modules),
"typing": sorted(typing),
"types": sorted(types),
"cpy_typing": sorted(cpy_typing),
}


def find_references(tree):
for node in ast.walk(tree):
if isinstance(node, ast.arguments):
for node in ast.walk(node):
if isinstance(node, ast.Attribute):
if isinstance(node.value, ast.Name) and node.value.id[0].isupper():
yield node.value.id


def convert_folder(top_level, stub_directory):
ok = 0
total = 0
filenames = sorted(os.listdir(top_level))
pyi_lines = []
stub_fragments = []
references = set()

for filename in filenames:
full_path = os.path.join(top_level, filename)
file_lines = []
if os.path.isdir(full_path):
mok, mtotal = convert_folder(full_path, os.path.join(stub_directory, filename))
(mok, mtotal) = convert_folder(full_path, os.path.join(stub_directory, filename))
ok += mok
total += mtotal
elif filename.endswith(".c"):
with open(full_path, "r") as f:
with open(full_path, "r", encoding="utf-8") as f:
for line in f:
line = line.rstrip()
if line.startswith("//|"):
if line[3] == " ":
if len(line) == 3:
line = ""
elif line[3] == " ":
line = line[4:]
elif line[3] == "\n":
line = line[3:]
else:
continue
line = line[3:]
print("[WARN] There must be at least one space after '//|'")
file_lines.append(line)
elif filename.endswith(".pyi"):
with open(full_path, "r") as f:
file_lines.extend(f.readlines())
file_lines.extend(line.rstrip() for line in f)

fragment = "\n".join(file_lines).strip()
try:
tree = ast.parse(fragment)
except SyntaxError as e:
print(f"[ERROR] Failed to parse a Python stub from {full_path}")
traceback.print_exception(type(e), e, e.__traceback__)
return (ok, total + 1)
references.update(find_references(tree))

# Always put the contents from an __init__ first.
if filename.startswith("__init__."):
pyi_lines = file_lines + pyi_lines
else:
pyi_lines.extend(file_lines)
if fragment:
name = os.path.splitext(os.path.basename(filename))[0]
if name == "__init__" or (name in references):
stub_fragments.insert(0, fragment)
else:
stub_fragments.append(fragment)

if not pyi_lines:
return ok, total
if not stub_fragments:
return (ok, total)

stub_filename = os.path.join(stub_directory, "__init__.pyi")
print(stub_filename)
stub_contents = "".join(pyi_lines)
stub_contents = "\n\n".join(stub_fragments)

# Validate the stub code.
try:
tree = ast.parse(stub_contents)
except SyntaxError as e:
traceback.print_exception(type(e), e, e.__traceback__)
return (ok, total)

error = False
for (level, msg) in find_stub_issues(tree):
if level == "ERROR":
error = True
print(f"[{level}] {msg}")

total += 1
if not error:
ok += 1

# Add import statements
imports = extract_imports(tree)
import_lines = ["from __future__ import annotations"]
if imports["types"]:
import_lines.append("from types import " + ", ".join(imports["types"]))
if imports["typing"]:
import_lines.append("from typing import " + ", ".join(imports["typing"]))
if imports["cpy_typing"]:
import_lines.append("from _typing import " + ", ".join(imports["cpy_typing"]))
import_lines.extend(f"import {m}" for m in imports["modules"])
import_body = "\n".join(import_lines)
m = re.match(r'(\s*""".*?""")', stub_contents, flags=re.DOTALL)
if m:
stub_contents = m.group(1) + "\n\n" + import_body + "\n\n" + stub_contents[m.end():]
else:
stub_contents = import_body + "\n\n" + stub_contents

# Code formatting
stub_contents = isort.code(stub_contents)
stub_contents = black.format_str(stub_contents, mode=black.FileMode(is_pyi=True))

os.makedirs(stub_directory, exist_ok=True)
with open(stub_filename, "w") as f:
f.write(stub_contents)

# Validate that the module is a parseable stub.
total += 1
try:
tree = astroid.parse(stub_contents)
for i in tree.body:
if 'name' in i.__dict__:
print(i.__dict__['name'])
for j in i.body:
if isinstance(j, astroid.scoped_nodes.FunctionDef):
if None in j.args.__dict__['annotations']:
print(f"Missing parameter type: {j.__dict__['name']} on line {j.__dict__['lineno']}\n")
if j.returns:
if 'Any' in j.returns.__dict__.values():
print(f"Missing return type: {j.__dict__['name']} on line {j.__dict__['lineno']}")
elif isinstance(j, astroid.node_classes.AnnAssign):
if 'name' in j.__dict__['annotation'].__dict__:
if j.__dict__['annotation'].__dict__['name'] == 'Any':
print(f"missing attribute type on line {j.__dict__['lineno']}")
return (ok, total)

ok += 1
except astroid.exceptions.AstroidSyntaxError as e:
e = e.__cause__
traceback.print_exception(type(e), e, e.__traceback__)
print()
return ok, total

ok, total = convert_folder(top_level, stub_directory)
if __name__ == "__main__":
top_level = sys.argv[1].strip("/")
stub_directory = sys.argv[2]

(ok, total) = convert_folder(top_level, stub_directory)

print(f"{ok} ok out of {total}")
print(f"Parsing .pyi files: {total - ok} failed, {ok} passed")

if ok != total:
sys.exit(total - ok)
if ok != total:
sys.exit(total - ok)
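
To make the new "//|" extraction rules concrete, here is a small standalone sketch of the comment-stripping loop from convert_folder() above. The loop body and the file_lines name come from the script; the sample C source text and the sample_c_source name are invented for illustration.

# Sketch of the "//|" handling in convert_folder(); sample input is made up.
sample_c_source = '''\
//| def diagonal(a: ulab.array, *, offset: int = 0) -> ulab.array:
//|     """Return specified diagonals."""
//|     ...
//|
static void unrelated_c_code(void) {}
'''

file_lines = []
for line in sample_c_source.splitlines():
    line = line.rstrip()
    if line.startswith("//|"):
        if len(line) == 3:        # a bare "//|" marks an intentionally blank stub line
            line = ""
        elif line[3] == " ":      # normal case: drop the "//| " prefix
            line = line[4:]
        else:                     # text glued to the marker: keep it, but warn
            line = line[3:]
            print("[WARN] There must be at least one space after '//|'")
        file_lines.append(line)

print("\n".join(file_lines))
# Prints the stub fragment that convert_folder() would then ast.parse(),
# prepend imports to, and run through isort and black:
#
# def diagonal(a: ulab.array, *, offset: int = 0) -> ulab.array:
#     """Return specified diagonals."""
#     ...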