feat: add support for algorand-python 2.7 features #32

Merged
merged 23 commits on Feb 19, 2025
Changes from all commits (23 commits)
98b23a1
chore: update coverage script
daniel-makerx Feb 14, 2025
ea1cd66
chore: ensure unimplemented stubs appear at top of check stubs report
daniel-makerx Feb 14, 2025
83e1422
chore: improve coverage of some excluded implementations
daniel-makerx Feb 14, 2025
6a3c854
fix: added missing __contains__ implementation for `algopy.Bytes`
daniel-makerx Feb 14, 2025
d43ecca
fix: update `algopy.CompiledContract` and `algopy.CompiledLogicSig` t…
daniel-makerx Feb 14, 2025
64b21ef
fix: include `ARC4Contract` in `algopy.arc4` namespace
daniel-makerx Feb 14, 2025
113e531
fix: add missing mappings for `algopy.op.Txn` members
daniel-makerx Feb 14, 2025
ce01316
refactor: simplify EllipticCurve mock
daniel-makerx Feb 14, 2025
5ed97be
chore: further coverage check improvements
daniel-makerx Feb 14, 2025
d28b6ee
refactor: add explicit methods to AcctParamsGet and AssetParamsGet
daniel-makerx Feb 14, 2025
30e53a5
feat: add `algopy.arc4.Struct._replace` introduced in algorand-python…
daniel-makerx Feb 14, 2025
b22fde4
feat: update `algopy.op.Global` with fields added in AVM 11
daniel-makerx Feb 14, 2025
5d9a993
feat: add `avm_version` to `algopy.Contract` class options
daniel-makerx Feb 14, 2025
089e9a1
feat: update `algopy.op.AcctParamsGet` with fields added in AVM 11
daniel-makerx Feb 14, 2025
767cdf0
chore: refresh test artifacts
daniel-makerx Feb 14, 2025
995b517
chore: ignore puya map files from artifacts
daniel-makerx Feb 14, 2025
45a04de
feat: update `algopy.op.Block` with fields added in AVM 11
daniel-makerx Feb 14, 2025
ac40679
feat: support mocking new `algopy.op` functions `falcon_verify`, `mim…
daniel-makerx Feb 14, 2025
d90d4be
chore: ensure refresh_test_artifacts captures all contracts
daniel-makerx Feb 17, 2025
1e939c3
chore: refresh test artifacts
daniel-makerx Feb 17, 2025
db67193
test: add array test artifacts
daniel-makerx Feb 17, 2025
910f166
feat: support `algopy.Array` and `algopy.ImmutableArray` from algoran…
daniel-makerx Feb 14, 2025
fa507ff
docs: fix doctest example for `algopy.EllipticCurve`
daniel-makerx Feb 18, 2025
1 change: 1 addition & 0 deletions .gitignore
@@ -17,3 +17,4 @@ coverage.xml
.venv*

.cursorignore
*.puya.map
18 changes: 10 additions & 8 deletions docs/testing-guide/opcodes.md
@@ -353,20 +353,22 @@ test_mock_vrf_verify()
from unittest.mock import patch, MagicMock
import algopy

def test_mock_elliptic_curve_decompress():
mock_result = (algopy.Bytes(b'x_coord'), algopy.Bytes(b'y_coord'))
with patch('algopy.op.EllipticCurve.decompress', return_value=mock_result) as mock_decompress:
result = algopy.op.EllipticCurve.decompress(
def test_mock_elliptic_curve_add():
mock_result = algopy.Bytes(b'result')
with patch('algopy.op.EllipticCurve.add', return_value=mock_result) as mock_add:
result = algopy.op.EllipticCurve.add(
algopy.op.EC.BN254g1,
algopy.Bytes(b'compressed_point')
algopy.Bytes(b'a'),
algopy.Bytes(b'b')
)
assert result == mock_result
mock_decompress.assert_called_once_with(
mock_add.assert_called_once_with(
algopy.op.EC.BN254g1,
algopy.Bytes(b'compressed_point')
algopy.Bytes(b'a'),
algopy.Bytes(b'b'),
)

test_mock_elliptic_curve_decompress()
test_mock_elliptic_curve_add()
```

These examples demonstrate how to mock key mockable opcodes in `algorand-python-testing`. Use similar techniques (in your preferred testing framework) for other mockable opcodes like `algopy.compile_logicsig`, `algopy.arc4.arc4_create`, and `algopy.arc4.arc4_update`.
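For example, `algopy.compile_logicsig` can be patched with the same pattern. The snippet below is a minimal sketch only: it stubs both the argument and the return value with `MagicMock`, so it illustrates the patching approach rather than the real signature or return type of `algopy.compile_logicsig`.

```python
from unittest.mock import patch, MagicMock
import algopy

def test_mock_compile_logicsig():
    # stand-in for the compiled logic sig result; the real return type is not modelled here
    mock_result = MagicMock()
    with patch('algopy.compile_logicsig', return_value=mock_result) as mock_compile:
        # the argument is a placeholder; pass your actual logic signature in practice
        result = algopy.compile_logicsig(MagicMock())
        assert result == mock_result
        mock_compile.assert_called_once()

test_mock_compile_logicsig()
```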
114 changes: 85 additions & 29 deletions scripts/check_stubs_cov.py
@@ -1,7 +1,9 @@
import ast
import importlib
import inspect
import site
import sys
import typing
from collections.abc import Iterable
from pathlib import Path
from typing import NamedTuple
@@ -13,6 +15,40 @@
STUBS_ROOT = SITE_PACKAGES / "algopy-stubs"
IMPL = PROJECT_ROOT / "src"
ROOT_MODULE = "algopy"
_ADDITIONAL_GLOBAL_IMPLS = [
"_algopy_testing.op.global_values._Global",
"_algopy_testing.op.global_values.GlobalFields",
]
_ADDITIONAL_TXN_IMPLS = [
"_algopy_testing.models.txn_fields.TransactionFields",
"_algopy_testing.models.txn_fields.TransactionFieldsGetter",
"_algopy_testing.op.constants.OP_MEMBER_TO_TXN_MEMBER",
]

# mapping of stub types to additional implementation types to scan for members
_ADDITIONAL_TYPE_IMPLS = {
"algopy.Asset": ["_algopy_testing.models.asset.AssetFields"],
"algopy.Account": ["_algopy_testing.models.account.AccountFields"],
"algopy.Application": ["_algopy_testing.models.application.ApplicationFields"],
"algopy.Global": _ADDITIONAL_GLOBAL_IMPLS,
"algopy.Txn": _ADDITIONAL_TXN_IMPLS,
"algopy.op.Global": _ADDITIONAL_GLOBAL_IMPLS,
"algopy.op.GTxn": _ADDITIONAL_TXN_IMPLS,
"algopy.op.GITxn": _ADDITIONAL_TXN_IMPLS,
"algopy.op.Txn": _ADDITIONAL_TXN_IMPLS,
"algopy.op.ITxn": _ADDITIONAL_TXN_IMPLS,
"algopy.op.ITxnCreate": _ADDITIONAL_TXN_IMPLS,
"algopy.op.AppParamsGet": ["_algopy_testing.op.misc._AppParamsGet"],
"algopy.op.AssetHoldingGet": ["_algopy_testing.op.misc._AssetHoldingGet"],
"algopy.op.AppGlobal": ["_algopy_testing.op.misc._AppGlobal"],
"algopy.op.AppLocal": ["_algopy_testing.op.misc._AppLocal"],
"algopy.op.Scratch": ["_algopy_testing.op.misc._Scratch"],
}

# mapping of stub types to members that may be present but not found when discovering members
_ADDITIONAL_MEMBERS = {
"algopy.Asset": ["id"],
}


class ASTNodeDefinition(NamedTuple):
@@ -127,9 +163,6 @@ def collect_stubs(stubs_dir: Path, relative_module: str) -> dict[str, ASTNodeDef
def collect_coverage(stubs: dict[str, ASTNodeDefinition]) -> list[CoverageResult]:
result = []
for full_name, stub in stubs.items():
if "GTxn" in full_name:
print("stop")

coverage = _get_impl_coverage(full_name, stub)
if coverage:
try:
@@ -145,7 +178,7 @@ def collect_coverage(stubs: dict[str, ASTNodeDefinition]) -> list[CoverageResult
CoverageResult(
full_name=full_name,
stub_file=str(stub.path.relative_to(STUBS_ROOT)),
impl_file=impl_file,
impl_file=impl_file or "MISSING!",
coverage=coverage.coverage if coverage else 0,
missing=", ".join(coverage.missing if coverage else []),
)
@@ -156,18 +189,16 @@ def collect_coverage(stubs: dict[str, ASTNodeDefinition]) -> list[CoverageResult
def print_results(results: list[CoverageResult]) -> None:
table = PrettyTable(
field_names=["Name", "Implementation", "Source Stub", "Coverage", "Missing"],
sortby="Coverage",
header=True,
border=True,
padding_width=2,
reversesort=True,
left_padding_width=0,
right_padding_width=1,
align="l",
max_width=100,
)

for result in results:
for result in sorted(results, key=lambda c: c.coverage):
table.add_row(
[
result.full_name,
@@ -209,7 +240,6 @@ def _get_impl_coverage(symbol: str, stub: ASTNodeDefinition) -> ImplCoverage | N
try:
impl = getattr(mod, name)
except AttributeError:
print(f"Attribute {name} not found in module {module}")
return None

try:
@@ -219,39 +249,63 @@ def _get_impl_coverage(symbol: str, stub: ASTNodeDefinition) -> ImplCoverage | N
if hasattr(impl, "__class__"):
try:
impl_path = Path(inspect.getfile(impl.__class__))
# For special cases like GTxn and GITxn, assume full implementation
if name in [
"GTxn",
"GITxn",
"Txn",
"ITxn",
"Global",
"AssetConfigInnerTransaction",
"Contract",
"ApplicationCallInnerTransaction",
"UFixedNxM",
"BigUFixedNxM",
]:
return ImplCoverage(impl_path)
except TypeError:
print(f"Warning: Could not determine file for {symbol}")
return None
else:
print(f"Warning: Could not determine file for {symbol}")
return None

return _compare_stub_impl(stub.node, impl, impl_path)
return _compare_stub_impl(stub.node, symbol, impl, impl_path)


def _get_impl_members(impl_name: str, impl: object) -> set[str]:
if isinstance(impl, type):
impl_mros: list[object] = [
typ for typ in impl.mro() if typ.__module__.startswith("_algopy_testing")
]
else:
impl_mros = []
for additional_type in _ADDITIONAL_TYPE_IMPLS.get(impl_name, []):
impl_mros.append(_resolve_fullname(additional_type))

impl_members = set[str](_ADDITIONAL_MEMBERS.get(impl_name, []))
for impl_typ in impl_mros:
if typing.is_typeddict(impl_typ) and isinstance(impl_typ, type):
for typed_dict_mro in impl_typ.mro():
ann = getattr(typed_dict_mro, "__annotations__", None)
if isinstance(ann, dict):
impl_members.update(ann.keys())
elif isinstance(impl_typ, dict):
impl_members.update(impl_typ.keys())
elif isinstance(impl_typ, type):
members = list(vars(impl_typ).keys())
impl_members.update(members)
else:
raise ValueError(f"unexpected implementation type, {impl_typ}")
# special case for ITxnCreate
if impl_name == "algopy.op.ITxnCreate":
impl_members = {f"set_{member}" for member in impl_members}
impl_members.update(("begin", "next", "submit"))
return impl_members


def _resolve_fullname(fullname: str) -> object:
# note this assumes no nested classes
module_name, type_name = fullname.rsplit(".", maxsplit=1)
module = importlib.import_module(module_name)
return getattr(module, type_name)

def _compare_stub_impl(stub: ast.AST, impl: object, impl_path: Path) -> ImplCoverage:

def _compare_stub_impl(
stub: ast.AST, impl_name: str, impl: object, impl_path: Path
) -> ImplCoverage:
# classes are really the only types that can be "partially implemented"
# from a typing perspective
if not isinstance(stub, ast.ClassDef):
# algopy.uenumerate is typed as a class, but is really just a function
if not isinstance(stub, ast.ClassDef) or impl_name == "algopy.uenumerate":
return ImplCoverage(impl_path)

# using vars to only get explicitly implemented members
# need more sophisticated approach if implementations start using inheritance
impl_members = set(vars(impl))
impl_members = _get_impl_members(impl_name, impl)
stub_members = set()
for stmt in stub.body:
if isinstance(stmt, ast.FunctionDef):
Expand Down Expand Up @@ -287,6 +341,8 @@ def _compare_stub_impl(stub: ast.AST, impl: object, impl_path: Path) -> ImplCove
"ne",
)
}
# excluding special fields used in typing hints
default_impls.update(("__match_args__", "__match_value__"))
missing = sorted(stub_members.difference({*impl_members, *default_impls}))
return ImplCoverage(impl_path, sorted(stub_members), missing)

4 changes: 2 additions & 2 deletions scripts/refresh_test_artifacts.py
@@ -16,13 +16,13 @@

def get_artifact_folders(root_dir: str) -> Iterator[Path]:
for folder in Path(root_dir).iterdir():
if folder.is_dir() and (folder / "contract.py").exists():
if folder.is_dir() and not str(folder.stem).startswith((".", "__")):
yield folder


def compile_contract(folder: Path) -> None:
logger.info(f"Compiling: {folder}")
contract_path = folder / "contract.py"
contract_path = folder
(folder / "data").mkdir(exist_ok=True)
compile_cmd = [
"hatch",
62 changes: 30 additions & 32 deletions src/_algopy_testing/arc4.py
@@ -17,6 +17,7 @@
UINT512_SIZE,
)
from _algopy_testing.models.account import Account
from _algopy_testing.models.contract import ARC4Contract
from _algopy_testing.mutable import (
MutableBytes,
add_mutable_callback,
@@ -42,6 +43,7 @@

__all__ = [
"ARC4Client",
"ARC4Contract",
"Address",
"BigUFixedNxM",
"BigUIntN",
@@ -743,7 +745,7 @@ def is_dynamic(self) -> bool:
return True


class _DynamicArrayMeta(type(_ABIEncoded), typing.Generic[_TArrayItem, _TArrayLength]): # type: ignore # noqa: PGH003
class _DynamicArrayMeta(type(_ABIEncoded), typing.Generic[_TArrayItem]): # type: ignore[misc]
__concrete__: typing.ClassVar[dict[type, type]] = {}

def __getitem__(cls, key_t: type[_TArrayItem]) -> type:
@@ -1013,17 +1015,18 @@ def __repr__(self) -> str:


class _StructTypeInfo(_TypeInfo):
def __init__(self, struct_type: type[Struct]) -> None:
def __init__(self, struct_type: type[Struct], *, frozen: bool) -> None:
self.struct_type = struct_type
self.fields = dataclasses.fields(struct_type)
self.field_names = [field.name for field in self.fields]
self.frozen = frozen

@property
def typ(self) -> type:
return self.struct_type

@property
def child_types(self) -> Iterable[_TypeInfo]:
def child_types(self) -> list[_TypeInfo]:
return _tuple_type_from_struct(self.struct_type)._type_info.child_types

@property
@@ -1054,8 +1057,11 @@ class Struct(MutableBytes, _ABIEncoded, metaclass=_StructMeta): # type: ignore[
_type_info: typing.ClassVar[_StructTypeInfo] # type: ignore[misc]

def __init_subclass__(cls, *args: typing.Any, **kwargs: dict[str, typing.Any]) -> None:
dataclasses.dataclass(cls, *args, **kwargs)
cls._type_info = _StructTypeInfo(cls)
# make implementation not frozen, so we can conditionally control behaviour
dataclasses.dataclass(cls, *args, **{**kwargs, "frozen": False})
frozen = kwargs.get("frozen", False)
assert isinstance(frozen, bool)
cls._type_info = _StructTypeInfo(cls, frozen=frozen)

def __post_init__(self) -> None:
# calling base class here to init Mutable
@@ -1071,6 +1077,10 @@ def __setattr__(self, key: str, value: typing.Any) -> None:
super().__setattr__(key, value)
# don't update backing value until base class has been init'd
if hasattr(self, "_on_mutate") and key in self._type_info.field_names:
if self._type_info.frozen:
raise dataclasses.FrozenInstanceError(
f"{type(self)} is frozen and cannot be modified"
)
self._update_backing_value()

def _update_backing_value(self) -> None:
@@ -1094,6 +1104,12 @@ def _as_tuple(self) -> Tuple: # type: ignore[type-arg]
tuple_items = tuple(getattr(self, field.name) for field in dataclasses.fields(self))
return Tuple(tuple_items)

def _replace(self, **kwargs: typing.Any) -> typing.Self:
copy = self.copy()
for field, value in kwargs.items():
setattr(copy, field, value)
return copy


class ARC4Client:
pass
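A quick usage sketch of the new `Struct._replace` helper shown in the hunk above (not part of the diff; the `Point` struct and its field values are illustrative assumptions):

```python
from algopy import arc4

class Point(arc4.Struct):
    x: arc4.UInt64
    y: arc4.UInt64

point = Point(arc4.UInt64(1), arc4.UInt64(2))
# _replace returns a copy with the given fields updated, leaving the original untouched
moved = point._replace(y=arc4.UInt64(5))
assert moved.y == arc4.UInt64(5)
assert point.y == arc4.UInt64(2)
```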
@@ -1146,34 +1162,16 @@ def emit(event: str | Struct, /, *args: object) -> None:
log(event_hash[:4] + event_data.value)


def native_value_to_arc4(value: object) -> _ABIEncoded: # noqa: PLR0911
import algopy

if isinstance(value, _ABIEncoded):
return value
if isinstance(value, bool):
return Bool(value)
if isinstance(value, algopy.UInt64):
return UInt64(value)
if isinstance(value, algopy.BigUInt):
return UInt512(value)
if isinstance(value, algopy.Bytes):
return DynamicBytes(value)
if isinstance(value, algopy.String):
return String(value)
if isinstance(value, tuple):
return Tuple(tuple(map(native_value_to_arc4, value)))
raise TypeError(f"Unsupported type: {type(value).__name__}")


def _cast_arg_as_arc4(arg: object) -> _ABIEncoded:
from _algopy_testing.serialize import native_to_arc4

if isinstance(arg, int) and not isinstance(arg, bool):
return UInt64(arg) if arg <= MAX_UINT64 else UInt512(arg)
if isinstance(arg, bytes):
return DynamicBytes(arg)
if isinstance(arg, str):
return String(arg)
return native_value_to_arc4(arg)
return native_to_arc4(arg)


def _find_bool(
@@ -1237,13 +1235,13 @@ def _get_max_bytes_len(type_info: _TypeInfo) -> int:
size = 0
if isinstance(type_info, _DynamicArrayTypeInfo):
size += _ABI_LENGTH_SIZE
elif isinstance(type_info, _TupleTypeInfo | _StaticArrayTypeInfo):
elif isinstance(type_info, _TupleTypeInfo | _StructTypeInfo | _StaticArrayTypeInfo):
i = 0
child_types = (
type_info.child_types
if isinstance(type_info, _TupleTypeInfo)
else [type_info.item_type] * type_info.size
)
if isinstance(type_info, _TupleTypeInfo | _StructTypeInfo):
child_types = type_info.child_types
else:
typing.assert_type(type_info, _StaticArrayTypeInfo)
child_types = [type_info.item_type] * type_info.size
while i < len(child_types):
if isinstance(child_types[i], _BoolTypeInfo):
after = _find_bool_types(child_types, i, 1)