-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy path__init__.py
More file actions
2724 lines (2018 loc) · 107 KB
/
__init__.py
File metadata and controls
2724 lines (2018 loc) · 107 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# -*- coding: utf-8 -*-
"""unpythonic.syntax: Toto, I've a feeling we're not in Python anymore.
Requires `mcpyrate`.
"""
from mcpyrate import parametricmacro
from mcpyrate.expander import MacroExpander
from mcpyrate.utils import extract_bindings
from ..dynassign import make_dynvar, dyn
# --------------------------------------------------------------------------------
# This module contains the macro interface and docstrings; the submodules
# contain the actual syntax transformers (regular functions that process ASTs)
# that implement the macros.
# --------------------------------------------------------------------------------
# --------------------------------------------------------------------------------
# **Historical NOTE**:
#
# These macros were originally written years before `mcpyrate` even existed, and were designed to run on
# MacroPy. It was a pioneering product, and an excellent choice at the time. Particularly, it can't have
# been easy to be the first to implement quasiquotes for Python, up to and including hygienic captures!
# So, my hat is off for the MacroPy team - without MacroPy, there would be no `mcpyrate`.
#
# Now, let's get technical:
#
# MacroPy 1.1.0b2 expands macros using a two-pass system: first pass outside in, then second pass inside
# out. By default, a macro expands in the inside-out pass. By making the macro interface function into a
# generator (instead of a regular function), it can opt in to two-pass processing. It then `yield`s the
# first-pass (outside in) output, and optionally `return`s the second-pass (inside out) output, if any.
# The MacroPy interface is thus similar to how `contextlib.contextmanager` treats the enter and exit code.
#
# Following MacroPy's default mode, most of the macros in `unpythonic.syntax` are designed to expand
# inside-out. This seemed a good idea at the time, particularly regarding the lexical scoping of `let`
# constructs, which were one of the first features of `unpythonic.syntax`. However, with `mcpyrate`
# that's not such a bright idea. First, the default is outside-in (because less magic). Secondly,
# because I followed the `mcpy` design and didn't want to use generators to define macros, this means
# that expanding inside-out requires an explicit recursion call (very pythonic!) - but this has the
# implication that the debug expansion stepper (see macro `mcpyrate.debug.step_expansion`) will not get
# control back until after all the inner macros have expanded. So inside-out expansions are harder to
# debug. As of `mcpyrate` 3.2.2, the expansion stepper has gained the `"detailed"` option, which will
# show individual inner macro expansions, but there's no way to get an overview of the whole tree
# before control returns to the stepper.
#
# (`mcpyrate`'s expansion stepper is a logical extension of the idea of MacroPy's `show_expanded`, and
# was inspired by Racket's macro stepper. Beside the final AST, it also shows the intermediate steps of
# the expansion, and outputs the unparsed code with syntax highlighting.)
#
# A better way would be to expand outside-in, because many of our macros work together; using the
# outside-in order would reduce the need to analyze macro-expanded ASTs. Lexical scoping for `let` could
# be achieved in this system by detecting boundaries of nested `let` (et al.) invocations, recursing
# into that invocation first, and then processing the resulting tree normally. (So the order would be
# outside-in except for those inner invocations that absolutely need to expand first, e.g. to respect
# lexical scoping for lexically nested `unpythonic` envs.)
#
# (No sensible person works directly with macro-expanded ASTs if that can be avoided. It goes against
# the grain of the macro abstraction. It's a bit like decompiling to see what's going on.)
#
# Finally, be aware that in `mcpyrate`, an inside-out expansion order is achieved by recursing explicitly:
#
#     def mymacrointerface(tree, *, expander, **kw):
# # perform your outside-in processing here
#
# tree = expander.visit(tree) # recurse explicitly
#
# # perform your inside-out processing here
#
# return tree
#
# If the line `tree = expander.visit(tree)` is omitted, the macro expands outside-in.
# Note this default is different from MacroPy's!
#
# There are further cleanups of the macro layer possible with `mcpyrate`. For example:
#
# - Quasiquotes no longer auto-expand macros in the quoted code. `letseq` could use hygienic *macro*
# capture and just return an unexpanded `let` and another `letseq` (with one fewer binding),
# similarly to how Racket implements `let*`. See `unpythonic.syntax.simplelet` for a demo.
#
# - The macro interfaces and their docstrings could live inside the implementation modules, and this
# module could just re-export them. (A function being a macro is a feature of its *use site* where
# it is imported, by `from xxx import macros, ...`; `mcpyrate` has no macro registry.)
#
# - Many macros could perhaps run in the outside-in pass. Some need a redesign for their AST analysis,
# but much of that has been sufficiently abstracted (e.g. `unpythonic.syntax.letdoutil`) so that this
# is mainly a case of carefully changing the analysis mode at all appropriate use sites.
#
# However, 0.15.0 is the initial version that runs on `mcpyrate`, and the focus is to just get this running.
# Cleanups can be done in a future release.
# TODO: Fix remaining failures and errors detected by test suite.
# TODO: `make_isxpred` is now obsolete because `mcpyrate` does not rename hygienic captures of run-time values. Make it explicit at the use sites what they want, and remove `make_isxpred`. (E.g. `curry` wants to match both `curryf` and `currycall`, exactly. Some use sites want to match only a single thing.)
# TODO: locref could be an ASTMarker anywhere that needs a source location reference; extract `.body` if so.
# TODO: Brackets: use "with test[...]" instead of "with test(...)" in the test modules
# TODO: Remove any unused `expander` kwargs from the macro interface
# TODO: Drop `# pragma: no cover` from macro tests as appropriate, since `mcpyrate` reports coverage correctly.
# TODO: Test the q[t[...]] implementation in do0[]
# TODO: With `mcpyrate` we could start looking at values, not names, when the aim is to detect hygienically captured `unpythonic` constructs. See use sites of `isx`; refer to `mcpyrate.quotes.is_captured_value` and `mcpyrate.quotes.lookup_value`.
# TODO: With `mcpyrate`, we could move the macro interface functions to
# TODO: the submodules, and have just re-exports here.
# TODO: macro docs: "first pass" -> "outside in"; "second pass" -> "inside out"
# TODO: Some macros look up others; convert lookups to mcpyrate style (accounting for as-imports)
# TODO: or hygienic macro references (`h[...]`), as appropriate.
# TODO: `isx` and `getname` from `unpythonic.syntax.nameutil` should probably live in `mcpyrate` instead
# TODO: `mcpyrate` does not auto-expand macros in quasiquoted code.
# - Consider when we should expand macros in quoted code and when not
# - Consider what changes this implies for other macros that read the partially expanded output
# (some things may change from expanded to unexpanded, facilitating easier analysis but requiring
# code changes)
# TODO: Consider using run-time compiler access in macro tests, like `mcpyrate` itself does. This compartmentalizes testing so that the whole test module won't crash on a macro-expansion error.
# TODO: Change decorator macro invocations to use [] instead of () to pass macro arguments. Requires Python 3.9.
# TODO: Check expansion order of several macros in the same `with` statement
# TODO: grep for any remaining mentions of "macropy"
# TODO: Upgrade anaphoric if's `it` into a `mcpyrate` magic variable that errors out at compile time when it appears in an invalid position (i.e. outside any `aif`). Basically, take the `aif` from `mcpyrate`.
# TODO: also let_syntax block, expr
# TODO: also kw() in unpythonic.syntax.prefix
# TODO: let_syntax block, expr: syntactic consistency: change parentheses to brackets
# TODO: grep codebase for "0.15", may have some pending interface changes that don't have their own GitHub issue (e.g. parameter ordering of `unpythonic.it.window`)
# TODO: ansicolor: `mcpyrate` already depends on Colorama anyway (and has a *nix-only fallback capability).
# TODO: `unpythonic` only needs the colorizer in the *macro-enabled* test framework; so we don't really need
# TODO: to provide our own colorizer; we can use the one from `mcpyrate`. (It would be different if regular code needed it.)
# TODO: with mcpyrate, do we really need to set `ctx` in our macros? (does our macro code need it?)
# TODO: The HasThon test (grep for it), when putting the macros in the wrong order on purpose,
# TODO: confuses the call site filename detector of the test framework. Investigate.
# TODO: Move dialect examples from `pydialect` into a new package, `unpythonic.dialects`.
# TODO: `mcpyrate` now provides the necessary infrastructure, while `unpythonic` has the macros
# TODO: needed to make interesting things happen. Update docs accordingly for both projects.
# Syntax transformers and internal utilities
from .autoref import autoref as _autoref
from .autocurry import autocurry as _autocurry
from .dbg import dbg_block as _dbg_block, dbg_expr as _dbg_expr
from .forall import forall as _forall
from .ifexprs import aif as _aif, cond as _cond
from .lambdatools import (multilambda as _multilambda,
namedlambda as _namedlambda,
f as _f,
envify as _envify)
from .lazify import lazy as _lazy, lazify as _lazify, lazyrec as _lazyrec
from .letdo import (local as _local, delete as _delete,
do as _do, do0 as _do0,
let as _let, letseq as _letseq, letrec as _letrec,
dlet as _dlet, dletseq as _dletseq, dletrec as _dletrec,
blet as _blet, bletseq as _bletseq, bletrec as _bletrec)
from .letdoutil import (UnexpandedLetView as _UnexpandedLetView,
canonize_bindings as _canonize_bindings)
from .letsyntax import (let_syntax_expr as _let_syntax_expr,
let_syntax_block as _let_syntax_block)
from .nb import nb as _nb
from .prefix import prefix as _prefix
from .tailtools import (autoreturn as _autoreturn, tco as _tco,
continuations as _continuations)
from .testingtools import (test_expr as _test_expr,
test_expr_signals as _test_expr_signals,
test_expr_raises as _test_expr_raises,
test_block as _test_block,
test_block_signals as _test_block_signals,
test_block_raises as _test_block_raises,
fail_expr as _fail_expr,
error_expr as _error_expr,
warn_expr as _warn_expr)
# Re-exports (for client code that uses us)
from .dbg import dbgprint_block, dbgprint_expr # noqa: F401, re-export for re-use in a decorated variant.
from .forall import insist, deny # noqa: F401
from .ifexprs import it # noqa: F401
from .letdoutil import where # noqa: F401
from .lazify import force, force1 # noqa: F401
from .letsyntax import block, expr # noqa: F401
from .prefix import q, u, kw # noqa: F401 # TODO: bad names, `mcpyrate` uses them too.
from .tailtools import call_cc # noqa: F401
from .testingtools import the # noqa: F401
# We use `dyn` to pass the `expander` parameter to the macro implementations.
class _NoExpander:
    # Sentinel default for the `_macro_expander` dynvar: fails loudly if a syntax
    # transformer tries to expand macros before a macro interface function has
    # installed the real expander (via `dyn.let(_macro_expander=expander)`).
    def visit(self, tree):
        # Mirrors the `MacroExpander.visit` entry point, but always errors out.
        raise NotImplementedError("Macro expander instance has not been set in `dyn`.")
make_dynvar(_macro_expander=_NoExpander())
# -----------------------------------------------------------------------------
# The "kw" we have here is the parameter from mcpyrate; the "kw" we export (that
# flake8 thinks conflicts with this) is the runtime stub for our `prefix` macro.
@parametricmacro
def autoref(tree, *, args, syntax, expander, **kw):  # noqa: F811
    """Implicitly reference attributes of an object.

    Example::

        e = env(a=1, b=2)
        c = 3
        with autoref[e]:
            a
            b
            c

    The transformation is applied in ``Load`` context only. ``Store`` and ``Del``
    are not redirected.

    Useful e.g. with the ``.mat`` file loader of SciPy.

    **CAUTION**: `autoref` is essentially the `with` construct of JavaScript
    (which is completely different from Python's meaning of `with`), which is
    nowadays deprecated. See:

        https://www.ecma-international.org/ecma-262/6.0/#sec-with-statement
        https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/with
        https://2ality.com/2011/06/with-statement.html

    **CAUTION**: The auto-reference `with` construct was deprecated in JavaScript
    **for security reasons**. Since the autoref'd object **will hijack all name
    lookups**, use `with autoref` only with an object you trust!

    **CAUTION**: `with autoref` also complicates static code analysis or makes it
    outright infeasible, for the same reason. It is impossible to statically know
    whether something that looks like a bare name in the source code is actually
    a true bare name, or a reference to an attribute of the autoref'd object.
    That status can also change at any time, since the lookup is dynamic, and
    attributes can be added and removed dynamically.
    """
    if syntax != "block":
        raise SyntaxError("autoref is a block macro only")
    if not args:
        raise SyntaxError("autoref requires an argument, the object to be auto-referenced")
    # The expander supplies `optional_vars` only for a `with ... as ...` form;
    # `dict.get` defaults to `None` (no asname) when it is absent.
    target = kw.get("optional_vars")
    # This macro expands inside out: expand any inner macros first.
    tree = expander.visit(tree)
    return _autoref(block_body=tree, args=args, asname=target)
# -----------------------------------------------------------------------------
def aif(tree, *, syntax, expander, **kw):  # noqa: F811
    """[syntax, expr] Anaphoric if.

    Usage::

        aif[test, then, otherwise]

        aif[[pre, ..., test],
            [post_true, ..., then],        # "then" branch
            [post_false, ..., otherwise]]  # "otherwise" branch

    Within the ``then`` and ``otherwise`` branches, the magic identifier
    ``it`` (always spelled literally ``it``) holds the value of ``test``.

    The invocation expands into a ``let`` plus an expression-form ``if``.

    Any part may be a sequence of expressions: surround it with brackets,
    which creates an implicit `do` environment (see `unpythonic.syntax.do`).
    A single expression that is itself a literal list needs doubled
    brackets: ``[[1, 2, 3]]``.
    """
    if syntax != "expr":
        raise SyntaxError("aif is an expr macro only")
    # This macro expands outside in, but any implicit do[] inside it
    # needs access to the expander; pass it through the dynvar.
    with dyn.let(_macro_expander=expander):
        return _aif(tree)
def cond(tree, *, syntax, expander, **kw):  # noqa: F811
    """[syntax, expr] Lispy cond; like "a if p else b", but has "elif".

    Usage::

        cond[test1, then1,
             test2, then2,
             ...
             otherwise]

        cond[[pre1, ..., test1], [post1, ..., then1],
             [pre2, ..., test2], [post2, ..., then2],
             ...
             [postn, ..., otherwise]]

    This gives human-readable multi-branch conditionals in an expression
    position.

    Any part may be a sequence of expressions: surround it with brackets,
    which creates an implicit `do` environment (see `unpythonic.syntax.do`).
    A single expression that is itself a literal list needs doubled
    brackets: ``[[1, 2, 3]]``.
    """
    if syntax != "expr":
        raise SyntaxError("cond is an expr macro only")
    # This macro expands outside in, but any implicit do[] inside it
    # needs access to the expander; pass it through the dynvar.
    with dyn.let(_macro_expander=expander):
        return _cond(tree)
# -----------------------------------------------------------------------------
def autocurry(tree, *, syntax, expander, **kw):  # `tree` is really a list of trees, the body of the with block  # noqa: F811
    """[syntax, block] Automatic currying.

    Usage::

        from unpythonic.syntax import macros, autocurry

        with autocurry:
            ...

    Every **function call** and **function definition** (``def``, ``lambda``)
    that appears *lexically* inside the ``with autocurry`` block is curried
    automatically.

    **CAUTION**: Some builtins are uninspectable or may report their arities
    incorrectly; in those cases, ``curry`` may fail, occasionally in mysterious
    ways.

    The function ``unpythonic.arity.arities``, which ``unpythonic.fun.curry``
    internally uses, has a workaround for the inspectability problems of all
    builtins in the top-level namespace (as of Python 3.7), but e.g. methods
    of builtin types are not handled.

    Inside a ``with autocurry`` block, an auto-curried call whose target is
    uninspectable simply skips the curry, instead of raising ``TypeError``
    as usual.

    Example::

        from unpythonic.syntax import macros, autocurry
        from unpythonic import foldr, composerc as compose, cons, nil, ll

        with autocurry:
            def add3(a, b, c):
                return a + b + c
            assert add3(1)(2)(3) == 6
            assert add3(1, 2)(3) == 6
            assert add3(1)(2, 3) == 6
            assert add3(1, 2, 3) == 6

            mymap = lambda f: foldr(compose(cons, f), nil)
            double = lambda x: 2 * x
            assert mymap(double, ll(1, 2, 3)) == ll(2, 4, 6)

        # The definition was auto-curried, so this works here too.
        assert add3(1)(2)(3) == 6
    """
    if syntax != "block":
        raise SyntaxError("autocurry is a block macro only")
    # Expand inside out: process inner macro invocations before we transform.
    expanded_body = expander.visit(tree)
    return _autocurry(block_body=expanded_body)
# -----------------------------------------------------------------------------
@parametricmacro
def let(tree, *, args, syntax, expander, **kw):  # noqa: F811
    """[syntax, expr] Introduce expression-local variables.

    Sugar on top of ``unpythonic.lispylet.let``.

    Usage::

        let[(k0, v0), ...][body]
        let[(k0, v0), ...][[body0, ...]]

    where ``body`` is an expression. Names bound by ``let`` are local:
    visible in ``body``, nonexistent outside it.

    Alternative haskelly syntax::

        let[((k0, v0), ...) in body]
        let[((k0, v0), ...) in [body0, ...]]
        let[body, where((k0, v0), ...)]
        let[[body0, ...], where((k0, v0), ...)]

    A multi-expression body takes an extra set of brackets, as shown;
    this inserts a ``do``. Only the outermost extra brackets are special;
    any others in the bodies are ordinary list literals. In the haskelly
    syntax, the extra brackets must enclose only the ``body`` part.

    Each ``name`` within one ``let`` must be unique.

    Let-bound variables can be assigned to with ``x << 42``; this is an
    expression that performs the assignment and returns the new value.

    A multi-expression body also provides an internal definition context
    for locals that are not part of the ``let``; see ``do`` for details.

    Technical points:

        - The let-bound variables actually live in an ``unpythonic.env``;
          the macro makes them look (and mostly behave) like lexical
          variables.

        - Unlike ``unpythonic.lispylet.let``, the macro needs no quotes
          around variable names in bindings.

        - The body is automatically wrapped in a ``lambda e: ...``.

        - For each ``x`` in bindings, lookups transform as ``x --> e.x``.

        - Lexical scoping is respected (nesting ``let`` works), because a
          unique gensym name is used instead of just ``e``.

        - A multi-expression body ``[body0, ...]`` is first run through the
          ``do`` transformation; the result becomes ``body``.
    """
    if syntax != "expr":
        raise SyntaxError("let is an expr macro only")
    # The whole `let[]` family expands inside out.
    with dyn.let(_macro_expander=expander):
        return _destructure_and_apply_let(tree, args, expander, _let)
@parametricmacro
def letseq(tree, *, args, syntax, expander, **kw):  # noqa: F811
    """[syntax, expr] Let with sequential binding (like Scheme/Racket let*).

    Same as ``let``, except the bindings take effect one at a time; a later
    binding of a name already bound shadows the earlier one.

    Expands into nested ``let`` expressions.
    """
    if syntax != "expr":
        raise SyntaxError("letseq is an expr macro only")
    # The whole `let[]` family expands inside out.
    with dyn.let(_macro_expander=expander):
        return _destructure_and_apply_let(tree, args, expander, _letseq)
@parametricmacro
def letrec(tree, *, args, syntax, expander, **kw):  # noqa: F811
    """[syntax, expr] Let with mutually recursive binding.

    Same as ``let``, except bindings may see the other bindings of the same
    ``letrec``. Each ``name`` within one ``letrec`` must be unique.

    Definitions are processed sequentially, left to right; a definition may
    refer to any earlier one. If ``value`` is callable (a lambda), it may
    refer to any definition, including later ones.

    Handy for locally defining mutually recursive functions.
    """
    if syntax != "expr":
        raise SyntaxError("letrec is an expr macro only")
    # The whole `let[]` family expands inside out.
    with dyn.let(_macro_expander=expander):
        return _destructure_and_apply_let(tree, args, expander, _letrec)
# NOTE: At the macro interface, the invocations `let()[...]` (empty args)
# and `let[...]` (no args) were indistinguishable in MacroPy. This was a
# problem, because it might be that the user wrote the body but simply
# forgot to put anything in the parentheses. (There's `do[]` if you need
# a `let` without making any bindings.)
#
# In `mcpyrate`, `let()[...]` is a syntax error. The preferred syntax,
# when using macro arguments, is `let[...][...]`. When this is not
# possible (in decorator position up to Python 3.8), then `let(...)[...]`
# is acceptable. But empty brackets/parentheses are not accepted. Thus,
# we will have an empty `args` list only when there are no brackets/parentheses
# for the macro arguments part.
#
# So when `args` is empty, this function assumes haskelly let syntax
# `let[(...) in ...]` or `let[..., where(...)]`. In these cases,
# both the bindings and the body reside inside the brackets (i.e.,
# in the AST contained in the `tree` argument).
#
# Since the brackets/parentheses must be deleted when no macro arguments
# are given, this is now the correct assumption to make.
#
# But note that if needed elsewhere, `mcpyrate` has the `invocation` kwarg
# in the macro interface that gives a copy of the whole macro invocation
# node (so we could see the exact original syntax).
#
# allow_call_in_name_position: used by let_syntax to allow template definitions.
def _destructure_and_apply_let(tree, args, macro_expander, let_expander_function, allow_call_in_name_position=False):
    # Any implicit do (the extra-bracket notation) needs the expander; install it.
    with dyn.let(_macro_expander=macro_expander):
        if not args:
            # No macro arguments: haskelly syntax, let[(...) in ...] or
            # let[..., where(...)]. Bindings and body both live inside the
            # brackets, i.e. inside `tree`.
            view = _UnexpandedLetView(tree)
            return let_expander_function(bindings=view.bindings, body=view.body)
        # Classical syntax: bindings came in as macro arguments.
        bindings = _canonize_bindings(args, allow_call_in_name_position=allow_call_in_name_position)
        return let_expander_function(bindings=bindings, body=tree)
# -----------------------------------------------------------------------------
# Decorator versions, for "let over def".
@parametricmacro
def dlet(tree, *, args, syntax, expander, **kw):  # noqa: F811
    """[syntax, decorator] Decorator version of let, for 'let over def'.

    Example::

        @dlet[(x, 0)]
        def count():
            x << x + 1
            return x
        assert count() == 1
        assert count() == 2

    **CAUTION**: function arguments, local variables, and names declared as
    ``global`` or ``nonlocal`` in a given lexical scope shadow names from the
    ``let`` environment *for the entirety of that lexical scope*. (This
    follows Python's standard scoping rules.)

    **CAUTION**: assigning to the let environment is ``name << value``;
    the usual ``name = value`` instead creates a local variable in the
    lexical scope of the ``def``.
    """
    if syntax != "decorator":
        raise SyntaxError("dlet is a decorator macro only")
    # Expand inside out: macro args first, then the decorated def.
    expanded_args = expander.visit(args)
    expanded_tree = expander.visit(tree)
    return _destructure_and_apply_let(expanded_tree, expanded_args, expander, _dlet)
@parametricmacro
def dletseq(tree, *, args, syntax, expander, **kw):  # noqa: F811
    """[syntax, decorator] Decorator version of letseq, for 'letseq over def'.

    Expands into nested function definitions, each carrying one ``dlet``
    decorator.

    Example::

        @dletseq[(x, 1),
                 (x, x+1),
                 (x, x+2)]
        def g(a):
            return a + x
        assert g(10) == 14
    """
    if syntax != "decorator":
        raise SyntaxError("dletseq is a decorator macro only")
    # Expand inside out: macro args first, then the decorated def.
    expanded_args = expander.visit(args)
    expanded_tree = expander.visit(tree)
    return _destructure_and_apply_let(expanded_tree, expanded_args, expander, _dletseq)
@parametricmacro
def dletrec(tree, *, args, syntax, expander, **kw):  # noqa: F811
    """[syntax, decorator] Decorator version of letrec, for 'letrec over def'.

    Example::

        @dletrec[(evenp, lambda x: (x == 0) or oddp(x - 1)),
                 (oddp, lambda x: (x != 0) and evenp(x - 1))]
        def f(x):
            return evenp(x)
        assert f(42) is True
        assert f(23) is False

    The same cautions apply as to ``dlet``.
    """
    if syntax != "decorator":
        raise SyntaxError("dletrec is a decorator macro only")
    # Expand inside out: macro args first, then the decorated def.
    expanded_args = expander.visit(args)
    expanded_tree = expander.visit(tree)
    return _destructure_and_apply_let(expanded_tree, expanded_args, expander, _dletrec)
@parametricmacro
def blet(tree, *, args, syntax, expander, **kw):  # noqa: F811
    """[syntax, decorator] def --> let block.

    Example::

        @blet[(x, 21)]
        def result():
            return 2*x
        assert result == 42
    """
    if syntax != "decorator":
        raise SyntaxError("blet is a decorator macro only")
    # Expand inside out: macro args first, then the decorated def.
    expanded_args = expander.visit(args)
    expanded_tree = expander.visit(tree)
    return _destructure_and_apply_let(expanded_tree, expanded_args, expander, _blet)
@parametricmacro
def bletseq(tree, *, args, syntax, expander, **kw):  # noqa: F811
    """[syntax, decorator] def --> letseq block.

    Example::

        @bletseq[(x, 1),
                 (x, x+1),
                 (x, x+2)]
        def result():
            return x
        assert result == 4
    """
    if syntax != "decorator":
        raise SyntaxError("bletseq is a decorator macro only")
    # Expand inside out: macro args first, then the decorated def.
    expanded_args = expander.visit(args)
    expanded_tree = expander.visit(tree)
    return _destructure_and_apply_let(expanded_tree, expanded_args, expander, _bletseq)
@parametricmacro
def bletrec(tree, *, args, syntax, expander, **kw):  # noqa: F811
    """[syntax, decorator] def --> letrec block.

    Example::

        @bletrec[(evenp, lambda x: (x == 0) or oddp(x - 1)),
                 (oddp, lambda x: (x != 0) and evenp(x - 1))]
        def result():
            return evenp(42)
        assert result is True

    Since names inside a ``def`` have mutually recursive scope, an almost
    equivalent pure Python solution (no macros) is::

        from unpythonic.misc import call

        @call
        def result():
            evenp = lambda x: (x == 0) or oddp(x - 1)
            oddp = lambda x: (x != 0) and evenp(x - 1)
            return evenp(42)
        assert result is True
    """
    if syntax != "decorator":
        raise SyntaxError("bletrec is a decorator macro only")
    # Expand inside out: macro args first, then the decorated def.
    expanded_args = expander.visit(args)
    expanded_tree = expander.visit(tree)
    return _destructure_and_apply_let(expanded_tree, expanded_args, expander, _bletrec)
# -----------------------------------------------------------------------------
# Imperative code in expression position.
def local(tree, *, syntax, invocation, **kw):  # noqa: F811
    """[syntax] Declare a local name in a "do".

    Usage::

        local[name << value]

    Meaningful only inside a ``do[...]``, a ``do0[...]``, or an implicit
    ``do`` (the extra bracket syntax).

    The declaration takes effect from the *next* item of the ``do`` onward,
    i.e. the item after the ``local[]``. It does not shadow same-named
    nonlocal variables in earlier items of that ``do``, and within the
    declaring item itself, the old bindings still apply on the RHS.

    Consequently, you may declare a local ``x`` initialized from a nonlocal
    ``x`` via ``local[x << x]``: the RHS ``x`` is the nonlocal one (the
    declaration has not yet taken effect), while the LHS ``x`` names the new
    local that exists only inside the ``do``. All later items of that same
    ``do`` then see the local ``x``.
    """
    if syntax != "expr":
        raise SyntaxError("local is an expr macro only")  # pragma: no cover
    return _local(tree)
def delete(tree, *, syntax, invocation, **kw):  # noqa: F811
    """[syntax] Delete a previously declared local name in a "do".

    Usage::

        delete[name]

    Meaningful only inside a ``do[...]``, a ``do0[...]``, or an implicit
    ``do`` (the extra bracket syntax).

    The deletion takes effect from the next item onward: from then on the
    deleted local no longer shadows same-named nonlocal variables in later
    items of that same `do`.

    ``do[]`` supports deleting local variables; the ``let[]`` constructs,
    by design, do not.
    """
    if syntax != "expr":
        raise SyntaxError("delete is an expr macro only")  # pragma: no cover
    return _delete(tree)
def do(tree, *, syntax, expander, **kw):  # noqa: F811
    """[syntax, expr] Stuff imperative code into an expression position.

    The value of the whole ``do`` is the value of its last expression.
    See also ``do0``.

    Usage::

        do[body0, ...]

    Example::

        do[local[x << 42],
           print(x),
           x << 23,
           x]

    This is sugar on top of ``unpythonic.seq.do``, with some extras:

      - ``local[name << value]`` declares and initializes a local name.
        The operator ``local`` is syntax, not really a function, and it
        exists only inside a ``do``. There is also an operator ``delete``
        for removing a previously declared local name in the ``do``.
        Both ``local`` and ``delete``, if used, should be imported as macros.

      - By design, an uninitialized variable cannot be created; a value
        must be given at declaration time. Just use an explicit ``None``
        as a "no value" if needed.

      - Names declared within the same ``do`` must be unique; re-declaring
        the same name is an expansion-time error.

      - ``name << value`` assigns to an already declared local name.

    **local name declarations**

    A ``local`` declaration takes effect in the expression *after* the one
    where it appears. Thus::

        result = []
        let((lst, []))[do[result.append(lst),       # the let "lst"
                          local[lst << lst + [1]],  # LHS: do "lst", RHS: let "lst"
                          result.append(lst)]]      # the do "lst"
        assert result == [[], [1]]

    **Syntactic ambiguity**

    These two cases cannot be told apart syntactically:

      - A single body expression that is a literal tuple or list,
      - Several body expressions, represented as a literal tuple or list.

    ``do`` always picks the latter reading. Whenever the body has several
    expressions, no ambiguity arises, because the distinction between the
    sequence of expressions itself and its items is then clear.

    Examples::

        do[1, 2, 3]        # --> tuple, 3
        do[(1, 2, 3)]      # --> tuple, 3 (since in Python, the comma creates tuples;
                           #     parentheses are only used for disambiguation)
        do[[1, 2, 3]]      # --> list, 3
        do[[[1, 2, 3]]]    # --> list containing a list, [1, 2, 3]
        do[([1, 2, 3],)]   # --> tuple containing a list, [1, 2, 3]
        do[[1, 2, 3],]     # --> tuple containing a list, [1, 2, 3]
        do[[(1, 2, 3)]]    # --> list containing a tuple, (1, 2, 3)
        do[((1, 2, 3),)]   # --> tuple containing a tuple, (1, 2, 3)
        do[(1, 2, 3),]     # --> tuple containing a tuple, (1, 2, 3)

    It is possible to use ``unpythonic.misc.pack`` to build a tuple from
    given elements: ``do[pack(1, 2, 3)]`` reads as a single-item body that
    creates a tuple (by calling a function).

    Note the outermost brackets belong to the ``do`` itself; they don't yet
    create a list. In the *use brackets to denote a multi-expr body* syntax
    (e.g. ``multilambda``, the ``let`` constructs), the extra brackets
    already create a list, so in those uses the ambiguity does not arise.
    The transformation inserts not only the word ``do``, but also the
    outermost brackets. For example::

        let[(x, 1),
            (y, 2)][[
              [x, y]]]

    transforms to::

        let[(x, 1),
            (y, 2)][do[[  # "do[" is inserted between the two opening brackets
              [x, y]]]]   # and its closing "]" is inserted here

    which already removes the ambiguity.

    **Notes**

    Macros expand in an inside-out order, so a nested ``let`` shadows
    names that also appear in the ``do``::

        do[local[x << 17],
           let[(x, 23)][
             print(x)],  # 23, the "x" of the "let"
           print(x)]     # 17, the "x" of the "do"

    The reason local names must be declared is to allow write access to
    lexically outer environments from inside a ``do``::

        let[(x, 17)][
              do[x << 23,         # no "local[...]"; update the "x" of the "let"
                 local[y << 42],  # "y" is local to the "do"
                 print(x, y)]]

    With the extra bracket syntax, the latter example can be written as::

        let[(x, 17)][[
              x << 23,
              local[y << 42],
              print(x, y)]]

    The subtle difference is that the first version keeps the do-items in
    a tuple whereas this one keeps them in a list, but the behavior is
    exactly the same.

    Python approaches this the other way around, requiring a ``nonlocal``
    statement to re-bind a name owned by an outer scope.

    The ``let`` constructs solve this problem by declaring the local
    bindings in a separate block, which plays the role of ``local``.
    """
    if syntax == "expr":
        # Expose the expander to the transformer through a dynamic binding.
        with dyn.let(_macro_expander=expander):
            return _do(tree)
    raise SyntaxError("do is an expr macro only")
def do0(tree, *, syntax, expander, **kw):  # noqa: F811
    """[syntax, expr] Like do, but return the value of the first expression."""
    if syntax == "expr":
        # Expose the expander to the transformer through a dynamic binding.
        with dyn.let(_macro_expander=expander):
            return _do0(tree)
    raise SyntaxError("do0 is an expr macro only")
# -----------------------------------------------------------------------------
# TODO: change the block() construct to block[], for syntactic consistency
@parametricmacro
def let_syntax(tree, *, args, syntax, expander, **kw):  # noqa: F811
    """[syntax, expr/block] Introduce local **syntactic** bindings.

    **Expression variant**::

        let_syntax[(lhs, rhs), ...][body]
        let_syntax[(lhs, rhs), ...][[body0, ...]]

    Alternative haskelly syntax::

        let_syntax[((lhs, rhs), ...) in body]
        let_syntax[((lhs, rhs), ...) in [body0, ...]]
        let_syntax[body, where((lhs, rhs), ...)]
        let_syntax[[body0, ...], where((lhs, rhs), ...)]

    **Block variant**::

        with let_syntax:
            with block as xs:          # capture a block of statements - bare name
                ...
            with block(a, ...) as xs:  # capture a block of statements - template
                ...
            with expr as x:            # capture a single expression - bare name
                ...
            with expr(a, ...) as x:    # capture a single expression - template
                ...
            body0
            ...

    A single expression can be a ``do[]`` if multiple expressions are needed.

    The bindings are applied **at macro expansion time**, substituting the
    expression on the RHS for each instance of the corresponding LHS. Each
    substitution gets a fresh copy.

    This is useful e.g. to locally abbreviate long function names at macro
    expansion time (with zero run-time overhead), or to splice in several
    (possibly parametric) instances of a common pattern.

    In the expression variant, ``lhs`` may be:

      - A bare name (e.g. ``x``), or
      - A simple template of the form ``f(x, ...)``. The names inside the
        parentheses declare the formal parameters of the template (which can
        then be used in the body).

    In the block variant:

      - The **as-part** specifies the name of the LHS.
      - If a template, the formal parameters are declared on the ``block``
        or ``expr``, not on the as-part (due to syntactic limitations).

    **Templates**

    Use templates to make parametric substitutions. Templates support only
    positional arguments, with no default values.

    Even in block templates, parameters are always expressions (because
    they use the function-call syntax at the use site).

    In the body of the ``let_syntax``, a template is used like a function
    call. Just like in an actual function call, when the template is
    substituted, any instances of its formal parameters on its RHS get
    replaced by the argument values from the "call" site; but
    ``let_syntax`` performs this at macro-expansion time.

    Note each instance of the same formal parameter gets a fresh copy of
    the corresponding argument value.

    **Substitution order**

    This is a two-step process. The first step applies template
    substitutions; the second applies bare name substitutions to the result
    of the first. (So RHSs of templates may use any of the bare-name
    definitions.)

    Within each step, the substitutions are applied **in the order
    specified**. So if the bindings are ``((x, y), (y, z))``, then ``x``
    transforms to ``z``. But if the bindings are ``((y, z), (x, y))``, then
    ``x`` transforms to ``y``, and only an explicit ``y`` at the use site
    transforms to ``z``.

    **Notes**

    Inspired by Racket's ``let-syntax`` and ``with-syntax``, see:
        https://docs.racket-lang.org/reference/let.html
        https://docs.racket-lang.org/reference/stx-patterns.html

    **CAUTION**: This is essentially a toy macro system inside the real
    macro system, implemented with the real macro system.

    The usual caveats of macro systems apply. Especially, we support
    absolutely no form of hygiene. Be very, very careful to avoid name
    conflicts.

    ``let_syntax`` is meant only for simple local substitutions where the
    elimination of repetition can shorten the code and improve readability.

    If you need to do something complex, prefer writing a real macro
    directly in `mcpyrate`.
    """
    if syntax not in ("expr", "block"):
        raise SyntaxError("let_syntax is an expr and block macro only")
    # Expand inner macro invocations first (inside-out order).
    tree = expander.visit(tree)
    if syntax == "block":
        return _let_syntax_block(block_body=tree)
    # syntax == "expr"
    return _destructure_and_apply_let(tree, args, expander, _let_syntax_expr,
                                      allow_call_in_name_position=True)
@parametricmacro
def abbrev(tree, *, args, syntax, expander, **kw):  # noqa: F811
    """[syntax, expr/block] Exactly like ``let_syntax``, but expands outside in.

    Because this variant expands before any macros in the body, it can
    locally rename other macros, e.g.::

        abbrev[(m, macrowithverylongname)][
                 m[tree1] if m[tree2] else m[tree3]]

    **CAUTION**: Because ``abbrev`` expands outside-in, and does not respect
    boundaries of any nested ``abbrev`` invocations, it will not lexically
    scope the substitutions. Instead, the outermost ``abbrev`` expands
    first, and then any inner ones expand with whatever substitutions they
    have remaining.

    If the same name is used on the LHS in two or more nested ``abbrev``,
    any inner ones will likely raise an error (unless the outer
    substitution just replaces a name with another), because also the names
    on the LHS in the inner ``abbrev`` will undergo substitution when the
    outer ``abbrev`` expands.
    """
    if syntax not in ("expr", "block"):
        raise SyntaxError("abbrev is an expr and block macro only")
    # Deliberately DON'T expand inner macro invocations first - outside-in
    # ordering is the default, so we simply do nothing here.
    if syntax == "block":
        return _let_syntax_block(block_body=tree)
    # syntax == "expr"
    return _destructure_and_apply_let(tree, args, expander, _let_syntax_expr,
                                      allow_call_in_name_position=True)