Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
allendowney
GitHub Repository: allendowney/cpython
Path: blob/main/Modules/_functoolsmodule.c
12 views
1
#include "Python.h"
2
#include "pycore_call.h" // _PyObject_CallNoArgs()
3
#include "pycore_dict.h" // _PyDict_Pop_KnownHash()
4
#include "pycore_long.h" // _PyLong_GetZero()
5
#include "pycore_moduleobject.h" // _PyModule_GetState()
6
#include "pycore_object.h" // _PyObject_GC_TRACK
7
#include "pycore_pystate.h" // _PyThreadState_GET()
8
#include "pycore_tuple.h" // _PyTuple_ITEMS()
9
#include "structmember.h" // PyMemberDef
10
11
#include "clinic/_functoolsmodule.c.h"
12
/*[clinic input]
13
module _functools
14
class _functools._lru_cache_wrapper "PyObject *" "&lru_cache_type_spec"
15
[clinic start generated code]*/
16
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=bece4053896b09c0]*/
17
18
/* _functools module written and maintained
19
by Hye-Shik Chang <[email protected]>
20
with adaptations by Raymond Hettinger <[email protected]>
21
Copyright (c) 2004, 2005, 2006 Python Software Foundation.
22
All rights reserved.
23
*/
24
25
/* Per-interpreter state for the _functools module, retrieved with
   _PyModule_GetState().  Holds the sentinel object and the heap types
   created at module initialization. */
typedef struct _functools_state {
    /* this object is used to delimit args and keywords in the cache keys */
    PyObject *kwd_mark;
    PyTypeObject *partial_type;        /* functools.partial */
    PyTypeObject *keyobject_type;      /* functools.KeyWrapper (cmp_to_key) */
    PyTypeObject *lru_list_elem_type;  /* functools._lru_list_elem */
} _functools_state;
32
33
static inline _functools_state *
34
get_functools_state(PyObject *module)
35
{
36
void *state = _PyModule_GetState(module);
37
assert(state != NULL);
38
return (_functools_state *)state;
39
}
40
41
42
/* partial object **********************************************************/
43
44
/* Instance layout for functools.partial.  partial_new() relies on any
   object whose tp_call is partial_call sharing this layout (the fn,
   args and kw members), so keep their order stable. */
typedef struct {
    PyObject_HEAD
    PyObject *fn;            /* wrapped callable */
    PyObject *args;          /* tuple of frozen positional arguments */
    PyObject *kw;            /* dict of frozen keyword arguments */
    PyObject *dict;          /* __dict__ */
    PyObject *weakreflist;   /* List of weak references */
    vectorcallfunc vectorcall;  /* set by partial_setvectorcall() */
} partialobject;
53
54
static void partial_setvectorcall(partialobject *pto);
55
static struct PyModuleDef _functools_module;
56
static PyObject *
57
partial_call(partialobject *pto, PyObject *args, PyObject *kwargs);
58
59
static inline _functools_state *
60
get_functools_state_by_type(PyTypeObject *type)
61
{
62
PyObject *module = PyType_GetModuleByDef(type, &_functools_module);
63
if (module == NULL) {
64
return NULL;
65
}
66
return get_functools_state(module);
67
}
68
69
// Not converted to argument clinic, because of `*args, **kwargs` arguments.
/* tp_new for functools.partial: partial(func, *args, **keywords).
   Flattens nested partials (when the inner partial has no __dict__),
   freezes the positional args in a tuple and the keywords in a dict. */
static PyObject *
partial_new(PyTypeObject *type, PyObject *args, PyObject *kw)
{
    PyObject *func, *pargs, *nargs, *pkw;
    partialobject *pto;

    if (PyTuple_GET_SIZE(args) < 1) {
        PyErr_SetString(PyExc_TypeError,
                        "type 'partial' takes at least one argument");
        return NULL;
    }

    pargs = pkw = NULL;
    func = PyTuple_GET_ITEM(args, 0);
    if (Py_TYPE(func)->tp_call == (ternaryfunc)partial_call) {
        // The type of "func" might not be exactly the same type object
        // as "type", but if it is called using partial_call, it must have the
        // same memory layout (fn, args and kw members).
        // We can use its underlying function directly and merge the arguments.
        partialobject *part = (partialobject *)func;
        if (part->dict == NULL) {
            pargs = part->args;
            pkw = part->kw;
            func = part->fn;
            assert(PyTuple_Check(pargs));
            assert(PyDict_Check(pkw));
        }
    }
    if (!PyCallable_Check(func)) {
        PyErr_SetString(PyExc_TypeError,
                        "the first argument must be callable");
        return NULL;
    }

    /* create partialobject structure */
    pto = (partialobject *)type->tp_alloc(type, 0);
    if (pto == NULL)
        return NULL;

    pto->fn = Py_NewRef(func);

    /* Everything after the callable becomes the frozen positional args. */
    nargs = PyTuple_GetSlice(args, 1, PY_SSIZE_T_MAX);
    if (nargs == NULL) {
        Py_DECREF(pto);
        return NULL;
    }
    if (pargs == NULL) {
        pto->args = nargs;
    }
    else {
        /* Inner partial's args come first, then the new ones. */
        pto->args = PySequence_Concat(pargs, nargs);
        Py_DECREF(nargs);
        if (pto->args == NULL) {
            Py_DECREF(pto);
            return NULL;
        }
        assert(PyTuple_Check(pto->args));
    }

    if (pkw == NULL || PyDict_GET_SIZE(pkw) == 0) {
        if (kw == NULL) {
            pto->kw = PyDict_New();
        }
        else if (Py_REFCNT(kw) == 1) {
            /* Sole owner of the kwargs dict: safe to take it over
               without copying. */
            pto->kw = Py_NewRef(kw);
        }
        else {
            pto->kw = PyDict_Copy(kw);
        }
    }
    else {
        /* Merge the inner partial's keywords with the new ones; the
           new keywords (kw) override on conflict. */
        pto->kw = PyDict_Copy(pkw);
        if (kw != NULL && pto->kw != NULL) {
            if (PyDict_Merge(pto->kw, kw, 1) != 0) {
                Py_DECREF(pto);
                return NULL;
            }
        }
    }
    if (pto->kw == NULL) {
        Py_DECREF(pto);
        return NULL;
    }

    partial_setvectorcall(pto);
    return (PyObject *)pto;
}
157
158
static int
159
partial_clear(partialobject *pto)
160
{
161
Py_CLEAR(pto->fn);
162
Py_CLEAR(pto->args);
163
Py_CLEAR(pto->kw);
164
Py_CLEAR(pto->dict);
165
return 0;
166
}
167
168
static int
169
partial_traverse(partialobject *pto, visitproc visit, void *arg)
170
{
171
Py_VISIT(Py_TYPE(pto));
172
Py_VISIT(pto->fn);
173
Py_VISIT(pto->args);
174
Py_VISIT(pto->kw);
175
Py_VISIT(pto->dict);
176
return 0;
177
}
178
179
/* tp_dealloc for functools.partial: untrack from the GC, clear weakrefs,
   drop owned references, free the memory, then release the heap type. */
static void
partial_dealloc(partialobject *pto)
{
    PyTypeObject *tp = Py_TYPE(pto);
    /* bpo-31095: UnTrack is needed before calling any callbacks */
    PyObject_GC_UnTrack(pto);
    if (pto->weakreflist != NULL) {
        PyObject_ClearWeakRefs((PyObject *) pto);
    }
    (void)partial_clear(pto);
    tp->tp_free(pto);
    /* Heap types hold a reference from each of their instances. */
    Py_DECREF(tp);
}
192
193
194
/* Merging keyword arguments using the vectorcall convention is messy, so
 * if we would need to do that, we stop using vectorcall and fall back
 * to using partial_call() instead. */
Py_NO_INLINE static PyObject *
partial_vectorcall_fallback(PyThreadState *tstate, partialobject *pto,
                            PyObject *const *args, size_t nargsf,
                            PyObject *kwnames)
{
    /* Disable the fast path permanently for this instance, then route
       this call through tp_call (i.e. partial_call). */
    pto->vectorcall = NULL;
    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
    return _PyObject_MakeTpCall(tstate, (PyObject *)pto,
                                args, nargs, kwnames);
}
207
208
/* Vectorcall implementation for functools.partial: prepend the frozen
   positional arguments to the caller's arguments and call the wrapped
   function.  Falls back to partial_call() whenever frozen keywords
   would have to be merged. */
static PyObject *
partial_vectorcall(partialobject *pto, PyObject *const *args,
                   size_t nargsf, PyObject *kwnames)
{
    PyThreadState *tstate = _PyThreadState_GET();

    /* pto->kw is mutable, so need to check every time */
    if (PyDict_GET_SIZE(pto->kw)) {
        return partial_vectorcall_fallback(tstate, pto, args, nargsf, kwnames);
    }

    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
    Py_ssize_t nargs_total = nargs;
    if (kwnames != NULL) {
        /* kwnames values trail the positional args in the args array. */
        nargs_total += PyTuple_GET_SIZE(kwnames);
    }

    PyObject **pto_args = _PyTuple_ITEMS(pto->args);
    Py_ssize_t pto_nargs = PyTuple_GET_SIZE(pto->args);

    /* Fast path if we're called without arguments */
    if (nargs_total == 0) {
        return _PyObject_VectorcallTstate(tstate, pto->fn,
                                          pto_args, pto_nargs, NULL);
    }

    /* Fast path using PY_VECTORCALL_ARGUMENTS_OFFSET to prepend a single
     * positional argument: the caller guarantees args[-1] is writable,
     * so we temporarily borrow that slot and restore it afterwards. */
    if (pto_nargs == 1 && (nargsf & PY_VECTORCALL_ARGUMENTS_OFFSET)) {
        PyObject **newargs = (PyObject **)args - 1;
        PyObject *tmp = newargs[0];
        newargs[0] = pto_args[0];
        PyObject *ret = _PyObject_VectorcallTstate(tstate, pto->fn,
                                                   newargs, nargs + 1, kwnames);
        newargs[0] = tmp;
        return ret;
    }

    Py_ssize_t newnargs_total = pto_nargs + nargs_total;

    PyObject *small_stack[_PY_FASTCALL_SMALL_STACK];
    PyObject *ret;
    PyObject **stack;

    /* Use the on-stack buffer when it is big enough; otherwise heap. */
    if (newnargs_total <= (Py_ssize_t)Py_ARRAY_LENGTH(small_stack)) {
        stack = small_stack;
    }
    else {
        stack = PyMem_Malloc(newnargs_total * sizeof(PyObject *));
        if (stack == NULL) {
            PyErr_NoMemory();
            return NULL;
        }
    }

    /* Copy to new stack, using borrowed references */
    memcpy(stack, pto_args, pto_nargs * sizeof(PyObject*));
    memcpy(stack + pto_nargs, args, nargs_total * sizeof(PyObject*));

    ret = _PyObject_VectorcallTstate(tstate, pto->fn,
                                     stack, pto_nargs + nargs, kwnames);
    if (stack != small_stack) {
        PyMem_Free(stack);
    }
    return ret;
}
274
275
/* Set pto->vectorcall depending on the parameters of the partial object */
276
static void
277
partial_setvectorcall(partialobject *pto)
278
{
279
if (PyVectorcall_Function(pto->fn) == NULL) {
280
/* Don't use vectorcall if the underlying function doesn't support it */
281
pto->vectorcall = NULL;
282
}
283
/* We could have a special case if there are no arguments,
284
* but that is unlikely (why use partial without arguments?),
285
* so we don't optimize that */
286
else {
287
pto->vectorcall = (vectorcallfunc)partial_vectorcall;
288
}
289
}
290
291
292
// Not converted to argument clinic, because of `*args, **kwargs` arguments.
/* tp_call for functools.partial: merge the frozen arguments with the
   call-time arguments (call-time keywords override frozen ones) and
   invoke the wrapped callable. */
static PyObject *
partial_call(partialobject *pto, PyObject *args, PyObject *kwargs)
{
    assert(PyCallable_Check(pto->fn));
    assert(PyTuple_Check(pto->args));
    assert(PyDict_Check(pto->kw));

    /* Merge keywords */
    PyObject *kwargs2;
    if (PyDict_GET_SIZE(pto->kw) == 0) {
        /* kwargs can be NULL */
        kwargs2 = Py_XNewRef(kwargs);
    }
    else {
        /* bpo-27840, bpo-29318: dictionary of keyword parameters must be
           copied, because a function using "**kwargs" can modify the
           dictionary. */
        kwargs2 = PyDict_Copy(pto->kw);
        if (kwargs2 == NULL) {
            return NULL;
        }

        if (kwargs != NULL) {
            /* override=1: call-time keywords win over frozen ones. */
            if (PyDict_Merge(kwargs2, kwargs, 1) != 0) {
                Py_DECREF(kwargs2);
                return NULL;
            }
        }
    }

    /* Merge positional arguments */
    /* Note: tupleconcat() is optimized for empty tuples */
    PyObject *args2 = PySequence_Concat(pto->args, args);
    if (args2 == NULL) {
        Py_XDECREF(kwargs2);
        return NULL;
    }

    PyObject *res = PyObject_Call(pto->fn, args2, kwargs2);
    Py_DECREF(args2);
    Py_XDECREF(kwargs2);
    return res;
}
336
337
PyDoc_STRVAR(partial_doc,
338
"partial(func, *args, **keywords) - new function with partial application\n\
339
of the given arguments and keywords.\n");
340
341
/* Read-only attribute table for functools.partial; the dunder-offset
   entries tell the type machinery where the weakref list, __dict__
   and vectorcall slots live in the instance layout. */
#define OFF(x) offsetof(partialobject, x)
static PyMemberDef partial_memberlist[] = {
    {"func", T_OBJECT, OFF(fn), READONLY,
     "function object to use in future partial calls"},
    {"args", T_OBJECT, OFF(args), READONLY,
     "tuple of arguments to future partial calls"},
    {"keywords", T_OBJECT, OFF(kw), READONLY,
     "dictionary of keyword arguments to future partial calls"},
    {"__weaklistoffset__", T_PYSSIZET,
     offsetof(partialobject, weakreflist), READONLY},
    {"__dictoffset__", T_PYSSIZET,
     offsetof(partialobject, dict), READONLY},
    {"__vectorcalloffset__", T_PYSSIZET,
     offsetof(partialobject, vectorcall), READONLY},
    {NULL}  /* Sentinel */
};
357
358
/* Expose a mutable __dict__ on partial instances via the generic
   getter/setter pair. */
static PyGetSetDef partial_getsetlist[] = {
    {"__dict__", PyObject_GenericGetDict, PyObject_GenericSetDict},
    {NULL}  /* Sentinel */
};
362
363
/* tp_repr for functools.partial, e.g.
   "functools.partial(<f>, 1, 2, x=3)".  Uses Py_ReprEnter/Py_ReprLeave
   to render self-referential partials as "...". */
static PyObject *
partial_repr(partialobject *pto)
{
    PyObject *result = NULL;
    PyObject *arglist;
    Py_ssize_t i, n;
    PyObject *key, *value;
    int status;

    status = Py_ReprEnter((PyObject *)pto);
    if (status != 0) {
        if (status < 0)
            return NULL;
        /* Recursive repr: we are already printing this object. */
        return PyUnicode_FromString("...");
    }

    arglist = PyUnicode_FromString("");
    if (arglist == NULL)
        goto done;
    /* Pack positional arguments */
    assert (PyTuple_Check(pto->args));
    n = PyTuple_GET_SIZE(pto->args);
    for (i = 0; i < n; i++) {
        Py_SETREF(arglist, PyUnicode_FromFormat("%U, %R", arglist,
                                                PyTuple_GET_ITEM(pto->args, i)));
        if (arglist == NULL)
            goto done;
    }
    /* Pack keyword arguments */
    assert (PyDict_Check(pto->kw));
    for (i = 0; PyDict_Next(pto->kw, &i, &key, &value);) {
        /* Prevent key.__str__ from deleting the value. */
        Py_INCREF(value);
        Py_SETREF(arglist, PyUnicode_FromFormat("%U, %S=%R", arglist,
                                                key, value));
        Py_DECREF(value);
        if (arglist == NULL)
            goto done;
    }
    result = PyUnicode_FromFormat("%s(%R%U)", Py_TYPE(pto)->tp_name,
                                  pto->fn, arglist);
    Py_DECREF(arglist);

 done:
    Py_ReprLeave((PyObject *)pto);
    return result;
}
410
411
/* Pickle strategy:
412
__reduce__ by itself doesn't support getting kwargs in the unpickle
413
operation so we define a __setstate__ that replaces all the information
414
about the partial. If we only replaced part of it someone would use
415
it as a hook to do strange things.
416
*/
417
418
/* __reduce__: return (type, (func,), (func, args, kw, dict-or-None)).
   The third item is consumed by partial_setstate() on unpickling. */
static PyObject *
partial_reduce(partialobject *pto, PyObject *unused)
{
    return Py_BuildValue("O(O)(OOOO)", Py_TYPE(pto), pto->fn, pto->fn,
                         pto->args, pto->kw,
                         pto->dict ? pto->dict : Py_None);
}
425
426
/* __setstate__: replace the whole partial state from a 4-tuple
   (func, args, kw, dict).  Inputs are validated and normalized to
   exact tuple/dict so internal invariants hold (see the pickle
   strategy comment above). */
static PyObject *
partial_setstate(partialobject *pto, PyObject *state)
{
    PyObject *fn, *fnargs, *kw, *dict;

    if (!PyTuple_Check(state) ||
        !PyArg_ParseTuple(state, "OOOO", &fn, &fnargs, &kw, &dict) ||
        !PyCallable_Check(fn) ||
        !PyTuple_Check(fnargs) ||
        (kw != Py_None && !PyDict_Check(kw)))
    {
        PyErr_SetString(PyExc_TypeError, "invalid partial state");
        return NULL;
    }

    /* Normalize a tuple subclass to an exact tuple. */
    if(!PyTuple_CheckExact(fnargs))
        fnargs = PySequence_Tuple(fnargs);
    else
        Py_INCREF(fnargs);
    if (fnargs == NULL)
        return NULL;

    /* Normalize kw: None -> empty dict, dict subclass -> exact dict. */
    if (kw == Py_None)
        kw = PyDict_New();
    else if(!PyDict_CheckExact(kw))
        kw = PyDict_Copy(kw);
    else
        Py_INCREF(kw);
    if (kw == NULL) {
        Py_DECREF(fnargs);
        return NULL;
    }

    if (dict == Py_None)
        dict = NULL;
    else
        Py_INCREF(dict);

    Py_SETREF(pto->fn, Py_NewRef(fn));
    Py_SETREF(pto->args, fnargs);
    Py_SETREF(pto->kw, kw);
    Py_XSETREF(pto->dict, dict);
    /* The new fn may or may not support vectorcall; recompute the slot. */
    partial_setvectorcall(pto);
    Py_RETURN_NONE;
}
471
472
/* Method table: pickling support plus PEP 585 generic-alias support. */
static PyMethodDef partial_methods[] = {
    {"__reduce__", (PyCFunction)partial_reduce, METH_NOARGS},
    {"__setstate__", (PyCFunction)partial_setstate, METH_O},
    {"__class_getitem__",    Py_GenericAlias,
    METH_O|METH_CLASS,       PyDoc_STR("See PEP 585")},
    {NULL,              NULL}           /* sentinel */
};
479
480
/* Slot table for the functools.partial heap type. */
static PyType_Slot partial_type_slots[] = {
    {Py_tp_dealloc, partial_dealloc},
    {Py_tp_repr, partial_repr},
    {Py_tp_call, partial_call},
    {Py_tp_getattro, PyObject_GenericGetAttr},
    {Py_tp_setattro, PyObject_GenericSetAttr},
    {Py_tp_doc, (void *)partial_doc},
    {Py_tp_traverse, partial_traverse},
    {Py_tp_clear, partial_clear},
    {Py_tp_methods, partial_methods},
    {Py_tp_members, partial_memberlist},
    {Py_tp_getset, partial_getsetlist},
    {Py_tp_new, partial_new},
    {Py_tp_free, PyObject_GC_Del},
    {0, 0}
};
496
497
/* Type spec for functools.partial: a GC-tracked, subclassable,
   vectorcall-capable immutable heap type. */
static PyType_Spec partial_type_spec = {
    .name = "functools.partial",
    .basicsize = sizeof(partialobject),
    .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
             Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_VECTORCALL |
             Py_TPFLAGS_IMMUTABLETYPE,
    .slots = partial_type_slots
};
505
506
507
/* cmp_to_key ***************************************************************/
508
509
/* Wrapper produced by functools.cmp_to_key(): pairs a user comparison
   function with the value being sorted; comparisons are delegated to
   the cmp function by keyobject_richcompare(). */
typedef struct {
    PyObject_HEAD
    PyObject *cmp;     /* user-supplied comparison function */
    PyObject *object;  /* wrapped value; NULL on the factory object */
} keyobject;
514
515
static int
516
keyobject_clear(keyobject *ko)
517
{
518
Py_CLEAR(ko->cmp);
519
Py_CLEAR(ko->object);
520
return 0;
521
}
522
523
/* tp_dealloc for KeyWrapper instances: untrack, drop references,
   free, then release the heap type's reference. */
static void
keyobject_dealloc(keyobject *ko)
{
    PyTypeObject *tp = Py_TYPE(ko);
    PyObject_GC_UnTrack(ko);
    (void)keyobject_clear(ko);
    tp->tp_free(ko);
    Py_DECREF(tp);
}
532
533
static int
534
keyobject_traverse(keyobject *ko, visitproc visit, void *arg)
535
{
536
Py_VISIT(Py_TYPE(ko));
537
Py_VISIT(ko->cmp);
538
Py_VISIT(ko->object);
539
return 0;
540
}
541
542
/* Expose the wrapped value as the (writable) "obj" attribute. */
static PyMemberDef keyobject_members[] = {
    {"obj", T_OBJECT,
     offsetof(keyobject, object), 0,
     PyDoc_STR("Value wrapped by a key function.")},
    {NULL}
};
548
549
static PyObject *
550
keyobject_call(keyobject *ko, PyObject *args, PyObject *kwds);
551
552
static PyObject *
553
keyobject_richcompare(PyObject *ko, PyObject *other, int op);
554
555
/* Slot table for functools.KeyWrapper.  Calling an instance produces a
   new wrapper (keyobject_call); rich comparison delegates to the user's
   cmp function (keyobject_richcompare). */
static PyType_Slot keyobject_type_slots[] = {
    {Py_tp_dealloc, keyobject_dealloc},
    {Py_tp_call, keyobject_call},
    {Py_tp_getattro, PyObject_GenericGetAttr},
    {Py_tp_traverse, keyobject_traverse},
    {Py_tp_clear, keyobject_clear},
    {Py_tp_richcompare, keyobject_richcompare},
    {Py_tp_members, keyobject_members},
    {0, 0}
};
565
566
/* Type spec for functools.KeyWrapper: GC-tracked, immutable, and not
   directly instantiable from Python (only via cmp_to_key). */
static PyType_Spec keyobject_type_spec = {
    .name = "functools.KeyWrapper",
    .basicsize = sizeof(keyobject),
    .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION |
              Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_IMMUTABLETYPE),
    .slots = keyobject_type_slots
};
573
574
/* tp_call for KeyWrapper: K(obj) returns a new wrapper around obj that
   shares the same comparison function — this is what key= receives. */
static PyObject *
keyobject_call(keyobject *ko, PyObject *args, PyObject *kwds)
{
    PyObject *object;
    keyobject *result;
    static char *kwargs[] = {"obj", NULL};

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "O:K", kwargs, &object))
        return NULL;

    result = PyObject_GC_New(keyobject, Py_TYPE(ko));
    if (result == NULL) {
        return NULL;
    }
    result->cmp = Py_NewRef(ko->cmp);
    result->object = Py_NewRef(object);
    PyObject_GC_Track(result);
    return (PyObject *)result;
}
593
594
/* tp_richcompare for KeyWrapper: call the user's cmp(x, y) and compare
   its 3-way integer result against zero with the requested operator. */
static PyObject *
keyobject_richcompare(PyObject *ko, PyObject *other, int op)
{
    if (!Py_IS_TYPE(other, Py_TYPE(ko))) {
        PyErr_Format(PyExc_TypeError, "other argument must be K instance");
        return NULL;
    }

    PyObject *compare = ((keyobject *) ko)->cmp;
    assert(compare != NULL);
    PyObject *x = ((keyobject *) ko)->object;
    PyObject *y = ((keyobject *) other)->object;
    if (!x || !y){
        /* "obj" was never set (e.g. on the bare factory object). */
        PyErr_Format(PyExc_AttributeError, "object");
        return NULL;
    }

    /* Call the user's comparison function and translate the 3-way
     * result into true or false (or error).
     */
    PyObject* args[2] = {x, y};
    PyObject *res = PyObject_Vectorcall(compare, args, 2, NULL);
    if (res == NULL) {
        return NULL;
    }

    PyObject *answer = PyObject_RichCompare(res, _PyLong_GetZero(), op);
    Py_DECREF(res);
    return answer;
}
624
625
/*[clinic input]
626
_functools.cmp_to_key
627
628
mycmp: object
629
Function that compares two objects.
630
631
Convert a cmp= function into a key= function.
632
[clinic start generated code]*/
633
634
/* Implementation of functools.cmp_to_key(mycmp): return a KeyWrapper
   factory holding mycmp; calling it with a value yields a comparable
   key object (see keyobject_call / keyobject_richcompare). */
static PyObject *
_functools_cmp_to_key_impl(PyObject *module, PyObject *mycmp)
/*[clinic end generated code: output=71eaad0f4fc81f33 input=d1b76f231c0dfeb3]*/
{
    keyobject *object;
    _functools_state *state;

    state = get_functools_state(module);
    object = PyObject_GC_New(keyobject, state->keyobject_type);
    if (!object)
        return NULL;
    object->cmp = Py_NewRef(mycmp);
    /* The factory has no wrapped value; keyobject_call() fills it in. */
    object->object = NULL;
    PyObject_GC_Track(object);
    return (PyObject *)object;
}
650
651
/* reduce (used to be a builtin) ********************************************/
652
653
// Not converted to argument clinic, because of `args` in-place modification.
654
// AC will affect performance.
655
// Not converted to argument clinic, because of `args` in-place modification.
// AC will affect performance.
/* functools.reduce(function, iterable[, initial]): fold the iterable
   left-to-right with `function`.  For speed, a 2-tuple of call
   arguments is mutated in place whenever we hold its only reference. */
static PyObject *
functools_reduce(PyObject *self, PyObject *args)
{
    PyObject *seq, *func, *result = NULL, *it;

    if (!PyArg_UnpackTuple(args, "reduce", 2, 3, &func, &seq, &result))
        return NULL;
    if (result != NULL)
        Py_INCREF(result);

    it = PyObject_GetIter(seq);
    if (it == NULL) {
        if (PyErr_ExceptionMatches(PyExc_TypeError))
            PyErr_SetString(PyExc_TypeError,
                            "reduce() arg 2 must support iteration");
        Py_XDECREF(result);
        return NULL;
    }

    /* `args` is reused below as the 2-tuple passed to `func`. */
    if ((args = PyTuple_New(2)) == NULL)
        goto Fail;

    for (;;) {
        PyObject *op2;

        if (Py_REFCNT(args) > 1) {
            /* The callable kept a reference to the tuple; we can no
               longer mutate it in place, so start a fresh one. */
            Py_DECREF(args);
            if ((args = PyTuple_New(2)) == NULL)
                goto Fail;
        }

        op2 = PyIter_Next(it);
        if (op2 == NULL) {
            if (PyErr_Occurred())
                goto Fail;
            break;
        }

        if (result == NULL)
            /* No initial value: the first item seeds the accumulator. */
            result = op2;
        else {
            /* Update the args tuple in-place */
            assert(Py_REFCNT(args) == 1);
            Py_XSETREF(_PyTuple_ITEMS(args)[0], result);
            Py_XSETREF(_PyTuple_ITEMS(args)[1], op2);
            if ((result = PyObject_Call(func, args, NULL)) == NULL) {
                goto Fail;
            }
            // bpo-42536: The GC may have untracked this args tuple. Since we're
            // recycling it, make sure it's tracked again:
            if (!_PyObject_GC_IS_TRACKED(args)) {
                _PyObject_GC_TRACK(args);
            }
        }
    }

    Py_DECREF(args);

    if (result == NULL)
        PyErr_SetString(PyExc_TypeError,
                        "reduce() of empty iterable with no initial value");

    Py_DECREF(it);
    return result;

Fail:
    Py_XDECREF(args);
    Py_XDECREF(result);
    Py_DECREF(it);
    return NULL;
}
726
727
PyDoc_STRVAR(functools_reduce_doc,
728
"reduce(function, iterable[, initial]) -> value\n\
729
\n\
730
Apply a function of two arguments cumulatively to the items of a sequence\n\
731
or iterable, from left to right, so as to reduce the iterable to a single\n\
732
value. For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) calculates\n\
733
((((1+2)+3)+4)+5). If initial is present, it is placed before the items\n\
734
of the iterable in the calculation, and serves as a default when the\n\
735
iterable is empty.");
736
737
/* lru_cache object **********************************************************/
738
739
/* There are four principal algorithmic differences from the pure python version:
740
741
1). The C version relies on the GIL instead of having its own reentrant lock.
742
743
2). The prev/next link fields use borrowed references.
744
745
3). For a full cache, the pure python version rotates the location of the
746
root entry so that it never has to move individual links and it can
747
limit updates to just the key and result fields. However, in the C
748
version, links are temporarily removed while the cache dict updates are
749
occurring. Afterwards, they are appended or prepended back into the
750
doubly-linked lists.
751
752
4) In the Python version, the _HashSeq class is used to prevent __hash__
753
from being called more than once. In the C version, the "known hash"
754
variants of dictionary calls are used to the same effect.
755
756
*/
757
758
struct lru_list_elem;
759
struct lru_cache_object;
760
761
/* Node of the lru_cache doubly-linked usage list.  The prev/next links
   are borrowed (the cache dict owns the only strong reference to each
   node); the cached key hash is stored to avoid recomputing it. */
typedef struct lru_list_elem {
    PyObject_HEAD
    struct lru_list_elem *prev, *next;  /* borrowed links */
    Py_hash_t hash;                     /* hash of `key` */
    PyObject *key, *result;
} lru_list_elem;
767
768
/* tp_dealloc for _lru_list_elem.  The prev/next links are borrowed,
   so only key and result are released. */
static void
lru_list_elem_dealloc(lru_list_elem *link)
{
    PyTypeObject *tp = Py_TYPE(link);
    Py_XDECREF(link->key);
    Py_XDECREF(link->result);
    tp->tp_free(link);
    Py_DECREF(tp);
}
777
778
/* Slot table and spec for the internal _lru_list_elem heap type.
   Instances are created only from C code (DISALLOW_INSTANTIATION);
   they are not GC-tracked since the links are borrowed. */
static PyType_Slot lru_list_elem_type_slots[] = {
    {Py_tp_dealloc, lru_list_elem_dealloc},
    {0, 0}
};

static PyType_Spec lru_list_elem_type_spec = {
    .name = "functools._lru_list_elem",
    .basicsize = sizeof(lru_list_elem),
    .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION |
             Py_TPFLAGS_IMMUTABLETYPE,
    .slots = lru_list_elem_type_slots
};
790
791
792
/* Signature shared by the three wrapper strategies (uncached, infinite,
   bounded); `wrapper` below selects one at construction time. */
typedef PyObject *(*lru_cache_ternaryfunc)(struct lru_cache_object *, PyObject *, PyObject *);

/* The lru_cache wrapper object.  `root` doubles as the PyObject header
   and the sentinel node of the circular usage list (root.next is the
   least recently used entry, root.prev the most recent). */
typedef struct lru_cache_object {
    lru_list_elem root;  /* includes PyObject_HEAD */
    lru_cache_ternaryfunc wrapper;  /* selected caching strategy */
    int typed;                      /* typed=True: key includes arg types */
    PyObject *cache;                /* dict: key -> result or link */
    Py_ssize_t hits;
    PyObject *func;                 /* the wrapped user function */
    Py_ssize_t maxsize;
    Py_ssize_t misses;
    /* the kwd_mark is used to delimit args and keywords in the cache keys */
    PyObject *kwd_mark;
    PyTypeObject *lru_list_elem_type;
    PyObject *cache_info_type;      /* namedtuple type for cache_info() */
    PyObject *dict;
    PyObject *weakreflist;
} lru_cache_object;
810
811
/* Build the cache key for a call: a flat tuple of the positional args,
   then (if keywords are present) the kwd_mark sentinel followed by
   alternating keyword name/value pairs, then (if typed) the type of
   every argument value.  A single str/int argument is used directly. */
static PyObject *
lru_cache_make_key(PyObject *kwd_mark, PyObject *args,
                   PyObject *kwds, int typed)
{
    PyObject *key, *keyword, *value;
    Py_ssize_t key_size, pos, key_pos, kwds_size;

    kwds_size = kwds ? PyDict_GET_SIZE(kwds) : 0;

    /* short path, key will match args anyway, which is a tuple */
    if (!typed && !kwds_size) {
        if (PyTuple_GET_SIZE(args) == 1) {
            key = PyTuple_GET_ITEM(args, 0);
            if (PyUnicode_CheckExact(key) || PyLong_CheckExact(key)) {
                /* For common scalar keys, save space by
                   dropping the enclosing args tuple */
                return Py_NewRef(key);
            }
        }
        return Py_NewRef(args);
    }

    key_size = PyTuple_GET_SIZE(args);
    if (kwds_size)
        key_size += kwds_size * 2 + 1;  /* mark + name/value pairs */
    if (typed)
        key_size += PyTuple_GET_SIZE(args) + kwds_size;  /* arg types */

    key = PyTuple_New(key_size);
    if (key == NULL)
        return NULL;

    key_pos = 0;
    for (pos = 0; pos < PyTuple_GET_SIZE(args); ++pos) {
        PyObject *item = PyTuple_GET_ITEM(args, pos);
        PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(item));
    }
    if (kwds_size) {
        PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(kwd_mark));
        for (pos = 0; PyDict_Next(kwds, &pos, &keyword, &value);) {
            PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(keyword));
            PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(value));
        }
        assert(key_pos == PyTuple_GET_SIZE(args) + kwds_size * 2 + 1);
    }
    if (typed) {
        for (pos = 0; pos < PyTuple_GET_SIZE(args); ++pos) {
            PyObject *item = (PyObject *)Py_TYPE(PyTuple_GET_ITEM(args, pos));
            PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(item));
        }
        if (kwds_size) {
            for (pos = 0; PyDict_Next(kwds, &pos, &keyword, &value);) {
                PyObject *item = (PyObject *)Py_TYPE(value);
                PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(item));
            }
        }
    }
    assert(key_pos == key_size);
    return key;
}
871
872
static PyObject *
873
uncached_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
874
{
875
PyObject *result;
876
877
self->misses++;
878
result = PyObject_Call(self->func, args, kwds);
879
if (!result)
880
return NULL;
881
return result;
882
}
883
884
/* Wrapper strategy for maxsize == None: an unbounded cache.  Results
   are stored directly in the dict (no usage list, no eviction), using
   the known-hash dict calls so __hash__ runs only once per call. */
static PyObject *
infinite_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
{
    PyObject *result;
    Py_hash_t hash;
    PyObject *key = lru_cache_make_key(self->kwd_mark, args, kwds, self->typed);
    if (!key)
        return NULL;
    hash = PyObject_Hash(key);
    if (hash == -1) {
        Py_DECREF(key);
        return NULL;
    }
    result = _PyDict_GetItem_KnownHash(self->cache, key, hash);
    if (result) {
        /* Cache hit: return a new reference to the stored result. */
        Py_INCREF(result);
        self->hits++;
        Py_DECREF(key);
        return result;
    }
    if (PyErr_Occurred()) {
        /* The lookup itself raised (e.g. from a key __eq__). */
        Py_DECREF(key);
        return NULL;
    }
    self->misses++;
    result = PyObject_Call(self->func, args, kwds);
    if (!result) {
        Py_DECREF(key);
        return NULL;
    }
    if (_PyDict_SetItem_KnownHash(self->cache, key, result, hash) < 0) {
        Py_DECREF(result);
        Py_DECREF(key);
        return NULL;
    }
    Py_DECREF(key);
    return result;
}
922
923
static void
924
lru_cache_extract_link(lru_list_elem *link)
925
{
926
lru_list_elem *link_prev = link->prev;
927
lru_list_elem *link_next = link->next;
928
link_prev->next = link->next;
929
link_next->prev = link->prev;
930
}
931
932
static void
933
lru_cache_append_link(lru_cache_object *self, lru_list_elem *link)
934
{
935
lru_list_elem *root = &self->root;
936
lru_list_elem *last = root->prev;
937
last->next = root->prev = link;
938
link->prev = last;
939
link->next = root;
940
}
941
942
static void
943
lru_cache_prepend_link(lru_cache_object *self, lru_list_elem *link)
944
{
945
lru_list_elem *root = &self->root;
946
lru_list_elem *first = root->next;
947
first->prev = root->next = link;
948
link->prev = root;
949
link->next = first;
950
}
951
952
/* General note on reentrancy:
953
954
There are four dictionary calls in the bounded_lru_cache_wrapper():
955
1) The initial check for a cache match. 2) The post user-function
956
check for a cache match. 3) The deletion of the oldest entry.
957
4) The addition of the newest entry.
958
959
In all four calls, we have a known hash which lets use avoid a call
960
to __hash__(). That leaves only __eq__ as a possible source of a
961
reentrant call.
962
963
The __eq__ method call is always made for a cache hit (dict access #1).
964
Accordingly, we have make sure not modify the cache state prior to
965
this call.
966
967
The __eq__ method call is never made for the deletion (dict access #3)
968
because it is an identity match.
969
970
For the other two accesses (#2 and #4), calls to __eq__ only occur
971
when some other entry happens to have an exactly matching hash (all
972
64-bits). Though rare, this can happen, so we have to make sure to
973
either call it at the top of its code path before any cache
974
state modifications (dict access #2) or be prepared to restore
975
invariants at the end of the code path (dict access #4).
976
977
Another possible source of reentrancy is a decref which can trigger
978
arbitrary code execution. To make the code easier to reason about,
979
the decrefs are deferred to the end of each possible code path
980
so that we know the cache is a consistent state.
981
*/
982
983
/* Wrapper strategy for a fixed maxsize > 0.  Cache values are
   lru_list_elem links stored in the dict and threaded through the
   circular usage list.  See the reentrancy notes above: statement
   order here is deliberate and must not be rearranged. */
static PyObject *
bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
{
    lru_list_elem *link;
    PyObject *key, *result, *testresult;
    Py_hash_t hash;

    key = lru_cache_make_key(self->kwd_mark, args, kwds, self->typed);
    if (!key)
        return NULL;
    hash = PyObject_Hash(key);
    if (hash == -1) {
        Py_DECREF(key);
        return NULL;
    }
    link  = (lru_list_elem *)_PyDict_GetItem_KnownHash(self->cache, key, hash);
    if (link != NULL) {
        /* Cache hit: move the link to the MRU end and return. */
        lru_cache_extract_link(link);
        lru_cache_append_link(self, link);
        result = link->result;
        self->hits++;
        Py_INCREF(result);
        Py_DECREF(key);
        return result;
    }
    if (PyErr_Occurred()) {
        /* The lookup itself raised (e.g. from a key __eq__). */
        Py_DECREF(key);
        return NULL;
    }
    self->misses++;
    result = PyObject_Call(self->func, args, kwds);
    if (!result) {
        Py_DECREF(key);
        return NULL;
    }
    testresult = _PyDict_GetItem_KnownHash(self->cache, key, hash);
    if (testresult != NULL) {
        /* Getting here means that this same key was added to the cache
           during the PyObject_Call().  Since the link update is already
           done, we need only return the computed result. */
        Py_DECREF(key);
        return result;
    }
    if (PyErr_Occurred()) {
        /* This is an unusual case since this same lookup
           did not previously trigger an error during lookup.
           Treat it the same as an error in user function
           and return with the error set. */
        Py_DECREF(key);
        Py_DECREF(result);
        return NULL;
    }
    /* This is the normal case.  The new key wasn't found before
       user function call and it is still not there.  So we
       proceed normally and update the cache with the new result. */

    assert(self->maxsize > 0);
    if (PyDict_GET_SIZE(self->cache) < self->maxsize ||
        self->root.next == &self->root)
    {
        /* Cache is not full, so put the result in a new link */
        link = (lru_list_elem *)PyObject_New(lru_list_elem,
                                             self->lru_list_elem_type);
        if (link == NULL) {
            Py_DECREF(key);
            Py_DECREF(result);
            return NULL;
        }

        link->hash = hash;
        link->key = key;
        link->result = result;
        /* What is really needed here is a SetItem variant with a "no clobber"
           option.  If the __eq__ call triggers a reentrant call that adds
           this same key, then this setitem call will update the cache dict
           with this new link, leaving the old link as an orphan (i.e. not
           having a cache dict entry that refers to it). */
        if (_PyDict_SetItem_KnownHash(self->cache, key, (PyObject *)link,
                                      hash) < 0) {
            Py_DECREF(link);
            return NULL;
        }
        lru_cache_append_link(self, link);
        return Py_NewRef(result);
    }
    /* Since the cache is full, we need to evict an old key and add
       a new key.  Rather than free the old link and allocate a new
       one, we reuse the link for the new key and result and move it
       to front of the cache to mark it as recently used.

       We try to assure all code paths (including errors) leave all
       of the links in place.  Either the link is successfully
       updated and moved or it is restored to its old position.
       However if an unrecoverable error is found, it doesn't
       make sense to reinsert the link, so we leave it out
       and the cache will no longer register as full.
    */
    PyObject *oldkey, *oldresult, *popresult;

    /* Extract the oldest item. */
    assert(self->root.next != &self->root);
    link = self->root.next;
    lru_cache_extract_link(link);
    /* Remove it from the cache.
       The cache dict holds one reference to the link.
       We created one other reference when the link was created.
       The linked list only has borrowed references. */
    popresult = _PyDict_Pop_KnownHash(self->cache, link->key,
                                      link->hash, Py_None);
    if (popresult == Py_None) {
        /* Getting here means that the user function call or another
           thread has already removed the old key from the dictionary.
           This link is now an orphan.  Since we don't want to leave the
           cache in an inconsistent state, we don't restore the link. */
        Py_DECREF(popresult);
        Py_DECREF(link);
        Py_DECREF(key);
        return result;
    }
    if (popresult == NULL) {
        /* An error arose while trying to remove the oldest key (the one
           being evicted) from the cache.  We restore the link to its
           original position as the oldest link.  Then we allow the
           error propagate upward; treating it the same as an error
           arising in the user function. */
        lru_cache_prepend_link(self, link);
        Py_DECREF(key);
        Py_DECREF(result);
        return NULL;
    }
    /* Keep a reference to the old key and old result to prevent their
       ref counts from going to zero during the update.  That will
       prevent potentially arbitrary object clean-up code (i.e. __del__)
       from running while we're still adjusting the links. */
    oldkey = link->key;
    oldresult = link->result;

    link->hash = hash;
    link->key = key;
    link->result = result;
    /* Note:  The link is being added to the cache dict without the
       prev and next fields set to valid values.   We have to wait
       for successful insertion in the cache dict before adding the
       link to the linked list.  Otherwise, the potentially reentrant
       __eq__ call could cause the then orphan link to be visited. */
    if (_PyDict_SetItem_KnownHash(self->cache, key, (PyObject *)link,
                                  hash) < 0) {
        /* Somehow the cache dict update failed.  We no longer can
           restore the old link.  Let the error propagate upward and
           leave the cache short one link. */
        Py_DECREF(popresult);
        Py_DECREF(link);
        Py_DECREF(oldkey);
        Py_DECREF(oldresult);
        return NULL;
    }
    lru_cache_append_link(self, link);
    Py_INCREF(result); /* for return */
    Py_DECREF(popresult);
    Py_DECREF(oldkey);
    Py_DECREF(oldresult);
    return result;
}
1146
1147
/* tp_new for _lru_cache_wrapper.
 *
 * Signature: _lru_cache_wrapper(user_function, maxsize, typed,
 *                               cache_info_type).
 * Selects the caching strategy from maxsize:
 *   None  -> infinite_lru_cache_wrapper (maxsize stored as -1)
 *   <= 0  -> uncached_lru_cache_wrapper (negative values clamp to 0)
 *   > 0   -> bounded_lru_cache_wrapper
 * Returns a new reference on success, NULL with an exception set on
 * failure. */
static PyObject *
lru_cache_new(PyTypeObject *type, PyObject *args, PyObject *kw)
{
    PyObject *func, *maxsize_O, *cache_info_type, *cachedict;
    int typed;
    lru_cache_object *obj;
    Py_ssize_t maxsize;
    PyObject *(*wrapper)(lru_cache_object *, PyObject *, PyObject *);
    _functools_state *state;
    static char *keywords[] = {"user_function", "maxsize", "typed",
                               "cache_info_type", NULL};

    if (!PyArg_ParseTupleAndKeywords(args, kw, "OOpO:lru_cache", keywords,
                                     &func, &maxsize_O, &typed,
                                     &cache_info_type)) {
        return NULL;
    }

    if (!PyCallable_Check(func)) {
        PyErr_SetString(PyExc_TypeError,
                        "the first argument must be callable");
        return NULL;
    }

    state = get_functools_state_by_type(type);
    if (state == NULL) {
        return NULL;
    }

    /* select the caching function, and make/inc maxsize_O */
    if (maxsize_O == Py_None) {
        wrapper = infinite_lru_cache_wrapper;
        /* use this only to initialize lru_cache_object attribute maxsize */
        maxsize = -1;
    } else if (PyIndex_Check(maxsize_O)) {
        maxsize = PyNumber_AsSsize_t(maxsize_O, PyExc_OverflowError);
        if (maxsize == -1 && PyErr_Occurred())
            return NULL;
        if (maxsize < 0) {
            /* Negative sizes behave exactly like maxsize=0. */
            maxsize = 0;
        }
        if (maxsize == 0)
            wrapper = uncached_lru_cache_wrapper;
        else
            wrapper = bounded_lru_cache_wrapper;
    } else {
        PyErr_SetString(PyExc_TypeError, "maxsize should be integer or None");
        return NULL;
    }

    if (!(cachedict = PyDict_New()))
        return NULL;

    obj = (lru_cache_object *)type->tp_alloc(type, 0);
    if (obj == NULL) {
        Py_DECREF(cachedict);
        return NULL;
    }

    /* The root sentinel starts out as an empty circular list. */
    obj->root.prev = &obj->root;
    obj->root.next = &obj->root;
    obj->wrapper = wrapper;
    obj->typed = typed;
    obj->cache = cachedict;  /* ownership of cachedict transfers here */
    obj->func = Py_NewRef(func);
    obj->misses = obj->hits = 0;
    obj->maxsize = maxsize;
    obj->kwd_mark = Py_NewRef(state->kwd_mark);
    obj->lru_list_elem_type = (PyTypeObject*)Py_NewRef(state->lru_list_elem_type);
    obj->cache_info_type = Py_NewRef(cache_info_type);
    obj->dict = NULL;
    obj->weakreflist = NULL;
    return (PyObject *)obj;
}
1221
1222
static lru_list_elem *
1223
lru_cache_unlink_list(lru_cache_object *self)
1224
{
1225
lru_list_elem *root = &self->root;
1226
lru_list_elem *link = root->next;
1227
if (link == root)
1228
return NULL;
1229
root->prev->next = NULL;
1230
root->next = root->prev = root;
1231
return link;
1232
}
1233
1234
static void
1235
lru_cache_clear_list(lru_list_elem *link)
1236
{
1237
while (link != NULL) {
1238
lru_list_elem *next = link->next;
1239
Py_SETREF(link, next);
1240
}
1241
}
1242
1243
/* tp_clear: drop every reference owned by the wrapper.
   The linked list is unlinked first and its elements are released
   last, so that any arbitrary code triggered by the DECREFs (e.g.
   __del__ of cached values) observes the cache in a consistent,
   already-emptied state. */
static int
lru_cache_tp_clear(lru_cache_object *self)
{
    lru_list_elem *list = lru_cache_unlink_list(self);
    Py_CLEAR(self->cache);
    Py_CLEAR(self->func);
    Py_CLEAR(self->kwd_mark);
    Py_CLEAR(self->lru_list_elem_type);
    Py_CLEAR(self->cache_info_type);
    Py_CLEAR(self->dict);
    /* Last: this may run arbitrary destructors. */
    lru_cache_clear_list(list);
    return 0;
}
1256
1257
/* tp_dealloc: untrack from the GC before any callback can run, clear
   weakrefs, release all owned references, then free the instance.
   The type is a heap type, so the instance holds a reference to it
   that must be dropped after tp_free. */
static void
lru_cache_dealloc(lru_cache_object *obj)
{
    PyTypeObject *tp = Py_TYPE(obj);
    /* bpo-31095: UnTrack is needed before calling any callbacks */
    PyObject_GC_UnTrack(obj);
    if (obj->weakreflist != NULL) {
        PyObject_ClearWeakRefs((PyObject*)obj);
    }

    (void)lru_cache_tp_clear(obj);
    tp->tp_free(obj);
    Py_DECREF(tp);  /* heap type: drop the instance's reference */
}
1271
1272
static PyObject *
1273
lru_cache_call(lru_cache_object *self, PyObject *args, PyObject *kwds)
1274
{
1275
return self->wrapper(self, args, kwds);
1276
}
1277
1278
/* tp_descr_get: make the wrapper behave like a plain function when
   used as a class attribute -- instance access yields a bound method,
   class access returns the wrapper itself. */
static PyObject *
lru_cache_descr_get(PyObject *self, PyObject *obj, PyObject *type)
{
    if (obj != NULL && obj != Py_None) {
        /* Bind to the instance, as plain functions do. */
        return PyMethod_New(self, obj);
    }
    return Py_NewRef(self);
}
1286
1287
/*[clinic input]
1288
_functools._lru_cache_wrapper.cache_info
1289
1290
Report cache statistics
1291
[clinic start generated code]*/
1292
1293
static PyObject *
_functools__lru_cache_wrapper_cache_info_impl(PyObject *self)
/*[clinic end generated code: output=cc796a0b06dbd717 input=f05e5b6ebfe38645]*/
{
    /* Build cache_info_type(hits, misses, maxsize, currsize). */
    lru_cache_object *wrapper = (lru_cache_object *)self;
    Py_ssize_t currsize = PyDict_GET_SIZE(wrapper->cache);
    if (wrapper->maxsize != -1) {
        /* Bounded (or uncached) wrapper: report the numeric maxsize. */
        return PyObject_CallFunction(wrapper->cache_info_type, "nnnn",
                                     wrapper->hits, wrapper->misses,
                                     wrapper->maxsize, currsize);
    }
    /* maxsize == -1 marks an unbounded cache; report maxsize=None. */
    return PyObject_CallFunction(wrapper->cache_info_type, "nnOn",
                                 wrapper->hits, wrapper->misses,
                                 Py_None, currsize);
}
1307
1308
/*[clinic input]
1309
_functools._lru_cache_wrapper.cache_clear
1310
1311
Clear the cache and cache statistics
1312
[clinic start generated code]*/
1313
1314
/* Empty the cache and reset the hit/miss counters.
   The list is unlinked before the dict is cleared and freed last,
   so any reentrant code triggered by the element DECREFs sees an
   already-empty, consistent cache. */
static PyObject *
_functools__lru_cache_wrapper_cache_clear_impl(PyObject *self)
/*[clinic end generated code: output=58423b35efc3e381 input=6ca59dba09b12584]*/
{
    lru_cache_object *_self = (lru_cache_object *) self;
    lru_list_elem *list = lru_cache_unlink_list(_self);
    _self->hits = _self->misses = 0;
    PyDict_Clear(_self->cache);
    /* Last: may run arbitrary destructors of cached keys/values. */
    lru_cache_clear_list(list);
    Py_RETURN_NONE;
}
1325
1326
/* __reduce__: pickle the wrapper by qualified name, the same way
   plain module-level functions pickle by reference. */
static PyObject *
lru_cache_reduce(PyObject *self, PyObject *unused)
{
    (void)unused;
    return PyObject_GetAttrString(self, "__qualname__");
}
1331
1332
/* __copy__: copying a cache wrapper returns the wrapper itself. */
static PyObject *
lru_cache_copy(PyObject *self, PyObject *unused)
{
    (void)unused;
    Py_INCREF(self);
    return self;
}
1337
1338
/* __deepcopy__: like __copy__, deep-copying returns the wrapper
   itself rather than duplicating it. */
static PyObject *
lru_cache_deepcopy(PyObject *self, PyObject *unused)
{
    (void)unused;
    Py_INCREF(self);
    return self;
}
1343
1344
/* tp_traverse: visit the heap type, each key/result held through the
   linked list, and every directly owned member.  Note that the list
   links themselves are borrowed from the cache dict here; each link
   owns its key and result references. */
static int
lru_cache_tp_traverse(lru_cache_object *self, visitproc visit, void *arg)
{
    /* Instances of heap types keep their type alive; visit it. */
    Py_VISIT(Py_TYPE(self));
    lru_list_elem *link = self->root.next;
    while (link != &self->root) {
        lru_list_elem *next = link->next;
        Py_VISIT(link->key);
        Py_VISIT(link->result);
        Py_VISIT(Py_TYPE(link));
        link = next;
    }
    Py_VISIT(self->cache);
    Py_VISIT(self->func);
    Py_VISIT(self->kwd_mark);
    Py_VISIT(self->lru_list_elem_type);
    Py_VISIT(self->cache_info_type);
    Py_VISIT(self->dict);
    return 0;
}
1364
1365
1366
/* Docstring for the _lru_cache_wrapper type (the internal wrapper
   behind functools.lru_cache). */
PyDoc_STRVAR(lru_cache_doc,
"Create a cached callable that wraps another function.\n\
\n\
user_function: the function being cached\n\
\n\
maxsize: 0 for no caching\n\
None for unlimited cache size\n\
n for a bounded cache\n\
\n\
typed: False cache f(3) and f(3.0) as identical calls\n\
True cache f(3) and f(3.0) as distinct calls\n\
\n\
cache_info_type: namedtuple class with the fields:\n\
hits misses currsize maxsize\n"
);
1381
1382
/* Methods of _lru_cache_wrapper instances.  cache_info/cache_clear
   come from Argument Clinic; __reduce__ pickles by qualified name and
   __copy__/__deepcopy__ return the wrapper itself. */
static PyMethodDef lru_cache_methods[] = {
    _FUNCTOOLS__LRU_CACHE_WRAPPER_CACHE_INFO_METHODDEF
    _FUNCTOOLS__LRU_CACHE_WRAPPER_CACHE_CLEAR_METHODDEF
    {"__reduce__", (PyCFunction)lru_cache_reduce, METH_NOARGS},
    {"__copy__", (PyCFunction)lru_cache_copy, METH_VARARGS},
    {"__deepcopy__", (PyCFunction)lru_cache_deepcopy, METH_VARARGS},
    {NULL}
};
1390
1391
/* Generic __dict__ access for the wrapper instance. */
static PyGetSetDef lru_cache_getsetlist[] = {
    {"__dict__", PyObject_GenericGetDict, PyObject_GenericSetDict},
    {NULL}
};
1395
1396
/* Expose the struct offsets so the heap type gets instance-dict and
   weakref support (Py_tp_members based offsets). */
static PyMemberDef lru_cache_memberlist[] = {
    {"__dictoffset__", T_PYSSIZET,
     offsetof(lru_cache_object, dict), READONLY},
    {"__weaklistoffset__", T_PYSSIZET,
     offsetof(lru_cache_object, weakreflist), READONLY},
    {NULL}  /* Sentinel */
};
1403
1404
/* Slot table for the _lru_cache_wrapper heap type; consumed by
   lru_cache_type_spec below. */
static PyType_Slot lru_cache_type_slots[] = {
    {Py_tp_dealloc, lru_cache_dealloc},
    {Py_tp_call, lru_cache_call},
    {Py_tp_doc, (void *)lru_cache_doc},
    {Py_tp_traverse, lru_cache_tp_traverse},
    {Py_tp_clear, lru_cache_tp_clear},
    {Py_tp_methods, lru_cache_methods},
    {Py_tp_members, lru_cache_memberlist},
    {Py_tp_getset, lru_cache_getsetlist},
    {Py_tp_descr_get, lru_cache_descr_get},
    {Py_tp_new, lru_cache_new},
    {0, 0}
};
1417
1418
/* Spec for the _lru_cache_wrapper heap type.  METHOD_DESCRIPTOR goes
   with the function-like binding implemented in lru_cache_descr_get;
   IMMUTABLETYPE prevents mutation of the type object itself. */
static PyType_Spec lru_cache_type_spec = {
    .name = "functools._lru_cache_wrapper",
    .basicsize = sizeof(lru_cache_object),
    .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
             Py_TPFLAGS_METHOD_DESCRIPTOR | Py_TPFLAGS_IMMUTABLETYPE,
    .slots = lru_cache_type_slots
};
1425
1426
1427
/* module level code ********************************************************/
1428
1429
/* Module docstring. */
PyDoc_STRVAR(_functools_doc,
"Tools that operate on functions.");
1431
1432
/* Module-level functions: reduce() plus the Argument Clinic entry
   for cmp_to_key(). */
static PyMethodDef _functools_methods[] = {
    {"reduce", functools_reduce, METH_VARARGS, functools_reduce_doc},
    _FUNCTOOLS_CMP_TO_KEY_METHODDEF
    {NULL, NULL}  /* sentinel */
};
1437
1438
/* Py_mod_exec: populate the per-interpreter module state and register
   the public types.  Returns 0 on success or -1 with an exception
   set; partially initialized state is released later via m_clear. */
static int
_functools_exec(PyObject *module)
{
    _functools_state *state = get_functools_state(module);
    /* Unique sentinel separating positional args from keyword items
       in cache keys; any fresh object() instance works. */
    state->kwd_mark = _PyObject_CallNoArgs((PyObject *)&PyBaseObject_Type);
    if (state->kwd_mark == NULL) {
        return -1;
    }

    state->partial_type = (PyTypeObject *)PyType_FromModuleAndSpec(module,
        &partial_type_spec, NULL);
    if (state->partial_type == NULL) {
        return -1;
    }
    if (PyModule_AddType(module, state->partial_type) < 0) {
        return -1;
    }

    /* Unlike the other types, lru_cache_type is not kept in the module
       state, so drop our reference once the module namespace owns it. */
    PyObject *lru_cache_type = PyType_FromModuleAndSpec(module,
        &lru_cache_type_spec, NULL);
    if (lru_cache_type == NULL) {
        return -1;
    }
    if (PyModule_AddType(module, (PyTypeObject *)lru_cache_type) < 0) {
        Py_DECREF(lru_cache_type);
        return -1;
    }
    Py_DECREF(lru_cache_type);

    state->keyobject_type = (PyTypeObject *)PyType_FromModuleAndSpec(module,
        &keyobject_type_spec, NULL);
    if (state->keyobject_type == NULL) {
        return -1;
    }
    // keyobject_type is used only internally.
    // So we don't expose it in module namespace.

    state->lru_list_elem_type = (PyTypeObject *)PyType_FromModuleAndSpec(
        module, &lru_list_elem_type_spec, NULL);
    if (state->lru_list_elem_type == NULL) {
        return -1;
    }
    // lru_list_elem is used only in _lru_cache_wrapper.
    // So we don't expose it in module namespace.

    return 0;
}
1485
1486
static int
1487
_functools_traverse(PyObject *module, visitproc visit, void *arg)
1488
{
1489
_functools_state *state = get_functools_state(module);
1490
Py_VISIT(state->kwd_mark);
1491
Py_VISIT(state->partial_type);
1492
Py_VISIT(state->keyobject_type);
1493
Py_VISIT(state->lru_list_elem_type);
1494
return 0;
1495
}
1496
1497
static int
1498
_functools_clear(PyObject *module)
1499
{
1500
_functools_state *state = get_functools_state(module);
1501
Py_CLEAR(state->kwd_mark);
1502
Py_CLEAR(state->partial_type);
1503
Py_CLEAR(state->keyobject_type);
1504
Py_CLEAR(state->lru_list_elem_type);
1505
return 0;
1506
}
1507
1508
static void
1509
_functools_free(void *module)
1510
{
1511
_functools_clear((PyObject *)module);
1512
}
1513
1514
/* Multi-phase initialization slots (PEP 489); the module supports
   per-interpreter GIL operation. */
static struct PyModuleDef_Slot _functools_slots[] = {
    {Py_mod_exec, _functools_exec},
    {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED},
    {0, NULL}
};
1519
1520
/* Module definition.  m_size > 0 gives each interpreter its own
   _functools_state, managed via the traverse/clear/free hooks. */
static struct PyModuleDef _functools_module = {
    PyModuleDef_HEAD_INIT,
    .m_name = "_functools",
    .m_doc = _functools_doc,
    .m_size = sizeof(_functools_state),
    .m_methods = _functools_methods,
    .m_slots = _functools_slots,
    .m_traverse = _functools_traverse,
    .m_clear = _functools_clear,
    .m_free = _functools_free,
};
1531
1532
/* Multi-phase init entry point (PEP 489): return the module def;
   the real setup runs later in _functools_exec. */
PyMODINIT_FUNC
PyInit__functools(void)
{
    return PyModuleDef_Init(&_functools_module);
}
1537
1538