Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
singlestore-labs
GitHub Repository: singlestore-labs/singlestoredb-python
Path: blob/main/singlestoredb/mysql/cursors.py
469 views
1
# type: ignore
2
import re
3
from collections import namedtuple
4
5
from . import err
6
from ..connection import Cursor as BaseCursor
7
from ..utils import results
8
from ..utils.debug import log_query
9
from ..utils.results import get_schema
10
11
# Optional pydantic support: when pydantic is installed, BaseModel
# instances may be passed as query parameters (see Cursor._escape_args).
try:
    from pydantic import BaseModel
except ImportError:
    has_pydantic = False
else:
    has_pydantic = True
16
17
18
#: Regular expression used by :meth:`Cursor.executemany` to recognize a
#: simple bulk INSERT/REPLACE statement suitable for multi-row rewriting.
#: Group 1 captures everything up to and including ``VALUES``, group 2 the
#: single parenthesized placeholder tuple (``%s`` or ``%(name)s`` style),
#: and group 3 an optional ``ON DUPLICATE ...`` suffix.
RE_INSERT_VALUES = re.compile(
    r'\s*((?:INSERT|REPLACE)\b.+\bVALUES?\s*)'
    r'(\(\s*(?:%s|%\(.+\)s)\s*(?:,\s*(?:%s|%\(.+\)s)\s*)*\))'
    r'(\s*(?:ON DUPLICATE.*)?);?\s*\Z',
    re.IGNORECASE | re.DOTALL,
)
27
28
29
class Cursor(BaseCursor):
    """
    This is the object used to interact with the database.

    Do not create an instance of a Cursor yourself. Call
    connection.Connection.cursor().

    See `Cursor <https://www.python.org/dev/peps/pep-0249/#cursor-objects>`_ in
    the specification.

    Parameters
    ----------
    connection : Connection
        The connection the cursor is associated with.

    """

    #: Max statement size which :meth:`executemany` generates.
    #:
    #: Max size of allowed statement is max_allowed_packet - packet_header_size.
    #: Default value of max_allowed_packet is 1048576.
    max_stmt_length = 1024000

    def __init__(self, connection):
        self._connection = connection
        self.warning_count = 0
        self._description = None
        self._format_schema = None
        self._rownumber = 0
        # DB-API: -1 indicates that no statement has been executed yet.
        self.rowcount = -1
        self.arraysize = 1
        self._executed = None
        self._result = None
        self._rows = None
        self.lastrowid = None

    @property
    def messages(self):
        """Diagnostic messages for the cursor (currently always empty)."""
        # TODO
        return []

    @property
    def description(self):
        """Column descriptions of the last result set, or None."""
        return self._description

    @property
    def _schema(self):
        # Schema derived from the result description; consumed by the
        # Arrow/numpy/pandas/polars fetch mixins.
        return self._format_schema

    @property
    def connection(self):
        """The Connection this cursor is bound to."""
        return self._connection

    @property
    def rownumber(self):
        """Zero-based position of the cursor within the result set."""
        return self._rownumber

    def close(self):
        """Closing a cursor just exhausts all remaining data."""
        conn = self._connection
        if conn is None:
            return
        try:
            # Drain any pending result sets so the connection is reusable.
            while self.nextset():
                pass
        finally:
            self._connection = None

    @property
    def open(self) -> bool:
        """Whether the cursor is still usable (not closed)."""
        conn = self._connection
        if conn is None:
            return False
        return True

    def is_connected(self):
        """Return True if the cursor is still attached to a connection."""
        return self.open

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        del exc_info
        self.close()

    def _get_db(self):
        # Return the underlying connection, or raise if the cursor is closed.
        if not self._connection:
            raise err.ProgrammingError('Cursor closed')
        return self._connection

    def _check_executed(self):
        # Guard used by fetch methods: a statement must be executed first.
        if not self._executed:
            raise err.ProgrammingError('execute() first')

    def _conv_row(self, row):
        # Hook for subclasses (dict/namedtuple mixins) to convert raw rows.
        return row

    def setinputsizes(self, *args):
        """Does nothing, required by DB API."""

    def setoutputsizes(self, *args):
        """Does nothing, required by DB API."""

    setoutputsize = setoutputsizes

    def _nextset(self, unbuffered=False):
        """Get the next query set."""
        conn = self._get_db()
        current_result = self._result
        # Only advance when our result is the connection's current one;
        # otherwise another cursor has taken over the connection.
        if current_result is None or current_result is not conn._result:
            return None
        if not current_result.has_next:
            return None
        self._result = None
        self._clear_result()
        conn.next_result(unbuffered=unbuffered)
        self._do_get_result()
        return True

    def nextset(self):
        """Advance to the next result set, if any."""
        return self._nextset(False)

    def _escape_args(self, args, conn):
        # Escape query parameters into SQL literal strings.  Sequences map
        # to tuples of literals, mappings to dicts of literals, and pydantic
        # models are converted through model_dump() first.
        dtype = type(args)
        literal = conn.literal
        if dtype is tuple or dtype is list or isinstance(args, (tuple, list)):
            return tuple(literal(arg) for arg in args)
        elif dtype is dict or isinstance(args, dict):
            return {key: literal(val) for (key, val) in args.items()}
        elif has_pydantic and isinstance(args, BaseModel):
            return {key: literal(val) for (key, val) in args.model_dump().items()}
        # If it's not a dictionary let's try escaping it anyways.
        # Worst case it will throw a Value error
        return conn.escape(args)

    def mogrify(self, query, args=None):
        """
        Returns the exact string sent to the database by calling the execute() method.

        This method follows the extension to the DB API 2.0 followed by Psycopg.

        Parameters
        ----------
        query : str
            Query to mogrify.
        args : Sequence[Any] or Dict[str, Any] or Any, optional
            Parameters used with query. (optional)

        Returns
        -------
        str : The query with argument binding applied.

        """
        conn = self._get_db()

        if args:
            # %-interpolation of the escaped arguments into the query text.
            query = query % self._escape_args(args, conn)

        return query

    def execute(self, query, args=None, infile_stream=None):
        """
        Execute a query.

        If args is a list or tuple, :1, :2, etc. can be used as a
        placeholder in the query. If args is a dict, :name can be used
        as a placeholder in the query.

        Parameters
        ----------
        query : str
            Query to execute.
        args : Sequence[Any] or Dict[str, Any] or Any, optional
            Parameters used with query. (optional)
        infile_stream : io.BytesIO or Iterator[bytes], optional
            Data stream for ``LOCAL INFILE`` statements

        Returns
        -------
        int : Number of affected rows.

        """
        # Drain any result sets left over from a previous statement.
        while self.nextset():
            pass

        log_query(query, args)

        query = self.mogrify(query, args)

        result = self._query(query, infile_stream=infile_stream)
        self._executed = query
        return result

    def executemany(self, query, args=None):
        """
        Run several data against one query.

        This method improves performance on multiple-row INSERT and
        REPLACE. Otherwise it is equivalent to looping over args with
        execute().

        Parameters
        ----------
        query : str,
            Query to execute.
        args : Sequence[Any], optional
            Sequence of sequences or mappings. It is used as parameter.

        Returns
        -------
        int : Number of rows affected, if any.

        """
        if args is None or len(args) == 0:
            return

        m = RE_INSERT_VALUES.match(query)
        if m:
            # Simple bulk INSERT/REPLACE: rewrite into multi-row VALUES
            # statements bounded by max_stmt_length.
            q_prefix = m.group(1) % ()
            q_values = m.group(2).rstrip()
            q_postfix = m.group(3) or ''
            assert q_values[0] == '(' and q_values[-1] == ')'
            return self._do_execute_many(
                q_prefix,
                q_values,
                q_postfix,
                args,
                self.max_stmt_length,
                self._get_db().encoding,
            )

        # Fallback: execute once per argument set.
        self.rowcount = sum(self.execute(query, arg) for arg in args)
        return self.rowcount

    def _do_execute_many(
        self, prefix, values, postfix, args, max_stmt_length, encoding,
    ):
        # Build multi-row INSERT statements, flushing whenever the next
        # row group would exceed max_stmt_length.
        conn = self._get_db()
        escape = self._escape_args
        if isinstance(prefix, str):
            prefix = prefix.encode(encoding)
        if isinstance(postfix, str):
            postfix = postfix.encode(encoding)
        sql = bytearray(prefix)
        # Detect dataframes
        if hasattr(args, 'itertuples'):
            args = args.itertuples(index=False)
        else:
            args = iter(args)
        # First row is appended without a leading comma.
        v = values % escape(next(args), conn)
        if isinstance(v, str):
            v = v.encode(encoding, 'surrogateescape')
        sql += v
        rows = 0
        for arg in args:
            v = values % escape(arg, conn)
            if type(v) is str or isinstance(v, str):
                v = v.encode(encoding, 'surrogateescape')
            if len(sql) + len(v) + len(postfix) + 1 > max_stmt_length:
                # Statement full: send it and start a new one.
                rows += self.execute(sql + postfix)
                sql = bytearray(prefix)
            else:
                sql += b','
            sql += v
        rows += self.execute(sql + postfix)
        self.rowcount = rows
        return rows

    def callproc(self, procname, args=()):
        """
        Execute stored procedure procname with args.

        Compatibility warning: PEP-249 specifies that any modified
        parameters must be returned. This is currently impossible
        as they are only available by storing them in a server
        variable and then retrieved by a query. Since stored
        procedures return zero or more result sets, there is no
        reliable way to get at OUT or INOUT parameters via callproc.
        The server variables are named @_procname_n, where procname
        is the parameter above and n is the position of the parameter
        (from zero). Once all result sets generated by the procedure
        have been fetched, you can issue a SELECT @_procname_0, ...
        query using .execute() to get any OUT or INOUT values.

        Compatibility warning: The act of calling a stored procedure
        itself creates an empty result set. This appears after any
        result sets generated by the procedure. This is non-standard
        behavior with respect to the DB-API. Be sure to use nextset()
        to advance through all result sets; otherwise you may get
        disconnected.

        Parameters
        ----------
        procname : str
            Name of procedure to execute on server.
        args : Sequence[Any], optional
            Sequence of parameters to use with procedure.

        Returns
        -------
        Sequence[Any] : The original args.

        """
        conn = self._get_db()
        if args:
            # Stage the arguments in server-side variables @_procname_N.
            fmt = f'@_{procname}_%d=%s'
            self._query(
                'SET %s'
                % ','.join(
                    fmt % (index, conn.escape(arg)) for index, arg in enumerate(args)
                ),
            )
            self.nextset()

        q = 'CALL {}({})'.format(
            procname,
            ','.join(['@_%s_%d' % (procname, i) for i in range(len(args))]),
        )
        self._query(q)
        self._executed = q
        return args

    def fetchone(self):
        """Fetch the next row."""
        self._check_executed()
        return self._unchecked_fetchone()

    def _unchecked_fetchone(self):
        """Fetch the next row."""
        if self._rows is None or self._rownumber >= len(self._rows):
            return None
        result = self._rows[self._rownumber]
        self._rownumber += 1
        return result

    def fetchmany(self, size=None):
        """Fetch several rows."""
        self._check_executed()
        if self._rows is None:
            self.warning_count = self._result.warning_count
            return ()
        end = self._rownumber + (size or self.arraysize)
        result = self._rows[self._rownumber: end]
        self._rownumber = min(end, len(self._rows))
        return result

    def fetchall(self):
        """Fetch all the rows."""
        self._check_executed()
        if self._rows is None:
            return ()
        if self._rownumber:
            result = self._rows[self._rownumber:]
        else:
            result = self._rows
        self._rownumber = len(self._rows)
        return result

    def scroll(self, value, mode='relative'):
        """Move the cursor to a new position in the (buffered) result set."""
        self._check_executed()
        if mode == 'relative':
            r = self._rownumber + value
        elif mode == 'absolute':
            r = value
        else:
            raise err.ProgrammingError('unknown scroll mode %s' % mode)

        if not (0 <= r < len(self._rows)):
            raise IndexError('out of range')
        self._rownumber = r

    def _query(self, q, infile_stream=None):
        # Send the statement to the server and load the result metadata.
        conn = self._get_db()
        self._clear_result()
        conn.query(q, infile_stream=infile_stream)
        self._do_get_result()
        return self.rowcount

    def _clear_result(self):
        # Reset all per-result state before running a new statement.
        self._rownumber = 0
        self._result = None

        self.rowcount = 0
        self.warning_count = 0
        self._description = None
        self._format_schema = None
        self.lastrowid = None
        self._rows = None

    def _do_get_result(self):
        # Copy result metadata from the connection's current result object.
        conn = self._get_db()

        self._result = result = conn._result

        self.rowcount = result.affected_rows
        self.warning_count = result.warning_count
        # Affected rows is set to max int64 for compatibility with MySQLdb, but
        # the DB-API requires this value to be -1. This happens in unbuffered mode.
        if self.rowcount == 18446744073709551615:
            self.rowcount = -1
        self._description = result.description
        if self._description:
            self._format_schema = get_schema(
                self.connection._results_type,
                result.description,
            )
        self.lastrowid = result.insert_id
        self._rows = result.rows

    def __iter__(self):
        """Iterate over the remaining rows of the result set."""
        self._check_executed()

        # Bind the bound method as a default argument so the generator
        # does not hold a strong reference to self through a closure.
        def fetchall_unbuffered_gen(_unchecked_fetchone=self._unchecked_fetchone):
            while True:
                out = _unchecked_fetchone()
                if out is not None:
                    yield out
                else:
                    break
        return fetchall_unbuffered_gen()

    # DB-API 2.0 exception classes exposed on the cursor for convenience.
    Warning = err.Warning
    Error = err.Error
    InterfaceError = err.InterfaceError
    DatabaseError = err.DatabaseError
    DataError = err.DataError
    OperationalError = err.OperationalError
    IntegrityError = err.IntegrityError
    InternalError = err.InternalError
    ProgrammingError = err.ProgrammingError
    NotSupportedError = err.NotSupportedError
460
461
462
class CursorSV(Cursor):
    """Cursor class for C extension."""
    # NOTE(review): no overrides here -- presumably the C extension hooks in
    # at the connection/result level; confirm against the SV connection class.
464
465
466
class ArrowCursorMixin:
    """Fetch methods that convert result rows to Arrow Tables."""

    def fetchone(self):
        """Fetch the next row as an Arrow Table."""
        row = super().fetchone()
        return results.results_to_arrow(
            self.description, row, single=True, schema=self._schema,
        )

    def fetchall(self):
        """Fetch all remaining rows as an Arrow Table."""
        rows = super().fetchall()
        return results.results_to_arrow(
            self.description, rows, schema=self._schema,
        )

    def fetchall_unbuffered(self):
        """Fetch all rows of an unbuffered result as an Arrow Table."""
        rows = super().fetchall_unbuffered()
        return results.results_to_arrow(
            self.description, rows, schema=self._schema,
        )

    def fetchmany(self, size=None):
        """Fetch up to ``size`` rows as an Arrow Table."""
        rows = super().fetchmany(size)
        return results.results_to_arrow(
            self.description, rows, schema=self._schema,
        )
488
489
490
class ArrowCursor(ArrowCursorMixin, Cursor):
    """A cursor which returns results as an Arrow Table."""


class ArrowCursorSV(ArrowCursorMixin, CursorSV):
    """A cursor which returns results as an Arrow Table for C extension."""
496
497
498
class NumpyCursorMixin:
    """Fetch methods that convert result rows to numpy arrays."""

    def fetchone(self):
        """Fetch the next row as a numpy array."""
        row = super().fetchone()
        return results.results_to_numpy(
            self.description, row, single=True, schema=self._schema,
        )

    def fetchall(self):
        """Fetch all remaining rows as a numpy array."""
        rows = super().fetchall()
        return results.results_to_numpy(
            self.description, rows, schema=self._schema,
        )

    def fetchall_unbuffered(self):
        """Fetch all rows of an unbuffered result as a numpy array."""
        rows = super().fetchall_unbuffered()
        return results.results_to_numpy(
            self.description, rows, schema=self._schema,
        )

    def fetchmany(self, size=None):
        """Fetch up to ``size`` rows as a numpy array."""
        rows = super().fetchmany(size)
        return results.results_to_numpy(
            self.description, rows, schema=self._schema,
        )
520
521
522
class NumpyCursor(NumpyCursorMixin, Cursor):
    """A cursor which returns results as a numpy array."""


class NumpyCursorSV(NumpyCursorMixin, CursorSV):
    """A cursor which returns results as a numpy array for C extension."""
528
529
530
class PandasCursorMixin:
    """Fetch methods that convert result rows to pandas DataFrames."""

    def fetchone(self):
        """Fetch the next row as a pandas DataFrame."""
        row = super().fetchone()
        return results.results_to_pandas(
            self.description, row, single=True, schema=self._schema,
        )

    def fetchall(self):
        """Fetch all remaining rows as a pandas DataFrame."""
        rows = super().fetchall()
        return results.results_to_pandas(
            self.description, rows, schema=self._schema,
        )

    def fetchall_unbuffered(self):
        """Fetch all rows of an unbuffered result as a pandas DataFrame."""
        rows = super().fetchall_unbuffered()
        return results.results_to_pandas(
            self.description, rows, schema=self._schema,
        )

    def fetchmany(self, size=None):
        """Fetch up to ``size`` rows as a pandas DataFrame."""
        rows = super().fetchmany(size)
        return results.results_to_pandas(
            self.description, rows, schema=self._schema,
        )
552
553
554
class PandasCursor(PandasCursorMixin, Cursor):
    """A cursor which returns results as a pandas DataFrame."""


class PandasCursorSV(PandasCursorMixin, CursorSV):
    """A cursor which returns results as a pandas DataFrame for C extension."""
560
561
562
class PolarsCursorMixin:
    """Fetch methods that convert result rows to polars DataFrames."""

    def fetchone(self):
        """Fetch the next row as a polars DataFrame."""
        row = super().fetchone()
        return results.results_to_polars(
            self.description, row, single=True, schema=self._schema,
        )

    def fetchall(self):
        """Fetch all remaining rows as a polars DataFrame."""
        rows = super().fetchall()
        return results.results_to_polars(
            self.description, rows, schema=self._schema,
        )

    def fetchall_unbuffered(self):
        """Fetch all rows of an unbuffered result as a polars DataFrame."""
        rows = super().fetchall_unbuffered()
        return results.results_to_polars(
            self.description, rows, schema=self._schema,
        )

    def fetchmany(self, size=None):
        """Fetch up to ``size`` rows as a polars DataFrame."""
        rows = super().fetchmany(size)
        return results.results_to_polars(
            self.description, rows, schema=self._schema,
        )
584
585
586
class PolarsCursor(PolarsCursorMixin, Cursor):
    """A cursor which returns results as a polars DataFrame."""


class PolarsCursorSV(PolarsCursorMixin, CursorSV):
    """A cursor which returns results as a polars DataFrame for C extension."""
592
593
594
class DictCursorMixin:
    """Mixin that converts each result row into a dictionary."""

    # You can override this to use OrderedDict or other dict-like types.
    dict_type = dict

    def _do_get_result(self):
        # After loading result metadata, compute the field names used as
        # dictionary keys and convert any buffered rows in place.
        super(DictCursorMixin, self)._do_get_result()
        fields = []
        if self._description:
            for f in self._result.fields:
                name = f.name
                # Disambiguate duplicate column names with the table name.
                if name in fields:
                    name = f.table_name + '.' + name
                fields.append(name)
            self._fields = fields

        if fields and self._rows:
            self._rows = [self._conv_row(r) for r in self._rows]

    def _conv_row(self, row):
        # Pair a row tuple with its field names; None rows pass through.
        if row is None:
            return None
        return self.dict_type(zip(self._fields, row))
616
617
618
class DictCursor(DictCursorMixin, Cursor):
    """A cursor which returns results as a dictionary."""


class DictCursorSV(Cursor):
    """A cursor which returns results as a dictionary for C extension."""
    # NOTE(review): DictCursorMixin is not used here -- row-to-dict
    # conversion presumably happens inside the C extension; confirm.
624
625
626
class NamedtupleCursorMixin:
    """Mixin that converts each result row into a named tuple."""

    def _do_get_result(self):
        # After loading result metadata, build a namedtuple type from the
        # field names and convert any buffered rows in place.
        super(NamedtupleCursorMixin, self)._do_get_result()
        fields = []
        if self._description:
            for f in self._result.fields:
                name = f.name
                # Disambiguate duplicate column names with the table name.
                if name in fields:
                    name = f.table_name + '.' + name
                fields.append(name)
            self._fields = fields
            # rename=True replaces invalid identifiers (e.g. dotted names)
            # with positional names instead of raising.
            self._namedtuple = namedtuple('Row', self._fields, rename=True)

        if fields and self._rows:
            self._rows = [self._conv_row(r) for r in self._rows]

    def _conv_row(self, row):
        # Build a named tuple from the row values; None rows pass through.
        if row is None:
            return None
        return self._namedtuple(*row)
647
648
649
class NamedtupleCursor(NamedtupleCursorMixin, Cursor):
    """A cursor which returns results in a named tuple."""


class NamedtupleCursorSV(Cursor):
    """A cursor which returns results as a named tuple for C extension."""
    # NOTE(review): NamedtupleCursorMixin is not used here -- conversion
    # presumably happens inside the C extension; confirm.
655
656
657
class SSCursor(Cursor):
    """
    Unbuffered Cursor, mainly useful for queries that return a lot of data,
    or for connections to remote servers over a slow network.

    Instead of copying every row of data into a buffer, this will fetch
    rows as needed. The upside of this is the client uses much less memory,
    and rows are returned much faster when traveling over a slow network
    or if the result set is very big.

    There are limitations, though. The MySQL protocol doesn't support
    returning the total number of rows, so the only way to tell how many rows
    there are is to iterate over every row returned. Also, it currently isn't
    possible to scroll backwards, as only the current row is held in memory.

    """

    def _conv_row(self, row):
        # Hook for subclasses (dict/namedtuple mixins) to convert raw rows.
        return row

    def close(self):
        """Close the cursor, discarding any unread rows."""
        conn = self._connection
        if conn is None:
            return

        # If our unbuffered result is still the connection's current one,
        # it must be drained before the connection can be reused.
        if self._result is not None and self._result is conn._result:
            self._result._finish_unbuffered_query()

        try:
            while self.nextset():
                pass
        finally:
            self._connection = None

    __del__ = close

    def _query(self, q, infile_stream=None):
        # Same as Cursor._query, but the server result is read unbuffered.
        conn = self._get_db()
        self._clear_result()
        conn.query(q, unbuffered=True, infile_stream=infile_stream)
        self._do_get_result()
        return self.rowcount

    def nextset(self):
        """Advance to the next result set, if any."""
        return self._nextset(unbuffered=True)

    def read_next(self):
        """Read next row."""
        return self._conv_row(self._result._read_rowdata_packet_unbuffered())

    def fetchone(self):
        """Fetch next row."""
        self._check_executed()
        return self._unchecked_fetchone()

    def _unchecked_fetchone(self):
        """Fetch next row."""
        row = self.read_next()
        if row is None:
            # End of result set: warnings are only available once all
            # rows have been read.
            self.warning_count = self._result.warning_count
            return None
        self._rownumber += 1
        return row

    def fetchall(self):
        """
        Fetch all, as per MySQLdb.

        Pretty useless for large queries, as it is buffered.
        See fetchall_unbuffered(), if you want an unbuffered
        generator version of this method.

        """
        return list(self.fetchall_unbuffered())

    def fetchall_unbuffered(self):
        """
        Fetch all, implemented as a generator.

        This is not a standard DB-API operation, however, it doesn't make
        sense to return everything in a list, as that would use ridiculous
        memory for large result sets.

        """
        self._check_executed()

        # Bind the bound method as a default argument so the generator
        # does not hold a strong reference to self through a closure.
        def fetchall_unbuffered_gen(_unchecked_fetchone=self._unchecked_fetchone):
            while True:
                out = _unchecked_fetchone()
                if out is not None:
                    yield out
                else:
                    break
        return fetchall_unbuffered_gen()

    def __iter__(self):
        return self.fetchall_unbuffered()

    def fetchmany(self, size=None):
        """Fetch many."""
        self._check_executed()
        if size is None:
            size = self.arraysize

        rows = []
        for i in range(size):
            row = self.read_next()
            if row is None:
                # End of result set reached before `size` rows were read.
                self.warning_count = self._result.warning_count
                break
            rows.append(row)
            self._rownumber += 1
        return rows

    def scroll(self, value, mode='relative'):
        """Move the cursor forward by reading and discarding rows."""
        self._check_executed()

        if mode == 'relative':
            if value < 0:
                raise err.NotSupportedError(
                    'Backwards scrolling not supported by this cursor',
                )

            for _ in range(value):
                self.read_next()
            self._rownumber += value
        elif mode == 'absolute':
            if value < self._rownumber:
                raise err.NotSupportedError(
                    'Backwards scrolling not supported by this cursor',
                )

            end = value - self._rownumber
            for _ in range(end):
                self.read_next()
            self._rownumber = value
        else:
            raise err.ProgrammingError('unknown scroll mode %s' % mode)
795
796
797
class SSCursorSV(SSCursor):
    """An unbuffered cursor for use with PyMySQLsv."""

    def _unchecked_fetchone(self):
        """Fetch next row."""
        # The SV result object takes a row count and handles conversion.
        row = self._result._read_rowdata_packet_unbuffered(1)
        if row is None:
            return None
        self._rownumber += 1
        return row

    def fetchone(self):
        """Fetch next row."""
        self._check_executed()
        return self._unchecked_fetchone()

    def fetchmany(self, size=None):
        """Fetch many."""
        self._check_executed()
        if size is None:
            size = self.arraysize
        out = self._result._read_rowdata_packet_unbuffered(size)
        if out is None:
            return []
        if size == 1:
            # A single-row request returns one row, not a sequence.
            self._rownumber += 1
            return [out]
        self._rownumber += len(out)
        return out

    def scroll(self, value, mode='relative'):
        """Move the cursor forward by reading and discarding rows."""
        self._check_executed()

        if mode == 'relative':
            if value < 0:
                raise err.NotSupportedError(
                    'Backwards scrolling not supported by this cursor',
                )

            self._result._read_rowdata_packet_unbuffered(value)
            self._rownumber += value
        elif mode == 'absolute':
            if value < self._rownumber:
                raise err.NotSupportedError(
                    'Backwards scrolling not supported by this cursor',
                )

            end = value - self._rownumber
            self._result._read_rowdata_packet_unbuffered(end)
            self._rownumber = value
        else:
            raise err.ProgrammingError('unknown scroll mode %s' % mode)
849
850
851
class SSDictCursor(DictCursorMixin, SSCursor):
    """An unbuffered cursor, which returns results as a dictionary."""


class SSDictCursorSV(SSCursorSV):
    """An unbuffered cursor for the C extension, which returns a dictionary."""
    # NOTE(review): DictCursorMixin is not used -- conversion presumably
    # happens inside the C extension; confirm.


class SSNamedtupleCursor(NamedtupleCursorMixin, SSCursor):
    """An unbuffered cursor, which returns results as a named tuple."""


class SSNamedtupleCursorSV(SSCursorSV):
    """An unbuffered cursor for the C extension, which returns results as named tuple."""
    # NOTE(review): NamedtupleCursorMixin is not used -- conversion presumably
    # happens inside the C extension; confirm.


class SSArrowCursor(ArrowCursorMixin, SSCursor):
    """An unbuffered cursor, which returns results as an Arrow Table."""


class SSArrowCursorSV(ArrowCursorMixin, SSCursorSV):
    """An unbuffered cursor, which returns results as an Arrow Table (accelerated)."""


class SSNumpyCursor(NumpyCursorMixin, SSCursor):
    """An unbuffered cursor, which returns results as a numpy array."""


class SSNumpyCursorSV(NumpyCursorMixin, SSCursorSV):
    """An unbuffered cursor, which returns results as a numpy array (accelerated)."""


class SSPandasCursor(PandasCursorMixin, SSCursor):
    """An unbuffered cursor, which returns results as a pandas DataFrame."""


class SSPandasCursorSV(PandasCursorMixin, SSCursorSV):
    """An unbuffered cursor, which returns results as a pandas DataFrame (accelerated)."""


class SSPolarsCursor(PolarsCursorMixin, SSCursor):
    """An unbuffered cursor, which returns results as a polars DataFrame."""


class SSPolarsCursorSV(PolarsCursorMixin, SSCursorSV):
    """An unbuffered cursor, which returns results as a polars DataFrame (accelerated)."""
897
898