GitHub Repository: singlestore-labs/singlestoredb-python
Path: blob/main/singlestoredb/mysql/cursors.py
# type: ignore
import re
from collections import namedtuple

from . import err
from ..connection import Cursor as BaseCursor
from ..utils import results
from ..utils.debug import log_query
from ..utils.mogrify import should_interpolate_query
from ..utils.results import get_schema

try:
    from pydantic import BaseModel
    has_pydantic = True
except ImportError:
    has_pydantic = False


#: Regular expression for :meth:`Cursor.executemany`.
#: executemany only supports simple bulk INSERT / REPLACE statements.
#: You can use it to load a large dataset.
RE_INSERT_VALUES = re.compile(
    r'\s*((?:INSERT|REPLACE)\b.+\bVALUES?\s*)'
    + r'(\(\s*(?:%s|%\(.+\)s)\s*(?:,\s*(?:%s|%\(.+\)s)\s*)*\))'
    + r'(\s*(?:ON DUPLICATE.*)?);?\s*\Z',
    re.IGNORECASE | re.DOTALL,
)
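# Illustrative note (not part of the library source): statements of the
# following form are recognized by RE_INSERT_VALUES, which lets
# :meth:`Cursor.executemany` repeat the VALUES tuple for many rows in a single
# statement instead of round-tripping once per row (table ``t`` is hypothetical):
#
#     INSERT INTO t (a, b) VALUES (%s, %s)
#     INSERT INTO t (a, b) VALUES (%(a)s, %(b)s) ON DUPLICATE KEY UPDATE b = 0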


class Cursor(BaseCursor):
    """
    This is the object used to interact with the database.

    Do not create an instance of a Cursor yourself. Call
    connection.Connection.cursor().

    See `Cursor <https://www.python.org/dev/peps/pep-0249/#cursor-objects>`_ in
    the specification.

    Parameters
    ----------
    connection : Connection
        The connection the cursor is associated with.

    """

    #: Max statement size which :meth:`executemany` generates.
    #:
    #: The max size of an allowed statement is max_allowed_packet - packet_header_size.
    #: The default value of max_allowed_packet is 1048576.
    max_stmt_length = 1024000

    def __init__(self, connection):
        self._connection = connection
        self.warning_count = 0
        self._description = None
        self._format_schema = None
        self._rownumber = 0
        self.rowcount = -1
        self.arraysize = 1
        self._executed = None
        self._result = None
        self._rows = None
        self.lastrowid = None

    @property
    def messages(self):
        # TODO
        return []

    @property
    def description(self):
        return self._description

    @property
    def _schema(self):
        return self._format_schema

    @property
    def connection(self):
        return self._connection

    @property
    def rownumber(self):
        return self._rownumber

    def close(self):
        """Closing a cursor just exhausts all remaining data."""
        conn = self._connection
        if conn is None:
            return
        try:
            while self.nextset():
                pass
        finally:
            self._connection = None

    @property
    def open(self) -> bool:
        conn = self._connection
        if conn is None:
            return False
        return True

    def is_connected(self):
        return self.open

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        del exc_info
        self.close()

    def _get_db(self):
        if not self._connection:
            raise err.ProgrammingError('Cursor closed')
        return self._connection

    def _check_executed(self):
        if not self._executed:
            raise err.ProgrammingError('execute() first')

    def _conv_row(self, row):
        return row

    def setinputsizes(self, *args):
        """Does nothing, required by DB API."""

    def setoutputsizes(self, *args):
        """Does nothing, required by DB API."""

    setoutputsize = setoutputsizes

    def _nextset(self, unbuffered=False):
        """Get the next query set."""
        conn = self._get_db()
        current_result = self._result
        if current_result is None or current_result is not conn._result:
            return None
        if not current_result.has_next:
            return None
        self._result = None
        self._clear_result()
        conn.next_result(unbuffered=unbuffered)
        self._do_get_result()
        return True

    def nextset(self):
        return self._nextset(False)

    def _escape_args(self, args, conn):
        dtype = type(args)
        literal = conn.literal
        if dtype is tuple or dtype is list or isinstance(args, (tuple, list)):
            return tuple(literal(arg) for arg in args)
        elif dtype is dict or isinstance(args, dict):
            return {key: literal(val) for (key, val) in args.items()}
        elif has_pydantic and isinstance(args, BaseModel):
            return {key: literal(val) for (key, val) in args.model_dump().items()}
        # If it's not a sequence or mapping, try escaping it anyway.
        # Worst case, it will raise a ValueError.
        return conn.escape(args)

    def mogrify(self, query, args=None):
        """
        Return the exact string sent to the database by the execute() method.

        This method follows the extension to DB API 2.0 used by Psycopg.

        Parameters
        ----------
        query : str
            Query to mogrify.
        args : Sequence[Any] or Dict[str, Any] or Any, optional
            Parameters used with query.

        Returns
        -------
        str : The query with argument binding applied.

        """
        conn = self._get_db()

        if should_interpolate_query(conn.interpolate_query_with_empty_args, args):
            query = query % self._escape_args(args, conn)

        return query
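
    # Illustrative usage (a sketch, not part of the library source): ``mogrify``
    # shows the fully interpolated statement that ``execute`` would send;
    # ``cur`` and table ``t`` are hypothetical.
    #
    #     sql = cur.mogrify('INSERT INTO t (a, b) VALUES (%s, %s)', (1, 'x'))
    #     cur.execute(sql)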

    def execute(self, query, args=None, infile_stream=None):
        """
        Execute a query.

        If args is a list or tuple, :1, :2, etc. can be used as
        placeholders in the query. If args is a dict, :name can be used
        as a placeholder in the query.

        Parameters
        ----------
        query : str
            Query to execute.
        args : Sequence[Any] or Dict[str, Any] or Any, optional
            Parameters used with query.
        infile_stream : io.BytesIO or Iterator[bytes], optional
            Data stream for ``LOCAL INFILE`` statements.

        Returns
        -------
        int : Number of affected rows.

        """
        while self.nextset():
            pass

        log_query(query, args)

        query = self.mogrify(query, args)

        result = self._query(query, infile_stream=infile_stream)
        self._executed = query
        return result

    def executemany(self, query, args=None):
        """
        Run one query against several sets of parameters.

        This method improves performance on multiple-row INSERT and
        REPLACE. Otherwise it is equivalent to looping over args with
        execute().

        Parameters
        ----------
        query : str
            Query to execute.
        args : Sequence[Any], optional
            Sequence of sequences or mappings used as parameters.

        Returns
        -------
        int : Number of rows affected, if any.

        """
        if args is None or len(args) == 0:
            return

        m = RE_INSERT_VALUES.match(query)
        if m:
            q_prefix = m.group(1) % ()
            q_values = m.group(2).rstrip()
            q_postfix = m.group(3) or ''
            assert q_values[0] == '(' and q_values[-1] == ')'
            return self._do_execute_many(
                q_prefix,
                q_values,
                q_postfix,
                args,
                self.max_stmt_length,
                self._get_db().encoding,
            )

        self.rowcount = sum(self.execute(query, arg) for arg in args)
        return self.rowcount
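
    # Illustrative usage (a sketch, not part of the library source): a
    # multi-row INSERT in the form recognized by RE_INSERT_VALUES, so the rows
    # are packed into batched statements; ``conn`` and table ``t`` are
    # hypothetical.
    #
    #     with conn.cursor() as cur:
    #         cur.executemany(
    #             'INSERT INTO t (a, b) VALUES (%s, %s)',
    #             [(1, 'x'), (2, 'y'), (3, 'z')],
    #         )
    #     conn.commit()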

    def _do_execute_many(
        self, prefix, values, postfix, args, max_stmt_length, encoding,
    ):
        conn = self._get_db()
        escape = self._escape_args
        if isinstance(prefix, str):
            prefix = prefix.encode(encoding)
        if isinstance(postfix, str):
            postfix = postfix.encode(encoding)
        sql = bytearray(prefix)
        # Detect dataframes (objects with an ``itertuples`` method)
        if hasattr(args, 'itertuples'):
            args = args.itertuples(index=False)
        else:
            args = iter(args)
        v = values % escape(next(args), conn)
        if isinstance(v, str):
            v = v.encode(encoding, 'surrogateescape')
        sql += v
        rows = 0
        for arg in args:
            v = values % escape(arg, conn)
            if type(v) is str or isinstance(v, str):
                v = v.encode(encoding, 'surrogateescape')
            # Flush the accumulated statement if appending this row would
            # exceed the maximum statement length.
            if len(sql) + len(v) + len(postfix) + 1 > max_stmt_length:
                rows += self.execute(sql + postfix)
                sql = bytearray(prefix)
            else:
                sql += b','
            sql += v
        rows += self.execute(sql + postfix)
        self.rowcount = rows
        return rows

    def callproc(self, procname, args=()):
        """
        Execute stored procedure procname with args.

        Compatibility warning: PEP-249 specifies that any modified
        parameters must be returned. This is currently not possible,
        as they are only available by storing them in server
        variables and then retrieving them with a query. Since stored
        procedures return zero or more result sets, there is no
        reliable way to get at OUT or INOUT parameters via callproc.
        The server variables are named @_procname_n, where procname
        is the parameter above and n is the position of the parameter
        (from zero). Once all result sets generated by the procedure
        have been fetched, you can issue a SELECT @_procname_0, ...
        query using .execute() to get any OUT or INOUT values.

        Compatibility warning: The act of calling a stored procedure
        itself creates an empty result set. This appears after any
        result sets generated by the procedure. This is non-standard
        behavior with respect to the DB-API. Be sure to use nextset()
        to advance through all result sets; otherwise you may get
        disconnected.

        Parameters
        ----------
        procname : str
            Name of procedure to execute on server.
        args : Sequence[Any], optional
            Sequence of parameters to use with procedure.

        Returns
        -------
        Sequence[Any] : The original args.

        """
        conn = self._get_db()
        if args:
            fmt = f'@_{procname}_%d=%s'
            self._query(
                'SET %s'
                % ','.join(
                    fmt % (index, conn.escape(arg)) for index, arg in enumerate(args)
                ),
            )
            self.nextset()

        q = 'CALL {}({})'.format(
            procname,
            ','.join(['@_%s_%d' % (procname, i) for i in range(len(args))]),
        )
        self._query(q)
        self._executed = q
        return args
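
    # Illustrative usage (a sketch, not part of the library source): reading an
    # OUT parameter from a hypothetical procedure ``myproc(IN a, OUT b)``.
    #
    #     cur.callproc('myproc', (5, 0))
    #     while cur.nextset():              # drain all result sets first
    #         pass
    #     cur.execute('SELECT @_myproc_1')  # OUT/INOUT values live in @_myproc_<n>
    #     out_value = cur.fetchone()[0]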

    def fetchone(self):
        """Fetch the next row."""
        self._check_executed()
        return self._unchecked_fetchone()

    def _unchecked_fetchone(self):
        """Fetch the next row."""
        if self._rows is None or self._rownumber >= len(self._rows):
            return None
        result = self._rows[self._rownumber]
        self._rownumber += 1
        return result

    def fetchmany(self, size=None):
        """Fetch several rows."""
        self._check_executed()
        if self._rows is None:
            self.warning_count = self._result.warning_count
            return ()
        end = self._rownumber + (size or self.arraysize)
        result = self._rows[self._rownumber: end]
        self._rownumber = min(end, len(self._rows))
        return result

    def fetchall(self):
        """Fetch all the rows."""
        self._check_executed()
        if self._rows is None:
            return ()
        if self._rownumber:
            result = self._rows[self._rownumber:]
        else:
            result = self._rows
        self._rownumber = len(self._rows)
        return result
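
    # Illustrative usage (a sketch, not part of the library source): processing
    # a buffered result set in batches with ``fetchmany``; ``cur`` and table
    # ``t`` are hypothetical.
    #
    #     cur.execute('SELECT id, name FROM t')
    #     while True:
    #         batch = cur.fetchmany(500)
    #         if not batch:
    #             break
    #         for row in batch:
    #             ...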

    def scroll(self, value, mode='relative'):
        """Move the cursor position within the buffered result set."""
        self._check_executed()
        if mode == 'relative':
            r = self._rownumber + value
        elif mode == 'absolute':
            r = value
        else:
            raise err.ProgrammingError('unknown scroll mode %s' % mode)

        if not (0 <= r < len(self._rows)):
            raise IndexError('out of range')
        self._rownumber = r
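
    # Illustrative usage (a sketch, not part of the library source): ``scroll``
    # repositions the cursor within the buffered rows.
    #
    #     cur.execute('SELECT id FROM t ORDER BY id')
    #     cur.scroll(10)                  # skip the next 10 rows
    #     cur.scroll(0, mode='absolute')  # rewind to the first row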

    def _query(self, q, infile_stream=None):
        conn = self._get_db()
        self._clear_result()
        conn.query(q, infile_stream=infile_stream)
        self._do_get_result()
        return self.rowcount

    def _clear_result(self):
        self._rownumber = 0
        self._result = None

        self.rowcount = 0
        self.warning_count = 0
        self._description = None
        self._format_schema = None
        self.lastrowid = None
        self._rows = None

    def _do_get_result(self):
        conn = self._get_db()

        self._result = result = conn._result

        self.rowcount = result.affected_rows
        self.warning_count = result.warning_count
        # Affected rows is set to the max unsigned 64-bit value for compatibility
        # with MySQLdb, but the DB-API requires this value to be -1. This happens
        # in unbuffered mode.
        if self.rowcount == 18446744073709551615:
            self.rowcount = -1
        self._description = result.description
        if self._description:
            self._format_schema = get_schema(
                self.connection._results_type,
                result.description,
            )
        self.lastrowid = result.insert_id
        self._rows = result.rows

    def __iter__(self):
        self._check_executed()

        def fetchall_unbuffered_gen(_unchecked_fetchone=self._unchecked_fetchone):
            while True:
                out = _unchecked_fetchone()
                if out is not None:
                    yield out
                else:
                    break
        return fetchall_unbuffered_gen()

    Warning = err.Warning
    Error = err.Error
    InterfaceError = err.InterfaceError
    DatabaseError = err.DatabaseError
    DataError = err.DataError
    OperationalError = err.OperationalError
    IntegrityError = err.IntegrityError
    InternalError = err.InternalError
    ProgrammingError = err.ProgrammingError
    NotSupportedError = err.NotSupportedError


class CursorSV(Cursor):
    """Cursor class for C extension."""


class ArrowCursorMixin:
    """Fetch methods for Arrow Tables."""

    def fetchone(self):
        return results.results_to_arrow(
            self.description, super().fetchone(), single=True, schema=self._schema,
        )

    def fetchall(self):
        return results.results_to_arrow(
            self.description, super().fetchall(), schema=self._schema,
        )

    def fetchall_unbuffered(self):
        return results.results_to_arrow(
            self.description, super().fetchall_unbuffered(), schema=self._schema,
        )

    def fetchmany(self, size=None):
        return results.results_to_arrow(
            self.description, super().fetchmany(size), schema=self._schema,
        )


class ArrowCursor(ArrowCursorMixin, Cursor):
    """A cursor which returns results as an Arrow Table."""


class ArrowCursorSV(ArrowCursorMixin, CursorSV):
    """A cursor which returns results as an Arrow Table for C extension."""


class NumpyCursorMixin:
    """Fetch methods for numpy arrays."""

    def fetchone(self):
        return results.results_to_numpy(
            self.description, super().fetchone(), single=True, schema=self._schema,
        )

    def fetchall(self):
        return results.results_to_numpy(
            self.description, super().fetchall(), schema=self._schema,
        )

    def fetchall_unbuffered(self):
        return results.results_to_numpy(
            self.description, super().fetchall_unbuffered(), schema=self._schema,
        )

    def fetchmany(self, size=None):
        return results.results_to_numpy(
            self.description, super().fetchmany(size), schema=self._schema,
        )


class NumpyCursor(NumpyCursorMixin, Cursor):
    """A cursor which returns results as a numpy array."""


class NumpyCursorSV(NumpyCursorMixin, CursorSV):
    """A cursor which returns results as a numpy array for C extension."""


class PandasCursorMixin:
    """Fetch methods for pandas DataFrames."""

    def fetchone(self):
        return results.results_to_pandas(
            self.description, super().fetchone(), single=True, schema=self._schema,
        )

    def fetchall(self):
        return results.results_to_pandas(
            self.description, super().fetchall(), schema=self._schema,
        )

    def fetchall_unbuffered(self):
        return results.results_to_pandas(
            self.description, super().fetchall_unbuffered(), schema=self._schema,
        )

    def fetchmany(self, size=None):
        return results.results_to_pandas(
            self.description, super().fetchmany(size), schema=self._schema,
        )


class PandasCursor(PandasCursorMixin, Cursor):
    """A cursor which returns results as a pandas DataFrame."""


class PandasCursorSV(PandasCursorMixin, CursorSV):
    """A cursor which returns results as a pandas DataFrame for C extension."""


class PolarsCursorMixin:
    """Fetch methods for polars DataFrames."""

    def fetchone(self):
        return results.results_to_polars(
            self.description, super().fetchone(), single=True, schema=self._schema,
        )

    def fetchall(self):
        return results.results_to_polars(
            self.description, super().fetchall(), schema=self._schema,
        )

    def fetchall_unbuffered(self):
        return results.results_to_polars(
            self.description, super().fetchall_unbuffered(), schema=self._schema,
        )

    def fetchmany(self, size=None):
        return results.results_to_polars(
            self.description, super().fetchmany(size), schema=self._schema,
        )


class PolarsCursor(PolarsCursorMixin, Cursor):
    """A cursor which returns results as a polars DataFrame."""


class PolarsCursorSV(PolarsCursorMixin, CursorSV):
    """A cursor which returns results as a polars DataFrame for C extension."""


class DictCursorMixin:
    # You can override this to use OrderedDict or other dict-like types.
    dict_type = dict

    def _do_get_result(self):
        super(DictCursorMixin, self)._do_get_result()
        fields = []
        if self._description:
            for f in self._result.fields:
                name = f.name
                if name in fields:
                    name = f.table_name + '.' + name
                fields.append(name)
            self._fields = fields

        if fields and self._rows:
            self._rows = [self._conv_row(r) for r in self._rows]

    def _conv_row(self, row):
        if row is None:
            return None
        return self.dict_type(zip(self._fields, row))


class DictCursor(DictCursorMixin, Cursor):
    """A cursor which returns results as a dictionary."""


class DictCursorSV(Cursor):
    """A cursor which returns results as a dictionary for C extension."""


class NamedtupleCursorMixin:

    def _do_get_result(self):
        super(NamedtupleCursorMixin, self)._do_get_result()
        fields = []
        if self._description:
            for f in self._result.fields:
                name = f.name
                if name in fields:
                    name = f.table_name + '.' + name
                fields.append(name)
            self._fields = fields
            self._namedtuple = namedtuple('Row', self._fields, rename=True)

        if fields and self._rows:
            self._rows = [self._conv_row(r) for r in self._rows]

    def _conv_row(self, row):
        if row is None:
            return None
        return self._namedtuple(*row)


class NamedtupleCursor(NamedtupleCursorMixin, Cursor):
    """A cursor which returns results in a named tuple."""


class NamedtupleCursorSV(Cursor):
    """A cursor which returns results as a named tuple for C extension."""


class SSCursor(Cursor):
    """
    Unbuffered Cursor, mainly useful for queries that return a lot of data,
    or for connections to remote servers over a slow network.

    Instead of copying every row of data into a buffer, this will fetch
    rows as needed. The upside of this is that the client uses much less memory,
    and rows are returned much faster when traveling over a slow network
    or if the result set is very big.

    There are limitations, though. The MySQL protocol doesn't support
    returning the total number of rows, so the only way to tell how many rows
    there are is to iterate over every row returned. Also, it currently isn't
    possible to scroll backwards, as only the current row is held in memory.

    """

    def _conv_row(self, row):
        return row

    def close(self):
        conn = self._connection
        if conn is None:
            return

        if self._result is not None and self._result is conn._result:
            self._result._finish_unbuffered_query()

        try:
            while self.nextset():
                pass
        finally:
            self._connection = None

    __del__ = close

    def _query(self, q, infile_stream=None):
        conn = self._get_db()
        self._clear_result()
        conn.query(q, unbuffered=True, infile_stream=infile_stream)
        self._do_get_result()
        return self.rowcount

    def nextset(self):
        return self._nextset(unbuffered=True)

    def read_next(self):
        """Read next row."""
        return self._conv_row(self._result._read_rowdata_packet_unbuffered())

    def fetchone(self):
        """Fetch next row."""
        self._check_executed()
        return self._unchecked_fetchone()

    def _unchecked_fetchone(self):
        """Fetch next row."""
        row = self.read_next()
        if row is None:
            self.warning_count = self._result.warning_count
            return None
        self._rownumber += 1
        return row

    def fetchall(self):
        """
        Fetch all, as per MySQLdb.

        Pretty useless for large queries, as it is buffered.
        See fetchall_unbuffered() if you want an unbuffered
        generator version of this method.

        """
        return list(self.fetchall_unbuffered())

    def fetchall_unbuffered(self):
        """
        Fetch all, implemented as a generator.

        This is not a standard DB-API operation; however, it doesn't make
        sense to return everything in a list, as that would use ridiculous
        memory for large result sets.

        """
        self._check_executed()

        def fetchall_unbuffered_gen(_unchecked_fetchone=self._unchecked_fetchone):
            while True:
                out = _unchecked_fetchone()
                if out is not None:
                    yield out
                else:
                    break
        return fetchall_unbuffered_gen()

    def __iter__(self):
        return self.fetchall_unbuffered()

    def fetchmany(self, size=None):
        """Fetch many."""
        self._check_executed()
        if size is None:
            size = self.arraysize

        rows = []
        for i in range(size):
            row = self.read_next()
            if row is None:
                self.warning_count = self._result.warning_count
                break
            rows.append(row)
            self._rownumber += 1
        return rows

    def scroll(self, value, mode='relative'):
        self._check_executed()

        if mode == 'relative':
            if value < 0:
                raise err.NotSupportedError(
                    'Backwards scrolling not supported by this cursor',
                )

            for _ in range(value):
                self.read_next()
            self._rownumber += value
        elif mode == 'absolute':
            if value < self._rownumber:
                raise err.NotSupportedError(
                    'Backwards scrolling not supported by this cursor',
                )

            end = value - self._rownumber
            for _ in range(end):
                self.read_next()
            self._rownumber = value
        else:
            raise err.ProgrammingError('unknown scroll mode %s' % mode)


class SSCursorSV(SSCursor):
    """An unbuffered cursor for use with the C extension."""

    def _unchecked_fetchone(self):
        """Fetch next row."""
        row = self._result._read_rowdata_packet_unbuffered(1)
        if row is None:
            return None
        self._rownumber += 1
        return row

    def fetchone(self):
        """Fetch next row."""
        self._check_executed()
        return self._unchecked_fetchone()

    def fetchmany(self, size=None):
        """Fetch many."""
        self._check_executed()
        if size is None:
            size = self.arraysize
        out = self._result._read_rowdata_packet_unbuffered(size)
        if out is None:
            return []
        if size == 1:
            self._rownumber += 1
            return [out]
        self._rownumber += len(out)
        return out

    def scroll(self, value, mode='relative'):
        self._check_executed()

        if mode == 'relative':
            if value < 0:
                raise err.NotSupportedError(
                    'Backwards scrolling not supported by this cursor',
                )

            self._result._read_rowdata_packet_unbuffered(value)
            self._rownumber += value
        elif mode == 'absolute':
            if value < self._rownumber:
                raise err.NotSupportedError(
                    'Backwards scrolling not supported by this cursor',
                )

            end = value - self._rownumber
            self._result._read_rowdata_packet_unbuffered(end)
            self._rownumber = value
        else:
            raise err.ProgrammingError('unknown scroll mode %s' % mode)


class SSDictCursor(DictCursorMixin, SSCursor):
    """An unbuffered cursor, which returns results as a dictionary."""


class SSDictCursorSV(SSCursorSV):
    """An unbuffered cursor for the C extension, which returns a dictionary."""


class SSNamedtupleCursor(NamedtupleCursorMixin, SSCursor):
    """An unbuffered cursor, which returns results as a named tuple."""


class SSNamedtupleCursorSV(SSCursorSV):
    """An unbuffered cursor for the C extension, which returns results as a named tuple."""


class SSArrowCursor(ArrowCursorMixin, SSCursor):
    """An unbuffered cursor, which returns results as an Arrow Table."""


class SSArrowCursorSV(ArrowCursorMixin, SSCursorSV):
    """An unbuffered cursor, which returns results as an Arrow Table (accelerated)."""


class SSNumpyCursor(NumpyCursorMixin, SSCursor):
    """An unbuffered cursor, which returns results as a numpy array."""


class SSNumpyCursorSV(NumpyCursorMixin, SSCursorSV):
    """An unbuffered cursor, which returns results as a numpy array (accelerated)."""


class SSPandasCursor(PandasCursorMixin, SSCursor):
    """An unbuffered cursor, which returns results as a pandas DataFrame."""


class SSPandasCursorSV(PandasCursorMixin, SSCursorSV):
    """An unbuffered cursor, which returns results as a pandas DataFrame (accelerated)."""


class SSPolarsCursor(PolarsCursorMixin, SSCursor):
    """An unbuffered cursor, which returns results as a polars DataFrame."""


class SSPolarsCursorSV(PolarsCursorMixin, SSCursorSV):
    """An unbuffered cursor, which returns results as a polars DataFrame (accelerated)."""