Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
pola-rs
GitHub Repository: pola-rs/polars
Path: blob/main/crates/polars-core/src/series/implementations/datetime.rs
8424 views
1
use polars_compute::rolling::QuantileMethod;
2
3
use super::*;
4
#[cfg(feature = "algorithm_group_by")]
5
use crate::frame::group_by::*;
6
use crate::prelude::*;
7
8
unsafe impl IntoSeries for DatetimeChunked {
9
fn into_series(self) -> Series {
10
Series(Arc::new(SeriesWrap(self)))
11
}
12
}
13
14
impl private::PrivateSeriesNumeric for SeriesWrap<DatetimeChunked> {
15
fn bit_repr(&self) -> Option<BitRepr> {
16
Some(self.0.physical().to_bit_repr())
17
}
18
}
19
20
// Crate-private series operations for the `Datetime` logical type.
//
// `SeriesWrap<DatetimeChunked>` stores timestamps as a physical Int64
// chunked array plus a logical dtype (time unit + optional time zone).
// Most methods delegate to the physical array; whenever a new `Series` is
// produced, the datetime dtype is re-attached via
// `into_datetime(time_unit, time_zone)`.
impl private::PrivateSeries for SeriesWrap<DatetimeChunked> {
    fn compute_len(&mut self) {
        self.0.physical_mut().compute_len()
    }

    fn _field(&self) -> Cow<'_, Field> {
        Cow::Owned(self.0.field())
    }

    fn _dtype(&self) -> &DataType {
        self.0.dtype()
    }

    fn _get_flags(&self) -> StatisticsFlags {
        self.0.physical().get_flags()
    }

    fn _set_flags(&mut self, flags: StatisticsFlags) {
        self.0.physical_mut().set_flags(flags)
    }

    #[cfg(feature = "zip_with")]
    fn zip_with_same_type(&self, mask: &BooleanChunked, other: &Series) -> PolarsResult<Series> {
        // Zip on the physical representation, then restore the datetime dtype.
        let other = other.to_physical_repr().into_owned();
        self.0
            .physical()
            .zip_with(mask, other.as_ref().as_ref())
            .map(|ca| {
                ca.into_datetime(self.0.time_unit(), self.0.time_zone().clone())
                    .into_series()
            })
    }

    fn into_total_eq_inner<'a>(&'a self) -> Box<dyn TotalEqInner + 'a> {
        self.0.physical().into_total_eq_inner()
    }

    fn into_total_ord_inner<'a>(&'a self) -> Box<dyn TotalOrdInner + 'a> {
        self.0.physical().into_total_ord_inner()
    }

    // Hashing operates on the raw Int64 timestamps.
    fn vec_hash(
        &self,
        random_state: PlSeedableRandomStateQuality,
        buf: &mut Vec<u64>,
    ) -> PolarsResult<()> {
        self.0.physical().vec_hash(random_state, buf)?;
        Ok(())
    }

    fn vec_hash_combine(
        &self,
        build_hasher: PlSeedableRandomStateQuality,
        hashes: &mut [u64],
    ) -> PolarsResult<()> {
        self.0.physical().vec_hash_combine(build_hasher, hashes)?;
        Ok(())
    }

    // Grouped aggregations: min/max re-attach the datetime dtype;
    // arg_min/arg_max return index series, so no dtype restoration is needed.
    #[cfg(feature = "algorithm_group_by")]
    unsafe fn agg_min(&self, groups: &GroupsType) -> Series {
        self.0
            .physical()
            .agg_min(groups)
            .into_datetime(self.0.time_unit(), self.0.time_zone().clone())
            .into_series()
    }

    #[cfg(feature = "algorithm_group_by")]
    unsafe fn agg_max(&self, groups: &GroupsType) -> Series {
        self.0
            .physical()
            .agg_max(groups)
            .into_datetime(self.0.time_unit(), self.0.time_zone().clone())
            .into_series()
    }

    #[cfg(feature = "algorithm_group_by")]
    unsafe fn agg_arg_min(&self, groups: &GroupsType) -> Series {
        self.0.physical().agg_arg_min(groups)
    }

    #[cfg(feature = "algorithm_group_by")]
    unsafe fn agg_arg_max(&self, groups: &GroupsType) -> Series {
        self.0.physical().agg_arg_max(groups)
    }

    #[cfg(feature = "algorithm_group_by")]
    unsafe fn agg_list(&self, groups: &GroupsType) -> Series {
        // we cannot cast and dispatch as the inner type of the list would be incorrect
        self.0
            .physical()
            .agg_list(groups)
            .cast(&DataType::List(Box::new(self.dtype().clone())))
            .unwrap()
    }

    // Datetime - Datetime => Duration; Datetime - Duration => Datetime.
    // NOTE(review): mismatched time units / time zones panic via `assert_eq!`
    // — presumably callers are expected to harmonize them first; confirm.
    fn subtract(&self, rhs: &Series) -> PolarsResult<Series> {
        match (self.dtype(), rhs.dtype()) {
            (DataType::Datetime(tu, tz), DataType::Datetime(tur, tzr)) => {
                assert_eq!(tu, tur);
                assert_eq!(tz, tzr);
                let lhs = self.cast(&DataType::Int64, CastOptions::NonStrict).unwrap();
                let rhs = rhs.cast(&DataType::Int64).unwrap();
                Ok(lhs.subtract(&rhs)?.into_duration(*tu).into_series())
            },
            (DataType::Datetime(tu, tz), DataType::Duration(tur)) => {
                assert_eq!(tu, tur);
                let lhs = self.cast(&DataType::Int64, CastOptions::NonStrict).unwrap();
                let rhs = rhs.cast(&DataType::Int64).unwrap();
                Ok(lhs
                    .subtract(&rhs)?
                    .into_datetime(*tu, tz.clone())
                    .into_series())
            },
            (dtl, dtr) => polars_bail!(opq = sub, dtl, dtr),
        }
    }

    // Datetime + Duration => Datetime; any other right-hand dtype errors.
    fn add_to(&self, rhs: &Series) -> PolarsResult<Series> {
        match (self.dtype(), rhs.dtype()) {
            (DataType::Datetime(tu, tz), DataType::Duration(tur)) => {
                assert_eq!(tu, tur);
                let lhs = self.cast(&DataType::Int64, CastOptions::NonStrict).unwrap();
                let rhs = rhs.cast(&DataType::Int64).unwrap();
                Ok(lhs
                    .add_to(&rhs)?
                    .into_datetime(*tu, tz.clone())
                    .into_series())
            },
            (dtl, dtr) => polars_bail!(opq = add, dtl, dtr),
        }
    }

    // Multiplication, division and remainder are undefined for timestamps:
    // always report an unsupported-operation error.
    fn multiply(&self, rhs: &Series) -> PolarsResult<Series> {
        polars_bail!(opq = mul, self.dtype(), rhs.dtype());
    }

    fn divide(&self, rhs: &Series) -> PolarsResult<Series> {
        polars_bail!(opq = div, self.dtype(), rhs.dtype());
    }

    fn remainder(&self, rhs: &Series) -> PolarsResult<Series> {
        polars_bail!(opq = rem, self.dtype(), rhs.dtype());
    }

    #[cfg(feature = "algorithm_group_by")]
    fn group_tuples(&self, multithreaded: bool, sorted: bool) -> PolarsResult<GroupsType> {
        self.0.physical().group_tuples(multithreaded, sorted)
    }

    fn arg_sort_multiple(
        &self,
        by: &[Column],
        options: &SortMultipleOptions,
    ) -> PolarsResult<IdxCa> {
        self.0.physical().arg_sort_multiple(by, options)
    }
}
169
170
// `SeriesTrait` implementation for the `Datetime` logical type: operations
// run on the physical Int64 chunked array, and the datetime dtype (time unit
// + time zone) is re-attached whenever a new `Series` is produced.
impl SeriesTrait for SeriesWrap<DatetimeChunked> {
    fn rename(&mut self, name: PlSmallStr) {
        self.0.rename(name);
    }

    fn chunk_lengths(&self) -> ChunkLenIter<'_> {
        self.0.physical().chunk_lengths()
    }

    fn name(&self) -> &PlSmallStr {
        self.0.name()
    }

    fn chunks(&self) -> &Vec<ArrayRef> {
        self.0.physical().chunks()
    }

    unsafe fn chunks_mut(&mut self) -> &mut Vec<ArrayRef> {
        self.0.physical_mut().chunks_mut()
    }

    fn shrink_to_fit(&mut self) {
        self.0.physical_mut().shrink_to_fit()
    }

    fn slice(&self, offset: i64, length: usize) -> Series {
        self.0.slice(offset, length).into_series()
    }

    fn split_at(&self, offset: i64) -> (Series, Series) {
        let (a, b) = self.0.split_at(offset);
        (a.into_series(), b.into_series())
    }

    // Numeric summaries are computed on the raw Int64 timestamps.
    fn _sum_as_f64(&self) -> f64 {
        self.0.physical()._sum_as_f64()
    }

    fn mean(&self) -> Option<f64> {
        self.0.physical().mean()
    }

    fn median(&self) -> Option<f64> {
        self.0.physical().median()
    }

    // Appending requires an exact dtype match (same time unit and zone);
    // otherwise `polars_ensure!` returns an append error.
    fn append(&mut self, other: &Series) -> PolarsResult<()> {
        polars_ensure!(self.0.dtype() == other.dtype(), append);
        let mut other = other.to_physical_repr().into_owned();
        self.0
            .physical_mut()
            .append_owned(std::mem::take(other._get_inner_mut().as_mut()))
    }

    fn append_owned(&mut self, mut other: Series) -> PolarsResult<()> {
        polars_ensure!(self.0.dtype() == other.dtype(), append);
        // Move the physical array out of `other` with `mem::take` (leaving a
        // default value behind) so the chunks are appended without copying.
        self.0.physical_mut().append_owned(std::mem::take(
            &mut other
                ._get_inner_mut()
                .as_any_mut()
                .downcast_mut::<DatetimeChunked>()
                .unwrap()
                .phys,
        ))
    }

    fn extend(&mut self, other: &Series) -> PolarsResult<()> {
        polars_ensure!(self.0.dtype() == other.dtype(), extend);
        let other = other.to_physical_repr();
        self.0
            .physical_mut()
            .extend(other.as_ref().as_ref().as_ref())?;
        Ok(())
    }

    fn filter(&self, filter: &BooleanChunked) -> PolarsResult<Series> {
        self.0.physical().filter(filter).map(|ca| {
            ca.into_datetime(self.0.time_unit(), self.0.time_zone().clone())
                .into_series()
        })
    }

    fn take(&self, indices: &IdxCa) -> PolarsResult<Series> {
        let ca = self.0.physical().take(indices)?;
        Ok(ca
            .into_datetime(self.0.time_unit(), self.0.time_zone().clone())
            .into_series())
    }

    /// # Safety
    /// Caller must ensure all indices are in bounds.
    unsafe fn take_unchecked(&self, indices: &IdxCa) -> Series {
        let ca = self.0.physical().take_unchecked(indices);
        ca.into_datetime(self.0.time_unit(), self.0.time_zone().clone())
            .into_series()
    }

    fn take_slice(&self, indices: &[IdxSize]) -> PolarsResult<Series> {
        let ca = self.0.physical().take(indices)?;
        Ok(ca
            .into_datetime(self.0.time_unit(), self.0.time_zone().clone())
            .into_series())
    }

    /// # Safety
    /// Caller must ensure all indices are in bounds.
    unsafe fn take_slice_unchecked(&self, indices: &[IdxSize]) -> Series {
        let ca = self.0.physical().take_unchecked(indices);
        ca.into_datetime(self.0.time_unit(), self.0.time_zone().clone())
            .into_series()
    }

    fn deposit(&self, validity: &Bitmap) -> Series {
        self.0
            .physical()
            .deposit(validity)
            .into_datetime(self.0.time_unit(), self.0.time_zone().clone())
            .into_series()
    }

    fn len(&self) -> usize {
        self.0.len()
    }

    fn rechunk(&self) -> Series {
        self.0
            .physical()
            .rechunk()
            .into_owned()
            .into_datetime(self.0.time_unit(), self.0.time_zone().clone())
            .into_series()
    }

    fn new_from_index(&self, index: usize, length: usize) -> Series {
        self.0
            .physical()
            .new_from_index(index, length)
            .into_datetime(self.0.time_unit(), self.0.time_zone().clone())
            .into_series()
    }

    fn cast(&self, dtype: &DataType, cast_options: CastOptions) -> PolarsResult<Series> {
        match dtype {
            // Casting to String formats the timestamps (the "iso" format
            // argument); every other target uses the generic cast machinery.
            DataType::String => Ok(self.0.to_string("iso")?.into_series()),
            _ => self.0.cast_with_options(dtype, cast_options),
        }
    }

    /// # Safety
    /// Caller must ensure `index` is in bounds.
    #[inline]
    unsafe fn get_unchecked(&self, index: usize) -> AnyValue<'_> {
        self.0.get_any_value_unchecked(index)
    }

    fn sort_with(&self, options: SortOptions) -> PolarsResult<Series> {
        Ok(self
            .0
            .physical()
            .sort_with(options)
            .into_datetime(self.0.time_unit(), self.0.time_zone().clone())
            .into_series())
    }

    fn arg_sort(&self, options: SortOptions) -> IdxCa {
        self.0.physical().arg_sort(options)
    }

    fn null_count(&self) -> usize {
        self.0.null_count()
    }

    fn has_nulls(&self) -> bool {
        self.0.has_nulls()
    }

    #[cfg(feature = "algorithm_group_by")]
    fn unique(&self) -> PolarsResult<Series> {
        self.0.physical().unique().map(|ca| {
            ca.into_datetime(self.0.time_unit(), self.0.time_zone().clone())
                .into_series()
        })
    }

    #[cfg(feature = "algorithm_group_by")]
    fn n_unique(&self) -> PolarsResult<usize> {
        self.0.physical().n_unique()
    }

    #[cfg(feature = "algorithm_group_by")]
    fn arg_unique(&self) -> PolarsResult<IdxCa> {
        self.0.physical().arg_unique()
    }

    fn unique_id(&self) -> PolarsResult<(IdxSize, Vec<IdxSize>)> {
        ChunkUnique::unique_id(self.0.physical())
    }

    fn is_null(&self) -> BooleanChunked {
        self.0.is_null()
    }

    fn is_not_null(&self) -> BooleanChunked {
        self.0.is_not_null()
    }

    fn reverse(&self) -> Series {
        self.0
            .physical()
            .reverse()
            .into_datetime(self.0.time_unit(), self.0.time_zone().clone())
            .into_series()
    }

    fn as_single_ptr(&mut self) -> PolarsResult<usize> {
        self.0.physical_mut().as_single_ptr()
    }

    fn shift(&self, periods: i64) -> Series {
        self.0
            .physical()
            .shift(periods)
            .into_datetime(self.0.time_unit(), self.0.time_zone().clone())
            .into_series()
    }

    // Scalar reductions: reduce on the physical Int64 values, then convert
    // the resulting AnyValue back into a datetime scalar carrying this
    // series' time unit and time zone.
    fn max_reduce(&self) -> PolarsResult<Scalar> {
        let sc = self.0.physical().max_reduce();
        let av = sc
            .value()
            .as_datetime_owned(self.0.time_unit(), self.0.time_zone_arc());
        Ok(Scalar::new(self.dtype().clone(), av))
    }

    fn min_reduce(&self) -> PolarsResult<Scalar> {
        let sc = self.0.physical().min_reduce();
        let av = sc
            .value()
            .as_datetime_owned(self.0.time_unit(), self.0.time_zone_arc());
        Ok(Scalar::new(self.dtype().clone(), av))
    }

    fn mean_reduce(&self) -> PolarsResult<Scalar> {
        // The f64 mean is truncated back to an integer timestamp via `as i64`.
        let mean = self.mean().map(|v| v as i64);
        let av = AnyValue::from(mean).as_datetime_owned(self.0.time_unit(), self.0.time_zone_arc());
        Ok(Scalar::new(self.dtype().clone(), av))
    }

    fn median_reduce(&self) -> PolarsResult<Scalar> {
        // The f64 median is truncated back to an integer timestamp via `as i64`.
        let median = self.median().map(|v| v as i64);
        let av =
            AnyValue::from(median).as_datetime_owned(self.0.time_unit(), self.0.time_zone_arc());
        Ok(Scalar::new(self.dtype().clone(), av))
    }

    fn quantile_reduce(&self, quantile: f64, method: QuantileMethod) -> PolarsResult<Scalar> {
        let quantile = self.0.physical().quantile_reduce(quantile, method)?;
        // Cast the (float) quantile back to Int64 before re-attaching the
        // datetime dtype.
        let av = quantile.value().cast(&DataType::Int64);
        Ok(Scalar::new(
            self.dtype().clone(),
            av.as_datetime_owned(self.0.time_unit(), self.0.time_zone_arc()),
        ))
    }

    fn quantiles_reduce(&self, quantiles: &[f64], method: QuantileMethod) -> PolarsResult<Scalar> {
        let result = self.0.physical().quantiles_reduce(quantiles, method)?;

        // The physical reduction yields a list scalar of f64 quantiles;
        // convert each element back to an integer timestamp so the result is
        // a list of datetimes. Any other scalar shape is a compute error.
        if let AnyValue::List(float_s) = result.value() {
            let float_ca = float_s.f64().unwrap();
            let int_s = float_ca
                .iter()
                .map(|v: Option<f64>| v.map(|f| f as i64))
                .collect::<Int64Chunked>()
                .into_datetime(self.0.time_unit(), self.0.time_zone().clone())
                .into_series();
            Ok(Scalar::new(
                DataType::List(Box::new(self.dtype().clone())),
                AnyValue::List(int_s),
            ))
        } else {
            polars_bail!(ComputeError: "expected list scalar from quantiles_reduce")
        }
    }

    #[cfg(feature = "approx_unique")]
    fn approx_n_unique(&self) -> PolarsResult<IdxSize> {
        Ok(ChunkApproxNUnique::approx_n_unique(self.0.physical()))
    }

    fn clone_inner(&self) -> Arc<dyn SeriesTrait> {
        Arc::new(SeriesWrap(Clone::clone(&self.0)))
    }

    fn find_validity_mismatch(&self, other: &Series, idxs: &mut Vec<IdxSize>) {
        self.0.physical().find_validity_mismatch(other, idxs)
    }

    // Downcasting hooks: `as_any` exposes the logical `DatetimeChunked`,
    // while `as_phys_any` exposes the physical array.
    fn as_any(&self) -> &dyn Any {
        &self.0
    }

    fn as_any_mut(&mut self) -> &mut dyn Any {
        &mut self.0
    }

    fn as_phys_any(&self) -> &dyn Any {
        self.0.physical()
    }

    fn as_arc_any(self: Arc<Self>) -> Arc<dyn Any + Send + Sync> {
        self as _
    }
}
473
474