Path: blob/main/crates/polars-expr/src/expressions/literal.rs
use std::borrow::Cow;
use std::ops::Deref;

use arrow::temporal_conversions::NANOSECONDS_IN_DAY;
use polars_core::prelude::*;
use polars_core::utils::NoNull;
use polars_plan::constants::get_literal_name;

use super::*;
use crate::expressions::{AggregationContext, PartitionedAggregation, PhysicalExpr};

pub struct LiteralExpr(pub LiteralValue, Expr);

impl LiteralExpr {
    pub fn new(value: LiteralValue, expr: Expr) -> Self {
        Self(value, expr)
    }

    /// Materialize the literal value as a single `Column`.
    fn as_column(&self) -> PolarsResult<Column> {
        use LiteralValue as L;
        let column = match &self.0 {
            L::Scalar(sc) => {
                // Validate that a time literal lies within a single day.
                #[cfg(feature = "dtype-time")]
                if let AnyValue::Time(v) = sc.value() {
                    if !(0..NANOSECONDS_IN_DAY).contains(v) {
                        polars_bail!(
                            InvalidOperation: "value `{v}` is out-of-range for `time` which can be 0 - {}",
                            NANOSECONDS_IN_DAY - 1
                        );
                    }
                }

                sc.clone().into_column(get_literal_name().clone())
            },
            L::Series(s) => s.deref().clone().into_column(),
            lv @ L::Dyn(_) => polars_core::prelude::Series::from_any_values(
                get_literal_name().clone(),
                &[lv.to_any_value().unwrap()],
                false,
            )
            .unwrap()
            .into_column(),
            // Expand a range literal into a materialized column of the requested integer dtype.
            L::Range(RangeLiteralValue { low, high, dtype }) => {
                let low = *low;
                let high = *high;
                match dtype {
                    DataType::Int32 => {
                        polars_ensure!(
                            low >= i32::MIN as i128 && high <= i32::MAX as i128,
                            ComputeError: "range not within bounds of `Int32`: [{}, {}]", low, high
                        );
                        let low = low as i32;
                        let high = high as i32;
                        let ca: NoNull<Int32Chunked> = (low..high).collect();
                        ca.into_inner().into_column()
                    },
                    DataType::Int64 => {
                        polars_ensure!(
                            low >= i64::MIN as i128 && high <= i64::MAX as i128,
                            ComputeError: "range not within bounds of `Int64`: [{}, {}]", low, high
                        );
                        let low = low as i64;
                        let high = high as i64;
                        let ca: NoNull<Int64Chunked> = (low..high).collect();
                        ca.into_inner().into_column()
                    },
                    DataType::UInt32 => {
                        polars_ensure!(
                            low >= u32::MIN as i128 && high <= u32::MAX as i128,
                            ComputeError: "range not within bounds of `UInt32`: [{}, {}]", low, high
                        );
                        let low = low as u32;
                        let high = high as u32;
                        let ca: NoNull<UInt32Chunked> = (low..high).collect();
                        ca.into_inner().into_column()
                    },
                    dt => polars_bail!(
                        InvalidOperation: "datatype `{}` is not supported as range", dt
                    ),
                }
            },
        };
        Ok(column)
    }
}

impl PhysicalExpr for LiteralExpr {
    fn as_expression(&self) -> Option<&Expr> {
        Some(&self.1)
    }

    fn evaluate(&self, _df: &DataFrame, _state: &ExecutionState) -> PolarsResult<Column> {
        self.as_column()
    }

    #[allow(clippy::ptr_arg)]
    fn evaluate_on_groups<'a>(
        &self,
        df: &DataFrame,
        groups: &'a GroupPositions,
        state: &ExecutionState,
    ) -> PolarsResult<AggregationContext<'a>> {
        let s = self.evaluate(df, state)?;

        if self.0.is_scalar() {
            // A scalar literal is kept as a scalar aggregation state and broadcast per group.
            Ok(AggregationContext::from_agg_state(
                AggState::LiteralScalar(s),
                Cow::Borrowed(groups),
            ))
        } else {
            // A non-scalar literal value expands to those values for every group.

            let lit_length = s.len() as IdxSize;
            polars_ensure!(
                (groups.len() as IdxSize).checked_mul(lit_length).is_some(),
                bigidx,
                ctx = "group_by",
                size = groups.len() as u64 * lit_length as u64
            );
            let groups = GroupsType::Slice {
                groups: (0..groups.len() as IdxSize)
                    .map(|i| [i * lit_length, lit_length])
                    .collect(),
                rolling: false,
            };
            let agg_state = AggState::AggregatedList(Column::new_scalar(
                s.name().clone(),
                Scalar::new_list(s.take_materialized_series()),
                groups.len(),
            ));

            let groups = groups.into_sliceable();
            Ok(AggregationContext::from_agg_state(
                agg_state,
                Cow::Owned(groups),
            ))
        }
    }

    fn as_partitioned_aggregator(&self) -> Option<&dyn PartitionedAggregation> {
        Some(self)
    }

    fn to_field(&self, _input_schema: &Schema) -> PolarsResult<Field> {
        let dtype = self.0.get_datatype();
        Ok(Field::new(PlSmallStr::from_static("literal"), dtype))
    }

    fn is_literal(&self) -> bool {
        true
    }

    fn is_scalar(&self) -> bool {
        self.0.is_scalar()
    }
}

impl PartitionedAggregation for LiteralExpr {
    fn evaluate_partitioned(
        &self,
        df: &DataFrame,
        _groups: &GroupPositions,
        state: &ExecutionState,
    ) -> PolarsResult<Column> {
        self.evaluate(df, state)
    }

    fn finalize(
        &self,
        partitioned: Column,
        _groups: &GroupPositions,
        _state: &ExecutionState,
    ) -> PolarsResult<Column> {
        Ok(partitioned)
    }
}