Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 11 additions & 7 deletions libs/@local/hashql/compiletest/src/pipeline.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,10 @@
//!
//! [`Pipeline`] drives the full HashQL compilation sequence: parsing J-Expr
//! source into an AST, lowering through HIR and MIR, running optimization and
//! execution analysis passes, and finally compiling to
//! [`PreparedQueries`](hashql_eval::postgres::PreparedQueries) ready for PostgreSQL execution.
//! execution analysis passes, and finally compiling to [`PreparedQuery`]
//! ready for PostgreSQL execution.
//!
//! [`PreparedQuery`]: hashql_eval::postgres::PreparedQuery
//!
//! Each stage is exposed as a separate method so callers can inspect or test
//! intermediate results. Diagnostics (warnings, advisories) accumulate in
Expand Down Expand Up @@ -237,12 +239,14 @@ impl<'heap> Pipeline<'heap> {
Ok(())
}

/// Runs execution analysis and compiles MIR bodies to prepared SQL queries.
/// Runs execution analysis on MIR bodies.
///
/// Performs size estimation and execution island analysis, determining which
/// parts of each body run on PostgreSQL vs the interpreter. Returns
/// per-body residuals that downstream compilation stages use to produce
/// [`PreparedQuery`] instances.
///
/// Performs size estimation, execution island analysis (determining which
/// parts of each body run on PostgreSQL vs the interpreter), then compiles
/// the PostgreSQL islands into [`PreparedQueries`](hashql_eval::postgres::PreparedQueries)
/// containing the SQL statements, parameter bindings, and column descriptors.
/// [`PreparedQuery`]: hashql_eval::postgres::PreparedQuery
///
/// # Errors
///
Expand Down
100 changes: 100 additions & 0 deletions libs/@local/hashql/core/src/id/bit_vec/finite.rs
Original file line number Diff line number Diff line change
Expand Up @@ -339,6 +339,28 @@ impl<I: Id, T: FiniteBitSetIntegral> FiniteBitSet<I, T> {
Some(I::from_u32(self.store.trailing_zeros()))
}

/// Checks whether every bit set in `other` is also set in `self`,
/// i.e. `self` contains `other`.
#[inline]
#[must_use]
pub const fn is_superset(&self, other: &Self) -> bool
where
    T: [const] FiniteBitSetIntegral,
{
    // Intersecting `other` with `self` leaves `other` unchanged exactly
    // when all of `other`'s bits are already present in `self`.
    self.store & other.store == other.store
}

/// Checks whether every bit set in `self` is also set in `other`,
/// i.e. `self` is contained in `other`.
#[inline]
#[must_use]
pub const fn is_subset(&self, other: &Self) -> bool
where
    T: [const] FiniteBitSetIntegral,
{
    // Containment is simply the superset relation viewed from the other side.
    other.is_superset(self)
}

/// Returns an iterator over the indices of set bits.
#[inline]
pub fn iter(&self) -> FiniteBitIter<I, T> {
Expand Down Expand Up @@ -863,6 +885,84 @@ mod tests {
assert_eq!(set, original);
}

#[test]
fn is_superset_of_subset() {
    // A large set covering the contiguous range 0..=5.
    let mut big: FiniteBitSet<TestId, u8> = FiniteBitSet::new_empty(8);
    big.insert_range(TestId::from_usize(0)..=TestId::from_usize(5), 8);

    // A sparse set whose members all lie inside `big`.
    let mut small: FiniteBitSet<TestId, u8> = FiniteBitSet::new_empty(8);
    small.insert(TestId::from_usize(3));
    small.insert(TestId::from_usize(1));

    // The relation holds in one direction only.
    assert!(big.is_superset(&small));
    assert!(!small.is_superset(&big));
}

#[test]
fn is_subset_of_superset() {
    // A sparse set with two members.
    let mut sparse: FiniteBitSet<TestId, u8> = FiniteBitSet::new_empty(8);
    sparse.insert(TestId::from_usize(4));
    sparse.insert(TestId::from_usize(2));

    // The full domain 0..=7 contains every other set of this width.
    let mut full: FiniteBitSet<TestId, u8> = FiniteBitSet::new_empty(8);
    full.insert_range(TestId::from_usize(0)..=TestId::from_usize(7), 8);

    // Containment holds in one direction only.
    assert!(sparse.is_subset(&full));
    assert!(!full.is_subset(&sparse));
}

#[test]
fn empty_is_subset_of_everything() {
    let empty: FiniteBitSet<TestId, u8> = FiniteBitSet::new_empty(8);

    let mut full: FiniteBitSet<TestId, u8> = FiniteBitSet::new_empty(8);
    full.insert_range(TestId::from_usize(0)..=TestId::from_usize(7), 8);

    // The empty set is vacuously contained in every set, itself included.
    assert!(full.is_superset(&empty));
    assert!(empty.is_subset(&empty));
    assert!(empty.is_subset(&full));
}

#[test]
fn equal_sets_are_both_subset_and_superset() {
    let mut first: FiniteBitSet<TestId, u8> = FiniteBitSet::new_empty(8);
    first.insert(TestId::from_usize(5));
    first.insert(TestId::from_usize(1));

    // Identical contents: containment must hold in both directions.
    let second = first;

    assert!(first.is_superset(&second));
    assert!(first.is_subset(&second));
}

#[test]
fn disjoint_sets_are_not_subsets() {
    // Low bits only.
    let mut low: FiniteBitSet<TestId, u8> = FiniteBitSet::new_empty(8);
    low.insert(TestId::from_usize(1));
    low.insert(TestId::from_usize(0));

    // High bits only — no overlap with `low`.
    let mut high: FiniteBitSet<TestId, u8> = FiniteBitSet::new_empty(8);
    high.insert(TestId::from_usize(7));
    high.insert(TestId::from_usize(6));

    // Neither relation holds in either direction for disjoint non-empty sets.
    assert!(!low.is_superset(&high));
    assert!(!low.is_subset(&high));
    assert!(!high.is_superset(&low));
    assert!(!high.is_subset(&low));
}

#[test]
fn overlapping_sets_are_not_subsets() {
    // 0..=3 and 2..=5 share {2, 3} but each owns bits the other lacks.
    let mut left: FiniteBitSet<TestId, u8> = FiniteBitSet::new_empty(8);
    left.insert_range(TestId::from_usize(0)..=TestId::from_usize(3), 8);

    let mut right: FiniteBitSet<TestId, u8> = FiniteBitSet::new_empty(8);
    right.insert_range(TestId::from_usize(2)..=TestId::from_usize(5), 8);

    // Partial overlap is not containment in either direction.
    assert!(!left.is_superset(&right));
    assert!(!left.is_subset(&right));
}

#[test]
fn negate_full_width() {
let mut set: FiniteBitSet<TestId, u8> = FiniteBitSet::new_empty(8);
Expand Down
4 changes: 2 additions & 2 deletions libs/@local/hashql/eval/src/orchestrator/codec/decode/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,8 @@ mod tests;
/// variant in order, and opaque types wrap their inner representation.
///
/// When the type is unknown ([`Param`], [`Infer`], [`Unknown`]), falls back to
/// `decode_unknown`, which uses JSON structure alone
/// (objects become structs or dicts, arrays become lists, etc.).
/// a structural decoder that uses JSON shape alone: objects become structs or
/// dicts, arrays become lists, etc.
///
/// [`Value`]: hashql_mir::interpret::value::Value
/// [`Param`]: hashql_core::type::kind::TypeKind::Param
Expand Down
8 changes: 4 additions & 4 deletions libs/@local/hashql/eval/src/orchestrator/codec/mod.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
//! JSON codec for converting between interpreter [`Value`]s and the PostgreSQL
//! wire format.
//!
//! - `decode`: deserializes JSON column values (from `tokio_postgres` rows) into typed [`Value`]s,
//! guided by the HashQL type system.
//! - `encode`: serializes runtime [`Value`]s and query parameters into forms that `tokio_postgres`
//! can send to the database (via [`ToSql`]).
//! Decoding deserializes JSON column values (from `tokio_postgres` rows) into typed
//! [`Value`]s, guided by the HashQL type system. Encoding serializes runtime [`Value`]s
//! and query parameters into forms that `tokio_postgres` can send to the database
//! (via [`ToSql`]).
//!
//! The [`JsonValueRef`] type provides a borrowed view over `serde_json::Value`
//! that avoids cloning during decode, while [`JsonValueKind`] is a data-free
Expand Down
8 changes: 4 additions & 4 deletions libs/@local/hashql/eval/src/postgres/continuation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,10 +47,10 @@ impl ContinuationAlias {

/// Continuation fields returned to the bridge in the `SELECT` list.
///
/// A subset of `ContinuationColumn` that excludes internal-only columns
/// (`Entry` and `Filter`).
/// Each variant corresponds to a column the bridge must decode to reconstruct
/// island exit control flow and live-out locals.
/// Excludes internal-only columns (entry and filter) that are only used
/// within the generated SQL. Each variant corresponds to a column the
/// bridge must decode to reconstruct island exit control flow and live-out
/// locals.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum ContinuationField {
/// The target basic block id for island exits.
Expand Down
2 changes: 2 additions & 0 deletions libs/@local/hashql/eval/tests/ui/orchestrator/.spec.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
skip = true
suite = "eval/orchestrator"

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading
Loading