diff --git a/docs/wiki/Configuration:-Input.md b/docs/wiki/Configuration:-Input.md index 9af94dcaef..83076019a6 100644 --- a/docs/wiki/Configuration:-Input.md +++ b/docs/wiki/Configuration:-Input.md @@ -2,7 +2,7 @@ In this section you can configure input devices like keyboard and mouse, and some input-related options. -There's a section for each device type: `keyboard`, `touchpad`, `mouse`, `trackpoint`, `trackball`, `tablet`, `touch`. +There's a section for each device type: `keyboard`, `touchpad`, `mouse`, `trackpoint`, `trackball`, `tablet`, `touchscreen`. Settings in those sections will apply to every device of that type. Currently, there's no way to configure specific devices individually (but that is planned). @@ -46,6 +46,14 @@ input { // left-handed // disabled-on-external-mouse // middle-emulation + + // Touchpad gesture binds live in the main binds {} block using + // the `TouchpadSwipe` trigger with `fingers=N direction="..."` + // properties. This subblock only contains tuning parameters. + // gestures { + // swipe-trigger-distance 16.0 + // swipe-progress-distance 40.0 + // } } mouse { @@ -95,10 +103,29 @@ input { // calibration-matrix 1.0 0.0 0.0 0.0 1.0 0.0 } - touch { + touchscreen { // off map-to-output "eDP-1" + // natural-scroll // calibration-matrix 1.0 0.0 0.0 0.0 1.0 0.0 + + // Touchscreen gesture binds live in the main binds {} block using + // parameterized triggers like TouchSwipe fingers=3 direction="up", + // TouchPinch fingers=4 direction="in", or TouchEdge edge="left". + // This subblock only contains tuning parameters. 
+ gestures { + // swipe-trigger-distance 100.0 // px of centroid motion before swipe latches + // edge-start-distance 30.0 // px-wide edge start zone + // pinch-trigger-distance 100.0 // px of spread change before pinch latches + // pinch-dominance-ratio 1.0 // spread must beat swipe × this (higher = stricter pinch) + // pinch-sensitivity 1.0 + // pinch-progress-distance 100.0 // px of spread = IPC progress ±1.0 (signed) + // swipe-multi-finger-scale 1.2 // scales swipe-trigger-distance for 4+ fingers (1.0 = off) + // swipe-progress-distance 200.0 // px of swipe = IPC progress 1.0 + // rotation-trigger-angle 20.0 // ° before rotation can latch + // rotation-dominance-ratio 0.5 // arc must beat swipe × this (higher = stricter rotation) + // rotation-progress-angle 90.0 // ° that map to IPC progress ±1.0 + } } // disable-power-key-handling @@ -259,11 +286,89 @@ Settings specific to `touchpad` and `mouse`: Since: 25.08 You can also override horizontal and vertical scroll factor separately like so: `scroll-factor horizontal=2.0 vertical=-1.0` -Settings specific to `tablet` and `touch`: +Settings specific to `tablet` and `touchscreen`: - `calibration-matrix`: set to six floating point numbers to change the calibration matrix. See the [`LIBINPUT_CALIBRATION_MATRIX` documentation](https://wayland.freedesktop.org/libinput/doc/latest/device-configuration-via-udev.html) for examples. - Since: 25.02 for `tablet` - - Since: 25.11 for `touch` + - Since: 25.11 for `touchscreen` + +Settings specific to `touchscreen`: + +- `natural-scroll`: Since: next if set, inverts the scrolling direction for touchscreen swipe gestures. +- `gestures {}`: Since: next tuning parameters for touchscreen gesture recognition. + +> [!NOTE] +> +> Touchscreen gesture **binds** are configured in the main `binds {}` block using parameterized triggers like `TouchSwipe fingers=3 direction="up"`, `TouchPinch fingers=4 direction="in"`, or `TouchEdge edge="left"`. 
The `touchscreen { gestures { } }` subblock below only contains tuning parameters that affect *how* gestures are recognized, not *which* ones fire. See the [Gestures](./Gestures.md) wiki page for the full list of touchscreen gesture triggers. + +The `touchscreen { gestures { } }` tuning parameters are: + +All knobs are grouped as: **trigger** (classifier commit gates), **dominance** (3-way race tuning), **progress** (IPC output scaling), and **misc**. + +**Swipe:** + +- `swipe-trigger-distance `: pixels of centroid motion before a swipe gesture commits. Lower values feel more responsive but risk triggering on incidental finger drift. Default: `100.0`. +- `swipe-multi-finger-scale `: scaling applied to `swipe-trigger-distance` for gestures with more than 3 fingers. The formula is `base * (1 + (fingers − 3) * (scale − 1))`, so with a base of 100 and scale 1.2 a 4-finger swipe needs 120 px and a 5-finger swipe needs 140 px. Default `1.2` — gives a small pinch-priority bias at high finger counts so ambiguous 4/5-finger motions resolve as pinch rather than swipe. Set `1.0` to disable the bias entirely. +- `swipe-progress-distance `: pixels of swipe distance that map to IPC `GestureProgress = 1.0`. IPC-output knob — doesn't affect classification. Tune this for tagged external-app gestures (sidebar drawers, scrubbers, etc.). Default: `200.0`. + +**Pinch:** + +- `pinch-trigger-distance `: pixels of `|spread_change|` before a pinch gesture commits. Default: `100.0`. +- `pinch-dominance-ratio `: `|spread_change|` must exceed `swipe_distance × this` for pinch to win the race against swipe. Higher = stricter pinch. Default: `1.0`. +- `pinch-sensitivity `: multiplier mapping finger spread change to continuous pinch animation delta (e.g. overview open/close progress). At `1.0`, one pixel of spread change contributes one pixel to the gesture accumulator. 
Applies to **all** pinch-bound continuous actions — the bind's own `sensitivity=` property is ignored for pinch because raw spread-delta pixels need different scaling from linear swipe distances. Default: `1.0`. +- `pinch-progress-distance `: pixels of spread change that map to IPC `GestureProgress = ±1.0`. Signed: positive for pinch-out, negative for pinch-in. Default: `100.0`. + +**Rotation:** + +- `rotation-trigger-angle `: cumulative rotation in **degrees** before a rotation gesture commits. Default: `20.0`. Rotation detection is an early proof of concept — see the warning in the [Rotation Gestures](./Gestures.md#rotation-gestures) section. +- `rotation-dominance-ratio `: rotation arc length (`|cumulative_rotation| × cluster_radius`) must exceed both `swipe_distance × this` and `|spread_change| × this` for rotation to win the race. Higher = stricter rotation. Default: `0.5` (deliberately lenient — rotation almost always includes incidental translation). Matches `pinch-dominance-ratio` semantics (higher = stricter for both). +- `rotation-progress-angle `: degrees of cumulative rotation that map to IPC `GestureProgress = ±1.0`. Signed: positive = counter-clockwise, negative = clockwise. Default: `90.0`. + +**Edge:** + +- `edge-start-distance `: width in pixels of the screen-edge start zone. A touch must *begin* within this distance from an edge to count as a `TouchEdge` gesture; touches starting farther in are treated as regular swipes. Default: `30.0`. + +Example: + +```kdl +input { + touchscreen { + gestures { + swipe-trigger-distance 26.0 + edge-start-distance 30.0 + pinch-sensitivity 1.0 + swipe-progress-distance 200.0 + pinch-progress-distance 100.0 + rotation-trigger-angle 15.0 + rotation-dominance-ratio 0.5 + } + } +} +``` + +### Touchpad Gesture Tuning + +Since: next + +The `touchpad { gestures { } }` subblock contains tuning parameters for touchpad gesture recognition. 
Like touchscreen, the actual gesture binds (`TouchpadSwipe fingers=N direction="..."`, `TouchpadPinch fingers=N direction="..."`) live in the main `binds {}` block.
+
+- `swipe-trigger-distance <float>`: libinput delta units of centroid motion before a swipe gesture commits. These units are acceleration-adjusted and not directly comparable to touchscreen pixels. Default: `16.0`.
+- `swipe-progress-distance <float>`: libinput delta units of swipe motion that map to IPC `GestureProgress = 1.0`. Because libinput acceleration curves are nonlinear, the same physical swipe can produce different delta magnitudes depending on speed — this value is **not** directly comparable to the touchscreen `swipe-progress-distance`. Default: `40.0`.
+- `pinch-trigger-scale <float>`: `|scale - 1.0|` required before a `TouchpadPinch` bind fires. libinput normalizes pinch scale (1.0 = no change, 1.5 = 50% spread out, 0.5 = 50% spread in), so this is a unitless ratio and **not** directly comparable to the touchscreen `pinch-trigger-distance` (which is in pixels). Fires once per gesture when the threshold is crossed; direction is picked from the sign of the scale change. Default: `0.15`.
+ +Example: + +```kdl +input { + touchpad { + gestures { + swipe-trigger-distance 16.0 + swipe-progress-distance 40.0 + pinch-trigger-scale 0.15 + } + } +} +``` Tablets and touchscreens are absolute pointing devices that can be mapped to a specific output like so: @@ -273,7 +378,7 @@ input { map-to-output "eDP-1" } - touch { + touchscreen { map-to-output "eDP-1" } } diff --git a/docs/wiki/Configuration:-Window-Rules.md b/docs/wiki/Configuration:-Window-Rules.md index f8fa215ec3..d40f1a5988 100644 --- a/docs/wiki/Configuration:-Window-Rules.md +++ b/docs/wiki/Configuration:-Window-Rules.md @@ -99,6 +99,7 @@ window-rule { clip-to-geometry true tiled-state true baba-is-float true + touchscreen-gesture-passthrough true background-effect { xray true @@ -1017,6 +1018,41 @@ For example, GTK 4 pop-ups with pointing arrows (`has-arrow=true` property) are These pop-ups with custom shapes will need the app to implement the [ext-background-effect protocol](https://wayland.app/protocols/ext-background-effect-v1) to work properly. +#### `touchscreen-gesture-passthrough` + +Forward touchscreen multi-finger gestures to matching windows instead of letting niri's gesture recognizer consume them. + +By default, niri claims 3+ finger touchscreen swipes and pinches for compositor actions like workspace switching and overview toggle. +This rule opts specific windows out of that behavior so apps that implement their own touch gestures (browsers, drawing apps, mapping tools) receive the raw touch events. + +Escape hatches that still work on passthrough windows: + +- **Mod+touch gestures** still trigger compositor binds — holding the mod key always bypasses passthrough so you can invoke niri actions even on a passthrough window. +- **Edge swipes** still belong to niri — a swipe that starts in a screen-edge zone runs the edge gesture even if the window under it has passthrough enabled. 
+- **2-finger touches** are unaffected — they already forward to clients by default regardless of this rule. + +This rule is touchscreen-only. Touchpad gestures are not affected. + +To discover which app-id to match, run niri with `RUST_LOG=niri=debug` and watch for lines like `touch: captured 3-finger gesture over app-id="org.mozilla.firefox"` after performing a gesture on the target window. + +```kdl +// Let Firefox handle touch gestures itself (page navigation, pinch-zoom). +window-rule { + match app-id="firefox" + match app-id="org.mozilla.firefox" + + touchscreen-gesture-passthrough true +} + +// Same for a drawing app and Blender. +window-rule { + match app-id="org.kde.krita" + match app-id="org.blender.Blender" + + touchscreen-gesture-passthrough true +} +``` + #### Size Overrides You can amend the window's minimum and maximum size in logical pixels. diff --git a/docs/wiki/Design:-Gesture-IPC-Refactor.md b/docs/wiki/Design:-Gesture-IPC-Refactor.md new file mode 100644 index 0000000000..74f7941348 --- /dev/null +++ b/docs/wiki/Design:-Gesture-IPC-Refactor.md @@ -0,0 +1,836 @@ +# Gesture State via Environment Variables — Design Plan + +> [!NOTE] +> This is an open design RFC tied to PR [niri-wm/niri#3771](https://github.com/niri-wm/niri/pull/3771). +> Feedback, counterproposals, and use-case testing from the niri community are welcome. + +## Acknowledgments + +The core architectural ideas in this document — env-var spawn context, stdin-pipe progress streaming, the public IPC event stream, `noop = consume` semantics, per-window `binds {}` in `window-rules` with an `unbound` sentinel for fingers=1/2 disambiguation, and the critique of the tag system as a layer violation — originated from **Atan-D-RP4** in PR review discussion on [niri-wm/niri#3771](https://github.com/niri-wm/niri/pull/3771). 
This document consolidates those proposals into an implementation plan and extends them in a few places (the internal-vs-IPC progress mismatch analysis in Part 11, and the earlier three-gate disambiguation sketch in Part 12 now superseded by Atan's window-rule `binds {}` proposal). + +## Document Status and Reading Order + +This document evolved in layers as design discussion progressed. Read it in order, but be aware that later parts **supersede** earlier ones in places: + +- **Parts 1–9** — Initial spawn + env-var + stdin-pipe proposal (self-contained, covers the tag-replacement case) +- **Part 10** — Second-pass refinements: adds a public IPC event stream as a complementary channel, and proposes `noop = consume` semantics — this **supersedes Part 3d's claim that `noop` loses its purpose** +- **Part 11** — Cross-cutting concern: internal vs IPC progress mismatch (applies to all paths) +- **Part 12** — Disambiguation flow for `fingers=1`/`fingers=2` — this **supersedes Part 10c's "keep fingers=3..=10"** position. Originally a three-gate heuristic (passthrough rule + bind existence + threshold timing); **now superseded by Atan's per-window `binds {}` in `window-rules` proposal**, which collapses the three gates into one declarative mechanism. Scoped as a follow-up PR; the current PR stays at `fingers=3..=10`. + +Net current thinking: three complementary user paths (spawn / IPC event stream / direct action), `noop` means "compositor claims this gesture," `unbound` in a window-rule `binds {}` block releases the claim per-app, and fingers=1/2 lands in a separate follow-up PR per Part 12. + +## The Problem + +The current tag system creates a **split-brain** between configuration and consumption: + +1. The user writes a bind with `tag "workspace-nav"` in their niri config +2. A separate external app must independently know to connect to niri's IPC socket, subscribe to the event stream, and filter for events with tag `"workspace-nav"` +3. 
The bind config and the consuming app are coupled by a string convention that lives outside either one + +This doesn't fit niri's design principle where **config declares intent and the compositor executes it**. Tags leak compositor-internal state into a global IPC namespace that external apps must subscribe to and parse. + +## Proposal: Spawn with Gesture State in Environment Variables + +When a gesture fires a `spawn` action, attach the gesture's state as environment variables to the spawned process. The script reads its own env, does its thing, and exits. No IPC socket, no tag matching, no event stream — fully self-contained. + +## Detailed Design + +### Part 1: Environment Variables (Static State at Spawn Time) + +When `spawn` or `spawn-sh` fires from a gesture bind, set these env vars on the child process: + +```sh +NIRI_GESTURE_TYPE=TouchSwipe # TouchSwipe | TouchPinch | TouchRotate | TouchEdge | TouchpadSwipe +NIRI_GESTURE_FINGERS=3 # finger count +NIRI_GESTURE_DIRECTION=up # up|down|left|right (swipe), in|out (pinch), cw|ccw (rotate) +NIRI_GESTURE_EDGE=left # (edge only) top|bottom|left|right +NIRI_GESTURE_ZONE=start # (edge only) full|start|center|end +NIRI_GESTURE_CONTINUOUS=true # whether progress will stream on stdin +``` + +> [!NOTE] +> `sensitivity` and `natural_scroll` are **not** exposed as env vars. These are compositor-internal tuning — the compositor applies them when computing the `progress` value that streams on stdin. The spawned process receives already-adjusted progress and doesn't need to know or reapply these. This keeps the env vars focused on **what happened** (gesture identity) rather than **how it was configured** (tuning knobs). + +This is the **easy part**. All this state is already available in `extract_bind_info()` and the `Trigger` enum at the point where `do_action` is called. 
The spawn functions (`spawn`, `spawn_sh`, `spawn_sync`) just need a new parameter for optional gesture context, and `spawn_sync` adds `.env()` calls before spawning. + +**Config example (discrete gesture):** +```text +binds { + TouchSwipe fingers=3 direction="up" { + spawn "notify-send" "Swiped up with 3 fingers" + } +} +``` + +The spawned `notify-send` sees `NIRI_GESTURE_TYPE=TouchSwipe`, `NIRI_GESTURE_FINGERS=3`, etc. in its env. For discrete gestures this is all you need — the script runs, reads env, done. + +### Part 2: stdin Pipe (Dynamic State for Continuous Gestures) + +This is the **hard part** and where the real architectural value is. + +For continuous gestures (workspace-switch, overview, column-nav animations), the spawned process needs **live progress updates** as fingers move. Environment variables are write-once-at-spawn; they can't carry streaming state. + +**Solution: pipe progress to the child's stdin.** + +Currently, `spawn_sync` sets `stdin(Stdio::null())`. For continuous gesture spawns, change this to `stdin(Stdio::piped())` and keep the write-end of the pipe alive in the gesture's active state. + +#### Data format on stdin + +One JSON object per line (newline-delimited JSON / NDJSON): + +```jsonl +{"event":"progress","progress":0.15,"dx":0.0,"dy":-8.3,"timestamp_ms":48201} +{"event":"progress","progress":0.42,"dx":0.0,"dy":-12.1,"timestamp_ms":48217} +{"event":"progress","progress":0.73,"dx":0.0,"dy":-9.7,"timestamp_ms":48233} +{"event":"end","completed":true} +``` + +- `progress`: normalized, non-monotonic (same semantics as current `GestureProgress.progress`) +- `dx`/`dy` or `d_spread` or `d_angle`: raw physical delta, typed by gesture kind +- `timestamp_ms`: frame timestamp +- Final `{"event":"end","completed":true/false}` then stdin closes + +A bash script consuming this looks like: + +```bash +#!/bin/bash +# NIRI_GESTURE_TYPE, NIRI_GESTURE_FINGERS, etc. 
are in our env +echo "Gesture started: $NIRI_GESTURE_TYPE with $NIRI_GESTURE_FINGERS fingers" + +while IFS= read -r line; do + progress=$(echo "$line" | jq -r '.progress // empty') + event=$(echo "$line" | jq -r '.event') + + if [ "$event" = "end" ]; then + completed=$(echo "$line" | jq -r '.completed') + echo "Gesture ended, completed=$completed" + break + fi + + # Drive your animation with $progress + echo "Progress: $progress" +done +``` + +A Rust/Python/Go consumer reads stdin line-by-line and deserializes JSON. + +### Part 3: Architectural Changes Required + +#### 3a. Spawn infrastructure (`src/utils/spawning.rs`) + +Current signatures: +```rust +pub fn spawn(command: Vec, token: Option) +pub fn spawn_sh(command: String, token: Option) +fn spawn_sync(command, args, token) +``` + +The existing signatures gain an optional gesture context parameter. Internally, `spawn` checks whether it's in a gesture context and adjusts its behavior: + +```rust +// Same function, now context-aware +pub fn spawn(command: Vec, token: Option, gesture: Option) + -> Option // None for keyboard spawns; Some(pipe) for gesture spawns +``` + +When `gesture` is `Some`: +- Sets `NIRI_GESTURE_*` env vars on the child +- Uses `Stdio::piped()` for stdin and returns the write-end +- The process always gets the pipe — whether it reads stdin is the process's choice + +When `gesture` is `None`: behaves exactly as today (no env vars, `Stdio::null()`), returns `None`. + +This is the "spawn action has different behavior when called with these binds" pattern — the function itself is context-aware, not a new function. + +**Key concern: the double-fork.** Currently: +1. Main thread → spawner thread → `Command::spawn()` → intermediate child → grandchild (actual process) +2. 
Intermediate child exits immediately, grandchild is orphaned to init/systemd + +With stdin piping, the pipe's write-end must stay alive in the **compositor process** (not the spawner thread), because the gesture handler runs on the main thread and needs to write to it on every motion frame. + +Implementation approach for continuous spawns: +- Do the piped spawn synchronously on the main thread — fork+exec is fast (<1ms), and gesture commit only happens once per gesture, so blocking briefly is fine +- The double-fork still happens for process isolation, but the pipe write-end stays in compositor space +- This avoids the complexity of threading pipe fds back from a spawner thread via channels + +#### 3b. Action dispatch (`src/input/mod.rs` and `src/input/touch_gesture.rs`) + +Currently, `do_action` handles `Action::Spawn` generically with no context about what triggered it. The gesture code should intercept spawn actions before they reach `do_action`: + +```rust +// In touch_gesture.rs / mod.rs, at the point where a gesture bind fires: +if matches!(action, Action::Spawn(_) | Action::SpawnSh(_)) { + let ctx = GestureSpawnContext::from_trigger(trigger, continuous); + let pipe = match action { + Action::Spawn(cmd) => spawn(cmd, Some(token), Some(ctx)), + Action::SpawnSh(cmd) => spawn_sh(cmd, Some(token), Some(ctx)), + _ => unreachable!(), + }; + // If continuous, store pipe in ActiveTouchBind/ActiveSwipeBind +} else { + self.do_action(action, false); +} +``` + +This keeps `do_action` untouched — it doesn't need to know about gestures. The gesture code is the one that knows it's in a gesture context, so it handles spawn specially. All other actions (workspace-switch, focus-column, etc.) flow through `do_action` as before. + +#### 3c. Active gesture state (`src/niri.rs`) + +For continuous gesture spawns, the pipe write-end needs to live in the active gesture state so progress updates can write to it. 
With tags removed, these structs simplify — `tag` and `ipc_progress` are gone, replaced by `spawn_pipe`: + +```rust +pub struct ActiveSwipeBind { + pub kind: ContinuousGestureKind, + pub sensitivity: f64, + pub spawn_pipe: Option, // write-end for spawned process stdin +} + +pub enum ActiveTouchBind { + Swipe { + kind: ContinuousGestureKind, + sensitivity: f64, + natural_scroll: bool, + spawn_pipe: Option, + }, + Pinch { + kind: ContinuousGestureKind, + spawn_pipe: Option, + start_spread: f64, + last_spread: f64, + }, + Rotate { + kind: ContinuousGestureKind, + spawn_pipe: Option, + start_rotation: f64, + }, +} +``` + +On each gesture progress frame, if `spawn_pipe` is `Some`, write a JSON progress line to it. On gesture end, write the end event and drop the pipe (closes stdin). + +**EPIPE handling:** If the child process exits early, writing to the pipe will return EPIPE. This must be handled gracefully — just set `spawn_pipe = None` and continue (the gesture still drives compositor animations even if the external process died). + +#### 3d. Tags are removed entirely + +Since this is a private prototype, we don't need backwards compatibility. Tags are **replaced**, not supplemented. 
+ +**What gets removed:** +- `tag: Option` from `Bind`, `TouchBindEntry`, `ActiveTouchBind`, `ActiveSwipeBind` +- `GestureBegin`, `GestureProgress`, `GestureEnd` IPC events (the tag-bearing ones) +- Tag field in the settings UI +- All `ipc_gesture_begin/progress/end` emission logic in `touch_gesture.rs` and `mod.rs` + +**What replaces each use case:** + +| Old (tags) | New | +|-----------|-----| +| Script reacts to a specific gesture | `spawn` + env vars | +| Animation driven by gesture progress | `spawn` + stdin pipe | +| Debug inspector sees all gestures | `RecognitionFrame` events (already exist, debug-only) — or we add a new lightweight `GestureEvent` on the IPC stream that carries the same env-var-level info but without requiring a tag in config | +| Long-running daemon monitors gestures | Same IPC stream, but tag-free: events identify gestures by type/fingers/direction, not user-assigned strings | + +**The `noop` action loses its gesture-specific purpose** *(superseded — see Part 10b).* Originally this proposal drops `noop`'s special meaning along with tags, but second-pass refinements reintroduce `noop = consume` as the "compositor claims this gesture for IPC consumption" signal. The current position is: `noop` gains new meaning as the consume marker, not loses it. See Part 10b for details. + +**What about `niri-gesture-inspector`?** It currently uses `GestureBegin`/`GestureEnd` events. Two options: +1. Keep a simplified, tag-free `GestureEvent` on the IPC stream (just type/fingers/direction/completed, no user tag) +2. The inspector already uses `RecognitionFrame` events — extend those slightly to cover the commit/end phase too + +Option 1 is cleaner: a single `GestureEvent` that fires on every gesture commit, carrying the trigger description and completion status. No tag field, no user config needed — it's purely observational. 
+ +### Part 4: Implementation Phases + +#### Phase 0: Rip out tags + +**Scope:** Remove the entire tag system — config field, IPC events, emission logic, UI. + +**Changes:** +1. Remove `tag: Option` from `Bind` in `niri-config/src/binds.rs` +2. Remove `tag` from `ActiveTouchBind` variants and `ActiveSwipeBind` in `niri.rs` +3. Remove `GestureBegin`, `GestureProgress`, `GestureEnd` event variants from `niri-ipc/src/lib.rs` +4. Remove all `ipc_gesture_begin/progress/end` calls in `touch_gesture.rs` and `mod.rs` +5. Remove `extract_bind_info`'s tag extraction, simplify the tuple it returns +6. Remove `noop` action support from gesture binds (or keep `noop` as a general action but remove its special tag-emitting behavior) +7. Remove tag field from `TouchBindEntry` in the settings UI `config.rs` +8. Remove tag rows from touchscreen.rs and touchpad.rs add/edit forms +9. Add a simple, tag-free `GestureEvent` to the IPC stream for debug tools: + ```rust + GestureCommit { + trigger: String, // "TouchSwipe fingers=3 direction=\"up\"" + finger_count: u8, + is_continuous: bool, + } + GestureFinish { + trigger: String, + completed: bool, + } + ``` + These fire for ALL gesture commits unconditionally — no config needed. Debug tools (gesture-inspector) observe the stream without any bind config. + +**Complexity:** Medium (lots of deletion, but deletion is safe). The new `GestureCommit`/`GestureFinish` events are simpler than the old tagged trio because they have no user-defined fields. + +#### Phase 1: Environment variables + stdin pipe + +**Scope:** Make `spawn` context-aware when fired from gesture binds — env vars for identity, stdin pipe for progress. + +These ship together because the pipe is what makes this a real replacement for tags. Env vars without the pipe only covers discrete gestures; with the pipe, it covers everything. + +**Changes:** +1. 
Define `GestureSpawnContext` struct in `spawning.rs`: + ```rust + pub struct GestureSpawnContext { + pub gesture_type: String, // "TouchSwipe", "TouchPinch", etc. + pub fingers: u8, + pub direction: Option, // "up", "in", "cw", etc. + pub edge: Option, // "left", "top", etc. (edge gestures only) + pub zone: Option, // "full", "start", etc. (edge gestures only) + } + ``` + Note: no `continuous` flag — the compositor determines this from the gesture type. All gesture spawns get the pipe; `NIRI_GESTURE_CONTINUOUS` env var tells the process whether to expect progress data on stdin. +2. Modify `spawn`/`spawn_sh`/`spawn_sync` to accept `Option` — when present, set `NIRI_GESTURE_*` env vars and use `Stdio::piped()` for stdin +3. Return `Option` (pipe write-end) from spawn — `Some` for gesture spawns, `None` for keyboard spawns +4. Add `spawn_pipe: Option` to `ActiveTouchBind` variants and `ActiveSwipeBind` +5. In `touch_gesture.rs` and `mod.rs`, intercept `Action::Spawn`/`Action::SpawnSh` before `do_action` — build context from trigger, call gesture-aware spawn, store pipe in active state +6. On gesture progress, write NDJSON line to the pipe (`O_NONBLOCK`, skip frame on `EAGAIN`) +7. On gesture end, write `{"event":"end","completed":true/false}` and drop the pipe +8. Handle EPIPE: set `spawn_pipe = None`, continue gesture normally +9. **Refactor spawn for piped mode:** do the piped spawn synchronously on the main thread (fork+exec is fast, <1ms). Double-fork still happens for process isolation, but pipe write-end stays in compositor space + +**Complexity:** Medium-high. The spawn architecture change for piped mode is the hardest part. Non-blocking pipe writes at 120 Hz need care. + +**Value:** Full replacement for tags. Discrete scripts read env vars and ignore stdin. Continuous scripts read env vars and stdin. Same `spawn` action, compositor handles the rest. + +#### Phase 2: Reserved + +Originally skipped in the first-pass plan. 
**Filled in by Part 10d** as the `noop = consume` phase (replacing the `touchscreen-gesture-passthrough` window rule with bind-existence consumption semantics). + +#### Phase 3: Settings UI updates + +**Scope:** Update niri-touch-settings-UI to reflect the tag-free model. + +**Changes:** +1. Remove all tag-related UI (already done in Phase 0) +2. Remove `noop` from the action list for gesture binds (or keep it for "gesture exists but does nothing visible") +3. For `spawn` actions, add a help label: "Spawned processes receive gesture state via NIRI_GESTURE_* environment variables" +4. Consider adding a "Test" button that spawns a built-in script showing the env vars (nice-to-have) + +**Complexity:** Low. + +### Part 5: Non-Trivial Concerns + +#### 5a. Spawn latency on the main thread + +Currently spawn runs in a separate thread to avoid blocking the compositor. For piped spawns, we need the pipe fd on the main thread. Options: +- Fork+exec is fast (~1ms) — doing it on the main thread for gesture spawns is probably fine, especially since gesture commit only happens once per gesture +- Or: spawn in thread, send pipe fd back via a one-shot channel + +#### 5b. Pipe write blocking at 120 Hz + +If the child doesn't read fast enough, the pipe buffer fills and `write()` blocks. Solutions: +- Use `O_NONBLOCK` on the write-end; if write returns `EAGAIN`, skip that frame (child will see the next one) +- Pipe buffer is typically 64KB on Linux — at ~100 bytes per JSON line, that's ~640 frames of buffer, more than enough + +#### 5c. Child process lifecycle + +The child is spawned at gesture begin. What if: +- **Child exits early:** EPIPE on write → set `spawn_pipe = None`, continue gesture normally +- **Gesture ends before child is done:** Write end event, drop pipe (stdin closes), child sees EOF and should exit. If it doesn't, it's the child's problem (orphaned to init, same as any spawn) +- **Multiple rapid gestures:** Each spawns a new process. 
Previous one gets EOF'd when the gesture ends. This is fine — same as spawning any command rapidly + +#### 5d. Security / information leak surface + +Current spawn already inherits the compositor's environment (minus RUST_BACKTRACE, plus DISPLAY and user-configured env). Adding gesture state doesn't meaningfully expand the attack surface — the spawned process already runs with the user's privileges. The gesture info (fingers, direction) isn't sensitive. + +The stdin pipe is scoped to the child process's fd table — no other process can read it (unlike the IPC socket which any process can connect to). This is actually **better** isolation than tags. + +### Part 6: Config Example — Before and After + +**Before (tags + external daemon):** +```text +// niri config — user must invent a tag name +binds { + TouchSwipe fingers=3 direction="up" tag="ws-up" { noop; } +} + +// Separate daemon that must: +// 1. Be running before the gesture fires +// 2. Connect to niri's IPC socket +// 3. Subscribe to the event stream +// 4. Know the exact tag string "ws-up" +// 5. 
Filter events, handle begin/progress/end lifecycle + +// daemon.py: +// socket = connect_niri_ipc() +// for event in socket.event_stream(): +// if event.type == "GestureBegin" and event.tag == "ws-up": +// handle_begin(event) +// elif event.type == "GestureProgress" and event.tag == "ws-up": +// drive_animation(event.progress) +// elif event.type == "GestureEnd" and event.tag == "ws-up": +// finish(event.completed) +``` + +**After (spawn + env vars + stdin):** +```text +// niri config — no tag, no daemon coordination +binds { + TouchSwipe fingers=3 direction="up" { + spawn-sh "my-gesture-handler.sh" + } +} +``` + +```bash +#!/bin/bash +# my-gesture-handler.sh — fully self-contained +# Everything we need is in our environment: +echo "Got $NIRI_GESTURE_TYPE $NIRI_GESTURE_DIRECTION with $NIRI_GESTURE_FINGERS fingers" + +# For continuous gestures, progress streams on stdin: +if [ "$NIRI_GESTURE_CONTINUOUS" = "true" ]; then + while IFS= read -r line; do + progress=$(echo "$line" | jq -r '.progress // empty') + event=$(echo "$line" | jq -r '.event') + [ "$event" = "end" ] && break + # Drive animation with $progress + done +fi +``` + +No tag, no daemon, no IPC socket, no event stream, no string coordination between config and consumer. The process is born knowing everything about its gesture. + +### Part 7: What This Means for the Architecture + +**Tags were a layer-violation.** They made the compositor's IPC stream carry user-defined semantics (arbitrary tag strings) that only had meaning to external processes. The compositor itself never used the tag — it just forwarded it. This is the separation concern that motivated the rethink. + +**Env vars + stdin is compositor-native.** The compositor already spawns processes with enriched environments (`XDG_ACTIVATION_TOKEN`, `DISPLAY`, user-configured env). Adding gesture state to the spawn environment is the same pattern — the compositor prepares the child's world, the child runs in it. 
+ +**The stdin pipe replaces the IPC event stream for the per-gesture case.** Instead of a global pub-sub channel (IPC event stream) where consumers filter by tag, each gesture gets a dedicated, private, typed channel (stdin pipe) that lives exactly as long as the gesture. This is: +- **Better isolated** — no cross-gesture interference, no global namespace +- **Simpler lifecycle** — pipe opens at gesture begin, closes at gesture end, no subscribe/unsubscribe +- **Self-cleaning** — when the gesture ends, the pipe closes, the process gets EOF, done + +**The IPC event stream survives** but becomes simpler: tag-free `GestureCommit`/`GestureFinish` events for observability tools, not for driving application logic. This is the right separation — the IPC stream is for *watching*, spawn is for *doing*. + +### Part 8: How `spawn` Becomes Context-Aware (Discrete vs Continuous) + +There's a key design question: **how does the compositor know whether a gesture spawn is discrete or continuous?** + +Currently, continuous vs discrete is inferred from the action type — `workspace-switch-gesture` is continuous because the compositor knows it drives an animation. `spawn` is always discrete (fire and forget). But with env-var spawn replacing tags, we need `spawn` to handle both modes. + +The guiding principle: the **spawn action** has different behavior **when called from a gesture bind** — `spawn` itself becomes context-aware, not a new action type. + +#### The approach: `spawn` always pipes on gesture binds + +When `spawn` fires from a gesture bind, the compositor always sets up the stdin pipe for gestures that support continuous tracking (swipe, pinch, rotate, edge). The process gets `NIRI_GESTURE_CONTINUOUS=true` in its env and progress streams on stdin. + +If the process doesn't care about continuous progress, it simply doesn't read stdin. The kernel pipe buffer (64KB on Linux, ~640 frames of JSON) absorbs the writes silently. 
When the gesture ends and the pipe closes, any unread data is discarded. No harm done. + +```text +// Discrete use — script ignores stdin, reads env, does its thing +TouchSwipe fingers=3 direction="up" { + spawn "notify-send" "Swiped up!" +} + +// Continuous use — same spawn, script reads stdin for progress +TouchSwipe fingers=4 direction="up" { + spawn-sh "my-animation-driver.sh" +} +``` + +Both are just `spawn`. The compositor doesn't need to know the script's intent. The pipe is always there; the script decides whether to use it. + +- From a keyboard/switch bind: `spawn` fires as today — no env vars, no pipe, `Stdio::null()` +- From a gesture bind: `spawn` sets `NIRI_GESTURE_*` env vars and pipes progress on stdin + +This is the most "niri" answer — the compositor figures out the right thing from context, the user just writes `spawn`. No new action types, no new config properties, no user-facing complexity. + +#### Alternatives considered and rejected + +- **`spawn-continuous` as a separate action:** Explicit, but doubles the action surface (spawn/spawn-sh/spawn-continuous/spawn-sh-continuous) for no real gain — the compositor already knows the context. +- **`spawn-gesture` as a new action:** Clean separation, but means `spawn` on a gesture bind would be "dumb" (no env vars), wasting the opportunity to enrich the existing action. Splits the action surface unnecessarily. +- **`continuous=true` bind property:** Unnecessary indirection — if the script doesn't want progress, it just doesn't read stdin. + +### Part 9: Open Questions + +1. **Progress format on stdin:** NDJSON (`{"progress":0.42,"dx":0.0,"dy":-12.1,"timestamp_ms":48217}`) is flexible and self-describing but requires a JSON parser. Alternative: tab-separated values (`0.42\t0.0\t-12.1\t48217`) — trivial to parse in bash with `read`, but harder to extend. JSON is probably the right default since niri's IPC already speaks JSON, and most languages parse it trivially. 
Feedback welcome on whether a simpler line format would better serve shell-script use cases. + +2. **Does spawn fully replace the external-daemon pattern?** With tags, a long-running daemon could monitor *all* gestures from one process. With spawn, each gesture gets its own short-lived process. For most users this is simpler, but a power-user who wants a single daemon reacting to multiple gesture types would need either: (a) the tag-free `GestureCommit`/`GestureFinish` IPC events proposed in Phase 0, or (b) multiple spawn binds that all call the same script (which reads its env to know which gesture fired). Option (b) is probably fine — the "daemon" pattern was always over-engineered for most use cases, but real use cases that break under (b) would be worth hearing about. + +3. **Does this fully address the layering concern?** The separation concern (config ↔ external app coupled by string convention) is eliminated: the process gets its context from the compositor directly via env vars, no string convention needed. But the process itself is still external — is `spawn` + env vars enough "niri-native", or would something more integrated (e.g., compositor-internal scripting for animation logic) be preferable? Opinions welcome. + +--- + +## Part 10: Second-Pass Refinements (2026-04) + +After the initial proposal (Parts 1–9), a second round of design discussion raised additional ideas. This section captures those refinements and how they interact with the original plan. + +### 10a. Prefer IPC event stream over spawn-pipe for observers + +A complementary channel was proposed: + +> Extending the IPC with something like `niri msg watch-gestures` or `niri msg event-stream --filter gestures`. Emit events with associated data (which can be limited or expanded via config with some sane defaults) for all committed gestures and clients can subscribe to those events to do things. 
+ +**How this relates to the existing spawn+pipe proposal:** + +- The spawn+pipe approach (Phase 1 above) is still valuable for **self-contained per-gesture scripts** — no IPC subscription required, the process is born knowing its context. +- But for **long-running observers** (quickshell panels, sidebar drawers, HUDs), a public IPC event stream is the right channel — a daemon subscribes once and reacts to all gestures. +- These are **complementary**, not competing. Both should exist. + +**Refinement to Phase 0:** The "tag-free `GestureCommit`/`GestureFinish`" event from Phase 0 gets upgraded from a minor observability aside to a **first-class public API**: + +```console +$ niri msg event-stream | grep -E "GestureBegin|GestureProgress|GestureEnd" +GestureBegin trigger="TouchEdge edge=\"left\"" fingers=1 continuous=true +GestureProgress trigger="TouchEdge edge=\"left\"" progress=0.23 dx=0.0 dy=-12.1 timestamp_ms=48217 +GestureProgress trigger="TouchEdge edge=\"left\"" progress=0.47 dx=0.0 dy=-18.4 timestamp_ms=48233 +GestureEnd trigger="TouchEdge edge=\"left\"" completed=true +``` + +Trigger field is the **same string the user writes in config** — no invented tags, direct pattern-match. A sidebar daemon filters by `trigger="TouchEdge edge=\"left\""`, not by a user-assigned tag. + +Fields available on the stream (optionally filterable via config): +- `trigger` — e.g. `"TouchSwipe fingers=3 direction=\"up\""` +- `fingers` — finger count +- `continuous` — whether progress will stream +- `progress`, `delta`, `timestamp_ms` — on `GestureProgress` events +- `completed` — on `GestureEnd` events + +### 10b. `noop = consume` semantics (replaces `touchscreen-gesture-passthrough`) + +A proposed consumption model based on whether a gesture is bound: + +> If the IPC is used to let some app handle a gesture, bind it to `noop` in `binds {}` with a comment, and niri knows not to forward it to clients. Without a noop bind, it gets forwarded. 
This removes the need for the `touchscreen-gesture-passthrough` window-rule as a simplification.
+
+**The claim/forward decision:**
+
+| Bind state | Compositor action | Client receives event |
+|------------|-------------------|----------------------|
+| No bind for this gesture | Nothing | Yes (forwarded) |
+| Bound to concrete action | Executes action, emits IPC | No (consumed) |
+| Bound to `noop` | Does nothing, emits IPC | No (consumed by IPC daemon) |
+
+This replaces the current `touchscreen-gesture-passthrough` window rule semantics for the **gesture family level**. The window rule becomes unnecessary for the "block gesture passthrough for this trigger" use case — just bind it to `noop` or an action.
+
+**Caveat:** The current `touchscreen-gesture-passthrough` is a **window rule** — scoped per-window. The proposed model is **global per-trigger**. These aren't fully equivalent:
+
+- Today: window rule lets a browser handle its own 2-finger gestures while niri still handles them over other windows
+- Proposed model: binding is global — you can't say "don't intercept 3-finger swipes over this specific window"
+
+Whether that loss of per-window scoping matters in practice is a separate design question. For the 3+ finger compositor-gesture use case it probably doesn't (users want the same gestures everywhere). For the 2-finger scroll/zoom passthrough it still matters — but that's handled by finger-range restriction, not by `noop`.
+
+**Conclusion on the window rule:** Originally considered for removal once `noop = consume` is in place. **However, Part 12 re-introduces the per-app escape hatch for the fingers=1/2 disambiguation model** (as Gate 1 in the now-superseded three-gate design; as window-rule `binds {}` with `unbound` in the current proposal) — for apps like Firefox/PDF viewers that need native 1/2-finger touch. So a per-window override stays if fingers=1/2 support ships; the dedicated window rule can only be removed if the finger range stays at 3+.
+
+### 10c. Open question: fingers=1/2 with noop-consume
+
+A related suggestion:
+
+> Expand the valid finger range down to 1 finger. 
Especially useful for edge gestures.
+
+This interacts with `noop = consume` in a concerning way:
+
+- Binding `TouchSwipe fingers=1 direction="up" { noop; }` would **globally claim** all 1-finger up-swipes
+- Every app loses its primary scroll interaction
+- Email lists, photo viewers, web pages — all broken
+
+The `noop = consume` model works cleanly for 3+ finger gestures because they don't overlap with primary client input. Extending it to 1-2 fingers requires a spatial/temporal disambiguation mechanism — exactly what `TouchEdge` already provides via `edge-start-distance`.
+
+**Suggested position at the time of 10c (superseded — see Part 12):** Keep `fingers=3..=10` as the range for the 5 non-edge families. `TouchEdge` remains the 1-finger option (spatially restricted to avoid client conflict). If someone wants middle-of-screen 1-finger gestures, they need to propose a spatial/temporal disambiguation mechanism — not just expand the range.
+
+**Current position (Part 12):** That disambiguation mechanism now exists — first as the three-gate model (window rule → bind-existence → threshold timing), since superseded by the window-rule `binds {}` proposal (global claim, per-app `unbound` release, threshold timing only where a claim exists) — making fingers=1/2 viable as an opt-in-per-pattern feature with zero cost for users who don't write such binds. See Part 12 for the full flow.
+
+### 10d. 
Refined implementation sketch + +Combining the original plan with the second-pass refinements: + +**Phase 0 — Rip out tags, add public gesture event stream** +- Remove `tag: Option` everywhere +- Add `GestureBegin`/`GestureProgress`/`GestureEnd` events to the public IPC stream, emitted for **all** committed gestures (no opt-in tag needed) +- Events carry `trigger` (config-matching string), `fingers`, `continuous`, `progress`, `delta`, `completed` +- This replaces the current tag-gated events with a universal stream + +**Phase 1 — spawn + env vars + stdin pipe** +- Unchanged from the original proposal +- For per-gesture self-contained scripts that don't need IPC + +**Phase 2 — `noop = consume` semantics (new)** +- Replace the current `touch_gesture_passthrough` check with: bound gesture (including `noop`) → don't forward to client +- Deprecate/remove `touchscreen-gesture-passthrough` window rule after verifying no real use case depends on per-window scoping +- Document clearly: `noop` bind = "niri claims this gesture for IPC consumption" + +**Phase 3 — Settings UI updates** (unchanged) + +### 10e. What this means for users + +**Before (tags):** +```text +TouchSwipe fingers=3 direction="up" tag="ws-up" { noop; } +``` +Plus a separate daemon that subscribes to IPC, filters by tag="ws-up", drives animation. + +**After (event stream + noop):** +```text +TouchSwipe fingers=3 direction="up" { noop; } // claims this gesture for IPC +``` +Daemon subscribes to IPC, filters by trigger pattern-match. No invented tag names. + +**Or even simpler (spawn + env):** +```text +TouchSwipe fingers=3 direction="up" { + spawn-sh "my-handler.sh" +} +``` +Script reads `NIRI_GESTURE_*` env vars, reads stdin for progress. + +Three clean paths — user picks whichever fits their use case: +1. **I want a self-contained script** → `spawn` +2. **I want a long-running daemon watching multiple gestures** → `noop` + IPC event stream +3. 
**I just want niri to do the thing** → bind to an action directly + +--- + +## Part 11: Cross-cutting Concern — Internal vs IPC Progress Mismatch + +This concern applies to **all three paths** above (spawn, event stream, noop-consume) because it's about the fundamental design of how niri's internal gesture math relates to what external consumers see. Previously documented separately in `TAG_GESTURE_PROGRESS_MISMATCH.md` — consolidated here since it's a subproblem of the tag-replacement architecture. + +### The two threshold systems + +Niri gesture handling has two independent progress/threshold systems that are not synchronized. + +**1. Internal compositor animations:** +Niri's layout code decides when to commit actions (workspace switch, column scroll, overview toggle) based on its own internal gesture math: + +- `workspace_switch_gesture_end()` — uses internal distance + velocity to decide whether to switch or snap back +- `view_offset_gesture_end()` — same for column scrolling +- `overview_gesture_update()` / `overview_gesture_end()` — own threshold for toggle commit + +These thresholds are **not configurable** and **not exposed** via IPC. External tools cannot know when niri will commit an action. + +**2. 
IPC progress events:** +External tools receive `GestureProgress` events with an accumulated `progress` value: + +```text +progress = accumulated_delta * sensitivity / gesture-progress-distance +``` + +Where: +- `sensitivity` — per-bind config (touchscreen default: 0.4, touchpad default: 1.0) +- `gesture-progress-distance` — configurable per-input-type (touchscreen: 200 px, touchpad: 40 libinput units) + +### The mismatch + +These two systems operate independently: + +- A touchscreen swipe might reach `progress = 0.8` in IPC, but niri's internal threshold commits the workspace switch at a completely different point +- Conversely, `progress` could hit `1.0` before niri commits, or niri could commit when `progress` is only `0.3` +- The IPC `GestureEnd { completed }` field distinguishes normal end (`true` when all fingers lift without interruption) from cancellation (`false` when a new finger arrives mid-gesture or cleanup fires on interruption). It does **not** indicate whether niri's internal threshold caused the compositor to actually commit the bound action — a touch workspace swipe that ends with all fingers lifted emits `completed: true` regardless of whether the compositor snapped forward to the new workspace or snapped back to the original. + +### Where this matters across the three paths + +- **`spawn` path:** The script's stdin stream has the same mismatch — progress values don't tell the script whether niri committed +- **`noop` + IPC event stream path:** Same mismatch — daemons watching the event stream can't predict niri's commits +- **`noop` with no compositor animation:** No mismatch — progress IS the sole output (the clean case) + +**Conclusion:** The mismatch is inherent to "gesture drives both a compositor animation AND external consumers." It's not fixed by changing the IPC channel. 
+ +### Touchscreen vs touchpad scale difference + +The delta units are fundamentally different between input types: + +| Input | Delta Units | Default `gesture-progress-distance` | Default `sensitivity` | +|-------|------------|--------------------------------------|----------------------| +| Touchscreen | Screen pixels (large numbers, e.g., 500px per swipe) | 200 | 0.4 | +| Touchpad | Libinput acceleration-adjusted units (small numbers, e.g., 30 per swipe) | 40 | 1.0 | + +Both aim for roughly equivalent physical gesture sizes, but the underlying units are incomparable. A third-party app receiving progress events from both input types gets consistent 0-1 progress values, but the raw `delta_x`/`delta_y` values will differ dramatically in scale. + +### Touchscreen tracks closer to internal state than touchpad + +In practice, touchscreen IPC progress aligns more closely with niri's internal animation state than touchpad does. This is because: + +- **Touchscreen** deltas are in **screen pixels** — the same unit niri's layout code uses to track scroll offset and animation position. So accumulated `progress = pixels * sensitivity / distance` naturally correlates with niri's internal `scroll_offset / output_height`. +- **Touchpad** deltas pass through **libinput's acceleration curves** first, making the relationship between physical finger movement and layout displacement nonlinear. The same physical swipe distance can produce different delta magnitudes depending on speed, making it harder to tune IPC progress to match niri's commit point. + +This means an external app showing visual feedback alongside a compositor-animated gesture (e.g., a progress bar for workspace switching) will feel more in sync on touchscreen than touchpad. The mismatch on touchpad is more noticeable — niri may snap back while the external progress indicator shows 80%. 
+ +### Potential fixes (independent of the IPC channel choice) + +- **Expose whether niri actually committed the action** in `GestureEnd` — add a `triggered` or `action_committed` field. Probably the simplest fix with highest value. +- **Expose niri's internal gesture completion percentage** alongside IPC progress, so consumers can drive their UI from the compositor's view of commit instead of raw finger motion. +- **Unify the two systems** so IPC progress matches the compositor's internal state. Biggest change, probably not worth it — the raw progress value is useful for apps that want to drive their own independent animations. + +These fixes should be tackled as part of Phase 0 (tag removal + public event stream) since they affect the event API shape. + +--- + +## Part 12: Disambiguation Flow for fingers=1 / fingers=2 + +The PR currently restricts touch gesture families (Swipe/Pinch/Rotate/Tap/TapHoldDrag) to `fingers=3..=10`. TouchEdge is hardcoded 1-finger and spatially scoped to the edge pixel range, so it doesn't conflict with general 1/2-finger app input. Opening up fingers=1 and fingers=2 on the other five families raises a disambiguation question: how do we distinguish "compositor wants this gesture" from "client wants this touch"? + +### Scope: deferred to a follow-up PR + +Per Atan's suggestion (2026-04-16), the current PR (niri-wm/niri#3771) stays scoped to `fingers=3..=10`, which is already larger than the Blur and Zoom PRs combined. fingers=1/2 support lands as a separate, focused follow-up PR built on the `window-rules` mechanism described below. + +### The conflict space + +- **fingers=1** — every tap, scroll, drag, and text selection in every app is a 1-finger touch +- **fingers=2** — every pinch-zoom, every two-finger scroll in browsers/PDF viewers/image viewers + +At fingers=3+ the contract is easy because virtually no native Wayland client uses 3+ finger gestures. At 1/2 we have to arbitrate. 
+ +### Current direction: per-window `binds {}` in window-rules (Atan's proposal) + +Rather than heuristically arbitrating at runtime (the earlier three-gate model), expose a `binds {}` block inside `window-rule {}` so apps can declaratively release gestures the compositor claimed globally. This collapses "does this app want the gesture?" from three gates into one config lookup. + +```text +binds { + // Compositor claims 1-finger swipe up globally for IPC / bound action + TouchSwipe fingers=1 direction="up" { noop; } +} + +window-rule { + match app-id="firefox" + binds { + // Release the claim for firefox — gesture forwards to client so + // native scroll keeps working. `unbound` is a sentinel because an + // empty action block is invalid KDL. + TouchSwipe fingers=1 direction="up" { unbound; } + } +} +``` + +**Semantics:** + +- Global `binds {}` — compositor's default claim on a gesture pattern. `noop` = claim with no action (IPC/event consumer), a real action = claim + execute. +- Window-rule `binds {}` — per-app override. `unbound` releases the claim and forwards touch events to the client (when that window is focused / under the touch centroid). +- **Precedence:** window-rule `unbound` > window-rule action > global `noop` > global action > no bind (default passthrough). + +**Why this is cleaner than the old three-gate model:** + +- **Gate 1 (passthrough rule) + Gate 2 (bind existence) collapse into one** — the `binds {}` block in the window-rule *is* the passthrough decision, and `unbound` is its explicit keyword. +- **Gate 3 (threshold timing) becomes simpler, not disappeared** — if the matched rule says `unbound`, the compositor can skip the grab entirely for that window (no event buffering, no latency). If the rule claims the gesture, buffering kicks in normally. +- **Declarative, not heuristic** — intent lives in config, not in timing windows. +- **Reuses existing infrastructure** — niri already matches window rules on `app-id` / `title` / etc. 
+ +### Properties that fall out for free + +- **Sentinel keyword is `unbound`.** Atan used `unbound` in the original proposal — going with that. (KDL can't have empty action blocks, so a sentinel is required.) +- **Partial direction overrides work naturally.** Each `fingers=N direction=D` combination is a separate bind entry, so a window rule can release `direction="up"` for native scroll while leaving `direction="left"` claimed by the global bind. No extra syntax needed. + +### Decided behavior — touch resolves like the mouse cursor (spatial, not focus-following) + +Window-rule matching for gestures works **exactly like mouse-cursor semantics**: the rule matches against the window the fingers are physically on, not the keyboard-focused window. + +**The mental model:** you can hover the mouse cursor over an unfocused firefox window and scroll-wheel — firefox scrolls without stealing focus from your terminal. Touch should behave identically: if you're typing in a terminal and you touch firefox to scroll it, firefox's window-rule applies (so its native scroll passthrough kicks in), even though the terminal still has keyboard focus. Your touch acts where your finger is, not where your keyboard cursor is blinking. + +**Concretely:** +- The window-rule lookup uses the window under the touch centroid at touch-down (with first-finger-position as the multi-finger tiebreaker). +- Keyboard focus is irrelevant to this decision — exactly as it is for mouse pointer events. +- This means an unfocused app's window-rule `binds { ... unbound; }` works without requiring the user to focus the app first — touching it is enough. + +**Edge cases:** +- **Touch on empty desktop / layer-shell surface** (no app window underneath) — no window-rule match; global `binds {}` applies as the default. +- **Touch crossing windows mid-gesture** — already decided: claim is locked at touch-down, doesn't re-evaluate (see "Decided behavior — claim resolves at touch-down" below). 
+- **Multi-finger gestures with fingers on different windows at touch-down** — centroid picks one window deterministically; first-finger-position is the tiebreaker if centroid lands on a gap. +- **IPC-claimed gestures (`noop` with no action)** — same rule applies. The window under the touch determines whether its rule releases the claim, even when the eventual consumer is an external IPC listener. + + +### Decided behavior — claim resolves at touch-down, stable for gesture lifetime + +The claim (compositor-grab vs client-passthrough) is decided **once**, at the moment the first finger lands, based on the window under the centroid at touch-down. It stays stable for the entire gesture lifetime — until all fingers lift — even if focus changes, the window moves, the cursor would be over a different window now, or additional fingers land later. + +**Why this matters in practice:** + +1. **The recognizer is stateful.** Once the compositor decides "this is mine," it begins accumulating `cumulative_dx`/`cumulative_dy`, computing pinch spread from initial finger spread, tracking rotation from initial angles. Flipping mid-gesture to "actually, give it to the client" would mean tearing down that state with no clean exit — the recognizer has no concept of "abort and rewind." + +2. **The client-event stream is also stateful.** Wayland clients expect a `touch_down → touch_motion* → touch_up` lifecycle per slot. If the compositor consumed the early events and then mid-gesture decides to forward, the client sees a `touch_motion` with no preceding `touch_down` — which is a protocol violation. The reverse (forwarding then consuming) leaves the client with a `touch_down` that never gets a `touch_up`, so it sits with a dangling slot until the next gesture cleans it up. + +3. **Continuous animations would visibly glitch.** A workspace-switch animation tracking finger position would reach 60% progress, then suddenly stop receiving updates because the claim flipped. 
The animation either snaps back, freezes, or completes phantom-style — all bad. + +4. **Focus-change-during-gesture is normal, not exceptional.** An interactive-move drag *deliberately* crosses windows — the whole point is moving a window across other windows. If the claim re-evaluated based on "what's under the centroid right now," every move-grab would be hijacked the moment it crossed another app's window. Touch-down resolution makes the claim about *intent at gesture start*, not *current spatial position*. + +5. **Late-landing fingers don't change the claim.** If the gesture started as 1-finger (compositor-claimed) and a second finger lands mid-gesture, the existing claim sticks. The new finger participates in the existing recognizer's state. Whether this triggers an unlock-to-higher-finger-count (the existing `unlock-on-new-finger` mechanism in `touch_gesture.rs`) is orthogonal — that's about gesture *type* (e.g. 3-finger swipe → 4-finger swipe), not about *who owns* the gesture. + +**Implementation:** the claim resolution lives in the `TouchDown` handler. The result (claimed-by-compositor vs forward-to-client + which client) gets stored on the active gesture state struct and read by every subsequent `TouchMotion`/`TouchUp`/`TouchFrame` in this gesture. No re-lookup, no re-matching of window rules. + +--- + +### Superseded: three-gate disambiguation + +The following was the earlier design before the window-rule `binds {}` proposal. Kept for reference; no longer the active plan. + +#### Three-gate disambiguation (composes bind-existence + window rule + threshold timing) + +**Gate 1 — Window rule passthrough (app opts out):** + +`touchscreen-gesture-passthrough` already exists as a window rule. 
This is the per-app override when a user has global fingers=1/2 binds but wants specific apps to feel native: + +```text +window-rule { + match app-id="firefox" + touchscreen-gesture-passthrough "always" // never defer, never consume +} +``` + +When matched → forward immediately, no recognizer involvement. + +**Gate 2 — Bind existence = consume signal (`noop=consume` model from Part 10b):** + +Today `noop` is just "no action." The proposal: the *presence of any bind* (including `noop`) at a given `fingers=N direction=D` slot is the claim "compositor wants this pattern, don't forward." + +- No bind at `TouchSwipe fingers=1 direction="up"` → pass through (current default, unchanged) +- `TouchSwipe fingers=1 direction="up" { noop; }` → compositor watches, claims if matched, never reaches client +- `TouchSwipe fingers=1 direction="up" { focus-workspace-up; }` → same, plus the action runs + +This keeps the opt-in per-pattern rather than requiring a global "enable fingers=1/2" switch. Users who don't write fingers=1 binds experience zero behavior change. + +**Gate 3 — Threshold timing (recognizer decides):** + +When a bind exists and the window is not in passthrough, the compositor must buffer the first ~100px/200ms before deciding "bound swipe or client drag?" This is the latency cost of opting in. + +### Disambiguation flow + +```text +TouchDown (1 or 2 fingers) + ↓ +Window under finger has touchscreen-gesture-passthrough="always"? + → yes: forward immediately, no recognizer (Gate 1) + ↓ no +Any TouchSwipe/Pinch/Rotate/Tap/TapHoldDrag fingers=N bind exists for this N? + → no: forward immediately (current behavior preserved) (Gate 2) + ↓ yes +Buffer events, run recognizer + ↓ +Threshold crossed, matches a bound pattern? + → yes: consume, drop buffered events, fire bind (Gate 3 commit) + ↓ no +Timeout expired or motion stopped? 
+ → yes: flush buffered events to client, resume passthrough (Gate 3 release) +``` + +### Cost analysis + +- **Zero-cost for users who don't bind fingers=1/2** — if no such bind exists, Gate 2 short-circuits to immediate passthrough. No latency, no regression. +- **Per-pattern cost for users who do bind** — fingers=1/2 taps/drags in non-passthrough apps get buffered for threshold duration. Users accepted this cost by writing the bind. +- **Escape hatch for power users** — window rule passthrough lets them keep global fingers=1 binds while exempting specific apps. + +### Why `noop=consume` is the right primitive + +Without it, we'd need a separate syntax to say "claim this gesture but do nothing" — either a new keyword (`TouchSwipe fingers=1 consume;`) or a separate block. Treating bind presence as the claim signal means: + +- No new syntax +- `noop` gets a meaningful use (it's currently a no-op action with no purpose) +- Composable with real actions — binding to `focus-workspace-up` implies consume, same as binding to `noop` +- Matches the intuition that "if you told niri what to do with this gesture, niri should grab it" + +### Interaction with existing 2-finger scroll/touchpad semantics + +Touchpad 2-finger scroll is libinput-native and pre-classified — it arrives as `PointerAxis` events, not `GestureSwipe`. So `TouchpadSwipe fingers=2` would be an impossible bind (libinput never delivers 2-finger swipe events for touchpads). Touchpad fingers=1/2 disambiguation isn't really in scope — this applies to **touchscreen** fingers=1/2 only. + +### Open question + +Should fingers=1 and fingers=2 be *opt-in behind a config flag* (e.g., `allow-low-finger-gestures`) as a safety measure against users accidentally breaking their text selection? 
Arguments both ways: + +- **Opt-in flag:** explicit consent, easier to document "fingers=1 has latency cost" +- **No flag:** writing the bind is already opt-in per Gate 2; extra flag is redundant + +Leaning toward no flag — the bind existence is already the opt-in signal, and Gate 2 makes the cost zero for users who don't write the binds. diff --git a/docs/wiki/Design:-Touchscreen-Gestures.md b/docs/wiki/Design:-Touchscreen-Gestures.md new file mode 100644 index 0000000000..3ac4ec97a4 --- /dev/null +++ b/docs/wiki/Design:-Touchscreen-Gestures.md @@ -0,0 +1,395 @@ +# Design: Touchscreen Gestures + +> [!IMPORTANT] +> **Status: proposal / working prototype — not upstream niri's canonical design.** +> +> This document is not niri's official design position. It is a write-up of the choices I (the PR author) made while building a working touchscreen gesture implementation on the `feat/configurable-touch-gestures` branch, shaped by feedback from reviewers on the associated PR. The goal was to land *something that works* so there's a concrete reference point to experiment with, gather real-world feedback on, and iterate on — not to prescribe how niri should handle touch gestures long-term. +> +> Everything below describes what exists on this branch and why it was chosen over the alternatives I considered. It is explicitly open to being rethought, rewritten, or replaced. If you disagree with any section — especially §5 (design choices) and §6 (alternatives rejected) — that disagreement is the whole point of putting the rationale in writing. See §10 for how to push back. +> +> This document explains what Wayland gives us, what it doesn't, how other ecosystems solve the same problems, and why this implementation makes the specific choices it does. It is meant for contributors and reviewers deciding whether the current direction is worth building on, and for users of this branch curious about why the configuration surface looks the way it does. 
+ +For how to **configure** gestures on this branch, see [Configuration: Window Rules](./Configuration:-Window-Rules.md) and the main niri config documentation. This doc is strictly about the *why*. + +--- + +## 1. Scope + +What this doc covers: + +- The Wayland protocol landscape relevant to touch input +- Why touchpad and touchscreen gestures live in different layers +- How iOS, Android, and other Linux shells approach gesture ownership +- The specific design choices niri makes and the reasoning behind each +- Alternatives we considered and rejected, with rationale +- Open questions and directions for future work + +What this doc does **not** cover: + +- Configuration syntax (see the wiki pages) +- Specific gesture recognizer math (read `src/input/touch_gesture.rs`) +- Touchpad gesture internals beyond "niri uses libinput via smithay" (they aren't a niri-local problem) + +--- + +## 2. The Wayland protocol landscape + +Wayland touch input has a hard split between two worlds. Understanding the split is prerequisite to understanding why this doc exists. + +### 2.1 `wl_touch` (core, stable) + +Part of the core Wayland protocol. Exposes raw touch point lifecycle events: + +- `down(slot, surface, x, y)` — a new finger landed +- `motion(slot, x, y)` — an existing finger moved +- `up(slot)` — a finger lifted +- `frame` — atomic batch boundary for multi-point updates +- `cancel` — compositor revokes the touch stream + +That is the entire API. No semantics, no gesture recognition, no "swipe" or "pinch" primitives. The spec is explicit that gesture interpretation is the caller's responsibility. The caller here means *whoever is reading `wl_touch`* — usually the compositor, sometimes the client. + +### 2.2 `wp_pointer_gestures_v1` (unstable, widely adopted) + +A separate protocol that provides **touchpad** gestures only. 
Defines three semantic gesture types: + +- **Swipe** (`zwp_pointer_gesture_swipe_v1`) — begin / update / end lifecycle with finger count and dx/dy deltas +- **Pinch** (`zwp_pointer_gesture_pinch_v1`) — begin / update / end with scale factor and rotation +- **Hold** (`zwp_pointer_gesture_hold_v1`) — begin / end, used for tap-and-hold style interactions + +This protocol exists because libinput already does touchpad gesture recognition from the raw hardware events, and the Wayland layer just needed a standard way to expose libinput's output to clients and compositors. Niri uses this for touchpad gestures via smithay's libinput integration — no custom recognizer needed on that side. + +The protocol is still marked unstable (`unstable-v1`) but is implemented by all major compositors and all major toolkits. It is effectively the standard. + +### 2.3 The gap: no touchscreen gesture protocol + +There is **no** Wayland protocol for touchscreen gestures. Not stable, not unstable, not staged as a proposal in `wayland-protocols`. The explicit design position from both the Wayland community and libinput is that touchscreen gesture recognition requires context (focus, window layout, app intent) that the input stack doesn't have. + +There is also no protocol for **client cooperation** — no way for an app to tell the compositor "I handle 3-finger swipes in my content area, leave them alone." The closest analogue, `zwp_keyboard_shortcuts_inhibit_v1`, exists for keyboard shortcuts but has no touch equivalent. + +This gap is the root cause of nearly every design compromise in this document. + +--- + +## 3. Why touchscreen gestures don't live in libinput + +libinput is the layer that turns raw kernel input device events into semantic events for compositors. It recognizes touchpad gestures (swipe, pinch, hold) and hands them up the stack cleanly. It explicitly refuses to do the same for touchscreens. 
The reason isn't laziness — it's a genuine architectural difference between the two input types. + +### 3.1 Touchpad: indirect manipulation, unambiguous recipient + +- Fingers move on a surface that **isn't** the thing they're affecting. The pointer is the proxy. +- The touchpad belongs to the focused window or the compositor. There's exactly one plausible recipient for any gesture event — whoever has pointer focus. +- Many modern touchpads (Apple Magic Trackpad, Microsoft Precision Touchpads) recognize gestures in **firmware**. The hardware says "3-finger swipe" directly. libinput forwards that, adds fallback recognition for dumber hardware, and exposes semantic events. +- State is clean: `n` fingers down means a gesture is active with that many fingers. Any finger lifting ends it. Palm rejection is well-understood. +- libinput can confidently say "this is a compositor/pointer-bound gesture event" because there's no other reasonable interpretation. + +### 3.2 Touchscreen: direct manipulation, ambiguous recipient + +- Fingers are **on** the thing they're affecting. The content under the finger is the target. +- The same 3-finger contact at the same coordinates could legitimately mean: + - The user is drawing three strokes in a paint app + - Two people on a shared tablet both tapping at once + - A compositor workspace-switch swipe + - A browser pinch-to-zoom on a webpage + - Palm rest plus one intentional tap +- libinput has zero visibility into what's under those coordinates. It doesn't know about Wayland surfaces, window focus, or client intent. Only the compositor has that context. +- Hardware is also dumber: touchscreens report `(slot, x, y)` per contact point with no gesture semantics. There's nothing for libinput to forward. +- State is messy: new fingers can arrive at any time, bezel phantom touches, hand resting, palm rejection depends on geometry libinput can't see. 
+ +### 3.3 libinput's stated position + +Paraphrasing the libinput maintainers' public position: *"We can recognize motion from raw touch points, but we cannot tell you whether the user meant that motion for the compositor or for the app under their finger. That's a compositor decision, not an input-stack decision."* + +On touchpad that question has a trivial answer ("the compositor, always"). On touchscreen it doesn't, and libinput refuses to guess because a wrong guess means silently stealing input from an app. So touchscreen gesture recognition ends up **inside each compositor**, built from raw `wl_touch` events. Every major compositor has independently reinvented its own recognizer for exactly this reason. + +--- + +## 4. How other ecosystems solve it + +Worth understanding because the design lessons map directly onto what a Wayland solution could look like. + +### 4.1 iOS (UIKit) + +iOS has an explicit gesture recognizer arbitration system baked into UIKit: + +- **Every view can attach gesture recognizers.** Both apps and the system. +- **Priority chain.** System-level recognizers (home bar, control center, notification shade) sit at the top of the hierarchy. +- **Failure requirements.** A recognizer can declare "I only activate if this other recognizer fails first." This is how UIKit handles "tap vs. long-press" and also how system edge-swipe defers to an app's swipe when appropriate. +- **Simultaneous recognition.** Two recognizers can explicitly opt into firing at the same time — pinch + pan on a photo viewer, for example. +- **Dedicated edge recognizers.** `UIScreenEdgePanGestureRecognizer` is a distinct type. Apps can attach their own and negotiate with the system's. + +The negotiation isn't a runtime question per touch event — it's **declared up front** by the view hierarchy. When a finger lands, UIKit walks the view tree from deepest to shallowest, collects every recognizer that could match, then arbitrates based on declared priority and failure rules. 
+ +This works because Apple ships iOS + UIKit + the hardware as one vertically integrated stack. There is no protocol problem because there is no protocol — it's all one process model with a shared API. + +### 4.2 Android + +Android takes a different approach but lands in the same place: + +- **`onInterceptTouchEvent` chain.** Touch events bubble up through `ViewGroup`s. Each parent can claim ownership by returning true, at which point children stop seeing the events. This is how scroll containers steal touches from buttons mid-gesture. +- **Standard framework classes.** `GestureDetector` and `ScaleGestureDetector` are built into the Android framework. Everyone uses the same ones, so gesture behavior is consistent across apps. +- **System gestures live above the app.** Back, home, and recents (since Android 10 gesture nav) are handled at the `WindowManager` layer, not inside the view hierarchy. +- **`systemGestureExclusionRects`** — this is the important one. An app can tell the system: *"in these rectangles, don't treat edge swipes as system gestures."* Games and drawing apps use this to claim screen edges when the user is actively using them for content. Apps can also read `WindowInsets.getSystemGestures()` to see where system gestures are active and lay out their UI accordingly. + +Android 10's gesture-nav rollout was specifically driven by this problem. Google needed to steal more of the screen edges for system gestures and ran into exactly the conflict niri runs into. Their answer was **`systemGestureExclusionRects`**: a tiny, minimal opt-out API that doesn't try to solve everything, just the most common conflict case. + +This is the closest real-world precedent for what a Wayland touchscreen gesture protocol could look like. + +### 4.3 Linux phone shells + +Linux mobile UX is the most instructive comparison because it's a touch-first world built on the same protocol stack we have. 
Every Linux phone shell has independently reinvented the same hacks: + +- **Phosh** (PinePhone, Librem 5, GNOME-based) — gestures handled inside Phoc, a wlroots-fork compositor. Apps receive raw `wl_touch` events; Phoc reserves edges for the app drawer and notification shade. No negotiation protocol. +- **Plasma Mobile** — uses KWin's touch handling. Hardcoded system edges. Same story. +- **SXMO** (minimalist postmarketOS shell) — uses `lisgd` as a separate daemon reading libinput directly. System owns everything; apps are effectively gesture-blind. +- **Furios / Droidian** (Halium-based, Android drivers underneath) — inherits Android's gesture semantics from the hardware layer but runs regular Wayland compositors on top. Ends up with the worst of both worlds. + +Every one of these shells ships with the same core limitation: **system gestures are hardcoded, app gestures are whatever the toolkit happens to support, there is no negotiation**. When Firefox on a PinePhone handles pinch-to-zoom, it works because GTK handles 2-finger touches directly via `wl_touch` — not because anyone negotiated anything. + +### 4.4 Userspace gesture daemons + +Several projects have tried the "external daemon recognizes gestures, compositor reads from it" architecture: + +- **TouchEgg** — originally X11, adapted for Wayland. Reads libinput events directly, recognizes gestures, maps them to actions via XML config. Popular as a "make Linux feel like macOS" touchpad tool. +- **lisgd** (libinput simple gesture daemon) — smaller scope, shell-command-based, stateless. Popular in SXMO and bespoke postmarketOS setups. +- **InputActions** — newer, KDE-specific, funded work for Plasma 6 Wayland. Lives *inside* KWin rather than as a separate daemon. + +The common issue: on Wayland, any external daemon architecture breaks on **device ownership**. libinput exposes a single reader interface per device — whichever process grabs it "owns" the stream. 
If TouchEgg grabs exclusively, the compositor gets nothing. If neither grabs exclusively, they both see every event and double-handle. There's no "daemon sits between kernel and compositor" slot in the Wayland stack; the compositor is the input router by design. + +X11 had this slot because of its split server/client architecture with a routable event path. Wayland removed it deliberately for security and simplicity. This is why **compositor-agnostic gesture daemons don't work on Wayland** and why KDE moved InputActions *inside* KWin. + +### 4.5 The unifying observation + +Every ecosystem that has solved touchscreen gesture ownership has done so by **owning the whole stack** — iOS with UIKit + the OS, Android with the view system + WindowManager, KDE with InputActions + KWin. The problem isn't that the solution is hard to design. It's that the solution requires coordination between input, toolkit, and window manager, and Linux has that coordination problem stratified across dozens of unrelated projects. + +--- + +## 5. Niri's design choices + +This section is explicitly opinionated. Each choice is labeled with its reasoning so reviewers can argue with the rationale, not just the result. + +### 5.1 Compositor-side recognizer from raw `wl_touch` + +**What:** Niri reads raw `wl_touch` events in `src/input/touch_gesture.rs` and runs its own gesture recognizer (direction lock, finger count tracking, pinch detection, edge swipe detection). + +**Why:** There is no alternative. libinput won't recognize touchscreen gestures. Clients receiving raw touches can't participate in compositor actions. Userspace daemons can't sit between the compositor and libinput. The compositor is the only layer that has both the input stream *and* the window context needed to make gesture routing decisions. This is the same conclusion KWin, Mutter, Phoc, and every other Wayland compositor has reached. 
+ +### 5.2 Unified `binds {}` block with parameterized gesture triggers + +**What:** Touchscreen, touchpad, keyboard, and mouse gesture binds all live in the same `binds {}` block. Multi-finger gestures are parameterized via KDL properties: `TouchSwipe fingers=3 direction="up"` rather than hardcoded node names like `TouchSwipe3Up`. The five gesture families (`TouchSwipe`, `TouchpadSwipe`, `TouchPinch`, `TouchRotate`, `TouchEdge`) are the only first-class gesture node names; everything else is properties. + +**Why:** +- **Modifier combos come for free.** `Mod+TouchSwipe fingers=3 direction="up"` reuses the existing key-bind parser with no new code paths — modifiers are stripped off the node name before property parsing begins. +- **One lookup path.** `find_configured_bind()` handles every input type identically. `Trigger::TouchSwipe { fingers, direction }` is a struct variant, so `Eq`/`Hash` still work; bind lookup is unchanged from the hardcoded design. +- **Consistency with niri's existing model.** Niri's keyboard and mouse binds already live in `binds {}`, and all other bind attributes (`tag=`, `natural-scroll=`, `sensitivity=`, `cooldown-ms=`) are KDL properties. Hardcoding finger count into the *node name* was the one place where touch gestures diverged from the rest of the config grammar; this closes that gap. +- **Arbitrary finger counts.** `fingers=N` accepts any integer in `3..=10`. Users with tablets and large multitouch displays that report 6–10 contacts can bind to them without an enum change on the compositor side. The `3..=10` range is enforced by the parser with a clear error on out-of-range values. +- **Per-family validation.** Each family has its own legal direction vocabulary (swipe takes `up/down/left/right`, pinch takes `in/out`, rotate takes `cw/ccw`, edge takes `left/right/top/bottom` with optional `zone=`). Invalid combinations are rejected at parse time, not at runtime. 
+- **Hard break from the old syntax.** The previous enum-per-combination design (`TouchSwipe3Up`, `TouchEdgeTop:Left`) is gone — no dual-parse, no deprecation aliasing. A cleaner config grammar is worth the one-time migration cost for a pre-1.0 feature with a small user base. + +### 5.3 Tag property + IPC gesture events + +**What:** Gesture binds can carry an optional `tag="name"` property. Tagged binds emit `GestureBegin` / `GestureProgress` / `GestureEnd` events on niri's existing IPC event stream, letting external tools observe gestures for custom animations or UI feedback. + +**Why:** +- **External extensibility without a scripting runtime.** niri doesn't need to embed Lua or JavaScript; tools subscribe to IPC events and react. +- **Security-scoped.** Only tagged gesture binds emit IPC events. Keyboard input never appears in the event stream. This is a deliberate scoping decision — "we expose gestures because they're low-frequency, high-intent user actions, but we don't expose every keystroke." +- **Three distinct modes.** With tags + the `noop` action, niri supports: + 1. **Observe** — `tag="ws"` + real action: niri runs the action and emits IPC events for external UI feedback + 2. **IPC-only** — `tag="drawer"` + `noop`: niri captures the gesture purely for IPC, runs no compositor action + 3. **Plain** — no tag: niri runs the action, no IPC emission +- **Both discrete and continuous noop are supported.** A tagged `noop` bind on a swipe or pinch drives the full begin/update/end lifecycle, emitting continuous `GestureProgress` events for external animations — external tools can draw finger-tracked UI without the compositor performing any action of its own. +- **Enables `niri-tag-sidebar` and similar tools** to build gesture-driven UIs without having to reimplement touch recognition themselves. + +### 5.4 `touchscreen-gesture-passthrough` window rule + +**What:** A window-rule bool field. 
When set on a matching window, niri's recognizer stays out of the way for touches that start on that window — events forward raw to the client for the lifetime of the gesture. +
+**Why:**
+- **Solves the 80% case with the simplest possible mechanism.** For apps that always want touch events (browsers, drawing apps, mapping tools), a per-app static rule is enough.
+- **User-controlled, not auto-detected.** Niri makes zero attempts to guess which apps want passthrough. Heuristics like "this is Electron, probably a webapp" produce unpredictable behavior. Explicit rule or nothing.
+- **Doesn't wait for a Wayland protocol that isn't coming.** The reviewer who raised this concern (in the PR discussion) explicitly acknowledged the "elaborate automatic" version feels bad; this ships the blunt-but-predictable alternative now.
+- **Discoverability via `RUST_LOG=niri=debug`.** When niri captures a gesture, it logs the app-id of the window under the touch, letting users see exactly which app-id to add to their passthrough rules.
+
+### 5.5 Escape hatches: Mod+touch and edge zones always bypass passthrough
+
+**What:** Even on a window with `touchscreen-gesture-passthrough true`, holding the mod key or starting a touch in a screen-edge zone still triggers compositor gestures.
+
+**Why:**
+- **Discoverable fallbacks.** "Gestures don't work in this app? Try Mod+gesture, or swipe from the edge." Every passthrough window has a way to invoke compositor actions without removing the rule.
+- **Edge detection runs before window lookup.** This isn't a special case — edge zones are already evaluated before the window is even checked, so passthrough is automatically excluded.
+- **Mod+ is an explicit user intent signal.** If the user holds the mod key, they are unambiguously asking for a compositor action. Passthrough is for implicit gestures; Mod+ is explicit, so it wins.
+
+### 5.6 Per-edge zoned triggers
+
+**What:** Each screen edge is split into thirds along its perpendicular axis. 
`TouchEdge` accepts an optional `zone=` property — `edge="top" zone="left"`, etc. — giving 12 zoned triggers in addition to the 4 unzoned parents. Zoned triggers fall back to the parent if not configured. The zone vocabulary rotates per edge: `top`/`bottom` edges take `left|center|right`; `left`/`right` edges take `top|center|bottom`. Mismatched vocabularies are a parse error. + +**Why:** +- **12 + 4 = 16 edge actions possible** without adding a new concept; power users can bind distinct actions per edge zone. +- **Parent fallback.** A bare `TouchEdge edge="top"` catches any top-edge swipe that doesn't land in a more specific zoned bind, so adding one zoned bind doesn't break the others. +- **Matches real-world UI patterns.** Status bars, notification shades, and app drawers all want *different* actions for different parts of the same edge. +- **Matching UI support in external tooling.** `niri-tag-sidebar` mirrors the zone model so tagged panels can anchor to specific zones. + +### 5.7 Touchpad via `wp_pointer_gestures_v1` (libinput) + +**What:** Touchpad gestures are read from libinput via smithay's existing plumbing, exposed through the same `binds {}` block with `TouchpadSwipe fingers=N direction="..."` triggers. No compositor-side recognition. + +**Why:** Touchpad gesture recognition is a solved problem at the libinput layer. Writing our own recognizer for touchpad would duplicate work, produce inconsistent semantics vs. other compositors, and lose firmware-reported gesture quality from modern hardware. The right answer is "use the standard, expose it through niri's bind model." + +--- + +## 6. Alternatives considered and not shipped + +Every decision in section 5 had alternatives. This section records the ones we looked at and why they didn't ship, so the same conversations don't have to happen repeatedly. 
+
+### 6.1 Dynamic per-gesture client dialog ("does your app want this?")
+
+**The idea:** Compositor detects a gesture starting, asks the client under the touch "want this one?", client responds yes/no, compositor routes accordingly.
+
+**Why not:** Requires a Wayland protocol that doesn't exist. Also adds IPC round-trip latency on gesture start, which is noticeable for continuous gestures. Parked until a protocol emerges.
+
+### 6.2 `allow-forwarding=true` per-bind property
+
+**The idea:** Each gesture bind gets a flag saying "forward to client instead of consuming." The reviewer's original proposal.
+
+**Why not:** The reviewer themselves acknowledged it "feels way too complicated." It puts the opt-out at the wrong layer — gesture policy should follow the *target app*, not the *bind*. A user wanting Firefox to handle gestures would have to annotate every single bind with `allow-forwarding` conditionally based on the focused window, which is exactly the complexity a window rule avoids.
+
+### 6.3 Zone granularity on passthrough window rule
+
+**The idea:** Instead of `touchscreen-gesture-passthrough true`, specify which gesture classes passthrough: `touchscreen-gesture-passthrough "swipe"`, or rectangles within the window where passthrough applies, or per-finger-count opt-outs.
+
+**Why not:** Overengineering for v1. The simple bool handles the common cases (browsers, drawing apps). Zone granularity only matters when the answer is "depends on what part of the window the finger is on," which is the dynamic case that only a real protocol can solve well. Trying to approximate it with static rectangles requires the user to manually track layout changes, which is worse than nothing.
+
+If a concrete use case appears that the bool can't handle, the field type can be widened (e.g. `Option<bool>` → an `Option` of a richer enum) without a breaking config change. Keeping v1 minimal preserves that flexibility. 
+ +### 6.4 Auto-detection heuristics + +**The idea:** Niri guesses which apps want passthrough based on app-id patterns, toolkit detection, window class hints, etc. + +**Why not:** Unpredictable. "This is Electron, probably a webapp" is wrong for VSCode. "This is Chromium, probably wants gestures" is wrong for a kiosk app. Heuristics fail in ways users can't debug, and silently stealing or forwarding input based on guesses is the worst possible failure mode. Explicit rule or nothing. + +### 6.5 External gesture daemon (TouchEgg-style) + +**The idea:** Run a separate process that recognizes gestures and sends actions to niri via IPC. + +**Why not:** Breaks on Wayland device ownership (see section 4.4). Any daemon reading libinput directly conflicts with the compositor reading the same device. A daemon reading from niri via some new "raw touch" IPC would duplicate gesture state between processes and add latency. KDE tried the external path and pulled it in-process for exactly these reasons. + +### 6.6 Global "disable all gestures when this app focused" + +**The idea:** One big toggle — when a passthrough app has focus, niri disables all touch gestures everywhere on screen. + +**Why not:** Too blunt. Breaks the edge swipe and Mod+gesture escape hatches that make passthrough tolerable in the first place. A user couldn't invoke the app drawer or workspace switch without unfocusing the app first. The per-touch decision made in `on_touch_down` is strictly better — it respects escape hatches automatically. + +--- + +## 7. Future directions + +Where this could go if the ecosystem moves. + +### 7.1 A minimal Wayland touchscreen gesture protocol + +The realistic shape, modeled on Android's `systemGestureExclusionRects`: + +1. Client advertises support via a new global interface (`wp_touch_gesture_exclusion_v1` or similar). +2. Client submits per-surface rectangles: "in these regions of my window, don't handle compositor gestures." +3. 
Compositor evaluates the rectangles when a touch starts; if the touch lands in an exclusion rect, forwards raw touches to the client. +4. Rectangles update on window resize / layout change via standard surface commit. + +This is intentionally narrower than a full capability-negotiation protocol. It doesn't try to support "client handles swipe but not pinch" or "client wants first 100ms of the gesture to decide." Android has shipped the rect-based model for 6+ years and it covers the important cases. Getting 80% of the solution into the protocol layer beats waiting forever for 100%. + +**What niri could do if such a protocol existed:** + +- The `touchscreen-gesture-passthrough` window rule becomes a **fallback** for apps that don't participate in the protocol. +- Apps that do participate (Firefox via GTK, Krita via Qt, etc.) get dynamic per-region control without any user configuration. +- The discoverability debug log becomes less important because correct behavior is automatic for participating apps. +- Niri would be one of the first compositors to support such a protocol if one is drafted. + +### 7.2 Unify IPC progress with niri's internal commit threshold + +IPC `GestureProgress` events already carry a normalized `progress: f64` (computed as `accumulated_delta * sensitivity / gesture-progress-distance`), so external consumers *do* get a 0→1 value. The unresolved problem is that niri has **two independent threshold systems** that are not synchronized: + +1. **IPC progress** — the value external tools see, driven by configured `gesture-progress-distance` +2. **Internal compositor commit** — niri's layout code decides whether to snap to the next workspace / column / overview state based on its own distance and velocity math + +These two can disagree. A swipe can reach IPC `progress = 0.8` while niri decides to snap back, or commit when IPC `progress = 0.3`. 
For external UIs driven by tagged gestures, this mismatch is visible — a progress bar showing 80% while niri snaps back feels broken. + +The improvement is to either (a) expose niri's internal progress alongside or instead of the IPC progress, or (b) make the IPC progress drive the commit decision so the two always agree. See `GESTURE_PROGRESS_MISMATCH.md` for the full write-up. + +In practice the touchscreen case tracks closer than touchpad because screen pixels match niri's internal units, while libinput's acceleration-curved touchpad deltas make the touchpad mismatch more noticeable. + +### 7.3 Touchpad gesture passthrough (sibling rule) + +For completeness, a `touchpad-gesture-passthrough` window rule could be added. The shape is different — touchpad has no "window under finger," so the rule would match the focused window instead — but the config surface would look analogous. Punted from v1 because the pain is smaller (2-finger touchpad gestures already forward by default via libinput) and the semantics need more thought. + +### 7.4 Have `GestureEnd { completed }` reflect internal commit, not just cancellation + +The `completed` field on `GestureEnd` currently distinguishes two cases: + +- `completed: true` — gesture ended normally (all fingers lifted without external interruption) +- `completed: false` — gesture was cancelled (a new finger arrived and restarted recognition, or cleanup fired) + +What it does **not** distinguish: whether niri's internal threshold actually committed the bound action. A touch workspace swipe that ends with all fingers lifted emits `completed: true` regardless of whether the compositor snapped forward to the new workspace or snapped back to the original. For tagged gestures driving external UIs, this is the same mismatch as §7.2 — the IPC event doesn't know what niri actually did. 
+ +The fix is the same as §7.2: either unify the threshold systems so the answer is always knowable, or add a separate `action_committed: bool` field that propagates niri's internal snap decision. Either way, external tools should be able to answer "did the swipe actually do the thing?" from the `GestureEnd` event alone. + +--- + +## 8. Open questions + +Explicitly inviting pushback. None of these have right answers yet. + +### 8.1 Should passthrough be a simple bool or support zones? + +Currently a simple bool. If someone comes up with a concrete use case the bool can't handle — for example, a browser where users want pinch forwarded but edge swipes intercepted — the field type would need to widen. The field name (`touchscreen-gesture-passthrough`) is generic enough that this extension is a non-breaking change (the bool becomes one arm of a widened sum type). + +### 8.2 Should layer-shell windows support passthrough? + +Currently no — `touchscreen-gesture-passthrough` is a `WindowRule` field and layer-shell surfaces don't go through window rules. A sidebar panel that wanted to claim gestures on itself has no way to do so today. Adding layer-shell passthrough is probably the right call but requires deciding where the config lives (a new `layer-rule {}` block? Matching criteria reused?) and is punted for v1. + +### 8.3 Should Mod+gesture always bypass passthrough? + +Currently yes, hard-coded. A case could be made for a `touchscreen-gesture-passthrough-respect-mod false` subfield to let passthrough *also* forward mod-combo gestures. Nobody has asked for this yet, and the hard-coded behavior preserves a discoverable escape hatch, so keeping it hard-coded feels right. + +### 8.4 What about gestures that start on a passthrough window and drift onto the desktop? + +Current behavior: once the first finger decides passthrough on touch-down, the entire gesture stays in passthrough mode until all fingers lift, even if fingers move off the window. 
This avoids confusing mid-gesture handoffs, but it means a user who accidentally starts a gesture on a passthrough window can't rescue it onto the compositor by dragging away. Reversing the policy (mid-gesture handoff based on current position) is probably worse, but this is the trade-off. +
+### 8.5 Continuous `noop` semantics
+
+Continuous `noop` is already supported (§5.3), but the unit question remains open: should the delta stream be raw pixels, normalized progress, or both (see also §7.2)? Raw is more flexible but forces external tools to do their own normalization. Normalized is easier to consume but loses information. Both means more IPC traffic. No decision yet.
+
+### 8.6 Should the debug log be promoted to `info` or stay at `debug`?
+
+The `touch: captured N-finger gesture over app-id=X` log line is currently at `debug` level. That means it requires `RUST_LOG=niri=debug` to see. Promoting it to `info` would surface it by default, which helps discoverability but adds noise to logs during normal use. Leaning toward leaving it at `debug` and documenting the `RUST_LOG` requirement, but open to arguments.
+
+---
+
+## 9. Further reading
+
+External references for the design space covered in this document. 
+ +### Wayland / libinput + +- [Wayland Protocols: `wp_pointer_gestures_v1`](https://wayland.app/protocols/pointer-gestures-unstable-v1) +- [Wayland Book: Touch input](https://wayland-book.com/seat/touch.html) +- [libinput gestures documentation](https://wayland.freedesktop.org/libinput/doc/latest/gestures.html) +- [`zwp_keyboard_shortcuts_inhibit_v1`](https://wayland.app/protocols/keyboard-shortcuts-inhibit-unstable-v1) — the keyboard-side analogue of what touch gesture inhibit would need + +### iOS / Android + +- iOS: search Apple Developer docs for `UIGestureRecognizer`, `UIScreenEdgePanGestureRecognizer` +- Android: search AOSP docs for `View.setSystemGestureExclusionRects`, `WindowInsets.getSystemGestures` + +### KDE / GNOME / Linux phone shells + +- [Input handling in spring 2025 — KDE Blogs](https://blogs.kde.org/2025/05/14/input-handling-in-spring-2025/) +- KDE InputActions — search KDE Discuss for "InputActions mouse gestures Wayland" +- [GNOME Shell gesture extensions](https://extensions.gnome.org/extension/4245/gesture-improvements/) +- Phosh / Phoc source (GitLab) — how a wlroots-based mobile shell handles touch edges +- SXMO / `lisgd` — the external-daemon model on Wayland + +### Niri internals + +- `src/input/touch_gesture.rs` — touchscreen gesture recognizer +- `src/input/move_grab.rs` — touch-driven window move grab (interacts with gesture detection) +- `niri-config/src/window_rule.rs` — where `touchscreen_gesture_passthrough` is parsed +- `src/window/mod.rs` — `ResolvedWindowRules` and the rule compute path +- [Configuration: Window Rules](./Configuration:-Window-Rules.md) — user-facing docs for the passthrough rule diff --git a/docs/wiki/Gestures.md b/docs/wiki/Gestures.md index 5c94d71f79..3e4cd47b95 100644 --- a/docs/wiki/Gestures.md +++ b/docs/wiki/Gestures.md @@ -52,19 +52,343 @@ Switch workspaces by holding Mod and the middle mouse button (or the ### Touchpad +Since: next Touchpad gestures are configured as binds in the main `binds {}` block, 
the same way keyboard shortcuts are. The trigger is `TouchpadSwipe` with `fingers=N` (integer in `3..=10`) and `direction="up|down|left|right"` properties. + +The defaults below reproduce the built-in behavior; you can rebind them to any other action or disable them entirely. + +```kdl +binds { + TouchpadSwipe fingers=3 direction="up" { focus-workspace-up; } + TouchpadSwipe fingers=3 direction="down" { focus-workspace-down; } + TouchpadSwipe fingers=3 direction="left" { focus-column-right; } + TouchpadSwipe fingers=3 direction="right" { focus-column-left; } + TouchpadSwipe fingers=4 direction="up" { toggle-overview; } + TouchpadSwipe fingers=4 direction="down" { toggle-overview; } +} +``` + +Tuning parameters for touchpad gesture recognition (`swipe-trigger-distance`, `swipe-progress-distance`, `pinch-trigger-scale`) live in the `input { touchpad { gestures { } } }` subblock — see [Configuration: Input](./Configuration:-Input.md#touchpad-gesture-tuning). + #### Workspace Switch -Switch workspaces with three-finger vertical swipes. +Switch workspaces with three-finger vertical swipes (default bind). #### Horizontal View Movement -Move the view horizontally with three-finger horizontal swipes. +Move the view horizontally with three-finger horizontal swipes (default bind). #### Open and Close the Overview Since: 25.05 -Open and close the overview with a four-finger vertical swipe. +Open and close the overview with a four-finger vertical swipe (default bind). + +#### Tap-Hold Gestures + +Since: next + +Stationary N-finger tap-holds on the touchpad — fingers land, hold stationary, then lift. The action fires on release. libinput handles motion discrimination: if fingers move, the gesture is promoted to a swipe or pinch and the candidate is dropped automatically. + +Fast taps (where fingers lift before libinput's internal hold detection threshold) are **not** intercepted — they pass through to the focused client. This means app-level quick-tap gestures (e.g. 
3-finger tap-to-paste in terminals) coexist naturally with compositor tap-hold binds. + +```kdl +binds { + TouchpadTapHold fingers=3 { screenshot; } + TouchpadTapHold fingers=4 { spawn "notify-send" "4-finger tap-hold"; } + TouchpadTapHold fingers=5 { close-window; } +} +``` + +- `fingers=` — integer in `3..=10`. Required. 1- and 2-finger holds are handled by libinput and forwarded to clients; niri only intercepts 3+ finger holds. +- No `direction=` — tap-holds are omnidirectional. Including `direction=` is an error. + +Tap-holds are always **discrete** (fire-and-forget) — they cannot drive continuous animations. + +No niri-side tuning knobs are needed — libinput's hold gesture API handles the motion threshold and timing internally. + +#### Tap-Hold-Drag Gestures + +Since: next + +N-finger tap-hold-drag — fingers land, hold stationary, then start moving. The trigger activates when the held fingers begin moving, distinguishing it from a direct swipe (where fingers land already in motion). This is the same gesture macOS uses for three-finger window dragging. + +Tap-hold-drag can drive **continuous** actions (workspace switch, overview, view scroll) — the swipe deltas feed into the animation automatically. It can also fire discrete actions once on activation. + +```kdl +binds { + // Continuous: hold 3 fingers, then drag to switch workspaces + TouchpadTapHoldDrag fingers=3 { focus-workspace-up; } + + // Discrete: hold 4 fingers, then move to trigger once + TouchpadTapHoldDrag fingers=4 { spawn "notify-send" "drag started"; } +} +``` + +- `fingers=` — integer in `3..=10`. Required. +- No `direction=` — the drag direction is not part of the trigger. Including `direction=` is an error. + +The distinction between tap-hold-drag and a direct swipe is made by libinput: a tap-hold-drag is preceded by a `GestureHoldBegin` event (fingers were stationary first), while a direct swipe skips the hold phase entirely. 
This means the same finger count can be used for both without conflict — intent is distinguished by the pause before moving. + +#### Pinch Gestures + +Since: next + +N-finger touchpad pinch — fingers converging toward (or diverging from) the cluster centroid. libinput pre-classifies swipe-vs-pinch, so niri only needs a scale threshold to decide when the pinch is committed. + +```kdl +binds { + TouchpadPinch fingers=2 direction="in" { open-overview; } + TouchpadPinch fingers=2 direction="out" { close-overview; } + TouchpadPinch fingers=3 direction="in" { spawn "rofi" "-show" "drun"; } +} +``` + +- `fingers=` — integer in `2..=10`. Required. Unlike touchscreen pinch (which starts at 3 fingers to preserve 2-finger client passthrough), libinput emits touchpad pinch events natively for 2/3/4 fingers. 2-finger pinch is the most reliable. +- `direction=` — `"in"` (scale shrinking) or `"out"` (scale growing). Required. + +Pinch is always **discrete** (fires once per gesture when `|scale - 1.0|` crosses `pinch-trigger-scale`). Raw pinch events still forward to Wayland clients, so app-level pinch-to-zoom (e.g. Firefox, image viewers) keeps working — the bind fires in addition to the app's own handling. Bind 3+ fingers if you want compositor-only actions without app overlap. + +The threshold is configured via `input { touchpad { gestures { pinch-trigger-scale } } }` — see [Configuration: Input](./Configuration:-Input.md#touchpad-gesture-tuning). + +> [!NOTE] +> 3+ finger pinch works but requires fingers moving distinctly toward or away from the cluster centroid. If fingers mostly translate together, libinput classifies the motion as swipe instead and no pinch event is emitted. Rotation is not exposed on touchpad — it rides inside pinch events for 2-finger gestures only, and niri does not currently surface a `TouchpadRotate` trigger. 
+ +### Touchscreen + +Since: next Touchscreen gestures are configured as binds in the main `binds {}` block using six parameterized node families — `TouchSwipe`, `TouchPinch`, `TouchRotate`, `TouchTap`, `TouchTapHoldDrag`, and `TouchEdge` — with KDL properties for finger count and direction. The `fingers=` property accepts any value in `3..=10`, so arbitrary finger counts are supported without an enum change. + +#### Swipe Gestures + +```kdl +binds { + TouchSwipe fingers=3 direction="up" { focus-workspace-up; } + TouchSwipe fingers=3 direction="down" { focus-workspace-down; } + TouchSwipe fingers=3 direction="left" { focus-column-right; } + TouchSwipe fingers=3 direction="right" { focus-column-left; } + TouchSwipe fingers=4 direction="up" { toggle-overview; } + TouchSwipe fingers=4 direction="down" { toggle-overview; } + // fingers=5 (and 6..=10) also work. +} +``` + +- `fingers=` — integer in `3..=10`. Rejecting `<3` preserves the 2-finger passthrough contract used by clients for scrolling/zooming. Required. +- `direction=` — one of `"up"`, `"down"`, `"left"`, `"right"`. Required. + +#### Pinch Gestures + +```kdl +binds { + TouchPinch fingers=3 direction="in" { open-overview; } + TouchPinch fingers=3 direction="out" { close-overview; } + // fingers=4/5/6/.../10 also work. +} +``` + +- `fingers=` — integer in `3..=10`. Required. +- `direction=` — one of `"in"` (spread shrinking) or `"out"` (spread growing). Required. + +Pinch vs swipe classification is controlled by the `pinch-trigger-distance` and `pinch-dominance-ratio` tuning parameters. + +#### Rotation Gestures + +> [!WARNING] +> +> Rotation detection is an early proof of concept and is currently **buggy and intermittent** on real hardware — recognition can misfire, lock at the wrong finger count, or fail to latch. The math, IPC, and bind plumbing are in place and tests pass, but real-world tuning still needs work. Use with caution and expect false positives / misses while this settles. 
+
+Twisting the finger cluster clockwise or counter-clockwise (around its centroid) fires a rotation gesture. Rotation is detected from the averaged per-finger angle change, so the noise floor is √N lower than single-finger angular drift.
+
+```kdl
+binds {
+    // 4-finger rotation walks column focus left/right.
+    TouchRotate fingers=4 direction="ccw" { focus-column-left; }
+    TouchRotate fingers=4 direction="cw" { focus-column-right; }
+}
+```
+
+- `fingers=` — integer in `3..=10`. Required.
+- `direction=` — one of `"cw"` (clockwise on screen) or `"ccw"` (counter-clockwise on screen). Required. The sign convention assumes the y-axis points down (standard screen coordinates).
+
+Rotation classification runs before pinch and swipe classification, so a clearly rotating finger cluster wins over any incidental spread or translation. Tuning lives under `input { touchscreen { gestures { } } }`: `rotation-trigger-angle` (minimum **degrees** before it latches, default 20°, matching the `rotation-trigger-angle 20.0` example in the config reference), `rotation-dominance-ratio` (how much rotation arc length must dominate swipe/spread change, default 0.5 — higher = stricter rotation, matching `pinch-dominance-ratio` semantics), and `rotation-progress-angle` (degrees that map to IPC `progress = ±1.0`, default 90°).
+
+Rotation gestures are **continuous** in the same sense as pinch: binding them to a continuous-capable action animates frame-by-frame, and tagged rotations emit `GestureProgress` events where the delta is `GestureDelta::Rotate { d_radians }`.
+
+Pinch gestures are **continuous**: when bound to a continuous-capable action like `open-overview`, `close-overview`, `toggle-overview`, `focus-workspace-*`, `focus-column-*`, or `noop`, the animation tracks finger motion frame-by-frame (pinch-in smoothly opens the overview, reversing the pinch smoothly closes it again). Binding a pinch to a non-continuous action like `spawn` or `close-window` still fires the action once on recognition, as before.
+ +The animation scale for pinch is controlled by `pinch-sensitivity`, not by the bind's `sensitivity=` property — pinch has its own dedicated knob because raw spread-delta pixels need a very different scaling from linear swipe distances. Tune `pinch-sensitivity` in the `touchscreen { gestures { } }` block if pinch-to-overview feels too fast or too slow. + +#### Tap Gestures + +Since: next + +Stationary N-finger taps — all fingers land and lift with minimal motion. Tap detection runs in parallel with swipe/pinch/rotate recognition using a spatial dead zone, matching the approach used by Android, iOS, and libinput. If any finger drifts beyond the wobble threshold or the swipe/pinch/rotate recognizer locks first, the tap candidate is killed. + +```kdl +binds { + TouchTap fingers=3 { screenshot; } + TouchTap fingers=4 { spawn "notify-send" "4-finger tap"; } + TouchTap fingers=5 { close-window; } +} +``` + +- `fingers=` — integer in `3..=10`. Required. +- No `direction=` — taps are omnidirectional. Including `direction=` is an error. + +Taps are always **discrete** (fire-and-forget) — they cannot drive continuous animations. + +Tuning parameters in `input { touchscreen { gestures { } } }`: + +- `tap-wobble-threshold` — maximum per-finger displacement (in pixels) before the tap candidate is killed. Default: 15. Increase if taps are too hard to trigger on your device; decrease if taps fire when you intended a swipe. +- `tap-timeout-ms` — maximum duration (in milliseconds) from the third finger landing to all fingers lifting. Default: 500. Acts as a tap-vs-hold safety cap. + +The wobble threshold (default 15 px) sits well below the swipe trigger distance (default 100 px), creating a dead zone between 15–100 px where neither tap nor swipe fires — this handles ambiguous gestures correctly. + +#### Tap-Hold-Drag Gestures + +Since: next + +N-finger tap-hold-drag — fingers land, hold stationary (within the wobble threshold), then start moving. 
The trigger fires at the wobble-kill moment — the transition from "was a tap candidate" to "started moving." This distinguishes tap-hold-drag from a direct swipe: direct swipes move immediately without a stationary hold phase. + +Tap-hold-drag supports an optional `direction=` property. Directional binds are checked first; if no directional bind matches, the omnidirectional (no `direction=`) bind is used as a fallback. + +```kdl +binds { + // Omnidirectional — fires regardless of initial movement direction + TouchTapHoldDrag fingers=3 { spawn "notify-send" "drag started"; } + + // Directional — only fires for that initial direction + TouchTapHoldDrag fingers=4 direction="left" { spawn "wl-copy"; } + TouchTapHoldDrag fingers=4 direction="right" { spawn "wl-paste"; } + TouchTapHoldDrag fingers=4 direction="up" { toggle-overview; } +} +``` + +- `fingers=` — integer in `3..=10`. Required. +- `direction=` — optional. One of `"up"`, `"down"`, `"left"`, `"right"`. When omitted, the trigger is omnidirectional. + +Tap-hold-drag can drive **continuous** actions — when bound to a continuous-capable action, the swipe deltas feed into the animation frame-by-frame after activation. Binding to a discrete action fires it once. + +Tuning parameters in `input { touchscreen { gestures { } } }`: + +- `tap-hold-trigger-delay-ms` — minimum hold duration (in milliseconds) before a wobble-kill can activate a tap-hold-drag bind. If fingers move before this delay elapses, normal swipe/pinch/rotate recognition continues instead. Default: 200. Increase if fast swipes accidentally trigger hold-drag; decrease if hold-drag feels sluggish to activate. + +The hold detection also reuses the tap candidate's wobble threshold (`tap-wobble-threshold`, default 15 px). Fingers must stay within this threshold during the hold phase. + +#### Edge Swipes + +One-finger swipes that begin within `edge-start-distance` pixels of a screen edge. Useful for drawers, panels, and any edge-activated UI. 
+ +```kdl +binds { + TouchEdge edge="left" { focus-column-right; } + TouchEdge edge="right" { focus-column-left; } + TouchEdge edge="top" { focus-workspace-up; } + TouchEdge edge="bottom" { focus-workspace-down; } +} +``` + +- `edge=` — one of `"left"`, `"right"`, `"top"`, `"bottom"`. Required. +- `zone=` — optional third-of-the-edge qualifier (see Edge Zones below). +- No `fingers=` — edge swipes are always single-finger. Including `fingers=` is an error. + +The edge trigger zone width is set by `edge-start-distance` in the `touchscreen { gestures { } }` block. + +##### Edge swipes with continuous actions (overview, workspace switch) + +Edge swipes can be bound to continuous actions like `toggle-overview` or `focus-workspace-up`. Two things to be aware of: + +- **Direction inversion:** Edge swipes feeding into overview require `natural-scroll=true` to feel correct. Without it, swiping down from the top edge tries to close overview instead of opening it. + + ```kdl + binds { + TouchEdge edge="top" zone="right" natural-scroll=true { toggle-overview; } + } + ``` + +- **Left/right edges and overview:** Continuous overview gestures currently only track vertical (`delta_y`) motion. Left and right edge swipes produce primarily horizontal motion (`delta_x`), which the overview ignores. This means `toggle-overview` on a left or right edge swipe will not work. Use top or bottom edges for overview binds. This is a known limitation. + +##### Edge zones + +Since: next + +Each edge is also split into three zones along its perpendicular axis so you can bind separate actions to different parts of the same edge (like Android's status bar → notification tray vs. quick-settings split, or a top-right screenshot gesture). Add a `zone=` property to restrict the bind to one third. 
The zone vocabulary rotates per edge to match the direction of the split: + +| Edge | Valid `zone=` values | Meaning | +| --- | --- | --- | +| `edge="top"` | `"left"` / `"center"` / `"right"` | thirds along the x-axis | +| `edge="bottom"` | `"left"` / `"center"` / `"right"` | thirds along the x-axis | +| `edge="left"` | `"top"` / `"center"` / `"bottom"` | thirds along the y-axis | +| `edge="right"` | `"top"` / `"center"` / `"bottom"` | thirds along the y-axis | + +Mismatched vocabularies (e.g. `edge="left" zone="left"`) are a parse error. + +```kdl +binds { + // Split the top edge into three independent actions. + TouchEdge edge="top" zone="left" { spawn "notify-send" "left"; } + TouchEdge edge="top" zone="center" { spawn "notify-send" "pull down notifications"; } + TouchEdge edge="top" zone="right" { spawn "screenshot.sh"; } + + // Bottom-right corner for the overview; middle-bottom for app drawer. + TouchEdge edge="bottom" zone="center" { spawn "rofi" "-show" "drun"; } + TouchEdge edge="bottom" zone="right" { toggle-overview; } + + // Parent bind is still valid. If no zoned bind hits for a given touch, + // the parent (no `zone=`) trigger is used as a fallback — so a bare + // `TouchEdge edge="left"` catches any left-edge swipe that doesn't land + // in a more specific zone bind. + TouchEdge edge="left" { focus-column-right; } +} +``` + +Tuning parameters for touchscreen gesture recognition all live in the `input { touchscreen { gestures { } } }` subblock — see [Configuration: Input](./Configuration:-Input.md#touchscreen). + +### Gesture Tags and IPC Events + +Since: next + +Any gesture bind (touchscreen or touchpad) can carry a `tag="..."` property. When the gesture fires, niri emits `GestureBegin`, `GestureProgress`, and `GestureEnd` events on its IPC event stream, carrying the tag string. External applications subscribing to the event stream can react to those events — drive a sidebar drawer, show a scrubbing HUD, move a slider, etc. 
+
+```kdl
+binds {
+    // Tagged workspace switch — still switches workspaces, and also
+    // emits GestureProgress events with tag="ws-nav" for external apps
+    // that want to show a progress indicator alongside the animation.
+    TouchSwipe fingers=3 direction="up" tag="ws-nav" { focus-workspace-up; }
+    TouchSwipe fingers=3 direction="down" tag="ws-nav" { focus-workspace-down; }
+
+    // Noop-tagged edge swipe — drives no compositor action, just emits
+    // IPC progress events so an external app (e.g. a sidebar drawer)
+    // can follow the finger.
+    TouchEdge edge="left" tag="sidebar-left" { noop; }
+    TouchEdge edge="right" tag="sidebar-right" { noop; }
+}
+```
+
+The three IPC events are:
+
+- **`GestureBegin { tag, trigger, finger_count, is_continuous }`** — fired when gesture recognition has locked in. `is_continuous` is true for swipe, pinch, rotation, and edge gestures bound to continuous-capable actions (including `noop`), and false for discrete gestures bound to one-shot actions.
+- **`GestureProgress { tag, progress, delta, timestamp_ms }`** — fired repeatedly while a continuous gesture is in motion.
+  - `progress` is **signed, unbounded**, normalized: it starts at `0.0` when the gesture is recognized and grows as the gesture continues. Reversing direction produces negative values, and overshoot can exceed `±1.0` — consumers should not assume the value is clamped.
+  - For **swipes and edge gestures**, progress accumulates adjusted (sensitivity-scaled, natural-scroll-adjusted) finger delta on the dominant axis, normalized by `swipe-progress-distance` (default 200 px for touchscreen, 40 libinput units for touchpad — same knob name, separate config block). Progress `±1.0` ≈ one progress-distance of movement.
+  - For **pinches**, progress is `(current_spread - start_spread) / pinch-progress-distance` (default 100 px). Positive = pinch-out (spread growing), negative = pinch-in.
+ - For **rotations**, progress is cumulative signed rotation divided by `rotation-progress-angle` (configured in **degrees**, default 90°). Positive = counter-clockwise on screen, negative = clockwise on screen. + - `delta` is a tagged enum carrying the per-event raw delta in a gesture-specific shape: + - `GestureDelta::Swipe { dx, dy }` — per-event finger delta in screen pixels (touchscreen) or libinput units (touchpad). + - `GestureDelta::Pinch { d_spread }` — per-event change in finger spread. + - `GestureDelta::Rotate { d_radians }` — per-event change in the averaged per-finger angle. Signed with the same on-screen convention as `progress`. +- **`GestureEnd { tag, completed }`** — fired when the gesture ends (fingers released). + +#### Noop Gestures + +Binding a tagged gesture to `noop` means the gesture emits IPC events without driving any compositor animation. This is the cleanest case for external apps: progress is the sole output, and the external app has full control over its own thresholds and snap behavior. Used by [niri-tag-sidebar](https://github.com/julianjc84/niri-tag-sidebar) for edge-swipe drawer panels. + +#### Progress vs Compositor Animation + +> [!WARNING] +> +> When a tagged gesture *also* drives a compositor animation (e.g. a tagged workspace switch), niri uses its own internal thresholds to decide when to commit the action — these are independent of the IPC `progress` value. An external app watching the progress value can't reliably predict when niri will actually commit. For `noop` gestures this isn't a concern because progress is the sole output. + +The `GestureEnd.completed` field is currently hardcoded `true` for touchscreen gestures and does **not** indicate whether niri actually committed the bound action. 
### All Pointing Devices diff --git a/niri-config/src/binds.rs b/niri-config/src/binds.rs index 0be7596fec..9e47ba76c8 100644 --- a/niri-config/src/binds.rs +++ b/niri-config/src/binds.rs @@ -13,9 +13,42 @@ use smithay::input::keyboard::keysyms::KEY_NoSymbol; use smithay::input::keyboard::xkb::{keysym_from_name, KEYSYM_CASE_INSENSITIVE, KEYSYM_NO_FLAGS}; use smithay::input::keyboard::Keysym; +use crate::input::{EdgeZone, ScreenEdge}; use crate::recent_windows::{MruDirection, MruFilter, MruScope}; use crate::utils::{expect_only_children, MergeWith}; +/// Direction for a linear swipe gesture. +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] +pub enum SwipeDirection { + Up, + Down, + Left, + Right, +} + +/// Direction for a pinch gesture. +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] +pub enum PinchDirection { + In, + Out, +} + +/// Direction for a rotation gesture (as seen on screen). +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] +pub enum RotateDirection { + /// Clockwise on screen. + Cw, + /// Counter-clockwise on screen. + Ccw, +} + +/// Inclusive bounds on the `fingers=` property for multi-finger gestures. +/// Parser rejects `fingers` values outside `[MIN_FINGERS, MAX_FINGERS]`. +/// `< 3` would collide with two-finger passthrough (scroll/zoom) and plain +/// single-finger touch handling; `> 10` exceeds any realistic hardware. +pub const MIN_FINGERS: u8 = 3; +pub const MAX_FINGERS: u8 = 10; + #[derive(Debug, Default, PartialEq)] pub struct Binds(pub Vec); @@ -28,6 +61,16 @@ pub struct Bind { pub allow_when_locked: bool, pub allow_inhibiting: bool, pub hotkey_overlay_title: Option>, + /// Sensitivity multiplier for touch gesture binds. + pub sensitivity: Option, + /// Natural scroll for touchscreen gesture binds. + pub natural_scroll: bool, + /// Optional tag for IPC gesture events. + /// When set, gesture begin/progress/end events are emitted on the IPC + /// event stream with this tag, allowing external tools to react. 
+ /// Restricted to gesture triggers only (Touch*/Touchpad*) — rejected + /// on keyboard/mouse binds to prevent IPC event stream keylogging. + pub tag: Option, } #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] @@ -52,6 +95,131 @@ pub enum Trigger { TouchpadScrollUp, TouchpadScrollLeft, TouchpadScrollRight, + /// Multi-finger touchpad swipe. + /// + /// KDL syntax: `TouchpadSwipe fingers=3 direction="up"`. `fingers` must + /// be in `MIN_FINGERS..=MAX_FINGERS`. + TouchpadSwipe { + fingers: u8, + direction: SwipeDirection, + }, + /// Multi-finger touchpad tap-hold (fingers land, hold stationary, + /// then lift). libinput handles motion discrimination via its hold + /// gesture API — `cancelled=false` on `GestureHoldEnd` means the + /// fingers never moved. Fires on release. Fast taps that lift before + /// libinput's hold threshold are not intercepted and pass through to + /// clients. Always discrete (fire-and-forget). + /// + /// KDL syntax: `TouchpadTapHold fingers=3`. + TouchpadTapHold { + fingers: u8, + }, + /// Multi-finger touchpad tap-hold-drag (fingers land, hold stationary, + /// then start moving). Fires when the held fingers begin moving — + /// libinput transitions from `GestureHold` to `GestureSwipe`. + /// Can drive continuous actions (workspace switch, overview, window + /// move) or fire a discrete action once on activation. + /// + /// KDL syntax: `TouchpadTapHoldDrag fingers=3`. + TouchpadTapHoldDrag { + fingers: u8, + }, + /// Multi-finger touchpad pinch (fingers converging / diverging around + /// the cluster centroid). libinput pre-classifies swipe-vs-pinch, so + /// niri only needs a scale threshold — see + /// `touchpad.gestures.pinch-trigger-scale`. Fires once per gesture + /// when `|scale - 1.0|` crosses the threshold; direction is picked + /// from the sign of the scale change. Raw pinch events still forward + /// to Wayland clients, so app-side zoom keeps working. + /// + /// KDL syntax: `TouchpadPinch fingers=2 direction="in"`. 
+ TouchpadPinch { + fingers: u8, + direction: PinchDirection, + }, + /// Multi-finger touchscreen swipe. + /// + /// KDL syntax: `TouchSwipe fingers=3 direction="up"`. + TouchSwipe { + fingers: u8, + direction: SwipeDirection, + }, + /// Multi-finger touchscreen pinch (fingers converging / diverging + /// around the cluster centroid). + /// + /// KDL syntax: `TouchPinch fingers=3 direction="in"`. + TouchPinch { + fingers: u8, + direction: PinchDirection, + }, + /// Multi-finger touchscreen rotation (fingers twisting as a group around + /// the cluster centroid). Rotation starts at 3 fingers to preserve the + /// 2-finger passthrough contract used by clients for scrolling/zooming. + /// + /// KDL syntax: `TouchRotate fingers=3 direction="cw"`. + TouchRotate { + fingers: u8, + direction: RotateDirection, + }, + /// Multi-finger touchscreen tap (all fingers land and lift with minimal + /// motion). Runs in parallel with swipe/pinch/rotate recognition — if + /// motion exceeds `tap-wobble-threshold` or the recognizer locks, the + /// tap candidate is killed. Always discrete (fire-and-forget). + /// + /// KDL syntax: `TouchTap fingers=3`. + TouchTap { + fingers: u8, + }, + /// Multi-finger touchscreen tap-hold-drag (fingers land, hold + /// stationary within wobble threshold, then start moving). Fires at + /// the wobble-kill moment — the transition from "was a tap candidate" + /// to "started moving." Optional `direction` restricts to a specific + /// initial movement direction; `None` = omnidirectional (fires + /// regardless of direction). Can drive continuous actions. + /// + /// KDL syntax: + /// - `TouchTapHoldDrag fingers=3` (omnidirectional) + /// - `TouchTapHoldDrag fingers=3 direction="left"` (directional) + TouchTapHoldDrag { + fingers: u8, + direction: Option, + }, + /// Single-finger touchscreen edge swipe. + /// + /// `zone` picks one of the three zones along the edge's perpendicular + /// axis; `None` is the parent/any-zone fallback. 
At bind lookup time a + /// zoned trigger is preferred, with `zone: None` as a fallback. + /// + /// KDL syntax: + /// - `TouchEdge edge="left"` (parent) + /// - `TouchEdge edge="left" zone="top"` (zoned) + /// + /// Top/Bottom edges accept `zone="left"|"center"|"right"`; Left/Right + /// edges accept `zone="top"|"center"|"bottom"`. + TouchEdge { + edge: ScreenEdge, + zone: Option, + }, +} + +impl Trigger { + /// Returns true if this trigger is a gesture (touchscreen or touchpad). + /// Only gesture triggers support IPC tag events. + pub fn is_gesture(&self) -> bool { + matches!( + self, + Trigger::TouchpadSwipe { .. } + | Trigger::TouchpadTapHold { .. } + | Trigger::TouchpadTapHoldDrag { .. } + | Trigger::TouchpadPinch { .. } + | Trigger::TouchSwipe { .. } + | Trigger::TouchPinch { .. } + | Trigger::TouchRotate { .. } + | Trigger::TouchTap { .. } + | Trigger::TouchTapHoldDrag { .. } + | Trigger::TouchEdge { .. } + ) + } } bitflags! { @@ -390,6 +558,10 @@ pub enum Action { MruSetScope(MruScope), #[knuffel(skip)] MruCycleScope, + /// No-op action: the bind matches and consumes the gesture but does + /// nothing inside the compositor. Useful with `tag` to pipe gesture + /// events to external tools via IPC without triggering any niri action. + Noop, } impl From for Action { @@ -833,10 +1005,30 @@ where )); } - let key = node - .node_name - .parse::() - .map_err(|e| DecodeError::conversion(&node.node_name, e.wrap_err("invalid keybind")))?; + // Split modifiers from the node name. `Ctrl+Shift+TouchSwipe` → + // (Modifiers::CTRL|SHIFT, "TouchSwipe"). + let (modifiers, trigger_name) = match parse_modifiers(&node.node_name) { + Ok(pair) => pair, + Err(e) => { + return Err(DecodeError::conversion( + &node.node_name, + e.wrap_err("invalid keybind"), + )) + } + }; + let is_gesture_family = is_gesture_family_name(trigger_name); + + // For non-gesture triggers, parse the node name directly (keysyms, + // mouse buttons, wheel, TouchpadScroll). 
For gesture families we + // build the Trigger from properties below, because the node name + // alone carries no finger count / direction / edge info. + let key_from_name = if is_gesture_family { + None + } else { + Some(node.node_name.parse::().map_err(|e| { + DecodeError::conversion(&node.node_name, e.wrap_err("invalid keybind")) + })?) + }; let mut repeat = true; let mut cooldown = None; @@ -844,6 +1036,17 @@ where let mut allow_when_locked_node = None; let mut allow_inhibiting = true; let mut hotkey_overlay_title = None; + let mut sensitivity = None; + let mut natural_scroll = false; + let mut tag = None; + + // Gesture-specific properties, only populated / legal when + // `is_gesture_family` is true. + let mut gesture_fingers: Option = None; + let mut gesture_direction: Option = None; + let mut gesture_edge: Option = None; + let mut gesture_zone: Option = None; + for (name, val) in &node.properties { match &***name { "repeat" => { @@ -864,6 +1067,38 @@ where "hotkey-overlay-title" => { hotkey_overlay_title = Some(knuffel::traits::DecodeScalar::decode(val, ctx)?); } + "sensitivity" => { + sensitivity = Some(knuffel::traits::DecodeScalar::decode(val, ctx)?); + } + "natural-scroll" => { + natural_scroll = knuffel::traits::DecodeScalar::decode(val, ctx)?; + } + "tag" => { + tag = Some(knuffel::traits::DecodeScalar::decode(val, ctx)?); + } + // Gesture-specific properties. Note that knuffel stores + // `node.properties` as a BTreeMap keyed on name, so a + // KDL node written with `fingers=3 fingers=5 ...` is + // silently collapsed to its last value at AST-build + // time — this loop only ever sees one entry per name. + // Duplicate detection therefore can't happen here; the + // only way to reject duplicates would be to intercept + // the raw KDL source before knuffel parses it, which + // isn't worth it. Last-wins is KDL-level behavior, + // and users who care get the same hazard on every + // other bind property (`tag=`, `cooldown-ms=`, etc.). 
+ "fingers" if is_gesture_family => { + gesture_fingers = Some(knuffel::traits::DecodeScalar::decode(val, ctx)?); + } + "direction" if is_gesture_family => { + gesture_direction = Some(knuffel::traits::DecodeScalar::decode(val, ctx)?); + } + "edge" if is_gesture_family => { + gesture_edge = Some(knuffel::traits::DecodeScalar::decode(val, ctx)?); + } + "zone" if is_gesture_family => { + gesture_zone = Some(knuffel::traits::DecodeScalar::decode(val, ctx)?); + } name_str => { ctx.emit_error(DecodeError::unexpected( name, @@ -874,6 +1109,40 @@ where } } + // Build the Key. For gesture families, combine node name + + // collected properties via build_gesture_trigger. + let key = if is_gesture_family { + let props = GestureTriggerProps { + fingers: gesture_fingers, + direction: gesture_direction.as_deref(), + edge: gesture_edge.as_deref(), + zone: gesture_zone.as_deref(), + }; + match build_gesture_trigger(trigger_name, &props) { + Ok(trigger) => Key { trigger, modifiers }, + Err(msg) => { + return Err(DecodeError::conversion(&node.node_name, miette!("{msg}"))); + } + } + } else { + key_from_name.unwrap() + }; + + // Tags are only supported on gesture triggers (touchscreen/touchpad). + // Allowing tags on keyboard/mouse binds would let the IPC event stream + // be used as a keylogger — every tagged keypress would emit an event + // with the key name to any process listening on the socket. Gestures + // are safe because they don't carry text input (you can't type a + // password with a 3-finger swipe). + if tag.is_some() && !key.trigger.is_gesture() { + ctx.emit_error(DecodeError::unexpected( + &node.node_name, + "property", + "tag is only supported on gesture triggers (Touch*/Touchpad*)", + )); + tag = None; + } + let mut children = node.children(); // If the action is invalid but the key is fine, we still want to return something. 
@@ -887,6 +1156,9 @@ where
                     allow_when_locked: false,
                     allow_inhibiting: true,
                     hotkey_overlay_title: None,
+                    sensitivity: None,
+                    natural_scroll: false,
+                    tag: None,
                 };
 
                 if let Some(child) = children.next() {
@@ -923,6 +1195,9 @@ where
                     allow_when_locked,
                     allow_inhibiting,
                     hotkey_overlay_title,
+                    sensitivity,
+                    natural_scroll,
+                    tag,
                 })
             }
             Err(e) => {
@@ -940,39 +1215,272 @@ where
     }
 }
 
+/// Returns true if `s` names one of the parameterized gesture
+/// families. These are parsed via KDL properties in `Bind::decode_node`,
+/// not via `FromStr for Key`.
+pub(crate) fn is_gesture_family_name(s: &str) -> bool {
+    s.eq_ignore_ascii_case("TouchpadSwipe")
+        || s.eq_ignore_ascii_case("TouchpadTapHold")
+        || s.eq_ignore_ascii_case("TouchpadTapHoldDrag")
+        || s.eq_ignore_ascii_case("TouchpadPinch")
+        || s.eq_ignore_ascii_case("TouchSwipe")
+        || s.eq_ignore_ascii_case("TouchPinch")
+        || s.eq_ignore_ascii_case("TouchRotate")
+        || s.eq_ignore_ascii_case("TouchTap")
+        || s.eq_ignore_ascii_case("TouchTapHoldDrag")
+        || s.eq_ignore_ascii_case("TouchEdge")
+}
+
+/// Splits `Ctrl+Shift+Foo` into `(modifiers, "Foo")`.
+fn parse_modifiers(s: &str) -> Result<(Modifiers, &str), miette::Error> {
+    let mut modifiers = Modifiers::empty();
+    let mut split = s.split('+');
+    let key = split.next_back().unwrap();
+    for part in split {
+        let part = part.trim();
+        if part.eq_ignore_ascii_case("mod") {
+            modifiers |= Modifiers::COMPOSITOR;
+        } else if part.eq_ignore_ascii_case("ctrl") || part.eq_ignore_ascii_case("control") {
+            modifiers |= Modifiers::CTRL;
+        } else if part.eq_ignore_ascii_case("shift") {
+            modifiers |= Modifiers::SHIFT;
+        } else if part.eq_ignore_ascii_case("alt") {
+            modifiers |= Modifiers::ALT;
+        } else if part.eq_ignore_ascii_case("super") || part.eq_ignore_ascii_case("win") {
+            modifiers |= Modifiers::SUPER;
+        } else if part.eq_ignore_ascii_case("iso_level3_shift") || part.eq_ignore_ascii_case("mod5")
+        {
+            modifiers |= Modifiers::ISO_LEVEL3_SHIFT;
+        } else if part.eq_ignore_ascii_case("iso_level5_shift") || part.eq_ignore_ascii_case("mod3")
+        {
+            modifiers |= Modifiers::ISO_LEVEL5_SHIFT;
+        } else {
+            return Err(miette!("invalid modifier: {part}"));
+        }
+    }
+    Ok((modifiers, key))
+}
+
+/// Properties collected from a gesture bind node that feed into building
+/// a parameterized `Trigger` variant.
+#[derive(Debug, Default)]
+pub(crate) struct GestureTriggerProps<'a> {
+    pub fingers: Option<u8>,
+    pub direction: Option<&'a str>,
+    pub edge: Option<&'a str>,
+    pub zone: Option<&'a str>,
+}
+
+/// Build a parameterized gesture `Trigger` from a family name and the
+/// properties collected on the KDL node. Returns a human-readable error
+/// string on any invalid combination (the caller wraps it in a knuffel
+/// `DecodeError`).
+pub(crate) fn build_gesture_trigger(
+    family: &str,
+    props: &GestureTriggerProps<'_>,
+) -> Result<Trigger, String> {
+    let expect_fingers = |props: &GestureTriggerProps<'_>, min: u8| -> Result<u8, String> {
+        let Some(n) = props.fingers else {
+            return Err(format!(
+                "{family} requires `fingers=N` (valid range {min}..={MAX_FINGERS})"
+            ));
+        };
+        if !(min..=MAX_FINGERS).contains(&n) {
+            return Err(format!(
+                "fingers={n} out of range (valid range {min}..={MAX_FINGERS})"
+            ));
+        }
+        Ok(n)
+    };
+    let reject_edge_zone = |props: &GestureTriggerProps<'_>| -> Result<(), String> {
+        if props.edge.is_some() {
+            return Err(format!("{family} does not accept an `edge=` property"));
+        }
+        if props.zone.is_some() {
+            return Err(format!("{family} does not accept a `zone=` property"));
+        }
+        Ok(())
+    };
+
+    if family.eq_ignore_ascii_case("TouchSwipe") || family.eq_ignore_ascii_case("TouchpadSwipe") {
+        reject_edge_zone(props)?;
+        let fingers = expect_fingers(props, MIN_FINGERS)?;
+        let direction = props
+            .direction
+            .ok_or_else(|| format!("{family} requires `direction=\"up|down|left|right\"`"))?;
+        let direction = match direction.to_ascii_lowercase().as_str() {
+            "up" => SwipeDirection::Up,
+            "down" => SwipeDirection::Down,
+            "left" => SwipeDirection::Left,
+            "right" => SwipeDirection::Right,
+            other => {
+                return Err(format!(
+                    "invalid direction=\"{other}\" for {family} (expected up|down|left|right)"
+                ))
+            }
+        };
+        return Ok(if family.eq_ignore_ascii_case("TouchSwipe") {
+            Trigger::TouchSwipe { fingers, direction }
+        } else {
+            Trigger::TouchpadSwipe { fingers, direction }
+        });
+    }
+
+    if family.eq_ignore_ascii_case("TouchPinch") || family.eq_ignore_ascii_case("TouchpadPinch") {
+        reject_edge_zone(props)?;
+        // Touchpad pinch accepts 2 fingers — libinput emits pinch events
+        // natively for 2/3/4 fingers. Touchscreen pinch stays at 3+ to
+        // preserve 2-finger client passthrough (scroll/zoom).
+ let min = if family.eq_ignore_ascii_case("TouchpadPinch") { + 2 + } else { + MIN_FINGERS + }; + let fingers = expect_fingers(props, min)?; + let direction = props + .direction + .ok_or_else(|| format!("{family} requires `direction=\"in|out\"`"))?; + let direction = match direction.to_ascii_lowercase().as_str() { + "in" => PinchDirection::In, + "out" => PinchDirection::Out, + other => { + return Err(format!( + "invalid direction=\"{other}\" for {family} (expected in|out)" + )) + } + }; + return Ok(if family.eq_ignore_ascii_case("TouchPinch") { + Trigger::TouchPinch { fingers, direction } + } else { + Trigger::TouchpadPinch { fingers, direction } + }); + } + + if family.eq_ignore_ascii_case("TouchRotate") { + reject_edge_zone(props)?; + let fingers = expect_fingers(props, MIN_FINGERS)?; + let direction = props + .direction + .ok_or_else(|| "TouchRotate requires `direction=\"cw|ccw\"`".to_string())?; + let direction = match direction.to_ascii_lowercase().as_str() { + "cw" => RotateDirection::Cw, + "ccw" => RotateDirection::Ccw, + other => { + return Err(format!( + "invalid direction=\"{other}\" for TouchRotate (expected cw|ccw)" + )) + } + }; + return Ok(Trigger::TouchRotate { fingers, direction }); + } + + if family.eq_ignore_ascii_case("TouchTap") + || family.eq_ignore_ascii_case("TouchpadTapHold") + || family.eq_ignore_ascii_case("TouchpadTapHoldDrag") + { + reject_edge_zone(props)?; + let fingers = expect_fingers(props, MIN_FINGERS)?; + if props.direction.is_some() { + return Err(format!("{family} does not accept a `direction=` property")); + } + return Ok(if family.eq_ignore_ascii_case("TouchTap") { + Trigger::TouchTap { fingers } + } else if family.eq_ignore_ascii_case("TouchpadTapHold") { + Trigger::TouchpadTapHold { fingers } + } else { + Trigger::TouchpadTapHoldDrag { fingers } + }); + } + + if family.eq_ignore_ascii_case("TouchTapHoldDrag") { + reject_edge_zone(props)?; + let fingers = expect_fingers(props, MIN_FINGERS)?; + // direction= is optional for 
TouchTapHoldDrag (unlike TouchSwipe + // where it's required). None = omnidirectional. + let direction = match props.direction { + None => None, + Some(d) => { + let dir = match d.to_ascii_lowercase().as_str() { + "up" => SwipeDirection::Up, + "down" => SwipeDirection::Down, + "left" => SwipeDirection::Left, + "right" => SwipeDirection::Right, + other => { + return Err(format!( + "invalid direction=\"{other}\" for TouchTapHoldDrag \ + (expected up|down|left|right)" + )) + } + }; + Some(dir) + } + }; + return Ok(Trigger::TouchTapHoldDrag { fingers, direction }); + } + + if family.eq_ignore_ascii_case("TouchEdge") { + if props.fingers.is_some() { + return Err("TouchEdge does not accept a `fingers=` property".to_string()); + } + if props.direction.is_some() { + return Err( + "TouchEdge uses `edge=` (not `direction=`) and an optional `zone=`".to_string(), + ); + } + let edge = props + .edge + .ok_or_else(|| "TouchEdge requires `edge=\"left|right|top|bottom\"`".to_string())?; + let edge = match edge.to_ascii_lowercase().as_str() { + "left" => ScreenEdge::Left, + "right" => ScreenEdge::Right, + "top" => ScreenEdge::Top, + "bottom" => ScreenEdge::Bottom, + other => { + return Err(format!( + "invalid edge=\"{other}\" (expected left|right|top|bottom)" + )) + } + }; + // Zone parsing uses `zone_kdl_name` as the single source of truth + // for the axis-rotating vocabulary (top/bottom edges take + // left|center|right; left/right edges take top|center|bottom). + // We try each of the three legal EdgeZone values and see which + // one's KDL name matches the user's input. 
+ let zone = match props.zone { + None => None, + Some(z) => { + let z_lower = z.to_ascii_lowercase(); + let matched = [EdgeZone::Start, EdgeZone::Center, EdgeZone::End] + .into_iter() + .find(|&ez| crate::input::zone_kdl_name(edge, ez) == z_lower); + match matched { + Some(ez) => Some(ez), + None => { + let valid = format!( + "{}|{}|{}", + crate::input::zone_kdl_name(edge, EdgeZone::Start), + crate::input::zone_kdl_name(edge, EdgeZone::Center), + crate::input::zone_kdl_name(edge, EdgeZone::End), + ); + return Err(format!( + "invalid zone=\"{z}\" for edge=\"{}\" (expected {valid})", + edge.as_kdl_name() + )); + } + } + } + }; + return Ok(Trigger::TouchEdge { edge, zone }); + } + + Err(format!("unknown gesture family `{family}`")) +} + impl FromStr for Key { type Err = miette::Error; fn from_str(s: &str) -> Result { - let mut modifiers = Modifiers::empty(); - - let mut split = s.split('+'); - let key = split.next_back().unwrap(); - - for part in split { - let part = part.trim(); - if part.eq_ignore_ascii_case("mod") { - modifiers |= Modifiers::COMPOSITOR - } else if part.eq_ignore_ascii_case("ctrl") || part.eq_ignore_ascii_case("control") { - modifiers |= Modifiers::CTRL; - } else if part.eq_ignore_ascii_case("shift") { - modifiers |= Modifiers::SHIFT; - } else if part.eq_ignore_ascii_case("alt") { - modifiers |= Modifiers::ALT; - } else if part.eq_ignore_ascii_case("super") || part.eq_ignore_ascii_case("win") { - modifiers |= Modifiers::SUPER; - } else if part.eq_ignore_ascii_case("iso_level3_shift") - || part.eq_ignore_ascii_case("mod5") - { - modifiers |= Modifiers::ISO_LEVEL3_SHIFT; - } else if part.eq_ignore_ascii_case("iso_level5_shift") - || part.eq_ignore_ascii_case("mod3") - { - modifiers |= Modifiers::ISO_LEVEL5_SHIFT; - } else { - return Err(miette!("invalid modifier: {part}")); - } - } + let (modifiers, key) = parse_modifiers(s)?; let trigger = if key.eq_ignore_ascii_case("MouseLeft") { Trigger::MouseLeft @@ -1000,6 +1508,19 @@ impl FromStr for Key { 
             Trigger::TouchpadScrollLeft
         } else if key.eq_ignore_ascii_case("TouchpadScrollRight") {
             Trigger::TouchpadScrollRight
+        } else if is_gesture_family_name(key) {
+            // Gesture families (TouchSwipe, TouchPinch, TouchRotate,
+            // TouchEdge, the tap/hold/drag variants, and their Touchpad*
+            // counterparts) are parameterized by KDL properties
+            // (`fingers=`, `direction=`, `edge=`, `zone=`), so the node
+            // name alone isn't enough to construct a Trigger. They are
+            // parsed in `Bind::decode_node` where `node.properties` is
+            // reachable. Reject them here so a bare gesture-family name
+            // without the expected property-parsing path produces a clear
+            // error instead of being silently routed to keysym lookup.
+            return Err(miette!(
+                "{key} is a parameterized gesture family — use property form like \
+                 `TouchSwipe fingers=3 direction=\"up\"`"
+            ));
         } else {
             let mut keysym = keysym_from_name(key, KEYSYM_CASE_INSENSITIVE);
             // The keyboard event handling code can receive either
@@ -1100,4 +1621,647 @@ mod tests {
         },
     );
 }
+
+    #[test]
+    fn bare_gesture_family_name_is_rejected_by_fromstr() {
+        // FromStr for Key doesn't have property context, so a bare
+        // `TouchSwipe` with no properties must fail (property parsing
+        // happens in Bind::decode_node).
+        assert!("TouchSwipe".parse::<Key>().is_err());
+        assert!("TouchPinch".parse::<Key>().is_err());
+        assert!("TouchRotate".parse::<Key>().is_err());
+        assert!("TouchTap".parse::<Key>().is_err());
+        assert!("TouchEdge".parse::<Key>().is_err());
+        assert!("TouchpadSwipe".parse::<Key>().is_err());
+        assert!("TouchpadTapHold".parse::<Key>().is_err());
+        assert!("TouchpadTapHoldDrag".parse::<Key>().is_err());
+        assert!("TouchpadPinch".parse::<Key>().is_err());
+        assert!("TouchTapHoldDrag".parse::<Key>().is_err());
+    }
+
+    #[test]
+    fn old_hardcoded_touch_names_no_longer_parse() {
+        // Hard break: the old TouchSwipe3Up / TouchEdgeLeft style is gone.
+        // These should now be interpreted as unknown keysyms and fail.
+ assert!("TouchSwipe3Up".parse::().is_err()); + assert!("TouchPinch3In".parse::().is_err()); + assert!("TouchRotate4Cw".parse::().is_err()); + assert!("TouchEdgeTop:Left".parse::().is_err()); + } + + #[test] + fn build_touchswipe() { + let props = GestureTriggerProps { + fingers: Some(3), + direction: Some("up"), + edge: None, + zone: None, + }; + assert_eq!( + build_gesture_trigger("TouchSwipe", &props).unwrap(), + Trigger::TouchSwipe { + fingers: 3, + direction: SwipeDirection::Up + } + ); + } + + #[test] + fn build_touchswipe_arbitrary_fingers() { + for n in MIN_FINGERS..=MAX_FINGERS { + let props = GestureTriggerProps { + fingers: Some(n), + direction: Some("right"), + edge: None, + zone: None, + }; + let got = build_gesture_trigger("TouchSwipe", &props).unwrap(); + assert_eq!( + got, + Trigger::TouchSwipe { + fingers: n, + direction: SwipeDirection::Right + } + ); + } + } + + #[test] + fn fingers_out_of_range_rejected() { + for bad in [0u8, 1, 2, 11, 20] { + let props = GestureTriggerProps { + fingers: Some(bad), + direction: Some("up"), + edge: None, + zone: None, + }; + assert!( + build_gesture_trigger("TouchSwipe", &props).is_err(), + "fingers={bad} should be rejected" + ); + } + } + + #[test] + fn direction_validated_per_family() { + // "up" is valid for swipe but not pinch/rotate. + let swipe_up = GestureTriggerProps { + fingers: Some(3), + direction: Some("up"), + edge: None, + zone: None, + }; + assert!(build_gesture_trigger("TouchSwipe", &swipe_up).is_ok()); + assert!(build_gesture_trigger("TouchPinch", &swipe_up).is_err()); + assert!(build_gesture_trigger("TouchRotate", &swipe_up).is_err()); + + // "in" is valid for pinch but not swipe/rotate. 
+ let pinch_in = GestureTriggerProps { + fingers: Some(3), + direction: Some("in"), + edge: None, + zone: None, + }; + assert!(build_gesture_trigger("TouchPinch", &pinch_in).is_ok()); + assert!(build_gesture_trigger("TouchpadPinch", &pinch_in).is_ok()); + assert!(build_gesture_trigger("TouchSwipe", &pinch_in).is_err()); + assert!(build_gesture_trigger("TouchRotate", &pinch_in).is_err()); + + // "cw" is valid for rotate but not swipe/pinch. + let rotate_cw = GestureTriggerProps { + fingers: Some(3), + direction: Some("cw"), + edge: None, + zone: None, + }; + assert!(build_gesture_trigger("TouchRotate", &rotate_cw).is_ok()); + assert!(build_gesture_trigger("TouchSwipe", &rotate_cw).is_err()); + assert!(build_gesture_trigger("TouchPinch", &rotate_cw).is_err()); + assert!(build_gesture_trigger("TouchpadPinch", &rotate_cw).is_err()); + } + + #[test] + fn build_touchpadpinch() { + let props = GestureTriggerProps { + fingers: Some(2), + direction: Some("in"), + edge: None, + zone: None, + }; + assert_eq!( + build_gesture_trigger("TouchpadPinch", &props).unwrap(), + Trigger::TouchpadPinch { + fingers: 2, + direction: PinchDirection::In + } + ); + + let props = GestureTriggerProps { + fingers: Some(3), + direction: Some("out"), + edge: None, + zone: None, + }; + assert_eq!( + build_gesture_trigger("TouchpadPinch", &props).unwrap(), + Trigger::TouchpadPinch { + fingers: 3, + direction: PinchDirection::Out + } + ); + } + + #[test] + fn touchpadpinch_requires_direction() { + let props = GestureTriggerProps { + fingers: Some(2), + direction: None, + edge: None, + zone: None, + }; + assert!(build_gesture_trigger("TouchpadPinch", &props).is_err()); + } + + #[test] + fn touchpadpinch_rejects_invalid_direction() { + for bad in ["up", "down", "left", "right", "cw", "ccw"] { + let props = GestureTriggerProps { + fingers: Some(2), + direction: Some(bad), + edge: None, + zone: None, + }; + assert!( + build_gesture_trigger("TouchpadPinch", &props).is_err(), + "direction=\"{bad}\" should 
be rejected for TouchpadPinch" + ); + } + } + + #[test] + fn touchedge_parent_no_zone() { + let props = GestureTriggerProps { + fingers: None, + direction: None, + edge: Some("left"), + zone: None, + }; + assert_eq!( + build_gesture_trigger("TouchEdge", &props).unwrap(), + Trigger::TouchEdge { + edge: ScreenEdge::Left, + zone: None + } + ); + } + + #[test] + fn touchedge_zoned() { + // Top edge + zone="right" → EdgeZone::End (thirds along x-axis). + let props = GestureTriggerProps { + fingers: None, + direction: None, + edge: Some("top"), + zone: Some("right"), + }; + assert_eq!( + build_gesture_trigger("TouchEdge", &props).unwrap(), + Trigger::TouchEdge { + edge: ScreenEdge::Top, + zone: Some(EdgeZone::End) + } + ); + // Left edge + zone="top" → EdgeZone::Start (thirds along y-axis). + let props = GestureTriggerProps { + fingers: None, + direction: None, + edge: Some("left"), + zone: Some("top"), + }; + assert_eq!( + build_gesture_trigger("TouchEdge", &props).unwrap(), + Trigger::TouchEdge { + edge: ScreenEdge::Left, + zone: Some(EdgeZone::Start) + } + ); + } + + #[test] + fn touchedge_zone_vocab_mismatch_rejected() { + // Left/Right edges need top/center/bottom zones, not left/right. + let bad = GestureTriggerProps { + fingers: None, + direction: None, + edge: Some("left"), + zone: Some("left"), + }; + assert!(build_gesture_trigger("TouchEdge", &bad).is_err()); + + // Top/Bottom edges need left/center/right zones, not top/bottom. 
+ let bad = GestureTriggerProps { + fingers: None, + direction: None, + edge: Some("top"), + zone: Some("top"), + }; + assert!(build_gesture_trigger("TouchEdge", &bad).is_err()); + } + + #[test] + fn touchedge_rejects_fingers() { + let props = GestureTriggerProps { + fingers: Some(3), + direction: None, + edge: Some("left"), + zone: None, + }; + assert!(build_gesture_trigger("TouchEdge", &props).is_err()); + } + + #[test] + fn is_gesture_family_name_case_insensitive() { + assert!(is_gesture_family_name("TouchSwipe")); + assert!(is_gesture_family_name("touchswipe")); + assert!(is_gesture_family_name("TOUCHPINCH")); + assert!(is_gesture_family_name("TouchpadSwipe")); + assert!(is_gesture_family_name("TouchpadTapHold")); + assert!(is_gesture_family_name("touchpadtaphold")); + assert!(is_gesture_family_name("TouchpadTapHoldDrag")); + assert!(is_gesture_family_name("touchpadtapholddrag")); + assert!(is_gesture_family_name("TouchTap")); + assert!(is_gesture_family_name("touchtap")); + assert!(is_gesture_family_name("TouchTapHoldDrag")); + assert!(is_gesture_family_name("touchtapholddrag")); + assert!(!is_gesture_family_name("TouchSwipe3Up")); + assert!(!is_gesture_family_name("TouchpadScrollUp")); + } + + // Integration tests exercising the full Bind::decode_node two-phase + // parse path (strip modifiers → check family → conditional property + // loop → build trigger). These go through Config::parse_mem so the + // whole knuffel pipeline is exercised. 
+ + #[track_caller] + fn parse_binds(binds_kdl: &str) -> crate::Config { + crate::Config::parse_mem(&format!("binds {{\n{binds_kdl}\n}}")) + .map_err(miette::Report::new) + .unwrap() + } + + #[track_caller] + fn parse_binds_err(binds_kdl: &str) -> String { + match crate::Config::parse_mem(&format!("binds {{\n{binds_kdl}\n}}")) { + Ok(_) => panic!("expected parse error, got Ok"), + Err(e) => format!("{:?}", miette::Report::new(e)), + } + } + + fn first_bind(config: &crate::Config) -> &Bind { + config.binds.0.first().expect("no binds parsed") + } + + #[test] + fn decode_node_touchswipe_basic() { + let cfg = parse_binds(r#"TouchSwipe fingers=3 direction="up" { focus-workspace-up; }"#); + let bind = first_bind(&cfg); + assert_eq!( + bind.key.trigger, + Trigger::TouchSwipe { + fingers: 3, + direction: SwipeDirection::Up, + } + ); + assert!(bind.key.modifiers.is_empty()); + } + + #[test] + fn decode_node_touchswipe_with_modifier() { + // `Mod+TouchSwipe ...` should strip the modifier and still parse + // the property form correctly. + let cfg = + parse_binds(r#"Mod+TouchSwipe fingers=4 direction="left" { focus-column-right; }"#); + let bind = first_bind(&cfg); + assert_eq!( + bind.key.trigger, + Trigger::TouchSwipe { + fingers: 4, + direction: SwipeDirection::Left, + } + ); + assert!(bind.key.modifiers.contains(Modifiers::COMPOSITOR)); + } + + #[test] + fn decode_node_tag_on_gesture_allowed() { + let cfg = parse_binds( + r#"TouchSwipe fingers=3 direction="up" tag="ws-nav" { focus-workspace-up; }"#, + ); + let bind = first_bind(&cfg); + assert_eq!(bind.tag.as_deref(), Some("ws-nav")); + } + + #[test] + fn decode_node_tag_on_keyboard_bind_rejected() { + // tag="..." is a keylogging risk on keyboard binds and should + // fail parsing. 
+ let err = parse_binds_err(r#"Ctrl+A tag="keylog" { spawn "uname"; }"#); + assert!( + err.contains("tag is only supported on gesture triggers"), + "unexpected error: {err}" + ); + } + + #[test] + fn decode_node_gesture_property_on_keyboard_bind_rejected() { + // `fingers=3` on a keyboard bind should fall through to the + // "unexpected property" arm. + let err = parse_binds_err(r#"Ctrl+A fingers=3 { spawn "uname"; }"#); + assert!( + err.contains("unexpected property"), + "unexpected error: {err}" + ); + } + + #[test] + fn decode_node_duplicate_fingers_last_wins() { + // KDL/knuffel stores properties in a BTreeMap keyed on name, so + // `fingers=3 fingers=5` silently keeps the last value. Document + // that observed behavior — this is *not* something niri controls + // and it applies to every bind property, not just gesture ones. + let cfg = + parse_binds(r#"TouchSwipe fingers=3 fingers=5 direction="up" { focus-workspace-up; }"#); + let bind = first_bind(&cfg); + assert_eq!( + bind.key.trigger, + Trigger::TouchSwipe { + fingers: 5, + direction: SwipeDirection::Up, + } + ); + } + + #[test] + fn decode_node_unknown_property_rejected() { + let err = parse_binds_err( + r#"TouchSwipe fingers=3 direction="up" foo="bar" { focus-workspace-up; }"#, + ); + assert!( + err.contains("unexpected property"), + "unexpected error: {err}" + ); + } + + #[test] + fn decode_node_touchedge_with_zone() { + let cfg = parse_binds(r#"TouchEdge edge="top" zone="right" { spawn "screenshot"; }"#); + let bind = first_bind(&cfg); + assert_eq!( + bind.key.trigger, + Trigger::TouchEdge { + edge: ScreenEdge::Top, + zone: Some(EdgeZone::End), + } + ); + } + + #[test] + fn decode_node_touchedge_missing_edge_rejected() { + let err = parse_binds_err(r#"TouchEdge { focus-column-right; }"#); + assert!(err.contains("requires `edge="), "unexpected error: {err}"); + } + + #[test] + fn decode_node_touchedge_zone_vocab_mismatch_rejected() { + // edge="left" doesn't take zone="left". 
+ let err = parse_binds_err(r#"TouchEdge edge="left" zone="left" { noop; }"#); + assert!(err.contains("invalid zone"), "unexpected error: {err}"); + } + + #[test] + fn decode_node_mod_shift_touchedge_zoned() { + // Multi-modifier + zoned edge, exercising the full modifier + // stripping + property path. + let cfg = parse_binds( + r#"Mod+Shift+TouchEdge edge="right" zone="bottom" tag="zone-rb" { noop; }"#, + ); + let bind = first_bind(&cfg); + assert_eq!( + bind.key.trigger, + Trigger::TouchEdge { + edge: ScreenEdge::Right, + zone: Some(EdgeZone::End), + } + ); + assert!(bind.key.modifiers.contains(Modifiers::COMPOSITOR)); + assert!(bind.key.modifiers.contains(Modifiers::SHIFT)); + assert_eq!(bind.tag.as_deref(), Some("zone-rb")); + } + + #[test] + fn decode_node_touchpad_swipe_parses() { + let cfg = + parse_binds(r#"TouchpadSwipe fingers=3 direction="right" { focus-column-left; }"#); + let bind = first_bind(&cfg); + assert_eq!( + bind.key.trigger, + Trigger::TouchpadSwipe { + fingers: 3, + direction: SwipeDirection::Right, + } + ); + } + + #[test] + fn decode_node_touchpad_tap_parses() { + let cfg = parse_binds(r#"TouchpadTapHold fingers=3 { screenshot; }"#); + let bind = first_bind(&cfg); + assert_eq!(bind.key.trigger, Trigger::TouchpadTapHold { fingers: 3 }); + } + + #[test] + fn decode_node_touchpad_tap_with_modifier() { + let cfg = parse_binds(r#"Mod+TouchpadTapHold fingers=4 { close-window; }"#); + let bind = first_bind(&cfg); + assert_eq!(bind.key.trigger, Trigger::TouchpadTapHold { fingers: 4 }); + assert!(bind.key.modifiers.contains(Modifiers::COMPOSITOR)); + } + + #[test] + fn touchpad_tap_rejects_direction() { + let props = GestureTriggerProps { + fingers: Some(3), + direction: Some("up"), + edge: None, + zone: None, + }; + assert!( + build_gesture_trigger("TouchpadTapHold", &props).is_err(), + "TouchpadTapHold should reject direction=" + ); + } + + #[test] + fn touchpad_tap_rejects_fingers_below_3() { + for bad in [0u8, 1, 2] { + let props = 
GestureTriggerProps { + fingers: Some(bad), + direction: None, + edge: None, + zone: None, + }; + assert!( + build_gesture_trigger("TouchpadTapHold", &props).is_err(), + "TouchpadTapHold fingers={bad} should be rejected" + ); + } + } + + #[test] + fn decode_node_touchpad_tap_hold_drag_parses() { + let cfg = parse_binds(r#"TouchpadTapHoldDrag fingers=3 { focus-workspace-up; }"#); + let bind = first_bind(&cfg); + assert_eq!( + bind.key.trigger, + Trigger::TouchpadTapHoldDrag { fingers: 3 } + ); + } + + #[test] + fn decode_node_touchpad_tap_hold_drag_with_modifier() { + let cfg = parse_binds(r#"Mod+TouchpadTapHoldDrag fingers=4 { move-window-down; }"#); + let bind = first_bind(&cfg); + assert_eq!( + bind.key.trigger, + Trigger::TouchpadTapHoldDrag { fingers: 4 } + ); + assert!(bind.key.modifiers.contains(Modifiers::COMPOSITOR)); + } + + #[test] + fn touchpad_tap_hold_drag_rejects_direction() { + let props = GestureTriggerProps { + fingers: Some(3), + direction: Some("up"), + edge: None, + zone: None, + }; + assert!( + build_gesture_trigger("TouchpadTapHoldDrag", &props).is_err(), + "TouchpadTapHoldDrag should reject direction=" + ); + } + + #[test] + fn decode_node_touch_tap_hold_drag_omnidirectional() { + let cfg = parse_binds(r#"TouchTapHoldDrag fingers=3 { screenshot; }"#); + let bind = first_bind(&cfg); + assert_eq!( + bind.key.trigger, + Trigger::TouchTapHoldDrag { + fingers: 3, + direction: None + } + ); + } + + #[test] + fn decode_node_touch_tap_hold_drag_directional() { + let cfg = + parse_binds(r#"TouchTapHoldDrag fingers=3 direction="left" { spawn "wl-copy"; }"#); + let bind = first_bind(&cfg); + assert_eq!( + bind.key.trigger, + Trigger::TouchTapHoldDrag { + fingers: 3, + direction: Some(SwipeDirection::Left), + } + ); + } + + #[test] + fn decode_node_touch_tap_hold_drag_with_modifier() { + let cfg = + parse_binds(r#"Mod+TouchTapHoldDrag fingers=4 direction="up" { toggle-overview; }"#); + let bind = first_bind(&cfg); + assert_eq!( + bind.key.trigger, + 
Trigger::TouchTapHoldDrag { + fingers: 4, + direction: Some(SwipeDirection::Up), + } + ); + assert!(bind.key.modifiers.contains(Modifiers::COMPOSITOR)); + } + + #[test] + fn decode_node_rotation_parses() { + let cfg = parse_binds(r#"TouchRotate fingers=4 direction="cw" { focus-column-right; }"#); + let bind = first_bind(&cfg); + assert_eq!( + bind.key.trigger, + Trigger::TouchRotate { + fingers: 4, + direction: RotateDirection::Cw, + } + ); + } + + #[test] + fn decode_node_pinch_parses() { + let cfg = parse_binds(r#"TouchPinch fingers=3 direction="in" { open-overview; }"#); + let bind = first_bind(&cfg); + assert_eq!( + bind.key.trigger, + Trigger::TouchPinch { + fingers: 3, + direction: PinchDirection::In, + } + ); + } + + #[test] + fn decode_node_fingers_out_of_range_rejected() { + let err = parse_binds_err(r#"TouchSwipe fingers=2 direction="up" { focus-workspace-up; }"#); + assert!(err.contains("out of range"), "unexpected error: {err}"); + } + + #[test] + fn decode_node_pinch_direction_out_parses() { + let cfg = parse_binds(r#"TouchPinch fingers=4 direction="out" { close-overview; }"#); + let bind = first_bind(&cfg); + assert_eq!( + bind.key.trigger, + Trigger::TouchPinch { + fingers: 4, + direction: PinchDirection::Out, + } + ); + } + + #[test] + fn decode_node_swipe_with_rotate_direction_rejected() { + // `direction="cw"` is valid for TouchRotate but not TouchSwipe. + // Integration-layer coverage that per-family direction validation + // actually reaches the user through the full parse path. + let err = parse_binds_err(r#"TouchSwipe fingers=3 direction="cw" { focus-workspace-up; }"#); + assert!(err.contains("invalid direction"), "unexpected error: {err}"); + } + + #[test] + fn decode_node_touchpad_swipe_with_modifier() { + // Modifier-stripping on the touchpad family, mirroring the + // touchscreen `decode_node_touchswipe_with_modifier` test. 
+ let cfg = + parse_binds(r#"Mod+TouchpadSwipe fingers=4 direction="down" { toggle-overview; }"#); + let bind = first_bind(&cfg); + assert_eq!( + bind.key.trigger, + Trigger::TouchpadSwipe { + fingers: 4, + direction: SwipeDirection::Down, + } + ); + assert!(bind.key.modifiers.contains(Modifiers::COMPOSITOR)); + } } diff --git a/niri-config/src/input.rs b/niri-config/src/input.rs index 5a2eb1369c..d6fa371874 100644 --- a/niri-config/src/input.rs +++ b/niri-config/src/input.rs @@ -16,7 +16,7 @@ pub struct Input { pub trackpoint: Trackpoint, pub trackball: Trackball, pub tablet: Tablet, - pub touch: Touch, + pub touchscreen: Touchscreen, pub disable_power_key_handling: bool, pub warp_mouse_to_focus: Option, pub focus_follows_mouse: Option, @@ -40,7 +40,7 @@ pub struct InputPart { #[knuffel(child)] pub tablet: Option, #[knuffel(child)] - pub touch: Option, + pub touchscreen: Option, #[knuffel(child)] pub disable_power_key_handling: Option, #[knuffel(child)] @@ -71,7 +71,7 @@ impl MergeWith for Input { trackpoint, trackball, tablet, - touch, + touchscreen, ); merge_clone_opt!( @@ -176,7 +176,7 @@ pub struct ScrollFactor { impl ScrollFactor { pub fn h_v_factors(&self) -> (f64, f64) { - let base_value = self.base.map(|f| f.0).unwrap_or(1.0); + let base_value = self.base.map(|f| f.0).unwrap_or(0.4); let h = self.horizontal.map(|f| f.0).unwrap_or(base_value); let v = self.vertical.map(|f| f.0).unwrap_or(base_value); (h, v) @@ -221,6 +221,38 @@ pub struct Touchpad { pub middle_emulation: bool, #[knuffel(child)] pub scroll_factor: Option, + #[knuffel(child)] + pub gestures: Option, +} + +impl Touchpad { + /// Swipe commit gate in libinput delta units (from + /// `swipe-trigger-distance`). Default 16. + pub fn swipe_trigger_distance(&self) -> f64 { + self.gestures + .as_ref() + .and_then(|g| g.swipe_trigger_distance) + .unwrap_or(16.0) + } + + /// Libinput delta units of swipe motion that map to IPC + /// `GestureProgress = 1.0`. Default 40. 
+ pub fn swipe_progress_distance(&self) -> f64 { + self.gestures + .as_ref() + .and_then(|g| g.swipe_progress_distance) + .unwrap_or(40.0) + } + + /// Pinch commit gate in scale-ratio units (from + /// `pinch-trigger-scale`). `|scale - 1.0|` must exceed this before a + /// `TouchpadPinch` bind fires. Default 0.15. + pub fn pinch_trigger_scale(&self) -> f64 { + self.gestures + .as_ref() + .and_then(|g| g.pinch_trigger_scale) + .unwrap_or(0.15) + } } #[derive(knuffel::Decode, Debug, Default, Clone, PartialEq)] @@ -372,13 +404,350 @@ pub struct Tablet { } #[derive(knuffel::Decode, Debug, Default, Clone, PartialEq)] -pub struct Touch { +pub struct Touchscreen { #[knuffel(child)] pub off: bool, + #[knuffel(child)] + pub natural_scroll: bool, #[knuffel(child, unwrap(arguments))] pub calibration_matrix: Option>, #[knuffel(child, unwrap(argument))] pub map_to_output: Option, + #[knuffel(child)] + pub gestures: Option, +} + +impl Touchscreen { + /// Swipe commit gate: centroid must travel this many pixels before a + /// swipe can latch. Default 100. + pub fn swipe_trigger_distance(&self) -> f64 { + self.gestures + .as_ref() + .and_then(|g| g.swipe_trigger_distance) + .unwrap_or(100.0) + } + + /// Width (in pixels) of the screen-edge start zone within which a + /// touch must begin to count as a `TouchEdge`. Default 12. + pub fn edge_start_distance(&self) -> f64 { + self.gestures + .as_ref() + .and_then(|g| g.edge_start_distance) + .unwrap_or(12.0) + } + + /// Pinch commit gate: `|spread_change|` must exceed this many pixels + /// before a pinch can latch. Default 100. + pub fn pinch_trigger_distance(&self) -> f64 { + self.gestures + .as_ref() + .and_then(|g| g.pinch_trigger_distance) + .unwrap_or(100.0) + } + + /// Pinch dominance ratio: `|spread_change|` must exceed + /// `swipe_distance × this` for pinch to win the race against swipe. + /// Higher = stricter pinch. Default 1.0. 
+ pub fn pinch_dominance_ratio(&self) -> f64 { + self.gestures + .as_ref() + .and_then(|g| g.pinch_dominance_ratio) + .unwrap_or(1.0) + } + + pub fn pinch_sensitivity(&self) -> f64 { + self.gestures + .as_ref() + .and_then(|g| g.pinch_sensitivity) + .unwrap_or(1.0) + } + + /// Multi-finger scaling applied to `swipe_trigger_distance` for + /// gestures with more than 3 fingers. Default 1.2 — gives a small + /// pinch-priority bias at high finger counts (4/5-finger swipes need + /// slightly more commitment, so ambiguous pinches usually win). + pub fn swipe_multi_finger_scale(&self) -> f64 { + self.gestures + .as_ref() + .and_then(|g| g.swipe_multi_finger_scale) + .unwrap_or(1.2) + } + + /// Pixels of swipe distance that map to IPC `GestureProgress = 1.0`. + /// Default 200. + pub fn swipe_progress_distance(&self) -> f64 { + self.gestures + .as_ref() + .and_then(|g| g.swipe_progress_distance) + .unwrap_or(200.0) + } + + /// Pixels of spread change that map to IPC `GestureProgress = ±1.0`. + /// Default 100. + pub fn pinch_progress_distance(&self) -> f64 { + self.gestures + .as_ref() + .and_then(|g| g.pinch_progress_distance) + .unwrap_or(100.0) + } + + /// Rotation commit gate: cumulative rotation must exceed this many + /// **degrees** (in the KDL config — converted to radians internally) + /// before a rotation can latch. Default 20°. + pub fn rotation_trigger_angle(&self) -> f64 { + let deg = self + .gestures + .as_ref() + .and_then(|g| g.rotation_trigger_angle) + .unwrap_or(20.0); + deg.to_radians() + } + + /// Rotation dominance ratio: `rotation_arc` must exceed both + /// `swipe_distance × this` and `|spread_change| × this` for rotation + /// to win the race. Higher = stricter rotation. **Matches + /// `pinch_dominance_ratio` semantics** — both knobs read as + /// "higher = stricter". + /// + /// Default 0.5 (arc must be ≥ 0.5 × competing motion). 
This is + /// deliberately lenient because rotating a finger cluster almost + /// always produces some incidental translation; requiring arc to + /// *exceed* the translation (ratio ≥ 1.0) would reject most + /// real-world rotations. + pub fn rotation_dominance_ratio(&self) -> f64 { + self.gestures + .as_ref() + .and_then(|g| g.rotation_dominance_ratio) + .unwrap_or(0.5) + } + + /// Degrees of rotation (in the KDL config — converted to radians + /// internally) that map to IPC `GestureProgress = ±1.0` for rotation + /// gestures. Default 90°. + pub fn rotation_progress_angle(&self) -> f64 { + let deg = self + .gestures + .as_ref() + .and_then(|g| g.rotation_progress_angle) + .unwrap_or(90.0); + deg.to_radians() + } + + /// Returns the swipe trigger distance scaled for a given finger + /// count. Extra fingers above 3 increase the distance by the + /// `swipe_multi_finger_scale` factor. + pub fn scaled_swipe_trigger_distance(&self, finger_count: usize) -> f64 { + let base = self.swipe_trigger_distance(); + let scale = self.swipe_multi_finger_scale(); + let extra = finger_count.saturating_sub(3) as f64; + base * (1.0 + extra * (scale - 1.0)) + } + + /// Maximum per-finger displacement (px) before a tap candidate is + /// killed. Default 15. + pub fn tap_wobble_threshold(&self) -> f64 { + self.gestures + .as_ref() + .and_then(|g| g.tap_wobble_threshold) + .unwrap_or(15.0) + } + + /// Maximum tap duration in milliseconds. Default 500. + pub fn tap_timeout_ms(&self) -> f64 { + self.gestures + .as_ref() + .and_then(|g| g.tap_timeout_ms) + .unwrap_or(500.0) + } + + /// Minimum hold duration (ms) before a wobble-kill can activate a + /// TouchTapHoldDrag bind. Prevents fast swipes from accidentally + /// triggering hold-drag. Default 200. 
+ pub fn tap_hold_trigger_delay_ms(&self) -> f64 { + self.gestures + .as_ref() + .and_then(|g| g.tap_hold_trigger_delay_ms) + .unwrap_or(200.0) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ScreenEdge { + Left, + Right, + Top, + Bottom, +} + +/// Which third of an edge a touch landed in. +/// +/// The perpendicular axis of the edge is split into thirds: for Top/Bottom +/// that's the x axis (Start=leftmost third, End=rightmost third); for +/// Left/Right that's the y axis (Start=topmost third, End=bottommost third). +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum EdgeZone { + Start, + Center, + End, +} + +impl ScreenEdge { + /// Lower-cased name used in KDL config and IPC events (`"left"`, + /// `"right"`, `"top"`, `"bottom"`). + pub fn as_kdl_name(self) -> &'static str { + match self { + ScreenEdge::Left => "left", + ScreenEdge::Right => "right", + ScreenEdge::Top => "top", + ScreenEdge::Bottom => "bottom", + } + } +} + +/// Lower-cased zone name used in KDL config and IPC events. The +/// vocabulary rotates per edge axis: top/bottom edges take +/// `left|center|right`; left/right edges take `top|center|bottom`. This +/// is the single source of truth for that mapping — parsers, IPC +/// emitters, and display helpers all share it. +pub fn zone_kdl_name(edge: ScreenEdge, zone: EdgeZone) -> &'static str { + match (edge, zone) { + (ScreenEdge::Top | ScreenEdge::Bottom, EdgeZone::Start) => "left", + (ScreenEdge::Top | ScreenEdge::Bottom, EdgeZone::Center) => "center", + (ScreenEdge::Top | ScreenEdge::Bottom, EdgeZone::End) => "right", + (ScreenEdge::Left | ScreenEdge::Right, EdgeZone::Start) => "top", + (ScreenEdge::Left | ScreenEdge::Right, EdgeZone::Center) => "center", + (ScreenEdge::Left | ScreenEdge::Right, EdgeZone::End) => "bottom", + } +} + +/// Tuning parameters for touchscreen gesture recognition. +/// +/// The actual gesture binds (e.g. 
`TouchSwipe fingers=3 direction="up"`, +/// `TouchEdge edge="left"`) live in the main `binds {}` block — this +/// struct only controls how movement is classified and how IPC progress +/// is reported. +#[derive(knuffel::Decode, Debug, Default, Clone, PartialEq)] +pub struct TouchscreenGesturesConfig { + /// Swipe commit gate: pixels of centroid movement required before a + /// swipe gesture latches. Lower values feel more responsive but risk + /// triggering on incidental finger drift. Default: 100.0. + #[knuffel(child, unwrap(argument))] + pub swipe_trigger_distance: Option, + /// Width (in pixels) of the screen-edge start zone. A touch must + /// *begin* within this distance from an edge for it to count as a + /// `TouchEdge edge="..."` gesture; touches starting farther in are + /// treated as regular swipes. Default: 12.0. + #[knuffel(child, unwrap(argument))] + pub edge_start_distance: Option, + /// Pinch commit gate: pixels of `|spread_change|` required before a + /// pinch gesture latches. Default: 100.0. + #[knuffel(child, unwrap(argument))] + pub pinch_trigger_distance: Option, + /// Pinch dominance ratio: `|spread_change|` must exceed + /// `swipe_distance × this` for pinch to beat swipe in the race. + /// Higher values make pinch stricter — the fingers really have to + /// move apart/together rather than glide across the screen. + /// Default: 1.0. + #[knuffel(child, unwrap(argument))] + pub pinch_dominance_ratio: Option, + /// Multiplier mapping finger spread change (in screen pixels) to + /// continuous pinch animation delta. Applies to all pinch-bound + /// continuous actions — the bind's own `sensitivity` property is + /// ignored for pinch, since raw spread-delta pixels need very + /// different scaling from linear swipe distances. At 1.0, one pixel + /// of spread change contributes one pixel to the underlying gesture + /// accumulator (same scale swipes use). Default: 1.0. 
+ #[knuffel(child, unwrap(argument))] + pub pinch_sensitivity: Option, + /// Scaling applied to `swipe_trigger_distance` for gestures with + /// more than 3 fingers. The formula is + /// `base * (1 + (fingers − 3) * (scale − 1))`, so with a base of 100 + /// and scale 1.2 a 4-finger swipe needs 120 px and a 5-finger swipe + /// needs 140 px. Default 1.2 — gives a small pinch-priority bias at + /// high finger counts so ambiguous 4/5-finger motions resolve as + /// pinch rather than swipe. Set 1.0 to disable the bias entirely. + #[knuffel(child, unwrap(argument))] + pub swipe_multi_finger_scale: Option, + /// Pixels of swipe distance that map to IPC `GestureProgress = 1.0`. + /// IPC-only output knob — doesn't affect classification. Tune this + /// to make tagged external-app gestures (sidebar drawers etc.) feel + /// right on your display. Default: 200.0. + #[knuffel(child, unwrap(argument))] + pub swipe_progress_distance: Option, + /// Pixels of spread change that map to IPC + /// `GestureProgress = ±1.0` for pinch gestures. Signed: positive for + /// pinch-out (spread growing), negative for pinch-in (spread + /// shrinking). Default: 100.0. + #[knuffel(child, unwrap(argument))] + pub pinch_progress_distance: Option, + /// Rotation commit gate: cumulative rotation (in **degrees**) + /// required before a rotation gesture latches. Converted to radians + /// internally. Default: 20°. + #[knuffel(child, unwrap(argument))] + pub rotation_trigger_angle: Option, + /// Rotation dominance ratio: `rotation_arc` must exceed + /// `swipe_distance × this` AND `|spread_change| × this` for rotation + /// to beat swipe and pinch in the race. Higher = stricter, matching + /// `pinch_dominance_ratio` semantics. Default: 0.5 (deliberately + /// lenient — rotation almost always includes incidental translation, + /// so requiring arc to strictly exceed translation would reject + /// nearly all real-world rotations). 
+ #[knuffel(child, unwrap(argument))] + pub rotation_dominance_ratio: Option, + /// Degrees of cumulative rotation that map to IPC + /// `GestureProgress = ±1.0` for rotation gestures. Signed: positive + /// for counter-clockwise, negative for clockwise. Default: 90°. + #[knuffel(child, unwrap(argument))] + pub rotation_progress_angle: Option, + /// Maximum per-finger displacement (in pixels) allowed during a tap + /// gesture. If any single finger moves more than this distance from + /// its initial landing position, the tap candidate is killed and the + /// gesture can only resolve as swipe/pinch/rotate. Default: 15.0. + #[knuffel(child, unwrap(argument))] + pub tap_wobble_threshold: Option, + /// Maximum duration (in milliseconds) from the third finger landing + /// to all fingers lifting for a tap to fire. Taps slower than this + /// are discarded — acts as a tap-vs-hold safety cap. Default: 500. + #[knuffel(child, unwrap(argument))] + pub tap_timeout_ms: Option, + /// Minimum hold duration (in milliseconds) before a wobble-kill can + /// activate a `TouchTapHoldDrag` bind. If fingers move before this + /// delay elapses, normal swipe/pinch/rotate recognition continues + /// instead. Prevents fast swipes from accidentally triggering + /// hold-drag. Default: 200. + #[knuffel(child, unwrap(argument))] + pub tap_hold_trigger_delay_ms: Option, +} + +/// Tuning parameters for touchpad gesture recognition. +/// +/// The actual gesture binds (e.g. `TouchpadSwipe fingers=3 direction="up"`) +/// live in the main `binds {}` block — this struct only controls how +/// movement is classified and how IPC progress is reported. +#[derive(knuffel::Decode, Debug, Default, Clone, PartialEq)] +pub struct TouchpadGesturesConfig { + /// Swipe commit gate: libinput delta units of centroid motion before + /// a swipe gesture latches. These units are acceleration-adjusted + /// and not directly comparable to touchscreen pixels. Default: 16.0. 
+ #[knuffel(child, unwrap(argument))] + pub swipe_trigger_distance: Option, + /// Libinput delta units of swipe movement that map to IPC + /// `GestureProgress = 1.0`. Because libinput acceleration curves are + /// nonlinear, the same physical swipe can produce different delta + /// magnitudes depending on speed — this value is not directly + /// comparable to the touchscreen `swipe-progress-distance`. + /// Default: 40.0. + #[knuffel(child, unwrap(argument))] + pub swipe_progress_distance: Option, + /// Pinch commit gate: `|scale - 1.0|` must exceed this unitless scale + /// ratio before a `TouchpadPinch` bind fires. libinput normalizes + /// pinch scale (1.0 = no change, 1.5 = 50% spread out, 0.5 = 50% + /// spread in), so this is not comparable to the touchscreen + /// `pinch-trigger-distance` (which is in pixels). Fires once per + /// gesture when the threshold is crossed; direction is picked from + /// the sign of the scale change. Default: 0.15. + #[knuffel(child, unwrap(argument))] + pub pinch_trigger_scale: Option, } #[derive(knuffel::Decode, Debug, Clone, Copy, PartialEq)] diff --git a/niri-config/src/lib.rs b/niri-config/src/lib.rs index 909aeb80a5..00f674ca29 100644 --- a/niri-config/src/lib.rs +++ b/niri-config/src/lib.rs @@ -40,6 +40,7 @@ pub mod layout; pub mod misc; pub mod output; pub mod recent_windows; +pub mod touch_binds; pub mod utils; pub mod window_rule; pub mod workspace; @@ -725,7 +726,7 @@ mod tests { 4.0 5.0 6.0 } - touch { + touchscreen { map-to-output "eDP-1" } @@ -1029,6 +1030,7 @@ mod tests { vertical: None, }, ), + gestures: None, }, mouse: Mouse { off: false, @@ -1117,12 +1119,14 @@ mod tests { map_to_focused_window: true, left_handed: false, }, - touch: Touch { + touchscreen: Touchscreen { off: false, + natural_scroll: false, calibration_matrix: None, map_to_output: Some( "eDP-1", ), + gestures: None, }, disable_power_key_handling: true, warp_mouse_to_focus: Some( @@ -1890,6 +1894,7 @@ mod tests { saturation: None, }, }, + 
touchscreen_gesture_passthrough: None, }, ], layer_rules: [ @@ -1964,6 +1969,9 @@ mod tests { "Inhibit", ), ), + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -1980,6 +1988,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: false, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -2000,6 +2011,9 @@ mod tests { allow_when_locked: true, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -2018,6 +2032,9 @@ mod tests { hotkey_overlay_title: Some( None, ), + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -2034,6 +2051,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -2052,6 +2072,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -2068,6 +2091,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -2086,6 +2112,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -2104,6 +2133,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -2120,6 +2152,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -2140,6 +2175,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + 
sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -2160,6 +2198,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -2178,6 +2219,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: false, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -2194,6 +2238,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -2212,6 +2259,9 @@ mod tests { allow_when_locked: true, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, ], ), @@ -2336,6 +2386,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -2358,6 +2411,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -2382,6 +2438,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, ], }, diff --git a/niri-config/src/recent_windows.rs b/niri-config/src/recent_windows.rs index 46e10f4c96..27247c7113 100644 --- a/niri-config/src/recent_windows.rs +++ b/niri-config/src/recent_windows.rs @@ -154,6 +154,9 @@ impl From for Bind { allow_when_locked: false, allow_inhibiting: x.allow_inhibiting, hotkey_overlay_title: x.hotkey_overlay_title, + sensitivity: None, + natural_scroll: false, + tag: None, } } } diff --git a/niri-config/src/touch_binds.rs b/niri-config/src/touch_binds.rs new file mode 100644 index 0000000000..0ce6fa2495 --- /dev/null +++ b/niri-config/src/touch_binds.rs @@ -0,0 
+1,54 @@ +//! Touchscreen gesture types and continuous gesture detection. +//! +//! Gesture binds are configured in the main `binds {}` block using +//! parameterized trigger families (`TouchSwipe fingers=3 direction="up"`, +//! `TouchEdge edge="left"`, etc.). This module provides the gesture type +//! enum and continuous/discrete classification used by the touchscreen +//! dispatch code. + +use crate::binds::Action; + +/// Type of touchscreen gesture. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum TouchGestureType { + SwipeUp, + SwipeDown, + SwipeLeft, + SwipeRight, + PinchIn, + PinchOut, + RotateCw, + RotateCcw, + Tap, + EdgeSwipeLeft, + EdgeSwipeRight, + EdgeSwipeTop, + EdgeSwipeBottom, +} + +/// Which continuous gesture animation to drive. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ContinuousGestureKind { + WorkspaceSwitch, + ViewScroll, + OverviewToggle, + /// No compositor animation — only emits IPC progress events for external tools. + Noop, +} + +/// Returns the continuous gesture kind for an action, or None if discrete. 
+pub fn continuous_gesture_kind(action: &Action) -> Option { + match action { + Action::FocusWorkspaceUp | Action::FocusWorkspaceDown => { + Some(ContinuousGestureKind::WorkspaceSwitch) + } + Action::FocusColumnLeft | Action::FocusColumnRight => { + Some(ContinuousGestureKind::ViewScroll) + } + Action::ToggleOverview | Action::OpenOverview | Action::CloseOverview => { + Some(ContinuousGestureKind::OverviewToggle) + } + Action::Noop => Some(ContinuousGestureKind::Noop), + _ => None, + } +} diff --git a/niri-config/src/window_rule.rs b/niri-config/src/window_rule.rs index f2bc2ad157..3d44d184b0 100644 --- a/niri-config/src/window_rule.rs +++ b/niri-config/src/window_rule.rs @@ -79,6 +79,8 @@ pub struct WindowRule { pub background_effect: BackgroundEffectRule, #[knuffel(child, default)] pub popups: PopupsRule, + #[knuffel(child, unwrap(argument))] + pub touchscreen_gesture_passthrough: Option, } /// Rules for popup surfaces. diff --git a/niri-ipc/src/lib.rs b/niri-ipc/src/lib.rs index 0aa3cb4f48..3431218714 100644 --- a/niri-ipc/src/lib.rs +++ b/niri-ipc/src/lib.rs @@ -1565,6 +1565,41 @@ pub enum CastTarget { }, } +/// Physical delta carried by a `GestureProgress` event, typed per gesture kind. +/// +/// Consumers that only drive animations can ignore this and use `progress`. +/// Consumers that need raw physical units (pixels, radians) match on the +/// variant. A future gesture kind shows up as a new variant, so exhaustively +/// matching consumers fail to compile until they handle it. +#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq)] +#[cfg_attr(feature = "json-schema", derive(schemars::JsonSchema))] +#[serde(tag = "kind")] +pub enum GestureDelta { + /// Swipe / edge swipe. Raw pixel delta on both axes since the previous + /// event. Sensitivity and natural-scroll adjustments are **not** applied + /// here — this is the raw finger motion. + Swipe { + /// Horizontal delta in pixels since the previous event. 
+ dx: f64, + /// Vertical delta in pixels since the previous event. + dy: f64, + }, + /// Pinch. Change in cluster spread (pixels) since the previous event. + /// Positive = fingers spreading, negative = fingers coming together. + Pinch { + /// Change in finger spread (average distance from the cluster + /// centroid) in pixels since the previous event. + d_spread: f64, + }, + /// Rotation. Change in cluster angle (radians) since the previous event. + /// Positive = counter-clockwise (mathematical convention). + Rotate { + /// Change in cluster angle in radians since the previous event. + /// Positive = counter-clockwise. + d_radians: f64, + }, +} + /// A compositor event. #[derive(Serialize, Deserialize, Debug, Clone)] #[cfg_attr(feature = "json-schema", derive(schemars::JsonSchema))] @@ -1705,6 +1740,156 @@ pub enum Event { /// Stream ID of the stopped screencast. stream_id: u64, }, + /// A gesture began (finger(s) crossed recognition threshold and matched a bind). + /// + /// Multi-finger touchscreen gesture commits emit this event for **every** + /// matched bind, tagged or not — untagged commits arrive with an empty + /// `tag` so debug tools (e.g. niri-gesture-inspector) can observe every + /// classification result. External consumers should filter on the tags + /// they care about and ignore empty-tag events. + /// + /// Edge swipes and touchpad gestures still only emit this event for + /// tagged binds. + GestureBegin { + /// User-defined tag from the bind config. Empty string for + /// untagged multi-finger touchscreen commits. + tag: String, + /// The trigger name, echoed back in the same property form used in + /// `binds {}`. 
Examples: + /// - `TouchSwipe fingers=3 direction="up"` + /// - `TouchpadSwipe fingers=4 direction="left"` + /// - `TouchPinch fingers=3 direction="in"` + /// - `TouchRotate fingers=4 direction="cw"` + /// - `TouchEdge edge="left"` (parent, no zone) + /// - `TouchEdge edge="top" zone="right"` (zoned) + /// Edge triggers emit the zoned form when a zoned bind fired and + /// the parent form when an unzoned bind fired. + trigger: String, + /// Number of fingers in the gesture. + finger_count: u8, + /// Whether this is a continuous (animation-driving) gesture. + /// Continuous gestures will emit `GestureProgress` events. + is_continuous: bool, + }, + /// A continuous gesture made progress (fires many times per second). + /// + /// Only emitted for continuous gestures on binds with a `tag` property. + GestureProgress { + /// User-defined tag from the bind config. + tag: String, + /// Signed, normalized progress. **Non-monotonic** — consumers must + /// handle the value going up and down as the user moves their fingers. + /// + /// Starts at `0.0` at the moment the gesture is recognized, then + /// changes as the gesture continues. The gesture's "natural" direction + /// (e.g. swipe-up, pinch-out, rotate-ccw) produces positive progress; + /// reversing direction produces negative values. Can exceed `±1.0` on + /// overshoot, and can return to `0.0` (or keep going negative) if the + /// user reverses a gesture mid-motion. Consumers that want a + /// commit/cancel decision should apply their own threshold to the + /// final value on `GestureEnd`, not assume progress is clamped or + /// monotonic. + /// + /// Normalization depends on the gesture kind (see `delta`): + /// - Swipes and edge gestures accumulate adjusted (sensitivity-scaled, + /// natural-scroll-adjusted) finger delta on the dominant axis, + /// normalized by `swipe-progress-distance` (default 200 px for + /// touchscreen, 40 libinput units for touchpad — same knob name, + /// separate config block). 
+ /// - Pinches use `(current_spread - start_spread) / pinch-progress-distance` + /// (default 100 px) — an absolute measurement (not accumulated), + /// so pinching in then out returns progress cleanly to near 0 + /// with no float drift. Positive = pinch-out, negative = pinch-in. + /// - Rotations use `cumulative_rotation / rotation-progress-angle` + /// (default 90°). Positive = counter-clockwise. + progress: f64, + /// Physical delta since the previous event, typed per gesture kind. + /// Consumers that only drive animations can read `progress` and + /// ignore this; consumers that need raw physical units (pixels, + /// radians) match on the variant. + delta: GestureDelta, + /// Timestamp in milliseconds. + timestamp_ms: u32, + }, + /// A gesture ended (all fingers lifted). + /// + /// Emitted for both continuous and discrete tagged gestures. + GestureEnd { + /// User-defined tag from the bind config. + tag: String, + /// Whether the gesture completed (snapped to target) or cancelled (snapped back). + /// For discrete gestures, this is always `true`. + completed: bool, + }, + /// Per-frame recognition telemetry for the touchscreen gesture recognizer. + /// + /// Emitted at touch frame rate (~120 Hz) during the recognition phase of + /// a multi-finger gesture, before any LOCK decision is made. Intended + /// for debug / playground tooling (see `niri-gesture-inspector`) that + /// wants to visualize threshold crossings, dominance races, and the + /// `is_rotate` / `is_pinch` / `closest` classification state in real time. + /// + /// **Debug builds only.** The emission site in the compositor is gated + /// with `#[cfg(debug_assertions)]`, so release builds never produce this + /// event and pay zero cost. The enum variant itself is always defined + /// so that both debug and release clients compile against the same + /// `niri-ipc` crate — release clients simply never receive this variant + /// over the wire. 
Consumers must still handle the variant (even if just + /// with an empty match arm) because the type is always in scope. + /// + /// **Wire format stability.** This is a debug channel. Fields may be + /// added, renamed, or removed between niri versions without a deprecation + /// cycle. Consumers should tolerate missing/extra fields and treat the + /// event as best-effort telemetry, not a load-bearing API. + RecognitionFrame { + /// Current number of fingers on the touchscreen. + finger_count: u8, + /// Accumulated linear displacement of the finger cluster centroid + /// since recognition began, in screen pixels. + swipe_distance: f64, + /// Resolved swipe trigger distance in pixels (from + /// `swipe-trigger-distance`, scaled by finger count via + /// `swipe-multi-finger-scale`). + swipe_trigger_distance: f64, + /// `current_spread - initial_spread` — **signed** change in average + /// finger-to-centroid distance since recognition began, in pixels. + /// Negative = pinch-in, positive = pinch-out. The classifier + /// compares against `|spread_change|`; this raw signed value is + /// useful for visual direction display. + spread_change: f64, + /// Pinch trigger distance in pixels, from `pinch-trigger-distance` + /// (compared against `|spread_change|`). + pinch_trigger_distance: f64, + /// **Signed** cumulative rotation since recognition began, in + /// radians. Negative = counter-clockwise, positive = clockwise. + /// The classifier compares against `|rotation_rad|`. + rotation_rad: f64, + /// Rotation trigger angle in radians (from `rotation-trigger-angle`, + /// which the config accepts in degrees). + rotation_trigger_angle_rad: f64, + /// Rotation arc length (`|rotation_rad| × current_spread`) in pixels — + /// the tangential distance each finger would travel if the cluster + /// were rotating purely about its centroid. Commensurable with + /// `swipe_distance` and `spread_change` for dominance comparisons. 
+ rotation_arc: f64, + /// Rotation arc trigger distance + /// (`rotation_trigger_angle_rad × current_spread`) in pixels — the + /// arc-length equivalent of the rotation angle trigger at the + /// current finger spread. + rotation_arc_trigger_distance: f64, + /// Whether all `is_rotate` commit gates are satisfied on this frame + /// (arc ≥ arc trigger AND dominates swipe/spread by + /// `rotation-dominance-ratio`). + is_rotate: bool, + /// Whether all `is_pinch` commit gates are satisfied on this frame. + is_pinch: bool, + /// Leading classification candidate by % of its own trigger on this + /// frame. One of `"swipe"`, `"pinch"`, `"rotate"`. + closest: String, + /// Timestamp in milliseconds, matching the per-frame timestamp used + /// elsewhere in the recognizer. + timestamp_ms: u32, + }, } impl From for Timestamp { diff --git a/niri-ipc/src/state.rs b/niri-ipc/src/state.rs index b603dfc66a..51e41869e0 100644 --- a/niri-ipc/src/state.rs +++ b/niri-ipc/src/state.rs @@ -49,6 +49,9 @@ pub struct EventStreamState { /// State of screencasts. pub casts: CastsState, + + /// State of gesture events. + pub gesture: GestureState, } /// The workspaces state communicated over the event stream. @@ -93,6 +96,17 @@ pub struct CastsState { pub casts: HashMap, } +/// The gesture state communicated over the event stream. +/// +/// Gestures are transient — when a new client connects, there may or may not +/// be a gesture in progress. We track the active tag so replicate() can send +/// a GestureBegin if a gesture is currently active. +#[derive(Debug, Default)] +pub struct GestureState { + /// The tag of the currently active gesture, if any. 
+ pub active_tag: Option, +} + impl EventStreamStatePart for EventStreamState { fn replicate(&self) -> Vec { let mut events = Vec::new(); @@ -102,6 +116,7 @@ impl EventStreamStatePart for EventStreamState { events.extend(self.overview.replicate()); events.extend(self.config.replicate()); events.extend(self.casts.replicate()); + events.extend(self.gesture.replicate()); events } @@ -112,6 +127,7 @@ impl EventStreamStatePart for EventStreamState { let event = self.overview.apply(event)?; let event = self.config.apply(event)?; let event = self.casts.apply(event)?; + let event = self.gesture.apply(event)?; Some(event) } } @@ -321,3 +337,26 @@ impl EventStreamStatePart for CastsState { None } } + +impl EventStreamStatePart for GestureState { + fn replicate(&self) -> Vec { + // Gestures are transient — don't replay on connect. + vec![] + } + + fn apply(&mut self, event: Event) -> Option { + match event { + Event::GestureBegin { ref tag, .. } => { + self.active_tag = Some(tag.clone()); + } + Event::GestureProgress { .. } => { + // No state change needed. + } + Event::GestureEnd { .. } => { + self.active_tag = None; + } + event => return Some(event), + } + None + } +} diff --git a/resources/default-config.kdl b/resources/default-config.kdl index ccad1ac22e..dd72944772 100644 --- a/resources/default-config.kdl +++ b/resources/default-config.kdl @@ -40,6 +40,21 @@ input { // accel-profile "flat" // scroll-method "two-finger" // disabled-on-external-mouse + + // Touchpad gesture binds live in the main binds {} block using + // the `TouchpadSwipe` trigger with `fingers=` and `direction=` + // properties, e.g. + // TouchpadSwipe fingers=3 direction="up" { focus-workspace-up; } + // TouchpadSwipe fingers=4 direction="left" { focus-column-right; } + // `fingers=` accepts any integer in 3..=10. + // See the wiki page "Gestures" for the full list. + // + // gestures { + // // Libinput delta units of motion before a swipe commits. 
+ // swipe-trigger-distance 16.0 + // // Libinput delta units of swipe that map to IPC progress = 1.0. + // swipe-progress-distance 40.0 + // } } mouse { @@ -61,6 +76,50 @@ input { // middle-emulation } + // touchscreen { + // // off + // // natural-scroll + // // map-to-output "eDP-1" + // // + // // Touchscreen gesture binds live in the main binds {} block + // // using parameterized triggers with KDL properties, e.g. + // // TouchSwipe fingers=3 direction="up" { focus-workspace-up; } + // // TouchPinch fingers=4 direction="in" { open-overview; } + // // TouchRotate fingers=4 direction="cw" { focus-column-right; } + // // TouchEdge edge="left" { focus-column-right; } + // // TouchEdge edge="top" zone="right" { spawn "screenshot.sh"; } + // // `fingers=` accepts any integer in 3..=10. Edges can be split + // // into thirds with `zone=`; the vocabulary rotates per edge axis + // // (top/bottom take left|center|right; left/right take + // // top|center|bottom). A parent bind (no `zone=`) still fires as + // // a fallback for zones without a dedicated bind. + // // See the wiki page "Gestures" for the full list. + // // + // // gestures { + // // // Classifier commit gates. Higher = harder to trigger. + // // swipe-trigger-distance 100.0 // px of centroid motion before swipe latches + // // edge-start-distance 30.0 // px-wide edge start zone + // // pinch-trigger-distance 100.0 // px of spread change before pinch latches + // // pinch-dominance-ratio 1.0 // spread must equal-or-beat swipe (higher = stricter pinch) + // // // Multiplier from finger spread to continuous pinch animation + // // // delta. Applies to all continuous pinch binds (overview, + // // // workspace switch, etc.). Ignores per-bind sensitivity. + // // pinch-sensitivity 1.0 + // // // Ramps swipe-trigger-distance for 4+ finger gestures. + // // // Default 1.2 = small pinch-priority bias at high finger + // // // counts. Set 1.0 to disable. 
+ // // swipe-multi-finger-scale 1.2 + // // // IPC progress scaling (doesn't affect classification). + // // swipe-progress-distance 200.0 // px of swipe = progress 1.0 + // // pinch-progress-distance 100.0 // px of spread = progress ±1.0 (signed) + // // + // // // Rotation — angles in DEGREES. + // // rotation-trigger-angle 20.0 // ° before rotation can latch + // // rotation-dominance-ratio 0.5 // arc must beat swipe × this (higher = stricter) + // // rotation-progress-angle 90.0 // ° that map to IPC progress ±1.0 + // // } + // } + // Uncomment this to make the mouse warp to the center of newly focused windows. // warp-mouse-to-focus diff --git a/src/input/mod.rs b/src/input/mod.rs index 3e673bdbfc..9e54834f69 100644 --- a/src/input/mod.rs +++ b/src/input/mod.rs @@ -3,19 +3,40 @@ use std::collections::hash_map::Entry; use std::collections::HashSet; use std::time::Duration; +use crate::niri::ActiveSwipeBind; use calloop::timer::{TimeoutAction, Timer}; use input::event::gesture::GestureEventCoordinates as _; +use niri_config::touch_binds::{continuous_gesture_kind, ContinuousGestureKind}; + +/// Default sensitivity for touchpad gestures. +/// Higher than touchscreen (0.4) because touchpad deltas are smaller libinput units. 
+const TOUCHPAD_DEFAULT_SENSITIVITY: f64 = 1.0; +use self::move_grab::MoveGrab; +use self::pick_color_grab::PickColorGrab; +use self::pick_window_grab::PickWindowGrab; +use self::resize_grab::ResizeGrab; +use self::spatial_movement_grab::SpatialMovementGrab; +#[cfg(feature = "dbus")] +use crate::dbus::freedesktop_a11y::KbMonBlock; +use crate::layout::scrolling::ScrollDirection; +use crate::layout::{ActivateWindow, LayoutElement as _}; +use crate::niri::{CastTarget, PointerVisibility, State}; +use crate::ui::mru::{WindowMru, WindowMruUi}; +use crate::ui::screenshot_ui::ScreenshotUi; +use crate::utils::spawning::{spawn, spawn_sh}; +use crate::utils::{center, get_monotonic_time, CastSessionId, ResizeEdge}; use niri_config::{ - Action, Bind, Binds, Config, Key, ModKey, Modifiers, MruDirection, SwitchBinds, Trigger, + Action, Bind, Binds, Config, Key, ModKey, Modifiers, MruDirection, PinchDirection, + SwipeDirection, SwitchBinds, Trigger, MAX_FINGERS, MIN_FINGERS, }; -use niri_ipc::LayoutSwitchTarget; +use niri_ipc::{GestureDelta, LayoutSwitchTarget}; use smithay::backend::input::{ AbsolutePositionEvent, Axis, AxisSource, ButtonState, Device, DeviceCapability, Event, GestureBeginEvent, GestureEndEvent, GesturePinchUpdateEvent as _, GestureSwipeUpdateEvent as _, InputEvent, KeyState, KeyboardKeyEvent, Keycode, MouseButton, PointerAxisEvent, PointerButtonEvent, PointerMotionEvent, ProximityState, Switch, SwitchState, SwitchToggleEvent, TabletToolButtonEvent, TabletToolEvent, TabletToolProximityEvent, TabletToolTipEvent, - TabletToolTipState, TouchEvent, + TabletToolTipState, }; use smithay::backend::libinput::LibinputInputBackend; use smithay::input::dnd::DnDGrab; @@ -26,9 +47,7 @@ use smithay::input::pointer::{ GestureSwipeBeginEvent, GestureSwipeEndEvent, GestureSwipeUpdateEvent, GrabStartData as PointerGrabStartData, MotionEvent, PointerGrab, RelativeMotionEvent, }; -use smithay::input::touch::{ - DownEvent, GrabStartData as TouchGrabStartData, MotionEvent as 
TouchMotionEvent, UpEvent, -}; +use smithay::input::touch::GrabStartData as TouchGrabStartData; use smithay::input::SeatHandler; use smithay::output::Output; use smithay::reexports::wayland_server::protocol::wl_data_source::WlDataSource; @@ -37,22 +56,6 @@ use smithay::utils::{Logical, Point, Rectangle, Transform, SERIAL_COUNTER}; use smithay::wayland::keyboard_shortcuts_inhibit::KeyboardShortcutsInhibitor; use smithay::wayland::pointer_constraints::{with_pointer_constraint, PointerConstraint}; use smithay::wayland::tablet_manager::{TabletDescriptor, TabletSeatTrait}; -use touch_overview_grab::TouchOverviewGrab; - -use self::move_grab::MoveGrab; -use self::pick_color_grab::PickColorGrab; -use self::pick_window_grab::PickWindowGrab; -use self::resize_grab::ResizeGrab; -use self::spatial_movement_grab::SpatialMovementGrab; -#[cfg(feature = "dbus")] -use crate::dbus::freedesktop_a11y::KbMonBlock; -use crate::layout::scrolling::ScrollDirection; -use crate::layout::{ActivateWindow, LayoutElement as _}; -use crate::niri::{CastTarget, PointerVisibility, State}; -use crate::ui::mru::{WindowMru, WindowMruUi}; -use crate::ui::screenshot_ui::ScreenshotUi; -use crate::utils::spawning::{spawn, spawn_sh}; -use crate::utils::{center, get_monotonic_time, CastSessionId, ResizeEdge}; pub mod backend_ext; pub mod move_grab; @@ -63,6 +66,7 @@ pub mod scroll_swipe_gesture; pub mod scroll_tracker; pub mod spatial_movement_grab; pub mod swipe_tracker; +pub mod touch_gesture; pub mod touch_overview_grab; pub mod touch_resize_grab; @@ -2408,6 +2412,9 @@ impl State { self.niri.queue_redraw_mru_output(); } } + Action::Noop => { + // Intentionally does nothing. Used with tag for IPC-only binds. 
+ } } } @@ -3139,6 +3146,9 @@ impl State { allow_when_locked: false, allow_inhibiting: false, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }); let bind_right = Some(Bind { key: Key { @@ -3151,6 +3161,9 @@ impl State { allow_when_locked: false, allow_inhibiting: false, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }); (bind_left, bind_right) } else { @@ -3208,6 +3221,9 @@ impl State { allow_when_locked: false, allow_inhibiting: false, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }); let bind_down = Some(Bind { key: Key { @@ -3220,6 +3236,9 @@ impl State { allow_when_locked: false, allow_inhibiting: false, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }); (bind_up, bind_down) } else if should_handle_in_overview && modifiers == Modifiers::SHIFT { @@ -3234,6 +3253,9 @@ impl State { allow_when_locked: false, allow_inhibiting: false, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }); let bind_down = Some(Bind { key: Key { @@ -3246,6 +3268,9 @@ impl State { allow_when_locked: false, allow_inhibiting: false, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }); (bind_up, bind_down) } else { @@ -3791,21 +3816,107 @@ impl State { } fn on_gesture_swipe_begin(&mut self, event: I::GestureSwipeBeginEvent) { + // Swipe starting means hold → swipe transition; no tap-hold. + self.niri.touchpad_hold_begin = None; + + // Check for tap-hold-drag: a hold preceded this swipe. 
+ if let Some(drag_fingers) = self.niri.touchpad_drag_pending.take() { + let trigger = Trigger::TouchpadTapHoldDrag { + fingers: drag_fingers, + }; + let mods = self.niri.seat.get_keyboard().unwrap().modifier_state(); + let mod_key = self.backend.mod_key(&self.niri.config.borrow()); + let config = self.niri.config.borrow(); + let modifiers = modifiers_from_state(mods); + let bindings = make_binds_iter(&config, &mut self.niri.window_mru_ui, modifiers); + let bind = find_configured_bind(bindings, mod_key, trigger, mods); + drop(config); + + if let Some(bind) = bind { + let kind = continuous_gesture_kind(&bind.action); + let sensitivity = bind.sensitivity.unwrap_or(TOUCHPAD_DEFAULT_SENSITIVITY); + let tag = bind.tag.clone(); + + // Emit IPC GestureBegin if tagged. + if let Some(ref tag) = tag { + let trigger_name = crate::input::touch_gesture::trigger_to_ipc_name(trigger); + self.ipc_gesture_begin(tag.clone(), trigger_name, drag_fingers, kind.is_some()); + } + + if let Some(kind) = kind { + // Continuous gesture — begin animation. Reuses the + // existing swipe bind infrastructure so swipe updates + // and end events feed into the animation automatically. 
+ let is_overview_open = self.niri.layout.is_overview_open(); + match kind { + ContinuousGestureKind::OverviewToggle => { + self.niri.layout.overview_gesture_begin(); + self.niri.queue_redraw_all(); + } + ContinuousGestureKind::WorkspaceSwitch => { + if let Some(output) = self.niri.output_under_cursor() { + self.niri + .layout + .workspace_switch_gesture_begin(&output, true); + } + } + ContinuousGestureKind::ViewScroll => { + if self.niri.output_under_cursor().is_some() { + let output_ws = if is_overview_open { + self.niri.workspace_under_cursor(true) + } else { + self.niri.output_under_cursor().and_then(|output| { + let mon = self.niri.layout.monitor_for_output(&output)?; + Some((output, mon.active_workspace_ref())) + }) + }; + if let Some((output, ws)) = output_ws { + let ws_idx = + self.niri.layout.find_workspace_by_id(ws.id()).unwrap().0; + self.niri.layout.view_offset_gesture_begin( + &output, + Some(ws_idx), + true, + ); + } + } + } + ContinuousGestureKind::Noop => { + // No compositor animation. + } + } + self.niri.gesture_swipe_bind = Some(ActiveSwipeBind { + kind, + sensitivity, + tag, + ipc_progress: 0.0, + }); + } else { + // Discrete action — fire once. + if let Some(ref tag) = tag { + self.ipc_gesture_end(tag.clone(), true); + } + self.do_action(bind.action, bind.allow_when_locked); + } + + // Tap-hold-drag claimed this swipe — don't enter normal + // swipe handling. + return; + } + // No bind found for TouchpadTapHoldDrag — fall through to + // normal swipe handling below. + } + if self.niri.window_mru_ui.is_open() { // Don't start swipe gestures while in the MRU. return; } - if event.fingers() == 3 { - self.niri.gesture_swipe_3f_cumulative = Some((0., 0.)); - - // We handled this event. - return; - } else if event.fingers() == 4 { - self.niri.layout.overview_gesture_begin(); - self.niri.queue_redraw_all(); + let fingers = event.fingers() as usize; - // We handled this event. + // Accumulate for 3-5 finger swipes; bind lookup happens at threshold. 
+ if (3..=5).contains(&fingers) { + self.niri.gesture_swipe_3f_cumulative = Some((0., 0., fingers)); return; } @@ -3844,6 +3955,13 @@ impl State { let uninverted_delta_y = delta_y; + // Read swipe trigger distance from touchpad config. + let threshold = { + let config = self.niri.config.borrow(); + config.input.touchpad.swipe_trigger_distance() + }; + + // Apply natural scroll from device (for direction detection during cumulative phase). let device = event.device(); if let Some(device) = (&device as &dyn Any).downcast_ref::() { if device.config_scroll_natural_scroll_enabled() { @@ -3854,38 +3972,105 @@ impl State { let is_overview_open = self.niri.layout.is_overview_open(); - if let Some((cx, cy)) = &mut self.niri.gesture_swipe_3f_cumulative { + if let Some((cx, cy, fingers)) = &mut self.niri.gesture_swipe_3f_cumulative { *cx += delta_x; *cy += delta_y; - // Check if the gesture moved far enough to decide. Threshold copied from GNOME Shell. - let (cx, cy) = (*cx, *cy); - if cx * cx + cy * cy >= 16. * 16. { + let (cx, cy, fingers) = (*cx, *cy, *fingers); + if cx * cx + cy * cy >= threshold * threshold { self.niri.gesture_swipe_3f_cumulative = None; - if let Some(output) = self.niri.output_under_cursor() { - if cx.abs() > cy.abs() { - let output_ws = if is_overview_open { - self.niri.workspace_under_cursor(true) - } else { - // We don't want to accidentally "catch" the wrong workspace during - // animations. - self.niri.output_under_cursor().and_then(|output| { - let mon = self.niri.layout.monitor_for_output(&output)?; - Some((output, mon.active_workspace_ref())) - }) - }; + // Look up bind for this swipe direction + finger count. 
+ let is_horizontal = cx.abs() > cy.abs(); + let trigger = swipe_trigger(fingers, is_horizontal, cx, cy); + if let Some(trigger) = trigger { + let mods = self.niri.seat.get_keyboard().unwrap().modifier_state(); + let mod_key = self.backend.mod_key(&self.niri.config.borrow()); + let config = self.niri.config.borrow(); + let modifiers = modifiers_from_state(mods); + let bindings = + make_binds_iter(&config, &mut self.niri.window_mru_ui, modifiers); + let bind = find_configured_bind(bindings, mod_key, trigger, mods); + drop(config); - if let Some((output, ws)) = output_ws { - let ws_idx = self.niri.layout.find_workspace_by_id(ws.id()).unwrap().0; - self.niri - .layout - .view_offset_gesture_begin(&output, Some(ws_idx), true); + if let Some(bind) = bind { + let kind = continuous_gesture_kind(&bind.action); + let sensitivity = bind.sensitivity.unwrap_or(TOUCHPAD_DEFAULT_SENSITIVITY); + let tag = bind.tag.clone(); + + // Emit IPC GestureBegin if this bind has a tag. + if let Some(ref tag) = tag { + let trigger_name = + crate::input::touch_gesture::trigger_to_ipc_name(trigger); + self.ipc_gesture_begin( + tag.clone(), + trigger_name, + fingers as u8, + kind.is_some(), + ); } - } else { - self.niri - .layout - .workspace_switch_gesture_begin(&output, true); + + if let Some(kind) = kind { + // Continuous gesture — begin animation. 
+ match kind { + ContinuousGestureKind::OverviewToggle => { + self.niri.layout.overview_gesture_begin(); + self.niri.queue_redraw_all(); + } + ContinuousGestureKind::WorkspaceSwitch => { + if let Some(output) = self.niri.output_under_cursor() { + self.niri + .layout + .workspace_switch_gesture_begin(&output, true); + } + } + ContinuousGestureKind::ViewScroll => { + if self.niri.output_under_cursor().is_some() { + let output_ws = if is_overview_open { + self.niri.workspace_under_cursor(true) + } else { + self.niri.output_under_cursor().and_then(|output| { + let mon = + self.niri.layout.monitor_for_output(&output)?; + Some((output, mon.active_workspace_ref())) + }) + }; + if let Some((output, ws)) = output_ws { + let ws_idx = self + .niri + .layout + .find_workspace_by_id(ws.id()) + .unwrap() + .0; + self.niri.layout.view_offset_gesture_begin( + &output, + Some(ws_idx), + true, + ); + } + } + } + ContinuousGestureKind::Noop => { + // No compositor animation. + } + } + self.niri.gesture_swipe_bind = Some(ActiveSwipeBind { + kind, + sensitivity, + tag, + ipc_progress: 0.0, + }); + } else { + // Discrete action — fire once. + if !matches!(bind.action, Action::Noop) { + self.handle_bind(bind); + } + // Emit immediate GestureEnd for discrete gestures. + if let Some(ref tag) = tag { + self.ipc_gesture_end(tag.clone(), true); + } + } + return; } } } @@ -3893,43 +4078,95 @@ impl State { let timestamp = Duration::from_micros(event.time()); - let mut handled = false; - let res = self - .niri - .layout - .workspace_switch_gesture_update(delta_y, timestamp, true); - if let Some(output) = res { - if let Some(output) = output { - self.niri.queue_redraw(&output); - } - handled = true; - } - - let res = self - .niri - .layout - .view_offset_gesture_update(delta_x, timestamp, true); - if let Some(output) = res { - if let Some(output) = output { - self.niri.queue_redraw(&output); + // Feed continuous gesture with bind sensitivity. 
+ if let Some(ref bind) = self.niri.gesture_swipe_bind { + let kind = bind.kind; + let sensitivity = bind.sensitivity; + let tag = bind.tag.clone(); + let mut handled = false; + match kind { + ContinuousGestureKind::WorkspaceSwitch => { + let res = self.niri.layout.workspace_switch_gesture_update( + delta_y * sensitivity, + timestamp, + true, + ); + if let Some(output) = res { + if let Some(output) = output { + self.niri.queue_redraw(&output); + } + handled = true; + } + } + ContinuousGestureKind::ViewScroll => { + let res = self.niri.layout.view_offset_gesture_update( + delta_x * sensitivity, + timestamp, + true, + ); + if let Some(output) = res { + if let Some(output) = output { + self.niri.queue_redraw(&output); + } + handled = true; + } + } + ContinuousGestureKind::OverviewToggle => { + let res = self + .niri + .layout + .overview_gesture_update(-uninverted_delta_y * sensitivity, timestamp); + if let Some(redraw) = res { + if redraw { + self.niri.queue_redraw_all(); + } + handled = true; + } + } + ContinuousGestureKind::Noop => { + // No compositor animation — just emit IPC progress below. + handled = true; + } } - handled = true; - } - - let res = self - .niri - .layout - .overview_gesture_update(-uninverted_delta_y, timestamp); - if let Some(redraw) = res { - if redraw { - self.niri.queue_redraw_all(); + // Emit IPC GestureProgress for tagged touchpad gestures. 
+ if handled { + if let Some(tag) = tag { + let progress_distance = { + let config = self.niri.config.borrow(); + config.input.touchpad.swipe_progress_distance() + }; + let adjusted_delta = match kind { + ContinuousGestureKind::WorkspaceSwitch + | ContinuousGestureKind::OverviewToggle => delta_y * sensitivity, + ContinuousGestureKind::ViewScroll => delta_x * sensitivity, + ContinuousGestureKind::Noop => { + if delta_y.abs() > delta_x.abs() { + delta_y * sensitivity + } else { + delta_x * sensitivity + } + } + }; + let progress = match &mut self.niri.gesture_swipe_bind { + Some(ref mut bind) => { + bind.ipc_progress += adjusted_delta / progress_distance; + bind.ipc_progress + } + None => 0.0, + }; + let ts_ms = timestamp.as_millis() as u32; + self.ipc_gesture_progress( + tag, + progress, + GestureDelta::Swipe { + dx: delta_x, + dy: delta_y, + }, + ts_ms, + ); + } + return; } - handled = true; - } - - if handled { - // We handled this event. - return; } let pointer = self.niri.seat.get_pointer().unwrap(); @@ -3949,6 +4186,8 @@ impl State { fn on_gesture_swipe_end(&mut self, event: I::GestureSwipeEndEvent) { self.niri.gesture_swipe_3f_cumulative = None; + // Take the bind to extract the tag before clearing. + let swipe_tag = self.niri.gesture_swipe_bind.take().and_then(|b| b.tag); let mut handled = false; let res = self.niri.layout.workspace_switch_gesture_end(Some(true)); @@ -3969,6 +4208,11 @@ impl State { handled = true; } + // Emit IPC GestureEnd for tagged touchpad gestures. + if let Some(tag) = swipe_tag { + self.ipc_gesture_end(tag, true); + } + if handled { // We handled this event. return; @@ -3992,6 +4236,16 @@ impl State { } fn on_gesture_pinch_begin(&mut self, event: I::GesturePinchBeginEvent) { + // Pinch starting means hold → pinch transition; no tap or drag. + self.niri.touchpad_hold_begin = None; + self.niri.touchpad_drag_pending = None; + + // Arm the touchpad-pinch classifier. 
libinput reports fingers as + // u32 but the MIN..=MAX range is small; clamp defensively. + let fingers = u8::try_from(event.fingers()).unwrap_or(u8::MAX); + self.niri.touchpad_pinch_fingers = Some(fingers); + self.niri.touchpad_pinch_latched = false; + let serial = SERIAL_COUNTER.next_serial(); let pointer = self.niri.seat.get_pointer().unwrap(); @@ -4010,6 +4264,57 @@ impl State { } fn on_gesture_pinch_update(&mut self, event: I::GesturePinchUpdateEvent) { + // Classify pinch for discrete TouchpadPinch bind. libinput's + // scale() is normalized to gesture-start (1.0 = no change); we + // fire once per gesture when |scale - 1.0| crosses the + // threshold. Raw events still forward to clients below so + // app-level pinch-to-zoom keeps working. + if !self.niri.touchpad_pinch_latched { + if let Some(fingers) = self.niri.touchpad_pinch_fingers { + let scale = event.scale(); + let threshold = self + .niri + .config + .borrow() + .input + .touchpad + .pinch_trigger_scale(); + if (scale - 1.0).abs() > threshold { + let direction = if scale > 1.0 { + PinchDirection::Out + } else { + PinchDirection::In + }; + self.niri.touchpad_pinch_latched = true; + + let trigger = Trigger::TouchpadPinch { fingers, direction }; + let mods = self.niri.seat.get_keyboard().unwrap().modifier_state(); + let mod_key = self.backend.mod_key(&self.niri.config.borrow()); + let config = self.niri.config.borrow(); + let modifiers = modifiers_from_state(mods); + let bindings = + make_binds_iter(&config, &mut self.niri.window_mru_ui, modifiers); + let bind = find_configured_bind(bindings, mod_key, trigger, mods); + drop(config); + + if let Some(bind) = bind { + if let Some(ref tag) = bind.tag { + let trigger_name = + crate::input::touch_gesture::trigger_to_ipc_name(trigger); + self.ipc_gesture_begin( + tag.clone(), + trigger_name, + fingers, + false, // discrete + ); + self.ipc_gesture_end(tag.clone(), true); + } + self.do_action(bind.action, bind.allow_when_locked); + } + } + } + } + let pointer = 
self.niri.seat.get_pointer().unwrap(); if self.update_pointer_contents() { @@ -4028,6 +4333,9 @@ impl State { } fn on_gesture_pinch_end(&mut self, event: I::GesturePinchEndEvent) { + self.niri.touchpad_pinch_fingers = None; + self.niri.touchpad_pinch_latched = false; + let serial = SERIAL_COUNTER.next_serial(); let pointer = self.niri.seat.get_pointer().unwrap(); @@ -4046,6 +4354,13 @@ impl State { } fn on_gesture_hold_begin(&mut self, event: I::GestureHoldBeginEvent) { + let fingers = event.fingers(); + + // Track 3+ finger holds for touchpad tap detection. + if fingers >= 3 { + self.niri.touchpad_hold_begin = Some(fingers as u8); + } + let serial = SERIAL_COUNTER.next_serial(); let pointer = self.niri.seat.get_pointer().unwrap(); @@ -4058,12 +4373,52 @@ impl State { &GestureHoldBeginEvent { serial, time: event.time_msec(), - fingers: event.fingers(), + fingers, }, ); } fn on_gesture_hold_end(&mut self, event: I::GestureHoldEndEvent) { + // Touchpad tap detection: if the hold ended cleanly (fingers lifted + // without moving) and we were tracking a 3+ finger hold, fire the + // TouchpadTapHold bind. + if !event.cancelled() { + if let Some(fingers) = self.niri.touchpad_hold_begin.take() { + let trigger = Trigger::TouchpadTapHold { fingers }; + let mods = self.niri.seat.get_keyboard().unwrap().modifier_state(); + let mod_key = self.backend.mod_key(&self.niri.config.borrow()); + let config = self.niri.config.borrow(); + let modifiers = modifiers_from_state(mods); + let bindings = make_binds_iter(&config, &mut self.niri.window_mru_ui, modifiers); + let bind = find_configured_bind(bindings, mod_key, trigger, mods); + drop(config); + + if let Some(bind) = bind { + // Emit IPC GestureBegin + GestureEnd for tagged taps. 
+ if let Some(ref tag) = bind.tag { + let trigger_name = + crate::input::touch_gesture::trigger_to_ipc_name(trigger); + self.ipc_gesture_begin( + tag.clone(), + trigger_name, + fingers, + false, // taps are always discrete + ); + self.ipc_gesture_end(tag.clone(), true); + } + + self.do_action(bind.action, bind.allow_when_locked); + } + } + } else { + // Fingers moved — libinput promoted to swipe/pinch. + // Carry the finger count forward as a drag pending signal + // for the next SwipeBegin. + if let Some(fingers) = self.niri.touchpad_hold_begin.take() { + self.niri.touchpad_drag_pending = Some(fingers); + } + } + let serial = SERIAL_COUNTER.next_serial(); let pointer = self.niri.seat.get_pointer().unwrap(); @@ -4108,214 +4463,8 @@ impl State { self.compute_absolute_location(evt, self.niri.output_for_touch()) } - fn on_touch_down(&mut self, evt: I::TouchDownEvent) { - let Some(handle) = self.niri.seat.get_touch() else { - return; - }; - let Some(pos) = self.compute_touch_location(&evt) else { - return; - }; - let slot = evt.slot(); - - let serial = SERIAL_COUNTER.next_serial(); - - let under = self.niri.contents_under(pos); - - let mod_key = self.backend.mod_key(&self.niri.config.borrow()); - let mods = self.niri.seat.get_keyboard().unwrap().modifier_state(); - let mods = modifiers_from_state(mods); - let mod_down = mods.contains(mod_key.to_modifiers()); - - if self.niri.screenshot_ui.is_open() { - // If we'll be moving the existing selection, use the selection output. 
- let output = if mod_down { - self.niri.screenshot_ui.selection_output() - } else { - under.output.as_ref() - }; - - if let Some(output) = output.cloned() { - let geom = self.niri.global_space.output_geometry(&output).unwrap(); - let point = (pos - geom.loc.to_f64()) - .to_physical(output.current_scale().fractional_scale()) - .to_i32_round(); - - if self - .niri - .screenshot_ui - .pointer_down(output, point, Some(slot), mod_down) - { - self.niri.queue_redraw_all(); - } - } - } else if let Some(mru_output) = self.niri.window_mru_ui.output() { - if let Some((output, pos_within_output)) = self.niri.output_under(pos) { - if mru_output == output { - let id = self.niri.window_mru_ui.pointer_motion(pos_within_output); - if id.is_some() { - self.confirm_mru(); - } else { - self.niri.cancel_mru(); - } - } else { - self.niri.cancel_mru(); - } - } - } else if !handle.is_grabbed() { - if self.niri.layout.is_overview_open() - && !mod_down - && under.layer.is_none() - && under.output.is_some() - { - let (output, pos_within_output) = self.niri.output_under(pos).unwrap(); - let output = output.clone(); - - let mut matched_narrow = true; - let mut ws = self.niri.workspace_under(false, pos); - if ws.is_none() { - matched_narrow = false; - ws = self.niri.workspace_under(true, pos); - } - let ws_id = ws.map(|(_, ws)| ws.id()); - - let mapped = self.niri.window_under(pos); - let window = mapped.map(|mapped| mapped.window.clone()); - - let start_data = TouchGrabStartData { - focus: None, - slot, - location: pos, - }; - let start_timestamp = Duration::from_micros(evt.time()); - let grab = TouchOverviewGrab::new( - start_data, - start_timestamp, - output, - pos_within_output, - ws_id, - matched_narrow, - window, - ); - handle.set_grab(self, grab, serial); - } else if let Some((window, _)) = under.window { - self.niri.layout.activate_window(&window); - - // Check if we need to start a touch move grab. 
- if mod_down { - let start_data = TouchGrabStartData { - focus: None, - slot, - location: pos, - }; - let start_data = PointerOrTouchStartData::Touch(start_data); - if let Some(grab) = MoveGrab::new(self, start_data, window.clone(), true, None) - { - handle.set_grab(self, grab, serial); - } - } - - // FIXME: granular. - self.niri.queue_redraw_all(); - } else if let Some(output) = under.output { - self.niri.layout.focus_output(&output); - - // FIXME: granular. - self.niri.queue_redraw_all(); - } - self.niri.focus_layer_surface_if_on_demand(under.layer); - }; - - handle.down( - self, - under.surface, - &DownEvent { - slot, - location: pos, - serial, - time: evt.time_msec(), - }, - ); - - // We're using touch, hide the pointer. - self.niri.pointer_visibility = PointerVisibility::Disabled; - } - fn on_touch_up(&mut self, evt: I::TouchUpEvent) { - let Some(handle) = self.niri.seat.get_touch() else { - return; - }; - let slot = evt.slot(); - - if let Some(capture) = self.niri.screenshot_ui.pointer_up(Some(slot)) { - if capture { - self.confirm_screenshot(true); - } else { - self.niri.queue_redraw_all(); - } - } - - let serial = SERIAL_COUNTER.next_serial(); - handle.up( - self, - &UpEvent { - slot, - serial, - time: evt.time_msec(), - }, - ) - } - fn on_touch_motion(&mut self, evt: I::TouchMotionEvent) { - let Some(handle) = self.niri.seat.get_touch() else { - return; - }; - let Some(pos) = self.compute_touch_location(&evt) else { - return; - }; - let slot = evt.slot(); - - if let Some(output) = self.niri.screenshot_ui.selection_output().cloned() { - let geom = self.niri.global_space.output_geometry(&output).unwrap(); - let point = (pos - geom.loc.to_f64()) - .to_physical(output.current_scale().fractional_scale()) - .to_i32_round::(); - - self.niri.screenshot_ui.pointer_motion(point, Some(slot)); - self.niri.queue_redraw(&output); - } - - let under = self.niri.contents_under(pos); - handle.motion( - self, - under.surface, - &TouchMotionEvent { - slot, - location: pos, - 
time: evt.time_msec(), - }, - ); - - // Inform the layout of an ongoing DnD operation. - let is_dnd_grab = handle - .with_grab(|_, grab| Self::is_dnd_grab(grab.as_any())) - .unwrap_or(false); - if is_dnd_grab { - if let Some((output, pos_within_output)) = self.niri.output_under(pos) { - let output = output.clone(); - self.niri.layout.dnd_update(output, pos_within_output); - } - } - } - fn on_touch_frame(&mut self, _evt: I::TouchFrameEvent) { - let Some(handle) = self.niri.seat.get_touch() else { - return; - }; - handle.frame(self); - } - fn on_touch_cancel(&mut self, _evt: I::TouchCancelEvent) { - let Some(handle) = self.niri.seat.get_touch() else { - return; - }; - handle.cancel(self); - } + // Touch gesture handlers (on_touch_down, on_touch_up, on_touch_motion, + // on_touch_frame, on_touch_cancel) are in touch_gesture.rs. fn on_switch_toggle(&mut self, evt: I::SwitchToggleEvent) { let Some(switch) = evt.switch() else { @@ -4413,6 +4562,9 @@ fn should_intercept_key<'a>( // inhibited. allow_inhibiting: false, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }); } } @@ -4479,6 +4631,9 @@ fn find_bind<'a>( // Hardcoded binds must never be inhibited. 
allow_inhibiting: false, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }); } @@ -4486,7 +4641,7 @@ fn find_bind<'a>( find_configured_bind(bindings, mod_key, trigger, mods) } -fn find_configured_bind<'a>( +pub(super) fn find_configured_bind<'a>( bindings: impl IntoIterator, mod_key: ModKey, trigger: Trigger, @@ -4717,6 +4872,9 @@ fn hardcoded_overview_bind(raw: Keysym, mods: ModifiersState) -> Option { allow_when_locked: false, allow_inhibiting: false, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }) } @@ -4987,7 +5145,7 @@ pub fn apply_libinput_settings(config: &niri_config::Input, device: &mut input:: let is_touch = device.has_capability(input::DeviceCapability::Touch); if is_touch { - let c = &config.touch; + let c = &config.touchscreen; let _ = device.config_send_events_set_mode(if c.off { input::SendEventsMode::DISABLED } else { @@ -5069,6 +5227,25 @@ pub fn mods_with_finger_scroll_binds(mod_key: ModKey, binds: &Binds) -> HashSet< ) } +fn swipe_trigger(fingers: usize, is_horizontal: bool, cx: f64, cy: f64) -> Option { + let Ok(fingers_u8) = u8::try_from(fingers) else { + return None; + }; + if !(MIN_FINGERS..=MAX_FINGERS).contains(&fingers_u8) { + return None; + } + let direction = match (is_horizontal, cx, cy) { + (true, cx, _) if cx > 0. => SwipeDirection::Right, + (true, _, _) => SwipeDirection::Left, + (false, _, cy) if cy > 0. 
=> SwipeDirection::Down, + (false, _, _) => SwipeDirection::Up, + }; + Some(Trigger::TouchpadSwipe { + fingers: fingers_u8, + direction, + }) +} + fn grab_allows_hot_corner(grab: &(dyn PointerGrab + 'static)) -> bool { let grab = grab.as_any(); @@ -5138,6 +5315,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }]); let comp_mod = ModKey::Super; @@ -5324,6 +5504,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -5336,6 +5519,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -5348,6 +5534,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -5360,6 +5549,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, Bind { key: Key { @@ -5372,6 +5564,9 @@ mod tests { allow_when_locked: false, allow_inhibiting: true, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }, ]); diff --git a/src/input/move_grab.rs b/src/input/move_grab.rs index 8834b10087..b790383632 100644 --- a/src/input/move_grab.rs +++ b/src/input/move_grab.rs @@ -478,9 +478,15 @@ impl TouchGrab for MoveGrab { return; } - if !self.on_toggle_floating(data) { - handle.unset_grab(self, data); - } + // Second finger landed: cancel the move grab so the multi-finger + // gesture recognizer can take over. 
On mouse, the second button + // toggles floating (see `button` impl above), but on touch a new + // finger almost always means "this is becoming a multi-finger + // gesture" rather than a deliberate float toggle. The points are + // already tracked in `touch_gesture_points` (inserted in + // `on_touch_down` before this call), so the recognizer picks up + // seamlessly once the grab releases. + handle.unset_grab(self, data); } fn up( diff --git a/src/input/touch_gesture.rs b/src/input/touch_gesture.rs new file mode 100644 index 0000000000..65389c3b18 --- /dev/null +++ b/src/input/touch_gesture.rs @@ -0,0 +1,2328 @@ +//! Touchscreen gesture handling. +//! +//! This file handles **touchscreen** (finger-on-screen) gestures only. +//! Touchpad/trackpad gestures are handled separately in `input/mod.rs` +//! via `on_gesture_swipe_*` using libinput gesture events. +//! +//! Naming convention (follows upstream niri): +//! `touch_*` fields on Niri → touchscreen +//! `gesture_swipe_*` fields → touchpad/trackpad +//! +//! Gesture types: +//! - Multi-finger (3+): any action via touch-binds (swipe, pinch) +//! - Edge swipe (1+): touch starts in screen edge zone +//! +//! Actions are mapped via `binds {}` in the KDL config. +//! The compositor infers whether an action is continuous (drives an +//! animation that tracks the finger) or discrete (fires once). +//! +//! IPC gesture events: +//! Tagged binds (`tag="name"`) emit GestureBegin/Progress/End events +//! on the IPC event stream, allowing external tools to observe or +//! drive custom animations. The `noop` action consumes a gesture +//! for IPC without triggering any compositor action. +//! +//! Note on Mod+touch: On touchscreens, touch serves double duty as +//! both click and gesture input. Mod+touch triggers window move/resize +//! grabs (hardcoded), so Mod+Touch* gesture binds can conflict with +//! Mod+click behavior when fingers land before the gesture threshold. 
+ +use std::cmp::min; +use std::time::{Duration, Instant}; + +use smithay::backend::input::{Event as _, TouchEvent}; +use smithay::input::touch::{ + DownEvent, GrabStartData as TouchGrabStartData, MotionEvent as TouchMotionEvent, UpEvent, +}; +use smithay::utils::SERIAL_COUNTER; + +use super::backend_ext::NiriInputBackend as InputBackend; +use super::move_grab::MoveGrab; +use super::touch_overview_grab::TouchOverviewGrab; +use super::{find_configured_bind, modifiers_from_state, PointerOrTouchStartData}; +use niri_config::binds::{ + PinchDirection, RotateDirection, SwipeDirection, Trigger, MAX_FINGERS, MIN_FINGERS, +}; +use niri_config::input::{EdgeZone, ScreenEdge}; +use niri_config::touch_binds::{continuous_gesture_kind, ContinuousGestureKind, TouchGestureType}; +use niri_config::Action; +use niri_ipc::GestureDelta; + +use crate::layout::LayoutElement; +use crate::niri::{ActiveTouchBind, PointerVisibility, State, TapCandidate, TouchEdgeSwipeState}; +use crate::utils::with_toplevel_role; + +/// Default sensitivity for touchscreen gestures (both edge and multi-finger). +/// Lower than touchpad (1.0) because touchscreen deltas are in screen pixels. +const TOUCH_DEFAULT_SENSITIVITY: f64 = 0.4; + +/// Extract gesture info from a matched bind: continuous kind, sensitivity, +/// natural scroll, tag, and action. +fn extract_bind_info( + bind: niri_config::Bind, +) -> ( + Option, + f64, + bool, + Option, + Action, +) { + let kind = continuous_gesture_kind(&bind.action); + let sensitivity = bind.sensitivity.unwrap_or(TOUCH_DEFAULT_SENSITIVITY); + ( + kind, + sensitivity, + bind.natural_scroll, + bind.tag, + bind.action, + ) +} + +impl State { + pub(super) fn on_touch_down(&mut self, evt: I::TouchDownEvent) { + let Some(handle) = self.niri.seat.get_touch() else { + return; + }; + let Some(pos) = self.compute_touch_location(&evt) else { + return; + }; + let slot = evt.slot(); + + // Track touch point for multi-finger gesture detection. 
+ let was_empty = self.niri.touch_gesture_points.is_empty(); + let was_single = self.niri.touch_gesture_points.len() == 1; + self.niri.touch_gesture_points.insert(Some(slot), pos); + + // When ANY new finger arrives, reset cumulative and spread so + // detection is based on movement with the current finger count. + // If the gesture was already locked (direction decided with fewer + // fingers), unlock and re-evaluate — this allows 5-finger gestures + // to override a 3-finger decision when more fingers land. + if !was_empty { + if self.niri.touch_gesture_locked { + tracing::debug!( + target: "niri::input::touch_gesture", + "TOUCH-DBG UNLOCK reason=new-finger was_locked=true now={}", + self.niri.touch_gesture_points.len(), + ); + // Unlock: end current gesture animations, restart recognition. + // If the gesture being interrupted was tagged, emit GestureEnd + // with completed=false — a consumer that received GestureBegin + // is contractually owed a matching GestureEnd even when the + // gesture is cancelled by a new finger landing. + self.niri.touch_gesture_locked = false; + let cancelled_tag = self + .niri + .touch_active_bind + .take() + .and_then(ActiveTouchBind::into_tag); + self.niri.layout.workspace_switch_gesture_end(Some(false)); + self.niri.layout.view_offset_gesture_end(Some(false)); + self.niri.layout.overview_gesture_end(); + if let Some(tag) = cancelled_tag { + self.ipc_gesture_end(tag, false); + } + } + self.niri.touch_gesture_cumulative = Some((0., 0.)); + if self.niri.touch_gesture_points.len() >= 3 { + self.niri.touch_gesture_initial_spread = + Some(calculate_spread(&self.niri.touch_gesture_points)); + // Initialize rotation tracking basis: record the current + // per-slot angles so the next motion frame can compute a + // finite delta, and zero the cumulative so recognition + // decisions see a fresh gesture. 
+ self.niri.touch_gesture_cumulative_rotation = 0.0; + self.niri.touch_gesture_previous_angles = + calculate_per_slot_angles(&self.niri.touch_gesture_points); + } + tracing::debug!( + target: "niri::input::touch_gesture", + "TOUCH-DBG FINGER-LAND fingers={} reset=recognition", + self.niri.touch_gesture_points.len(), + ); + + // Tap candidate tracking: initialize when finger count reaches 3, + // update peak_fingers when more fingers land. Runs in parallel + // with swipe/pinch/rotate recognition. Passthrough windows skip + // tap detection (same as swipe). + let finger_count = self.niri.touch_gesture_points.len(); + if finger_count >= 3 && !self.niri.touchscreen_gesture_passthrough { + if let Some(ref mut tap) = self.niri.touch_tap_candidate { + // More fingers landed — update peak and record new position. + if tap.alive { + tap.peak_fingers = tap.peak_fingers.max(finger_count as u8); + tap.initial_positions.insert(Some(slot), pos); + } + } else { + // First time reaching 3+ fingers — start tap candidate. + self.niri.touch_tap_candidate = Some(TapCandidate { + start_time: Instant::now(), + peak_fingers: finger_count as u8, + initial_positions: self.niri.touch_gesture_points.clone(), + alive: true, + }); + tracing::debug!( + target: "niri::input::touch_gesture", + "TOUCH-DBG TAP started fingers={}", + finger_count, + ); + } + } + } + + // First finger: check if it's in a screen edge zone for edge swipe detection. + if was_empty && self.niri.touch_edge_swipe.is_none() { + if let Some((output, pos_within_output)) = self.niri.output_under(pos) { + let size = self.niri.global_space.output_geometry(output).unwrap().size; + let config = self.niri.config.borrow(); + let threshold = config.input.touchscreen.edge_start_distance(); + if let Some((edge, zone)) = detect_edge(pos_within_output, size, threshold) { + // Lookup order: zoned trigger first, then unzoned parent + // as fallback. 
`zoned` records which one hit so that all + // downstream lookups and the IPC name emitted on + // gesture-begin stay consistent with the bind that fired. + let mod_key = self.backend.mod_key(&config); + let mods = self.niri.seat.get_keyboard().unwrap().modifier_state(); + let zoned_trigger = Trigger::TouchEdge { + edge, + zone: Some(zone), + }; + let parent_trigger = Trigger::TouchEdge { edge, zone: None }; + let zoned_hit = + find_configured_bind(config.binds.0.iter(), mod_key, zoned_trigger, mods) + .is_some(); + let parent_hit = !zoned_hit + && find_configured_bind( + config.binds.0.iter(), + mod_key, + parent_trigger, + mods, + ) + .is_some(); + if zoned_hit || parent_hit { + self.niri.touch_edge_swipe = Some(TouchEdgeSwipeState::Pending { + edge, + zone, + zoned: zoned_hit, + cumulative: (0., 0.), + slot: Some(slot), + }); + } + } + } + } + + // When second finger arrives, start cumulative tracking for gesture recognition + // (unless an edge swipe is pending/active — edge swipe takes priority). + if was_single + && self.niri.touch_gesture_points.len() == 2 + && self.niri.touch_edge_swipe.is_none() + { + self.niri.touch_gesture_cumulative = Some((0., 0.)); + } + + // Check if we're tracking a multi-finger gesture (3+ fingers), + // a locked gesture (direction decided), or an edge swipe. If so, + // this event is not forwarded to the client. If earlier fingers + // in the same sequence WERE forwarded (first two fingers pre- + // transition), they are terminated via `up` + `wl_touch.cancel` + // at the transition in the `else` branch below so the client + // doesn't hold them as phantom down touches. + // Passthrough mode overrides — when set, the whole gesture + // forwards raw to the client regardless of finger count. 
+ let tracking_gesture = (self.niri.touch_gesture_points.len() > 2 + || self.niri.touch_gesture_locked) + && !self.niri.touchscreen_gesture_passthrough; + let in_edge_zone = self.niri.touch_edge_swipe.is_some(); + + let serial = SERIAL_COUNTER.next_serial(); + + let under = self.niri.contents_under(pos); + + let mod_key = self.backend.mod_key(&self.niri.config.borrow()); + + // Touchscreen gesture passthrough: if this is the first finger and it + // landed on a window whose rule opts into passthrough, flip the state + // flag so the recognizer stays out of the way for the whole gesture. + // Mod+ always bypasses (escape hatch — user explicitly asked for a + // compositor action). Edge zones also take priority and are handled + // above, so a passthrough window in a screen-edge zone still yields + // the edge swipe to niri. + if was_empty && !in_edge_zone && !self.niri.touchscreen_gesture_passthrough { + let mods = self.niri.seat.get_keyboard().unwrap().modifier_state(); + let mods = modifiers_from_state(mods); + let mod_down = mods.contains(mod_key.to_modifiers()); + if !mod_down { + if let Some(mapped) = self.niri.window_under(pos) { + if mapped.rules().touchscreen_gesture_passthrough == Some(true) { + self.niri.touchscreen_gesture_passthrough = true; + } + } + } + } + + if in_edge_zone { + // Edge zone touch — skip window activation and client forwarding. + // The gesture will either activate (swipe) or cancel (lift = no-op). 
+ } else if self.niri.screenshot_ui.is_open() { + if let Some(output) = under.output.clone() { + let geom = self.niri.global_space.output_geometry(&output).unwrap(); + let mut point = (pos - geom.loc.to_f64()) + .to_physical(output.current_scale().fractional_scale()) + .to_i32_round(); + + let size = output.current_mode().unwrap().size; + let transform = output.current_transform(); + let size = transform.transform_size(size); + point.x = min(size.w - 1, point.x); + point.y = min(size.h - 1, point.y); + + if self + .niri + .screenshot_ui + .pointer_down(output, point, Some(slot), false) + { + self.niri.queue_redraw_all(); + } + } + } else if let Some(mru_output) = self.niri.window_mru_ui.output() { + if let Some((output, pos_within_output)) = self.niri.output_under(pos) { + if mru_output == output { + let id = self.niri.window_mru_ui.pointer_motion(pos_within_output); + if id.is_some() { + self.confirm_mru(); + } else { + self.niri.cancel_mru(); + } + } else { + self.niri.cancel_mru(); + } + } + } else if !handle.is_grabbed() { + let mods = self.niri.seat.get_keyboard().unwrap().modifier_state(); + let mods = modifiers_from_state(mods); + let mod_down = mods.contains(mod_key.to_modifiers()); + + if self.niri.layout.is_overview_open() + && !mod_down + && under.layer.is_none() + && under.output.is_some() + { + let (output, pos_within_output) = self.niri.output_under(pos).unwrap(); + let output = output.clone(); + + let mut matched_narrow = true; + let mut ws = self.niri.workspace_under(false, pos); + if ws.is_none() { + matched_narrow = false; + ws = self.niri.workspace_under(true, pos); + } + let ws_id = ws.map(|(_, ws)| ws.id()); + + let mapped = self.niri.window_under(pos); + let window = mapped.map(|mapped| mapped.window.clone()); + + let start_data = TouchGrabStartData { + focus: None, + slot, + location: pos, + }; + let start_timestamp = Duration::from_micros(evt.time()); + let grab = TouchOverviewGrab::new( + start_data, + start_timestamp, + output, + 
pos_within_output, + ws_id, + matched_narrow, + window, + ); + handle.set_grab(self, grab, serial); + } else if let Some((window, _)) = under.window { + self.niri.layout.activate_window(&window); + + // Check if we need to start a touch move grab. + if mod_down { + let start_data = TouchGrabStartData { + focus: None, + slot, + location: pos, + }; + let start_data = PointerOrTouchStartData::Touch(start_data); + if let Some(grab) = MoveGrab::new(self, start_data, window.clone(), true, None) + { + handle.set_grab(self, grab, serial); + } + } + + // FIXME: granular. + self.niri.queue_redraw_all(); + } else if let Some(output) = under.output { + self.niri.layout.focus_output(&output); + + // FIXME: granular. + self.niri.queue_redraw_all(); + } + self.niri.focus_layer_surface_if_on_demand(under.layer); + }; + + // Only forward to client if not tracking a multi-finger gesture or edge swipe. + if !tracking_gesture && !in_edge_zone { + tracing::debug!( + target: "niri::input::touch_gesture", + "TOUCH-DBG FORWARD slot={:?} has_surface={}", + slot, + under.surface.is_some(), + ); + handle.down( + self, + under.surface, + &DownEvent { + slot, + location: pos, + serial, + time: evt.time_msec(), + }, + ); + self.niri.touch_forwarded_slots.insert(slot); + } else { + tracing::debug!( + target: "niri::input::touch_gesture", + "TOUCH-DBG BLOCKED slot={:?} tracking_gesture={} in_edge_zone={}", + slot, + tracking_gesture, + in_edge_zone, + ); + // Transition into gesture tracking — if earlier fingers in this + // sequence were already forwarded to a client as wl_touch.down, + // their matching .up events will be suppressed by this same + // gate. Emit explicit wl_touch.up for each forwarded slot AND + // wl_touch.cancel so the client can't hold them as phantoms. 
+ if !self.niri.touch_forwarded_slots.is_empty() { + let forwarded: Vec<_> = self.niri.touch_forwarded_slots.drain().collect(); + tracing::debug!( + target: "niri::input::touch_gesture", + "TOUCH-DBG CANCEL-CLIENT reason=gesture-start trigger_slot={:?} points={} forwarded_slots={:?}", + slot, + self.niri.touch_gesture_points.len(), + forwarded, + ); + for fwd_slot in forwarded { + let up_serial = SERIAL_COUNTER.next_serial(); + handle.up( + self, + &UpEvent { + slot: fwd_slot, + serial: up_serial, + time: evt.time_msec(), + }, + ); + } + handle.cancel(self); + handle.frame(self); + } + } + + // We're using touch, hide the pointer. + self.niri.pointer_visibility = PointerVisibility::Disabled; + } + + pub(super) fn on_touch_up(&mut self, evt: I::TouchUpEvent) { + let Some(handle) = self.niri.seat.get_touch() else { + return; + }; + let slot = evt.slot(); + + // Handle edge swipe state on finger lift. + if let Some(ref state) = self.niri.touch_edge_swipe { + match state { + TouchEdgeSwipeState::Pending { + slot: edge_slot, .. + } => { + if Some(slot) == *edge_slot { + // Finger lifted before threshold — normal tap, clear state. + self.niri.touch_edge_swipe = None; + } + } + TouchEdgeSwipeState::Active { kind, tag, .. } => { + let kind = *kind; + let tag = tag.clone(); + self.niri.touch_edge_swipe = None; + // End the gesture animation. + end_continuous_gesture(self, kind); + // Emit IPC GestureEnd for tagged edge swipe. + if let Some(tag) = tag { + self.ipc_gesture_end(tag, true); + } + self.niri.touch_gesture_points.remove(&Some(slot)); + return; + } + } + } + + // Check if we're tracking a multi-finger gesture before removing this touch point. + // Passthrough gestures forward all up events to the client regardless of finger count. + let tracking_gesture = (self.niri.touch_gesture_points.len() > 2 + || self.niri.touch_gesture_locked) + && !self.niri.touchscreen_gesture_passthrough; + + // Remove touch point from gesture tracking. 
+ self.niri.touch_gesture_points.remove(&Some(slot)); + + // Pinch basis rebase on finger-lift. + // + // `calculate_spread()` is a purely geometric quantity of the point + // set (average distance from centroid). When a finger lifts, the + // set changes and the spread can jump by hundreds of pixels in a + // single event — not because fingers moved, but because the + // geometry did. Feeding that spurious delta into the SwipeTracker + // contaminates both `pos()` and `velocity()` and causes + // `projected_end_pos` to overshoot the commit threshold, which is + // why pinch gestures were always snapping to overview-open + // regardless of direction. + // + // Fix: rebase `last_spread` to the post-removal spread so the next + // motion event computes `incremental ≈ 0` across the + // discontinuity. Shift `start_spread` by the same delta so the IPC + // absolute offset `(current - start)` stays continuous for + // tagged consumers. + if self.niri.touch_gesture_locked { + if let Some(ActiveTouchBind::Pinch { + start_spread, + last_spread, + .. + }) = self.niri.touch_active_bind.as_mut() + { + let new_spread = calculate_spread(&self.niri.touch_gesture_points); + let shift = new_spread - *last_spread; + *last_spread = new_spread; + *start_spread += shift; + } + } + + // Rotation basis rebase on finger-lift. + // + // Same hazard as the pinch rebase above, different metric. When a + // finger lifts, the cluster centroid shifts and the per-slot angles + // computed relative to the new centroid can differ from the old + // ones by tens of degrees — not because fingers rotated, but + // because the reference point moved. The next motion frame would + // compute a spurious rotation delta from that discontinuity and + // feed it into the animation. + // + // Fix: overwrite `previous_angles` with fresh angles taken against + // the post-removal centroid. No delta is accumulated for this step; + // the next real motion event starts fresh. 
Because `ipc_progress` + // for rotation is computed as + // `(cumulative_rotation - start_rotation) / progress_distance`, + // leaving both values untouched keeps the IPC progress continuous + // across the discontinuity with no need for a compensating shift. + // + // This rebase applies whether the active bind is Rotate (mid-gesture + // finger lift of an active rotation) OR another variant (unlocked + // recognition phase with 3+ fingers still down, where rotation may + // still become the chosen classification on the next frame). + if !self.niri.touch_gesture_points.is_empty() { + self.niri.touch_gesture_previous_angles = + calculate_per_slot_angles(&self.niri.touch_gesture_points); + } + + // Spread basis rebase on finger-lift (recognition phase only). + // + // `spread_change = (current_spread - initial_spread).abs()` is the + // signal pinch recognition latches on. When a finger lifts during + // recognition, `current_spread` jumps because the geometry changed, + // not because fingers moved — and the jump typically exceeds + // `pinch_trigger_distance` immediately, causing a spurious + // PinchIn/PinchOut lock on the very next frame. This was visible in debug logs as + // users trying to retry a 5-finger rotation by lifting one finger + // and ending up with an unwanted PinchIn at fingers=4. + // + // Fix: during unlocked recognition, rebase `initial_spread` to the + // post-removal geometry so `spread_change` resets to zero across + // the discontinuity. Only applies while unlocked — once a pinch + // is already active the rebase above (at the `ActiveTouchBind::Pinch` + // branch) handles the locked case with continuous IPC progress. + if !self.niri.touch_gesture_points.is_empty() + && !self.niri.touch_gesture_locked + && self.niri.touch_gesture_points.len() >= 3 + { + self.niri.touch_gesture_initial_spread = + Some(calculate_spread(&self.niri.touch_gesture_points)); + } + + // End gesture when all fingers are lifted. 
+ if self.niri.touch_gesture_points.is_empty() { + // Tap detection: if the candidate is still alive and within + // the timeout, fire the TouchTap trigger. + if let Some(tap) = self.niri.touch_tap_candidate.take() { + if tap.alive && !self.niri.touch_gesture_locked { + let elapsed_ms = tap.start_time.elapsed().as_millis() as f64; + let timeout = { + let config = self.niri.config.borrow(); + config.input.touchscreen.tap_timeout_ms() + }; + if elapsed_ms <= timeout { + let trigger = Trigger::TouchTap { + fingers: tap.peak_fingers, + }; + let bind_info = { + let config = self.niri.config.borrow(); + let mod_key = self.backend.mod_key(&config); + let mods = self.niri.seat.get_keyboard().unwrap().modifier_state(); + find_configured_bind(config.binds.0.iter(), mod_key, trigger, mods) + }; + let bind_matched = bind_info.is_some(); + tracing::debug!( + target: "niri::input::touch_gesture", + "TOUCH-DBG TAP fired fingers={} bind={} elapsed={:.0}ms", + tap.peak_fingers, + if bind_matched { "yes" } else { "no" }, + elapsed_ms, + ); + if let Some(bind) = bind_info { + let tag = bind.tag.clone(); + let trigger_name = format!("TouchTap fingers={}", tap.peak_fingers,); + // Emit GestureBegin + immediate GestureEnd for IPC. 
+ self.ipc_gesture_begin( + tag.clone().unwrap_or_default(), + trigger_name, + tap.peak_fingers, + false, + ); + if !matches!(bind.action, Action::Noop) { + self.do_action(bind.action, false); + } + self.ipc_gesture_end(tag.unwrap_or_default(), true); + } + } else { + tracing::debug!( + target: "niri::input::touch_gesture", + "TOUCH-DBG TAP killed reason=timeout elapsed={:.0}ms", + elapsed_ms, + ); + } + } + } + + self.niri.touch_gesture_cumulative = None; + self.niri.touch_gesture_locked = false; + self.niri.touch_forwarded_slots.clear(); + self.niri.touchscreen_gesture_passthrough = false; + self.niri.touch_frame_dirty = false; + self.niri.touch_frame_delta = (0., 0.); + self.niri.touch_frame_edge_delta = (0., 0.); + // Take the active bind to get the tag before clearing. + // We track `had_active` separately so we can emit GestureEnd + // even for untagged binds (debug tools rely on it). + let active_bind = self.niri.touch_active_bind.take(); + let had_active = active_bind.is_some(); + let active_tag = active_bind.and_then(ActiveTouchBind::into_tag); + self.niri.touch_gesture_initial_spread = None; + self.niri.touch_gesture_cumulative_rotation = 0.0; + self.niri.touch_gesture_previous_angles.clear(); + + // End any ongoing gesture animations. + if let Some(output) = self.niri.layout.workspace_switch_gesture_end(Some(true)) { + self.niri.queue_redraw(&output); + } + if let Some(output) = self.niri.layout.view_offset_gesture_end(Some(true)) { + self.niri.queue_redraw(&output); + } + self.niri.layout.overview_gesture_end(); + + // Emit IPC GestureEnd for every committed multi-finger + // gesture, tagged or not — empty tag for untagged binds. 
+ if had_active { + self.ipc_gesture_end(active_tag.unwrap_or_default(), true); + } + } + + if let Some(capture) = self.niri.screenshot_ui.pointer_up(Some(slot)) { + if capture { + self.confirm_screenshot(true); + } else { + self.niri.queue_redraw_all(); + } + } + + // Only forward to client if not tracking a multi-finger gesture. + if !tracking_gesture { + let serial = SERIAL_COUNTER.next_serial(); + handle.up( + self, + &UpEvent { + slot, + serial, + time: evt.time_msec(), + }, + ) + } + } + + pub(super) fn on_touch_motion(&mut self, evt: I::TouchMotionEvent) { + let Some(handle) = self.niri.seat.get_touch() else { + return; + }; + let Some(pos) = self.compute_touch_location(&evt) else { + return; + }; + let slot = evt.slot(); + + // Track touch gesture with 2+ fingers. Skipped entirely under + // touchscreen gesture passthrough so the whole motion stream forwards + // raw to the client. `touch_gesture_points` is left untouched — slot + // cleanup in on_touch_up will still clear it. + let mut gesture_handled = false; + let tracked_slot = if self.niri.touchscreen_gesture_passthrough { + None + } else { + self.niri.touch_gesture_points.get(&Some(slot)).copied() + }; + if let Some(old_pos) = tracked_slot { + // Calculate delta from this finger's movement. + let delta_x = pos.x - old_pos.x; + let delta_y = pos.y - old_pos.y; + + // Update stored position. + self.niri.touch_gesture_points.insert(Some(slot), pos); + + // Handle edge swipe gesture: accumulate deltas per-slot, + // defer threshold check and feed to on_touch_frame. + if let Some(ref mut state) = self.niri.touch_edge_swipe { + match state { + TouchEdgeSwipeState::Pending { + cumulative, + slot: edge_slot, + .. + } if Some(slot) == *edge_slot => { + cumulative.0 += delta_x; + cumulative.1 += delta_y; + } + TouchEdgeSwipeState::Active { + slot: edge_slot, .. + } if Some(slot) == *edge_slot => { + // Track edge slot's delta separately so the feed + // doesn't include other fingers' motion. 
+ self.niri.touch_frame_edge_delta.0 += delta_x; + self.niri.touch_frame_edge_delta.1 += delta_y; + gesture_handled = true; + } + TouchEdgeSwipeState::Active { .. } => { + gesture_handled = true; + } + _ => {} + } + } + + // Accumulate per-frame deltas for batched processing in + // on_touch_frame. Position update already happened above. + self.niri.touch_frame_delta.0 += delta_x; + self.niri.touch_frame_delta.1 += delta_y; + self.niri.touch_frame_dirty = true; + self.niri.touch_frame_timestamp = Duration::from_micros(evt.time()); + + // Tap wobble check runs per-motion (reads positions, cheap, + // and needs to kill the candidate as soon as any finger drifts). + if let Some(ref mut tap) = self.niri.touch_tap_candidate { + if tap.alive { + let wobble_threshold = { + let config = self.niri.config.borrow(); + config.input.touchscreen.tap_wobble_threshold() + }; + let wobble_sq = wobble_threshold * wobble_threshold; + let mut wobble_killed = false; + for (s, current_pos) in &self.niri.touch_gesture_points { + if let Some(initial) = tap.initial_positions.get(s) { + let dx = current_pos.x - initial.x; + let dy = current_pos.y - initial.y; + if dx * dx + dy * dy > wobble_sq { + wobble_killed = true; + break; + } + } + } + + if wobble_killed { + let peak_fingers = tap.peak_fingers; + tap.alive = false; + + // Check minimum hold duration before allowing + // tap-hold-drag. If fingers moved too quickly, + // skip hold-drag and let normal swipe recognition + // handle it. + let hold_delay_ms = { + let config = self.niri.config.borrow(); + config.input.touchscreen.tap_hold_trigger_delay_ms() + }; + let elapsed_ms = tap.start_time.elapsed().as_millis() as f64; + let hold_long_enough = elapsed_ms >= hold_delay_ms; + + // Compute centroid delta from initial positions for + // direction detection. 
+ let (mut cx, mut cy) = (0.0, 0.0); + let mut count = 0usize; + for (s, current_pos) in &self.niri.touch_gesture_points { + if let Some(initial) = tap.initial_positions.get(s) { + cx += current_pos.x - initial.x; + cy += current_pos.y - initial.y; + count += 1; + } + } + if count > 0 { + cx /= count as f64; + cy /= count as f64; + } + + // Check for TouchTapHoldDrag bind — only if held + // long enough to distinguish from a fast swipe. + let bind_info = if hold_long_enough { + let is_horizontal = cx.abs() > cy.abs(); + let direction = match (is_horizontal, cx, cy) { + (true, cx, _) if cx > 0.0 => SwipeDirection::Right, + (true, _, _) => SwipeDirection::Left, + (false, _, cy) if cy > 0.0 => SwipeDirection::Down, + _ => SwipeDirection::Up, + }; + + let config = self.niri.config.borrow(); + let mod_key = self.backend.mod_key(&config); + let mods = self.niri.seat.get_keyboard().unwrap().modifier_state(); + // Try directional first. + let directional = Trigger::TouchTapHoldDrag { + fingers: peak_fingers, + direction: Some(direction), + }; + let bind = find_configured_bind( + config.binds.0.iter(), + mod_key, + directional, + mods, + ); + if bind.is_some() { + bind.map(|b| (extract_bind_info(b), directional)) + } else { + // Fall back to omnidirectional. + let omni = Trigger::TouchTapHoldDrag { + fingers: peak_fingers, + direction: None, + }; + find_configured_bind(config.binds.0.iter(), mod_key, omni, mods) + .map(|b| (extract_bind_info(b), omni)) + } + } else { + None + }; + + if let Some(((kind, sensitivity, natural_scroll, tag, action), trigger)) = + bind_info + { + tracing::debug!( + target: "niri::input::touch_gesture", + "TOUCH-DBG TAP killed reason=wobble → \ + TouchTapHoldDrag fingers={} trigger={:?} \ + hold={:.0}ms", + peak_fingers, trigger, elapsed_ms, + ); + + // Lock the gesture so normal swipe/pinch/rotate + // recognition doesn't also fire. 
+ self.niri.touch_gesture_locked = true; + self.niri.touch_gesture_cumulative = None; + let handle = self.niri.seat.get_touch().unwrap(); + handle.cancel(self); + + let trigger_name = trigger_to_ipc_name(trigger); + self.ipc_gesture_begin( + tag.clone().unwrap_or_default(), + trigger_name, + peak_fingers, + kind.is_some(), + ); + + if let Some(kind) = kind { + begin_continuous_gesture(self, kind, pos); + let active = ActiveTouchBind::Swipe { + kind, + sensitivity, + natural_scroll, + tag, + ipc_progress: 0.0, + }; + self.niri.touch_active_bind = Some(active); + } else { + if !matches!(action, Action::Noop) { + self.do_action(action, false); + } + self.ipc_gesture_end(tag.clone().unwrap_or_default(), true); + } + } else { + tracing::debug!( + target: "niri::input::touch_gesture", + "TOUCH-DBG TAP killed reason=wobble \ + hold={:.0}ms (need {:.0}ms for hold-drag)", + elapsed_ms, hold_delay_ms, + ); + } + } + } + } + + // Suppress client forwarding if we're in a multi-finger gesture + // or an active edge swipe. Processing happens in on_touch_frame. + let finger_count = self.niri.touch_gesture_points.len(); + let gesture_active = finger_count >= 3 || self.niri.touch_gesture_locked; + if gesture_active && self.niri.touch_edge_swipe.is_none() { + gesture_handled = true; + } + } + + if let Some(output) = self.niri.screenshot_ui.selection_output().cloned() { + let geom = self.niri.global_space.output_geometry(&output).unwrap(); + let mut point = (pos - geom.loc.to_f64()) + .to_physical(output.current_scale().fractional_scale()) + .to_i32_round::(); + + let size = output.current_mode().unwrap().size; + let transform = output.current_transform(); + let size = transform.transform_size(size); + point.x = point.x.clamp(0, size.w - 1); + point.y = point.y.clamp(0, size.h - 1); + + self.niri.screenshot_ui.pointer_motion(point, Some(slot)); + self.niri.queue_redraw(&output); + } + + // Only forward to client if not handling a multi-finger gesture. 
+ if !gesture_handled { + let under = self.niri.contents_under(pos); + handle.motion( + self, + under.surface, + &TouchMotionEvent { + slot, + location: pos, + time: evt.time_msec(), + }, + ); + } + + // Inform the layout of an ongoing DnD operation. + let is_dnd_grab = handle + .with_grab(|_, grab| Self::is_dnd_grab(grab.as_any())) + .unwrap_or(false); + if is_dnd_grab { + if let Some((output, pos_within_output)) = self.niri.output_under(pos) { + let output = output.clone(); + self.niri.layout.dnd_update(output, pos_within_output); + } + } + } + + pub(super) fn on_touch_frame(&mut self, _evt: I::TouchFrameEvent) { + let Some(handle) = self.niri.seat.get_touch() else { + return; + }; + + // Process batched touch motion events. All per-slot position updates + // and delta accumulation happened in on_touch_motion; here we run + // the expensive processing once per hardware scan frame instead of + // once per finger. + if self.niri.touch_frame_dirty { + self.niri.touch_frame_dirty = false; + let delta_x = self.niri.touch_frame_delta.0; + let delta_y = self.niri.touch_frame_delta.1; + self.niri.touch_frame_delta = (0., 0.); + let timestamp = self.niri.touch_frame_timestamp; + + // Compute centroid for use in lock transition (window_under, + // begin_continuous_gesture). More correct than the last-moved + // finger's position for multi-finger gestures. + let pos = { + let points = &self.niri.touch_gesture_points; + if points.is_empty() { + smithay::utils::Point::from((0., 0.)) + } else { + let n = points.len() as f64; + let (sx, sy) = points + .values() + .fold((0., 0.), |(ax, ay), p| (ax + p.x, ay + p.y)); + smithay::utils::Point::from((sx / n, sy / n)) + } + }; + + // Edge swipe: threshold check (Pending → Active) and active feed. + // These were deferred from on_touch_motion to avoid per-slot feeds. + if let Some(ref state) = self.niri.touch_edge_swipe { + match state { + TouchEdgeSwipeState::Pending { + edge, + zone, + zoned, + cumulative, + slot: edge_slot, + .. 
+ } => { + let edge = *edge; + let zone = *zone; + let zoned = *zoned; + let (cx, cy) = *cumulative; + let edge_slot = *edge_slot; + let threshold = { + let config = self.niri.config.borrow(); + config.input.touchscreen.swipe_trigger_distance() + }; + + if cx * cx + cy * cy >= threshold * threshold { + let trigger = Trigger::TouchEdge { + edge, + zone: if zoned { Some(zone) } else { None }, + }; + let bind_info = { + let config = self.niri.config.borrow(); + let mod_key = self.backend.mod_key(&config); + let mods = self.niri.seat.get_keyboard().unwrap().modifier_state(); + find_configured_bind(config.binds.0.iter(), mod_key, trigger, mods) + }; + let bind_info = bind_info.map(extract_bind_info); + + if let Some((kind, sensitivity, natural_scroll, tag, action)) = + bind_info + { + if let Some(ref tag) = tag { + let trigger_name = trigger_to_ipc_name(trigger); + self.ipc_gesture_begin( + tag.clone(), + trigger_name, + 1, + kind.is_some(), + ); + } + + if let Some(kind) = kind { + self.niri.touch_edge_swipe = + Some(TouchEdgeSwipeState::Active { + edge, + zone, + zoned, + kind, + sensitivity, + natural_scroll, + slot: edge_slot, + tag, + ipc_progress: 0.0, + }); + handle.cancel(self); + begin_continuous_gesture(self, kind, pos); + self.niri.queue_redraw_all(); + } else { + handle.cancel(self); + if !matches!(action, Action::Noop) { + self.do_action(action, false); + } + if let Some(ref tag) = tag { + self.ipc_gesture_end(tag.clone(), true); + } + self.niri.touch_edge_swipe = None; + } + } else { + self.niri.touch_edge_swipe = None; + } + } + } + TouchEdgeSwipeState::Active { + kind, + sensitivity, + natural_scroll, + tag, + .. + } => { + let kind = *kind; + let sensitivity = *sensitivity; + let natural = *natural_scroll; + let tag = tag.clone(); + // Use edge-slot-only delta, not the combined + // multi-finger delta. 
+ let (edge_dx, edge_dy) = self.niri.touch_frame_edge_delta; + self.niri.touch_frame_edge_delta = (0., 0.); + feed_continuous_gesture( + self, + kind, + edge_dx, + edge_dy, + sensitivity, + natural, + timestamp, + tag.as_deref(), + ); + } + } + } + + // Process multi-finger gesture (3+ fingers or locked), no edge swipe. + let finger_count = self.niri.touch_gesture_points.len(); + let gesture_active = finger_count >= 3 || self.niri.touch_gesture_locked; + if gesture_active && self.niri.touch_edge_swipe.is_none() { + // Feed ongoing continuous gesture if one is active. + if let Some(ref active) = self.niri.touch_active_bind { + match active { + ActiveTouchBind::Swipe { + kind, + sensitivity, + natural_scroll, + tag, + .. + } => { + let kind = *kind; + let sensitivity = *sensitivity; + let natural = *natural_scroll; + let tag = tag.clone(); + feed_continuous_gesture( + self, + kind, + delta_x, + delta_y, + sensitivity, + natural, + timestamp, + tag.as_deref(), + ); + } + ActiveTouchBind::Pinch { kind, tag, .. } => { + let kind = *kind; + let tag = tag.clone(); + feed_continuous_pinch(self, kind, timestamp, tag.as_deref()); + } + ActiveTouchBind::Rotate { kind, tag, .. } => { + let kind = *kind; + let tag = tag.clone(); + feed_continuous_rotation(self, kind, timestamp, tag.as_deref()); + } + } + } else if let Some((cx, cy)) = &mut self.niri.touch_gesture_cumulative { + // Recognition phase: accumulate batched deltas. 
+ *cx += delta_x; + *cy += delta_y; + + let finger_count_f = finger_count.max(1) as f64; + let (cx, cy) = (*cx / finger_count_f, *cy / finger_count_f); + let swipe_distance = (cx * cx + cy * cy).sqrt(); + + let (frame_rotation, new_angles) = calculate_rotation_delta( + &self.niri.touch_gesture_points, + &self.niri.touch_gesture_previous_angles, + ); + self.niri.touch_gesture_previous_angles = new_angles; + self.niri.touch_gesture_cumulative_rotation += frame_rotation; + + let (swipe_trigger, pinch_trigger, pinch_dom, rotation_trigger, rotation_dom) = { + let config = self.niri.config.borrow(); + ( + config + .input + .touchscreen + .scaled_swipe_trigger_distance(finger_count), + config.input.touchscreen.pinch_trigger_distance(), + config.input.touchscreen.pinch_dominance_ratio(), + config.input.touchscreen.rotation_trigger_angle(), + config.input.touchscreen.rotation_dominance_ratio(), + ) + }; + + let current_spread = calculate_spread(&self.niri.touch_gesture_points); + let initial_spread = self + .niri + .touch_gesture_initial_spread + .unwrap_or(current_spread); + let spread_change = (current_spread - initial_spread).abs(); + + let cumulative_rotation = self.niri.touch_gesture_cumulative_rotation; + let rotation_arc = cumulative_rotation.abs() * current_spread; + let rotation_arc_trigger_distance = rotation_trigger * current_spread; + + let is_rotate = finger_count >= 3 + && rotation_arc >= rotation_arc_trigger_distance + && rotation_arc >= swipe_distance * rotation_dom + && rotation_arc >= spread_change * rotation_dom; + + let is_pinch = spread_change > pinch_trigger + && spread_change > swipe_distance * pinch_dom + && !is_rotate; + + let closest = { + let swipe_frac = swipe_distance / swipe_trigger.max(1e-9); + let pinch_frac = spread_change / pinch_trigger.max(1e-9); + let rotate_frac = cumulative_rotation.abs() / rotation_trigger.max(1e-9); + if rotate_frac >= swipe_frac && rotate_frac >= pinch_frac { + "rotate" + } else if pinch_frac >= swipe_frac { + 
"pinch" + } else { + "swipe" + } + }; + tracing::trace!( + target: "niri::input::touch_gesture", + "TOUCH-DBG FRAME fingers={} \ + swipe={:.1}/{:.1} \ + spread={:.1}/{:.1} \ + rot={:.3}/{:.3}rad ({:.1}°) \ + arc={:.1} \ + is_rotate={} is_pinch={} closest={}", + finger_count, + swipe_distance, swipe_trigger, + spread_change, pinch_trigger, + cumulative_rotation.abs(), rotation_trigger, + cumulative_rotation.to_degrees(), + rotation_arc, + is_rotate, is_pinch, closest, + ); + + #[cfg(debug_assertions)] + self.ipc_recognition_frame( + finger_count as u8, + swipe_distance, + swipe_trigger, + current_spread - initial_spread, + pinch_trigger, + cumulative_rotation, + rotation_trigger, + rotation_arc, + rotation_arc_trigger_distance, + is_rotate, + is_pinch, + closest.to_string(), + timestamp.as_millis() as u32, + ); + + let rotation_candidate = + finger_count >= 3 && rotation_arc >= rotation_arc_trigger_distance; + + if is_rotate + || (swipe_distance >= swipe_trigger && !rotation_candidate) + || is_pinch + { + self.niri.touch_gesture_cumulative = None; + + if let Some(ref mut tap) = self.niri.touch_tap_candidate { + if tap.alive { + tap.alive = false; + tracing::debug!( + target: "niri::input::touch_gesture", + "TOUCH-DBG TAP killed reason=lock", + ); + } + } + + if let Some(mapped) = self.niri.window_under(pos) { + let app_id = + with_toplevel_role(mapped.toplevel(), |role| role.app_id.clone()); + tracing::debug!( + "touch: captured {}-finger gesture over app-id={:?}", + finger_count, + app_id.unwrap_or_default(), + ); + } + + self.niri.touch_gesture_locked = true; + let handle = self.niri.seat.get_touch().unwrap(); + handle.cancel(self); + + let gesture_type = if is_rotate { + if cumulative_rotation > 0.0 { + TouchGestureType::RotateCcw + } else { + TouchGestureType::RotateCw + } + } else if is_pinch { + if current_spread < initial_spread { + TouchGestureType::PinchIn + } else { + TouchGestureType::PinchOut + } + } else if cx.abs() > cy.abs() { + if cx > 0.0 { + 
TouchGestureType::SwipeRight + } else { + TouchGestureType::SwipeLeft + } + } else { + if cy > 0.0 { + TouchGestureType::SwipeDown + } else { + TouchGestureType::SwipeUp + } + }; + + let bind_info = { + let config = self.niri.config.borrow(); + let trigger = + touch_gesture_to_trigger(gesture_type, finger_count as u8); + let mod_key = self.backend.mod_key(&config); + let mods = self.niri.seat.get_keyboard().unwrap().modifier_state(); + trigger.and_then(|t| { + find_configured_bind(config.binds.0.iter(), mod_key, t, mods) + }) + }; + let bind_info = bind_info.map(extract_bind_info); + + { + let trigger_name = + touch_gesture_to_trigger(gesture_type, finger_count as u8) + .map(trigger_to_ipc_name) + .unwrap_or_else(|| "Unknown".to_string()); + let (bind_matched, kind_str, tag_str) = match bind_info.as_ref() { + Some((kind, _, _, tag, _)) => ( + "yes", + kind.map(|k| format!("{:?}", k)) + .unwrap_or_else(|| "discrete".to_string()), + tag.clone().unwrap_or_else(|| "-".to_string()), + ), + None => ("no", "-".to_string(), "-".to_string()), + }; + tracing::debug!( + target: "niri::input::touch_gesture", + "TOUCH-DBG LOCK fingers={} type={:?} \ + trigger={} bind={} kind={} tag={}", + finger_count, + gesture_type, + trigger_name, + bind_matched, + kind_str, + tag_str, + ); + } + + if let Some((kind, sensitivity, natural_scroll, tag, action)) = bind_info { + { + let trigger_name = + touch_gesture_to_trigger(gesture_type, finger_count as u8) + .map(trigger_to_ipc_name) + .unwrap_or_else(|| "Unknown".to_string()); + self.ipc_gesture_begin( + tag.clone().unwrap_or_default(), + trigger_name, + finger_count as u8, + kind.is_some(), + ); + } + + if let Some(kind) = kind { + begin_continuous_gesture(self, kind, pos); + let active = if is_rotate { + ActiveTouchBind::Rotate { + kind, + tag, + ipc_progress: 0.0, + start_rotation: cumulative_rotation, + } + } else if is_pinch { + ActiveTouchBind::Pinch { + kind, + tag, + ipc_progress: 0.0, + start_spread: current_spread, + last_spread: 
current_spread, + } + } else { + ActiveTouchBind::Swipe { + kind, + sensitivity, + natural_scroll, + tag, + ipc_progress: 0.0, + } + }; + self.niri.touch_active_bind = Some(active); + } else { + if !matches!(action, Action::Noop) { + self.do_action(action, false); + } + self.ipc_gesture_end(tag.clone().unwrap_or_default(), true); + } + } + } + } + } + } + + handle.frame(self); + } + + pub(super) fn on_touch_cancel(&mut self, _evt: I::TouchCancelEvent) { + let Some(handle) = self.niri.seat.get_touch() else { + return; + }; + + tracing::debug!( + target: "niri::input::touch_gesture", + "TOUCH-DBG CANCEL points={} locked={} passthrough={} edge_swipe={}", + self.niri.touch_gesture_points.len(), + self.niri.touch_gesture_locked, + self.niri.touchscreen_gesture_passthrough, + self.niri.touch_edge_swipe.is_some(), + ); + + // Collect tags for IPC GestureEnd before clearing state. + // Track `had_active` separately so we can emit a cancelled + // GestureEnd for untagged multi-finger binds too. + let active_bind = self.niri.touch_active_bind.take(); + let had_active = active_bind.is_some(); + let active_tag = active_bind.and_then(ActiveTouchBind::into_tag); + let edge_tag = match &self.niri.touch_edge_swipe { + Some(TouchEdgeSwipeState::Active { tag, .. }) => tag.clone(), + _ => None, + }; + + // Clear all touch gesture state. + self.niri.touch_gesture_points.clear(); + self.niri.touch_gesture_cumulative = None; + self.niri.touch_edge_swipe = None; + self.niri.touch_gesture_locked = false; + self.niri.touch_forwarded_slots.clear(); + self.niri.touch_gesture_initial_spread = None; + self.niri.touch_gesture_cumulative_rotation = 0.0; + self.niri.touch_gesture_previous_angles.clear(); + self.niri.touch_tap_candidate = None; + self.niri.touchscreen_gesture_passthrough = false; + self.niri.touch_frame_dirty = false; + self.niri.touch_frame_delta = (0., 0.); + self.niri.touch_frame_edge_delta = (0., 0.); + + // Cancel any ongoing gesture animations. 
+ self.niri.layout.workspace_switch_gesture_end(Some(false)); + self.niri.layout.view_offset_gesture_end(Some(false)); + self.niri.layout.overview_gesture_end(); + + // Emit IPC GestureEnd (cancelled) for any committed multi-finger + // bind (tagged or untagged), and tagged edge swipes. + if had_active { + self.ipc_gesture_end(active_tag.unwrap_or_default(), false); + } + if let Some(tag) = edge_tag { + self.ipc_gesture_end(tag, false); + } + + handle.cancel(self); + } +} + +/// Convert a TouchGestureType + finger count to a Trigger for bind lookup. +fn touch_gesture_to_trigger(gesture: TouchGestureType, finger_count: u8) -> Option { + use TouchGestureType::*; + // Reject finger counts outside the supported range. Edge swipes are + // always single-finger so they're allowed through regardless. + if !(MIN_FINGERS..=MAX_FINGERS).contains(&finger_count) + && !matches!( + gesture, + EdgeSwipeLeft | EdgeSwipeRight | EdgeSwipeTop | EdgeSwipeBottom + ) + { + return None; + } + let fingers = finger_count; + match gesture { + SwipeUp => Some(Trigger::TouchSwipe { + fingers, + direction: SwipeDirection::Up, + }), + SwipeDown => Some(Trigger::TouchSwipe { + fingers, + direction: SwipeDirection::Down, + }), + SwipeLeft => Some(Trigger::TouchSwipe { + fingers, + direction: SwipeDirection::Left, + }), + SwipeRight => Some(Trigger::TouchSwipe { + fingers, + direction: SwipeDirection::Right, + }), + PinchIn => Some(Trigger::TouchPinch { + fingers, + direction: PinchDirection::In, + }), + PinchOut => Some(Trigger::TouchPinch { + fingers, + direction: PinchDirection::Out, + }), + RotateCw => Some(Trigger::TouchRotate { + fingers, + direction: RotateDirection::Cw, + }), + RotateCcw => Some(Trigger::TouchRotate { + fingers, + direction: RotateDirection::Ccw, + }), + Tap => Some(Trigger::TouchTap { fingers }), + EdgeSwipeLeft => Some(Trigger::TouchEdge { + edge: ScreenEdge::Left, + zone: None, + }), + EdgeSwipeRight => Some(Trigger::TouchEdge { + edge: ScreenEdge::Right, + zone: None, 
+ }), + EdgeSwipeTop => Some(Trigger::TouchEdge { + edge: ScreenEdge::Top, + zone: None, + }), + EdgeSwipeBottom => Some(Trigger::TouchEdge { + edge: ScreenEdge::Bottom, + zone: None, + }), + } +} + +/// Detect which screen edge a touch position is near, if any, and which +/// third of that edge it lies in. +/// +/// The edge is the one closest to the touch point within `threshold`. The +/// zone splits the perpendicular axis into equal thirds: for Top/Bottom the +/// split is across x (Start = leftmost third, End = rightmost third); for +/// Left/Right it is across y (Start = topmost third, End = bottommost third). +fn detect_edge( + pos: smithay::utils::Point, + size: smithay::utils::Size, + threshold: f64, +) -> Option<(ScreenEdge, EdgeZone)> { + let x = pos.x; + let y = pos.y; + let w = size.w as f64; + let h = size.h as f64; + + let left = x; + let right = w - x; + let top = y; + let bottom = h - y; + + // Find the closest edge within threshold. + let mut closest: Option<(ScreenEdge, f64)> = None; + for (edge, dist) in [ + (ScreenEdge::Left, left), + (ScreenEdge::Right, right), + (ScreenEdge::Top, top), + (ScreenEdge::Bottom, bottom), + ] { + if dist < threshold { + if closest.map_or(true, |(_, d)| dist < d) { + closest = Some((edge, dist)); + } + } + } + + let (edge, _) = closest?; + + // Classify the perpendicular-axis position into thirds. + let (pos_along, extent) = match edge { + ScreenEdge::Top | ScreenEdge::Bottom => (x, w), + ScreenEdge::Left | ScreenEdge::Right => (y, h), + }; + let third = extent / 3.0; + let zone = if pos_along < third { + EdgeZone::Start + } else if pos_along < third * 2.0 { + EdgeZone::Center + } else { + EdgeZone::End + }; + + Some((edge, zone)) +} + +/// Begin a continuous gesture animation. 
+fn begin_continuous_gesture( + state: &mut State, + kind: ContinuousGestureKind, + pos: smithay::utils::Point, +) { + match kind { + ContinuousGestureKind::OverviewToggle => { + state.niri.layout.overview_gesture_begin(); + } + ContinuousGestureKind::WorkspaceSwitch => { + if let Some((output, _)) = state.niri.output_under(pos) { + let output = output.clone(); + state + .niri + .layout + .workspace_switch_gesture_begin(&output, true); + } + } + ContinuousGestureKind::ViewScroll => { + if let Some((output, _)) = state.niri.output_under(pos) { + let output = output.clone(); + let is_overview_open = state.niri.layout.is_overview_open(); + + let output_ws = if is_overview_open { + state.niri.workspace_under(true, pos) + } else { + state + .niri + .layout + .monitor_for_output(&output) + .map(|mon| (output.clone(), mon.active_workspace_ref())) + }; + + if let Some((output, ws)) = output_ws { + let ws_idx = state.niri.layout.find_workspace_by_id(ws.id()).unwrap().0; + state + .niri + .layout + .view_offset_gesture_begin(&output, Some(ws_idx), true); + } + } + } + ContinuousGestureKind::Noop => { + // No compositor animation — IPC events are emitted by the caller. + } + } +} + +/// Feed delta to an active continuous gesture. +fn feed_continuous_gesture( + state: &mut State, + kind: ContinuousGestureKind, + delta_x: f64, + delta_y: f64, + sensitivity: f64, + natural: bool, + timestamp: Duration, + tag: Option<&str>, +) { + // Compute progress: accumulate the adjusted (post-sensitivity, post-natural) + // primary axis delta. gesture-pixel-distance px ≈ 1 unit. 
+ let progress_unit = { + let config = state.niri.config.borrow(); + config.input.touchscreen.swipe_progress_distance() + }; + + match kind { + ContinuousGestureKind::WorkspaceSwitch => { + let dy = if natural { -delta_y } else { delta_y }; + if state + .niri + .layout + .workspace_switch_gesture_update(dy * sensitivity, timestamp, true) + .is_some() + { + state.niri.queue_redraw_all(); + } + } + ContinuousGestureKind::ViewScroll => { + let dx = if natural { -delta_x } else { delta_x }; + if state + .niri + .layout + .view_offset_gesture_update(dx * sensitivity, timestamp, true) + .is_some() + { + state.niri.queue_redraw_all(); + } + } + ContinuousGestureKind::OverviewToggle => { + let dy = if natural { delta_y } else { -delta_y }; + if let Some(redraw) = state + .niri + .layout + .overview_gesture_update(dy * sensitivity, timestamp) + { + if redraw { + state.niri.queue_redraw_all(); + } + } + } + ContinuousGestureKind::Noop => { + // No compositor animation — IPC progress is emitted below. + } + } + + // Emit IPC GestureProgress if this bind has a tag. + if let Some(tag) = tag { + // Compute adjusted delta for progress accumulation. + let adjusted_delta = match kind { + ContinuousGestureKind::WorkspaceSwitch | ContinuousGestureKind::OverviewToggle => { + let dy = if natural { -delta_y } else { delta_y }; + dy * sensitivity + } + ContinuousGestureKind::ViewScroll => { + let dx = if natural { -delta_x } else { delta_x }; + dx * sensitivity + } + ContinuousGestureKind::Noop => { + // Use the dominant axis + let dy = if natural { -delta_y } else { delta_y }; + let dx = if natural { -delta_x } else { delta_x }; + if dy.abs() > dx.abs() { + dy * sensitivity + } else { + dx * sensitivity + } + } + }; + + // Update accumulated progress on the active Swipe bind or edge swipe. + // Pinches take the `feed_continuous_pinch` path and never reach here. + let progress = if let Some(ActiveTouchBind::Swipe { ipc_progress, .. 
}) = + state.niri.touch_active_bind.as_mut() + { + *ipc_progress += adjusted_delta / progress_unit; + *ipc_progress + } else if let Some(TouchEdgeSwipeState::Active { + ref mut ipc_progress, + .. + }) = state.niri.touch_edge_swipe + { + *ipc_progress += adjusted_delta / progress_unit; + *ipc_progress + } else { + // Fallback: no accumulator reachable (shouldn't happen on the + // hot path — the caller populates one of the two state slots + // before calling here). + adjusted_delta / progress_unit + }; + + let ts_ms = timestamp.as_millis() as u32; + state.ipc_gesture_progress( + tag.to_string(), + progress, + GestureDelta::Swipe { + dx: delta_x, + dy: delta_y, + }, + ts_ms, + ); + } +} + +/// Feed spread delta to an active continuous pinch gesture. +/// +/// Mirrors `feed_continuous_gesture` but drives the animation from change in +/// finger spread instead of linear dx/dy. Works for any finger count ≥ 3 +/// (3-finger, 4-finger, 5-finger pinches all ride this path). +/// +/// Sign convention: positive incremental spread = pinch-out (fingers spreading), +/// negative = pinch-in. For OverviewToggle we negate so pinch-in opens, matching +/// the legacy hardcoded behavior. +/// +/// Uses `pinch_sensitivity` from the touchscreen gestures config for the +/// animation drive — not the bind's `sensitivity` property. Pinch has its +/// own tuning knob because raw spread-delta pixels need very different +/// scaling from linear swipe distances. At the default `1.0`, one pixel of +/// spread change contributes one pixel to the underlying gesture +/// accumulator, matching the scale swipes use. +fn feed_continuous_pinch( + state: &mut State, + kind: ContinuousGestureKind, + timestamp: Duration, + tag: Option<&str>, +) { + // Batch the two config reads so we only borrow RefCell once per call. 
+ let (pinch_sensitivity, progress_unit) = { + let config = state.niri.config.borrow(); + ( + config.input.touchscreen.pinch_sensitivity(), + config.input.touchscreen.pinch_progress_distance(), + ) + }; + + let current_spread = calculate_spread(&state.niri.touch_gesture_points); + + // Destructure the active Pinch variant directly. If the active bind is + // anything else (or None), something is badly wrong with the dispatch in + // on_touch_motion — bail out cleanly rather than panic. + let Some(ActiveTouchBind::Pinch { + start_spread, + last_spread, + .. + }) = state.niri.touch_active_bind.as_mut() + else { + return; + }; + let incremental = current_spread - *last_spread; + *last_spread = current_spread; + let start_spread = *start_spread; + + match kind { + ContinuousGestureKind::OverviewToggle => { + // Pinch-in (negative incremental) → positive anim delta → overview opens. + let delta = -incremental * pinch_sensitivity; + if let Some(redraw) = state.niri.layout.overview_gesture_update(delta, timestamp) { + if redraw { + state.niri.queue_redraw_all(); + } + } + } + ContinuousGestureKind::WorkspaceSwitch => { + // Semantically odd but not broken: pinch-out scrolls workspaces down. + let delta = incremental * pinch_sensitivity; + if state + .niri + .layout + .workspace_switch_gesture_update(delta, timestamp, true) + .is_some() + { + state.niri.queue_redraw_all(); + } + } + ContinuousGestureKind::ViewScroll => { + let delta = incremental * pinch_sensitivity; + if state + .niri + .layout + .view_offset_gesture_update(delta, timestamp, true) + .is_some() + { + state.niri.queue_redraw_all(); + } + } + ContinuousGestureKind::Noop => { + // No compositor animation — IPC progress is emitted below. + } + } + + // Emit IPC GestureProgress for tagged pinch binds. + if let Some(tag) = tag { + // Signed, unbounded: positive = pinch-out, negative = pinch-in. 
+ // Unlike swipes, pinch progress is absolute (computed from start_spread + // each frame) rather than accumulated — reversing the pinch gives a + // direct inverse, with no drift from accumulated float error. + let progress = (current_spread - start_spread) / progress_unit; + if let Some(ActiveTouchBind::Pinch { ipc_progress, .. }) = + state.niri.touch_active_bind.as_mut() + { + *ipc_progress = progress; + } + let ts_ms = timestamp.as_millis() as u32; + state.ipc_gesture_progress( + tag.to_string(), + progress, + GestureDelta::Pinch { + d_spread: incremental, + }, + ts_ms, + ); + } +} + +/// Feed the per-frame rotation delta to an active continuous rotation gesture. +/// +/// Mirrors `feed_continuous_pinch`, but the scalar driving the animation is a +/// signed angular delta (radians, CCW positive) rather than a spread delta. +/// Unlike pinch, rotation must accumulate frame-to-frame because `atan2` wraps +/// at ±π and because fingers lifting shift the centroid; see +/// `calculate_rotation_delta` for the math and `rebase_rotation_basis` for +/// the finger-lift handling. +/// +/// The rotation is converted to a linear animation delta by multiplying by +/// `pinch_sensitivity` (same knob as pinch — rotation shares the "radial +/// gesture" category). For OverviewToggle, CCW opens the overview to mirror +/// the pinch-in → open convention (both are "gather inward" motions). +fn feed_continuous_rotation( + state: &mut State, + kind: ContinuousGestureKind, + timestamp: Duration, + tag: Option<&str>, +) { + // Batch config reads to hold the RefCell once per call. + let (pinch_sensitivity, rotation_progress_angle) = { + let config = state.niri.config.borrow(); + ( + config.input.touchscreen.pinch_sensitivity(), + config.input.touchscreen.rotation_progress_angle(), + ) + }; + + // Compute this frame's angular delta and update the previous-angle basis. 
+ let (frame_rotation, new_angles) = calculate_rotation_delta( + &state.niri.touch_gesture_points, + &state.niri.touch_gesture_previous_angles, + ); + state.niri.touch_gesture_previous_angles = new_angles; + state.niri.touch_gesture_cumulative_rotation += frame_rotation; + let cumulative_rotation = state.niri.touch_gesture_cumulative_rotation; + + // Destructure the active Rotate variant to read its start_rotation; + // bail if misdispatched. + let Some(ActiveTouchBind::Rotate { start_rotation, .. }) = + state.niri.touch_active_bind.as_ref() + else { + return; + }; + let start_rotation = *start_rotation; + + // Convert angular motion to an animation-accumulator scalar. Arc length + // at a unit radius is the angular delta itself; scale by pinch_sensitivity + // so users with pinch tuned to their taste get rotation that feels the + // same. Multiply by a radius of 100 px to get units comparable to swipe + // pixel deltas (π/2 rad ≈ 157 px of "motion"). + const ROTATION_PIXEL_RADIUS: f64 = 100.0; + let anim_delta = frame_rotation * ROTATION_PIXEL_RADIUS * pinch_sensitivity; + + match kind { + ContinuousGestureKind::OverviewToggle => { + // CCW (positive frame_rotation) → positive anim delta → overview + // opens. Matches the pinch-in "gather inward" convention. + if let Some(redraw) = state + .niri + .layout + .overview_gesture_update(anim_delta, timestamp) + { + if redraw { + state.niri.queue_redraw_all(); + } + } + } + ContinuousGestureKind::WorkspaceSwitch => { + if state + .niri + .layout + .workspace_switch_gesture_update(anim_delta, timestamp, true) + .is_some() + { + state.niri.queue_redraw_all(); + } + } + ContinuousGestureKind::ViewScroll => { + if state + .niri + .layout + .view_offset_gesture_update(anim_delta, timestamp, true) + .is_some() + { + state.niri.queue_redraw_all(); + } + } + ContinuousGestureKind::Noop => { + // No compositor animation — IPC progress is emitted below. + } + } + + // Emit IPC GestureProgress for tagged rotation binds. 
+ if let Some(tag) = tag { + // Signed, unbounded: positive = CCW, negative = CW. Progress is the + // rotation since recognition, normalized by the progress distance. + // `cumulative_rotation - start_rotation` keeps the running metric + // out of the progress math so the recognition-phase rotation isn't + // included in the animation drive. + let progress = (cumulative_rotation - start_rotation) / rotation_progress_angle; + if let Some(ActiveTouchBind::Rotate { ipc_progress, .. }) = + state.niri.touch_active_bind.as_mut() + { + *ipc_progress = progress; + } + let ts_ms = timestamp.as_millis() as u32; + state.ipc_gesture_progress( + tag.to_string(), + progress, + GestureDelta::Rotate { + d_radians: frame_rotation, + }, + ts_ms, + ); + } +} + +/// End a continuous gesture animation. +fn end_continuous_gesture(state: &mut State, kind: ContinuousGestureKind) { + match kind { + ContinuousGestureKind::WorkspaceSwitch => { + if let Some(output) = state.niri.layout.workspace_switch_gesture_end(Some(true)) { + state.niri.queue_redraw(&output); + } + } + ContinuousGestureKind::ViewScroll => { + if let Some(output) = state.niri.layout.view_offset_gesture_end(Some(true)) { + state.niri.queue_redraw(&output); + } + } + ContinuousGestureKind::OverviewToggle => { + state.niri.layout.overview_gesture_end(); + } + ContinuousGestureKind::Noop => { + // No compositor animation to end. + } + } +} + +/// Calculate the average spread of touch points (average distance from centroid). 
+fn calculate_spread( + points: &std::collections::HashMap< + Option, + smithay::utils::Point, + >, +) -> f64 { + if points.len() < 2 { + return 0.0; + } + + let n = points.len() as f64; + let (sum_x, sum_y) = points + .values() + .fold((0.0, 0.0), |(sx, sy), p| (sx + p.x, sy + p.y)); + let centroid_x = sum_x / n; + let centroid_y = sum_y / n; + + let total_dist: f64 = points + .values() + .map(|p| { + let dx = p.x - centroid_x; + let dy = p.y - centroid_y; + (dx * dx + dy * dy).sqrt() + }) + .sum(); + + total_dist / n +} + +/// Compute per-slot angles (in radians) from the cluster centroid. +/// +/// Only slots that have an actual `TouchSlot` identifier (not `None`) are +/// returned — angles have to be tracked across frames by slot, and `None` +/// slots can't be followed. Returns an empty map if fewer than 2 real slots +/// are present. +fn calculate_per_slot_angles( + points: &std::collections::HashMap< + Option, + smithay::utils::Point, + >, +) -> std::collections::HashMap { + let mut out = std::collections::HashMap::new(); + let slotted: Vec<_> = points + .iter() + .filter_map(|(slot, pt)| slot.map(|s| (s, pt))) + .collect(); + if slotted.len() < 2 { + return out; + } + let n = slotted.len() as f64; + let (sx, sy) = slotted + .iter() + .fold((0.0, 0.0), |(ax, ay), (_, p)| (ax + p.x, ay + p.y)); + let cx = sx / n; + let cy = sy / n; + for (slot, pt) in slotted { + // atan2(-dy, dx): screen y grows downward, so we flip the y axis to + // get the mathematical convention where positive angles are + // counter-clockwise *as the user sees them on the screen*. Without + // the flip, a CCW rotation on the glass would produce a negative + // angle delta in screen space, which is confusing for users. + out.insert(slot, (-(pt.y - cy)).atan2(pt.x - cx)); + } + out +} + +/// Compute the averaged frame-to-frame rotation delta (in radians) across all +/// fingers present in both frames. 
+/// +/// Returns `(frame_delta, new_angles)`: +/// - `frame_delta` is the signed average angular delta across fingers +/// present in both frames, with ±π unwrap applied. Positive = CCW. +/// A noise floor of 0.001 rad is applied: smaller values clamp to 0 to +/// prevent sub-threshold drift from accumulating into a false rotation on +/// held-still fingers. +/// - `new_angles` is the fresh per-slot angle map to store for the next +/// frame's comparison. +/// +/// Returns `(0.0, new_angles)` with no accumulated delta when fewer than 2 +/// fingers overlap between frames — the caller should still overwrite its +/// stored map so the next frame has a basis. +fn calculate_rotation_delta( + current_points: &std::collections::HashMap< + Option, + smithay::utils::Point, + >, + previous_angles: &std::collections::HashMap, +) -> ( + f64, + std::collections::HashMap, +) { + use std::f64::consts::{PI, TAU}; + const NOISE_FLOOR: f64 = 0.001; + + let new_angles = calculate_per_slot_angles(current_points); + if new_angles.is_empty() || previous_angles.is_empty() { + return (0.0, new_angles); + } + + let mut sum = 0.0; + let mut count = 0usize; + for (slot, &curr) in &new_angles { + let Some(&prev) = previous_angles.get(slot) else { + continue; + }; + let raw = curr - prev; + // Unwrap across the ±π boundary: any delta with |Δ| > π is on the + // wrong side of the wrap; shift by 2π to get the short-way delta. + let unwrapped = if raw > PI { + raw - TAU + } else if raw < -PI { + raw + TAU + } else { + raw + }; + sum += unwrapped; + count += 1; + } + + if count == 0 { + return (0.0, new_angles); + } + + let avg = sum / count as f64; + let filtered = if avg.abs() < NOISE_FLOOR { 0.0 } else { avg }; + (filtered, new_angles) +} + +/// Convert a gesture Trigger to its KDL config name for IPC events. The +/// emitted string echoes the same property form users write in `binds {}` +/// (e.g. 
`TouchSwipe fingers=3 direction="up"`) so IPC consumers can +/// string-match against their own config 1:1. Non-gesture variants fall +/// through to `"Unknown"` — this function is only meant for gesture +/// triggers. +pub(crate) fn trigger_to_ipc_name(trigger: Trigger) -> String { + match trigger { + Trigger::TouchSwipe { fingers, direction } => { + format!( + "TouchSwipe fingers={fingers} direction=\"{}\"", + swipe_dir_name(direction) + ) + } + Trigger::TouchpadSwipe { fingers, direction } => { + format!( + "TouchpadSwipe fingers={fingers} direction=\"{}\"", + swipe_dir_name(direction) + ) + } + Trigger::TouchPinch { fingers, direction } => { + format!( + "TouchPinch fingers={fingers} direction=\"{}\"", + pinch_dir_name(direction) + ) + } + Trigger::TouchpadPinch { fingers, direction } => { + format!( + "TouchpadPinch fingers={fingers} direction=\"{}\"", + pinch_dir_name(direction) + ) + } + Trigger::TouchRotate { fingers, direction } => { + format!( + "TouchRotate fingers={fingers} direction=\"{}\"", + rotate_dir_name(direction) + ) + } + Trigger::TouchTap { fingers } => { + format!("TouchTap fingers={fingers}") + } + Trigger::TouchpadTapHold { fingers } => { + format!("TouchpadTapHold fingers={fingers}") + } + Trigger::TouchpadTapHoldDrag { fingers } => { + format!("TouchpadTapHoldDrag fingers={fingers}") + } + Trigger::TouchTapHoldDrag { fingers, direction } => match direction { + Some(d) => format!( + "TouchTapHoldDrag fingers={fingers} direction=\"{}\"", + swipe_dir_name(d) + ), + None => format!("TouchTapHoldDrag fingers={fingers}"), + }, + Trigger::TouchEdge { edge, zone } => { + let edge_str = edge.as_kdl_name(); + match zone { + None => format!("TouchEdge edge=\"{edge_str}\""), + Some(z) => format!( + "TouchEdge edge=\"{edge_str}\" zone=\"{}\"", + niri_config::input::zone_kdl_name(edge, z) + ), + } + } + // Every current caller only passes gesture triggers. 
If that + // invariant ever breaks we want to hear about it loudly in dev + // rather than silently emitting "Unknown" into the IPC stream. + other => { + debug_assert!( + false, + "trigger_to_ipc_name called with non-gesture trigger: {other:?}" + ); + "Unknown".to_string() + } + } +} + +fn swipe_dir_name(d: SwipeDirection) -> &'static str { + match d { + SwipeDirection::Up => "up", + SwipeDirection::Down => "down", + SwipeDirection::Left => "left", + SwipeDirection::Right => "right", + } +} + +fn pinch_dir_name(d: PinchDirection) -> &'static str { + match d { + PinchDirection::In => "in", + PinchDirection::Out => "out", + } +} + +fn rotate_dir_name(d: RotateDirection) -> &'static str { + match d { + RotateDirection::Cw => "cw", + RotateDirection::Ccw => "ccw", + } +} + +#[cfg(test)] +mod tests { + use std::collections::HashMap; + use std::f64::consts::{FRAC_PI_2, PI}; + + use smithay::backend::input::TouchSlot; + use smithay::utils::Point; + + use super::{calculate_per_slot_angles, calculate_rotation_delta}; + + fn slot(n: u32) -> TouchSlot { + // TouchSlot is From>. + TouchSlot::from(Some(n)) + } + + fn point(x: f64, y: f64) -> Point { + Point::from((x, y)) + } + + fn points_from( + items: &[(u32, f64, f64)], + ) -> HashMap, Point> { + items + .iter() + .map(|(n, x, y)| (Some(slot(*n)), point(*x, *y))) + .collect() + } + + #[test] + fn angles_empty_for_single_finger() { + let pts = points_from(&[(0, 5.0, 5.0)]); + assert!(calculate_per_slot_angles(&pts).is_empty()); + } + + #[test] + fn angles_three_fingers_around_origin() { + // Three fingers spaced 120° apart around the origin, so the + // centroid is exactly (0, 0) and each finger lands on a known + // angle in the screen-flipped math convention. 
+ // 0°: (10, 0) screen + // +120°: screen (10·cos 120°, -10·sin 120°) = (-5, -8.660) + // -120°: screen (10·cos -120°, -10·sin -120°) = (-5, +8.660) + let r: f64 = 10.0; + let pts = points_from(&[ + (0, r, 0.0), + ( + 1, + r * 120.0_f64.to_radians().cos(), + -r * 120.0_f64.to_radians().sin(), + ), + ( + 2, + r * (-120.0_f64).to_radians().cos(), + -r * (-120.0_f64).to_radians().sin(), + ), + ]); + let angles = calculate_per_slot_angles(&pts); + let tolerance = 1e-9; + assert!( + (angles[&slot(0)] - 0.0).abs() < tolerance, + "slot 0 = {}", + angles[&slot(0)] + ); + assert!( + (angles[&slot(1)] - 120.0_f64.to_radians()).abs() < tolerance, + "slot 1 = {}", + angles[&slot(1)] + ); + assert!( + (angles[&slot(2)] - (-120.0_f64).to_radians()).abs() < tolerance, + "slot 2 = {}", + angles[&slot(2)] + ); + } + + /// Build a point set with N fingers arranged around the origin at the + /// given angles (screen-flipped math convention: +x right, +y up on + /// screen). Each finger is placed at radius 10. + fn ring_points( + angles: &[(u32, f64)], + ) -> HashMap, Point> { + let r = 10.0_f64; + let items: Vec<(u32, f64, f64)> = angles + .iter() + .map(|(n, a)| (*n, r * a.cos(), -r * a.sin())) + .collect(); + points_from(&items) + } + + #[test] + fn rotation_static_frames_is_zero() { + let pts = ring_points(&[ + (0, 0.0), + (1, 120.0_f64.to_radians()), + (2, -120.0_f64.to_radians()), + ]); + let prev = calculate_per_slot_angles(&pts); + let (delta, _) = calculate_rotation_delta(&pts, &prev); + assert_eq!(delta, 0.0); + } + + #[test] + fn rotation_quarter_turn_ccw() { + // Three fingers equally spaced 120° apart. Rotate the entire cluster + // +90° (CCW as seen on screen) around the origin. 
+ let initial = ring_points(&[ + (0, 0.0), + (1, 120.0_f64.to_radians()), + (2, -120.0_f64.to_radians()), + ]); + let rotated = ring_points(&[ + (0, 90.0_f64.to_radians()), + (1, 210.0_f64.to_radians()), + (2, -30.0_f64.to_radians()), + ]); + let prev = calculate_per_slot_angles(&initial); + let (delta, _) = calculate_rotation_delta(&rotated, &prev); + // +90° CCW = +π/2. + let tolerance = 1e-9; + assert!((delta - FRAC_PI_2).abs() < tolerance, "delta = {delta}"); + } + + #[test] + fn rotation_wrap_across_positive_pi() { + // Two fingers 180° apart, prev at +170° and -10°. Both rotate +20° CCW: + // slot 0: +170° → +190° ≡ -170° (wrap across +π) + // slot 1: -10° → +10° (normal) + // Raw subtraction for slot 0 is (-170 - 170) = -340°, unwrap → +20°. + // Average across fingers = +20° = +0.349 rad. + let prev_points = ring_points(&[(0, 170.0_f64.to_radians()), (1, -10.0_f64.to_radians())]); + let prev = calculate_per_slot_angles(&prev_points); + let curr = ring_points(&[(0, -170.0_f64.to_radians()), (1, 10.0_f64.to_radians())]); + let (delta, _) = calculate_rotation_delta(&curr, &prev); + let expected = 20.0_f64.to_radians(); + assert!( + (delta - expected).abs() < 1e-9, + "delta = {delta}, expected ~{expected}" + ); + } + + #[test] + fn rotation_noise_floor_zeroes_tiny_delta() { + // Two fingers nudged by < 0.001 rad each: averaged delta is + // below the noise floor and should clamp to exactly 0.0. 
+ let prev_points = ring_points(&[(0, 0.0), (1, PI)]); + let prev = calculate_per_slot_angles(&prev_points); + let eps = 0.0005_f64; + let curr = ring_points(&[(0, eps), (1, PI + eps)]); + let (delta, _) = calculate_rotation_delta(&curr, &prev); + assert_eq!(delta, 0.0); + } +} diff --git a/src/ipc/client.rs b/src/ipc/client.rs index 40a58efa19..486fa662e1 100644 --- a/src/ipc/client.rs +++ b/src/ipc/client.rs @@ -506,6 +506,74 @@ pub fn handle_msg(mut msg: Msg, json: bool) -> anyhow::Result<()> { Event::CastStopped { stream_id } => { println!("Cast stopped: stream id {stream_id}"); } + Event::GestureBegin { + tag, + trigger, + finger_count, + is_continuous, + } => { + let kind = if is_continuous { + "continuous" + } else { + "discrete" + }; + println!( + "Gesture begin: tag={tag} trigger={trigger} \ + fingers={finger_count} ({kind})" + ); + } + Event::GestureProgress { + tag, + progress, + delta, + timestamp_ms, + } => { + let delta_str = match delta { + niri_ipc::GestureDelta::Swipe { dx, dy } => { + format!("swipe ({dx:.1},{dy:.1})") + } + niri_ipc::GestureDelta::Pinch { d_spread } => { + format!("pinch Δspread={d_spread:.1}") + } + niri_ipc::GestureDelta::Rotate { d_radians } => { + format!("rotate Δ={d_radians:.4}rad") + } + }; + println!( + "Gesture progress: tag={tag} progress={progress:.3} \ + {delta_str} t={timestamp_ms}" + ); + } + Event::GestureEnd { tag, completed } => { + let status = if completed { "completed" } else { "cancelled" }; + println!("Gesture end: tag={tag} ({status})"); + } + Event::RecognitionFrame { + finger_count, + swipe_distance, + swipe_trigger_distance, + spread_change, + pinch_trigger_distance, + rotation_rad, + rotation_trigger_angle_rad, + rotation_arc, + rotation_arc_trigger_distance: _, + is_rotate, + is_pinch, + closest, + timestamp_ms, + } => { + println!( + "Recognition frame: fingers={finger_count} \ + swipe={swipe_distance:.1}/{swipe_trigger_distance:.1} \ + spread={spread_change:.1}/{pinch_trigger_distance:.1} \ + 
rot={:.1}°/{:.1}° arc={rotation_arc:.1} \ + is_rotate={is_rotate} is_pinch={is_pinch} \ + closest={closest} t={timestamp_ms}", + rotation_rad.to_degrees(), + rotation_trigger_angle_rad.to_degrees(), + ); + } } } } diff --git a/src/ipc/server.rs b/src/ipc/server.rs index db71da6dbd..18cc705b9f 100644 --- a/src/ipc/server.rs +++ b/src/ipc/server.rs @@ -17,8 +17,8 @@ use futures_util::{select_biased, AsyncBufReadExt, AsyncWrite, AsyncWriteExt, Fu use niri_config::OutputName; use niri_ipc::state::{EventStreamState, EventStreamStatePart as _}; use niri_ipc::{ - Action, Event, KeyboardLayouts, OutputConfigChanged, Overview, Reply, Request, Response, - Timestamp, WindowLayout, Workspace, + Action, Event, GestureDelta, KeyboardLayouts, OutputConfigChanged, Overview, Reply, Request, + Response, Timestamp, WindowLayout, Workspace, }; use smithay::desktop::layer_map_for_output; use smithay::input::pointer::{ @@ -942,4 +942,101 @@ impl State { state.apply(event.clone()); server.send_event(event); } + + /// Emit a GestureBegin IPC event for a tagged bind. + pub fn ipc_gesture_begin( + &mut self, + tag: String, + trigger: String, + finger_count: u8, + is_continuous: bool, + ) { + let Some(server) = &self.niri.ipc_server else { + return; + }; + let mut state = server.event_stream_state.borrow_mut(); + let event = Event::GestureBegin { + tag, + trigger, + finger_count, + is_continuous, + }; + state.apply(event.clone()); + server.send_event(event); + } + + /// Emit a GestureProgress IPC event for a tagged continuous gesture. + pub fn ipc_gesture_progress( + &mut self, + tag: String, + progress: f64, + delta: GestureDelta, + timestamp_ms: u32, + ) { + let Some(server) = &self.niri.ipc_server else { + return; + }; + let event = Event::GestureProgress { + tag, + progress, + delta, + timestamp_ms, + }; + // No state.apply needed — progress doesn't change tracked state. + server.send_event(event); + } + + /// Emit a GestureEnd IPC event for a tagged bind. 
+ pub fn ipc_gesture_end(&mut self, tag: String, completed: bool) { + let Some(server) = &self.niri.ipc_server else { + return; + }; + let mut state = server.event_stream_state.borrow_mut(); + let event = Event::GestureEnd { tag, completed }; + state.apply(event.clone()); + server.send_event(event); + } + + /// Emit a RecognitionFrame IPC event with per-frame touchscreen + /// recognizer telemetry. Compiled out of release builds via + /// `#[cfg(debug_assertions)]` at the call site. No `state.apply` + /// because recognition telemetry doesn't mutate tracked state; it's + /// pure stream output. + #[allow(clippy::too_many_arguments)] + pub fn ipc_recognition_frame( + &mut self, + finger_count: u8, + swipe_distance: f64, + swipe_trigger_distance: f64, + spread_change: f64, + pinch_trigger_distance: f64, + rotation_rad: f64, + rotation_trigger_angle_rad: f64, + rotation_arc: f64, + rotation_arc_trigger_distance: f64, + is_rotate: bool, + is_pinch: bool, + closest: String, + timestamp_ms: u32, + ) { + let Some(server) = &self.niri.ipc_server else { + return; + }; + let event = Event::RecognitionFrame { + finger_count, + swipe_distance, + swipe_trigger_distance, + spread_change, + pinch_trigger_distance, + rotation_rad, + rotation_trigger_angle_rad, + rotation_arc, + rotation_arc_trigger_distance, + is_rotate, + is_pinch, + closest, + timestamp_ms, + }; + server.send_event(event); + } } diff --git a/src/niri.rs b/src/niri.rs index 190ef09d55..c043ce44ec 100644 --- a/src/niri.rs +++ b/src/niri.rs @@ -14,12 +14,14 @@ use _server_decoration::server::org_kde_kwin_server_decoration_manager::Mode as use anyhow::{bail, ensure, Context}; use calloop::futures::Scheduler; use niri_config::debug::PreviewRender; +use niri_config::input::{EdgeZone, ScreenEdge}; +use niri_config::touch_binds::ContinuousGestureKind; use niri_config::{ Config, FloatOrInt, Key, Modifiers, OutputName, TrackLayout, WarpMouseToFocusMode, WorkspaceReference, Xkb, }; use smithay::backend::allocator::Fourcc; 
-use smithay::backend::input::Keycode; +use smithay::backend::input::{Keycode, TouchSlot}; use smithay::backend::renderer::damage::OutputDamageTracker; use smithay::backend::renderer::element::memory::MemoryRenderBufferRenderElement; use smithay::backend::renderer::element::surface::WaylandSurfaceRenderElement; @@ -191,6 +193,137 @@ const CLEAR_COLOR_LOCKED: [f32; 4] = [0.3, 0.1, 0.1, 1.]; // should be ~1.995 seconds. const FRAME_CALLBACK_THROTTLE: Option = Some(Duration::from_millis(995)); +/// Tap candidate tracking for N-finger tap detection. +/// +/// Runs in parallel with swipe/pinch/rotate recognition. Killed when any +/// finger drifts beyond `tap-wobble-threshold` or when the recognizer locks. +/// If still alive when all fingers lift within `tap-timeout-ms`, fires the +/// `TouchTap { fingers }` trigger. +pub struct TapCandidate { + pub start_time: Instant, + /// Highest finger count observed during this tap sequence. + pub peak_fingers: u8, + /// Initial landing position for each finger slot, used to compute + /// per-finger displacement for the wobble check. + pub initial_positions: HashMap, Point>, + /// Set to false when wobble threshold exceeded or recognizer locks. + /// Single-shot: once dead, cannot resurrect. + pub alive: bool, +} + +/// State for touchscreen edge swipe gesture detection. +pub enum TouchEdgeSwipeState { + /// First touch landed in edge zone; waiting for motion to confirm swipe. + Pending { + edge: ScreenEdge, + /// Which third of the edge the touch landed in. Carried through so + /// the gesture stays locked to whichever bind (zoned or unzoned) hit + /// at touch-down, even if later lookups need to re-derive the trigger. + zone: EdgeZone, + /// True if the zoned trigger was the one that matched. False if the + /// unzoned parent trigger was the one that matched (fallback path). + /// Determines which Trigger name to emit for IPC events. 
+ zoned: bool, + cumulative: (f64, f64), + slot: Option, + }, + /// Swipe recognized; gesture animation is active (continuous gesture). + Active { + edge: ScreenEdge, + zone: EdgeZone, + zoned: bool, + kind: ContinuousGestureKind, + sensitivity: f64, + natural_scroll: bool, + slot: Option, + /// IPC tag for gesture events. + tag: Option, + /// Accumulated progress for IPC (0.0 = start, 1.0 = one unit). + ipc_progress: f64, + }, +} + +/// State for an active touchpad swipe gesture (after bind matched). +pub struct ActiveSwipeBind { + pub kind: ContinuousGestureKind, + pub sensitivity: f64, + /// IPC tag for gesture events. + pub tag: Option, + /// Accumulated progress for IPC (0.0 = start, 1.0 = one unit). + pub ipc_progress: f64, +} + +/// State for an active multi-finger touch gesture (after bind matched). +/// +/// Split by gesture shape: swipes carry linear dx/dy state; pinches carry +/// finger-spread state. Using an enum here (rather than a shared struct +/// with a flag) means illegal states like "swipe with a pinch start spread" +/// are unrepresentable, mirroring the `TouchEdgeSwipeState::Pending/Active` +/// pattern used elsewhere in this file. +pub enum ActiveTouchBind { + Swipe { + kind: ContinuousGestureKind, + sensitivity: f64, + natural_scroll: bool, + /// IPC tag for gesture events. + tag: Option, + /// Accumulated progress for IPC. Signed and unbounded — grows as the + /// finger moves in the recognized direction, goes negative on reversal, + /// and can exceed `±1.0` on overshoot. + ipc_progress: f64, + }, + Pinch { + kind: ContinuousGestureKind, + /// IPC tag for gesture events. + tag: Option, + /// Absolute IPC progress — recomputed each feed frame as + /// `(current - start) / pinch_progress_distance`. Signed: positive + /// for pinch-out, negative for pinch-in. Non-monotonic: reversing + /// the pinch reverses the progress. + ipc_progress: f64, + /// Finger spread at the moment the pinch was recognized. 
+ start_spread: f64, + /// Finger spread at the previous motion event. Subtracted from the + /// current spread to produce the incremental delta that drives the + /// animation. + last_spread: f64, + }, + Rotate { + kind: ContinuousGestureKind, + /// IPC tag for gesture events. + tag: Option, + /// IPC progress — recomputed each feed frame as + /// `(cumulative_rotation - start_rotation) / rotation_progress_distance`. + /// Signed: positive for CCW, negative for CW. Non-monotonic. + ipc_progress: f64, + /// Value of `touch_gesture_cumulative_rotation` at the moment the + /// gesture was recognized. Subtracted from the running cumulative to + /// produce the rotation *since recognition*, so the recognition-phase + /// rotation doesn't bleed into the animated progress. Unlike pinch's + /// absolute `current - start` comparison, the underlying metric must + /// accumulate per-frame because `atan2` wraps at ±π and because + /// fingers lifting mid-gesture shift the centroid. See + /// `calculate_rotation_delta` for the per-frame math. + start_rotation: f64, + }, +} + +impl ActiveTouchBind { + pub fn kind(&self) -> ContinuousGestureKind { + match self { + Self::Swipe { kind, .. } | Self::Pinch { kind, .. } | Self::Rotate { kind, .. } => { + *kind + } + } + } + + pub fn into_tag(self) -> Option { + match self { + Self::Swipe { tag, .. } | Self::Pinch { tag, .. } | Self::Rotate { tag, .. } => tag, + } + } +} + pub struct Niri { pub config: Rc>, @@ -369,7 +502,87 @@ pub struct Niri { pub notified_activity_this_iteration: bool, pub pointer_inside_hot_corner: bool, pub tablet_cursor_location: Option>, - pub gesture_swipe_3f_cumulative: Option<(f64, f64)>, + /// Cumulative (x, y) delta, plus which gestures are valid for this finger count, + /// Cumulative touchpad swipe delta and finger count during recognition phase. + /// (cx, cy, fingers) + pub gesture_swipe_3f_cumulative: Option<(f64, f64, usize)>, + /// Active touchpad swipe gesture from binds. 
+ pub gesture_swipe_bind: Option, + /// Active touch points for multi-finger gesture detection. + pub touch_gesture_points: HashMap, Point>, + /// Cumulative delta when tracking a 2+ finger touch gesture. + /// Set to Some((0, 0)) when gesture recognition starts (2nd finger down), + /// cleared when gesture completes or is cancelled. + pub touch_gesture_cumulative: Option<(f64, f64)>, + /// Edge swipe gesture state for touchscreen. + pub touch_edge_swipe: Option, + /// Set when a multi-finger touch gesture is locked (direction decided). + /// While locked, touch events are suppressed from clients until all + /// fingers are lifted. Any slots that were already forwarded before + /// the recognizer decided this was a gesture have their matching up + /// plus a `wl_touch.cancel` emitted at the transition — see + /// `touch_forwarded_slots`. + pub touch_gesture_locked: bool, + /// Slots whose `wl_touch.down` was forwarded to a client but whose + /// matching `wl_touch.up` has not yet been sent. When the recognizer + /// decides the sequence is a compositor gesture, we must emit `up` + /// (and `cancel`) for these slots so the client doesn't hold them as + /// phantom "still-down" touches — the gesture gate will otherwise + /// suppress their real up events. + pub touch_forwarded_slots: HashSet, + /// Set when the first finger of a touch gesture landed on a window with + /// `touchscreen-gesture-passthrough true`. While set, the gesture + /// recognizer is bypassed and touch events forward directly to the + /// client for the lifetime of the gesture (until all fingers lift). + /// Cleared in `on_touch_up` when `touch_gesture_points` becomes empty. + pub touchscreen_gesture_passthrough: bool, + /// Active touch gesture bind (after direction decided and bind matched). + pub touch_active_bind: Option, + /// Initial spread (average distance from centroid) when 3+ fingers first tracked. 
+ pub touch_gesture_initial_spread: Option, + /// Cumulative signed rotation in radians accumulated across motion frames + /// while 3+ fingers are tracked. Positive = CCW. Reset to 0 when the + /// gesture starts or ends. + pub touch_gesture_cumulative_rotation: f64, + /// Per-slot angle (radians, from the cluster centroid) recorded at the + /// previous motion frame. Used to compute the per-finger delta each frame + /// before averaging. Rebuilt on finger-lift to avoid centroid-shift + /// artifacts (see `rebase_rotation_basis`). + pub touch_gesture_previous_angles: HashMap, + /// Tap candidate for N-finger tap detection. Runs in parallel with + /// swipe/pinch/rotate recognition. Killed by wobble or recognizer lock. + pub touch_tap_candidate: Option, + /// Touchpad hold-gesture tracking for N-finger tap-hold detection. + /// Stores finger_count from GestureHoldBegin. Cleared on + /// GestureHoldEnd; if !cancelled and fingers >= 3, fires a + /// TouchpadTapHold bind before clearing. + pub touchpad_hold_begin: Option, + /// Set when a hold ends with `cancelled=true` (fingers started moving) + /// and we had a 3+ finger hold candidate. The next `SwipeBegin` checks + /// this to decide whether to enter tap-hold-drag mode instead of + /// normal swipe. Stores finger count. + pub touchpad_drag_pending: Option, + /// Active touchpad pinch gesture's finger count (from + /// `GesturePinchBegin.fingers`). `None` outside a pinch. Used to + /// build the `TouchpadPinch{fingers, direction}` trigger when the + /// scale threshold is crossed. + pub touchpad_pinch_fingers: Option, + /// Whether the current pinch gesture has already fired a discrete + /// `TouchpadPinch` bind. Prevents double-firing within one gesture. + /// Reset on pinch begin/end. + pub touchpad_pinch_latched: bool, + /// Accumulated per-frame deltas for touchscreen gesture batching. 
+ /// Summed across all per-slot TouchMotion events within a single + /// hardware scan frame; consumed and zeroed in on_touch_frame. + pub touch_frame_delta: (f64, f64), + /// Per-frame delta for the edge swipe slot only. Edge swipes are + /// single-finger, so their feed must not include other fingers' motion. + pub touch_frame_edge_delta: (f64, f64), + /// Set true when any touch_gesture_points position changed in this + /// frame. Cleared after on_touch_frame processes the batch. + pub touch_frame_dirty: bool, + /// Timestamp of the last TouchMotion in this frame (for gesture feeds). + pub touch_frame_timestamp: Duration, pub overview_scroll_swipe_gesture: ScrollSwipeGesture, pub vertical_wheel_tracker: ScrollTracker, pub horizontal_wheel_tracker: ScrollTracker, @@ -1506,7 +1719,7 @@ impl State { || config.input.trackball != old_config.input.trackball || config.input.trackpoint != old_config.input.trackpoint || config.input.tablet != old_config.input.tablet - || config.input.touch != old_config.input.touch + || config.input.touchscreen != old_config.input.touchscreen { libinput_config_changed = true; } @@ -2581,6 +2794,26 @@ impl Niri { pointer_inside_hot_corner: false, tablet_cursor_location: None, gesture_swipe_3f_cumulative: None, + gesture_swipe_bind: None, + touch_gesture_points: HashMap::new(), + touch_gesture_cumulative: None, + touch_edge_swipe: None, + touch_gesture_locked: false, + touch_forwarded_slots: HashSet::new(), + touchscreen_gesture_passthrough: false, + touch_active_bind: None, + touch_gesture_initial_spread: None, + touch_gesture_cumulative_rotation: 0.0, + touch_gesture_previous_angles: HashMap::new(), + touch_tap_candidate: None, + touchpad_hold_begin: None, + touchpad_drag_pending: None, + touchpad_pinch_fingers: None, + touchpad_pinch_latched: false, + touch_frame_delta: (0., 0.), + touch_frame_edge_delta: (0., 0.), + touch_frame_dirty: false, + touch_frame_timestamp: Duration::ZERO, overview_scroll_swipe_gesture: 
ScrollSwipeGesture::new(), vertical_wheel_tracker: ScrollTracker::new(120), horizontal_wheel_tracker: ScrollTracker::new(120), @@ -3615,7 +3848,7 @@ impl Niri { pub fn output_for_touch(&self) -> Option<&Output> { let config = self.config.borrow(); - let map_to_output = config.input.touch.map_to_output.as_ref(); + let map_to_output = config.input.touchscreen.map_to_output.as_ref(); map_to_output .and_then(|name| self.output_by_name_match(name)) .or_else(|| self.global_space.outputs().next()) diff --git a/src/protocols/foreign_toplevel.rs b/src/protocols/foreign_toplevel.rs index 11379169de..a8eb248d75 100644 --- a/src/protocols/foreign_toplevel.rs +++ b/src/protocols/foreign_toplevel.rs @@ -5,11 +5,13 @@ use std::sync::Arc; use arrayvec::ArrayVec; use smithay::output::Output; use smithay::reexports::wayland_protocols::ext::foreign_toplevel_list::v1::server::{ - ext_foreign_toplevel_handle_v1::{self, ExtForeignToplevelHandleV1}, ext_foreign_toplevel_list_v1::{self, ExtForeignToplevelListV1}, + ext_foreign_toplevel_handle_v1::{self, ExtForeignToplevelHandleV1}, + ext_foreign_toplevel_list_v1::{self, ExtForeignToplevelListV1}, }; use smithay::reexports::wayland_protocols::xdg::shell::server::xdg_toplevel; use smithay::reexports::wayland_protocols_wlr::foreign_toplevel::v1::server::{ - zwlr_foreign_toplevel_handle_v1::{self, ZwlrForeignToplevelHandleV1}, zwlr_foreign_toplevel_manager_v1::{self, ZwlrForeignToplevelManagerV1}, + zwlr_foreign_toplevel_handle_v1::{self, ZwlrForeignToplevelHandleV1}, + zwlr_foreign_toplevel_manager_v1::{self, ZwlrForeignToplevelManagerV1}, }; use smithay::reexports::wayland_server::backend::ClientId; use smithay::reexports::wayland_server::protocol::wl_output::WlOutput; @@ -18,12 +20,12 @@ use smithay::reexports::wayland_server::{ Client, DataInit, Dispatch, DisplayHandle, GlobalDispatch, New, Resource, }; use smithay::wayland::shell::xdg::{ - ToplevelState, ToplevelStateSet, XdgToplevelSurfaceRoleAttributes + ToplevelState, 
ToplevelStateSet, XdgToplevelSurfaceRoleAttributes, }; use crate::niri::State; -use crate::window::mapped::MappedId; use crate::utils::with_toplevel_role_and_current; +use crate::window::mapped::MappedId; const EXT_LIST_VERSION: u32 = 1; const WLR_MANAGEMENT_VERSION: u32 = 3; diff --git a/src/ui/hotkey_overlay.rs b/src/ui/hotkey_overlay.rs index b8fdca1300..82f9616f00 100644 --- a/src/ui/hotkey_overlay.rs +++ b/src/ui/hotkey_overlay.rs @@ -5,7 +5,11 @@ use std::fmt::Write as _; use std::iter::zip; use std::rc::Rc; -use niri_config::{Action, Bind, Config, Key, ModKey, Modifiers, Trigger}; +use niri_config::input::{EdgeZone, ScreenEdge}; +use niri_config::{ + Action, Bind, Config, Key, ModKey, Modifiers, PinchDirection, RotateDirection, SwipeDirection, + Trigger, +}; use pangocairo::cairo::{self, ImageSurface}; use pangocairo::pango::{AttrColor, AttrInt, AttrList, AttrString, FontDescription, Weight}; use smithay::backend::renderer::element::Kind; @@ -560,12 +564,102 @@ fn key_name(screen_reader: bool, mod_key: ModKey, key: &Key) -> String { Trigger::TouchpadScrollUp => String::from("Touchpad Scroll Up"), Trigger::TouchpadScrollLeft => String::from("Touchpad Scroll Left"), Trigger::TouchpadScrollRight => String::from("Touchpad Scroll Right"), + Trigger::TouchpadSwipe { fingers, direction } => { + format!( + "Touchpad {fingers}-Finger Swipe {}", + swipe_dir_label(direction) + ) + } + Trigger::TouchpadTapHold { fingers } => { + format!("Touchpad {fingers}-Finger Tap-Hold") + } + Trigger::TouchpadTapHoldDrag { fingers } => { + format!("Touchpad {fingers}-Finger Tap-Hold-Drag") + } + Trigger::TouchpadPinch { fingers, direction } => { + format!( + "Touchpad {fingers}-Finger Pinch {}", + pinch_dir_label(direction) + ) + } + Trigger::TouchSwipe { fingers, direction } => { + format!( + "Touch {fingers}-Finger Swipe {}", + swipe_dir_label(direction) + ) + } + Trigger::TouchPinch { fingers, direction } => { + format!( + "Touch {fingers}-Finger Pinch {}", + 
pinch_dir_label(direction) + ) + } + Trigger::TouchRotate { fingers, direction } => { + format!( + "Touch {fingers}-Finger Rotate {}", + rotate_dir_label(direction) + ) + } + Trigger::TouchTap { fingers } => { + format!("Touch {fingers}-Finger Tap") + } + Trigger::TouchTapHoldDrag { fingers, direction } => match direction { + Some(d) => format!( + "Touch {fingers}-Finger Tap-Hold-Drag {}", + swipe_dir_label(d) + ), + None => format!("Touch {fingers}-Finger Tap-Hold-Drag"), + }, + Trigger::TouchEdge { edge, zone } => format_touch_edge_label(edge, zone), }; name.push_str(&pretty); name } +fn swipe_dir_label(d: SwipeDirection) -> &'static str { + match d { + SwipeDirection::Up => "Up", + SwipeDirection::Down => "Down", + SwipeDirection::Left => "Left", + SwipeDirection::Right => "Right", + } +} + +fn pinch_dir_label(d: PinchDirection) -> &'static str { + match d { + PinchDirection::In => "In", + PinchDirection::Out => "Out", + } +} + +fn rotate_dir_label(d: RotateDirection) -> &'static str { + match d { + RotateDirection::Cw => "CW", + RotateDirection::Ccw => "CCW", + } +} + +fn format_touch_edge_label(edge: ScreenEdge, zone: Option) -> String { + // Use niri_config's shared edge/zone naming, capitalized for display. 
+ let edge_name = capitalize_first(edge.as_kdl_name()); + match zone { + None => format!("Touch Edge {edge_name}"), + Some(z) => { + let zone_name = capitalize_first(niri_config::input::zone_kdl_name(edge, z)); + format!("Touch Edge {edge_name}-{zone_name}") + } + } +} + +fn capitalize_first(s: &str) -> String { + let mut chars = s.chars(); + match chars.next() { + None => String::new(), + Some(c) => c.to_ascii_uppercase().to_string() + chars.as_str(), + } +} + fn prettify_keysym_name(screen_reader: bool, name: &str) -> String { let name = if screen_reader { name diff --git a/src/ui/mru.rs b/src/ui/mru.rs index d8ec6de16e..e151403e5e 100644 --- a/src/ui/mru.rs +++ b/src/ui/mru.rs @@ -1840,6 +1840,9 @@ fn make_preset_opened_binds() -> Vec { allow_when_locked: false, allow_inhibiting: false, hotkey_overlay_title: None, + sensitivity: None, + natural_scroll: false, + tag: None, }) }; diff --git a/src/window/mod.rs b/src/window/mod.rs index 14527cd242..e2e51d408d 100644 --- a/src/window/mod.rs +++ b/src/window/mod.rs @@ -125,6 +125,12 @@ pub struct ResolvedWindowRules { /// Rules for this window's popups. pub popups: ResolvedPopupsRules, + + /// Forward touchscreen multi-finger gestures to this window instead of + /// letting niri's gesture recognizer consume them. Intended for apps that + /// implement their own gestures (browsers, drawing apps). Mod+gestures and + /// edge gestures still go to the compositor. Touchscreen only. + pub touchscreen_gesture_passthrough: Option, } impl<'a> WindowRef<'a> { @@ -308,6 +314,10 @@ impl ResolvedWindowRules { .merge_with(&rule.background_effect); resolved.popups.merge_with(&rule.popups); + + if let Some(x) = rule.touchscreen_gesture_passthrough { + resolved.touchscreen_gesture_passthrough = Some(x); + } } resolved.open_on_output = open_on_output.map(|x| x.to_owned());