diff --git a/CHANGELOG.md b/CHANGELOG.md
index 30678be..1aeaad1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,112 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [3.10.0] - 2026-04-02
+
+### Added
+- Split `power-bi-report` skill into 5 focused skills: `power-bi-report` (overview), `power-bi-visuals`, `power-bi-pages`, `power-bi-themes`, `power-bi-filters` (12 skills total)
+- CLAUDE.md snippet now organises skills by layer (Semantic Model vs Report Layer)
+- Skill triggering test suite (19 prompts, 12 skills)
+
+### Fixed
+- `filter_add_topn` inner subquery now correctly references category table when it differs from order-by table
+- `theme_set` resourcePackages structure now matches Desktop format (flat `items` array)
+- `visual_bind` type annotation corrected to `list[dict[str, Any]]`
+- `tmdl_diff` hierarchy changes reported as `hierarchies_*` instead of falling to `other_*`
+- Missing `VisualTypeError` and `ReportNotFoundError` classes added to `errors.py`
+- `report`, `visual`, `filters`, `format`, `bookmarks` command groups registered in CLI
+
+### Changed
+- README rewritten to cover both semantic model and report layers, 12 skills, 27 command groups, 32 visual types
+
+## [3.9.0] - 2026-04-01
+
+### Added
+- `pbi database diff-tmdl` command: compare two TMDL export folders offline, summarise changes (tables, measures, columns, relationships, model properties); lineageTag-only changes are stripped to avoid false positives
+
+### Fixed
+- `filter_add_topn` inner subquery now correctly references the category table when it differs from the order-by table (cross-table TopN filters)
+- `theme_set` resourcePackages structure now matches Desktop format (flat `items`, not nested `resourcePackage`)
+- `visual_bind` type annotation corrected from `list[dict[str, str]]` to `list[dict[str, Any]]`
+- `tmdl_diff` hierarchy changes now reported as `hierarchies_*` instead of falling through to `other_*`
+- Missing `VisualTypeError` and `ReportNotFoundError` error classes added to `errors.py`
+- `report`, `visual`, `filters`, `format`, `bookmarks` command groups registered in CLI (were implemented but inaccessible)
+
+## [3.8.0] - 2026-04-01
+
+### Added
+- `azureMap` visual type (Azure Maps) with Category and Size roles
+- `pageBinding` field surfaced in `page_get()` for drillthrough pages
+
+### Fixed
+- `card` and `multiRowCard` queryState role corrected from `Fields` to `Values` (matches Desktop)
+- `kpi` template: added `TrendLine` queryState key (date/axis column for sparkline)
+- `gauge` template: added `MaxValue` queryState key (target/max measure)
+- `MaxValue` added to `MEASURE_ROLES`
+- kpi role aliases: `--trend`, `--trend_line`
+- gauge role aliases: `--max`, `--max_value`, `--target`
+
+## [3.7.0] - 2026-04-01
+
+### Added
+- `page_type`, `filter_config`, and `visual_interactions` fields in page read operations (`page_get`, `page_list`)
+
+## [3.6.0] - 2026-04-01
+
+### Added
+- `image` visual type (static images, no data binding)
+- `shape` visual type (decorative shapes)
+- `textbox` visual type (rich text)
+- `pageNavigator` visual type (page navigation buttons)
+- `advancedSlicerVisual` visual type (tile/image slicer)
+
+## [3.5.0] - 2026-04-01
+
+### Added
+- `clusteredColumnChart` visual type with aliases `clustered_column`
+- `clusteredBarChart` visual type with aliases `clustered_bar`
+- `textSlicer` visual type with alias `text_slicer`
+- `listSlicer` visual type with alias `list_slicer`
+
+## [3.4.0] - 2026-03-31
+
+### Added
+- `cardVisual` (modern card) visual type with `Data` role and aliases `card_visual`, `modern_card`
+- `actionButton` visual type with alias `action_button`, `button`
+- `pbi report set-background` command to set page background colour
+- `pbi report set-visibility` command to hide/show pages
+- `pbi visual set-container` command for border, background, and title on visual containers
+
+### Fixed
+- Visual container schema URL updated from 1.5.0 to 2.7.0
+- `visualGroup` containers tagged as type `group` in `visual_list`
+- Colour validation, KeyError guards, visibility surfacing, no-op detection
+
+## [3.0.0] - 2026-03-31
+
+### Added
+- **PBIR report layer**: `pbi report` command group (create, info, validate, list-pages, add-page, delete-page, get-page, set-theme, get-theme, diff-theme, preview, reload, convert)
+- **Visual CRUD**: `pbi visual` command group (add, get, list, update, delete, bind, where, bulk-bind, bulk-update, bulk-delete, calc-add, calc-list, calc-delete, set-container)
+- **Filters**: `pbi filters` command group (list, add-categorical, add-topn, add-relative-date, remove, clear)
+- **Formatting**: `pbi format` command group (get, clear, background-gradient, background-conditional, background-measure)
+- **Bookmarks**: `pbi bookmarks` command group (list, get, add, delete, set-visibility)
+- 20 visual type templates (barChart, lineChart, card, tableEx, pivotTable, slicer, kpi, gauge, donutChart, columnChart, areaChart, ribbonChart, waterfallChart, scatterChart, funnelChart, multiRowCard, treemap, cardNew, stackedBarChart, lineStackedColumnComboChart)
+- HTML preview server (`pbi report preview`) with live reload
+- Power BI Desktop reload trigger (`pbi report reload`)
+- PBIR path auto-detection (walk-up from CWD, `.pbip` sibling detection)
+- `power-bi-report` Claude Code skill (8th skill)
+- Visual data binding with `Table[Column]` notation and role aliases
+- Visual calculations (calc-add, calc-list, calc-delete)
+- Bulk operations for mass visual updates across pages
+
+### Changed
+- Architecture: pbi-cli now covers both semantic model layer (via .NET TOM) and report layer (via PBIR JSON files)
+
+## [2.2.0] - 2026-03-27
+
+### Added
+- Promotional SVG assets and redesigned README
+
## [2.0.0] - 2026-03-27
### Breaking
diff --git a/README.md b/README.md
index 21f3850..720d2b7 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,7 @@
Give Claude Code the Power BI skills it needs.
- Install once, then just ask Claude to work with your semantic models.
+ Install once, then just ask Claude to work with your semantic models and reports.
@@ -117,22 +117,34 @@ Add the printed path to your system PATH, then restart your terminal. We recomme
## Skills
-After running `pbi connect`, Claude Code discovers **7 Power BI skills** automatically. Each skill teaches Claude a different area. You don't need to memorize commands.
+After running `pbi connect`, Claude Code discovers **12 Power BI skills** automatically. Each skill teaches Claude a different area. You don't need to memorize commands.
-
+
+### Semantic Model Skills (require `pbi connect`)
+
| Skill | What you say | What Claude does |
|-------|-------------|-----------------|
| **DAX** | *"What are the top 10 products by revenue?"* | Writes and executes DAX queries, validates syntax |
| **Modeling** | *"Create a star schema with Sales and Calendar"* | Creates tables, relationships, measures, hierarchies |
-| **Deployment** | *"Save a snapshot before I make changes"* | Exports/imports TMDL, manages transactions |
+| **Deployment** | *"Save a snapshot before I make changes"* | Exports/imports TMDL, manages transactions, diffs snapshots |
| **Security** | *"Set up RLS for regional managers"* | Creates roles, filters, perspectives |
| **Docs** | *"Document everything in this model"* | Generates data dictionaries, measure inventories |
| **Partitions** | *"Show me the M query for the Sales table"* | Manages partitions, expressions, calendar config |
| **Diagnostics** | *"Why is this query so slow?"* | Traces queries, checks model health, benchmarks |
+### Report Layer Skills (no connection needed)
+
+| Skill | What you say | What Claude does |
+|-------|-------------|-----------------|
+| **Report** | *"Create a new report project for Sales"* | Scaffolds PBIR reports, validates structure, previews layout |
+| **Visuals** | *"Add a bar chart showing revenue by region"* | Adds, binds, updates, bulk-manages 32 visual types |
+| **Pages** | *"Add an Executive Overview page"* | Manages pages, bookmarks, visibility, drillthrough |
+| **Themes** | *"Apply our corporate brand colours"* | Applies themes, conditional formatting, colour scales |
+| **Filters** | *"Show only the top 10 products"* | Adds page/visual filters (TopN, date, categorical) |
+
---
## Architecture
@@ -141,7 +153,10 @@ After running `pbi connect`, Claude Code discovers **7 Power BI skills** automat
-Direct in-process .NET interop from Python to Power BI Desktop. No MCP server, no external binaries, sub-second execution.
+**Two layers, one CLI:**
+
+- **Semantic Model layer** -- Direct in-process .NET interop from Python to Power BI Desktop via TOM/ADOMD. No MCP server, no external binaries, sub-second execution.
+- **Report layer** -- Reads and writes PBIR (Enhanced Report Format) JSON files directly. No connection needed. Works with `.pbip` projects.
Configuration details
@@ -163,21 +178,57 @@ Bundled DLLs ship inside the Python package (`pbi_cli/dlls/`).
## All Commands
-
-
-
+27 command groups covering both the semantic model and the report layer.
+
+| Category | Commands |
+|----------|----------|
+| **Queries** | `dax execute`, `dax validate`, `dax clear-cache` |
+| **Model** | `table`, `column`, `measure`, `relationship`, `hierarchy`, `calc-group` |
+| **Deploy** | `database export-tmdl`, `database import-tmdl`, `database export-tmsl`, `database diff-tmdl`, `transaction` |
+| **Security** | `security-role`, `perspective` |
+| **Connect** | `connect`, `disconnect`, `connections list`, `connections last` |
+| **Data** | `partition`, `expression`, `calendar`, `advanced culture` |
+| **Diagnostics** | `trace start/stop/fetch/export`, `model stats` |
+| **Report** | `report create`, `report info`, `report validate`, `report preview`, `report reload` |
+| **Pages** | `report add-page`, `report delete-page`, `report get-page`, `report set-background`, `report set-visibility` |
+| **Visuals** | `visual add/get/list/update/delete`, `visual bind`, `visual bulk-bind/bulk-update/bulk-delete`, `visual where` |
+| **Filters** | `filters list`, `filters add-categorical/add-topn/add-relative-date`, `filters remove/clear` |
+| **Formatting** | `format get/clear`, `format background-gradient/background-conditional/background-measure` |
+| **Bookmarks** | `bookmarks list/get/add/delete/set-visibility` |
+| **Tools** | `setup`, `repl`, `skills install/list/uninstall` |
Use `--json` for machine-readable output (for scripts and AI agents):
```bash
pbi --json measure list
pbi --json dax execute "EVALUATE Sales"
+pbi --json visual list --page overview
```
Run `pbi --help` for full options.
---
+## Supported Visual Types (32)
+
+pbi-cli supports creating and binding data to 32 Power BI visual types:
+
+**Charts:** bar, line, column, area, ribbon, waterfall, stacked bar, clustered bar, clustered column, scatter, funnel, combo, donut/pie, treemap
+
+**Cards/KPIs:** card (legacy), cardVisual (modern), cardNew, multi-row card, KPI, gauge
+
+**Tables:** table, matrix (pivot table)
+
+**Slicers:** slicer, text slicer, list slicer, advanced slicer (tile/image)
+
+**Maps:** Azure Map
+
+**Decorative:** action button, image, shape, textbox, page navigator
+
+Use friendly aliases: `pbi visual add --page p1 --type bar` instead of `--type barChart`.
+
+---
+
## REPL Mode
For interactive work, the REPL keeps a persistent connection:
@@ -208,7 +259,7 @@ pip install -e ".[dev]"
```bash
ruff check src/ tests/ # Lint
mypy src/ # Type check
-pytest -m "not e2e" # Run tests
+pytest -m "not e2e" # Run tests (488 tests)
```
---
diff --git a/README.pypi.md b/README.pypi.md
index 570f2f9..a881664 100644
--- a/README.pypi.md
+++ b/README.pypi.md
@@ -1,7 +1,7 @@
**Give Claude Code the Power BI skills it needs.**
-Install once, then just ask Claude to work with your semantic models.
+Install once, then just ask Claude to work with your semantic models *and* reports.
@@ -13,14 +13,18 @@ Install once, then just ask Claude to work with your semantic models.
## What is this?
-pbi-cli gives **Claude Code** (and other AI agents) the ability to manage Power BI semantic models. It ships with 7 skills that Claude discovers automatically. You ask in plain English, Claude uses the right `pbi` commands.
+pbi-cli gives **Claude Code** (and other AI agents) the ability to manage Power BI semantic models **and reports**. It ships with 12 skills that Claude discovers automatically. You ask in plain English, Claude uses the right `pbi` commands.
```
You Claude Code pbi-cli Power BI
"Add a YTD measure ---> Uses Power BI ---> CLI commands ---> Desktop
- to the Sales table" skills
+ to the Sales table" skills (12)
```
+**Two layers, one CLI:**
+- **Semantic Model** -- Direct .NET interop to Power BI Desktop (measures, tables, DAX, security)
+- **Report Layer** -- Reads/writes PBIR JSON files directly (visuals, pages, themes, filters)
+
---
## Get Started
@@ -40,8 +44,6 @@ pbi connect # 2. Auto-detects Power BI Desktop and installs ski
That's it. Open Power BI Desktop with a `.pbix` file, run `pbi connect`, and everything is set up automatically. Open Claude Code and start asking.
-You can also specify the port manually: `pbi connect -d localhost:54321`
-
> **Requires:** Windows with Python 3.10+ and Power BI Desktop running.
@@ -60,12 +62,7 @@ Find the directory:
python -c "import site; print(site.getusersitepackages().replace('site-packages','Scripts'))"
```
-Add the printed path to your system PATH:
-```cmd
-setx PATH "%PATH%;C:\Users\YourName\AppData\Roaming\Python\PythonXXX\Scripts"
-```
-
-Then **restart your terminal**. We recommend `pipx` instead to avoid this entirely.
+Add the printed path to your system PATH, then restart your terminal. We recommend `pipx` to avoid this entirely.
@@ -73,182 +70,76 @@ Then **restart your terminal**. We recommend `pipx` instead to avoid this entire
## Skills
-After running `pbi connect`, Claude Code discovers **7 Power BI skills**. Each skill teaches Claude a different area of Power BI development. You don't need to memorize commands. Just describe what you want.
+After running `pbi connect`, Claude Code discovers **12 Power BI skills**. Each skill teaches Claude a different area. You don't need to memorize commands.
-```
-You: "Set up RLS for regional managers"
- |
- v
-Claude Code --> Picks the right skill
- |
- +-- Modeling
- +-- DAX
- +-- Deployment
- +-- Security
- +-- Documentation
- +-- Diagnostics
- +-- Partitions
-```
+### Semantic Model (require `pbi connect`)
-### Modeling
+| Skill | What you say | What Claude does |
+|-------|-------------|-----------------|
+| **DAX** | *"Top 10 products by revenue?"* | Writes and executes DAX queries |
+| **Modeling** | *"Create a star schema"* | Creates tables, relationships, measures |
+| **Deployment** | *"Save a snapshot"* | Exports/imports TMDL, diffs snapshots |
+| **Security** | *"Set up RLS"* | Creates roles, filters, perspectives |
+| **Docs** | *"Document this model"* | Generates data dictionaries |
+| **Partitions** | *"Show the M query"* | Manages partitions, expressions |
+| **Diagnostics** | *"Why is this slow?"* | Traces queries, benchmarks |
-> *"Create a star schema with Sales, Products, and Calendar tables"*
+### Report Layer (no connection needed)
-Claude creates the tables, sets up relationships, marks the date table, and adds formatted measures. Covers tables, columns, measures, relationships, hierarchies, and calculation groups.
-
-
-Example: what Claude runs behind the scenes
-
-```bash
-pbi table create Sales --mode Import
-pbi table create Products --mode Import
-pbi table create Calendar --mode Import
-pbi relationship create --from-table Sales --from-column ProductKey --to-table Products --to-column ProductKey
-pbi relationship create --from-table Sales --from-column DateKey --to-table Calendar --to-column DateKey
-pbi table mark-date Calendar --date-column Date
-pbi measure create "Total Revenue" -e "SUM(Sales[Revenue])" -t Sales --format-string "$#,##0"
-```
-
-
-### DAX
-
-> *"What are the top 10 products by revenue this year?"*
-
-Claude writes and executes DAX queries, validates syntax, and creates measures with time intelligence patterns like YTD, previous year, and rolling averages.
-
-
-Example: what Claude runs behind the scenes
-
-```bash
-pbi dax execute "
-EVALUATE
-TOPN(
- 10,
- ADDCOLUMNS(VALUES(Products[Name]), \"Revenue\", CALCULATE(SUM(Sales[Amount]))),
- [Revenue], DESC
-)
-"
-```
-
-
-### Deployment
-
-> *"Export the model to Git for version control"*
-
-Claude exports your model as TMDL files for version control and imports them back. Handles transactions for safe multi-step changes.
-
-
-Example: what Claude runs behind the scenes
-
-```bash
-pbi database export-tmdl ./model/
-# ... you commit to git ...
-pbi database import-tmdl ./model/
-```
-
-
-### Security
-
-> *"Set up row-level security so regional managers only see their region"*
-
-Claude creates RLS roles with descriptions, sets up perspectives for different user groups, and exports the model for version control.
-
-
-Example: what Claude runs behind the scenes
-
-```bash
-pbi security-role create "Regional Manager" --description "Users see only their region's data"
-pbi perspective create "Executive Dashboard"
-pbi perspective create "Regional Detail"
-pbi database export-tmdl ./model-backup/
-```
-
-
-### Documentation
-
-> *"Document everything in this model"*
-
-Claude catalogs every table, measure, column, and relationship. Generates data dictionaries, measure inventories, and can export the full model as TMDL for human-readable reference.
-
-
-Example: what Claude runs behind the scenes
-
-```bash
-pbi --json model get
-pbi --json model stats
-pbi --json table list
-pbi --json measure list
-pbi --json relationship list
-pbi database export-tmdl ./model-docs/
-```
-
-
-### Diagnostics
-
-> *"Why is this DAX query so slow?"*
-
-Claude traces query execution, clears caches for clean benchmarks, checks model health, and verifies the environment.
-
-
-Example: what Claude runs behind the scenes
-
-```bash
-pbi dax clear-cache
-pbi trace start
-pbi dax execute "EVALUATE SUMMARIZECOLUMNS(...)" --timeout 300
-pbi trace stop
-pbi trace export ./trace.json
-```
-
-
-### Partitions & Expressions
-
-> *"Set up partitions for incremental refresh on the Sales table"*
-
-Claude manages table partitions, shared M/Power Query expressions, and calendar table configuration.
-
-
-Example: what Claude runs behind the scenes
-
-```bash
-pbi partition list --table Sales
-pbi partition create "Sales_2024" --table Sales --expression "..." --mode Import
-pbi expression create "ServerURL" --expression '"https://api.example.com"'
-pbi calendar mark Calendar --date-column Date
-```
-
+| Skill | What you say | What Claude does |
+|-------|-------------|-----------------|
+| **Report** | *"Create a new report"* | Scaffolds PBIR reports, validates, previews |
+| **Visuals** | *"Add a bar chart"* | Adds, binds, bulk-manages 32 visual types |
+| **Pages** | *"Add a new page"* | Manages pages, bookmarks, drillthrough |
+| **Themes** | *"Apply brand colours"* | Themes, conditional formatting |
+| **Filters** | *"Show top 10 only"* | TopN, date, categorical filters |
---
## All Commands
-22 command groups covering the full Power BI Tabular Object Model. You rarely need these directly when using Claude Code, but they're available for scripting, CI/CD, or manual use.
+27 command groups covering both the semantic model and the report layer.
| Category | Commands |
|----------|----------|
| **Queries** | `dax execute`, `dax validate`, `dax clear-cache` |
| **Model** | `table`, `column`, `measure`, `relationship`, `hierarchy`, `calc-group` |
-| **Deploy** | `database export-tmdl`, `database import-tmdl`, `database export-tmsl`, `transaction` |
+| **Deploy** | `database export-tmdl/import-tmdl/export-tmsl/diff-tmdl`, `transaction` |
| **Security** | `security-role`, `perspective` |
-| **Connect** | `connect`, `disconnect`, `connections list`, `connections last` |
+| **Connect** | `connect`, `disconnect`, `connections list/last` |
| **Data** | `partition`, `expression`, `calendar`, `advanced culture` |
-| **Diagnostics** | `trace start`, `trace stop`, `trace fetch`, `trace export`, `model stats` |
-| **Tools** | `setup`, `repl`, `skills install`, `skills list` |
+| **Diagnostics** | `trace start/stop/fetch/export`, `model stats` |
+| **Report** | `report create/info/validate/preview/reload`, `report add-page/delete-page/get-page` |
+| **Visuals** | `visual add/get/list/update/delete/bind`, `visual bulk-bind/bulk-update/bulk-delete` |
+| **Filters** | `filters list/add-categorical/add-topn/add-relative-date/remove/clear` |
+| **Formatting** | `format get/clear/background-gradient/background-conditional/background-measure` |
+| **Bookmarks** | `bookmarks list/get/add/delete/set-visibility` |
+| **Tools** | `setup`, `repl`, `skills install/list/uninstall` |
-Use `--json` for machine-readable output (for scripts and AI agents):
+Use `--json` for machine-readable output:
```bash
pbi --json measure list
-pbi --json dax execute "EVALUATE Sales"
+pbi --json visual list --page overview
```
-Run `pbi --help` for full options.
+---
+
+## 32 Supported Visual Types
+
+**Charts:** bar, line, column, area, ribbon, waterfall, stacked bar, clustered bar, clustered column, scatter, funnel, combo, donut/pie, treemap
+
+**Cards/KPIs:** card, cardVisual (modern), cardNew, multi-row card, KPI, gauge
+
+**Tables:** table, matrix • **Slicers:** slicer, text, list, advanced • **Maps:** Azure Map
+
+**Decorative:** action button, image, shape, textbox, page navigator
---
## REPL Mode
-For interactive work, the REPL keeps a persistent connection alive between commands:
+For interactive work, the REPL keeps a persistent connection:
```
$ pbi repl
@@ -261,44 +152,7 @@ pbi(localhost-54321)> dax execute "EVALUATE TOPN(5, Sales)"
pbi(localhost-54321)> exit
```
-Tab completion, command history, and a dynamic prompt showing your active connection.
-
----
-
-## How It Works
-
-pbi-cli connects directly to Power BI Desktop's Analysis Services engine via pythonnet and the .NET Tabular Object Model (TOM). No external binaries or MCP servers needed. Everything runs in-process for sub-second command execution.
-
-```
-+------------------+ +---------------------+ +------------------+
-| pbi-cli | | Bundled TOM DLLs | | Power BI |
-| (Python CLI) | pythnet | (.NET in-process) | XMLA | Desktop |
-| Click commands |-------->| TOM / ADOMD.NET |-------->| msmdsrv.exe |
-+------------------+ +---------------------+ +------------------+
-```
-
-**Why a CLI?** When an AI agent uses an MCP server directly, the tool schemas consume ~4,000+ tokens per tool in the context window. A `pbi` command costs ~30 tokens. Same capabilities, 100x less context.
-
-
-Configuration details
-
-All config lives in `~/.pbi-cli/`:
-
-```
-~/.pbi-cli/
- config.json # Default connection preference
- connections.json # Named connections
- repl_history # REPL command history
-```
-
-Bundled DLLs ship inside the Python package (`pbi_cli/dlls/`):
-- Microsoft.AnalysisServices.Tabular.dll
-- Microsoft.AnalysisServices.AdomdClient.dll
-- Microsoft.AnalysisServices.Core.dll
-- Microsoft.AnalysisServices.Tabular.Json.dll
-- Microsoft.AnalysisServices.dll
-
-
+Tab completion, command history, and a dynamic prompt.
---
@@ -313,7 +167,7 @@ pip install -e ".[dev]"
```bash
ruff check src/ tests/ # Lint
mypy src/ # Type check
-pytest -m "not e2e" # Run tests
+pytest -m "not e2e" # Run tests (488 tests)
```
---
diff --git a/pyproject.toml b/pyproject.toml
index 95759e8..c4b6903 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,15 +4,15 @@ build-backend = "setuptools.build_meta"
[project]
name = "pbi-cli-tool"
-version = "2.2.0"
-description = "CLI for Power BI semantic models - direct .NET connection for token-efficient AI agent usage"
+version = "3.10.0"
+description = "CLI for Power BI semantic models and PBIR reports - direct .NET connection for token-efficient AI agent usage"
readme = "README.pypi.md"
license = {text = "MIT"}
requires-python = ">=3.10"
authors = [
{name = "pbi-cli contributors"},
]
-keywords = ["power-bi", "cli", "semantic-model", "dax", "claude-code", "tom"]
+keywords = ["power-bi", "cli", "semantic-model", "dax", "claude-code", "tom", "pbir", "report"]
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
@@ -50,6 +50,8 @@ dev = [
"ruff>=0.4.0",
"mypy>=1.10",
]
+reload = ["pywin32>=306"]
+preview = ["websockets>=12.0"]
[tool.setuptools.packages.find]
where = ["src"]
@@ -57,6 +59,7 @@ where = ["src"]
[tool.setuptools.package-data]
"pbi_cli.skills" = ["**/*.md"]
"pbi_cli.dlls" = ["*.dll"]
+"pbi_cli.templates" = ["**/*.json"]
[tool.ruff]
target-version = "py310"
@@ -71,6 +74,10 @@ select = ["E", "F", "I", "N", "W", "UP"]
"src/pbi_cli/core/session.py" = ["N806"]
"src/pbi_cli/core/tom_backend.py" = ["N806", "N814"]
"src/pbi_cli/core/dotnet_loader.py" = ["N806", "N814"]
+# Win32 API constants use UPPER_CASE; PowerShell inline scripts are long
+"src/pbi_cli/utils/desktop_reload.py" = ["N806", "E501"]
+# HTML/SVG template strings are inherently long
+"src/pbi_cli/preview/renderer.py" = ["E501"]
# Mock objects mirror .NET CamelCase API
"tests/conftest.py" = ["N802", "N806"]
diff --git a/src/pbi_cli/__init__.py b/src/pbi_cli/__init__.py
index 627233f..e9f37c9 100644
--- a/src/pbi_cli/__init__.py
+++ b/src/pbi_cli/__init__.py
@@ -1,3 +1,3 @@
"""pbi-cli: CLI for Power BI semantic models via direct .NET interop."""
-__version__ = "2.2.0"
+__version__ = "3.10.0"
diff --git a/src/pbi_cli/commands/_helpers.py b/src/pbi_cli/commands/_helpers.py
index 9fbd101..e02d2f1 100644
--- a/src/pbi_cli/commands/_helpers.py
+++ b/src/pbi_cli/commands/_helpers.py
@@ -5,12 +5,20 @@ from __future__ import annotations
from collections.abc import Callable
from typing import TYPE_CHECKING, Any
+import click
+
from pbi_cli.core.errors import TomError
from pbi_cli.core.output import format_result, print_error
if TYPE_CHECKING:
from pbi_cli.main import PbiContext
+# Statuses that indicate a write operation (triggers Desktop sync)
+_WRITE_STATUSES = frozenset({
+ "created", "deleted", "updated", "applied", "added",
+ "cleared", "bound", "removed", "set",
+})
+
def run_command(
ctx: PbiContext,
@@ -20,18 +28,82 @@ def run_command(
"""Execute a backend function with standard error handling.
Calls ``fn(**kwargs)`` and formats the output based on the
- ``--json`` flag. Returns the result or exits on error.
+ ``--json`` flag.
+
+ If the current Click context has a ``report_path`` key (set by
+ report-layer command groups), write operations automatically
+ trigger a safe Desktop sync: save Desktop's work, re-apply our
+ PBIR changes, and reopen.
"""
try:
result = fn(**kwargs)
format_result(result, ctx.json_output)
- return result
except Exception as e:
print_error(str(e))
if not ctx.repl_mode:
raise SystemExit(1)
raise TomError(fn.__name__, str(e))
+ # Auto-sync Desktop for report-layer write operations
+ if _is_report_write(result):
+ definition_path = kwargs.get("definition_path")
+ _try_desktop_sync(definition_path)
+
+ return result
+
+
+def _is_report_write(result: Any) -> bool:
+ """Check if the result indicates a report-layer write."""
+ if not isinstance(result, dict):
+ return False
+ status = result.get("status", "")
+ if status not in _WRITE_STATUSES:
+ return False
+
+ # Only sync if we're inside a report-layer command group
+ click_ctx = click.get_current_context(silent=True)
+ if click_ctx is None:
+ return False
+
+ # Walk up to the group to find report_path
+ parent = click_ctx.parent
+ while parent is not None:
+ obj = parent.obj
+ if isinstance(obj, dict) and "report_path" in obj:
+ return True
+ parent = parent.parent
+ return False
+
+
+def _try_desktop_sync(definition_path: Any = None) -> None:
+ """Attempt Desktop sync, silently ignore failures."""
+ try:
+ from pbi_cli.utils.desktop_sync import sync_desktop
+
+ # Find report_path hint from the Click context chain
+ report_path = None
+ click_ctx = click.get_current_context(silent=True)
+ parent = click_ctx.parent if click_ctx else None
+ while parent is not None:
+ obj = parent.obj
+ if isinstance(obj, dict) and "report_path" in obj:
+ report_path = obj["report_path"]
+ break
+ parent = parent.parent
+
+ # Convert definition_path to string for sync
+ defn_str = str(definition_path) if definition_path is not None else None
+
+ result = sync_desktop(report_path, definition_path=defn_str)
+ status = result.get("status", "")
+ msg = result.get("message", "")
+ if status == "success":
+ print_error(f" Desktop: {msg}")
+ elif status == "manual":
+ print_error(f" {msg}")
+ except Exception:
+ pass # sync is best-effort, never block the command
+
def build_definition(
required: dict[str, Any],
diff --git a/src/pbi_cli/commands/bookmarks.py b/src/pbi_cli/commands/bookmarks.py
new file mode 100644
index 0000000..2ae8390
--- /dev/null
+++ b/src/pbi_cli/commands/bookmarks.py
@@ -0,0 +1,132 @@
+"""PBIR bookmark management commands."""
+
+from __future__ import annotations
+
+import click
+
+from pbi_cli.commands._helpers import run_command
+from pbi_cli.main import PbiContext, pass_context
+
+
+@click.group()
+@click.option(
+ "--path",
+ "-p",
+ default=None,
+ help="Path to .Report folder (auto-detected from CWD if omitted).",
+)
+@click.pass_context
+def bookmarks(ctx: click.Context, path: str | None) -> None:
+ """Manage report bookmarks."""
+ ctx.ensure_object(dict)
+ ctx.obj["report_path"] = path
+
+
+@bookmarks.command(name="list")
+@click.pass_context
+@pass_context
+def list_bookmarks(ctx: PbiContext, click_ctx: click.Context) -> None:
+ """List all bookmarks in the report."""
+ from pbi_cli.core.bookmark_backend import bookmark_list
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(ctx, bookmark_list, definition_path=definition_path)
+
+
+@bookmarks.command(name="get")
+@click.argument("name")
+@click.pass_context
+@pass_context
+def get_bookmark(ctx: PbiContext, click_ctx: click.Context, name: str) -> None:
+ """Get full details for a bookmark by NAME."""
+ from pbi_cli.core.bookmark_backend import bookmark_get
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(ctx, bookmark_get, definition_path=definition_path, name=name)
+
+
+@bookmarks.command(name="add")
+@click.option("--display-name", "-d", required=True, help="Human-readable bookmark name.")
+@click.option("--page", "-g", required=True, help="Target page name (active section).")
+@click.option("--name", "-n", default=None, help="Bookmark ID (auto-generated if omitted).")
+@click.pass_context
+@pass_context
+def add_bookmark(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ display_name: str,
+ page: str,
+ name: str | None,
+) -> None:
+ """Add a new bookmark pointing to a page."""
+ from pbi_cli.core.bookmark_backend import bookmark_add
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ bookmark_add,
+ definition_path=definition_path,
+ display_name=display_name,
+ target_page=page,
+ name=name,
+ )
+
+
+@bookmarks.command(name="delete")
+@click.argument("name")
+@click.pass_context
+@pass_context
+def delete_bookmark(ctx: PbiContext, click_ctx: click.Context, name: str) -> None:
+ """Delete a bookmark by NAME."""
+ from pbi_cli.core.bookmark_backend import bookmark_delete
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(ctx, bookmark_delete, definition_path=definition_path, name=name)
+
+
+@bookmarks.command(name="set-visibility")
+@click.argument("name")
+@click.option("--page", "-g", required=True, help="Page name (folder name).")
+@click.option("--visual", "-v", required=True, help="Visual name (folder name).")
+@click.option(
+ "--hidden/--visible",
+ default=True,
+ help="Set the visual as hidden (default) or visible in the bookmark.",
+)
+@click.pass_context
+@pass_context
+def set_visibility(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ name: str,
+ page: str,
+ visual: str,
+ hidden: bool,
+) -> None:
+ """Set a visual hidden or visible inside bookmark NAME.
+
+ NAME is the bookmark identifier (hex folder name).
+ Use --hidden to hide the visual, --visible to show it.
+ """
+ from pbi_cli.core.bookmark_backend import bookmark_set_visibility
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ bookmark_set_visibility,
+ definition_path=definition_path,
+ name=name,
+ page_name=page,
+ visual_name=visual,
+ hidden=hidden,
+ )
diff --git a/src/pbi_cli/commands/database.py b/src/pbi_cli/commands/database.py
index 7e81f9d..1235719 100644
--- a/src/pbi_cli/commands/database.py
+++ b/src/pbi_cli/commands/database.py
@@ -48,6 +48,24 @@ def export_tmdl(ctx: PbiContext, folder_path: str) -> None:
run_command(ctx, _export_tmdl, database=session.database, folder_path=folder_path)
+@database.command(name="diff-tmdl")
+@click.argument("base_folder", type=click.Path(exists=True, file_okay=False))
+@click.argument("head_folder", type=click.Path(exists=True, file_okay=False))
+@pass_context
+def diff_tmdl_cmd(ctx: PbiContext, base_folder: str, head_folder: str) -> None:
+ """Compare two TMDL export folders and show what changed.
+
+ Useful for CI/CD to summarise model changes between branches:
+
+ pbi database diff-tmdl ./base-export/ ./head-export/
+
+ No Power BI Desktop connection is required.
+ """
+ from pbi_cli.core.tmdl_diff import diff_tmdl_folders
+
+ run_command(ctx, diff_tmdl_folders, base_folder=base_folder, head_folder=head_folder)
+
+
@database.command(name="export-tmsl")
@pass_context
def export_tmsl(ctx: PbiContext) -> None:
diff --git a/src/pbi_cli/commands/filters.py b/src/pbi_cli/commands/filters.py
new file mode 100644
index 0000000..33a6525
--- /dev/null
+++ b/src/pbi_cli/commands/filters.py
@@ -0,0 +1,244 @@
+"""PBIR filter management commands."""
+
+from __future__ import annotations
+
+import click
+
+from pbi_cli.commands._helpers import run_command
+from pbi_cli.main import PbiContext, pass_context
+
+
+@click.group()
+@click.option(
+ "--path",
+ "-p",
+ default=None,
+ help="Path to .Report folder (auto-detected from CWD if omitted).",
+)
+@click.pass_context
+def filters(ctx: click.Context, path: str | None) -> None:
+ """Manage page and visual filters."""
+ ctx.ensure_object(dict)
+ ctx.obj["report_path"] = path
+
+
+@filters.command(name="list")
+@click.option("--page", required=True, help="Page name (folder name, not display name).")
+@click.option("--visual", default=None, help="Visual name (returns visual filters if given).")
+@click.pass_context
+@pass_context
+def filter_list_cmd(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ page: str,
+ visual: str | None,
+) -> None:
+ """List filters on a page or visual."""
+ from pbi_cli.core.filter_backend import filter_list
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ filter_list,
+ definition_path=definition_path,
+ page_name=page,
+ visual_name=visual,
+ )
+
+
+@filters.command(name="add-categorical")
+@click.option("--page", required=True, help="Page name (folder name, not display name).")
+@click.option("--table", required=True, help="Table name.")
+@click.option("--column", required=True, help="Column name.")
+@click.option(
+ "--value",
+ "values",
+ multiple=True,
+ required=True,
+ help="Value to include (repeat for multiple).",
+)
+@click.option("--visual", default=None, help="Visual name (adds visual filter if given).")
+@click.option("--name", "-n", default=None, help="Filter ID (auto-generated if omitted).")
+@click.pass_context
+@pass_context
+def add_categorical_cmd(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ page: str,
+ table: str,
+ column: str,
+ values: tuple[str, ...],
+ visual: str | None,
+ name: str | None,
+) -> None:
+ """Add a categorical filter to a page or visual."""
+ from pbi_cli.core.filter_backend import filter_add_categorical
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ filter_add_categorical,
+ definition_path=definition_path,
+ page_name=page,
+ table=table,
+ column=column,
+ values=list(values),
+ visual_name=visual,
+ name=name,
+ )
+
+
+@filters.command(name="add-topn")
+@click.option("--page", required=True, help="Page name (folder name, not display name).")
+@click.option("--table", required=True, help="Table containing the filtered column.")
+@click.option("--column", required=True, help="Column to filter (e.g. Country).")
+@click.option("--n", type=int, required=True, help="Number of items to keep.")
+@click.option("--order-by-table", required=True, help="Table containing the ordering column.")
+@click.option("--order-by-column", required=True, help="Column to rank by (e.g. Sales).")
+@click.option(
+ "--direction",
+ default="Top",
+ show_default=True,
+ help="'Top' (highest N) or 'Bottom' (lowest N).",
+)
+@click.option("--visual", default=None, help="Visual name (adds visual filter if given).")
+@click.option("--name", default=None, help="Filter ID (auto-generated if omitted).")
+@click.pass_context
+@pass_context
+def add_topn_cmd(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ page: str,
+ table: str,
+ column: str,
+ n: int,
+ order_by_table: str,
+ order_by_column: str,
+ direction: str,
+ visual: str | None,
+ name: str | None,
+) -> None:
+ """Add a TopN filter (keep top/bottom N rows by a ranking column)."""
+ from pbi_cli.core.filter_backend import filter_add_topn
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ filter_add_topn,
+ definition_path=definition_path,
+ page_name=page,
+ table=table,
+ column=column,
+ n=n,
+ order_by_table=order_by_table,
+ order_by_column=order_by_column,
+ direction=direction,
+ visual_name=visual,
+ name=name,
+ )
+
+
+@filters.command(name="add-relative-date")
+@click.option("--page", required=True, help="Page name (folder name, not display name).")
+@click.option("--table", required=True, help="Table containing the date column.")
+@click.option("--column", required=True, help="Date column to filter (e.g. Date).")
+@click.option("--amount", type=int, required=True, help="Number of periods (e.g. 3).")
+@click.option(
+ "--unit",
+ required=True,
+ help="Time unit: days, weeks, months, or years.",
+)
+@click.option("--visual", default=None, help="Visual name (adds visual filter if given).")
+@click.option("--name", "-n", default=None, help="Filter ID (auto-generated if omitted).")
+@click.pass_context
+@pass_context
+def add_relative_date_cmd(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ page: str,
+ table: str,
+ column: str,
+ amount: int,
+ unit: str,
+ visual: str | None,
+ name: str | None,
+) -> None:
+ """Add a RelativeDate filter (e.g. last 3 months)."""
+ from pbi_cli.core.filter_backend import filter_add_relative_date
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ filter_add_relative_date,
+ definition_path=definition_path,
+ page_name=page,
+ table=table,
+ column=column,
+ amount=amount,
+ time_unit=unit,
+ visual_name=visual,
+ name=name,
+ )
+
+
+@filters.command(name="remove")
+@click.argument("filter_name")
+@click.option("--page", required=True, help="Page name (folder name, not display name).")
+@click.option("--visual", default=None, help="Visual name (removes from visual if given).")
+@click.pass_context
+@pass_context
+def remove_cmd(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ filter_name: str,
+ page: str,
+ visual: str | None,
+) -> None:
+ """Remove a filter by name from a page or visual."""
+ from pbi_cli.core.filter_backend import filter_remove
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ filter_remove,
+ definition_path=definition_path,
+ page_name=page,
+ filter_name=filter_name,
+ visual_name=visual,
+ )
+
+
+@filters.command(name="clear")
+@click.option("--page", required=True, help="Page name (folder name, not display name).")
+@click.option("--visual", default=None, help="Visual name (clears visual filters if given).")
+@click.pass_context
+@pass_context
+def clear_cmd(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ page: str,
+ visual: str | None,
+) -> None:
+ """Remove all filters from a page or visual."""
+ from pbi_cli.core.filter_backend import filter_clear
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ filter_clear,
+ definition_path=definition_path,
+ page_name=page,
+ visual_name=visual,
+ )
diff --git a/src/pbi_cli/commands/format_cmd.py b/src/pbi_cli/commands/format_cmd.py
new file mode 100644
index 0000000..475362e
--- /dev/null
+++ b/src/pbi_cli/commands/format_cmd.py
@@ -0,0 +1,251 @@
+"""PBIR visual conditional formatting commands."""
+
+from __future__ import annotations
+
+import click
+
+from pbi_cli.commands._helpers import run_command
+from pbi_cli.main import PbiContext, pass_context
+
+
+@click.group(name="format")
+@click.option(
+ "--report-path",
+ default=None,
+ help="Path to .Report folder (auto-detected from CWD if omitted).",
+)
+@click.pass_context
+def format_cmd(ctx: click.Context, report_path: str | None) -> None:
+ """Manage visual conditional formatting."""
+ ctx.ensure_object(dict)
+ ctx.obj["report_path"] = report_path
+
+
+@format_cmd.command(name="get")
+@click.argument("visual")
+@click.option("--page", "-p", required=True, help="Page name (folder name, not display name).")
+@click.pass_context
+@pass_context
+def format_get(ctx: PbiContext, click_ctx: click.Context, visual: str, page: str) -> None:
+ """Show current formatting objects for a visual.
+
+ VISUAL is the visual folder name (e.g. 5b30ba9c6ce5b695a8df).
+ """
+ from pbi_cli.core.format_backend import format_get as _format_get
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ _format_get,
+ definition_path=definition_path,
+ page_name=page,
+ visual_name=visual,
+ )
+
+
+@format_cmd.command(name="clear")
+@click.argument("visual")
+@click.option("--page", "-p", required=True, help="Page name (folder name, not display name).")
+@click.pass_context
+@pass_context
+def format_clear(ctx: PbiContext, click_ctx: click.Context, visual: str, page: str) -> None:
+ """Remove all conditional formatting from a visual.
+
+ VISUAL is the visual folder name (e.g. 5b30ba9c6ce5b695a8df).
+ """
+ from pbi_cli.core.format_backend import format_clear as _format_clear
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ _format_clear,
+ definition_path=definition_path,
+ page_name=page,
+ visual_name=visual,
+ )
+
+
+@format_cmd.command(name="background-gradient")
+@click.argument("visual")
+@click.option("--page", "-p", required=True, help="Page name (folder name, not display name).")
+@click.option("--input-table", required=True, help="Table name driving the gradient.")
+@click.option("--input-column", required=True, help="Column name driving the gradient.")
+@click.option(
+ "--field",
+ "field_query_ref",
+ required=True,
+ help='queryRef of the target field (e.g. "Sum(financials.Profit)").',
+)
+@click.option("--min-color", default="minColor", show_default=True, help="Gradient minimum color.")
+@click.option("--max-color", default="maxColor", show_default=True, help="Gradient maximum color.")
+@click.pass_context
+@pass_context
+def background_gradient(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ visual: str,
+ page: str,
+ input_table: str,
+ input_column: str,
+ field_query_ref: str,
+ min_color: str,
+ max_color: str,
+) -> None:
+ """Apply a linear gradient background color rule to a visual column.
+
+ VISUAL is the visual folder name (e.g. 5b30ba9c6ce5b695a8df).
+
+ Example:
+
+ pbi format background-gradient MyVisual --page overview
+ --input-table financials --input-column Profit
+ --field "Sum(financials.Profit)"
+ """
+ from pbi_cli.core.format_backend import format_background_gradient
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ format_background_gradient,
+ definition_path=definition_path,
+ page_name=page,
+ visual_name=visual,
+ input_table=input_table,
+ input_column=input_column,
+ field_query_ref=field_query_ref,
+ min_color=min_color,
+ max_color=max_color,
+ )
+
+
+@format_cmd.command(name="background-conditional")
+@click.argument("visual")
+@click.option("--page", "-p", required=True, help="Page name (folder name, not display name).")
+@click.option("--input-table", required=True, help="Table containing the evaluated column.")
+@click.option("--input-column", required=True, help="Column whose aggregation is tested.")
+@click.option(
+ "--threshold",
+ type=float,
+ required=True,
+ help="Numeric threshold value to compare against.",
+)
+@click.option(
+ "--color",
+ "color_hex",
+ required=True,
+ help="Hex color to apply when condition is met (e.g. #12239E).",
+)
+@click.option(
+ "--comparison",
+ default="gt",
+ show_default=True,
+ help="Comparison: eq, neq, gt, gte, lt, lte.",
+)
+@click.option(
+ "--field",
+ "field_query_ref",
+ default=None,
+ help='queryRef of the target field. Defaults to "Sum(table.column)".',
+)
+@click.pass_context
+@pass_context
+def background_conditional(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ visual: str,
+ page: str,
+ input_table: str,
+ input_column: str,
+ threshold: float,
+ color_hex: str,
+ comparison: str,
+ field_query_ref: str | None,
+) -> None:
+ """Apply a rule-based conditional background color to a visual column.
+
+ VISUAL is the visual folder name (e.g. 5b30ba9c6ce5b695a8df).
+ Colors the cell when Sum(input_column) satisfies the comparison.
+
+ Example:
+
+ pbi format background-conditional MyVisual --page overview
+ --input-table financials --input-column "Units Sold"
+ --threshold 100000 --color "#12239E" --comparison gt
+ """
+ from pbi_cli.core.format_backend import format_background_conditional
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ format_background_conditional,
+ definition_path=definition_path,
+ page_name=page,
+ visual_name=visual,
+ input_table=input_table,
+ input_column=input_column,
+ threshold=threshold,
+ color_hex=color_hex,
+ comparison=comparison,
+ field_query_ref=field_query_ref,
+ )
+
+
+@format_cmd.command(name="background-measure")
+@click.argument("visual")
+@click.option("--page", "-p", required=True, help="Page name (folder name, not display name).")
+@click.option("--measure-table", required=True, help="Table containing the color measure.")
+@click.option(
+ "--measure-property", required=True, help="Name of the DAX measure returning hex color."
+)
+@click.option(
+ "--field",
+ "field_query_ref",
+ required=True,
+ help='queryRef of the target field (e.g. "Sum(financials.Sales)").',
+)
+@click.pass_context
+@pass_context
+def background_measure(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ visual: str,
+ page: str,
+ measure_table: str,
+ measure_property: str,
+ field_query_ref: str,
+) -> None:
+ """Apply a DAX measure-driven background color rule to a visual column.
+
+ VISUAL is the visual folder name (e.g. 5b30ba9c6ce5b695a8df).
+ The DAX measure must return a valid hex color string.
+
+ Example:
+
+ pbi format background-measure MyVisual --page overview
+ --measure-table financials
+ --measure-property "Conditional Formatting Sales"
+ --field "Sum(financials.Sales)"
+ """
+ from pbi_cli.core.format_backend import format_background_measure
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ format_background_measure,
+ definition_path=definition_path,
+ page_name=page,
+ visual_name=visual,
+ measure_table=measure_table,
+ measure_property=measure_property,
+ field_query_ref=field_query_ref,
+ )
diff --git a/src/pbi_cli/commands/report.py b/src/pbi_cli/commands/report.py
new file mode 100644
index 0000000..172ba5d
--- /dev/null
+++ b/src/pbi_cli/commands/report.py
@@ -0,0 +1,324 @@
+"""PBIR report management commands."""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+import click
+
+from pbi_cli.commands._helpers import run_command
+from pbi_cli.main import PbiContext, pass_context
+
+
+@click.group()
+@click.option(
+ "--path",
+ "-p",
+ default=None,
+ help="Path to .Report folder (auto-detected from CWD if omitted).",
+)
+@click.pass_context
+def report(ctx: click.Context, path: str | None) -> None:
+ """Manage Power BI PBIR reports (pages, themes, validation)."""
+ ctx.ensure_object(dict)
+ ctx.obj["report_path"] = path
+
+
+@report.command()
+@click.pass_context
+@pass_context
+def info(ctx: PbiContext, click_ctx: click.Context) -> None:
+ """Show report metadata summary."""
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.core.report_backend import report_info
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(ctx, report_info, definition_path=definition_path)
+
+
+@report.command()
+@click.argument("target_path", type=click.Path())
+@click.option("--name", "-n", required=True, help="Report name.")
+@click.option(
+ "--dataset-path",
+ default=None,
+ help="Relative path to semantic model folder (e.g. ../MyModel.Dataset).",
+)
+@pass_context
+def create(
+ ctx: PbiContext, target_path: str, name: str, dataset_path: str | None
+) -> None:
+ """Scaffold a new PBIR report project."""
+ from pbi_cli.core.report_backend import report_create
+
+ run_command(
+ ctx,
+ report_create,
+ target_path=Path(target_path),
+ name=name,
+ dataset_path=dataset_path,
+ )
+
+
+@report.command(name="list-pages")
+@click.pass_context
+@pass_context
+def list_pages(ctx: PbiContext, click_ctx: click.Context) -> None:
+ """List all pages in the report."""
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.core.report_backend import page_list
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(ctx, page_list, definition_path=definition_path)
+
+
+@report.command(name="add-page")
+@click.option("--display-name", "-d", required=True, help="Page display name.")
+@click.option("--name", "-n", default=None, help="Page ID (auto-generated if omitted).")
+@click.option("--width", type=int, default=1280, help="Page width in pixels.")
+@click.option("--height", type=int, default=720, help="Page height in pixels.")
+@click.pass_context
+@pass_context
+def add_page(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ display_name: str,
+ name: str | None,
+ width: int,
+ height: int,
+) -> None:
+ """Add a new page to the report."""
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.core.report_backend import page_add
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ page_add,
+ definition_path=definition_path,
+ display_name=display_name,
+ name=name,
+ width=width,
+ height=height,
+ )
+
+
+@report.command(name="delete-page")
+@click.argument("name")
+@click.pass_context
+@pass_context
+def delete_page(ctx: PbiContext, click_ctx: click.Context, name: str) -> None:
+ """Delete a page and all its visuals."""
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.core.report_backend import page_delete
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(ctx, page_delete, definition_path=definition_path, page_name=name)
+
+
+@report.command(name="get-page")
+@click.argument("name")
+@click.pass_context
+@pass_context
+def get_page(ctx: PbiContext, click_ctx: click.Context, name: str) -> None:
+ """Get details of a specific page."""
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.core.report_backend import page_get
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(ctx, page_get, definition_path=definition_path, page_name=name)
+
+
+@report.command(name="set-theme")
+@click.option("--file", "-f", required=True, type=click.Path(exists=True), help="Theme JSON file.")
+@click.pass_context
+@pass_context
+def set_theme(ctx: PbiContext, click_ctx: click.Context, file: str) -> None:
+ """Apply a custom theme to the report."""
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.core.report_backend import theme_set
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ theme_set,
+ definition_path=definition_path,
+ theme_path=Path(file),
+ )
+
+
+@report.command(name="get-theme")
+@click.pass_context
+@pass_context
+def get_theme(ctx: PbiContext, click_ctx: click.Context) -> None:
+ """Show the current theme (base and custom) applied to the report."""
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.core.report_backend import theme_get
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(ctx, theme_get, definition_path=definition_path)
+
+
+@report.command(name="diff-theme")
+@click.option(
+ "--file", "-f", required=True, type=click.Path(exists=True),
+ help="Proposed theme JSON file.",
+)
+@click.pass_context
+@pass_context
+def diff_theme(ctx: PbiContext, click_ctx: click.Context, file: str) -> None:
+ """Compare a proposed theme JSON against the currently applied theme.
+
+ Shows which theme keys would be added, removed, or changed.
+ """
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.core.report_backend import theme_diff
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ theme_diff,
+ definition_path=definition_path,
+ theme_path=Path(file),
+ )
+
+
+@report.command(name="set-background")
+@click.argument("page_name")
+@click.option("--color", "-c", required=True, help="Hex color e.g. '#F8F9FA'.")
+@click.pass_context
+@pass_context
+def set_background(
+ ctx: PbiContext, click_ctx: click.Context, page_name: str, color: str
+) -> None:
+ """Set the background color of a page."""
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.core.report_backend import page_set_background
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ page_set_background,
+ definition_path=definition_path,
+ page_name=page_name,
+ color=color,
+ )
+
+
+@report.command(name="set-visibility")
+@click.argument("page_name")
+@click.option(
+ "--hidden/--visible",
+ default=True,
+ help="Hide or show the page in navigation.",
+)
+@click.pass_context
+@pass_context
+def set_visibility(
+ ctx: PbiContext, click_ctx: click.Context, page_name: str, hidden: bool
+) -> None:
+ """Hide or show a page in the report navigation."""
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.core.report_backend import page_set_visibility
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ page_set_visibility,
+ definition_path=definition_path,
+ page_name=page_name,
+ hidden=hidden,
+ )
+
+
+@report.command()
+@click.option("--full", is_flag=True, default=False, help="Run enhanced validation with warnings.")
+@click.pass_context
+@pass_context
+def validate(ctx: PbiContext, click_ctx: click.Context, full: bool) -> None:
+ """Validate the PBIR report structure and JSON files."""
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+
+ if full:
+ from pbi_cli.core.pbir_validators import validate_report_full
+
+ run_command(ctx, validate_report_full, definition_path=definition_path)
+ else:
+ from pbi_cli.core.report_backend import report_validate
+
+ run_command(ctx, report_validate, definition_path=definition_path)
+
+
+@report.command()
+@pass_context
+def reload(ctx: PbiContext) -> None:
+ """Trigger Power BI Desktop to reload the current report.
+
+ Sends Ctrl+Shift+F5 to Power BI Desktop. Tries pywin32 first,
+ falls back to PowerShell, then prints manual instructions.
+
+ Install pywin32 for best results: pip install pbi-cli-tool[reload]
+ """
+ from pbi_cli.utils.desktop_reload import reload_desktop
+
+ run_command(ctx, reload_desktop)
+
+
+@report.command()
+@click.option("--port", type=int, default=8080, help="HTTP server port (WebSocket uses port+1).")
+@click.pass_context
+@pass_context
+def preview(ctx: PbiContext, click_ctx: click.Context, port: int) -> None:
+ """Start a live preview server for the PBIR report.
+
+ Opens an HTTP server that renders the report as HTML/SVG.
+ Auto-reloads in the browser when PBIR files change.
+
+ Install websockets for this feature: pip install pbi-cli-tool[preview]
+ """
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.preview.server import start_preview_server
+
+ report_path = click_ctx.parent.obj.get("report_path") if click_ctx.parent else None
+ definition_path = resolve_report_path(report_path)
+ run_command(
+ ctx,
+ start_preview_server,
+ definition_path=definition_path,
+ port=port,
+ )
+
+
+@report.command()
+@click.argument("source_path", type=click.Path(exists=True))
+@click.option("--output", "-o", default=None, type=click.Path(), help="Output directory.")
+@click.option("--force", is_flag=True, default=False, help="Overwrite existing .pbip file.")
+@pass_context
+def convert(ctx: PbiContext, source_path: str, output: str | None, force: bool) -> None:
+ """Convert a .Report folder into a distributable .pbip project.
+
+ Creates the .pbip project file and .gitignore for version control.
+ Note: does NOT convert .pbix to .pbip (use Power BI Desktop for that).
+ """
+ from pbi_cli.core.report_backend import report_convert
+
+ run_command(
+ ctx,
+ report_convert,
+ source_path=Path(source_path),
+ output_path=Path(output) if output else None,
+ force=force,
+ )
diff --git a/src/pbi_cli/commands/visual.py b/src/pbi_cli/commands/visual.py
new file mode 100644
index 0000000..3ef3521
--- /dev/null
+++ b/src/pbi_cli/commands/visual.py
@@ -0,0 +1,652 @@
+"""PBIR visual CRUD commands."""
+
+from __future__ import annotations
+
+import click
+
+from pbi_cli.commands._helpers import run_command
+from pbi_cli.main import PbiContext, pass_context
+
+
@click.group()
@click.option(
    "--path",
    "-p",
    default=None,
    help="Path to .Report folder (auto-detected from CWD if omitted).",
)
@click.pass_context
def visual(ctx: click.Context, path: str | None) -> None:
    """Manage visuals in PBIR report pages."""
    # Stash --path on the group context so subcommands can retrieve it via
    # _get_report_path(); None means "auto-detect from CWD" downstream.
    ctx.ensure_object(dict)
    ctx.obj["report_path"] = path
+
+
def _get_report_path(click_ctx: click.Context) -> str | None:
    """Return the ``report_path`` stored by the parent group, or ``None``."""
    parent = click_ctx.parent
    return parent.obj.get("report_path") if parent else None
+
+
+@visual.command(name="list")
+@click.option("--page", required=True, help="Page name/ID.")
+@click.pass_context
+@pass_context
+def visual_list(ctx: PbiContext, click_ctx: click.Context, page: str) -> None:
+ """List all visuals on a page."""
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.core.visual_backend import visual_list as _visual_list
+
+ definition_path = resolve_report_path(_get_report_path(click_ctx))
+ run_command(ctx, _visual_list, definition_path=definition_path, page_name=page)
+
+
@visual.command()
@click.argument("name")
@click.option("--page", required=True, help="Page name/ID.")
@click.pass_context
@pass_context
def get(ctx: PbiContext, click_ctx: click.Context, name: str, page: str) -> None:
    """Get detailed information about a visual."""
    # Deferred imports keep CLI startup fast.
    from pbi_cli.core.pbir_path import resolve_report_path
    from pbi_cli.core.visual_backend import visual_get

    definition_path = resolve_report_path(_get_report_path(click_ctx))
    run_command(
        ctx,
        visual_get,
        definition_path=definition_path,
        page_name=page,
        visual_name=name,
    )
+
+
@visual.command()
@click.option("--page", required=True, help="Page name/ID.")
@click.option(
    "--type",
    "visual_type",
    required=True,
    help="Visual type (bar_chart, line_chart, card, table, matrix).",
)
@click.option("--name", "-n", default=None, help="Visual name (auto-generated if omitted).")
@click.option("--x", type=float, default=None, help="X position on canvas.")
@click.option("--y", type=float, default=None, help="Y position on canvas.")
@click.option("--width", type=float, default=None, help="Visual width in pixels.")
@click.option("--height", type=float, default=None, help="Visual height in pixels.")
@click.pass_context
@pass_context
def add(
    ctx: PbiContext,
    click_ctx: click.Context,
    page: str,
    visual_type: str,
    name: str | None,
    x: float | None,
    y: float | None,
    width: float | None,
    height: float | None,
) -> None:
    """Add a new visual to a page."""
    from pbi_cli.core.pbir_path import resolve_report_path
    from pbi_cli.core.visual_backend import visual_add

    definition_path = resolve_report_path(_get_report_path(click_ctx))
    # None geometry/name values are passed through; presumably the backend
    # picks placement defaults and generates a name -- see visual_add.
    run_command(
        ctx,
        visual_add,
        definition_path=definition_path,
        page_name=page,
        visual_type=visual_type,
        name=name,
        x=x,
        y=y,
        width=width,
        height=height,
    )
+
+
@visual.command()
@click.argument("name")
@click.option("--page", required=True, help="Page name/ID.")
@click.option("--x", type=float, default=None, help="New X position.")
@click.option("--y", type=float, default=None, help="New Y position.")
@click.option("--width", type=float, default=None, help="New width.")
@click.option("--height", type=float, default=None, help="New height.")
@click.option("--hidden/--visible", default=None, help="Toggle visibility.")
@click.pass_context
@pass_context
def update(
    ctx: PbiContext,
    click_ctx: click.Context,
    name: str,
    page: str,
    x: float | None,
    y: float | None,
    width: float | None,
    height: float | None,
    hidden: bool | None,
) -> None:
    """Update visual position, size, or visibility."""
    from pbi_cli.core.pbir_path import resolve_report_path
    from pbi_cli.core.visual_backend import visual_update

    definition_path = resolve_report_path(_get_report_path(click_ctx))
    # None values presumably mean "leave this property unchanged" -- confirm
    # against visual_update's semantics.
    run_command(
        ctx,
        visual_update,
        definition_path=definition_path,
        page_name=page,
        visual_name=name,
        x=x,
        y=y,
        width=width,
        height=height,
        hidden=hidden,
    )
+
+
@visual.command()
@click.argument("name")
@click.option("--page", required=True, help="Page name/ID.")
@click.pass_context
@pass_context
def delete(ctx: PbiContext, click_ctx: click.Context, name: str, page: str) -> None:
    """Delete a visual from a page."""
    # Deferred imports keep CLI startup fast.
    from pbi_cli.core.pbir_path import resolve_report_path
    from pbi_cli.core.visual_backend import visual_delete

    definition_path = resolve_report_path(_get_report_path(click_ctx))
    run_command(
        ctx,
        visual_delete,
        definition_path=definition_path,
        page_name=page,
        visual_name=name,
    )
+
+
@visual.command()
@click.argument("name")
@click.option("--page", required=True, help="Page name/ID.")
@click.option(
    "--category",
    multiple=True,
    help="Category/axis column: bar, line, donut charts. Table[Column] format.",
)
@click.option(
    "--value",
    multiple=True,
    help="Value/measure: all chart types. Treated as measure. Table[Measure] format.",
)
@click.option(
    "--row",
    multiple=True,
    help="Row grouping column: matrix only. Table[Column] format.",
)
@click.option(
    "--field",
    multiple=True,
    help="Data field: card, slicer. Treated as measure for cards. Table[Field] format.",
)
@click.option(
    "--legend",
    multiple=True,
    help="Legend/series column: bar, line, donut charts. Table[Column] format.",
)
@click.option(
    "--indicator",
    multiple=True,
    help="KPI indicator measure. Table[Measure] format.",
)
@click.option(
    "--goal",
    multiple=True,
    help="KPI goal measure. Table[Measure] format.",
)
@click.pass_context
@pass_context
def bind(
    ctx: PbiContext,
    click_ctx: click.Context,
    name: str,
    page: str,
    category: tuple[str, ...],
    value: tuple[str, ...],
    row: tuple[str, ...],
    field: tuple[str, ...],
    legend: tuple[str, ...],
    indicator: tuple[str, ...],
    goal: tuple[str, ...],
) -> None:
    """Bind semantic model fields to a visual's data roles.

    Examples:

    pbi visual bind mychart --page p1 --category "Geo[Region]" --value "Sales[Amount]"

    pbi visual bind mycard --page p1 --field "Sales[Total Revenue]"

    pbi visual bind mymatrix --page p1 --row "Product[Category]" --value "Sales[Qty]"

    pbi visual bind mykpi --page p1 --indicator "Sales[Revenue]" --goal "Sales[Target]"
    """
    from pbi_cli.core.pbir_path import resolve_report_path
    from pbi_cli.core.visual_backend import visual_bind

    # Flatten the per-role option tuples into binding dicts, preserving the
    # fixed role order expected by the backend.
    role_sources = (
        ("category", category),
        ("value", value),
        ("row", row),
        ("field", field),
        ("legend", legend),
        ("indicator", indicator),
        ("goal", goal),
    )
    bindings: list[dict[str, str]] = [
        {"role": role, "field": fld}
        for role, fields in role_sources
        for fld in fields
    ]

    if not bindings:
        raise click.UsageError(
            "At least one binding required "
            "(--category, --value, --row, --field, --legend, --indicator, or --goal)."
        )

    definition_path = resolve_report_path(_get_report_path(click_ctx))
    run_command(
        ctx,
        visual_bind,
        definition_path=definition_path,
        page_name=page,
        visual_name=name,
        bindings=bindings,
    )
+
+
+# ---------------------------------------------------------------------------
+# v3.1.0 Bulk operations
+# ---------------------------------------------------------------------------
+
+
@visual.command()
@click.option("--page", required=True, help="Page name/ID.")
@click.option("--type", "visual_type", default=None, help="Filter by PBIR visual type or alias.")
@click.option("--name-pattern", default=None, help="fnmatch glob on visual name (e.g. 'Chart_*').")
@click.option("--x-min", type=float, default=None, help="Minimum x position.")
@click.option("--x-max", type=float, default=None, help="Maximum x position.")
@click.option("--y-min", type=float, default=None, help="Minimum y position.")
@click.option("--y-max", type=float, default=None, help="Maximum y position.")
@click.pass_context
@pass_context
def where(
    ctx: PbiContext,
    click_ctx: click.Context,
    page: str,
    visual_type: str | None,
    name_pattern: str | None,
    x_min: float | None,
    x_max: float | None,
    y_min: float | None,
    y_max: float | None,
) -> None:
    """Filter visuals by type and/or position bounds.

    Examples:

    pbi visual where --page overview --type barChart

    pbi visual where --page overview --x-max 640

    pbi visual where --page overview --type kpi --name-pattern "KPI_*"
    """
    # Thin wrapper: all filtering logic lives in bulk_backend.visual_where,
    # which ANDs every provided criterion together.
    from pbi_cli.core.bulk_backend import visual_where
    from pbi_cli.core.pbir_path import resolve_report_path

    definition_path = resolve_report_path(_get_report_path(click_ctx))
    run_command(
        ctx,
        visual_where,
        definition_path=definition_path,
        page_name=page,
        visual_type=visual_type,
        name_pattern=name_pattern,
        x_min=x_min,
        x_max=x_max,
        y_min=y_min,
        y_max=y_max,
    )
+
+
+@visual.command(name="bulk-bind")
+@click.option("--page", required=True, help="Page name/ID.")
+@click.option("--type", "visual_type", required=True, help="Target PBIR visual type or alias.")
+@click.option("--name-pattern", default=None, help="Restrict to visuals matching fnmatch pattern.")
+@click.option("--category", multiple=True, help="Category/axis. Table[Column].")
+@click.option("--value", multiple=True, help="Value/measure: all chart types. Table[Measure].")
+@click.option("--row", multiple=True, help="Row grouping: matrix only. Table[Column].")
+@click.option("--field", multiple=True, help="Data field: card, slicer. Table[Field].")
+@click.option("--legend", multiple=True, help="Legend/series. Table[Column].")
+@click.option("--indicator", multiple=True, help="KPI indicator measure. Table[Measure].")
+@click.option("--goal", multiple=True, help="KPI goal measure. Table[Measure].")
+@click.option("--column", "col_value", multiple=True, help="Combo column Y. Table[Measure].")
+@click.option("--line", multiple=True, help="Line Y axis for combo chart. Table[Measure].")
+@click.option("--x", "x_field", multiple=True, help="X axis for scatter chart. Table[Measure].")
+@click.option("--y", "y_field", multiple=True, help="Y axis for scatter chart. Table[Measure].")
+@click.pass_context
+@pass_context
+def bulk_bind(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ page: str,
+ visual_type: str,
+ name_pattern: str | None,
+ category: tuple[str, ...],
+ value: tuple[str, ...],
+ row: tuple[str, ...],
+ field: tuple[str, ...],
+ legend: tuple[str, ...],
+ indicator: tuple[str, ...],
+ goal: tuple[str, ...],
+ col_value: tuple[str, ...],
+ line: tuple[str, ...],
+ x_field: tuple[str, ...],
+ y_field: tuple[str, ...],
+) -> None:
+ """Bind fields to ALL visuals of a given type on a page.
+
+ Examples:
+
+ pbi visual bulk-bind --page overview --type barChart \\
+ --category "Date[Month]" --value "Sales[Revenue]"
+
+ pbi visual bulk-bind --page overview --type kpi \\
+ --indicator "Sales[Revenue]" --goal "Sales[Target]"
+
+ pbi visual bulk-bind --page overview --type lineStackedColumnComboChart \\
+ --column "Sales[Revenue]" --line "Sales[Margin]"
+ """
+ from pbi_cli.core.bulk_backend import visual_bulk_bind
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ bindings: list[dict[str, str]] = []
+ for f in category:
+ bindings.append({"role": "category", "field": f})
+ for f in value:
+ bindings.append({"role": "value", "field": f})
+ for f in row:
+ bindings.append({"role": "row", "field": f})
+ for f in field:
+ bindings.append({"role": "field", "field": f})
+ for f in legend:
+ bindings.append({"role": "legend", "field": f})
+ for f in indicator:
+ bindings.append({"role": "indicator", "field": f})
+ for f in goal:
+ bindings.append({"role": "goal", "field": f})
+ for f in col_value:
+ bindings.append({"role": "column", "field": f})
+ for f in line:
+ bindings.append({"role": "line", "field": f})
+ for f in x_field:
+ bindings.append({"role": "x", "field": f})
+ for f in y_field:
+ bindings.append({"role": "y", "field": f})
+
+ if not bindings:
+ raise click.UsageError("At least one binding role required.")
+
+ definition_path = resolve_report_path(_get_report_path(click_ctx))
+ run_command(
+ ctx,
+ visual_bulk_bind,
+ definition_path=definition_path,
+ page_name=page,
+ visual_type=visual_type,
+ bindings=bindings,
+ name_pattern=name_pattern,
+ )
+
+
+@visual.command(name="bulk-update")
+@click.option("--page", required=True, help="Page name/ID.")
+@click.option("--type", "visual_type", default=None, help="Filter by visual type or alias.")
+@click.option("--name-pattern", default=None, help="fnmatch filter on visual name.")
+@click.option("--width", type=float, default=None, help="Set width for all matching visuals.")
+@click.option("--height", type=float, default=None, help="Set height for all matching visuals.")
+@click.option("--x", "set_x", type=float, default=None, help="Set x position.")
+@click.option("--y", "set_y", type=float, default=None, help="Set y position.")
+@click.option("--hidden/--visible", default=None, help="Show or hide all matching visuals.")
+@click.pass_context
+@pass_context
+def bulk_update(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ page: str,
+ visual_type: str | None,
+ name_pattern: str | None,
+ width: float | None,
+ height: float | None,
+ set_x: float | None,
+ set_y: float | None,
+ hidden: bool | None,
+) -> None:
+ """Update dimensions or visibility for ALL visuals matching the filter.
+
+ Examples:
+
+ pbi visual bulk-update --page overview --type kpi --height 200 --width 300
+
+ pbi visual bulk-update --page overview --name-pattern "Temp_*" --hidden
+ """
+ from pbi_cli.core.bulk_backend import visual_bulk_update
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ definition_path = resolve_report_path(_get_report_path(click_ctx))
+ run_command(
+ ctx,
+ visual_bulk_update,
+ definition_path=definition_path,
+ page_name=page,
+ where_type=visual_type,
+ where_name_pattern=name_pattern,
+ set_hidden=hidden,
+ set_width=width,
+ set_height=height,
+ set_x=set_x,
+ set_y=set_y,
+ )
+
+
+@visual.command(name="bulk-delete")
+@click.option("--page", required=True, help="Page name/ID.")
+@click.option("--type", "visual_type", default=None, help="Filter by visual type or alias.")
+@click.option("--name-pattern", default=None, help="fnmatch filter on visual name.")
+@click.pass_context
+@pass_context
+def bulk_delete(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ page: str,
+ visual_type: str | None,
+ name_pattern: str | None,
+) -> None:
+ """Delete ALL visuals matching the filter (requires --type or --name-pattern).
+
+ Examples:
+
+ pbi visual bulk-delete --page overview --type barChart
+
+ pbi visual bulk-delete --page overview --name-pattern "Draft_*"
+ """
+ from pbi_cli.core.bulk_backend import visual_bulk_delete
+ from pbi_cli.core.pbir_path import resolve_report_path
+
+ definition_path = resolve_report_path(_get_report_path(click_ctx))
+ run_command(
+ ctx,
+ visual_bulk_delete,
+ definition_path=definition_path,
+ page_name=page,
+ where_type=visual_type,
+ where_name_pattern=name_pattern,
+ )
+
+
+# ---------------------------------------------------------------------------
+# v3.2.0 Visual Calculations (Phase 7)
+# ---------------------------------------------------------------------------
+
+
+@visual.command(name="calc-add")
+@click.argument("visual_name")
+@click.option("--page", required=True, help="Page name/ID.")
+@click.option("--name", "calc_name", required=True, help="Display name for the calculation.")
+@click.option("--expression", required=True, help="DAX expression for the calculation.")
+@click.option("--role", default="Y", show_default=True, help="Target data role (e.g. Y, Values).")
+@click.pass_context
+@pass_context
+def calc_add(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ visual_name: str,
+ page: str,
+ calc_name: str,
+ expression: str,
+ role: str,
+) -> None:
+ """Add a visual calculation to a data role's projections.
+
+ Examples:
+
+ pbi visual calc-add MyChart --page overview --name "Running sum" \\
+ --expression "RUNNINGSUM([Sum of Sales])"
+
+ pbi visual calc-add MyChart --page overview --name "Rank" \\
+ --expression "RANK()" --role Y
+ """
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.core.visual_backend import visual_calc_add
+
+ definition_path = resolve_report_path(_get_report_path(click_ctx))
+ run_command(
+ ctx,
+ visual_calc_add,
+ definition_path=definition_path,
+ page_name=page,
+ visual_name=visual_name,
+ calc_name=calc_name,
+ expression=expression,
+ role=role,
+ )
+
+
+@visual.command(name="calc-list")
+@click.argument("visual_name")
+@click.option("--page", required=True, help="Page name/ID.")
+@click.pass_context
+@pass_context
+def calc_list(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ visual_name: str,
+ page: str,
+) -> None:
+ """List all visual calculations on a visual across all roles.
+
+ Examples:
+
+ pbi visual calc-list MyChart --page overview
+ """
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.core.visual_backend import visual_calc_list
+
+ definition_path = resolve_report_path(_get_report_path(click_ctx))
+ run_command(
+ ctx,
+ visual_calc_list,
+ definition_path=definition_path,
+ page_name=page,
+ visual_name=visual_name,
+ )
+
+
+@visual.command(name="set-container")
+@click.argument("name")
+@click.option("--page", required=True, help="Page name/ID.")
+@click.option(
+ "--border-show",
+ type=bool,
+ default=None,
+ help="Show (true) or hide (false) the visual border.",
+)
+@click.option(
+ "--background-show",
+ type=bool,
+ default=None,
+ help="Show (true) or hide (false) the visual background.",
+)
+@click.option("--title", default=None, help="Set container title text.")
+@click.pass_context
+@pass_context
+def set_container(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ name: str,
+ page: str,
+ border_show: bool | None,
+ background_show: bool | None,
+ title: str | None,
+) -> None:
+ """Set container-level border, background, or title on a visual."""
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.core.visual_backend import visual_set_container
+
+ definition_path = resolve_report_path(_get_report_path(click_ctx))
+ run_command(
+ ctx,
+ visual_set_container,
+ definition_path=definition_path,
+ page_name=page,
+ visual_name=name,
+ border_show=border_show,
+ background_show=background_show,
+ title=title,
+ )
+
+
+@visual.command(name="calc-delete")
+@click.argument("visual_name")
+@click.option("--page", required=True, help="Page name/ID.")
+@click.option("--name", "calc_name", required=True, help="Name of the calculation to delete.")
+@click.pass_context
+@pass_context
+def calc_delete(
+ ctx: PbiContext,
+ click_ctx: click.Context,
+ visual_name: str,
+ page: str,
+ calc_name: str,
+) -> None:
+ """Delete a visual calculation by name.
+
+ Examples:
+
+ pbi visual calc-delete MyChart --page overview --name "Running sum"
+ """
+ from pbi_cli.core.pbir_path import resolve_report_path
+ from pbi_cli.core.visual_backend import visual_calc_delete
+
+ definition_path = resolve_report_path(_get_report_path(click_ctx))
+ run_command(
+ ctx,
+ visual_calc_delete,
+ definition_path=definition_path,
+ page_name=page,
+ visual_name=visual_name,
+ calc_name=calc_name,
+ )
diff --git a/src/pbi_cli/core/bookmark_backend.py b/src/pbi_cli/core/bookmark_backend.py
new file mode 100644
index 0000000..fcaf4e9
--- /dev/null
+++ b/src/pbi_cli/core/bookmark_backend.py
@@ -0,0 +1,246 @@
+"""Pure-function backend for PBIR bookmark operations.
+
+Mirrors ``report_backend.py`` but operates on the bookmarks subfolder.
+Every function takes a ``Path`` to the definition folder and returns a plain
+Python dict suitable for ``format_result()``.
+"""
+
+from __future__ import annotations
+
+import json
+import secrets
+from pathlib import Path
+from typing import Any
+
+from pbi_cli.core.errors import PbiCliError
+
+# ---------------------------------------------------------------------------
+# Schema constants
+# ---------------------------------------------------------------------------
+
+SCHEMA_BOOKMARKS_METADATA = (
+ "https://developer.microsoft.com/json-schemas/"
+ "fabric/item/report/definition/bookmarksMetadata/1.0.0/schema.json"
+)
+SCHEMA_BOOKMARK = (
+ "https://developer.microsoft.com/json-schemas/"
+ "fabric/item/report/definition/bookmark/2.1.0/schema.json"
+)
+
+# ---------------------------------------------------------------------------
+# JSON helpers
+# ---------------------------------------------------------------------------
+
+
def _read_json(path: Path) -> dict[str, Any]:
    """Load *path* as UTF-8 text and return the parsed JSON object."""
    raw = path.read_text(encoding="utf-8")
    return json.loads(raw)
+
+
def _write_json(path: Path, data: dict[str, Any]) -> None:
    """Serialise *data* to *path* as pretty-printed UTF-8 JSON.

    Matches the repo convention: 2-space indent, non-ASCII kept literal,
    single trailing newline.
    """
    serialized = json.dumps(data, indent=2, ensure_ascii=False)
    path.write_text(f"{serialized}\n", encoding="utf-8")
+
+
def _generate_name() -> str:
    """Return a random 20-character lowercase-hex id (PBIR naming style)."""
    return secrets.token_bytes(10).hex()
+
+
+# ---------------------------------------------------------------------------
+# Path helpers
+# ---------------------------------------------------------------------------
+
+
def _bookmarks_dir(definition_path: Path) -> Path:
    """Location of the ``bookmarks/`` subfolder under the definition root."""
    return Path(definition_path, "bookmarks")
+
+
def _index_path(definition_path: Path) -> Path:
    """Location of the ``bookmarks.json`` index file."""
    return definition_path / "bookmarks" / "bookmarks.json"
+
+
def _bookmark_path(definition_path: Path, name: str) -> Path:
    """Location of the ``<name>.bookmark.json`` file for bookmark *name*."""
    return definition_path / "bookmarks" / f"{name}.bookmark.json"
+
+
+# ---------------------------------------------------------------------------
+# Public API
+# ---------------------------------------------------------------------------
+
+
def bookmark_list(definition_path: Path) -> list[dict[str, Any]]:
    """List all bookmarks declared in the ``bookmarks.json`` index.

    Each result is a ``{name, display_name, active_section}`` dict, read
    from the corresponding ``.bookmark.json`` file. Index entries whose
    file is missing on disk are silently skipped. Returns ``[]`` when the
    bookmarks folder or index does not exist.
    """
    index_file = _index_path(definition_path)
    if not index_file.exists():
        return []

    summaries: list[dict[str, Any]] = []
    for entry in _read_json(index_file).get("items", []):
        bm_name = entry.get("name", "")
        bm_path = _bookmark_path(definition_path, bm_name)
        if not bm_path.exists():
            # Stale index entry -- skip rather than fail the whole listing.
            continue
        data = _read_json(bm_path)
        summaries.append(
            {
                "name": bm_name,
                "display_name": data.get("displayName", ""),
                "active_section": data.get("explorationState", {}).get("activeSection"),
            }
        )

    return summaries
+
+
def bookmark_get(definition_path: Path, name: str) -> dict[str, Any]:
    """Return the full JSON payload of bookmark *name*.

    Raises ``PbiCliError`` when no ``<name>.bookmark.json`` file exists.
    """
    bm_path = _bookmark_path(definition_path, name)
    if bm_path.exists():
        return _read_json(bm_path)
    raise PbiCliError(f"Bookmark '{name}' not found.")
+
+
def bookmark_add(
    definition_path: Path,
    display_name: str,
    target_page: str,
    name: str | None = None,
) -> dict[str, Any]:
    """Create a new bookmark pointing to *target_page*.

    Creates the ``bookmarks/`` directory and ``bookmarks.json`` index if they
    do not already exist. When *name* is omitted a fresh 20-character hex id
    is generated. Returns a status dict with the created bookmark info.

    Raises:
        PbiCliError: if a bookmark with the given *name* already exists --
            previously this silently appended a duplicate index entry and
            overwrote the existing ``.bookmark.json`` file.
    """
    bm_name = name if name is not None else _generate_name()

    bm_dir = _bookmarks_dir(definition_path)
    bm_dir.mkdir(parents=True, exist_ok=True)

    index_file = _index_path(definition_path)
    if index_file.exists():
        index = _read_json(index_file)
    else:
        index = {"$schema": SCHEMA_BOOKMARKS_METADATA, "items": []}

    # Copy before mutating so a partially-read index object is never shared.
    items = list(index.get("items", []))

    # Guard against an explicitly supplied name that is already indexed:
    # without this, the index gains a duplicate entry and the existing
    # bookmark file is silently clobbered.
    if any(item.get("name") == bm_name for item in items):
        raise PbiCliError(f"Bookmark '{bm_name}' already exists.")

    items.append({"name": bm_name})
    index["items"] = items
    _write_json(index_file, index)

    bookmark_data: dict[str, Any] = {
        "$schema": SCHEMA_BOOKMARK,
        "displayName": display_name,
        "name": bm_name,
        "options": {"targetVisualNames": []},
        "explorationState": {
            "version": "1.3",
            "activeSection": target_page,
        },
    }
    _write_json(_bookmark_path(definition_path, bm_name), bookmark_data)

    return {
        "status": "created",
        "name": bm_name,
        "display_name": display_name,
        "target_page": target_page,
    }
+
+
def bookmark_delete(
    definition_path: Path,
    name: str,
) -> dict[str, Any]:
    """Delete bookmark *name*: its file and its ``bookmarks.json`` entry.

    Raises ``PbiCliError`` when the index is missing or does not list the
    bookmark. A missing ``.bookmark.json`` file alone is tolerated -- the
    index entry is still removed.
    """
    index_file = _index_path(definition_path)
    if not index_file.exists():
        raise PbiCliError(f"Bookmark '{name}' not found.")

    index = _read_json(index_file)
    items: list[dict[str, Any]] = index.get("items", [])
    if all(item.get("name") != name for item in items):
        raise PbiCliError(f"Bookmark '{name}' not found.")

    bm_file = _bookmark_path(definition_path, name)
    if bm_file.exists():
        bm_file.unlink()

    remaining = [item for item in items if item.get("name") != name]
    _write_json(index_file, {**index, "items": remaining})

    return {"status": "deleted", "name": name}
+
+
def bookmark_set_visibility(
    definition_path: Path,
    name: str,
    page_name: str,
    visual_name: str,
    hidden: bool,
) -> dict[str, Any]:
    """Set a visual's hidden/visible state inside a bookmark's explorationState.

    When *hidden* is ``True``, sets ``singleVisual.display.mode = "hidden"``.
    When *hidden* is ``False``, removes the ``display`` key from ``singleVisual``
    (presence of ``display`` is what hides the visual in Power BI Desktop).

    Creates the ``explorationState.sections.{page_name}.visualContainers.{visual_name}``
    path if it does not already exist in the bookmark.

    Raises ``PbiCliError`` if the bookmark does not exist.
    Returns a status dict with name, page, visual, and the new visibility state.
    """
    bm_file = _bookmark_path(definition_path, name)
    if not bm_file.exists():
        raise PbiCliError(f"Bookmark '{name}' not found.")

    bm = _read_json(bm_file)

    # Navigate / build the explorationState path immutably.
    # Each level is shallow-copied so the original parsed dicts are never
    # mutated; `or {}` also normalises explicit JSON nulls to empty dicts.
    exploration = dict(bm.get("explorationState") or {})
    sections = dict(exploration.get("sections") or {})
    page_section = dict(sections.get(page_name) or {})
    visual_containers = dict(page_section.get("visualContainers") or {})
    container = dict(visual_containers.get(visual_name) or {})
    single_visual = dict(container.get("singleVisual") or {})

    if hidden:
        # Overwrites any existing "display" value with the hidden marker.
        single_visual = {**single_visual, "display": {"mode": "hidden"}}
    else:
        # Dropping "display" entirely is what makes the visual visible again.
        single_visual = {k: v for k, v in single_visual.items() if k != "display"}

    # Re-assemble the nested structure bottom-up, preserving all sibling keys.
    new_container = {**container, "singleVisual": single_visual}
    new_visual_containers = {**visual_containers, visual_name: new_container}
    new_page_section = {**page_section, "visualContainers": new_visual_containers}
    new_sections = {**sections, page_name: new_page_section}
    new_exploration = {**exploration, "sections": new_sections}
    new_bm = {**bm, "explorationState": new_exploration}

    _write_json(bm_file, new_bm)

    return {
        "status": "updated",
        "bookmark": name,
        "page": page_name,
        "visual": visual_name,
        "hidden": hidden,
    }
diff --git a/src/pbi_cli/core/bulk_backend.py b/src/pbi_cli/core/bulk_backend.py
new file mode 100644
index 0000000..0b7db3f
--- /dev/null
+++ b/src/pbi_cli/core/bulk_backend.py
@@ -0,0 +1,217 @@
+"""Bulk visual operations for PBIR reports.
+
+Orchestration layer over visual_backend.py -- applies filtering
+(by type, name pattern, position bounds) and fans out to the
+individual visual_* pure functions.
+
+Every function follows the same signature contract as the rest of the
+report layer: takes a ``definition_path: Path`` and returns a plain dict.
+"""
+
+from __future__ import annotations
+
+import fnmatch
+from pathlib import Path
+from typing import Any
+
+from pbi_cli.core.visual_backend import (
+ VISUAL_DATA_ROLES,
+ _resolve_visual_type,
+ visual_bind,
+ visual_delete,
+ visual_list,
+ visual_update,
+)
+
+
def visual_where(
    definition_path: Path,
    page_name: str,
    visual_type: str | None = None,
    name_pattern: str | None = None,
    x_min: float | None = None,
    x_max: float | None = None,
    y_min: float | None = None,
    y_max: float | None = None,
) -> list[dict[str, Any]]:
    """Return the visuals on *page_name* matching ALL provided criteria.

    Every filter argument is optional; omitting them all yields the full
    ``visual_list()`` output for the page.

    Args:
        definition_path: Path to the ``definition/`` folder.
        page_name: Name of the page to search.
        visual_type: PBIR visualType or user alias (e.g. ``"bar"``);
            resolved before comparison.
        name_pattern: fnmatch pattern against visual names (e.g. ``"Chart_*"``).
        x_min: Minimum x position (inclusive).
        x_max: Maximum x position (inclusive).
        y_min: Minimum y position (inclusive).
        y_max: Maximum y position (inclusive).
    """
    resolved_type = _resolve_visual_type(visual_type) if visual_type is not None else None

    def _matches(info: dict[str, Any]) -> bool:
        # AND-combine every active criterion; missing coordinates count as 0.
        if resolved_type is not None and info.get("visual_type") != resolved_type:
            return False
        if name_pattern is not None and not fnmatch.fnmatch(info.get("name", ""), name_pattern):
            return False
        pos_x = info.get("x", 0.0)
        pos_y = info.get("y", 0.0)
        if x_min is not None and pos_x < x_min:
            return False
        if x_max is not None and pos_x > x_max:
            return False
        if y_min is not None and pos_y < y_min:
            return False
        if y_max is not None and pos_y > y_max:
            return False
        return True

    return [info for info in visual_list(definition_path, page_name) if _matches(info)]
+
+
def visual_bulk_bind(
    definition_path: Path,
    page_name: str,
    visual_type: str,
    bindings: list[dict[str, str]],
    name_pattern: str | None = None,
) -> dict[str, Any]:
    """Apply the same field bindings to every matching visual on a page.

    Calls ``visual_bind()`` once per matching visual, in listing order,
    raising on the first failure (visuals bound before the failure stay
    bound).

    Args:
        definition_path: Path to the ``definition/`` folder.
        page_name: Name of the page.
        visual_type: PBIR visualType or user alias -- required here
            (unlike ``visual_where``).
        bindings: ``{"role": ..., "field": ...}`` dicts, same format as
            ``visual_bind()``.
        name_pattern: Optional fnmatch filter on visual name.

    Returns:
        ``{"bound": N, "page": page_name, "type": resolved_type, "visuals": [names],
        "bindings": bindings}``
    """
    # Resolve once up front; visual_where performs the same resolution
    # internally, so behaviour (including the error on unknown types)
    # is unchanged.
    resolved_type = _resolve_visual_type(visual_type)

    bound_names: list[str] = []
    targets = visual_where(
        definition_path,
        page_name,
        visual_type=visual_type,
        name_pattern=name_pattern,
    )
    for target in targets:
        visual_bind(definition_path, page_name, target["name"], bindings)
        bound_names.append(target["name"])

    return {
        "bound": len(bound_names),
        "page": page_name,
        "type": resolved_type,
        "visuals": bound_names,
        "bindings": bindings,
    }
+
+
def visual_bulk_update(
    definition_path: Path,
    page_name: str,
    where_type: str | None = None,
    where_name_pattern: str | None = None,
    set_hidden: bool | None = None,
    set_width: float | None = None,
    set_height: float | None = None,
    set_x: float | None = None,
    set_y: float | None = None,
) -> dict[str, Any]:
    """Apply position/visibility updates to all visuals matching the filter.

    Delegates to ``visual_update()`` for each match, in listing order.
    At least one ``set_*`` argument must be provided.

    Returns:
        ``{"updated": N, "page": page_name, "visuals": [names]}``
    """
    requested = (set_hidden, set_width, set_height, set_x, set_y)
    if not any(value is not None for value in requested):
        raise ValueError("At least one set_* argument must be provided to bulk-update")

    updated_names: list[str] = []
    for target in visual_where(
        definition_path,
        page_name,
        visual_type=where_type,
        name_pattern=where_name_pattern,
    ):
        visual_update(
            definition_path,
            page_name,
            target["name"],
            x=set_x,
            y=set_y,
            width=set_width,
            height=set_height,
            hidden=set_hidden,
        )
        updated_names.append(target["name"])

    return {
        "updated": len(updated_names),
        "page": page_name,
        "visuals": updated_names,
    }
+
+
def visual_bulk_delete(
    definition_path: Path,
    page_name: str,
    where_type: str | None = None,
    where_name_pattern: str | None = None,
) -> dict[str, Any]:
    """Delete every visual on *page_name* matching the filter criteria.

    Delegates to ``visual_delete()`` for each match. Requires at least one
    filter so a bare call cannot wipe a whole page.

    Returns:
        ``{"deleted": N, "page": page_name, "visuals": [names]}``
    """
    if where_type is None and where_name_pattern is None:
        raise ValueError(
            "Provide at least --type or --name-pattern to prevent accidental bulk deletion"
        )

    deleted_names: list[str] = []
    for target in visual_where(
        definition_path,
        page_name,
        visual_type=where_type,
        name_pattern=where_name_pattern,
    ):
        visual_delete(definition_path, page_name, target["name"])
        deleted_names.append(target["name"])

    return {
        "deleted": len(deleted_names),
        "page": page_name,
        "visuals": deleted_names,
    }
+
+
+# ---------------------------------------------------------------------------
+# Helpers
+# ---------------------------------------------------------------------------
+
+
def _supported_roles_for_type(visual_type: str) -> list[str]:
    """Data role names for a visual type (used for help text generation)."""
    return VISUAL_DATA_ROLES.get(_resolve_visual_type(visual_type), [])
diff --git a/src/pbi_cli/core/claude_integration.py b/src/pbi_cli/core/claude_integration.py
index 22019ce..70ef282 100644
--- a/src/pbi_cli/core/claude_integration.py
+++ b/src/pbi_cli/core/claude_integration.py
@@ -19,13 +19,21 @@ _PBI_CLI_CLAUDE_MD_SNIPPET = (
"When working with Power BI, DAX, semantic models, or data modeling,\n"
"invoke the relevant pbi-cli skill before responding:\n"
"\n"
+ "**Semantic Model (requires `pbi connect`):**\n"
"- **power-bi-dax** -- DAX queries, measures, calculations\n"
"- **power-bi-modeling** -- tables, columns, measures, relationships\n"
- "- **power-bi-diagnostics** -- troubleshooting, tracing, setup\n"
- "- **power-bi-deployment** -- TMDL export/import, transactions\n"
+ "- **power-bi-deployment** -- TMDL export/import, transactions, diff\n"
"- **power-bi-docs** -- model documentation, data dictionary\n"
"- **power-bi-partitions** -- partitions, M expressions, data sources\n"
"- **power-bi-security** -- RLS roles, perspectives, access control\n"
+ "- **power-bi-diagnostics** -- troubleshooting, tracing, setup\n"
+ "\n"
+ "**Report Layer (no connection needed):**\n"
+ "- **power-bi-report** -- scaffold, validate, preview PBIR reports\n"
+ "- **power-bi-visuals** -- add, bind, update, bulk-manage visuals\n"
+ "- **power-bi-pages** -- pages, bookmarks, visibility, drillthrough\n"
+ "- **power-bi-themes** -- themes, conditional formatting, styling\n"
+ "- **power-bi-filters** -- page and visual filters (TopN, date, categorical)\n"
"\n"
"Critical: Multi-line DAX (VAR/RETURN) cannot be passed via `-e`.\n"
"Use `--file` or stdin piping instead. See power-bi-dax skill.\n"
diff --git a/src/pbi_cli/core/errors.py b/src/pbi_cli/core/errors.py
index 34d6f33..dfaf855 100644
--- a/src/pbi_cli/core/errors.py
+++ b/src/pbi_cli/core/errors.py
@@ -40,3 +40,27 @@ class TomError(PbiCliError):
self.operation = operation
self.detail = detail
super().__init__(f"{operation}: {detail}")
+
+
class VisualTypeError(PbiCliError):
    """Raised when a visual type identifier is not recognised."""

    def __init__(self, visual_type: str) -> None:
        # Keep the offending value available for programmatic handling.
        self.visual_type = visual_type
        message = (
            f"Unknown visual type '{visual_type}'. "
            "Run 'pbi visual types' to see supported types."
        )
        super().__init__(message)
+
+
class ReportNotFoundError(PbiCliError):
    """Raised when no PBIR report definition folder can be found."""

    # Default guidance shown when the caller gives no specific message.
    _DEFAULT_MESSAGE = (
        "No PBIR report found. Run this command inside a .pbip project "
        "or pass --path to the .Report folder."
    )

    def __init__(self, message: str = _DEFAULT_MESSAGE) -> None:
        super().__init__(message)
diff --git a/src/pbi_cli/core/filter_backend.py b/src/pbi_cli/core/filter_backend.py
new file mode 100644
index 0000000..be10078
--- /dev/null
+++ b/src/pbi_cli/core/filter_backend.py
@@ -0,0 +1,514 @@
+"""Pure-function backend for PBIR filter operations.
+
+Every function takes a ``Path`` to the definition folder and returns a plain
+Python dict suitable for ``format_result()``.
+
+Filters are stored in ``filterConfig.filters[]`` inside either:
- ``pages/<page>/page.json`` for page-level filters
- ``pages/<page>/visuals/<visual>/visual.json`` for visual-level filters
+"""
+
+from __future__ import annotations
+
+import json
+import secrets
+from pathlib import Path
+from typing import Any
+
+from pbi_cli.core.errors import PbiCliError
+from pbi_cli.core.pbir_path import get_page_dir, get_visual_dir
+
+# ---------------------------------------------------------------------------
+# JSON helpers
+# ---------------------------------------------------------------------------
+
+
+def _read_json(path: Path) -> dict[str, Any]:
+ """Read and parse a JSON file."""
+ return json.loads(path.read_text(encoding="utf-8"))
+
+
+def _write_json(path: Path, data: dict[str, Any]) -> None:
+ """Write JSON with consistent formatting."""
+ path.write_text(
+ json.dumps(data, indent=2, ensure_ascii=False) + "\n",
+ encoding="utf-8",
+ )
+
+
+def _generate_name() -> str:
+ """Generate a 20-character hex identifier matching PBIR convention."""
+ return secrets.token_hex(10)
+
+
+# ---------------------------------------------------------------------------
+# Path resolution helpers
+# ---------------------------------------------------------------------------
+
+
def _resolve_target_path(
    definition_path: Path,
    page_name: str,
    visual_name: str | None,
) -> Path:
    """Return the JSON file backing the target: visual.json or page.json."""
    if visual_name is not None:
        return get_visual_dir(definition_path, page_name, visual_name) / "visual.json"
    return get_page_dir(definition_path, page_name) / "page.json"
+
+
+def _get_filters(data: dict[str, Any]) -> list[dict[str, Any]]:
+ """Extract the filters list from a page or visual JSON dict."""
+ filter_config = data.get("filterConfig")
+ if not isinstance(filter_config, dict):
+ return []
+ filters = filter_config.get("filters")
+ if not isinstance(filters, list):
+ return []
+ return filters
+
+
+def _set_filters(data: dict[str, Any], filters: list[dict[str, Any]]) -> dict[str, Any]:
+ """Return a new dict with filterConfig.filters replaced (immutable update)."""
+ filter_config = dict(data.get("filterConfig") or {})
+ filter_config["filters"] = filters
+ return {**data, "filterConfig": filter_config}
+
+
+# ---------------------------------------------------------------------------
+# Public API
+# ---------------------------------------------------------------------------
+
+
def filter_list(
    definition_path: Path,
    page_name: str,
    visual_name: str | None = None,
) -> list[dict[str, Any]]:
    """List the filters on a page or on a specific visual.

    With ``visual_name=None`` the page-level filters from page.json are
    returned; otherwise the visual-level filters from visual.json. The raw
    dicts from ``filterConfig.filters[]`` are returned unmodified.

    Raises:
        PbiCliError: if the target JSON file does not exist.
    """
    target = _resolve_target_path(definition_path, page_name, visual_name)
    if not target.exists():
        raise PbiCliError(f"File not found: {target}")
    return _get_filters(_read_json(target))
+
+
+def _to_pbi_literal(value: str) -> str:
+ """Convert a CLI string value to a Power BI literal.
+
+ Power BI uses typed literals: strings are single-quoted (``'text'``),
+ integers use an ``L`` suffix (``123L``), and doubles use ``D`` (``1.5D``).
+ """
+ # Try integer first (e.g. "2014" -> "2014L")
+ try:
+ int(value)
+ return f"{value}L"
+ except ValueError:
+ pass
+ # Try float (e.g. "3.14" -> "3.14D")
+ try:
+ float(value)
+ return f"{value}D"
+ except ValueError:
+ pass
+ # Fall back to string literal
+ return f"'{value}'"
+
+
def filter_add_categorical(
    definition_path: Path,
    page_name: str,
    table: str,
    column: str,
    values: list[str],
    visual_name: str | None = None,
    name: str | None = None,
) -> dict[str, Any]:
    """Add a categorical (In-list) filter to a page or visual.

    The full filterConfig entry is built from *table*/*column*/*values*.
    The query-source alias is the first letter of the table name, lowercased.

    Returns a status dict with name, type, and scope.

    Raises:
        PbiCliError: if the target JSON file does not exist.
    """
    target = _resolve_target_path(definition_path, page_name, visual_name)
    if not target.exists():
        raise PbiCliError(f"File not found: {target}")

    scope = "visual" if visual_name is not None else "page"
    filter_name = _generate_name() if name is None else name
    alias = table[0].lower()

    # Each value becomes a single-element row of typed literals.
    literal_rows: list[list[dict[str, Any]]] = [
        [{"Literal": {"Value": _to_pbi_literal(v)}}] for v in values
    ]
    condition = {
        "Condition": {
            "In": {
                "Expressions": [
                    {
                        "Column": {
                            "Expression": {"SourceRef": {"Source": alias}},
                            "Property": column,
                        }
                    }
                ],
                "Values": literal_rows,
            }
        }
    }

    entry: dict[str, Any] = {
        "name": filter_name,
        "field": {
            "Column": {
                "Expression": {"SourceRef": {"Entity": table}},
                "Property": column,
            }
        },
        "type": "Categorical",
        "filter": {
            "Version": 2,
            "From": [{"Name": alias, "Entity": table, "Type": 0}],
            "Where": [condition],
        },
    }
    # Page-level filters record how they were created.
    if scope == "page":
        entry["howCreated"] = "User"

    data = _read_json(target)
    _write_json(target, _set_filters(data, [*_get_filters(data), entry]))

    return {"status": "added", "name": filter_name, "type": "Categorical", "scope": scope}
+
+
def filter_remove(
    definition_path: Path,
    page_name: str,
    filter_name: str,
    visual_name: str | None = None,
) -> dict[str, Any]:
    """Remove the filter named *filter_name* from a page or visual.

    Returns a status dict with the removed filter name.

    Raises:
        PbiCliError: if the target file or the named filter does not exist.
    """
    target = _resolve_target_path(definition_path, page_name, visual_name)
    if not target.exists():
        raise PbiCliError(f"File not found: {target}")

    data = _read_json(target)
    existing = _get_filters(data)
    kept = [entry for entry in existing if entry.get("name") != filter_name]

    # Nothing dropped means the name was wrong -- surface that to the caller.
    if len(kept) == len(existing):
        raise PbiCliError(
            f"Filter '{filter_name}' not found on "
            f"{'visual ' + visual_name if visual_name else 'page'} '{page_name}'."
        )

    _write_json(target, _set_filters(data, kept))
    return {"status": "removed", "name": filter_name}
+
+
def filter_add_topn(
    definition_path: Path,
    page_name: str,
    table: str,
    column: str,
    n: int,
    order_by_table: str,
    order_by_column: str,
    direction: str = "Top",
    visual_name: str | None = None,
    name: str | None = None,
) -> dict[str, Any]:
    """Add a TopN filter to a page or visual.

    Keeps the top (or bottom) *n* values of ``table.column``, ranked by the
    aggregated ``order_by_table.order_by_column`` (Function code 0 -- the
    default aggregate). *direction* is ``"Top"`` (highest N by
    *order_by_column*) or ``"Bottom"`` (lowest N). Direction maps to Power BI
    query Direction values: Top = 2 (Descending), Bottom = 1 (Ascending).

    Returns a status dict with name, type, scope, n, and direction.

    Raises:
        PbiCliError: for an invalid *direction* or a missing target file.
    """
    direction_upper = direction.strip().capitalize()
    if direction_upper not in ("Top", "Bottom"):
        raise PbiCliError(f"direction must be 'Top' or 'Bottom', got '{direction}'.")

    pbi_direction = 2 if direction_upper == "Top" else 1

    target = _resolve_target_path(definition_path, page_name, visual_name)
    if not target.exists():
        raise PbiCliError(f"File not found: {target}")

    filter_name = name if name is not None else _generate_name()
    # Source aliases follow the "first letter of table, lowercased" convention.
    cat_alias = table[0].lower()
    ord_alias = order_by_table[0].lower()
    # Avoid alias collision when both tables start with the same letter
    if ord_alias == cat_alias and order_by_table != table:
        ord_alias = ord_alias + "2"
    scope = "visual" if visual_name is not None else "page"

    # Inner subquery From: include both tables when they differ
    inner_from: list[dict[str, Any]] = [
        {"Name": cat_alias, "Entity": table, "Type": 0},
    ]
    if order_by_table != table:
        inner_from.append({"Name": ord_alias, "Entity": order_by_table, "Type": 0})

    entry: dict[str, Any] = {
        "name": filter_name,
        # "field" records which column the filter card is attached to.
        "field": {
            "Column": {
                "Expression": {"SourceRef": {"Entity": table}},
                "Property": column,
            }
        },
        "type": "TopN",
        "filter": {
            "Version": 2,
            "From": [
                # Subquery that selects the top/bottom N category values.
                {
                    "Name": "subquery",
                    "Expression": {
                        "Subquery": {
                            "Query": {
                                "Version": 2,
                                "From": inner_from,
                                "Select": [
                                    {
                                        "Column": {
                                            "Expression": {
                                                "SourceRef": {"Source": cat_alias}
                                            },
                                            "Property": column,
                                        },
                                        "Name": "field",
                                    }
                                ],
                                # Rank categories by the aggregated order-by
                                # column in the requested direction.
                                "OrderBy": [
                                    {
                                        "Direction": pbi_direction,
                                        "Expression": {
                                            "Aggregation": {
                                                "Expression": {
                                                    "Column": {
                                                        "Expression": {
                                                            "SourceRef": {
                                                                # NOTE: when the
                                                                # tables are equal
                                                                # ord_alias equals
                                                                # cat_alias, so both
                                                                # branches agree.
                                                                "Source": ord_alias
                                                                if order_by_table != table
                                                                else cat_alias
                                                            }
                                                        },
                                                        "Property": order_by_column,
                                                    }
                                                },
                                                "Function": 0,
                                            }
                                        },
                                    }
                                ],
                                "Top": n,
                            }
                        }
                    },
                    "Type": 2,
                },
                {"Name": cat_alias, "Entity": table, "Type": 0},
            ],
            # Outer condition: keep rows whose category value appears in the
            # subquery result.
            "Where": [
                {
                    "Condition": {
                        "In": {
                            "Expressions": [
                                {
                                    "Column": {
                                        "Expression": {
                                            "SourceRef": {"Source": cat_alias}
                                        },
                                        "Property": column,
                                    }
                                }
                            ],
                            "Table": {"SourceRef": {"Source": "subquery"}},
                        }
                    }
                }
            ],
        },
    }

    # Page-level filters record how they were created.
    if scope == "page":
        entry["howCreated"] = "User"

    data = _read_json(target)
    filters = list(_get_filters(data))
    filters.append(entry)
    updated = _set_filters(data, filters)
    _write_json(target, updated)

    return {
        "status": "added",
        "name": filter_name,
        "type": "TopN",
        "scope": scope,
        "n": n,
        "direction": direction_upper,
    }
+
+
# TimeUnit integer codes used by Power BI for RelativeDate filters.
# Keys are the CLI-facing unit names accepted by filter_add_relative_date().
_RELATIVE_DATE_TIME_UNITS: dict[str, int] = {
    "days": 0,
    "weeks": 1,
    "months": 2,
    "years": 3,
}
+
+
def filter_add_relative_date(
    definition_path: Path,
    page_name: str,
    table: str,
    column: str,
    amount: int,
    time_unit: str,
    visual_name: str | None = None,
    name: str | None = None,
) -> dict[str, Any]:
    """Add a RelativeDate filter (e.g. "last 3 months") to a page or visual.

    *amount* is a positive integer representing the period count.
    *time_unit* is one of ``"days"``, ``"weeks"``, ``"months"``, ``"years"``.

    The filter matches rows where *column* falls in the last *amount* *time_unit*
    relative to today (inclusive of the current period boundary).

    Returns a status dict with name, type, scope, amount, and time_unit.

    Raises:
        PbiCliError: for an unknown *time_unit* or a missing target file.
    """
    time_unit_lower = time_unit.strip().lower()
    if time_unit_lower not in _RELATIVE_DATE_TIME_UNITS:
        valid = ", ".join(_RELATIVE_DATE_TIME_UNITS)
        raise PbiCliError(
            f"time_unit must be one of {valid}, got '{time_unit}'."
        )
    time_unit_code = _RELATIVE_DATE_TIME_UNITS[time_unit_lower]
    days_code = _RELATIVE_DATE_TIME_UNITS["days"]

    target = _resolve_target_path(definition_path, page_name, visual_name)
    if not target.exists():
        raise PbiCliError(f"File not found: {target}")

    filter_name = name if name is not None else _generate_name()
    # Query-source alias: first letter of the table name, lowercased.
    alias = table[0].lower()
    scope = "visual" if visual_name is not None else "page"

    # LowerBound: DateSpan(DateAdd(DateAdd(Now(), +1, days), -amount, time_unit), days)
    # i.e. step one day past "now", subtract the window, then take the
    # day-granularity span of that point.
    lower_bound: dict[str, Any] = {
        "DateSpan": {
            "Expression": {
                "DateAdd": {
                    "Expression": {
                        "DateAdd": {
                            "Expression": {"Now": {}},
                            "Amount": 1,
                            "TimeUnit": days_code,
                        }
                    },
                    "Amount": -amount,
                    "TimeUnit": time_unit_code,
                }
            },
            "TimeUnit": days_code,
        }
    }

    # UpperBound: DateSpan(Now(), days)
    upper_bound: dict[str, Any] = {
        "DateSpan": {
            "Expression": {"Now": {}},
            "TimeUnit": days_code,
        }
    }

    entry: dict[str, Any] = {
        "name": filter_name,
        # Column the filter card is attached to.
        "field": {
            "Column": {
                "Expression": {"SourceRef": {"Entity": table}},
                "Property": column,
            }
        },
        "type": "RelativeDate",
        "filter": {
            "Version": 2,
            "From": [{"Name": alias, "Entity": table, "Type": 0}],
            # Single Between condition spanning [lower_bound, upper_bound].
            "Where": [
                {
                    "Condition": {
                        "Between": {
                            "Expression": {
                                "Column": {
                                    "Expression": {"SourceRef": {"Source": alias}},
                                    "Property": column,
                                }
                            },
                            "LowerBound": lower_bound,
                            "UpperBound": upper_bound,
                        }
                    }
                }
            ],
        },
    }

    # Page-level filters record how they were created.
    if scope == "page":
        entry["howCreated"] = "User"

    data = _read_json(target)
    filters = list(_get_filters(data))
    filters.append(entry)
    updated = _set_filters(data, filters)
    _write_json(target, updated)

    return {
        "status": "added",
        "name": filter_name,
        "type": "RelativeDate",
        "scope": scope,
        "amount": amount,
        "time_unit": time_unit_lower,
    }
+
+
def filter_clear(
    definition_path: Path,
    page_name: str,
    visual_name: str | None = None,
) -> dict[str, Any]:
    """Remove every filter from a page or visual.

    Returns a status dict with the count of removed filters and the scope.

    Raises:
        PbiCliError: if the target JSON file does not exist.
    """
    target = _resolve_target_path(definition_path, page_name, visual_name)
    if not target.exists():
        raise PbiCliError(f"File not found: {target}")

    data = _read_json(target)
    removed_count = len(_get_filters(data))
    _write_json(target, _set_filters(data, []))

    scope = "visual" if visual_name is not None else "page"
    return {"status": "cleared", "removed": removed_count, "scope": scope}
diff --git a/src/pbi_cli/core/format_backend.py b/src/pbi_cli/core/format_backend.py
new file mode 100644
index 0000000..9071853
--- /dev/null
+++ b/src/pbi_cli/core/format_backend.py
@@ -0,0 +1,403 @@
+"""Pure-function backend for PBIR conditional formatting operations.
+
+Mirrors ``report_backend.py`` but focuses on visual conditional formatting.
+Every function takes a ``Path`` to the definition folder and returns a plain
+Python dict suitable for ``format_result()``.
+"""
+
+from __future__ import annotations
+
+import json
+from pathlib import Path
+from typing import Any
+
+from pbi_cli.core.errors import PbiCliError
+from pbi_cli.core.pbir_path import get_visual_dir
+
+# ---------------------------------------------------------------------------
+# JSON helpers (same as report_backend / visual_backend)
+# ---------------------------------------------------------------------------
+
+
+def _read_json(path: Path) -> dict[str, Any]:
+ """Read and parse a JSON file."""
+ return json.loads(path.read_text(encoding="utf-8"))
+
+
+def _write_json(path: Path, data: dict[str, Any]) -> None:
+ """Write JSON with consistent formatting."""
+ path.write_text(
+ json.dumps(data, indent=2, ensure_ascii=False) + "\n",
+ encoding="utf-8",
+ )
+
+
+# ---------------------------------------------------------------------------
+# Internal helpers
+# ---------------------------------------------------------------------------
+
+
def _load_visual(definition_path: Path, page_name: str, visual_name: str) -> dict[str, Any]:
    """Load visual.json for *visual_name*, raising PbiCliError when missing."""
    visual_path = get_visual_dir(definition_path, page_name, visual_name) / "visual.json"
    if visual_path.exists():
        return _read_json(visual_path)
    raise PbiCliError(
        f"Visual '{visual_name}' not found on page '{page_name}'. "
        f"Expected: {visual_path}"
    )
+
+
def _save_visual(
    definition_path: Path,
    page_name: str,
    visual_name: str,
    data: dict[str, Any],
) -> None:
    """Persist *data* back to the visual's visual.json file."""
    target = get_visual_dir(definition_path, page_name, visual_name) / "visual.json"
    _write_json(target, data)
+
+
+def _get_values_list(objects: dict[str, Any]) -> list[dict[str, Any]]:
+ """Return the objects.values list, defaulting to empty."""
+ return list(objects.get("values", []))
+
+
+def _replace_or_append(
+ values: list[dict[str, Any]],
+ new_entry: dict[str, Any],
+ field_query_ref: str,
+) -> list[dict[str, Any]]:
+ """Return a new list with *new_entry* replacing any existing entry
+ whose ``selector.metadata`` matches *field_query_ref*, or appended
+ if no match exists. Immutable -- does not modify the input list.
+ """
+ replaced = False
+ result: list[dict[str, Any]] = []
+ for entry in values:
+ meta = entry.get("selector", {}).get("metadata", "")
+ if meta == field_query_ref:
+ result.append(new_entry)
+ replaced = True
+ else:
+ result.append(entry)
+ if not replaced:
+ result.append(new_entry)
+ return result
+
+
+# ---------------------------------------------------------------------------
+# Public API
+# ---------------------------------------------------------------------------
+
+
def format_get(
    definition_path: Path,
    page_name: str,
    visual_name: str,
) -> dict[str, Any]:
    """Return the current formatting objects for a visual.

    Returns ``{"visual": visual_name, "objects": {...}}`` where *objects* is
    the content of ``visual.objects`` in visual.json (empty dict if absent).
    """
    visual_data = _load_visual(definition_path, page_name, visual_name)
    visual_section = visual_data.get("visual", {})
    return {"visual": visual_name, "objects": visual_section.get("objects", {})}
+
+
def format_clear(
    definition_path: Path,
    page_name: str,
    visual_name: str,
) -> dict[str, Any]:
    """Reset a visual's formatting by writing an empty ``visual.objects``.

    Returns ``{"status": "cleared", "visual": visual_name}``.
    """
    data = _load_visual(definition_path, page_name, visual_name)
    # Immutable update: rebuild the nested dicts rather than mutate in place.
    cleared = {**data, "visual": {**data.get("visual", {}), "objects": {}}}
    _save_visual(definition_path, page_name, visual_name, cleared)
    return {"status": "cleared", "visual": visual_name}
+
+
def format_background_gradient(
    definition_path: Path,
    page_name: str,
    visual_name: str,
    input_table: str,
    input_column: str,
    field_query_ref: str,
    min_color: str = "minColor",  # NOTE(review): default looks like a placeholder, not a real color -- confirm callers always pass a value
    max_color: str = "maxColor",  # NOTE(review): same concern as min_color
) -> dict[str, Any]:
    """Add a linear gradient background color rule to a visual column.

    *input_table* / *input_column*: the measure/column driving the gradient
    (used for the FillRule.Input Aggregation).

    *field_query_ref*: the queryRef of the target field (e.g.
    ``"Sum(financials.Profit)"``). Used as ``selector.metadata``.

    Adds/replaces the entry in ``visual.objects.values[]`` whose
    ``selector.metadata`` matches *field_query_ref*.

    Returns ``{"status": "applied", "visual": visual_name,
    "rule": "gradient", "field": field_query_ref}``.
    """
    data = _load_visual(definition_path, page_name, visual_name)
    # Shallow-copy the nested sections so the update stays immutable.
    visual_section = dict(data.get("visual", {}))
    objects = dict(visual_section.get("objects", {}))
    values = _get_values_list(objects)

    new_entry: dict[str, Any] = {
        "properties": {
            "backColor": {
                "solid": {
                    "color": {
                        "expr": {
                            "FillRule": {
                                # Gradient input: aggregation of the driving
                                # column (Function code 0).
                                "Input": {
                                    "Aggregation": {
                                        "Expression": {
                                            "Column": {
                                                "Expression": {
                                                    "SourceRef": {"Entity": input_table}
                                                },
                                                "Property": input_column,
                                            }
                                        },
                                        "Function": 0,
                                    }
                                },
                                # Two-stop linear gradient; nulls are colored
                                # as if they were zero.
                                "FillRule": {
                                    "linearGradient2": {
                                        "min": {
                                            "color": {
                                                "Literal": {"Value": f"'{min_color}'"}
                                            }
                                        },
                                        "max": {
                                            "color": {
                                                "Literal": {"Value": f"'{max_color}'"}
                                            }
                                        },
                                        "nullColoringStrategy": {
                                            "strategy": {
                                                "Literal": {"Value": "'asZero'"}
                                            }
                                        },
                                    }
                                },
                            }
                        }
                    }
                }
            }
        },
        # Selector targets every data point of the field named by queryRef.
        "selector": {
            "data": [{"dataViewWildcard": {"matchingOption": 1}}],
            "metadata": field_query_ref,
        },
    }

    new_values = _replace_or_append(values, new_entry, field_query_ref)
    new_objects = {**objects, "values": new_values}
    new_visual = {**visual_section, "objects": new_objects}
    new_data = {**data, "visual": new_visual}
    _save_visual(definition_path, page_name, visual_name, new_data)

    return {
        "status": "applied",
        "visual": visual_name,
        "rule": "gradient",
        "field": field_query_ref,
    }
+
+
# ComparisonKind integer codes (Power BI query expression).
# Keys are the CLI-facing operator names accepted by
# format_background_conditional(); values are the codes written into the
# Comparison expression.
_COMPARISON_KINDS: dict[str, int] = {
    "eq": 0,   # equal
    "neq": 1,  # not equal
    "gt": 2,   # greater than
    "gte": 3,  # greater than or equal
    "lt": 4,   # less than
    "lte": 5,  # less than or equal
}
+
+
def format_background_conditional(
    definition_path: Path,
    page_name: str,
    visual_name: str,
    input_table: str,
    input_column: str,
    threshold: float | int,
    color_hex: str,
    comparison: str = "gt",
    field_query_ref: str | None = None,
) -> dict[str, Any]:
    """Add a rule-based conditional background color to a visual column.

    When the aggregated value of *input_column* satisfies the comparison
    against *threshold*, the cell background is set to *color_hex*.

    *comparison* is one of ``"eq"``, ``"neq"``, ``"gt"``, ``"gte"``,
    ``"lt"``, ``"lte"`` (default ``"gt"``).

    *field_query_ref* is the ``selector.metadata`` queryRef of the target
    field (e.g. ``"Sum(financials.Units Sold)"``). Defaults to
    ``"Sum({table}.{column})"`` if omitted.

    Returns ``{"status": "applied", "visual": visual_name,
    "rule": "conditional", "field": field_query_ref}``.

    Raises:
        PbiCliError: for an unknown *comparison* or a missing visual.
    """
    comparison_lower = comparison.strip().lower()
    if comparison_lower not in _COMPARISON_KINDS:
        valid = ", ".join(_COMPARISON_KINDS)
        raise PbiCliError(
            f"comparison must be one of {valid}, got '{comparison}'."
        )
    comparison_kind = _COMPARISON_KINDS[comparison_lower]

    if field_query_ref is None:
        field_query_ref = f"Sum({input_table}.{input_column})"

    # Format threshold as a Power BI decimal literal (D suffix).
    threshold_literal = f"{threshold}D"

    data = _load_visual(definition_path, page_name, visual_name)
    # Shallow-copy the nested sections so the update stays immutable.
    visual_section = dict(data.get("visual", {}))
    objects = dict(visual_section.get("objects", {}))
    values = _get_values_list(objects)

    new_entry: dict[str, Any] = {
        "properties": {
            "backColor": {
                "solid": {
                    "color": {
                        "expr": {
                            # Single-case conditional:
                            # if Agg(input_column) <op> threshold -> color_hex.
                            "Conditional": {
                                "Cases": [
                                    {
                                        "Condition": {
                                            "Comparison": {
                                                "ComparisonKind": comparison_kind,
                                                "Left": {
                                                    "Aggregation": {
                                                        "Expression": {
                                                            "Column": {
                                                                "Expression": {
                                                                    "SourceRef": {
                                                                        "Entity": input_table
                                                                    }
                                                                },
                                                                "Property": input_column,
                                                            }
                                                        },
                                                        "Function": 0,
                                                    }
                                                },
                                                "Right": {
                                                    "Literal": {
                                                        "Value": threshold_literal
                                                    }
                                                },
                                            }
                                        },
                                        "Value": {
                                            "Literal": {"Value": f"'{color_hex}'"}
                                        },
                                    }
                                ]
                            }
                        }
                    }
                }
            }
        },
        # Selector targets every data point of the field named by queryRef.
        "selector": {
            "data": [{"dataViewWildcard": {"matchingOption": 1}}],
            "metadata": field_query_ref,
        },
    }

    new_values = _replace_or_append(values, new_entry, field_query_ref)
    new_objects = {**objects, "values": new_values}
    new_visual = {**visual_section, "objects": new_objects}
    new_data = {**data, "visual": new_visual}
    _save_visual(definition_path, page_name, visual_name, new_data)

    return {
        "status": "applied",
        "visual": visual_name,
        "rule": "conditional",
        "field": field_query_ref,
    }
+
+
def format_background_measure(
    definition_path: Path,
    page_name: str,
    visual_name: str,
    measure_table: str,
    measure_property: str,
    field_query_ref: str,
) -> dict[str, Any]:
    """Add a measure-driven background color rule to a visual column.

    *measure_table* / *measure_property*: the DAX measure that returns a
    hex color string.

    *field_query_ref*: the queryRef of the target field.

    Adds/replaces the entry in ``visual.objects.values[]`` whose
    ``selector.metadata`` matches *field_query_ref*.

    Returns ``{"status": "applied", "visual": visual_name,
    "rule": "measure", "field": field_query_ref}``.
    """
    data = _load_visual(definition_path, page_name, visual_name)
    # Shallow-copy the nested sections so the update stays immutable.
    visual_section = dict(data.get("visual", {}))
    objects = dict(visual_section.get("objects", {}))
    values = _get_values_list(objects)

    new_entry: dict[str, Any] = {
        "properties": {
            "backColor": {
                "solid": {
                    "color": {
                        # The color expression is the measure itself; Power BI
                        # evaluates it per data point.
                        "expr": {
                            "Measure": {
                                "Expression": {
                                    "SourceRef": {"Entity": measure_table}
                                },
                                "Property": measure_property,
                            }
                        }
                    }
                }
            }
        },
        # Selector targets every data point of the field named by queryRef.
        "selector": {
            "data": [{"dataViewWildcard": {"matchingOption": 1}}],
            "metadata": field_query_ref,
        },
    }

    new_values = _replace_or_append(values, new_entry, field_query_ref)
    new_objects = {**objects, "values": new_values}
    new_visual = {**visual_section, "objects": new_objects}
    new_data = {**data, "visual": new_visual}
    _save_visual(definition_path, page_name, visual_name, new_data)

    return {
        "status": "applied",
        "visual": visual_name,
        "rule": "measure",
        "field": field_query_ref,
    }
diff --git a/src/pbi_cli/core/pbir_models.py b/src/pbi_cli/core/pbir_models.py
new file mode 100644
index 0000000..9bafd93
--- /dev/null
+++ b/src/pbi_cli/core/pbir_models.py
@@ -0,0 +1,220 @@
+"""Frozen dataclasses for PBIR (Power BI Enhanced Report Format) structures."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from pathlib import Path
+
# -- PBIR Schema URLs -------------------------------------------------------
# Pinned $schema identifiers for the PBIR JSON files this tool works with.
# The version segment in each URL (e.g. report/1.2.0) fixes the schema
# revision; bump deliberately when targeting a newer PBIR release.

SCHEMA_REPORT = (
    "https://developer.microsoft.com/json-schemas/"
    "fabric/item/report/definition/report/1.2.0/schema.json"
)
SCHEMA_PAGE = (
    "https://developer.microsoft.com/json-schemas/"
    "fabric/item/report/definition/page/2.1.0/schema.json"
)
SCHEMA_PAGES_METADATA = (
    "https://developer.microsoft.com/json-schemas/"
    "fabric/item/report/definition/pagesMetadata/1.0.0/schema.json"
)
SCHEMA_VERSION = (
    "https://developer.microsoft.com/json-schemas/"
    "fabric/item/report/definition/versionMetadata/1.0.0/schema.json"
)
SCHEMA_VISUAL_CONTAINER = (
    "https://developer.microsoft.com/json-schemas/"
    "fabric/item/report/definition/visualContainer/2.7.0/schema.json"
)
SCHEMA_BOOKMARKS_METADATA = (
    "https://developer.microsoft.com/json-schemas/"
    "fabric/item/report/definition/bookmarksMetadata/1.0.0/schema.json"
)
SCHEMA_BOOKMARK = (
    "https://developer.microsoft.com/json-schemas/"
    "fabric/item/report/definition/bookmark/2.1.0/schema.json"
)
+
+
# -- Visual type identifiers ------------------------------------------------

# Canonical PBIR ``visualType`` identifiers accepted by this tool.
# Grouped by the release in which support was added.
SUPPORTED_VISUAL_TYPES: frozenset[str] = frozenset({
    # Original 9
    "barChart",
    "lineChart",
    "card",
    "pivotTable",
    "tableEx",
    "slicer",
    "kpi",
    "gauge",
    "donutChart",
    # v3.1.0 additions
    "columnChart",
    "areaChart",
    "ribbonChart",
    "waterfallChart",
    "scatterChart",
    "funnelChart",
    "multiRowCard",
    "treemap",
    "cardNew",
    "stackedBarChart",
    "lineStackedColumnComboChart",
    # v3.4.0 additions
    "cardVisual",
    "actionButton",
    # v3.5.0 additions (confirmed from HR Analysis Desktop export)
    "clusteredColumnChart",
    "clusteredBarChart",
    "textSlicer",
    "listSlicer",
    # v3.6.0 additions (confirmed from HR Analysis Desktop export)
    "image",
    "shape",
    "textbox",
    "pageNavigator",
    "advancedSlicerVisual",
    # v3.8.0 additions
    "azureMap",
})
+
# Mapping from user-friendly names to PBIR visualType identifiers.
# Every value here is a member of SUPPORTED_VISUAL_TYPES; multiple aliases
# may resolve to the same canonical type (e.g. "pie" -> "donutChart").
VISUAL_TYPE_ALIASES: dict[str, str] = {
    # Original 9
    "bar_chart": "barChart",
    "bar": "barChart",
    "line_chart": "lineChart",
    "line": "lineChart",
    "card": "card",
    "table": "tableEx",
    "matrix": "pivotTable",
    "slicer": "slicer",
    "kpi": "kpi",
    "gauge": "gauge",
    "donut": "donutChart",
    "donut_chart": "donutChart",
    "pie": "donutChart",
    # v3.1.0 additions
    "column": "columnChart",
    "column_chart": "columnChart",
    "area": "areaChart",
    "area_chart": "areaChart",
    "ribbon": "ribbonChart",
    "ribbon_chart": "ribbonChart",
    "waterfall": "waterfallChart",
    "waterfall_chart": "waterfallChart",
    "scatter": "scatterChart",
    "scatter_chart": "scatterChart",
    "funnel": "funnelChart",
    "funnel_chart": "funnelChart",
    "multi_row_card": "multiRowCard",
    "treemap": "treemap",
    "card_new": "cardNew",
    "new_card": "cardNew",
    "stacked_bar": "stackedBarChart",
    "stacked_bar_chart": "stackedBarChart",
    "combo": "lineStackedColumnComboChart",
    "combo_chart": "lineStackedColumnComboChart",
    # v3.4.0 additions
    "card_visual": "cardVisual",
    "modern_card": "cardVisual",
    "action_button": "actionButton",
    "button": "actionButton",
    # v3.5.0 additions
    "clustered_column": "clusteredColumnChart",
    "clustered_column_chart": "clusteredColumnChart",
    "clustered_bar": "clusteredBarChart",
    "clustered_bar_chart": "clusteredBarChart",
    "text_slicer": "textSlicer",
    "list_slicer": "listSlicer",
    # v3.6.0 additions
    "img": "image",
    "text_box": "textbox",
    "page_navigator": "pageNavigator",
    "page_nav": "pageNavigator",
    "navigator": "pageNavigator",
    "advanced_slicer": "advancedSlicerVisual",
    "adv_slicer": "advancedSlicerVisual",
    "tile_slicer": "advancedSlicerVisual",
    # v3.8.0 additions
    "azure_map": "azureMap",
    "map": "azureMap",
}
+
+
# -- Default theme -----------------------------------------------------------

# Built-in shared-resources theme reference.
# NOTE(review): the name/reportVersionAtImport values mirror a specific
# Desktop release (CY24SU06 / 5.55); confirm against current Desktop exports
# when updating.
DEFAULT_BASE_THEME = {
    "name": "CY24SU06",
    "reportVersionAtImport": "5.55",
    "type": "SharedResources",
}
+
+
+# -- Dataclasses -------------------------------------------------------------
+
+
@dataclass(frozen=True)
class PbirPosition:
    """Visual position and dimensions on a page canvas."""

    x: float  # horizontal offset of the visual
    y: float  # vertical offset of the visual
    width: float
    height: float
    z: int = 0  # stacking (z-order) position
    tab_order: int = 0  # keyboard tab-navigation order
+
+
@dataclass(frozen=True)
class PbirVisual:
    """Summary of a single PBIR visual."""

    name: str  # visual container identifier
    visual_type: str  # PBIR visualType identifier (e.g. "barChart")
    position: PbirPosition  # placement on the page canvas
    page_name: str  # page that owns this visual
    folder_path: Path  # on-disk folder holding the visual definition
    has_query: bool = False  # whether the visual carries a data query
+
+
@dataclass(frozen=True)
class PbirPage:
    """Summary of a single PBIR page."""

    name: str  # internal page identifier
    display_name: str  # user-facing page title
    ordinal: int  # position in the report's page order
    width: int  # canvas width
    height: int  # canvas height
    display_option: str  # page display/scaling option
    visual_count: int  # number of visuals on the page
    folder_path: Path  # on-disk page folder
+
+
@dataclass(frozen=True)
class PbirReport:
    """Summary of a PBIR report."""

    name: str  # report name
    definition_path: Path  # path to the report's definition folder
    page_count: int  # number of pages in the report
    theme_name: str  # active theme name
    pages: list[PbirPage] = field(default_factory=list)  # per-page summaries
+
+
@dataclass(frozen=True)
class FieldBinding:
    """One field bound to a visual data role."""

    role: str  # data role name the field is bound to
    table: str  # source table name
    column: str  # column or measure name
    is_measure: bool = False  # True when the field is a measure

    @property
    def qualified_name(self) -> str:
        """Return the field in ``Table[Column]`` notation."""
        return "{}[{}]".format(self.table, self.column)
diff --git a/src/pbi_cli/core/pbir_path.py b/src/pbi_cli/core/pbir_path.py
new file mode 100644
index 0000000..464afb1
--- /dev/null
+++ b/src/pbi_cli/core/pbir_path.py
@@ -0,0 +1,163 @@
+"""PBIR report folder resolution and path utilities."""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+from pbi_cli.core.errors import ReportNotFoundError
+
+# Maximum parent directories to walk up when auto-detecting
+_MAX_WALK_UP = 5
+
+
def resolve_report_path(explicit_path: str | None = None) -> Path:
    """Resolve the PBIR definition folder path.

    Resolution order:
        1. Explicit ``--path`` provided by user.
        2. Walk up from CWD looking for ``*.Report/definition/report.json``.
        3. Look for a sibling ``.pbip`` file and derive the ``.Report`` folder.
        4. Raise ``ReportNotFoundError``.
    """
    if explicit_path is not None:
        return _resolve_explicit(Path(explicit_path))

    # Auto-detection: try each strategy in order from the current directory.
    start = Path.cwd()
    for finder in (_find_definition_walkup, _find_from_pbip):
        candidate = finder(start)
        if candidate is not None:
            return candidate

    raise ReportNotFoundError()
+
+
+def _resolve_explicit(path: Path) -> Path:
+ """Normalise an explicit path to the definition folder."""
+ path = path.resolve()
+
+ # User pointed directly at the definition folder
+ if path.name == "definition" and (path / "report.json").exists():
+ return path
+
+ # User pointed at the .Report folder
+ defn = path / "definition"
+ if defn.is_dir() and (defn / "report.json").exists():
+ return defn
+
+ # User pointed at something that contains a .Report child
+ for child in path.iterdir() if path.is_dir() else []:
+ if child.name.endswith(".Report") and child.is_dir():
+ defn = child / "definition"
+ if (defn / "report.json").exists():
+ return defn
+
+ raise ReportNotFoundError(
+ f"No PBIR definition found at '{path}'. "
+ "Expected a folder containing definition/report.json."
+ )
+
+
def _find_definition_walkup(start: Path) -> Path | None:
    """Walk up from *start* looking for a .Report/definition/ folder."""
    current = start.resolve()
    for _ in range(_MAX_WALK_UP):
        for entry in current.iterdir():
            if not (entry.is_dir() and entry.name.endswith(".Report")):
                continue
            candidate = entry / "definition"
            if candidate.is_dir() and (candidate / "report.json").exists():
                return candidate
        # Stop at the filesystem root (parent of root is itself).
        if current.parent == current:
            break
        current = current.parent
    return None
+
+
+def _find_from_pbip(start: Path) -> Path | None:
+ """Look for a .pbip file and derive the .Report folder."""
+ if not start.is_dir():
+ return None
+ try:
+ for item in start.iterdir():
+ if item.is_file() and item.suffix == ".pbip":
+ report_folder = start / f"{item.stem}.Report"
+ defn = report_folder / "definition"
+ if defn.is_dir() and (defn / "report.json").exists():
+ return defn
+ except (OSError, PermissionError):
+ return None
+ return None
+
+
def get_pages_dir(definition_path: Path) -> Path:
    """Return the pages directory, creating it if needed."""
    pages_dir = definition_path / "pages"
    pages_dir.mkdir(exist_ok=True)
    return pages_dir
+
+
def get_page_dir(definition_path: Path, page_name: str) -> Path:
    """Return the directory for a specific page (no filesystem access)."""
    return definition_path.joinpath("pages", page_name)
+
+
def get_visuals_dir(definition_path: Path, page_name: str) -> Path:
    """Return the visuals directory for a specific page, creating it if needed."""
    visuals_dir = definition_path.joinpath("pages", page_name, "visuals")
    visuals_dir.mkdir(parents=True, exist_ok=True)
    return visuals_dir
+
+
def get_visual_dir(
    definition_path: Path, page_name: str, visual_name: str
) -> Path:
    """Return the directory for a specific visual (no filesystem access)."""
    return definition_path.joinpath("pages", page_name, "visuals", visual_name)
+
+
def validate_report_structure(definition_path: Path) -> list[str]:
    """Check that the PBIR folder structure is valid.

    Returns a list of error messages (empty if valid).
    """
    problems: list[str] = []

    # Without the definition folder nothing else can be checked.
    if not definition_path.is_dir():
        problems.append(f"Definition folder does not exist: {definition_path}")
        return problems

    for required in ("report.json", "version.json"):
        if not (definition_path / required).exists():
            problems.append(f"Missing required file: {required}")

    pages_dir = definition_path / "pages"
    if not pages_dir.is_dir():
        return problems

    for page_dir in sorted(p for p in pages_dir.iterdir() if p.is_dir()):
        if not (page_dir / "page.json").exists():
            problems.append(f"Page folder '{page_dir.name}' missing page.json")
        visuals_dir = page_dir / "visuals"
        if not visuals_dir.is_dir():
            continue
        for visual_dir in sorted(v for v in visuals_dir.iterdir() if v.is_dir()):
            if not (visual_dir / "visual.json").exists():
                problems.append(
                    f"Visual folder '{page_dir.name}/visuals/{visual_dir.name}' "
                    "missing visual.json"
                )

    return problems
diff --git a/src/pbi_cli/core/pbir_validators.py b/src/pbi_cli/core/pbir_validators.py
new file mode 100644
index 0000000..2c8f421
--- /dev/null
+++ b/src/pbi_cli/core/pbir_validators.py
@@ -0,0 +1,435 @@
+"""Enhanced PBIR validation beyond basic structure checks.
+
+Provides three tiers of validation:
+ 1. Structural: folder layout and file existence (in pbir_path.py)
+ 2. Schema: required fields, valid types, cross-file consistency
+ 3. Model-aware: field bindings against a connected semantic model (optional)
+"""
+
+from __future__ import annotations
+
+import json
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Any
+
+
@dataclass(frozen=True)
class ValidationResult:
    """Immutable container for a single validation finding."""

    # Severity bucket; one of "error", "warning", "info".
    level: str
    # File the finding refers to, relative to the definition folder
    # ("" for folder-level findings).
    file: str
    # Human-readable description of the problem.
    message: str
+
+
def validate_report_full(definition_path: Path) -> dict[str, Any]:
    """Run all validation tiers and return a structured report.

    Returns a dict with ``valid``, ``errors``, ``warnings``, and ``summary``.
    """
    # Tier 1: structural layout checks. Imported lazily — presumably to
    # avoid an import cycle with pbir_path; confirm before moving to top.
    from pbi_cli.core.pbir_path import validate_report_structure

    findings = [
        ValidationResult("error", "", message)
        for message in validate_report_structure(definition_path)
    ]

    # No definition folder means nothing further can be inspected.
    if not definition_path.is_dir():
        return _build_result(findings)

    # Tier 2: per-file syntax/schema checks, then cross-file consistency.
    tier2_checks = (
        _validate_json_syntax,
        _validate_report_json,
        _validate_version_json,
        _validate_pages_metadata,
        _validate_all_pages,
        _validate_all_visuals,
        _validate_page_order_consistency,
        _validate_visual_name_uniqueness,
    )
    for check in tier2_checks:
        findings.extend(check(definition_path))

    return _build_result(findings)
+
+
def validate_bindings_against_model(
    definition_path: Path,
    model_tables: list[dict[str, Any]],
) -> list[ValidationResult]:
    """Tier 3: cross-reference visual field bindings against a model.

    ``model_tables`` should be a list of dicts with 'name' and 'columns' keys,
    where 'columns' is a list of dicts with 'name' keys. Measures are included
    as columns.
    """
    results: list[ValidationResult] = []

    # Flatten the model into a set of "Table[Field]" strings for O(1) lookup.
    known: set[str] = set()
    for tbl in model_tables:
        tname = tbl.get("name", "")
        for member in tbl.get("columns", []) + tbl.get("measures", []):
            known.add(f"{tname}[{member.get('name', '')}]")

    pages_dir = definition_path / "pages"
    if not pages_dir.is_dir():
        return results

    for page_dir in sorted(pages_dir.iterdir()):
        if not page_dir.is_dir():
            continue
        visuals_dir = page_dir / "visuals"
        if not visuals_dir.is_dir():
            continue
        for visual_dir in sorted(visuals_dir.iterdir()):
            visual_file = visual_dir / "visual.json"
            if not visual_dir.is_dir() or not visual_file.exists():
                continue
            # Malformed visuals are skipped silently here; the tier-2
            # syntax pass is responsible for reporting them.
            try:
                payload = json.loads(visual_file.read_text(encoding="utf-8"))
                query = payload.get("visual", {}).get("query", {})
                for command in query.get("Commands", []):
                    shape = command.get("SemanticQueryDataShapeCommand", {})
                    semantic_query = shape.get("Query", {})
                    # Map query source aliases back to entity (table) names.
                    aliases = {
                        src["Name"]: src["Entity"]
                        for src in semantic_query.get("From", [])
                    }
                    for item in semantic_query.get("Select", []):
                        ref = _extract_field_ref(item, aliases)
                        if ref is None or ref in known:
                            continue
                        results.append(ValidationResult(
                            "warning",
                            f"{page_dir.name}/visuals/{visual_dir.name}",
                            f"Field '{ref}' not found in semantic model",
                        ))
            except (json.JSONDecodeError, KeyError, TypeError):
                continue

    return results
+
+
+# ---------------------------------------------------------------------------
+# Tier 2 validators
+# ---------------------------------------------------------------------------
+
+
def _validate_json_syntax(definition_path: Path) -> list[ValidationResult]:
    """Check all JSON files under the definition folder parse without errors."""
    results: list[ValidationResult] = []
    for candidate in definition_path.rglob("*.json"):
        try:
            json.loads(candidate.read_text(encoding="utf-8"))
        except json.JSONDecodeError as exc:
            results.append(ValidationResult(
                "error",
                str(candidate.relative_to(definition_path)),
                f"Invalid JSON: {exc}",
            ))
    return results
+
+
def _validate_report_json(definition_path: Path) -> list[ValidationResult]:
    """Validate report.json required fields and schema."""
    report_file = definition_path / "report.json"
    if not report_file.exists():
        return []  # Structural check already caught this

    try:
        payload = json.loads(report_file.read_text(encoding="utf-8"))
    except json.JSONDecodeError:
        return []  # Syntax pass already reported it

    results: list[ValidationResult] = []

    if "$schema" not in payload:
        results.append(ValidationResult("warning", "report.json", "Missing $schema reference"))

    if "themeCollection" not in payload:
        results.append(ValidationResult(
            "error", "report.json", "Missing required 'themeCollection'"
        ))
    elif "baseTheme" not in payload["themeCollection"]:
        results.append(ValidationResult(
            "warning", "report.json", "themeCollection missing 'baseTheme'"
        ))

    if "layoutOptimization" not in payload:
        results.append(ValidationResult(
            "error", "report.json", "Missing required 'layoutOptimization'"
        ))

    return results
+
+
def _validate_version_json(definition_path: Path) -> list[ValidationResult]:
    """Validate version.json content."""
    version_file = definition_path / "version.json"
    if not version_file.exists():
        return []

    try:
        payload = json.loads(version_file.read_text(encoding="utf-8"))
    except json.JSONDecodeError:
        return []  # Syntax pass already reported it

    if "version" in payload:
        return []
    return [ValidationResult("error", "version.json", "Missing required 'version'")]
+
+
def _validate_pages_metadata(definition_path: Path) -> list[ValidationResult]:
    """Validate pages.json if present."""
    pages_meta_file = definition_path / "pages" / "pages.json"
    if not pages_meta_file.exists():
        return []

    try:
        meta = json.loads(pages_meta_file.read_text(encoding="utf-8"))
    except json.JSONDecodeError:
        return []  # Syntax pass already reported it

    if isinstance(meta.get("pageOrder", []), list):
        return []
    return [ValidationResult(
        "error", "pages/pages.json", "'pageOrder' must be an array"
    )]
+
+
def _validate_all_pages(definition_path: Path) -> list[ValidationResult]:
    """Validate individual page.json files."""
    results: list[ValidationResult] = []
    pages_dir = definition_path / "pages"
    if not pages_dir.is_dir():
        return results

    known_display_options = {
        "FitToPage", "FitToWidth", "ActualSize",
        "ActualSizeTopLeft", "DeprecatedDynamic",
    }

    for page_dir in sorted(pages_dir.iterdir()):
        page_file = page_dir / "page.json"
        if not page_dir.is_dir() or not page_file.exists():
            continue

        try:
            page_data = json.loads(page_file.read_text(encoding="utf-8"))
        except json.JSONDecodeError:
            continue  # Syntax pass already reported it

        rel = f"pages/{page_dir.name}/page.json"

        for required in ("name", "displayName", "displayOption"):
            if required not in page_data:
                results.append(ValidationResult("error", rel, f"Missing required '{required}'"))

        display_option = page_data.get("displayOption")
        if display_option and display_option not in known_display_options:
            results.append(ValidationResult(
                "warning", rel, f"Unknown displayOption '{display_option}'"
            ))

        # width/height are only optional for 'DeprecatedDynamic' pages.
        if display_option != "DeprecatedDynamic":
            for dim in ("width", "height"):
                if dim not in page_data:
                    results.append(ValidationResult("error", rel, f"Missing required '{dim}'"))

        page_name = page_data.get("name", "")
        if page_name and len(page_name) > 50:
            results.append(ValidationResult(
                "warning", rel, f"Name exceeds 50 chars: '{page_name[:20]}...'"
            ))

    return results
+
+
def _validate_all_visuals(definition_path: Path) -> list[ValidationResult]:
    """Validate individual visual.json files."""
    results: list[ValidationResult] = []
    pages_dir = definition_path / "pages"
    if not pages_dir.is_dir():
        return results

    for page_dir in sorted(pages_dir.iterdir()):
        if not page_dir.is_dir():
            continue
        visuals_dir = page_dir / "visuals"
        if not visuals_dir.is_dir():
            continue
        for visual_dir in sorted(visuals_dir.iterdir()):
            visual_file = visual_dir / "visual.json"
            if not visual_dir.is_dir() or not visual_file.exists():
                continue

            try:
                visual_data = json.loads(visual_file.read_text(encoding="utf-8"))
            except json.JSONDecodeError:
                continue  # Syntax pass already reported it

            rel = f"pages/{page_dir.name}/visuals/{visual_dir.name}/visual.json"

            if "name" not in visual_data:
                results.append(ValidationResult("error", rel, "Missing required 'name'"))

            if "position" not in visual_data:
                results.append(ValidationResult("error", rel, "Missing required 'position'"))
            else:
                position = visual_data["position"]
                for axis in ("x", "y", "width", "height"):
                    if axis not in position:
                        results.append(ValidationResult(
                            "error", rel, f"Position missing required '{axis}'"
                        ))

            # A container without visualType may still be a visual group.
            has_type = bool(visual_data.get("visual", {}).get("visualType", ""))
            if not has_type and "visualGroup" not in visual_data:
                results.append(ValidationResult(
                    "warning", rel, "Missing 'visual.visualType' (not a visual group either)"
                ))

    return results
+
+
+# ---------------------------------------------------------------------------
+# Cross-file consistency
+# ---------------------------------------------------------------------------
+
+
def _validate_page_order_consistency(definition_path: Path) -> list[ValidationResult]:
    """Check that pages.json references match actual page folders."""
    results: list[ValidationResult] = []
    pages_meta_file = definition_path / "pages" / "pages.json"
    if not pages_meta_file.exists():
        return results

    try:
        meta = json.loads(pages_meta_file.read_text(encoding="utf-8"))
    except json.JSONDecodeError:
        return results  # Syntax pass already reported it

    listed = meta.get("pageOrder", [])
    pages_dir = definition_path / "pages"
    on_disk = {
        entry.name
        for entry in pages_dir.iterdir()
        if entry.is_dir() and (entry / "page.json").exists()
    }

    # Dangling references: listed in pageOrder but no matching folder.
    for page_name in listed:
        if page_name not in on_disk:
            results.append(ValidationResult(
                "warning",
                "pages/pages.json",
                f"pageOrder references '{page_name}' but no such page folder exists",
            ))

    # Orphans: a folder exists but pageOrder does not mention it.
    for page_name in sorted(on_disk - set(listed)):
        results.append(ValidationResult(
            "info",
            "pages/pages.json",
            f"Page '{page_name}' exists but is not listed in pageOrder",
        ))

    return results
+
+
def _validate_visual_name_uniqueness(definition_path: Path) -> list[ValidationResult]:
    """Check that visual names are unique within each page."""
    results: list[ValidationResult] = []
    pages_dir = definition_path / "pages"
    if not pages_dir.is_dir():
        return results

    for page_dir in sorted(pages_dir.iterdir()):
        if not page_dir.is_dir():
            continue
        visuals_dir = page_dir / "visuals"
        if not visuals_dir.is_dir():
            continue

        # Map each visual name to the first folder that declared it.
        first_owner: dict[str, str] = {}
        for visual_dir in sorted(visuals_dir.iterdir()):
            visual_file = visual_dir / "visual.json"
            if not visual_dir.is_dir() or not visual_file.exists():
                continue
            try:
                visual_name = json.loads(
                    visual_file.read_text(encoding="utf-8")
                ).get("name", "")
            except (json.JSONDecodeError, KeyError):
                continue
            if visual_name in first_owner:
                rel = f"pages/{page_dir.name}/visuals/{visual_dir.name}/visual.json"
                results.append(ValidationResult(
                    "error",
                    rel,
                    f"Duplicate visual name '{visual_name}' (also in {first_owner[visual_name]})",
                ))
            else:
                first_owner[visual_name] = visual_dir.name

    return results
+
+
+# ---------------------------------------------------------------------------
+# Helpers
+# ---------------------------------------------------------------------------
+
+
+def _build_result(findings: list[ValidationResult]) -> dict[str, Any]:
+ """Build the final validation report dict."""
+ errors = [f for f in findings if f.level == "error"]
+ warnings = [f for f in findings if f.level == "warning"]
+ infos = [f for f in findings if f.level == "info"]
+
+ return {
+ "valid": len(errors) == 0,
+ "errors": [{"file": f.file, "message": f.message} for f in errors],
+ "warnings": [{"file": f.file, "message": f.message} for f in warnings],
+ "info": [{"file": f.file, "message": f.message} for f in infos],
+ "summary": {
+ "errors": len(errors),
+ "warnings": len(warnings),
+ "info": len(infos),
+ },
+ }
+
+
+def _extract_field_ref(
+ select_item: dict[str, Any], sources: dict[str, str]
+) -> str | None:
+ """Extract a Table[Column] reference from a semantic query select item."""
+ for kind in ("Column", "Measure"):
+ if kind in select_item:
+ item = select_item[kind]
+ source_name = (
+ item.get("Expression", {}).get("SourceRef", {}).get("Source", "")
+ )
+ prop = item.get("Property", "")
+ table = sources.get(source_name, source_name)
+ if table and prop:
+ return f"{table}[{prop}]"
+ return None
diff --git a/src/pbi_cli/core/report_backend.py b/src/pbi_cli/core/report_backend.py
new file mode 100644
index 0000000..c790770
--- /dev/null
+++ b/src/pbi_cli/core/report_backend.py
@@ -0,0 +1,797 @@
+"""Pure-function backend for PBIR report and page operations.
+
+Mirrors ``tom_backend.py`` but operates on JSON files instead of .NET TOM.
+Every function takes a ``Path`` to the definition folder and returns a plain
+Python dict suitable for ``format_result()``.
+"""
+
+from __future__ import annotations
+
+import json
+import re
+import secrets
+from pathlib import Path
+from typing import Any
+
+from pbi_cli.core.errors import PbiCliError
+from pbi_cli.core.pbir_models import (
+ DEFAULT_BASE_THEME,
+ SCHEMA_PAGE,
+ SCHEMA_PAGES_METADATA,
+ SCHEMA_REPORT,
+ SCHEMA_VERSION,
+)
+from pbi_cli.core.pbir_path import (
+ get_page_dir,
+ get_pages_dir,
+ validate_report_structure,
+)
+
+# ---------------------------------------------------------------------------
+# JSON helpers
+# ---------------------------------------------------------------------------
+
+
+def _read_json(path: Path) -> dict[str, Any]:
+ """Read and parse a JSON file."""
+ return json.loads(path.read_text(encoding="utf-8"))
+
+
+def _write_json(path: Path, data: dict[str, Any]) -> None:
+ """Write JSON with consistent formatting."""
+ path.write_text(
+ json.dumps(data, indent=2, ensure_ascii=False) + "\n",
+ encoding="utf-8",
+ )
+
+
+def _generate_name() -> str:
+ """Generate a 20-character hex identifier matching PBIR convention."""
+ return secrets.token_hex(10)
+
+
+# ---------------------------------------------------------------------------
+# Report operations
+# ---------------------------------------------------------------------------
+
+
def report_info(definition_path: Path) -> dict[str, Any]:
    """Return report metadata summary."""
    report_data = _read_json(definition_path / "report.json")

    pages: list[dict[str, Any]] = []
    pages_dir = definition_path / "pages"
    if pages_dir.is_dir():
        for page_dir in sorted(pages_dir.iterdir()):
            page_file = page_dir / "page.json"
            if not page_dir.is_dir() or not page_file.exists():
                continue
            page_data = _read_json(page_file)
            visuals_dir = page_dir / "visuals"
            # A visual only counts if its folder contains visual.json.
            visual_count = (
                sum(
                    1
                    for entry in visuals_dir.iterdir()
                    if entry.is_dir() and (entry / "visual.json").exists()
                )
                if visuals_dir.is_dir()
                else 0
            )
            pages.append({
                "name": page_data.get("name", page_dir.name),
                "display_name": page_data.get("displayName", ""),
                "ordinal": page_data.get("ordinal", 0),
                "visual_count": visual_count,
            })

    base_theme = report_data.get("themeCollection", {}).get("baseTheme", {})

    return {
        "page_count": len(pages),
        "theme": base_theme.get("name", "Default"),
        "pages": pages,
        "total_visuals": sum(page["visual_count"] for page in pages),
        "path": str(definition_path),
    }
+
+
def report_create(
    target_path: Path,
    name: str,
    dataset_path: str | None = None,
) -> dict[str, Any]:
    """Scaffold a new PBIR report project structure.

    Creates:
        /.Report/definition/report.json
        /.Report/definition/version.json
        /.Report/definition/pages/ (empty)
        /.Report/definition.pbir
        /.pbip (optional project file)

    Args:
        target_path: Folder in which the project is created.
        name: Report name; used for the ``<name>.Report`` folder, the
            ``.pbip`` file, and (when no dataset is given) the scaffolded
            ``<name>.SemanticModel``.
        dataset_path: Relative path written into ``definition.pbir``'s
            ``datasetReference``; when omitted, a blank semantic model is
            scaffolded next to the report and referenced instead.

    Returns:
        Dict with ``status``, ``name``, ``path``, and ``definition_path``.
    """
    target_path = target_path.resolve()
    report_folder = target_path / f"{name}.Report"
    definition_dir = report_folder / "definition"
    pages_dir = definition_dir / "pages"
    pages_dir.mkdir(parents=True, exist_ok=True)

    # version.json
    _write_json(definition_dir / "version.json", {
        "$schema": SCHEMA_VERSION,
        "version": "2.0.0",
    })

    # report.json (matches Desktop defaults)
    _write_json(definition_dir / "report.json", {
        "$schema": SCHEMA_REPORT,
        "themeCollection": {
            "baseTheme": dict(DEFAULT_BASE_THEME),
        },
        "layoutOptimization": "None",
        "settings": {
            "useStylableVisualContainerHeader": True,
            "defaultDrillFilterOtherVisuals": True,
            "allowChangeFilterTypes": True,
            "useEnhancedTooltips": True,
            "useDefaultAggregateDisplayName": True,
        },
        "slowDataSourceSettings": {
            "isCrossHighlightingDisabled": False,
            "isSlicerSelectionsButtonEnabled": False,
            "isFilterSelectionsButtonEnabled": False,
            "isFieldWellButtonEnabled": False,
            "isApplyAllButtonEnabled": False,
        },
    })

    # pages.json (empty page order)
    _write_json(definition_dir / "pages" / "pages.json", {
        "$schema": SCHEMA_PAGES_METADATA,
        "pageOrder": [],
    })

    # Scaffold a blank semantic model if no dataset path provided
    if not dataset_path:
        dataset_path = f"../{name}.SemanticModel"
        _scaffold_blank_semantic_model(target_path, name)

    # definition.pbir (datasetReference is REQUIRED by Desktop)
    _write_json(report_folder / "definition.pbir", {
        "version": "4.0",
        "datasetReference": {
            "byPath": {"path": dataset_path},
        },
    })

    # .platform file for the report
    # NOTE(review): logicalId is a hard-coded all-zero GUID placeholder —
    # confirm Fabric Git integration accepts this for fresh projects.
    _write_json(report_folder / ".platform", {
        "$schema": (
            "https://developer.microsoft.com/json-schemas/"
            "fabric/gitIntegration/platformProperties/2.0.0/schema.json"
        ),
        "metadata": {
            "type": "Report",
            "displayName": name,
        },
        "config": {
            "version": "2.0",
            "logicalId": "00000000-0000-0000-0000-000000000000",
        },
    })

    # .pbip project file
    _write_json(target_path / f"{name}.pbip", {
        "version": "1.0",
        "artifacts": [
            {
                "report": {"path": f"{name}.Report"},
            }
        ],
    })

    return {
        "status": "created",
        "name": name,
        "path": str(report_folder),
        "definition_path": str(definition_dir),
    }
+
+
def report_validate(definition_path: Path) -> dict[str, Any]:
    """Validate the PBIR report structure and JSON files.

    Returns a dict with ``valid`` bool and ``errors`` list.
    """
    errors = validate_report_structure(definition_path)
    files_checked = 0

    # Pass 1: every JSON file must at least parse.
    if definition_path.is_dir():
        json_files = list(definition_path.rglob("*.json"))
        files_checked = len(json_files)
        for candidate in json_files:
            try:
                _read_json(candidate)
            except json.JSONDecodeError as exc:
                errors.append(
                    f"Invalid JSON in {candidate.relative_to(definition_path)}: {exc}"
                )

    # Pass 2: required report.json schema fields.
    report_json = definition_path / "report.json"
    if report_json.exists():
        try:
            data = _read_json(report_json)
        except json.JSONDecodeError:
            data = None  # Already reported by the syntax pass above
        if data is not None:
            for key in ("themeCollection", "layoutOptimization"):
                if key not in data:
                    errors.append(f"report.json missing required '{key}'")

    # Pass 3: required page.json fields.
    pages_dir = definition_path / "pages"
    if pages_dir.is_dir():
        for page_dir in sorted(pages_dir.iterdir()):
            page_json = page_dir / "page.json"
            if not page_dir.is_dir() or not page_json.exists():
                continue
            try:
                pdata = _read_json(page_json)
            except json.JSONDecodeError:
                continue
            for req in ("name", "displayName", "displayOption"):
                if req not in pdata:
                    errors.append(
                        f"Page '{page_dir.name}' missing required '{req}'"
                    )

    return {
        "valid": not errors,
        "errors": errors,
        "files_checked": files_checked,
    }
+
+
+# ---------------------------------------------------------------------------
+# Page operations
+# ---------------------------------------------------------------------------
+
+
def page_list(definition_path: Path) -> list[dict[str, Any]]:
    """List all pages in the report."""
    pages_dir = definition_path / "pages"
    if not pages_dir.is_dir():
        return []

    # The authoritative ordering lives in pages.json (pageOrder), if present.
    pages_meta_file = pages_dir / "pages.json"
    page_order: list[str] = []
    if pages_meta_file.exists():
        page_order = _read_json(pages_meta_file).get("pageOrder", [])

    entries: list[dict[str, Any]] = []
    for page_dir in sorted(pages_dir.iterdir()):
        page_file = page_dir / "page.json"
        if not page_dir.is_dir() or not page_file.exists():
            continue
        page_data = _read_json(page_file)
        visuals_dir = page_dir / "visuals"
        visual_count = (
            sum(
                1
                for child in visuals_dir.iterdir()
                if child.is_dir() and (child / "visual.json").exists()
            )
            if visuals_dir.is_dir()
            else 0
        )
        entries.append({
            "name": page_data.get("name", page_dir.name),
            "display_name": page_data.get("displayName", ""),
            "ordinal": page_data.get("ordinal", 0),
            "width": page_data.get("width", 1280),
            "height": page_data.get("height", 720),
            "display_option": page_data.get("displayOption", "FitToPage"),
            "visual_count": visual_count,
            "is_hidden": page_data.get("visibility") == "HiddenInViewMode",
            "page_type": page_data.get("type", "Default"),
        })

    # Prefer pageOrder; unknown names sort last. Fall back to ordinal.
    if page_order:
        rank = {page_name: idx for idx, page_name in enumerate(page_order)}
        entries.sort(key=lambda entry: rank.get(entry["name"], 9999))
    else:
        entries.sort(key=lambda entry: entry["ordinal"])

    return entries
+
+
def page_add(
    definition_path: Path,
    display_name: str,
    name: str | None = None,
    width: int = 1280,
    height: int = 720,
    display_option: str = "FitToPage",
) -> dict[str, Any]:
    """Add a new page to the report.

    Raises:
        PbiCliError: If a page with the same internal name already exists.
    """
    page_name = name or _generate_name()
    page_dir = get_pages_dir(definition_path) / page_name
    if page_dir.exists():
        raise PbiCliError(f"Page '{page_name}' already exists.")

    # Creates both the page folder and its visuals/ subfolder.
    (page_dir / "visuals").mkdir(parents=True)

    # No 'ordinal' key here — ordering is driven by pages.json pageOrder.
    _write_json(page_dir / "page.json", {
        "$schema": SCHEMA_PAGE,
        "name": page_name,
        "displayName": display_name,
        "displayOption": display_option,
        "height": height,
        "width": width,
    })

    _update_page_order(definition_path, page_name, action="add")

    return {
        "status": "created",
        "name": page_name,
        "display_name": display_name,
    }
+
+
def page_delete(definition_path: Path, page_name: str) -> dict[str, Any]:
    """Delete a page and all its visuals.

    Raises:
        PbiCliError: If the page does not exist.
    """
    page_dir = get_page_dir(definition_path, page_name)
    if not page_dir.exists():
        raise PbiCliError(f"Page '{page_name}' not found.")

    # Remove the whole page folder, then drop it from pages.json.
    _rmtree(page_dir)
    _update_page_order(definition_path, page_name, action="remove")

    return {"status": "deleted", "name": page_name}
+
+
def page_get(definition_path: Path, page_name: str) -> dict[str, Any]:
    """Get details of a specific page.

    Raises:
        PbiCliError: If the page does not exist.
    """
    page_file = get_page_dir(definition_path, page_name) / "page.json"
    if not page_file.exists():
        raise PbiCliError(f"Page '{page_name}' not found.")

    page_data = _read_json(page_file)
    visuals_dir = page_file.parent / "visuals"
    visual_count = (
        sum(
            1
            for child in visuals_dir.iterdir()
            if child.is_dir() and (child / "visual.json").exists()
        )
        if visuals_dir.is_dir()
        else 0
    )

    return {
        "name": page_data.get("name", page_name),
        "display_name": page_data.get("displayName", ""),
        "ordinal": page_data.get("ordinal", 0),
        "width": page_data.get("width", 1280),
        "height": page_data.get("height", 720),
        "display_option": page_data.get("displayOption", "FitToPage"),
        "visual_count": visual_count,
        "is_hidden": page_data.get("visibility") == "HiddenInViewMode",
        "page_type": page_data.get("type", "Default"),
        "filter_config": page_data.get("filterConfig"),
        "visual_interactions": page_data.get("visualInteractions"),
        "page_binding": page_data.get("pageBinding"),
    }
+
+
def page_set_background(
    definition_path: Path,
    page_name: str,
    color: str,
) -> dict[str, Any]:
    """Set the background color of a page.

    Updates the ``objects.background`` property in ``page.json``.
    The color must be a hex string, e.g. ``'#F8F9FA'``.

    Args:
        definition_path: PBIR definition folder.
        page_name: Internal page name (the page's folder name).
        color: Hex color in #RGB, #RGBA, #RRGGBB, or #RRGGBBAA form.

    Raises:
        PbiCliError: If the color is malformed or the page does not exist.
    """
    # Hex colors only come in 3-, 4-, 6-, or 8-digit forms; the previous
    # pattern ({3,8}) wrongly accepted invalid 5- and 7-digit values.
    if not re.fullmatch(r"#(?:[0-9A-Fa-f]{3,4}|[0-9A-Fa-f]{6}|[0-9A-Fa-f]{8})", color):
        raise PbiCliError(
            f"Invalid color '{color}' -- expected hex format like '#F8F9FA'."
        )

    page_dir = get_page_dir(definition_path, page_name)
    page_json_path = page_dir / "page.json"
    if not page_json_path.exists():
        raise PbiCliError(f"Page '{page_name}' not found.")

    page_data = _read_json(page_json_path)
    # PBIR expresses the color as a literal expression inside a solid fill.
    background_entry = {
        "properties": {
            "color": {
                "solid": {
                    "color": {
                        "expr": {
                            "Literal": {"Value": f"'{color}'"}
                        }
                    }
                }
            }
        }
    }
    objects = {**page_data.get("objects", {}), "background": [background_entry]}
    _write_json(page_json_path, {**page_data, "objects": objects})
    return {"status": "updated", "page": page_name, "background_color": color}
+
+
def page_set_visibility(
    definition_path: Path,
    page_name: str,
    hidden: bool,
) -> dict[str, Any]:
    """Show or hide a page in the report navigation.

    Setting ``hidden=True`` writes ``"visibility": "HiddenInViewMode"`` to
    ``page.json``; ``hidden=False`` removes the key entirely.

    Raises:
        PbiCliError: If the page does not exist.
    """
    page_json_path = get_page_dir(definition_path, page_name) / "page.json"
    if not page_json_path.exists():
        raise PbiCliError(f"Page '{page_name}' not found.")

    page_data = _read_json(page_json_path)
    if hidden:
        page_data["visibility"] = "HiddenInViewMode"
    else:
        page_data.pop("visibility", None)
    _write_json(page_json_path, page_data)

    return {"status": "updated", "page": page_name, "hidden": hidden}
+
+
+# ---------------------------------------------------------------------------
+# Theme operations
+# ---------------------------------------------------------------------------
+
+
def theme_set(
    definition_path: Path, theme_path: Path
) -> dict[str, Any]:
    """Apply a custom theme JSON to the report.

    Registers the theme under ``themeCollection.customTheme`` in
    ``report.json``, copies the theme file into
    ``StaticResources/RegisteredResources/``, and adds a matching entry to
    ``resourcePackages``.

    Raises:
        PbiCliError: If the theme file does not exist.
    """
    if not theme_path.exists():
        raise PbiCliError(f"Theme file not found: {theme_path}")

    theme_data = _read_json(theme_path)
    report_json_path = definition_path / "report.json"
    report_data = _read_json(report_json_path)

    # Set custom theme
    theme_collection = report_data.get("themeCollection", {})
    theme_collection["customTheme"] = {
        "name": theme_data.get("name", theme_path.stem),
        "reportVersionAtImport": "5.55",
        "type": "RegisteredResources",
    }
    report_data["themeCollection"] = theme_collection

    # Copy theme file to RegisteredResources if needed
    report_folder = definition_path.parent
    resources_dir = report_folder / "StaticResources" / "RegisteredResources"
    resources_dir.mkdir(parents=True, exist_ok=True)
    theme_dest = resources_dir / theme_path.name
    theme_dest.write_text(
        theme_path.read_text(encoding="utf-8"), encoding="utf-8"
    )

    # Update resource packages in report.json
    # NOTE(review): the file is written under RegisteredResources/ but the
    # item path below says "BaseThemes/..." — confirm which relative root
    # Desktop expects for RegisteredResources items.
    resource_packages = report_data.get("resourcePackages", [])
    found = False
    for pkg in resource_packages:
        if pkg.get("name") == "RegisteredResources":
            found = True
            items = pkg.get("items", [])
            # Add or update theme entry
            theme_item = {
                "name": theme_path.name,
                "type": 202,
                "path": f"BaseThemes/{theme_path.name}",
            }
            existing_names = {i["name"] for i in items}
            if theme_path.name not in existing_names:
                items.append(theme_item)
            pkg["items"] = items
            break

    if not found:
        # No RegisteredResources package yet: create one with this theme.
        resource_packages.append({
            "name": "RegisteredResources",
            "type": "RegisteredResources",
            "items": [{
                "name": theme_path.name,
                "type": 202,
                "path": f"BaseThemes/{theme_path.name}",
            }],
        })
    report_data["resourcePackages"] = resource_packages

    _write_json(report_json_path, report_data)

    return {
        "status": "applied",
        "theme": theme_data.get("name", theme_path.stem),
        "file": str(theme_dest),
    }
+
+
def theme_get(definition_path: Path) -> dict[str, Any]:
    """Return current theme information for the report.

    Reads ``report.json`` to determine the base and custom theme names.
    If a custom theme is set and the theme file exists in
    ``StaticResources/RegisteredResources/``, the full theme JSON is also
    returned.

    Returns:
        ``{"base_theme": str, "custom_theme": str | None,
        "theme_data": dict | None}``

    Raises:
        PbiCliError: If ``report.json`` is missing.
    """
    report_json_path = definition_path / "report.json"
    if not report_json_path.exists():
        raise PbiCliError("report.json not found -- is this a valid PBIR definition folder?")

    theme_collection = _read_json(report_json_path).get("themeCollection", {})
    base_theme = theme_collection.get("baseTheme", {}).get("name", "")

    custom_theme_name: str | None = None
    theme_data: dict[str, Any] | None = None

    custom = theme_collection.get("customTheme")
    if custom:
        custom_theme_name = custom.get("name")
        # Look for the matching theme JSON among the registered resources;
        # unreadable or non-matching files are skipped silently.
        resources_dir = (
            definition_path.parent / "StaticResources" / "RegisteredResources"
        )
        if resources_dir.is_dir():
            for candidate in resources_dir.glob("*.json"):
                try:
                    parsed = _read_json(candidate)
                    if parsed.get("name") == custom_theme_name:
                        theme_data = parsed
                        break
                except Exception:
                    continue

    return {
        "base_theme": base_theme,
        "custom_theme": custom_theme_name,
        "theme_data": theme_data,
    }
+
+
def theme_diff(definition_path: Path, theme_path: Path) -> dict[str, Any]:
    """Compare a proposed theme JSON file against the currently applied theme.

    When no custom theme is applied the comparison baseline is an empty
    dict, so every key in the proposed file shows up as an addition.

    Returns:
        ``{"current": str, "proposed": str,
        "added": list[str], "removed": list[str], "changed": list[str]}``

    Raises:
        PbiCliError: If the proposed theme file does not exist.
    """
    if not theme_path.exists():
        raise PbiCliError(f"Proposed theme file not found: {theme_path}")

    current_info = theme_get(definition_path)
    proposed_data = _read_json(theme_path)

    added, removed, changed = _dict_diff(
        current_info.get("theme_data") or {}, proposed_data
    )

    return {
        "current": (
            current_info.get("custom_theme")
            or current_info.get("base_theme")
            or "(none)"
        ),
        "proposed": proposed_data.get("name", theme_path.stem),
        "added": added,
        "removed": removed,
        "changed": changed,
    }
+
+
+def _dict_diff(
+ current: dict[str, Any],
+ proposed: dict[str, Any],
+ prefix: str = "",
+) -> tuple[list[str], list[str], list[str]]:
+ """Recursively diff two dicts and return (added, removed, changed) key paths."""
+ added: list[str] = []
+ removed: list[str] = []
+ changed: list[str] = []
+
+ all_keys = set(current) | set(proposed)
+ for key in sorted(all_keys):
+ path = f"{prefix}{key}" if not prefix else f"{prefix}.{key}"
+ if key not in current:
+ added.append(path)
+ elif key not in proposed:
+ removed.append(path)
+ elif isinstance(current[key], dict) and isinstance(proposed[key], dict):
+ sub_added, sub_removed, sub_changed = _dict_diff(
+ current[key], proposed[key], prefix=path
+ )
+ added.extend(sub_added)
+ removed.extend(sub_removed)
+ changed.extend(sub_changed)
+ elif current[key] != proposed[key]:
+ changed.append(path)
+
+ return added, removed, changed
+
+
+# ---------------------------------------------------------------------------
+# Convert operations
+# ---------------------------------------------------------------------------
+
+
def report_convert(
    source_path: Path,
    output_path: Path | None = None,
    force: bool = False,
) -> dict[str, Any]:
    """Convert a PBIR report project to a distributable .pbip package.

    This scaffolds the proper .pbip project structure from an existing
    .Report folder. It does NOT convert .pbix to .pbip (that requires
    Power BI Desktop's "Save as .pbip" feature).

    Args:
        source_path: A ``*.Report`` folder itself, or a folder that
            contains one as a direct child.
        output_path: Directory for the ``.pbip`` file (defaults to
            ``source_path``); created if it does not exist.
        force: Overwrite an existing ``.pbip`` file.

    Raises:
        PbiCliError: If no .Report folder is found, or the target .pbip
            already exists and ``force`` is not set.
    """
    source_path = source_path.resolve()

    # Locate the .Report folder: either source_path itself or a direct child.
    report_folder: Path | None = None
    if source_path.name.endswith(".Report") and source_path.is_dir():
        report_folder = source_path
    else:
        for child in source_path.iterdir():
            if child.is_dir() and child.name.endswith(".Report"):
                report_folder = child
                break

    if report_folder is None:
        raise PbiCliError(
            f"No .Report folder found in '{source_path}'. "
            "Expected a folder ending in .Report."
        )

    # Use removesuffix, not replace(): a report literally named
    # "Sales.Report" lives in a "Sales.Report.Report" folder, and
    # replace() would strip BOTH occurrences.
    name = report_folder.name.removesuffix(".Report")
    target = output_path.resolve() if output_path else source_path
    # An explicit output directory may not exist yet.
    target.mkdir(parents=True, exist_ok=True)

    # Create the .pbip pointer file.
    pbip_path = target / f"{name}.pbip"
    if pbip_path.exists() and not force:
        raise PbiCliError(
            f".pbip file already exists at '{pbip_path}'. Use --force to overwrite."
        )
    _write_json(pbip_path, {
        "version": "1.0",
        "artifacts": [
            {"report": {"path": f"{name}.Report"}},
        ],
    })

    # Create .gitignore if not present (never clobber an existing one).
    gitignore = target / ".gitignore"
    gitignore_created = not gitignore.exists()
    if gitignore_created:
        gitignore_content = (
            "# Power BI local settings\n"
            ".pbi/\n"
            "*.pbix\n"
            "*.bak\n"
        )
        gitignore.write_text(gitignore_content, encoding="utf-8")

    # Report whether definition.pbir exists (informational only).
    defn_pbir = report_folder / "definition.pbir"

    return {
        "status": "converted",
        "name": name,
        "pbip_path": str(pbip_path),
        "report_folder": str(report_folder),
        "has_definition_pbir": defn_pbir.exists(),
        "gitignore_created": gitignore_created,
    }
+
+
+# ---------------------------------------------------------------------------
+# Internal helpers
+# ---------------------------------------------------------------------------
+
+
def _scaffold_blank_semantic_model(target_path: Path, name: str) -> None:
    """Create a minimal TMDL semantic model so Desktop can open the report."""
    model_dir = target_path / f"{name}.SemanticModel"
    definition_dir = model_dir / "definition"
    definition_dir.mkdir(parents=True, exist_ok=True)

    # Smallest valid model.tmdl.
    tmdl_text = (
        "model Model\n"
        "\tculture: en-US\n"
        "\tdefaultPowerBIDataSourceVersion: powerBI_V3\n"
    )
    (definition_dir / "model.tmdl").write_text(tmdl_text, encoding="utf-8")

    # .platform file (required by Desktop).
    platform_payload = {
        "$schema": (
            "https://developer.microsoft.com/json-schemas/"
            "fabric/gitIntegration/platformProperties/2.0.0/schema.json"
        ),
        "metadata": {
            "type": "SemanticModel",
            "displayName": name,
        },
        "config": {
            "version": "2.0",
            "logicalId": "00000000-0000-0000-0000-000000000000",
        },
    }
    _write_json(model_dir / ".platform", platform_payload)

    # definition.pbism (matches Desktop format).
    _write_json(model_dir / "definition.pbism", {
        "version": "4.1",
        "settings": {},
    })
+
+
def _update_page_order(
    definition_path: Path, page_name: str, action: str
) -> None:
    """Update pages.json with page add/remove.

    ``action`` is ``"add"`` or ``"remove"``; any other value leaves the
    order untouched but still rewrites the metadata file.
    """
    pages_meta_path = definition_path / "pages" / "pages.json"

    meta = (
        _read_json(pages_meta_path)
        if pages_meta_path.exists()
        else {"$schema": SCHEMA_PAGES_METADATA}
    )

    order = meta.get("pageOrder", [])
    if action == "add":
        if page_name not in order:
            order.append(page_name)
    elif action == "remove":
        order = [p for p in order if p != page_name]
    meta["pageOrder"] = order

    # Always keep activePageName valid (Desktop requires this): default
    # to the first page, and reset if the active page was removed.
    if order:
        active = meta.get("activePageName", order[0])
        if active not in order:
            active = order[0]
        meta["activePageName"] = active

    _write_json(pages_meta_path, meta)
+
+
+def _rmtree(path: Path) -> None:
+ """Recursively remove a directory tree (stdlib-only)."""
+ if path.is_dir():
+ for child in path.iterdir():
+ _rmtree(child)
+ path.rmdir()
+ else:
+ path.unlink()
diff --git a/src/pbi_cli/core/tmdl_diff.py b/src/pbi_cli/core/tmdl_diff.py
new file mode 100644
index 0000000..814ae8a
--- /dev/null
+++ b/src/pbi_cli/core/tmdl_diff.py
@@ -0,0 +1,329 @@
+"""TMDL folder diff -- pure Python, no .NET required."""
+
+from __future__ import annotations
+
+import re
+from pathlib import Path
+from typing import Any
+
+from pbi_cli.core.errors import PbiCliError
+
# Entity keywords inside table files (at 1-tab indent).
# "variation" is intentionally excluded: it is a sub-property of a column,
# not a sibling entity, so its content stays inside the parent column block.
_TABLE_ENTITY_KEYWORDS: frozenset[str] = frozenset({"measure", "column", "hierarchy", "partition"})
+
+
def diff_tmdl_folders(base_folder: str, head_folder: str) -> dict[str, Any]:
    """Compare two TMDL export folders and return a structured diff.

    Works on any two folders produced by ``pbi database export-tmdl`` or
    exported from Power BI Desktop / Fabric Git. No live connection needed.

    Returns:
        Dict with keys: base, head, changed, summary, tables,
        relationships, model.

    Raises:
        PbiCliError: If either folder does not exist.
    """
    base = Path(base_folder)
    head = Path(head_folder)
    if not base.is_dir():
        raise PbiCliError(f"Base folder not found: {base}")
    if not head.is_dir():
        raise PbiCliError(f"Head folder not found: {head}")

    base_def = _find_definition_dir(base)
    head_def = _find_definition_dir(head)

    tables_diff = _diff_tables(base_def, head_def)
    rels_diff = _diff_relationships(base_def, head_def)
    model_diff = _diff_model(base_def, head_def)

    summary: dict[str, Any] = {
        "tables_added": len(tables_diff["added"]),
        "tables_removed": len(tables_diff["removed"]),
        "tables_changed": len(tables_diff["changed"]),
        "relationships_added": len(rels_diff["added"]),
        "relationships_removed": len(rels_diff["removed"]),
        "relationships_changed": len(rels_diff["changed"]),
        "model_changed": bool(model_diff["changed_properties"]),
    }

    return {
        "base": str(base),
        "head": str(head),
        # Any non-zero count or a model change means the exports differ.
        "changed": any(summary.values()),
        "summary": summary,
        "tables": tables_diff,
        "relationships": rels_diff,
        "model": model_diff,
    }
+
+
+def _find_definition_dir(folder: Path) -> Path:
+ """Return the directory that directly contains model.tmdl / tables/.
+
+ Handles both:
+ - Direct layout: folder/model.tmdl
+ - SemanticModel: folder/definition/model.tmdl
+ """
+ candidate = folder / "definition"
+ if candidate.is_dir():
+ return candidate
+ return folder
+
+
+def _read_tmdl(path: Path) -> str:
+ """Read a TMDL file, returning empty string if absent."""
+ if not path.exists():
+ return ""
+ return path.read_text(encoding="utf-8")
+
+
+def _strip_lineage_tags(text: str) -> str:
+ """Remove lineageTag lines so spurious GUID regeneration is ignored."""
+ return re.sub(r"[ \t]*lineageTag:.*\n?", "", text)
+
+
+# ---------------------------------------------------------------------------
+# Table diffing
+# ---------------------------------------------------------------------------
+
+
def _diff_tables(base_def: Path, head_def: Path) -> dict[str, Any]:
    """Diff the tables/ folders of two TMDL exports.

    Returns ``{"added": [...], "removed": [...], "changed": {name: entity_diff}}``.
    """
    base_dir = base_def / "tables"
    head_dir = head_def / "tables"
    base_names = _list_tmdl_names(base_dir)
    head_names = _list_tmdl_names(head_dir)

    changed: dict[str, Any] = {}
    for name in sorted(base_names & head_names):
        old_text = _read_tmdl(base_dir / f"{name}.tmdl")
        new_text = _read_tmdl(head_dir / f"{name}.tmdl")
        # Skip tables whose only differences are lineageTag churn.
        if _strip_lineage_tags(old_text) == _strip_lineage_tags(new_text):
            continue
        entity_diff = _diff_table_entities(old_text, new_text)
        if any(entity_diff.values()):
            changed[name] = entity_diff

    return {
        "added": sorted(head_names - base_names),
        "removed": sorted(base_names - head_names),
        "changed": changed,
    }
+
+
+def _list_tmdl_names(tables_dir: Path) -> set[str]:
+ """Return stem names of all .tmdl files in a directory."""
+ if not tables_dir.is_dir():
+ return set()
+ return {p.stem for p in tables_dir.glob("*.tmdl")}
+
+
def _diff_table_entities(
    base_text: str, head_text: str
) -> dict[str, list[str]]:
    """Compare entity blocks within two table TMDL files.

    Returns per-category added/removed/changed name lists; empty
    ``other_*`` buckets are dropped from the result.
    """
    base_entities = _parse_table_entities(base_text)
    head_entities = _parse_table_entities(head_text)

    # TMDL keyword -> plural prefix used in the result keys.
    plurals: dict[str, str] = {
        "measure": "measures",
        "column": "columns",
        "partition": "partitions",
        "hierarchy": "hierarchies",
    }

    result: dict[str, list[str]] = {}
    for plural in (*plurals.values(), "other"):
        for suffix in ("added", "removed", "changed"):
            result[f"{plural}_{suffix}"] = []

    for key in sorted(set(base_entities) | set(head_entities)):
        keyword, _, entity_name = key.partition("/")
        plural = plurals.get(keyword, "other")
        if key not in base_entities:
            result[f"{plural}_added"].append(entity_name)
        elif key not in head_entities:
            result[f"{plural}_removed"].append(entity_name)
        elif (
            _strip_lineage_tags(base_entities[key])
            != _strip_lineage_tags(head_entities[key])
        ):
            result[f"{plural}_changed"].append(entity_name)

    # Drop empty other_* buckets to keep output clean.
    for bucket in ("other_added", "other_removed", "other_changed"):
        if not result[bucket]:
            del result[bucket]

    return result
+
+
def _parse_table_entities(text: str) -> dict[str, str]:
    """Parse a table TMDL file into {keyword/name: text_block} entries.

    Entities (measure, column, hierarchy, partition) start at exactly one
    tab of indentation inside the table declaration. Lines before the
    first entity (the table header itself) are discarded; deeper-indented
    lines stay inside the current entity's block.
    """
    entities: dict[str, str] = {}
    key: str | None = None
    block: list[str] = []

    def flush() -> None:
        # Store the accumulated block for the current entity, if any.
        if key is not None:
            entities[key] = "".join(block)

    for line in text.splitlines(keepends=True):
        # An entity declaration starts with exactly one tab (not two).
        if line.startswith("\t") and not line.startswith("\t\t"):
            body = line[1:]
            tokens = body.split()
            if tokens and tokens[0] in _TABLE_ENTITY_KEYWORDS:
                flush()
                key = f"{tokens[0]}/{_extract_entity_name(tokens[0], body)}"
                block = [line]
                continue
        if key is not None:
            block.append(line)

    flush()
    return entities
+
+
+def _extract_entity_name(keyword: str, declaration: str) -> str:
+ """Extract the entity name from a TMDL declaration line (no leading tab)."""
+ # e.g. "measure 'Total Revenue' = ..." -> "Total Revenue"
+ # e.g. "column ProductID" -> "ProductID"
+ # e.g. "partition Sales = m" -> "Sales"
+ rest = declaration[len(keyword):].strip()
+ if rest.startswith("'"):
+ end = rest.find("'", 1)
+ return rest[1:end] if end > 0 else rest[1:]
+ # Take first token, stop at '=' or whitespace
+ token = re.split(r"[\s=]", rest)[0]
+ return token.strip("'\"") if token else rest
+
+
+# ---------------------------------------------------------------------------
+# Relationship diffing
+# ---------------------------------------------------------------------------
+
+
def _diff_relationships(base_def: Path, head_def: Path) -> dict[str, list[str]]:
    """Diff relationships.tmdl between two exports, keyed by "from -> to"."""
    base_rels = _parse_relationships(_read_tmdl(base_def / "relationships.tmdl"))
    head_rels = _parse_relationships(_read_tmdl(head_def / "relationships.tmdl"))

    added: list[str] = []
    removed: list[str] = []
    changed: list[str] = []
    for key in sorted(set(base_rels) | set(head_rels)):
        if key not in base_rels:
            added.append(key)
        elif key not in head_rels:
            removed.append(key)
        else:
            # Present in both: compare text with lineageTag churn removed.
            old_block = _strip_lineage_tags(base_rels[key])
            new_block = _strip_lineage_tags(head_rels[key])
            if old_block != new_block:
                changed.append(key)

    return {"added": added, "removed": removed, "changed": changed}
+
+
def _parse_relationships(text: str) -> dict[str, str]:
    """Parse relationships.tmdl into {from -> to: text_block} entries."""
    blocks: dict[str, str] = {}
    if not text.strip():
        return blocks

    # ``current`` is None until the first "relationship " header is seen;
    # lines before it are ignored.
    current: list[str] | None = None
    for line in text.splitlines(keepends=True):
        if line.startswith("relationship "):
            if current:
                _save_relationship(current, blocks)
            current = [line]
        elif current is not None:
            current.append(line)

    if current:
        _save_relationship(current, blocks)

    return blocks
+
+
+def _save_relationship(lines: list[str], blocks: dict[str, str]) -> None:
+ """Extract semantic key from a relationship block and store it."""
+ from_col = ""
+ to_col = ""
+ for line in lines:
+ stripped = line.strip()
+ if stripped.startswith("fromColumn:"):
+ from_col = stripped.split(":", 1)[1].strip()
+ elif stripped.startswith("toColumn:"):
+ to_col = stripped.split(":", 1)[1].strip()
+ if from_col or to_col:
+ key = f"{from_col} -> {to_col}"
+ blocks[key] = "".join(lines)
+
+
+# ---------------------------------------------------------------------------
+# Model property diffing
+# ---------------------------------------------------------------------------
+
+
def _diff_model(base_def: Path, head_def: Path) -> dict[str, list[str]]:
    """Diff the 1-tab-indented key: value properties of two model.tmdl files."""
    base_props = _parse_model_props(_read_tmdl(base_def / "model.tmdl"))
    head_props = _parse_model_props(_read_tmdl(head_def / "model.tmdl"))

    changed = [
        f"{key}: {base_props.get(key)!r} -> {head_props.get(key)!r}"
        for key in sorted(set(base_props) | set(head_props))
        if base_props.get(key) != head_props.get(key)
    ]
    return {"changed_properties": changed}
+
+
+def _parse_model_props(text: str) -> dict[str, str]:
+ """Extract key: value properties at 1-tab indent from model.tmdl."""
+ props: dict[str, str] = {}
+ for line in text.splitlines():
+ if line.startswith("\t") and not line.startswith("\t\t") and ":" in line:
+ key, _, val = line[1:].partition(":")
+ props[key.strip()] = val.strip()
+ return props
diff --git a/src/pbi_cli/core/visual_backend.py b/src/pbi_cli/core/visual_backend.py
new file mode 100644
index 0000000..02bc143
--- /dev/null
+++ b/src/pbi_cli/core/visual_backend.py
@@ -0,0 +1,929 @@
+"""Pure-function backend for PBIR visual operations.
+
+Mirrors ``report_backend.py`` but focuses on individual visual CRUD.
+Every function takes a ``Path`` to the definition folder and returns
+plain Python dicts suitable for ``format_result()``.
+"""
+
+from __future__ import annotations
+
+import json
+import re
+import secrets
+from pathlib import Path
+from typing import Any
+
+from pbi_cli.core.errors import PbiCliError, VisualTypeError
+from pbi_cli.core.pbir_models import (
+ SUPPORTED_VISUAL_TYPES,
+ VISUAL_TYPE_ALIASES,
+)
+from pbi_cli.core.pbir_path import get_visual_dir, get_visuals_dir
+
+# ---------------------------------------------------------------------------
+# JSON helpers (same as report_backend)
+# ---------------------------------------------------------------------------
+
+
+def _read_json(path: Path) -> dict[str, Any]:
+ return json.loads(path.read_text(encoding="utf-8"))
+
+
+def _write_json(path: Path, data: dict[str, Any]) -> None:
+ path.write_text(
+ json.dumps(data, indent=2, ensure_ascii=False) + "\n",
+ encoding="utf-8",
+ )
+
+
+def _generate_name() -> str:
+ return secrets.token_hex(10)
+
+
+# ---------------------------------------------------------------------------
+# Template loading
+# ---------------------------------------------------------------------------
+
# Data role mappings for each visual type.
# Keys are PBIR visualType identifiers; values list that visual's data
# role names in the order they appear here.
VISUAL_DATA_ROLES: dict[str, list[str]] = {
    # Original 9
    "barChart": ["Category", "Y", "Legend"],
    "lineChart": ["Category", "Y", "Legend"],
    "card": ["Values"],
    "tableEx": ["Values"],
    "pivotTable": ["Rows", "Values", "Columns"],
    "slicer": ["Values"],
    "kpi": ["Indicator", "Goal", "TrendLine"],
    "gauge": ["Y", "MaxValue"],
    "donutChart": ["Category", "Y", "Legend"],
    # v3.1.0 additions
    "columnChart": ["Category", "Y", "Legend"],
    "areaChart": ["Category", "Y", "Legend"],
    "ribbonChart": ["Category", "Y", "Legend"],
    "waterfallChart": ["Category", "Y", "Breakdown"],
    "scatterChart": ["Details", "X", "Y", "Size", "Legend"],
    "funnelChart": ["Category", "Y"],
    "multiRowCard": ["Values"],
    "treemap": ["Category", "Values"],
    "cardNew": ["Fields"],
    "stackedBarChart": ["Category", "Y", "Legend"],
    "lineStackedColumnComboChart": ["Category", "ColumnY", "LineY", "Legend"],
    # v3.4.0 additions
    "cardVisual": ["Data"],
    "actionButton": [],
    # v3.5.0 additions
    "clusteredColumnChart": ["Category", "Y", "Legend"],
    "clusteredBarChart": ["Category", "Y", "Legend"],
    "textSlicer": ["Values"],
    "listSlicer": ["Values"],
    # v3.6.0 additions -- decorative/navigation visuals take no data roles
    "image": [],
    "shape": [],
    "textbox": [],
    "pageNavigator": [],
    "advancedSlicerVisual": ["Values"],
    # v3.8.0 additions
    "azureMap": ["Category", "Size"],
}

# Roles that should default to Measure references (not Column)
MEASURE_ROLES: frozenset[str] = frozenset({
    "Y", "Values", "Fields",  # "Fields" is used by cardNew only
    "Indicator", "Goal",
    # v3.1.0 additions
    "ColumnY", "LineY", "X", "Size",
    # v3.4.0 additions
    "Data",
    # v3.8.0 additions
    "MaxValue",
})
+
# User-friendly role aliases to PBIR role names.
# Outer key: PBIR visualType; inner dict maps the lowercase alias a user
# may type to the canonical role name from VISUAL_DATA_ROLES. Several
# aliases may map to the same role (e.g. "value"/"field").
ROLE_ALIASES: dict[str, dict[str, str]] = {
    # Original 9
    "barChart": {"category": "Category", "value": "Y", "legend": "Legend"},
    "lineChart": {"category": "Category", "value": "Y", "legend": "Legend"},
    "card": {"field": "Values", "value": "Values"},
    "tableEx": {"value": "Values", "column": "Values"},
    "pivotTable": {"row": "Rows", "value": "Values", "column": "Columns"},
    "slicer": {"value": "Values", "field": "Values"},
    "kpi": {
        "indicator": "Indicator",
        "value": "Indicator",
        "goal": "Goal",
        "trend_line": "TrendLine",
        "trend": "TrendLine",
    },
    "gauge": {
        "value": "Y",
        "max": "MaxValue",
        "max_value": "MaxValue",
        "target": "MaxValue",
    },
    "donutChart": {"category": "Category", "value": "Y", "legend": "Legend"},
    # v3.1.0 additions
    "columnChart": {"category": "Category", "value": "Y", "legend": "Legend"},
    "areaChart": {"category": "Category", "value": "Y", "legend": "Legend"},
    "ribbonChart": {"category": "Category", "value": "Y", "legend": "Legend"},
    "waterfallChart": {"category": "Category", "value": "Y", "breakdown": "Breakdown"},
    "scatterChart": {
        "x": "X", "y": "Y", "detail": "Details", "size": "Size", "legend": "Legend",
        "value": "Y",
    },
    "funnelChart": {"category": "Category", "value": "Y"},
    "multiRowCard": {"field": "Values", "value": "Values"},
    "treemap": {"category": "Category", "value": "Values"},
    "cardNew": {"field": "Fields", "value": "Fields"},
    "stackedBarChart": {"category": "Category", "value": "Y", "legend": "Legend"},
    "lineStackedColumnComboChart": {
        "category": "Category",
        "column": "ColumnY",
        "line": "LineY",
        "legend": "Legend",
        "value": "ColumnY",
    },
    # v3.4.0 additions
    "cardVisual": {"field": "Data", "value": "Data"},
    "actionButton": {},
    # v3.5.0 additions
    "clusteredColumnChart": {"category": "Category", "value": "Y", "legend": "Legend"},
    "clusteredBarChart": {"category": "Category", "value": "Y", "legend": "Legend"},
    "textSlicer": {"value": "Values", "field": "Values"},
    "listSlicer": {"value": "Values", "field": "Values"},
    # v3.6.0 additions -- no data roles, so no aliases either
    "image": {},
    "shape": {},
    "textbox": {},
    "pageNavigator": {},
    "advancedSlicerVisual": {"value": "Values", "field": "Values"},
    # v3.8.0 additions
    "azureMap": {"category": "Category", "value": "Size", "size": "Size"},
}
+
+
def _resolve_visual_type(user_type: str) -> str:
    """Resolve a user-provided visual type to a PBIR visualType.

    Accepts either a canonical type or a registered alias.

    Raises:
        VisualTypeError: If the type is neither supported nor an alias.
    """
    if user_type in SUPPORTED_VISUAL_TYPES:
        return user_type
    resolved = VISUAL_TYPE_ALIASES.get(user_type)
    if resolved is None:
        raise VisualTypeError(user_type)
    return resolved
+
+
def _load_template(visual_type: str) -> str:
    """Load a visual template as a raw string (contains placeholders)."""
    # Imported lazily so module import stays cheap.
    import importlib.resources

    package = importlib.resources.files("pbi_cli.templates.visuals")
    return (package / f"{visual_type}.json").read_text(encoding="utf-8")
+
+
+def _build_visual_json(
+ template_str: str,
+ name: str,
+ x: float,
+ y: float,
+ width: float,
+ height: float,
+ z: int = 0,
+ tab_order: int = 0,
+) -> dict[str, Any]:
+ """Fill placeholders in a template string and return parsed JSON."""
+ filled = (
+ template_str
+ .replace("__VISUAL_NAME__", name)
+ .replace("__X__", str(x))
+ .replace("__Y__", str(y))
+ .replace("__WIDTH__", str(width))
+ .replace("__HEIGHT__", str(height))
+ .replace("__Z__", str(z))
+ .replace("__TAB_ORDER__", str(tab_order))
+ )
+ return json.loads(filled)
+
+
+# ---------------------------------------------------------------------------
+# Default positions and sizes per visual type
+# ---------------------------------------------------------------------------
+
# Default (width, height) in report canvas units for each visual type.
DEFAULT_SIZES: dict[str, tuple[float, float]] = {
    # Original 9
    "barChart": (400, 300),
    "lineChart": (400, 300),
    "card": (200, 120),
    "tableEx": (500, 350),
    "pivotTable": (500, 350),
    "slicer": (200, 300),
    "kpi": (250, 150),
    "gauge": (300, 250),
    "donutChart": (350, 300),
    # v3.1.0 additions
    "columnChart": (400, 300),
    "areaChart": (400, 300),
    "ribbonChart": (400, 300),
    "waterfallChart": (450, 300),
    "scatterChart": (400, 350),
    "funnelChart": (350, 300),
    "multiRowCard": (300, 200),
    "treemap": (400, 300),
    "cardNew": (200, 120),
    "stackedBarChart": (400, 300),
    "lineStackedColumnComboChart": (500, 300),
    # v3.4.0 additions -- sizes from real Desktop export
    "cardVisual": (217, 87),
    "actionButton": (51, 22),
    # v3.5.0 additions
    "clusteredColumnChart": (400, 300),
    "clusteredBarChart": (400, 300),
    "textSlicer": (200, 50),
    "listSlicer": (200, 300),
    # v3.6.0 additions (from real HR Analysis Desktop export sizing)
    "image": (200, 150),
    "shape": (300, 200),
    "textbox": (300, 100),
    "pageNavigator": (120, 400),
    "advancedSlicerVisual": (280, 280),
    # v3.8.0 additions
    "azureMap": (500, 400),
}
+
+
+# ---------------------------------------------------------------------------
+# Visual CRUD operations
+# ---------------------------------------------------------------------------
+
+
def visual_list(
    definition_path: Path, page_name: str
) -> list[dict[str, Any]]:
    """List all visuals on a page.

    Returns one summary dict per visual (name, type, position, size);
    group containers are reported with type ``"group"`` and zeroed
    position/size. A missing visuals folder yields an empty list.
    """
    visuals_dir = definition_path / "pages" / page_name / "visuals"
    if not visuals_dir.is_dir():
        return []

    summaries: list[dict[str, Any]] = []
    for entry in sorted(visuals_dir.iterdir()):
        if not entry.is_dir():
            continue
        visual_file = entry / "visual.json"
        if not visual_file.exists():
            continue
        data = _read_json(visual_file)

        # Group containers carry a "visualGroup" key instead of "visual"
        # and have no position/size of their own.
        if "visualGroup" in data and "visual" not in data:
            summaries.append({
                "name": data.get("name", entry.name),
                "visual_type": "group",
                "x": 0,
                "y": 0,
                "width": 0,
                "height": 0,
            })
            continue

        position = data.get("position", {})
        summaries.append({
            "name": data.get("name", entry.name),
            "visual_type": data.get("visual", {}).get("visualType", "unknown"),
            "x": position.get("x", 0),
            "y": position.get("y", 0),
            "width": position.get("width", 0),
            "height": position.get("height", 0),
        })

    return summaries
+
+
def visual_get(
    definition_path: Path, page_name: str, visual_name: str
) -> dict[str, Any]:
    """Get detailed information about a visual.

    Raises:
        PbiCliError: If the visual's ``visual.json`` does not exist.
    """
    vfile = get_visual_dir(definition_path, page_name, visual_name) / "visual.json"
    if not vfile.exists():
        raise PbiCliError(f"Visual '{visual_name}' not found on page '{page_name}'.")

    data = _read_json(vfile)
    position = data.get("position", {})
    visual_config = data.get("visual", {})
    query_state = visual_config.get("query", {}).get("queryState", {})

    # Flatten queryState projections into a per-role bindings summary.
    bindings = [
        {
            "role": role,
            "query_ref": projection.get("queryRef", ""),
            "field": _summarize_field(projection.get("field", {})),
        }
        for role, state in query_state.items()
        for projection in state.get("projections", [])
    ]

    return {
        "name": data.get("name", visual_name),
        "visual_type": visual_config.get("visualType", "unknown"),
        "x": position.get("x", 0),
        "y": position.get("y", 0),
        "width": position.get("width", 0),
        "height": position.get("height", 0),
        "bindings": bindings,
        "is_hidden": data.get("isHidden", False),
    }
+
+
def visual_add(
    definition_path: Path,
    page_name: str,
    visual_type: str,
    name: str | None = None,
    x: float | None = None,
    y: float | None = None,
    width: float | None = None,
    height: float | None = None,
) -> dict[str, Any]:
    """Add a new visual to a page.

    Unspecified position/size values fall back to sensible defaults:
    x=50, y below the page's existing visuals, and the type's entry in
    ``DEFAULT_SIZES`` (or 400x300).

    Raises:
        PbiCliError: If the page does not exist.
        VisualTypeError: If ``visual_type`` cannot be resolved.
    """
    if not (definition_path / "pages" / page_name).is_dir():
        raise PbiCliError(f"Page '{page_name}' not found.")

    resolved_type = _resolve_visual_type(visual_type)
    visual_name = name if name else _generate_name()

    default_w, default_h = DEFAULT_SIZES.get(resolved_type, (400, 300))
    final_x = 50 if x is None else x
    final_y = _next_y_position(definition_path, page_name) if y is None else y
    final_w = default_w if width is None else width
    final_h = default_h if height is None else height

    # New visuals go on top of the existing stack.
    z = _next_z_order(definition_path, page_name)

    visual_data = _build_visual_json(
        _load_template(resolved_type),
        name=visual_name,
        x=final_x,
        y=final_y,
        width=final_w,
        height=final_h,
        z=z,
        tab_order=z,
    )

    target_dir = get_visuals_dir(definition_path, page_name) / visual_name
    target_dir.mkdir(parents=True, exist_ok=True)
    _write_json(target_dir / "visual.json", visual_data)

    return {
        "status": "created",
        "name": visual_name,
        "visual_type": resolved_type,
        "page": page_name,
        "x": final_x,
        "y": final_y,
        "width": final_w,
        "height": final_h,
    }
+
+
def visual_update(
    definition_path: Path,
    page_name: str,
    visual_name: str,
    x: float | None = None,
    y: float | None = None,
    width: float | None = None,
    height: float | None = None,
    hidden: bool | None = None,
) -> dict[str, Any]:
    """Update visual position, size, or visibility.

    Only the arguments that are not None are written; everything else in
    ``visual.json`` is preserved.

    Raises:
        PbiCliError: If the visual's ``visual.json`` does not exist.
    """
    vfile = get_visual_dir(definition_path, page_name, visual_name) / "visual.json"
    if not vfile.exists():
        raise PbiCliError(f"Visual '{visual_name}' not found on page '{page_name}'.")

    data = _read_json(vfile)
    pos = data.get("position", {})

    # Apply only the position fields that were actually provided.
    for field, value in (("x", x), ("y", y), ("width", width), ("height", height)):
        if value is not None:
            pos[field] = value
    data["position"] = pos

    if hidden is not None:
        data["isHidden"] = hidden

    _write_json(vfile, data)

    return {
        "status": "updated",
        "name": visual_name,
        "page": page_name,
        "position": {key: pos.get(key, 0) for key in ("x", "y", "width", "height")},
    }
+
+
def visual_set_container(
    definition_path: Path,
    page_name: str,
    visual_name: str,
    border_show: bool | None = None,
    background_show: bool | None = None,
    title: str | None = None,
) -> dict[str, Any]:
    """Set container-level properties (border, background, title) on a visual.

    ``visualContainerObjects`` is distinct from ``visual.objects`` -- it
    styles the container chrome (border, background, header title) rather
    than the visual's own formatting. Only the keyword arguments that are
    provided (not None) are written; every other key is preserved.

    Returns a status dict ("no-op" when nothing was requested).
    Raises PbiCliError when the visual is missing or its JSON lacks the
    top-level 'visual' key.
    """
    visual_json_path = (
        get_visual_dir(definition_path, page_name, visual_name) / "visual.json"
    )
    if not visual_json_path.exists():
        raise PbiCliError(
            f"Visual '{visual_name}' not found on page '{page_name}'."
        )

    data = _read_json(visual_json_path)
    visual = data.get("visual")
    if visual is None:
        raise PbiCliError(
            f"Visual '{visual_name}' has invalid JSON -- missing 'visual' key."
        )

    if all(arg is None for arg in (border_show, background_show, title)):
        return {
            "status": "no-op",
            "visual": visual_name,
            "page": page_name,
            "border_show": None,
            "background_show": None,
            "title": None,
        }

    # Copy so unrelated visualContainerObjects entries survive untouched.
    container: dict[str, Any] = dict(visual.get("visualContainerObjects", {}))

    def _show_entry(flag: bool) -> list[dict[str, Any]]:
        # PBIR encodes boolean settings as a literal "true"/"false" expression.
        literal = {"expr": {"Literal": {"Value": str(flag).lower()}}}
        return [{"properties": {"show": literal}}]

    if border_show is not None:
        container["border"] = _show_entry(border_show)
    if background_show is not None:
        container["background"] = _show_entry(background_show)
    if title is not None:
        # Title text is a quoted PBIR string literal.
        title_literal = {"expr": {"Literal": {"Value": f"'{title}'"}}}
        container["title"] = [{"properties": {"text": title_literal}}]

    data["visual"] = {**visual, "visualContainerObjects": container}
    _write_json(visual_json_path, data)

    return {
        "status": "updated",
        "visual": visual_name,
        "page": page_name,
        "border_show": border_show,
        "background_show": background_show,
        "title": title,
    }
+
+
def visual_delete(
    definition_path: Path, page_name: str, visual_name: str
) -> dict[str, Any]:
    """Remove a visual's directory (and its contents) from a page.

    Raises PbiCliError when the visual directory does not exist.
    """
    target = get_visual_dir(definition_path, page_name, visual_name)
    if not target.exists():
        raise PbiCliError(f"Visual '{visual_name}' not found on page '{page_name}'.")

    _rmtree(target)
    return {"status": "deleted", "name": visual_name, "page": page_name}
+
+
def visual_bind(
    definition_path: Path,
    page_name: str,
    visual_name: str,
    bindings: list[dict[str, Any]],
) -> dict[str, Any]:
    """Bind semantic model fields to visual data roles.

    Each binding dict should have:
    - ``role``: Data role (e.g. "category", "value", "row")
    - ``field``: Field reference in ``Table[Column]`` notation
    - ``measure``: (optional) bool, force treat as measure

    Roles are resolved through ``ROLE_ALIASES`` to the actual PBIR role name.
    Measure vs Column is determined by the resolved role: value/field/indicator/goal
    roles default to Measure; category/row/legend default to Column.

    Existing Commands entries are merged rather than overwritten, and the
    queryState projections for the target role are appended to.

    Returns {"status": "bound", ...} with the applied bindings.
    Raises PbiCliError when the visual does not exist.
    """
    vfile = get_visual_dir(definition_path, page_name, visual_name) / "visual.json"
    if not vfile.exists():
        raise PbiCliError(f"Visual '{visual_name}' not found on page '{page_name}'.")

    data = _read_json(vfile)
    visual_config = data.get("visual", {})
    visual_type = visual_config.get("visualType", "")
    query = visual_config.setdefault("query", {})
    query_state = query.setdefault("queryState", {})

    # Collect existing Commands From/Select to merge (fix: don't overwrite)
    from_entities: dict[str, dict[str, Any]] = {}
    select_items: list[dict[str, Any]] = []
    _collect_existing_commands(query, from_entities, select_items)

    role_map = ROLE_ALIASES.get(visual_type, {})
    applied: list[dict[str, str]] = []

    def _field_expr(
        is_measure: bool, source_ref: dict[str, Any], column: str
    ) -> dict[str, Any]:
        # Measure and Column expressions share one shape; only the key differs.
        kind = "Measure" if is_measure else "Column"
        return {kind: {"Expression": {"SourceRef": source_ref}, "Property": column}}

    for binding in bindings:
        user_role = binding["role"].lower()
        field_ref = binding["field"]
        force_measure = binding.get("measure", False)

        # Resolve role alias
        pbir_role = role_map.get(user_role, binding["role"])

        # Parse Table[Column]
        table, column = _parse_field_ref(field_ref)

        # Determine measure vs column: explicit flag, or role-based heuristic
        is_measure = force_measure or pbir_role in MEASURE_ROLES

        # Track source alias for Commands block (use full name to avoid collisions)
        source_alias = table.replace(" ", "_").lower() if table else "t"
        from_entities[source_alias] = {
            "Name": source_alias,
            "Entity": table,
            "Type": 0,
        }

        # Build queryState projection (uses Entity directly, matching Desktop)
        query_ref = f"{table}.{column}"
        projection = {
            "field": _field_expr(is_measure, {"Entity": table}, column),
            "queryRef": query_ref,
            "nativeQueryRef": column,
        }

        # Add to query state. Fix: a pre-existing role entry may lack a
        # "projections" key, which previously raised KeyError -- use
        # setdefault (consistent with visual_calc_add).
        role_state = query_state.setdefault(pbir_role, {"projections": []})
        role_state.setdefault("projections", []).append(projection)

        # Build Commands select item (uses Source alias)
        select_items.append({
            **_field_expr(is_measure, {"Source": source_alias}, column),
            "Name": query_ref,
        })

        applied.append({
            "role": pbir_role,
            "field": field_ref,
            "query_ref": query_ref,
        })

    # Set the semantic query Commands block (merges with existing)
    if from_entities and select_items:
        query["Commands"] = [{
            "SemanticQueryDataShapeCommand": {
                "Query": {
                    "Version": 2,
                    "From": list(from_entities.values()),
                    "Select": select_items,
                }
            }
        }]

    data["visual"] = visual_config
    _write_json(vfile, data)

    return {
        "status": "bound",
        "name": visual_name,
        "page": page_name,
        "bindings": applied,
    }
+
+
+# ---------------------------------------------------------------------------
+# Internal helpers
+# ---------------------------------------------------------------------------
+
+_FIELD_REF_PATTERN = re.compile(r"^(.+)\[(.+)\]$")
+
+
+def _parse_field_ref(ref: str) -> tuple[str, str]:
+ """Parse ``Table[Column]`` or ``[Measure]`` notation.
+
+ Returns (table, column).
+ """
+ match = _FIELD_REF_PATTERN.match(ref.strip())
+ if match:
+ table = match.group(1).strip()
+ column = match.group(2).strip()
+ return table, column
+
+ raise PbiCliError(
+ f"Invalid field reference '{ref}'. Expected 'Table[Column]' format."
+ )
+
+
+def _summarize_field(field: dict[str, Any]) -> str:
+ """Produce a human-readable summary of a query field expression."""
+ for kind in ("Column", "Measure"):
+ if kind in field:
+ item = field[kind]
+ source_ref = item.get("Expression", {}).get("SourceRef", {})
+ # queryState uses Entity, Commands uses Source
+ source = source_ref.get("Entity", source_ref.get("Source", "?"))
+ prop = item.get("Property", "?")
+ if kind == "Measure":
+ return f"{source}.[{prop}]"
+ return f"{source}.{prop}"
+ return str(field)
+
+
+def _collect_existing_commands(
+ query: dict[str, Any],
+ from_entities: dict[str, dict[str, Any]],
+ select_items: list[dict[str, Any]],
+) -> None:
+ """Extract existing From entities and Select items from Commands block."""
+ for cmd in query.get("Commands", []):
+ sq = cmd.get("SemanticQueryDataShapeCommand", {}).get("Query", {})
+ for entity in sq.get("From", []):
+ name = entity.get("Name", "")
+ if name:
+ from_entities[name] = entity
+ select_items.extend(sq.get("Select", []))
+
+
+def _next_y_position(definition_path: Path, page_name: str) -> float:
+ """Calculate the next y position to avoid overlap with existing visuals."""
+ visuals_dir = definition_path / "pages" / page_name / "visuals"
+ if not visuals_dir.is_dir():
+ return 50
+
+ max_bottom = 50.0
+ for vdir in visuals_dir.iterdir():
+ if not vdir.is_dir():
+ continue
+ vfile = vdir / "visual.json"
+ if not vfile.exists():
+ continue
+ try:
+ data = _read_json(vfile)
+ pos = data.get("position", {})
+ bottom = pos.get("y", 0) + pos.get("height", 0)
+ if bottom > max_bottom:
+ max_bottom = bottom
+ except (json.JSONDecodeError, KeyError):
+ continue
+
+ return max_bottom + 20
+
+
+def _next_z_order(definition_path: Path, page_name: str) -> int:
+ """Determine the next z-order value for a new visual."""
+ visuals_dir = definition_path / "pages" / page_name / "visuals"
+ if not visuals_dir.is_dir():
+ return 0
+
+ max_z = -1
+ for vdir in visuals_dir.iterdir():
+ if not vdir.is_dir():
+ continue
+ vfile = vdir / "visual.json"
+ if not vfile.exists():
+ continue
+ try:
+ data = _read_json(vfile)
+ z = data.get("position", {}).get("z", 0)
+ if z > max_z:
+ max_z = z
+ except (json.JSONDecodeError, KeyError):
+ continue
+
+ return max_z + 1
+
+
+def _rmtree(path: Path) -> None:
+ """Recursively remove a directory tree."""
+ if path.is_dir():
+ for child in path.iterdir():
+ _rmtree(child)
+ path.rmdir()
+ else:
+ path.unlink()
+
+
+# ---------------------------------------------------------------------------
+# Visual Calculations (Phase 7)
+# ---------------------------------------------------------------------------
+
+
def visual_calc_add(
    definition_path: Path,
    page_name: str,
    visual_name: str,
    calc_name: str,
    expression: str,
    role: str = "Y",
) -> dict[str, Any]:
    """Add (or replace) a visual calculation in a data role's projections.

    A NativeVisualCalculation projection is appended to
    ``queryState[role].projections``; the role entry is created when absent.
    Re-adding a calc whose Name already exists in that role replaces it, so
    the operation is idempotent.

    Returns {"status": "added", "visual": visual_name, "name": calc_name,
    "role": role, "expression": expression}.
    Raises PbiCliError if visual.json not found.
    """
    vfile = get_visual_dir(definition_path, page_name, visual_name) / "visual.json"
    if not vfile.exists():
        raise PbiCliError(f"Visual '{visual_name}' not found on page '{page_name}'.")

    data = _read_json(vfile)
    query_state = (
        data.setdefault("visual", {})
        .setdefault("query", {})
        .setdefault("queryState", {})
    )
    projections: list[dict[str, Any]] = query_state.setdefault(
        role, {"projections": []}
    ).setdefault("projections", [])

    replacement: dict[str, Any] = {
        "field": {
            "NativeVisualCalculation": {
                "Language": "dax",
                "Expression": expression,
                "Name": calc_name,
            }
        },
        "queryRef": "select",
        "nativeQueryRef": calc_name,
    }

    # Replace every existing calc with the same name (idempotent), else append.
    matched = False
    for index, proj in enumerate(projections):
        existing = proj.get("field", {}).get("NativeVisualCalculation", {})
        if existing.get("Name") == calc_name:
            projections[index] = replacement
            matched = True

    if not matched:
        projections.append(replacement)

    _write_json(vfile, data)

    return {
        "status": "added",
        "visual": visual_name,
        "name": calc_name,
        "role": role,
        "expression": expression,
    }
+
+
def visual_calc_list(
    definition_path: Path,
    page_name: str,
    visual_name: str,
) -> list[dict[str, Any]]:
    """Return every visual calculation found across all data roles.

    Each entry is {"name": ..., "expression": ..., "role": ..., "query_ref": ...}.
    An empty list means the visual has no NativeVisualCalculation projections.
    Raises PbiCliError if visual.json not found.
    """
    vfile = get_visual_dir(definition_path, page_name, visual_name) / "visual.json"
    if not vfile.exists():
        raise PbiCliError(f"Visual '{visual_name}' not found on page '{page_name}'.")

    data = _read_json(vfile)
    query_state = data.get("visual", {}).get("query", {}).get("queryState", {})

    return [
        {
            "name": nvc.get("Name", ""),
            "expression": nvc.get("Expression", ""),
            "role": role,
            "query_ref": proj.get("queryRef", "select"),
        }
        for role, state in query_state.items()
        for proj in state.get("projections", [])
        if (nvc := proj.get("field", {}).get("NativeVisualCalculation")) is not None
    ]
+
+
def visual_calc_delete(
    definition_path: Path,
    page_name: str,
    visual_name: str,
    calc_name: str,
) -> dict[str, Any]:
    """Delete the visual calculation named *calc_name* from every role.

    Scans all roles' projections for a NativeVisualCalculation whose Name
    matches and removes each occurrence.
    Raises PbiCliError when the visual or the calculation does not exist.
    Returns {"status": "deleted", "visual": visual_name, "name": calc_name}.
    """
    vfile = get_visual_dir(definition_path, page_name, visual_name) / "visual.json"
    if not vfile.exists():
        raise PbiCliError(f"Visual '{visual_name}' not found on page '{page_name}'.")

    data = _read_json(vfile)
    query_state = data.get("visual", {}).get("query", {}).get("queryState", {})

    def _is_target(proj: dict[str, Any]) -> bool:
        nvc = proj.get("field", {}).get("NativeVisualCalculation", {})
        return nvc.get("Name") == calc_name

    removed_any = False
    for state in query_state.values():
        projections: list[dict[str, Any]] = state.get("projections", [])
        kept = [proj for proj in projections if not _is_target(proj)]
        if len(kept) != len(projections):
            state["projections"] = kept
            removed_any = True

    if not removed_any:
        raise PbiCliError(
            f"Visual calculation '{calc_name}' not found in visual '{visual_name}'."
        )

    _write_json(vfile, data)
    return {"status": "deleted", "visual": visual_name, "name": calc_name}
diff --git a/src/pbi_cli/main.py b/src/pbi_cli/main.py
index 43c2a02..a603208 100644
--- a/src/pbi_cli/main.py
+++ b/src/pbi_cli/main.py
@@ -52,6 +52,7 @@ def cli(ctx: click.Context, json_output: bool, connection: str | None) -> None:
def _register_commands() -> None:
"""Lazily import and register all command groups."""
from pbi_cli.commands.advanced import advanced
+ from pbi_cli.commands.bookmarks import bookmarks
from pbi_cli.commands.calc_group import calc_group
from pbi_cli.commands.calendar import calendar
from pbi_cli.commands.column import column
@@ -59,6 +60,8 @@ def _register_commands() -> None:
from pbi_cli.commands.database import database
from pbi_cli.commands.dax import dax
from pbi_cli.commands.expression import expression
+ from pbi_cli.commands.filters import filters
+ from pbi_cli.commands.format_cmd import format_cmd
from pbi_cli.commands.hierarchy import hierarchy
from pbi_cli.commands.measure import measure
from pbi_cli.commands.model import model
@@ -66,12 +69,14 @@ def _register_commands() -> None:
from pbi_cli.commands.perspective import perspective
from pbi_cli.commands.relationship import relationship
from pbi_cli.commands.repl_cmd import repl
+ from pbi_cli.commands.report import report
from pbi_cli.commands.security import security_role
from pbi_cli.commands.setup_cmd import setup
from pbi_cli.commands.skills_cmd import skills
from pbi_cli.commands.table import table
from pbi_cli.commands.trace import trace
from pbi_cli.commands.transaction import transaction
+ from pbi_cli.commands.visual import visual
cli.add_command(setup)
cli.add_command(connect)
@@ -96,6 +101,11 @@ def _register_commands() -> None:
cli.add_command(advanced)
cli.add_command(repl)
cli.add_command(skills)
+ cli.add_command(report)
+ cli.add_command(visual)
+ cli.add_command(filters)
+ cli.add_command(format_cmd)
+ cli.add_command(bookmarks)
_register_commands()
diff --git a/src/pbi_cli/preview/__init__.py b/src/pbi_cli/preview/__init__.py
new file mode 100644
index 0000000..58cb36f
--- /dev/null
+++ b/src/pbi_cli/preview/__init__.py
@@ -0,0 +1 @@
+"""Live preview server for PBIR reports."""
diff --git a/src/pbi_cli/preview/renderer.py b/src/pbi_cli/preview/renderer.py
new file mode 100644
index 0000000..4a94f72
--- /dev/null
+++ b/src/pbi_cli/preview/renderer.py
@@ -0,0 +1,487 @@
+"""PBIR JSON to HTML/SVG renderer.
+
+Renders a simplified structural preview of PBIR report pages.
+Not pixel-perfect Power BI rendering -- shows layout, visual types,
+and field bindings for validation before opening in Desktop.
+"""
+
+from __future__ import annotations
+
+import json
+from html import escape
+from pathlib import Path
+from typing import Any
+
+
def render_report(definition_path: Path) -> str:
    """Render the whole PBIR report definition as a single HTML page.

    Pages are rendered in declared order when known (pages missing from the
    declared order sort after all known pages). The theme name is escaped
    and substituted into the HTML template alongside the rendered pages.
    """
    report_data = _read_json(definition_path / "report.json")
    theme = (
        report_data.get("themeCollection", {}).get("baseTheme", {}).get("name", "Default")
    )

    pages_html: list[str] = []
    pages_dir = definition_path / "pages"
    if pages_dir.is_dir():
        page_order = _get_page_order(definition_path)
        page_dirs = sorted(
            [d for d in pages_dir.iterdir() if d.is_dir() and (d / "page.json").exists()],
            key=lambda d: page_order.index(d.name) if d.name in page_order else 9999,
        )
        for page_dir in page_dirs:
            pages_html.append(_render_page(page_dir))

    # NOTE(review): the empty-report placeholder markup was garbled in the
    # source (string literal broken across lines); reconstructed as a simple
    # paragraph -- confirm the intended markup/CSS class.
    pages_content = (
        "\n".join(pages_html) if pages_html else "<p>No pages in report</p>"
    )

    return _HTML_TEMPLATE.replace("{{THEME}}", escape(theme)).replace(
        "{{PAGES}}", pages_content
    )
+
+
+def render_page(definition_path: Path, page_name: str) -> str:
+ """Render a single page as HTML."""
+ page_dir = definition_path / "pages" / page_name
+ if not page_dir.is_dir():
+ return f"