mirror of
https://github.com/MinaSaad1/pbi-cli
synced 2026-04-21 13:37:19 +00:00
feat: initial pbi-cli project with all 20+ command groups
Complete CLI framework for Power BI semantic models via MCP server: - Core: MCP client (stdio via mcp SDK), binary manager (VSIX download), config, connection store, dual output (JSON + Rich) - Commands: setup, connect, dax, measure, table, column, relationship, model, database, security-role, calc-group, partition, perspective, hierarchy, expression, calendar, trace, transaction, advanced - Binary resolution: env var > managed > VS Code extension fallback - Global --json flag for agent consumption
This commit is contained in:
commit
170413cf22
38 changed files with 2742 additions and 0 deletions
34
.gitignore
vendored
Normal file
34
.gitignore
vendored
Normal file
|
|
@ -0,0 +1,34 @@
|
||||||
|
# Python
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
*.so
|
||||||
|
*.egg-info/
|
||||||
|
*.egg
|
||||||
|
dist/
|
||||||
|
build/
|
||||||
|
.eggs/
|
||||||
|
|
||||||
|
# Virtual environments
|
||||||
|
.venv/
|
||||||
|
venv/
|
||||||
|
ENV/
|
||||||
|
|
||||||
|
# IDE
|
||||||
|
.vscode/
|
||||||
|
.idea/
|
||||||
|
*.swp
|
||||||
|
*.swo
|
||||||
|
|
||||||
|
# Testing
|
||||||
|
.pytest_cache/
|
||||||
|
.coverage
|
||||||
|
htmlcov/
|
||||||
|
.mypy_cache/
|
||||||
|
|
||||||
|
# OS
|
||||||
|
.DS_Store
|
||||||
|
Thumbs.db
|
||||||
|
|
||||||
|
# pbi-cli specific
|
||||||
|
~/.pbi-cli/
|
||||||
21
LICENSE
Normal file
21
LICENSE
Normal file
|
|
@ -0,0 +1,21 @@
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2026 pbi-cli contributors
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
73
pyproject.toml
Normal file
73
pyproject.toml
Normal file
|
|
@ -0,0 +1,73 @@
|
||||||
|
[build-system]
|
||||||
|
requires = ["setuptools>=68.0", "wheel"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "pbi-cli"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "CLI for Power BI semantic models - wraps the Power BI MCP server for token-efficient AI agent usage"
|
||||||
|
readme = "README.md"
|
||||||
|
license = {text = "MIT"}
|
||||||
|
requires-python = ">=3.10"
|
||||||
|
authors = [
|
||||||
|
{name = "pbi-cli contributors"},
|
||||||
|
]
|
||||||
|
keywords = ["power-bi", "cli", "mcp", "semantic-model", "dax", "claude-code"]
|
||||||
|
classifiers = [
|
||||||
|
"Development Status :: 3 - Alpha",
|
||||||
|
"Environment :: Console",
|
||||||
|
"Intended Audience :: Developers",
|
||||||
|
"License :: OSI Approved :: MIT License",
|
||||||
|
"Operating System :: OS Independent",
|
||||||
|
"Programming Language :: Python :: 3",
|
||||||
|
"Programming Language :: Python :: 3.10",
|
||||||
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"Programming Language :: Python :: 3.12",
|
||||||
|
"Programming Language :: Python :: 3.13",
|
||||||
|
"Topic :: Database",
|
||||||
|
"Topic :: Software Development :: Libraries",
|
||||||
|
]
|
||||||
|
dependencies = [
|
||||||
|
"click>=8.0.0",
|
||||||
|
"mcp>=1.20.0",
|
||||||
|
"rich>=13.0.0",
|
||||||
|
"httpx>=0.24.0",
|
||||||
|
"prompt-toolkit>=3.0.0",
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.scripts]
|
||||||
|
pbi = "pbi_cli.main:cli"
|
||||||
|
|
||||||
|
[project.urls]
|
||||||
|
Homepage = "https://github.com/pbi-cli/pbi-cli"
|
||||||
|
Repository = "https://github.com/pbi-cli/pbi-cli"
|
||||||
|
Issues = "https://github.com/pbi-cli/pbi-cli/issues"
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
dev = [
|
||||||
|
"pytest>=7.0",
|
||||||
|
"pytest-cov>=4.0",
|
||||||
|
"pytest-asyncio>=0.21",
|
||||||
|
"ruff>=0.4.0",
|
||||||
|
"mypy>=1.10",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.setuptools.packages.find]
|
||||||
|
where = ["src"]
|
||||||
|
|
||||||
|
[tool.ruff]
|
||||||
|
target-version = "py310"
|
||||||
|
line-length = 100
|
||||||
|
|
||||||
|
[tool.ruff.lint]
|
||||||
|
select = ["E", "F", "I", "N", "W", "UP"]
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
testpaths = ["tests"]
|
||||||
|
markers = [
|
||||||
|
"e2e: end-to-end tests requiring real Power BI binary",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.mypy]
|
||||||
|
python_version = "3.10"
|
||||||
|
strict = true
|
||||||
3
src/pbi_cli/__init__.py
Normal file
3
src/pbi_cli/__init__.py
Normal file
|
|
@ -0,0 +1,3 @@
|
||||||
|
"""pbi-cli: CLI for Power BI semantic models via MCP server."""
|
||||||
|
|
||||||
|
__version__ = "0.1.0"
|
||||||
6
src/pbi_cli/__main__.py
Normal file
6
src/pbi_cli/__main__.py
Normal file
|
|
@ -0,0 +1,6 @@
|
||||||
|
"""Allow running pbi-cli as: python -m pbi_cli"""
|
||||||
|
|
||||||
|
from pbi_cli.main import cli
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
cli()
|
||||||
1
src/pbi_cli/commands/__init__.py
Normal file
1
src/pbi_cli/commands/__init__.py
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
"""CLI command groups for pbi-cli."""
|
||||||
48
src/pbi_cli/commands/_helpers.py
Normal file
48
src/pbi_cli/commands/_helpers.py
Normal file
|
|
@ -0,0 +1,48 @@
|
||||||
|
"""Shared helpers for CLI commands to reduce boilerplate."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.core.mcp_client import get_client
|
||||||
|
from pbi_cli.core.output import format_mcp_result, print_error
|
||||||
|
from pbi_cli.main import PbiContext
|
||||||
|
|
||||||
|
|
||||||
|
def run_tool(
    ctx: PbiContext,
    tool_name: str,
    request: dict[str, Any],
) -> Any:
    """Execute an MCP tool call with standard error handling.

    Args:
        ctx: CLI context; supplies the active connection name and the
            --json output flag.
        tool_name: Name of the MCP tool to invoke.
        request: Tool arguments; mutated in place to add ``connectionName``
            when the context carries one and the caller did not set it.

    Returns:
        The raw MCP result on success.

    Raises:
        SystemExit: With code 1 when the tool call fails; the error message
            is printed first.
    """
    if ctx.connection:
        # setdefault: respect an explicit connectionName supplied by the caller.
        request.setdefault("connectionName", ctx.connection)

    client = get_client()
    try:
        result = client.call_tool(tool_name, request)
        format_mcp_result(result, ctx.json_output)
        return result
    except Exception as e:
        print_error(str(e))
        # Chain the original exception so the root cause is preserved when
        # SystemExit propagates (ruff/bugbear B904 style).
        raise SystemExit(1) from e
    finally:
        # Always tear the client down, even on failure.
        client.stop()
|
||||||
|
|
||||||
|
|
||||||
|
def build_definition(
    required: dict[str, Any],
    optional: dict[str, Any],
) -> dict[str, Any]:
    """Build a definition dict, including only non-None optional values.

    Returns a new dict: all ``required`` entries plus every ``optional``
    entry whose value is not None (optional values win on key collisions).
    """
    extras = {key: value for key, value in optional.items() if value is not None}
    return {**required, **extras}
|
||||||
135
src/pbi_cli/commands/advanced.py
Normal file
135
src/pbi_cli/commands/advanced.py
Normal file
|
|
@ -0,0 +1,135 @@
|
||||||
|
"""Less common operations: culture, translation, function, query-group."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import build_definition, run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
def advanced() -> None:
    """Advanced operations: cultures, translations, functions, query groups.

    Pure container group; all work happens in the nested subgroups.
    """
|
||||||
|
|
||||||
|
|
||||||
|
# --- Culture ---
|
||||||
|
|
||||||
|
# Nested group: invoked as `... advanced culture <command>`.
@advanced.group()
def culture() -> None:
    """Manage model cultures (locales)."""
|
||||||
|
|
||||||
|
|
||||||
|
@culture.command(name="list")
@pass_context
def culture_list(ctx: PbiContext) -> None:
    """List cultures defined on the connected model."""
    # Output formatting and error handling are centralized in run_tool.
    run_tool(ctx, "culture_operations", {"operation": "List"})
|
||||||
|
|
||||||
|
|
||||||
|
@culture.command(name="create")
@click.argument("name")
@pass_context
def culture_create(ctx: PbiContext, name: str) -> None:
    """Create a culture.

    NAME is the culture (locale) identifier to add.
    """
    # Explicit name="create" keeps the CLI verb consistent with the sibling
    # list/delete commands; click would otherwise derive "culture-create"
    # from the Python function name.
    run_tool(ctx, "culture_operations", {"operation": "Create", "definitions": [{"name": name}]})
|
||||||
|
|
||||||
|
|
||||||
|
@culture.command(name="delete")
@click.argument("name")
@pass_context
def culture_delete(ctx: PbiContext, name: str) -> None:
    """Delete a culture.

    NAME is the culture (locale) identifier to remove.
    """
    run_tool(ctx, "culture_operations", {"operation": "Delete", "name": name})
|
||||||
|
|
||||||
|
|
||||||
|
# --- Translation ---
|
||||||
|
|
||||||
|
# Nested group: invoked as `... advanced translation <command>`.
@advanced.group()
def translation() -> None:
    """Manage object translations."""
|
||||||
|
|
||||||
|
|
||||||
|
@translation.command(name="list")
@click.option("--culture", "-c", required=True, help="Culture name.")
@pass_context
def translation_list(ctx: PbiContext, culture: str) -> None:
    """List translations for a culture."""
    # The MCP tool scopes the listing by cultureName.
    run_tool(ctx, "object_translation_operations", {"operation": "List", "cultureName": culture})
|
||||||
|
|
||||||
|
|
||||||
|
@translation.command()
@click.option("--culture", "-c", required=True, help="Culture name.")
@click.option("--object-name", required=True, help="Object to translate.")
@click.option("--table", "-t", default=None, help="Table name (if translating table object).")
@click.option("--translated-caption", default=None, help="Translated caption.")
@click.option("--translated-description", default=None, help="Translated description.")
@pass_context
def create(
    ctx: PbiContext,
    culture: str,
    object_name: str,
    table: str | None,
    translated_caption: str | None,
    translated_description: str | None,
) -> None:
    """Create an object translation.

    Only optional fields that were actually supplied are sent; None values
    are filtered out by build_definition.
    """
    definition = build_definition(
        required={"objectName": object_name, "cultureName": culture},
        optional={
            "tableName": table,
            "translatedCaption": translated_caption,
            "translatedDescription": translated_description,
        },
    )
    run_tool(ctx, "object_translation_operations", {"operation": "Create", "definitions": [definition]})
|
||||||
|
|
||||||
|
|
||||||
|
# --- Function ---
|
||||||
|
|
||||||
|
# NOTE(review): "function" refers to the model-level function objects exposed
# by the MCP function_operations tool.
@advanced.group()
def function() -> None:
    """Manage model functions."""
|
||||||
|
|
||||||
|
|
||||||
|
@function.command(name="list")
@pass_context
def function_list(ctx: PbiContext) -> None:
    """List functions defined on the connected model."""
    run_tool(ctx, "function_operations", {"operation": "List"})
|
||||||
|
|
||||||
|
|
||||||
|
@function.command(name="create")
@click.argument("name")
@click.option("--expression", "-e", required=True, help="Function expression.")
@pass_context
def function_create(ctx: PbiContext, name: str, expression: str) -> None:
    """Create a function.

    NAME is the function name; --expression supplies its body.
    """
    # Explicit name="create" keeps the CLI verb consistent with the sibling
    # "list" command; click would otherwise derive "function-create" from
    # the Python function name.
    run_tool(ctx, "function_operations", {
        "operation": "Create",
        "definitions": [{"name": name, "expression": expression}],
    })
|
||||||
|
|
||||||
|
|
||||||
|
# --- Query Group ---
|
||||||
|
|
||||||
|
# Hyphenated CLI name set explicitly; Python identifiers cannot contain '-'.
@advanced.group(name="query-group")
def query_group() -> None:
    """Manage query groups."""
|
||||||
|
|
||||||
|
|
||||||
|
@query_group.command(name="list")
@pass_context
def qg_list(ctx: PbiContext) -> None:
    """List query groups on the connected model."""
    run_tool(ctx, "query_group_operations", {"operation": "List"})
|
||||||
|
|
||||||
|
|
||||||
|
@query_group.command(name="create")
@click.argument("name")
@click.option("--folder", default=None, help="Folder path.")
@pass_context
def qg_create(ctx: PbiContext, name: str, folder: str | None) -> None:
    """Create a query group.

    --folder is included in the definition only when provided.
    """
    # Explicit name="create" keeps the CLI verb consistent with "list";
    # click would otherwise derive "qg-create" from the function name.
    definition = build_definition(required={"name": name}, optional={"folder": folder})
    run_tool(ctx, "query_group_operations", {"operation": "Create", "definitions": [definition]})
|
||||||
69
src/pbi_cli/commands/calc_group.py
Normal file
69
src/pbi_cli/commands/calc_group.py
Normal file
|
|
@ -0,0 +1,69 @@
|
||||||
|
"""Calculation group commands."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import build_definition, run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
# Hyphenated CLI name set explicitly; Python identifiers cannot contain '-'.
@click.group(name="calc-group")
def calc_group() -> None:
    """Manage calculation groups."""
|
||||||
|
|
||||||
|
|
||||||
|
@calc_group.command(name="list")
@pass_context
def cg_list(ctx: PbiContext) -> None:
    """List all calculation groups on the connected model."""
    run_tool(ctx, "calculation_group_operations", {"operation": "ListGroups"})
|
||||||
|
|
||||||
|
|
||||||
|
@calc_group.command()
@click.argument("name")
@click.option("--description", default=None, help="Group description.")
@click.option("--precedence", type=int, default=None, help="Calculation precedence.")
@pass_context
def create(ctx: PbiContext, name: str, description: str | None, precedence: int | None) -> None:
    """Create a calculation group.

    Optional fields are included only when supplied (None values are
    filtered out by build_definition).
    """
    definition = build_definition(
        required={"name": name},
        optional={"description": description, "calculationGroupPrecedence": precedence},
    )
    run_tool(ctx, "calculation_group_operations", {"operation": "CreateGroup", "definitions": [definition]})
|
||||||
|
|
||||||
|
|
||||||
|
@calc_group.command()
@click.argument("name")
@pass_context
def delete(ctx: PbiContext, name: str) -> None:
    """Delete a calculation group.

    NAME is the calculation group to remove.
    """
    run_tool(ctx, "calculation_group_operations", {"operation": "DeleteGroup", "name": name})
|
||||||
|
|
||||||
|
|
||||||
|
@calc_group.command(name="items")
@click.argument("group_name")
@pass_context
def list_items(ctx: PbiContext, group_name: str) -> None:
    """List calculation items in a group.

    GROUP_NAME is the parent calculation group.
    """
    run_tool(ctx, "calculation_group_operations", {"operation": "ListItems", "calculationGroupName": group_name})
|
||||||
|
|
||||||
|
|
||||||
|
@calc_group.command(name="create-item")
@click.argument("item_name")
@click.option("--group", "-g", required=True, help="Calculation group name.")
@click.option("--expression", "-e", required=True, help="DAX expression.")
@click.option("--ordinal", type=int, default=None, help="Item ordinal.")
@pass_context
def create_item(ctx: PbiContext, item_name: str, group: str, expression: str, ordinal: int | None) -> None:
    """Create a calculation item in a group.

    --ordinal is sent only when supplied; build_definition filters out None.
    """
    definition = build_definition(
        required={"name": item_name, "expression": expression},
        optional={"ordinal": ordinal},
    )
    run_tool(ctx, "calculation_group_operations", {
        "operation": "CreateItem",
        "calculationGroupName": group,
        "definitions": [definition],
    })
|
||||||
42
src/pbi_cli/commands/calendar.py
Normal file
42
src/pbi_cli/commands/calendar.py
Normal file
|
|
@ -0,0 +1,42 @@
|
||||||
|
"""Calendar table commands."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import build_definition, run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
def calendar() -> None:
    """Manage calendar tables.

    Container group for the list/create/delete subcommands below.
    """
|
||||||
|
|
||||||
|
|
||||||
|
@calendar.command(name="list")
@pass_context
def calendar_list(ctx: PbiContext) -> None:
    """List calendar tables on the connected model."""
    run_tool(ctx, "calendar_operations", {"operation": "List"})
|
||||||
|
|
||||||
|
|
||||||
|
@calendar.command()
@click.argument("name")
@click.option("--table", "-t", required=True, help="Target table name.")
@click.option("--description", default=None, help="Calendar description.")
@pass_context
def create(ctx: PbiContext, name: str, table: str, description: str | None) -> None:
    """Create a calendar table.

    --description is included only when supplied.
    """
    definition = build_definition(
        required={"name": name, "tableName": table},
        optional={"description": description},
    )
    run_tool(ctx, "calendar_operations", {"operation": "Create", "definitions": [definition]})
|
||||||
|
|
||||||
|
|
||||||
|
@calendar.command()
@click.argument("name")
@pass_context
def delete(ctx: PbiContext, name: str) -> None:
    """Delete a calendar.

    NAME is the calendar to remove.
    """
    run_tool(ctx, "calendar_operations", {"operation": "Delete", "name": name})
|
||||||
104
src/pbi_cli/commands/column.py
Normal file
104
src/pbi_cli/commands/column.py
Normal file
|
|
@ -0,0 +1,104 @@
|
||||||
|
"""Column CRUD commands."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import build_definition, run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
def column() -> None:
    """Manage columns in a semantic model.

    Container group for the column CRUD subcommands below.
    """
|
||||||
|
|
||||||
|
|
||||||
|
@column.command(name="list")
@click.option("--table", "-t", required=True, help="Table name.")
@pass_context
def column_list(ctx: PbiContext, table: str) -> None:
    """List all columns in a table."""
    # Columns are always scoped to a table, hence the required --table.
    run_tool(ctx, "column_operations", {"operation": "List", "tableName": table})
|
||||||
|
|
||||||
|
|
||||||
|
@column.command()
@click.argument("name")
@click.option("--table", "-t", required=True, help="Table name.")
@pass_context
def get(ctx: PbiContext, name: str, table: str) -> None:
    """Get details of a specific column.

    NAME is the column; --table scopes the lookup.
    """
    run_tool(ctx, "column_operations", {"operation": "Get", "name": name, "tableName": table})
|
||||||
|
|
||||||
|
|
||||||
|
@column.command()
@click.argument("name")
@click.option("--table", "-t", required=True, help="Table name.")
@click.option("--data-type", required=True, help="Data type (string, int64, double, datetime, etc.).")
@click.option("--source-column", default=None, help="Source column name (for Import mode).")
@click.option("--expression", default=None, help="DAX expression (for calculated columns).")
@click.option("--format-string", default=None, help="Format string.")
@click.option("--description", default=None, help="Column description.")
@click.option("--folder", default=None, help="Display folder.")
@click.option("--hidden", is_flag=True, default=False, help="Hide from client tools.")
@click.option("--is-key", is_flag=True, default=False, help="Mark as key column.")
@pass_context
def create(
    ctx: PbiContext,
    name: str,
    table: str,
    data_type: str,
    source_column: str | None,
    expression: str | None,
    format_string: str | None,
    description: str | None,
    folder: str | None,
    hidden: bool,
    is_key: bool,
    ) -> None:
    """Create a new column.

    Only supplied optional fields are sent; build_definition drops Nones.
    """
    definition = build_definition(
        required={"name": name, "tableName": table, "dataType": data_type},
        optional={
            "sourceColumn": source_column,
            "expression": expression,
            "formatString": format_string,
            "description": description,
            "displayFolder": folder,
            "isHidden": hidden if hidden else None,  # flags mapped to None when unset so they are omitted
            "isKey": is_key if is_key else None,  # same treatment as isHidden
        },
    )
    run_tool(ctx, "column_operations", {"operation": "Create", "definitions": [definition]})
|
||||||
|
|
||||||
|
|
||||||
|
@column.command()
@click.argument("name")
@click.option("--table", "-t", required=True, help="Table name.")
@pass_context
def delete(ctx: PbiContext, name: str, table: str) -> None:
    """Delete a column.

    NAME is the column; --table scopes the deletion.
    """
    run_tool(ctx, "column_operations", {"operation": "Delete", "name": name, "tableName": table})
|
||||||
|
|
||||||
|
|
||||||
|
@column.command()
@click.argument("old_name")
@click.argument("new_name")
@click.option("--table", "-t", required=True, help="Table name.")
@pass_context
def rename(ctx: PbiContext, old_name: str, new_name: str, table: str) -> None:
    """Rename a column.

    OLD_NAME is the current column name; NEW_NAME is the replacement.
    """
    run_tool(ctx, "column_operations", {
        "operation": "Rename",
        "name": old_name,
        "newName": new_name,
        "tableName": table,
    })
|
||||||
|
|
||||||
|
|
||||||
|
@column.command(name="export-tmdl")
@click.argument("name")
@click.option("--table", "-t", required=True, help="Table name.")
@pass_context
def export_tmdl(ctx: PbiContext, name: str, table: str) -> None:
    """Export a column as TMDL.

    NAME is the column; --table scopes the export.
    """
    run_tool(ctx, "column_operations", {"operation": "ExportTMDL", "name": name, "tableName": table})
|
||||||
206
src/pbi_cli/commands/connection.py
Normal file
206
src/pbi_cli/commands/connection.py
Normal file
|
|
@ -0,0 +1,206 @@
|
||||||
|
"""Connection management commands."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.core.connection_store import (
|
||||||
|
ConnectionInfo,
|
||||||
|
add_connection,
|
||||||
|
get_active_connection,
|
||||||
|
load_connections,
|
||||||
|
remove_connection,
|
||||||
|
save_connections,
|
||||||
|
)
|
||||||
|
from pbi_cli.core.mcp_client import PbiMcpClient, get_client
|
||||||
|
from pbi_cli.core.output import (
|
||||||
|
format_mcp_result,
|
||||||
|
print_error,
|
||||||
|
print_json,
|
||||||
|
print_success,
|
||||||
|
print_table,
|
||||||
|
)
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.command()
@click.option("--data-source", "-d", required=True, help="Data source (e.g., localhost:54321).")
@click.option("--catalog", "-C", default="", help="Initial catalog / dataset name.")
@click.option("--name", "-n", default=None, help="Name for this connection (auto-generated if omitted).")
@click.option("--connection-string", default="", help="Full connection string (overrides data-source).")
@pass_context
def connect(ctx: PbiContext, data_source: str, catalog: str, name: str | None, connection_string: str) -> None:
    """Connect to a Power BI instance via data source.

    On success the connection is persisted to the local connection store so
    later commands can reuse it. Exits with code 1 on failure.
    """
    conn_name = name or _auto_name(data_source)

    request: dict = {
        "operation": "Connect",
        "connectionName": conn_name,
        "dataSource": data_source,
    }
    # Optional fields are sent only when non-empty.
    if catalog:
        request["initialCatalog"] = catalog
    if connection_string:
        request["connectionString"] = connection_string

    client = get_client()
    try:
        result = client.call_tool("connection_operations", request)

        # Persist the connection only after the tool call succeeds.
        info = ConnectionInfo(
            name=conn_name,
            data_source=data_source,
            initial_catalog=catalog,
            connection_string=connection_string,
        )
        store = load_connections()
        store = add_connection(store, info)
        save_connections(store)

        if ctx.json_output:
            print_json({"connection": conn_name, "status": "connected", "result": result})
        else:
            print_success(f"Connected: {conn_name} ({data_source})")
    except Exception as e:
        print_error(f"Connection failed: {e}")
        # Chain the cause so the original failure is not lost (B904 style).
        raise SystemExit(1) from e
    finally:
        client.stop()
|
||||||
|
|
||||||
|
|
||||||
|
@click.command(name="connect-fabric")
@click.option("--workspace", "-w", required=True, help="Fabric workspace name (exact match).")
@click.option("--model", "-m", required=True, help="Semantic model name (exact match).")
@click.option("--name", "-n", default=None, help="Name for this connection.")
@click.option("--tenant", default="myorg", help="Tenant name for B2B scenarios.")
@pass_context
def connect_fabric(ctx: PbiContext, workspace: str, model: str, name: str | None, tenant: str) -> None:
    """Connect to a Fabric workspace semantic model.

    On success the connection is persisted to the local store. Exits with
    code 1 on failure.
    """
    conn_name = name or f"{workspace}/{model}"

    request: dict = {
        "operation": "ConnectFabric",
        "connectionName": conn_name,
        "workspaceName": workspace,
        "semanticModelName": model,
        "tenantName": tenant,
    }

    client = get_client()
    try:
        result = client.call_tool("connection_operations", request)

        info = ConnectionInfo(
            name=conn_name,
            # XMLA-style endpoint recorded for display/bookkeeping.
            data_source=f"powerbi://api.powerbi.com/v1.0/{tenant}/{workspace}",
            workspace_name=workspace,
            semantic_model_name=model,
            tenant_name=tenant,
        )
        store = load_connections()
        store = add_connection(store, info)
        save_connections(store)

        if ctx.json_output:
            print_json({"connection": conn_name, "status": "connected", "result": result})
        else:
            print_success(f"Connected to Fabric: {workspace}/{model}")
    except Exception as e:
        print_error(f"Fabric connection failed: {e}")
        # Chain the cause so the original failure is not lost (B904 style).
        raise SystemExit(1) from e
    finally:
        client.stop()
|
||||||
|
|
||||||
|
|
||||||
|
@click.command()
@click.option("--name", "-n", default=None, help="Connection name to disconnect (defaults to active).")
@pass_context
def disconnect(ctx: PbiContext, name: str | None) -> None:
    """Disconnect from the active or named connection.

    Also removes the connection from the local store. Exits with code 1
    when there is nothing to disconnect or the tool call fails.
    """
    store = load_connections()
    target = name or store.last_used

    if not target:
        print_error("No active connection to disconnect.")
        raise SystemExit(1)

    client = get_client()
    try:
        # The MCP result itself is not surfaced; only success/failure matters.
        client.call_tool("connection_operations", {
            "operation": "Disconnect",
            "connectionName": target,
        })

        store = remove_connection(store, target)
        save_connections(store)

        if ctx.json_output:
            print_json({"connection": target, "status": "disconnected"})
        else:
            print_success(f"Disconnected: {target}")
    except Exception as e:
        print_error(f"Disconnect failed: {e}")
        # Chain the cause so the original failure is not lost (B904 style).
        raise SystemExit(1) from e
    finally:
        client.stop()
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
def connections() -> None:
    """Manage saved connections.

    Container group for the list/last subcommands below.
    """
|
||||||
|
|
||||||
|
|
||||||
|
@connections.command(name="list")
@pass_context
def connections_list(ctx: PbiContext) -> None:
    """List all saved connections."""
    store = load_connections()

    if ctx.json_output:
        # Serialize ConnectionInfo dataclasses for machine consumption.
        from dataclasses import asdict
        data = {
            "last_used": store.last_used,
            "connections": [asdict(c) for c in store.connections.values()],
        }
        print_json(data)
        return

    if not store.connections:
        print_error("No saved connections. Run 'pbi connect' first.")
        return

    rows = []
    for info in store.connections.values():
        # '*' marks the most recently used connection.
        active = "*" if info.name == store.last_used else ""
        rows.append([active, info.name, info.data_source, info.initial_catalog])

    print_table("Connections", ["Active", "Name", "Data Source", "Catalog"], rows)
|
||||||
|
|
||||||
|
|
||||||
|
@connections.command(name="last")
@pass_context
def connections_last(ctx: PbiContext) -> None:
    """Show the last-used connection.

    Exits with code 1 when no connection is active.
    """
    store = load_connections()
    conn = get_active_connection(store)

    if conn is None:
        print_error("No active connection.")
        raise SystemExit(1)

    if ctx.json_output:
        # Serialize the ConnectionInfo dataclass for machine consumption.
        from dataclasses import asdict
        print_json(asdict(conn))
    else:
        from pbi_cli.core.output import print_key_value
        print_key_value("Active Connection", {
            "Name": conn.name,
            "Data Source": conn.data_source,
            "Catalog": conn.initial_catalog,
        })
|
||||||
|
|
||||||
|
|
||||||
|
def _auto_name(data_source: str) -> str:
|
||||||
|
"""Generate a connection name from a data source string."""
|
||||||
|
cleaned = data_source.replace("://", "-").replace("/", "-").replace(":", "-")
|
||||||
|
return cleaned[:50]
|
||||||
68
src/pbi_cli/commands/database.py
Normal file
68
src/pbi_cli/commands/database.py
Normal file
|
|
@ -0,0 +1,68 @@
|
||||||
|
"""Database-level operations: list, TMDL import/export, Fabric deploy."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
def database() -> None:
    """Manage semantic models (databases) at the top level.

    Container group for list, TMDL/TMSL import/export and Fabric deploy.
    """
|
||||||
|
|
||||||
|
|
||||||
|
@database.command(name="list")
@pass_context
def database_list(ctx: PbiContext) -> None:
    """List all databases on the connected server."""
    run_tool(ctx, "database_operations", {"operation": "List"})
|
||||||
|
|
||||||
|
|
||||||
|
@database.command(name="import-tmdl")
@click.argument("folder_path", type=click.Path(exists=True))
@pass_context
def import_tmdl(ctx: PbiContext, folder_path: str) -> None:
    """Import a model from a TMDL folder.

    FOLDER_PATH must exist locally; click validates this before the call.
    """
    run_tool(ctx, "database_operations", {
        "operation": "ImportFromTmdlFolder",
        "tmdlFolderPath": folder_path,
    })
|
||||||
|
|
||||||
|
|
||||||
|
@database.command(name="export-tmdl")
|
||||||
|
@click.argument("folder_path", type=click.Path())
|
||||||
|
@pass_context
|
||||||
|
def export_tmdl(ctx: PbiContext, folder_path: str) -> None:
|
||||||
|
"""Export the model to a TMDL folder."""
|
||||||
|
run_tool(ctx, "database_operations", {
|
||||||
|
"operation": "ExportToTmdlFolder",
|
||||||
|
"tmdlFolderPath": folder_path,
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@database.command(name="export-tmsl")
|
||||||
|
@pass_context
|
||||||
|
def export_tmsl(ctx: PbiContext) -> None:
|
||||||
|
"""Export the model as TMSL."""
|
||||||
|
run_tool(ctx, "database_operations", {"operation": "ExportTMSL"})
|
||||||
|
|
||||||
|
|
||||||
|
@database.command()
|
||||||
|
@click.option("--workspace", "-w", required=True, help="Target Fabric workspace name.")
|
||||||
|
@click.option("--new-name", default=None, help="New database name in target workspace.")
|
||||||
|
@click.option("--tenant", default=None, help="Tenant name for B2B scenarios.")
|
||||||
|
@pass_context
|
||||||
|
def deploy(ctx: PbiContext, workspace: str, new_name: str | None, tenant: str | None) -> None:
|
||||||
|
"""Deploy the model to a Fabric workspace."""
|
||||||
|
deploy_request: dict = {"targetWorkspaceName": workspace}
|
||||||
|
if new_name:
|
||||||
|
deploy_request["newDatabaseName"] = new_name
|
||||||
|
if tenant:
|
||||||
|
deploy_request["targetTenantName"] = tenant
|
||||||
|
|
||||||
|
run_tool(ctx, "database_operations", {
|
||||||
|
"operation": "DeployToFabric",
|
||||||
|
"deployToFabricRequest": deploy_request,
|
||||||
|
})
|
||||||
131
src/pbi_cli/commands/dax.py
Normal file
131
src/pbi_cli/commands/dax.py
Normal file
|
|
@ -0,0 +1,131 @@
|
||||||
|
"""DAX query commands: execute, validate, clear-cache."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.core.mcp_client import get_client
|
||||||
|
from pbi_cli.core.output import format_mcp_result, print_error, print_json
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
|
||||||
|
def dax() -> None:
|
||||||
|
"""Execute and validate DAX queries."""
|
||||||
|
|
||||||
|
|
||||||
|
@dax.command()
|
||||||
|
@click.argument("query", default="")
|
||||||
|
@click.option("--file", "-f", "query_file", type=click.Path(exists=True), help="Read query from file.")
|
||||||
|
@click.option("--max-rows", type=int, default=None, help="Maximum rows to return.")
|
||||||
|
@click.option("--metrics", is_flag=True, default=False, help="Include execution metrics.")
|
||||||
|
@click.option("--metrics-only", is_flag=True, default=False, help="Return metrics without row data.")
|
||||||
|
@click.option("--timeout", type=int, default=200, help="Query timeout in seconds.")
|
||||||
|
@pass_context
|
||||||
|
def execute(
|
||||||
|
ctx: PbiContext,
|
||||||
|
query: str,
|
||||||
|
query_file: str | None,
|
||||||
|
max_rows: int | None,
|
||||||
|
metrics: bool,
|
||||||
|
metrics_only: bool,
|
||||||
|
timeout: int,
|
||||||
|
) -> None:
|
||||||
|
"""Execute a DAX query.
|
||||||
|
|
||||||
|
Pass the query as an argument, via --file, or pipe it from stdin:
|
||||||
|
|
||||||
|
pbi dax execute "EVALUATE Sales"
|
||||||
|
|
||||||
|
pbi dax execute --file query.dax
|
||||||
|
|
||||||
|
cat query.dax | pbi dax execute -
|
||||||
|
"""
|
||||||
|
resolved_query = _resolve_query(query, query_file)
|
||||||
|
if not resolved_query:
|
||||||
|
print_error("No query provided. Pass as argument, --file, or stdin.")
|
||||||
|
raise SystemExit(1)
|
||||||
|
|
||||||
|
request: dict = {
|
||||||
|
"operation": "Execute",
|
||||||
|
"query": resolved_query,
|
||||||
|
"timeoutSeconds": timeout,
|
||||||
|
"getExecutionMetrics": metrics or metrics_only,
|
||||||
|
"executionMetricsOnly": metrics_only,
|
||||||
|
}
|
||||||
|
if ctx.connection:
|
||||||
|
request["connectionName"] = ctx.connection
|
||||||
|
if max_rows is not None:
|
||||||
|
request["maxRows"] = max_rows
|
||||||
|
|
||||||
|
client = get_client()
|
||||||
|
try:
|
||||||
|
result = client.call_tool("dax_query_operations", request)
|
||||||
|
format_mcp_result(result, ctx.json_output)
|
||||||
|
except Exception as e:
|
||||||
|
print_error(f"DAX execution failed: {e}")
|
||||||
|
raise SystemExit(1)
|
||||||
|
finally:
|
||||||
|
client.stop()
|
||||||
|
|
||||||
|
|
||||||
|
@dax.command()
|
||||||
|
@click.argument("query", default="")
|
||||||
|
@click.option("--file", "-f", "query_file", type=click.Path(exists=True), help="Read query from file.")
|
||||||
|
@click.option("--timeout", type=int, default=10, help="Validation timeout in seconds.")
|
||||||
|
@pass_context
|
||||||
|
def validate(ctx: PbiContext, query: str, query_file: str | None, timeout: int) -> None:
|
||||||
|
"""Validate a DAX query without executing it."""
|
||||||
|
resolved_query = _resolve_query(query, query_file)
|
||||||
|
if not resolved_query:
|
||||||
|
print_error("No query provided.")
|
||||||
|
raise SystemExit(1)
|
||||||
|
|
||||||
|
request: dict = {
|
||||||
|
"operation": "Validate",
|
||||||
|
"query": resolved_query,
|
||||||
|
"timeoutSeconds": timeout,
|
||||||
|
}
|
||||||
|
if ctx.connection:
|
||||||
|
request["connectionName"] = ctx.connection
|
||||||
|
|
||||||
|
client = get_client()
|
||||||
|
try:
|
||||||
|
result = client.call_tool("dax_query_operations", request)
|
||||||
|
format_mcp_result(result, ctx.json_output)
|
||||||
|
except Exception as e:
|
||||||
|
print_error(f"DAX validation failed: {e}")
|
||||||
|
raise SystemExit(1)
|
||||||
|
finally:
|
||||||
|
client.stop()
|
||||||
|
|
||||||
|
|
||||||
|
@dax.command(name="clear-cache")
|
||||||
|
@pass_context
|
||||||
|
def clear_cache(ctx: PbiContext) -> None:
|
||||||
|
"""Clear the DAX query cache."""
|
||||||
|
request: dict = {"operation": "ClearCache"}
|
||||||
|
if ctx.connection:
|
||||||
|
request["connectionName"] = ctx.connection
|
||||||
|
|
||||||
|
client = get_client()
|
||||||
|
try:
|
||||||
|
result = client.call_tool("dax_query_operations", request)
|
||||||
|
format_mcp_result(result, ctx.json_output)
|
||||||
|
except Exception as e:
|
||||||
|
print_error(f"Cache clear failed: {e}")
|
||||||
|
raise SystemExit(1)
|
||||||
|
finally:
|
||||||
|
client.stop()
|
||||||
|
|
||||||
|
|
||||||
|
def _resolve_query(query: str, query_file: str | None) -> str:
|
||||||
|
"""Resolve the DAX query from argument, file, or stdin."""
|
||||||
|
if query == "-":
|
||||||
|
return sys.stdin.read().strip()
|
||||||
|
if query_file:
|
||||||
|
with open(query_file, encoding="utf-8") as f:
|
||||||
|
return f.read().strip()
|
||||||
|
return query.strip()
|
||||||
64
src/pbi_cli/commands/expression.py
Normal file
64
src/pbi_cli/commands/expression.py
Normal file
|
|
@ -0,0 +1,64 @@
|
||||||
|
"""Named expression and parameter commands."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import build_definition, run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
|
||||||
|
def expression() -> None:
|
||||||
|
"""Manage named expressions and parameters."""
|
||||||
|
|
||||||
|
|
||||||
|
@expression.command(name="list")
|
||||||
|
@pass_context
|
||||||
|
def expression_list(ctx: PbiContext) -> None:
|
||||||
|
"""List all named expressions."""
|
||||||
|
run_tool(ctx, "named_expression_operations", {"operation": "List"})
|
||||||
|
|
||||||
|
|
||||||
|
@expression.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@pass_context
|
||||||
|
def get(ctx: PbiContext, name: str) -> None:
|
||||||
|
"""Get a named expression."""
|
||||||
|
run_tool(ctx, "named_expression_operations", {"operation": "Get", "name": name})
|
||||||
|
|
||||||
|
|
||||||
|
@expression.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--expression", "-e", required=True, help="M expression.")
|
||||||
|
@click.option("--description", default=None, help="Expression description.")
|
||||||
|
@pass_context
|
||||||
|
def create(ctx: PbiContext, name: str, expression: str, description: str | None) -> None:
|
||||||
|
"""Create a named expression."""
|
||||||
|
definition = build_definition(
|
||||||
|
required={"name": name, "expression": expression},
|
||||||
|
optional={"description": description},
|
||||||
|
)
|
||||||
|
run_tool(ctx, "named_expression_operations", {"operation": "Create", "definitions": [definition]})
|
||||||
|
|
||||||
|
|
||||||
|
@expression.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@pass_context
|
||||||
|
def delete(ctx: PbiContext, name: str) -> None:
|
||||||
|
"""Delete a named expression."""
|
||||||
|
run_tool(ctx, "named_expression_operations", {"operation": "Delete", "name": name})
|
||||||
|
|
||||||
|
|
||||||
|
@expression.command(name="create-param")
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--expression", "-e", required=True, help="Default value expression.")
|
||||||
|
@click.option("--description", default=None, help="Parameter description.")
|
||||||
|
@pass_context
|
||||||
|
def create_param(ctx: PbiContext, name: str, expression: str, description: str | None) -> None:
|
||||||
|
"""Create a model parameter."""
|
||||||
|
definition = build_definition(
|
||||||
|
required={"name": name, "expression": expression},
|
||||||
|
optional={"description": description},
|
||||||
|
)
|
||||||
|
run_tool(ctx, "named_expression_operations", {"operation": "CreateParameter", "definitions": [definition]})
|
||||||
56
src/pbi_cli/commands/hierarchy.py
Normal file
56
src/pbi_cli/commands/hierarchy.py
Normal file
|
|
@ -0,0 +1,56 @@
|
||||||
|
"""User hierarchy commands."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import build_definition, run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
|
||||||
|
def hierarchy() -> None:
|
||||||
|
"""Manage user hierarchies."""
|
||||||
|
|
||||||
|
|
||||||
|
@hierarchy.command(name="list")
|
||||||
|
@click.option("--table", "-t", default=None, help="Filter by table.")
|
||||||
|
@pass_context
|
||||||
|
def hierarchy_list(ctx: PbiContext, table: str | None) -> None:
|
||||||
|
"""List hierarchies."""
|
||||||
|
request: dict = {"operation": "List"}
|
||||||
|
if table:
|
||||||
|
request["tableName"] = table
|
||||||
|
run_tool(ctx, "user_hierarchy_operations", request)
|
||||||
|
|
||||||
|
|
||||||
|
@hierarchy.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--table", "-t", required=True, help="Table name.")
|
||||||
|
@pass_context
|
||||||
|
def get(ctx: PbiContext, name: str, table: str) -> None:
|
||||||
|
"""Get hierarchy details."""
|
||||||
|
run_tool(ctx, "user_hierarchy_operations", {"operation": "Get", "name": name, "tableName": table})
|
||||||
|
|
||||||
|
|
||||||
|
@hierarchy.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--table", "-t", required=True, help="Table name.")
|
||||||
|
@click.option("--description", default=None, help="Hierarchy description.")
|
||||||
|
@pass_context
|
||||||
|
def create(ctx: PbiContext, name: str, table: str, description: str | None) -> None:
|
||||||
|
"""Create a hierarchy."""
|
||||||
|
definition = build_definition(
|
||||||
|
required={"name": name, "tableName": table},
|
||||||
|
optional={"description": description},
|
||||||
|
)
|
||||||
|
run_tool(ctx, "user_hierarchy_operations", {"operation": "Create", "definitions": [definition]})
|
||||||
|
|
||||||
|
|
||||||
|
@hierarchy.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--table", "-t", required=True, help="Table name.")
|
||||||
|
@pass_context
|
||||||
|
def delete(ctx: PbiContext, name: str, table: str) -> None:
|
||||||
|
"""Delete a hierarchy."""
|
||||||
|
run_tool(ctx, "user_hierarchy_operations", {"operation": "Delete", "name": name, "tableName": table})
|
||||||
169
src/pbi_cli/commands/measure.py
Normal file
169
src/pbi_cli/commands/measure.py
Normal file
|
|
@ -0,0 +1,169 @@
|
||||||
|
"""Measure CRUD commands."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import build_definition, run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
|
||||||
|
def measure() -> None:
|
||||||
|
"""Manage measures in a semantic model."""
|
||||||
|
|
||||||
|
|
||||||
|
@measure.command(name="list")
|
||||||
|
@click.option("--table", "-t", default=None, help="Filter by table name.")
|
||||||
|
@pass_context
|
||||||
|
def measure_list(ctx: PbiContext, table: str | None) -> None:
|
||||||
|
"""List all measures."""
|
||||||
|
request: dict = {"operation": "List"}
|
||||||
|
if table:
|
||||||
|
request["tableName"] = table
|
||||||
|
run_tool(ctx, "measure_operations", request)
|
||||||
|
|
||||||
|
|
||||||
|
@measure.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--table", "-t", required=True, help="Table containing the measure.")
|
||||||
|
@pass_context
|
||||||
|
def get(ctx: PbiContext, name: str, table: str) -> None:
|
||||||
|
"""Get details of a specific measure."""
|
||||||
|
run_tool(ctx, "measure_operations", {
|
||||||
|
"operation": "Get",
|
||||||
|
"name": name,
|
||||||
|
"tableName": table,
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@measure.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--expression", "-e", required=True, help="DAX expression (use - for stdin).")
|
||||||
|
@click.option("--table", "-t", required=True, help="Target table.")
|
||||||
|
@click.option("--format-string", default=None, help='Format string (e.g., "$#,##0").')
|
||||||
|
@click.option("--description", default=None, help="Measure description.")
|
||||||
|
@click.option("--folder", default=None, help="Display folder path.")
|
||||||
|
@click.option("--hidden", is_flag=True, default=False, help="Hide from client tools.")
|
||||||
|
@pass_context
|
||||||
|
def create(
|
||||||
|
ctx: PbiContext,
|
||||||
|
name: str,
|
||||||
|
expression: str,
|
||||||
|
table: str,
|
||||||
|
format_string: str | None,
|
||||||
|
description: str | None,
|
||||||
|
folder: str | None,
|
||||||
|
hidden: bool,
|
||||||
|
) -> None:
|
||||||
|
"""Create a new measure."""
|
||||||
|
if expression == "-":
|
||||||
|
expression = sys.stdin.read().strip()
|
||||||
|
|
||||||
|
definition = build_definition(
|
||||||
|
required={"name": name, "expression": expression, "tableName": table},
|
||||||
|
optional={
|
||||||
|
"formatString": format_string,
|
||||||
|
"description": description,
|
||||||
|
"displayFolder": folder,
|
||||||
|
"isHidden": hidden if hidden else None,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
run_tool(ctx, "measure_operations", {
|
||||||
|
"operation": "Create",
|
||||||
|
"definitions": [definition],
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@measure.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--table", "-t", required=True, help="Table containing the measure.")
|
||||||
|
@click.option("--expression", "-e", default=None, help="New DAX expression.")
|
||||||
|
@click.option("--format-string", default=None, help="New format string.")
|
||||||
|
@click.option("--description", default=None, help="New description.")
|
||||||
|
@click.option("--folder", default=None, help="New display folder.")
|
||||||
|
@pass_context
|
||||||
|
def update(
|
||||||
|
ctx: PbiContext,
|
||||||
|
name: str,
|
||||||
|
table: str,
|
||||||
|
expression: str | None,
|
||||||
|
format_string: str | None,
|
||||||
|
description: str | None,
|
||||||
|
folder: str | None,
|
||||||
|
) -> None:
|
||||||
|
"""Update an existing measure."""
|
||||||
|
if expression == "-":
|
||||||
|
expression = sys.stdin.read().strip()
|
||||||
|
|
||||||
|
definition = build_definition(
|
||||||
|
required={"name": name, "tableName": table},
|
||||||
|
optional={
|
||||||
|
"expression": expression,
|
||||||
|
"formatString": format_string,
|
||||||
|
"description": description,
|
||||||
|
"displayFolder": folder,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
run_tool(ctx, "measure_operations", {
|
||||||
|
"operation": "Update",
|
||||||
|
"definitions": [definition],
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@measure.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--table", "-t", required=True, help="Table containing the measure.")
|
||||||
|
@pass_context
|
||||||
|
def delete(ctx: PbiContext, name: str, table: str) -> None:
|
||||||
|
"""Delete a measure."""
|
||||||
|
run_tool(ctx, "measure_operations", {
|
||||||
|
"operation": "Delete",
|
||||||
|
"name": name,
|
||||||
|
"tableName": table,
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@measure.command()
|
||||||
|
@click.argument("old_name")
|
||||||
|
@click.argument("new_name")
|
||||||
|
@click.option("--table", "-t", required=True, help="Table containing the measure.")
|
||||||
|
@pass_context
|
||||||
|
def rename(ctx: PbiContext, old_name: str, new_name: str, table: str) -> None:
|
||||||
|
"""Rename a measure."""
|
||||||
|
run_tool(ctx, "measure_operations", {
|
||||||
|
"operation": "Rename",
|
||||||
|
"name": old_name,
|
||||||
|
"newName": new_name,
|
||||||
|
"tableName": table,
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@measure.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--table", "-t", required=True, help="Source table.")
|
||||||
|
@click.option("--to-table", required=True, help="Destination table.")
|
||||||
|
@pass_context
|
||||||
|
def move(ctx: PbiContext, name: str, table: str, to_table: str) -> None:
|
||||||
|
"""Move a measure to a different table."""
|
||||||
|
run_tool(ctx, "measure_operations", {
|
||||||
|
"operation": "Move",
|
||||||
|
"name": name,
|
||||||
|
"tableName": table,
|
||||||
|
"destinationTableName": to_table,
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@measure.command(name="export-tmdl")
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--table", "-t", required=True, help="Table containing the measure.")
|
||||||
|
@pass_context
|
||||||
|
def export_tmdl(ctx: PbiContext, name: str, table: str) -> None:
|
||||||
|
"""Export a measure as TMDL."""
|
||||||
|
run_tool(ctx, "measure_operations", {
|
||||||
|
"operation": "ExportTMDL",
|
||||||
|
"name": name,
|
||||||
|
"tableName": table,
|
||||||
|
})
|
||||||
50
src/pbi_cli/commands/model.py
Normal file
50
src/pbi_cli/commands/model.py
Normal file
|
|
@ -0,0 +1,50 @@
|
||||||
|
"""Model-level operations."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
|
||||||
|
def model() -> None:
|
||||||
|
"""Manage the semantic model."""
|
||||||
|
|
||||||
|
|
||||||
|
@model.command()
|
||||||
|
@pass_context
|
||||||
|
def get(ctx: PbiContext) -> None:
|
||||||
|
"""Get model metadata."""
|
||||||
|
run_tool(ctx, "model_operations", {"operation": "Get"})
|
||||||
|
|
||||||
|
|
||||||
|
@model.command()
|
||||||
|
@pass_context
|
||||||
|
def stats(ctx: PbiContext) -> None:
|
||||||
|
"""Get model statistics."""
|
||||||
|
run_tool(ctx, "model_operations", {"operation": "GetStats"})
|
||||||
|
|
||||||
|
|
||||||
|
@model.command()
|
||||||
|
@click.option("--type", "refresh_type", type=click.Choice(["Automatic", "Full", "Calculate", "DataOnly", "Defragment"]), default="Automatic", help="Refresh type.")
|
||||||
|
@pass_context
|
||||||
|
def refresh(ctx: PbiContext, refresh_type: str) -> None:
|
||||||
|
"""Refresh the model."""
|
||||||
|
run_tool(ctx, "model_operations", {"operation": "Refresh", "refreshType": refresh_type})
|
||||||
|
|
||||||
|
|
||||||
|
@model.command()
|
||||||
|
@click.argument("new_name")
|
||||||
|
@pass_context
|
||||||
|
def rename(ctx: PbiContext, new_name: str) -> None:
|
||||||
|
"""Rename the model."""
|
||||||
|
run_tool(ctx, "model_operations", {"operation": "Rename", "newName": new_name})
|
||||||
|
|
||||||
|
|
||||||
|
@model.command(name="export-tmdl")
|
||||||
|
@pass_context
|
||||||
|
def export_tmdl(ctx: PbiContext) -> None:
|
||||||
|
"""Export the model as TMDL."""
|
||||||
|
run_tool(ctx, "model_operations", {"operation": "ExportTMDL"})
|
||||||
54
src/pbi_cli/commands/partition.py
Normal file
54
src/pbi_cli/commands/partition.py
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
"""Partition management commands."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import build_definition, run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
|
||||||
|
def partition() -> None:
|
||||||
|
"""Manage table partitions."""
|
||||||
|
|
||||||
|
|
||||||
|
@partition.command(name="list")
|
||||||
|
@click.option("--table", "-t", required=True, help="Table name.")
|
||||||
|
@pass_context
|
||||||
|
def partition_list(ctx: PbiContext, table: str) -> None:
|
||||||
|
"""List partitions in a table."""
|
||||||
|
run_tool(ctx, "partition_operations", {"operation": "List", "tableName": table})
|
||||||
|
|
||||||
|
|
||||||
|
@partition.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--table", "-t", required=True, help="Table name.")
|
||||||
|
@click.option("--expression", "-e", default=None, help="M/Power Query expression.")
|
||||||
|
@click.option("--mode", type=click.Choice(["Import", "DirectQuery", "Dual"]), default=None)
|
||||||
|
@pass_context
|
||||||
|
def create(ctx: PbiContext, name: str, table: str, expression: str | None, mode: str | None) -> None:
|
||||||
|
"""Create a partition."""
|
||||||
|
definition = build_definition(
|
||||||
|
required={"name": name, "tableName": table},
|
||||||
|
optional={"expression": expression, "mode": mode},
|
||||||
|
)
|
||||||
|
run_tool(ctx, "partition_operations", {"operation": "Create", "definitions": [definition]})
|
||||||
|
|
||||||
|
|
||||||
|
@partition.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--table", "-t", required=True, help="Table name.")
|
||||||
|
@pass_context
|
||||||
|
def delete(ctx: PbiContext, name: str, table: str) -> None:
|
||||||
|
"""Delete a partition."""
|
||||||
|
run_tool(ctx, "partition_operations", {"operation": "Delete", "name": name, "tableName": table})
|
||||||
|
|
||||||
|
|
||||||
|
@partition.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--table", "-t", required=True, help="Table name.")
|
||||||
|
@pass_context
|
||||||
|
def refresh(ctx: PbiContext, name: str, table: str) -> None:
|
||||||
|
"""Refresh a partition."""
|
||||||
|
run_tool(ctx, "partition_operations", {"operation": "Refresh", "name": name, "tableName": table})
|
||||||
38
src/pbi_cli/commands/perspective.py
Normal file
38
src/pbi_cli/commands/perspective.py
Normal file
|
|
@ -0,0 +1,38 @@
|
||||||
|
"""Perspective management commands."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import build_definition, run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
|
||||||
|
def perspective() -> None:
|
||||||
|
"""Manage model perspectives."""
|
||||||
|
|
||||||
|
|
||||||
|
@perspective.command(name="list")
|
||||||
|
@pass_context
|
||||||
|
def perspective_list(ctx: PbiContext) -> None:
|
||||||
|
"""List all perspectives."""
|
||||||
|
run_tool(ctx, "perspective_operations", {"operation": "List"})
|
||||||
|
|
||||||
|
|
||||||
|
@perspective.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--description", default=None, help="Perspective description.")
|
||||||
|
@pass_context
|
||||||
|
def create(ctx: PbiContext, name: str, description: str | None) -> None:
|
||||||
|
"""Create a perspective."""
|
||||||
|
definition = build_definition(required={"name": name}, optional={"description": description})
|
||||||
|
run_tool(ctx, "perspective_operations", {"operation": "Create", "definitions": [definition]})
|
||||||
|
|
||||||
|
|
||||||
|
@perspective.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@pass_context
|
||||||
|
def delete(ctx: PbiContext, name: str) -> None:
|
||||||
|
"""Delete a perspective."""
|
||||||
|
run_tool(ctx, "perspective_operations", {"operation": "Delete", "name": name})
|
||||||
104
src/pbi_cli/commands/relationship.py
Normal file
104
src/pbi_cli/commands/relationship.py
Normal file
|
|
@ -0,0 +1,104 @@
|
||||||
|
"""Relationship management commands."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import build_definition, run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
|
||||||
|
def relationship() -> None:
|
||||||
|
"""Manage relationships between tables."""
|
||||||
|
|
||||||
|
|
||||||
|
@relationship.command(name="list")
|
||||||
|
@pass_context
|
||||||
|
def relationship_list(ctx: PbiContext) -> None:
|
||||||
|
"""List all relationships."""
|
||||||
|
run_tool(ctx, "relationship_operations", {"operation": "List"})
|
||||||
|
|
||||||
|
|
||||||
|
@relationship.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@pass_context
|
||||||
|
def get(ctx: PbiContext, name: str) -> None:
|
||||||
|
"""Get details of a specific relationship."""
|
||||||
|
run_tool(ctx, "relationship_operations", {"operation": "Get", "name": name})
|
||||||
|
|
||||||
|
|
||||||
|
@relationship.command()
|
||||||
|
@click.option("--name", "-n", default=None, help="Relationship name (auto-generated if omitted).")
|
||||||
|
@click.option("--from-table", required=True, help="Source (many-side) table.")
|
||||||
|
@click.option("--from-column", required=True, help="Source column.")
|
||||||
|
@click.option("--to-table", required=True, help="Target (one-side) table.")
|
||||||
|
@click.option("--to-column", required=True, help="Target column.")
|
||||||
|
@click.option("--cross-filter", type=click.Choice(["OneDirection", "BothDirections", "Automatic"]), default="OneDirection", help="Cross-filtering behavior.")
|
||||||
|
@click.option("--active/--inactive", default=True, help="Whether the relationship is active.")
|
||||||
|
@pass_context
|
||||||
|
def create(
|
||||||
|
ctx: PbiContext,
|
||||||
|
name: str | None,
|
||||||
|
from_table: str,
|
||||||
|
from_column: str,
|
||||||
|
to_table: str,
|
||||||
|
to_column: str,
|
||||||
|
cross_filter: str,
|
||||||
|
active: bool,
|
||||||
|
) -> None:
|
||||||
|
"""Create a new relationship."""
|
||||||
|
definition = build_definition(
|
||||||
|
required={
|
||||||
|
"fromTable": from_table,
|
||||||
|
"fromColumn": from_column,
|
||||||
|
"toTable": to_table,
|
||||||
|
"toColumn": to_column,
|
||||||
|
},
|
||||||
|
optional={
|
||||||
|
"name": name,
|
||||||
|
"crossFilteringBehavior": cross_filter,
|
||||||
|
"isActive": active,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
run_tool(ctx, "relationship_operations", {"operation": "Create", "definitions": [definition]})
|
||||||
|
|
||||||
|
|
||||||
|
@relationship.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@pass_context
|
||||||
|
def delete(ctx: PbiContext, name: str) -> None:
|
||||||
|
"""Delete a relationship."""
|
||||||
|
run_tool(ctx, "relationship_operations", {"operation": "Delete", "name": name})
|
||||||
|
|
||||||
|
|
||||||
|
@relationship.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@pass_context
|
||||||
|
def activate(ctx: PbiContext, name: str) -> None:
|
||||||
|
"""Activate a relationship."""
|
||||||
|
run_tool(ctx, "relationship_operations", {"operation": "Activate", "name": name})
|
||||||
|
|
||||||
|
|
||||||
|
@relationship.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@pass_context
|
||||||
|
def deactivate(ctx: PbiContext, name: str) -> None:
|
||||||
|
"""Deactivate a relationship."""
|
||||||
|
run_tool(ctx, "relationship_operations", {"operation": "Deactivate", "name": name})
|
||||||
|
|
||||||
|
|
||||||
|
@relationship.command()
|
||||||
|
@click.option("--table", "-t", required=True, help="Table to search for relationships.")
|
||||||
|
@pass_context
|
||||||
|
def find(ctx: PbiContext, table: str) -> None:
|
||||||
|
"""Find relationships involving a table."""
|
||||||
|
run_tool(ctx, "relationship_operations", {"operation": "Find", "tableName": table})
|
||||||
|
|
||||||
|
|
||||||
|
@relationship.command(name="export-tmdl")
|
||||||
|
@click.argument("name")
|
||||||
|
@pass_context
|
||||||
|
def export_tmdl(ctx: PbiContext, name: str) -> None:
|
||||||
|
"""Export a relationship as TMDL."""
|
||||||
|
run_tool(ctx, "relationship_operations", {"operation": "ExportTMDL", "name": name})
|
||||||
57
src/pbi_cli/commands/security.py
Normal file
57
src/pbi_cli/commands/security.py
Normal file
|
|
@ -0,0 +1,57 @@
|
||||||
|
"""Security role management commands."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import build_definition, run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group(name="security-role")
|
||||||
|
def security_role() -> None:
|
||||||
|
"""Manage security roles (RLS)."""
|
||||||
|
|
||||||
|
|
||||||
|
@security_role.command(name="list")
|
||||||
|
@pass_context
|
||||||
|
def role_list(ctx: PbiContext) -> None:
|
||||||
|
"""List all security roles."""
|
||||||
|
run_tool(ctx, "security_role_operations", {"operation": "List"})
|
||||||
|
|
||||||
|
|
||||||
|
@security_role.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@pass_context
|
||||||
|
def get(ctx: PbiContext, name: str) -> None:
|
||||||
|
"""Get details of a security role."""
|
||||||
|
run_tool(ctx, "security_role_operations", {"operation": "Get", "name": name})
|
||||||
|
|
||||||
|
|
||||||
|
@security_role.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--description", default=None, help="Role description.")
|
||||||
|
@pass_context
|
||||||
|
def create(ctx: PbiContext, name: str, description: str | None) -> None:
|
||||||
|
"""Create a new security role."""
|
||||||
|
definition = build_definition(
|
||||||
|
required={"name": name},
|
||||||
|
optional={"description": description},
|
||||||
|
)
|
||||||
|
run_tool(ctx, "security_role_operations", {"operation": "Create", "definitions": [definition]})
|
||||||
|
|
||||||
|
|
||||||
|
@security_role.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@pass_context
|
||||||
|
def delete(ctx: PbiContext, name: str) -> None:
|
||||||
|
"""Delete a security role."""
|
||||||
|
run_tool(ctx, "security_role_operations", {"operation": "Delete", "name": name})
|
||||||
|
|
||||||
|
|
||||||
|
@security_role.command(name="export-tmdl")
|
||||||
|
@click.argument("name")
|
||||||
|
@pass_context
|
||||||
|
def export_tmdl(ctx: PbiContext, name: str) -> None:
|
||||||
|
"""Export a security role as TMDL."""
|
||||||
|
run_tool(ctx, "security_role_operations", {"operation": "ExportTMDL", "name": name})
|
||||||
76
src/pbi_cli/commands/setup_cmd.py
Normal file
76
src/pbi_cli/commands/setup_cmd.py
Normal file
|
|
@ -0,0 +1,76 @@
|
||||||
|
"""pbi setup: download and manage the Power BI MCP binary."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.core.binary_manager import (
|
||||||
|
check_for_updates,
|
||||||
|
download_and_extract,
|
||||||
|
get_binary_info,
|
||||||
|
resolve_binary,
|
||||||
|
)
|
||||||
|
from pbi_cli.core.output import print_error, print_info, print_json, print_key_value, print_success
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.command()
@click.option("--version", "target_version", default=None, help="Specific version to install.")
@click.option("--check", is_flag=True, default=False, help="Check for updates without installing.")
@click.option("--info", is_flag=True, default=False, help="Show info about the current binary.")
@pass_context
def setup(ctx: PbiContext, target_version: str | None, check: bool, info: bool) -> None:
    """Download and set up the Power BI MCP server binary.

    Run this once after installing pbi-cli to download the binary.
    """
    # --info and --check are read-only modes; plain `pbi setup` installs.
    # --info wins if both flags are passed, matching their declared order.
    if info:
        _show_info(ctx.json_output)
    elif check:
        _check_updates(ctx.json_output)
    else:
        _install(target_version, ctx.json_output)
|
||||||
|
|
||||||
|
|
||||||
|
def _show_info(json_output: bool) -> None:
    """Print binary info, as JSON (for agents) or a key/value table."""
    details = get_binary_info()
    if json_output:
        print_json(details)
        return
    print_key_value("Power BI MCP Binary", details)
|
||||||
|
|
||||||
|
|
||||||
|
def _check_updates(json_output: bool) -> None:
    """Check for available updates and report the result.

    Args:
        json_output: Emit a machine-readable dict instead of rich text.

    Exits with status 1 if the marketplace query fails.
    """
    # Keep the try body minimal: only the network call can reasonably fail.
    try:
        installed, latest, update_available = check_for_updates()
    except Exception as e:
        print_error(f"Failed to check for updates: {e}")
        # Chain the cause so tracebacks show why the exit happened.
        raise SystemExit(1) from e

    result = {
        "installed_version": installed,
        "latest_version": latest,
        "update_available": update_available,
    }
    if json_output:
        print_json(result)
    elif update_available:
        print_info(f"Update available: {installed} -> {latest}")
        print_info("Run 'pbi setup' to update.")
    else:
        print_success(f"Up to date: v{installed}")
|
||||||
|
|
||||||
|
|
||||||
|
def _install(version: str | None, json_output: bool) -> None:
    """Download and install the binary, exiting with status 1 on failure.

    Args:
        version: Specific version to install, or None for the latest.
        json_output: Emit a machine-readable result instead of rich text.
    """
    # Only the download itself belongs in the try; the JSON echo should
    # not be able to mask a successful install as a failure.
    try:
        bin_path = download_and_extract(version)
    except Exception as e:
        print_error(f"Setup failed: {e}")
        # Chain the cause so tracebacks show why the exit happened.
        raise SystemExit(1) from e

    if json_output:
        print_json({"binary_path": str(bin_path), "status": "installed"})
|
||||||
135
src/pbi_cli/commands/table.py
Normal file
135
src/pbi_cli/commands/table.py
Normal file
|
|
@ -0,0 +1,135 @@
|
||||||
|
"""Table CRUD commands."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import build_definition, run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
def table() -> None:
    """Manage tables in a semantic model."""
    # Group container only: the subcommands registered below do the work.
|
||||||
|
|
||||||
|
|
||||||
|
@table.command(name="list")
@pass_context
def table_list(ctx: PbiContext) -> None:
    """List all tables."""
    request = {"operation": "List"}
    run_tool(ctx, "table_operations", request)
|
||||||
|
|
||||||
|
|
||||||
|
@table.command()
@click.argument("name")
@pass_context
def get(ctx: PbiContext, name: str) -> None:
    """Get details of a specific table."""
    request = {"operation": "Get", "name": name}
    run_tool(ctx, "table_operations", request)
|
||||||
|
|
||||||
|
|
||||||
|
@table.command()
@click.argument("name")
@click.option("--mode", type=click.Choice(["Import", "DirectQuery", "Dual"]), default="Import", help="Table mode.")
@click.option("--m-expression", default=None, help="M/Power Query expression (use - for stdin).")
@click.option("--dax-expression", default=None, help="DAX expression for calculated tables.")
@click.option("--sql-query", default=None, help="SQL query for DirectQuery.")
@click.option("--description", default=None, help="Table description.")
@click.option("--hidden", is_flag=True, default=False, help="Hide from client tools.")
@pass_context
def create(
    ctx: PbiContext,
    name: str,
    mode: str,
    m_expression: str | None,
    dax_expression: str | None,
    sql_query: str | None,
    description: str | None,
    hidden: bool,
) -> None:
    """Create a new table."""
    # stdin can feed at most one expression: a second read() would silently
    # return "" because stdin is already exhausted, so reject that case.
    if m_expression == "-" and dax_expression == "-":
        raise click.UsageError(
            "Only one of --m-expression/--dax-expression may read from stdin (-)."
        )
    if m_expression == "-":
        m_expression = sys.stdin.read().strip()
    elif dax_expression == "-":
        dax_expression = sys.stdin.read().strip()

    definition = build_definition(
        required={"name": name},
        optional={
            "mode": mode,
            "mExpression": m_expression,
            "daxExpression": dax_expression,
            "sqlQuery": sql_query,
            "description": description,
            # Only send the flag when it is actually set.
            "isHidden": hidden if hidden else None,
        },
    )
    run_tool(ctx, "table_operations", {
        "operation": "Create",
        "definitions": [definition],
    })
|
||||||
|
|
||||||
|
|
||||||
|
@table.command()
@click.argument("name")
@pass_context
def delete(ctx: PbiContext, name: str) -> None:
    """Delete a table."""
    request = {"operation": "Delete", "name": name}
    run_tool(ctx, "table_operations", request)
|
||||||
|
|
||||||
|
|
||||||
|
@table.command()
@click.argument("name")
@click.option("--type", "refresh_type", type=click.Choice(["Full", "Automatic", "Calculate", "DataOnly"]), default="Automatic", help="Refresh type.")
@pass_context
def refresh(ctx: PbiContext, name: str, refresh_type: str) -> None:
    """Refresh a table."""
    request = {
        "operation": "Refresh",
        "name": name,
        "refreshType": refresh_type,
    }
    run_tool(ctx, "table_operations", request)
|
||||||
|
|
||||||
|
|
||||||
|
@table.command()
@click.argument("name")
@pass_context
def schema(ctx: PbiContext, name: str) -> None:
    """Get the schema of a table."""
    request = {"operation": "GetSchema", "name": name}
    run_tool(ctx, "table_operations", request)
|
||||||
|
|
||||||
|
|
||||||
|
@table.command(name="export-tmdl")
@click.argument("name")
@pass_context
def export_tmdl(ctx: PbiContext, name: str) -> None:
    """Export a table as TMDL."""
    request = {"operation": "ExportTMDL", "name": name}
    run_tool(ctx, "table_operations", request)
|
||||||
|
|
||||||
|
|
||||||
|
@table.command()
@click.argument("old_name")
@click.argument("new_name")
@pass_context
def rename(ctx: PbiContext, old_name: str, new_name: str) -> None:
    """Rename a table."""
    request = {
        "operation": "Rename",
        "name": old_name,
        "newName": new_name,
    }
    run_tool(ctx, "table_operations", request)
|
||||||
|
|
||||||
|
|
||||||
|
@table.command(name="mark-date")
@click.argument("name")
@click.option("--date-column", required=True, help="Date column to use.")
@pass_context
def mark_date_table(ctx: PbiContext, name: str, date_column: str) -> None:
    """Mark a table as a date table."""
    request = {
        "operation": "MarkAsDateTable",
        "name": name,
        "dateColumn": date_column,
    }
    run_tool(ctx, "table_operations", request)
|
||||||
42
src/pbi_cli/commands/trace.py
Normal file
42
src/pbi_cli/commands/trace.py
Normal file
|
|
@ -0,0 +1,42 @@
|
||||||
|
"""Diagnostic trace commands."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
def trace() -> None:
    """Manage diagnostic traces."""
    # Group container only: the subcommands registered below do the work.
|
||||||
|
|
||||||
|
|
||||||
|
@trace.command()
@pass_context
def start(ctx: PbiContext) -> None:
    """Start a diagnostic trace."""
    request = {"operation": "Start"}
    run_tool(ctx, "trace_operations", request)
|
||||||
|
|
||||||
|
|
||||||
|
@trace.command()
@pass_context
def stop(ctx: PbiContext) -> None:
    """Stop the active trace."""
    request = {"operation": "Stop"}
    run_tool(ctx, "trace_operations", request)
|
||||||
|
|
||||||
|
|
||||||
|
@trace.command()
@pass_context
def fetch(ctx: PbiContext) -> None:
    """Fetch trace events."""
    request = {"operation": "Fetch"}
    run_tool(ctx, "trace_operations", request)
|
||||||
|
|
||||||
|
|
||||||
|
@trace.command()
@click.argument("path", type=click.Path())
@pass_context
def export(ctx: PbiContext, path: str) -> None:
    """Export trace events to a file."""
    request = {"operation": "Export", "filePath": path}
    run_tool(ctx, "trace_operations", request)
|
||||||
42
src/pbi_cli/commands/transaction.py
Normal file
42
src/pbi_cli/commands/transaction.py
Normal file
|
|
@ -0,0 +1,42 @@
|
||||||
|
"""Transaction management commands."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli.commands._helpers import run_tool
|
||||||
|
from pbi_cli.main import PbiContext, pass_context
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
def transaction() -> None:
    """Manage explicit transactions."""
    # Group container only: the subcommands registered below do the work.
|
||||||
|
|
||||||
|
|
||||||
|
@transaction.command()
@pass_context
def begin(ctx: PbiContext) -> None:
    """Begin a new transaction."""
    request = {"operation": "Begin"}
    run_tool(ctx, "transaction_operations", request)
|
||||||
|
|
||||||
|
|
||||||
|
@transaction.command()
@click.argument("transaction_id", default="")
@pass_context
def commit(ctx: PbiContext, transaction_id: str) -> None:
    """Commit the active or specified transaction."""
    # An empty id means "commit the active transaction" server-side,
    # so only send transactionId when one was given.
    payload: dict = {"operation": "Commit"}
    if transaction_id:
        payload["transactionId"] = transaction_id
    run_tool(ctx, "transaction_operations", payload)
|
||||||
|
|
||||||
|
|
||||||
|
@transaction.command()
@click.argument("transaction_id", default="")
@pass_context
def rollback(ctx: PbiContext, transaction_id: str) -> None:
    """Rollback the active or specified transaction."""
    # Mirror `commit`: omit transactionId when targeting the active one.
    payload: dict = {"operation": "Rollback"}
    if transaction_id:
        payload["transactionId"] = transaction_id
    run_tool(ctx, "transaction_operations", payload)
|
||||||
1
src/pbi_cli/core/__init__.py
Normal file
1
src/pbi_cli/core/__init__.py
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
"""Core modules for pbi-cli."""
|
||||||
249
src/pbi_cli/core/binary_manager.py
Normal file
249
src/pbi_cli/core/binary_manager.py
Normal file
|
|
@ -0,0 +1,249 @@
|
||||||
|
"""Binary manager: download, extract, and resolve the Power BI MCP server binary.
|
||||||
|
|
||||||
|
The binary is a .NET executable distributed as part of a VS Code extension (VSIX).
|
||||||
|
This module handles downloading the VSIX from the VS Marketplace, extracting the
|
||||||
|
server binary, and resolving the binary path for the MCP client.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import tempfile
|
||||||
|
import zipfile
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
from pbi_cli.core.config import PBI_CLI_HOME, PbiConfig, ensure_home_dir, load_config, save_config
|
||||||
|
from pbi_cli.core.output import print_error, print_info, print_success, print_warning
|
||||||
|
from pbi_cli.utils.platform import (
|
||||||
|
binary_name,
|
||||||
|
detect_platform,
|
||||||
|
ensure_executable,
|
||||||
|
find_vscode_extension_binary,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
EXTENSION_ID = "analysis-services.powerbi-modeling-mcp"
|
||||||
|
PUBLISHER = "analysis-services"
|
||||||
|
EXTENSION_NAME = "powerbi-modeling-mcp"
|
||||||
|
|
||||||
|
MARKETPLACE_API = "https://marketplace.visualstudio.com/_apis/public/gallery/extensionquery"
|
||||||
|
VSIX_URL_TEMPLATE = (
|
||||||
|
"https://marketplace.visualstudio.com/_apis/public/gallery/publishers/"
|
||||||
|
"{publisher}/vsextensions/{extension}/{version}/vspackage"
|
||||||
|
"?targetPlatform={platform}"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_binary() -> Path:
    """Resolve the MCP server binary path using the priority chain.

    Resolution order:
      1. PBI_MCP_BINARY environment variable (hard error if set but stale)
      2. path recorded in config / managed install under ~/.pbi-cli/bin/
      3. VS Code extension fallback

    Raises:
        FileNotFoundError: If no binary is found anywhere.
    """
    env_override = os.environ.get("PBI_MCP_BINARY")
    if env_override:
        override_path = Path(env_override)
        if not override_path.exists():
            # An explicit override that is wrong should fail loudly, not
            # silently fall through to another source.
            raise FileNotFoundError(f"PBI_MCP_BINARY points to non-existent path: {env_override}")
        return override_path

    configured = load_config().binary_path
    if configured:
        configured_path = Path(configured)
        if configured_path.exists():
            return configured_path

    managed = _find_managed_binary()
    if managed is not None:
        return managed

    fallback = find_vscode_extension_binary()
    if fallback:
        print_info(f"Using VS Code extension binary: {fallback}")
        return fallback

    raise FileNotFoundError(
        "Power BI MCP binary not found. Run 'pbi setup' to download it, "
        "or set PBI_MCP_BINARY environment variable."
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _find_managed_binary() -> Path | None:
    """Return the newest managed binary under ~/.pbi-cli/bin/, or None.

    Version directories are compared numerically, so '0.10.0' ranks above
    '0.9.0' — a plain lexicographic sort (the previous behavior) would get
    that backwards. Non-numeric name pieces sort below numeric ones.
    """
    bin_dir = PBI_CLI_HOME / "bin"
    if not bin_dir.exists():
        return None

    def version_key(p: Path) -> tuple:
        # Map "1.2.3" -> ((1,1),(1,2),(1,3)); unparseable pieces -> (0,0).
        pieces = []
        for piece in p.name.split("."):
            try:
                pieces.append((1, int(piece)))
            except ValueError:
                pieces.append((0, 0))
        return tuple(pieces)

    candidates = (d for d in bin_dir.iterdir() if d.is_dir())
    for version_dir in sorted(candidates, key=version_key, reverse=True):
        candidate = version_dir / binary_name()
        if candidate.exists():
            return candidate
    return None
|
||||||
|
|
||||||
|
|
||||||
|
def query_latest_version() -> str:
    """Query the VS Marketplace for the latest extension version.

    Returns:
        The version string (e.g., '0.4.0').

    Raises:
        RuntimeError: If the marketplace response lacks the extension.
    """
    # flags=914 selects the response detail level the marketplace expects
    # for version listings (as used by the original implementation).
    query = {
        "filters": [
            {
                "criteria": [{"filterType": 7, "value": EXTENSION_ID}],
                "pageNumber": 1,
                "pageSize": 1,
            }
        ],
        "flags": 914,
    }
    headers = {
        "Content-Type": "application/json",
        "Accept": "application/json;api-version=6.1-preview.1",
    }

    with httpx.Client(timeout=30.0) as client:
        response = client.post(MARKETPLACE_API, json=query, headers=headers)
        response.raise_for_status()
        body = response.json()

    results = body.get("results", [])
    if not results:
        raise RuntimeError("No results from VS Marketplace query")

    extensions = results[0].get("extensions", [])
    if not extensions:
        raise RuntimeError(f"Extension {EXTENSION_ID} not found on VS Marketplace")

    versions = extensions[0].get("versions", [])
    if not versions:
        raise RuntimeError(f"No versions found for {EXTENSION_ID}")

    return versions[0]["version"]
|
||||||
|
|
||||||
|
|
||||||
|
def download_and_extract(version: str | None = None) -> Path:
    """Download the VSIX and extract the server binary.

    Args:
        version: Specific version to download. If None, queries latest.

    Returns:
        Path to the extracted binary.

    Raises:
        RuntimeError: If the VSIX lacks a server/ directory, contains an
            unsafe path, or the expected binary is missing after extraction.
    """
    if version is None:
        print_info("Querying VS Marketplace for latest version...")
        version = query_latest_version()

    target_platform = detect_platform()
    print_info(f"Downloading pbi-mcp v{version} for {target_platform}...")

    url = VSIX_URL_TEMPLATE.format(
        publisher=PUBLISHER,
        extension=EXTENSION_NAME,
        version=version,
        platform=target_platform,
    )

    dest_dir = ensure_home_dir() / "bin" / version
    dest_dir.mkdir(parents=True, exist_ok=True)
    dest_root = dest_dir.resolve()

    with tempfile.TemporaryDirectory() as tmp:
        vsix_path = Path(tmp) / "extension.vsix"

        # Stream the download in chunks with a simple percentage indicator.
        with httpx.Client(timeout=120.0, follow_redirects=True) as client:
            with client.stream("GET", url) as resp:
                resp.raise_for_status()
                total = int(resp.headers.get("content-length", 0))
                downloaded = 0
                with open(vsix_path, "wb") as f:
                    for chunk in resp.iter_bytes(chunk_size=8192):
                        f.write(chunk)
                        downloaded += len(chunk)
                        if total > 0:
                            pct = downloaded * 100 // total
                            print(f"\r  Downloading... {pct}%", end="", flush=True)
                print()

        print_info("Extracting server binary...")
        with zipfile.ZipFile(vsix_path, "r") as zf:
            server_prefix = "extension/server/"
            server_files = [n for n in zf.namelist() if n.startswith(server_prefix)]
            if not server_files:
                raise RuntimeError("No server/ directory found in VSIX package")

            for file_name in server_files:
                rel_path = file_name[len(server_prefix):]
                # Skip the prefix itself and explicit directory entries;
                # writing a directory entry as a file would block every
                # later file underneath that subdirectory.
                if not rel_path or file_name.endswith("/"):
                    continue
                target_path = dest_dir / rel_path
                # Guard against zip-slip: refuse entries escaping dest_dir.
                resolved = (dest_dir / rel_path).resolve()
                if dest_root != resolved and dest_root not in resolved.parents:
                    raise RuntimeError(f"Unsafe path in VSIX package: {file_name}")
                target_path.parent.mkdir(parents=True, exist_ok=True)
                with zf.open(file_name) as src, open(target_path, "wb") as dst:
                    shutil.copyfileobj(src, dst)

    bin_path = dest_dir / binary_name()
    if not bin_path.exists():
        raise RuntimeError(f"Binary not found after extraction: {bin_path}")

    ensure_executable(bin_path)

    # Record the install so resolve_binary() finds it without scanning.
    config = load_config().with_updates(
        binary_version=version,
        binary_path=str(bin_path),
    )
    save_config(config)

    print_success(f"Installed pbi-mcp v{version} at {dest_dir}")
    return bin_path
|
||||||
|
|
||||||
|
|
||||||
|
def check_for_updates() -> tuple[str, str, bool]:
    """Compare installed version with latest available.

    Returns (installed_version, latest_version, update_available).
    """
    installed = load_config().binary_version or "none"
    latest = query_latest_version()
    # Any mismatch (including "none") counts as an available update.
    return installed, latest, latest != installed
|
||||||
|
|
||||||
|
|
||||||
|
def get_binary_info() -> dict[str, str]:
    """Describe the binary that resolve_binary() would use right now."""
    platform = detect_platform()
    try:
        path = resolve_binary()
    except FileNotFoundError:
        # No binary anywhere in the priority chain: report placeholders.
        return {
            "binary_path": "not found",
            "version": "none",
            "platform": platform,
            "source": "none",
        }
    return {
        "binary_path": str(path),
        "version": load_config().binary_version or "unknown",
        "platform": platform,
        "source": _binary_source(path),
    }
|
||||||
|
|
||||||
|
|
||||||
|
def _binary_source(path: Path) -> str:
    """Classify where a resolved binary path came from.

    The env var takes precedence over path heuristics, mirroring the
    resolution order in resolve_binary().
    """
    if "PBI_MCP_BINARY" in os.environ:
        return "environment variable (PBI_MCP_BINARY)"
    text = str(path)
    if ".pbi-cli" in text:
        return "managed (pbi setup)"
    if ".vscode" in text:
        return "VS Code extension (fallback)"
    return "unknown"
|
||||||
61
src/pbi_cli/core/config.py
Normal file
61
src/pbi_cli/core/config.py
Normal file
|
|
@ -0,0 +1,61 @@
|
||||||
|
"""Configuration management for pbi-cli.
|
||||||
|
|
||||||
|
Manages ~/.pbi-cli/config.json for binary paths, versions, and preferences.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
from dataclasses import asdict, dataclass, field
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
PBI_CLI_HOME = Path.home() / ".pbi-cli"
|
||||||
|
CONFIG_FILE = PBI_CLI_HOME / "config.json"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class PbiConfig:
    """Immutable configuration persisted as ~/.pbi-cli/config.json."""

    binary_version: str = ""
    binary_path: str = ""
    default_connection: str = ""
    binary_args: list[str] = field(default_factory=lambda: ["--start", "--skipconfirmation"])

    def with_updates(self, **kwargs: object) -> PbiConfig:
        """Return a new config with the specified fields updated."""
        # Merge the current field values with the overrides, then rebuild.
        return PbiConfig(**{**asdict(self), **kwargs})
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_home_dir() -> Path:
    """Create ~/.pbi-cli/ if it does not exist. Returns the path."""
    home = PBI_CLI_HOME
    home.mkdir(parents=True, exist_ok=True)
    return home
|
||||||
|
|
||||||
|
|
||||||
|
def load_config() -> PbiConfig:
    """Load config from disk, falling back to defaults when the file is
    missing or unreadable (corrupt JSON)."""
    if not CONFIG_FILE.exists():
        return PbiConfig()
    default_args = ["--start", "--skipconfirmation"]
    try:
        raw = json.loads(CONFIG_FILE.read_text(encoding="utf-8"))
        return PbiConfig(
            binary_version=raw.get("binary_version", ""),
            binary_path=raw.get("binary_path", ""),
            default_connection=raw.get("default_connection", ""),
            binary_args=raw.get("binary_args", default_args),
        )
    except (json.JSONDecodeError, KeyError):
        # A broken config file should never stop the CLI from starting.
        return PbiConfig()
|
||||||
|
|
||||||
|
|
||||||
|
def save_config(config: PbiConfig) -> None:
    """Serialize *config* to ~/.pbi-cli/config.json (pretty-printed)."""
    ensure_home_dir()
    payload = json.dumps(asdict(config), indent=2, ensure_ascii=False)
    CONFIG_FILE.write_text(payload + "\n", encoding="utf-8")
|
||||||
89
src/pbi_cli/core/connection_store.py
Normal file
89
src/pbi_cli/core/connection_store.py
Normal file
|
|
@ -0,0 +1,89 @@
|
||||||
|
"""Persist named connections to ~/.pbi-cli/connections.json."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
from dataclasses import asdict, dataclass, field
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from pbi_cli.core.config import PBI_CLI_HOME, ensure_home_dir
|
||||||
|
|
||||||
|
|
||||||
|
CONNECTIONS_FILE = PBI_CLI_HOME / "connections.json"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class ConnectionInfo:
    """A saved connection to a Power BI instance."""

    # Unique key the connection is stored and looked up under.
    name: str
    # Address of the data source the MCP server should connect to.
    data_source: str
    # Initial database/catalog to select; "" when unspecified.
    initial_catalog: str = ""
    # Power BI workspace name; "" when unspecified.
    workspace_name: str = ""
    # Semantic model name within the workspace; "" when unspecified.
    semantic_model_name: str = ""
    # Tenant segment; "myorg" is the default tenant alias.
    tenant_name: str = "myorg"
    # Full connection string override; "" when the fields above are used.
    connection_string: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class ConnectionStore:
    """Immutable store of named connections.

    ``connections`` maps connection name -> ConnectionInfo. Using
    ``field(default_factory=dict)`` replaces the previous ``None`` sentinel
    plus ``object.__setattr__`` patching in ``__post_init__``: the
    annotation is now honest and every instance gets its own dict.
    """

    # Name of the most recently used connection ("" when none).
    last_used: str = ""
    # Saved connections keyed by name; fresh dict per instance.
    connections: dict[str, ConnectionInfo] = field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
|
def load_connections() -> ConnectionStore:
    """Read saved connections from disk; empty store if missing/corrupt."""
    if not CONNECTIONS_FILE.exists():
        return ConnectionStore()
    try:
        raw = json.loads(CONNECTIONS_FILE.read_text(encoding="utf-8"))
        # Re-key each entry through ConnectionInfo, forcing the stored
        # name to match its dict key.
        connections = {
            name: ConnectionInfo(name=name, **{k: v for k, v in data.items() if k != "name"})
            for name, data in raw.get("connections", {}).items()
        }
        return ConnectionStore(
            last_used=raw.get("last_used", ""),
            connections=connections,
        )
    except (json.JSONDecodeError, KeyError, TypeError):
        # Treat any malformed file as "no saved connections".
        return ConnectionStore()
|
||||||
|
|
||||||
|
|
||||||
|
def save_connections(store: ConnectionStore) -> None:
    """Persist *store* to ~/.pbi-cli/connections.json."""
    ensure_home_dir()
    payload = {
        "last_used": store.last_used,
        "connections": {name: asdict(info) for name, info in store.connections.items()},
    }
    text = json.dumps(payload, indent=2, ensure_ascii=False) + "\n"
    CONNECTIONS_FILE.write_text(text, encoding="utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
def add_connection(store: ConnectionStore, info: ConnectionInfo) -> ConnectionStore:
    """Return a new store with the connection added and set as last-used."""
    updated = {**store.connections, info.name: info}
    return ConnectionStore(last_used=info.name, connections=updated)
|
||||||
|
|
||||||
|
|
||||||
|
def remove_connection(store: ConnectionStore, name: str) -> ConnectionStore:
    """Return a new store without *name*; clear last-used if it matched."""
    remaining = {k: v for k, v in store.connections.items() if k != name}
    last = "" if store.last_used == name else store.last_used
    return ConnectionStore(last_used=last, connections=remaining)
|
||||||
|
|
||||||
|
|
||||||
|
def get_active_connection(store: ConnectionStore, override: str | None = None) -> ConnectionInfo | None:
    """Get the active connection: explicit override, or last-used.

    Returns None when neither an override nor a last-used name exists,
    or when the chosen name is not in the store.
    """
    chosen = override or store.last_used
    return store.connections.get(chosen) if chosen else None
|
||||||
254
src/pbi_cli/core/mcp_client.py
Normal file
254
src/pbi_cli/core/mcp_client.py
Normal file
|
|
@ -0,0 +1,254 @@
|
||||||
|
"""MCP client: communicates with the Power BI MCP server binary over stdio.
|
||||||
|
|
||||||
|
Uses the official `mcp` Python SDK to handle JSON-RPC framing and protocol
|
||||||
|
negotiation. Exposes a synchronous API for Click commands while managing
|
||||||
|
an async event loop internally.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import atexit
|
||||||
|
import sys
|
||||||
|
from contextlib import asynccontextmanager
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from mcp import ClientSession
|
||||||
|
from mcp.client.stdio import StdioServerParameters, stdio_client
|
||||||
|
|
||||||
|
from pbi_cli.core.binary_manager import resolve_binary
|
||||||
|
from pbi_cli.core.config import load_config
|
||||||
|
|
||||||
|
|
||||||
|
class McpClientError(Exception):
    """Raised when the MCP server returns an error or the session is not initialized."""
|
||||||
|
|
||||||
|
|
||||||
|
class PbiMcpClient:
|
||||||
|
"""Synchronous wrapper around the async MCP stdio client.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
client = PbiMcpClient()
|
||||||
|
result = client.call_tool("measure_operations", {
|
||||||
|
"operation": "List",
|
||||||
|
"connectionName": "my-conn",
|
||||||
|
})
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
    self,
    binary_path: str | Path | None = None,
    args: list[str] | None = None,
) -> None:
    """Create a client; nothing is spawned until start() / first call.

    Args:
        binary_path: Explicit server binary; None defers to resolve_binary().
        args: Explicit server args; None defers to the saved config.
    """
    self._binary_path = str(binary_path) if binary_path else None
    self._args = args
    # Private event loop driving the async MCP SDK from sync code.
    self._loop: asyncio.AbstractEventLoop | None = None
    self._session: ClientSession | None = None
    # Async context managers entered during start(), unwound by stop().
    self._cleanup_stack: Any = None
    self._started = False
|
||||||
|
|
||||||
|
def _resolve_binary(self) -> str:
    """Return the explicit binary path, else resolve one lazily."""
    explicit = self._binary_path
    if explicit:
        return explicit
    return str(resolve_binary())
|
||||||
|
|
||||||
|
def _resolve_args(self) -> list[str]:
    """Use explicit args when given, otherwise the configured defaults."""
    if self._args is None:
        # Copy so callers can't mutate the config's list in place.
        return list(load_config().binary_args)
    return self._args
|
||||||
|
|
||||||
|
def _ensure_loop(self) -> asyncio.AbstractEventLoop:
    """Return a usable event loop, creating one if absent or closed."""
    loop = self._loop
    if loop is None or loop.is_closed():
        loop = asyncio.new_event_loop()
        self._loop = loop
    return loop
|
||||||
|
|
||||||
|
def start(self) -> None:
    """Start the MCP server process and initialize the session.

    Idempotent: a second call is a no-op while the client is running.
    """
    if self._started:
        return

    loop = self._ensure_loop()
    loop.run_until_complete(self._async_start())
    self._started = True
    # Guarantee teardown of the child process even if the caller never
    # calls stop() explicitly; stop() guards against double invocation.
    atexit.register(self.stop)
|
||||||
|
|
||||||
|
async def _async_start(self) -> None:
    """Async startup: spawn the server and initialize MCP session."""
    binary = self._resolve_binary()
    args = self._resolve_args()

    server_params = StdioServerParameters(
        command=binary,
        args=args,
    )

    # Create the stdio transport. _enter_context records each async
    # context manager so stop() can unwind them in reverse order later.
    self._read_stream, self._write_stream = await self._enter_context(
        stdio_client(server_params)
    )

    # Create and initialize the MCP session over that transport.
    self._session = await self._enter_context(
        ClientSession(self._read_stream, self._write_stream)
    )

    # MCP protocol handshake must complete before any tool call.
    await self._session.initialize()
|
||||||
|
|
||||||
|
async def _enter_context(self, cm: Any) -> Any:
    """Enter *cm* and remember it so stop() can unwind it later."""
    stack = self._cleanup_stack
    if stack is None:
        stack = []
        self._cleanup_stack = stack
    entered = await cm.__aenter__()
    stack.append(cm)
    return entered
|
||||||
|
|
||||||
|
def call_tool(self, tool_name: str, request: dict[str, Any]) -> Any:
    """Call an MCP tool synchronously.

    Args:
        tool_name: The MCP tool name (e.g., "measure_operations").
        request: The request dict (will be wrapped as {"request": request}).

    Returns:
        The parsed result from the MCP server.

    Raises:
        McpClientError: If the server returns an error.
    """
    # Lazily spawn the server on first use so constructing a client is cheap.
    if not self._started:
        self.start()

    loop = self._ensure_loop()
    return loop.run_until_complete(self._async_call_tool(tool_name, request))
|
||||||
|
|
||||||
|
async def _async_call_tool(self, tool_name: str, request: dict[str, Any]) -> Any:
    """Execute a tool call via the MCP session.

    Raises:
        McpClientError: If the session is missing or the server reports
            an error result.
    """
    session = self._session
    if session is None:
        raise McpClientError("MCP session not initialized. Call start() first.")

    outcome = await session.call_tool(
        tool_name,
        arguments={"request": request},
    )

    if outcome.isError:
        raise McpClientError(f"MCP tool error: {_extract_text(outcome.content)}")

    return _parse_content(outcome.content)
|
||||||
|
|
||||||
|
def list_tools(self) -> list[dict[str, Any]]:
    """List all available MCP tools, starting the server if necessary."""
    if not self._started:
        self.start()

    event_loop = self._ensure_loop()
    return event_loop.run_until_complete(self._async_list_tools())
|
||||||
|
|
||||||
|
async def _async_list_tools(self) -> list[dict[str, Any]]:
|
||||||
|
"""List tools from the MCP session."""
|
||||||
|
if self._session is None:
|
||||||
|
raise McpClientError("MCP session not initialized.")
|
||||||
|
|
||||||
|
result = await self._session.list_tools()
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"name": tool.name,
|
||||||
|
"description": tool.description or "",
|
||||||
|
}
|
||||||
|
for tool in result.tools
|
||||||
|
]
|
||||||
|
|
||||||
|
def stop(self) -> None:
    """Shut down the MCP server process (no-op when not started)."""
    if not self._started:
        return

    self._ensure_loop().run_until_complete(self._async_stop())
    self._started = False
    self._session = None
|
||||||
|
|
||||||
|
async def _async_stop(self) -> None:
|
||||||
|
"""Clean up all async context managers in reverse order."""
|
||||||
|
if self._cleanup_stack:
|
||||||
|
for cm in reversed(self._cleanup_stack):
|
||||||
|
try:
|
||||||
|
await cm.__aexit__(None, None, None)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
self._cleanup_stack = []
|
||||||
|
|
||||||
|
def __del__(self) -> None:
    """Best-effort shutdown when the client is garbage collected."""
    try:
        self.stop()
    except Exception:
        # The interpreter may already be tearing down (modules gone,
        # event loop closed) — swallow everything during finalization.
        pass
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_text(content: Any) -> str:
|
||||||
|
"""Extract text from MCP content blocks."""
|
||||||
|
if isinstance(content, list):
|
||||||
|
parts = []
|
||||||
|
for block in content:
|
||||||
|
if hasattr(block, "text"):
|
||||||
|
parts.append(block.text)
|
||||||
|
return "\n".join(parts) if parts else str(content)
|
||||||
|
return str(content)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_content(content: Any) -> Any:
|
||||||
|
"""Parse MCP content blocks into Python data.
|
||||||
|
|
||||||
|
MCP returns content as a list of TextContent blocks. This function
|
||||||
|
tries to parse the text as JSON, falling back to raw text.
|
||||||
|
"""
|
||||||
|
import json
|
||||||
|
|
||||||
|
if isinstance(content, list):
|
||||||
|
texts = []
|
||||||
|
for block in content:
|
||||||
|
if hasattr(block, "text"):
|
||||||
|
texts.append(block.text)
|
||||||
|
|
||||||
|
if len(texts) == 1:
|
||||||
|
try:
|
||||||
|
return json.loads(texts[0])
|
||||||
|
except (json.JSONDecodeError, ValueError):
|
||||||
|
return texts[0]
|
||||||
|
|
||||||
|
combined = "\n".join(texts)
|
||||||
|
try:
|
||||||
|
return json.loads(combined)
|
||||||
|
except (json.JSONDecodeError, ValueError):
|
||||||
|
return combined
|
||||||
|
|
||||||
|
return content
|
||||||
|
|
||||||
|
|
||||||
|
# Module-level singleton for REPL mode (keeps server alive across commands).
# Lazily instantiated by get_shared_client() below; one-shot callers get a
# fresh client instead (see get_client).
_shared_client: PbiMcpClient | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_shared_client() -> PbiMcpClient:
    """Get or create a shared MCP client instance.

    Returns the module-level singleton, creating it on first call. The
    client is started lazily by its own methods, not here.
    """
    global _shared_client
    if _shared_client is None:
        _shared_client = PbiMcpClient()
    return _shared_client
|
||||||
|
|
||||||
|
|
||||||
|
def get_client(repl_mode: bool = False) -> PbiMcpClient:
    """Get an MCP client.

    In REPL mode, returns a shared long-lived client.
    In one-shot mode, returns a fresh client (caller should stop() it).

    Args:
        repl_mode: When True, reuse the module-level singleton.
    """
    return get_shared_client() if repl_mode else PbiMcpClient()
|
||||||
89
src/pbi_cli/core/output.py
Normal file
89
src/pbi_cli/core/output.py
Normal file
|
|
@ -0,0 +1,89 @@
|
||||||
|
"""Dual-mode output formatter: JSON for agents, Rich tables for humans."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import sys
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from rich.console import Console
|
||||||
|
from rich.panel import Panel
|
||||||
|
from rich.table import Table
|
||||||
|
|
||||||
|
|
||||||
|
# Human-facing output goes to stdout via `console`; status/progress text
# goes to stderr via `error_console` so stdout stays clean for JSON
# consumers (agents piping the output).
console = Console()
error_console = Console(stderr=True)
|
||||||
|
|
||||||
|
|
||||||
|
def print_json(data: Any) -> None:
    """Print data as formatted JSON to stdout.

    Non-serializable values are stringified via ``default=str``.
    """
    serialized = json.dumps(data, indent=2, ensure_ascii=False, default=str)
    print(serialized)
|
||||||
|
|
||||||
|
|
||||||
|
def print_success(message: str) -> None:
    """Print a success message to stderr (keeps stdout clean for JSON)."""
    styled = f"[green]{message}[/green]"
    error_console.print(styled)
|
||||||
|
|
||||||
|
|
||||||
|
def print_error(message: str) -> None:
    """Print an error message to stderr with a red "Error:" prefix."""
    styled = f"[red]Error:[/red] {message}"
    error_console.print(styled)
|
||||||
|
|
||||||
|
|
||||||
|
def print_warning(message: str) -> None:
    """Print a warning message to stderr with a yellow "Warning:" prefix."""
    styled = f"[yellow]Warning:[/yellow] {message}"
    error_console.print(styled)
|
||||||
|
|
||||||
|
|
||||||
|
def print_info(message: str) -> None:
    """Print an informational message to stderr in blue."""
    styled = f"[blue]{message}[/blue]"
    error_console.print(styled)
|
||||||
|
|
||||||
|
|
||||||
|
def print_table(
    title: str,
    columns: list[str],
    rows: list[list[str]],
) -> None:
    """Print a Rich table to stdout.

    Args:
        title: Table caption.
        columns: Header labels, one per column.
        rows: Row data; each row must have one cell per column.
    """
    rendered = Table(title=title, show_header=True, header_style="bold cyan")
    for heading in columns:
        rendered.add_column(heading)
    for cells in rows:
        rendered.add_row(*cells)
    console.print(rendered)
|
||||||
|
|
||||||
|
|
||||||
|
def print_key_value(title: str, data: dict[str, Any]) -> None:
    """Print key-value pairs in a Rich panel, one pair per line."""
    body = "\n".join(f"[bold]{key}:[/bold] {value}" for key, value in data.items())
    console.print(Panel(body, title=title, border_style="cyan"))
|
||||||
|
|
||||||
|
|
||||||
|
def format_mcp_result(result: Any, json_output: bool) -> None:
    """Format and print an MCP tool result.

    In JSON mode, prints the raw result. In human mode, attempts to render
    a table or key-value display based on the shape of the data.

    Args:
        result: Parsed MCP payload (list, dict, or scalar).
        json_output: When True, emit raw JSON to stdout and return.
    """
    if json_output:
        print_json(result)
        return

    if isinstance(result, list):
        if not result:
            print_info("No results.")
            return
        # Fix: only render a table when EVERY item is a dict. Previously
        # only result[0] was checked, so a heterogeneous list crashed with
        # AttributeError on item.get() for non-dict items.
        if all(isinstance(item, dict) for item in result):
            columns = list(result[0].keys())
            rows = [[str(item.get(c, "")) for c in columns] for item in result]
            print_table("Results", columns, rows)
        else:
            for item in result:
                console.print(str(item))
    elif isinstance(result, dict):
        print_key_value("Result", result)
    else:
        console.print(str(result))
|
||||||
87
src/pbi_cli/main.py
Normal file
87
src/pbi_cli/main.py
Normal file
|
|
@ -0,0 +1,87 @@
|
||||||
|
"""Main CLI entry point for pbi-cli."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import sys
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from pbi_cli import __version__
|
||||||
|
|
||||||
|
|
||||||
|
class PbiContext:
    """Shared context passed to all CLI commands.

    Carries the global flags parsed by the root ``cli`` group:

    Attributes:
        json_output: When True, commands emit raw JSON for agent consumption.
        connection: Named connection to use, or None for the last-used one.
    """

    def __init__(self, json_output: bool = False, connection: str | None = None) -> None:
        self.json_output = json_output
        self.connection = connection
|
||||||
|
|
||||||
|
|
||||||
|
# Click decorator that injects the shared PbiContext into subcommand
# callbacks (ensure=True creates one if the root group did not).
pass_context = click.make_pass_decorator(PbiContext, ensure=True)
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
@click.option("--json", "json_output", is_flag=True, default=False, help="Output raw JSON for agent consumption.")
@click.option("--connection", "-c", default=None, help="Named connection to use (defaults to last-used).")
@click.version_option(version=__version__, prog_name="pbi-cli")
@click.pass_context
def cli(ctx: click.Context, json_output: bool, connection: str | None) -> None:
    """pbi-cli: Power BI semantic model CLI.

    Wraps the Power BI MCP server for token-efficient usage with
    Claude Code and other AI agents.

    Run 'pbi setup' first to download the Power BI MCP binary.
    """
    # Fix: the previous ctx.ensure_object(PbiContext) call was redundant —
    # the object it created was immediately discarded by this assignment.
    ctx.obj = PbiContext(json_output=json_output, connection=connection)
|
||||||
|
|
||||||
|
|
||||||
|
def _register_commands() -> None:
    """Lazily import and register all command groups.

    Imports live inside this function so that importing ``pbi_cli.main``
    stays cheap until the CLI actually runs.
    """
    from pbi_cli.commands.setup_cmd import setup
    from pbi_cli.commands.connection import connect, connect_fabric, disconnect, connections
    from pbi_cli.commands.dax import dax
    from pbi_cli.commands.measure import measure
    from pbi_cli.commands.table import table
    from pbi_cli.commands.column import column
    from pbi_cli.commands.relationship import relationship
    from pbi_cli.commands.model import model
    from pbi_cli.commands.database import database
    from pbi_cli.commands.security import security_role
    from pbi_cli.commands.calc_group import calc_group
    from pbi_cli.commands.partition import partition
    from pbi_cli.commands.perspective import perspective
    from pbi_cli.commands.hierarchy import hierarchy
    from pbi_cli.commands.expression import expression
    from pbi_cli.commands.calendar import calendar
    from pbi_cli.commands.trace import trace
    from pbi_cli.commands.transaction import transaction
    from pbi_cli.commands.advanced import advanced

    # Registration order matches the original add_command sequence.
    for command in (
        setup,
        connect,
        connect_fabric,
        disconnect,
        connections,
        dax,
        measure,
        table,
        column,
        relationship,
        model,
        database,
        security_role,
        calc_group,
        partition,
        perspective,
        hierarchy,
        expression,
        calendar,
        trace,
        transaction,
        advanced,
    ):
        cli.add_command(command)
|
||||||
|
|
||||||
|
|
||||||
|
# Register all command groups at import time so `pbi --help` lists them.
_register_commands()
|
||||||
1
src/pbi_cli/utils/__init__.py
Normal file
1
src/pbi_cli/utils/__init__.py
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
"""Utility modules for pbi-cli."""
|
||||||
83
src/pbi_cli/utils/platform.py
Normal file
83
src/pbi_cli/utils/platform.py
Normal file
|
|
@ -0,0 +1,83 @@
|
||||||
|
"""Platform and architecture detection for binary resolution."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import platform
|
||||||
|
import stat
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
# Maps (system, machine) to VS Marketplace target platform identifier.
PLATFORM_MAP: dict[tuple[str, str], str] = {
    ("Windows", "AMD64"): "win32-x64",
    ("Windows", "x86_64"): "win32-x64",
    ("Windows", "ARM64"): "win32-arm64",
    ("Darwin", "arm64"): "darwin-arm64",
    # Fix: Intel macOS was missing; the marketplace publishes darwin-x64.
    ("Darwin", "x86_64"): "darwin-x64",
    ("Linux", "x86_64"): "linux-x64",
    ("Linux", "aarch64"): "linux-arm64",
}

# Binary name per OS (Windows needs the .exe suffix).
BINARY_NAMES: dict[str, str] = {
    "Windows": "powerbi-modeling-mcp.exe",
    "Darwin": "powerbi-modeling-mcp",
    "Linux": "powerbi-modeling-mcp",
}
|
||||||
|
|
||||||
|
|
||||||
|
def detect_platform() -> str:
    """Return the VS Marketplace target platform string for this machine.

    Raises:
        ValueError: If the (system, machine) pair is not in PLATFORM_MAP.
    """
    key = (platform.system(), platform.machine())
    target = PLATFORM_MAP.get(key)
    if target is None:
        supported = ", ".join(f"{s}/{m}" for s, m in PLATFORM_MAP)
        raise ValueError(
            f"Unsupported platform: {key[0]}/{key[1]}. Supported: {supported}"
        )
    return target
|
||||||
|
|
||||||
|
|
||||||
|
def binary_name() -> str:
    """Return the expected binary filename for this OS.

    Raises:
        ValueError: If the OS is not in BINARY_NAMES.
    """
    current_os = platform.system()
    if current_os not in BINARY_NAMES:
        raise ValueError(f"Unsupported OS: {current_os}")
    return BINARY_NAMES[current_os]
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_executable(path: Path) -> None:
    """Set executable permission (user/group/other) on non-Windows systems."""
    if platform.system() == "Windows":
        return
    mode = path.stat().st_mode
    path.chmod(mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
|
||||||
|
|
||||||
|
|
||||||
|
def find_vscode_extension_binary() -> Path | None:
    """Look for the binary in the VS Code extension install directory.

    This is the fallback resolution path when the user has the VS Code
    extension installed but hasn't run 'pbi setup'.

    Returns:
        The path to the binary, or None when the extensions directory,
        a matching extension version, or the binary itself is missing.
    """
    ext_root = Path.home() / ".vscode" / "extensions"
    if not ext_root.exists():
        return None

    # reverse-sorted so the lexicographically newest version comes first.
    candidates = sorted(
        ext_root.glob("analysis-services.powerbi-modeling-mcp-*/server"),
        reverse=True,
    )
    if not candidates:
        return None

    candidate = candidates[0] / binary_name()
    return candidate if candidate.exists() else None
|
||||||
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
0
tests/mocks/__init__.py
Normal file
0
tests/mocks/__init__.py
Normal file
0
tests/test_commands/__init__.py
Normal file
0
tests/test_commands/__init__.py
Normal file
Loading…
Reference in a new issue