Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .changes/unreleased/added-20251209-141353.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
kind: added
body: Include API response data in the output.
time: 2025-12-09T14:13:53.030608891Z
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def exec(capacity: VirtualWorkspaceItem, args: Namespace) -> None:

response = capacity_api.create_capacity(args, payload=json_payload)
if response.status_code in (200, 201):
utils_ui.print_output_format(args, message=f"'{capacity.name}' created")
utils_ui.print_output_format(args, message=f"'{capacity.name}' created", data=json.loads(response.text), show_headers=True)

# Here we take a different approach, since the id returned by the API is not the same as the id we use in the code:
# the id in the response is the fully qualified Azure resource ID for the resource.
Expand Down
4 changes: 2 additions & 2 deletions src/fabric_cli/commands/fs/mkdir/fab_fs_mkdir_connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,9 +125,9 @@ def exec(connection: VirtualWorkspaceItem, args: Namespace) -> None:

response = connection_api.create_connection(args, payload=json_payload)
if response.status_code in (200, 201):
utils_ui.print_output_format(args, message=f"'{connection.name}' created")

data = json.loads(response.text)
utils_ui.print_output_format(args, message=f"'{connection.name}' created", data=data, show_headers=True)

connection._id = data["id"]

# Add to mem_store
Expand Down
3 changes: 1 addition & 2 deletions src/fabric_cli/commands/fs/mkdir/fab_fs_mkdir_domain.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,9 +49,8 @@ def exec(domain: VirtualWorkspaceItem, args: Namespace) -> None:

response = domain_api.create_domain(args, payload=json_payload)
if response.status_code in (200, 201):
utils_ui.print_output_format(args, message=f"'{domain.name}' created")

data = json.loads(response.text)
utils_ui.print_output_format(args, message=f"'{domain.name}' created", data=data, show_headers=True)

domain._id = data["id"]

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,9 +71,7 @@ def exec(external_data_share: VirtualItem, args: Namespace) -> None:
item.name, external_data_share.id
)

utils_ui.print_output_format(
args, message=f"'{external_data_share.name}' created"
)
utils_ui.print_output_format(args, message=f"'{external_data_share.name}' created", data=data, show_headers=True)

# Add to mem_store
utils_mem_store.upsert_external_data_share_to_cache(external_data_share, item)
2 changes: 1 addition & 1 deletion src/fabric_cli/commands/fs/mkdir/fab_fs_mkdir_folder.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,8 @@ def exec(folder: Folder, args: Namespace) -> str | None:

response = folder_api.create_folder(args, json_payload)
if response.status_code in (200, 201):
utils_ui.print_output_format(args, message=f"'{folder.name}' created")
data = json.loads(response.text)
utils_ui.print_output_format(args, message=f"'{folder.name}' created", data=data, show_headers=True)
if data is not None and data.get("id"):
_folder_id = data["id"]
folder._id = _folder_id
Expand Down
3 changes: 1 addition & 2 deletions src/fabric_cli/commands/fs/mkdir/fab_fs_mkdir_gateway.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,9 +95,8 @@ def exec(gateway: VirtualWorkspaceItem, args: Namespace) -> None:

response = gateway_api.create_gateway(args, payload=json.dumps(payload))
if response.status_code in (200, 201):
utils_ui.print_output_format(args, message=f"'{gateway.name}' created")

data = json.loads(response.text)
utils_ui.print_output_format(args, message=f"'{gateway.name}' created", data=data, show_headers=True)
gateway._id = data["id"]

# Add to mem_store
Expand Down
55 changes: 54 additions & 1 deletion src/fabric_cli/commands/fs/mkdir/fab_fs_mkdir_item.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,37 @@


def exec(item: Item, args: Namespace) -> str | None:
"""
Execute the creation of a Microsoft Fabric item.

This method supports items that may require creating additional dependent
artifacts (e.g., a Report that implicitly creates a SemanticModel).
Output behavior differs depending on whether the current call represents the
user‑requested creation or an internally triggered dependency.

Two execution modes:
- **Root operation** (`is_root_operation=True`):
Represents the item explicitly requested by the user.
Handles creation of the item and any required dependencies.
Collects and returns a consolidated output for all created artifacts.

- **Dependency operation** (`is_root_operation=False`):
Represents an item created implicitly as part of another item's dependency chain.
Runs silently and contributes its result to the root operation’s batch output,
without producing standalone output.

Args:
item (Item): The Fabric item to be created.
args (Namespace): Command arguments. May include `output_batch` used to
accumulate results during dependency operations.

Returns:
str | None: The created item ID for root operations, or None for dependency
operations or failed creations.
"""
# Determine if this is part of a batch operation
is_root_operation = not hasattr(args, 'output_batch')

# Params
params = args.params
required_params, optional_params = mkdir_utils.get_params_per_item_type(item)
Expand Down Expand Up @@ -58,8 +89,30 @@ def exec(item: Item, args: Namespace) -> str | None:

response = item_api.create_item(args, json_payload, item_uri=True)
if response.status_code in (200, 201):
utils_ui.print_output_format(args, message=f"'{item.name}' created")
data = json.loads(response.text)

if hasattr(args, 'output_batch'):
# Collect operation data for batch output
args.output_batch['items'].append(data)
args.output_batch['names'].append(item.name)

# Only print consolidated output at the end of root operation
if is_root_operation:
names = args.output_batch['names']
names_list = f"'{names[0]}' and '{names[1]}'" if len(names) == 2 else "'" + "', '".join(names[:-1]) + f"' and '{names[-1]}'"

utils_ui.print_output_format(
args,
message=f"{names_list} created",
data=args.output_batch['items'],
show_headers=True
)
# Clean up
delattr(args, 'output_batch')
else:
# Standard single item output for non-batched scenarios
utils_ui.print_output_format(args, message=f"'{item.name}' created", data=data, show_headers=True)

if data is not None and data.get("id"):
_item_id = data["id"]
item._id = _item_id
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,10 @@ def exec(managed_identity: VirtualItem, args: Namespace) -> None:
utils_ui.print_grey(f"Creating a new Managed Identity...")
response = managed_identity_api.provision_managed_identity(args)
if response.status_code in (200, 201):
utils_ui.print_output_format(args, message=f"'{managed_identity.name}' created")

data = json.loads(response.text)
managed_identity._id = data["servicePrincipalId"]

# Add to mem_store
utils_mem_store.upsert_managed_identity_to_cache(managed_identity)
utils_ui.print_output_format(args, message=f"'{managed_identity.name}' created", data=data, show_headers=True)
Original file line number Diff line number Diff line change
Expand Up @@ -128,4 +128,4 @@ def exec(managed_private_endpoint: VirtualItem, args: Namespace) -> None:
fab_constant.ERROR_OPERATION_FAILED,
)

utils_ui.print_output_format(args, message=result_message)
utils_ui.print_output_format(args, message=result_message, data=data, show_headers=True)
Original file line number Diff line number Diff line change
Expand Up @@ -83,10 +83,9 @@ def exec(spark_pool: VirtualItem, args: Namespace) -> None:
utils_ui.print_grey(f"Creating a new Spark Pool...")
response = sparkpool_api.create_spark_pool(args, payload=json_payload)
if response.status_code in (200, 201):
utils_ui.print_output_format(args, message=f"'{spark_pool.name}' created")

data = json.loads(response.text)
spark_pool._id = data["id"]

# Add to mem_store
utils_mem_store.upsert_spark_pool_to_cache(spark_pool)
utils_ui.print_output_format(args, message=f"'{spark_pool.name}' created", data=data, show_headers=True)
5 changes: 3 additions & 2 deletions src/fabric_cli/commands/fs/mkdir/fab_fs_mkdir_workspace.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,9 +76,10 @@ def exec(workspace: Workspace, args: Namespace) -> None:

response = workspace_api.create_workspace(args, json_payload)
if response.status_code in (200, 201):
utils_ui.print_output_format(args, message=f"'{workspace.name}' created")
data = json.loads(response.text)
workspace._id = data["id"]

# Add to mem_store
utils_mem_store.upsert_workspace_to_cache(workspace)

utils_ui.print_output_format(args, message=f"'{workspace.name}' created", data=data, show_headers=True)

29 changes: 29 additions & 0 deletions src/fabric_cli/utils/fab_cmd_mkdir_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,8 @@ def add_type_specific_payload(item: Item, args, payload):
"EventHouse not provided in params. Creating one first"
)

_initialize_batch_collection_for_dependency_creation(args)

# Create a new Event House first
_eventhouse = Item(
f"{item.short_name}_auto",
Expand Down Expand Up @@ -128,6 +130,8 @@ def add_type_specific_payload(item: Item, args, payload):
"Semantic Model not provided in params. Creating one first"
)

_initialize_batch_collection_for_dependency_creation(args)

# Create a new Semantic Model first
_semantic_model = Item(
f"{item.short_name}_auto",
Expand Down Expand Up @@ -754,3 +758,28 @@ def find_mpe_connection(managed_private_endpoint, targetprivatelinkresourceid):
return conn

return None

def _initialize_batch_collection_for_dependency_creation(args):
"""Initialize batch collection for scenarios where dependent items need to be created automatically.

This method is used when creating items that have dependencies that don't exist yet, such as:
- Creating a KQL Database without an EventHouse (auto-creates EventHouse first)
- Creating a Report without a Semantic Model (auto-creates Semantic Model first)

The batch collection allows multiple related items to be created in sequence and then
display a consolidated output message showing all items that were created together.

Args:
args (Namespace): The command arguments namespace that will be augmented with
'output_batch' attribute containing 'items' and 'names' lists
to collect creation results.

Note:
This method only initializes the batch collection if it doesn't already exist,
ensuring it's safe to call multiple times during a dependency creation chain.
"""
if not hasattr(args, 'output_batch'):
args.output_batch = {
'items': [],
'names': []
}
6 changes: 3 additions & 3 deletions src/fabric_cli/utils/fab_ui.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,7 @@ def print_done(text: str, to_stderr: bool = False) -> None:
# Escape the text to avoid HTML injection and parsing issues
escaped_text = html.escape(text)
_safe_print_formatted_text(
f"<ansigreen>*</ansigreen> {escaped_text}", escaped_text, to_stderr
f"\n<ansigreen>*</ansigreen> {escaped_text}", escaped_text, to_stderr
)


Expand Down Expand Up @@ -368,9 +368,9 @@ def _print_output_format_result_text(output: FabricCLIOutput) -> None:
print_grey("------------------------------")
_print_raw_data(output_result.hidden_data)


if output_result.message:
print_done(output_result.message)

print_done(f"{output_result.message}\n")

def _print_raw_data(data: list[Any], to_stderr: bool = False) -> None:
"""
Expand Down
Loading