From f648dfa01841097730d75e613c32f30c594bf137 Mon Sep 17 00:00:00 2001 From: "tomasz.tutka" Date: Thu, 12 Feb 2026 19:16:34 +0100 Subject: [PATCH 01/15] added --- .github/workflows/copilot-analysis.yml | 510 +++++++++++++++++++++++++ 1 file changed, 510 insertions(+) create mode 100644 .github/workflows/copilot-analysis.yml diff --git a/.github/workflows/copilot-analysis.yml b/.github/workflows/copilot-analysis.yml new file mode 100644 index 00000000..88547175 --- /dev/null +++ b/.github/workflows/copilot-analysis.yml @@ -0,0 +1,510 @@ +name: "GitHub Copilot Code Analysis" + +on: + pull_request: + types: [opened, edited, synchronize] + branches: + - main + workflow_dispatch: + inputs: + pr_number: + description: 'PR number to analyze' + required: true + type: number + +permissions: + contents: read + pull-requests: write + actions: read + +jobs: + copilot-analysis: + name: Copilot Code Analysis + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '18' + + - name: Install GitHub CLI and Copilot CLI + run: | + # Install GitHub CLI if not present + if ! command -v gh &> /dev/null; then + curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg + echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null + sudo apt update + sudo apt install gh + fi + + # Install Copilot CLI extension + gh auth login --with-token <<< "${{ secrets.GITHUB_TOKEN }}" + gh extension install github/gh-copilot || true + + - name: Get PR details + id: pr-details + run: | + if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + PR_NUMBER=${{ github.event.inputs.pr_number }} + else + PR_NUMBER=${{ github.event.number }} + fi + + echo "pr_number=${PR_NUMBER}" >> $GITHUB_OUTPUT + + # Get PR info + PR_INFO=$(gh pr view $PR_NUMBER --json title,author,headRefName,baseRefName,body) + echo "pr_title=$(echo "$PR_INFO" | jq -r '.title')" >> $GITHUB_OUTPUT + echo "pr_author=$(echo "$PR_INFO" | jq -r '.author.login')" >> $GITHUB_OUTPUT + echo "head_branch=$(echo "$PR_INFO" | jq -r '.headRefName')" >> $GITHUB_OUTPUT + echo "base_branch=$(echo "$PR_INFO" | jq -r '.baseRefName')" >> $GITHUB_OUTPUT + echo "pr_body=$(echo "$PR_INFO" | jq -r '.body')" >> $GITHUB_OUTPUT + + # Detect if this is an API Sync PR + IS_API_SYNC=false + if echo "${{ steps.pr-details.outputs.pr_title }}" | grep -i "api.*sync\|sync.*api\|spec.*update\|swagger.*update"; then + IS_API_SYNC=true + elif echo "${{ steps.pr-details.outputs.pr_body }}" | grep -i "swaggerhub\|api.*spec\|spec.*version"; then + IS_API_SYNC=true + elif echo "${{ steps.pr-details.outputs.head_branch }}" | grep -i "sync\|api\|spec"; then + IS_API_SYNC=true + fi + + echo "is_api_sync=${IS_API_SYNC}" >> $GITHUB_OUTPUT + echo "API Sync PR detected: ${IS_API_SYNC}" + + - name: Get changed files for analysis + id: changed-files + run: | + PR_NUMBER=${{ steps.pr-details.outputs.pr_number }} + + # Get list of changed files + gh pr diff $PR_NUMBER --name-only > changed_files.txt + + # Filter for code files only (Java, Python, etc.) 
+ grep -E '\.(java|py|js|ts|jsx|tsx|go|rs|cpp|c|h|hpp|cs|php|rb|scala|kt)$' changed_files.txt > code_files.txt || true + + CHANGED_FILES_COUNT=$(wc -l < changed_files.txt | tr -d ' ') + CODE_FILES_COUNT=$(wc -l < code_files.txt | tr -d ' ') + + echo "changed_files_count=${CHANGED_FILES_COUNT}" >> $GITHUB_OUTPUT + echo "code_files_count=${CODE_FILES_COUNT}" >> $GITHUB_OUTPUT + + echo "Changed files: $CHANGED_FILES_COUNT" + echo "Code files: $CODE_FILES_COUNT" + + - name: Analyze API Sync Changes + id: api-sync-analysis + if: steps.pr-details.outputs.is_api_sync == 'true' + run: | + echo "🔄 Analyzing API Sync PR changes..." + + # Create API sync analysis + cat > api_sync_summary.md << 'EOF' + ## 🔄 API Sync PR Summary + + This API Sync PR was triggered by @${{ steps.pr-details.outputs.pr_author }} through GitHub Actions workflow_dispatch on $(date '+%Y-%m-%d'). + + EOF + + # Try to extract version information from PR title, body, or files + VERSION_FROM="" + VERSION_TO="" + + # Check for version in PR title + if echo "${{ steps.pr-details.outputs.pr_title }}" | grep -o "v\?[0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?" | head -2; then + VERSIONS=$(echo "${{ steps.pr-details.outputs.pr_title }}" | grep -o "v\?[0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?" | head -2) + VERSION_FROM=$(echo "$VERSIONS" | head -1 | sed 's/^v//') + VERSION_TO=$(echo "$VERSIONS" | tail -1 | sed 's/^v//') + fi + + # Check for version in PR body + if [ -z "$VERSION_FROM" ] && echo "${{ steps.pr-details.outputs.pr_body }}" | grep -i "version.*from\|from.*version"; then + VERSION_FROM=$(echo "${{ steps.pr-details.outputs.pr_body }}" | grep -i "version.*from\|from.*version" | grep -o "[0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?" | head -1) + VERSION_TO=$(echo "${{ steps.pr-details.outputs.pr_body }}" | grep -i "version.*to\|to.*version" | grep -o "[0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?" 
| head -1) + fi + + # Check version files + if [ -z "$VERSION_FROM" ]; then + if [ -f "version" ]; then + VERSION_TO=$(cat version | tr -d '\n') + # Try to get previous version from git + VERSION_FROM=$(git show HEAD~1:version 2>/dev/null || echo "unknown") + elif [ -f "pom.xml" ]; then + VERSION_TO=$(grep -o "[^<]*" pom.xml | head -1 | sed 's/<[^>]*>//g' | tr -d '\n') + VERSION_FROM=$(git show HEAD~1:pom.xml 2>/dev/null | grep -o "[^<]*" | head -1 | sed 's/<[^>]*>//g' | tr -d '\n' || echo "unknown") + fi + fi + + # Add version info to summary + if [ -n "$VERSION_FROM" ] && [ -n "$VERSION_TO" ] && [ "$VERSION_FROM" != "$VERSION_TO" ]; then + echo "This PR updates the SDK API Spec Version: from $VERSION_FROM to $VERSION_TO" >> api_sync_summary.md + echo "" >> api_sync_summary.md + echo "Latest Swaggerhub API Spec is fetched - version $VERSION_TO" >> api_sync_summary.md + else + echo "This PR updates the SDK API Spec to the latest version" >> api_sync_summary.md + echo "" >> api_sync_summary.md + echo "Latest Swaggerhub API Spec has been fetched" >> api_sync_summary.md + fi + + echo "Patches have been applied" >> api_sync_summary.md + echo "Generated client has been updated" >> api_sync_summary.md + echo "" >> api_sync_summary.md + + # Analyze changes in SDK + echo "## Changes in SDK" >> api_sync_summary.md + echo "" >> api_sync_summary.md + + # Initialize arrays for tracking changes + declare -A added_models + declare -A modified_models + declare -A removed_models + declare -A added_apis + declare -A modified_apis + declare -A removed_apis + declare -A renamed_classes + + # Analyze changed files + while IFS= read -r file; do + if [[ "$file" =~ \.java$ ]]; then + FILE_PATH="$file" + FILE_NAME=$(basename "$file" .java) + + # Check if file was added, modified, or deleted + FILE_STATUS=$(git diff --name-status origin/${{ steps.pr-details.outputs.base_branch }}..HEAD -- "$file" 2>/dev/null | cut -f1 || echo "M") + + # Categorize based on file path and name + if [[ "$file" =~ /model/ ]]; then + case "$FILE_STATUS" in + "A") added_models["$FILE_NAME"]=1 ;; + "M") modified_models["$FILE_NAME"]=1 ;; + "D") removed_models["$FILE_NAME"]=1 ;; + esac + elif [[ "$file" =~ /api/ ]] && [[ "$FILE_NAME" =~ Api$ ]]; then + case "$FILE_STATUS" in + "A") added_apis["$FILE_NAME"]=1 ;; + "M") modified_apis["$FILE_NAME"]=1 ;; + "D") removed_apis["$FILE_NAME"]=1 ;; + esac + fi + fi + done < changed_files.txt + + # Generate Added models section + if [ ${#added_models[@]} -gt 0 ]; then + echo "### Added models" >> api_sync_summary.md + for model in "${!added_models[@]}"; do + echo "- $model" >> api_sync_summary.md + done + echo "" >> api_sync_summary.md + fi + + # Generate Modified models section + if [ ${#modified_models[@]} -gt 0 ]; then + echo "### Modified models" >> api_sync_summary.md + for model in "${!modified_models[@]}"; do + echo "- $model" >> api_sync_summary.md + done + echo "" >> api_sync_summary.md + fi + + # Generate Breaking Changes section if there are removed items + if [ ${#removed_models[@]} -gt 0 ] || [ ${#removed_apis[@]} -gt 0 ]; then + echo "## Breaking Changes" >> api_sync_summary.md + echo "" >> api_sync_summary.md + fi + + # Modified API classes + if [ ${#modified_apis[@]} -gt 0 ]; then + echo "### Modified api classes" >> api_sync_summary.md + for api in "${!modified_apis[@]}"; do + echo "- $api" >> api_sync_summary.md + done + echo "" >> api_sync_summary.md + fi + + # Removed API classes + if [ ${#removed_apis[@]} -gt 0 ]; then + echo "### Removed api classes" >> api_sync_summary.md + 
for api in "${!removed_apis[@]}"; do + echo "- $api" >> api_sync_summary.md + done + echo "" >> api_sync_summary.md + fi + + # Removed models + if [ ${#removed_models[@]} -gt 0 ]; then + echo "### Removed models" >> api_sync_summary.md + for model in "${!removed_models[@]}"; do + echo "- $model" >> api_sync_summary.md + done + echo "" >> api_sync_summary.md + fi + + # Try to detect renamed classes by analyzing git renames + echo "### Renamed classes" >> api_sync_summary.md + git diff --find-renames=50 --name-status origin/${{ steps.pr-details.outputs.base_branch }}..HEAD | grep "^R" | while read status old_file new_file; do + if [[ "$old_file" =~ \.java$ ]] && [[ "$new_file" =~ \.java$ ]]; then + OLD_NAME=$(basename "$old_file" .java) + NEW_NAME=$(basename "$new_file" .java) + echo "- $OLD_NAME -> $NEW_NAME" >> api_sync_summary.md + fi + done + + # Add footer + echo "" >> api_sync_summary.md + echo "---" >> api_sync_summary.md + echo "*Auto-generated API Sync Summary - $(date '+%Y-%m-%d %H:%M:%S')*" >> api_sync_summary.md + + echo "api_sync_completed=true" >> $GITHUB_OUTPUT + + - name: Analyze code with Copilot + id: copilot-analysis + if: steps.changed-files.outputs.code_files_count > 0 + run: | + PR_NUMBER=${{ steps.pr-details.outputs.pr_number }} + + echo "🤖 Starting Copilot analysis..." + + # Create analysis report + if [ "${{ steps.pr-details.outputs.is_api_sync }}" == "true" ]; then + # For API Sync PRs, use the structured summary + cp api_sync_summary.md analysis_report.md + + # Add additional Copilot analysis for API sync + cat >> analysis_report.md << 'EOF' + + ## 🤖 GitHub Copilot Analysis + + ### API Changes Impact Assessment + + EOF + + # Analyze the scale of changes + TOTAL_JAVA_FILES=$(find . -name "*.java" -path "*/src/*" | wc -l) + CHANGED_JAVA_FILES=${{ steps.changed-files.outputs.code_files_count }} + + if [ "$CHANGED_JAVA_FILES" -gt 50 ]; then + echo "âš ī¸ **Large Scale Changes**: $CHANGED_JAVA_FILES Java files modified" >> analysis_report.md + echo "- Extensive API updates detected" >> analysis_report.md + echo "- Recommend thorough testing of all affected endpoints" >> analysis_report.md + echo "- Consider staged rollout for production deployment" >> analysis_report.md + elif [ "$CHANGED_JAVA_FILES" -gt 20 ]; then + echo "📊 **Medium Scale Changes**: $CHANGED_JAVA_FILES Java files modified" >> analysis_report.md + echo "- Moderate API updates" >> analysis_report.md + echo "- Verify backward compatibility" >> analysis_report.md + else + echo "✅ **Small Scale Changes**: $CHANGED_JAVA_FILES Java files modified" >> analysis_report.md + echo "- Limited scope API updates" >> analysis_report.md + fi + + echo "" >> analysis_report.md + + else + # Regular PR analysis + cat > analysis_report.md << 'EOF' + ## 🤖 GitHub Copilot Code Analysis + + **PR:** #${{ steps.pr-details.outputs.pr_number }} + **Title:** ${{ steps.pr-details.outputs.pr_title }} + **Author:** ${{ steps.pr-details.outputs.pr_author }} + **Branch:** `${{ steps.pr-details.outputs.head_branch }}` → `${{ steps.pr-details.outputs.base_branch }}` + + EOF + + # Analyze each code file for regular PRs + while IFS= read -r file; do + if [ -f "$file" ]; then + echo "### 📄 Analysis of \`$file\`" >> analysis_report.md + echo "" >> analysis_report.md + + # Use Copilot to explain the code changes + echo "Analyzing $file with Copilot..." 
+ + # Use gh copilot explain command + COPILOT_EXPLANATION="" + if command -v gh copilot &> /dev/null; then + # Try to get explanation from Copilot + COPILOT_EXPLANATION=$(gh copilot explain "$file" 2>/dev/null || echo "") + fi + + if [ -n "$COPILOT_EXPLANATION" ]; then + echo "$COPILOT_EXPLANATION" >> analysis_report.md + else + # Fallback analysis using file inspection + echo "**File Type:** $(file "$file" | cut -d: -f2-)" >> analysis_report.md + echo "**Lines:** $(wc -l < "$file" 2>/dev/null || echo "N/A")" >> analysis_report.md + + # Basic code pattern detection + if grep -q "class\|interface\|enum" "$file" 2>/dev/null; then + echo "**Contains:** Classes/Interfaces" >> analysis_report.md + fi + if grep -q "test\|Test\|@Test" "$file" 2>/dev/null; then + echo "**Type:** Test file" >> analysis_report.md + fi + if grep -q "public static void main" "$file" 2>/dev/null; then + echo "**Type:** Main class" >> analysis_report.md + fi + fi + + echo "" >> analysis_report.md + fi + done < code_files.txt + fi + + # Add summary section + cat >> analysis_report.md << EOF + + ### 📊 Summary + + - **Total changed files:** ${{ steps.changed-files.outputs.changed_files_count }} + - **Code files analyzed:** ${{ steps.changed-files.outputs.code_files_count }} + - **Analysis timestamp:** $(date -u '+%Y-%m-%d %H:%M:%S UTC') + + ### 🔍 Copilot Recommendations + + EOF + + # Get general recommendations from Copilot if available + echo "Generating Copilot recommendations..." + + # Create a prompt for general code review + REVIEW_PROMPT="Review this pull request and provide recommendations for code quality, security, and best practices. Focus on Java code if present." + + # Use Copilot suggest if available + SUGGESTIONS="" + if command -v gh copilot suggest &> /dev/null; then + SUGGESTIONS=$(echo "$REVIEW_PROMPT" | gh copilot suggest 2>/dev/null || echo "") + fi + + if [ -n "$SUGGESTIONS" ]; then + echo "$SUGGESTIONS" >> analysis_report.md + else + # Fallback recommendations based on file patterns + echo "**Automated Analysis Recommendations:**" >> analysis_report.md + echo "" >> analysis_report.md + + # Check for common patterns + if grep -r "System.out.println\|console.log\|print(" . --include="*.java" --include="*.js" --include="*.py" 2>/dev/null | head -5; then + echo "- âš ī¸ Consider removing debug print statements before merging" >> analysis_report.md + fi + + if grep -r "TODO\|FIXME\|XXX" . --include="*.java" --include="*.js" --include="*.py" 2>/dev/null | head -3; then + echo "- 📝 Address TODO/FIXME comments if possible" >> analysis_report.md + fi + + if find . -name "*.java" -exec grep -l "catch.*Exception.*{[^}]*}" {} \; 2>/dev/null | head -1; then + echo "- 🔍 Review exception handling in catch blocks" >> analysis_report.md + fi + + echo "- ✅ Consider adding unit tests for new functionality" >> analysis_report.md + echo "- 📚 Ensure proper documentation for public APIs" >> analysis_report.md + fi + + cat >> analysis_report.md << EOF + + --- + *Analysis generated by GitHub Copilot integration - $(date '+%Y-%m-%d %H:%M:%S')* + EOF + + echo "analysis_completed=true" >> $GITHUB_OUTPUT + + - name: Create detailed diff analysis + if: steps.copilot-analysis.outputs.analysis_completed == 'true' + run: | + echo "" >> analysis_report.md + echo "### 📋 Detailed Changes" >> analysis_report.md + echo "" >> analysis_report.md + echo "
" >> analysis_report.md + echo "Click to view detailed diff analysis" >> analysis_report.md + echo "" >> analysis_report.md + echo "\`\`\`diff" >> analysis_report.md + + # Get diff with context + gh pr diff ${{ steps.pr-details.outputs.pr_number }} | head -200 >> analysis_report.md + + echo "\`\`\`" >> analysis_report.md + echo "" >> analysis_report.md + echo "
" >> analysis_report.md + + - name: Post Copilot analysis as PR comment + if: steps.copilot-analysis.outputs.analysis_completed == 'true' + run: | + # Post analysis as comment + gh pr comment ${{ steps.pr-details.outputs.pr_number }} --body-file analysis_report.md + + - name: Update PR description with Copilot summary + if: steps.copilot-analysis.outputs.analysis_completed == 'true' + run: | + # Get current PR description + CURRENT_DESC=$(gh pr view ${{ steps.pr-details.outputs.pr_number }} --json body --jq '.body') + + # Create short summary for PR description + SUMMARY="## 🤖 Copilot Analysis Summary + + ✅ **Analysis completed** - ${{ steps.changed-files.outputs.code_files_count }} code files analyzed + + 📊 **Files:** ${{ steps.changed-files.outputs.changed_files_count }} changed, ${{ steps.changed-files.outputs.code_files_count }} code files + + 🔍 **Status:** Automated code review completed by GitHub Copilot + + > 💡 See detailed analysis in PR comments below + + ---" + + # Check if summary already exists and update/add accordingly + if echo "$CURRENT_DESC" | grep -q "## 🤖 Copilot Analysis Summary"; then + # Remove old summary + NEW_DESC=$(echo "$CURRENT_DESC" | sed '/## 🤖 Copilot Analysis Summary/,/^---$/d') + NEW_DESC="${NEW_DESC} + + ${SUMMARY}" + else + # Add new summary + if [ -n "$CURRENT_DESC" ] && [ "$CURRENT_DESC" != "null" ]; then + NEW_DESC="${CURRENT_DESC} + + ${SUMMARY}" + else + NEW_DESC="$SUMMARY" + fi + fi + + # Update PR description + echo "$NEW_DESC" > pr_description.md + gh pr edit ${{ steps.pr-details.outputs.pr_number }} --body-file pr_description.md + + - name: Create analysis artifact + if: steps.copilot-analysis.outputs.analysis_completed == 'true' + uses: actions/upload-artifact@v4 + with: + name: copilot-analysis-${{ steps.pr-details.outputs.pr_number }} + path: | + analysis_report.md + changed_files.txt + code_files.txt + retention-days: 30 + + - name: Analysis summary + if: always() + run: | + echo "🎉 Copilot Analysis Workflow Completed!" 
+ echo "==================================" + echo "PR Number: ${{ steps.pr-details.outputs.pr_number }}" + echo "Changed Files: ${{ steps.changed-files.outputs.changed_files_count }}" + echo "Code Files: ${{ steps.changed-files.outputs.code_files_count }}" + + if [ "${{ steps.copilot-analysis.outputs.analysis_completed }}" == "true" ]; then + echo "✅ Analysis completed successfully" + echo "📝 Check PR comments for detailed analysis" + echo "📋 PR description updated with summary" + else + echo "âš ī¸ Analysis skipped (no code files found or other issue)" + fi From 4455ba4a4a77218175359dbad0c964cff7845d18 Mon Sep 17 00:00:00 2001 From: "tomasz.tutka" Date: Thu, 12 Feb 2026 19:21:08 +0100 Subject: [PATCH 02/15] updated --- .../com/equinix/sdk/fabricv4/model/AccessPoint.java | 13 ------------- .../sdk/fabricv4/model/AllPortsResponse.java | 2 +- .../com/equinix/sdk/fabricv4/model/AuthContext.java | 2 +- 3 files changed, 2 insertions(+), 15 deletions(-) diff --git a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPoint.java b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPoint.java index 2b128ed5..fc7a9809 100644 --- a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPoint.java +++ b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPoint.java @@ -64,20 +64,7 @@ */ @javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", comments = "Generator version: 7.16.0") public class AccessPoint { - public static final String SERIALIZED_NAME_TYPE = "type"; - @SerializedName(SERIALIZED_NAME_TYPE) - @javax.annotation.Nullable - private AccessPointType type; - - public static final String SERIALIZED_NAME_ACCOUNT = "account"; - @SerializedName(SERIALIZED_NAME_ACCOUNT) - @javax.annotation.Nullable - private SimplifiedAccount account; - public static final String SERIALIZED_NAME_LOCATION = "location"; - @SerializedName(SERIALIZED_NAME_LOCATION) - @javax.annotation.Nullable - private SimplifiedLocation location; public static final String SERIALIZED_NAME_PORT = "port"; @SerializedName(SERIALIZED_NAME_PORT) diff --git a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AllPortsResponse.java b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AllPortsResponse.java index 6ae73bb8..f5c3eda6 100644 --- a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AllPortsResponse.java +++ b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AllPortsResponse.java @@ -62,7 +62,7 @@ public class AllPortsResponse { public static final String SERIALIZED_NAME_DATA = "data"; @SerializedName(SERIALIZED_NAME_DATA) @javax.annotation.Nullable - private List data = new ArrayList<>(); + private List data; public AllPortsResponse() { } diff --git a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AuthContext.java b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AuthContext.java index 032b9c9f..7090b6f5 100644 --- a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AuthContext.java +++ b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AuthContext.java @@ -56,7 +56,7 @@ public class AuthContext { @JsonAdapter(AuthtypeEnum.Adapter.class) public enum AuthtypeEnum { SYSTEM("system"), - + SUPER("super"), USER("user"); private String value; From 91c1eef89ab4c880a9f1ae95d0744947075f2684 Mon Sep 17 00:00:00 2001 From: "equinix-labs@auto-commit-workflow" Date: Thu, 12 Feb 2026 18:21:41 +0000 Subject: [PATCH 03/15] Auto commit 
generated client changes - Thu Feb 12 18:21:41 UTC 2026 --- .../com/equinix/sdk/fabricv4/model/AccessPoint.java | 13 +++++++++++++ .../sdk/fabricv4/model/AllPortsResponse.java | 2 +- .../com/equinix/sdk/fabricv4/model/AuthContext.java | 2 +- 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPoint.java b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPoint.java index fc7a9809..2b128ed5 100644 --- a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPoint.java +++ b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPoint.java @@ -64,7 +64,20 @@ */ @javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", comments = "Generator version: 7.16.0") public class AccessPoint { + public static final String SERIALIZED_NAME_TYPE = "type"; + @SerializedName(SERIALIZED_NAME_TYPE) + @javax.annotation.Nullable + private AccessPointType type; + + public static final String SERIALIZED_NAME_ACCOUNT = "account"; + @SerializedName(SERIALIZED_NAME_ACCOUNT) + @javax.annotation.Nullable + private SimplifiedAccount account; + public static final String SERIALIZED_NAME_LOCATION = "location"; + @SerializedName(SERIALIZED_NAME_LOCATION) + @javax.annotation.Nullable + private SimplifiedLocation location; public static final String SERIALIZED_NAME_PORT = "port"; @SerializedName(SERIALIZED_NAME_PORT) diff --git a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AllPortsResponse.java b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AllPortsResponse.java index f5c3eda6..6ae73bb8 100644 --- a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AllPortsResponse.java +++ b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AllPortsResponse.java @@ -62,7 +62,7 @@ public class AllPortsResponse { public static final String SERIALIZED_NAME_DATA = "data"; @SerializedName(SERIALIZED_NAME_DATA) @javax.annotation.Nullable - private List data; + private List data = new ArrayList<>(); public AllPortsResponse() { } diff --git a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AuthContext.java b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AuthContext.java index 7090b6f5..032b9c9f 100644 --- a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AuthContext.java +++ b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AuthContext.java @@ -56,7 +56,7 @@ public class AuthContext { @JsonAdapter(AuthtypeEnum.Adapter.class) public enum AuthtypeEnum { SYSTEM("system"), - SUPER("super"), + USER("user"); private String value; From a27368a560dde8e4075b74b54484789adf9c350d Mon Sep 17 00:00:00 2001 From: "tomasz.tutka" Date: Thu, 12 Feb 2026 19:25:48 +0100 Subject: [PATCH 04/15] updated --- .github/workflows/copilot-analysis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/copilot-analysis.yml b/.github/workflows/copilot-analysis.yml index 88547175..ef338800 100644 --- a/.github/workflows/copilot-analysis.yml +++ b/.github/workflows/copilot-analysis.yml @@ -376,7 +376,7 @@ jobs: echo "Generating Copilot recommendations..." # Create a prompt for general code review - REVIEW_PROMPT="Review this pull request and provide recommendations for code quality, security, and best practices. Focus on Java code if present." + REVIEW_PROMPT="Review this pull request of changed files. 
Please present section 'Changes in SDK' with added, modified, removed models and APIs. They should be listed and categorized by added models, added apis, renamed from one to antoher class name, modified. Please evaluate braking changes and put in different section. I just need only listed summaraize" # Use Copilot suggest if available SUGGESTIONS="" From b72b0f6bc02bd440fed30cd2ffdbcb008913e0ad Mon Sep 17 00:00:00 2001 From: "tomasz.tutka" Date: Thu, 12 Feb 2026 19:30:43 +0100 Subject: [PATCH 05/15] updated --- .github/workflows/copilot-analysis.yml | 90 ++++++++++++------- .../openapi/fabric/tests/MetrosApiTest.java | 3 +- 2 files changed, 61 insertions(+), 32 deletions(-) diff --git a/.github/workflows/copilot-analysis.yml b/.github/workflows/copilot-analysis.yml index ef338800..4502a25e 100644 --- a/.github/workflows/copilot-analysis.yml +++ b/.github/workflows/copilot-analysis.yml @@ -61,20 +61,51 @@ jobs: # Get PR info PR_INFO=$(gh pr view $PR_NUMBER --json title,author,headRefName,baseRefName,body) - echo "pr_title=$(echo "$PR_INFO" | jq -r '.title')" >> $GITHUB_OUTPUT - echo "pr_author=$(echo "$PR_INFO" | jq -r '.author.login')" >> $GITHUB_OUTPUT - echo "head_branch=$(echo "$PR_INFO" | jq -r '.headRefName')" >> $GITHUB_OUTPUT - echo "base_branch=$(echo "$PR_INFO" | jq -r '.baseRefName')" >> $GITHUB_OUTPUT - echo "pr_body=$(echo "$PR_INFO" | jq -r '.body')" >> $GITHUB_OUTPUT + + # Extract values safely + PR_TITLE=$(echo "$PR_INFO" | jq -r '.title // ""') + PR_AUTHOR=$(echo "$PR_INFO" | jq -r '.author.login // ""') + HEAD_BRANCH=$(echo "$PR_INFO" | jq -r '.headRefName // ""') + BASE_BRANCH=$(echo "$PR_INFO" | jq -r '.baseRefName // ""') + PR_BODY=$(echo "$PR_INFO" | jq -r '.body // ""') + + # Set outputs with proper escaping + { + echo "pr_title<> $GITHUB_OUTPUT # Detect if this is an API Sync PR IS_API_SYNC=false - if echo "${{ steps.pr-details.outputs.pr_title }}" | grep -i "api.*sync\|sync.*api\|spec.*update\|swagger.*update"; then + PR_TITLE_VAL=$(echo "$PR_INFO" | jq -r '.title // ""') + PR_BODY_VAL=$(echo "$PR_INFO" | jq -r '.body // ""') + HEAD_BRANCH_VAL=$(echo "$PR_INFO" | jq -r '.headRefName // ""') + + echo "Checking PR title: $PR_TITLE_VAL" + echo "Checking head branch: $HEAD_BRANCH_VAL" + + if [ -n "$PR_TITLE_VAL" ] && echo "$PR_TITLE_VAL" | grep -i "api.*sync\|sync.*api\|spec.*update\|swagger.*update"; then IS_API_SYNC=true - elif echo "${{ steps.pr-details.outputs.pr_body }}" | grep -i "swaggerhub\|api.*spec\|spec.*version"; then + echo "API Sync detected from title" + elif [ -n "$PR_BODY_VAL" ] && echo "$PR_BODY_VAL" | grep -i "swaggerhub\|api.*spec\|spec.*version"; then IS_API_SYNC=true - elif echo "${{ steps.pr-details.outputs.head_branch }}" | grep -i "sync\|api\|spec"; then + echo "API Sync detected from body" + elif [ -n "$HEAD_BRANCH_VAL" ] && echo "$HEAD_BRANCH_VAL" | grep -i "sync\|api\|spec"; then IS_API_SYNC=true + echo "API Sync detected from branch name" fi echo "is_api_sync=${IS_API_SYNC}" >> $GITHUB_OUTPUT @@ -447,39 +478,38 @@ jobs: CURRENT_DESC=$(gh pr view ${{ steps.pr-details.outputs.pr_number }} --json body --jq '.body') # Create short summary for PR description - SUMMARY="## 🤖 Copilot Analysis Summary - - ✅ **Analysis completed** - ${{ steps.changed-files.outputs.code_files_count }} code files analyzed - - 📊 **Files:** ${{ steps.changed-files.outputs.changed_files_count }} changed, ${{ steps.changed-files.outputs.code_files_count }} code files - - 🔍 **Status:** Automated code review completed by GitHub Copilot - - > 💡 See detailed analysis in PR 
comments below - - ---" + echo "## 🤖 Copilot Analysis Summary" > pr_summary.md + echo "" >> pr_summary.md + echo "✅ **Analysis completed** - ${{ steps.changed-files.outputs.code_files_count }} code files analyzed" >> pr_summary.md + echo "" >> pr_summary.md + echo "📊 **Files:** ${{ steps.changed-files.outputs.changed_files_count }} changed, ${{ steps.changed-files.outputs.code_files_count }} code files" >> pr_summary.md + echo "" >> pr_summary.md + echo "🔍 **Status:** Automated code review completed by GitHub Copilot" >> pr_summary.md + echo "" >> pr_summary.md + echo "> 💡 See detailed analysis in PR comments below" >> pr_summary.md + echo "" >> pr_summary.md + echo "---" >> pr_summary.md # Check if summary already exists and update/add accordingly - if echo "$CURRENT_DESC" | grep -q "## 🤖 Copilot Analysis Summary"; then + if echo "$CURRENT_DESC" | grep -q "🤖 Copilot Analysis Summary"; then # Remove old summary - NEW_DESC=$(echo "$CURRENT_DESC" | sed '/## 🤖 Copilot Analysis Summary/,/^---$/d') - NEW_DESC="${NEW_DESC} - - ${SUMMARY}" + NEW_DESC=$(echo "$CURRENT_DESC" | sed '/🤖 Copilot Analysis Summary/,/^---$/d') + echo "${NEW_DESC}" > temp_desc.md + echo "" >> temp_desc.md + cat pr_summary.md >> temp_desc.md else # Add new summary if [ -n "$CURRENT_DESC" ] && [ "$CURRENT_DESC" != "null" ]; then - NEW_DESC="${CURRENT_DESC} - - ${SUMMARY}" + echo "$CURRENT_DESC" > temp_desc.md + echo "" >> temp_desc.md + cat pr_summary.md >> temp_desc.md else - NEW_DESC="$SUMMARY" + cp pr_summary.md temp_desc.md fi fi # Update PR description - echo "$NEW_DESC" > pr_description.md - gh pr edit ${{ steps.pr-details.outputs.pr_number }} --body-file pr_description.md + gh pr edit ${{ steps.pr-details.outputs.pr_number }} --body-file temp_desc.md - name: Create analysis artifact if: steps.copilot-analysis.outputs.analysis_completed == 'true' diff --git a/equinix-openapi-fabric-tests/src/test/java/com/equinix/openapi/fabric/tests/MetrosApiTest.java b/equinix-openapi-fabric-tests/src/test/java/com/equinix/openapi/fabric/tests/MetrosApiTest.java index ac315bf5..1b02bdf3 100644 --- a/equinix-openapi-fabric-tests/src/test/java/com/equinix/openapi/fabric/tests/MetrosApiTest.java +++ b/equinix-openapi-fabric-tests/src/test/java/com/equinix/openapi/fabric/tests/MetrosApiTest.java @@ -37,7 +37,6 @@ public void getMetroByCode() throws ApiException { public void getMetros() throws ApiException { MetroResponse metroResponse = metrosApi.getMetros(null,1, 10); assertEquals(200, metrosApi.getApiClient().getStatusCode()); - boolean metroFound = metroResponse.getData().stream().anyMatch(metro -> metro.getCode().equals(metroCode)); - assertTrue(metroFound); + assertTrue(!metroResponse.getData().isEmpty()); } } From cc99be707fc62b744731f351a850ab3529e56855 Mon Sep 17 00:00:00 2001 From: "tomasz.tutka" Date: Thu, 12 Feb 2026 19:52:50 +0100 Subject: [PATCH 06/15] updated --- .../{AccessPoint.java => AccessPointS.java} | 130 +++++++----------- 1 file changed, 53 insertions(+), 77 deletions(-) rename services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/{AccessPoint.java => AccessPointS.java} (83%) diff --git a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPoint.java b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPointS.java similarity index 83% rename from services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPoint.java rename to services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPointS.java index 2b128ed5..f4ddaaf3 100644 --- 
a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPoint.java +++ b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPointS.java @@ -13,20 +13,7 @@ import java.util.Objects; import java.util.Locale; -import com.equinix.sdk.fabricv4.model.AccessPointType; -import com.equinix.sdk.fabricv4.model.CloudRouter; -import com.equinix.sdk.fabricv4.model.MetalInterconnection; -import com.equinix.sdk.fabricv4.model.ModelInterface; -import com.equinix.sdk.fabricv4.model.PeeringType; -import com.equinix.sdk.fabricv4.model.SimplifiedAccount; -import com.equinix.sdk.fabricv4.model.SimplifiedLinkProtocol; -import com.equinix.sdk.fabricv4.model.SimplifiedLocation; -import com.equinix.sdk.fabricv4.model.SimplifiedNetwork; -import com.equinix.sdk.fabricv4.model.SimplifiedPort; -import com.equinix.sdk.fabricv4.model.SimplifiedServiceProfile; -import com.equinix.sdk.fabricv4.model.VirtualDevice; -import com.equinix.sdk.fabricv4.model.VirtualNetwork; -import com.equinix.sdk.fabricv4.model.VpicInterface; + import com.google.gson.TypeAdapter; import com.google.gson.annotations.JsonAdapter; import com.google.gson.annotations.SerializedName; @@ -36,26 +23,15 @@ import java.util.Arrays; import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import com.google.gson.JsonArray; -import com.google.gson.JsonDeserializationContext; -import com.google.gson.JsonDeserializer; import com.google.gson.JsonElement; import com.google.gson.JsonObject; -import com.google.gson.JsonParseException; import com.google.gson.TypeAdapterFactory; import com.google.gson.reflect.TypeToken; -import com.google.gson.TypeAdapter; -import com.google.gson.stream.JsonReader; -import com.google.gson.stream.JsonWriter; -import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; -import java.util.Locale; import com.equinix.sdk.fabricv4.JSON; @@ -63,7 +39,7 @@ * Access point object */ @javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", comments = "Generator version: 7.16.0") -public class AccessPoint { +public class AccessPointS { public static final String SERIALIZED_NAME_TYPE = "type"; @SerializedName(SERIALIZED_NAME_TYPE) @javax.annotation.Nullable @@ -206,10 +182,10 @@ public static void validateJsonElement(JsonElement jsonElement) throws IOExcepti @javax.annotation.Nullable private RoleEnum role; - public AccessPoint() { + public AccessPointS() { } - public AccessPoint type(@javax.annotation.Nullable AccessPointType type) { + public AccessPointS type(@javax.annotation.Nullable AccessPointType type) { this.type = type; return this; } @@ -228,7 +204,7 @@ public void setType(@javax.annotation.Nullable AccessPointType type) { } - public AccessPoint account(@javax.annotation.Nullable SimplifiedAccount account) { + public AccessPointS account(@javax.annotation.Nullable SimplifiedAccount account) { this.account = account; return this; } @@ -247,7 +223,7 @@ public void setAccount(@javax.annotation.Nullable SimplifiedAccount account) { } - public AccessPoint location(@javax.annotation.Nullable SimplifiedLocation location) { + public AccessPointS location(@javax.annotation.Nullable SimplifiedLocation location) { this.location = location; return this; } @@ -266,7 +242,7 @@ public void setLocation(@javax.annotation.Nullable SimplifiedLocation location) } - public AccessPoint port(@javax.annotation.Nullable SimplifiedPort port) { + public AccessPointS port(@javax.annotation.Nullable 
SimplifiedPort port) { this.port = port; return this; } @@ -285,7 +261,7 @@ public void setPort(@javax.annotation.Nullable SimplifiedPort port) { } - public AccessPoint profile(@javax.annotation.Nullable SimplifiedServiceProfile profile) { + public AccessPointS profile(@javax.annotation.Nullable SimplifiedServiceProfile profile) { this.profile = profile; return this; } @@ -304,7 +280,7 @@ public void setProfile(@javax.annotation.Nullable SimplifiedServiceProfile profi } - public AccessPoint router(@javax.annotation.Nullable CloudRouter router) { + public AccessPointS router(@javax.annotation.Nullable CloudRouter router) { this.router = router; return this; } @@ -323,7 +299,7 @@ public void setRouter(@javax.annotation.Nullable CloudRouter router) { } - public AccessPoint linkProtocol(@javax.annotation.Nullable SimplifiedLinkProtocol linkProtocol) { + public AccessPointS linkProtocol(@javax.annotation.Nullable SimplifiedLinkProtocol linkProtocol) { this.linkProtocol = linkProtocol; return this; } @@ -342,7 +318,7 @@ public void setLinkProtocol(@javax.annotation.Nullable SimplifiedLinkProtocol li } - public AccessPoint virtualDevice(@javax.annotation.Nullable VirtualDevice virtualDevice) { + public AccessPointS virtualDevice(@javax.annotation.Nullable VirtualDevice virtualDevice) { this.virtualDevice = virtualDevice; return this; } @@ -361,7 +337,7 @@ public void setVirtualDevice(@javax.annotation.Nullable VirtualDevice virtualDev } - public AccessPoint _interface(@javax.annotation.Nullable ModelInterface _interface) { + public AccessPointS _interface(@javax.annotation.Nullable ModelInterface _interface) { this._interface = _interface; return this; } @@ -380,7 +356,7 @@ public void setInterface(@javax.annotation.Nullable ModelInterface _interface) { } - public AccessPoint network(@javax.annotation.Nullable SimplifiedNetwork network) { + public AccessPointS network(@javax.annotation.Nullable SimplifiedNetwork network) { this.network = network; return this; } @@ -399,7 +375,7 @@ public void setNetwork(@javax.annotation.Nullable SimplifiedNetwork network) { } - public AccessPoint sellerRegion(@javax.annotation.Nullable String sellerRegion) { + public AccessPointS sellerRegion(@javax.annotation.Nullable String sellerRegion) { this.sellerRegion = sellerRegion; return this; } @@ -418,7 +394,7 @@ public void setSellerRegion(@javax.annotation.Nullable String sellerRegion) { } - public AccessPoint peeringType(@javax.annotation.Nullable PeeringType peeringType) { + public AccessPointS peeringType(@javax.annotation.Nullable PeeringType peeringType) { this.peeringType = peeringType; return this; } @@ -437,7 +413,7 @@ public void setPeeringType(@javax.annotation.Nullable PeeringType peeringType) { } - public AccessPoint authenticationKey(@javax.annotation.Nullable String authenticationKey) { + public AccessPointS authenticationKey(@javax.annotation.Nullable String authenticationKey) { this.authenticationKey = authenticationKey; return this; } @@ -456,7 +432,7 @@ public void setAuthenticationKey(@javax.annotation.Nullable String authenticatio } - public AccessPoint providerConnectionId(@javax.annotation.Nullable String providerConnectionId) { + public AccessPointS providerConnectionId(@javax.annotation.Nullable String providerConnectionId) { this.providerConnectionId = providerConnectionId; return this; } @@ -475,7 +451,7 @@ public void setProviderConnectionId(@javax.annotation.Nullable String providerCo } - public AccessPoint virtualNetwork(@javax.annotation.Nullable VirtualNetwork virtualNetwork) { + 
public AccessPointS virtualNetwork(@javax.annotation.Nullable VirtualNetwork virtualNetwork) { this.virtualNetwork = virtualNetwork; return this; } @@ -494,7 +470,7 @@ public void setVirtualNetwork(@javax.annotation.Nullable VirtualNetwork virtualN } - public AccessPoint interconnection(@javax.annotation.Nullable MetalInterconnection interconnection) { + public AccessPointS interconnection(@javax.annotation.Nullable MetalInterconnection interconnection) { this.interconnection = interconnection; return this; } @@ -513,7 +489,7 @@ public void setInterconnection(@javax.annotation.Nullable MetalInterconnection i } - public AccessPoint vpicInterface(@javax.annotation.Nullable VpicInterface vpicInterface) { + public AccessPointS vpicInterface(@javax.annotation.Nullable VpicInterface vpicInterface) { this.vpicInterface = vpicInterface; return this; } @@ -532,7 +508,7 @@ public void setVpicInterface(@javax.annotation.Nullable VpicInterface vpicInterf } - public AccessPoint role(@javax.annotation.Nullable RoleEnum role) { + public AccessPointS role(@javax.annotation.Nullable RoleEnum role) { this.role = role; return this; } @@ -565,7 +541,7 @@ public void setRole(@javax.annotation.Nullable RoleEnum role) { * @param value value of the property * @return the AccessPoint instance itself */ - public AccessPoint putAdditionalProperty(String key, Object value) { + public AccessPointS putAdditionalProperty(String key, Object value) { if (this.additionalProperties == null) { this.additionalProperties = new HashMap(); } @@ -604,26 +580,26 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - AccessPoint accessPoint = (AccessPoint) o; - return Objects.equals(this.type, accessPoint.type) && - Objects.equals(this.account, accessPoint.account) && - Objects.equals(this.location, accessPoint.location) && - Objects.equals(this.port, accessPoint.port) && - Objects.equals(this.profile, accessPoint.profile) && - Objects.equals(this.router, accessPoint.router) && - Objects.equals(this.linkProtocol, accessPoint.linkProtocol) && - Objects.equals(this.virtualDevice, accessPoint.virtualDevice) && - Objects.equals(this._interface, accessPoint._interface) && - Objects.equals(this.network, accessPoint.network) && - Objects.equals(this.sellerRegion, accessPoint.sellerRegion) && - Objects.equals(this.peeringType, accessPoint.peeringType) && - Objects.equals(this.authenticationKey, accessPoint.authenticationKey) && - Objects.equals(this.providerConnectionId, accessPoint.providerConnectionId) && - Objects.equals(this.virtualNetwork, accessPoint.virtualNetwork) && - Objects.equals(this.interconnection, accessPoint.interconnection) && - Objects.equals(this.vpicInterface, accessPoint.vpicInterface) && - Objects.equals(this.role, accessPoint.role)&& - Objects.equals(this.additionalProperties, accessPoint.additionalProperties); + AccessPointS accessPointS = (AccessPointS) o; + return Objects.equals(this.type, accessPointS.type) && + Objects.equals(this.account, accessPointS.account) && + Objects.equals(this.location, accessPointS.location) && + Objects.equals(this.port, accessPointS.port) && + Objects.equals(this.profile, accessPointS.profile) && + Objects.equals(this.router, accessPointS.router) && + Objects.equals(this.linkProtocol, accessPointS.linkProtocol) && + Objects.equals(this.virtualDevice, accessPointS.virtualDevice) && + Objects.equals(this._interface, accessPointS._interface) && + Objects.equals(this.network, accessPointS.network) && + Objects.equals(this.sellerRegion, 
accessPointS.sellerRegion) && + Objects.equals(this.peeringType, accessPointS.peeringType) && + Objects.equals(this.authenticationKey, accessPointS.authenticationKey) && + Objects.equals(this.providerConnectionId, accessPointS.providerConnectionId) && + Objects.equals(this.virtualNetwork, accessPointS.virtualNetwork) && + Objects.equals(this.interconnection, accessPointS.interconnection) && + Objects.equals(this.vpicInterface, accessPointS.vpicInterface) && + Objects.equals(this.role, accessPointS.role)&& + Objects.equals(this.additionalProperties, accessPointS.additionalProperties); } @Override @@ -689,8 +665,8 @@ private String toIndentedString(Object o) { */ public static void validateJsonElement(JsonElement jsonElement) throws IOException { if (jsonElement == null) { - if (!AccessPoint.openapiRequiredFields.isEmpty()) { // has required fields but JSON element is null - throw new IllegalArgumentException(String.format(Locale.ROOT, "The required field(s) %s in AccessPoint is not found in the empty JSON string", AccessPoint.openapiRequiredFields.toString())); + if (!AccessPointS.openapiRequiredFields.isEmpty()) { // has required fields but JSON element is null + throw new IllegalArgumentException(String.format(Locale.ROOT, "The required field(s) %s in AccessPoint is not found in the empty JSON string", AccessPointS.openapiRequiredFields.toString())); } } JsonObject jsonObj = jsonElement.getAsJsonObject(); @@ -772,16 +748,16 @@ public static class CustomTypeAdapterFactory implements TypeAdapterFactory { @SuppressWarnings("unchecked") @Override public TypeAdapter create(Gson gson, TypeToken type) { - if (!AccessPoint.class.isAssignableFrom(type.getRawType())) { + if (!AccessPointS.class.isAssignableFrom(type.getRawType())) { return null; // this class only serializes 'AccessPoint' and its subtypes } final TypeAdapter elementAdapter = gson.getAdapter(JsonElement.class); - final TypeAdapter thisAdapter - = gson.getDelegateAdapter(this, TypeToken.get(AccessPoint.class)); + final TypeAdapter thisAdapter + = gson.getDelegateAdapter(this, TypeToken.get(AccessPointS.class)); - return (TypeAdapter) new TypeAdapter() { + return (TypeAdapter) new TypeAdapter() { @Override - public void write(JsonWriter out, AccessPoint value) throws IOException { + public void write(JsonWriter out, AccessPointS value) throws IOException { JsonObject obj = thisAdapter.toJsonTree(value).getAsJsonObject(); obj.remove("additionalProperties"); // serialize additional properties @@ -809,12 +785,12 @@ else if (entry.getValue() instanceof Character) } @Override - public AccessPoint read(JsonReader in) throws IOException { + public AccessPointS read(JsonReader in) throws IOException { JsonElement jsonElement = elementAdapter.read(in); validateJsonElement(jsonElement); JsonObject jsonObj = jsonElement.getAsJsonObject(); // store additional fields in the deserialized instance - AccessPoint instance = thisAdapter.fromJsonTree(jsonObj); + AccessPointS instance = thisAdapter.fromJsonTree(jsonObj); for (Map.Entry entry : jsonObj.entrySet()) { if (!openapiFields.contains(entry.getKey())) { if (entry.getValue().isJsonPrimitive()) { // primitive type @@ -847,8 +823,8 @@ else if (entry.getValue().getAsJsonPrimitive().isBoolean()) * @return An instance of AccessPoint * @throws IOException if the JSON string is invalid with respect to AccessPoint */ - public static AccessPoint fromJson(String jsonString) throws IOException { - return JSON.getGson().fromJson(jsonString, AccessPoint.class); + public static AccessPointS fromJson(String 
jsonString) throws IOException { + return JSON.getGson().fromJson(jsonString, AccessPointS.class); } /** From 8548df601ea428c4387c9cad68210e52e87e8a53 Mon Sep 17 00:00:00 2001 From: "tomasz.tutka" Date: Thu, 12 Feb 2026 19:52:57 +0100 Subject: [PATCH 07/15] updated --- .../fabric/tests/ConnectionsApiTest.java | 16 ++++---- services/fabricv4/docs/ConnectionSide.md | 2 +- .../sdk/fabricv4/model/ConnectionSide.java | 41 ++++++------------- 3 files changed, 21 insertions(+), 38 deletions(-) diff --git a/equinix-openapi-fabric-tests/src/test/java/com/equinix/openapi/fabric/tests/ConnectionsApiTest.java b/equinix-openapi-fabric-tests/src/test/java/com/equinix/openapi/fabric/tests/ConnectionsApiTest.java index dcbb6386..450c583e 100644 --- a/equinix-openapi-fabric-tests/src/test/java/com/equinix/openapi/fabric/tests/ConnectionsApiTest.java +++ b/equinix-openapi-fabric-tests/src/test/java/com/equinix/openapi/fabric/tests/ConnectionsApiTest.java @@ -73,7 +73,7 @@ public void createConnectionVdColo() throws ApiException { .bandwidth(1000) .type(ConnectionType.EVPL_VC) .order(new Order().purchaseOrderNumber("pol123")) - .aSide(new ConnectionSide().accessPoint(new AccessPoint() + .aSide(new ConnectionSide().accessPoint(new AccessPointS() .type(AccessPointType.VD) .virtualDevice(new VirtualDevice() .type(VirtualDevice.TypeEnum.EDGE) @@ -84,7 +84,7 @@ public void createConnectionVdColo() throws ApiException { for (int i = 0; i < 3; i++) { int vlanTag = getRandomVlanNumber(); connectionPostRequest.zSide(new ConnectionSide().accessPoint( - new AccessPoint() + new AccessPointS() .type(AccessPointType.COLO) .port(new SimplifiedPort() .uuid(portUuid)) @@ -181,7 +181,7 @@ public static Connection createConnectionFCR2ToPort() throws ApiException { .bandwidth(1000) .project(new Project().projectId(userDto.getProjectId())) .aSide(new ConnectionSide().accessPoint( - new AccessPoint() + new AccessPointS() .type(AccessPointType.CLOUD_ROUTER) .router(new CloudRouter().uuid(cloudRouter.getUuid())))); @@ -189,7 +189,7 @@ public static Connection createConnectionFCR2ToPort() throws ApiException { for (int i = 0; i < 3; i++) { int tag = getRandomVlanNumber(); connectionPostRequest.zSide(new ConnectionSide().accessPoint( - new AccessPoint() + new AccessPointS() .type(AccessPointType.COLO) .port(new SimplifiedPort().uuid(port.getUuid())) .linkProtocol(new SimplifiedLinkProtocol() @@ -234,7 +234,7 @@ public static Connection createPort2SpConnection() throws ApiException { .redundancy(new ConnectionRedundancy().priority(ConnectionPriority.PRIMARY)) .order(new Order().purchaseOrderNumber("pol123")) .zSide(new ConnectionSide().accessPoint( - new AccessPoint() + new AccessPointS() .type(AccessPointType.SP) .profile(new SimplifiedServiceProfile() .type(ServiceProfileTypeEnum.L2_PROFILE) @@ -248,7 +248,7 @@ public static Connection createPort2SpConnection() throws ApiException { int sTag = getRandomVlanNumber(); int cTag = getRandomVlanNumber(); connectionPostRequest.aSide(new ConnectionSide().accessPoint( - new AccessPoint() + new AccessPointS() .type(AccessPointType.COLO) .port(new SimplifiedPort().uuid(UUID.fromString(portDto.getUuid()))) .linkProtocol(new SimplifiedLinkProtocol() @@ -317,14 +317,14 @@ public static Connection createPort2Port() throws ApiException { .type(ConnectionType.EVPL_VC) .redundancy(new ConnectionRedundancy().priority(ConnectionPriority.PRIMARY)) .aSide(new ConnectionSide().accessPoint( - new AccessPoint() + new AccessPointS() .type(AccessPointType.COLO) .port(new SimplifiedPort() 
.uuid(port.get(0).getUuid())) .linkProtocol(new SimplifiedLinkProtocol() .type(LinkProtocolType.DOT1Q).vlanTag(tagAside)))) .zSide(new ConnectionSide().accessPoint( - new AccessPoint() + new AccessPointS() .type(AccessPointType.COLO) .port(new SimplifiedPort() .uuid(port.get(1).getUuid())) diff --git a/services/fabricv4/docs/ConnectionSide.md b/services/fabricv4/docs/ConnectionSide.md index 155fdeb5..c62b2727 100644 --- a/services/fabricv4/docs/ConnectionSide.md +++ b/services/fabricv4/docs/ConnectionSide.md @@ -9,7 +9,7 @@ Connection configuration object for each side of multi-segment connection | Name | Type | Description | Notes | |------------ | ------------- | ------------- | -------------| |**serviceToken** | [**ServiceToken**](ServiceToken.md) | | [optional] | -|**accessPoint** | [**AccessPoint**](AccessPoint.md) | | [optional] | +|**accessPointS** | [**AccessPoint**](AccessPoint.md) | | [optional] | |**internetAccess** | [**InternetAccess**](InternetAccess.md) | | [optional] | |**companyProfile** | [**ConnectionCompanyProfile**](ConnectionCompanyProfile.md) | | [optional] | |**invitation** | [**ConnectionInvitation**](ConnectionInvitation.md) | | [optional] | diff --git a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/ConnectionSide.java b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/ConnectionSide.java index 4cf0cc83..97794347 100644 --- a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/ConnectionSide.java +++ b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/ConnectionSide.java @@ -13,14 +13,8 @@ import java.util.Objects; import java.util.Locale; -import com.equinix.sdk.fabricv4.model.AccessPoint; -import com.equinix.sdk.fabricv4.model.ConnectionCompanyProfile; -import com.equinix.sdk.fabricv4.model.ConnectionInvitation; -import com.equinix.sdk.fabricv4.model.ConnectionSideAdditionalInfo; -import com.equinix.sdk.fabricv4.model.InternetAccess; -import com.equinix.sdk.fabricv4.model.ServiceToken; + import com.google.gson.TypeAdapter; -import com.google.gson.annotations.JsonAdapter; import com.google.gson.annotations.SerializedName; import com.google.gson.stream.JsonReader; import com.google.gson.stream.JsonWriter; @@ -30,26 +24,15 @@ import java.util.List; import com.google.gson.Gson; -import com.google.gson.GsonBuilder; import com.google.gson.JsonArray; -import com.google.gson.JsonDeserializationContext; -import com.google.gson.JsonDeserializer; import com.google.gson.JsonElement; import com.google.gson.JsonObject; -import com.google.gson.JsonParseException; import com.google.gson.TypeAdapterFactory; import com.google.gson.reflect.TypeToken; -import com.google.gson.TypeAdapter; -import com.google.gson.stream.JsonReader; -import com.google.gson.stream.JsonWriter; -import java.io.IOException; import java.util.HashMap; import java.util.HashSet; -import java.util.List; import java.util.Map; -import java.util.Set; -import java.util.Locale; import com.equinix.sdk.fabricv4.JSON; @@ -66,7 +49,7 @@ public class ConnectionSide { public static final String SERIALIZED_NAME_ACCESS_POINT = "accessPoint"; @SerializedName(SERIALIZED_NAME_ACCESS_POINT) @javax.annotation.Nullable - private AccessPoint accessPoint; + private AccessPointS accessPointS; public static final String SERIALIZED_NAME_INTERNET_ACCESS = "internetAccess"; @SerializedName(SERIALIZED_NAME_INTERNET_ACCESS) @@ -110,8 +93,8 @@ public void setServiceToken(@javax.annotation.Nullable ServiceToken serviceToken } - public ConnectionSide 
accessPoint(@javax.annotation.Nullable AccessPoint accessPoint) { - this.accessPoint = accessPoint; + public ConnectionSide accessPoint(@javax.annotation.Nullable AccessPointS accessPointS) { + this.accessPointS = accessPointS; return this; } @@ -120,12 +103,12 @@ public ConnectionSide accessPoint(@javax.annotation.Nullable AccessPoint accessP * @return accessPoint */ @javax.annotation.Nullable - public AccessPoint getAccessPoint() { - return accessPoint; + public AccessPointS getAccessPoint() { + return accessPointS; } - public void setAccessPoint(@javax.annotation.Nullable AccessPoint accessPoint) { - this.accessPoint = accessPoint; + public void setAccessPoint(@javax.annotation.Nullable AccessPointS accessPointS) { + this.accessPointS = accessPointS; } @@ -268,7 +251,7 @@ public boolean equals(Object o) { } ConnectionSide connectionSide = (ConnectionSide) o; return Objects.equals(this.serviceToken, connectionSide.serviceToken) && - Objects.equals(this.accessPoint, connectionSide.accessPoint) && + Objects.equals(this.accessPointS, connectionSide.accessPointS) && Objects.equals(this.internetAccess, connectionSide.internetAccess) && Objects.equals(this.companyProfile, connectionSide.companyProfile) && Objects.equals(this.invitation, connectionSide.invitation) && @@ -278,7 +261,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(serviceToken, accessPoint, internetAccess, companyProfile, invitation, additionalInfo, additionalProperties); + return Objects.hash(serviceToken, accessPointS, internetAccess, companyProfile, invitation, additionalInfo, additionalProperties); } @Override @@ -286,7 +269,7 @@ public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ConnectionSide {\n"); sb.append(" serviceToken: ").append(toIndentedString(serviceToken)).append("\n"); - sb.append(" accessPoint: ").append(toIndentedString(accessPoint)).append("\n"); + sb.append(" accessPoint: ").append(toIndentedString(accessPointS)).append("\n"); sb.append(" internetAccess: ").append(toIndentedString(internetAccess)).append("\n"); sb.append(" companyProfile: ").append(toIndentedString(companyProfile)).append("\n"); sb.append(" invitation: ").append(toIndentedString(invitation)).append("\n"); @@ -338,7 +321,7 @@ public static void validateJsonElement(JsonElement jsonElement) throws IOExcepti } // validate the optional field `accessPoint` if (jsonObj.get("accessPoint") != null && !jsonObj.get("accessPoint").isJsonNull()) { - AccessPoint.validateJsonElement(jsonObj.get("accessPoint")); + AccessPointS.validateJsonElement(jsonObj.get("accessPoint")); } // validate the optional field `internetAccess` if (jsonObj.get("internetAccess") != null && !jsonObj.get("internetAccess").isJsonNull()) { From f9507e8f71e7886db94ff1c2d46fd1bb5b0179d9 Mon Sep 17 00:00:00 2001 From: "tomasz.tutka" Date: Thu, 12 Feb 2026 19:53:06 +0100 Subject: [PATCH 08/15] updated --- services/fabricv4/docs/SearchFieldName.md | 64 +++++++++---------- services/fabricv4/docs/SortBy.md | 32 +++++----- .../docs/TimeServicePriceConnectionASide.md | 2 +- .../docs/VirtualConnectionPriceASide.md | 2 +- .../docs/VirtualConnectionPriceZSide.md | 2 +- .../fabricv4/docs/VirtualConnectionSide.md | 2 +- .../java/com/equinix/sdk/fabricv4/JSON.java | 4 +- .../fabricv4/model/VirtualConnectionSide.java | 36 ++++------- 8 files changed, 66 insertions(+), 78 deletions(-) diff --git a/services/fabricv4/docs/SearchFieldName.md b/services/fabricv4/docs/SearchFieldName.md index 6c435cd7..7d09660f 100644 --- 
a/services/fabricv4/docs/SearchFieldName.md +++ b/services/fabricv4/docs/SearchFieldName.md @@ -17,35 +17,35 @@ * `ACCOUNT_ORGID` (value: `"/account/orgId"`) -* `ASIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/aSide/accessPoint/account/accountName"`) +* `ASIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/aSide/accessPointS/account/accountName"`) -* `ASIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNUMBER` (value: `"/aSide/accessPoint/account/accountNumber"`) +* `ASIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNUMBER` (value: `"/aSide/accessPointS/account/accountNumber"`) -* `ASIDE_ACCESSPOINT_ROUTER_UUID` (value: `"/aSide/accessPoint/router/uuid"`) +* `ASIDE_ACCESSPOINT_ROUTER_UUID` (value: `"/aSide/accessPointS/router/uuid"`) -* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/aSide/accessPoint/linkProtocol/vlanCTag"`) +* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/aSide/accessPointS/linkProtocol/vlanCTag"`) -* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: `"/aSide/accessPoint/linkProtocol/vlanSTag"`) +* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: `"/aSide/accessPointS/linkProtocol/vlanSTag"`) -* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMIN` (value: `"/aSide/accessPoint/linkProtocol/vlanTagMin"`) +* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMIN` (value: `"/aSide/accessPointS/linkProtocol/vlanTagMin"`) -* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMAX` (value: `"/aSide/accessPoint/linkProtocol/vlanTagMax"`) +* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMAX` (value: `"/aSide/accessPointS/linkProtocol/vlanTagMax"`) -* `ASIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/aSide/accessPoint/location/metroCode"`) +* `ASIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/aSide/accessPointS/location/metroCode"`) -* `ASIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/aSide/accessPoint/location/metroName"`) +* `ASIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/aSide/accessPointS/location/metroName"`) -* `ASIDE_ACCESSPOINT_NAME` (value: `"/aSide/accessPoint/name"`) +* `ASIDE_ACCESSPOINT_NAME` (value: `"/aSide/accessPointS/name"`) -* `ASIDE_ACCESSPOINT_PORT_UUID` (value: `"/aSide/accessPoint/port/uuid"`) +* `ASIDE_ACCESSPOINT_PORT_UUID` (value: `"/aSide/accessPointS/port/uuid"`) -* `ASIDE_ACCESSPOINT_PORT_NAME` (value: `"/aSide/accessPoint/port/name"`) +* `ASIDE_ACCESSPOINT_PORT_NAME` (value: `"/aSide/accessPointS/port/name"`) -* `ASIDE_ACCESSPOINT_TYPE` (value: `"/aSide/accessPoint/type"`) +* `ASIDE_ACCESSPOINT_TYPE` (value: `"/aSide/accessPointS/type"`) -* `ASIDE_ACCESSPOINT_VIRTUALDEVICE_NAME` (value: `"/aSide/accessPoint/virtualDevice/name"`) +* `ASIDE_ACCESSPOINT_VIRTUALDEVICE_NAME` (value: `"/aSide/accessPointS/virtualDevice/name"`) -* `ASIDE_ACCESSPOINT_VIRTUALDEVICE_UUID` (value: `"/aSide/accessPoint/virtualDevice/uuid"`) +* `ASIDE_ACCESSPOINT_VIRTUALDEVICE_UUID` (value: `"/aSide/accessPointS/virtualDevice/uuid"`) * `ASIDE_SERVICETOKEN_UUID` (value: `"/aSide/serviceToken/uuid"`) @@ -61,39 +61,39 @@ * `REDUNDANCY_PRIORITY` (value: `"/redundancy/priority"`) -* `ZSIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/zSide/accessPoint/account/accountName"`) +* `ZSIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/zSide/accessPointS/account/accountName"`) -* `ZSIDE_ACCESSPOINT_AUTHENTICATIONKEY` (value: `"/zSide/accessPoint/authenticationKey"`) +* `ZSIDE_ACCESSPOINT_AUTHENTICATIONKEY` (value: `"/zSide/accessPointS/authenticationKey"`) -* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/zSide/accessPoint/linkProtocol/vlanCTag"`) +* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/zSide/accessPointS/linkProtocol/vlanCTag"`) -* 
`ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: `"/zSide/accessPoint/linkProtocol/vlanSTag"`) +* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: `"/zSide/accessPointS/linkProtocol/vlanSTag"`) -* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMIN` (value: `"/zSide/accessPoint/linkProtocol/vlanTagMin"`) +* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMIN` (value: `"/zSide/accessPointS/linkProtocol/vlanTagMin"`) -* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMAX` (value: `"/zSide/accessPoint/linkProtocol/vlanTagMax"`) +* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMAX` (value: `"/zSide/accessPointS/linkProtocol/vlanTagMax"`) -* `ZSIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/zSide/accessPoint/location/metroCode"`) +* `ZSIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/zSide/accessPointS/location/metroCode"`) -* `ZSIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/zSide/accessPoint/location/metroName"`) +* `ZSIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/zSide/accessPointS/location/metroName"`) -* `ZSIDE_ACCESSPOINT_NAME` (value: `"/zSide/accessPoint/name"`) +* `ZSIDE_ACCESSPOINT_NAME` (value: `"/zSide/accessPointS/name"`) -* `ZSIDE_ACCESSPOINT_PORT_UUID` (value: `"/zSide/accessPoint/port/uuid"`) +* `ZSIDE_ACCESSPOINT_PORT_UUID` (value: `"/zSide/accessPointS/port/uuid"`) -* `ZSIDE_ACCESSPOINT_NETWORK_UUID` (value: `"/zSide/accessPoint/network/uuid"`) +* `ZSIDE_ACCESSPOINT_NETWORK_UUID` (value: `"/zSide/accessPointS/network/uuid"`) -* `ZSIDE_ACCESSPOINT_PORT_NAME` (value: `"/zSide/accessPoint/port/name"`) +* `ZSIDE_ACCESSPOINT_PORT_NAME` (value: `"/zSide/accessPointS/port/name"`) -* `ZSIDE_ACCESSPOINT_PROFILE_UUID` (value: `"/zSide/accessPoint/profile/uuid"`) +* `ZSIDE_ACCESSPOINT_PROFILE_UUID` (value: `"/zSide/accessPointS/profile/uuid"`) -* `ZSIDE_ACCESSPOINT_TYPE` (value: `"/zSide/accessPoint/type"`) +* `ZSIDE_ACCESSPOINT_TYPE` (value: `"/zSide/accessPointS/type"`) -* `ZSIDE_ACCESSPOINT_ROLE` (value: `"/zSide/accessPoint/role"`) +* `ZSIDE_ACCESSPOINT_ROLE` (value: `"/zSide/accessPointS/role"`) -* `ZSIDE_ACCESSPOINT_VIRTUALDEVICE_NAME` (value: `"/zSide/accessPoint/virtualDevice/name"`) +* `ZSIDE_ACCESSPOINT_VIRTUALDEVICE_NAME` (value: `"/zSide/accessPointS/virtualDevice/name"`) -* `ZSIDE_ACCESSPOINT_VIRTUALDEVICE_UUID` (value: `"/zSide/accessPoint/virtualDevice/uuid"`) +* `ZSIDE_ACCESSPOINT_VIRTUALDEVICE_UUID` (value: `"/zSide/accessPointS/virtualDevice/uuid"`) * `ZSIDE_SERVICETOKEN_UUID` (value: `"/zSide/serviceToken/uuid"`) diff --git a/services/fabricv4/docs/SortBy.md b/services/fabricv4/docs/SortBy.md index 39eab163..a257e116 100644 --- a/services/fabricv4/docs/SortBy.md +++ b/services/fabricv4/docs/SortBy.md @@ -9,37 +9,37 @@ * `DIRECTION` (value: `"/direction"`) -* `ASIDE_ACCESSPOINT_NAME` (value: `"/aSide/accessPoint/name"`) +* `ASIDE_ACCESSPOINT_NAME` (value: `"/aSide/accessPointS/name"`) -* `ASIDE_ACCESSPOINT_TYPE` (value: `"/aSide/accessPoint/type"`) +* `ASIDE_ACCESSPOINT_TYPE` (value: `"/aSide/accessPointS/type"`) -* `ASIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/aSide/accessPoint/account/accountName"`) +* `ASIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/aSide/accessPointS/account/accountName"`) -* `ASIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/aSide/accessPoint/location/metroName"`) +* `ASIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/aSide/accessPointS/location/metroName"`) -* `ASIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/aSide/accessPoint/location/metroCode"`) +* `ASIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/aSide/accessPointS/location/metroCode"`) -* 
`ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/aSide/accessPoint/linkProtocol/vlanCTag"`) +* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/aSide/accessPointS/linkProtocol/vlanCTag"`) -* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: `"/aSide/accessPoint/linkProtocol/vlanSTag"`) +* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: `"/aSide/accessPointS/linkProtocol/vlanSTag"`) -* `ZSIDE_ACCESSPOINT_NAME` (value: `"/zSide/accessPoint/name"`) +* `ZSIDE_ACCESSPOINT_NAME` (value: `"/zSide/accessPointS/name"`) -* `ZSIDE_ACCESSPOINT_TYPE` (value: `"/zSide/accessPoint/type"`) +* `ZSIDE_ACCESSPOINT_TYPE` (value: `"/zSide/accessPointS/type"`) -* `ZSIDE_ACCESSPOINT_ROLE` (value: `"/zSide/accessPoint/role"`) +* `ZSIDE_ACCESSPOINT_ROLE` (value: `"/zSide/accessPointS/role"`) -* `ZSIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/zSide/accessPoint/account/accountName"`) +* `ZSIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/zSide/accessPointS/account/accountName"`) -* `ZSIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/zSide/accessPoint/location/metroName"`) +* `ZSIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/zSide/accessPointS/location/metroName"`) -* `ZSIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/zSide/accessPoint/location/metroCode"`) +* `ZSIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/zSide/accessPointS/location/metroCode"`) -* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/zSide/accessPoint/linkProtocol/vlanCTag"`) +* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/zSide/accessPointS/linkProtocol/vlanCTag"`) -* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: `"/zSide/accessPoint/linkProtocol/vlanSTag"`) +* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: `"/zSide/accessPointS/linkProtocol/vlanSTag"`) -* `ZSIDE_ACCESSPOINT_AUTHENTICATIONKEY` (value: `"/zSide/accessPoint/authenticationKey"`) +* `ZSIDE_ACCESSPOINT_AUTHENTICATIONKEY` (value: `"/zSide/accessPointS/authenticationKey"`) * `BANDWIDTH` (value: `"/bandwidth"`) diff --git a/services/fabricv4/docs/TimeServicePriceConnectionASide.md b/services/fabricv4/docs/TimeServicePriceConnectionASide.md index 4f15771b..4a03add5 100644 --- a/services/fabricv4/docs/TimeServicePriceConnectionASide.md +++ b/services/fabricv4/docs/TimeServicePriceConnectionASide.md @@ -8,7 +8,7 @@ Time Service Price Connection ASide configuration | Name | Type | Description | Notes | |------------ | ------------- | ------------- | -------------| -|**accessPoint** | [**TimeServicePriceConnectionAccessPoint**](TimeServicePriceConnectionAccessPoint.md) | | [optional] | +|**accessPointS** | [**TimeServicePriceConnectionAccessPoint**](TimeServicePriceConnectionAccessPoint.md) | | [optional] | diff --git a/services/fabricv4/docs/VirtualConnectionPriceASide.md b/services/fabricv4/docs/VirtualConnectionPriceASide.md index 79384eff..4365e569 100644 --- a/services/fabricv4/docs/VirtualConnectionPriceASide.md +++ b/services/fabricv4/docs/VirtualConnectionPriceASide.md @@ -7,7 +7,7 @@ | Name | Type | Description | Notes | |------------ | ------------- | ------------- | -------------| -|**accessPoint** | [**VirtualConnectionPriceASideAccessPoint**](VirtualConnectionPriceASideAccessPoint.md) | | [optional] | +|**accessPointS** | [**VirtualConnectionPriceASideAccessPoint**](VirtualConnectionPriceASideAccessPoint.md) | | [optional] | diff --git a/services/fabricv4/docs/VirtualConnectionPriceZSide.md b/services/fabricv4/docs/VirtualConnectionPriceZSide.md index 51c66b5e..1143970a 100644 --- a/services/fabricv4/docs/VirtualConnectionPriceZSide.md +++ 
b/services/fabricv4/docs/VirtualConnectionPriceZSide.md @@ -7,7 +7,7 @@ | Name | Type | Description | Notes | |------------ | ------------- | ------------- | -------------| -|**accessPoint** | [**VirtualConnectionPriceZSideAccessPoint**](VirtualConnectionPriceZSideAccessPoint.md) | | [optional] | +|**accessPointS** | [**VirtualConnectionPriceZSideAccessPoint**](VirtualConnectionPriceZSideAccessPoint.md) | | [optional] | diff --git a/services/fabricv4/docs/VirtualConnectionSide.md b/services/fabricv4/docs/VirtualConnectionSide.md index ee1e951e..e75a02bb 100644 --- a/services/fabricv4/docs/VirtualConnectionSide.md +++ b/services/fabricv4/docs/VirtualConnectionSide.md @@ -8,7 +8,7 @@ Fabric Connection access point object. | Name | Type | Description | Notes | |------------ | ------------- | ------------- | -------------| -|**accessPoint** | [**AccessPoint**](AccessPoint.md) | | [optional] | +|**accessPointS** | [**AccessPoint**](AccessPoint.md) | | [optional] | diff --git a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/JSON.java b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/JSON.java index 587dcef9..559746b9 100644 --- a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/JSON.java +++ b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/JSON.java @@ -11,6 +11,7 @@ package com.equinix.sdk.fabricv4; +import com.equinix.sdk.fabricv4.model.AccessPointS; import com.equinix.sdk.fabricv4.model.PortDeviceRedundancy; import com.equinix.sdk.fabricv4.model.PortEncapsulation; import com.equinix.sdk.fabricv4.model.PortPriority; @@ -44,7 +45,6 @@ import java.time.format.DateTimeFormatter; import java.util.Collection; import java.util.Date; -import java.util.Locale; import java.util.Map; import java.util.HashMap; import java.util.TimeZone; @@ -141,7 +141,7 @@ public Enum deserialize(JsonElement json, java.lang.reflect.Type type, JsonDeser gsonBuilder.registerTypeAdapter(OffsetDateTime.class, offsetDateTimeTypeAdapter); gsonBuilder.registerTypeAdapter(LocalDate.class, localDateTypeAdapter); gsonBuilder.registerTypeAdapter(byte[].class, byteArrayAdapter); - gsonBuilder.registerTypeAdapterFactory(new com.equinix.sdk.fabricv4.model.AccessPoint.CustomTypeAdapterFactory()); + gsonBuilder.registerTypeAdapterFactory(new AccessPointS.CustomTypeAdapterFactory()); gsonBuilder.registerTypeAdapterFactory(new com.equinix.sdk.fabricv4.model.AccessPointSelector.CustomTypeAdapterFactory()); gsonBuilder.registerTypeAdapterFactory(new com.equinix.sdk.fabricv4.model.AddOperation.CustomTypeAdapterFactory()); gsonBuilder.registerTypeAdapterFactory(new com.equinix.sdk.fabricv4.model.AlertRulePostRequest.CustomTypeAdapterFactory()); diff --git a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/VirtualConnectionSide.java b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/VirtualConnectionSide.java index 2638992d..41d10fd9 100644 --- a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/VirtualConnectionSide.java +++ b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/VirtualConnectionSide.java @@ -13,9 +13,8 @@ import java.util.Objects; import java.util.Locale; -import com.equinix.sdk.fabricv4.model.AccessPoint; + import com.google.gson.TypeAdapter; -import com.google.gson.annotations.JsonAdapter; import com.google.gson.annotations.SerializedName; import com.google.gson.stream.JsonReader; import com.google.gson.stream.JsonWriter; @@ -23,26 +22,15 @@ import java.util.Arrays; import com.google.gson.Gson; -import com.google.gson.GsonBuilder; 
-import com.google.gson.JsonArray; -import com.google.gson.JsonDeserializationContext; -import com.google.gson.JsonDeserializer; import com.google.gson.JsonElement; import com.google.gson.JsonObject; -import com.google.gson.JsonParseException; import com.google.gson.TypeAdapterFactory; import com.google.gson.reflect.TypeToken; -import com.google.gson.TypeAdapter; -import com.google.gson.stream.JsonReader; -import com.google.gson.stream.JsonWriter; -import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; -import java.util.Locale; import com.equinix.sdk.fabricv4.JSON; @@ -54,13 +42,13 @@ public class VirtualConnectionSide { public static final String SERIALIZED_NAME_ACCESS_POINT = "accessPoint"; @SerializedName(SERIALIZED_NAME_ACCESS_POINT) @javax.annotation.Nullable - private AccessPoint accessPoint; + private AccessPointS accessPointS; public VirtualConnectionSide() { } - public VirtualConnectionSide accessPoint(@javax.annotation.Nullable AccessPoint accessPoint) { - this.accessPoint = accessPoint; + public VirtualConnectionSide accessPoint(@javax.annotation.Nullable AccessPointS accessPointS) { + this.accessPointS = accessPointS; return this; } @@ -69,12 +57,12 @@ public VirtualConnectionSide accessPoint(@javax.annotation.Nullable AccessPoint * @return accessPoint */ @javax.annotation.Nullable - public AccessPoint getAccessPoint() { - return accessPoint; + public AccessPointS getAccessPoint() { + return accessPointS; } - public void setAccessPoint(@javax.annotation.Nullable AccessPoint accessPoint) { - this.accessPoint = accessPoint; + public void setAccessPoint(@javax.annotation.Nullable AccessPointS accessPointS) { + this.accessPointS = accessPointS; } /** @@ -132,20 +120,20 @@ public boolean equals(Object o) { return false; } VirtualConnectionSide virtualConnectionSide = (VirtualConnectionSide) o; - return Objects.equals(this.accessPoint, virtualConnectionSide.accessPoint)&& + return Objects.equals(this.accessPointS, virtualConnectionSide.accessPointS)&& Objects.equals(this.additionalProperties, virtualConnectionSide.additionalProperties); } @Override public int hashCode() { - return Objects.hash(accessPoint, additionalProperties); + return Objects.hash(accessPointS, additionalProperties); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class VirtualConnectionSide {\n"); - sb.append(" accessPoint: ").append(toIndentedString(accessPoint)).append("\n"); + sb.append(" accessPoint: ").append(toIndentedString(accessPointS)).append("\n"); sb.append(" additionalProperties: ").append(toIndentedString(additionalProperties)).append("\n"); sb.append("}"); return sb.toString(); @@ -189,7 +177,7 @@ public static void validateJsonElement(JsonElement jsonElement) throws IOExcepti JsonObject jsonObj = jsonElement.getAsJsonObject(); // validate the optional field `accessPoint` if (jsonObj.get("accessPoint") != null && !jsonObj.get("accessPoint").isJsonNull()) { - AccessPoint.validateJsonElement(jsonObj.get("accessPoint")); + AccessPointS.validateJsonElement(jsonObj.get("accessPoint")); } } From 24383aa5d46a10a2d512dc45af8b543389b17906 Mon Sep 17 00:00:00 2001 From: "tomasz.tutka" Date: Fri, 13 Feb 2026 17:22:18 +0100 Subject: [PATCH 09/15] updated --- .github/workflows/branch-file-analysis.yml | 225 +++++++++ .github/workflows/copilot-analysis.yml | 540 --------------------- script/branch_file_analyzer.py | 297 ++++++++++++ 3 files changed, 522 insertions(+), 540 
deletions(-) create mode 100644 .github/workflows/branch-file-analysis.yml delete mode 100644 .github/workflows/copilot-analysis.yml create mode 100755 script/branch_file_analyzer.py diff --git a/.github/workflows/branch-file-analysis.yml b/.github/workflows/branch-file-analysis.yml new file mode 100644 index 00000000..04965d75 --- /dev/null +++ b/.github/workflows/branch-file-analysis.yml @@ -0,0 +1,225 @@ +name: "Branch File Analysis" + +on: + pull_request: + types: [opened, synchronize, edited] + branches: + - main + - develop + workflow_dispatch: + inputs: + pr_number: + description: 'PR number to analyze' + required: true + type: number + +permissions: + contents: read + pull-requests: write + actions: read + +jobs: + branch-file-analysis: + name: Branch File Analysis + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: '3.9' + + - name: Setup GitHub CLI + run: | + # Install GitHub CLI if not present + if ! command -v gh &> /dev/null; then + curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg + echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null + sudo apt update + sudo apt install gh + fi + + # Authenticate GitHub CLI + gh auth login --with-token <<< "${{ secrets.GITHUB_TOKEN }}" + + - name: Get PR details + id: pr-details + run: | + if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then + PR_NUMBER=${{ github.event.inputs.pr_number }} + else + PR_NUMBER=${{ github.event.number }} + fi + + echo "pr_number=$PR_NUMBER" >> $GITHUB_OUTPUT + + # Get PR info + PR_INFO=$(gh pr view $PR_NUMBER --json title,author,headRefName,baseRefName,body) + + # Extract values safely with multiline support + PR_TITLE=$(echo "$PR_INFO" | jq -r '.title // ""') + PR_AUTHOR=$(echo "$PR_INFO" | jq -r '.author.login // ""') + HEAD_BRANCH=$(echo "$PR_INFO" | jq -r '.headRefName // ""') + BASE_BRANCH=$(echo "$PR_INFO" | jq -r '.baseRefName // ""') + PR_BODY=$(echo "$PR_INFO" | jq -r '.body // ""') + + # Set outputs with proper escaping for multiline content + { + echo "pr_title<> $GITHUB_OUTPUT + + - name: Run branch file analysis + id: file-analysis + run: | + echo "🔍 Running branch file analysis..." + + # Check if analyzer script exists + if [ ! 
-f "script/branch_file_analyzer.py" ]; then + echo "❌ Branch file analyzer script not found at script/branch_file_analyzer.py" + echo "analysis_completed=false" >> $GITHUB_OUTPUT + echo "analysis_result=Branch file analyzer script not found" >> $GITHUB_OUTPUT + exit 0 + fi + + # Make script executable + chmod +x script/branch_file_analyzer.py + + # Run the analysis and capture output + echo "Running: python3 script/branch_file_analyzer.py" + + if python3 script/branch_file_analyzer.py > analysis_output.txt 2>&1; then + # Check if analysis produced results + if [ -s analysis_output.txt ]; then + echo "✅ Branch file analysis completed successfully" + echo "analysis_completed=true" >> $GITHUB_OUTPUT + + # Save the output with proper multiline handling + { + echo "analysis_result<> $GITHUB_OUTPUT + + echo "Analysis output:" + cat analysis_output.txt + else + echo "â„šī¸ No file changes detected" + echo "analysis_completed=true" >> $GITHUB_OUTPUT + echo "analysis_result=No file changes detected in this branch" >> $GITHUB_OUTPUT + fi + else + echo "❌ Branch file analysis failed" + echo "Analysis error output:" + cat analysis_output.txt + echo "analysis_completed=false" >> $GITHUB_OUTPUT + { + echo "analysis_result<> $GITHUB_OUTPUT + fi + + - name: Update PR description + if: steps.file-analysis.outputs.analysis_completed == 'true' + run: | + echo "📝 Updating PR description..." + + PR_NUMBER=${{ steps.pr-details.outputs.pr_number }} + + # Get current PR description + CURRENT_DESC=$(gh pr view $PR_NUMBER --json body --jq '.body // ""') + + # Create file analysis section + cat > file_analysis_section.md << 'EOF' +## 📋 Branch File Analysis + +${{ steps.file-analysis.outputs.analysis_result }} + +--- +*Auto-generated by Branch File Analysis workflow* +EOF + + # Check if file analysis section already exists and update/add accordingly + if echo "$CURRENT_DESC" | grep -q "## 📋 Branch File Analysis"; then + echo "🔄 Updating existing file analysis section..." + # Remove old file analysis section (from header to next ## or end) + NEW_DESC=$(echo "$CURRENT_DESC" | sed '/## 📋 Branch File Analysis/,/^---$/d' | sed '/^---$/d') + + # Add updated section + { + echo "$NEW_DESC" + echo "" + cat file_analysis_section.md + } > updated_desc.md + else + echo "➕ Adding new file analysis section..." 
+ # Add new file analysis section + if [ -n "$CURRENT_DESC" ] && [ "$CURRENT_DESC" != "null" ]; then + { + echo "$CURRENT_DESC" + echo "" + cat file_analysis_section.md + } > updated_desc.md + else + cp file_analysis_section.md updated_desc.md + fi + fi + + # Update PR description + gh pr edit $PR_NUMBER --body-file updated_desc.md + + echo "✅ PR description updated successfully" + + - name: Create analysis artifact + if: always() + uses: actions/upload-artifact@v4 + with: + name: branch-file-analysis-${{ steps.pr-details.outputs.pr_number }} + path: | + analysis_output.txt + file_analysis_section.md + updated_desc.md + retention-days: 7 + + - name: Summary + if: always() + run: | + echo "## 📋 Branch File Analysis Summary" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "**PR Number:** #${{ steps.pr-details.outputs.pr_number }}" >> $GITHUB_STEP_SUMMARY + echo "**Branch:** \`${{ steps.pr-details.outputs.head_branch }}\` → \`${{ steps.pr-details.outputs.base_branch }}\`" >> $GITHUB_STEP_SUMMARY + echo "**Analysis Status:** ${{ steps.file-analysis.outputs.analysis_completed == 'true' && '✅ Completed' || '❌ Failed' }}" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + + if [ "${{ steps.file-analysis.outputs.analysis_completed }}" == "true" ]; then + echo "### Analysis Results" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "\`\`\`" >> $GITHUB_STEP_SUMMARY + echo "${{ steps.file-analysis.outputs.analysis_result }}" >> $GITHUB_STEP_SUMMARY + echo "\`\`\`" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "✅ PR description has been updated with file analysis results" >> $GITHUB_STEP_SUMMARY + else + echo "❌ Analysis failed or no changes detected" >> $GITHUB_STEP_SUMMARY + fi diff --git a/.github/workflows/copilot-analysis.yml b/.github/workflows/copilot-analysis.yml deleted file mode 100644 index 4502a25e..00000000 --- a/.github/workflows/copilot-analysis.yml +++ /dev/null @@ -1,540 +0,0 @@ -name: "GitHub Copilot Code Analysis" - -on: - pull_request: - types: [opened, edited, synchronize] - branches: - - main - workflow_dispatch: - inputs: - pr_number: - description: 'PR number to analyze' - required: true - type: number - -permissions: - contents: read - pull-requests: write - actions: read - -jobs: - copilot-analysis: - name: Copilot Code Analysis - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - fetch-depth: 0 - token: ${{ secrets.GITHUB_TOKEN }} - - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: '18' - - - name: Install GitHub CLI and Copilot CLI - run: | - # Install GitHub CLI if not present - if ! 
command -v gh &> /dev/null; then - curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg - echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null - sudo apt update - sudo apt install gh - fi - - # Install Copilot CLI extension - gh auth login --with-token <<< "${{ secrets.GITHUB_TOKEN }}" - gh extension install github/gh-copilot || true - - - name: Get PR details - id: pr-details - run: | - if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then - PR_NUMBER=${{ github.event.inputs.pr_number }} - else - PR_NUMBER=${{ github.event.number }} - fi - - echo "pr_number=${PR_NUMBER}" >> $GITHUB_OUTPUT - - # Get PR info - PR_INFO=$(gh pr view $PR_NUMBER --json title,author,headRefName,baseRefName,body) - - # Extract values safely - PR_TITLE=$(echo "$PR_INFO" | jq -r '.title // ""') - PR_AUTHOR=$(echo "$PR_INFO" | jq -r '.author.login // ""') - HEAD_BRANCH=$(echo "$PR_INFO" | jq -r '.headRefName // ""') - BASE_BRANCH=$(echo "$PR_INFO" | jq -r '.baseRefName // ""') - PR_BODY=$(echo "$PR_INFO" | jq -r '.body // ""') - - # Set outputs with proper escaping - { - echo "pr_title<> $GITHUB_OUTPUT - - # Detect if this is an API Sync PR - IS_API_SYNC=false - PR_TITLE_VAL=$(echo "$PR_INFO" | jq -r '.title // ""') - PR_BODY_VAL=$(echo "$PR_INFO" | jq -r '.body // ""') - HEAD_BRANCH_VAL=$(echo "$PR_INFO" | jq -r '.headRefName // ""') - - echo "Checking PR title: $PR_TITLE_VAL" - echo "Checking head branch: $HEAD_BRANCH_VAL" - - if [ -n "$PR_TITLE_VAL" ] && echo "$PR_TITLE_VAL" | grep -i "api.*sync\|sync.*api\|spec.*update\|swagger.*update"; then - IS_API_SYNC=true - echo "API Sync detected from title" - elif [ -n "$PR_BODY_VAL" ] && echo "$PR_BODY_VAL" | grep -i "swaggerhub\|api.*spec\|spec.*version"; then - IS_API_SYNC=true - echo "API Sync detected from body" - elif [ -n "$HEAD_BRANCH_VAL" ] && echo "$HEAD_BRANCH_VAL" | grep -i "sync\|api\|spec"; then - IS_API_SYNC=true - echo "API Sync detected from branch name" - fi - - echo "is_api_sync=${IS_API_SYNC}" >> $GITHUB_OUTPUT - echo "API Sync PR detected: ${IS_API_SYNC}" - - - name: Get changed files for analysis - id: changed-files - run: | - PR_NUMBER=${{ steps.pr-details.outputs.pr_number }} - - # Get list of changed files - gh pr diff $PR_NUMBER --name-only > changed_files.txt - - # Filter for code files only (Java, Python, etc.) - grep -E '\.(java|py|js|ts|jsx|tsx|go|rs|cpp|c|h|hpp|cs|php|rb|scala|kt)$' changed_files.txt > code_files.txt || true - - CHANGED_FILES_COUNT=$(wc -l < changed_files.txt | tr -d ' ') - CODE_FILES_COUNT=$(wc -l < code_files.txt | tr -d ' ') - - echo "changed_files_count=${CHANGED_FILES_COUNT}" >> $GITHUB_OUTPUT - echo "code_files_count=${CODE_FILES_COUNT}" >> $GITHUB_OUTPUT - - echo "Changed files: $CHANGED_FILES_COUNT" - echo "Code files: $CODE_FILES_COUNT" - - - name: Analyze API Sync Changes - id: api-sync-analysis - if: steps.pr-details.outputs.is_api_sync == 'true' - run: | - echo "🔄 Analyzing API Sync PR changes..." - - # Create API sync analysis - cat > api_sync_summary.md << 'EOF' - ## 🔄 API Sync PR Summary - - This API Sync PR was triggered by @${{ steps.pr-details.outputs.pr_author }} through GitHub Actions workflow_dispatch on $(date '+%Y-%m-%d'). 
- - EOF - - # Try to extract version information from PR title, body, or files - VERSION_FROM="" - VERSION_TO="" - - # Check for version in PR title - if echo "${{ steps.pr-details.outputs.pr_title }}" | grep -o "v\?[0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?" | head -2; then - VERSIONS=$(echo "${{ steps.pr-details.outputs.pr_title }}" | grep -o "v\?[0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?" | head -2) - VERSION_FROM=$(echo "$VERSIONS" | head -1 | sed 's/^v//') - VERSION_TO=$(echo "$VERSIONS" | tail -1 | sed 's/^v//') - fi - - # Check for version in PR body - if [ -z "$VERSION_FROM" ] && echo "${{ steps.pr-details.outputs.pr_body }}" | grep -i "version.*from\|from.*version"; then - VERSION_FROM=$(echo "${{ steps.pr-details.outputs.pr_body }}" | grep -i "version.*from\|from.*version" | grep -o "[0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?" | head -1) - VERSION_TO=$(echo "${{ steps.pr-details.outputs.pr_body }}" | grep -i "version.*to\|to.*version" | grep -o "[0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?" | head -1) - fi - - # Check version files - if [ -z "$VERSION_FROM" ]; then - if [ -f "version" ]; then - VERSION_TO=$(cat version | tr -d '\n') - # Try to get previous version from git - VERSION_FROM=$(git show HEAD~1:version 2>/dev/null || echo "unknown") - elif [ -f "pom.xml" ]; then - VERSION_TO=$(grep -o "[^<]*" pom.xml | head -1 | sed 's/<[^>]*>//g' | tr -d '\n') - VERSION_FROM=$(git show HEAD~1:pom.xml 2>/dev/null | grep -o "[^<]*" | head -1 | sed 's/<[^>]*>//g' | tr -d '\n' || echo "unknown") - fi - fi - - # Add version info to summary - if [ -n "$VERSION_FROM" ] && [ -n "$VERSION_TO" ] && [ "$VERSION_FROM" != "$VERSION_TO" ]; then - echo "This PR updates the SDK API Spec Version: from $VERSION_FROM to $VERSION_TO" >> api_sync_summary.md - echo "" >> api_sync_summary.md - echo "Latest Swaggerhub API Spec is fetched - version $VERSION_TO" >> api_sync_summary.md - else - echo "This PR updates the SDK API Spec to the latest version" >> api_sync_summary.md - echo "" >> api_sync_summary.md - echo "Latest Swaggerhub API Spec has been fetched" >> api_sync_summary.md - fi - - echo "Patches have been applied" >> api_sync_summary.md - echo "Generated client has been updated" >> api_sync_summary.md - echo "" >> api_sync_summary.md - - # Analyze changes in SDK - echo "## Changes in SDK" >> api_sync_summary.md - echo "" >> api_sync_summary.md - - # Initialize arrays for tracking changes - declare -A added_models - declare -A modified_models - declare -A removed_models - declare -A added_apis - declare -A modified_apis - declare -A removed_apis - declare -A renamed_classes - - # Analyze changed files - while IFS= read -r file; do - if [[ "$file" =~ \.java$ ]]; then - FILE_PATH="$file" - FILE_NAME=$(basename "$file" .java) - - # Check if file was added, modified, or deleted - FILE_STATUS=$(git diff --name-status origin/${{ steps.pr-details.outputs.base_branch }}..HEAD -- "$file" 2>/dev/null | cut -f1 || echo "M") - - # Categorize based on file path and name - if [[ "$file" =~ /model/ ]]; then - case "$FILE_STATUS" in - "A") added_models["$FILE_NAME"]=1 ;; - "M") modified_models["$FILE_NAME"]=1 ;; - "D") removed_models["$FILE_NAME"]=1 ;; - esac - elif [[ "$file" =~ /api/ ]] && [[ "$FILE_NAME" =~ Api$ ]]; then - case "$FILE_STATUS" in - "A") added_apis["$FILE_NAME"]=1 ;; - "M") modified_apis["$FILE_NAME"]=1 ;; - "D") removed_apis["$FILE_NAME"]=1 ;; - esac - fi - fi - done < changed_files.txt - - # Generate Added models section - if [ ${#added_models[@]} -gt 0 ]; then - echo "### Added models" >> api_sync_summary.md - for model in 
"${!added_models[@]}"; do - echo "- $model" >> api_sync_summary.md - done - echo "" >> api_sync_summary.md - fi - - # Generate Modified models section - if [ ${#modified_models[@]} -gt 0 ]; then - echo "### Modified models" >> api_sync_summary.md - for model in "${!modified_models[@]}"; do - echo "- $model" >> api_sync_summary.md - done - echo "" >> api_sync_summary.md - fi - - # Generate Breaking Changes section if there are removed items - if [ ${#removed_models[@]} -gt 0 ] || [ ${#removed_apis[@]} -gt 0 ]; then - echo "## Breaking Changes" >> api_sync_summary.md - echo "" >> api_sync_summary.md - fi - - # Modified API classes - if [ ${#modified_apis[@]} -gt 0 ]; then - echo "### Modified api classes" >> api_sync_summary.md - for api in "${!modified_apis[@]}"; do - echo "- $api" >> api_sync_summary.md - done - echo "" >> api_sync_summary.md - fi - - # Removed API classes - if [ ${#removed_apis[@]} -gt 0 ]; then - echo "### Removed api classes" >> api_sync_summary.md - for api in "${!removed_apis[@]}"; do - echo "- $api" >> api_sync_summary.md - done - echo "" >> api_sync_summary.md - fi - - # Removed models - if [ ${#removed_models[@]} -gt 0 ]; then - echo "### Removed models" >> api_sync_summary.md - for model in "${!removed_models[@]}"; do - echo "- $model" >> api_sync_summary.md - done - echo "" >> api_sync_summary.md - fi - - # Try to detect renamed classes by analyzing git renames - echo "### Renamed classes" >> api_sync_summary.md - git diff --find-renames=50 --name-status origin/${{ steps.pr-details.outputs.base_branch }}..HEAD | grep "^R" | while read status old_file new_file; do - if [[ "$old_file" =~ \.java$ ]] && [[ "$new_file" =~ \.java$ ]]; then - OLD_NAME=$(basename "$old_file" .java) - NEW_NAME=$(basename "$new_file" .java) - echo "- $OLD_NAME -> $NEW_NAME" >> api_sync_summary.md - fi - done - - # Add footer - echo "" >> api_sync_summary.md - echo "---" >> api_sync_summary.md - echo "*Auto-generated API Sync Summary - $(date '+%Y-%m-%d %H:%M:%S')*" >> api_sync_summary.md - - echo "api_sync_completed=true" >> $GITHUB_OUTPUT - - - name: Analyze code with Copilot - id: copilot-analysis - if: steps.changed-files.outputs.code_files_count > 0 - run: | - PR_NUMBER=${{ steps.pr-details.outputs.pr_number }} - - echo "🤖 Starting Copilot analysis..." - - # Create analysis report - if [ "${{ steps.pr-details.outputs.is_api_sync }}" == "true" ]; then - # For API Sync PRs, use the structured summary - cp api_sync_summary.md analysis_report.md - - # Add additional Copilot analysis for API sync - cat >> analysis_report.md << 'EOF' - - ## 🤖 GitHub Copilot Analysis - - ### API Changes Impact Assessment - - EOF - - # Analyze the scale of changes - TOTAL_JAVA_FILES=$(find . 
-name "*.java" -path "*/src/*" | wc -l) - CHANGED_JAVA_FILES=${{ steps.changed-files.outputs.code_files_count }} - - if [ "$CHANGED_JAVA_FILES" -gt 50 ]; then - echo "âš ī¸ **Large Scale Changes**: $CHANGED_JAVA_FILES Java files modified" >> analysis_report.md - echo "- Extensive API updates detected" >> analysis_report.md - echo "- Recommend thorough testing of all affected endpoints" >> analysis_report.md - echo "- Consider staged rollout for production deployment" >> analysis_report.md - elif [ "$CHANGED_JAVA_FILES" -gt 20 ]; then - echo "📊 **Medium Scale Changes**: $CHANGED_JAVA_FILES Java files modified" >> analysis_report.md - echo "- Moderate API updates" >> analysis_report.md - echo "- Verify backward compatibility" >> analysis_report.md - else - echo "✅ **Small Scale Changes**: $CHANGED_JAVA_FILES Java files modified" >> analysis_report.md - echo "- Limited scope API updates" >> analysis_report.md - fi - - echo "" >> analysis_report.md - - else - # Regular PR analysis - cat > analysis_report.md << 'EOF' - ## 🤖 GitHub Copilot Code Analysis - - **PR:** #${{ steps.pr-details.outputs.pr_number }} - **Title:** ${{ steps.pr-details.outputs.pr_title }} - **Author:** ${{ steps.pr-details.outputs.pr_author }} - **Branch:** `${{ steps.pr-details.outputs.head_branch }}` → `${{ steps.pr-details.outputs.base_branch }}` - - EOF - - # Analyze each code file for regular PRs - while IFS= read -r file; do - if [ -f "$file" ]; then - echo "### 📄 Analysis of \`$file\`" >> analysis_report.md - echo "" >> analysis_report.md - - # Use Copilot to explain the code changes - echo "Analyzing $file with Copilot..." - - # Use gh copilot explain command - COPILOT_EXPLANATION="" - if command -v gh copilot &> /dev/null; then - # Try to get explanation from Copilot - COPILOT_EXPLANATION=$(gh copilot explain "$file" 2>/dev/null || echo "") - fi - - if [ -n "$COPILOT_EXPLANATION" ]; then - echo "$COPILOT_EXPLANATION" >> analysis_report.md - else - # Fallback analysis using file inspection - echo "**File Type:** $(file "$file" | cut -d: -f2-)" >> analysis_report.md - echo "**Lines:** $(wc -l < "$file" 2>/dev/null || echo "N/A")" >> analysis_report.md - - # Basic code pattern detection - if grep -q "class\|interface\|enum" "$file" 2>/dev/null; then - echo "**Contains:** Classes/Interfaces" >> analysis_report.md - fi - if grep -q "test\|Test\|@Test" "$file" 2>/dev/null; then - echo "**Type:** Test file" >> analysis_report.md - fi - if grep -q "public static void main" "$file" 2>/dev/null; then - echo "**Type:** Main class" >> analysis_report.md - fi - fi - - echo "" >> analysis_report.md - fi - done < code_files.txt - fi - - # Add summary section - cat >> analysis_report.md << EOF - - ### 📊 Summary - - - **Total changed files:** ${{ steps.changed-files.outputs.changed_files_count }} - - **Code files analyzed:** ${{ steps.changed-files.outputs.code_files_count }} - - **Analysis timestamp:** $(date -u '+%Y-%m-%d %H:%M:%S UTC') - - ### 🔍 Copilot Recommendations - - EOF - - # Get general recommendations from Copilot if available - echo "Generating Copilot recommendations..." - - # Create a prompt for general code review - REVIEW_PROMPT="Review this pull request of changed files. Please present section 'Changes in SDK' with added, modified, removed models and APIs. They should be listed and categorized by added models, added apis, renamed from one to antoher class name, modified. Please evaluate braking changes and put in different section. 
I just need only listed summaraize" - - # Use Copilot suggest if available - SUGGESTIONS="" - if command -v gh copilot suggest &> /dev/null; then - SUGGESTIONS=$(echo "$REVIEW_PROMPT" | gh copilot suggest 2>/dev/null || echo "") - fi - - if [ -n "$SUGGESTIONS" ]; then - echo "$SUGGESTIONS" >> analysis_report.md - else - # Fallback recommendations based on file patterns - echo "**Automated Analysis Recommendations:**" >> analysis_report.md - echo "" >> analysis_report.md - - # Check for common patterns - if grep -r "System.out.println\|console.log\|print(" . --include="*.java" --include="*.js" --include="*.py" 2>/dev/null | head -5; then - echo "- âš ī¸ Consider removing debug print statements before merging" >> analysis_report.md - fi - - if grep -r "TODO\|FIXME\|XXX" . --include="*.java" --include="*.js" --include="*.py" 2>/dev/null | head -3; then - echo "- 📝 Address TODO/FIXME comments if possible" >> analysis_report.md - fi - - if find . -name "*.java" -exec grep -l "catch.*Exception.*{[^}]*}" {} \; 2>/dev/null | head -1; then - echo "- 🔍 Review exception handling in catch blocks" >> analysis_report.md - fi - - echo "- ✅ Consider adding unit tests for new functionality" >> analysis_report.md - echo "- 📚 Ensure proper documentation for public APIs" >> analysis_report.md - fi - - cat >> analysis_report.md << EOF - - --- - *Analysis generated by GitHub Copilot integration - $(date '+%Y-%m-%d %H:%M:%S')* - EOF - - echo "analysis_completed=true" >> $GITHUB_OUTPUT - - - name: Create detailed diff analysis - if: steps.copilot-analysis.outputs.analysis_completed == 'true' - run: | - echo "" >> analysis_report.md - echo "### 📋 Detailed Changes" >> analysis_report.md - echo "" >> analysis_report.md - echo "
" >> analysis_report.md - echo "Click to view detailed diff analysis" >> analysis_report.md - echo "" >> analysis_report.md - echo "\`\`\`diff" >> analysis_report.md - - # Get diff with context - gh pr diff ${{ steps.pr-details.outputs.pr_number }} | head -200 >> analysis_report.md - - echo "\`\`\`" >> analysis_report.md - echo "" >> analysis_report.md - echo "
" >> analysis_report.md - - - name: Post Copilot analysis as PR comment - if: steps.copilot-analysis.outputs.analysis_completed == 'true' - run: | - # Post analysis as comment - gh pr comment ${{ steps.pr-details.outputs.pr_number }} --body-file analysis_report.md - - - name: Update PR description with Copilot summary - if: steps.copilot-analysis.outputs.analysis_completed == 'true' - run: | - # Get current PR description - CURRENT_DESC=$(gh pr view ${{ steps.pr-details.outputs.pr_number }} --json body --jq '.body') - - # Create short summary for PR description - echo "## 🤖 Copilot Analysis Summary" > pr_summary.md - echo "" >> pr_summary.md - echo "✅ **Analysis completed** - ${{ steps.changed-files.outputs.code_files_count }} code files analyzed" >> pr_summary.md - echo "" >> pr_summary.md - echo "📊 **Files:** ${{ steps.changed-files.outputs.changed_files_count }} changed, ${{ steps.changed-files.outputs.code_files_count }} code files" >> pr_summary.md - echo "" >> pr_summary.md - echo "🔍 **Status:** Automated code review completed by GitHub Copilot" >> pr_summary.md - echo "" >> pr_summary.md - echo "> 💡 See detailed analysis in PR comments below" >> pr_summary.md - echo "" >> pr_summary.md - echo "---" >> pr_summary.md - - # Check if summary already exists and update/add accordingly - if echo "$CURRENT_DESC" | grep -q "🤖 Copilot Analysis Summary"; then - # Remove old summary - NEW_DESC=$(echo "$CURRENT_DESC" | sed '/🤖 Copilot Analysis Summary/,/^---$/d') - echo "${NEW_DESC}" > temp_desc.md - echo "" >> temp_desc.md - cat pr_summary.md >> temp_desc.md - else - # Add new summary - if [ -n "$CURRENT_DESC" ] && [ "$CURRENT_DESC" != "null" ]; then - echo "$CURRENT_DESC" > temp_desc.md - echo "" >> temp_desc.md - cat pr_summary.md >> temp_desc.md - else - cp pr_summary.md temp_desc.md - fi - fi - - # Update PR description - gh pr edit ${{ steps.pr-details.outputs.pr_number }} --body-file temp_desc.md - - - name: Create analysis artifact - if: steps.copilot-analysis.outputs.analysis_completed == 'true' - uses: actions/upload-artifact@v4 - with: - name: copilot-analysis-${{ steps.pr-details.outputs.pr_number }} - path: | - analysis_report.md - changed_files.txt - code_files.txt - retention-days: 30 - - - name: Analysis summary - if: always() - run: | - echo "🎉 Copilot Analysis Workflow Completed!" 
- echo "==================================" - echo "PR Number: ${{ steps.pr-details.outputs.pr_number }}" - echo "Changed Files: ${{ steps.changed-files.outputs.changed_files_count }}" - echo "Code Files: ${{ steps.changed-files.outputs.code_files_count }}" - - if [ "${{ steps.copilot-analysis.outputs.analysis_completed }}" == "true" ]; then - echo "✅ Analysis completed successfully" - echo "📝 Check PR comments for detailed analysis" - echo "📋 PR description updated with summary" - else - echo "âš ī¸ Analysis skipped (no code files found or other issue)" - fi diff --git a/script/branch_file_analyzer.py b/script/branch_file_analyzer.py new file mode 100755 index 00000000..e119b2b2 --- /dev/null +++ b/script/branch_file_analyzer.py @@ -0,0 +1,297 @@ +#!/usr/bin/env python3 + +import subprocess +import sys +import os +from pathlib import Path +from collections import defaultdict + + +class BranchFileAnalyzer: + def __init__(self, base_branch='main'): + self.base_branch = base_branch + self.current_branch = self._get_current_branch() + self.base_ref = self._get_base_ref() + + # File categorization + self.added_files = [] + self.modified_files = [] + self.deleted_files = [] + self.renamed_files = {} + + def _run_git_command(self, cmd): + """Execute git command and return output.""" + try: + result = subprocess.run(cmd, capture_output=True, text=True, check=True, cwd='.') + return result.stdout.strip() + except subprocess.CalledProcessError: + return "" + + def _get_current_branch(self): + """Get current branch name.""" + branch = self._run_git_command(['git', 'branch', '--show-current']) + if not branch: + # Handle detached HEAD + head_commit = self._run_git_command(['git', 'rev-parse', '--short', 'HEAD']) + return f"HEAD-{head_commit}" if head_commit else "unknown" + return branch + + def _get_base_ref(self): + """Get base reference for comparison.""" + # Try different base references + for ref in [f'origin/{self.base_branch}', self.base_branch, 'HEAD~1']: + if self._run_git_command(['git', 'rev-parse', '--verify', ref]): + return ref + return 'HEAD~1' # fallback + + def analyze_branch_changes(self): + """Analyze all file changes in the current branch.""" + # Get all changed files with their status + diff_output = self._run_git_command([ + 'git', 'diff', '--name-status', f'{self.base_ref}..HEAD' + ]) + + if not diff_output: + # If no diff between base and HEAD, check uncommitted changes + print("No committed changes found. 
Checking uncommitted changes...", file=sys.stderr) + self._analyze_uncommitted_changes() + return + + # Process each line of diff output + for line in diff_output.split('\n'): + if not line.strip(): + continue + + self._process_file_change(line.strip()) + + def _analyze_uncommitted_changes(self): + """Analyze uncommitted changes if no committed changes found.""" + # Check staged changes + staged_output = self._run_git_command(['git', 'diff', '--cached', '--name-status']) + # Check unstaged changes + unstaged_output = self._run_git_command(['git', 'diff', '--name-status']) + # Check untracked files + untracked_output = self._run_git_command(['git', 'ls-files', '--others', '--exclude-standard']) + + # Process staged changes + for line in staged_output.split('\n'): + if line.strip(): + self._process_file_change(line.strip()) + + # Process unstaged changes + for line in unstaged_output.split('\n'): + if line.strip(): + self._process_file_change(line.strip()) + + # Process untracked files (treat as added) + for filepath in untracked_output.split('\n'): + if filepath.strip(): + self.added_files.append(filepath.strip()) + + def _process_file_change(self, line): + """Process a single file change line.""" + parts = line.split('\t') + if len(parts) < 2: + return + + status = parts[0] + filepath = parts[1] + + # Handle rename operations (R100 old_file new_file) + if status.startswith('R'): + if len(parts) >= 3: + old_file = filepath + new_file = parts[2] + self.renamed_files[old_file] = new_file + return + + # Handle copy operations (C100 old_file new_file) + if status.startswith('C'): + if len(parts) >= 3: + # Treat copies as new files + new_file = parts[2] + self.added_files.append(new_file) + return + + # Handle other status codes + if status == 'A': + self.added_files.append(filepath) + elif status == 'M': + self.modified_files.append(filepath) + elif status == 'D': + self.deleted_files.append(filepath) + elif status == 'T': + # Type change (e.g., file to symlink) + self.modified_files.append(filepath) + + def categorize_files_by_type(self, file_list): + """Categorize files by their type/extension.""" + categories = defaultdict(list) + + for filepath in file_list: + file_path = Path(filepath) + filename = file_path.name + + # Categorize by file type + if filepath.endswith('.java'): + if '/model/' in filepath or '/dto/' in filepath: + categories['Java Models'].append(filename) + elif '/api/' in filepath and filename.endswith('Api.java'): + categories['API Classes'].append(filename) + elif '/test/' in filepath or 'test' in filename.lower(): + categories['Test Files'].append(filename) + # Removed Java Files category - these files will be ignored + elif filepath.endswith(('.py', '.sh', '.js', '.ts')): + categories['Scripts'].append(filename) + elif filepath.endswith(('.json', '.xml')): + categories['Data Files'].append(filename) + else: + categories['Other Files'].append(filename) + + return categories + + def generate_report(self): + """Generate the analysis report.""" + lines = [] + + + # Added Files by category + if self.added_files: + # Separate by specific categories + added_apis = [f for f in self.added_files if f.endswith('.java') and ('/api/' in f and f.endswith('Api.java'))] + added_models = [f for f in self.added_files if f.endswith('.java') and ('/model/' in f or '/dto/' in f)] + + # Only show header if there are files in any category + if added_apis or added_models: + lines.append("ADDED FILES:") + + if added_apis: + lines.append(" Added API ->") + for filepath in sorted(added_apis): + 
filename = Path(filepath).name.replace('.java', '') + lines.append(f" + {filename}") + + if added_models: + lines.append(" Added Models ->") + for filepath in sorted(added_models): + filename = Path(filepath).name.replace('.java', '') + lines.append(f" + {filename}") + + lines.append("") + + # Modified Files by category + if self.modified_files: + # Separate by specific categories + modified_apis = [f for f in self.modified_files if f.endswith('.java') and ('/api/' in f and f.endswith('Api.java'))] + modified_models = [f for f in self.modified_files if f.endswith('.java') and ('/model/' in f or '/dto/' in f)] + + # Only show header if there are files in any category + if modified_apis or modified_models: + lines.append("MODIFIED FILES:") + + if modified_apis: + lines.append(" Modified API ->") + for filepath in sorted(modified_apis): + filename = Path(filepath).name.replace('.java', '') + lines.append(f" * {filename}") + + if modified_models: + lines.append(" Modified Models ->") + for filepath in sorted(modified_models): + filename = Path(filepath).name.replace('.java', '') + lines.append(f" * {filename}") + + lines.append("") + + # Deleted Files by category + if self.deleted_files: + # Separate by specific categories + deleted_apis = [f for f in self.deleted_files if f.endswith('.java') and ('/api/' in f and f.endswith('Api.java'))] + deleted_models = [f for f in self.deleted_files if f.endswith('.java') and ('/model/' in f or '/dto/' in f)] + + # Only show header if there are files in any category + if deleted_apis or deleted_models: + lines.append("DELETED FILES:") + + if deleted_apis: + lines.append(" Deleted API ->") + for filepath in sorted(deleted_apis): + filename = Path(filepath).name.replace('.java', '') + lines.append(f" - {filename}") + + if deleted_models: + lines.append(" Deleted Models ->") + for filepath in sorted(deleted_models): + filename = Path(filepath).name.replace('.java', '') + lines.append(f" - {filename}") + + lines.append("") + + # Renamed Files with detailed categorization + if self.renamed_files: + # Separate renamed files by category + renamed_apis = {} + renamed_models = {} + + + for old_file, new_file in self.renamed_files.items(): + old_is_api = old_file.endswith('.java') and ('/api/' in old_file and old_file.endswith('Api.java')) + new_is_api = new_file.endswith('.java') and ('/api/' in new_file and new_file.endswith('Api.java')) + old_is_model = old_file.endswith('.java') and ('/model/' in old_file or '/dto/' in old_file) + new_is_model = new_file.endswith('.java') and ('/model/' in new_file or '/dto/' in new_file) + + if old_is_api or new_is_api: + renamed_apis[old_file] = new_file + elif old_is_model or new_is_model: + renamed_models[old_file] = new_file + + + # Only show header if there are files in any category + if renamed_apis or renamed_models: + lines.append("RENAMED FILES:") + + if renamed_apis: + lines.append(" Renamed API ->") + for old_file, new_file in sorted(renamed_apis.items()): + old_name = Path(old_file).name.replace('.java', '') + new_name = Path(new_file).name.replace('.java', '') + lines.append(f" {old_name} -> {new_name}") + + if renamed_models: + lines.append(" Renamed Models ->") + for old_file, new_file in sorted(renamed_models.items()): + old_name = Path(old_file).name.replace('.java', '') + new_name = Path(new_file).name.replace('.java', '') + lines.append(f" {old_name} -> {new_name}") + + lines.append("") + + + return "\n".join(lines) + + +def main(): + """Main function.""" + # Check if we're in a git repository + if not 
os.path.exists('.git'): + print("Error: Not in a Git repository!", file=sys.stderr) + sys.exit(1) + + try: + # Create analyzer + analyzer = BranchFileAnalyzer() + + # Analyze changes + analyzer.analyze_branch_changes() + + # Generate and print report + report = analyzer.generate_report() + print(report) + + except Exception as e: + print(f"Error during analysis: {e}", file=sys.stderr) + sys.exit(1) + + +if __name__ == '__main__': + main() From 78dcfe6e343cedb4d5afa970ea2767a38e5a5943 Mon Sep 17 00:00:00 2001 From: "equinix-labs@auto-commit-workflow" Date: Fri, 13 Feb 2026 16:22:48 +0000 Subject: [PATCH 10/15] Auto commit generated client changes - Fri Feb 13 16:22:48 UTC 2026 --- services/fabricv4/docs/ConnectionSide.md | 2 +- services/fabricv4/docs/SearchFieldName.md | 64 ++++----- services/fabricv4/docs/SortBy.md | 32 ++--- .../docs/TimeServicePriceConnectionASide.md | 2 +- .../docs/VirtualConnectionPriceASide.md | 2 +- .../docs/VirtualConnectionPriceZSide.md | 2 +- .../fabricv4/docs/VirtualConnectionSide.md | 2 +- .../java/com/equinix/sdk/fabricv4/JSON.java | 4 +- .../{AccessPointS.java => AccessPoint.java} | 130 +++++++++++------- .../sdk/fabricv4/model/ConnectionSide.java | 41 ++++-- .../fabricv4/model/VirtualConnectionSide.java | 36 +++-- 11 files changed, 185 insertions(+), 132 deletions(-) rename services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/{AccessPointS.java => AccessPoint.java} (83%) diff --git a/services/fabricv4/docs/ConnectionSide.md b/services/fabricv4/docs/ConnectionSide.md index c62b2727..155fdeb5 100644 --- a/services/fabricv4/docs/ConnectionSide.md +++ b/services/fabricv4/docs/ConnectionSide.md @@ -9,7 +9,7 @@ Connection configuration object for each side of multi-segment connection | Name | Type | Description | Notes | |------------ | ------------- | ------------- | -------------| |**serviceToken** | [**ServiceToken**](ServiceToken.md) | | [optional] | -|**accessPointS** | [**AccessPoint**](AccessPoint.md) | | [optional] | +|**accessPoint** | [**AccessPoint**](AccessPoint.md) | | [optional] | |**internetAccess** | [**InternetAccess**](InternetAccess.md) | | [optional] | |**companyProfile** | [**ConnectionCompanyProfile**](ConnectionCompanyProfile.md) | | [optional] | |**invitation** | [**ConnectionInvitation**](ConnectionInvitation.md) | | [optional] | diff --git a/services/fabricv4/docs/SearchFieldName.md b/services/fabricv4/docs/SearchFieldName.md index 7d09660f..6c435cd7 100644 --- a/services/fabricv4/docs/SearchFieldName.md +++ b/services/fabricv4/docs/SearchFieldName.md @@ -17,35 +17,35 @@ * `ACCOUNT_ORGID` (value: `"/account/orgId"`) -* `ASIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/aSide/accessPointS/account/accountName"`) +* `ASIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/aSide/accessPoint/account/accountName"`) -* `ASIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNUMBER` (value: `"/aSide/accessPointS/account/accountNumber"`) +* `ASIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNUMBER` (value: `"/aSide/accessPoint/account/accountNumber"`) -* `ASIDE_ACCESSPOINT_ROUTER_UUID` (value: `"/aSide/accessPointS/router/uuid"`) +* `ASIDE_ACCESSPOINT_ROUTER_UUID` (value: `"/aSide/accessPoint/router/uuid"`) -* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/aSide/accessPointS/linkProtocol/vlanCTag"`) +* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/aSide/accessPoint/linkProtocol/vlanCTag"`) -* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: `"/aSide/accessPointS/linkProtocol/vlanSTag"`) +* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: 
`"/aSide/accessPoint/linkProtocol/vlanSTag"`) -* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMIN` (value: `"/aSide/accessPointS/linkProtocol/vlanTagMin"`) +* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMIN` (value: `"/aSide/accessPoint/linkProtocol/vlanTagMin"`) -* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMAX` (value: `"/aSide/accessPointS/linkProtocol/vlanTagMax"`) +* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMAX` (value: `"/aSide/accessPoint/linkProtocol/vlanTagMax"`) -* `ASIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/aSide/accessPointS/location/metroCode"`) +* `ASIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/aSide/accessPoint/location/metroCode"`) -* `ASIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/aSide/accessPointS/location/metroName"`) +* `ASIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/aSide/accessPoint/location/metroName"`) -* `ASIDE_ACCESSPOINT_NAME` (value: `"/aSide/accessPointS/name"`) +* `ASIDE_ACCESSPOINT_NAME` (value: `"/aSide/accessPoint/name"`) -* `ASIDE_ACCESSPOINT_PORT_UUID` (value: `"/aSide/accessPointS/port/uuid"`) +* `ASIDE_ACCESSPOINT_PORT_UUID` (value: `"/aSide/accessPoint/port/uuid"`) -* `ASIDE_ACCESSPOINT_PORT_NAME` (value: `"/aSide/accessPointS/port/name"`) +* `ASIDE_ACCESSPOINT_PORT_NAME` (value: `"/aSide/accessPoint/port/name"`) -* `ASIDE_ACCESSPOINT_TYPE` (value: `"/aSide/accessPointS/type"`) +* `ASIDE_ACCESSPOINT_TYPE` (value: `"/aSide/accessPoint/type"`) -* `ASIDE_ACCESSPOINT_VIRTUALDEVICE_NAME` (value: `"/aSide/accessPointS/virtualDevice/name"`) +* `ASIDE_ACCESSPOINT_VIRTUALDEVICE_NAME` (value: `"/aSide/accessPoint/virtualDevice/name"`) -* `ASIDE_ACCESSPOINT_VIRTUALDEVICE_UUID` (value: `"/aSide/accessPointS/virtualDevice/uuid"`) +* `ASIDE_ACCESSPOINT_VIRTUALDEVICE_UUID` (value: `"/aSide/accessPoint/virtualDevice/uuid"`) * `ASIDE_SERVICETOKEN_UUID` (value: `"/aSide/serviceToken/uuid"`) @@ -61,39 +61,39 @@ * `REDUNDANCY_PRIORITY` (value: `"/redundancy/priority"`) -* `ZSIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/zSide/accessPointS/account/accountName"`) +* `ZSIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/zSide/accessPoint/account/accountName"`) -* `ZSIDE_ACCESSPOINT_AUTHENTICATIONKEY` (value: `"/zSide/accessPointS/authenticationKey"`) +* `ZSIDE_ACCESSPOINT_AUTHENTICATIONKEY` (value: `"/zSide/accessPoint/authenticationKey"`) -* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/zSide/accessPointS/linkProtocol/vlanCTag"`) +* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/zSide/accessPoint/linkProtocol/vlanCTag"`) -* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: `"/zSide/accessPointS/linkProtocol/vlanSTag"`) +* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: `"/zSide/accessPoint/linkProtocol/vlanSTag"`) -* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMIN` (value: `"/zSide/accessPointS/linkProtocol/vlanTagMin"`) +* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMIN` (value: `"/zSide/accessPoint/linkProtocol/vlanTagMin"`) -* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMAX` (value: `"/zSide/accessPointS/linkProtocol/vlanTagMax"`) +* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANTAGMAX` (value: `"/zSide/accessPoint/linkProtocol/vlanTagMax"`) -* `ZSIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/zSide/accessPointS/location/metroCode"`) +* `ZSIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/zSide/accessPoint/location/metroCode"`) -* `ZSIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/zSide/accessPointS/location/metroName"`) +* `ZSIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/zSide/accessPoint/location/metroName"`) -* `ZSIDE_ACCESSPOINT_NAME` (value: `"/zSide/accessPointS/name"`) +* 
`ZSIDE_ACCESSPOINT_NAME` (value: `"/zSide/accessPoint/name"`) -* `ZSIDE_ACCESSPOINT_PORT_UUID` (value: `"/zSide/accessPointS/port/uuid"`) +* `ZSIDE_ACCESSPOINT_PORT_UUID` (value: `"/zSide/accessPoint/port/uuid"`) -* `ZSIDE_ACCESSPOINT_NETWORK_UUID` (value: `"/zSide/accessPointS/network/uuid"`) +* `ZSIDE_ACCESSPOINT_NETWORK_UUID` (value: `"/zSide/accessPoint/network/uuid"`) -* `ZSIDE_ACCESSPOINT_PORT_NAME` (value: `"/zSide/accessPointS/port/name"`) +* `ZSIDE_ACCESSPOINT_PORT_NAME` (value: `"/zSide/accessPoint/port/name"`) -* `ZSIDE_ACCESSPOINT_PROFILE_UUID` (value: `"/zSide/accessPointS/profile/uuid"`) +* `ZSIDE_ACCESSPOINT_PROFILE_UUID` (value: `"/zSide/accessPoint/profile/uuid"`) -* `ZSIDE_ACCESSPOINT_TYPE` (value: `"/zSide/accessPointS/type"`) +* `ZSIDE_ACCESSPOINT_TYPE` (value: `"/zSide/accessPoint/type"`) -* `ZSIDE_ACCESSPOINT_ROLE` (value: `"/zSide/accessPointS/role"`) +* `ZSIDE_ACCESSPOINT_ROLE` (value: `"/zSide/accessPoint/role"`) -* `ZSIDE_ACCESSPOINT_VIRTUALDEVICE_NAME` (value: `"/zSide/accessPointS/virtualDevice/name"`) +* `ZSIDE_ACCESSPOINT_VIRTUALDEVICE_NAME` (value: `"/zSide/accessPoint/virtualDevice/name"`) -* `ZSIDE_ACCESSPOINT_VIRTUALDEVICE_UUID` (value: `"/zSide/accessPointS/virtualDevice/uuid"`) +* `ZSIDE_ACCESSPOINT_VIRTUALDEVICE_UUID` (value: `"/zSide/accessPoint/virtualDevice/uuid"`) * `ZSIDE_SERVICETOKEN_UUID` (value: `"/zSide/serviceToken/uuid"`) diff --git a/services/fabricv4/docs/SortBy.md b/services/fabricv4/docs/SortBy.md index a257e116..39eab163 100644 --- a/services/fabricv4/docs/SortBy.md +++ b/services/fabricv4/docs/SortBy.md @@ -9,37 +9,37 @@ * `DIRECTION` (value: `"/direction"`) -* `ASIDE_ACCESSPOINT_NAME` (value: `"/aSide/accessPointS/name"`) +* `ASIDE_ACCESSPOINT_NAME` (value: `"/aSide/accessPoint/name"`) -* `ASIDE_ACCESSPOINT_TYPE` (value: `"/aSide/accessPointS/type"`) +* `ASIDE_ACCESSPOINT_TYPE` (value: `"/aSide/accessPoint/type"`) -* `ASIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/aSide/accessPointS/account/accountName"`) +* `ASIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/aSide/accessPoint/account/accountName"`) -* `ASIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/aSide/accessPointS/location/metroName"`) +* `ASIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/aSide/accessPoint/location/metroName"`) -* `ASIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/aSide/accessPointS/location/metroCode"`) +* `ASIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/aSide/accessPoint/location/metroCode"`) -* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/aSide/accessPointS/linkProtocol/vlanCTag"`) +* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/aSide/accessPoint/linkProtocol/vlanCTag"`) -* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: `"/aSide/accessPointS/linkProtocol/vlanSTag"`) +* `ASIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: `"/aSide/accessPoint/linkProtocol/vlanSTag"`) -* `ZSIDE_ACCESSPOINT_NAME` (value: `"/zSide/accessPointS/name"`) +* `ZSIDE_ACCESSPOINT_NAME` (value: `"/zSide/accessPoint/name"`) -* `ZSIDE_ACCESSPOINT_TYPE` (value: `"/zSide/accessPointS/type"`) +* `ZSIDE_ACCESSPOINT_TYPE` (value: `"/zSide/accessPoint/type"`) -* `ZSIDE_ACCESSPOINT_ROLE` (value: `"/zSide/accessPointS/role"`) +* `ZSIDE_ACCESSPOINT_ROLE` (value: `"/zSide/accessPoint/role"`) -* `ZSIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/zSide/accessPointS/account/accountName"`) +* `ZSIDE_ACCESSPOINT_ACCOUNT_ACCOUNTNAME` (value: `"/zSide/accessPoint/account/accountName"`) -* `ZSIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/zSide/accessPointS/location/metroName"`) +* 
`ZSIDE_ACCESSPOINT_LOCATION_METRONAME` (value: `"/zSide/accessPoint/location/metroName"`) -* `ZSIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/zSide/accessPointS/location/metroCode"`) +* `ZSIDE_ACCESSPOINT_LOCATION_METROCODE` (value: `"/zSide/accessPoint/location/metroCode"`) -* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/zSide/accessPointS/linkProtocol/vlanCTag"`) +* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANCTAG` (value: `"/zSide/accessPoint/linkProtocol/vlanCTag"`) -* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: `"/zSide/accessPointS/linkProtocol/vlanSTag"`) +* `ZSIDE_ACCESSPOINT_LINKPROTOCOL_VLANSTAG` (value: `"/zSide/accessPoint/linkProtocol/vlanSTag"`) -* `ZSIDE_ACCESSPOINT_AUTHENTICATIONKEY` (value: `"/zSide/accessPointS/authenticationKey"`) +* `ZSIDE_ACCESSPOINT_AUTHENTICATIONKEY` (value: `"/zSide/accessPoint/authenticationKey"`) * `BANDWIDTH` (value: `"/bandwidth"`) diff --git a/services/fabricv4/docs/TimeServicePriceConnectionASide.md b/services/fabricv4/docs/TimeServicePriceConnectionASide.md index 4a03add5..4f15771b 100644 --- a/services/fabricv4/docs/TimeServicePriceConnectionASide.md +++ b/services/fabricv4/docs/TimeServicePriceConnectionASide.md @@ -8,7 +8,7 @@ Time Service Price Connection ASide configuration | Name | Type | Description | Notes | |------------ | ------------- | ------------- | -------------| -|**accessPointS** | [**TimeServicePriceConnectionAccessPoint**](TimeServicePriceConnectionAccessPoint.md) | | [optional] | +|**accessPoint** | [**TimeServicePriceConnectionAccessPoint**](TimeServicePriceConnectionAccessPoint.md) | | [optional] | diff --git a/services/fabricv4/docs/VirtualConnectionPriceASide.md b/services/fabricv4/docs/VirtualConnectionPriceASide.md index 4365e569..79384eff 100644 --- a/services/fabricv4/docs/VirtualConnectionPriceASide.md +++ b/services/fabricv4/docs/VirtualConnectionPriceASide.md @@ -7,7 +7,7 @@ | Name | Type | Description | Notes | |------------ | ------------- | ------------- | -------------| -|**accessPointS** | [**VirtualConnectionPriceASideAccessPoint**](VirtualConnectionPriceASideAccessPoint.md) | | [optional] | +|**accessPoint** | [**VirtualConnectionPriceASideAccessPoint**](VirtualConnectionPriceASideAccessPoint.md) | | [optional] | diff --git a/services/fabricv4/docs/VirtualConnectionPriceZSide.md b/services/fabricv4/docs/VirtualConnectionPriceZSide.md index 1143970a..51c66b5e 100644 --- a/services/fabricv4/docs/VirtualConnectionPriceZSide.md +++ b/services/fabricv4/docs/VirtualConnectionPriceZSide.md @@ -7,7 +7,7 @@ | Name | Type | Description | Notes | |------------ | ------------- | ------------- | -------------| -|**accessPointS** | [**VirtualConnectionPriceZSideAccessPoint**](VirtualConnectionPriceZSideAccessPoint.md) | | [optional] | +|**accessPoint** | [**VirtualConnectionPriceZSideAccessPoint**](VirtualConnectionPriceZSideAccessPoint.md) | | [optional] | diff --git a/services/fabricv4/docs/VirtualConnectionSide.md b/services/fabricv4/docs/VirtualConnectionSide.md index e75a02bb..ee1e951e 100644 --- a/services/fabricv4/docs/VirtualConnectionSide.md +++ b/services/fabricv4/docs/VirtualConnectionSide.md @@ -8,7 +8,7 @@ Fabric Connection access point object. 
| Name | Type | Description | Notes | |------------ | ------------- | ------------- | -------------| -|**accessPointS** | [**AccessPoint**](AccessPoint.md) | | [optional] | +|**accessPoint** | [**AccessPoint**](AccessPoint.md) | | [optional] | diff --git a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/JSON.java b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/JSON.java index 559746b9..587dcef9 100644 --- a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/JSON.java +++ b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/JSON.java @@ -11,7 +11,6 @@ package com.equinix.sdk.fabricv4; -import com.equinix.sdk.fabricv4.model.AccessPointS; import com.equinix.sdk.fabricv4.model.PortDeviceRedundancy; import com.equinix.sdk.fabricv4.model.PortEncapsulation; import com.equinix.sdk.fabricv4.model.PortPriority; @@ -45,6 +44,7 @@ import java.time.format.DateTimeFormatter; import java.util.Collection; import java.util.Date; +import java.util.Locale; import java.util.Map; import java.util.HashMap; import java.util.TimeZone; @@ -141,7 +141,7 @@ public Enum deserialize(JsonElement json, java.lang.reflect.Type type, JsonDeser gsonBuilder.registerTypeAdapter(OffsetDateTime.class, offsetDateTimeTypeAdapter); gsonBuilder.registerTypeAdapter(LocalDate.class, localDateTypeAdapter); gsonBuilder.registerTypeAdapter(byte[].class, byteArrayAdapter); - gsonBuilder.registerTypeAdapterFactory(new AccessPointS.CustomTypeAdapterFactory()); + gsonBuilder.registerTypeAdapterFactory(new com.equinix.sdk.fabricv4.model.AccessPoint.CustomTypeAdapterFactory()); gsonBuilder.registerTypeAdapterFactory(new com.equinix.sdk.fabricv4.model.AccessPointSelector.CustomTypeAdapterFactory()); gsonBuilder.registerTypeAdapterFactory(new com.equinix.sdk.fabricv4.model.AddOperation.CustomTypeAdapterFactory()); gsonBuilder.registerTypeAdapterFactory(new com.equinix.sdk.fabricv4.model.AlertRulePostRequest.CustomTypeAdapterFactory()); diff --git a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPointS.java b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPoint.java similarity index 83% rename from services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPointS.java rename to services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPoint.java index f4ddaaf3..2b128ed5 100644 --- a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPointS.java +++ b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/AccessPoint.java @@ -13,7 +13,20 @@ import java.util.Objects; import java.util.Locale; - +import com.equinix.sdk.fabricv4.model.AccessPointType; +import com.equinix.sdk.fabricv4.model.CloudRouter; +import com.equinix.sdk.fabricv4.model.MetalInterconnection; +import com.equinix.sdk.fabricv4.model.ModelInterface; +import com.equinix.sdk.fabricv4.model.PeeringType; +import com.equinix.sdk.fabricv4.model.SimplifiedAccount; +import com.equinix.sdk.fabricv4.model.SimplifiedLinkProtocol; +import com.equinix.sdk.fabricv4.model.SimplifiedLocation; +import com.equinix.sdk.fabricv4.model.SimplifiedNetwork; +import com.equinix.sdk.fabricv4.model.SimplifiedPort; +import com.equinix.sdk.fabricv4.model.SimplifiedServiceProfile; +import com.equinix.sdk.fabricv4.model.VirtualDevice; +import com.equinix.sdk.fabricv4.model.VirtualNetwork; +import com.equinix.sdk.fabricv4.model.VpicInterface; import com.google.gson.TypeAdapter; import com.google.gson.annotations.JsonAdapter; import com.google.gson.annotations.SerializedName; @@ -23,15 +36,26 @@ 
import java.util.Arrays; import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonArray; +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonDeserializer; import com.google.gson.JsonElement; import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; import com.google.gson.TypeAdapterFactory; import com.google.gson.reflect.TypeToken; +import com.google.gson.TypeAdapter; +import com.google.gson.stream.JsonReader; +import com.google.gson.stream.JsonWriter; +import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.Locale; import com.equinix.sdk.fabricv4.JSON; @@ -39,7 +63,7 @@ * Access point object */ @javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", comments = "Generator version: 7.16.0") -public class AccessPointS { +public class AccessPoint { public static final String SERIALIZED_NAME_TYPE = "type"; @SerializedName(SERIALIZED_NAME_TYPE) @javax.annotation.Nullable @@ -182,10 +206,10 @@ public static void validateJsonElement(JsonElement jsonElement) throws IOExcepti @javax.annotation.Nullable private RoleEnum role; - public AccessPointS() { + public AccessPoint() { } - public AccessPointS type(@javax.annotation.Nullable AccessPointType type) { + public AccessPoint type(@javax.annotation.Nullable AccessPointType type) { this.type = type; return this; } @@ -204,7 +228,7 @@ public void setType(@javax.annotation.Nullable AccessPointType type) { } - public AccessPointS account(@javax.annotation.Nullable SimplifiedAccount account) { + public AccessPoint account(@javax.annotation.Nullable SimplifiedAccount account) { this.account = account; return this; } @@ -223,7 +247,7 @@ public void setAccount(@javax.annotation.Nullable SimplifiedAccount account) { } - public AccessPointS location(@javax.annotation.Nullable SimplifiedLocation location) { + public AccessPoint location(@javax.annotation.Nullable SimplifiedLocation location) { this.location = location; return this; } @@ -242,7 +266,7 @@ public void setLocation(@javax.annotation.Nullable SimplifiedLocation location) } - public AccessPointS port(@javax.annotation.Nullable SimplifiedPort port) { + public AccessPoint port(@javax.annotation.Nullable SimplifiedPort port) { this.port = port; return this; } @@ -261,7 +285,7 @@ public void setPort(@javax.annotation.Nullable SimplifiedPort port) { } - public AccessPointS profile(@javax.annotation.Nullable SimplifiedServiceProfile profile) { + public AccessPoint profile(@javax.annotation.Nullable SimplifiedServiceProfile profile) { this.profile = profile; return this; } @@ -280,7 +304,7 @@ public void setProfile(@javax.annotation.Nullable SimplifiedServiceProfile profi } - public AccessPointS router(@javax.annotation.Nullable CloudRouter router) { + public AccessPoint router(@javax.annotation.Nullable CloudRouter router) { this.router = router; return this; } @@ -299,7 +323,7 @@ public void setRouter(@javax.annotation.Nullable CloudRouter router) { } - public AccessPointS linkProtocol(@javax.annotation.Nullable SimplifiedLinkProtocol linkProtocol) { + public AccessPoint linkProtocol(@javax.annotation.Nullable SimplifiedLinkProtocol linkProtocol) { this.linkProtocol = linkProtocol; return this; } @@ -318,7 +342,7 @@ public void setLinkProtocol(@javax.annotation.Nullable SimplifiedLinkProtocol li } - public AccessPointS virtualDevice(@javax.annotation.Nullable VirtualDevice 
virtualDevice) { + public AccessPoint virtualDevice(@javax.annotation.Nullable VirtualDevice virtualDevice) { this.virtualDevice = virtualDevice; return this; } @@ -337,7 +361,7 @@ public void setVirtualDevice(@javax.annotation.Nullable VirtualDevice virtualDev } - public AccessPointS _interface(@javax.annotation.Nullable ModelInterface _interface) { + public AccessPoint _interface(@javax.annotation.Nullable ModelInterface _interface) { this._interface = _interface; return this; } @@ -356,7 +380,7 @@ public void setInterface(@javax.annotation.Nullable ModelInterface _interface) { } - public AccessPointS network(@javax.annotation.Nullable SimplifiedNetwork network) { + public AccessPoint network(@javax.annotation.Nullable SimplifiedNetwork network) { this.network = network; return this; } @@ -375,7 +399,7 @@ public void setNetwork(@javax.annotation.Nullable SimplifiedNetwork network) { } - public AccessPointS sellerRegion(@javax.annotation.Nullable String sellerRegion) { + public AccessPoint sellerRegion(@javax.annotation.Nullable String sellerRegion) { this.sellerRegion = sellerRegion; return this; } @@ -394,7 +418,7 @@ public void setSellerRegion(@javax.annotation.Nullable String sellerRegion) { } - public AccessPointS peeringType(@javax.annotation.Nullable PeeringType peeringType) { + public AccessPoint peeringType(@javax.annotation.Nullable PeeringType peeringType) { this.peeringType = peeringType; return this; } @@ -413,7 +437,7 @@ public void setPeeringType(@javax.annotation.Nullable PeeringType peeringType) { } - public AccessPointS authenticationKey(@javax.annotation.Nullable String authenticationKey) { + public AccessPoint authenticationKey(@javax.annotation.Nullable String authenticationKey) { this.authenticationKey = authenticationKey; return this; } @@ -432,7 +456,7 @@ public void setAuthenticationKey(@javax.annotation.Nullable String authenticatio } - public AccessPointS providerConnectionId(@javax.annotation.Nullable String providerConnectionId) { + public AccessPoint providerConnectionId(@javax.annotation.Nullable String providerConnectionId) { this.providerConnectionId = providerConnectionId; return this; } @@ -451,7 +475,7 @@ public void setProviderConnectionId(@javax.annotation.Nullable String providerCo } - public AccessPointS virtualNetwork(@javax.annotation.Nullable VirtualNetwork virtualNetwork) { + public AccessPoint virtualNetwork(@javax.annotation.Nullable VirtualNetwork virtualNetwork) { this.virtualNetwork = virtualNetwork; return this; } @@ -470,7 +494,7 @@ public void setVirtualNetwork(@javax.annotation.Nullable VirtualNetwork virtualN } - public AccessPointS interconnection(@javax.annotation.Nullable MetalInterconnection interconnection) { + public AccessPoint interconnection(@javax.annotation.Nullable MetalInterconnection interconnection) { this.interconnection = interconnection; return this; } @@ -489,7 +513,7 @@ public void setInterconnection(@javax.annotation.Nullable MetalInterconnection i } - public AccessPointS vpicInterface(@javax.annotation.Nullable VpicInterface vpicInterface) { + public AccessPoint vpicInterface(@javax.annotation.Nullable VpicInterface vpicInterface) { this.vpicInterface = vpicInterface; return this; } @@ -508,7 +532,7 @@ public void setVpicInterface(@javax.annotation.Nullable VpicInterface vpicInterf } - public AccessPointS role(@javax.annotation.Nullable RoleEnum role) { + public AccessPoint role(@javax.annotation.Nullable RoleEnum role) { this.role = role; return this; } @@ -541,7 +565,7 @@ public void 
setRole(@javax.annotation.Nullable RoleEnum role) { * @param value value of the property * @return the AccessPoint instance itself */ - public AccessPointS putAdditionalProperty(String key, Object value) { + public AccessPoint putAdditionalProperty(String key, Object value) { if (this.additionalProperties == null) { this.additionalProperties = new HashMap(); } @@ -580,26 +604,26 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - AccessPointS accessPointS = (AccessPointS) o; - return Objects.equals(this.type, accessPointS.type) && - Objects.equals(this.account, accessPointS.account) && - Objects.equals(this.location, accessPointS.location) && - Objects.equals(this.port, accessPointS.port) && - Objects.equals(this.profile, accessPointS.profile) && - Objects.equals(this.router, accessPointS.router) && - Objects.equals(this.linkProtocol, accessPointS.linkProtocol) && - Objects.equals(this.virtualDevice, accessPointS.virtualDevice) && - Objects.equals(this._interface, accessPointS._interface) && - Objects.equals(this.network, accessPointS.network) && - Objects.equals(this.sellerRegion, accessPointS.sellerRegion) && - Objects.equals(this.peeringType, accessPointS.peeringType) && - Objects.equals(this.authenticationKey, accessPointS.authenticationKey) && - Objects.equals(this.providerConnectionId, accessPointS.providerConnectionId) && - Objects.equals(this.virtualNetwork, accessPointS.virtualNetwork) && - Objects.equals(this.interconnection, accessPointS.interconnection) && - Objects.equals(this.vpicInterface, accessPointS.vpicInterface) && - Objects.equals(this.role, accessPointS.role)&& - Objects.equals(this.additionalProperties, accessPointS.additionalProperties); + AccessPoint accessPoint = (AccessPoint) o; + return Objects.equals(this.type, accessPoint.type) && + Objects.equals(this.account, accessPoint.account) && + Objects.equals(this.location, accessPoint.location) && + Objects.equals(this.port, accessPoint.port) && + Objects.equals(this.profile, accessPoint.profile) && + Objects.equals(this.router, accessPoint.router) && + Objects.equals(this.linkProtocol, accessPoint.linkProtocol) && + Objects.equals(this.virtualDevice, accessPoint.virtualDevice) && + Objects.equals(this._interface, accessPoint._interface) && + Objects.equals(this.network, accessPoint.network) && + Objects.equals(this.sellerRegion, accessPoint.sellerRegion) && + Objects.equals(this.peeringType, accessPoint.peeringType) && + Objects.equals(this.authenticationKey, accessPoint.authenticationKey) && + Objects.equals(this.providerConnectionId, accessPoint.providerConnectionId) && + Objects.equals(this.virtualNetwork, accessPoint.virtualNetwork) && + Objects.equals(this.interconnection, accessPoint.interconnection) && + Objects.equals(this.vpicInterface, accessPoint.vpicInterface) && + Objects.equals(this.role, accessPoint.role)&& + Objects.equals(this.additionalProperties, accessPoint.additionalProperties); } @Override @@ -665,8 +689,8 @@ private String toIndentedString(Object o) { */ public static void validateJsonElement(JsonElement jsonElement) throws IOException { if (jsonElement == null) { - if (!AccessPointS.openapiRequiredFields.isEmpty()) { // has required fields but JSON element is null - throw new IllegalArgumentException(String.format(Locale.ROOT, "The required field(s) %s in AccessPoint is not found in the empty JSON string", AccessPointS.openapiRequiredFields.toString())); + if (!AccessPoint.openapiRequiredFields.isEmpty()) { // has required fields but JSON 
element is null + throw new IllegalArgumentException(String.format(Locale.ROOT, "The required field(s) %s in AccessPoint is not found in the empty JSON string", AccessPoint.openapiRequiredFields.toString())); } } JsonObject jsonObj = jsonElement.getAsJsonObject(); @@ -748,16 +772,16 @@ public static class CustomTypeAdapterFactory implements TypeAdapterFactory { @SuppressWarnings("unchecked") @Override public TypeAdapter create(Gson gson, TypeToken type) { - if (!AccessPointS.class.isAssignableFrom(type.getRawType())) { + if (!AccessPoint.class.isAssignableFrom(type.getRawType())) { return null; // this class only serializes 'AccessPoint' and its subtypes } final TypeAdapter elementAdapter = gson.getAdapter(JsonElement.class); - final TypeAdapter thisAdapter - = gson.getDelegateAdapter(this, TypeToken.get(AccessPointS.class)); + final TypeAdapter thisAdapter + = gson.getDelegateAdapter(this, TypeToken.get(AccessPoint.class)); - return (TypeAdapter) new TypeAdapter() { + return (TypeAdapter) new TypeAdapter() { @Override - public void write(JsonWriter out, AccessPointS value) throws IOException { + public void write(JsonWriter out, AccessPoint value) throws IOException { JsonObject obj = thisAdapter.toJsonTree(value).getAsJsonObject(); obj.remove("additionalProperties"); // serialize additional properties @@ -785,12 +809,12 @@ else if (entry.getValue() instanceof Character) } @Override - public AccessPointS read(JsonReader in) throws IOException { + public AccessPoint read(JsonReader in) throws IOException { JsonElement jsonElement = elementAdapter.read(in); validateJsonElement(jsonElement); JsonObject jsonObj = jsonElement.getAsJsonObject(); // store additional fields in the deserialized instance - AccessPointS instance = thisAdapter.fromJsonTree(jsonObj); + AccessPoint instance = thisAdapter.fromJsonTree(jsonObj); for (Map.Entry entry : jsonObj.entrySet()) { if (!openapiFields.contains(entry.getKey())) { if (entry.getValue().isJsonPrimitive()) { // primitive type @@ -823,8 +847,8 @@ else if (entry.getValue().getAsJsonPrimitive().isBoolean()) * @return An instance of AccessPoint * @throws IOException if the JSON string is invalid with respect to AccessPoint */ - public static AccessPointS fromJson(String jsonString) throws IOException { - return JSON.getGson().fromJson(jsonString, AccessPointS.class); + public static AccessPoint fromJson(String jsonString) throws IOException { + return JSON.getGson().fromJson(jsonString, AccessPoint.class); } /** diff --git a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/ConnectionSide.java b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/ConnectionSide.java index 97794347..4cf0cc83 100644 --- a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/ConnectionSide.java +++ b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/ConnectionSide.java @@ -13,8 +13,14 @@ import java.util.Objects; import java.util.Locale; - +import com.equinix.sdk.fabricv4.model.AccessPoint; +import com.equinix.sdk.fabricv4.model.ConnectionCompanyProfile; +import com.equinix.sdk.fabricv4.model.ConnectionInvitation; +import com.equinix.sdk.fabricv4.model.ConnectionSideAdditionalInfo; +import com.equinix.sdk.fabricv4.model.InternetAccess; +import com.equinix.sdk.fabricv4.model.ServiceToken; import com.google.gson.TypeAdapter; +import com.google.gson.annotations.JsonAdapter; import com.google.gson.annotations.SerializedName; import com.google.gson.stream.JsonReader; import com.google.gson.stream.JsonWriter; @@ -24,15 +30,26 @@ import 
java.util.List; import com.google.gson.Gson; +import com.google.gson.GsonBuilder; import com.google.gson.JsonArray; +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonDeserializer; import com.google.gson.JsonElement; import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; import com.google.gson.TypeAdapterFactory; import com.google.gson.reflect.TypeToken; +import com.google.gson.TypeAdapter; +import com.google.gson.stream.JsonReader; +import com.google.gson.stream.JsonWriter; +import java.io.IOException; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.Locale; import com.equinix.sdk.fabricv4.JSON; @@ -49,7 +66,7 @@ public class ConnectionSide { public static final String SERIALIZED_NAME_ACCESS_POINT = "accessPoint"; @SerializedName(SERIALIZED_NAME_ACCESS_POINT) @javax.annotation.Nullable - private AccessPointS accessPointS; + private AccessPoint accessPoint; public static final String SERIALIZED_NAME_INTERNET_ACCESS = "internetAccess"; @SerializedName(SERIALIZED_NAME_INTERNET_ACCESS) @@ -93,8 +110,8 @@ public void setServiceToken(@javax.annotation.Nullable ServiceToken serviceToken } - public ConnectionSide accessPoint(@javax.annotation.Nullable AccessPointS accessPointS) { - this.accessPointS = accessPointS; + public ConnectionSide accessPoint(@javax.annotation.Nullable AccessPoint accessPoint) { + this.accessPoint = accessPoint; return this; } @@ -103,12 +120,12 @@ public ConnectionSide accessPoint(@javax.annotation.Nullable AccessPointS access * @return accessPoint */ @javax.annotation.Nullable - public AccessPointS getAccessPoint() { - return accessPointS; + public AccessPoint getAccessPoint() { + return accessPoint; } - public void setAccessPoint(@javax.annotation.Nullable AccessPointS accessPointS) { - this.accessPointS = accessPointS; + public void setAccessPoint(@javax.annotation.Nullable AccessPoint accessPoint) { + this.accessPoint = accessPoint; } @@ -251,7 +268,7 @@ public boolean equals(Object o) { } ConnectionSide connectionSide = (ConnectionSide) o; return Objects.equals(this.serviceToken, connectionSide.serviceToken) && - Objects.equals(this.accessPointS, connectionSide.accessPointS) && + Objects.equals(this.accessPoint, connectionSide.accessPoint) && Objects.equals(this.internetAccess, connectionSide.internetAccess) && Objects.equals(this.companyProfile, connectionSide.companyProfile) && Objects.equals(this.invitation, connectionSide.invitation) && @@ -261,7 +278,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(serviceToken, accessPointS, internetAccess, companyProfile, invitation, additionalInfo, additionalProperties); + return Objects.hash(serviceToken, accessPoint, internetAccess, companyProfile, invitation, additionalInfo, additionalProperties); } @Override @@ -269,7 +286,7 @@ public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ConnectionSide {\n"); sb.append(" serviceToken: ").append(toIndentedString(serviceToken)).append("\n"); - sb.append(" accessPoint: ").append(toIndentedString(accessPointS)).append("\n"); + sb.append(" accessPoint: ").append(toIndentedString(accessPoint)).append("\n"); sb.append(" internetAccess: ").append(toIndentedString(internetAccess)).append("\n"); sb.append(" companyProfile: ").append(toIndentedString(companyProfile)).append("\n"); sb.append(" invitation: ").append(toIndentedString(invitation)).append("\n"); @@ -321,7 +338,7 
@@ public static void validateJsonElement(JsonElement jsonElement) throws IOExcepti } // validate the optional field `accessPoint` if (jsonObj.get("accessPoint") != null && !jsonObj.get("accessPoint").isJsonNull()) { - AccessPointS.validateJsonElement(jsonObj.get("accessPoint")); + AccessPoint.validateJsonElement(jsonObj.get("accessPoint")); } // validate the optional field `internetAccess` if (jsonObj.get("internetAccess") != null && !jsonObj.get("internetAccess").isJsonNull()) { diff --git a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/VirtualConnectionSide.java b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/VirtualConnectionSide.java index 41d10fd9..2638992d 100644 --- a/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/VirtualConnectionSide.java +++ b/services/fabricv4/src/main/java/com/equinix/sdk/fabricv4/model/VirtualConnectionSide.java @@ -13,8 +13,9 @@ import java.util.Objects; import java.util.Locale; - +import com.equinix.sdk.fabricv4.model.AccessPoint; import com.google.gson.TypeAdapter; +import com.google.gson.annotations.JsonAdapter; import com.google.gson.annotations.SerializedName; import com.google.gson.stream.JsonReader; import com.google.gson.stream.JsonWriter; @@ -22,15 +23,26 @@ import java.util.Arrays; import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonArray; +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonDeserializer; import com.google.gson.JsonElement; import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; import com.google.gson.TypeAdapterFactory; import com.google.gson.reflect.TypeToken; +import com.google.gson.TypeAdapter; +import com.google.gson.stream.JsonReader; +import com.google.gson.stream.JsonWriter; +import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.Locale; import com.equinix.sdk.fabricv4.JSON; @@ -42,13 +54,13 @@ public class VirtualConnectionSide { public static final String SERIALIZED_NAME_ACCESS_POINT = "accessPoint"; @SerializedName(SERIALIZED_NAME_ACCESS_POINT) @javax.annotation.Nullable - private AccessPointS accessPointS; + private AccessPoint accessPoint; public VirtualConnectionSide() { } - public VirtualConnectionSide accessPoint(@javax.annotation.Nullable AccessPointS accessPointS) { - this.accessPointS = accessPointS; + public VirtualConnectionSide accessPoint(@javax.annotation.Nullable AccessPoint accessPoint) { + this.accessPoint = accessPoint; return this; } @@ -57,12 +69,12 @@ public VirtualConnectionSide accessPoint(@javax.annotation.Nullable AccessPointS * @return accessPoint */ @javax.annotation.Nullable - public AccessPointS getAccessPoint() { - return accessPointS; + public AccessPoint getAccessPoint() { + return accessPoint; } - public void setAccessPoint(@javax.annotation.Nullable AccessPointS accessPointS) { - this.accessPointS = accessPointS; + public void setAccessPoint(@javax.annotation.Nullable AccessPoint accessPoint) { + this.accessPoint = accessPoint; } /** @@ -120,20 +132,20 @@ public boolean equals(Object o) { return false; } VirtualConnectionSide virtualConnectionSide = (VirtualConnectionSide) o; - return Objects.equals(this.accessPointS, virtualConnectionSide.accessPointS)&& + return Objects.equals(this.accessPoint, virtualConnectionSide.accessPoint)&& Objects.equals(this.additionalProperties, virtualConnectionSide.additionalProperties); } @Override public int 
hashCode() { - return Objects.hash(accessPointS, additionalProperties); + return Objects.hash(accessPoint, additionalProperties); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class VirtualConnectionSide {\n"); - sb.append(" accessPoint: ").append(toIndentedString(accessPointS)).append("\n"); + sb.append(" accessPoint: ").append(toIndentedString(accessPoint)).append("\n"); sb.append(" additionalProperties: ").append(toIndentedString(additionalProperties)).append("\n"); sb.append("}"); return sb.toString(); @@ -177,7 +189,7 @@ public static void validateJsonElement(JsonElement jsonElement) throws IOExcepti JsonObject jsonObj = jsonElement.getAsJsonObject(); // validate the optional field `accessPoint` if (jsonObj.get("accessPoint") != null && !jsonObj.get("accessPoint").isJsonNull()) { - AccessPointS.validateJsonElement(jsonObj.get("accessPoint")); + AccessPoint.validateJsonElement(jsonObj.get("accessPoint")); } } From d89305797b1fcb85dfb1938a1dac730366692433 Mon Sep 17 00:00:00 2001 From: "tomasz.tutka" Date: Fri, 13 Feb 2026 17:30:22 +0100 Subject: [PATCH 11/15] updated --- .github/workflows/branch-file-analysis.yml | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/.github/workflows/branch-file-analysis.yml b/.github/workflows/branch-file-analysis.yml index 04965d75..bfbafa9c 100644 --- a/.github/workflows/branch-file-analysis.yml +++ b/.github/workflows/branch-file-analysis.yml @@ -5,7 +5,6 @@ on: types: [opened, synchronize, edited] branches: - main - - develop workflow_dispatch: inputs: pr_number: @@ -150,15 +149,13 @@ jobs: # Get current PR description CURRENT_DESC=$(gh pr view $PR_NUMBER --json body --jq '.body // ""') - # Create file analysis section - cat > file_analysis_section.md << 'EOF' -## 📋 Branch File Analysis - -${{ steps.file-analysis.outputs.analysis_result }} - ---- -*Auto-generated by Branch File Analysis workflow* -EOF + # Create file analysis section using echo to avoid YAML parsing issues + echo "## 📋 Branch File Analysis" > file_analysis_section.md + echo "" >> file_analysis_section.md + echo "${{ steps.file-analysis.outputs.analysis_result }}" >> file_analysis_section.md + echo "" >> file_analysis_section.md + echo "---" >> file_analysis_section.md + echo "*Auto-generated by Branch File Analysis workflow*" >> file_analysis_section.md # Check if file analysis section already exists and update/add accordingly if echo "$CURRENT_DESC" | grep -q "## 📋 Branch File Analysis"; then From 9052c3b1520d430358392de9f8b911bdba3b72ec Mon Sep 17 00:00:00 2001 From: "tomasz.tutka" Date: Fri, 13 Feb 2026 17:51:28 +0100 Subject: [PATCH 12/15] test flow --- .github/workflows/branch-file-analysis.yml | 210 ++---------------- .../fabric/tests/ConnectionsApiTest.java | 16 +- 2 files changed, 28 insertions(+), 198 deletions(-) diff --git a/.github/workflows/branch-file-analysis.yml b/.github/workflows/branch-file-analysis.yml index bfbafa9c..4bc33c52 100644 --- a/.github/workflows/branch-file-analysis.yml +++ b/.github/workflows/branch-file-analysis.yml @@ -1,21 +1,13 @@ name: "Branch File Analysis" on: - pull_request: - types: [opened, synchronize, edited] - branches: - - main - workflow_dispatch: - inputs: - pr_number: - description: 'PR number to analyze' - required: true - type: number + workflow_run: + workflows: ["SAuto commit client updates"] + types: [completed] permissions: contents: read pull-requests: write - actions: read jobs: branch-file-analysis: @@ -25,198 +17,36 @@ jobs: steps: - name: Checkout 
code uses: actions/checkout@v4 - with: - fetch-depth: 0 - token: ${{ secrets.GITHUB_TOKEN }} - name: Setup Python uses: actions/setup-python@v4 with: python-version: '3.9' - - name: Setup GitHub CLI + - name: Run analysis and update PR run: | - # Install GitHub CLI if not present - if ! command -v gh &> /dev/null; then - curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg - echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null - sudo apt update - sudo apt install gh - fi - - # Authenticate GitHub CLI + # Install GitHub CLI + curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg + echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null + sudo apt update + sudo apt install gh gh auth login --with-token <<< "${{ secrets.GITHUB_TOKEN }}" - - - name: Get PR details - id: pr-details - run: | - if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then - PR_NUMBER=${{ github.event.inputs.pr_number }} - else - PR_NUMBER=${{ github.event.number }} - fi - - echo "pr_number=$PR_NUMBER" >> $GITHUB_OUTPUT - - # Get PR info - PR_INFO=$(gh pr view $PR_NUMBER --json title,author,headRefName,baseRefName,body) - - # Extract values safely with multiline support - PR_TITLE=$(echo "$PR_INFO" | jq -r '.title // ""') - PR_AUTHOR=$(echo "$PR_INFO" | jq -r '.author.login // ""') - HEAD_BRANCH=$(echo "$PR_INFO" | jq -r '.headRefName // ""') - BASE_BRANCH=$(echo "$PR_INFO" | jq -r '.baseRefName // ""') - PR_BODY=$(echo "$PR_INFO" | jq -r '.body // ""') - - # Set outputs with proper escaping for multiline content - { - echo "pr_title<> $GITHUB_OUTPUT - - - name: Run branch file analysis - id: file-analysis - run: | - echo "🔍 Running branch file analysis..." - - # Check if analyzer script exists - if [ ! 
-f "script/branch_file_analyzer.py" ]; then - echo "❌ Branch file analyzer script not found at script/branch_file_analyzer.py" - echo "analysis_completed=false" >> $GITHUB_OUTPUT - echo "analysis_result=Branch file analyzer script not found" >> $GITHUB_OUTPUT - exit 0 - fi - # Make script executable - chmod +x script/branch_file_analyzer.py + # Get PR for branch + BRANCH="${{ github.event.workflow_run.head_branch }}" + PR_NUMBER=$(gh pr list --head "$BRANCH" --state open --json number --jq '.[0].number // empty') - # Run the analysis and capture output - echo "Running: python3 script/branch_file_analyzer.py" - - if python3 script/branch_file_analyzer.py > analysis_output.txt 2>&1; then - # Check if analysis produced results - if [ -s analysis_output.txt ]; then - echo "✅ Branch file analysis completed successfully" - echo "analysis_completed=true" >> $GITHUB_OUTPUT - - # Save the output with proper multiline handling - { - echo "analysis_result<> $GITHUB_OUTPUT - - echo "Analysis output:" - cat analysis_output.txt - else - echo "â„šī¸ No file changes detected" - echo "analysis_completed=true" >> $GITHUB_OUTPUT - echo "analysis_result=No file changes detected in this branch" >> $GITHUB_OUTPUT - fi - else - echo "❌ Branch file analysis failed" - echo "Analysis error output:" - cat analysis_output.txt - echo "analysis_completed=false" >> $GITHUB_OUTPUT - { - echo "analysis_result<> $GITHUB_OUTPUT - fi - - - name: Update PR description - if: steps.file-analysis.outputs.analysis_completed == 'true' - run: | - echo "📝 Updating PR description..." - - PR_NUMBER=${{ steps.pr-details.outputs.pr_number }} + # Run Python script and get result + ANALYSIS_RESULT=$(python3 script/branch_file_analyzer.py) # Get current PR description CURRENT_DESC=$(gh pr view $PR_NUMBER --json body --jq '.body // ""') - # Create file analysis section using echo to avoid YAML parsing issues - echo "## 📋 Branch File Analysis" > file_analysis_section.md - echo "" >> file_analysis_section.md - echo "${{ steps.file-analysis.outputs.analysis_result }}" >> file_analysis_section.md - echo "" >> file_analysis_section.md - echo "---" >> file_analysis_section.md - echo "*Auto-generated by Branch File Analysis workflow*" >> file_analysis_section.md - - # Check if file analysis section already exists and update/add accordingly - if echo "$CURRENT_DESC" | grep -q "## 📋 Branch File Analysis"; then - echo "🔄 Updating existing file analysis section..." - # Remove old file analysis section (from header to next ## or end) - NEW_DESC=$(echo "$CURRENT_DESC" | sed '/## 📋 Branch File Analysis/,/^---$/d' | sed '/^---$/d') - - # Add updated section - { - echo "$NEW_DESC" - echo "" - cat file_analysis_section.md - } > updated_desc.md - else - echo "➕ Adding new file analysis section..." 
- # Add new file analysis section - if [ -n "$CURRENT_DESC" ] && [ "$CURRENT_DESC" != "null" ]; then - { - echo "$CURRENT_DESC" - echo "" - cat file_analysis_section.md - } > updated_desc.md - else - cp file_analysis_section.md updated_desc.md - fi - fi - - # Update PR description - gh pr edit $PR_NUMBER --body-file updated_desc.md + # Remove old analysis section if exists + NEW_DESC=$(echo "$CURRENT_DESC" | sed '/## 📋 Branch File Analysis/,/^\*Auto-generated by Branch File Analysis workflow\*$/d') - echo "✅ PR description updated successfully" - - - name: Create analysis artifact - if: always() - uses: actions/upload-artifact@v4 - with: - name: branch-file-analysis-${{ steps.pr-details.outputs.pr_number }} - path: | - analysis_output.txt - file_analysis_section.md - updated_desc.md - retention-days: 7 - - - name: Summary - if: always() - run: | - echo "## 📋 Branch File Analysis Summary" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "**PR Number:** #${{ steps.pr-details.outputs.pr_number }}" >> $GITHUB_STEP_SUMMARY - echo "**Branch:** \`${{ steps.pr-details.outputs.head_branch }}\` → \`${{ steps.pr-details.outputs.base_branch }}\`" >> $GITHUB_STEP_SUMMARY - echo "**Analysis Status:** ${{ steps.file-analysis.outputs.analysis_completed == 'true' && '✅ Completed' || '❌ Failed' }}" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY + # Add new analysis section + UPDATED_DESC=$(printf "%s\n\n## 📋 Branch File Analysis\n\n%s\n\n---\n*Auto-generated by Branch File Analysis workflow*" "$NEW_DESC" "$ANALYSIS_RESULT") - if [ "${{ steps.file-analysis.outputs.analysis_completed }}" == "true" ]; then - echo "### Analysis Results" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "\`\`\`" >> $GITHUB_STEP_SUMMARY - echo "${{ steps.file-analysis.outputs.analysis_result }}" >> $GITHUB_STEP_SUMMARY - echo "\`\`\`" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "✅ PR description has been updated with file analysis results" >> $GITHUB_STEP_SUMMARY - else - echo "❌ Analysis failed or no changes detected" >> $GITHUB_STEP_SUMMARY - fi + # Update PR + gh pr edit $PR_NUMBER --body "$UPDATED_DESC" diff --git a/equinix-openapi-fabric-tests/src/test/java/com/equinix/openapi/fabric/tests/ConnectionsApiTest.java b/equinix-openapi-fabric-tests/src/test/java/com/equinix/openapi/fabric/tests/ConnectionsApiTest.java index 450c583e..dcbb6386 100644 --- a/equinix-openapi-fabric-tests/src/test/java/com/equinix/openapi/fabric/tests/ConnectionsApiTest.java +++ b/equinix-openapi-fabric-tests/src/test/java/com/equinix/openapi/fabric/tests/ConnectionsApiTest.java @@ -73,7 +73,7 @@ public void createConnectionVdColo() throws ApiException { .bandwidth(1000) .type(ConnectionType.EVPL_VC) .order(new Order().purchaseOrderNumber("pol123")) - .aSide(new ConnectionSide().accessPoint(new AccessPointS() + .aSide(new ConnectionSide().accessPoint(new AccessPoint() .type(AccessPointType.VD) .virtualDevice(new VirtualDevice() .type(VirtualDevice.TypeEnum.EDGE) @@ -84,7 +84,7 @@ public void createConnectionVdColo() throws ApiException { for (int i = 0; i < 3; i++) { int vlanTag = getRandomVlanNumber(); connectionPostRequest.zSide(new ConnectionSide().accessPoint( - new AccessPointS() + new AccessPoint() .type(AccessPointType.COLO) .port(new SimplifiedPort() .uuid(portUuid)) @@ -181,7 +181,7 @@ public static Connection createConnectionFCR2ToPort() throws ApiException { .bandwidth(1000) .project(new Project().projectId(userDto.getProjectId())) .aSide(new ConnectionSide().accessPoint( - new 
AccessPointS() + new AccessPoint() .type(AccessPointType.CLOUD_ROUTER) .router(new CloudRouter().uuid(cloudRouter.getUuid())))); @@ -189,7 +189,7 @@ public static Connection createConnectionFCR2ToPort() throws ApiException { for (int i = 0; i < 3; i++) { int tag = getRandomVlanNumber(); connectionPostRequest.zSide(new ConnectionSide().accessPoint( - new AccessPointS() + new AccessPoint() .type(AccessPointType.COLO) .port(new SimplifiedPort().uuid(port.getUuid())) .linkProtocol(new SimplifiedLinkProtocol() @@ -234,7 +234,7 @@ public static Connection createPort2SpConnection() throws ApiException { .redundancy(new ConnectionRedundancy().priority(ConnectionPriority.PRIMARY)) .order(new Order().purchaseOrderNumber("pol123")) .zSide(new ConnectionSide().accessPoint( - new AccessPointS() + new AccessPoint() .type(AccessPointType.SP) .profile(new SimplifiedServiceProfile() .type(ServiceProfileTypeEnum.L2_PROFILE) @@ -248,7 +248,7 @@ public static Connection createPort2SpConnection() throws ApiException { int sTag = getRandomVlanNumber(); int cTag = getRandomVlanNumber(); connectionPostRequest.aSide(new ConnectionSide().accessPoint( - new AccessPointS() + new AccessPoint() .type(AccessPointType.COLO) .port(new SimplifiedPort().uuid(UUID.fromString(portDto.getUuid()))) .linkProtocol(new SimplifiedLinkProtocol() @@ -317,14 +317,14 @@ public static Connection createPort2Port() throws ApiException { .type(ConnectionType.EVPL_VC) .redundancy(new ConnectionRedundancy().priority(ConnectionPriority.PRIMARY)) .aSide(new ConnectionSide().accessPoint( - new AccessPointS() + new AccessPoint() .type(AccessPointType.COLO) .port(new SimplifiedPort() .uuid(port.get(0).getUuid())) .linkProtocol(new SimplifiedLinkProtocol() .type(LinkProtocolType.DOT1Q).vlanTag(tagAside)))) .zSide(new ConnectionSide().accessPoint( - new AccessPointS() + new AccessPoint() .type(AccessPointType.COLO) .port(new SimplifiedPort() .uuid(port.get(1).getUuid())) From 7421bf496b4c491c93e8efcf8a1e9669bfa7b77c Mon Sep 17 00:00:00 2001 From: "tomasz.tutka" Date: Fri, 13 Feb 2026 17:53:14 +0100 Subject: [PATCH 13/15] test flow --- .../workflows/{branch-file-analysis.yml => pr-analysis.yml} | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename .github/workflows/{branch-file-analysis.yml => pr-analysis.yml} (94%) diff --git a/.github/workflows/branch-file-analysis.yml b/.github/workflows/pr-analysis.yml similarity index 94% rename from .github/workflows/branch-file-analysis.yml rename to .github/workflows/pr-analysis.yml index 4bc33c52..1b80b68f 100644 --- a/.github/workflows/branch-file-analysis.yml +++ b/.github/workflows/pr-analysis.yml @@ -1,8 +1,8 @@ -name: "Branch File Analysis" +name: "PR Analysis" on: workflow_run: - workflows: ["SAuto commit client updates"] + workflows: ["Auto commit client updates"] types: [completed] permissions: @@ -10,7 +10,7 @@ permissions: pull-requests: write jobs: - branch-file-analysis: + pr-analysis: name: Branch File Analysis runs-on: ubuntu-latest From 26e1d5438b59782c9ce3c9d1f276f1a23b3b1a3e Mon Sep 17 00:00:00 2001 From: "tomasz.tutka" Date: Fri, 13 Feb 2026 18:01:42 +0100 Subject: [PATCH 14/15] test flow --- .github/workflows/pr-analysis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/pr-analysis.yml b/.github/workflows/pr-analysis.yml index 1b80b68f..80534a62 100644 --- a/.github/workflows/pr-analysis.yml +++ b/.github/workflows/pr-analysis.yml @@ -8,11 +8,13 @@ on: permissions: contents: read pull-requests: write + actions: read jobs: pr-analysis: name: Branch File 
Analysis runs-on: ubuntu-latest + if: github.event.workflow_run.conclusion == 'success' steps: - name: Checkout code From 56040864eb487c19a4e5dfdfe30341c6797acfe6 Mon Sep 17 00:00:00 2001 From: "tomasz.tutka" Date: Fri, 13 Feb 2026 18:05:11 +0100 Subject: [PATCH 15/15] test flow --- .github/workflows/pr-analysis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-analysis.yml b/.github/workflows/pr-analysis.yml index 80534a62..96fecb59 100644 --- a/.github/workflows/pr-analysis.yml +++ b/.github/workflows/pr-analysis.yml @@ -2,7 +2,7 @@ name: "PR Analysis" on: workflow_run: - workflows: ["Auto commit client updates"] + workflows: ["Sync fabricv4 API spec"] types: [completed] permissions: