47 changes: 22 additions & 25 deletions .github/workflows/nextjs.yml → .github/workflows/astro.yml
@@ -1,8 +1,8 @@
# Sample workflow for building and deploying a Next.js site to GitHub Pages
# Sample workflow for building and deploying an Astro site to GitHub Pages
#
# To get started with Next.js see: https://nextjs.org/docs/getting-started
# To get started with Astro see: https://docs.astro.build/en/getting-started/
#
name: Deploy Next.js site to Pages
name: Deploy Astro site to Pages

on:
# Runs on pushes targeting the default branch
@@ -24,9 +24,13 @@ concurrency:
group: "pages"
cancel-in-progress: false

env:
BUILD_PATH: "." # default value when not using subfolders
# BUILD_PATH: subfolder

jobs:
# Build job
build:
name: Build
runs-on: ubuntu-latest
steps:
- name: Checkout
@@ -38,11 +42,13 @@ jobs:
echo "manager=yarn" >> $GITHUB_OUTPUT
echo "command=install" >> $GITHUB_OUTPUT
echo "runner=yarn" >> $GITHUB_OUTPUT
echo "lockfile=yarn.lock" >> $GITHUB_OUTPUT
exit 0
elif [ -f "${{ github.workspace }}/package.json" ]; then
echo "manager=npm" >> $GITHUB_OUTPUT
echo "command=ci" >> $GITHUB_OUTPUT
echo "runner=npx --no-install" >> $GITHUB_OUTPUT
echo "lockfile=package-lock.json" >> $GITHUB_OUTPUT
exit 0
else
echo "Unable to determine package manager"
@@ -53,40 +59,31 @@
with:
node-version: "20"
cache: ${{ steps.detect-package-manager.outputs.manager }}
cache-dependency-path: ${{ env.BUILD_PATH }}/${{ steps.detect-package-manager.outputs.lockfile }}
- name: Setup Pages
id: pages
uses: actions/configure-pages@v5
with:
# Automatically inject basePath in your Next.js configuration file and disable
# server side image optimization (https://nextjs.org/docs/api-reference/next/image#unoptimized).
#
# You may remove this line if you want to manage the configuration yourself.
static_site_generator: next
- name: Restore cache
uses: actions/cache@v4
with:
path: |
.next/cache
# Generate a new cache whenever packages or source files change.
key: ${{ runner.os }}-nextjs-${{ hashFiles('**/package-lock.json', '**/yarn.lock') }}-${{ hashFiles('**.[jt]s', '**.[jt]sx') }}
# If source files changed but packages didn't, rebuild from a prior cache.
restore-keys: |
${{ runner.os }}-nextjs-${{ hashFiles('**/package-lock.json', '**/yarn.lock') }}-
- name: Install dependencies
run: ${{ steps.detect-package-manager.outputs.manager }} ${{ steps.detect-package-manager.outputs.command }}
- name: Build with Next.js
run: ${{ steps.detect-package-manager.outputs.runner }} next build
working-directory: ${{ env.BUILD_PATH }}
- name: Build with Astro
run: |
${{ steps.detect-package-manager.outputs.runner }} astro build \
--site "${{ steps.pages.outputs.origin }}" \
--base "${{ steps.pages.outputs.base_path }}"
working-directory: ${{ env.BUILD_PATH }}
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
path: ./out
path: ${{ env.BUILD_PATH }}/dist

# Deployment job
deploy:
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
needs: build
runs-on: ubuntu-latest
name: Deploy
steps:
- name: Deploy to GitHub Pages
id: deployment
File renamed without changes.
1 change: 1 addition & 0 deletions .github/workflows/gen-man.yml
@@ -0,0 +1 @@

File renamed without changes.
1 change: 1 addition & 0 deletions api/submit
@@ -0,0 +1 @@

12 changes: 12 additions & 0 deletions app/ai_review.py
@@ -0,0 +1,12 @@
import random

def analyze_pr(repo_url: str, pr_number: int):
# Real logic would clone repo, checkout PR diff, run LLM, etc.
mock_comments = [
f"✅ Efficient use of data structures in PR #{pr_number}.",
"🧹 Consider removing unused imports.",
"🔒 Security tip: mask sensitive keys in logs.",
"📦 Use semantic versioning in your package updates.",
"🧠 Consider adding docstrings to helper functions."
]
return random.sample(mock_comments, k=3)
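The comment above notes that real logic would fetch the PR diff and run an LLM over it. A minimal sketch of just the diff-fetching half, assuming a GITHUB_TOKEN environment variable and the public GitHub REST API (fetch_pr_diff is a hypothetical helper, not part of this PR):

import os
import httpx

def fetch_pr_diff(repo_url: str, pr_number: int) -> str:
    # "https://github.com/owner/repo" -> "owner/repo"
    owner_repo = repo_url.rstrip("/").removesuffix(".git").split("github.com/")[-1]
    headers = {
        # Ask the GitHub API for the raw unified diff instead of JSON.
        "Accept": "application/vnd.github.v3.diff",
        "Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}",
    }
    resp = httpx.get(
        f"https://api.github.com/repos/{owner_repo}/pulls/{pr_number}",
        headers=headers,
        follow_redirects=True,
    )
    resp.raise_for_status()
    return resp.text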
36 changes: 18 additions & 18 deletions app/main.py
@@ -1,28 +1,28 @@
from fastapi import FastAPI, HTTPException
from fastapi import FastAPI, Request
from fastapi.staticfiles import StaticFiles
from fastapi.responses import JSONResponse, FileResponse
from pydantic import BaseModel
from celery.result import AsyncResult
from app.tasks import analyze_pull_request
import os
from app.ai_review import analyze_pr

app = FastAPI()

app.mount("/static", StaticFiles(directory="static"), name="static")

@app.get("/")
async def serve_index():
return FileResponse("static/index.html")

class PRRequest(BaseModel):
repo_url: str
pr_number: int

@app.post("/api/submit")
async def submit_pr(pr: PRRequest):
task = analyze_pull_request.delay(pr.repo_url, pr.pr_number)
return {"job_id": task.id}

@app.get("/api/status/{job_id}")
async def get_status(job_id: str):
task_result = AsyncResult(job_id)
if task_result.state == 'PENDING':
return {"status": "pending"}
elif task_result.state == 'SUCCESS':
return {"status": "completed", "result": task_result.result}
elif task_result.state == 'FAILURE':
return {"status": "failed", "error": str(task_result.result)}
else:
return {"status": task_result.state}
async def submit_review(pr: PRRequest):
try:
result = analyze_pr(pr.repo_url, pr.pr_number)
return JSONResponse(content={"success": True, "comments": result})
except Exception as e:
import logging
logging.error("An error occurred while processing the request", exc_info=True)
return JSONResponse(content={"success": False, "error": "An internal error has occurred."})
3 changes: 3 additions & 0 deletions app/run.sh
@@ -0,0 +1,3 @@
#!/bin/bash
# Make this script executable once beforehand: chmod +x app/run.sh
uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
30 changes: 29 additions & 1 deletion app/tasks.py
@@ -1,8 +1,36 @@
from celery import Celery
from typing import List
from celery import shared_task
from .utils.github_client import fetch_pull_request_diff
from .utils.llm_client import analyze_code_with_llm
from .models import save_review_comments
import httpx
import os
import difflib
from typing import List

@shared_task
def analyze_pull_request(repo_url: str, pr_number: int):
try:
# 1. Fetch PR diff
diff_text = fetch_pull_request_diff(repo_url, pr_number)
if not diff_text:
return {"success": False, "error": "Unable to fetch diff"}

# 2. Use LLM to analyze PR diff
review_comments = analyze_code_with_llm(diff_text)

# 3. Persist and/or post comments
save_review_comments(repo_url, pr_number, review_comments)

return {
"success": True,
"comments": review_comments,
}
except Exception as e:
return {
"success": False,
"error": str(e),
}

celery_app = Celery(
'tasks',
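With the /api/status route removed from app/main.py, nothing in the HTTP layer reads this task's result any more. A minimal sketch of polling it directly with Celery's AsyncResult, assuming the (truncated) Celery configuration above includes a result backend:

from celery.result import AsyncResult

from app.tasks import analyze_pull_request, celery_app

# Enqueue a review for a placeholder repo/PR and check on it later.
task = analyze_pull_request.delay("https://github.com/octocat/Hello-World", 1)
result = AsyncResult(task.id, app=celery_app)
if result.ready():
    print(result.result)  # {"success": True, "comments": [...]} or an error payload
else:
    print("still running:", result.state)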
9 changes: 9 additions & 0 deletions backend/app.py → backend/app/main.py
@@ -38,6 +38,15 @@ async def predict(request: PredictRequest):
@app.get("/health")
async def health_check():
return {"status": "healthy"}
# in FastAPI backend
@app.post("/api/submit")
async def submit_pr(payload: dict):
repo_url = payload.get("repo_url")
pr_number = payload.get("pr_number")
# enqueue task to Celery (review_pr is assumed to be imported from the project's Celery tasks module)
task = review_pr.delay(repo_url, pr_number)
return {"success": True, "task_id": task.id}


if __name__ == "__main__":
uvicorn.run(app, host="0.0.0.0", port=8000)
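The new backend route accepts a raw dict, so a missing repo_url or pr_number only surfaces as None inside the handler. A sketch of the same route with request validation, mirroring the PRRequest model already used in app/main.py (review_pr is still assumed to be the project's Celery task):

from pydantic import BaseModel

class PRRequest(BaseModel):
    repo_url: str
    pr_number: int

@app.post("/api/submit")
async def submit_pr(pr: PRRequest):
    # FastAPI rejects the request with a 422 if either field is missing or mistyped.
    task = review_pr.delay(pr.repo_url, pr.pr_number)
    return {"success": True, "task_id": task.id}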
6 changes: 6 additions & 0 deletions database.py
@@ -0,0 +1,6 @@
# models.py or database.py

def save_review_comments(repo_url: str, pr_number: int, comments: list):
print(f"Saving review for {repo_url} PR #{pr_number}")
for comment in comments:
print(f"- {comment}")