diff --git a/.gitignore b/.gitignore
index 2eea525..7feb4eb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,68 @@
-.env
\ No newline at end of file
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# environment variables
+.env
diff --git a/db/__init__.py b/db/__init__.py
index af7482f..edd3366 100644
--- a/db/__init__.py
+++ b/db/__init__.py
@@ -2,29 +2,30 @@
 from dotenv import load_dotenv
 from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
-from sqlalchemy.orm import sessionmaker 
+from sqlalchemy.orm import sessionmaker
 from sqlalchemy.pool import NullPool
 
 load_dotenv(".env")
 
 def get_postgres_uri():
-    DB_HOST = os.getenv('POSTGRES_DB_HOST')
-    DB_NAME = os.getenv('POSTGRES_DB_NAME')
-    DB_USER = os.getenv('POSTGRES_DB_USER')
-    DB_PASS = os.getenv('POSTGRES_DB_PASS')
+    DB_HOST = os.getenv("POSTGRES_DB_HOST")
+    DB_NAME = os.getenv("POSTGRES_DB_NAME")
+    DB_USER = os.getenv("POSTGRES_DB_USER")
+    DB_PASS = os.getenv("POSTGRES_DB_PASS")
+
+    return f"postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}"
 
-    return f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}'
-
 
 class PostgresORM:
-
     def __init__(self):
-        DATABASE_URL = get_postgres_uri() 
+        DATABASE_URL = get_postgres_uri()
 
         # Initialize Async SQLAlchemy
-        engine = create_async_engine(DATABASE_URL, echo=False,poolclass=NullPool)
-        async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession)
+        engine = create_async_engine(DATABASE_URL, echo=False, poolclass=NullPool)
+        async_session = sessionmaker(
+            autocommit=False, autoflush=False, bind=engine, class_=AsyncSession
+        )
 
         self.session = async_session
-    
+
     def get_instance():
-        return PostgresORM()
\ No newline at end of file
+        return PostgresORM()
diff --git a/db/__pycache__/__init__.cpython-310.pyc b/db/__pycache__/__init__.cpython-310.pyc
deleted file mode 100644
index d65e3e5..0000000
Binary files a/db/__pycache__/__init__.cpython-310.pyc and /dev/null differ
diff --git a/db/__pycache__/models.cpython-310.pyc b/db/__pycache__/models.cpython-310.pyc
deleted file mode 100644
index 860a0bb..0000000
Binary files a/db/__pycache__/models.cpython-310.pyc and /dev/null differ
diff --git a/db/discord_bot.py b/db/discord_bot.py
index 16185b8..2ca3fbd 100644
--- a/db/discord_bot.py
+++ b/db/discord_bot.py
@@ -1,12 +1,19 @@
 import os
 from dotenv import load_dotenv
-from sqlalchemy import create_engine, select,
desc, update, delete +from sqlalchemy import select, desc, update, delete from sqlalchemy.orm import sessionmaker -from .models import * from sqlalchemy.ext.declarative import DeclarativeMeta from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession from sqlalchemy.pool import NullPool +from db.models import ( + Base, + Chapters, + ContributorsRegistration, + Leaderboard, + VcLogs, + ContributorsDiscord, +) # load_dotenv() @@ -14,24 +21,27 @@ def get_postgres_uri(): - DB_HOST = os.getenv('POSTGRES_DB_HOST') - DB_NAME = os.getenv('POSTGRES_DB_NAME') - DB_USER = os.getenv('POSTGRES_DB_USER') - DB_PASS = os.getenv('POSTGRES_DB_PASS') + DB_HOST = os.getenv("POSTGRES_DB_HOST") + DB_NAME = os.getenv("POSTGRES_DB_NAME") + DB_USER = os.getenv("POSTGRES_DB_USER") + DB_PASS = os.getenv("POSTGRES_DB_PASS") + + return f"postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}" - return f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}' class DiscordBotQueries: def __init__(self): - DATABASE_URL = get_postgres_uri() + DATABASE_URL = get_postgres_uri() # Initialize Async SQLAlchemy - engine = create_async_engine(DATABASE_URL, echo=False,poolclass=NullPool) - async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession) + engine = create_async_engine(DATABASE_URL, echo=False, poolclass=NullPool) + async_session = sessionmaker( + autocommit=False, autoflush=False, bind=engine, class_=AsyncSession + ) self.session = async_session def convert_dict(self, data): try: - if type(data) == list: + if isinstance(data, list): data = [val.to_dict() for val in data] else: return [data.to_dict()] @@ -65,7 +75,9 @@ def read(self, table_class, query_key, query_value, columns=None): stmt = stmt.where(getattr(table_class, query_key) == query_value) if columns: - stmt = stmt.with_only_columns(*(getattr(table_class, col) for col in columns)) + stmt = stmt.with_only_columns( + *(getattr(table_class, col) for col in columns) + ) result = self.session.execute(stmt) rows = result.fetchall() column_names = [col.name for col in stmt.columns] @@ -83,15 +95,23 @@ def get_class_by_tablename(self, tablename): try: for cls in Base.registry._class_registry.values(): if isinstance(cls, DeclarativeMeta): - if hasattr(cls, '__tablename__') and cls.__tablename__ == tablename: + if hasattr(cls, "__tablename__") and cls.__tablename__ == tablename: return cls return None except Exception as e: print(f"ERROR get_class_by_tablename - {e}") return None - def read_by_order_limit(self, table_class, query_key, query_value, order_column, order_by=False, limit=1, - columns="*"): + def read_by_order_limit( + self, + table_class, + query_key, + query_value, + order_column, + order_by=False, + limit=1, + columns="*", + ): try: stmt = select(table_class) stmt = stmt.where(getattr(table_class, query_key) == query_value) @@ -102,13 +122,15 @@ def read_by_order_limit(self, table_class, query_key, query_value, order_column, stmt = stmt.limit(limit) if columns != "*": - stmt = stmt.with_only_columns(*(getattr(table_class, col) for col in columns)) + stmt = stmt.with_only_columns( + *(getattr(table_class, col) for col in columns) + ) result = self.session.execute(stmt) results = result.fetchall() # Convert results to list of dictionaries - column_names = [col['name'] for col in result.keys()] + column_names = [col["name"] for col in result.keys()] data = [dict(zip(column_names, row)) for row in results] return data @@ -136,9 +158,11 @@ def update(self, table_class, update_data, query_key, 
query_value): try: stmt = ( update(table_class) - .where(getattr(table_class, query_key) == query_value) - .values(update_data) - .returning(*[getattr(table_class, col) for col in update_data.keys()]) # Return updated columns + .where(getattr(table_class, query_key) == query_value) + .values(update_data) + .returning( + *[getattr(table_class, col) for col in update_data.keys()] + ) # Return updated columns ) result = self.session.execute(stmt) @@ -151,7 +175,8 @@ def update(self, table_class, update_data, query_key, query_value): else: return None except Exception as e: - import pdb; + import pdb + pdb.set_trace() print("Error updating record:", e) return None @@ -168,8 +193,11 @@ def insert(self, table, data): return None def memberIsAuthenticated(self, member): - data = self.session.query(ContributorsRegistration).where( - ContributorsRegistration.discord_id == member.id).all() + data = ( + self.session.query(ContributorsRegistration) + .where(ContributorsRegistration.discord_id == member.id) + .all() + ) if data: return True else: @@ -177,17 +205,23 @@ def memberIsAuthenticated(self, member): def addChapter(self, roleId: int, orgName: str, type: str): try: - existing_record = self.session.query(Chapters).filter_by(discord_role_id=roleId).first() + existing_record = ( + self.session.query(Chapters).filter_by(discord_role_id=roleId).first() + ) if existing_record: existing_record.type = type existing_record.org_name = orgName else: - new_record = Chapters(discord_role_id=roleId, type=type, org_name=orgName) + new_record = Chapters( + discord_role_id=roleId, type=type, org_name=orgName + ) self.session.add(new_record) self.session.commit() - return existing_record.to_dict() if existing_record else new_record.to_dict() + return ( + existing_record.to_dict() if existing_record else new_record.to_dict() + ) except Exception as e: print("Error adding or updating chapter:", e) return None @@ -205,9 +239,24 @@ def deleteChapter(self, roleId: int): def _lookForRoles(self, roles): predefined_roles = { - "country": ["India", "Asia (Outside India)", "Europe", "Africa", "North America", "South America", - "Australia"], - "city": ["Delhi", "Bangalore", "Mumbai", "Pune", "Hyderabad", "Chennai", "Kochi"], + "country": [ + "India", + "Asia (Outside India)", + "Europe", + "Africa", + "North America", + "South America", + "Australia", + ], + "city": [ + "Delhi", + "Bangalore", + "Mumbai", + "Pune", + "Hyderabad", + "Chennai", + "Kochi", + ], "experience": [ "Tech Freshman", "Tech Sophomore", @@ -216,9 +265,9 @@ def _lookForRoles(self, roles): "Junior Developer", "Senior Developer", "Super Senior Developer", - "Champion Developer" + "Champion Developer", ], - "gender": ["M", "F", "NB"] + "gender": ["M", "F", "NB"], } chapter_roles = [] gender = None @@ -227,9 +276,9 @@ def _lookForRoles(self, roles): experience = None for role in roles: if role.name.startswith("College:"): - chapter_roles.append(role.name[len("College: "):]) + chapter_roles.append(role.name[len("College: ") :]) elif role.name.startswith("Corporate:"): - chapter_roles.append(role.name[len("Corporate: "):]) + chapter_roles.append(role.name[len("Corporate: ") :]) # gender for role in roles: @@ -260,14 +309,14 @@ def _lookForRoles(self, roles): "gender": gender, "country": country, "city": city, - "experience": experience + "experience": experience, } return user_roles async def updateContributor(self, contributor, table_class=None): try: async with self.session() as session: - if table_class == None: + if table_class is None: table_class = 
ContributorsDiscord chapters = self._lookForRoles(contributor["roles"])["chapter_roles"] gender = self._lookForRoles(contributor["roles"])["gender"] @@ -280,15 +329,19 @@ async def updateContributor(self, contributor, table_class=None): "gender": gender, "email": contributor["email"] if contributor["email"] else "", "is_active": contributor["is_active"], - "joined_at": contributor["joined_at"].replace(tzinfo=None), # Ensure naive datetime + "joined_at": contributor["joined_at"].replace( + tzinfo=None + ), # Ensure naive datetime } # Check if the record exists - stmt = select(table_class).where(table_class.discord_id == contributor["discord_id"]) + stmt = select(table_class).where( + table_class.discord_id == contributor["discord_id"] + ) result = await session.execute(stmt) existing_record = result.scalars().first() - print('existing record ', existing_record) + print("existing record ", existing_record) if existing_record: # Update existing record @@ -321,13 +374,17 @@ def updateContributors(self, contributors, table_class): "gender": gender, "joined_at": contributor.joined_at, } - existing_record = self.session.query(table_class).filter_by(discord_id=contributor.id).first() + existing_record = ( + self.session.query(table_class) + .filter_by(discord_id=contributor.id) + .first() + ) if existing_record: stmt = ( update(table_class) - .where(table_class.discord_id == contributor.id) - .values(update_data) + .where(table_class.discord_id == contributor.id) + .values(update_data) ) self.session.execute(stmt) else: @@ -342,9 +399,11 @@ def updateContributors(self, contributors, table_class): def deleteContributorDiscord(self, contributorDiscordIds, table_class=None): try: - if table_class == None: + if table_class is None: table_class = ContributorsDiscord - stmt = delete(table_class).where(table_class.discord_id.in_(contributorDiscordIds)) + stmt = delete(table_class).where( + table_class.discord_id.in_(contributorDiscordIds) + ) self.session.execute(stmt) self.session.commit() @@ -357,10 +416,12 @@ def deleteContributorDiscord(self, contributorDiscordIds, table_class=None): def read_all_active(self, table): if table == "contributors_discord": table = ContributorsDiscord - data = self.session.query(table).where(table.is_active == True).all() + data = self.session.query(table).where(table.is_active).all() return self.convert_dict(data) def invalidateContributorDiscord(self, contributorDiscordIds): table = "contributors_discord" for id in contributorDiscordIds: - self.client.table(table).update({'is_active': 'false'}).eq('discord_id', id).execute() + self.client.table(table).update({"is_active": "false"}).eq( + "discord_id", id + ).execute() diff --git a/db/dmp_api.py b/db/dmp_api.py index 6b27709..2802c1a 100644 --- a/db/dmp_api.py +++ b/db/dmp_api.py @@ -1,76 +1,65 @@ -from .models import * +from db.models import DmpOrgs, DmpIssues, DmpIssueUpdates, DmpPrUpdates from sqlalchemy import func -import os -from dotenv import load_dotenv -import sqlalchemy from sqlalchemy.future import select class DmpAPIQueries: - async def get_issue_query(async_session, year: int = None): try: query = select( - DmpOrgs.id.label('org_id'), - DmpOrgs.name.label('org_name'), + DmpOrgs.id.label("org_id"), + DmpOrgs.name.label("org_name"), func.json_agg( - func.json_build_object( - 'id', DmpIssues.id, - 'name', DmpIssues.title - ) - ).label('issues') + func.json_build_object("id", DmpIssues.id, "name", DmpIssues.title) + ).label("issues"), ) - + # Add join query = query.outerjoin(DmpIssues, DmpOrgs.id == 
DmpIssues.org_id) - + # Add year filter if provided if year is not None: query = query.where(DmpIssues.year == year) - + # Add group by and order by query = query.group_by(DmpOrgs.id).order_by(DmpOrgs.id) - + async with async_session() as session: results = await session.execute(query) # Extract results as a list of dictionaries if needed data = results.all() - + return data except Exception as e: print(f"An error occurred: get_column_value {e}") return None - - - async def get_issue_owner(async_session, name): - try: + + async def get_issue_owner(async_session, name): + try: async with async_session() as session: - response = await session.execute( - select(DmpOrgs).filter_by(name=name) - ) + response = await session.execute(select(DmpOrgs).filter_by(name=name)) results = response.scalars().all() return results except Exception as e: print(f"An error occurred: get_column_value {e}") return None - + async def get_actual_owner_query(async_session, owner): try: async with async_session() as session: response = await session.execute( - select(DmpIssues).filter(DmpIssues.repo_owner.like(f'%{owner}%')) + select(DmpIssues).filter(DmpIssues.repo_owner.like(f"%{owner}%")) ) results = response.scalars().all() # Fetch all matching rows as objects results = [val.to_dict() for val in results] # Convert objects to dicts return results except Exception as e: - print(f"An error occurred: get_column_value {e}") - return None - - + print(f"An error occurred: get_column_value {e}") + return None + async def get_dmp_issues(async_session, issue_id): - try: + try: async with async_session() as session: response = await session.execute( select(DmpIssues).filter_by(id=int(issue_id)) @@ -81,33 +70,31 @@ async def get_dmp_issues(async_session, issue_id): except Exception as e: print(f"An error occurred: get_column_value {e}") return None - - + async def get_dmp_issue_updates(async_session, dmp_issue_id): try: async with async_session() as session: response = await session.execute( select(DmpIssueUpdates).filter_by(dmp_id=int(dmp_issue_id)) - ) + ) results = response.scalars().all() # Fetch all matching rows as objects results = [val.to_dict() for val in results] # Convert objects to dicts return results except Exception as e: print(f"An error occurred: get_column_value {e}") return None - - + async def get_pr_data(async_session, dmp_issue_id): - try: + try: async with async_session() as session: response = await session.execute( select(DmpPrUpdates).filter_by(dmp_id=int(dmp_issue_id)) ) pr_updates = response.scalars().all() # Fetch all matching rows as objects - pr_updates_dict = [pr_update.to_dict() for pr_update in pr_updates] # Convert objects to dicts + pr_updates_dict = [ + pr_update.to_dict() for pr_update in pr_updates + ] # Convert objects to dicts return pr_updates_dict except Exception as e: print(f"An error occurred: get_column_value {e}") return None - - \ No newline at end of file diff --git a/db/dmp_cron.py b/db/dmp_cron.py index aa08857..1fd9d58 100644 --- a/db/dmp_cron.py +++ b/db/dmp_cron.py @@ -1,32 +1,31 @@ -from sqlalchemy.future import select -from .models import * -from sqlalchemy import update +from db.models import DmpOrgs, DmpIssues, DmpIssueUpdates, DmpPrUpdates, DmpWeekUpdates +from sqlalchemy import update, func from sqlalchemy.dialects.postgresql import insert -from datetime import datetime -from sqlalchemy.orm import aliased -import os -from sqlalchemy.orm import Session from sqlalchemy.exc import NoResultFound +from sqlalchemy.future import select +from sqlalchemy.orm import aliased 
+from datetime import datetime class DmpCronQueries: - async def get_timestamp(async_session, model, col_name: str, col: str, value): try: # Construct the ORM query - query = select(getattr(model, col_name)).filter(getattr(model, col) == value) - + query = select(getattr(model, col_name)).filter( + getattr(model, col) == value + ) + # Execute the query and fetch the result async with async_session() as session: result = await session.execute(query) return result.scalar() - + except NoResultFound: return None except Exception as e: print(f"An error occurred: get_column_value {e}") return None - + async def get_all_dmp_issues(async_session): try: async with async_session() as session: @@ -38,41 +37,49 @@ async def get_all_dmp_issues(async_session): select( DmpIssues, func.json_build_object( - 'created_at', dmp_org_alias.created_at, - 'description', dmp_org_alias.description, - 'id', dmp_org_alias.id, - 'link', dmp_org_alias.link, - 'name', dmp_org_alias.name, - 'repo_owner', dmp_org_alias.repo_owner - ).label('dmp_orgs') + "created_at", + dmp_org_alias.created_at, + "description", + dmp_org_alias.description, + "id", + dmp_org_alias.id, + "link", + dmp_org_alias.link, + "name", + dmp_org_alias.name, + "repo_owner", + dmp_org_alias.repo_owner, + ).label("dmp_orgs"), ) .outerjoin(dmp_org_alias, DmpIssues.org_id == dmp_org_alias.id) .filter(DmpIssues.org_id.isnot(None)) .order_by(DmpIssues.id) ) - + # Execute the query and fetch results result = await session.execute(query) rows = result.fetchall() - + # Convert results to dictionaries data = [] for row in rows: issue_dict = row._asdict() # Convert row to dict - dmp_orgs = issue_dict.pop('dmp_orgs') # Extract JSON object from row - issue_dict['dmp_orgs'] = dmp_orgs - issue_dict.update(issue_dict['DmpIssues'].to_dict()) + dmp_orgs = issue_dict.pop( + "dmp_orgs" + ) # Extract JSON object from row + issue_dict["dmp_orgs"] = dmp_orgs + issue_dict.update(issue_dict["DmpIssues"].to_dict()) # Add JSON object back to dict - del issue_dict['DmpIssues'] + del issue_dict["DmpIssues"] data.append(issue_dict) - + return data - + except Exception as e: print(e) raise Exception - - async def update_dmp_issue(async_session,issue_id: int, update_data: dict): + + async def update_dmp_issue(async_session, issue_id: int, update_data: dict): try: async with async_session() as session: async with session.begin(): @@ -82,99 +89,107 @@ async def update_dmp_issue(async_session,issue_id: int, update_data: dict): .where(DmpIssues.id == issue_id) .values(**update_data) ) - + # Execute the query await session.execute(query) await session.commit() return True - - except Exception as e: + + except Exception: return False - - - async def upsert_data_orm(async_session, update_data): - try: + async def upsert_data_orm(async_session, update_data): + try: async with async_session() as session: async with session.begin(): - # Define the insert statement stmt = insert(DmpIssueUpdates).values(**update_data) # Define the update statement in case of conflict stmt = stmt.on_conflict_do_update( - index_elements=['comment_id'], + index_elements=["comment_id"], set_={ - 'body_text': stmt.excluded.body_text, - 'comment_link': stmt.excluded.comment_link, - 'comment_api': stmt.excluded.comment_api, - 'comment_updated_at': stmt.excluded.comment_updated_at, - 'dmp_id': stmt.excluded.dmp_id, - 'created_by': stmt.excluded.created_by, - 'created_at': stmt.excluded.created_at - } + "body_text": stmt.excluded.body_text, + "comment_link": stmt.excluded.comment_link, + "comment_api": 
stmt.excluded.comment_api, + "comment_updated_at": stmt.excluded.comment_updated_at, + "dmp_id": stmt.excluded.dmp_id, + "created_by": stmt.excluded.created_by, + "created_at": stmt.excluded.created_at, + }, ) # Execute the statement await session.execute(stmt) await session.commit() - + return True - - except Exception as e: + + except Exception as e: print(e) - return False - + return False - async def upsert_pr_update(async_session, pr_update_data): try: async with async_session() as session: async with session.begin(): - pr_update_data['pr_updated_at'] = datetime.fromisoformat(pr_update_data['pr_updated_at']).replace(tzinfo=None) if pr_update_data['pr_updated_at'] else None - pr_update_data['merged_at'] = datetime.fromisoformat(pr_update_data['merged_at']).replace(tzinfo=None) if pr_update_data['merged_at'] else None - pr_update_data['closed_at'] = datetime.fromisoformat(pr_update_data['closed_at']).replace(tzinfo=None) if pr_update_data['closed_at'] else None + pr_update_data["pr_updated_at"] = ( + datetime.fromisoformat(pr_update_data["pr_updated_at"]).replace( + tzinfo=None + ) + if pr_update_data["pr_updated_at"] + else None + ) + pr_update_data["merged_at"] = ( + datetime.fromisoformat(pr_update_data["merged_at"]).replace( + tzinfo=None + ) + if pr_update_data["merged_at"] + else None + ) + pr_update_data["closed_at"] = ( + datetime.fromisoformat(pr_update_data["closed_at"]).replace( + tzinfo=None + ) + if pr_update_data["closed_at"] + else None + ) # Prepare the insert statement stmt = insert(DmpPrUpdates).values(**pr_update_data) # Prepare the conflict resolution strategy stmt = stmt.on_conflict_do_update( - index_elements=['pr_id'], # Assuming `pr_id` is the unique key + index_elements=["pr_id"], # Assuming `pr_id` is the unique key set_={ - 'status': stmt.excluded.status, - 'merged_at': stmt.excluded.merged_at, - 'closed_at': stmt.excluded.closed_at, - 'pr_updated_at': stmt.excluded.pr_updated_at, - 'dmp_id': stmt.excluded.dmp_id, - 'created_at': stmt.excluded.created_at, - 'title': stmt.excluded.title, - 'link': stmt.excluded.link - } + "status": stmt.excluded.status, + "merged_at": stmt.excluded.merged_at, + "closed_at": stmt.excluded.closed_at, + "pr_updated_at": stmt.excluded.pr_updated_at, + "dmp_id": stmt.excluded.dmp_id, + "created_at": stmt.excluded.created_at, + "title": stmt.excluded.title, + "link": stmt.excluded.link, + }, ) # Execute and commit the transaction await session.execute(stmt) await session.commit() - + return True - + except Exception as e: print(e) return False - - - + async def update_dmp_week_update(async_session, update_data): - try: + try: async with async_session() as session: async with session.begin(): # Define the filter conditions - stmt = ( - select(DmpWeekUpdates) - .where( - DmpWeekUpdates.week == update_data['week'], - DmpWeekUpdates.dmp_id == update_data['dmp_id'] - ) + stmt = select(DmpWeekUpdates).where( + DmpWeekUpdates.week == update_data["week"], + DmpWeekUpdates.dmp_id == update_data["dmp_id"], ) # Fetch the row that needs to be updated @@ -192,31 +207,25 @@ async def update_dmp_week_update(async_session, update_data): except Exception as e: print(e) return False - - - + async def get_week_updates(async_session, dmp_id, week): try: async with async_session() as session: # Build the ORM query stmt = select(DmpWeekUpdates).where( - DmpWeekUpdates.dmp_id == dmp_id, - DmpWeekUpdates.week == week + DmpWeekUpdates.dmp_id == dmp_id, DmpWeekUpdates.week == week ) # Execute the query result = await session.execute(stmt) - + # Fetch all 
matching rows week_updates = result.scalars().all() - - return True if len(week_updates)>0 else False - - except Exception as e: - return False - - - + return True if len(week_updates) > 0 else False + + except Exception: + return False + async def insert_dmp_week_update(async_session, update_data): try: async with async_session() as session: @@ -233,5 +242,3 @@ async def insert_dmp_week_update(async_session, update_data): except Exception as e: print(e) return False - - diff --git a/db/models.py b/db/models.py index 491d1d3..84843e3 100644 --- a/db/models.py +++ b/db/models.py @@ -1,13 +1,29 @@ from datetime import datetime from sqlalchemy.orm import relationship -from sqlalchemy import UUID, Boolean, Float, MetaData, Column, Integer, SmallInteger, String, Text, DateTime, ForeignKey, BigInteger, TypeDecorator, UniqueConstraint, func +from sqlalchemy import ( + UUID, + Boolean, + Float, + MetaData, + Column, + Integer, + SmallInteger, + String, + Text, + DateTime as SA_DateTime, + ForeignKey, + BigInteger, + TypeDecorator, + UniqueConstraint, + func, +) from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.types import TypeDecorator, DateTime as SA_DateTime Base = declarative_base() # Shared metadata object shared_metadata = MetaData() + class DateTime(TypeDecorator): impl = SA_DateTime @@ -26,8 +42,8 @@ def process_result_value(self, value, dialect): class AppComments(Base): - __tablename__ = 'app_comments' - + __tablename__ = "app_comments" + id = Column(BigInteger, primary_key=True, autoincrement=True) updated_at = Column(DateTime, nullable=True) api_url = Column(Text, nullable=True) @@ -39,40 +55,43 @@ def __repr__(self): def to_dict(self): return { - 'id': str(self.id), - 'updated_at': self.updated_at, - 'api_url': self.api_url, - 'comment_id': self.comment_id, - 'issue_id': self.issue_id + "id": str(self.id), + "updated_at": self.updated_at, + "api_url": self.api_url, + "comment_id": self.comment_id, + "issue_id": self.issue_id, } + class Badges(Base): - __tablename__ = 'badges' + __tablename__ = "badges" id = Column(UUID(as_uuid=True), primary_key=True) image = Column(Text, nullable=True) text = Column(Text, nullable=True) description = Column(Text, nullable=True) created_at = Column(DateTime, nullable=True) updated_at = Column(DateTime, nullable=True) - - user_badges = relationship('UserBadges', back_populates='badge') + user_badges = relationship("UserBadges", back_populates="badge") def __repr__(self): return f"" def to_dict(self): return { - 'image': self.image, - 'text': self.text, - 'description': self.description, - 'created_at': self.created_at, - 'updated_at': self.updated_at + "image": self.image, + "text": self.text, + "description": self.description, + "created_at": self.created_at, + "updated_at": self.updated_at, } + class CcbpTickets(Base): - __tablename__ = 'ccbp_tickets' - __table_args__ = {'comment': 'A table to store details of CCBP Tickets from various projects'} + __tablename__ = "ccbp_tickets" + __table_args__ = { + "comment": "A table to store details of CCBP Tickets from various projects" + } created_at = Column(DateTime, nullable=True) name = Column(Text, nullable=True) @@ -84,14 +103,18 @@ class CcbpTickets(Base): issue_id = Column(BigInteger, unique=True) api_endpoint_url = Column(Text, unique=True, nullable=True) url = Column(Text, unique=True, nullable=True) - ticket_points = Column(SmallInteger, nullable=True, comment='How many points the ticket is worth') + ticket_points = Column( + SmallInteger, nullable=True, comment="How many 
points the ticket is worth" + ) index = Column(SmallInteger, unique=True, autoincrement=True) mentors = Column(Text, nullable=True) uuid = Column(UUID(as_uuid=True), primary_key=True) status = Column(Text, nullable=True) - community_label = Column(Boolean, nullable=True, comment='has community label') + community_label = Column(Boolean, nullable=True, comment="has community label") organization = Column(Text, nullable=True) - closed_at = Column(DateTime, nullable=True, comment='date-time at which issue was closed') + closed_at = Column( + DateTime, nullable=True, comment="date-time at which issue was closed" + ) assignees = Column(Text, nullable=True) issue_author = Column(Text, nullable=True) is_assigned = Column(Boolean, nullable=False) @@ -101,38 +124,45 @@ def __repr__(self): def to_dict(self): return { - 'created_at': self.created_at, - 'name': self.name, - 'product': self.product, - 'complexity': self.complexity, - 'project_category': self.project_category, - 'project_sub_category': self.project_sub_category, - 'reqd_skills': self.reqd_skills, - 'issue_id': self.issue_id, - 'api_endpoint_url': self.api_endpoint_url, - 'url': self.url, - 'ticket_points': self.ticket_points, - 'index': self.index, - 'mentors': self.mentors, - 'uuid': str(self.uuid), - 'status': self.status, - 'community_label': self.community_label, - 'organization': self.organization, - 'closed_at': self.closed_at, - 'assignees': self.assignees, - 'issue_author': self.issue_author, - 'is_assigned': self.is_assigned + "created_at": self.created_at, + "name": self.name, + "product": self.product, + "complexity": self.complexity, + "project_category": self.project_category, + "project_sub_category": self.project_sub_category, + "reqd_skills": self.reqd_skills, + "issue_id": self.issue_id, + "api_endpoint_url": self.api_endpoint_url, + "url": self.url, + "ticket_points": self.ticket_points, + "index": self.index, + "mentors": self.mentors, + "uuid": str(self.uuid), + "status": self.status, + "community_label": self.community_label, + "organization": self.organization, + "closed_at": self.closed_at, + "assignees": self.assignees, + "issue_author": self.issue_author, + "is_assigned": self.is_assigned, } + class Chapters(Base): - __tablename__ = 'chapters' - + __tablename__ = "chapters" + id = Column(UUID(as_uuid=True), primary_key=True) type = Column(Text, nullable=True) org_name = Column(Text, unique=True) - primary_organisation = Column(Text, nullable=True, comment='the organisation that the chapter is mapped to') + primary_organisation = Column( + Text, nullable=True, comment="the organisation that the chapter is mapped to" + ) sessions = Column(Integer, nullable=True) - discord_role_id = Column(BigInteger, unique=True, comment='db id of the corresponding member role in discord server') + discord_role_id = Column( + BigInteger, + unique=True, + comment="db id of the corresponding member role in discord server", + ) created_at = Column(DateTime, nullable=True) def __repr__(self): @@ -140,20 +170,21 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'type': self.type, - 'org_name': self.org_name, - 'primary_organisation': self.primary_organisation, - 'sessions': self.sessions, - 'discord_role_id': self.discord_role_id, - 'created_at': self.created_at + "id": self.id, + "type": self.type, + "org_name": self.org_name, + "primary_organisation": self.primary_organisation, + "sessions": self.sessions, + "discord_role_id": self.discord_role_id, + "created_at": self.created_at, } ## + class ConnectedPrs(Base): 
- __tablename__ = 'connected_prs' + __tablename__ = "connected_prs" id = Column(UUID(as_uuid=True), primary_key=True) created_at = Column(DateTime, nullable=True) @@ -167,7 +198,7 @@ class ConnectedPrs(Base): merged_by = Column(BigInteger, nullable=True) merged_at = Column(Text, nullable=True) merged_by_username = Column(Text, nullable=True) - pr_id = Column(BigInteger, nullable=False, comment='github id of the pr') + pr_id = Column(BigInteger, nullable=False, comment="github id of the pr") points = Column(SmallInteger, nullable=False) ticket_url = Column(Text, nullable=False) ticket_complexity = Column(Text, nullable=True) @@ -177,26 +208,27 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'created_at': self.created_at, - 'api_url': self.api_url, - 'html_url': self.html_url, - 'raised_by': self.raised_by, - 'raised_at': self.raised_at, - 'raised_by_username': self.raised_by_username, - 'status': self.status, - 'is_merged': self.is_merged, - 'merged_by': self.merged_by, - 'merged_at': self.merged_at, - 'merged_by_username': self.merged_by_username, - 'pr_id': self.pr_id, - 'points': self.points, - 'ticket_url': self.ticket_url, - 'ticket_complexity': self.ticket_complexity + "id": self.id, + "created_at": self.created_at, + "api_url": self.api_url, + "html_url": self.html_url, + "raised_by": self.raised_by, + "raised_at": self.raised_at, + "raised_by_username": self.raised_by_username, + "status": self.status, + "is_merged": self.is_merged, + "merged_by": self.merged_by, + "merged_at": self.merged_at, + "merged_by_username": self.merged_by_username, + "pr_id": self.pr_id, + "points": self.points, + "ticket_url": self.ticket_url, + "ticket_complexity": self.ticket_complexity, } + class ContributorNames(Base): - __tablename__ = 'contributor_names' + __tablename__ = "contributor_names" id = Column(BigInteger, primary_key=True, autoincrement=True) discord_id = Column(BigInteger, nullable=False) @@ -208,14 +240,15 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'discord_id': self.discord_id, - 'name': self.name, - 'country': self.country + "id": self.id, + "discord_id": self.discord_id, + "name": self.name, + "country": self.country, } + class ContributorsDiscord(Base): - __tablename__ = 'contributors_discord' + __tablename__ = "contributors_discord" id = Column(BigInteger, primary_key=True, autoincrement=True) discord_id = Column(BigInteger, unique=True, nullable=False) @@ -224,7 +257,7 @@ class ContributorsDiscord(Base): discord_username = Column(String, nullable=True) joined_at = Column(DateTime, nullable=False) email = Column(Text, nullable=True) - field_name = Column(Text, nullable=True, name='name') # Adjusted field name + field_name = Column(Text, nullable=True, name="name") # Adjusted field name chapter = Column(Text, nullable=True, comment="the chapter they're associated with") gender = Column(Text, nullable=True) country = Column(Text, nullable=True) @@ -237,24 +270,25 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'discord_id': self.discord_id, - 'github_id': self.github_id, - 'github_url': self.github_url, - 'discord_username': self.discord_username, - 'joined_at': self.joined_at, - 'email': self.email, - 'name': self.field_name, - 'chapter': self.chapter, - 'gender': self.gender, - 'country': self.country, - 'city': self.city, - 'experience': self.experience, - 'is_active': self.is_active + "id": self.id, + "discord_id": self.discord_id, + "github_id": self.github_id, + "github_url": self.github_url, + 
"discord_username": self.discord_username, + "joined_at": self.joined_at, + "email": self.email, + "name": self.field_name, + "chapter": self.chapter, + "gender": self.gender, + "country": self.country, + "city": self.city, + "experience": self.experience, + "is_active": self.is_active, } - + + class ContributorsRegistration(Base): - __tablename__ = 'contributors_registration' + __tablename__ = "contributors_registration" id = Column(BigInteger, primary_key=True, autoincrement=True) discord_id = Column(BigInteger, unique=True, nullable=False) @@ -264,54 +298,55 @@ class ContributorsRegistration(Base): joined_at = Column(DateTime, nullable=False) email = Column(Text, nullable=True) name = Column(Text, nullable=True) - - point_transactions = relationship('PointTransactions', back_populates='contributor') - - user_activities = relationship('UserActivity', back_populates='contributor') - user_points_mappings = relationship('UserPointsMapping', back_populates='contributors') + point_transactions = relationship("PointTransactions", back_populates="contributor") + + user_activities = relationship("UserActivity", back_populates="contributor") + user_points_mappings = relationship( + "UserPointsMapping", back_populates="contributors" + ) def __repr__(self): return f"" - def to_dict(self): return { - 'id': self.id, - 'discord_id': self.discord_id, - 'github_id': self.github_id, - 'github_url': self.github_url, - 'discord_username': self.discord_username, - 'joined_at': self.joined_at, - 'email': self.email, - 'name': self.name + "id": self.id, + "discord_id": self.discord_id, + "github_id": self.github_id, + "github_url": self.github_url, + "discord_username": self.discord_username, + "joined_at": self.joined_at, + "email": self.email, + "name": self.name, } + class DiscordChannels(Base): - __tablename__ = 'discord_channels' + __tablename__ = "discord_channels" channel_id = Column(BigInteger, primary_key=True) channel_name = Column(Text, nullable=True) webhook = Column(Text, nullable=True) should_notify = Column(Boolean, nullable=False) - - products = relationship('Product', back_populates='channel') + products = relationship("Product", back_populates="channel") def __repr__(self): return f"" def to_dict(self): return { - 'channel_id': self.channel_id, - 'channel_name': self.channel_name, - 'webhook': self.webhook, - 'should_notify': self.should_notify + "channel_id": self.channel_id, + "channel_name": self.channel_name, + "webhook": self.webhook, + "should_notify": self.should_notify, } + class DiscordEngagement(Base): - __tablename__ = 'discord_engagement' - __table_args__ = {'comment': 'engagement metrics for contributors'} + __tablename__ = "discord_engagement" + __table_args__ = {"comment": "engagement metrics for contributors"} id = Column(BigInteger, primary_key=True, autoincrement=True) created_at = Column(DateTime, nullable=True) @@ -330,22 +365,23 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'created_at': self.created_at, - 'contributor': self.contributor, - 'has_introduced': self.has_introduced, - 'total_message_count': self.total_message_count, - 'total_reaction_count': self.total_reaction_count, - 'converserbadge': self.converserbadge, - 'apprenticebadge': self.apprenticebadge, - 'rockstarbadge': self.rockstarbadge, - 'enthusiastbadge': self.enthusiastbadge, - 'risingstarbadge': self.risingstarbadge + "id": self.id, + "created_at": self.created_at, + "contributor": self.contributor, + "has_introduced": self.has_introduced, + "total_message_count": 
self.total_message_count, + "total_reaction_count": self.total_reaction_count, + "converserbadge": self.converserbadge, + "apprenticebadge": self.apprenticebadge, + "rockstarbadge": self.rockstarbadge, + "enthusiastbadge": self.enthusiastbadge, + "risingstarbadge": self.risingstarbadge, } + class DmpIssueUpdates(Base): - __tablename__ = 'dmp_issue_updates' - __table_args__ = {'comment': 'Having records of dmp with issue details'} + __tablename__ = "dmp_issue_updates" + __table_args__ = {"comment": "Having records of dmp with issue details"} created_at = Column(DateTime, nullable=False) body_text = Column(Text, nullable=True) @@ -353,7 +389,7 @@ class DmpIssueUpdates(Base): comment_id = Column(BigInteger, primary_key=True) comment_api = Column(String, nullable=True) comment_updated_at = Column(DateTime, nullable=True) - dmp_id = Column(BigInteger, ForeignKey('dmp_issues.id'), nullable=False) + dmp_id = Column(BigInteger, ForeignKey("dmp_issues.id"), nullable=False) created_by = Column(Text, nullable=False) def __repr__(self): @@ -361,19 +397,19 @@ def __repr__(self): def to_dict(self): return { - 'created_at': self.created_at, - 'body_text': self.body_text, - 'comment_link': self.comment_link, - 'comment_id': self.comment_id, - 'comment_api': self.comment_api, - 'comment_updated_at': self.comment_updated_at, - 'dmp_id': self.dmp_id, - 'created_by': self.created_by + "created_at": self.created_at, + "body_text": self.body_text, + "comment_link": self.comment_link, + "comment_id": self.comment_id, + "comment_api": self.comment_api, + "comment_updated_at": self.comment_updated_at, + "dmp_id": self.dmp_id, + "created_by": self.created_by, } class DmpIssues(Base): - __tablename__ = 'dmp_issues' + __tablename__ = "dmp_issues" id = Column(BigInteger, primary_key=True, autoincrement=True) issue_url = Column(String, nullable=False) @@ -381,32 +417,33 @@ class DmpIssues(Base): mentor_username = Column(Text, nullable=True) contributor_username = Column(Text, nullable=True) title = Column(Text, nullable=False) - org_id = Column(BigInteger, ForeignKey('dmp_orgs.id'), nullable=False) + org_id = Column(BigInteger, ForeignKey("dmp_orgs.id"), nullable=False) description = Column(Text, nullable=False) repo = Column(Text, nullable=False) repo_owner = Column(Text, nullable=False) - year = Column(Integer, nullable=True, comment='The year the issue was created') + year = Column(Integer, nullable=True, comment="The year the issue was created") def __repr__(self): return f"" def to_dict(self): return { - 'id': self.id, - 'issue_url': self.issue_url, - 'issue_number': self.issue_number, - 'mentor_username': self.mentor_username, - 'contributor_username': self.contributor_username, - 'title': self.title, - 'org_id': self.org_id, - 'description': self.description, - 'repo': self.repo, - 'repo_owner': self.repo_owner, - 'year': self.year + "id": self.id, + "issue_url": self.issue_url, + "issue_number": self.issue_number, + "mentor_username": self.mentor_username, + "contributor_username": self.contributor_username, + "title": self.title, + "org_id": self.org_id, + "description": self.description, + "repo": self.repo, + "repo_owner": self.repo_owner, + "year": self.year, } + class DmpOrgs(Base): - __tablename__ = 'dmp_orgs' + __tablename__ = "dmp_orgs" id = Column(BigInteger, primary_key=True, autoincrement=True) created_at = Column(DateTime, nullable=False) @@ -414,26 +451,26 @@ class DmpOrgs(Base): description = Column(Text, nullable=False) link = Column(Text, nullable=False) repo_owner = Column(Text, nullable=False) 
- - # issues = relationship('Issues', backref='organization', lazy='joined') + # issues = relationship('Issues', backref='organization', lazy='joined') def __repr__(self): return f"" def to_dict(self): return { - 'id': self.id, - 'created_at': self.created_at, - 'name': self.name, - 'description': self.description, - 'link': self.link, - 'repo_owner': self.repo_owner + "id": self.id, + "created_at": self.created_at, + "name": self.name, + "description": self.description, + "link": self.link, + "repo_owner": self.repo_owner, } + class DmpPrUpdates(Base): - __tablename__ = 'dmp_pr_updates' - __table_args__ = {'comment': 'Having PR related records'} + __tablename__ = "dmp_pr_updates" + __table_args__ = {"comment": "Having PR related records"} created_at = Column(DateTime, nullable=False) pr_id = Column(BigInteger, primary_key=True) @@ -442,7 +479,7 @@ class DmpPrUpdates(Base): pr_updated_at = Column(DateTime, nullable=True) merged_at = Column(DateTime, nullable=True) closed_at = Column(DateTime, nullable=True) - dmp_id = Column(BigInteger, ForeignKey('dmp_issues.id'), nullable=False) + dmp_id = Column(BigInteger, ForeignKey("dmp_issues.id"), nullable=False) link = Column(Text, nullable=False) def __repr__(self): @@ -450,19 +487,20 @@ def __repr__(self): def to_dict(self): return { - 'created_at': self.created_at, - 'pr_id': self.pr_id, - 'status': self.status, - 'title': self.title, - 'pr_updated_at': self.pr_updated_at, - 'merged_at': self.merged_at, - 'closed_at': self.closed_at, - 'dmp_id': self.dmp_id, - 'link': self.link + "created_at": self.created_at, + "pr_id": self.pr_id, + "status": self.status, + "title": self.title, + "pr_updated_at": self.pr_updated_at, + "merged_at": self.merged_at, + "closed_at": self.closed_at, + "dmp_id": self.dmp_id, + "link": self.link, } + class DmpTickets(Base): - __tablename__ = 'dmp_tickets' + __tablename__ = "dmp_tickets" created_at = Column(DateTime, nullable=True) name = Column(Text, nullable=True) @@ -474,12 +512,14 @@ class DmpTickets(Base): issue_id = Column(BigInteger, unique=True, nullable=False) api_endpoint_url = Column(Text, unique=True, nullable=True) url = Column(Text, unique=True, nullable=True) - ticket_points = Column(Integer, nullable=True, comment='How many points the ticket is worth') + ticket_points = Column( + Integer, nullable=True, comment="How many points the ticket is worth" + ) index = Column(Integer, unique=True, autoincrement=True) mentors = Column(Text, nullable=True) uuid = Column(UUID(as_uuid=True), primary_key=True) status = Column(Text, nullable=True) - community_label = Column(Boolean, nullable=True, comment='has community label') + community_label = Column(Boolean, nullable=True, comment="has community label") organization = Column(Text, nullable=True) def __repr__(self): @@ -487,27 +527,28 @@ def __repr__(self): def to_dict(self): return { - 'created_at': self.created_at, - 'name': self.name, - 'product': self.product, - 'complexity': self.complexity, - 'project_category': self.project_category, - 'project_sub_category': self.project_sub_category, - 'reqd_skills': self.reqd_skills, - 'issue_id': self.issue_id, - 'api_endpoint_url': self.api_endpoint_url, - 'url': self.url, - 'ticket_points': self.ticket_points, - 'index': self.index, - 'mentors': self.mentors, - 'uuid': self.uuid, - 'status': self.status, - 'community_label': self.community_label, - 'organization': self.organization + "created_at": self.created_at, + "name": self.name, + "product": self.product, + "complexity": self.complexity, + "project_category": 
self.project_category, + "project_sub_category": self.project_sub_category, + "reqd_skills": self.reqd_skills, + "issue_id": self.issue_id, + "api_endpoint_url": self.api_endpoint_url, + "url": self.url, + "ticket_points": self.ticket_points, + "index": self.index, + "mentors": self.mentors, + "uuid": self.uuid, + "status": self.status, + "community_label": self.community_label, + "organization": self.organization, } + class DmpWeekUpdates(Base): - __tablename__ = 'dmp_week_updates' + __tablename__ = "dmp_week_updates" id = Column(BigInteger, primary_key=True, autoincrement=True) issue_url = Column(Text, nullable=False) @@ -523,19 +564,22 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'issue_url': self.issue_url, - 'week': self.week, - 'total_task': self.total_task, - 'completed_task': self.completed_task, - 'progress': self.progress, - 'task_data': self.task_data, - 'dmp_id': self.dmp_id + "id": self.id, + "issue_url": self.issue_url, + "week": self.week, + "total_task": self.total_task, + "completed_task": self.completed_task, + "progress": self.progress, + "task_data": self.task_data, + "dmp_id": self.dmp_id, } + class GithubClassroomData(Base): - __tablename__ = 'github_classroom_data' - __table_args__ = {'comment': 'Table for saving the details about github classroom assignment data'} + __tablename__ = "github_classroom_data" + __table_args__ = { + "comment": "Table for saving the details about github classroom assignment data" + } id = Column(BigInteger, primary_key=True, autoincrement=True) created_at = Column(DateTime, nullable=False) @@ -559,78 +603,93 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'created_at': self.created_at, - 'assignment_name': self.assignment_name, - 'assignment_url': self.assignment_url, - 'assignment_id': self.assignment_id, - 'starter_code_url': self.starter_code_url, - 'github_username': self.github_username, - 'roster_identifier': self.roster_identifier, - 'student_repository_name': self.student_repository_name, - 'student_repository_url': self.student_repository_url, - 'submission_timestamp': self.submission_timestamp, - 'points_awarded': self.points_awarded, - 'points_available': self.points_available, - 'c4gt_points': self.c4gt_points, - 'discord_id': self.discord_id, - 'updated_at': self.updated_at + "id": self.id, + "created_at": self.created_at, + "assignment_name": self.assignment_name, + "assignment_url": self.assignment_url, + "assignment_id": self.assignment_id, + "starter_code_url": self.starter_code_url, + "github_username": self.github_username, + "roster_identifier": self.roster_identifier, + "student_repository_name": self.student_repository_name, + "student_repository_url": self.student_repository_url, + "submission_timestamp": self.submission_timestamp, + "points_awarded": self.points_awarded, + "points_available": self.points_available, + "c4gt_points": self.c4gt_points, + "discord_id": self.discord_id, + "updated_at": self.updated_at, } + class GithubInstallations(Base): - __tablename__ = 'github_installations' + __tablename__ = "github_installations" id = Column(BigInteger, primary_key=True, autoincrement=True) github_organisation = Column(Text, unique=True, nullable=False) installation_id = Column(BigInteger, unique=True, nullable=False) - target_type = Column(Text, nullable=True, comment='Type of github entity that installed the app, usually "Organisation"') - github_ids = Column(Text, nullable=True, comment="Identifiers on the github database, prolly won't be used") + target_type = 
Column( + Text, + nullable=True, + comment='Type of github entity that installed the app, usually "Organisation"', + ) + github_ids = Column( + Text, + nullable=True, + comment="Identifiers on the github database, prolly won't be used", + ) permissions_and_events = Column(Text, nullable=True) created_at = Column(DateTime, nullable=True) - organisation = Column(Text, ForeignKey('community_orgs.name'), nullable=True) + organisation = Column(Text, ForeignKey("community_orgs.name"), nullable=True) def __repr__(self): return f"" def to_dict(self): return { - 'id': self.id, - 'github_organisation': self.github_organisation, - 'installation_id': self.installation_id, - 'target_type': self.target_type, - 'github_ids': self.github_ids, - 'permissions_and_events': self.permissions_and_events, - 'created_at': self.created_at, - 'organisation': self.organisation + "id": self.id, + "github_organisation": self.github_organisation, + "installation_id": self.installation_id, + "target_type": self.target_type, + "github_ids": self.github_ids, + "permissions_and_events": self.permissions_and_events, + "created_at": self.created_at, + "organisation": self.organisation, } + + ## + class GithubOrganisationsToOrganisations(Base): - __tablename__ = 'github_organisations_to_organisations' + __tablename__ = "github_organisations_to_organisations" id = Column(BigInteger, primary_key=True, autoincrement=True) github_organisation = Column(Text, nullable=False) organisation = Column(Text, nullable=True) - created_at = Column(DateTime, nullable=True, comment='Creation date of organization ticket') + created_at = Column( + DateTime, nullable=True, comment="Creation date of organization ticket" + ) def __repr__(self): return f"" def to_dict(self): return { - 'id': self.id, - 'github_organisation': self.github_organisation, - 'organisation': self.organisation, - 'created_at': self.created_at + "id": self.id, + "github_organisation": self.github_organisation, + "organisation": self.organisation, + "created_at": self.created_at, } + class IssueContributors(Base): - __tablename__ = 'issue_contributors' + __tablename__ = "issue_contributors" id = Column(BigInteger, primary_key=True, autoincrement=True) - contributor_id = Column(BigInteger, ForeignKey('contributors_registration.id')) - issue_id = Column(BigInteger, ForeignKey('issues.id'), primary_key=True) - role = Column(BigInteger, ForeignKey('role_master.id'), nullable=True) + contributor_id = Column(BigInteger, ForeignKey("contributors_registration.id")) + issue_id = Column(BigInteger, ForeignKey("issues.id"), primary_key=True) + role = Column(BigInteger, ForeignKey("role_master.id"), nullable=True) created_at = Column(DateTime, nullable=True) updated_at = Column(DateTime, nullable=True) @@ -639,20 +698,21 @@ def __repr__(self): def to_dict(self): return { - 'contributor_id': self.contributor_id, - 'issue_id': self.issue_id, - 'role_id': self.role, - 'created_at': self.created_at, - 'updated_at': self.updated_at + "contributor_id": self.contributor_id, + "issue_id": self.issue_id, + "role_id": self.role, + "created_at": self.created_at, + "updated_at": self.updated_at, } + class IssueMentors(Base): - __tablename__ = 'issue_mentors' + __tablename__ = "issue_mentors" id = Column(BigInteger, primary_key=True, autoincrement=True) - issue_id = Column(BigInteger, ForeignKey('issues.id'), primary_key=True) + issue_id = Column(BigInteger, ForeignKey("issues.id"), primary_key=True) org_mentor_id = Column(Text, nullable=True) - angel_mentor_id = Column(BigInteger, 
ForeignKey('contributors_registration.id')) + angel_mentor_id = Column(BigInteger, ForeignKey("contributors_registration.id")) created_at = Column(DateTime, nullable=True) updated_at = Column(DateTime, nullable=True) @@ -661,15 +721,16 @@ def __repr__(self): def to_dict(self): return { - 'issue_id': self.issue_id, - 'org_mentor_id': self.org_mentor_id, - 'angel_mentor_id': self.angel_mentor_id, - 'created_at': self.created_at, - 'updated_at': self.updated_at + "issue_id": self.issue_id, + "org_mentor_id": self.org_mentor_id, + "angel_mentor_id": self.angel_mentor_id, + "created_at": self.created_at, + "updated_at": self.updated_at, } + class Issues(Base): - __tablename__ = 'issues' + __tablename__ = "issues" id = Column(BigInteger, primary_key=True) link = Column(Text, nullable=False) @@ -684,42 +745,37 @@ class Issues(Base): title = Column(Text, nullable=True) domain = Column(Text, nullable=True) description = Column(Text, nullable=True) - org_id = Column(BigInteger, ForeignKey('community_orgs.id'), nullable=True) + org_id = Column(BigInteger, ForeignKey("community_orgs.id"), nullable=True) issue_id = Column(BigInteger, unique=True) - - point_transactions = relationship('PointTransactions', back_populates='issue') - user_activities = relationship('UserActivity', back_populates='issue') - - - def __repr__(self): - return f"" - + point_transactions = relationship("PointTransactions", back_populates="issue") + user_activities = relationship("UserActivity", back_populates="issue") def __repr__(self): return f"" def to_dict(self): return { - 'id': self.id, - 'link': self.link, - 'labels': self.labels, - 'complexity': self.complexity, - 'skills': self.skills, - 'technology': self.technology, - 'status': self.status, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'title': self.title, - 'description': self.description, - 'org_id': self.org_id, - 'issue_id': self.issue_id, - 'project_type':self.project_type, - 'domain': self.domain + "id": self.id, + "link": self.link, + "labels": self.labels, + "complexity": self.complexity, + "skills": self.skills, + "technology": self.technology, + "status": self.status, + "created_at": self.created_at, + "updated_at": self.updated_at, + "title": self.title, + "description": self.description, + "org_id": self.org_id, + "issue_id": self.issue_id, + "project_type": self.project_type, + "domain": self.domain, } + class MentorDetails(Base): - __tablename__ = 'mentor_details' + __tablename__ = "mentor_details" id = Column(BigInteger, primary_key=True) name = Column(String(255), nullable=True) @@ -729,32 +785,31 @@ class MentorDetails(Base): github_id = Column(String(255), nullable=True) created_at = Column(DateTime, nullable=True) updated_at = Column(DateTime, nullable=True) - - point_transactions = relationship('PointTransactions', back_populates='mentor') - user_points_mappings = relationship('UserPointsMapping', back_populates='mentor') - + point_transactions = relationship("PointTransactions", back_populates="mentor") + user_points_mappings = relationship("UserPointsMapping", back_populates="mentor") def __repr__(self): return f"" def to_dict(self): return { - 'id': self.id, - 'name': self.name, - 'email': self.email, - 'discord_id': self.discord_id, - 'discord_username': self.discord_username, - 'github_id': self.github_id, - 'created_at': self.created_at, - 'updated_at': self.updated_at + "id": self.id, + "name": self.name, + "email": self.email, + "discord_id": self.discord_id, + "discord_username": self.discord_username, + "github_id": 
self.github_id, + "created_at": self.created_at, + "updated_at": self.updated_at, } + class MentorshipProgramSiteStructure(Base): - __tablename__ = 'mentorship_program_site_structure' + __tablename__ = "mentorship_program_site_structure" id = Column(BigInteger, primary_key=True) - product_id = Column(BigInteger, ForeignKey('product.id'), nullable=True) + product_id = Column(BigInteger, ForeignKey("product.id"), nullable=True) project_id = Column(BigInteger, nullable=True) contributor_id = Column(BigInteger, nullable=True) website_directory_label = Column(Text, nullable=True) @@ -768,16 +823,17 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'product_id': self.product_id, - 'project_id': self.project_id, - 'contributor_id': self.contributor_id, - 'website_directory_label': self.website_directory_label, - 'directory_url': self.directory_url + "id": self.id, + "product_id": self.product_id, + "project_id": self.project_id, + "contributor_id": self.contributor_id, + "website_directory_label": self.website_directory_label, + "directory_url": self.directory_url, } + class MentorshipProgramWebsiteComments(Base): - __tablename__ = 'mentorship_program_website_comments' + __tablename__ = "mentorship_program_website_comments" comment_id = Column(BigInteger, primary_key=True) url = Column(Text, nullable=True) @@ -794,19 +850,20 @@ def __repr__(self): def to_dict(self): return { - 'comment_id': self.comment_id, - 'url': self.url, - 'html_url': self.html_url, - 'commented_by_username': self.commented_by_username, - 'commented_by_id': self.commented_by_id, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'body': self.body, - 'pr_id': self.pr_id + "comment_id": self.comment_id, + "url": self.url, + "html_url": self.html_url, + "commented_by_username": self.commented_by_username, + "commented_by_id": self.commented_by_id, + "created_at": self.created_at, + "updated_at": self.updated_at, + "body": self.body, + "pr_id": self.pr_id, } + class MentorshipProgramWebsiteCommits(Base): - __tablename__ = 'mentorship_program_website_commits' + __tablename__ = "mentorship_program_website_commits" node_id = Column(Text, primary_key=True) url = Column(Text, nullable=True) @@ -830,26 +887,27 @@ def __repr__(self): def to_dict(self): return { - 'node_id': self.node_id, - 'url': self.url, - 'html_url': self.html_url, - 'comment_count': self.comment_count, - 'date': self.date, - 'author_id': self.author_id, - 'author_username': self.author_username, - 'author_email': self.author_email, - 'committer_id': self.committer_id, - 'committer_username': self.committer_username, - 'committer_email': self.committer_email, - 'additions': self.additions, - 'deletions': self.deletions, - 'files': self.files, - 'project_folder_name': self.project_folder_name, - 'pr_id': self.pr_id + "node_id": self.node_id, + "url": self.url, + "html_url": self.html_url, + "comment_count": self.comment_count, + "date": self.date, + "author_id": self.author_id, + "author_username": self.author_username, + "author_email": self.author_email, + "committer_id": self.committer_id, + "committer_username": self.committer_username, + "committer_email": self.committer_email, + "additions": self.additions, + "deletions": self.deletions, + "files": self.files, + "project_folder_name": self.project_folder_name, + "pr_id": self.pr_id, } + class MentorshipProgramWebsiteHasUpdated(Base): - __tablename__ = 'mentorship_program_website_has_updated' + __tablename__ = "mentorship_program_website_has_updated" id = Column(BigInteger, 
primary_key=True) project_id = Column(BigInteger, nullable=True) @@ -880,37 +938,37 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'project_id': self.project_id, - 'week1_update_date': self.week1_update_date, - 'week2_update_date': self.week2_update_date, - 'week3_update_date': self.week3_update_date, - 'week4_update_date': self.week4_update_date, - 'week5_update_date': self.week5_update_date, - 'week6_update_date': self.week6_update_date, - 'week7_update_date': self.week7_update_date, - 'week8_update_date': self.week8_update_date, - 'week9_update_date': self.week9_update_date, - 'week1_is_default_text': self.week1_is_default_text, - 'week2_is_default_text': self.week2_is_default_text, - 'week3_is_default_text': self.week3_is_default_text, - 'week4_is_default_text': self.week4_is_default_text, - 'week5_is_default_text': self.week5_is_default_text, - 'week6_is_default_text': self.week6_is_default_text, - 'week7_is_default_text': self.week7_is_default_text, - 'week8_is_default_text': self.week8_is_default_text, - 'week9_is_default_text': self.week9_is_default_text, - 'product': self.product, - 'project_folder': self.project_folder, - 'all_links': self.all_links + "id": self.id, + "project_id": self.project_id, + "week1_update_date": self.week1_update_date, + "week2_update_date": self.week2_update_date, + "week3_update_date": self.week3_update_date, + "week4_update_date": self.week4_update_date, + "week5_update_date": self.week5_update_date, + "week6_update_date": self.week6_update_date, + "week7_update_date": self.week7_update_date, + "week8_update_date": self.week8_update_date, + "week9_update_date": self.week9_update_date, + "week1_is_default_text": self.week1_is_default_text, + "week2_is_default_text": self.week2_is_default_text, + "week3_is_default_text": self.week3_is_default_text, + "week4_is_default_text": self.week4_is_default_text, + "week5_is_default_text": self.week5_is_default_text, + "week6_is_default_text": self.week6_is_default_text, + "week7_is_default_text": self.week7_is_default_text, + "week8_is_default_text": self.week8_is_default_text, + "week9_is_default_text": self.week9_is_default_text, + "product": self.product, + "project_folder": self.project_folder, + "all_links": self.all_links, } - ## + class MentorshipProgramWebsitePullRequest(Base): - __tablename__ = 'mentorship_program_website_pull_request' + __tablename__ = "mentorship_program_website_pull_request" pr_url = Column(Text, nullable=True) pr_id = Column(BigInteger, primary_key=True) @@ -952,44 +1010,45 @@ def __repr__(self): def to_dict(self): return { - 'pr_url': self.pr_url, - 'pr_id': self.pr_id, - 'pr_node_id': self.pr_node_id, - 'html_url': self.html_url, - 'status': self.status, - 'title': self.title, - 'raised_by_username': self.raised_by_username, - 'raised_by_id': self.raised_by_id, - 'body': self.body, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'closed_at': self.closed_at, - 'merged_at': self.merged_at, - 'assignees': self.assignees, - 'requested_reviewers': self.requested_reviewers, - 'labels': self.labels, - 'review_comments_url': self.review_comments_url, - 'comments_url': self.comments_url, - 'repository_id': self.repository_id, - 'repository_owner_name': self.repository_owner_name, - 'repository_owner_id': self.repository_owner_id, - 'repository_url': self.repository_url, - 'merged': self.merged, - 'number_of_commits': self.number_of_commits, - 'number_of_comments': self.number_of_comments, - 'lines_of_code_added': self.lines_of_code_added, - 
'lines_of_code_removed': self.lines_of_code_removed, - 'number_of_files_changed': self.number_of_files_changed, - 'merged_by_id': self.merged_by_id, - 'merged_by_username': self.merged_by_username, - 'linked_ticket': self.linked_ticket, - 'project_name': self.project_name, - 'project_folder_label': self.project_folder_label, - 'week_number': self.week_number + "pr_url": self.pr_url, + "pr_id": self.pr_id, + "pr_node_id": self.pr_node_id, + "html_url": self.html_url, + "status": self.status, + "title": self.title, + "raised_by_username": self.raised_by_username, + "raised_by_id": self.raised_by_id, + "body": self.body, + "created_at": self.created_at, + "updated_at": self.updated_at, + "closed_at": self.closed_at, + "merged_at": self.merged_at, + "assignees": self.assignees, + "requested_reviewers": self.requested_reviewers, + "labels": self.labels, + "review_comments_url": self.review_comments_url, + "comments_url": self.comments_url, + "repository_id": self.repository_id, + "repository_owner_name": self.repository_owner_name, + "repository_owner_id": self.repository_owner_id, + "repository_url": self.repository_url, + "merged": self.merged, + "number_of_commits": self.number_of_commits, + "number_of_comments": self.number_of_comments, + "lines_of_code_added": self.lines_of_code_added, + "lines_of_code_removed": self.lines_of_code_removed, + "number_of_files_changed": self.number_of_files_changed, + "merged_by_id": self.merged_by_id, + "merged_by_username": self.merged_by_username, + "linked_ticket": self.linked_ticket, + "project_name": self.project_name, + "project_folder_label": self.project_folder_label, + "week_number": self.week_number, } + class MentorshipWebsiteContributorProject(Base): - __tablename__ = 'mentorship_website_contributor_project' + __tablename__ = "mentorship_website_contributor_project" project_folder = Column(Text, primary_key=True) contributor = Column(Text, nullable=True) @@ -998,13 +1057,11 @@ def __repr__(self): return f"" def to_dict(self): - return { - 'project_folder': self.project_folder, - 'contributor': self.contributor - } + return {"project_folder": self.project_folder, "contributor": self.contributor} + class PointSystem(Base): - __tablename__ = 'point_system' + __tablename__ = "point_system" id = Column(BigInteger, primary_key=True) complexity = Column(Text, nullable=False) @@ -1014,47 +1071,51 @@ def __repr__(self): return f"" def to_dict(self): - return { - 'id': self.id, - 'complexity': self.complexity, - 'points': self.points - } - + return {"id": self.id, "complexity": self.complexity, "points": self.points} + + class PointTransactions(Base): - __tablename__ = 'point_transactions' + __tablename__ = "point_transactions" id = Column(BigInteger, primary_key=True) - user_id = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=True) - issue_id = Column(BigInteger, ForeignKey('issues.id'), nullable=False) + user_id = Column( + BigInteger, ForeignKey("contributors_registration.id"), nullable=True + ) + issue_id = Column(BigInteger, ForeignKey("issues.id"), nullable=False) point = Column(Integer, nullable=True) type = Column(Text, nullable=True) - created_at = Column(DateTime, default=func.now(), nullable=False) # Set to current time when created - updated_at = Column(DateTime, default=func.now(), onupdate=func.now(), nullable=False) # Updated to current time when record is modified - angel_mentor_id = Column(BigInteger, ForeignKey('mentor_details.id'), nullable=True) - - - contributor = relationship('ContributorsRegistration', 
back_populates='point_transactions') - issue = relationship('Issues', back_populates='point_transactions') - mentor = relationship('MentorDetails', back_populates='point_transactions') + created_at = Column( + DateTime, default=func.now(), nullable=False + ) # Set to current time when created + updated_at = Column( + DateTime, default=func.now(), onupdate=func.now(), nullable=False + ) # Updated to current time when record is modified + angel_mentor_id = Column(BigInteger, ForeignKey("mentor_details.id"), nullable=True) + + contributor = relationship( + "ContributorsRegistration", back_populates="point_transactions" + ) + issue = relationship("Issues", back_populates="point_transactions") + mentor = relationship("MentorDetails", back_populates="point_transactions") def __repr__(self): return f"" - def to_dict(self): return { - 'id': self.id, - 'user_id': self.user_id, - 'issue_id': self.issue_id, - 'point': self.point, - 'type': self.type, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'angel_mentor_id': self.angel_mentor_id + "id": self.id, + "user_id": self.user_id, + "issue_id": self.issue_id, + "point": self.point, + "type": self.type, + "created_at": self.created_at, + "updated_at": self.updated_at, + "angel_mentor_id": self.angel_mentor_id, } + class PointsMapping(Base): - __tablename__ = 'points_mapping' + __tablename__ = "points_mapping" id = Column(BigInteger, primary_key=True) role = Column(String(50), nullable=False) @@ -1068,20 +1129,20 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'role': self.role, - 'complexity': self.complexity, - 'points': self.points, - 'created_at': self.created_at, - 'updated_at': self.updated_at + "id": self.id, + "role": self.role, + "complexity": self.complexity, + "points": self.points, + "created_at": self.created_at, + "updated_at": self.updated_at, } - ### + class PrHistory(Base): - __tablename__ = 'pr_history' + __tablename__ = "pr_history" id = Column(BigInteger, primary_key=True, autoincrement=True) created_at = Column(DateTime, nullable=True) @@ -1106,27 +1167,28 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'created_at': self.created_at, - 'api_url': self.api_url, - 'html_url': self.html_url, - 'raised_by': self.raised_by, - 'raised_at': self.raised_at, - 'raised_by_username': self.raised_by_username, - 'status': self.status, - 'is_merged': self.is_merged, - 'merged_by': self.merged_by, - 'merged_at': self.merged_at, - 'merged_by_username': self.merged_by_username, - 'pr_id': self.pr_id, - 'ticket_url': self.ticket_url, - 'ticket_complexity': self.ticket_complexity, - 'title': self.title, - 'issue_id': self.issue_id + "id": self.id, + "created_at": self.created_at, + "api_url": self.api_url, + "html_url": self.html_url, + "raised_by": self.raised_by, + "raised_at": self.raised_at, + "raised_by_username": self.raised_by_username, + "status": self.status, + "is_merged": self.is_merged, + "merged_by": self.merged_by, + "merged_at": self.merged_at, + "merged_by_username": self.merged_by_username, + "pr_id": self.pr_id, + "ticket_url": self.ticket_url, + "ticket_complexity": self.ticket_complexity, + "title": self.title, + "issue_id": self.issue_id, } + class PrStaging(Base): - __tablename__ = 'pr_staging' + __tablename__ = "pr_staging" id = Column(String(36), primary_key=True) # UUID field created_at = Column(DateTime, nullable=True) @@ -1150,50 +1212,53 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'created_at': self.created_at, - 'api_url': 
self.api_url, - 'html_url': self.html_url, - 'raised_by': self.raised_by, - 'raised_at': self.raised_at, - 'raised_by_username': self.raised_by_username, - 'status': self.status, - 'is_merged': self.is_merged, - 'merged_by': self.merged_by, - 'merged_at': self.merged_at, - 'merged_by_username': self.merged_by_username, - 'pr_id': self.pr_id, - 'points': self.points, - 'ticket_url': self.ticket_url, - 'ticket_complexity': self.ticket_complexity + "id": self.id, + "created_at": self.created_at, + "api_url": self.api_url, + "html_url": self.html_url, + "raised_by": self.raised_by, + "raised_at": self.raised_at, + "raised_by_username": self.raised_by_username, + "status": self.status, + "is_merged": self.is_merged, + "merged_by": self.merged_by, + "merged_at": self.merged_at, + "merged_by_username": self.merged_by_username, + "pr_id": self.pr_id, + "points": self.points, + "ticket_url": self.ticket_url, + "ticket_complexity": self.ticket_complexity, } + class Product(Base): - __tablename__ = 'product' + __tablename__ = "product" id = Column(BigInteger, primary_key=True) # Auto field name = Column(Text, unique=True, nullable=False) description = Column(Text, nullable=True) wiki_url = Column(Text, nullable=True) - channel_id = Column(BigInteger, ForeignKey('discord_channels.channel_id'), nullable=True) # Assumes 'DiscordChannels' model - - channel = relationship('DiscordChannels', back_populates='products') + channel_id = Column( + BigInteger, ForeignKey("discord_channels.channel_id"), nullable=True + ) # Assumes 'DiscordChannels' model + channel = relationship("DiscordChannels", back_populates="products") def __repr__(self): return f"" def to_dict(self): return { - 'id': self.id, - 'name': self.name, - 'description': self.description, - 'wiki_url': self.wiki_url, - 'channel_id': self.channel_id + "id": self.id, + "name": self.name, + "description": self.description, + "wiki_url": self.wiki_url, + "channel_id": self.channel_id, } + class RoleMaster(Base): - __tablename__ = 'role_master' + __tablename__ = "role_master" id = Column(BigInteger, primary_key=True) # Auto field created_at = Column(DateTime, nullable=False) @@ -1205,14 +1270,15 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'role': self.role + "id": self.id, + "created_at": self.created_at, + "updated_at": self.updated_at, + "role": self.role, } + class TicketComments(Base): - __tablename__ = 'ticket_comments' + __tablename__ = "ticket_comments" id = Column(BigInteger, primary_key=True) url = Column(Text, nullable=True) @@ -1234,22 +1300,23 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'url': self.url, - 'html_url': self.html_url, - 'issue_url': self.issue_url, - 'node_id': self.node_id, - 'commented_by': self.commented_by, - 'commented_by_id': self.commented_by_id, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'content': self.content, - 'reactions_url': self.reactions_url, - 'ticket_url': self.ticket_url + "id": self.id, + "url": self.url, + "html_url": self.html_url, + "issue_url": self.issue_url, + "node_id": self.node_id, + "commented_by": self.commented_by, + "commented_by_id": self.commented_by_id, + "created_at": self.created_at, + "updated_at": self.updated_at, + "content": self.content, + "reactions_url": self.reactions_url, + "ticket_url": self.ticket_url, } + class UnlistedTickets(Base): - __tablename__ = 'unlisted_tickets' + __tablename__ = "unlisted_tickets" created_at = Column(DateTime, 
nullable=True) name = Column(Text, nullable=True) @@ -1268,33 +1335,34 @@ class UnlistedTickets(Base): status = Column(Text, nullable=True) organization = Column(Text, nullable=True) - __table_args__ = (UniqueConstraint('uuid', 'issue_id'),) + __table_args__ = (UniqueConstraint("uuid", "issue_id"),) def __repr__(self): return f"" def to_dict(self): return { - 'created_at': self.created_at, - 'name': self.name, - 'product': self.product, - 'complexity': self.complexity, - 'project_category': self.project_category, - 'project_sub_category': self.project_sub_category, - 'reqd_skills': self.reqd_skills, - 'issue_id': self.issue_id, - 'api_endpoint_url': self.api_endpoint_url, - 'url': self.url, - 'ticket_points': self.ticket_points, - 'index': self.index, - 'mentors': self.mentors, - 'uuid': self.uuid, - 'status': self.status, - 'organization': self.organization + "created_at": self.created_at, + "name": self.name, + "product": self.product, + "complexity": self.complexity, + "project_category": self.project_category, + "project_sub_category": self.project_sub_category, + "reqd_skills": self.reqd_skills, + "issue_id": self.issue_id, + "api_endpoint_url": self.api_endpoint_url, + "url": self.url, + "ticket_points": self.ticket_points, + "index": self.index, + "mentors": self.mentors, + "uuid": self.uuid, + "status": self.status, + "organization": self.organization, } + class UnstructuredDiscordData(Base): - __tablename__ = 'unstructured_discord_data' + __tablename__ = "unstructured_discord_data" text = Column(Text, nullable=True) author = Column(BigInteger, nullable=True) @@ -1310,118 +1378,144 @@ def __repr__(self): def to_dict(self): return { - 'text': self.text, - 'author': self.author, - 'channel': self.channel, - 'channel_name': self.channel_name, - 'uuid': self.uuid, - 'author_name': self.author_name, - 'author_roles': self.author_roles, - 'sent_at': self.sent_at + "text": self.text, + "author": self.author, + "channel": self.channel, + "channel_name": self.channel_name, + "uuid": self.uuid, + "author_name": self.author_name, + "author_roles": self.author_roles, + "sent_at": self.sent_at, } + class UserActivity(Base): - __tablename__ = 'user_activity' + __tablename__ = "user_activity" id = Column(BigInteger, primary_key=True, autoincrement=True) - contributor_id = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=False) # Assumes 'ContributorsRegistration' model - issue_id = Column(BigInteger, ForeignKey('issues.id'), nullable=False) # Assumes 'Issues' model + contributor_id = Column( + BigInteger, ForeignKey("contributors_registration.id"), nullable=False + ) # Assumes 'ContributorsRegistration' model + issue_id = Column( + BigInteger, ForeignKey("issues.id"), nullable=False + ) # Assumes 'Issues' model activity = Column(Text, nullable=True) created_at = Column(DateTime, nullable=True) updated_at = Column(DateTime, nullable=True) mentor_id = Column(BigInteger, nullable=True) # Assumes 'MentorDetails' model - contributor = relationship('ContributorsRegistration', back_populates='user_activities') - issue = relationship('Issues', back_populates='user_activities') + contributor = relationship( + "ContributorsRegistration", back_populates="user_activities" + ) + issue = relationship("Issues", back_populates="user_activities") def __repr__(self): return f"" def to_dict(self): return { - 'contributor_id': self.contributor_id, - 'issue_id': self.issue_id, - 'activity': self.activity, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'mentor_id': 
self.mentor_id + "contributor_id": self.contributor_id, + "issue_id": self.issue_id, + "activity": self.activity, + "created_at": self.created_at, + "updated_at": self.updated_at, + "mentor_id": self.mentor_id, } + class UserBadges(Base): - __tablename__ = 'user_badges' + __tablename__ = "user_badges" id = Column(UUID(as_uuid=True), primary_key=True) - user_id = Column(BigInteger, ForeignKey('users.id'), nullable=False) # Assumes 'Users' model - badge_id = Column(BigInteger, ForeignKey('badges.id'), nullable=False) # Assumes 'Badges' model + user_id = Column( + BigInteger, ForeignKey("users.id"), nullable=False + ) # Assumes 'Users' model + badge_id = Column( + BigInteger, ForeignKey("badges.id"), nullable=False + ) # Assumes 'Badges' model created_at = Column(DateTime, nullable=True) updated_at = Column(DateTime, nullable=True) - user = relationship('Users', back_populates='user_badges') - badge = relationship('Badges', back_populates='user_badges') + user = relationship("Users", back_populates="user_badges") + badge = relationship("Badges", back_populates="user_badges") def __repr__(self): return f"" def to_dict(self): return { - 'user_id': self.user_id, - 'badge_id': self.badge_id, - 'created_at': self.created_at, - 'updated_at': self.updated_at + "user_id": self.user_id, + "badge_id": self.badge_id, + "created_at": self.created_at, + "updated_at": self.updated_at, } + class UserCertificates(Base): - __tablename__ = 'user_certificates' + __tablename__ = "user_certificates" id = Column(UUID(as_uuid=True), primary_key=True) - user_id = Column(BigInteger, ForeignKey('users.id'), nullable=False) # Assumes 'Users' model + user_id = Column( + BigInteger, ForeignKey("users.id"), nullable=False + ) # Assumes 'Users' model certificate_link = Column(Text, nullable=True) created_at = Column(DateTime, nullable=True) updated_at = Column(DateTime, nullable=True) - user = relationship('Users', back_populates='user_certificates') + user = relationship("Users", back_populates="user_certificates") def __repr__(self): return f"" def to_dict(self): return { - 'user_id': self.user_id, - 'certificate_link': self.certificate_link, - 'created_at': self.created_at, - 'updated_at': self.updated_at + "user_id": self.user_id, + "certificate_link": self.certificate_link, + "created_at": self.created_at, + "updated_at": self.updated_at, } - ### + class UserPointsMapping(Base): - __tablename__ = 'user_points_mapping' + __tablename__ = "user_points_mapping" id = Column(UUID(as_uuid=True), primary_key=True) - contributor = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=True) # Assumes 'ContributorsRegistration' model + contributor = Column( + BigInteger, ForeignKey("contributors_registration.id"), nullable=True + ) # Assumes 'ContributorsRegistration' model points = Column(Integer, nullable=False) level = Column(String(50), nullable=True) - created_at = Column(DateTime, default=func.now(), nullable=False) # Set to current time when created - updated_at = Column(DateTime, default=func.now(), onupdate=func.now(), nullable=False) - mentor_id = Column(BigInteger, ForeignKey('mentor_details.id'), nullable=True) # Assumes 'MentorDetails' model - - contributors = relationship('ContributorsRegistration', back_populates='user_points_mappings') - mentor = relationship('MentorDetails', back_populates='user_points_mappings') + created_at = Column( + DateTime, default=func.now(), nullable=False + ) # Set to current time when created + updated_at = Column( + DateTime, default=func.now(), onupdate=func.now(), 
nullable=False + ) + mentor_id = Column( + BigInteger, ForeignKey("mentor_details.id"), nullable=True + ) # Assumes 'MentorDetails' model + + contributors = relationship( + "ContributorsRegistration", back_populates="user_points_mappings" + ) + mentor = relationship("MentorDetails", back_populates="user_points_mappings") def __repr__(self): return f"" def to_dict(self): return { - 'contributor_id': self.contributor, - 'points': self.points, - 'level': self.level, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'mentor_id': self.mentor_id + "contributor_id": self.contributor, + "points": self.points, + "level": self.level, + "created_at": self.created_at, + "updated_at": self.updated_at, + "mentor_id": self.mentor_id, } + class Users(Base): - __tablename__ = 'users' + __tablename__ = "users" id = Column(BigInteger, primary_key=True) # Assumes id is the primary key name = Column(Text, nullable=True) @@ -1431,28 +1525,28 @@ class Users(Base): level = Column(Text, nullable=True) created_at = Column(DateTime, nullable=True) updated_at = Column(DateTime, nullable=True) - - user_badges = relationship('UserBadges', back_populates='user') - user_certificates = relationship('UserCertificates', back_populates='user') + user_badges = relationship("UserBadges", back_populates="user") + user_certificates = relationship("UserCertificates", back_populates="user") def __repr__(self): return f"" def to_dict(self): return { - 'id': self.id, - 'name': self.name, - 'discord': self.discord, - 'github': self.github, - 'points': self.points, - 'level': self.level, - 'created_at': self.created_at, - 'updated_at': self.updated_at + "id": self.id, + "name": self.name, + "discord": self.discord, + "github": self.github, + "points": self.points, + "level": self.level, + "created_at": self.created_at, + "updated_at": self.updated_at, } + class VcLogs(Base): - __tablename__ = 'vc_logs' + __tablename__ = "vc_logs" id = Column(BigInteger, primary_key=True) # Auto field created_at = Column(DateTime, nullable=False) @@ -1465,16 +1559,17 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'created_at': self.created_at, - 'discord_id': self.discord_id, - 'discord_name': self.discord_name, - 'option': self.option + "id": self.id, + "created_at": self.created_at, + "discord_id": self.discord_id, + "discord_name": self.discord_name, + "option": self.option, } + class GitHubProfileData(Base): - __tablename__ = 'github_profile_data' - + __tablename__ = "github_profile_data" + github_username = Column(String, primary_key=True) discord_id = Column(BigInteger, nullable=False) classroom_points = Column(Integer, nullable=False, default=0) @@ -1489,18 +1584,19 @@ def __repr__(self): def to_dict(self): return { - 'github_username': self.github_username, - 'discord_id': self.discord_id, - 'classroom_points': self.classroom_points, - 'prs_raised': self.prs_raised, - 'prs_reviewed': self.prs_reviewed, - 'prs_merged': self.prs_merged, - 'dpg_points': self.dpg_points, - 'milestone': self.milestone, + "github_username": self.github_username, + "discord_id": self.discord_id, + "classroom_points": self.classroom_points, + "prs_raised": self.prs_raised, + "prs_reviewed": self.prs_reviewed, + "prs_merged": self.prs_merged, + "dpg_points": self.dpg_points, + "milestone": self.milestone, } + class CommunityOrgs(Base): - __tablename__ = 'community_orgs' + __tablename__ = "community_orgs" id = Column(BigInteger, primary_key=True, autoincrement=True) name = Column(Text, nullable=True) @@ -1509,18 +1605,16 @@ def 
__repr__(self): return f"" def to_dict(self): - return { - 'id': self.id, - 'name': self.name - } - + return {"id": self.id, "name": self.name} class ContributorPoints(Base): - __tablename__ = 'contributor_points' + __tablename__ = "contributor_points" id = Column(BigInteger, primary_key=True, autoincrement=True) - contributors_id = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=True) + contributors_id = Column( + BigInteger, ForeignKey("contributors_registration.id"), nullable=True + ) total_points = Column(Integer, nullable=False, default=0) def __repr__(self): @@ -1528,16 +1622,17 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'contributors_id': self.contributors_id, - 'total_points': self.total_points + "id": self.id, + "contributors_id": self.contributors_id, + "total_points": self.total_points, } - + + class MentorNotAdded(Base): - __tablename__ = 'mentor_not_added' + __tablename__ = "mentor_not_added" id = Column(BigInteger, primary_key=True, autoincrement=True) - mentor_github_id = Column(BigInteger, nullable=True) + mentor_github_id = Column(BigInteger, nullable=True) issue_id = Column(BigInteger, nullable=True) def __repr__(self): @@ -1545,16 +1640,15 @@ def __repr__(self): def to_dict(self): return { - 'id': self.id, - 'mentor_github_id': self.mentor_github_id, - 'issue_id': self.issue_id + "id": self.id, + "mentor_github_id": self.mentor_github_id, + "issue_id": self.issue_id, } - class Leaderboard(Base): - __tablename__ = 'leaderboard' - + __tablename__ = "leaderboard" + discord_id = Column(BigInteger, primary_key=True, autoincrement=False) github_id = Column(BigInteger, nullable=False) github_url = Column(Text, nullable=False) @@ -1573,24 +1667,26 @@ class Leaderboard(Base): certificate_link = Column(Text, nullable=True) def __repr__(self): - return f"" + return ( + f"" + ) def to_dict(self): return { - 'discord_id': self.discord_id, - 'github_id': self.github_id, - 'github_url': self.github_url, - 'apprentice_badge': self.apprentice_badge, - 'converser_badge': self.converser_badge, - 'rockstar_badge': self.rockstar_badge, - 'enthusiast_badge': self.enthusiast_badge, - 'rising_star_badge': self.rising_star_badge, - 'github_x_discord_badge': self.github_x_discord_badge, - 'points': self.points, - 'bronze_badge': self.bronze_badge, - 'silver_badge': self.silver_badge, - 'gold_badge': self.gold_badge, - 'ruby_badge': self.ruby_badge, - 'diamond_badge': self.diamond_badge, - 'certificate_link': self.certificate_link - } \ No newline at end of file + "discord_id": self.discord_id, + "github_id": self.github_id, + "github_url": self.github_url, + "apprentice_badge": self.apprentice_badge, + "converser_badge": self.converser_badge, + "rockstar_badge": self.rockstar_badge, + "enthusiast_badge": self.enthusiast_badge, + "rising_star_badge": self.rising_star_badge, + "github_x_discord_badge": self.github_x_discord_badge, + "points": self.points, + "bronze_badge": self.bronze_badge, + "silver_badge": self.silver_badge, + "gold_badge": self.gold_badge, + "ruby_badge": self.ruby_badge, + "diamond_badge": self.diamond_badge, + "certificate_link": self.certificate_link, + } diff --git a/db/server.py b/db/server.py index 11d0ecc..b810fba 100644 --- a/db/server.py +++ b/db/server.py @@ -1,20 +1,21 @@ - import os -## from sqlalchemy.future import select -from sqlalchemy.orm import sessionmaker, aliased +from sqlalchemy.orm import sessionmaker, aliased from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession from sqlalchemy.pool import 
NullPool from sqlalchemy.ext.declarative import DeclarativeMeta - -from ..db import PostgresORM -from .models import Base, ContributorsRegistration,GithubClassroomData, IssueContributors +from .models import ( + Base, + ContributorsRegistration, + GithubClassroomData, + IssueContributors, +) from sqlalchemy import delete, insert -from sqlalchemy import select, asc, desc,update, join +from sqlalchemy import asc, desc, update from sqlalchemy.exc import IntegrityError from sqlalchemy.sql import exists from datetime import datetime -from sqlalchemy import cast, String ,and_ +from sqlalchemy import cast, String, and_ from sqlalchemy.dialects.postgresql import ARRAY from .models import Issues, CommunityOrgs, PointSystem, PrHistory from dotenv import load_dotenv @@ -24,53 +25,55 @@ def get_postgres_uri(): - DB_HOST = os.getenv('POSTGRES_DB_HOST') - DB_NAME = os.getenv('POSTGRES_DB_NAME') - DB_USER = os.getenv('POSTGRES_DB_USER') - DB_PASS = os.getenv('POSTGRES_DB_PASS') + DB_HOST = os.getenv("POSTGRES_DB_HOST") + DB_NAME = os.getenv("POSTGRES_DB_NAME") + DB_USER = os.getenv("POSTGRES_DB_USER") + DB_PASS = os.getenv("POSTGRES_DB_PASS") + + return f"postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}" + - return f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}' - class ServerQueries: def __init__(self): - DATABASE_URL = get_postgres_uri() + DATABASE_URL = get_postgres_uri() # Initialize Async SQLAlchemy - engine = create_async_engine(DATABASE_URL, echo=False,poolclass=NullPool) - async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession) + engine = create_async_engine(DATABASE_URL, echo=False, poolclass=NullPool) + async_session = sessionmaker( + autocommit=False, autoflush=False, bind=engine, class_=AsyncSession + ) self.session = async_session - - def convert_dict(self,data): + + def convert_dict(self, data): try: - if type(data) == list: + if isinstance(data, list): data = [val.to_dict() for val in data] else: return [data.to_dict()] - + return data except Exception as e: print(e) raise Exception - - - def get_class_by_tablename(self,tablename): + + def get_class_by_tablename(self, tablename): try: for cls in Base.registry._class_registry.values(): if isinstance(cls, DeclarativeMeta): - if hasattr(cls, '__tablename__') and cls.__tablename__ == tablename: + if hasattr(cls, "__tablename__") and cls.__tablename__ == tablename: return cls return None except Exception as e: print(f"ERROR get_class_by_tablename - {e}") - return None + return None - async def readAll(self,table_class): + async def readAll(self, table_class): try: table = self.get_class_by_tablename(table_class) - # Query all records from the specified table class + # Query all records from the specified table class async with self.session() as session: stmt = select(table) - result = await session.execute(stmt) - + result = await session.execute(stmt) + data = result.scalars().all() result = self.convert_dict(data) return result @@ -78,47 +81,54 @@ async def readAll(self,table_class): print(f"An error occurred -read_all_from_table : {e}") return None - - async def deleteComment(self,issue_id,table_name): + async def deleteComment(self, issue_id, table_name): try: table = self.get_class_by_tablename(table_name) async with self.session() as session: stmt = delete(table).where(table.issue_id == issue_id) await session.execute(stmt) await session.commit() - + return True - + except Exception as e: print(f"An error occurred - deleteComment: {e}") return False - async def 
read(self, table, filters=None, select_columns=None, order=None, limit=None, offset=None): + async def read( + self, + table, + filters=None, + select_columns=None, + order=None, + limit=None, + offset=None, + ): """ Reads data from a table in the database using SQLAlchemy ORM. """ try: table_class = self.get_class_by_tablename(table) - + # Select specific columns or all columns if None if select_columns: stmt = select([getattr(table_class, col) for col in select_columns]) else: stmt = select(table_class) - + # Apply filters if filters: for column, condition in filters.items(): if isinstance(condition, tuple) and len(condition) == 2: operation, value = condition col_attr = getattr(table_class, column) - if operation == 'gt': + if operation == "gt": stmt = stmt.where(col_attr > value) - elif operation == 'lt': + elif operation == "lt": stmt = stmt.where(col_attr < value) - elif operation == 'gte': + elif operation == "gte": stmt = stmt.where(col_attr >= value) - elif operation == 'lte': + elif operation == "lte": stmt = stmt.where(col_attr <= value) else: stmt = stmt.where(getattr(table_class, column) == condition) @@ -126,15 +136,15 @@ async def read(self, table, filters=None, select_columns=None, order=None, limit # Apply ordering if order: for column, direction in order.items(): - if direction == 'asc': + if direction == "asc": stmt = stmt.order_by(asc(getattr(table_class, column))) - elif direction == 'desc': + elif direction == "desc": stmt = stmt.order_by(desc(getattr(table_class, column))) # Apply limit if limit: stmt = stmt.limit(limit) - + # Apply offset if offset: stmt = stmt.offset(offset) @@ -142,32 +152,35 @@ async def read(self, table, filters=None, select_columns=None, order=None, limit async with self.session() as session: result = await session.execute(stmt) data = result.scalars().all() - + # Convert result to dictionary return [row.to_dict() for row in data] - + except Exception as e: print(f"An error occurred - read: {e}") return None - - + async def add_discord_metrics(self, discord_metrics): try: async with self.session() as session: DiscordMetrics = self.get_class_by_tablename("discord_metrics") for metric in discord_metrics: - stmt = select(DiscordMetrics).where(DiscordMetrics.product_name == metric["product_name"]) + stmt = select(DiscordMetrics).where( + DiscordMetrics.product_name == metric["product_name"] + ) result = await session.execute(stmt) existing_record = result.scalars().first() if existing_record: update_stmt = ( update(DiscordMetrics) - .where(DiscordMetrics.product_name == metric["product_name"]) + .where( + DiscordMetrics.product_name == metric["product_name"] + ) .values( mentor_messages=metric["mentor_messages"], - contributor_messages=metric["contributor_messages"] + contributor_messages=metric["contributor_messages"], ) .returning(DiscordMetrics) ) @@ -176,7 +189,7 @@ async def add_discord_metrics(self, discord_metrics): else: new_record = DiscordMetrics(**metric) session.add(new_record) - await session.commit() + await session.commit() await session.refresh(new_record) data = new_record @@ -187,7 +200,7 @@ async def add_discord_metrics(self, discord_metrics): print(f"An error occurred: {e}") await session.rollback() return None - + async def add_github_metrics(self, github_metrics): try: async with self.session() as session: @@ -195,13 +208,19 @@ async def add_github_metrics(self, github_metrics): GithubMetrics = self.get_class_by_tablename("github_metrics") # Check if the metric already exists in the database - stmt = 
select(GithubMetrics).where(GithubMetrics.product_name == metric["product_name"]) + stmt = select(GithubMetrics).where( + GithubMetrics.product_name == metric["product_name"] + ) result = await session.execute(stmt) existing_record = result.scalars().first() if existing_record: - update_data = {key: value for key, value in metric.items() if key != "product_name"} - + update_data = { + key: value + for key, value in metric.items() + if key != "product_name" + } + update_stmt = ( update(GithubMetrics) .where(GithubMetrics.product_name == metric["product_name"]) @@ -214,7 +233,7 @@ async def add_github_metrics(self, github_metrics): # Insert the new metric if it doesn't exist new_record = GithubMetrics(**metric) session.add(new_record) - await session.commit() + await session.commit() await session.refresh(new_record) data = new_record @@ -225,16 +244,18 @@ async def add_github_metrics(self, github_metrics): print(f"An error occurred: {e}") await session.rollback() return None - - async def check_exists(self,discord_id, assignment_id): + + async def check_exists(self, discord_id, assignment_id): try: # Construct the query for check exists async with self.session() as session: - stmt = ( - select(exists() - .where((GithubClassroomData.discord_id.is_(None)) | (GithubClassroomData.discord_id == discord_id)) - .where(GithubClassroomData.assignment_id == assignment_id) + stmt = select( + exists() + .where( + (GithubClassroomData.discord_id.is_(None)) + | (GithubClassroomData.discord_id == discord_id) ) + .where(GithubClassroomData.assignment_id == assignment_id) ) result = await session.execute(stmt) exists_result = result.scalar() @@ -244,53 +265,67 @@ async def check_exists(self,discord_id, assignment_id): except Exception as e: print(f"An error occurred: {e}") return None - + async def save_classroom_records(self, data): try: async with self.session() as session: for record in data: try: - new_record = GithubClassroomData( - **record) + new_record = GithubClassroomData(**record) session.add(new_record) - + await session.commit() print("Record inserting successfully!") except Exception as e: await session.rollback() print("Error updating record:", e) - + return True except Exception as e: print(f"An error occurred save_classroom_records: {e}") return False - + async def update_classroom_records(self, data): async with self.session() as session: for record in data: try: stmt = ( - update(GithubClassroomData). - where( - GithubClassroomData.assignment_id == record.get('assignment_id'), - GithubClassroomData.discord_id == cast(str(record.get('discord_id')),String) - ). 
- values( - assignment_name=record.get('assignment', {}).get('title'), - assignment_url=record.get('assignment', {}).get('classroom', {}).get('url'), - c4gt_points=record.get('c4gt_points'), - github_username=record.get('students', [{}])[0].get('login'), - points_available=record.get('points_available'), - points_awarded=record.get('points_awarded',0), - roster_identifier=record.get('roster_identifier',""), - starter_code_url=record.get('starter_code_url', record.get('repository', {}).get('html_url')), - student_repository_name=record.get('repository', {}).get('full_name'), - student_repository_url=record.get('repository', {}).get('html_url'), - submission_timestamp=record.get('submission_timestamsp', datetime.now()), - updated_at=record.get('updated_at') + update(GithubClassroomData) + .where( + GithubClassroomData.assignment_id + == record.get("assignment_id"), + GithubClassroomData.discord_id + == cast(str(record.get("discord_id")), String), + ) + .values( + assignment_name=record.get("assignment", {}).get("title"), + assignment_url=record.get("assignment", {}) + .get("classroom", {}) + .get("url"), + c4gt_points=record.get("c4gt_points"), + github_username=record.get("students", [{}])[0].get( + "login" + ), + points_available=record.get("points_available"), + points_awarded=record.get("points_awarded", 0), + roster_identifier=record.get("roster_identifier", ""), + starter_code_url=record.get( + "starter_code_url", + record.get("repository", {}).get("html_url"), + ), + student_repository_name=record.get("repository", {}).get( + "full_name" + ), + student_repository_url=record.get("repository", {}).get( + "html_url" + ), + submission_timestamp=record.get( + "submission_timestamsp", datetime.now() + ), + updated_at=record.get("updated_at"), ) ) - result = await session.execute(stmt) + await session.execute(stmt) await session.commit() print("Record updated successfully!") return True @@ -298,12 +333,12 @@ async def update_classroom_records(self, data): await session.rollback() print("Error updating record:", e) return False - - async def getdiscord_from_cr(self,github_url): + + async def getdiscord_from_cr(self, github_url): try: Table = self.get_class_by_tablename("contributors_registration") async with self.session() as session: - stmt = (select(Table.discord_id).where(Table.github_url == github_url)) + stmt = select(Table.discord_id).where(Table.github_url == github_url) result = await session.execute(stmt) exists_result = result.scalar() @@ -311,49 +346,47 @@ async def getdiscord_from_cr(self,github_url): except Exception as e: print("Error - getdiscord_from_cr:", e) return None - - + async def add_data(self, data: dict, table_name: str): try: - table_class = self.get_class_by_tablename(table_name) + table_class = self.get_class_by_tablename(table_name) if not table_class: raise ValueError(f"Table class for {table_name} not found") - + async with self.session() as session: new_record = table_class(**data) session.add(new_record) - await session.commit() + await session.commit() await session.refresh(new_record) - + return new_record except Exception as e: print("Error - add_data:", e) return None - + async def insert_org(self, name): try: async with self.session() as session: table = self.get_class_by_tablename("community_orgs") if not table: - raise ValueError(f"No ORM class found for table community_orgs") - - stmt = insert(table).values( - name=name - ).returning(table) + raise ValueError("No ORM class found for table community_orgs") + + stmt = 
insert(table).values(name=name).returning(table) result = await session.execute(stmt) - - await session.commit() - inserted_record = result.fetchone() - print("inserted_record ", {"id": inserted_record[0], "name": inserted_record[1]}) + + await session.commit() + inserted_record = result.fetchone() + print( + "inserted_record ", + {"id": inserted_record[0], "name": inserted_record[1]}, + ) return {"id": inserted_record[0], "name": inserted_record[1]} - + except Exception as e: print(f"Error in record_created_ticket method: {e}") return None - - async def check_record_exists(self, table_name, filter_column, filter_value): try: table_class = self.get_class_by_tablename(table_name) @@ -361,9 +394,8 @@ async def check_record_exists(self, table_name, filter_column, filter_value): raise ValueError(f"No ORM class found for table '{table_name}'") async with self.session() as session: - stmt = ( - select(table_class) - .where(getattr(table_class, filter_column) == filter_value) + stmt = select(table_class).where( + getattr(table_class, filter_column) == filter_value ) result = await session.execute(stmt) exists = result.scalars().first() is not None @@ -371,27 +403,27 @@ async def check_record_exists(self, table_name, filter_column, filter_value): except Exception as e: print(f"An error occurred - check_record_exists: {e}") return False - - - async def delete(self,table_name, filter_column, filter_value): + + async def delete(self, table_name, filter_column, filter_value): try: table = self.get_class_by_tablename(table_name) async with self.session() as session: - stmt = delete(table).where(getattr(table, filter_column) == filter_value) + stmt = delete(table).where( + getattr(table, filter_column) == filter_value + ) await session.execute(stmt) - await session.commit() + await session.commit() return True - + except Exception as e: print(f"An error occurred - delete: {e}") return False - - - async def get_data(self,col_name,table_name,value,condition=None): + + async def get_data(self, col_name, table_name, value, condition=None): try: Table = self.get_class_by_tablename(table_name) async with self.session() as session: - stmt = (select(Table).where(getattr(Table, col_name) == value)) + stmt = select(Table).where(getattr(Table, col_name) == value) # Execute the query result = await session.execute(stmt) exists_result = result.scalar() @@ -399,25 +431,27 @@ async def get_data(self,col_name,table_name,value,condition=None): return self.convert_dict(exists_result) else: return None - + except Exception as e: print(f"An error occurred - get_data: {e}") return None - + async def checkIsTicket(self, issue_id): try: - tables_to_check = ['issues'] + tables_to_check = ["issues"] async with self.session() as session: data = [] for table_name in tables_to_check: table_class = self.get_class_by_tablename(table_name) if not table_class: - continue - stmt = select(table_class).where(getattr(table_class, 'issue_id') == issue_id) + continue + stmt = select(table_class).where( + getattr(table_class, "issue_id") == issue_id + ) result = await session.execute(stmt) records = result.scalars().all() - + if records: data.extend(records) # Check if data was found in any of the tables @@ -428,44 +462,48 @@ async def checkIsTicket(self, issue_id): except Exception as e: print(f"An error occurred - check_is_ticket: {e}") return False - - - async def record_created_ticket(self, data,table_name): + + async def record_created_ticket(self, data, table_name): try: async with self.session() as session: # Dynamically get the ORM 
class for the table table = self.get_class_by_tablename(table_name) - + # Build and execute the query to check if the issue_id already exists # stmt = select(table).where(table.issue_id == data['issue_id']) - stmt = insert(table).values( - link=data['link'], - labels=cast(data['labels'], ARRAY(String)), # Cast to ARRAY type - complexity=data['complexity'], - technology=data['technology'], - status=data['status'], - created_at=data['created_at'], - updated_at=data['updated_at'], - title=data['title'], - domain=data['domain'], - description=f"{data['description']}", - org_id=data['org_id'], - issue_id=data['issue_id'], - project_type=data['project_type'] - ).returning(table) + stmt = ( + insert(table) + .values( + link=data["link"], + labels=cast( + data["labels"], ARRAY(String) + ), # Cast to ARRAY type + complexity=data["complexity"], + technology=data["technology"], + status=data["status"], + created_at=data["created_at"], + updated_at=data["updated_at"], + title=data["title"], + domain=data["domain"], + description=f"{data['description']}", + org_id=data["org_id"], + issue_id=data["issue_id"], + project_type=data["project_type"], + ) + .returning(table) + ) result = await session.execute(stmt) - + await session.commit() return result - + except Exception as e: print(f"Error in record_created_ticket method: {e}") return None - async def record_updated_ticket(self, data, table_name): try: async with self.session() as session: @@ -475,18 +513,22 @@ async def record_updated_ticket(self, data, table_name): # Build the update query stmt = ( update(table) - .where(table.issue_id == data['issue_id']) # Match the existing issue by issue_id + .where( + table.issue_id == data["issue_id"] + ) # Match the existing issue by issue_id .values( - link=data['link'], - labels=cast(data['labels'], ARRAY(String)), # Cast to ARRAY type - complexity=data['complexity'], - technology=data['technology'], - status=data['status'], - created_at=data['created_at'], - updated_at=data['updated_at'], - title=data['title'], + link=data["link"], + labels=cast( + data["labels"], ARRAY(String) + ), # Cast to ARRAY type + complexity=data["complexity"], + technology=data["technology"], + status=data["status"], + created_at=data["created_at"], + updated_at=data["updated_at"], + title=data["title"], description=f"{data['description']}", - org_id=data['org_id'] + org_id=data["org_id"], ) .returning(table) # Return the updated row(s) ) @@ -496,62 +538,58 @@ async def record_updated_ticket(self, data, table_name): # Commit the transaction await session.commit() - + return result except Exception as e: print(f"Error in record_updated_ticket method: {e}") return None - async def update_data(self, data, col_name, table_name): try: table_class = self.get_class_by_tablename(table_name) - + async with self.session() as session: stmt = ( update(table_class) .where(getattr(table_class, col_name) == data[col_name]) .values(**data) - .returning(table_class) + .returning(table_class) ) - + result = await session.execute(stmt) await session.commit() - updated_record = result.scalars().first() + # updated_record = result.scalars().first() # Convert the updated record to a dictionary before returning return result - + except Exception as e: print(f"Error in update_data: {e}") return None - async def update_pr_data(self, data, table_name): try: table_class = self.get_class_by_tablename(table_name) - + async with self.session() as session: new_pr_history = PrHistory( - created_at= data['created_at'], - api_url=data['api_url'], - html_url= 
data['html_url'], - raised_by= data['raised_by'], - raised_at= data['raised_at'], - raised_by_username= data['raised_by_username'], - status= data['status'], - is_merged= data['is_merged'], - merged_by= data['merged_by'], - merged_at= data['merged_at'], - merged_by_username= data['merged_by_username'], - pr_id= data['pr_id'] + created_at=data["created_at"], + api_url=data["api_url"], + html_url=data["html_url"], + raised_by=data["raised_by"], + raised_at=data["raised_at"], + raised_by_username=data["raised_by_username"], + status=data["status"], + is_merged=data["is_merged"], + merged_by=data["merged_by"], + merged_at=data["merged_at"], + merged_by_username=data["merged_by_username"], + pr_id=data["pr_id"], ) stmt = ( update(table_class) - .where(table_class.pr_id == data['pr_id']) # Match the existing issue by issue_id - .values( - - ) + .where(table_class.pr_id == new_pr_history.pr_id) + .values() .returning(table_class) # Return the updated row(s) ) @@ -563,14 +601,13 @@ async def update_pr_data(self, data, table_name): # Optionally fetch the updated record(s) updated_record = result.fetchone() - + return updated_record if updated_record else None - + except Exception as e: print(f"Error in update_data: {e}") return None - async def update_pr_history(self, pr_id, data): try: async with self.session() as session: @@ -581,19 +618,21 @@ async def update_pr_history(self, pr_id, data): if pr_history_record: # Update the fields with new values from data - pr_history_record.created_at = data['created_at'] - pr_history_record.api_url = data['api_url'] - pr_history_record.html_url = data['html_url'] - pr_history_record.raised_by = data['raised_by'] - pr_history_record.raised_at = data['raised_at'] - pr_history_record.raised_by_username = data['raised_by_username'] - pr_history_record.status = data['status'] - pr_history_record.is_merged = data['is_merged'] - pr_history_record.merged_by = data['merged_by'] - pr_history_record.merged_at = None if data['merged_at'] is None else data['merged_at'] - pr_history_record.merged_by_username = data['merged_by_username'] - pr_history_record.ticket_url = data['ticket_url'] - pr_history_record.ticket_complexity = data['ticket_complexity'] + pr_history_record.created_at = data["created_at"] + pr_history_record.api_url = data["api_url"] + pr_history_record.html_url = data["html_url"] + pr_history_record.raised_by = data["raised_by"] + pr_history_record.raised_at = data["raised_at"] + pr_history_record.raised_by_username = data["raised_by_username"] + pr_history_record.status = data["status"] + pr_history_record.is_merged = data["is_merged"] + pr_history_record.merged_by = data["merged_by"] + pr_history_record.merged_at = ( + None if data["merged_at"] is None else data["merged_at"] + ) + pr_history_record.merged_by_username = data["merged_by_username"] + pr_history_record.ticket_url = data["ticket_url"] + pr_history_record.ticket_complexity = data["ticket_complexity"] pr_history_record.issue_id = data["issue_id"] # Commit the changes to the database @@ -611,88 +650,87 @@ async def update_pr_history(self, pr_id, data): print(f"Error in update_pr_history: {e}") return None - async def addPr(self, prData, issue_id): try: if issue_id: - ticket = await self.get_data("issue_id","issues",issue_id,None) - if len(ticket) ==0: - ticket = await self.get_data("issue_id","dmp_tickets",issue_id,None) + ticket = await self.get_data("issue_id", "issues", issue_id, None) + if len(ticket) == 0: + ticket = await self.get_data( + "issue_id", "dmp_tickets", issue_id, None + ) for pr 
in prData: data = { # "api_url":data["url"], - "html_url":pr["html_url"], - "pr_id":pr["pr_id"], - "raised_by":pr["raised_by"], - "raised_at":pr["raised_at"], - "raised_by_username":pr["raised_by_username"], - "status":pr["status"], - "is_merged":pr["is_merged"] if pr.get("is_merged") else None, - "merged_by":pr["merged_by"] if pr["merged_by"] else None, - "merged_by_username":pr["merged_by_username"] if pr.get("merged_by_username") else None, - "merged_at":pr["merged_at"] if pr.get("merged_at") else None, + "html_url": pr["html_url"], + "pr_id": pr["pr_id"], + "raised_by": pr["raised_by"], + "raised_at": pr["raised_at"], + "raised_by_username": pr["raised_by_username"], + "status": pr["status"], + "is_merged": pr["is_merged"] if pr.get("is_merged") else None, + "merged_by": pr["merged_by"] if pr["merged_by"] else None, + "merged_by_username": pr["merged_by_username"] + if pr.get("merged_by_username") + else None, + "merged_at": pr["merged_at"] if pr.get("merged_at") else None, "points": ticket[0]["ticket_points"] if issue_id else 0, - "ticket_url":ticket[0]["api_endpoint_url"] if issue_id else 0 + "ticket_url": ticket[0]["api_endpoint_url"] if issue_id else 0, } - resp = await self.add_data(data,"connected_prs") - + await self.add_data(data, "connected_prs") + return True except Exception as e: print(f"Error in addPr: {e}") return None - - - async def get_issue_from_issue_id(self,issue_id): + + async def get_issue_from_issue_id(self, issue_id): try: async with self.session() as session: # Dynamically get the ORM class for the table table = self.get_class_by_tablename("issues") - + # Build and execute the query to check if the issue_id already exists stmt = select(table).where(table.issue_id == issue_id) result = await session.execute(stmt) issues = result.scalars().first() - + if issues: return self.convert_dict(issues) return None - + except Exception as e: print(f"Error in get_issue_from_issue_id method: {e}") return None - - async def get_contributors_from_issue_id(self,issue_id): + + async def get_contributors_from_issue_id(self, issue_id): try: async with self.session() as session: # Dynamically get the ORM class for the table table = self.get_class_by_tablename("issue_contributors") - + # Build and execute the query to check if the issue_id already exists stmt = select(table).where(table.issue_id == issue_id) result = await session.execute(stmt) issues = result.scalars().all() - + if issues: return self.convert_dict(issues) return None - + except Exception as e: print(f"Error in get_contributors_from_issue_id method: {e}") return None - - async def get_pointsby_complexity(self, complexity_type,type="Contributor"): + + async def get_pointsby_complexity(self, complexity_type, type="Contributor"): try: async with self.session() as session: # Dynamically get the ORM class for the table table = self.get_class_by_tablename("points_mapping") - + # Build and execute the query with multiple conditions stmt = select(table).where( - and_( - table.complexity == complexity_type, - table.role == type - ) + and_(table.complexity == complexity_type, table.role == type) ) result = await session.execute(stmt) points = result.scalars().all() @@ -701,23 +739,22 @@ async def get_pointsby_complexity(self, complexity_type,type="Contributor"): except Exception as e: print(f"Error in get_pointsby_complexity method: {e}") return None - - async def upsert_point_transaction(self, issue_id, user_id, points,user_type="Contributor"): + + async def upsert_point_transaction( + self, issue_id, user_id, points, 
user_type="Contributor" + ): try: async with self.session() as session: table = self.get_class_by_tablename("point_transactions") column_map = { - "Contributor": table.user_id, - "Mentor": table.mentor_id, + "Contributor": table.user_id, + "Mentor": table.mentor_id, } chosen_column = column_map.get(user_type) stmt = select(table).where( - and_( - table.issue_id == issue_id, - chosen_column == user_id - ) + and_(table.issue_id == issue_id, chosen_column == user_id) ) - + result = await session.execute(stmt) transaction = result.scalars().one_or_none() @@ -725,7 +762,9 @@ async def upsert_point_transaction(self, issue_id, user_id, points,user_type="Co # Record exists, so update the points column update_stmt = ( update(table) - .where(and_(table.issue_id == issue_id, table.user_id == user_id)) + .where( + and_(table.issue_id == issue_id, table.user_id == user_id) + ) .values(point=points) ) await session.execute(update_stmt) @@ -734,7 +773,7 @@ async def upsert_point_transaction(self, issue_id, user_id, points,user_type="Co else: # Record does not exist, so create a new one - new_transaction = table(issue_id=issue_id,point=points) + new_transaction = table(issue_id=issue_id, point=points) setattr(new_transaction, chosen_column.key, user_id) session.add(new_transaction) await session.commit() @@ -743,22 +782,21 @@ async def upsert_point_transaction(self, issue_id, user_id, points,user_type="Co except Exception as e: print(f"Error in upsert_point_transaction method: {e}") return None - - async def save_user_points(self, user_id, points,user_type="Contributor"): + + async def save_user_points(self, user_id, points, user_type="Contributor"): try: async with self.session() as session: table = self.get_class_by_tablename("user_points_mapping") column_map = { - "Contributor": table.contributor, - "Mentor": table.mentor_id, + "Contributor": table.contributor, + "Mentor": table.mentor_id, } chosen_column = column_map.get(user_type) stmt = select(table).where(chosen_column == user_id) - + result = await session.execute(stmt) transaction = result.scalars().one_or_none() - if transaction: addon_points = points + transaction.points update_stmt = ( @@ -781,39 +819,41 @@ async def save_user_points(self, user_id, points,user_type="Contributor"): except Exception as e: print(f"Error in save_user_points method: {e}") return None - async def deleteIssueComment(self, commentId): try: async with self.session() as session: # Dynamically get the ORM class for the table table = self.get_class_by_tablename("ticket_comments") - + # Build and execute the query with multiple conditions - stmt = delete(table).where( - getattr(table, "id") == commentId - ) + stmt = delete(table).where(getattr(table, "id") == commentId) result = await session.execute(stmt) is_deleted = result.scalars().all() return is_deleted except Exception as e: print(f"Error in deleting issue comments: {e}") return None - async def getUserLeaderBoardData(self): try: - async with self.session() as session: + async with self.session() as session: orgs_alias = aliased(CommunityOrgs) points_alias = aliased(PointSystem) - + # Join the Issues table with the CommunityOrgs and PointSystem stmt = ( select(Issues, orgs_alias, points_alias) - .join(orgs_alias, Issues.org_id == orgs_alias.id, isouter=True) # Left join with CommunityOrgs - .join(points_alias, Issues.complexity == points_alias.complexity, isouter=True) # Left join with PointSystem + .join( + orgs_alias, Issues.org_id == orgs_alias.id, isouter=True + ) # Left join with CommunityOrgs + .join( + 
points_alias, + Issues.complexity == points_alias.complexity, + isouter=True, + ) # Left join with PointSystem ) - + # Execute the statement result = await session.execute(stmt) @@ -823,17 +863,16 @@ async def getUserLeaderBoardData(self): # Convert to dictionary format for readability (if needed) return [ { - 'issue': issue.to_dict(), - 'community_org': org.to_dict() if org else None, - 'point_system': points.to_dict() if points else None + "issue": issue.to_dict(), + "community_org": org.to_dict() if org else None, + "point_system": points.to_dict() if points else None, } for issue, org, points in records ] except Exception as e: - print('Exception occured while getting users leaderboard data ', e) + print("Exception occured while getting users leaderboard data ", e) return None - async def get_joined_data_with_filters(self, filters=None): async with self.session() as session: # Aliases for the tables @@ -842,14 +881,10 @@ async def get_joined_data_with_filters(self, filters=None): points = aliased(PointSystem) # Base query with the join - query = select( - issues, - orgs, - points - ).join( - orgs, issues.org_id == orgs.id - ).join( - points, points.complexity == issues.complexity + query = ( + select(issues, orgs, points) + .join(orgs, issues.org_id == orgs.id) + .join(points, points.complexity == issues.complexity) ) # If dynamic filters are provided, apply them @@ -865,40 +900,58 @@ async def get_joined_data_with_filters(self, filters=None): records = result.all() # Convert results to dictionaries if necessary - return [dict(issue=record[0].to_dict(), org=record[1].to_dict(), points=record[2].to_dict()) for record in records] + return [ + dict( + issue=record[0].to_dict(), + org=record[1].to_dict(), + points=record[2].to_dict(), + ) + for record in records + ] async def fetch_filtered_issues(self, filters): try: async with self.session() as session: # Start building the query by joining tables query = ( - select(Issues, CommunityOrgs, PointSystem, IssueContributors, ContributorsRegistration) - .join(CommunityOrgs, Issues.org_id == CommunityOrgs.id) - .join(PointSystem, Issues.complexity == PointSystem.complexity) - .outerjoin(IssueContributors, Issues.id == IssueContributors.issue_id) - .outerjoin(ContributorsRegistration, IssueContributors.contributor_id == ContributorsRegistration.id) - .where(Issues.complexity != 'Beginner') - .order_by(desc(Issues.id)) + select( + Issues, + CommunityOrgs, + PointSystem, + IssueContributors, + ContributorsRegistration, ) - + .join(CommunityOrgs, Issues.org_id == CommunityOrgs.id) + .join(PointSystem, Issues.complexity == PointSystem.complexity) + .outerjoin( + IssueContributors, Issues.id == IssueContributors.issue_id + ) + .outerjoin( + ContributorsRegistration, + IssueContributors.contributor_id == ContributorsRegistration.id, + ) + .where(Issues.complexity != "Beginner") + .order_by(desc(Issues.id)) + ) + # Prepare dynamic filter conditions conditions = [] - + # Check if there are filters for Issues table - if 'issues' in filters: - for field, value in filters['issues'].items(): + if "issues" in filters: + for field, value in filters["issues"].items(): conditions.append(getattr(Issues, field) == value) - + # Check if there are filters for CommunityOrgs table - if 'org' in filters: - for field, value in filters['org'].items(): + if "org" in filters: + for field, value in filters["org"].items(): conditions.append(getattr(CommunityOrgs, field) == value) - + # Check if there are filters for PointSystem table - if 'points' in filters: - for field, 
value in filters['points'].items(): + if "points" in filters: + for field, value in filters["points"].items(): conditions.append(getattr(PointSystem, field) == value) - + # Apply filters (if any) to the query if conditions: query = query.where(and_(*conditions)) @@ -913,22 +966,30 @@ async def fetch_filtered_issues(self, filters): issue = row.Issues.to_dict() org = row.CommunityOrgs.to_dict() if row.CommunityOrgs else None point_system = row.PointSystem.to_dict() - contributors_registration = row.ContributorsRegistration.to_dict() if row.ContributorsRegistration else None - data.append({ - 'issue': issue, - 'org': org, - 'points': point_system, - 'contributors_registration': contributors_registration - }) + contributors_registration = ( + row.ContributorsRegistration.to_dict() + if row.ContributorsRegistration + else None + ) + data.append( + { + "issue": issue, + "org": org, + "points": point_system, + "contributors_registration": contributors_registration, + } + ) return data except Exception as e: print(f"Error in fetch_filtered_issues: {e}") return None - def add_github_user(self, user): - data = self.client.table("contributors_registration").upsert(user, on_conflict=["github_id", "discord_id"]).execute() + data = ( + self.client.table("contributors_registration") + .upsert(user, on_conflict=["github_id", "discord_id"]) + .execute() + ) return data.data - diff --git a/migrations/__pycache__/env.cpython-310.pyc b/migrations/__pycache__/env.cpython-310.pyc deleted file mode 100644 index 146b573..0000000 Binary files a/migrations/__pycache__/env.cpython-310.pyc and /dev/null differ diff --git a/migrations/env.py b/migrations/env.py index 2f6fcc7..d241c16 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -1,17 +1,14 @@ from logging.config import fileConfig - from sqlalchemy import engine_from_config -from sqlalchemy import pool -from db.models import shared_metadata, Base - +from db.models import Base from alembic import context +from dotenv import load_dotenv +import os # this is the Alembic Config object, which provides # access to the values within the .ini file in use. config = context.config -from dotenv import load_dotenv -import os load_dotenv() url = os.getenv("DATABASE_URL") @@ -68,14 +65,13 @@ def run_migrations_online() -> None: """ engine = engine_from_config( - config.get_section(config.config_ini_section), prefix='sqlalchemy.') + config.get_section(config.config_ini_section), prefix="sqlalchemy." + ) with engine.connect() as connection: context.configure( - connection=connection, - target_metadata=target_metadata, - compare_type=True - ) + connection=connection, target_metadata=target_metadata, compare_type=True + ) with context.begin_transaction(): context.run_migrations() diff --git a/migrations/versions/8d1e6a7e959a_initial_migration.py b/migrations/versions/8d1e6a7e959a_initial_migration.py index db77404..d81fdea 100644 --- a/migrations/versions/8d1e6a7e959a_initial_migration.py +++ b/migrations/versions/8d1e6a7e959a_initial_migration.py @@ -1,18 +1,19 @@ """Initial migration Revision ID: 8d1e6a7e959a -Revises: +Revises: Create Date: 2024-12-18 18:12:00.911503 """ -from typing import Sequence, Union +from typing import Sequence, Union from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql +import db # revision identifiers, used by Alembic. 
-revision: str = '8d1e6a7e959a' +revision: str = "8d1e6a7e959a" down_revision: Union[str, None] = None branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None @@ -20,1704 +21,3220 @@ def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.create_table('github_profile_data', - sa.Column('github_username', sa.String(), nullable=False), - sa.Column('discord_id', sa.BigInteger(), nullable=False), - sa.Column('classroom_points', sa.Integer(), nullable=False), - sa.Column('prs_raised', sa.Integer(), nullable=False), - sa.Column('prs_reviewed', sa.Integer(), nullable=False), - sa.Column('prs_merged', sa.Integer(), nullable=False), - sa.Column('dpg_points', sa.Integer(), nullable=False), - sa.Column('milestone', sa.Integer(), nullable=False), - sa.PrimaryKeyConstraint('github_username') - ) - op.create_table('leaderboard', - sa.Column('discord_id', sa.BigInteger(), autoincrement=False, nullable=False), - sa.Column('github_id', sa.BigInteger(), nullable=False), - sa.Column('github_url', sa.Text(), nullable=False), - sa.Column('apprentice_badge', sa.Boolean(), nullable=True), - sa.Column('converser_badge', sa.Boolean(), nullable=False), - sa.Column('rockstar_badge', sa.Boolean(), nullable=False), - sa.Column('enthusiast_badge', sa.Boolean(), nullable=False), - sa.Column('rising_star_badge', sa.Boolean(), nullable=False), - sa.Column('github_x_discord_badge', sa.Boolean(), nullable=False), - sa.Column('points', sa.Integer(), nullable=False), - sa.Column('bronze_badge', sa.Boolean(), nullable=False), - sa.Column('silver_badge', sa.Boolean(), nullable=False), - sa.Column('gold_badge', sa.Boolean(), nullable=False), - sa.Column('ruby_badge', sa.Boolean(), nullable=False), - sa.Column('diamond_badge', sa.Boolean(), nullable=False), - sa.Column('certificate_link', sa.Text(), nullable=True), - sa.PrimaryKeyConstraint('discord_id') - ) - op.create_table('role_master', - sa.Column('id', sa.BigInteger(), nullable=False), - sa.Column('created_at', db.models.DateTime(), nullable=False), - sa.Column('updated_at', db.models.DateTime(), nullable=True), - sa.Column('role', sa.Text(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('unstructured_discord_data', - sa.Column('text', sa.Text(), nullable=True), - sa.Column('author', sa.BigInteger(), nullable=True), - sa.Column('channel', sa.BigInteger(), nullable=True), - sa.Column('channel_name', sa.Text(), nullable=True), - sa.Column('uuid', sa.String(length=36), nullable=False), - sa.Column('author_name', sa.Text(), nullable=True), - sa.Column('author_roles', sa.Text(), nullable=True), - sa.Column('sent_at', sa.Text(), nullable=True), - sa.PrimaryKeyConstraint('uuid') - ) - op.create_table('user_points_mapping', - sa.Column('id', sa.UUID(), nullable=False), - sa.Column('contributor', sa.BigInteger(), nullable=True), - sa.Column('points', sa.Integer(), nullable=False), - sa.Column('level', sa.String(length=50), nullable=True), - sa.Column('created_at', db.models.DateTime(), nullable=False), - sa.Column('updated_at', db.models.DateTime(), nullable=False), - sa.Column('mentor_id', sa.BigInteger(), nullable=True), - sa.ForeignKeyConstraint(['contributor'], ['contributors_registration.id'], ), - sa.ForeignKeyConstraint(['mentor_id'], ['mentor_details.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.drop_table('__contributors_vc') - op.drop_table('__mentors') - op.drop_table('__mentorship_program_ticket_comments') - op.drop_table('__mentorship_program_pull_request') - 
op.drop_table('__mentorship_program_tickets') - op.drop_table('__community_program_unique_user_data') - op.drop_table('__contributors_discord') - op.drop_table('__applicant') - op.drop_table('__dashboard_config') - op.drop_table('__mentorship_program_projects') - op.drop_table('__comments') - op.drop_table('__dev_onboarding') - op.drop_table('contributors_registration_old') - op.drop_table('__pull_requests') - op.drop_table('__community_program_tickets') - op.drop_table('__community_organisations') - op.drop_table('__mentorship_program_selected_contributors') - op.drop_table('__community_program_product_wise_tickets') - op.drop_table('unstructured discord data') - op.alter_column('app_comments', 'id', - existing_type=sa.UUID(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text('gen_random_uuid()')) - op.alter_column('app_comments', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('app_comments', 'issue_id', - existing_type=sa.BIGINT(), - nullable=True) - op.alter_column('badges', 'id', - existing_type=sa.INTEGER(), - type_=sa.UUID(), - existing_nullable=False) - op.alter_column('badges', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('badges', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('ccbp_tickets', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('ccbp_tickets', 'issue_id', - existing_type=sa.BIGINT(), - nullable=True) - op.alter_column('ccbp_tickets', 'index', - existing_type=sa.SMALLINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('ccbp_tickets', 'closed_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_comment='date-time at which issue was closed', - existing_nullable=True) - op.alter_column('chapters', 'org_name', - existing_type=sa.TEXT(), - nullable=True) - op.alter_column('chapters', 'discord_role_id', - existing_type=sa.BIGINT(), - nullable=True, - comment='db id of the corresponding member role in discord server', - existing_comment='db od of the corresponding member role in discord server') - op.alter_column('chapters', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('community_orgs', 'name', - existing_type=sa.TEXT(), - nullable=True) - op.alter_column('connected_prs', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('connected_prs', 'raised_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False) - op.alter_column('connected_prs', 'merged_at', - existing_type=postgresql.TIMESTAMP(), - type_=sa.Text(), - existing_nullable=True) - op.alter_column('contributor_names', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - 
op.add_column('contributor_points', sa.Column('contributors_id', sa.BigInteger(), nullable=True)) - op.drop_constraint('contributor_points_contributors_id_fkey', 'contributor_points', type_='foreignkey') - op.create_foreign_key(None, 'contributor_points', 'contributors_registration', ['contributors_id'], ['id']) - op.drop_column('contributor_points', 'user_id') - op.alter_column('contributors_discord', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('contributors_discord', 'joined_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.drop_column('contributors_discord', 'city') - op.drop_column('contributors_discord', 'country') - op.drop_column('contributors_discord', 'experience') - op.alter_column('contributors_registration', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('contributors_registration', 'joined_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) + op.create_table( + "github_profile_data", + sa.Column("github_username", sa.String(), nullable=False), + sa.Column("discord_id", sa.BigInteger(), nullable=False), + sa.Column("classroom_points", sa.Integer(), nullable=False), + sa.Column("prs_raised", sa.Integer(), nullable=False), + sa.Column("prs_reviewed", sa.Integer(), nullable=False), + sa.Column("prs_merged", sa.Integer(), nullable=False), + sa.Column("dpg_points", sa.Integer(), nullable=False), + sa.Column("milestone", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("github_username"), + ) + op.create_table( + "leaderboard", + sa.Column("discord_id", sa.BigInteger(), autoincrement=False, nullable=False), + sa.Column("github_id", sa.BigInteger(), nullable=False), + sa.Column("github_url", sa.Text(), nullable=False), + sa.Column("apprentice_badge", sa.Boolean(), nullable=True), + sa.Column("converser_badge", sa.Boolean(), nullable=False), + sa.Column("rockstar_badge", sa.Boolean(), nullable=False), + sa.Column("enthusiast_badge", sa.Boolean(), nullable=False), + sa.Column("rising_star_badge", sa.Boolean(), nullable=False), + sa.Column("github_x_discord_badge", sa.Boolean(), nullable=False), + sa.Column("points", sa.Integer(), nullable=False), + sa.Column("bronze_badge", sa.Boolean(), nullable=False), + sa.Column("silver_badge", sa.Boolean(), nullable=False), + sa.Column("gold_badge", sa.Boolean(), nullable=False), + sa.Column("ruby_badge", sa.Boolean(), nullable=False), + sa.Column("diamond_badge", sa.Boolean(), nullable=False), + sa.Column("certificate_link", sa.Text(), nullable=True), + sa.PrimaryKeyConstraint("discord_id"), + ) + op.create_table( + "role_master", + sa.Column("id", sa.BigInteger(), nullable=False), + sa.Column("created_at", db.models.DateTime(), nullable=False), + sa.Column("updated_at", db.models.DateTime(), nullable=True), + sa.Column("role", sa.Text(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "unstructured_discord_data", + sa.Column("text", sa.Text(), nullable=True), + sa.Column("author", sa.BigInteger(), nullable=True), + sa.Column("channel", sa.BigInteger(), nullable=True), + sa.Column("channel_name", sa.Text(), nullable=True), + sa.Column("uuid", sa.String(length=36), nullable=False), + sa.Column("author_name", sa.Text(), nullable=True), + 
sa.Column("author_roles", sa.Text(), nullable=True), + sa.Column("sent_at", sa.Text(), nullable=True), + sa.PrimaryKeyConstraint("uuid"), + ) + op.create_table( + "user_points_mapping", + sa.Column("id", sa.UUID(), nullable=False), + sa.Column("contributor", sa.BigInteger(), nullable=True), + sa.Column("points", sa.Integer(), nullable=False), + sa.Column("level", sa.String(length=50), nullable=True), + sa.Column("created_at", db.models.DateTime(), nullable=False), + sa.Column("updated_at", db.models.DateTime(), nullable=False), + sa.Column("mentor_id", sa.BigInteger(), nullable=True), + sa.ForeignKeyConstraint( + ["contributor"], + ["contributors_registration.id"], + ), + sa.ForeignKeyConstraint( + ["mentor_id"], + ["mentor_details.id"], + ), + sa.PrimaryKeyConstraint("id"), + ) + op.drop_table("__contributors_vc") + op.drop_table("__mentors") + op.drop_table("__mentorship_program_ticket_comments") + op.drop_table("__mentorship_program_pull_request") + op.drop_table("__mentorship_program_tickets") + op.drop_table("__community_program_unique_user_data") + op.drop_table("__contributors_discord") + op.drop_table("__applicant") + op.drop_table("__dashboard_config") + op.drop_table("__mentorship_program_projects") + op.drop_table("__comments") + op.drop_table("__dev_onboarding") + op.drop_table("contributors_registration_old") + op.drop_table("__pull_requests") + op.drop_table("__community_program_tickets") + op.drop_table("__community_organisations") + op.drop_table("__mentorship_program_selected_contributors") + op.drop_table("__community_program_product_wise_tickets") + op.drop_table("unstructured discord data") + op.alter_column( + "app_comments", + "id", + existing_type=sa.UUID(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("gen_random_uuid()"), + ) + op.alter_column( + "app_comments", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "app_comments", "issue_id", existing_type=sa.BIGINT(), nullable=True + ) + op.alter_column( + "badges", + "id", + existing_type=sa.INTEGER(), + type_=sa.UUID(), + existing_nullable=False, + ) + op.alter_column( + "badges", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "badges", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "ccbp_tickets", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "ccbp_tickets", "issue_id", existing_type=sa.BIGINT(), nullable=True + ) + op.alter_column( + "ccbp_tickets", + "index", + existing_type=sa.SMALLINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "ccbp_tickets", + "closed_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_comment="date-time at which issue was closed", + existing_nullable=True, + ) + op.alter_column("chapters", "org_name", existing_type=sa.TEXT(), nullable=True) + op.alter_column( + "chapters", + "discord_role_id", + existing_type=sa.BIGINT(), + 
nullable=True, + comment="db id of the corresponding member role in discord server", + existing_comment="db od of the corresponding member role in discord server", + ) + op.alter_column( + "chapters", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column("community_orgs", "name", existing_type=sa.TEXT(), nullable=True) + op.alter_column( + "connected_prs", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "connected_prs", + "raised_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + ) + op.alter_column( + "connected_prs", + "merged_at", + existing_type=postgresql.TIMESTAMP(), + type_=sa.Text(), + existing_nullable=True, + ) + op.alter_column( + "contributor_names", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.add_column( + "contributor_points", + sa.Column("contributors_id", sa.BigInteger(), nullable=True), + ) + op.drop_constraint( + "contributor_points_contributors_id_fkey", + "contributor_points", + type_="foreignkey", + ) + op.create_foreign_key( + None, + "contributor_points", + "contributors_registration", + ["contributors_id"], + ["id"], + ) + op.drop_column("contributor_points", "user_id") + op.alter_column( + "contributors_discord", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "contributors_discord", + "joined_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.drop_column("contributors_discord", "city") + op.drop_column("contributors_discord", "country") + op.drop_column("contributors_discord", "experience") + op.alter_column( + "contributors_registration", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "contributors_registration", + "joined_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) op.drop_table_comment( - 'contributors_registration', - existing_comment='This is a duplicate of contributors_registration_old', - schema=None - ) - op.add_column('discord_engagement', sa.Column('converserbadge', sa.Boolean(), nullable=True)) - op.add_column('discord_engagement', sa.Column('apprenticebadge', sa.Boolean(), nullable=True)) - op.add_column('discord_engagement', sa.Column('rockstarbadge', sa.Boolean(), nullable=True)) - op.add_column('discord_engagement', sa.Column('enthusiastbadge', sa.Boolean(), nullable=True)) - op.add_column('discord_engagement', sa.Column('risingstarbadge', sa.Boolean(), nullable=True)) - op.alter_column('discord_engagement', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('discord_engagement', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.drop_column('discord_engagement', 'apprenticeBadge') - op.drop_column('discord_engagement', 'converserBadge') - op.drop_column('discord_engagement', 
'risingStarBadge') - op.drop_column('discord_engagement', 'enthusiastBadge') - op.drop_column('discord_engagement', 'rockstarBadge') - op.alter_column('dmp_issue_updates', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('dmp_issue_updates', 'comment_updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.drop_constraint('dmp_issue_updates_comment_id_key', 'dmp_issue_updates', type_='unique') - op.drop_constraint('dmp_issue_updates_dmp_id_fkey', 'dmp_issue_updates', type_='foreignkey') - op.create_foreign_key(None, 'dmp_issue_updates', 'dmp_issues', ['dmp_id'], ['id']) - op.alter_column('dmp_issues', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.drop_constraint('dmp_issues_dmp_id_key', 'dmp_issues', type_='unique') - op.drop_constraint('dmp_issues_org_id_fkey', 'dmp_issues', type_='foreignkey') - op.create_foreign_key(None, 'dmp_issues', 'dmp_orgs', ['org_id'], ['id']) - op.drop_column('dmp_issues', 'repo_owner') - op.add_column('dmp_orgs', sa.Column('version', sa.Text(), nullable=True)) - op.alter_column('dmp_orgs', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('dmp_orgs', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.drop_constraint('dmp_orgs_id_key', 'dmp_orgs', type_='unique') - op.alter_column('dmp_pr_updates', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('dmp_pr_updates', 'pr_updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('dmp_pr_updates', 'merged_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('dmp_pr_updates', 'closed_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.drop_constraint('dmp_pr_updates_pr_id_key', 'dmp_pr_updates', type_='unique') - op.drop_constraint('dmp_pr_updates_dmp_id_fkey', 'dmp_pr_updates', type_='foreignkey') - op.create_foreign_key(None, 'dmp_pr_updates', 'dmp_issues', ['dmp_id'], ['id']) - op.alter_column('dmp_tickets', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('dmp_tickets', 'ticket_points', - existing_type=sa.SMALLINT(), - type_=sa.Integer(), - existing_comment='How many points the ticket is worth', - existing_nullable=True, - existing_server_default=sa.text("'0'::smallint")) - op.alter_column('dmp_tickets', 'index', - existing_type=sa.SMALLINT(), - server_default=None, - type_=sa.Integer(), - existing_nullable=False, - autoincrement=True) - op.alter_column('dmp_week_updates', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.drop_constraint('dmp_week_updates_dmp_id_fkey', 'dmp_week_updates', type_='foreignkey') - op.alter_column('github_classroom_data', 'id', - existing_type=sa.BIGINT(), - 
server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('github_classroom_data', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('github_classroom_data', 'submission_timestamp', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False) - op.alter_column('github_classroom_data', 'points_awarded', - existing_type=sa.VARCHAR(), - type_=sa.Integer(), - existing_nullable=True) - op.alter_column('github_classroom_data', 'points_available', - existing_type=sa.VARCHAR(), - type_=sa.Integer(), - existing_nullable=True) + "contributors_registration", + existing_comment="This is a duplicate of contributors_registration_old", + schema=None, + ) + op.add_column( + "discord_engagement", sa.Column("converserbadge", sa.Boolean(), nullable=True) + ) + op.add_column( + "discord_engagement", sa.Column("apprenticebadge", sa.Boolean(), nullable=True) + ) + op.add_column( + "discord_engagement", sa.Column("rockstarbadge", sa.Boolean(), nullable=True) + ) + op.add_column( + "discord_engagement", sa.Column("enthusiastbadge", sa.Boolean(), nullable=True) + ) + op.add_column( + "discord_engagement", sa.Column("risingstarbadge", sa.Boolean(), nullable=True) + ) + op.alter_column( + "discord_engagement", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "discord_engagement", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.drop_column("discord_engagement", "apprenticeBadge") + op.drop_column("discord_engagement", "converserBadge") + op.drop_column("discord_engagement", "risingStarBadge") + op.drop_column("discord_engagement", "enthusiastBadge") + op.drop_column("discord_engagement", "rockstarBadge") + op.alter_column( + "dmp_issue_updates", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "dmp_issue_updates", + "comment_updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.drop_constraint( + "dmp_issue_updates_comment_id_key", "dmp_issue_updates", type_="unique" + ) + op.drop_constraint( + "dmp_issue_updates_dmp_id_fkey", "dmp_issue_updates", type_="foreignkey" + ) + op.create_foreign_key(None, "dmp_issue_updates", "dmp_issues", ["dmp_id"], ["id"]) + op.alter_column( + "dmp_issues", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.drop_constraint("dmp_issues_dmp_id_key", "dmp_issues", type_="unique") + op.drop_constraint("dmp_issues_org_id_fkey", "dmp_issues", type_="foreignkey") + op.create_foreign_key(None, "dmp_issues", "dmp_orgs", ["org_id"], ["id"]) + op.drop_column("dmp_issues", "repo_owner") + op.add_column("dmp_orgs", sa.Column("version", sa.Text(), nullable=True)) + op.alter_column( + "dmp_orgs", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "dmp_orgs", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + 
existing_server_default=sa.text("now()"), + ) + op.drop_constraint("dmp_orgs_id_key", "dmp_orgs", type_="unique") + op.alter_column( + "dmp_pr_updates", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "dmp_pr_updates", + "pr_updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "dmp_pr_updates", + "merged_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "dmp_pr_updates", + "closed_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.drop_constraint("dmp_pr_updates_pr_id_key", "dmp_pr_updates", type_="unique") + op.drop_constraint( + "dmp_pr_updates_dmp_id_fkey", "dmp_pr_updates", type_="foreignkey" + ) + op.create_foreign_key(None, "dmp_pr_updates", "dmp_issues", ["dmp_id"], ["id"]) + op.alter_column( + "dmp_tickets", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "dmp_tickets", + "ticket_points", + existing_type=sa.SMALLINT(), + type_=sa.Integer(), + existing_comment="How many points the ticket is worth", + existing_nullable=True, + existing_server_default=sa.text("'0'::smallint"), + ) + op.alter_column( + "dmp_tickets", + "index", + existing_type=sa.SMALLINT(), + server_default=None, + type_=sa.Integer(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "dmp_week_updates", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.drop_constraint( + "dmp_week_updates_dmp_id_fkey", "dmp_week_updates", type_="foreignkey" + ) + op.alter_column( + "github_classroom_data", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "github_classroom_data", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "github_classroom_data", + "submission_timestamp", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + ) + op.alter_column( + "github_classroom_data", + "points_awarded", + existing_type=sa.VARCHAR(), + type_=sa.Integer(), + existing_nullable=True, + ) + op.alter_column( + "github_classroom_data", + "points_available", + existing_type=sa.VARCHAR(), + type_=sa.Integer(), + existing_nullable=True, + ) op.create_table_comment( - 'github_classroom_data', - 'Table for saving the details about github classroom assignment data', - existing_comment='Table for save the details about github classroom assignment datas', - schema=None - ) - op.alter_column('github_installations', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('github_installations', 'github_ids', - existing_type=postgresql.JSON(astext_type=sa.Text()), - type_=sa.Text(), - comment="Identifiers on the github database, prolly won't be used", - existing_comment="identifiers on the github database, prolly won't be used", - existing_nullable=True) - 
op.alter_column('github_installations', 'permissions_and_events', - existing_type=postgresql.JSON(astext_type=sa.Text()), - type_=sa.Text(), - existing_nullable=True) - op.alter_column('github_installations', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.drop_constraint('github_installations_organisation_fkey', 'github_installations', type_='foreignkey') - op.create_foreign_key(None, 'github_installations', 'community_orgs', ['organisation'], ['name']) - op.alter_column('github_organisations_to_organisations', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('github_organisations_to_organisations', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - comment='Creation date of organization ticket', - existing_comment='creation date of organization ticket', - existing_nullable=True) - op.alter_column('issue_contributors', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('issue_contributors', 'contributor_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - nullable=True) - op.alter_column('issue_contributors', 'issue_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('issue_contributors', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('issue_contributors', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.drop_constraint('unique_issue_id_contributors', 'issue_contributors', type_='unique') - op.drop_constraint('issue_contributors_contributor_id_fkey', 'issue_contributors', type_='foreignkey') - op.create_foreign_key(None, 'issue_contributors', 'contributors_registration', ['contributor_id'], ['id']) - op.create_foreign_key(None, 'issue_contributors', 'role_master', ['role'], ['id']) - op.alter_column('issue_mentors', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)")) - op.alter_column('issue_mentors', 'issue_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - nullable=True) - op.alter_column('issue_mentors', 'angel_mentor_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=True) - op.drop_constraint('unique_issue_id_mentors', 'issue_mentors', type_='unique') - op.alter_column('issues', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('issues_id_seq'::regclass)")) - op.drop_constraint('issues_org_id_fkey', 'issues', type_='foreignkey') - op.create_foreign_key(None, 'issues', 'community_orgs', ['org_id'], ['id']) - op.alter_column('mentor_details', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('mentor_details_id_seq'::regclass)")) - op.alter_column('mentor_not_added', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - 
op.add_column('mentorship_program_site_structure', sa.Column('product_id', sa.BigInteger(), nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('project_id', sa.BigInteger(), nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('contributor_id', sa.BigInteger(), nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('website_directory_label', sa.Text(), nullable=True)) - op.alter_column('mentorship_program_site_structure', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.drop_constraint('mentorship_program_site_structure_project_fkey', 'mentorship_program_site_structure', type_='foreignkey') - op.drop_constraint('mentorship_program_site_structure_product_fkey', 'mentorship_program_site_structure', type_='foreignkey') - op.drop_constraint('mentorship_program_site_structure_contributor_fkey', 'mentorship_program_site_structure', type_='foreignkey') - op.create_foreign_key(None, 'mentorship_program_site_structure', 'product', ['product_id'], ['id']) + "github_classroom_data", + "Table for saving the details about github classroom assignment data", + existing_comment="Table for save the details about github classroom assignment datas", + schema=None, + ) + op.alter_column( + "github_installations", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "github_installations", + "github_ids", + existing_type=postgresql.JSON(astext_type=sa.Text()), + type_=sa.Text(), + comment="Identifiers on the github database, prolly won't be used", + existing_comment="identifiers on the github database, prolly won't be used", + existing_nullable=True, + ) + op.alter_column( + "github_installations", + "permissions_and_events", + existing_type=postgresql.JSON(astext_type=sa.Text()), + type_=sa.Text(), + existing_nullable=True, + ) + op.alter_column( + "github_installations", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.drop_constraint( + "github_installations_organisation_fkey", + "github_installations", + type_="foreignkey", + ) + op.create_foreign_key( + None, "github_installations", "community_orgs", ["organisation"], ["name"] + ) + op.alter_column( + "github_organisations_to_organisations", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "github_organisations_to_organisations", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + comment="Creation date of organization ticket", + existing_comment="creation date of organization ticket", + existing_nullable=True, + ) + op.alter_column( + "issue_contributors", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "issue_contributors", + "contributor_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + nullable=True, + ) + op.alter_column( + "issue_contributors", + "issue_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + ) + op.alter_column( + "issue_contributors", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "issue_contributors", + 
"updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.drop_constraint( + "unique_issue_id_contributors", "issue_contributors", type_="unique" + ) + op.drop_constraint( + "issue_contributors_contributor_id_fkey", + "issue_contributors", + type_="foreignkey", + ) + op.create_foreign_key( + None, + "issue_contributors", + "contributors_registration", + ["contributor_id"], + ["id"], + ) + op.create_foreign_key(None, "issue_contributors", "role_master", ["role"], ["id"]) + op.alter_column( + "issue_mentors", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)"), + ) + op.alter_column( + "issue_mentors", + "issue_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + nullable=True, + ) + op.alter_column( + "issue_mentors", + "angel_mentor_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=True, + ) + op.drop_constraint("unique_issue_id_mentors", "issue_mentors", type_="unique") + op.alter_column( + "issues", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('issues_id_seq'::regclass)"), + ) + op.drop_constraint("issues_org_id_fkey", "issues", type_="foreignkey") + op.create_foreign_key(None, "issues", "community_orgs", ["org_id"], ["id"]) + op.alter_column( + "mentor_details", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('mentor_details_id_seq'::regclass)"), + ) + op.alter_column( + "mentor_not_added", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column("product_id", sa.BigInteger(), nullable=True), + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column("project_id", sa.BigInteger(), nullable=True), + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column("contributor_id", sa.BigInteger(), nullable=True), + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column("website_directory_label", sa.Text(), nullable=True), + ) + op.alter_column( + "mentorship_program_site_structure", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.drop_constraint( + "mentorship_program_site_structure_project_fkey", + "mentorship_program_site_structure", + type_="foreignkey", + ) + op.drop_constraint( + "mentorship_program_site_structure_product_fkey", + "mentorship_program_site_structure", + type_="foreignkey", + ) + op.drop_constraint( + "mentorship_program_site_structure_contributor_fkey", + "mentorship_program_site_structure", + type_="foreignkey", + ) + op.create_foreign_key( + None, "mentorship_program_site_structure", "product", ["product_id"], ["id"] + ) op.drop_table_comment( - 'mentorship_program_site_structure', - existing_comment='a mapping for the milestones website structure', - schema=None - ) - op.drop_column('mentorship_program_site_structure', 'project') - op.drop_column('mentorship_program_site_structure', 'product') - op.drop_column('mentorship_program_site_structure', 'website directory_label') - op.drop_column('mentorship_program_site_structure', 
'contributor') - op.alter_column('mentorship_program_website_comments', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_comments', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_commits', 'date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_commits', 'files', - existing_type=postgresql.JSON(astext_type=sa.Text()), - type_=sa.Text(), - existing_nullable=True) - op.add_column('mentorship_program_website_has_updated', sa.Column('project_id', sa.BigInteger(), nullable=True)) - op.alter_column('mentorship_program_website_has_updated', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('mentorship_program_website_has_updated', 'week1_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week2_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week3_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week4_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week5_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week6_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week7_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week8_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week9_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.drop_constraint('mentorship_program_website_has_updated_project_fkey', 'mentorship_program_website_has_updated', type_='foreignkey') - op.drop_column('mentorship_program_website_has_updated', 'project') - op.alter_column('point_system', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('point_transactions', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('point_transactions', 'user_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=True) - op.alter_column('point_transactions', 'issue_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('point_transactions', 'created_at', - 
existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('point_transactions', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('point_transactions', 'angel_mentor_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=True) - op.drop_constraint('point_transactions_user_id_fkey', 'point_transactions', type_='foreignkey') - op.create_foreign_key(None, 'point_transactions', 'mentor_details', ['angel_mentor_id'], ['id']) - op.create_foreign_key(None, 'point_transactions', 'contributors_registration', ['user_id'], ['id']) - op.alter_column('points_mapping', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('points_mapping', 'role', - existing_type=sa.TEXT(), - type_=sa.String(length=50), - nullable=False) - op.alter_column('points_mapping', 'complexity', - existing_type=sa.TEXT(), - type_=sa.String(length=50), - nullable=False) - op.alter_column('points_mapping', 'points', - existing_type=sa.INTEGER(), - nullable=False) - op.alter_column('points_mapping', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('points_mapping', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('pr_history', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('pr_history_id_seq'::regclass)")) - op.alter_column('pr_history', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('pr_history', 'raised_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False) - op.alter_column('pr_history', 'pr_id', - existing_type=sa.BIGINT(), - comment=None, - existing_comment='github id of the pr', - existing_nullable=False) + "mentorship_program_site_structure", + existing_comment="a mapping for the milestones website structure", + schema=None, + ) + op.drop_column("mentorship_program_site_structure", "project") + op.drop_column("mentorship_program_site_structure", "product") + op.drop_column("mentorship_program_site_structure", "website directory_label") + op.drop_column("mentorship_program_site_structure", "contributor") + op.alter_column( + "mentorship_program_website_comments", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_comments", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_commits", + "date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_commits", + "files", + existing_type=postgresql.JSON(astext_type=sa.Text()), + 
type_=sa.Text(), + existing_nullable=True, + ) + op.add_column( + "mentorship_program_website_has_updated", + sa.Column("project_id", sa.BigInteger(), nullable=True), + ) + op.alter_column( + "mentorship_program_website_has_updated", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week1_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week2_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week3_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week4_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week5_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week6_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week7_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week8_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week9_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.drop_constraint( + "mentorship_program_website_has_updated_project_fkey", + "mentorship_program_website_has_updated", + type_="foreignkey", + ) + op.drop_column("mentorship_program_website_has_updated", "project") + op.alter_column( + "point_system", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "point_transactions", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "point_transactions", + "user_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=True, + ) + op.alter_column( + "point_transactions", + "issue_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + ) + op.alter_column( + "point_transactions", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + nullable=False, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "point_transactions", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + nullable=False, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "point_transactions", + "angel_mentor_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=True, + ) + 
op.drop_constraint( + "point_transactions_user_id_fkey", "point_transactions", type_="foreignkey" + ) + op.create_foreign_key( + None, "point_transactions", "mentor_details", ["angel_mentor_id"], ["id"] + ) + op.create_foreign_key( + None, "point_transactions", "contributors_registration", ["user_id"], ["id"] + ) + op.alter_column( + "points_mapping", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "points_mapping", + "role", + existing_type=sa.TEXT(), + type_=sa.String(length=50), + nullable=False, + ) + op.alter_column( + "points_mapping", + "complexity", + existing_type=sa.TEXT(), + type_=sa.String(length=50), + nullable=False, + ) + op.alter_column( + "points_mapping", "points", existing_type=sa.INTEGER(), nullable=False + ) + op.alter_column( + "points_mapping", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "points_mapping", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "pr_history", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('pr_history_id_seq'::regclass)"), + ) + op.alter_column( + "pr_history", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "pr_history", + "raised_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + ) + op.alter_column( + "pr_history", + "pr_id", + existing_type=sa.BIGINT(), + comment=None, + existing_comment="github id of the pr", + existing_nullable=False, + ) op.drop_table_comment( - 'pr_history', - existing_comment='Holds records of pr webhooks', - schema=None - ) - op.drop_column('pr_history', 'points') - op.alter_column('pr_staging', 'id', - existing_type=sa.UUID(), - type_=sa.String(length=36), - existing_nullable=False, - existing_server_default=sa.text('gen_random_uuid()')) - op.alter_column('pr_staging', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('pr_staging', 'raised_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False) - op.alter_column('pr_staging', 'pr_id', - existing_type=sa.BIGINT(), - comment=None, - existing_comment='github id of the pr', - existing_nullable=False) + "pr_history", existing_comment="Holds records of pr webhooks", schema=None + ) + op.drop_column("pr_history", "points") + op.alter_column( + "pr_staging", + "id", + existing_type=sa.UUID(), + type_=sa.String(length=36), + existing_nullable=False, + existing_server_default=sa.text("gen_random_uuid()"), + ) + op.alter_column( + "pr_staging", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "pr_staging", + "raised_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + 
) + op.alter_column( + "pr_staging", + "pr_id", + existing_type=sa.BIGINT(), + comment=None, + existing_comment="github id of the pr", + existing_nullable=False, + ) op.drop_table_comment( - 'pr_staging', - existing_comment='This is a duplicate of connected_prs', - schema=None - ) - op.add_column('product', sa.Column('channel_id', sa.BigInteger(), nullable=True)) - op.alter_column('product', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('product', 'description', - existing_type=sa.TEXT(), - comment=None, - existing_comment='URL to the product entry on C4GT wiki', - existing_nullable=True, - existing_server_default=sa.text("''::text")) - op.drop_constraint('product_channel_fkey', 'product', type_='foreignkey') - op.create_foreign_key(None, 'product', 'discord_channels', ['channel_id'], ['channel_id']) + "pr_staging", + existing_comment="This is a duplicate of connected_prs", + schema=None, + ) + op.add_column("product", sa.Column("channel_id", sa.BigInteger(), nullable=True)) + op.alter_column( + "product", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "product", + "description", + existing_type=sa.TEXT(), + comment=None, + existing_comment="URL to the product entry on C4GT wiki", + existing_nullable=True, + existing_server_default=sa.text("''::text"), + ) + op.drop_constraint("product_channel_fkey", "product", type_="foreignkey") + op.create_foreign_key( + None, "product", "discord_channels", ["channel_id"], ["channel_id"] + ) op.drop_table_comment( - 'product', + "product", existing_comment="A table containing all 'Products' in C4GT 2023", - schema=None - ) - op.drop_column('product', 'channel') - op.alter_column('ticket_comments', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('ticket_comments', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('unlisted_tickets', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('unlisted_tickets', 'ticket_points', - existing_type=sa.SMALLINT(), - comment=None, - existing_comment='How many points the ticket is worth', - existing_nullable=True, - existing_server_default=sa.text("'0'::smallint")) - op.alter_column('unlisted_tickets', 'index', - existing_type=sa.SMALLINT(), - server_default=None, - existing_nullable=False) - op.alter_column('unlisted_tickets', 'uuid', - existing_type=sa.UUID(), - type_=sa.String(length=36), - existing_nullable=False, - existing_server_default=sa.text('gen_random_uuid()')) - op.create_unique_constraint(None, 'unlisted_tickets', ['uuid', 'issue_id']) - op.add_column('user_activity', sa.Column('contributor_id', sa.BigInteger(), nullable=False)) - op.alter_column('user_activity', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('user_activity', 'issue_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('user_activity', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - 
op.alter_column('user_activity', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_activity', 'mentor_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=True) - op.drop_constraint('user_activity_user_id_fkey', 'user_activity', type_='foreignkey') - op.drop_constraint('user_activity_mentor_id_fkey', 'user_activity', type_='foreignkey') - op.create_foreign_key(None, 'user_activity', 'contributors_registration', ['contributor_id'], ['id']) - op.create_foreign_key(None, 'user_activity', 'mentor_details', ['mentor_id'], ['id']) - op.drop_column('user_activity', 'user_id') - op.alter_column('user_badges', 'id', - existing_type=sa.INTEGER(), - type_=sa.UUID(), - existing_nullable=False) - op.alter_column('user_badges', 'user_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('user_badges', 'badge_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('user_badges', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_badges', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_certificates', 'id', - existing_type=sa.INTEGER(), - type_=sa.UUID(), - existing_nullable=False) - op.alter_column('user_certificates', 'user_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('user_certificates', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_certificates', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('users', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('users_id_seq'::regclass)")) - op.alter_column('users', 'name', - existing_type=sa.TEXT(), - nullable=True) - op.alter_column('users', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('users', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.create_unique_constraint(None, 'users', ['discord']) - op.alter_column('vc_logs', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('vc_logs', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) + schema=None, + ) + op.drop_column("product", "channel") + op.alter_column( + "ticket_comments", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) 
+ op.alter_column( + "ticket_comments", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "unlisted_tickets", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "unlisted_tickets", + "ticket_points", + existing_type=sa.SMALLINT(), + comment=None, + existing_comment="How many points the ticket is worth", + existing_nullable=True, + existing_server_default=sa.text("'0'::smallint"), + ) + op.alter_column( + "unlisted_tickets", + "index", + existing_type=sa.SMALLINT(), + server_default=None, + existing_nullable=False, + ) + op.alter_column( + "unlisted_tickets", + "uuid", + existing_type=sa.UUID(), + type_=sa.String(length=36), + existing_nullable=False, + existing_server_default=sa.text("gen_random_uuid()"), + ) + op.create_unique_constraint(None, "unlisted_tickets", ["uuid", "issue_id"]) + op.add_column( + "user_activity", sa.Column("contributor_id", sa.BigInteger(), nullable=False) + ) + op.alter_column( + "user_activity", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "user_activity", + "issue_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + ) + op.alter_column( + "user_activity", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_activity", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_activity", + "mentor_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=True, + ) + op.drop_constraint( + "user_activity_user_id_fkey", "user_activity", type_="foreignkey" + ) + op.drop_constraint( + "user_activity_mentor_id_fkey", "user_activity", type_="foreignkey" + ) + op.create_foreign_key( + None, "user_activity", "contributors_registration", ["contributor_id"], ["id"] + ) + op.create_foreign_key( + None, "user_activity", "mentor_details", ["mentor_id"], ["id"] + ) + op.drop_column("user_activity", "user_id") + op.alter_column( + "user_badges", + "id", + existing_type=sa.INTEGER(), + type_=sa.UUID(), + existing_nullable=False, + ) + op.alter_column( + "user_badges", + "user_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + ) + op.alter_column( + "user_badges", + "badge_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + ) + op.alter_column( + "user_badges", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_badges", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_certificates", + "id", + existing_type=sa.INTEGER(), + type_=sa.UUID(), + existing_nullable=False, + ) + op.alter_column( + "user_certificates", + "user_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + 
existing_nullable=False, + ) + op.alter_column( + "user_certificates", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_certificates", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "users", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('users_id_seq'::regclass)"), + ) + op.alter_column("users", "name", existing_type=sa.TEXT(), nullable=True) + op.alter_column( + "users", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "users", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.create_unique_constraint(None, "users", ["discord"]) + op.alter_column( + "vc_logs", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "vc_logs", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('vc_logs', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('vc_logs', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_constraint(None, 'users', type_='unique') - op.alter_column('users', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('users', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('users', 'name', - existing_type=sa.TEXT(), - nullable=False) - op.alter_column('users', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('users_id_seq'::regclass)")) - op.alter_column('user_certificates', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_certificates', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_certificates', 'user_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('user_certificates', 'id', - 
existing_type=sa.UUID(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('user_badges', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_badges', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_badges', 'badge_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('user_badges', 'user_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('user_badges', 'id', - existing_type=sa.UUID(), - type_=sa.INTEGER(), - existing_nullable=False) - op.add_column('user_activity', sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False)) - op.drop_constraint(None, 'user_activity', type_='foreignkey') - op.drop_constraint(None, 'user_activity', type_='foreignkey') - op.create_foreign_key('user_activity_mentor_id_fkey', 'user_activity', 'mentor_details', ['mentor_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - op.create_foreign_key('user_activity_user_id_fkey', 'user_activity', 'users', ['user_id'], ['id']) - op.alter_column('user_activity', 'mentor_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=True) - op.alter_column('user_activity', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_activity', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_activity', 'issue_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('user_activity', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.drop_column('user_activity', 'contributor_id') - op.drop_constraint(None, 'unlisted_tickets', type_='unique') - op.alter_column('unlisted_tickets', 'uuid', - existing_type=sa.String(length=36), - type_=sa.UUID(), - existing_nullable=False, - existing_server_default=sa.text('gen_random_uuid()')) - op.alter_column('unlisted_tickets', 'index', - existing_type=sa.SMALLINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1), - existing_nullable=False) - op.alter_column('unlisted_tickets', 'ticket_points', - existing_type=sa.SMALLINT(), - comment='How many points the ticket is worth', - existing_nullable=True, - existing_server_default=sa.text("'0'::smallint")) - op.alter_column('unlisted_tickets', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('ticket_comments', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('ticket_comments', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.add_column('product', sa.Column('channel', sa.BIGINT(), autoincrement=False, nullable=True)) + 
op.alter_column( + "vc_logs", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "vc_logs", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.drop_constraint(None, "users", type_="unique") + op.alter_column( + "users", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "users", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column("users", "name", existing_type=sa.TEXT(), nullable=False) + op.alter_column( + "users", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('users_id_seq'::regclass)"), + ) + op.alter_column( + "user_certificates", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_certificates", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_certificates", + "user_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "user_certificates", + "id", + existing_type=sa.UUID(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "user_badges", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_badges", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_badges", + "badge_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "user_badges", + "user_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "user_badges", + "id", + existing_type=sa.UUID(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.add_column( + "user_activity", + sa.Column("user_id", sa.INTEGER(), autoincrement=False, nullable=False), + ) + op.drop_constraint(None, "user_activity", type_="foreignkey") + op.drop_constraint(None, "user_activity", type_="foreignkey") + op.create_foreign_key( + "user_activity_mentor_id_fkey", + "user_activity", + "mentor_details", + ["mentor_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.create_foreign_key( + "user_activity_user_id_fkey", "user_activity", "users", ["user_id"], ["id"] + ) + op.alter_column( + "user_activity", + "mentor_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=True, + ) + op.alter_column( + "user_activity", + "updated_at", + 
existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_activity", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_activity", + "issue_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "user_activity", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + ) + op.drop_column("user_activity", "contributor_id") + op.drop_constraint(None, "unlisted_tickets", type_="unique") + op.alter_column( + "unlisted_tickets", + "uuid", + existing_type=sa.String(length=36), + type_=sa.UUID(), + existing_nullable=False, + existing_server_default=sa.text("gen_random_uuid()"), + ) + op.alter_column( + "unlisted_tickets", + "index", + existing_type=sa.SMALLINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=32767, + cycle=False, + cache=1, + ), + existing_nullable=False, + ) + op.alter_column( + "unlisted_tickets", + "ticket_points", + existing_type=sa.SMALLINT(), + comment="How many points the ticket is worth", + existing_nullable=True, + existing_server_default=sa.text("'0'::smallint"), + ) + op.alter_column( + "unlisted_tickets", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "ticket_comments", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "ticket_comments", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.add_column( + "product", sa.Column("channel", sa.BIGINT(), autoincrement=False, nullable=True) + ) op.create_table_comment( - 'product', + "product", "A table containing all 'Products' in C4GT 2023", existing_comment=None, - schema=None - ) - op.drop_constraint(None, 'product', type_='foreignkey') - op.create_foreign_key('product_channel_fkey', 'product', 'discord_channels', ['channel'], ['channel_id']) - op.alter_column('product', 'description', - existing_type=sa.TEXT(), - comment='URL to the product entry on C4GT wiki', - existing_nullable=True, - existing_server_default=sa.text("''::text")) - op.alter_column('product', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_column('product', 'channel_id') + schema=None, + ) + op.drop_constraint(None, "product", type_="foreignkey") + op.create_foreign_key( + "product_channel_fkey", + "product", + "discord_channels", + ["channel"], + ["channel_id"], + ) + op.alter_column( + "product", + "description", + existing_type=sa.TEXT(), + comment="URL to the product entry on C4GT wiki", + existing_nullable=True, + existing_server_default=sa.text("''::text"), + ) + op.alter_column( + "product", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + 
existing_nullable=False, + autoincrement=True, + ) + op.drop_column("product", "channel_id") op.create_table_comment( - 'pr_staging', - 'This is a duplicate of connected_prs', + "pr_staging", + "This is a duplicate of connected_prs", existing_comment=None, - schema=None - ) - op.alter_column('pr_staging', 'pr_id', - existing_type=sa.BIGINT(), - comment='github id of the pr', - existing_nullable=False) - op.alter_column('pr_staging', 'raised_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False) - op.alter_column('pr_staging', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('pr_staging', 'id', - existing_type=sa.String(length=36), - type_=sa.UUID(), - existing_nullable=False, - existing_server_default=sa.text('gen_random_uuid()')) - op.add_column('pr_history', sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False)) + schema=None, + ) + op.alter_column( + "pr_staging", + "pr_id", + existing_type=sa.BIGINT(), + comment="github id of the pr", + existing_nullable=False, + ) + op.alter_column( + "pr_staging", + "raised_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + ) + op.alter_column( + "pr_staging", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "pr_staging", + "id", + existing_type=sa.String(length=36), + type_=sa.UUID(), + existing_nullable=False, + existing_server_default=sa.text("gen_random_uuid()"), + ) + op.add_column( + "pr_history", + sa.Column( + "points", + sa.SMALLINT(), + server_default=sa.text("'10'::smallint"), + autoincrement=False, + nullable=False, + ), + ) op.create_table_comment( - 'pr_history', - 'Holds records of pr webhooks', - existing_comment=None, - schema=None - ) - op.alter_column('pr_history', 'pr_id', - existing_type=sa.BIGINT(), - comment='github id of the pr', - existing_nullable=False) - op.alter_column('pr_history', 'raised_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False) - op.alter_column('pr_history', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('pr_history', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('pr_history_id_seq'::regclass)")) - op.alter_column('points_mapping', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('points_mapping', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('points_mapping', 'points', - existing_type=sa.INTEGER(), - nullable=True) - op.alter_column('points_mapping', 'complexity', - existing_type=sa.String(length=50), - type_=sa.TEXT(), - nullable=True) - op.alter_column('points_mapping', 'role', - existing_type=sa.String(length=50), - type_=sa.TEXT(), - nullable=True) 
- op.alter_column('points_mapping', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.drop_constraint(None, 'point_transactions', type_='foreignkey') - op.drop_constraint(None, 'point_transactions', type_='foreignkey') - op.create_foreign_key('point_transactions_user_id_fkey', 'point_transactions', 'contributors_registration', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - op.alter_column('point_transactions', 'angel_mentor_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=True) - op.alter_column('point_transactions', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('point_transactions', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('point_transactions', 'issue_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('point_transactions', 'user_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=True) - op.alter_column('point_transactions', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.alter_column('point_system', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.add_column('mentorship_program_website_has_updated', sa.Column('project', sa.TEXT(), autoincrement=False, nullable=True)) - op.create_foreign_key('mentorship_program_website_has_updated_project_fkey', 'mentorship_program_website_has_updated', '__mentorship_program_projects', ['project'], ['name']) - op.alter_column('mentorship_program_website_has_updated', 'week9_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week8_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week7_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week6_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week5_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week4_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week3_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week2_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - 
op.alter_column('mentorship_program_website_has_updated', 'week1_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_column('mentorship_program_website_has_updated', 'project_id') - op.alter_column('mentorship_program_website_commits', 'files', - existing_type=sa.Text(), - type_=postgresql.JSON(astext_type=sa.Text()), - existing_nullable=True) - op.alter_column('mentorship_program_website_commits', 'date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_comments', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_comments', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.add_column('mentorship_program_site_structure', sa.Column('contributor', sa.TEXT(), autoincrement=False, nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('website directory_label', sa.TEXT(), autoincrement=False, nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('project', sa.TEXT(), autoincrement=False, nullable=True)) + "pr_history", "Holds records of pr webhooks", existing_comment=None, schema=None + ) + op.alter_column( + "pr_history", + "pr_id", + existing_type=sa.BIGINT(), + comment="github id of the pr", + existing_nullable=False, + ) + op.alter_column( + "pr_history", + "raised_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + ) + op.alter_column( + "pr_history", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "pr_history", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('pr_history_id_seq'::regclass)"), + ) + op.alter_column( + "points_mapping", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "points_mapping", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "points_mapping", "points", existing_type=sa.INTEGER(), nullable=True + ) + op.alter_column( + "points_mapping", + "complexity", + existing_type=sa.String(length=50), + type_=sa.TEXT(), + nullable=True, + ) + op.alter_column( + "points_mapping", + "role", + existing_type=sa.String(length=50), + type_=sa.TEXT(), + nullable=True, + ) + op.alter_column( + "points_mapping", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + ) + op.drop_constraint(None, 
"point_transactions", type_="foreignkey") + op.drop_constraint(None, "point_transactions", type_="foreignkey") + op.create_foreign_key( + "point_transactions_user_id_fkey", + "point_transactions", + "contributors_registration", + ["user_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.alter_column( + "point_transactions", + "angel_mentor_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=True, + ) + op.alter_column( + "point_transactions", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "point_transactions", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "point_transactions", + "issue_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "point_transactions", + "user_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=True, + ) + op.alter_column( + "point_transactions", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "point_system", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.add_column( + "mentorship_program_website_has_updated", + sa.Column("project", sa.TEXT(), autoincrement=False, nullable=True), + ) + op.create_foreign_key( + "mentorship_program_website_has_updated_project_fkey", + "mentorship_program_website_has_updated", + "__mentorship_program_projects", + ["project"], + ["name"], + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week9_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week8_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week7_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week6_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week5_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week4_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week3_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week2_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + 
"mentorship_program_website_has_updated", + "week1_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.drop_column("mentorship_program_website_has_updated", "project_id") + op.alter_column( + "mentorship_program_website_commits", + "files", + existing_type=sa.Text(), + type_=postgresql.JSON(astext_type=sa.Text()), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_commits", + "date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_comments", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_comments", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column("contributor", sa.TEXT(), autoincrement=False, nullable=True), + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column( + "website directory_label", sa.TEXT(), autoincrement=False, nullable=True + ), + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column("product", sa.TEXT(), autoincrement=False, nullable=True), + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column("project", sa.TEXT(), autoincrement=False, nullable=True), + ) op.create_table_comment( - 'mentorship_program_site_structure', - 'a mapping for the milestones website structure', + "mentorship_program_site_structure", + "a mapping for the milestones website structure", existing_comment=None, - schema=None - ) - op.drop_constraint(None, 'mentorship_program_site_structure', type_='foreignkey') - op.create_foreign_key('mentorship_program_site_structure_contributor_fkey', 'mentorship_program_site_structure', '__mentorship_program_selected_contributors', ['contributor'], ['name']) - op.create_foreign_key('mentorship_program_site_structure_product_fkey', 'mentorship_program_site_structure', 'product', ['product'], ['name']) - op.create_foreign_key('mentorship_program_site_structure_project_fkey', 'mentorship_program_site_structure', '__mentorship_program_projects', ['project'], ['name']) - op.alter_column('mentorship_program_site_structure', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_column('mentorship_program_site_structure', 'website_directory_label') - op.drop_column('mentorship_program_site_structure', 'contributor_id') - op.drop_column('mentorship_program_site_structure', 'project_id') - op.drop_column('mentorship_program_site_structure', 'product_id') - op.alter_column('mentor_not_added', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=True, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.alter_column('mentor_details', 'id', - existing_type=sa.BigInteger(), 
- type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('mentor_details_id_seq'::regclass)")) - op.drop_constraint(None, 'issues', type_='foreignkey') - op.create_foreign_key('issues_org_id_fkey', 'issues', 'community_orgs', ['org_id'], ['id'], onupdate='CASCADE', ondelete='SET NULL') - op.alter_column('issues', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('issues_id_seq'::regclass)")) - op.create_unique_constraint('unique_issue_id_mentors', 'issue_mentors', ['issue_id']) - op.alter_column('issue_mentors', 'angel_mentor_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=True) - op.alter_column('issue_mentors', 'issue_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - nullable=False) - op.alter_column('issue_mentors', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)")) - op.drop_constraint(None, 'issue_contributors', type_='foreignkey') - op.drop_constraint(None, 'issue_contributors', type_='foreignkey') - op.create_foreign_key('issue_contributors_contributor_id_fkey', 'issue_contributors', 'contributors_registration', ['contributor_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - op.create_unique_constraint('unique_issue_id_contributors', 'issue_contributors', ['issue_id']) - op.alter_column('issue_contributors', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('issue_contributors', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('issue_contributors', 'issue_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('issue_contributors', 'contributor_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - nullable=False) - op.alter_column('issue_contributors', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.alter_column('github_organisations_to_organisations', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - comment='creation date of organization ticket', - existing_comment='Creation date of organization ticket', - existing_nullable=True) - op.alter_column('github_organisations_to_organisations', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_constraint(None, 'github_installations', type_='foreignkey') - op.create_foreign_key('github_installations_organisation_fkey', 'github_installations', '__community_organisations', ['organisation'], ['name'], onupdate='CASCADE') - op.alter_column('github_installations', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('github_installations', 'permissions_and_events', - existing_type=sa.Text(), - type_=postgresql.JSON(astext_type=sa.Text()), - existing_nullable=True) - 
op.alter_column('github_installations', 'github_ids', - existing_type=sa.Text(), - type_=postgresql.JSON(astext_type=sa.Text()), - comment="identifiers on the github database, prolly won't be used", - existing_comment="Identifiers on the github database, prolly won't be used", - existing_nullable=True) - op.alter_column('github_installations', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) + schema=None, + ) + op.drop_constraint(None, "mentorship_program_site_structure", type_="foreignkey") + op.create_foreign_key( + "mentorship_program_site_structure_contributor_fkey", + "mentorship_program_site_structure", + "__mentorship_program_selected_contributors", + ["contributor"], + ["name"], + ) + op.create_foreign_key( + "mentorship_program_site_structure_product_fkey", + "mentorship_program_site_structure", + "product", + ["product"], + ["name"], + ) + op.create_foreign_key( + "mentorship_program_site_structure_project_fkey", + "mentorship_program_site_structure", + "__mentorship_program_projects", + ["project"], + ["name"], + ) + op.alter_column( + "mentorship_program_site_structure", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.drop_column("mentorship_program_site_structure", "website_directory_label") + op.drop_column("mentorship_program_site_structure", "contributor_id") + op.drop_column("mentorship_program_site_structure", "project_id") + op.drop_column("mentorship_program_site_structure", "product_id") + op.alter_column( + "mentor_not_added", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=True, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "mentor_details", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('mentor_details_id_seq'::regclass)"), + ) + op.drop_constraint(None, "issues", type_="foreignkey") + op.create_foreign_key( + "issues_org_id_fkey", + "issues", + "community_orgs", + ["org_id"], + ["id"], + onupdate="CASCADE", + ondelete="SET NULL", + ) + op.alter_column( + "issues", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('issues_id_seq'::regclass)"), + ) + op.create_unique_constraint( + "unique_issue_id_mentors", "issue_mentors", ["issue_id"] + ) + op.alter_column( + "issue_mentors", + "angel_mentor_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=True, + ) + op.alter_column( + "issue_mentors", + "issue_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + nullable=False, + ) + op.alter_column( + "issue_mentors", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)"), + ) + op.drop_constraint(None, "issue_contributors", type_="foreignkey") + op.drop_constraint(None, "issue_contributors", type_="foreignkey") + op.create_foreign_key( + "issue_contributors_contributor_id_fkey", + 
"issue_contributors", + "contributors_registration", + ["contributor_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.create_unique_constraint( + "unique_issue_id_contributors", "issue_contributors", ["issue_id"] + ) + op.alter_column( + "issue_contributors", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "issue_contributors", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "issue_contributors", + "issue_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "issue_contributors", + "contributor_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + nullable=False, + ) + op.alter_column( + "issue_contributors", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "github_organisations_to_organisations", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + comment="creation date of organization ticket", + existing_comment="Creation date of organization ticket", + existing_nullable=True, + ) + op.alter_column( + "github_organisations_to_organisations", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.drop_constraint(None, "github_installations", type_="foreignkey") + op.create_foreign_key( + "github_installations_organisation_fkey", + "github_installations", + "__community_organisations", + ["organisation"], + ["name"], + onupdate="CASCADE", + ) + op.alter_column( + "github_installations", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "github_installations", + "permissions_and_events", + existing_type=sa.Text(), + type_=postgresql.JSON(astext_type=sa.Text()), + existing_nullable=True, + ) + op.alter_column( + "github_installations", + "github_ids", + existing_type=sa.Text(), + type_=postgresql.JSON(astext_type=sa.Text()), + comment="identifiers on the github database, prolly won't be used", + existing_comment="Identifiers on the github database, prolly won't be used", + existing_nullable=True, + ) + op.alter_column( + "github_installations", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) op.create_table_comment( - 'github_classroom_data', - 'Table for save the details about github classroom assignment datas', - existing_comment='Table for saving the details about github classroom assignment data', - schema=None - ) - op.alter_column('github_classroom_data', 'points_available', - existing_type=sa.Integer(), - type_=sa.VARCHAR(), - existing_nullable=True) - op.alter_column('github_classroom_data', 'points_awarded', - existing_type=sa.Integer(), - type_=sa.VARCHAR(), - existing_nullable=True) - op.alter_column('github_classroom_data', 'submission_timestamp', - 
existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False) - op.alter_column('github_classroom_data', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('github_classroom_data', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.create_foreign_key('dmp_week_updates_dmp_id_fkey', 'dmp_week_updates', 'dmp_issues', ['dmp_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - op.alter_column('dmp_week_updates', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.alter_column('dmp_tickets', 'index', - existing_type=sa.Integer(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1), - type_=sa.SMALLINT(), - existing_nullable=False, - autoincrement=True) - op.alter_column('dmp_tickets', 'ticket_points', - existing_type=sa.Integer(), - type_=sa.SMALLINT(), - existing_comment='How many points the ticket is worth', - existing_nullable=True, - existing_server_default=sa.text("'0'::smallint")) - op.alter_column('dmp_tickets', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.drop_constraint(None, 'dmp_pr_updates', type_='foreignkey') - op.create_foreign_key('dmp_pr_updates_dmp_id_fkey', 'dmp_pr_updates', 'dmp_issues', ['dmp_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - op.create_unique_constraint('dmp_pr_updates_pr_id_key', 'dmp_pr_updates', ['pr_id']) - op.alter_column('dmp_pr_updates', 'closed_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('dmp_pr_updates', 'merged_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('dmp_pr_updates', 'pr_updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('dmp_pr_updates', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.create_unique_constraint('dmp_orgs_id_key', 'dmp_orgs', ['id']) - op.alter_column('dmp_orgs', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('dmp_orgs', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_column('dmp_orgs', 'version') - op.add_column('dmp_issues', sa.Column('repo_owner', sa.TEXT(), autoincrement=False, nullable=True)) - op.drop_constraint(None, 'dmp_issues', type_='foreignkey') - op.create_foreign_key('dmp_issues_org_id_fkey', 'dmp_issues', 'dmp_orgs', ['org_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - 
op.create_unique_constraint('dmp_issues_dmp_id_key', 'dmp_issues', ['id']) - op.alter_column('dmp_issues', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_constraint(None, 'dmp_issue_updates', type_='foreignkey') - op.create_foreign_key('dmp_issue_updates_dmp_id_fkey', 'dmp_issue_updates', 'dmp_issues', ['dmp_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - op.create_unique_constraint('dmp_issue_updates_comment_id_key', 'dmp_issue_updates', ['comment_id']) - op.alter_column('dmp_issue_updates', 'comment_updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('dmp_issue_updates', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.add_column('discord_engagement', sa.Column('rockstarBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True)) - op.add_column('discord_engagement', sa.Column('enthusiastBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True)) - op.add_column('discord_engagement', sa.Column('risingStarBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True)) - op.add_column('discord_engagement', sa.Column('converserBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True)) - op.add_column('discord_engagement', sa.Column('apprenticeBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True)) - op.alter_column('discord_engagement', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('discord_engagement', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_column('discord_engagement', 'risingstarbadge') - op.drop_column('discord_engagement', 'enthusiastbadge') - op.drop_column('discord_engagement', 'rockstarbadge') - op.drop_column('discord_engagement', 'apprenticebadge') - op.drop_column('discord_engagement', 'converserbadge') + "github_classroom_data", + "Table for save the details about github classroom assignment datas", + existing_comment="Table for saving the details about github classroom assignment data", + schema=None, + ) + op.alter_column( + "github_classroom_data", + "points_available", + existing_type=sa.Integer(), + type_=sa.VARCHAR(), + existing_nullable=True, + ) + op.alter_column( + "github_classroom_data", + "points_awarded", + existing_type=sa.Integer(), + type_=sa.VARCHAR(), + existing_nullable=True, + ) + op.alter_column( + "github_classroom_data", + "submission_timestamp", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + ) + op.alter_column( + "github_classroom_data", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "github_classroom_data", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + 
always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.create_foreign_key( + "dmp_week_updates_dmp_id_fkey", + "dmp_week_updates", + "dmp_issues", + ["dmp_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.alter_column( + "dmp_week_updates", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "dmp_tickets", + "index", + existing_type=sa.Integer(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=32767, + cycle=False, + cache=1, + ), + type_=sa.SMALLINT(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "dmp_tickets", + "ticket_points", + existing_type=sa.Integer(), + type_=sa.SMALLINT(), + existing_comment="How many points the ticket is worth", + existing_nullable=True, + existing_server_default=sa.text("'0'::smallint"), + ) + op.alter_column( + "dmp_tickets", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.drop_constraint(None, "dmp_pr_updates", type_="foreignkey") + op.create_foreign_key( + "dmp_pr_updates_dmp_id_fkey", + "dmp_pr_updates", + "dmp_issues", + ["dmp_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.create_unique_constraint("dmp_pr_updates_pr_id_key", "dmp_pr_updates", ["pr_id"]) + op.alter_column( + "dmp_pr_updates", + "closed_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "dmp_pr_updates", + "merged_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "dmp_pr_updates", + "pr_updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "dmp_pr_updates", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.create_unique_constraint("dmp_orgs_id_key", "dmp_orgs", ["id"]) + op.alter_column( + "dmp_orgs", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "dmp_orgs", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.drop_column("dmp_orgs", "version") + op.add_column( + "dmp_issues", + sa.Column("repo_owner", sa.TEXT(), autoincrement=False, nullable=True), + ) + op.drop_constraint(None, "dmp_issues", type_="foreignkey") + op.create_foreign_key( + "dmp_issues_org_id_fkey", + "dmp_issues", + "dmp_orgs", + ["org_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.create_unique_constraint("dmp_issues_dmp_id_key", "dmp_issues", ["id"]) + op.alter_column( + "dmp_issues", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + 
maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.drop_constraint(None, "dmp_issue_updates", type_="foreignkey") + op.create_foreign_key( + "dmp_issue_updates_dmp_id_fkey", + "dmp_issue_updates", + "dmp_issues", + ["dmp_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.create_unique_constraint( + "dmp_issue_updates_comment_id_key", "dmp_issue_updates", ["comment_id"] + ) + op.alter_column( + "dmp_issue_updates", + "comment_updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "dmp_issue_updates", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.add_column( + "discord_engagement", + sa.Column( + "rockstarBadge", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + nullable=True, + ), + ) + op.add_column( + "discord_engagement", + sa.Column( + "enthusiastBadge", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + nullable=True, + ), + ) + op.add_column( + "discord_engagement", + sa.Column( + "risingStarBadge", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + nullable=True, + ), + ) + op.add_column( + "discord_engagement", + sa.Column( + "converserBadge", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + nullable=True, + ), + ) + op.add_column( + "discord_engagement", + sa.Column( + "apprenticeBadge", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + nullable=True, + ), + ) + op.alter_column( + "discord_engagement", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "discord_engagement", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.drop_column("discord_engagement", "risingstarbadge") + op.drop_column("discord_engagement", "enthusiastbadge") + op.drop_column("discord_engagement", "rockstarbadge") + op.drop_column("discord_engagement", "apprenticebadge") + op.drop_column("discord_engagement", "converserbadge") op.create_table_comment( - 'contributors_registration', - 'This is a duplicate of contributors_registration_old', + "contributors_registration", + "This is a duplicate of contributors_registration_old", existing_comment=None, - schema=None - ) - op.alter_column('contributors_registration', 'joined_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('contributors_registration', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.add_column('contributors_discord', sa.Column('experience', sa.TEXT(), autoincrement=False, nullable=True)) - op.add_column('contributors_discord', sa.Column('country', sa.TEXT(), autoincrement=False, nullable=True)) - op.add_column('contributors_discord', sa.Column('city', sa.TEXT(), autoincrement=False, 
nullable=True)) - op.alter_column('contributors_discord', 'joined_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('contributors_discord', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.add_column('contributor_points', sa.Column('user_id', sa.BIGINT(), autoincrement=False, nullable=True)) - op.drop_constraint(None, 'contributor_points', type_='foreignkey') - op.create_foreign_key('contributor_points_contributors_id_fkey', 'contributor_points', 'contributors_registration', ['user_id'], ['id']) - op.drop_column('contributor_points', 'contributors_id') - op.alter_column('contributor_names', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.alter_column('connected_prs', 'merged_at', - existing_type=sa.Text(), - type_=postgresql.TIMESTAMP(), - existing_nullable=True) - op.alter_column('connected_prs', 'raised_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False) - op.alter_column('connected_prs', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('community_orgs', 'name', - existing_type=sa.TEXT(), - nullable=False) - op.alter_column('chapters', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('chapters', 'discord_role_id', - existing_type=sa.BIGINT(), - nullable=False, - comment='db od of the corresponding member role in discord server', - existing_comment='db id of the corresponding member role in discord server') - op.alter_column('chapters', 'org_name', - existing_type=sa.TEXT(), - nullable=False) - op.alter_column('ccbp_tickets', 'closed_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_comment='date-time at which issue was closed', - existing_nullable=True) - op.alter_column('ccbp_tickets', 'index', - existing_type=sa.SMALLINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.alter_column('ccbp_tickets', 'issue_id', - existing_type=sa.BIGINT(), - nullable=False) - op.alter_column('ccbp_tickets', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('badges', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('badges', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('badges', 'id', - existing_type=sa.UUID(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('app_comments', 'issue_id', - existing_type=sa.BIGINT(), - nullable=False) - 
op.alter_column('app_comments', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('app_comments', 'id', - existing_type=sa.BigInteger(), - type_=sa.UUID(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text('gen_random_uuid()')) - op.create_table('unstructured discord data', - sa.Column('text', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('author', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('channel', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('channel_name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('uuid', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), - sa.Column('author_name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('author_roles', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('sent_at', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('uuid', name='unstructured discord data_duplicate_pkey'), - sa.UniqueConstraint('uuid', name='unstructured discord data_duplicate_uuid_key') - ) - op.create_table('__community_program_product_wise_tickets', - sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True), - sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('gh_organisation', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repository name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='community_program_tickets_duplicate_pkey') - ) - op.create_table('__mentorship_program_selected_contributors', - sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('github_username', sa.TEXT(), autoincrement=False, 
nullable=True), - sa.Column('project_name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='mentorship_program_selected_contributors_pkey'), - sa.UniqueConstraint('name', name='mentorship_program_selected_contributors_name_key'), - comment='List of contributors selected for C4GT Mentorship Program 2023' - ) - op.create_table('__community_organisations', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='organisations_pkey'), - sa.UniqueConstraint('name', name='organisations_name_key'), - postgresql_ignore_search_path=False - ) - op.create_table('__community_program_tickets', - sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='community_program_tickets_pkey') - ) - op.create_table('__pull_requests', - sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True), - sa.Column('api_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('raised_by', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('raised_at', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('is_merged', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('merged_by', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('merged_at', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, 
nullable=True), - sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False, comment='github id of the pr'), - sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False), - sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='pull_requests_pkey1'), - sa.UniqueConstraint('html_url', name='pull_requests_html_url_key'), - sa.UniqueConstraint('pr_id', name='pull_requests_pr_id_key') - ) - op.create_table('contributors_registration_old', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False), - sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='contributors_registration_duplicate_pkey'), - sa.UniqueConstraint('discord_id', name='contributors_registration_duplicate_discord_id_key'), - sa.UniqueConstraint('github_id', name='contributors_registration_duplicate_github_id_key') - ) - op.create_table('__dev_onboarding', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repos', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='Onboarding_Dev_pkey'), - sa.UniqueConstraint('organisation', name='Onboarding_Dev_org_key') - ) - op.create_table('__comments', - sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='comments_pkey') - ) - op.create_table('__mentorship_program_projects', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repository', sa.TEXT(), 
autoincrement=False, nullable=True), - sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('issue_page_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('isssue_api_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repository_api_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['product'], ['product.name'], name='__mentorship_program_projects_product_fkey', ondelete='SET DEFAULT'), - sa.PrimaryKeyConstraint('id', name='projects_pkey'), - sa.UniqueConstraint('name', name='projects_name_key'), - comment='Selected projects under C4GT 2023' - ) - op.create_table('__dashboard_config', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('dashboard', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('starting date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='dashboard _config_pkey') - ) - op.create_table('__applicant', - sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), - sa.Column('sheet_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='applicant_pkey'), - sa.UniqueConstraint('discord_id', name='applicant_discord_id_key') - ) - op.create_table('__contributors_discord', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False), - sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column(' name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('chapter', sa.TEXT(), autoincrement=False, nullable=True, comment="the chapter they're associated with"), - sa.Column('gender', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='__contributors_pkey'), - sa.UniqueConstraint('discord_id', name='__contributors_discord_id_key') - ) - op.create_table('__community_program_unique_user_data', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('ticket_name', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('linked_pr', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('linked_pr_author_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('linked_pr_author_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('is_registered', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('ticket_link', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('linked_pr_link', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('merged', sa.BOOLEAN(), 
autoincrement=False, nullable=True), - sa.Column('state', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='community_program_unique_user_data_pkey') - ) - op.create_table('__mentorship_program_tickets', - sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='mentorship_program_tickets_pkey') - ) - op.create_table('__mentorship_program_pull_request', - sa.Column('pr_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('pr_id', sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column('pr_node_id', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('title', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('raised_by_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('body', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('merged_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('assignees', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('requested_reviewers', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('labels', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('review_comments_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('comments_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repository_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('repository_owner_name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repository_owner_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('repository_url', sa.TEXT(), autoincrement=False, nullable=True), - 
sa.Column('merged', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('number_of_commits', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('lines_of_code_added', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('lines_of_code_removed', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('number_of_files_changed', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('merged_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('linked_ticket', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('pr_id', name='mentorship_program_pull_request_pkey') - ) - op.create_table('__mentorship_program_ticket_comments', - sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='mentorship_program_ticket_comments_pkey') - ) - op.create_table('__mentors', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['organisation'], ['__community_organisations.name'], name='__mentors_organisation_fkey'), - sa.PrimaryKeyConstraint('id', name='mentors_pkey') - ) - op.create_table('__contributors_vc', - sa.Column('github_username', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('certificate_link', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('stats', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('github_username', name='contributors_vc_pkey') - ) - op.drop_table('user_points_mapping') - op.drop_table('unstructured_discord_data') - op.drop_table('role_master') - op.drop_table('leaderboard') - op.drop_table('github_profile_data') + schema=None, + ) + op.alter_column( + "contributors_registration", + "joined_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + 
existing_server_default=sa.text("now()"), + ) + op.alter_column( + "contributors_registration", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.add_column( + "contributors_discord", + sa.Column("experience", sa.TEXT(), autoincrement=False, nullable=True), + ) + op.add_column( + "contributors_discord", + sa.Column("country", sa.TEXT(), autoincrement=False, nullable=True), + ) + op.add_column( + "contributors_discord", + sa.Column("city", sa.TEXT(), autoincrement=False, nullable=True), + ) + op.alter_column( + "contributors_discord", + "joined_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "contributors_discord", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.add_column( + "contributor_points", + sa.Column("user_id", sa.BIGINT(), autoincrement=False, nullable=True), + ) + op.drop_constraint(None, "contributor_points", type_="foreignkey") + op.create_foreign_key( + "contributor_points_contributors_id_fkey", + "contributor_points", + "contributors_registration", + ["user_id"], + ["id"], + ) + op.drop_column("contributor_points", "contributors_id") + op.alter_column( + "contributor_names", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "connected_prs", + "merged_at", + existing_type=sa.Text(), + type_=postgresql.TIMESTAMP(), + existing_nullable=True, + ) + op.alter_column( + "connected_prs", + "raised_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + ) + op.alter_column( + "connected_prs", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column("community_orgs", "name", existing_type=sa.TEXT(), nullable=False) + op.alter_column( + "chapters", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "chapters", + "discord_role_id", + existing_type=sa.BIGINT(), + nullable=False, + comment="db od of the corresponding member role in discord server", + existing_comment="db id of the corresponding member role in discord server", + ) + op.alter_column("chapters", "org_name", existing_type=sa.TEXT(), nullable=False) + op.alter_column( + "ccbp_tickets", + "closed_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_comment="date-time at which issue was closed", + existing_nullable=True, + ) + op.alter_column( + "ccbp_tickets", + "index", + existing_type=sa.SMALLINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=32767, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "ccbp_tickets", "issue_id", existing_type=sa.BIGINT(), 
nullable=False + ) + op.alter_column( + "ccbp_tickets", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "badges", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "badges", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "badges", + "id", + existing_type=sa.UUID(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "app_comments", "issue_id", existing_type=sa.BIGINT(), nullable=False + ) + op.alter_column( + "app_comments", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "app_comments", + "id", + existing_type=sa.BigInteger(), + type_=sa.UUID(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("gen_random_uuid()"), + ) + op.create_table( + "unstructured discord data", + sa.Column("text", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("author", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("channel", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("channel_name", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "uuid", + sa.UUID(), + server_default=sa.text("gen_random_uuid()"), + autoincrement=False, + nullable=False, + ), + sa.Column("author_name", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "author_roles", + postgresql.ARRAY(sa.TEXT()), + autoincrement=False, + nullable=True, + ), + sa.Column("sent_at", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint( + "uuid", name="unstructured discord data_duplicate_pkey" + ), + sa.UniqueConstraint( + "uuid", name="unstructured discord data_duplicate_uuid_key" + ), + ) + op.create_table( + "__community_program_product_wise_tickets", + sa.Column("url", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column( + "repository_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "comments_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "events_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "html_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column( + "node_id", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column("title", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column( + "raised_by_username", + sa.VARCHAR(length=255), + autoincrement=False, + nullable=True, + ), + sa.Column("raised_by_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("labels", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("status", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column("assignees", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "number_of_comments", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column( + "created_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + 
), + sa.Column( + "updated_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "closed_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "community_label", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + nullable=True, + ), + sa.Column("product", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("gh_organisation", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("repository name", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("organisation", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="community_program_tickets_duplicate_pkey"), + ) + op.create_table( + "__mentorship_program_selected_contributors", + sa.Column( + "id", + sa.UUID(), + server_default=sa.text("gen_random_uuid()"), + autoincrement=False, + nullable=False, + ), + sa.Column("name", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("github_username", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("project_name", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("ticket_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint( + "id", name="mentorship_program_selected_contributors_pkey" + ), + sa.UniqueConstraint( + "name", name="mentorship_program_selected_contributors_name_key" + ), + comment="List of contributors selected for C4GT Mentorship Program 2023", + ) + op.create_table( + "__community_organisations", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("name", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="organisations_pkey"), + sa.UniqueConstraint("name", name="organisations_name_key"), + postgresql_ignore_search_path=False, + ) + op.create_table( + "__community_program_tickets", + sa.Column("url", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column( + "repository_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "comments_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "events_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "html_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column( + "node_id", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column("title", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column( + "raised_by_username", + sa.VARCHAR(length=255), + autoincrement=False, + nullable=True, + ), + sa.Column("raised_by_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("labels", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("status", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column("assignees", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "number_of_comments", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column( + "created_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "updated_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "closed_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "community_label", + sa.BOOLEAN(), + 
server_default=sa.text("false"), + autoincrement=False, + nullable=True, + ), + sa.PrimaryKeyConstraint("id", name="community_program_tickets_pkey"), + ) + op.create_table( + "__pull_requests", + sa.Column( + "id", + sa.UUID(), + server_default=sa.text("gen_random_uuid()"), + autoincrement=False, + nullable=False, + ), + sa.Column( + "created_at", + postgresql.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + autoincrement=False, + nullable=True, + ), + sa.Column("api_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("html_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("raised_by", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("raised_at", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("raised_by_username", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("status", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("is_merged", sa.BOOLEAN(), autoincrement=False, nullable=True), + sa.Column("merged_by", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("merged_at", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("merged_by_username", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "pr_id", + sa.BIGINT(), + autoincrement=False, + nullable=False, + comment="github id of the pr", + ), + sa.Column( + "points", + sa.SMALLINT(), + server_default=sa.text("'10'::smallint"), + autoincrement=False, + nullable=False, + ), + sa.Column("ticket_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="pull_requests_pkey1"), + sa.UniqueConstraint("html_url", name="pull_requests_html_url_key"), + sa.UniqueConstraint("pr_id", name="pull_requests_pr_id_key"), + ) + op.create_table( + "contributors_registration_old", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("discord_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("github_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("github_url", sa.VARCHAR(), autoincrement=False, nullable=False), + sa.Column("discord_username", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column( + "joined_at", + postgresql.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.Column("email", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("name", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="contributors_registration_duplicate_pkey"), + sa.UniqueConstraint( + "discord_id", name="contributors_registration_duplicate_discord_id_key" + ), + sa.UniqueConstraint( + "github_id", name="contributors_registration_duplicate_github_id_key" + ), + ) + op.create_table( + "__dev_onboarding", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("organisation", sa.TEXT(), autoincrement=False, nullable=False), + sa.Column("email", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "repos", + postgresql.JSON(astext_type=sa.Text()), + autoincrement=False, + nullable=True, + ), + sa.PrimaryKeyConstraint("id", name="Onboarding_Dev_pkey"), + sa.UniqueConstraint("organisation", name="Onboarding_Dev_org_key"), + ) + 
op.create_table( + "__comments", + sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("html_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("issue_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("node_id", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("commented_by", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("commented_by_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column( + "created_at", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.Column( + "updated_at", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.Column("content", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("reactions_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="comments_pkey"), + ) + op.create_table( + "__mentorship_program_projects", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("name", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("description", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("repository", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("product", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("issue_page_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("isssue_api_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("repository_api_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint( + ["product"], + ["product.name"], + name="__mentorship_program_projects_product_fkey", + ondelete="SET DEFAULT", + ), + sa.PrimaryKeyConstraint("id", name="projects_pkey"), + sa.UniqueConstraint("name", name="projects_name_key"), + comment="Selected projects under C4GT 2023", + ) + op.create_table( + "__dashboard_config", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("dashboard", sa.TEXT(), autoincrement=False, nullable=False), + sa.Column( + "starting date", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.PrimaryKeyConstraint("id", name="dashboard _config_pkey"), + ) + op.create_table( + "__applicant", + sa.Column( + "id", + sa.UUID(), + server_default=sa.text("gen_random_uuid()"), + autoincrement=False, + nullable=False, + ), + sa.Column("sheet_username", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("discord_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="applicant_pkey"), + sa.UniqueConstraint("discord_id", name="applicant_discord_id_key"), + ) + op.create_table( + "__contributors_discord", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("discord_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("github_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("github_url", sa.VARCHAR(), autoincrement=False, nullable=True), + 
sa.Column("discord_username", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column( + "joined_at", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=False, + ), + sa.Column("email", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column(" name", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "chapter", + sa.TEXT(), + autoincrement=False, + nullable=True, + comment="the chapter they're associated with", + ), + sa.Column("gender", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="__contributors_pkey"), + sa.UniqueConstraint("discord_id", name="__contributors_discord_id_key"), + ) + op.create_table( + "__community_program_unique_user_data", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("ticket_name", sa.TEXT(), autoincrement=False, nullable=False), + sa.Column("status", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("linked_pr", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column( + "linked_pr_author_id", sa.BIGINT(), autoincrement=False, nullable=True + ), + sa.Column( + "linked_pr_author_username", sa.TEXT(), autoincrement=False, nullable=True + ), + sa.Column("is_registered", sa.BOOLEAN(), autoincrement=False, nullable=True), + sa.Column("ticket_link", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("linked_pr_link", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("merged", sa.BOOLEAN(), autoincrement=False, nullable=True), + sa.Column("state", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="community_program_unique_user_data_pkey"), + ) + op.create_table( + "__mentorship_program_tickets", + sa.Column("url", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column( + "repository_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "comments_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "events_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "html_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column( + "node_id", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column("title", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column( + "raised_by_username", + sa.VARCHAR(length=255), + autoincrement=False, + nullable=True, + ), + sa.Column("raised_by_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("labels", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("status", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column("assignees", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "number_of_comments", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column( + "created_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "updated_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "closed_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.PrimaryKeyConstraint("id", name="mentorship_program_tickets_pkey"), + ) + op.create_table( + "__mentorship_program_pull_request", + sa.Column("pr_url", sa.TEXT(), autoincrement=False, nullable=True), 
+ sa.Column("pr_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("pr_node_id", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("html_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("status", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("title", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("raised_by_username", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("raised_by_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column("body", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "created_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "updated_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "closed_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "merged_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "assignees", postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True + ), + sa.Column( + "requested_reviewers", + postgresql.ARRAY(sa.TEXT()), + autoincrement=False, + nullable=True, + ), + sa.Column( + "labels", postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True + ), + sa.Column("review_comments_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("comments_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("repository_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column( + "repository_owner_name", sa.TEXT(), autoincrement=False, nullable=True + ), + sa.Column( + "repository_owner_id", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column("repository_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("merged", sa.BOOLEAN(), autoincrement=False, nullable=True), + sa.Column( + "number_of_commits", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column( + "number_of_comments", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column( + "lines_of_code_added", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column( + "lines_of_code_removed", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column( + "number_of_files_changed", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column("merged_by_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("merged_by_username", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("linked_ticket", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("pr_id", name="mentorship_program_pull_request_pkey"), + ) + op.create_table( + "__mentorship_program_ticket_comments", + sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("html_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("issue_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("node_id", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("commented_by", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("commented_by_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column( + "created_at", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.Column( + "updated_at", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.Column("content", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("reactions_url", sa.TEXT(), autoincrement=False, 
nullable=True), + sa.PrimaryKeyConstraint("id", name="mentorship_program_ticket_comments_pkey"), + ) + op.create_table( + "__mentors", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("discord_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("github_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("github_url", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("discord_username", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("organisation", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("name", sa.TEXT(), autoincrement=False, nullable=False), + sa.Column("email", sa.TEXT(), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint( + ["organisation"], + ["__community_organisations.name"], + name="__mentors_organisation_fkey", + ), + sa.PrimaryKeyConstraint("id", name="mentors_pkey"), + ) + op.create_table( + "__contributors_vc", + sa.Column("github_username", sa.TEXT(), autoincrement=False, nullable=False), + sa.Column("discord_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("certificate_link", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("stats", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("github_username", name="contributors_vc_pkey"), + ) + op.drop_table("user_points_mapping") + op.drop_table("unstructured_discord_data") + op.drop_table("role_master") + op.drop_table("leaderboard") + op.drop_table("github_profile_data") # ### end Alembic commands ### diff --git a/migrations/versions/__pycache__/8d1e6a7e959a_initial_migration.cpython-310.pyc b/migrations/versions/__pycache__/8d1e6a7e959a_initial_migration.cpython-310.pyc deleted file mode 100644 index eb3b18f..0000000 Binary files a/migrations/versions/__pycache__/8d1e6a7e959a_initial_migration.cpython-310.pyc and /dev/null differ diff --git a/migrations/versions/ef8f73880570_added_year_field_in_dmpissues.py b/migrations/versions/ef8f73880570_added_year_field_in_dmpissues.py index f16965f..bd4427c 100644 --- a/migrations/versions/ef8f73880570_added_year_field_in_dmpissues.py +++ b/migrations/versions/ef8f73880570_added_year_field_in_dmpissues.py @@ -5,1725 +5,3232 @@ Create Date: 2025-06-30 12:15:29.383998 """ + from typing import Sequence, Union from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql +import db + # revision identifiers, used by Alembic. -revision: str = 'ef8f73880570' -down_revision: Union[str, None] = '8d1e6a7e959a' +revision: str = "ef8f73880570" +down_revision: Union[str, None] = "8d1e6a7e959a" branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('github_profile_data', - sa.Column('github_username', sa.String(), nullable=False), - sa.Column('discord_id', sa.BigInteger(), nullable=False), - sa.Column('classroom_points', sa.Integer(), nullable=False), - sa.Column('prs_raised', sa.Integer(), nullable=False), - sa.Column('prs_reviewed', sa.Integer(), nullable=False), - sa.Column('prs_merged', sa.Integer(), nullable=False), - sa.Column('dpg_points', sa.Integer(), nullable=False), - sa.Column('milestone', sa.Integer(), nullable=False), - sa.PrimaryKeyConstraint('github_username') - ) - op.create_table('leaderboard', - sa.Column('discord_id', sa.BigInteger(), autoincrement=False, nullable=False), - sa.Column('github_id', sa.BigInteger(), nullable=False), - sa.Column('github_url', sa.Text(), nullable=False), - sa.Column('apprentice_badge', sa.Boolean(), nullable=True), - sa.Column('converser_badge', sa.Boolean(), nullable=False), - sa.Column('rockstar_badge', sa.Boolean(), nullable=False), - sa.Column('enthusiast_badge', sa.Boolean(), nullable=False), - sa.Column('rising_star_badge', sa.Boolean(), nullable=False), - sa.Column('github_x_discord_badge', sa.Boolean(), nullable=False), - sa.Column('points', sa.Integer(), nullable=False), - sa.Column('bronze_badge', sa.Boolean(), nullable=False), - sa.Column('silver_badge', sa.Boolean(), nullable=False), - sa.Column('gold_badge', sa.Boolean(), nullable=False), - sa.Column('ruby_badge', sa.Boolean(), nullable=False), - sa.Column('diamond_badge', sa.Boolean(), nullable=False), - sa.Column('certificate_link', sa.Text(), nullable=True), - sa.PrimaryKeyConstraint('discord_id') - ) - op.create_table('role_master', - sa.Column('id', sa.BigInteger(), nullable=False), - sa.Column('created_at', db.models.DateTime(), nullable=False), - sa.Column('updated_at', db.models.DateTime(), nullable=True), - sa.Column('role', sa.Text(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('unstructured_discord_data', - sa.Column('text', sa.Text(), nullable=True), - sa.Column('author', sa.BigInteger(), nullable=True), - sa.Column('channel', sa.BigInteger(), nullable=True), - sa.Column('channel_name', sa.Text(), nullable=True), - sa.Column('uuid', sa.String(length=36), nullable=False), - sa.Column('author_name', sa.Text(), nullable=True), - sa.Column('author_roles', sa.Text(), nullable=True), - sa.Column('sent_at', sa.Text(), nullable=True), - sa.PrimaryKeyConstraint('uuid') - ) - op.create_table('user_points_mapping', - sa.Column('id', sa.UUID(), nullable=False), - sa.Column('contributor', sa.BigInteger(), nullable=True), - sa.Column('points', sa.Integer(), nullable=False), - sa.Column('level', sa.String(length=50), nullable=True), - sa.Column('created_at', db.models.DateTime(), nullable=False), - sa.Column('updated_at', db.models.DateTime(), nullable=False), - sa.Column('mentor_id', sa.BigInteger(), nullable=True), - sa.ForeignKeyConstraint(['contributor'], ['contributors_registration.id'], ), - sa.ForeignKeyConstraint(['mentor_id'], ['mentor_details.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.drop_table('__pull_requests') - op.drop_table('__community_program_product_wise_tickets') - op.drop_table('__mentorship_program_selected_contributors') - op.drop_table('__comments') - op.drop_table('unstructured discord data') - op.drop_table('__contributors_vc') - op.drop_table('contributors_registration_old') - op.drop_table('__dashboard_config') - op.drop_table('__community_organisations') - op.drop_table('__mentors') - op.drop_table('__contributors_discord') - 
op.drop_table('__community_program_tickets') - op.drop_table('__applicant') - op.drop_table('__community_program_unique_user_data') - op.drop_table('__mentorship_program_ticket_comments') - op.drop_table('__mentorship_program_tickets') - op.drop_table('__mentorship_program_projects') - op.drop_table('__dev_onboarding') - op.drop_table('__mentorship_program_pull_request') - op.alter_column('app_comments', 'id', - existing_type=sa.UUID(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text('gen_random_uuid()')) - op.alter_column('app_comments', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('app_comments', 'issue_id', - existing_type=sa.BIGINT(), - nullable=True) - op.alter_column('badges', 'id', - existing_type=sa.INTEGER(), - type_=sa.UUID(), - existing_nullable=False) - op.alter_column('badges', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('badges', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('ccbp_tickets', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('ccbp_tickets', 'issue_id', - existing_type=sa.BIGINT(), - nullable=True) - op.alter_column('ccbp_tickets', 'index', - existing_type=sa.SMALLINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('ccbp_tickets', 'closed_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_comment='date-time at which issue was closed', - existing_nullable=True) - op.alter_column('chapters', 'org_name', - existing_type=sa.TEXT(), - nullable=True) - op.alter_column('chapters', 'discord_role_id', - existing_type=sa.BIGINT(), - nullable=True, - comment='db id of the corresponding member role in discord server', - existing_comment='db od of the corresponding member role in discord server') - op.alter_column('chapters', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('community_orgs', 'name', - existing_type=sa.TEXT(), - nullable=True) - op.alter_column('connected_prs', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('connected_prs', 'raised_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False) - op.alter_column('connected_prs', 'merged_at', - existing_type=postgresql.TIMESTAMP(), - type_=sa.Text(), - existing_nullable=True) - op.alter_column('contributor_names', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.add_column('contributor_points', sa.Column('contributors_id', sa.BigInteger(), nullable=True)) - op.drop_constraint('contributor_points_contributors_id_fkey', 'contributor_points', type_='foreignkey') - op.create_foreign_key(None, 'contributor_points', 'contributors_registration', 
['contributors_id'], ['id']) - op.drop_column('contributor_points', 'user_id') - op.alter_column('contributors_discord', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('contributors_discord', 'joined_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('contributors_registration', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('contributors_registration', 'joined_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) + op.create_table( + "github_profile_data", + sa.Column("github_username", sa.String(), nullable=False), + sa.Column("discord_id", sa.BigInteger(), nullable=False), + sa.Column("classroom_points", sa.Integer(), nullable=False), + sa.Column("prs_raised", sa.Integer(), nullable=False), + sa.Column("prs_reviewed", sa.Integer(), nullable=False), + sa.Column("prs_merged", sa.Integer(), nullable=False), + sa.Column("dpg_points", sa.Integer(), nullable=False), + sa.Column("milestone", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("github_username"), + ) + op.create_table( + "leaderboard", + sa.Column("discord_id", sa.BigInteger(), autoincrement=False, nullable=False), + sa.Column("github_id", sa.BigInteger(), nullable=False), + sa.Column("github_url", sa.Text(), nullable=False), + sa.Column("apprentice_badge", sa.Boolean(), nullable=True), + sa.Column("converser_badge", sa.Boolean(), nullable=False), + sa.Column("rockstar_badge", sa.Boolean(), nullable=False), + sa.Column("enthusiast_badge", sa.Boolean(), nullable=False), + sa.Column("rising_star_badge", sa.Boolean(), nullable=False), + sa.Column("github_x_discord_badge", sa.Boolean(), nullable=False), + sa.Column("points", sa.Integer(), nullable=False), + sa.Column("bronze_badge", sa.Boolean(), nullable=False), + sa.Column("silver_badge", sa.Boolean(), nullable=False), + sa.Column("gold_badge", sa.Boolean(), nullable=False), + sa.Column("ruby_badge", sa.Boolean(), nullable=False), + sa.Column("diamond_badge", sa.Boolean(), nullable=False), + sa.Column("certificate_link", sa.Text(), nullable=True), + sa.PrimaryKeyConstraint("discord_id"), + ) + op.create_table( + "role_master", + sa.Column("id", sa.BigInteger(), nullable=False), + sa.Column("created_at", db.models.DateTime(), nullable=False), + sa.Column("updated_at", db.models.DateTime(), nullable=True), + sa.Column("role", sa.Text(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "unstructured_discord_data", + sa.Column("text", sa.Text(), nullable=True), + sa.Column("author", sa.BigInteger(), nullable=True), + sa.Column("channel", sa.BigInteger(), nullable=True), + sa.Column("channel_name", sa.Text(), nullable=True), + sa.Column("uuid", sa.String(length=36), nullable=False), + sa.Column("author_name", sa.Text(), nullable=True), + sa.Column("author_roles", sa.Text(), nullable=True), + sa.Column("sent_at", sa.Text(), nullable=True), + sa.PrimaryKeyConstraint("uuid"), + ) + op.create_table( + "user_points_mapping", + sa.Column("id", sa.UUID(), nullable=False), + sa.Column("contributor", sa.BigInteger(), nullable=True), + sa.Column("points", sa.Integer(), nullable=False), + sa.Column("level", sa.String(length=50), nullable=True), + sa.Column("created_at", 
db.models.DateTime(), nullable=False), + sa.Column("updated_at", db.models.DateTime(), nullable=False), + sa.Column("mentor_id", sa.BigInteger(), nullable=True), + sa.ForeignKeyConstraint( + ["contributor"], + ["contributors_registration.id"], + ), + sa.ForeignKeyConstraint( + ["mentor_id"], + ["mentor_details.id"], + ), + sa.PrimaryKeyConstraint("id"), + ) + op.drop_table("__pull_requests") + op.drop_table("__community_program_product_wise_tickets") + op.drop_table("__mentorship_program_selected_contributors") + op.drop_table("__comments") + op.drop_table("unstructured discord data") + op.drop_table("__contributors_vc") + op.drop_table("contributors_registration_old") + op.drop_table("__dashboard_config") + op.drop_table("__community_organisations") + op.drop_table("__mentors") + op.drop_table("__contributors_discord") + op.drop_table("__community_program_tickets") + op.drop_table("__applicant") + op.drop_table("__community_program_unique_user_data") + op.drop_table("__mentorship_program_ticket_comments") + op.drop_table("__mentorship_program_tickets") + op.drop_table("__mentorship_program_projects") + op.drop_table("__dev_onboarding") + op.drop_table("__mentorship_program_pull_request") + op.alter_column( + "app_comments", + "id", + existing_type=sa.UUID(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("gen_random_uuid()"), + ) + op.alter_column( + "app_comments", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "app_comments", "issue_id", existing_type=sa.BIGINT(), nullable=True + ) + op.alter_column( + "badges", + "id", + existing_type=sa.INTEGER(), + type_=sa.UUID(), + existing_nullable=False, + ) + op.alter_column( + "badges", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "badges", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "ccbp_tickets", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "ccbp_tickets", "issue_id", existing_type=sa.BIGINT(), nullable=True + ) + op.alter_column( + "ccbp_tickets", + "index", + existing_type=sa.SMALLINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "ccbp_tickets", + "closed_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_comment="date-time at which issue was closed", + existing_nullable=True, + ) + op.alter_column("chapters", "org_name", existing_type=sa.TEXT(), nullable=True) + op.alter_column( + "chapters", + "discord_role_id", + existing_type=sa.BIGINT(), + nullable=True, + comment="db id of the corresponding member role in discord server", + existing_comment="db od of the corresponding member role in discord server", + ) + op.alter_column( + "chapters", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column("community_orgs", "name", existing_type=sa.TEXT(), nullable=True) + op.alter_column( + 
"connected_prs", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "connected_prs", + "raised_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + ) + op.alter_column( + "connected_prs", + "merged_at", + existing_type=postgresql.TIMESTAMP(), + type_=sa.Text(), + existing_nullable=True, + ) + op.alter_column( + "contributor_names", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.add_column( + "contributor_points", + sa.Column("contributors_id", sa.BigInteger(), nullable=True), + ) + op.drop_constraint( + "contributor_points_contributors_id_fkey", + "contributor_points", + type_="foreignkey", + ) + op.create_foreign_key( + None, + "contributor_points", + "contributors_registration", + ["contributors_id"], + ["id"], + ) + op.drop_column("contributor_points", "user_id") + op.alter_column( + "contributors_discord", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "contributors_discord", + "joined_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "contributors_registration", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "contributors_registration", + "joined_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) op.drop_table_comment( - 'contributors_registration', - existing_comment='This is a duplicate of contributors_registration_old', - schema=None - ) - op.add_column('discord_engagement', sa.Column('converserbadge', sa.Boolean(), nullable=True)) - op.add_column('discord_engagement', sa.Column('apprenticebadge', sa.Boolean(), nullable=True)) - op.add_column('discord_engagement', sa.Column('rockstarbadge', sa.Boolean(), nullable=True)) - op.add_column('discord_engagement', sa.Column('enthusiastbadge', sa.Boolean(), nullable=True)) - op.add_column('discord_engagement', sa.Column('risingstarbadge', sa.Boolean(), nullable=True)) - op.alter_column('discord_engagement', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('discord_engagement', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.drop_column('discord_engagement', 'risingStarBadge') - op.drop_column('discord_engagement', 'enthusiastBadge') - op.drop_column('discord_engagement', 'rockstarBadge') - op.drop_column('discord_engagement', 'apprenticeBadge') - op.drop_column('discord_engagement', 'converserBadge') - op.alter_column('dmp_issue_updates', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('dmp_issue_updates', 'comment_updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.drop_constraint('dmp_issue_updates_comment_id_key', 'dmp_issue_updates', 
type_='unique') - op.drop_constraint('dmp_issue_updates_dmp_id_fkey', 'dmp_issue_updates', type_='foreignkey') - op.create_foreign_key(None, 'dmp_issue_updates', 'dmp_issues', ['dmp_id'], ['id']) - op.alter_column('dmp_issues', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('dmp_issues', 'repo_owner', - existing_type=sa.TEXT(), - nullable=False) - op.alter_column('dmp_issues', 'year', - existing_type=sa.BIGINT(), - type_=sa.Integer(), - nullable=True, - comment='The year the issue was created', - existing_server_default=sa.text('2024')) - op.drop_constraint('dmp_issues_dmp_id_key', 'dmp_issues', type_='unique') - op.drop_constraint('dmp_issues_org_id_fkey', 'dmp_issues', type_='foreignkey') - op.create_foreign_key(None, 'dmp_issues', 'dmp_orgs', ['org_id'], ['id']) - op.alter_column('dmp_orgs', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('dmp_orgs', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.drop_constraint('dmp_orgs_id_key', 'dmp_orgs', type_='unique') - op.alter_column('dmp_pr_updates', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('dmp_pr_updates', 'pr_updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('dmp_pr_updates', 'merged_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('dmp_pr_updates', 'closed_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.drop_constraint('dmp_pr_updates_pr_id_key', 'dmp_pr_updates', type_='unique') - op.drop_constraint('dmp_pr_updates_dmp_id_fkey', 'dmp_pr_updates', type_='foreignkey') - op.create_foreign_key(None, 'dmp_pr_updates', 'dmp_issues', ['dmp_id'], ['id']) - op.alter_column('dmp_tickets', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('dmp_tickets', 'ticket_points', - existing_type=sa.SMALLINT(), - type_=sa.Integer(), - existing_comment='How many points the ticket is worth', - existing_nullable=True, - existing_server_default=sa.text("'0'::smallint")) - op.alter_column('dmp_tickets', 'index', - existing_type=sa.SMALLINT(), - server_default=None, - type_=sa.Integer(), - existing_nullable=False, - autoincrement=True) - op.alter_column('dmp_week_updates', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.drop_constraint('dmp_week_updates_dmp_id_fkey', 'dmp_week_updates', type_='foreignkey') - op.alter_column('github_classroom_data', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('github_classroom_data', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('github_classroom_data', 'submission_timestamp', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - 
existing_nullable=False) - op.alter_column('github_classroom_data', 'points_awarded', - existing_type=sa.VARCHAR(), - type_=sa.Integer(), - existing_nullable=True) - op.alter_column('github_classroom_data', 'points_available', - existing_type=sa.VARCHAR(), - type_=sa.Integer(), - existing_nullable=True) + "contributors_registration", + existing_comment="This is a duplicate of contributors_registration_old", + schema=None, + ) + op.add_column( + "discord_engagement", sa.Column("converserbadge", sa.Boolean(), nullable=True) + ) + op.add_column( + "discord_engagement", sa.Column("apprenticebadge", sa.Boolean(), nullable=True) + ) + op.add_column( + "discord_engagement", sa.Column("rockstarbadge", sa.Boolean(), nullable=True) + ) + op.add_column( + "discord_engagement", sa.Column("enthusiastbadge", sa.Boolean(), nullable=True) + ) + op.add_column( + "discord_engagement", sa.Column("risingstarbadge", sa.Boolean(), nullable=True) + ) + op.alter_column( + "discord_engagement", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "discord_engagement", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.drop_column("discord_engagement", "risingStarBadge") + op.drop_column("discord_engagement", "enthusiastBadge") + op.drop_column("discord_engagement", "rockstarBadge") + op.drop_column("discord_engagement", "apprenticeBadge") + op.drop_column("discord_engagement", "converserBadge") + op.alter_column( + "dmp_issue_updates", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "dmp_issue_updates", + "comment_updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.drop_constraint( + "dmp_issue_updates_comment_id_key", "dmp_issue_updates", type_="unique" + ) + op.drop_constraint( + "dmp_issue_updates_dmp_id_fkey", "dmp_issue_updates", type_="foreignkey" + ) + op.create_foreign_key(None, "dmp_issue_updates", "dmp_issues", ["dmp_id"], ["id"]) + op.alter_column( + "dmp_issues", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column("dmp_issues", "repo_owner", existing_type=sa.TEXT(), nullable=False) + op.alter_column( + "dmp_issues", + "year", + existing_type=sa.BIGINT(), + type_=sa.Integer(), + nullable=True, + comment="The year the issue was created", + existing_server_default=sa.text("2024"), + ) + op.drop_constraint("dmp_issues_dmp_id_key", "dmp_issues", type_="unique") + op.drop_constraint("dmp_issues_org_id_fkey", "dmp_issues", type_="foreignkey") + op.create_foreign_key(None, "dmp_issues", "dmp_orgs", ["org_id"], ["id"]) + op.alter_column( + "dmp_orgs", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "dmp_orgs", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.drop_constraint("dmp_orgs_id_key", "dmp_orgs", type_="unique") + op.alter_column( + "dmp_pr_updates", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + 
existing_server_default=sa.text("now()"), + ) + op.alter_column( + "dmp_pr_updates", + "pr_updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "dmp_pr_updates", + "merged_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "dmp_pr_updates", + "closed_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.drop_constraint("dmp_pr_updates_pr_id_key", "dmp_pr_updates", type_="unique") + op.drop_constraint( + "dmp_pr_updates_dmp_id_fkey", "dmp_pr_updates", type_="foreignkey" + ) + op.create_foreign_key(None, "dmp_pr_updates", "dmp_issues", ["dmp_id"], ["id"]) + op.alter_column( + "dmp_tickets", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "dmp_tickets", + "ticket_points", + existing_type=sa.SMALLINT(), + type_=sa.Integer(), + existing_comment="How many points the ticket is worth", + existing_nullable=True, + existing_server_default=sa.text("'0'::smallint"), + ) + op.alter_column( + "dmp_tickets", + "index", + existing_type=sa.SMALLINT(), + server_default=None, + type_=sa.Integer(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "dmp_week_updates", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.drop_constraint( + "dmp_week_updates_dmp_id_fkey", "dmp_week_updates", type_="foreignkey" + ) + op.alter_column( + "github_classroom_data", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "github_classroom_data", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "github_classroom_data", + "submission_timestamp", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + ) + op.alter_column( + "github_classroom_data", + "points_awarded", + existing_type=sa.VARCHAR(), + type_=sa.Integer(), + existing_nullable=True, + ) + op.alter_column( + "github_classroom_data", + "points_available", + existing_type=sa.VARCHAR(), + type_=sa.Integer(), + existing_nullable=True, + ) op.create_table_comment( - 'github_classroom_data', - 'Table for saving the details about github classroom assignment data', - existing_comment='Table for save the details about github classroom assignment datas', - schema=None - ) - op.alter_column('github_installations', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('github_installations', 'github_ids', - existing_type=postgresql.JSON(astext_type=sa.Text()), - type_=sa.Text(), - comment="Identifiers on the github database, prolly won't be used", - existing_comment="identifiers on the github database, prolly won't be used", - existing_nullable=True) - op.alter_column('github_installations', 'permissions_and_events', - existing_type=postgresql.JSON(astext_type=sa.Text()), - type_=sa.Text(), - existing_nullable=True) - op.alter_column('github_installations', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), 
- existing_nullable=True) - op.drop_constraint('github_installations_organisation_fkey', 'github_installations', type_='foreignkey') - op.create_foreign_key(None, 'github_installations', 'community_orgs', ['organisation'], ['name']) - op.alter_column('github_organisations_to_organisations', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('github_organisations_to_organisations', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - comment='Creation date of organization ticket', - existing_comment='creation date of organization ticket', - existing_nullable=True) - op.alter_column('issue_contributors', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('issue_contributors', 'contributor_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - nullable=True) - op.alter_column('issue_contributors', 'issue_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('issue_contributors', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('issue_contributors', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.drop_constraint('unique_issue_id_contributors', 'issue_contributors', type_='unique') - op.drop_constraint('issue_contributors_contributor_id_fkey', 'issue_contributors', type_='foreignkey') - op.create_foreign_key(None, 'issue_contributors', 'role_master', ['role'], ['id']) - op.create_foreign_key(None, 'issue_contributors', 'contributors_registration', ['contributor_id'], ['id']) - op.alter_column('issue_mentors', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)")) - op.alter_column('issue_mentors', 'issue_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('issue_mentors', 'angel_mentor_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=True) - op.drop_constraint('unique_issue_id_mentors', 'issue_mentors', type_='unique') - op.alter_column('issues', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('issues_id_seq'::regclass)")) - op.drop_constraint('issues_org_id_fkey', 'issues', type_='foreignkey') - op.create_foreign_key(None, 'issues', 'community_orgs', ['org_id'], ['id']) - op.alter_column('mentor_details', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('mentor_details_id_seq'::regclass)")) - op.alter_column('mentor_not_added', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.add_column('mentorship_program_site_structure', sa.Column('product_id', sa.BigInteger(), nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('project_id', sa.BigInteger(), nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('contributor_id', sa.BigInteger(), 
nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('website_directory_label', sa.Text(), nullable=True)) - op.alter_column('mentorship_program_site_structure', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.drop_constraint('mentorship_program_site_structure_product_fkey', 'mentorship_program_site_structure', type_='foreignkey') - op.drop_constraint('mentorship_program_site_structure_contributor_fkey', 'mentorship_program_site_structure', type_='foreignkey') - op.drop_constraint('mentorship_program_site_structure_project_fkey', 'mentorship_program_site_structure', type_='foreignkey') - op.create_foreign_key(None, 'mentorship_program_site_structure', 'product', ['product_id'], ['id']) + "github_classroom_data", + "Table for saving the details about github classroom assignment data", + existing_comment="Table for save the details about github classroom assignment datas", + schema=None, + ) + op.alter_column( + "github_installations", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "github_installations", + "github_ids", + existing_type=postgresql.JSON(astext_type=sa.Text()), + type_=sa.Text(), + comment="Identifiers on the github database, prolly won't be used", + existing_comment="identifiers on the github database, prolly won't be used", + existing_nullable=True, + ) + op.alter_column( + "github_installations", + "permissions_and_events", + existing_type=postgresql.JSON(astext_type=sa.Text()), + type_=sa.Text(), + existing_nullable=True, + ) + op.alter_column( + "github_installations", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.drop_constraint( + "github_installations_organisation_fkey", + "github_installations", + type_="foreignkey", + ) + op.create_foreign_key( + None, "github_installations", "community_orgs", ["organisation"], ["name"] + ) + op.alter_column( + "github_organisations_to_organisations", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "github_organisations_to_organisations", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + comment="Creation date of organization ticket", + existing_comment="creation date of organization ticket", + existing_nullable=True, + ) + op.alter_column( + "issue_contributors", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "issue_contributors", + "contributor_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + nullable=True, + ) + op.alter_column( + "issue_contributors", + "issue_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + ) + op.alter_column( + "issue_contributors", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "issue_contributors", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.drop_constraint( + "unique_issue_id_contributors", "issue_contributors", type_="unique" + ) + op.drop_constraint( + 
"issue_contributors_contributor_id_fkey", + "issue_contributors", + type_="foreignkey", + ) + op.create_foreign_key(None, "issue_contributors", "role_master", ["role"], ["id"]) + op.create_foreign_key( + None, + "issue_contributors", + "contributors_registration", + ["contributor_id"], + ["id"], + ) + op.alter_column( + "issue_mentors", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)"), + ) + op.alter_column( + "issue_mentors", + "issue_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + ) + op.alter_column( + "issue_mentors", + "angel_mentor_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=True, + ) + op.drop_constraint("unique_issue_id_mentors", "issue_mentors", type_="unique") + op.alter_column( + "issues", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('issues_id_seq'::regclass)"), + ) + op.drop_constraint("issues_org_id_fkey", "issues", type_="foreignkey") + op.create_foreign_key(None, "issues", "community_orgs", ["org_id"], ["id"]) + op.alter_column( + "mentor_details", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('mentor_details_id_seq'::regclass)"), + ) + op.alter_column( + "mentor_not_added", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column("product_id", sa.BigInteger(), nullable=True), + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column("project_id", sa.BigInteger(), nullable=True), + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column("contributor_id", sa.BigInteger(), nullable=True), + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column("website_directory_label", sa.Text(), nullable=True), + ) + op.alter_column( + "mentorship_program_site_structure", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.drop_constraint( + "mentorship_program_site_structure_product_fkey", + "mentorship_program_site_structure", + type_="foreignkey", + ) + op.drop_constraint( + "mentorship_program_site_structure_contributor_fkey", + "mentorship_program_site_structure", + type_="foreignkey", + ) + op.drop_constraint( + "mentorship_program_site_structure_project_fkey", + "mentorship_program_site_structure", + type_="foreignkey", + ) + op.create_foreign_key( + None, "mentorship_program_site_structure", "product", ["product_id"], ["id"] + ) op.drop_table_comment( - 'mentorship_program_site_structure', - existing_comment='a mapping for the milestones website structure', - schema=None - ) - op.drop_column('mentorship_program_site_structure', 'product') - op.drop_column('mentorship_program_site_structure', 'website directory_label') - op.drop_column('mentorship_program_site_structure', 'contributor') - op.drop_column('mentorship_program_site_structure', 'project') - op.alter_column('mentorship_program_website_comments', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_comments', 'updated_at', - 
existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_commits', 'date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_commits', 'files', - existing_type=postgresql.JSON(astext_type=sa.Text()), - type_=sa.Text(), - existing_nullable=True) - op.add_column('mentorship_program_website_has_updated', sa.Column('project_id', sa.BigInteger(), nullable=True)) - op.alter_column('mentorship_program_website_has_updated', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('mentorship_program_website_has_updated', 'week1_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week2_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week3_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week4_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week5_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week6_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week7_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week8_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week9_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.drop_constraint('mentorship_program_website_has_updated_project_fkey', 'mentorship_program_website_has_updated', type_='foreignkey') - op.drop_column('mentorship_program_website_has_updated', 'project') - op.alter_column('point_system', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('point_transactions', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('point_transactions', 'user_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=True) - op.alter_column('point_transactions', 'issue_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('point_transactions', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('point_transactions', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - 
nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('point_transactions', 'angel_mentor_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=True) - op.drop_constraint('point_transactions_user_id_fkey', 'point_transactions', type_='foreignkey') - op.create_foreign_key(None, 'point_transactions', 'contributors_registration', ['user_id'], ['id']) - op.create_foreign_key(None, 'point_transactions', 'mentor_details', ['angel_mentor_id'], ['id']) - op.alter_column('points_mapping', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('points_mapping', 'role', - existing_type=sa.TEXT(), - type_=sa.String(length=50), - nullable=False) - op.alter_column('points_mapping', 'complexity', - existing_type=sa.TEXT(), - type_=sa.String(length=50), - nullable=False) - op.alter_column('points_mapping', 'points', - existing_type=sa.INTEGER(), - nullable=False) - op.alter_column('points_mapping', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('points_mapping', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('pr_history', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('pr_history_id_seq'::regclass)")) - op.alter_column('pr_history', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('pr_history', 'raised_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False) - op.alter_column('pr_history', 'pr_id', - existing_type=sa.BIGINT(), - comment=None, - existing_comment='github id of the pr', - existing_nullable=False) + "mentorship_program_site_structure", + existing_comment="a mapping for the milestones website structure", + schema=None, + ) + op.drop_column("mentorship_program_site_structure", "product") + op.drop_column("mentorship_program_site_structure", "website directory_label") + op.drop_column("mentorship_program_site_structure", "contributor") + op.drop_column("mentorship_program_site_structure", "project") + op.alter_column( + "mentorship_program_website_comments", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_comments", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_commits", + "date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_commits", + "files", + existing_type=postgresql.JSON(astext_type=sa.Text()), + type_=sa.Text(), + existing_nullable=True, + ) + op.add_column( + "mentorship_program_website_has_updated", + sa.Column("project_id", sa.BigInteger(), nullable=True), + ) + op.alter_column( + "mentorship_program_website_has_updated", + "id", + existing_type=sa.BIGINT(), + server_default=None, + 
existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week1_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week2_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week3_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week4_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week5_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week6_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week7_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week8_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week9_update_date", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.drop_constraint( + "mentorship_program_website_has_updated_project_fkey", + "mentorship_program_website_has_updated", + type_="foreignkey", + ) + op.drop_column("mentorship_program_website_has_updated", "project") + op.alter_column( + "point_system", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "point_transactions", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "point_transactions", + "user_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=True, + ) + op.alter_column( + "point_transactions", + "issue_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + ) + op.alter_column( + "point_transactions", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + nullable=False, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "point_transactions", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + nullable=False, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "point_transactions", + "angel_mentor_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=True, + ) + op.drop_constraint( + "point_transactions_user_id_fkey", "point_transactions", type_="foreignkey" + ) + op.create_foreign_key( + None, "point_transactions", "contributors_registration", ["user_id"], ["id"] + ) + op.create_foreign_key( + None, "point_transactions", "mentor_details", ["angel_mentor_id"], 
["id"] + ) + op.alter_column( + "points_mapping", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "points_mapping", + "role", + existing_type=sa.TEXT(), + type_=sa.String(length=50), + nullable=False, + ) + op.alter_column( + "points_mapping", + "complexity", + existing_type=sa.TEXT(), + type_=sa.String(length=50), + nullable=False, + ) + op.alter_column( + "points_mapping", "points", existing_type=sa.INTEGER(), nullable=False + ) + op.alter_column( + "points_mapping", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "points_mapping", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "pr_history", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('pr_history_id_seq'::regclass)"), + ) + op.alter_column( + "pr_history", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "pr_history", + "raised_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + ) + op.alter_column( + "pr_history", + "pr_id", + existing_type=sa.BIGINT(), + comment=None, + existing_comment="github id of the pr", + existing_nullable=False, + ) op.drop_table_comment( - 'pr_history', - existing_comment='Holds records of pr webhooks', - schema=None - ) - op.drop_column('pr_history', 'points') - op.alter_column('pr_staging', 'id', - existing_type=sa.UUID(), - type_=sa.String(length=36), - existing_nullable=False, - existing_server_default=sa.text('gen_random_uuid()')) - op.alter_column('pr_staging', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('pr_staging', 'raised_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False) - op.alter_column('pr_staging', 'pr_id', - existing_type=sa.BIGINT(), - comment=None, - existing_comment='github id of the pr', - existing_nullable=False) + "pr_history", existing_comment="Holds records of pr webhooks", schema=None + ) + op.drop_column("pr_history", "points") + op.alter_column( + "pr_staging", + "id", + existing_type=sa.UUID(), + type_=sa.String(length=36), + existing_nullable=False, + existing_server_default=sa.text("gen_random_uuid()"), + ) + op.alter_column( + "pr_staging", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "pr_staging", + "raised_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + ) + op.alter_column( + "pr_staging", + "pr_id", + existing_type=sa.BIGINT(), + comment=None, + existing_comment="github id of the pr", + existing_nullable=False, + ) op.drop_table_comment( - 'pr_staging', - existing_comment='This is a duplicate of connected_prs', - schema=None - ) - 
op.add_column('product', sa.Column('channel_id', sa.BigInteger(), nullable=True)) - op.alter_column('product', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('product', 'description', - existing_type=sa.TEXT(), - comment=None, - existing_comment='URL to the product entry on C4GT wiki', - existing_nullable=True, - existing_server_default=sa.text("''::text")) - op.drop_constraint('product_channel_fkey', 'product', type_='foreignkey') - op.create_foreign_key(None, 'product', 'discord_channels', ['channel_id'], ['channel_id']) + "pr_staging", + existing_comment="This is a duplicate of connected_prs", + schema=None, + ) + op.add_column("product", sa.Column("channel_id", sa.BigInteger(), nullable=True)) + op.alter_column( + "product", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "product", + "description", + existing_type=sa.TEXT(), + comment=None, + existing_comment="URL to the product entry on C4GT wiki", + existing_nullable=True, + existing_server_default=sa.text("''::text"), + ) + op.drop_constraint("product_channel_fkey", "product", type_="foreignkey") + op.create_foreign_key( + None, "product", "discord_channels", ["channel_id"], ["channel_id"] + ) op.drop_table_comment( - 'product', + "product", existing_comment="A table containing all 'Products' in C4GT 2023", - schema=None - ) - op.drop_column('product', 'channel') - op.alter_column('ticket_comments', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('ticket_comments', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('unlisted_tickets', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('unlisted_tickets', 'ticket_points', - existing_type=sa.SMALLINT(), - comment=None, - existing_comment='How many points the ticket is worth', - existing_nullable=True, - existing_server_default=sa.text("'0'::smallint")) - op.alter_column('unlisted_tickets', 'index', - existing_type=sa.SMALLINT(), - server_default=None, - existing_nullable=False) - op.alter_column('unlisted_tickets', 'uuid', - existing_type=sa.UUID(), - type_=sa.String(length=36), - existing_nullable=False, - existing_server_default=sa.text('gen_random_uuid()')) - op.create_unique_constraint(None, 'unlisted_tickets', ['uuid', 'issue_id']) - op.add_column('user_activity', sa.Column('contributor_id', sa.BigInteger(), nullable=False)) - op.alter_column('user_activity', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('user_activity', 'issue_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('user_activity', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_activity', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_activity', 'mentor_id', - existing_type=sa.INTEGER(), - 
type_=sa.BigInteger(), - existing_nullable=True) - op.drop_constraint('user_activity_user_id_fkey', 'user_activity', type_='foreignkey') - op.drop_constraint('user_activity_mentor_id_fkey', 'user_activity', type_='foreignkey') - op.create_foreign_key(None, 'user_activity', 'contributors_registration', ['contributor_id'], ['id']) - op.drop_column('user_activity', 'user_id') - op.alter_column('user_badges', 'id', - existing_type=sa.INTEGER(), - type_=sa.UUID(), - existing_nullable=False) - op.alter_column('user_badges', 'user_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('user_badges', 'badge_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('user_badges', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_badges', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_certificates', 'id', - existing_type=sa.INTEGER(), - type_=sa.UUID(), - existing_nullable=False) - op.alter_column('user_certificates', 'user_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('user_certificates', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_certificates', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('users', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('users_id_seq'::regclass)")) - op.alter_column('users', 'name', - existing_type=sa.TEXT(), - nullable=True) - op.alter_column('users', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('users', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.create_unique_constraint(None, 'users', ['discord']) - op.alter_column('vc_logs', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('vc_logs', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) + schema=None, + ) + op.drop_column("product", "channel") + op.alter_column( + "ticket_comments", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "ticket_comments", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + ) + op.alter_column( + "unlisted_tickets", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("now()"), 
+ ) + op.alter_column( + "unlisted_tickets", + "ticket_points", + existing_type=sa.SMALLINT(), + comment=None, + existing_comment="How many points the ticket is worth", + existing_nullable=True, + existing_server_default=sa.text("'0'::smallint"), + ) + op.alter_column( + "unlisted_tickets", + "index", + existing_type=sa.SMALLINT(), + server_default=None, + existing_nullable=False, + ) + op.alter_column( + "unlisted_tickets", + "uuid", + existing_type=sa.UUID(), + type_=sa.String(length=36), + existing_nullable=False, + existing_server_default=sa.text("gen_random_uuid()"), + ) + op.create_unique_constraint(None, "unlisted_tickets", ["uuid", "issue_id"]) + op.add_column( + "user_activity", sa.Column("contributor_id", sa.BigInteger(), nullable=False) + ) + op.alter_column( + "user_activity", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "user_activity", + "issue_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + ) + op.alter_column( + "user_activity", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_activity", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_activity", + "mentor_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=True, + ) + op.drop_constraint( + "user_activity_user_id_fkey", "user_activity", type_="foreignkey" + ) + op.drop_constraint( + "user_activity_mentor_id_fkey", "user_activity", type_="foreignkey" + ) + op.create_foreign_key( + None, "user_activity", "contributors_registration", ["contributor_id"], ["id"] + ) + op.drop_column("user_activity", "user_id") + op.alter_column( + "user_badges", + "id", + existing_type=sa.INTEGER(), + type_=sa.UUID(), + existing_nullable=False, + ) + op.alter_column( + "user_badges", + "user_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + ) + op.alter_column( + "user_badges", + "badge_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + ) + op.alter_column( + "user_badges", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_badges", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_certificates", + "id", + existing_type=sa.INTEGER(), + type_=sa.UUID(), + existing_nullable=False, + ) + op.alter_column( + "user_certificates", + "user_id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + ) + op.alter_column( + "user_certificates", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_certificates", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + 
op.alter_column( + "users", + "id", + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('users_id_seq'::regclass)"), + ) + op.alter_column("users", "name", existing_type=sa.TEXT(), nullable=True) + op.alter_column( + "users", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "users", + "updated_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.create_unique_constraint(None, "users", ["discord"]) + op.alter_column( + "vc_logs", + "id", + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "vc_logs", + "created_at", + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('vc_logs', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('vc_logs', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_constraint(None, 'users', type_='unique') - op.alter_column('users', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('users', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('users', 'name', - existing_type=sa.TEXT(), - nullable=False) - op.alter_column('users', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('users_id_seq'::regclass)")) - op.alter_column('user_certificates', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_certificates', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_certificates', 'user_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('user_certificates', 'id', - existing_type=sa.UUID(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('user_badges', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_badges', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) 
- op.alter_column('user_badges', 'badge_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('user_badges', 'user_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('user_badges', 'id', - existing_type=sa.UUID(), - type_=sa.INTEGER(), - existing_nullable=False) - op.add_column('user_activity', sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False)) - op.drop_constraint(None, 'user_activity', type_='foreignkey') - op.create_foreign_key('user_activity_mentor_id_fkey', 'user_activity', 'mentor_details', ['mentor_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - op.create_foreign_key('user_activity_user_id_fkey', 'user_activity', 'users', ['user_id'], ['id']) - op.alter_column('user_activity', 'mentor_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=True) - op.alter_column('user_activity', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_activity', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_activity', 'issue_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('user_activity', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.drop_column('user_activity', 'contributor_id') - op.drop_constraint(None, 'unlisted_tickets', type_='unique') - op.alter_column('unlisted_tickets', 'uuid', - existing_type=sa.String(length=36), - type_=sa.UUID(), - existing_nullable=False, - existing_server_default=sa.text('gen_random_uuid()')) - op.alter_column('unlisted_tickets', 'index', - existing_type=sa.SMALLINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1), - existing_nullable=False) - op.alter_column('unlisted_tickets', 'ticket_points', - existing_type=sa.SMALLINT(), - comment='How many points the ticket is worth', - existing_nullable=True, - existing_server_default=sa.text("'0'::smallint")) - op.alter_column('unlisted_tickets', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('ticket_comments', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('ticket_comments', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.add_column('product', sa.Column('channel', sa.BIGINT(), autoincrement=False, nullable=True)) + op.alter_column( + "vc_logs", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "vc_logs", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.drop_constraint(None, "users", type_="unique") + op.alter_column( + "users", + 
"updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "users", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column("users", "name", existing_type=sa.TEXT(), nullable=False) + op.alter_column( + "users", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('users_id_seq'::regclass)"), + ) + op.alter_column( + "user_certificates", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_certificates", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_certificates", + "user_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "user_certificates", + "id", + existing_type=sa.UUID(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "user_badges", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_badges", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_badges", + "badge_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "user_badges", + "user_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "user_badges", + "id", + existing_type=sa.UUID(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.add_column( + "user_activity", + sa.Column("user_id", sa.INTEGER(), autoincrement=False, nullable=False), + ) + op.drop_constraint(None, "user_activity", type_="foreignkey") + op.create_foreign_key( + "user_activity_mentor_id_fkey", + "user_activity", + "mentor_details", + ["mentor_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.create_foreign_key( + "user_activity_user_id_fkey", "user_activity", "users", ["user_id"], ["id"] + ) + op.alter_column( + "user_activity", + "mentor_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=True, + ) + op.alter_column( + "user_activity", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_activity", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "user_activity", + "issue_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "user_activity", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + 
existing_nullable=False, + autoincrement=True, + ) + op.drop_column("user_activity", "contributor_id") + op.drop_constraint(None, "unlisted_tickets", type_="unique") + op.alter_column( + "unlisted_tickets", + "uuid", + existing_type=sa.String(length=36), + type_=sa.UUID(), + existing_nullable=False, + existing_server_default=sa.text("gen_random_uuid()"), + ) + op.alter_column( + "unlisted_tickets", + "index", + existing_type=sa.SMALLINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=32767, + cycle=False, + cache=1, + ), + existing_nullable=False, + ) + op.alter_column( + "unlisted_tickets", + "ticket_points", + existing_type=sa.SMALLINT(), + comment="How many points the ticket is worth", + existing_nullable=True, + existing_server_default=sa.text("'0'::smallint"), + ) + op.alter_column( + "unlisted_tickets", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "ticket_comments", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "ticket_comments", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.add_column( + "product", sa.Column("channel", sa.BIGINT(), autoincrement=False, nullable=True) + ) op.create_table_comment( - 'product', + "product", "A table containing all 'Products' in C4GT 2023", existing_comment=None, - schema=None - ) - op.drop_constraint(None, 'product', type_='foreignkey') - op.create_foreign_key('product_channel_fkey', 'product', 'discord_channels', ['channel'], ['channel_id']) - op.alter_column('product', 'description', - existing_type=sa.TEXT(), - comment='URL to the product entry on C4GT wiki', - existing_nullable=True, - existing_server_default=sa.text("''::text")) - op.alter_column('product', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_column('product', 'channel_id') + schema=None, + ) + op.drop_constraint(None, "product", type_="foreignkey") + op.create_foreign_key( + "product_channel_fkey", + "product", + "discord_channels", + ["channel"], + ["channel_id"], + ) + op.alter_column( + "product", + "description", + existing_type=sa.TEXT(), + comment="URL to the product entry on C4GT wiki", + existing_nullable=True, + existing_server_default=sa.text("''::text"), + ) + op.alter_column( + "product", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.drop_column("product", "channel_id") op.create_table_comment( - 'pr_staging', - 'This is a duplicate of connected_prs', + "pr_staging", + "This is a duplicate of connected_prs", existing_comment=None, - schema=None - ) - op.alter_column('pr_staging', 'pr_id', - existing_type=sa.BIGINT(), - comment='github id of the pr', - existing_nullable=False) - op.alter_column('pr_staging', 'raised_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False) - op.alter_column('pr_staging', 'created_at', - existing_type=db.models.DateTime(), - 
type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('pr_staging', 'id', - existing_type=sa.String(length=36), - type_=sa.UUID(), - existing_nullable=False, - existing_server_default=sa.text('gen_random_uuid()')) - op.add_column('pr_history', sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False)) + schema=None, + ) + op.alter_column( + "pr_staging", + "pr_id", + existing_type=sa.BIGINT(), + comment="github id of the pr", + existing_nullable=False, + ) + op.alter_column( + "pr_staging", + "raised_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + ) + op.alter_column( + "pr_staging", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "pr_staging", + "id", + existing_type=sa.String(length=36), + type_=sa.UUID(), + existing_nullable=False, + existing_server_default=sa.text("gen_random_uuid()"), + ) + op.add_column( + "pr_history", + sa.Column( + "points", + sa.SMALLINT(), + server_default=sa.text("'10'::smallint"), + autoincrement=False, + nullable=False, + ), + ) op.create_table_comment( - 'pr_history', - 'Holds records of pr webhooks', - existing_comment=None, - schema=None - ) - op.alter_column('pr_history', 'pr_id', - existing_type=sa.BIGINT(), - comment='github id of the pr', - existing_nullable=False) - op.alter_column('pr_history', 'raised_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False) - op.alter_column('pr_history', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('pr_history', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('pr_history_id_seq'::regclass)")) - op.alter_column('points_mapping', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('points_mapping', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('points_mapping', 'points', - existing_type=sa.INTEGER(), - nullable=True) - op.alter_column('points_mapping', 'complexity', - existing_type=sa.String(length=50), - type_=sa.TEXT(), - nullable=True) - op.alter_column('points_mapping', 'role', - existing_type=sa.String(length=50), - type_=sa.TEXT(), - nullable=True) - op.alter_column('points_mapping', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.drop_constraint(None, 'point_transactions', type_='foreignkey') - op.drop_constraint(None, 'point_transactions', type_='foreignkey') - op.create_foreign_key('point_transactions_user_id_fkey', 'point_transactions', 'contributors_registration', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - op.alter_column('point_transactions', 'angel_mentor_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=True) - op.alter_column('point_transactions', 
'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('point_transactions', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('point_transactions', 'issue_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('point_transactions', 'user_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=True) - op.alter_column('point_transactions', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.alter_column('point_system', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.add_column('mentorship_program_website_has_updated', sa.Column('project', sa.TEXT(), autoincrement=False, nullable=True)) - op.create_foreign_key('mentorship_program_website_has_updated_project_fkey', 'mentorship_program_website_has_updated', '__mentorship_program_projects', ['project'], ['name']) - op.alter_column('mentorship_program_website_has_updated', 'week9_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week8_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week7_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week6_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week5_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week4_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week3_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week2_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week1_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_column('mentorship_program_website_has_updated', 'project_id') - op.alter_column('mentorship_program_website_commits', 'files', - existing_type=sa.Text(), - type_=postgresql.JSON(astext_type=sa.Text()), - 
existing_nullable=True) - op.alter_column('mentorship_program_website_commits', 'date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_comments', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_comments', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.add_column('mentorship_program_site_structure', sa.Column('project', sa.TEXT(), autoincrement=False, nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('contributor', sa.TEXT(), autoincrement=False, nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('website directory_label', sa.TEXT(), autoincrement=False, nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True)) + "pr_history", "Holds records of pr webhooks", existing_comment=None, schema=None + ) + op.alter_column( + "pr_history", + "pr_id", + existing_type=sa.BIGINT(), + comment="github id of the pr", + existing_nullable=False, + ) + op.alter_column( + "pr_history", + "raised_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + ) + op.alter_column( + "pr_history", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "pr_history", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('pr_history_id_seq'::regclass)"), + ) + op.alter_column( + "points_mapping", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "points_mapping", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "points_mapping", "points", existing_type=sa.INTEGER(), nullable=True + ) + op.alter_column( + "points_mapping", + "complexity", + existing_type=sa.String(length=50), + type_=sa.TEXT(), + nullable=True, + ) + op.alter_column( + "points_mapping", + "role", + existing_type=sa.String(length=50), + type_=sa.TEXT(), + nullable=True, + ) + op.alter_column( + "points_mapping", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + ) + op.drop_constraint(None, "point_transactions", type_="foreignkey") + op.drop_constraint(None, "point_transactions", type_="foreignkey") + op.create_foreign_key( + "point_transactions_user_id_fkey", + "point_transactions", + "contributors_registration", + ["user_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.alter_column( + "point_transactions", + "angel_mentor_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=True, + ) + op.alter_column( + "point_transactions", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + 
op.alter_column( + "point_transactions", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "point_transactions", + "issue_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "point_transactions", + "user_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=True, + ) + op.alter_column( + "point_transactions", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "point_system", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.add_column( + "mentorship_program_website_has_updated", + sa.Column("project", sa.TEXT(), autoincrement=False, nullable=True), + ) + op.create_foreign_key( + "mentorship_program_website_has_updated_project_fkey", + "mentorship_program_website_has_updated", + "__mentorship_program_projects", + ["project"], + ["name"], + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week9_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week8_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week7_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week6_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week5_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week4_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week3_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week2_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "week1_update_date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_has_updated", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.drop_column("mentorship_program_website_has_updated", "project_id") + op.alter_column( + "mentorship_program_website_commits", + "files", + existing_type=sa.Text(), + 
type_=postgresql.JSON(astext_type=sa.Text()), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_commits", + "date", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_comments", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "mentorship_program_website_comments", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column("project", sa.TEXT(), autoincrement=False, nullable=True), + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column("contributor", sa.TEXT(), autoincrement=False, nullable=True), + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column( + "website directory_label", sa.TEXT(), autoincrement=False, nullable=True + ), + ) + op.add_column( + "mentorship_program_site_structure", + sa.Column("product", sa.TEXT(), autoincrement=False, nullable=True), + ) op.create_table_comment( - 'mentorship_program_site_structure', - 'a mapping for the milestones website structure', + "mentorship_program_site_structure", + "a mapping for the milestones website structure", existing_comment=None, - schema=None - ) - op.drop_constraint(None, 'mentorship_program_site_structure', type_='foreignkey') - op.create_foreign_key('mentorship_program_site_structure_project_fkey', 'mentorship_program_site_structure', '__mentorship_program_projects', ['project'], ['name']) - op.create_foreign_key('mentorship_program_site_structure_contributor_fkey', 'mentorship_program_site_structure', '__mentorship_program_selected_contributors', ['contributor'], ['name']) - op.create_foreign_key('mentorship_program_site_structure_product_fkey', 'mentorship_program_site_structure', 'product', ['product'], ['name']) - op.alter_column('mentorship_program_site_structure', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_column('mentorship_program_site_structure', 'website_directory_label') - op.drop_column('mentorship_program_site_structure', 'contributor_id') - op.drop_column('mentorship_program_site_structure', 'project_id') - op.drop_column('mentorship_program_site_structure', 'product_id') - op.alter_column('mentor_not_added', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=True, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.alter_column('mentor_details', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('mentor_details_id_seq'::regclass)")) - op.drop_constraint(None, 'issues', type_='foreignkey') - op.create_foreign_key('issues_org_id_fkey', 'issues', 'community_orgs', ['org_id'], ['id'], onupdate='CASCADE', ondelete='SET NULL') - op.alter_column('issues', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('issues_id_seq'::regclass)")) - op.create_unique_constraint('unique_issue_id_mentors', 'issue_mentors', ['issue_id']) - 
op.alter_column('issue_mentors', 'angel_mentor_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=True) - op.alter_column('issue_mentors', 'issue_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('issue_mentors', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)")) - op.drop_constraint(None, 'issue_contributors', type_='foreignkey') - op.drop_constraint(None, 'issue_contributors', type_='foreignkey') - op.create_foreign_key('issue_contributors_contributor_id_fkey', 'issue_contributors', 'contributors_registration', ['contributor_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - op.create_unique_constraint('unique_issue_id_contributors', 'issue_contributors', ['issue_id']) - op.alter_column('issue_contributors', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('issue_contributors', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('issue_contributors', 'issue_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('issue_contributors', 'contributor_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - nullable=False) - op.alter_column('issue_contributors', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.alter_column('github_organisations_to_organisations', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - comment='creation date of organization ticket', - existing_comment='Creation date of organization ticket', - existing_nullable=True) - op.alter_column('github_organisations_to_organisations', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_constraint(None, 'github_installations', type_='foreignkey') - op.create_foreign_key('github_installations_organisation_fkey', 'github_installations', '__community_organisations', ['organisation'], ['name'], onupdate='CASCADE') - op.alter_column('github_installations', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('github_installations', 'permissions_and_events', - existing_type=sa.Text(), - type_=postgresql.JSON(astext_type=sa.Text()), - existing_nullable=True) - op.alter_column('github_installations', 'github_ids', - existing_type=sa.Text(), - type_=postgresql.JSON(astext_type=sa.Text()), - comment="identifiers on the github database, prolly won't be used", - existing_comment="Identifiers on the github database, prolly won't be used", - existing_nullable=True) - op.alter_column('github_installations', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) + schema=None, + ) + op.drop_constraint(None, "mentorship_program_site_structure", 
type_="foreignkey") + op.create_foreign_key( + "mentorship_program_site_structure_project_fkey", + "mentorship_program_site_structure", + "__mentorship_program_projects", + ["project"], + ["name"], + ) + op.create_foreign_key( + "mentorship_program_site_structure_contributor_fkey", + "mentorship_program_site_structure", + "__mentorship_program_selected_contributors", + ["contributor"], + ["name"], + ) + op.create_foreign_key( + "mentorship_program_site_structure_product_fkey", + "mentorship_program_site_structure", + "product", + ["product"], + ["name"], + ) + op.alter_column( + "mentorship_program_site_structure", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.drop_column("mentorship_program_site_structure", "website_directory_label") + op.drop_column("mentorship_program_site_structure", "contributor_id") + op.drop_column("mentorship_program_site_structure", "project_id") + op.drop_column("mentorship_program_site_structure", "product_id") + op.alter_column( + "mentor_not_added", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=True, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "mentor_details", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('mentor_details_id_seq'::regclass)"), + ) + op.drop_constraint(None, "issues", type_="foreignkey") + op.create_foreign_key( + "issues_org_id_fkey", + "issues", + "community_orgs", + ["org_id"], + ["id"], + onupdate="CASCADE", + ondelete="SET NULL", + ) + op.alter_column( + "issues", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('issues_id_seq'::regclass)"), + ) + op.create_unique_constraint( + "unique_issue_id_mentors", "issue_mentors", ["issue_id"] + ) + op.alter_column( + "issue_mentors", + "angel_mentor_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=True, + ) + op.alter_column( + "issue_mentors", + "issue_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "issue_mentors", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)"), + ) + op.drop_constraint(None, "issue_contributors", type_="foreignkey") + op.drop_constraint(None, "issue_contributors", type_="foreignkey") + op.create_foreign_key( + "issue_contributors_contributor_id_fkey", + "issue_contributors", + "contributors_registration", + ["contributor_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.create_unique_constraint( + "unique_issue_id_contributors", "issue_contributors", ["issue_id"] + ) + op.alter_column( + "issue_contributors", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "issue_contributors", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + 
existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "issue_contributors", + "issue_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "issue_contributors", + "contributor_id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + nullable=False, + ) + op.alter_column( + "issue_contributors", + "id", + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "github_organisations_to_organisations", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + comment="creation date of organization ticket", + existing_comment="Creation date of organization ticket", + existing_nullable=True, + ) + op.alter_column( + "github_organisations_to_organisations", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.drop_constraint(None, "github_installations", type_="foreignkey") + op.create_foreign_key( + "github_installations_organisation_fkey", + "github_installations", + "__community_organisations", + ["organisation"], + ["name"], + onupdate="CASCADE", + ) + op.alter_column( + "github_installations", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "github_installations", + "permissions_and_events", + existing_type=sa.Text(), + type_=postgresql.JSON(astext_type=sa.Text()), + existing_nullable=True, + ) + op.alter_column( + "github_installations", + "github_ids", + existing_type=sa.Text(), + type_=postgresql.JSON(astext_type=sa.Text()), + comment="identifiers on the github database, prolly won't be used", + existing_comment="Identifiers on the github database, prolly won't be used", + existing_nullable=True, + ) + op.alter_column( + "github_installations", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) op.create_table_comment( - 'github_classroom_data', - 'Table for save the details about github classroom assignment datas', - existing_comment='Table for saving the details about github classroom assignment data', - schema=None - ) - op.alter_column('github_classroom_data', 'points_available', - existing_type=sa.Integer(), - type_=sa.VARCHAR(), - existing_nullable=True) - op.alter_column('github_classroom_data', 'points_awarded', - existing_type=sa.Integer(), - type_=sa.VARCHAR(), - existing_nullable=True) - op.alter_column('github_classroom_data', 'submission_timestamp', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False) - op.alter_column('github_classroom_data', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('github_classroom_data', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.create_foreign_key('dmp_week_updates_dmp_id_fkey', 'dmp_week_updates', 'dmp_issues', 
['dmp_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - op.alter_column('dmp_week_updates', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.alter_column('dmp_tickets', 'index', - existing_type=sa.Integer(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1), - type_=sa.SMALLINT(), - existing_nullable=False, - autoincrement=True) - op.alter_column('dmp_tickets', 'ticket_points', - existing_type=sa.Integer(), - type_=sa.SMALLINT(), - existing_comment='How many points the ticket is worth', - existing_nullable=True, - existing_server_default=sa.text("'0'::smallint")) - op.alter_column('dmp_tickets', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.drop_constraint(None, 'dmp_pr_updates', type_='foreignkey') - op.create_foreign_key('dmp_pr_updates_dmp_id_fkey', 'dmp_pr_updates', 'dmp_issues', ['dmp_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - op.create_unique_constraint('dmp_pr_updates_pr_id_key', 'dmp_pr_updates', ['pr_id']) - op.alter_column('dmp_pr_updates', 'closed_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('dmp_pr_updates', 'merged_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('dmp_pr_updates', 'pr_updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('dmp_pr_updates', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.create_unique_constraint('dmp_orgs_id_key', 'dmp_orgs', ['id']) - op.alter_column('dmp_orgs', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('dmp_orgs', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_constraint(None, 'dmp_issues', type_='foreignkey') - op.create_foreign_key('dmp_issues_org_id_fkey', 'dmp_issues', 'dmp_orgs', ['org_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - op.create_unique_constraint('dmp_issues_dmp_id_key', 'dmp_issues', ['id']) - op.alter_column('dmp_issues', 'year', - existing_type=sa.Integer(), - type_=sa.BIGINT(), - nullable=False, - comment=None, - existing_comment='The year the issue was created', - existing_server_default=sa.text('2024')) - op.alter_column('dmp_issues', 'repo_owner', - existing_type=sa.TEXT(), - nullable=True) - op.alter_column('dmp_issues', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_constraint(None, 'dmp_issue_updates', type_='foreignkey') - op.create_foreign_key('dmp_issue_updates_dmp_id_fkey', 'dmp_issue_updates', 'dmp_issues', ['dmp_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - 
op.create_unique_constraint('dmp_issue_updates_comment_id_key', 'dmp_issue_updates', ['comment_id']) - op.alter_column('dmp_issue_updates', 'comment_updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('dmp_issue_updates', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.add_column('discord_engagement', sa.Column('converserBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True)) - op.add_column('discord_engagement', sa.Column('apprenticeBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True)) - op.add_column('discord_engagement', sa.Column('rockstarBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True)) - op.add_column('discord_engagement', sa.Column('enthusiastBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True)) - op.add_column('discord_engagement', sa.Column('risingStarBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True)) - op.alter_column('discord_engagement', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('discord_engagement', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_column('discord_engagement', 'risingstarbadge') - op.drop_column('discord_engagement', 'enthusiastbadge') - op.drop_column('discord_engagement', 'rockstarbadge') - op.drop_column('discord_engagement', 'apprenticebadge') - op.drop_column('discord_engagement', 'converserbadge') + "github_classroom_data", + "Table for save the details about github classroom assignment datas", + existing_comment="Table for saving the details about github classroom assignment data", + schema=None, + ) + op.alter_column( + "github_classroom_data", + "points_available", + existing_type=sa.Integer(), + type_=sa.VARCHAR(), + existing_nullable=True, + ) + op.alter_column( + "github_classroom_data", + "points_awarded", + existing_type=sa.Integer(), + type_=sa.VARCHAR(), + existing_nullable=True, + ) + op.alter_column( + "github_classroom_data", + "submission_timestamp", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + ) + op.alter_column( + "github_classroom_data", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "github_classroom_data", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.create_foreign_key( + "dmp_week_updates_dmp_id_fkey", + "dmp_week_updates", + "dmp_issues", + ["dmp_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.alter_column( + "dmp_week_updates", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, 
+ cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "dmp_tickets", + "index", + existing_type=sa.Integer(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=32767, + cycle=False, + cache=1, + ), + type_=sa.SMALLINT(), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "dmp_tickets", + "ticket_points", + existing_type=sa.Integer(), + type_=sa.SMALLINT(), + existing_comment="How many points the ticket is worth", + existing_nullable=True, + existing_server_default=sa.text("'0'::smallint"), + ) + op.alter_column( + "dmp_tickets", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.drop_constraint(None, "dmp_pr_updates", type_="foreignkey") + op.create_foreign_key( + "dmp_pr_updates_dmp_id_fkey", + "dmp_pr_updates", + "dmp_issues", + ["dmp_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.create_unique_constraint("dmp_pr_updates_pr_id_key", "dmp_pr_updates", ["pr_id"]) + op.alter_column( + "dmp_pr_updates", + "closed_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "dmp_pr_updates", + "merged_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "dmp_pr_updates", + "pr_updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "dmp_pr_updates", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.create_unique_constraint("dmp_orgs_id_key", "dmp_orgs", ["id"]) + op.alter_column( + "dmp_orgs", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "dmp_orgs", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.drop_constraint(None, "dmp_issues", type_="foreignkey") + op.create_foreign_key( + "dmp_issues_org_id_fkey", + "dmp_issues", + "dmp_orgs", + ["org_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.create_unique_constraint("dmp_issues_dmp_id_key", "dmp_issues", ["id"]) + op.alter_column( + "dmp_issues", + "year", + existing_type=sa.Integer(), + type_=sa.BIGINT(), + nullable=False, + comment=None, + existing_comment="The year the issue was created", + existing_server_default=sa.text("2024"), + ) + op.alter_column("dmp_issues", "repo_owner", existing_type=sa.TEXT(), nullable=True) + op.alter_column( + "dmp_issues", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.drop_constraint(None, "dmp_issue_updates", type_="foreignkey") + op.create_foreign_key( + "dmp_issue_updates_dmp_id_fkey", + "dmp_issue_updates", + "dmp_issues", + ["dmp_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.create_unique_constraint( + 
"dmp_issue_updates_comment_id_key", "dmp_issue_updates", ["comment_id"] + ) + op.alter_column( + "dmp_issue_updates", + "comment_updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "dmp_issue_updates", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.add_column( + "discord_engagement", + sa.Column( + "converserBadge", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + nullable=True, + ), + ) + op.add_column( + "discord_engagement", + sa.Column( + "apprenticeBadge", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + nullable=True, + ), + ) + op.add_column( + "discord_engagement", + sa.Column( + "rockstarBadge", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + nullable=True, + ), + ) + op.add_column( + "discord_engagement", + sa.Column( + "enthusiastBadge", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + nullable=True, + ), + ) + op.add_column( + "discord_engagement", + sa.Column( + "risingStarBadge", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + nullable=True, + ), + ) + op.alter_column( + "discord_engagement", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "discord_engagement", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.drop_column("discord_engagement", "risingstarbadge") + op.drop_column("discord_engagement", "enthusiastbadge") + op.drop_column("discord_engagement", "rockstarbadge") + op.drop_column("discord_engagement", "apprenticebadge") + op.drop_column("discord_engagement", "converserbadge") op.create_table_comment( - 'contributors_registration', - 'This is a duplicate of contributors_registration_old', + "contributors_registration", + "This is a duplicate of contributors_registration_old", existing_comment=None, - schema=None - ) - op.alter_column('contributors_registration', 'joined_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('contributors_registration', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.alter_column('contributors_discord', 'joined_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('contributors_discord', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.add_column('contributor_points', sa.Column('user_id', sa.BIGINT(), autoincrement=False, nullable=True)) - op.drop_constraint(None, 'contributor_points', type_='foreignkey') - 
op.create_foreign_key('contributor_points_contributors_id_fkey', 'contributor_points', 'contributors_registration', ['user_id'], ['id']) - op.drop_column('contributor_points', 'contributors_id') - op.alter_column('contributor_names', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.alter_column('connected_prs', 'merged_at', - existing_type=sa.Text(), - type_=postgresql.TIMESTAMP(), - existing_nullable=True) - op.alter_column('connected_prs', 'raised_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False) - op.alter_column('connected_prs', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('community_orgs', 'name', - existing_type=sa.TEXT(), - nullable=False) - op.alter_column('chapters', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('chapters', 'discord_role_id', - existing_type=sa.BIGINT(), - nullable=False, - comment='db od of the corresponding member role in discord server', - existing_comment='db id of the corresponding member role in discord server') - op.alter_column('chapters', 'org_name', - existing_type=sa.TEXT(), - nullable=False) - op.alter_column('ccbp_tickets', 'closed_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_comment='date-time at which issue was closed', - existing_nullable=True) - op.alter_column('ccbp_tickets', 'index', - existing_type=sa.SMALLINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.alter_column('ccbp_tickets', 'issue_id', - existing_type=sa.BIGINT(), - nullable=False) - op.alter_column('ccbp_tickets', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('badges', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('badges', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('badges', 'id', - existing_type=sa.UUID(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('app_comments', 'issue_id', - existing_type=sa.BIGINT(), - nullable=False) - op.alter_column('app_comments', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('app_comments', 'id', - existing_type=sa.BigInteger(), - type_=sa.UUID(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text('gen_random_uuid()')) - op.create_table('__mentorship_program_pull_request', - sa.Column('pr_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('pr_id', sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column('pr_node_id', sa.TEXT(), autoincrement=False, nullable=True), - 
sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('title', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('raised_by_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('body', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('merged_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('assignees', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('requested_reviewers', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('labels', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('review_comments_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('comments_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repository_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('repository_owner_name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repository_owner_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('repository_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('merged', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('number_of_commits', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('lines_of_code_added', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('lines_of_code_removed', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('number_of_files_changed', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('merged_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('linked_ticket', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('pr_id', name='mentorship_program_pull_request_pkey') - ) - op.create_table('__dev_onboarding', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repos', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='Onboarding_Dev_pkey'), - sa.UniqueConstraint('organisation', name='Onboarding_Dev_org_key') - ) - op.create_table('__mentorship_program_projects', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repository', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('issue_page_url', sa.TEXT(), 
autoincrement=False, nullable=True), - sa.Column('isssue_api_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repository_api_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['product'], ['product.name'], name='__mentorship_program_projects_product_fkey', ondelete='SET DEFAULT'), - sa.PrimaryKeyConstraint('id', name='projects_pkey'), - sa.UniqueConstraint('name', name='projects_name_key'), - comment='Selected projects under C4GT 2023' - ) - op.create_table('__mentorship_program_tickets', - sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='mentorship_program_tickets_pkey') - ) - op.create_table('__mentorship_program_ticket_comments', - sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='mentorship_program_ticket_comments_pkey') - ) - op.create_table('__community_program_unique_user_data', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('ticket_name', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('linked_pr', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('linked_pr_author_id', sa.BIGINT(), 
autoincrement=False, nullable=True), - sa.Column('linked_pr_author_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('is_registered', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('ticket_link', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('linked_pr_link', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('merged', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('state', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='community_program_unique_user_data_pkey') - ) - op.create_table('__applicant', - sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), - sa.Column('sheet_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='applicant_pkey'), - sa.UniqueConstraint('discord_id', name='applicant_discord_id_key') - ) - op.create_table('__community_program_tickets', - sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='community_program_tickets_pkey') - ) - op.create_table('__contributors_discord', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False), - sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column(' name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('chapter', sa.TEXT(), autoincrement=False, nullable=True, comment="the chapter they're associated 
with"), - sa.Column('gender', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='__contributors_pkey'), - sa.UniqueConstraint('discord_id', name='__contributors_discord_id_key') - ) - op.create_table('__mentors', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['organisation'], ['__community_organisations.name'], name='__mentors_organisation_fkey'), - sa.PrimaryKeyConstraint('id', name='mentors_pkey') - ) - op.create_table('__community_organisations', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='organisations_pkey'), - sa.UniqueConstraint('name', name='organisations_name_key') - ) - op.create_table('__dashboard_config', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('dashboard', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('starting date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='dashboard _config_pkey') - ) - op.create_table('contributors_registration_old', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False), - sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='contributors_registration_duplicate_pkey'), - sa.UniqueConstraint('discord_id', name='contributors_registration_duplicate_discord_id_key'), - sa.UniqueConstraint('github_id', name='contributors_registration_duplicate_github_id_key') - ) - op.create_table('__contributors_vc', - sa.Column('github_username', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('certificate_link', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('stats', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('github_username', name='contributors_vc_pkey') - ) - 
op.create_table('unstructured discord data', - sa.Column('text', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('author', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('channel', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('channel_name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('uuid', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), - sa.Column('author_name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('author_roles', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('sent_at', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('uuid', name='unstructured discord data_duplicate_pkey'), - sa.UniqueConstraint('uuid', name='unstructured discord data_duplicate_uuid_key') - ) - op.create_table('__comments', - sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='comments_pkey') - ) - op.create_table('__mentorship_program_selected_contributors', - sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('github_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('project_name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='mentorship_program_selected_contributors_pkey'), - sa.UniqueConstraint('name', name='mentorship_program_selected_contributors_name_key'), - comment='List of contributors selected for C4GT Mentorship Program 2023' - ) - op.create_table('__community_program_product_wise_tickets', - sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('status', sa.VARCHAR(length=255), 
autoincrement=False, nullable=True), - sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True), - sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('gh_organisation', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repository name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='community_program_tickets_duplicate_pkey') - ) - op.create_table('__pull_requests', - sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True), - sa.Column('api_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('raised_by', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('raised_at', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('is_merged', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('merged_by', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('merged_at', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False, comment='github id of the pr'), - sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False), - sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='pull_requests_pkey1'), - sa.UniqueConstraint('html_url', name='pull_requests_html_url_key'), - sa.UniqueConstraint('pr_id', name='pull_requests_pr_id_key') - ) - op.drop_table('user_points_mapping') - op.drop_table('unstructured_discord_data') - op.drop_table('role_master') - op.drop_table('leaderboard') - op.drop_table('github_profile_data') + schema=None, + ) + op.alter_column( + "contributors_registration", + "joined_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "contributors_registration", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "contributors_discord", + "joined_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "contributors_discord", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + 
start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.add_column( + "contributor_points", + sa.Column("user_id", sa.BIGINT(), autoincrement=False, nullable=True), + ) + op.drop_constraint(None, "contributor_points", type_="foreignkey") + op.create_foreign_key( + "contributor_points_contributors_id_fkey", + "contributor_points", + "contributors_registration", + ["user_id"], + ["id"], + ) + op.drop_column("contributor_points", "contributors_id") + op.alter_column( + "contributor_names", + "id", + existing_type=sa.BIGINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "connected_prs", + "merged_at", + existing_type=sa.Text(), + type_=postgresql.TIMESTAMP(), + existing_nullable=True, + ) + op.alter_column( + "connected_prs", + "raised_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + ) + op.alter_column( + "connected_prs", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column("community_orgs", "name", existing_type=sa.TEXT(), nullable=False) + op.alter_column( + "chapters", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + op.alter_column( + "chapters", + "discord_role_id", + existing_type=sa.BIGINT(), + nullable=False, + comment="db od of the corresponding member role in discord server", + existing_comment="db id of the corresponding member role in discord server", + ) + op.alter_column("chapters", "org_name", existing_type=sa.TEXT(), nullable=False) + op.alter_column( + "ccbp_tickets", + "closed_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_comment="date-time at which issue was closed", + existing_nullable=True, + ) + op.alter_column( + "ccbp_tickets", + "index", + existing_type=sa.SMALLINT(), + server_default=sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=32767, + cycle=False, + cache=1, + ), + existing_nullable=False, + autoincrement=True, + ) + op.alter_column( + "ccbp_tickets", "issue_id", existing_type=sa.BIGINT(), nullable=False + ) + op.alter_column( + "ccbp_tickets", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("now()"), + ) + op.alter_column( + "badges", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "badges", + "created_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text("CURRENT_TIMESTAMP"), + ) + op.alter_column( + "badges", + "id", + existing_type=sa.UUID(), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.alter_column( + "app_comments", "issue_id", existing_type=sa.BIGINT(), nullable=False + ) + op.alter_column( + "app_comments", + "updated_at", + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + 
existing_server_default=sa.text("now()"), + ) + op.alter_column( + "app_comments", + "id", + existing_type=sa.BigInteger(), + type_=sa.UUID(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("gen_random_uuid()"), + ) + op.create_table( + "__mentorship_program_pull_request", + sa.Column("pr_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("pr_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("pr_node_id", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("html_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("status", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("title", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("raised_by_username", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("raised_by_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column("body", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "created_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "updated_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "closed_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "merged_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "assignees", postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True + ), + sa.Column( + "requested_reviewers", + postgresql.ARRAY(sa.TEXT()), + autoincrement=False, + nullable=True, + ), + sa.Column( + "labels", postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True + ), + sa.Column("review_comments_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("comments_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("repository_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column( + "repository_owner_name", sa.TEXT(), autoincrement=False, nullable=True + ), + sa.Column( + "repository_owner_id", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column("repository_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("merged", sa.BOOLEAN(), autoincrement=False, nullable=True), + sa.Column( + "number_of_commits", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column( + "number_of_comments", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column( + "lines_of_code_added", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column( + "lines_of_code_removed", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column( + "number_of_files_changed", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column("merged_by_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("merged_by_username", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("linked_ticket", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("pr_id", name="mentorship_program_pull_request_pkey"), + ) + op.create_table( + "__dev_onboarding", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("organisation", sa.TEXT(), autoincrement=False, nullable=False), + sa.Column("email", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "repos", + postgresql.JSON(astext_type=sa.Text()), + autoincrement=False, + nullable=True, + ), + sa.PrimaryKeyConstraint("id", 
name="Onboarding_Dev_pkey"), + sa.UniqueConstraint("organisation", name="Onboarding_Dev_org_key"), + ) + op.create_table( + "__mentorship_program_projects", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("name", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("description", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("repository", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("product", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("issue_page_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("isssue_api_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("repository_api_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint( + ["product"], + ["product.name"], + name="__mentorship_program_projects_product_fkey", + ondelete="SET DEFAULT", + ), + sa.PrimaryKeyConstraint("id", name="projects_pkey"), + sa.UniqueConstraint("name", name="projects_name_key"), + comment="Selected projects under C4GT 2023", + ) + op.create_table( + "__mentorship_program_tickets", + sa.Column("url", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column( + "repository_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "comments_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "events_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "html_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column( + "node_id", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column("title", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column( + "raised_by_username", + sa.VARCHAR(length=255), + autoincrement=False, + nullable=True, + ), + sa.Column("raised_by_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("labels", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("status", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column("assignees", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "number_of_comments", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column( + "created_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "updated_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "closed_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.PrimaryKeyConstraint("id", name="mentorship_program_tickets_pkey"), + ) + op.create_table( + "__mentorship_program_ticket_comments", + sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("html_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("issue_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("node_id", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("commented_by", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("commented_by_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column( + "created_at", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.Column( + "updated_at", + 
postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.Column("content", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("reactions_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="mentorship_program_ticket_comments_pkey"), + ) + op.create_table( + "__community_program_unique_user_data", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("ticket_name", sa.TEXT(), autoincrement=False, nullable=False), + sa.Column("status", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("linked_pr", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column( + "linked_pr_author_id", sa.BIGINT(), autoincrement=False, nullable=True + ), + sa.Column( + "linked_pr_author_username", sa.TEXT(), autoincrement=False, nullable=True + ), + sa.Column("is_registered", sa.BOOLEAN(), autoincrement=False, nullable=True), + sa.Column("ticket_link", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("linked_pr_link", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("merged", sa.BOOLEAN(), autoincrement=False, nullable=True), + sa.Column("state", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="community_program_unique_user_data_pkey"), + ) + op.create_table( + "__applicant", + sa.Column( + "id", + sa.UUID(), + server_default=sa.text("gen_random_uuid()"), + autoincrement=False, + nullable=False, + ), + sa.Column("sheet_username", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("discord_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="applicant_pkey"), + sa.UniqueConstraint("discord_id", name="applicant_discord_id_key"), + ) + op.create_table( + "__community_program_tickets", + sa.Column("url", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column( + "repository_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "comments_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "events_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "html_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column( + "node_id", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column("title", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column( + "raised_by_username", + sa.VARCHAR(length=255), + autoincrement=False, + nullable=True, + ), + sa.Column("raised_by_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("labels", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("status", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column("assignees", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "number_of_comments", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column( + "created_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "updated_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "closed_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "community_label", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + 
nullable=True, + ), + sa.PrimaryKeyConstraint("id", name="community_program_tickets_pkey"), + ) + op.create_table( + "__contributors_discord", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("discord_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("github_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("github_url", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("discord_username", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column( + "joined_at", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=False, + ), + sa.Column("email", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column(" name", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "chapter", + sa.TEXT(), + autoincrement=False, + nullable=True, + comment="the chapter they're associated with", + ), + sa.Column("gender", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="__contributors_pkey"), + sa.UniqueConstraint("discord_id", name="__contributors_discord_id_key"), + ) + op.create_table( + "__mentors", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("discord_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("github_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("github_url", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("discord_username", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("organisation", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("name", sa.TEXT(), autoincrement=False, nullable=False), + sa.Column("email", sa.TEXT(), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint( + ["organisation"], + ["__community_organisations.name"], + name="__mentors_organisation_fkey", + ), + sa.PrimaryKeyConstraint("id", name="mentors_pkey"), + ) + op.create_table( + "__community_organisations", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("name", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="organisations_pkey"), + sa.UniqueConstraint("name", name="organisations_name_key"), + ) + op.create_table( + "__dashboard_config", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("dashboard", sa.TEXT(), autoincrement=False, nullable=False), + sa.Column( + "starting date", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.PrimaryKeyConstraint("id", name="dashboard _config_pkey"), + ) + op.create_table( + "contributors_registration_old", + sa.Column( + "id", + sa.BIGINT(), + sa.Identity( + always=False, + start=1, + increment=1, + minvalue=1, + maxvalue=9223372036854775807, + cycle=False, + cache=1, + ), + autoincrement=True, + nullable=False, + ), + sa.Column("discord_id", sa.BIGINT(), autoincrement=False, nullable=False), + 
sa.Column("github_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("github_url", sa.VARCHAR(), autoincrement=False, nullable=False), + sa.Column("discord_username", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column( + "joined_at", + postgresql.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.Column("email", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("name", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="contributors_registration_duplicate_pkey"), + sa.UniqueConstraint( + "discord_id", name="contributors_registration_duplicate_discord_id_key" + ), + sa.UniqueConstraint( + "github_id", name="contributors_registration_duplicate_github_id_key" + ), + ) + op.create_table( + "__contributors_vc", + sa.Column("github_username", sa.TEXT(), autoincrement=False, nullable=False), + sa.Column("discord_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("certificate_link", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("stats", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("github_username", name="contributors_vc_pkey"), + ) + op.create_table( + "unstructured discord data", + sa.Column("text", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("author", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("channel", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("channel_name", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "uuid", + sa.UUID(), + server_default=sa.text("gen_random_uuid()"), + autoincrement=False, + nullable=False, + ), + sa.Column("author_name", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "author_roles", + postgresql.ARRAY(sa.TEXT()), + autoincrement=False, + nullable=True, + ), + sa.Column("sent_at", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint( + "uuid", name="unstructured discord data_duplicate_pkey" + ), + sa.UniqueConstraint( + "uuid", name="unstructured discord data_duplicate_uuid_key" + ), + ) + op.create_table( + "__comments", + sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("html_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("issue_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("node_id", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("commented_by", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("commented_by_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column( + "created_at", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.Column( + "updated_at", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.Column("content", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("reactions_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="comments_pkey"), + ) + op.create_table( + "__mentorship_program_selected_contributors", + sa.Column( + "id", + sa.UUID(), + server_default=sa.text("gen_random_uuid()"), + autoincrement=False, + nullable=False, + ), + sa.Column("name", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("github_username", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("project_name", sa.TEXT(), autoincrement=False, nullable=True), + 
sa.Column("ticket_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint( + "id", name="mentorship_program_selected_contributors_pkey" + ), + sa.UniqueConstraint( + "name", name="mentorship_program_selected_contributors_name_key" + ), + comment="List of contributors selected for C4GT Mentorship Program 2023", + ) + op.create_table( + "__community_program_product_wise_tickets", + sa.Column("url", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column( + "repository_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "comments_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "events_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column( + "html_url", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column( + "node_id", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + sa.Column("title", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column( + "raised_by_username", + sa.VARCHAR(length=255), + autoincrement=False, + nullable=True, + ), + sa.Column("raised_by_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("labels", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("status", sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column("assignees", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "number_of_comments", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column( + "created_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "updated_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "closed_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column( + "community_label", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + nullable=True, + ), + sa.Column("product", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("gh_organisation", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("repository name", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("organisation", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="community_program_tickets_duplicate_pkey"), + ) + op.create_table( + "__pull_requests", + sa.Column( + "id", + sa.UUID(), + server_default=sa.text("gen_random_uuid()"), + autoincrement=False, + nullable=False, + ), + sa.Column( + "created_at", + postgresql.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + autoincrement=False, + nullable=True, + ), + sa.Column("api_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("html_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("raised_by", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("raised_at", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("raised_by_username", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("status", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("is_merged", sa.BOOLEAN(), autoincrement=False, nullable=True), + sa.Column("merged_by", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("merged_at", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column("merged_by_username", sa.TEXT(), autoincrement=False, nullable=True), + sa.Column( + "pr_id", + sa.BIGINT(), + autoincrement=False, + nullable=False, + 
comment="github id of the pr", + ), + sa.Column( + "points", + sa.SMALLINT(), + server_default=sa.text("'10'::smallint"), + autoincrement=False, + nullable=False, + ), + sa.Column("ticket_url", sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", name="pull_requests_pkey1"), + sa.UniqueConstraint("html_url", name="pull_requests_html_url_key"), + sa.UniqueConstraint("pr_id", name="pull_requests_pr_id_key"), + ) + op.drop_table("user_points_mapping") + op.drop_table("unstructured_discord_data") + op.drop_table("role_master") + op.drop_table("leaderboard") + op.drop_table("github_profile_data") # ### end Alembic commands ###
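The hunks above only reflow the autogenerated migration body into black-style formatting; the Alembic operations themselves are unchanged. Below is a minimal sketch of how the rewritten revision might be round-tripped to confirm the reformatting is behavior-neutral. It assumes the standard Alembic layout with an alembic.ini at the repository root and a configured script location; adjust both to match this repo.

    # Sketch: exercise the reformatted migration via Alembic's Python API.
    # "alembic.ini" is an assumed path, not something this diff guarantees.
    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")  # assumed location of the Alembic config

    command.upgrade(cfg, "head")   # apply the reformatted upgrade()
    command.downgrade(cfg, "-1")   # run the downgrade() shown in this diff

Note that several drop_constraint() calls in this downgrade still pass None as the constraint name (an autogenerate placeholder), so the round trip above would likely stop at those statements until real constraint names are filled in; the sketch only illustrates the verification harness.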