Skip to content

Commit

Permalink
Merge pull request #599 from edraj/soheyb-v3
Browse files Browse the repository at this point in the history
 re-structuring
  • Loading branch information
kefahi authored Jan 25, 2025
2 parents 2dc7563 + 45be7fb commit 68d9216
Show file tree
Hide file tree
Showing 63 changed files with 3,052 additions and 2,556 deletions.
4 changes: 3 additions & 1 deletion .github/workflows/backend-checks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,12 @@ jobs:
run: |
source $HOME/.venv/bin/activate
cd backend;
cd requirements
pip install -r requirements.txt
pip install -r test-requirements.txt
pip install -r plugins-requirements.txt
pip install -r extra-requirements.txt
cd ..
mkdir logs/
cp ~/login_creds.sh ./
cp ~/config.env ./
Expand All @@ -44,7 +46,7 @@ jobs:
dropdb --if-exists -h 127.0.0.1 -U dmart dmart
createdb -h 127.0.0.1 -U dmart dmart
alembic upgrade head
./json_to_db_migration.py
./migrate.py json_to_db
./reload.sh
./curl.sh
systemctl --user stop dmart
Expand Down
2 changes: 1 addition & 1 deletion backend/alembic.ini
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = postgresql+psycopg://dmart:xxxxx@localhost:5432/dmart
sqlalchemy.url = postgresql+psycopg://postgres:tenno1515@localhost:5432/dmart


[post_write_hooks]
Expand Down
2 changes: 1 addition & 1 deletion backend/alembic/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

from alembic import context

from utils.database.create_tables import \
from data_adapters.sql.create_tables import \
metadata

from utils.settings import settings
Expand Down
28 changes: 28 additions & 0 deletions backend/alembic/scripts/migration_f7a4949eed19.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
#!/usr/bin/env -S BACKEND_ENV=config.env python3
"""Post-upgrade data migration for revision f7a4949eed19.

Revision f7a4949eed19 adds a ``query_policies`` TEXT[] column (server default
``'{}'``) to every meta table.  This script back-fills that column: for each
record whose ``query_policies`` is still empty it recomputes the value from
the record's own metadata via ``generate_query_policies``.

Run once after ``alembic upgrade head``.
"""
from sqlmodel import select

from data_adapters.sql.adapter import SQLAdapter
from data_adapters.sql.create_tables import Entries, Permissions, Roles, Spaces, Users
from utils.query_policies_helper import generate_query_policies


print("[INFO] [dmart.script] Performing post-upgrade data migration (calc-query-policies)")
with SQLAdapter().get_session() as session:
    for table in [Entries, Permissions, Roles, Spaces, Users]:
        print(f"[INFO] [dmart.script] Processing table: {table}")
        records = session.exec(select(table)).all()
        print(f"[INFO] [dmart.script] Processing {len(records)} records")
        for record in records:
            # Only back-fill rows the server default left empty; rows that
            # already carry policies are preserved as-is.
            if not record.query_policies:
                record.query_policies = generate_query_policies(
                    space_name=record.space_name,
                    subpath=record.subpath,
                    resource_type=record.resource_type,
                    is_active=record.is_active,
                    owner_shortname=record.owner_shortname,
                    # Not every table model declares owner_group_shortname;
                    # getattr with a default replaces the hasattr/ternary dance.
                    owner_group_shortname=getattr(record, 'owner_group_shortname', ""),
                )
            session.add(record)
            print(".", end="\r")  # lightweight per-record progress indicator
        session.commit()
print("[INFO] [dmart.script] Post-upgrade data migration completed")
Original file line number Diff line number Diff line change
Expand Up @@ -25,49 +25,61 @@ def upgrade() -> None:
with op.batch_alter_table('attachments', schema=None) as batch_op:
batch_op.alter_column('created_at',
existing_type=postgresql.TIMESTAMP(),
server_default=sa.func.current_timestamp(),
nullable=False)
batch_op.alter_column('updated_at',
existing_type=postgresql.TIMESTAMP(),
server_default=sa.func.current_timestamp(),
nullable=False)

with op.batch_alter_table('entries', schema=None) as batch_op:
batch_op.alter_column('created_at',
existing_type=postgresql.TIMESTAMP(),
server_default=sa.func.current_timestamp(),
nullable=False)
batch_op.alter_column('updated_at',
existing_type=postgresql.TIMESTAMP(),
server_default=sa.func.current_timestamp(),
nullable=False)

with op.batch_alter_table('permissions', schema=None) as batch_op:
batch_op.alter_column('created_at',
existing_type=postgresql.TIMESTAMP(),
server_default=sa.func.current_timestamp(),
nullable=False)
batch_op.alter_column('updated_at',
existing_type=postgresql.TIMESTAMP(),
server_default=sa.func.current_timestamp(),
nullable=False)

with op.batch_alter_table('roles', schema=None) as batch_op:
batch_op.alter_column('created_at',
existing_type=postgresql.TIMESTAMP(),
server_default=sa.func.current_timestamp(),
nullable=False)
batch_op.alter_column('updated_at',
existing_type=postgresql.TIMESTAMP(),
server_default=sa.func.current_timestamp(),
nullable=False)

with op.batch_alter_table('spaces', schema=None) as batch_op:
batch_op.alter_column('created_at',
existing_type=postgresql.TIMESTAMP(),
server_default=sa.func.current_timestamp(),
nullable=False)
batch_op.alter_column('updated_at',
existing_type=postgresql.TIMESTAMP(),
server_default=sa.func.current_timestamp(),
nullable=False)

with op.batch_alter_table('users', schema=None) as batch_op:
batch_op.alter_column('created_at',
existing_type=postgresql.TIMESTAMP(),
server_default=sa.func.current_timestamp(),
nullable=False)
batch_op.alter_column('updated_at',
existing_type=postgresql.TIMESTAMP(),
server_default=sa.func.current_timestamp(),
nullable=False)

# ### end Alembic commands ###
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
"""adding query_policies to meta
Revision ID: f7a4949eed19
Revises: 848b623755a4
Create Date: 2025-01-22 11:37:26.347777
"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
import sqlmodel
import sqlmodel.sql.sqltypes
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'f7a4949eed19'
down_revision: Union[str, None] = '848b623755a4'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Add the ``query_policies`` column to all five meta tables.

    The column is a non-nullable TEXT[] with an empty-array server default
    (``'{}'``) so that pre-existing rows remain valid after the migration.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # The column definition is identical for every table, so apply it in a loop.
    meta_tables = ('entries', 'permissions', 'roles', 'spaces', 'users')
    for table_name in meta_tables:
        with op.batch_alter_table(table_name, schema=None) as batch_op:
            batch_op.add_column(
                sa.Column(
                    'query_policies',
                    postgresql.ARRAY(sa.TEXT()),
                    nullable=False,
                    server_default='{}',
                )
            )
    # ### end Alembic commands ###


def downgrade() -> None:
    """Drop the ``query_policies`` column from all five meta tables.

    Tables are processed in the reverse of the order used by ``upgrade()``.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    meta_tables = ('users', 'spaces', 'roles', 'permissions', 'entries')
    for table_name in meta_tables:
        with op.batch_alter_table(table_name, schema=None) as batch_op:
            batch_op.drop_column('query_policies')
    # ### end Alembic commands ###
29 changes: 5 additions & 24 deletions backend/api/managed/router.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,14 +53,12 @@
camel_case,
csv_file_to_json,
flatten_dict,
resolve_schema_references,
)
from utils.internal_error_code import InternalErrorCode
from utils.jwt import GetJWTToken, JWTBearer
from utils.plugin_manager import plugin_manager
from utils.router_helper import is_space_exist
from utils.settings import settings
from utils.spaces import initialize_spaces

router = APIRouter()

Expand Down Expand Up @@ -241,7 +239,7 @@ async def serve_space(
),
)

await initialize_spaces()
await db.initialize_spaces()

await access_control.load_permissions_and_roles()

Expand Down Expand Up @@ -371,7 +369,7 @@ async def update_state(
) -> api.Response:
await is_space_exist(space_name)

_user_roles = await access_control.get_user_roles(logged_in_user)
_user_roles = await db.get_user_roles(logged_in_user)
user_roles = _user_roles.keys()

await plugin_manager.before_action(
Expand Down Expand Up @@ -728,24 +726,7 @@ async def import_resources_from_csv(
buffer = StringIO(decoded)
csv_reader = csv.DictReader(buffer)

if settings.active_data_db == "file":
schema_path = (
db.payload_path(space_name, "schema", core.Schema)
/ f"{schema_shortname}.json"
)
with open(schema_path) as schema_file:
schema_content = json.load(schema_file)
schema_content = resolve_schema_references(schema_content)
else:
schema_content = await db.load(
space_name=space_name,
subpath="/schema",
shortname=schema_shortname,
class_type=core.Schema,
user_shortname=owner_shortname,
)
if schema_content and schema_content.payload and isinstance(schema_content.payload.body, dict):
schema_content = resolve_schema_references(schema_content.payload.body)
schema_content = await db.get_schema(space_name, schema_shortname, owner_shortname)

data_types_mapper: dict[str, Callable] = {
"integer": int,
Expand Down Expand Up @@ -942,7 +923,7 @@ async def get_entry_by_uuid(
retrieve_lock_status: bool = False,
logged_in_user=Depends(JWTBearer()),
):
return await repository.get_entry_by_var(
return await db.get_entry_by_var(
"uuid",
uuid,
logged_in_user,
Expand All @@ -960,7 +941,7 @@ async def get_entry_by_slug(
retrieve_lock_status: bool = False,
logged_in_user=Depends(JWTBearer()),
):
return await repository.get_entry_by_var(
return await db.get_entry_by_var(
"slug",
slug,
logged_in_user,
Expand Down
23 changes: 9 additions & 14 deletions backend/api/managed/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

from fastapi import status
from utils.generate_email import generate_email_from_template, generate_subject
from utils.custom_validations import validate_csv_with_schema, validate_jsonl_with_schema
from data_adapters.file.custom_validations import validate_csv_with_schema, validate_jsonl_with_schema
from utils.internal_error_code import InternalErrorCode
from utils.router_helper import is_space_exist
from utils.ticket_sys_utils import (
Expand All @@ -24,7 +24,6 @@
)
import sys
import json
from utils.jwt import remove_user_session
from utils.access_control import access_control
import utils.repository as repository
from utils.helpers import (
Expand All @@ -37,7 +36,6 @@
send_email,
send_sms,
)
from utils.redis_services import RedisServices
from languages.loader import languages
from data_adapters.adapter import data_adapter as db

Expand Down Expand Up @@ -523,7 +521,7 @@ async def serve_request_update_r_replace(request, owner_shortname: str):
record.attributes.get("is_active", None) is not None
):
if not record.attributes.get("is_active"):
await db.remove_sql_user_session(record.shortname)
await db.remove_user_session(record.shortname)

records.append(
resource_obj.to_record(
Expand Down Expand Up @@ -555,6 +553,7 @@ async def serve_request_update_r_replace(request, owner_shortname: str):
)
return records, failed_records


async def serve_request_patch(request, owner_shortname: str):
records: list[core.Record] = []
failed_records: list[dict] = []
Expand Down Expand Up @@ -719,7 +718,7 @@ async def serve_request_patch(request, owner_shortname: str):
isinstance(resource_obj, core.User) and
record.attributes.get("is_active") is False
):
await remove_user_session(record.shortname)
await db.remove_user_session(record.shortname)

records.append(
resource_obj.to_record(
Expand Down Expand Up @@ -751,6 +750,7 @@ async def serve_request_patch(request, owner_shortname: str):
)
return records, failed_records


async def serve_request_assign(request, owner_shortname: str):
records: list[core.Record] = []
failed_records: list[dict] = []
Expand Down Expand Up @@ -1419,6 +1419,7 @@ async def serve_space_update(request, record, owner_shortname: str):
)
return history_diff


async def serve_space_delete(request, record, owner_shortname: str):
if request.space_name == "management":
raise api.Exception(
Expand Down Expand Up @@ -1449,14 +1450,8 @@ async def serve_space_delete(request, record, owner_shortname: str):
),
)
await repository.delete_space(request.space_name, record, owner_shortname)
if settings.active_data_db == 'file':
async with RedisServices() as redis_services:
x = await redis_services.list_indices()
if x:
indices: list[str] = x
for index in indices:
if index.startswith(f"{request.space_name}:"):
await redis_services.drop_index(index, True)
await db.drop_index(request.space_name)



async def data_asset_attachments_handler(query, attachments):
Expand Down Expand Up @@ -1608,7 +1603,7 @@ async def import_resources_from_csv_handler(


async def create_or_update_resource_with_payload_handler(
record, owner_shortname, space_name, payload_file, payload_filename, checksum, sha, resource_content_type
record, owner_shortname, space_name, payload_file, payload_filename, checksum, sha, resource_content_type
):
if record.resource_type == ResourceType.ticket:
record = await set_init_state_from_record(
Expand Down
4 changes: 2 additions & 2 deletions backend/api/public/router.py
Original file line number Diff line number Diff line change
Expand Up @@ -552,7 +552,7 @@ async def get_entry_by_uuid(
retrieve_attachments: bool = False,
retrieve_lock_status: bool = False
):
return await repository.get_entry_by_var(
return await db.get_entry_by_var(
"uuid",
uuid,
"anonymous",
Expand All @@ -569,7 +569,7 @@ async def get_entry_by_slug(
retrieve_attachments: bool = False,
retrieve_lock_status: bool = False,
):
return await repository.get_entry_by_var(
return await db.get_entry_by_var(
"slug",
slug,
"anonymous",
Expand Down
Loading

0 comments on commit 68d9216

Please sign in to comment.