Compare revisions (stackspin/dashboard)

Showing 2196 additions and 750 deletions
"""
List of functions to get data from Flux Kustomizations and Helmreleases
"""
import crypt
import functools
import secrets
import string
import threading
import jinja2
import yaml
-from kubernetes import client, config
+from kubernetes import client, config, watch
from kubernetes.config.incluster_config import InClusterConfigLoader
from kubernetes.client import api_client
from kubernetes.client.exceptions import ApiException
from kubernetes.utils import create_from_yaml
from kubernetes.utils.create_from_yaml import FailToCreateError
from flask import current_app
-from config import LOAD_INCLUSTER_CONFIG
+from config import KUBECONFIG, LOAD_INCLUSTER_CONFIG, TELEPRESENCE
# Load the kube config once
#
# By default this loads whatever we define in the `KUBECONFIG` env variable,
# otherwise loads the config from default locations, similar to what kubectl
# does.
-if LOAD_INCLUSTER_CONFIG:
+if TELEPRESENCE:
+    print(f"token_filename: {KUBECONFIG}/token")
+    import os
+    if os.path.isfile(f"{KUBECONFIG}/token"):
+        print("token_filename exists")
+    else:
+        print("token_filename does not exist")
+    InClusterConfigLoader(
+        token_filename=f"{KUBECONFIG}/token",
+        cert_filename=f"{KUBECONFIG}/ca.crt"
+    ).load_and_set()
+elif LOAD_INCLUSTER_CONFIG:
    config.load_incluster_config()
else:
    config.load_kube_config()
@@ -53,7 +66,7 @@ def create_variables_secret(app_slug, variables_filepath):
    elif current_secret_data.keys() != new_secret_dict["data"].keys():
        # Update current secret with new keys
        update_secret = True
-        current_app.logger.info(
+        current_app.logger.debug(
            f"Secret {secret_name} in namespace {secret_namespace}"
            " already exists. Merging..."
        )
@@ -61,12 +74,12 @@ def create_variables_secret(app_slug, variables_filepath):
        new_secret_dict["data"] |= current_secret_data
    else:
        # Do Nothing
-        current_app.logger.info(
+        current_app.logger.debug(
            f"Secret {secret_name} in namespace {secret_namespace}"
            " is already in a good state, doing nothing."
        )
        return True
-    current_app.logger.info(
+    current_app.logger.debug(
        f"Storing secret {secret_name} in namespace"
        f" {secret_namespace} in cluster."
    )
@@ -170,9 +183,9 @@ def store_kubernetes_secret(secret_dict, namespace, update=False):
            namespace=namespace
        )
    except FailToCreateError as ex:
-        current_app.logger.info(f"Secret not created because of exception {ex}")
+        current_app.logger.warning(f"Secret not created because of exception {ex}")
        raise ex
-    current_app.logger.info(f"Secret {verb} with api response: {api_response}")
+    current_app.logger.debug(f"Secret {verb} with api response: {api_response}")
def store_kustomization(kustomization_template_filepath, app_slug):
@@ -198,7 +211,7 @@ def store_kustomization(kustomization_template_filepath, app_slug):
            plural="kustomizations",
            body=kustomization_dict)
    except FailToCreateError as ex:
-        current_app.logger.info(
+        current_app.logger.warning(
            f"Could not create {app_slug} Kustomization because of exception {ex}")
        raise ex
    current_app.logger.debug(f"Kustomization created with api response: {api_response}")
@@ -230,7 +243,7 @@ def delete_kustomization(kustomization_name):
            name=kustomization_name,
            body=body)
    except ApiException as ex:
-        current_app.logger.info(
+        current_app.logger.warning(
            f"Could not delete {kustomization_name} Kustomization because of exception {ex}")
        raise ex
    current_app.logger.debug(f"Kustomization deleted with api response: {api_response}")
@@ -295,20 +308,6 @@ def generate_password(length):
    return password
-def gen_htpasswd(user, password):
-    """
-    Generate htpasswd entry for user with password.
-    :param user: Username used in the htpasswd entry
-    :type user: string
-    :param password: Password for the user, will get encrypted.
-    :type password: string
-    :return: htpasswd line entry
-    :rtype: string
-    """
-    return f"{user}:{crypt.crypt(password, crypt.mksalt(crypt.METHOD_SHA512))}"
def get_all_kustomizations(namespace='flux-system'):
    """
    Returns all flux kustomizations in a namespace.
@@ -382,3 +381,111 @@ def get_kustomization(name, namespace='flux-system'):
        # Raise all non-404 errors
        raise error
    return resource
def get_gitrepo(name, namespace='flux-system'):
    """
    Returns all info on a Flux GitRepo.

    :param name: Name of the gitrepo
    :type name: string
    :param namespace: Namespace of the gitrepo
    :type namespace: string
    :return: gitrepo as returned by the API
    :rtype: dict
    """
    api = client.CustomObjectsApi()
    try:
        resource = api.get_namespaced_custom_object(
            group="source.toolkit.fluxcd.io",
            version="v1beta2",
            name=name,
            namespace=namespace,
            plural="gitrepositories",
        )
    except client.exceptions.ApiException as error:
        if error.status == 404:
            return None
        # Raise all non-404 errors
        raise error
    return resource
def debounce(timeout: float):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            wrapper.func.cancel()
            wrapper.func = threading.Timer(timeout, func, args, kwargs)
            wrapper.func.start()
        wrapper.func = threading.Timer(timeout, lambda: None)
        return wrapper
    return decorator
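# A minimal usage sketch (illustrative, not part of the file): the dummy Timer
# initially assigned to `wrapper.func` only exists so the first call's
# `cancel()` has something to cancel; each call replaces the pending Timer, so
# `func` runs once, `timeout` seconds after the most recent call.
#
#     @debounce(2.0)
#     def save():
#         print("saved")
#
#     save(); save(); save()  # prints "saved" once, about 2 seconds later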
def watch_dashboard_config(app, reload):
    # Number of seconds to wait before reloading in case more secrets show up.
    # In particular this prevents us from reloading once for every
    # secret that exists at startup in succession.
    debounce_timeout = 1

    @debounce(debounce_timeout)
    def debounced_reload():
        reload()

    w = watch.Watch()
    api_instance = client.CoreV1Api(api_client.ApiClient())

    def watch_scim_secrets():
        with app.app_context():
            for event in w.stream(
                api_instance.list_namespaced_secret,
                'flux-system',
                label_selector="stackspin.net/scim-config=1",
                watch=True
            ):
                current_app.logger.debug(f"{event['type']} SCIM config secret: {event['object'].metadata.name}")
                debounced_reload()
    threading.Thread(target=watch_scim_secrets).start()

    def watch_dashboard_configmaps():
        with app.app_context():
            for event in w.stream(
                api_instance.list_namespaced_config_map,
                'flux-system',
                label_selector="stackspin.net/dashboard-config=1",
                watch=True
            ):
                current_app.logger.debug(f"{event['type']} dashboard config configmap: {event['object'].metadata.name}")
                debounced_reload()
    threading.Thread(target=watch_dashboard_configmaps).start()
def check_condition(status):
    """
    Returns a tuple that has true/false for readiness and a message.

    Ready, in this case, means that the condition's type == "Ready" and its
    status == "True". If the condition type "Ready" does not occur, the
    status is interpreted as not ready.

    The message that is returned is the message that comes with the
    condition with type "Ready".

    :param status: Kubernetes resource's "status" object.
    :type status: dict
    """
    if status["observedGeneration"] == -1:
        return False, "Kustomization is not yet seen by controller"
    for condition in status["conditions"]:
        if condition["type"] == "Ready":
            return condition["status"] == "True", condition["message"]
    return False, "Condition with type 'Ready' not found"
def wait_kustomization_ready(app):
    w = watch.Watch()
    api_instance = client.CustomObjectsApi()
    for event in w.stream(api_instance.list_namespaced_custom_object, 'kustomize.toolkit.fluxcd.io', 'v1', 'flux-system', 'kustomizations'):
        ks = event['object']
        if ks['metadata']['name'] != app.slug:
            # We're currently only interested in the `app` app.
            continue
        ks_ready, ks_message = check_condition(ks['status'])
        if not ks_ready:
            # There is some data on the app kustomization, but it's not ready
            # yet.
            continue
        print(f"Kustomization {app.slug} is now ready.")
        return
This diff is collapsed.
import functools
import threading

from posix_ipc import MessageQueue, O_CREAT, BusyError

# Signal to provisioning loop that we want to provision now.
provisioning_queue = MessageQueue('/stackspin-dashboard-provision-queue', O_CREAT)


def debounce(timeout: float):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            wrapper.func.cancel()
            wrapper.func = threading.Timer(timeout, func, args, kwargs)
            wrapper.func.start()
        wrapper.func = threading.Timer(timeout, lambda: None)
        return wrapper
    return decorator


@debounce(1)
def request_provision():
    try:
        provisioning_queue.send("provision", timeout=0)
    except BusyError:
        # If we can't signal for some reason (queue limit reached), silently
        # fail.
        pass


def wait_provision():
    # We first wait until there's any message in the queue.
    provisioning_queue.receive()
    # After that, we check if there are any more messages, to prevent a couple
    # of (long) provisioning runs from being done back-to-back in case of
    # multiple provisioning requests. Note however that if a request comes in
    # during the middle of a provisioning run, we still do another one right
    # after to make sure we propagate the latest changes right away.
    try:
        while True:
            # We read with zero timeout, so we get an exception right away if
            # the queue is empty.
            provisioning_queue.receive(timeout=0)
    except BusyError:
        pass
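# A minimal sketch (not part of the module) of how the two helpers above pair
# up in a worker; `provision_all` is a hypothetical stand-in for whatever
# performs the actual provisioning run.
#
#     def provisioning_loop():
#         while True:
#             wait_provision()   # block until request_provision() has signalled
#             provision_all()    # hypothetical: do one full provisioning run
#
#     # Elsewhere, e.g. in a request handler; calls within the 1-second
#     # debounce window collapse into a single queue message.
#     request_provision()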
import logging

from sqlalchemy import exc, text

from database import db

# We "reset" the alembic version history for Stackspin 2.2, to clean up our old
# mess of database migrations a bit, and in particular to make it easier to
# move the source of truth for some of the data (the list of apps) out of the
# database and into configmaps. This function deals with older clusters that
# have to be led through this transition. To determine if we need to do
# anything, we look at the `alembic_version` value in the database. If it's a
# legacy version, we delete the table so the alembic migration will view the
# database as "empty" and perform all new migrations on it. The new initial
# migration has to handle that case specially, by checking whether any tables
# already exist, and doing nothing in that case.


def reset():
    logging.info("Checking if alembic version needs to be reset.")
    version = None
    try:
        result = db.session.execute(text("select version_num from alembic_version"))
        for row in result:
            version = row[0]
    except exc.ProgrammingError:
        # We assume this means the alembic_version table doesn't exist, which
        # is expected for new clusters.
        pass
    logging.info(f"alembic version: {version}")
    legacy_versions = ["fc0892d07771", "3fa0c38ea1ac", "e08df0bef76f", "b514cca2d47b", "5f462d2d9d25", "27761560bbcb"]
    if version in legacy_versions:
        logging.info("This is an old version: resetting.")
        db.session.execute(text("drop table alembic_version"))
    else:
        logging.info("This is not a known legacy version: not resetting.")
@@ -8,43 +8,3 @@
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
@@ -13,7 +13,9 @@ config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
-fileConfig(config.config_file_name)
+# We commented this out, because we want to configure logging in the app
+# itself, not here.
+# fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# add your model's MetaData object here
"""Extend SCIM support to dynamic apps
Revision ID: 267d280db490
Revises: 825262488cd9
Create Date: 2024-04-12 11:49:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '267d280db490'
down_revision = '825262488cd9'
branch_labels = None
depends_on = None
def upgrade():
op.add_column(
"app",
sa.Column(
"scim_url",
sa.Unicode(length=1024),
nullable=True
),
)
op.add_column(
"app",
sa.Column(
"scim_token",
sa.Unicode(length=1024),
nullable=True
),
)
op.add_column(
"app",
sa.Column(
"scim_group_support",
sa.Boolean(),
server_default='0',
nullable=False
),
)
# ID of user in app for SCIM purposes. The dashboard needs this so it can
# construct the SCIM URL to the app identifying the user.
op.add_column(
"app_role",
sa.Column(
"scim_id",
sa.Unicode(length=256),
nullable=True
),
)
op.create_index(
"app_role__app_id__scim_id",
"app_role",
["app_id", "scim_id"],
unique=False
)
def downgrade():
op.drop_column("app", "scim_url")
op.drop_column("app", "scim_token")
op.drop_column("app", "scim_group_support")
op.drop_index("app_role__app_id__scim_id", "app_role")
op.drop_column("app_role", "scim_id")
"""add-velero-as-app
Revision ID: 3fa0c38ea1ac
Revises: e08df0bef76f
Create Date: 2022-10-13 09:40:44.290319
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '3fa0c38ea1ac'
down_revision = 'e08df0bef76f'
branch_labels = None
depends_on = None
def upgrade():
# Add monitoring app
op.execute(f'INSERT IGNORE INTO app (`name`, `slug`) VALUES ("Velero","velero")')
def downgrade():
pass
"""convert role column to table
Revision ID: 5f462d2d9d25
Revises: 27761560bbcb
Create Date: 2022-04-13 15:00:27.182898
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "5f462d2d9d25"
down_revision = "27761560bbcb"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
role_table = op.create_table(
"role",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(length=64), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.add_column("app_role", sa.Column("role_id", sa.Integer(), nullable=True))
op.create_foreign_key(None, "app_role", "role", ["role_id"], ["id"])
# ### end Alembic commands ###
# Insert default role "admin" as ID 1
op.execute(sa.insert(role_table).values(id=1,name="admin"))
# Set role_id 1 to all current "admin" users
op.execute("UPDATE app_role SET role_id = 1 WHERE role = 'admin'")
# Drop old column
op.drop_column("app_role", "role")
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column(
"app_role", sa.Column("role", mysql.VARCHAR(length=64), nullable=True)
)
op.drop_constraint(None, "app_role", type_="foreignkey")
op.drop_column("app_role", "role_id")
op.drop_table("role")
# ### end Alembic commands ###
"""Initial version after history reset: Create tables and fill the "role" one
Revision ID: 7d27395c892a
Revises:
Create Date: 2023-01-18 14:48:23.996261
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision = '7d27395c892a'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
conn = op.get_bind()
inspector = Inspector.from_engine(conn)
tables = inspector.get_table_names()
if "app" not in tables:
op.create_table(
"app",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(length=64), nullable=False),
sa.Column("slug", sa.String(length=64), nullable=False),
sa.Column("external", sa.Boolean(), server_default='0', nullable=False),
sa.Column("url", sa.String(length=128), nullable=True),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("slug"),
)
if "role" not in tables:
op.create_table(
"role",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(length=64), nullable=True),
sa.PrimaryKeyConstraint("id")
)
op.execute("INSERT INTO `role` (id, `name`) VALUES (1, 'admin')")
op.execute("INSERT INTO `role` (id, `name`) VALUES (2, 'user')")
op.execute("INSERT INTO `role` (id, `name`) VALUES (3, 'no access')")
if "app_role" not in tables:
op.create_table(
"app_role",
sa.Column("user_id", sa.String(length=64), nullable=False),
sa.Column("app_id", sa.Integer(), nullable=False),
sa.Column("role_id", sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint("user_id", "app_id"),
sa.ForeignKeyConstraint(["app_id"],["app.id"]),
sa.ForeignKeyConstraint(["role_id"],["role.id"])
)
if "oauthclient_app" not in tables:
op.create_table('oauthclient_app',
sa.Column('oauthclient_id', mysql.VARCHAR(length=64), nullable=False),
sa.Column('app_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
sa.PrimaryKeyConstraint('oauthclient_id'),
sa.ForeignKeyConstraint(['app_id'], ['app.id']),
mysql_default_charset='utf8mb3',
mysql_engine='InnoDB'
)
def downgrade():
op.drop_table("oauthclient_app")
op.drop_table("app_role")
op.drop_table("role")
op.drop_table("app")
"""Add SCIM support for user provisioning
Revision ID: 825262488cd9
Revises: fdb28e81f5c2
Create Date: 2023-03-08 10:50:00
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from sqlalchemy.engine.reflection import Inspector
from helpers.provision import ProvisionStatus
# revision identifiers, used by Alembic.
revision = '825262488cd9'
down_revision = 'fdb28e81f5c2'
branch_labels = None
depends_on = None
def upgrade():
op.add_column(
"app_role",
sa.Column(
"provision_status",
sa.Enum(
ProvisionStatus,
native_enum=False,
length=32,
values_callable=lambda _: [str(member.value) for member in ProvisionStatus]
),
nullable=False,
default=ProvisionStatus.SyncNeeded,
server_default=ProvisionStatus.SyncNeeded.value
),
)
op.add_column(
"app_role",
sa.Column(
"last_provision_attempt",
sa.DateTime,
nullable=True
),
)
op.add_column(
"app_role",
sa.Column(
"last_provision_message",
sa.Unicode(length=256),
nullable=True
),
)
def downgrade():
op.drop_column("app_role", "provision_status")
op.drop_column("app_role", "last_provision_attempt")
op.drop_column("app_role", "last_provision_message")
"""Extend SCIM support to include some attributes during provisioning only when
they are changed, or the user is first created in the app.
Revision ID: 9ee5a7d65fa7
Revises: 267d280db490
Create Date: 2024-06-04 15:39:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '9ee5a7d65fa7'
down_revision = '267d280db490'
branch_labels = None
depends_on = None
def upgrade():
# An entry in this table records that a certain user attribute needs to be
# set in a certain app via SCIM.
op.create_table(
"scim_attribute",
sa.Column("user_id", sa.String(length=64), nullable=False),
sa.Column("app_id", sa.Integer(), nullable=False),
sa.Column("attribute", sa.String(length=64), nullable=False),
sa.PrimaryKeyConstraint("user_id", "app_id", "attribute"),
sa.ForeignKeyConstraint(["app_id"],["app.id"]),
)
def downgrade():
op.drop_table("scim_attribute")
"""update apps and add 'user' and 'no access' role
Revision ID: b514cca2d47b
Revises: 5f462d2d9d25
Create Date: 2022-06-08 17:24:51.305129
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b514cca2d47b'
down_revision = '5f462d2d9d25'
branch_labels = None
depends_on = None
def upgrade():
# ### end Alembic commands ###
# Check and update app table in DB
apps = {
"dashboard": "Dashboard",
"wekan": "Wekan",
"wordpress": "WordPress",
"nextcloud": "Nextcloud",
"zulip": "Zulip"
}
# app table
app_table = sa.table('app', sa.column('id', sa.Integer), sa.column(
'name', sa.String), sa.column('slug', sa.String))
existing_apps = op.get_bind().execute(app_table.select()).fetchall()
existing_app_slugs = [app['slug'] for app in existing_apps]
for app_slug in apps.keys():
if app_slug in existing_app_slugs:
op.execute(f'UPDATE app SET `name` = "{apps.get(app_slug)}" WHERE slug = "{app_slug}"')
else:
op.execute(f'INSERT INTO app (`name`, slug) VALUES ("{apps.get(app_slug)}","{app_slug}")')
# Fetch all apps including newly created
existing_apps = op.get_bind().execute(app_table.select()).fetchall()
# Insert role "user" as ID 2
op.execute("INSERT INTO `role` (id, `name`) VALUES (2, 'user')")
# Insert role "no access" as ID 3
op.execute("INSERT INTO `role` (id, `name`) VALUES (3, 'no access')")
# Set role_id 2 to all current "user" users which by have NULL role ID
op.execute("UPDATE app_role SET role_id = 2 WHERE role_id IS NULL")
# Add 'no access' role for all users that don't have any roles for specific apps
app_roles_table = sa.table('app_role', sa.column('user_id', sa.String), sa.column(
'app_id', sa.Integer), sa.column('role_id', sa.Integer))
app_ids = [app['id'] for app in existing_apps]
app_roles = op.get_bind().execute(app_roles_table.select()).fetchall()
user_ids = set([app_role['user_id'] for app_role in app_roles])
for user_id in user_ids:
existing_user_app_ids = [x['app_id'] for x in list(filter(lambda role: role['user_id'] == user_id, app_roles))]
missing_user_app_ids = [x for x in app_ids if x not in existing_user_app_ids]
if len(missing_user_app_ids) > 0:
values = [{'user_id': user_id, 'app_id': app_id, 'role_id': 3} for app_id in missing_user_app_ids]
op.bulk_insert(app_roles_table, values)
def downgrade():
# Revert all users role_id to NULL where role is 'user'
op.execute("UPDATE app_role SET role_id = NULL WHERE role_id = 2")
# Delete role 'user' from roles
op.execute("DELETE FROM `role` WHERE id = 2")
# Delete all user app roles where role is 'no access' with role_id 3
op.execute("DELETE FROM app_role WHERE role_id = 3")
# Delete role 'no access' from roles
op.execute("DELETE FROM `role` WHERE id = 3")
"""Add fields for external apps
Revision ID: e08df0bef76f
Revises: b514cca2d47b
Create Date: 2022-09-23 16:38:06.557307
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e08df0bef76f'
down_revision = 'b514cca2d47b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('app', sa.Column('external', sa.Boolean(), server_default='0', nullable=False))
op.add_column('app', sa.Column('url', sa.String(length=128), nullable=True))
# ### end Alembic commands ###
# Add monitoring app
op.execute(f'INSERT IGNORE INTO app (`name`, `slug`) VALUES ("Monitoring","monitoring")')
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('app', 'url')
op.drop_column('app', 'external')
# ### end Alembic commands ###
"""empty message
Revision ID: 27761560bbcb
Revises:
Create Date: 2021-12-21 06:07:14.857940
"""Add tags for user management.
Revision ID: fdb28e81f5c2
Revises: 7d27395c892a
Create Date: 2023-11-21 14:55:00
"""
import sqlalchemy as sa
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision = "27761560bbcb"
down_revision = None
revision = 'fdb28e81f5c2'
down_revision = '7d27395c892a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"app",
"tag",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(length=64), nullable=True),
sa.Column("slug", sa.String(length=64), nullable=True),
sa.Column("name", sa.String(length=256), nullable=False),
sa.Column("colour", sa.String(length=64), nullable=True),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("slug"),
)
op.create_table(
"app_role",
"tag_user",
sa.Column("user_id", sa.String(length=64), nullable=False),
sa.Column("app_id", sa.Integer(), nullable=False),
sa.Column("role", sa.String(length=64), nullable=True),
sa.ForeignKeyConstraint(
["app_id"],
["app.id"],
),
sa.PrimaryKeyConstraint("user_id", "app_id"),
sa.Column("tag_id", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("user_id", "tag_id"),
sa.ForeignKeyConstraint(["tag_id"],["tag.id"]),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("app_role")
op.drop_table("app")
# ### end Alembic commands ###
op.drop_table("tag_user")
op.drop_table("tag")
APScheduler==3.11.0
# CLI creation kit
click==8.1.8
cryptography==44.0.2
Flask==3.1.0
Flask-Cors==5.0.1
flask-expects-json==1.7.0
Flask-JWT-Extended==4.7.1
Flask-Migrate==4.1.0
Flask-SQLAlchemy==3.1.1
gunicorn==23.0.0
jsonschema==4.23.0
# Templating kustomizations as part of app installation.
jinja2-base64-filters==0.1.4
kubernetes==32.0.1
pymysql==1.1.1
NamedAtomicLock==1.1.3
ory-kratos-client==1.3.8
ory-hydra-client==2.2.0
pip-install==1.3.5
posix-ipc==1.1.1
PyYAML==6.0.2
regex==2024.11.6
requests==2.32.3
requests-oauthlib==2.0.0
-attrs==21.4.0
-black==22.1.0
-certifi==2021.10.8
-cffi==1.15.0
-charset-normalizer==2.0.12
-click==8.0.4
-cryptography==36.0.2
-Flask==2.0.3
-Flask-Cors==3.0.10
#
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile --no-emit-index-url --output-file=requirements.txt --strip-extras requirements.in
#
alembic==1.15.1
# via flask-migrate
annotated-types==0.7.0
# via pydantic
apscheduler==3.11.0
# via -r requirements.in
attrs==25.3.0
# via
# jsonschema
# referencing
blinker==1.9.0
# via flask
cachetools==5.5.2
# via google-auth
certifi==2025.1.31
# via
# kubernetes
# requests
cffi==1.17.1
# via cryptography
charset-normalizer==3.4.1
# via requests
click==8.1.8
# via
# -r requirements.in
# flask
cryptography==44.0.2
# via -r requirements.in
durationpy==0.9
# via kubernetes
flask==3.1.0
# via
# -r requirements.in
# flask-cors
# flask-expects-json
# flask-jwt-extended
# flask-migrate
# flask-sqlalchemy
flask-cors==5.0.1
# via -r requirements.in
flask-expects-json==1.7.0
-Flask-JWT-Extended==4.3.1
-gunicorn==20.1.0
-idna==3.3
-install==1.3.5
-itsdangerous==2.1.1
-jsonschema==4.4.0
-Jinja2==3.0.3
# via -r requirements.in
flask-jwt-extended==4.7.1
# via -r requirements.in
flask-migrate==4.1.0
# via -r requirements.in
flask-sqlalchemy==3.1.1
# via
# -r requirements.in
# flask-migrate
google-auth==2.38.0
# via kubernetes
greenlet==3.1.1
# via sqlalchemy
gunicorn==23.0.0
# via -r requirements.in
idna==3.10
# via requests
itsdangerous==2.2.0
# via flask
jinja2==3.1.6
# via
# flask
# jinja2-base64-filters
jinja2-base64-filters==0.1.4
-kubernetes==24.2.0
-MarkupSafe==2.1.1
-mypy-extensions==0.4.3
-oauthlib==3.2.0
-pathspec==0.9.0
-platformdirs==2.5.1
-pycparser==2.21
-PyJWT==2.3.0
-pyrsistent==0.18.1
-regex==2022.3.15
-requests==2.27.1
-requests-oauthlib==1.3.1
-six==1.16.0
-tomli==1.2.3
-typing-extensions==4.1.1
-urllib3==1.26.8
-Werkzeug==2.0.3
-ory-kratos-client==0.9.0a2
-pymysql
-Flask-SQLAlchemy
-hydra-client
-Flask-Migrate
# via -r requirements.in
jsonschema==4.23.0
# via
# -r requirements.in
# flask-expects-json
jsonschema-specifications==2024.10.1
# via jsonschema
kubernetes==32.0.1
# via -r requirements.in
mako==1.3.9
# via alembic
markupsafe==3.0.2
# via
# jinja2
# mako
# werkzeug
namedatomiclock==1.1.3
# via -r requirements.in
oauthlib==3.2.2
# via
# kubernetes
# requests-oauthlib
ory-hydra-client==2.2.0
# via -r requirements.in
ory-kratos-client==1.3.8
# via -r requirements.in
packaging==24.2
# via gunicorn
pip-install==1.3.5
# via -r requirements.in
posix-ipc==1.1.1
# via -r requirements.in
pyasn1==0.6.1
# via
# pyasn1-modules
# rsa
pyasn1-modules==0.4.1
# via google-auth
pycparser==2.22
# via cffi
pydantic==2.10.6
# via ory-kratos-client
pydantic-core==2.27.2
# via pydantic
pyjwt==2.10.1
# via flask-jwt-extended
pymysql==1.1.1
# via -r requirements.in
python-dateutil==2.9.0.post0
# via
# kubernetes
# ory-hydra-client
# ory-kratos-client
pyyaml==6.0.2
# via
# -r requirements.in
# kubernetes
referencing==0.36.2
# via
# jsonschema
# jsonschema-specifications
regex==2024.11.6
# via -r requirements.in
requests==2.32.3
# via
# -r requirements.in
# kubernetes
# requests-oauthlib
requests-oauthlib==2.0.0
# via
# -r requirements.in
# kubernetes
rpds-py==0.23.1
# via
# jsonschema
# referencing
rsa==4.9
# via google-auth
six==1.17.0
# via
# kubernetes
# python-dateutil
sqlalchemy==2.0.39
# via
# alembic
# flask-sqlalchemy
typing-extensions==4.12.2
# via
# alembic
# ory-kratos-client
# pydantic
# pydantic-core
# referencing
# sqlalchemy
tzlocal==5.3.1
# via apscheduler
urllib3==2.3.0
# via
# kubernetes
# ory-hydra-client
# ory-kratos-client
# requests
websocket-client==1.8.0
# via kubernetes
werkzeug==3.1.3
# via
# flask
# flask-cors
# flask-jwt-extended
This `web` directory contains the authentication frontend components.
It uses Tailwind for CSS; when making UI changes, open a terminal in the `web` directory and run
`npx tailwindcss -i ./static/src/input.css -o ./static/css/main.css --watch`
This diff is collapsed.
This diff is collapsed.