Initial commit
This commit is contained in:
commit
b9cc670ef4
491
.backup/seed.sql
Normal file
491
.backup/seed.sql
Normal file
@ -0,0 +1,491 @@
|
|||||||
|
--[ Database Schema Version ]--
|
||||||
|
-- Version: 1.0.0
|
||||||
|
-- Last Updated: 2024-02-24
|
||||||
|
-- Description: Initial schema setup for ClassConcepts
|
||||||
|
-- Dependencies: auth.users (Supabase Auth)
|
||||||
|
|
||||||
|
--[ Validation ]--
|
||||||
|
do $$
declare
    required_schema text;
begin
    -- uuid-ossp supplies uuid_generate_v4(), used for every surrogate key below.
    if not exists (select 1 from pg_extension where extname = 'uuid-ossp') then
        raise exception 'Required extension uuid-ossp is not installed';
    end if;

    -- Both Supabase-managed schemas must already exist before seeding.
    foreach required_schema in array array['auth', 'storage'] loop
        if not exists (
            select 1
            from information_schema.schemata
            where schema_name = required_schema
        ) then
            raise exception 'Required % schema is not available', required_schema;
        end if;
    end loop;
end $$;
|
||||||
|
|
||||||
|
--[ 1. Extensions ]--
-- uuid-ossp provides uuid_generate_v4(), the default for every surrogate primary key below.
create extension if not exists "uuid-ossp";

-- Create rpc schema if it doesn't exist
-- Holds thin wrapper functions exposed over the REST API (see section 8).
create schema if not exists rpc;
-- usage only; execute is granted per-function further down
grant usage on schema rpc to anon, authenticated;
|
||||||
|
|
||||||
|
-- Create exec_sql function for admin operations
-- WARNING: executes arbitrary caller-supplied SQL with the privileges of the
-- function OWNER (security definer). Postgres grants EXECUTE on new functions
-- to PUBLIC by default, which would let ANY role (including anon) run
-- arbitrary SQL as the owner — the revoke/grant below closes that hole.
create or replace function exec_sql(query text)
returns void as $$
begin
    -- Runs the string verbatim; no validation is possible here by design.
    execute query;
end;
$$ language plpgsql security definer;

-- Lock the function down to the service role only.
revoke all on function exec_sql(text) from public;
grant execute on function exec_sql(text) to service_role;
|
||||||
|
|
||||||
|
-- Create updated_at trigger function
-- Generic BEFORE UPDATE helper: stamps updated_at with the current UTC time
-- on every row update so writers never need to set it themselves.
create or replace function public.handle_updated_at()
returns trigger as $$
begin
    new.updated_at := timezone('utc'::text, now());
    return new;
end;
$$ language plpgsql security definer;
|
||||||
|
|
||||||
|
--[ 5. Core Tables ]--
-- Base user profiles
-- One row per auth.users row; created automatically by handle_new_user() (section 9).
create table if not exists public.profiles (
    -- Mirrors auth.users.id; cascade keeps profiles in lock-step with auth deletions.
    id uuid primary key references auth.users(id) on delete cascade,
    email text not null unique,
    user_type text not null check (user_type in ('admin', 'email_teacher', 'email_student')),
    username text not null unique,
    display_name text,
    -- Free-form per-user extension data.
    metadata jsonb default '{}'::jsonb,
    -- NOTE(review): nothing in this file ever writes last_login — confirm a caller sets it.
    last_login timestamp with time zone,
    created_at timestamp with time zone default timezone('utc'::text, now()),
    -- Maintained by the handle_profiles_updated_at trigger (section 11).
    updated_at timestamp with time zone default timezone('utc'::text, now())
);
comment on table public.profiles is 'User profiles linked to Supabase auth.users';
comment on column public.profiles.user_type is 'Type of user: admin, teacher, or student';
|
||||||
|
|
||||||
|
-- Institute import data
-- Staging table for raw school/institute records imported from an external
-- dataset. NOTE(review): field names (urn, la_code, ofsted_last_inspection,
-- percentage_fsm, ...) match the UK DfE "Get Information about Schools"
-- extract — confirm the actual source before documenting it as such.
create table if not exists public.institute_imports (
    id uuid primary key default uuid_generate_v4(),
    -- Unique Reference Number from the source dataset; natural key of a school.
    urn text unique,
    establishment_name text not null,
    -- Local authority identifiers
    la_code text,
    la_name text,
    establishment_number text,
    establishment_type text,
    establishment_type_group text,
    establishment_status text,
    reason_establishment_opened text,
    open_date date,
    reason_establishment_closed text,
    close_date date,
    phase_of_education text,
    -- Statutory age range of pupils
    statutory_low_age integer,
    statutory_high_age integer,
    boarders text,
    nursery_provision text,
    official_sixth_form text,
    gender text,
    religious_character text,
    religious_ethos text,
    diocese text,
    admissions_policy text,
    school_capacity integer,
    special_classes text,
    -- Census snapshot figures
    census_date date,
    number_of_pupils integer,
    number_of_boys integer,
    number_of_girls integer,
    -- Percentage of pupils eligible for free school meals (0.00–100.00)
    percentage_fsm numeric(5,2),
    -- Trust / sponsor / federation membership
    trust_school_flag text,
    trusts_name text,
    school_sponsor_flag text,
    school_sponsors_name text,
    federation_flag text,
    federations_name text,
    ukprn text,
    fehe_identifier text,
    further_education_type text,
    ofsted_last_inspection date,
    last_changed_date date,
    -- Postal address
    street text,
    locality text,
    address3 text,
    town text,
    county text,
    postcode text,
    school_website text,
    telephone_num text,
    -- Head teacher contact details
    head_title text,
    head_first_name text,
    head_last_name text,
    head_preferred_job_title text,
    -- Geography / administrative codes
    gssla_code text,
    parliamentary_constituency text,
    urban_rural text,
    rsc_region text,
    country text,
    uprn text,
    -- SEN (special educational needs) provision
    sen_stat boolean,
    sen_no_stat boolean,
    sen_unit_on_roll integer,
    sen_unit_capacity integer,
    resourced_provision_on_roll integer,
    resourced_provision_capacity integer,
    metadata jsonb default '{}'::jsonb,
    imported_at timestamp with time zone default timezone('utc'::text, now()),
    -- NOTE(review): no updated_at trigger is created for this table in section 11.
    updated_at timestamp with time zone default timezone('utc'::text, now())
);
comment on table public.institute_imports is 'Raw institute data imported from external sources';
|
||||||
|
|
||||||
|
-- Active institutes
-- Curated/active institutes promoted out of the raw import table.
create table if not exists public.institutes (
    id uuid primary key default uuid_generate_v4(),
    -- Provenance link back to the raw import row (no delete action: import rows
    -- cannot be deleted while referenced).
    import_id uuid references public.institute_imports(id),
    name text not null,
    urn text unique,
    status text not null default 'active' check (status in ('active', 'inactive', 'pending')),
    address jsonb default '{}'::jsonb,
    website text,
    metadata jsonb default '{}'::jsonb,
    -- Neo4j graph-sync bookkeeping (public vs private graphs tracked separately)
    neo4j_unique_id text,
    neo4j_public_sync_status text default 'pending' check (neo4j_public_sync_status in ('pending', 'synced', 'failed')),
    neo4j_public_sync_at timestamp with time zone,
    neo4j_private_sync_status text default 'not_started' check (neo4j_private_sync_status in ('not_started', 'pending', 'synced', 'failed')),
    neo4j_private_sync_at timestamp with time zone,
    created_at timestamp with time zone default timezone('utc'::text, now()),
    -- NOTE(review): no updated_at trigger is created for this table in section 11.
    updated_at timestamp with time zone default timezone('utc'::text, now())
);
comment on table public.institutes is 'Active institutes in the system';
|
||||||
|
|
||||||
|
--[ 6. Relationship Tables ]--
-- Institute memberships
-- Junction table: which users belong to which institutes and in what role.
create table if not exists public.institute_memberships (
    id uuid primary key default uuid_generate_v4(),
    -- NOTE(review): both FKs are nullable — confirm whether orphan membership
    -- rows (null profile or institute) are ever meaningful.
    profile_id uuid references public.profiles(id) on delete cascade,
    institute_id uuid references public.institutes(id) on delete cascade,
    role text not null check (role in ('admin', 'teacher', 'student')),
    -- Per-member whiteboard (tldraw) settings scoped to this institute.
    tldraw_preferences jsonb default '{}'::jsonb,
    metadata jsonb default '{}'::jsonb,
    created_at timestamp with time zone default timezone('utc'::text, now()),
    -- Maintained by handle_institute_memberships_updated_at (section 11).
    updated_at timestamp with time zone default timezone('utc'::text, now()),
    -- At most one membership per user per institute.
    unique(profile_id, institute_id)
);
comment on table public.institute_memberships is 'Manages user roles and relationships with institutes';
|
||||||
|
|
||||||
|
-- Membership requests
-- Pending join requests; approval workflow is handled outside this file.
create table if not exists public.institute_membership_requests (
    id uuid primary key default uuid_generate_v4(),
    profile_id uuid references public.profiles(id) on delete cascade,
    institute_id uuid references public.institutes(id) on delete cascade,
    -- Admins are never requested; they are provisioned via setup_initial_admin.
    requested_role text check (requested_role in ('teacher', 'student')),
    status text default 'pending' check (status in ('pending', 'approved', 'rejected')),
    metadata jsonb default '{}'::jsonb,
    created_at timestamp with time zone default timezone('utc'::text, now()),
    -- Maintained by handle_membership_requests_updated_at (section 11).
    -- NOTE(review): unlike memberships there is no unique(profile_id, institute_id),
    -- so duplicate concurrent requests are possible — confirm this is intended.
    updated_at timestamp with time zone default timezone('utc'::text, now())
);
comment on table public.institute_membership_requests is 'Tracks requests to join institutes';
|
||||||
|
|
||||||
|
--[ 7. Audit Tables ]--
-- System audit logs
-- Append-only audit trail. NOTE(review): nothing in this file writes to it —
-- confirm application code or other triggers populate audit_logs.
create table if not exists public.audit_logs (
    id uuid primary key default uuid_generate_v4(),
    -- set null keeps audit history even after the acting user is deleted
    profile_id uuid references public.profiles(id) on delete set null,
    action_type text,
    table_name text,
    record_id uuid,
    -- Before/after payload of the audited change.
    changes jsonb,
    created_at timestamp with time zone default timezone('utc'::text, now())
);
comment on table public.audit_logs is 'System-wide audit trail for important operations';
|
||||||
|
|
||||||
|
--[ 8. Auth Functions ]--
-- Create a secure function to check admin status
-- True when the calling JWT's user has a profile with user_type 'admin';
-- false for non-admins and for unauthenticated callers (auth.uid() is null).
create or replace function auth.is_admin()
returns boolean as $$
    select exists (
        select 1
        from public.profiles p
        where p.id = auth.uid()
          and p.user_type = 'admin'
    );
$$ language sql security definer;
|
||||||
|
|
||||||
|
-- Create a secure function to check super admin status
-- True only when the calling user's auth.users row carries the
-- 'supabase_admin' role; false for everyone else, including anonymous callers.
create or replace function auth.is_super_admin()
returns boolean as $$
    select exists (
        select 1
        from auth.users u
        where u.id = auth.uid()
          and u.role = 'supabase_admin'
    );
$$ language sql security definer;
|
||||||
|
|
||||||
|
-- Create public wrappers for the auth functions
-- The REST layer only exposes functions in public/rpc schemas, so these
-- re-export the auth.* checks for API callers.
create or replace function public.is_admin()
returns boolean as $$
select auth.is_admin();
$$ language sql security definer;

create or replace function public.is_super_admin()
returns boolean as $$
select auth.is_super_admin();
$$ language sql security definer;
|
||||||
|
|
||||||
|
-- Grant execute permissions
-- NOTE(review): Postgres grants EXECUTE on new functions to PUBLIC by
-- default, so these grants are belt-and-braces; nothing is revoked here.
grant execute on function public.is_admin to authenticated;
grant execute on function public.is_super_admin to authenticated;
grant execute on function auth.is_admin to authenticated;
grant execute on function auth.is_super_admin to authenticated;
|
||||||
|
|
||||||
|
-- Initial admin setup function
-- Promotes an existing profile (matched by email) to user_type 'admin' and
-- returns a json summary of the updated row; raises if no profile matches.
create or replace function public.setup_initial_admin(admin_email text)
returns json
language plpgsql
security definer
as $$
declare
    result json;
begin
    -- Only allow this to run for the service role or a real superuser session.
    -- FIX: inside a SECURITY DEFINER function, current_user reports the
    -- function OWNER (typically a superuser), so the previous
    -- current_user-based guard always passed and any caller with EXECUTE
    -- could promote an arbitrary account. Check the API JWT role claim and
    -- the actual session user instead.
    -- NOTE(review): confirm the JWT claim GUC name matches the deployed
    -- PostgREST version ('request.jwt.claim.role' is the pre-v9 style).
    if coalesce(current_setting('request.jwt.claim.role', true), '') <> 'service_role'
        and not exists (
            select 1 from pg_roles
            where rolname = session_user
            and rolsuper
        )
    then
        raise exception 'Must be run as service_role or superuser';
    end if;

    -- Update user_type and username for admin.
    -- (username/display_name are only defaulted when currently unset.)
    update public.profiles
    set user_type = 'admin',
        username = coalesce(username, 'superadmin'),
        display_name = coalesce(display_name, 'Super Admin')
    where email = admin_email
    returning json_build_object(
        'id', id,
        'email', email,
        'user_type', user_type,
        'username', username,
        'display_name', display_name
    ) into result;

    -- No row matched the given email: nothing was promoted.
    if result is null then
        raise exception 'Admin user with email % not found', admin_email;
    end if;

    return result;
end;
$$;
|
||||||
|
|
||||||
|
-- Grant execute permissions
-- FIX: previously EXECUTE was also granted to `authenticated`, which —
-- combined with an ineffective in-function guard — allowed any signed-in
-- user to promote an account to admin. Restrict to privileged roles only.
revoke execute on function public.setup_initial_admin from public;
grant execute on function public.setup_initial_admin to service_role, supabase_admin;
|
||||||
|
|
||||||
|
-- Create RPC wrapper for REST API access
-- Thin pass-through so the promotion function is callable via /rpc.
create or replace function rpc.setup_initial_admin(admin_email text)
returns json
language plpgsql
security definer
as $$
begin
    return public.setup_initial_admin(admin_email);
end;
$$;

-- Grant execute permissions for RPC wrapper
-- FIX: previously granted to `authenticated` as well, exposing admin
-- promotion to every signed-in API user. Privileged roles only.
revoke execute on function rpc.setup_initial_admin from public;
grant execute on function rpc.setup_initial_admin to service_role, supabase_admin;
|
||||||
|
|
||||||
|
--[ 9. Utility Functions ]--
-- Check if database is ready
-- Bootstrap health probe: true only when ALL required schemas exist, the
-- auth.users table is present, and RLS is enabled on public.profiles.
create or replace function check_db_ready()
returns boolean
language plpgsql
security definer
as $$
begin
    -- Check that ALL essential schemas exist.
    -- FIX: the original `if not exists (... where schema_name in (...))`
    -- succeeded as soon as ANY one of the three schemas existed, so a
    -- database missing auth or storage still reported ready.
    if (
        select count(distinct schema_name)
        from information_schema.schemata
        where schema_name in ('auth', 'storage', 'public')
    ) < 3 then
        return false;
    end if;

    -- Check if essential tables exist
    if not exists (
        select 1
        from information_schema.tables
        where table_schema = 'auth'
        and table_name = 'users'
    ) then
        return false;
    end if;

    -- Check if RLS is enabled on public.profiles
    if not exists (
        select 1
        from pg_tables
        where schemaname = 'public'
        and tablename = 'profiles'
        and rowsecurity = true
    ) then
        return false;
    end if;

    return true;
end;
$$;

-- Grant execute permission (readiness probe is intentionally world-callable)
grant execute on function check_db_ready to anon, authenticated, service_role;
|
||||||
|
|
||||||
|
-- Function to handle new user registration
-- AFTER INSERT trigger on auth.users: creates the matching profiles row,
-- taking user_type/username/display_name from the signup metadata when given.
create or replace function public.handle_new_user()
returns trigger
language plpgsql
security definer set search_path = public
as $$
declare
    default_user_type text := 'email_student';
    default_username text;
begin
    -- Generate username from the email local part
    default_username := split_part(new.email, '@', 1);

    -- FIX: profiles.username is UNIQUE, so two signups sharing an email local
    -- part (e.g. jane@a.com and jane@b.com) made this insert fail and abort
    -- the auth.users insert with it, breaking registration. Fall back to a
    -- deterministic suffix from the new user's id.
    -- NOTE(review): a colliding username supplied via raw_user_meta_data can
    -- still fail — decide whether that should also fall back.
    if exists (select 1 from public.profiles where username = default_username) then
        default_username := default_username || '_' || left(new.id::text, 8);
    end if;

    insert into public.profiles (
        id,
        email,
        user_type,
        username,
        display_name
    )
    values (
        new.id,
        new.email,
        coalesce(new.raw_user_meta_data->>'user_type', default_user_type),
        coalesce(new.raw_user_meta_data->>'username', default_username),
        coalesce(new.raw_user_meta_data->>'display_name', default_username)
    );
    return new;
end;
$$;
|
||||||
|
|
||||||
|
-- Trigger for new user creation
-- Fires once per inserted auth user; a failure here rolls back the signup.
drop trigger if exists on_auth_user_created on auth.users;
create trigger on_auth_user_created
after insert on auth.users
for each row execute procedure public.handle_new_user();
|
||||||
|
|
||||||
|
--[ 10. Security Setup ]--
-- Enable RLS
-- With RLS on and no matching policy, a table denies all access by default,
-- so every table below is locked until its policies are created.
alter table if exists public.profiles enable row level security;
alter table if exists public.institute_imports enable row level security;
alter table if exists public.institutes enable row level security;
alter table if exists public.institute_memberships enable row level security;
alter table if exists public.institute_membership_requests enable row level security;
alter table if exists public.audit_logs enable row level security;

-- First, ensure proper schema access
grant usage on schema public to anon, authenticated;
|
||||||
|
|
||||||
|
-- First, drop existing policies (superseded names from an earlier revision)
drop policy if exists "Users can read and update own profile" on public.profiles;
drop policy if exists "Users can update their profile during registration" on public.profiles;

-- Create updated policies
-- Permissive policies are OR-combined: a row is visible if ANY select policy matches.
create policy "Users can read own profile"
on public.profiles for select
to authenticated
using (auth.uid() = id);

create policy "Users can update own profile"
on public.profiles for update
to authenticated
using (auth.uid() = id)
with check (auth.uid() = id);

-- NOTE(review): this makes every teacher/student row visible to anonymous
-- callers. The anon column grant below limits readable columns to
-- (id, email, user_type, display_name), but that still exposes email
-- addresses to unauthenticated users — confirm this is intended.
create policy "Public can read basic profile info"
on public.profiles for select
to anon, authenticated
using (
user_type in ('email_teacher', 'email_student')
);
|
||||||
|
|
||||||
|
-- Super admins bypass everything on profiles (all commands).
create policy "Super admins have full access"
on public.profiles for all
using (auth.is_super_admin());

-- NOTE(review): the is_super_admin() term is redundant — the policy above
-- already grants super admins select access.
create policy "Admins can read all profiles"
on public.profiles for select
using (auth.is_admin() or auth.is_super_admin());

-- Grant permissions
grant select, update on public.profiles to authenticated;
-- Column-level grant: anon can never read columns beyond these four,
-- regardless of which row policies match.
grant select (id, email, user_type, display_name) on public.profiles to anon;
|
||||||
|
|
||||||
|
-- Storage bucket policies
alter table if exists storage.buckets enable row level security;

-- Allow super admin full access to buckets
-- FIX: this policy previously also matched current_user = 'authenticated',
-- which gave every signed-in user unrestricted access (all commands) to all
-- buckets and made the two narrower policies below meaningless.
create policy "Super admin has full access to buckets"
on storage.buckets for all
using (current_user = 'service_role' or current_user = 'supabase_admin');

-- Allow authenticated users to create buckets if they are the owner
create policy "Users can create their own buckets"
on storage.buckets for insert
to authenticated
with check (true); -- We'll handle ownership in the application layer

-- Allow users to view buckets they own or public buckets
create policy "Users can view their own buckets"
on storage.buckets for select
to authenticated
using (
owner::text = auth.uid()::text
);
|
||||||
|
|
||||||
|
--[ 11. Database Triggers ]--
-- Keep updated_at current via public.handle_updated_at() (section 4).
-- NOTE(review): institutes and institute_imports also carry updated_at but
-- get no trigger here — confirm whether that is deliberate.
drop trigger if exists handle_profiles_updated_at on public.profiles;
create trigger handle_profiles_updated_at
before update on public.profiles
for each row execute function public.handle_updated_at();

drop trigger if exists handle_institute_memberships_updated_at on public.institute_memberships;
create trigger handle_institute_memberships_updated_at
before update on public.institute_memberships
for each row execute function public.handle_updated_at();

drop trigger if exists handle_membership_requests_updated_at on public.institute_membership_requests;
create trigger handle_membership_requests_updated_at
before update on public.institute_membership_requests
for each row execute function public.handle_updated_at();
|
||||||
|
|
||||||
|
--[ 12. Permissions ]--
-- Grant schema access
grant usage on schema public to postgres, anon, authenticated;

-- Grant table permissions
grant all privileges on all tables in schema public to postgres;
-- No DELETE for authenticated: row removal happens via service_role or
-- cascades from auth.users deletions. RLS still gates every row.
grant select, insert, update on all tables in schema public to authenticated;
|
||||||
|
|
||||||
|
--[ 13. Realtime Setup ]--
-- Drop existing publication if it exists
drop publication if exists supabase_realtime;

-- Create publication (without IF NOT EXISTS)
create publication supabase_realtime;

-- Add tables to publication. Safe to re-run only because the publication was
-- recreated empty just above (ADD TABLE errors on a duplicate member).
alter publication supabase_realtime add table profiles;
alter publication supabase_realtime add table institute_imports;
alter publication supabase_realtime add table institutes;
alter publication supabase_realtime add table institute_memberships;
alter publication supabase_realtime add table institute_membership_requests;
alter publication supabase_realtime add table audit_logs;
|
||||||
481
.env
Normal file
481
.env
Normal file
@ -0,0 +1,481 @@
|
|||||||
|
|
||||||
|
HOST_IP=localhost
|
||||||
|
|
||||||
|
## App Information
|
||||||
|
APP_NAME=ClassroomCopilot
|
||||||
|
APP_VERSION=0.0.1
|
||||||
|
APP_DESCRIPTION="An AI copilot for learners and educators."
|
||||||
|
APP_AUTHOR=KevlarAI
|
||||||
|
APP_AUTHOR_EMAIL=kcar@kevlarai.com
|
||||||
|
APP_URL=classroomcopilot.ai
|
||||||
|
APP_URL_INTERNAL=classroomcopilot.ai
|
||||||
|
APP_PROTOCOL=https
|
||||||
|
APP_WS_PROTOCOL=wss
|
||||||
|
|
||||||
|
API_EXTERNAL_URL=${APP_PROTOCOL}://supa.${APP_URL}
|
||||||
|
|
||||||
|
# KevlarAI URLs
|
||||||
|
KEVLARAI_URL=kevlarai.ai
|
||||||
|
KEVLARAI_PROTOCOL=https
|
||||||
|
|
||||||
|
# Super Admin user
|
||||||
|
SUPER_ADMIN_EMAIL=admin@classroomcopilot.ai
|
||||||
|
SUPER_ADMIN_WORKER_EMAIL=kcar@kevlarai.com
|
||||||
|
SUPER_ADMIN_PASSWORD=password
|
||||||
|
SUPER_ADMIN_USERNAME=superadmin
|
||||||
|
SUPER_ADMIN_NAME="Super Admin"
|
||||||
|
SUPER_ADMIN_DISPLAY_NAME="CC Creator"
|
||||||
|
SUPER_ADMIN_CALENDAR_START_DATE=2025-01-01
|
||||||
|
SUPER_ADMIN_CALENDAR_END_DATE=2025-01-31
|
||||||
|
|
||||||
|
## Hosts
|
||||||
|
HOST_OLLAMA=${HOST_IP}
|
||||||
|
|
||||||
|
## Ports
|
||||||
|
PORT_SUPABASE_KONG_HTTP=8000
|
||||||
|
PORT_SUPABASE_KONG_HTTPS=8443
|
||||||
|
PORT_SUPABASE_STUDIO=3000
|
||||||
|
PORT_SUPABASE_POSTGRES=5432
|
||||||
|
|
||||||
|
#############################################################
|
||||||
|
## APP CONFIGURATION
|
||||||
|
#############################################################
|
||||||
|
## Supabase Basic URLs and Endpoints
|
||||||
|
SITE_URL=${APP_PROTOCOL}://${APP_URL}
|
||||||
|
SUPABASE_URL=${APP_PROTOCOL}://supa.${APP_URL}
|
||||||
|
SUPABASE_PUBLIC_URL=${APP_PROTOCOL}://supastudio.${APP_URL}
|
||||||
|
## App domains
|
||||||
|
APP_SITE_URL=${SITE_URL}
|
||||||
|
APP_SUPABASE_URL=${SUPABASE_URL}
|
||||||
|
APP_STUDIO_URL=${SUPABASE_PUBLIC_URL}
|
||||||
|
|
||||||
|
|
||||||
|
#############################################################
|
||||||
|
## SUPABASE CONFIGURATION
|
||||||
|
#############################################################
|
||||||
|
|
||||||
|
## Supabase Authentication Keys and Secrets
|
||||||
|
# JWT configuration
# SECURITY: the secrets in this file are committed to version control; any
# key or password that has ever appeared in a commit should be rotated, and
# real values loaded from a secret manager or an untracked .env instead.
|
||||||
|
JWT_SECRET=mE9FCC2YvHyrFIyyloH27F3lw51Ij93a77ejMZY-NRc
|
||||||
|
JWT_EXPIRY=3600
|
||||||
|
SECRET_KEY_BASE=UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq
|
||||||
|
VAULT_ENC_KEY=your-encryption-key-32-chars-min
|
||||||
|
|
||||||
|
# API Keys
|
||||||
|
ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOiJhdXRoZW50aWNhdGVkIiwiaWF0IjoxNzM0OTg4MzkxLCJpc3MiOiJzdXBhYmFzZSIsImV4cCI6MTc2NjUyNDM5MSwicm9sZSI6ImFub24ifQ.utdDZzVlhYIc-cSXuC2kyZz7HN59YfyMH4eaOw1hRlk
|
||||||
|
SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOiJhdXRoZW50aWNhdGVkIiwiaWF0IjoxNzM0OTg4MzkxLCJpc3MiOiJzdXBhYmFzZSIsImV4cCI6MTc2NjUyNDM5MSwicm9sZSI6InNlcnZpY2Vfcm9sZSJ9.y-HHZC_Rxr8OTOX2rmb8ZgMnwLkSJYAF_lIHjkVtAyc
|
||||||
|
|
||||||
|
## Supabase Database Configuration
|
||||||
|
POSTGRES_PASSWORD=your-super-secret-and-long-postgres-password
|
||||||
|
POSTGRES_HOST=db
|
||||||
|
POSTGRES_DB=postgres
|
||||||
|
POSTGRES_PORT=${PORT_SUPABASE_POSTGRES}
|
||||||
|
|
||||||
|
## Supabase Dashboard Configuration
|
||||||
|
DASHBOARD_USERNAME=supabase
|
||||||
|
DASHBOARD_PASSWORD=password
|
||||||
|
|
||||||
|
## Supabase Pooler Configuration (Database Connection Pooling)
|
||||||
|
POOLER_PROXY_PORT_TRANSACTION=6543
|
||||||
|
POOLER_DEFAULT_POOL_SIZE=20
|
||||||
|
POOLER_MAX_CLIENT_CONN=100
|
||||||
|
POOLER_TENANT_ID=your-tenant-id
|
||||||
|
|
||||||
|
## Supabase Kong API Gateway Configuration
|
||||||
|
KONG_HTTP_PORT=${PORT_SUPABASE_KONG_HTTP}
|
||||||
|
KONG_HTTPS_PORT=${PORT_SUPABASE_KONG_HTTPS}
|
||||||
|
|
||||||
|
## Supabase PostgREST Configuration
|
||||||
|
PGRST_DB_SCHEMAS=public,storage,graphql_public
|
||||||
|
|
||||||
|
## Supabase Auth Server Configuration
|
||||||
|
# General Auth Settings
|
||||||
|
ADDITIONAL_REDIRECT_URLS=""
|
||||||
|
AUTH_LOG_LEVEL=debug
|
||||||
|
DISABLE_SIGNUP=false
|
||||||
|
|
||||||
|
# Security Settings
|
||||||
|
# Uncomment these for enhanced security
|
||||||
|
# GOTRUE_SECURITY_REFRESH_TOKEN_ROTATION_ENABLED=true
|
||||||
|
# GOTRUE_SECURITY_REFRESH_TOKEN_REUSE_INTERVAL=30s
|
||||||
|
# GOTRUE_SECURITY_UPDATE_PASSWORD_REQUIRE_REAUTHENTICATION=true
|
||||||
|
# GOTRUE_PASSWORD_MIN_LENGTH=10
|
||||||
|
# GOTRUE_PASSWORD_REQUIRED_CHARACTERS=lowercase:uppercase:number:symbol
|
||||||
|
|
||||||
|
# Rate Limiting
|
||||||
|
# Uncomment these to enable rate limiting
|
||||||
|
# GOTRUE_RATE_LIMIT_HEADER=IP
|
||||||
|
# GOTRUE_RATE_LIMIT_EMAIL_SENT=4
|
||||||
|
|
||||||
|
## Supabase Email Configuration
|
||||||
|
# Mailer URL Paths
|
||||||
|
MAILER_URLPATHS_CONFIRMATION="/auth/v1/verify"
|
||||||
|
MAILER_URLPATHS_INVITE="/auth/v1/verify"
|
||||||
|
MAILER_URLPATHS_RECOVERY="/auth/v1/verify"
|
||||||
|
MAILER_URLPATHS_EMAIL_CHANGE="/auth/v1/verify"
|
||||||
|
MAILER_SECURE_EMAIL_CHANGE_ENABLED=true
|
||||||
|
GOTRUE_MAILER_EXTERNAL_HOSTS="localhost,supabase.localhost"
|
||||||
|
|
||||||
|
# Email Auth Settings
|
||||||
|
ENABLE_EMAIL_SIGNUP=true
|
||||||
|
ENABLE_EMAIL_AUTOCONFIRM=true
|
||||||
|
SMTP_ADMIN_EMAIL=${APP_AUTHOR_EMAIL}
# NOTE: the placeholder SMTP values below are overridden by the real SMTP
# settings a few lines further down — the last assignment of a key wins.
SMTP_USER=fake_mail_user
SMTP_PASS=fake_mail_password
SMTP_SENDER_NAME=fake_sender
|
||||||
|
|
||||||
|
SMTP_HOST=smtp.zoho.eu
|
||||||
|
SMTP_PORT=587
|
||||||
|
SMTP_USER=admin@${APP_URL}
|
||||||
|
SMTP_PASS=&%Z040&%
|
||||||
|
SMTP_ADMIN_EMAIL=admin@${APP_URL}
|
||||||
|
SMTP_SENDER_NAME="Classroom Copilot"
|
||||||
|
|
||||||
|
## Supabase Phone Auth Configuration
|
||||||
|
ENABLE_PHONE_SIGNUP=true
|
||||||
|
ENABLE_PHONE_AUTOCONFIRM=true
|
||||||
|
|
||||||
|
## Supabase Anonymous Users
|
||||||
|
ENABLE_ANONYMOUS_USERS=false
|
||||||
|
|
||||||
|
## Supabase Studio Configuration
|
||||||
|
SUPABASE_PROJECT_ID=${APP_NAME}
|
||||||
|
STUDIO_DEFAULT_ORGANIZATION=${APP_AUTHOR}
|
||||||
|
STUDIO_DEFAULT_PROJECT=${APP_NAME}
|
||||||
|
STUDIO_PORT=${PORT_SUPABASE_STUDIO}
|
||||||
|
IMGPROXY_ENABLE_WEBP_DETECTION=true
|
||||||
|
|
||||||
|
## Supabase OAuth Providers
|
||||||
|
# Azure Auth
|
||||||
|
AZURE_ENABLED=false
|
||||||
|
AZURE_CLIENT_ID=c9a27d21-2012-44ce-9ebd-ffc868444383
|
||||||
|
AZURE_SECRET=.Nr8Q~kBXgDp_aX7~TlgCbzJHPledeTQwfTzja5y
|
||||||
|
AZURE_REDIRECT_URI=${APP_PROTOCOL}://${APP_URL}/web/auth/callback
|
||||||
|
AZURE_TENANT_ID=e637ec20-60ca-4dfc-a605-d2798f9e977b
|
||||||
|
|
||||||
|
## Supabase Functions Configuration
|
||||||
|
FUNCTIONS_VERIFY_JWT=false
|
||||||
|
|
||||||
|
## Supabase Logs Configuration
|
||||||
|
LOGFLARE_LOGGER_BACKEND_API_KEY=your-super-secret-and-long-logflare-key
|
||||||
|
LOGFLARE_API_KEY=your-super-secret-and-long-logflare-key
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# NOTE: everything from here down is a second, near-duplicate copy of the
# configuration above (APP_URL_INTERNAL differs). Since later assignments of
# the same key win, this second copy is the effective configuration.
## App Information
|
||||||
|
APP_NAME=ClassroomCopilot
|
||||||
|
APP_VERSION=0.0.1
|
||||||
|
APP_DESCRIPTION="An AI copilot for learners and educators."
|
||||||
|
APP_AUTHOR=KevlarAI
|
||||||
|
APP_AUTHOR_EMAIL=kcar@kevlarai.com
|
||||||
|
APP_URL=classroomcopilot.ai
|
||||||
|
APP_URL_INTERNAL=classroomcopilot.internal
|
||||||
|
APP_PROTOCOL=https
|
||||||
|
APP_WS_PROTOCOL=wss
|
||||||
|
|
||||||
|
# KevlarAI URLs
|
||||||
|
KEVLARAI_URL=kevlarai.ai
|
||||||
|
KEVLARAI_PROTOCOL=https
|
||||||
|
|
||||||
|
# Super Admin user
|
||||||
|
SUPER_ADMIN_EMAIL=admin@classroomcopilot.ai
|
||||||
|
SUPER_ADMIN_WORKER_EMAIL=kcar@kevlarai.com
|
||||||
|
SUPER_ADMIN_PASSWORD=password
|
||||||
|
SUPER_ADMIN_USERNAME=superadmin
|
||||||
|
SUPER_ADMIN_NAME="Super Admin"
|
||||||
|
SUPER_ADMIN_DISPLAY_NAME="CC Creator"
|
||||||
|
SUPER_ADMIN_CALENDAR_START_DATE=2025-01-01
|
||||||
|
SUPER_ADMIN_CALENDAR_END_DATE=2025-01-31
|
||||||
|
|
||||||
|
## Runtime settings
|
||||||
|
PROJECT_DIR=/Users/kcar/dev/ClassroomCopilot
|
||||||
|
BUILD_OS=macos
|
||||||
|
NGINX_MODE=prod
|
||||||
|
DEV_MODE=false
|
||||||
|
HOST_IP=localhost
|
||||||
|
BACKEND_DEV_MODE=false
|
||||||
|
STRICT_MODE=false
|
||||||
|
SUPER_ADMIN_CHECK=true
|
||||||
|
INIT_SUPER_ADMIN=false
|
||||||
|
|
||||||
|
## Docker compose environment variables
|
||||||
|
COMPOSE_PROJECT_NAME=classroomcopilot-${NGINX_MODE:-dev}
|
||||||
|
|
||||||
|
## Hosts
|
||||||
|
HOST_OLLAMA=${HOST_IP}
|
||||||
|
|
||||||
|
## Ports
|
||||||
|
PORT_KEYCLOAK=8080
|
||||||
|
PORT_KEYCLOAK_MANAGEMENT=9000
|
||||||
|
PORT_KEYCLOAK_SSL=8444
|
||||||
|
PORT_CC_ADMIN=5173
|
||||||
|
PORT_CC_ADMIN_DEVTOOLS=5001
|
||||||
|
PORT_SUPABASE_KONG_HTTP=8000
|
||||||
|
PORT_SUPABASE_KONG_HTTPS=8443
|
||||||
|
PORT_SUPABASE_STUDIO=3000
|
||||||
|
PORT_SUPABASE_POSTGRES=5432
|
||||||
|
# PORT_SOLID_CSS=3006 # not used currently in docker on by localhost solid server
|
||||||
|
PORT_SOLID_PROXY=3007
|
||||||
|
PORT_SOLID_PROXY_SSL=3008
|
||||||
|
PORT_NEO4J_BOLT=7687
|
||||||
|
PORT_NEO4J_HTTP=7474
|
||||||
|
PORT_NEO4J_HTTPS=7473
|
||||||
|
PORT_FRONTEND=3003
|
||||||
|
PORT_FRONTEND_SSL=3033
|
||||||
|
PORT_MARKETING_SITE=3004
|
||||||
|
PORT_MARKETING_SITE_SSL=3044
|
||||||
|
PORT_BACKEND=8880
|
||||||
|
PORT_BACKEND_SSL=8088
|
||||||
|
PORT_TLDRAW_SYNC=5002
|
||||||
|
PORT_WHISPERLIVE=5050
|
||||||
|
PORT_WHISPERLIVE_SSL=5053
|
||||||
|
PORT_TEXT_GENERATION=7861
|
||||||
|
PORT_TEXT_GENERATION_API=5010
|
||||||
|
PORT_STABLE_DIFFUSION=7860
|
||||||
|
PORT_STABLE_DIFFUSION_API=5011
|
||||||
|
PORT_OLLAMA=11434
|
||||||
|
PORT_OPEN_WEBUI=3333
|
||||||
|
PORT_OPEN_WEBUI_SSL=3334
|
||||||
|
PORT_OPENWEBUI_PROXY_INTERNAL=3335
|
||||||
|
PORT_MORPHIC=3001
|
||||||
|
PORT_REDIS=6379
|
||||||
|
PORT_SEARXNG=8090
|
||||||
|
PORT_MAILHOG_SMTP=1025
|
||||||
|
PORT_MAILHOG_WEB=8025
|
||||||
|
|
||||||
|
# WhisperLive Frontend
|
||||||
|
PORT_WHISPERLIVE_FRONTEND=5054
|
||||||
|
PORT_WHISPERLIVE_FRONTEND_SSL=5055
|
||||||
|
|
||||||
|
#############################################################
|
||||||
|
## APP CONFIGURATION
|
||||||
|
#############################################################
|
||||||
|
## Supabase Basic URLs and Endpoints
|
||||||
|
SITE_URL=${APP_PROTOCOL}://${APP_URL}
|
||||||
|
SUPABASE_URL=${APP_PROTOCOL}://supa.${APP_URL}
|
||||||
|
API_EXTERNAL_URL=${APP_PROTOCOL}://supa.${APP_URL}
|
||||||
|
SUPABASE_PUBLIC_URL=${APP_PROTOCOL}://supastudio.${APP_URL}
|
||||||
|
## App domains
|
||||||
|
APP_SITE_URL=${SITE_URL}
|
||||||
|
APP_SUPABASE_URL=${SUPABASE_URL}
|
||||||
|
APP_STUDIO_URL=${SUPABASE_PUBLIC_URL}
|
||||||
|
APP_API_URL=${APP_PROTOCOL}://api.${APP_URL}
|
||||||
|
APP_GRAPH_URL=${APP_PROTOCOL}://graph.${APP_URL}
|
||||||
|
APP_BOLT_URL=bolt://neo4j:${PORT_NEO4J_BOLT}
|
||||||
|
CC_ADMIN_URL=${APP_PROTOCOL}://admin.${APP_URL}
|
||||||
|
APP_ADMIN_API_URL=${APP_PROTOCOL}://admin-api.${APP_URL}
|
||||||
|
## Vite environment variables
|
||||||
|
VITE_APP_URL=app.${APP_URL}
|
||||||
|
|
||||||
|
#############################################################
|
||||||
|
## OAUTH2 PROXY CONFIGURATION
|
||||||
|
#############################################################
|
||||||
|
KEYCLOAK_SECRET_OPENWEBUI=XbKriIGb1YRSKmALfoKodpyJaQQOtP4U
|
||||||
|
KEYCLOAK_SECRET_ADMIN=""
|
||||||
|
COOKIE_SECRET_OPENWEBUI=QAm4ImW8ieeEftQgRly5guVYqHzcU/m+to5k5sHqfF8=
|
||||||
|
COOKIE_SECRET_ADMIN=yDaNr1DwYqRykdoeW+mS/Ari5pWs8m4YPQJsjIt2xYQ=
|
||||||
|
|
||||||
|
|
||||||
|
#############################################################
|
||||||
|
## SUPABASE CONFIGURATION
|
||||||
|
#############################################################
|
||||||
|
|
||||||
|
## Supabase Authentication Keys and Secrets
|
||||||
|
# JWT configuration
|
||||||
|
JWT_SECRET=mE9FCC2YvHyrFIyyloH27F3lw51Ij93a77ejMZY-NRc
|
||||||
|
JWT_EXPIRY=3600
|
||||||
|
SECRET_KEY_BASE=UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq
|
||||||
|
VAULT_ENC_KEY=your-encryption-key-32-chars-min
|
||||||
|
|
||||||
|
# API Keys
|
||||||
|
ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOiJhdXRoZW50aWNhdGVkIiwiaWF0IjoxNzM0OTg4MzkxLCJpc3MiOiJzdXBhYmFzZSIsImV4cCI6MTc2NjUyNDM5MSwicm9sZSI6ImFub24ifQ.utdDZzVlhYIc-cSXuC2kyZz7HN59YfyMH4eaOw1hRlk
|
||||||
|
SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOiJhdXRoZW50aWNhdGVkIiwiaWF0IjoxNzM0OTg4MzkxLCJpc3MiOiJzdXBhYmFzZSIsImV4cCI6MTc2NjUyNDM5MSwicm9sZSI6InNlcnZpY2Vfcm9sZSJ9.y-HHZC_Rxr8OTOX2rmb8ZgMnwLkSJYAF_lIHjkVtAyc
|
||||||
|
|
||||||
|
## Supabase Database Configuration
|
||||||
|
POSTGRES_PASSWORD=your-super-secret-and-long-postgres-password
|
||||||
|
POSTGRES_HOST=db
|
||||||
|
POSTGRES_DB=postgres
|
||||||
|
POSTGRES_PORT=${PORT_SUPABASE_POSTGRES}
|
||||||
|
|
||||||
|
## Supabase Dashboard Configuration
|
||||||
|
DASHBOARD_USERNAME=supabase
|
||||||
|
DASHBOARD_PASSWORD=password
|
||||||
|
|
||||||
|
## Supabase Pooler Configuration (Database Connection Pooling)
|
||||||
|
POOLER_PROXY_PORT_TRANSACTION=6543
|
||||||
|
POOLER_DEFAULT_POOL_SIZE=20
|
||||||
|
POOLER_MAX_CLIENT_CONN=100
|
||||||
|
POOLER_TENANT_ID=your-tenant-id
|
||||||
|
|
||||||
|
## Supabase Kong API Gateway Configuration
|
||||||
|
KONG_HTTP_PORT=${PORT_SUPABASE_KONG_HTTP}
|
||||||
|
KONG_HTTPS_PORT=${PORT_SUPABASE_KONG_HTTPS}
|
||||||
|
|
||||||
|
## Supabase PostgREST Configuration
|
||||||
|
PGRST_DB_SCHEMAS=public,storage,graphql_public
|
||||||
|
|
||||||
|
## Supabase Auth Server Configuration
|
||||||
|
# General Auth Settings
|
||||||
|
ADDITIONAL_REDIRECT_URLS=""
|
||||||
|
AUTH_LOG_LEVEL=debug
|
||||||
|
DISABLE_SIGNUP=false
|
||||||
|
|
||||||
|
# Security Settings
|
||||||
|
# Uncomment these for enhanced security
|
||||||
|
# GOTRUE_SECURITY_REFRESH_TOKEN_ROTATION_ENABLED=true
|
||||||
|
# GOTRUE_SECURITY_REFRESH_TOKEN_REUSE_INTERVAL=30s
|
||||||
|
# GOTRUE_SECURITY_UPDATE_PASSWORD_REQUIRE_REAUTHENTICATION=true
|
||||||
|
# GOTRUE_PASSWORD_MIN_LENGTH=10
|
||||||
|
# GOTRUE_PASSWORD_REQUIRED_CHARACTERS=lowercase:uppercase:number:symbol
|
||||||
|
|
||||||
|
# Rate Limiting
|
||||||
|
# Uncomment these to enable rate limiting
|
||||||
|
# GOTRUE_RATE_LIMIT_HEADER=IP
|
||||||
|
# GOTRUE_RATE_LIMIT_EMAIL_SENT=4
|
||||||
|
|
||||||
|
## Supabase Email Configuration
|
||||||
|
# Mailer URL Paths
|
||||||
|
MAILER_URLPATHS_CONFIRMATION="/auth/v1/verify"
|
||||||
|
MAILER_URLPATHS_INVITE="/auth/v1/verify"
|
||||||
|
MAILER_URLPATHS_RECOVERY="/auth/v1/verify"
|
||||||
|
MAILER_URLPATHS_EMAIL_CHANGE="/auth/v1/verify"
|
||||||
|
MAILER_SECURE_EMAIL_CHANGE_ENABLED=true
|
||||||
|
GOTRUE_MAILER_EXTERNAL_HOSTS="localhost,supabase.localhost"
|
||||||
|
|
||||||
|
# Email Auth Settings
|
||||||
|
ENABLE_EMAIL_SIGNUP=true
|
||||||
|
ENABLE_EMAIL_AUTOCONFIRM=true
|
||||||
|
SMTP_ADMIN_EMAIL=${APP_AUTHOR_EMAIL}
|
||||||
|
SMTP_USER=fake_mail_user
|
||||||
|
SMTP_PASS=fake_mail_password
|
||||||
|
SMTP_SENDER_NAME=fake_sender
|
||||||
|
|
||||||
|
# NOTE(review): SMTP_USER, SMTP_PASS, SMTP_ADMIN_EMAIL and SMTP_SENDER_NAME are
# defined twice in this file (fake_mail_* values above, real values below).
# Which definition wins depends on the env loader (dotenv keeps the first,
# shell sourcing keeps the last) — keep exactly one set.
SMTP_HOST=smtp.zoho.eu
|
||||||
|
SMTP_PORT=587
|
||||||
|
SMTP_USER=admin@${APP_URL}
|
||||||
|
SMTP_PASS=&%Z040&%
|
||||||
|
SMTP_ADMIN_EMAIL=admin@${APP_URL}
|
||||||
|
SMTP_SENDER_NAME="Classroom Copilot"
|
||||||
|
|
||||||
|
## Supabase Phone Auth Configuration
|
||||||
|
ENABLE_PHONE_SIGNUP=true
|
||||||
|
ENABLE_PHONE_AUTOCONFIRM=true
|
||||||
|
|
||||||
|
## Supabase Anonymous Users
|
||||||
|
ENABLE_ANONYMOUS_USERS=false
|
||||||
|
|
||||||
|
## Supabase OAuth Providers
|
||||||
|
# Azure Auth
|
||||||
|
AZURE_ENABLED=false
|
||||||
|
AZURE_CLIENT_ID=c9a27d21-2012-44ce-9ebd-ffc868444383
|
||||||
|
AZURE_SECRET=.Nr8Q~kBXgDp_aX7~TlgCbzJHPledeTQwfTzja5y
|
||||||
|
AZURE_REDIRECT_URI=${APP_PROTOCOL}://${APP_URL}/web/auth/callback
|
||||||
|
AZURE_TENANT_ID=e637ec20-60ca-4dfc-a605-d2798f9e977b
|
||||||
|
|
||||||
|
## Supabase Studio Configuration
|
||||||
|
SUPABASE_PROJECT_ID=${APP_NAME}
|
||||||
|
STUDIO_DEFAULT_ORGANIZATION=${APP_AUTHOR}
|
||||||
|
STUDIO_DEFAULT_PROJECT=${APP_NAME}
|
||||||
|
STUDIO_PORT=${PORT_SUPABASE_STUDIO}
|
||||||
|
IMGPROXY_ENABLE_WEBP_DETECTION=true
|
||||||
|
|
||||||
|
## Supabase Functions Configuration
|
||||||
|
FUNCTIONS_VERIFY_JWT=false
|
||||||
|
|
||||||
|
## Supabase Logs Configuration
|
||||||
|
LOGFLARE_LOGGER_BACKEND_API_KEY=your-super-secret-and-long-logflare-key
|
||||||
|
LOGFLARE_API_KEY=your-super-secret-and-long-logflare-key
|
||||||
|
|
||||||
|
## Supabase Analytics Configuration (Google Cloud)
|
||||||
|
GOOGLE_PROJECT_ID=GOOGLE_PROJECT_ID
|
||||||
|
GOOGLE_PROJECT_NUMBER=GOOGLE_PROJECT_NUMBER
|
||||||
|
|
||||||
|
#############################################################
|
||||||
|
## OTHER SERVICES CONFIGURATION
|
||||||
|
#############################################################
|
||||||
|
|
||||||
|
# Neo4j Settings
|
||||||
|
USER_NEO4J=neo4j
|
||||||
|
PASSWORD_NEO4J=password
|
||||||
|
NEO4J_AUTH=${USER_NEO4J}/${PASSWORD_NEO4J}
|
||||||
|
|
||||||
|
## Keycloak Configuration
|
||||||
|
KEYCLOAK_ADMIN=admin
|
||||||
|
KEYCLOAK_ADMIN_PASSWORD=admin
|
||||||
|
KEYCLOAK_DB_USER=keycloak
|
||||||
|
KEYCLOAK_DB_PASSWORD=keycloak
|
||||||
|
KEYCLOAK_DB_DATABASE=keycloak
|
||||||
|
KEYCLOAK_PORT=${PORT_KEYCLOAK}
|
||||||
|
KEYCLOAK_MANAGEMENT_PORT=${PORT_KEYCLOAK_MANAGEMENT}
|
||||||
|
KEYCLOAK_SSL_PORT=${PORT_KEYCLOAK_SSL}
|
||||||
|
KEYCLOAK_IMAGE=quay.io/keycloak/keycloak:24.0.1
|
||||||
|
KEYCLOAK_REALM=classroomcopilot
|
||||||
|
KEYCLOAK_CLIENT_ID=frontend-app
|
||||||
|
KEYCLOAK_CLIENT_SECRET=your-super-secret-and-long-keycloak-client-secret
|
||||||
|
KEYCLOAK_URL=${APP_PROTOCOL}://keycloak.${APP_URL}
|
||||||
|
KEYCLOAK_ADMIN_URL=${APP_PROTOCOL}://keycloak-admin.${APP_URL}
|
||||||
|
KEYCLOAK_INTERNAL_URL=http://keycloak:8080
|
||||||
|
|
||||||
|
## Backend
|
||||||
|
UVICORN_WORKERS=2
|
||||||
|
CORS_SITE_URL=${APP_URL}
|
||||||
|
NODE_FILESYSTEM_PATH=/node_filesystem
|
||||||
|
BACKEND_INIT_PATH=/init
|
||||||
|
LOG_PATH=/logs
|
||||||
|
# Log level must be lowercase for Node.js services using Pino logger (storage, functions)
|
||||||
|
# Valid values: trace, debug, info, warn, error, fatal
|
||||||
|
LOG_LEVEL=debug
|
||||||
|
|
||||||
|
# Whisper live settings
|
||||||
|
WHISPERLIVE_SSL=false
|
||||||
|
WHISPL_USE_CUSTOM_MODEL=false
|
||||||
|
FASTERWHISPER_MODEL=faster-whisper-large-v3
|
||||||
|
WHISPERLIVE_URL=${APP_WS_PROTOCOL}://whisperlive.${APP_URL}
|
||||||
|
|
||||||
|
## SearXNG Settings
|
||||||
|
SEARXNG_URL=${APP_PROTOCOL}://search.${APP_URL}
|
||||||
|
SEARXNG_SECRET="" # generate a secret key e.g. openssl rand -base64 32
|
||||||
|
SEARXNG_PORT=${PORT_SEARXNG} # default port
|
||||||
|
SEARXNG_BIND_ADDRESS=0.0.0.0 # default address
|
||||||
|
SEARXNG_IMAGE_PROXY=true # enable image proxy
|
||||||
|
SEARXNG_LIMITER=false # can be enabled to limit the number of requests per IP address
|
||||||
|
SEARXNG_DEFAULT_DEPTH=basic # Set to 'basic' or 'advanced', only affects SearXNG searches
|
||||||
|
SEARXNG_MAX_RESULTS=50 # Maximum number of results to return from SearXNG
|
||||||
|
SEARXNG_ENGINES=google,bing,duckduckgo,wikipedia # Search engines to use
|
||||||
|
SEARXNG_TIME_RANGE=None # Time range for search results: day, week, month, year, or None (for all time)
|
||||||
|
SEARXNG_SAFESEARCH=0 # Safe search setting: 0 (off), 1 (moderate), 2 (strict)
|
||||||
|
|
||||||
|
## Morphic Settings
|
||||||
|
NEXT_PUBLIC_BASE_URL=http://morphic:3001
|
||||||
|
USE_LOCAL_REDIS=true
|
||||||
|
LOCAL_REDIS_URL=redis://redis:6379
|
||||||
|
SEARXNG_API_URL=${APP_PROTOCOL}://search.${APP_URL}
|
||||||
|
SEARCH_API=searxng # use searxng, tavily or exa
|
||||||
|
|
||||||
|
## Notion settings
|
||||||
|
NOTION_CAPTAINS_LOG_SENDER_INTERNAL_INTEGRATION_SECRET=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
|
||||||
|
|
||||||
|
## API Keys
# SECURITY NOTE(review): live provider credentials are committed in plaintext
# below (and elsewhere in this file: JWT secret, OAuth secrets, SMTP password).
# Rotate every exposed key and load secrets from an untracked file or a
# secrets manager instead of committing them.
|
||||||
|
OPENAI_API_KEY=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
|
||||||
|
LANGCHAIN_API_KEY=ls__27405da61a724d18ba4833a0b79730e0
|
||||||
|
|
||||||
|
## Other Settings
|
||||||
|
LANGCHAIN_TRACING_V2=true
|
||||||
|
LANGCHAIN_PROJECT='LangChain Perplexity Clone with human in the loop for Classroom Copilot'
|
||||||
|
USER_AGENT='cc_user_agent'
|
||||||
|
|
||||||
|
# Google API Settings
|
||||||
|
YOUTUBE_API_KEY=AIzaSyDbpJInK6dsFUjY6oG60FlzYkj7JUJmUNs
|
||||||
|
GOOGLE_CLIENT_SECRETS_FILE=Users/kcar/ClassroomCopilot/backend/app/secrets/google_cloud_yt_credentials.json
|
||||||
226
api/kong.yml
Normal file
226
api/kong.yml
Normal file
@ -0,0 +1,226 @@
|
|||||||
|
_format_version: '2.1'
|
||||||
|
_transform: true
|
||||||
|
|
||||||
|
###
|
||||||
|
### Consumers / Users
|
||||||
|
###
|
||||||
|
consumers:
|
||||||
|
- username: DASHBOARD
|
||||||
|
- username: anon
|
||||||
|
keyauth_credentials:
|
||||||
|
- key: $SUPABASE_ANON_KEY
|
||||||
|
- username: service_role
|
||||||
|
keyauth_credentials:
|
||||||
|
- key: $SUPABASE_SERVICE_KEY
|
||||||
|
|
||||||
|
###
|
||||||
|
### Access Control List
|
||||||
|
###
|
||||||
|
acls:
|
||||||
|
- consumer: anon
|
||||||
|
group: anon
|
||||||
|
- consumer: service_role
|
||||||
|
group: admin
|
||||||
|
|
||||||
|
###
|
||||||
|
### Dashboard credentials
|
||||||
|
###
|
||||||
|
basicauth_credentials:
|
||||||
|
- consumer: DASHBOARD
|
||||||
|
username: $DASHBOARD_USERNAME
|
||||||
|
password: $DASHBOARD_PASSWORD
|
||||||
|
|
||||||
|
###
|
||||||
|
### API Routes
|
||||||
|
###
|
||||||
|
services:
|
||||||
|
## Open Auth routes
|
||||||
|
- name: auth-v1-open
|
||||||
|
url: http://auth:9999/verify
|
||||||
|
routes:
|
||||||
|
- name: auth-v1-open
|
||||||
|
strip_path: true
|
||||||
|
paths:
|
||||||
|
- /auth/v1/verify
|
||||||
|
plugins:
|
||||||
|
- name: cors
|
||||||
|
- name: auth-v1-open-callback
|
||||||
|
url: http://auth:9999/callback
|
||||||
|
routes:
|
||||||
|
- name: auth-v1-open-callback
|
||||||
|
strip_path: true
|
||||||
|
paths:
|
||||||
|
- /auth/v1/callback
|
||||||
|
plugins:
|
||||||
|
- name: cors
|
||||||
|
- name: auth-v1-open-authorize
|
||||||
|
url: http://auth:9999/authorize
|
||||||
|
routes:
|
||||||
|
- name: auth-v1-open-authorize
|
||||||
|
strip_path: true
|
||||||
|
paths:
|
||||||
|
- /auth/v1/authorize
|
||||||
|
plugins:
|
||||||
|
- name: cors
|
||||||
|
|
||||||
|
## Secure Auth routes
|
||||||
|
- name: auth-v1
|
||||||
|
_comment: 'GoTrue: /auth/v1/* -> http://auth:9999/*'
|
||||||
|
url: http://auth:9999/
|
||||||
|
routes:
|
||||||
|
- name: auth-v1-all
|
||||||
|
strip_path: true
|
||||||
|
paths:
|
||||||
|
- /auth/v1/
|
||||||
|
plugins:
|
||||||
|
- name: cors
|
||||||
|
- name: key-auth
|
||||||
|
config:
|
||||||
|
hide_credentials: false
|
||||||
|
- name: acl
|
||||||
|
config:
|
||||||
|
hide_groups_header: true
|
||||||
|
allow:
|
||||||
|
- admin
|
||||||
|
- anon
|
||||||
|
|
||||||
|
## Secure REST routes
|
||||||
|
- name: rest-v1
|
||||||
|
_comment: 'PostgREST: /rest/v1/* -> http://rest:3000/*'
|
||||||
|
url: http://rest:3000/
|
||||||
|
routes:
|
||||||
|
- name: rest-v1-all
|
||||||
|
strip_path: true
|
||||||
|
paths:
|
||||||
|
- /rest/v1/
|
||||||
|
plugins:
|
||||||
|
- name: cors
|
||||||
|
- name: key-auth
|
||||||
|
config:
|
||||||
|
hide_credentials: true
|
||||||
|
- name: acl
|
||||||
|
config:
|
||||||
|
hide_groups_header: true
|
||||||
|
allow:
|
||||||
|
- admin
|
||||||
|
- anon
|
||||||
|
|
||||||
|
## Secure GraphQL routes
|
||||||
|
- name: graphql-v1
|
||||||
|
_comment: 'PostgREST: /graphql/v1/* -> http://rest:3000/rpc/graphql'
|
||||||
|
url: http://rest:3000/rpc/graphql
|
||||||
|
routes:
|
||||||
|
- name: graphql-v1-all
|
||||||
|
strip_path: true
|
||||||
|
paths:
|
||||||
|
- /graphql/v1
|
||||||
|
plugins:
|
||||||
|
- name: cors
|
||||||
|
- name: key-auth
|
||||||
|
config:
|
||||||
|
hide_credentials: true
|
||||||
|
- name: request-transformer
|
||||||
|
config:
|
||||||
|
add:
|
||||||
|
headers:
|
||||||
|
- Content-Profile:graphql_public
|
||||||
|
- name: acl
|
||||||
|
config:
|
||||||
|
hide_groups_header: true
|
||||||
|
allow:
|
||||||
|
- admin
|
||||||
|
- anon
|
||||||
|
|
||||||
|
## Secure Realtime routes
|
||||||
|
- name: realtime-v1-ws
|
||||||
|
_comment: 'Realtime: /realtime/v1/* -> ws://realtime:4000/socket/*'
|
||||||
|
url: http://realtime-dev.supabase-realtime:4000/socket
|
||||||
|
protocol: ws
|
||||||
|
routes:
|
||||||
|
- name: realtime-v1-ws
|
||||||
|
strip_path: true
|
||||||
|
paths:
|
||||||
|
- /realtime/v1/
|
||||||
|
plugins:
|
||||||
|
- name: cors
|
||||||
|
- name: key-auth
|
||||||
|
config:
|
||||||
|
hide_credentials: false
|
||||||
|
- name: acl
|
||||||
|
config:
|
||||||
|
hide_groups_header: true
|
||||||
|
allow:
|
||||||
|
- admin
|
||||||
|
- anon
|
||||||
|
- name: realtime-v1-rest
|
||||||
|
_comment: 'Realtime REST: /realtime/v1/api -> http://realtime-dev.supabase-realtime:4000/api'
|
||||||
|
url: http://realtime-dev.supabase-realtime:4000/api
|
||||||
|
protocol: http
|
||||||
|
routes:
|
||||||
|
- name: realtime-v1-rest
|
||||||
|
strip_path: true
|
||||||
|
paths:
|
||||||
|
- /realtime/v1/api
|
||||||
|
plugins:
|
||||||
|
- name: cors
|
||||||
|
- name: key-auth
|
||||||
|
config:
|
||||||
|
hide_credentials: false
|
||||||
|
- name: acl
|
||||||
|
config:
|
||||||
|
hide_groups_header: true
|
||||||
|
allow:
|
||||||
|
- admin
|
||||||
|
- anon
|
||||||
|
## Storage routes: the storage server manages its own auth
|
||||||
|
- name: storage-v1
|
||||||
|
_comment: 'Storage: /storage/v1/* -> http://storage:5000/*'
|
||||||
|
url: http://storage:5000/
|
||||||
|
routes:
|
||||||
|
- name: storage-v1-all
|
||||||
|
strip_path: true
|
||||||
|
paths:
|
||||||
|
- /storage/v1/
|
||||||
|
plugins:
|
||||||
|
- name: cors
|
||||||
|
|
||||||
|
## Edge Functions routes
|
||||||
|
- name: functions-v1
|
||||||
|
_comment: 'Edge Functions: /functions/v1/* -> http://functions:9000/*'
|
||||||
|
url: http://functions:9000/
|
||||||
|
routes:
|
||||||
|
- name: functions-v1-all
|
||||||
|
strip_path: true
|
||||||
|
paths:
|
||||||
|
- /functions/v1/
|
||||||
|
plugins:
|
||||||
|
- name: cors
|
||||||
|
|
||||||
|
## Analytics routes
|
||||||
|
- name: analytics-v1
|
||||||
|
_comment: 'Analytics: /analytics/v1/* -> http://logflare:4000/*'
|
||||||
|
url: http://analytics:4000/
|
||||||
|
routes:
|
||||||
|
- name: analytics-v1-all
|
||||||
|
strip_path: true
|
||||||
|
paths:
|
||||||
|
- /analytics/v1/
|
||||||
|
|
||||||
|
## Secure Database routes
|
||||||
|
- name: meta
|
||||||
|
_comment: 'pg-meta: /pg/* -> http://pg-meta:8080/*'
|
||||||
|
url: http://meta:8080/
|
||||||
|
routes:
|
||||||
|
- name: meta-all
|
||||||
|
strip_path: true
|
||||||
|
paths:
|
||||||
|
- /pg/
|
||||||
|
plugins:
|
||||||
|
- name: key-auth
|
||||||
|
config:
|
||||||
|
hide_credentials: false
|
||||||
|
- name: acl
|
||||||
|
config:
|
||||||
|
hide_groups_header: true
|
||||||
|
allow:
|
||||||
|
- admin
|
||||||
295
config.toml
Normal file
295
config.toml
Normal file
@ -0,0 +1,295 @@
|
|||||||
|
# For detailed configuration reference documentation, visit:
|
||||||
|
# https://supabase.com/docs/guides/local-development/cli/config
|
||||||
|
# A string used to distinguish different Supabase projects on the same host. Defaults to the
|
||||||
|
# working directory name when running `supabase init`.
|
||||||
|
project_id = "ClassroomCopilot"
|
||||||
|
|
||||||
|
[api]
|
||||||
|
enabled = true
|
||||||
|
# Port to use for the API URL.
|
||||||
|
port = "env(PORT_SUPABASE_KONG_HTTP)"
|
||||||
|
# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API
|
||||||
|
# endpoints. `public` and `graphql_public` schemas are included by default.
|
||||||
|
schemas = ["public", "graphql_public"]
|
||||||
|
# Extra schemas to add to the search_path of every request.
|
||||||
|
extra_search_path = ["public", "extensions"]
|
||||||
|
# The maximum number of rows returns from a view, table, or stored procedure. Limits payload size
|
||||||
|
# for accidental or malicious requests.
|
||||||
|
max_rows = 1000
|
||||||
|
|
||||||
|
[api.tls]
|
||||||
|
# Enable HTTPS endpoints locally using a self-signed certificate.
|
||||||
|
enabled = false
|
||||||
|
|
||||||
|
[db]
|
||||||
|
# Port to use for the local database URL.
|
||||||
|
port = "env(PORT_SUPABASE_POSTGRES)"
|
||||||
|
# Port used by db diff command to initialize the shadow database.
|
||||||
|
shadow_port = 54320
|
||||||
|
# The database major version to use. This has to be the same as your remote database's. Run `SHOW
|
||||||
|
# server_version;` on the remote database to check.
|
||||||
|
major_version = 15
|
||||||
|
|
||||||
|
[db.pooler]
|
||||||
|
enabled = false
|
||||||
|
# Port to use for the local connection pooler.
|
||||||
|
port = 54329
|
||||||
|
# Specifies when a server connection can be reused by other clients.
|
||||||
|
# Configure one of the supported pooler modes: `transaction`, `session`.
|
||||||
|
pool_mode = "transaction"
|
||||||
|
# How many server connections to allow per user/database pair.
|
||||||
|
default_pool_size = 20
|
||||||
|
# Maximum number of client connections allowed.
|
||||||
|
max_client_conn = 100
|
||||||
|
|
||||||
|
[db.vault]
|
||||||
|
secret_key = "env(VAULT_ENC_KEY)"
|
||||||
|
|
||||||
|
[db.migrations]
|
||||||
|
# Specifies an ordered list of schema files that describe your database.
|
||||||
|
# Supports glob patterns relative to supabase directory: "./schemas/*.sql"
|
||||||
|
schema_paths = ["./db/init-scripts/*.sql", "./db/migrations/*.sql"]
|
||||||
|
|
||||||
|
[db.seed]
|
||||||
|
# If enabled, seeds the database after migrations during a db reset.
|
||||||
|
enabled = true
|
||||||
|
# Specifies an ordered list of seed files to load during db reset.
|
||||||
|
# Supports glob patterns relative to supabase directory: "./seeds/*.sql"
|
||||||
|
sql_paths = ["./db/init/seed.sql"]
|
||||||
|
|
||||||
|
[realtime]
|
||||||
|
enabled = true
|
||||||
|
# Bind realtime via either IPv4 or IPv6. (default: IPv4)
|
||||||
|
# ip_version = "IPv6"
|
||||||
|
# The maximum length in bytes of HTTP request headers. (default: 4096)
|
||||||
|
# max_header_length = 4096
|
||||||
|
|
||||||
|
[studio]
|
||||||
|
enabled = true
|
||||||
|
# Port to use for Supabase Studio.
|
||||||
|
port = "env(PORT_SUPABASE_STUDIO)"
|
||||||
|
# External URL of the API server that frontend connects to.
|
||||||
|
api_url = "http://localhost"
|
||||||
|
# OpenAI API Key to use for Supabase AI in the Supabase Studio.
|
||||||
|
openai_api_key = "env(OPENAI_API_KEY)"
|
||||||
|
|
||||||
|
# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they
|
||||||
|
# are monitored, and you can view the emails that would have been sent from the web interface.
|
||||||
|
[inbucket]
|
||||||
|
enabled = true
|
||||||
|
# Port to use for the email testing server web interface.
|
||||||
|
port = 54324
|
||||||
|
# Uncomment to expose additional ports for testing user applications that send emails.
|
||||||
|
# smtp_port = 54325
|
||||||
|
# pop3_port = 54326
|
||||||
|
admin_email = "env(SUPER_ADMIN_EMAIL)"
|
||||||
|
sender_name = "env(SUPER_ADMIN_NAME)"
|
||||||
|
|
||||||
|
[storage]
|
||||||
|
enabled = true
|
||||||
|
# The maximum file size allowed (e.g. "5MB", "500KB").
|
||||||
|
file_size_limit = "50MiB"
|
||||||
|
|
||||||
|
# Image transformation API is available to Supabase Pro plan.
|
||||||
|
# [storage.image_transformation]
|
||||||
|
# enabled = true
|
||||||
|
|
||||||
|
# Uncomment to configure local storage buckets
|
||||||
|
# [storage.buckets.images]
|
||||||
|
# public = false
|
||||||
|
# file_size_limit = "50MiB"
|
||||||
|
# allowed_mime_types = ["image/png", "image/jpeg"]
|
||||||
|
# objects_path = "./images"
|
||||||
|
|
||||||
|
[auth]
|
||||||
|
enabled = true
|
||||||
|
# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used
|
||||||
|
# in emails.
|
||||||
|
site_url = "env(SITE_URL)"
|
||||||
|
# A list of *exact* URLs that auth providers are permitted to redirect to post authentication.
|
||||||
|
additional_redirect_urls = ["env(ADDITIONAL_REDIRECT_URLS)"]
|
||||||
|
# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week).
|
||||||
|
jwt_expiry = 3600
|
||||||
|
# If disabled, the refresh token will never expire.
|
||||||
|
enable_refresh_token_rotation = true
|
||||||
|
# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds.
|
||||||
|
# Requires enable_refresh_token_rotation = true.
|
||||||
|
refresh_token_reuse_interval = 10
|
||||||
|
# Allow/disallow new user signups to your project.
|
||||||
|
enable_signup = true
|
||||||
|
# Allow/disallow anonymous sign-ins to your project.
|
||||||
|
enable_anonymous_sign_ins = false
|
||||||
|
# Allow/disallow testing manual linking of accounts
|
||||||
|
enable_manual_linking = false
|
||||||
|
# Passwords shorter than this value will be rejected as weak. Minimum 6, recommended 8 or more.
|
||||||
|
minimum_password_length = 6
|
||||||
|
# Passwords that do not meet the following requirements will be rejected as weak. Supported values
|
||||||
|
# are: `letters_digits`, `lower_upper_letters_digits`, `lower_upper_letters_digits_symbols`
|
||||||
|
password_requirements = ""
|
||||||
|
|
||||||
|
# Configure one of the supported captcha providers: `hcaptcha`, `turnstile`.
|
||||||
|
# [auth.captcha]
|
||||||
|
# enabled = true
|
||||||
|
# provider = "hcaptcha"
|
||||||
|
# secret = ""
|
||||||
|
|
||||||
|
[auth.email]
|
||||||
|
# Allow/disallow new user signups via email to your project.
|
||||||
|
enable_signup = true
|
||||||
|
# If enabled, a user will be required to confirm any email change on both the old, and new email
|
||||||
|
# addresses. If disabled, only the new email is required to confirm.
|
||||||
|
double_confirm_changes = true
|
||||||
|
# If enabled, users need to confirm their email address before signing in.
|
||||||
|
enable_confirmations = false
|
||||||
|
# If enabled, users will need to reauthenticate or have logged in recently to change their password.
|
||||||
|
secure_password_change = false
|
||||||
|
# Controls the minimum amount of time that must pass before sending another signup confirmation or password reset email.
|
||||||
|
max_frequency = "1s"
|
||||||
|
# Number of characters used in the email OTP.
|
||||||
|
otp_length = 6
|
||||||
|
# Number of seconds before the email OTP expires (defaults to 1 hour).
|
||||||
|
otp_expiry = 3600
|
||||||
|
|
||||||
|
# Use a production-ready SMTP server
|
||||||
|
# [auth.email.smtp]
|
||||||
|
# enabled = true
|
||||||
|
# host = "smtp.sendgrid.net"
|
||||||
|
# port = 587
|
||||||
|
# user = "apikey"
|
||||||
|
# pass = "env(SENDGRID_API_KEY)"
|
||||||
|
# admin_email = "admin@email.com"
|
||||||
|
# sender_name = "Admin"
|
||||||
|
|
||||||
|
# Uncomment to customize email template
|
||||||
|
# [auth.email.template.invite]
|
||||||
|
# subject = "You have been invited"
|
||||||
|
# content_path = "./supabase/templates/invite.html"
|
||||||
|
|
||||||
|
[auth.sms]
|
||||||
|
# Allow/disallow new user signups via SMS to your project.
|
||||||
|
enable_signup = false
|
||||||
|
# If enabled, users need to confirm their phone number before signing in.
|
||||||
|
enable_confirmations = false
|
||||||
|
# Template for sending OTP to users
|
||||||
|
template = "Your code is {{ .Code }}"
|
||||||
|
# Controls the minimum amount of time that must pass before sending another sms otp.
|
||||||
|
max_frequency = "5s"
|
||||||
|
|
||||||
|
# Use pre-defined map of phone number to OTP for testing.
|
||||||
|
# [auth.sms.test_otp]
|
||||||
|
# 4152127777 = "123456"
|
||||||
|
|
||||||
|
# Configure logged in session timeouts.
|
||||||
|
# [auth.sessions]
|
||||||
|
# Force log out after the specified duration.
|
||||||
|
# timebox = "24h"
|
||||||
|
# Force log out if the user has been inactive longer than the specified duration.
|
||||||
|
# inactivity_timeout = "8h"
|
||||||
|
|
||||||
|
# This hook runs before a token is issued and allows you to add additional claims based on the authentication method used.
|
||||||
|
# [auth.hook.custom_access_token]
|
||||||
|
# enabled = true
|
||||||
|
# uri = "pg-functions://<database>/<schema>/<hook_name>"
|
||||||
|
|
||||||
|
# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`.
|
||||||
|
[auth.sms.twilio]
|
||||||
|
enabled = false
|
||||||
|
account_sid = ""
|
||||||
|
message_service_sid = ""
|
||||||
|
# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead:
|
||||||
|
auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)"
|
||||||
|
|
||||||
|
# Multi-factor-authentication is available to Supabase Pro plan.
|
||||||
|
[auth.mfa]
|
||||||
|
# Control how many MFA factors can be enrolled at once per user.
|
||||||
|
max_enrolled_factors = 10
|
||||||
|
|
||||||
|
# Control MFA via App Authenticator (TOTP)
|
||||||
|
[auth.mfa.totp]
|
||||||
|
enroll_enabled = false
|
||||||
|
verify_enabled = false
|
||||||
|
|
||||||
|
# Configure MFA via Phone Messaging
|
||||||
|
[auth.mfa.phone]
|
||||||
|
enroll_enabled = false
|
||||||
|
verify_enabled = false
|
||||||
|
otp_length = 6
|
||||||
|
template = "Your code is {{ .Code }}"
|
||||||
|
max_frequency = "5s"
|
||||||
|
|
||||||
|
# Configure MFA via WebAuthn
|
||||||
|
# [auth.mfa.web_authn]
|
||||||
|
# enroll_enabled = true
|
||||||
|
# verify_enabled = true
|
||||||
|
|
||||||
|
# Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`,
|
||||||
|
# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`,
|
||||||
|
# `twitter`, `slack`, `spotify`, `workos`, `zoom`.
|
||||||
|
[auth.external.apple]
|
||||||
|
enabled = false
|
||||||
|
client_id = ""
|
||||||
|
# DO NOT commit your OAuth provider secret to git. Use environment variable substitution instead:
|
||||||
|
secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)"
|
||||||
|
# Overrides the default auth redirectUrl.
|
||||||
|
redirect_uri = ""
|
||||||
|
# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure,
|
||||||
|
# or any other third-party OIDC providers.
|
||||||
|
url = ""
|
||||||
|
# If enabled, the nonce check will be skipped. Required for local sign in with Google auth.
|
||||||
|
skip_nonce_check = false
|
||||||
|
|
||||||
|
# Use Firebase Auth as a third-party provider alongside Supabase Auth.
|
||||||
|
[auth.third_party.firebase]
|
||||||
|
enabled = false
|
||||||
|
# project_id = "my-firebase-project"
|
||||||
|
|
||||||
|
# Use Auth0 as a third-party provider alongside Supabase Auth.
|
||||||
|
[auth.third_party.auth0]
|
||||||
|
enabled = false
|
||||||
|
# tenant = "my-auth0-tenant"
|
||||||
|
# tenant_region = "us"
|
||||||
|
|
||||||
|
# Use AWS Cognito (Amplify) as a third-party provider alongside Supabase Auth.
|
||||||
|
[auth.third_party.aws_cognito]
|
||||||
|
enabled = false
|
||||||
|
# user_pool_id = "my-user-pool-id"
|
||||||
|
# user_pool_region = "us-east-1"
|
||||||
|
|
||||||
|
[edge_runtime]
|
||||||
|
enabled = true
|
||||||
|
# Configure one of the supported request policies: `oneshot`, `per_worker`.
|
||||||
|
# Use `oneshot` for hot reload, or `per_worker` for load testing.
|
||||||
|
policy = "oneshot"
|
||||||
|
# Port to attach the Chrome inspector for debugging edge functions.
|
||||||
|
inspector_port = 8083
|
||||||
|
|
||||||
|
# Use these configurations to customize your Edge Function.
|
||||||
|
# [functions.MY_FUNCTION_NAME]
|
||||||
|
# enabled = true
|
||||||
|
# verify_jwt = true
|
||||||
|
# import_map = "./functions/MY_FUNCTION_NAME/deno.json"
|
||||||
|
# Uncomment to specify a custom file path to the entrypoint.
|
||||||
|
# Supported file extensions are: .ts, .js, .mjs, .jsx, .tsx
|
||||||
|
# entrypoint = "./functions/MY_FUNCTION_NAME/index.ts"
|
||||||
|
# Specifies static files to be bundled with the function. Supports glob patterns.
|
||||||
|
# For example, if you want to serve static HTML pages in your function:
|
||||||
|
# static_files = [ "./functions/MY_FUNCTION_NAME/*.html" ]
|
||||||
|
|
||||||
|
[analytics]
|
||||||
|
enabled = true
|
||||||
|
port = 54327
|
||||||
|
# Configure one of the supported backends: `postgres`, `bigquery`.
|
||||||
|
backend = "postgres"
|
||||||
|
|
||||||
|
# Experimental features may be deprecated any time
|
||||||
|
[experimental]
|
||||||
|
# Configures Postgres storage engine to use OrioleDB (S3)
|
||||||
|
orioledb_version = ""
|
||||||
|
# Configures S3 bucket URL, eg. <bucket_name>.s3-<region>.amazonaws.com
|
||||||
|
s3_host = "env(S3_HOST)"
|
||||||
|
# Configures S3 bucket region, eg. us-east-1
|
||||||
|
s3_region = "env(S3_REGION)"
|
||||||
|
# Configures AWS_ACCESS_KEY_ID for S3 bucket
|
||||||
|
s3_access_key = "env(S3_ACCESS_KEY)"
|
||||||
|
# Configures AWS_SECRET_ACCESS_KEY for S3 bucket
|
||||||
|
s3_secret_key = "env(S3_SECRET_KEY)"
|
||||||
208
db/init-scripts/51-webhooks.sql
Normal file
208
db/init-scripts/51-webhooks.sql
Normal file
@ -0,0 +1,208 @@
|
|||||||
|
BEGIN;

-- pg_net provides asynchronous HTTP requests from Postgres; install it
-- into the shared extensions schema.
CREATE EXTENSION IF NOT EXISTS pg_net SCHEMA extensions;

-- Schema that holds the webhook machinery, owned by the platform superuser.
CREATE SCHEMA supabase_functions AUTHORIZATION supabase_admin;

GRANT USAGE ON SCHEMA supabase_functions TO postgres, anon, authenticated, service_role;
ALTER DEFAULT PRIVILEGES IN SCHEMA supabase_functions GRANT ALL ON TABLES TO postgres, anon, authenticated, service_role;
ALTER DEFAULT PRIVILEGES IN SCHEMA supabase_functions GRANT ALL ON FUNCTIONS TO postgres, anon, authenticated, service_role;
ALTER DEFAULT PRIVILEGES IN SCHEMA supabase_functions GRANT ALL ON SEQUENCES TO postgres, anon, authenticated, service_role;

-- Tracks which webhook migrations have been applied.
CREATE TABLE supabase_functions.migrations (
    version text PRIMARY KEY,
    inserted_at timestamptz NOT NULL DEFAULT NOW()
);

-- Record the initial supabase_functions migration.
INSERT INTO supabase_functions.migrations (version) VALUES ('initial');

-- Audit row written for every fired hook; request_id links back to the
-- pg_net request that the hook issued.
CREATE TABLE supabase_functions.hooks (
    id bigserial PRIMARY KEY,
    hook_table_id integer NOT NULL,
    hook_name text NOT NULL,
    created_at timestamptz NOT NULL DEFAULT NOW(),
    request_id bigint
);

CREATE INDEX supabase_functions_hooks_request_id_idx ON supabase_functions.hooks USING btree (request_id);
CREATE INDEX supabase_functions_hooks_h_table_id_h_name_idx ON supabase_functions.hooks USING btree (hook_table_id, hook_name);

COMMENT ON TABLE supabase_functions.hooks IS 'Supabase Functions Hooks: Audit trail for triggered hooks.';
|
||||||
|
-- Generic trigger function that forwards row changes as HTTP webhooks via
-- pg_net. Trigger arguments (TG_ARGV):
--   [0] url (required), [1] method 'GET'|'POST' (required),
--   [2] headers jsonb, [3] params jsonb, [4] timeout in milliseconds.
-- Records the issued request in supabase_functions.hooks and returns NEW.
CREATE FUNCTION supabase_functions.http_request()
    RETURNS trigger
    LANGUAGE plpgsql
AS $function$
DECLARE
    request_id bigint;
    payload jsonb;
    url text := TG_ARGV[0]::text;
    method text := TG_ARGV[1]::text;
    headers jsonb DEFAULT '{}'::jsonb;
    params jsonb DEFAULT '{}'::jsonb;
    timeout_ms integer DEFAULT 1000;
BEGIN
    -- Required arguments; CREATE TRIGGER passes the literal string 'null'
    -- when an argument is omitted, so check both spellings.
    IF url IS NULL OR url = 'null' THEN
        RAISE EXCEPTION 'url argument is missing';
    END IF;

    IF method IS NULL OR method = 'null' THEN
        RAISE EXCEPTION 'method argument is missing';
    END IF;

    -- Optional arguments with defaults.
    IF TG_ARGV[2] IS NULL OR TG_ARGV[2] = 'null' THEN
        headers = '{"Content-Type": "application/json"}'::jsonb;
    ELSE
        headers = TG_ARGV[2]::jsonb;
    END IF;

    IF TG_ARGV[3] IS NULL OR TG_ARGV[3] = 'null' THEN
        params = '{}'::jsonb;
    ELSE
        params = TG_ARGV[3]::jsonb;
    END IF;

    IF TG_ARGV[4] IS NULL OR TG_ARGV[4] = 'null' THEN
        timeout_ms = 1000;
    ELSE
        timeout_ms = TG_ARGV[4]::integer;
    END IF;

    CASE
        WHEN method = 'GET' THEN
            SELECT http_get INTO request_id FROM net.http_get(
                url,
                params,
                headers,
                timeout_ms
            );
        WHEN method = 'POST' THEN
            -- POST bodies carry the full change event for the row.
            payload = jsonb_build_object(
                'old_record', OLD,
                'record', NEW,
                'type', TG_OP,
                'table', TG_TABLE_NAME,
                'schema', TG_TABLE_SCHEMA
            );

            SELECT http_post INTO request_id FROM net.http_post(
                url,
                payload,
                params,
                headers,
                timeout_ms
            );
        ELSE
            RAISE EXCEPTION 'method argument % is invalid', method;
    END CASE;

    -- Audit trail: remember which trigger fired and which request it issued.
    INSERT INTO supabase_functions.hooks
        (hook_table_id, hook_name, request_id)
    VALUES
        (TG_RELID, TG_NAME, request_id);

    RETURN NEW;
END
$function$;
|
||||||
|
-- Supabase super admin: create the supabase_functions_admin role once.
DO
$$
BEGIN
    IF NOT EXISTS (
        SELECT 1
        FROM pg_roles
        WHERE rolname = 'supabase_functions_admin'
    )
    THEN
        CREATE USER supabase_functions_admin NOINHERIT CREATEROLE LOGIN NOREPLICATION;
    END IF;
END
$$;

-- Hand ownership of the webhook machinery to supabase_functions_admin.
GRANT ALL PRIVILEGES ON SCHEMA supabase_functions TO supabase_functions_admin;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA supabase_functions TO supabase_functions_admin;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA supabase_functions TO supabase_functions_admin;
ALTER USER supabase_functions_admin SET search_path = "supabase_functions";
ALTER table "supabase_functions".migrations OWNER TO supabase_functions_admin;
ALTER table "supabase_functions".hooks OWNER TO supabase_functions_admin;
ALTER function "supabase_functions".http_request() OWNER TO supabase_functions_admin;
GRANT supabase_functions_admin TO postgres;

-- Remove unused supabase_pg_net_admin role (left over from older images).
DO
$$
BEGIN
    IF EXISTS (
        SELECT 1
        FROM pg_roles
        WHERE rolname = 'supabase_pg_net_admin'
    )
    THEN
        REASSIGN OWNED BY supabase_pg_net_admin TO supabase_admin;
        DROP OWNED BY supabase_pg_net_admin;
        DROP ROLE supabase_pg_net_admin;
    END IF;
END
$$;

-- pg_net grants when the extension is already enabled at install time.
DO
$$
BEGIN
    IF EXISTS (
        SELECT 1
        FROM pg_extension
        WHERE extname = 'pg_net'
    )
    THEN
        GRANT USAGE ON SCHEMA net TO supabase_functions_admin, postgres, anon, authenticated, service_role;
        ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER;
        ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER;
        ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net;
        ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net;
        REVOKE ALL ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC;
        REVOKE ALL ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC;
        GRANT EXECUTE ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role;
        GRANT EXECUTE ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role;
    END IF;
END
$$;

-- Event trigger: re-apply the same pg_net grants whenever the extension is
-- (re)created later via CREATE EXTENSION.
CREATE OR REPLACE FUNCTION extensions.grant_pg_net_access()
    RETURNS event_trigger
    LANGUAGE plpgsql
AS $$
BEGIN
    IF EXISTS (
        SELECT 1
        FROM pg_event_trigger_ddl_commands() AS ev
        JOIN pg_extension AS ext
            ON ev.objid = ext.oid
        WHERE ext.extname = 'pg_net'
    )
    THEN
        GRANT USAGE ON SCHEMA net TO supabase_functions_admin, postgres, anon, authenticated, service_role;
        ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER;
        ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER;
        ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net;
        ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net;
        REVOKE ALL ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC;
        REVOKE ALL ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC;
        GRANT EXECUTE ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role;
        GRANT EXECUTE ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role;
    END IF;
END;
$$;

COMMENT ON FUNCTION extensions.grant_pg_net_access IS 'Grants access to pg_net';

DO
$$
BEGIN
    IF NOT EXISTS (
        SELECT 1
        FROM pg_event_trigger
        WHERE evtname = 'issue_pg_net_access'
    ) THEN
        CREATE EVENT TRIGGER issue_pg_net_access ON ddl_command_end WHEN TAG IN ('CREATE EXTENSION')
            EXECUTE PROCEDURE extensions.grant_pg_net_access();
    END IF;
END
$$;

INSERT INTO supabase_functions.migrations (version) VALUES ('20210809183423_update_grants');

-- http_request runs as definer with a pinned search_path; PUBLIC may not
-- call it directly, only the platform roles.
ALTER function supabase_functions.http_request() SECURITY DEFINER;
ALTER function supabase_functions.http_request() SET search_path = supabase_functions;
REVOKE ALL ON FUNCTION supabase_functions.http_request() FROM PUBLIC;
GRANT EXECUTE ON FUNCTION supabase_functions.http_request() TO postgres, anon, authenticated, service_role;

COMMIT;
|
||||||
5
db/init-scripts/52-jwt.sql
Normal file
5
db/init-scripts/52-jwt.sql
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
-- Load JWT settings from the container environment (psql variables).
\set jwt_secret `echo "$JWT_SECRET"`
\set jwt_exp `echo "$JWT_EXP"`

-- Persist them as database-level GUCs read by GoTrue/PostgREST.
ALTER DATABASE postgres SET "app.settings.jwt_secret" TO :'jwt_secret';
ALTER DATABASE postgres SET "app.settings.jwt_exp" TO :'jwt_exp';
|
||||||
8
db/init-scripts/52-roles.sql
Normal file
8
db/init-scripts/52-roles.sql
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
-- NOTE: change to your own passwords for production environments
\set pgpass `echo "$POSTGRES_PASSWORD"`

-- Every platform role shares POSTGRES_PASSWORD in this local setup.
ALTER USER authenticator WITH PASSWORD :'pgpass';
ALTER USER pgbouncer WITH PASSWORD :'pgpass';
ALTER USER supabase_auth_admin WITH PASSWORD :'pgpass';
ALTER USER supabase_functions_admin WITH PASSWORD :'pgpass';
ALTER USER supabase_storage_admin WITH PASSWORD :'pgpass';
|
||||||
25
db/migrations/core/60-create-databases.sql
Normal file
25
db/migrations/core/60-create-databases.sql
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
-- Create Keycloak schema if it doesn't exist
create schema if not exists keycloak;

-- Create Keycloak user if it doesn't exist.
-- NOTE(review): password is hard-coded for local development only —
-- override it for any shared or production environment.
do $$
begin
  if not exists (select 1 from pg_roles where rolname = 'keycloak') then
    create user keycloak with password 'keycloak';
  end if;
end
$$;

-- Grant schema usage and ownership to the Keycloak user
alter schema keycloak owner to keycloak;
grant usage on schema keycloak to keycloak;

-- Grant all privileges on all existing tables in the keycloak schema
grant all privileges on all tables in schema keycloak to keycloak;

-- Grant all privileges on all existing sequences in the keycloak schema
grant all privileges on all sequences in schema keycloak to keycloak;

-- Default privileges so future tables/sequences are covered too
alter default privileges in schema keycloak grant all on tables to keycloak;
alter default privileges in schema keycloak grant all on sequences to keycloak;
|
||||||
200
db/migrations/core/61-core-schema.sql
Normal file
200
db/migrations/core/61-core-schema.sql
Normal file
@ -0,0 +1,200 @@
|
|||||||
|
--[ Database Schema Version ]--
-- Version: 1.0.0
-- Last Updated: 2024-02-24
-- Description: Core schema setup for ClassConcepts
-- Dependencies: auth.users (Supabase Auth)

--[ Validation ]--
-- Fail fast if the Supabase prerequisites are missing.
do $$
begin
  -- Verify required extensions
  if not exists (select 1 from pg_extension where extname = 'uuid-ossp') then
    raise exception 'Required extension uuid-ossp is not installed';
  end if;

  -- Verify auth schema exists
  if not exists (select 1 from information_schema.schemata where schema_name = 'auth') then
    raise exception 'Required auth schema is not available';
  end if;

  -- Verify storage schema exists
  if not exists (select 1 from information_schema.schemata where schema_name = 'storage') then
    raise exception 'Required storage schema is not available';
  end if;
end $$;

--[ 1. Extensions ]--
create extension if not exists "uuid-ossp";

-- Create rpc schema if it doesn't exist
create schema if not exists rpc;
grant usage on schema rpc to anon, authenticated;

-- Create exec_sql function for admin operations.
-- SECURITY: this executes arbitrary SQL as the function owner. By default
-- new functions are EXECUTE-able by PUBLIC, which would let any API role
-- run arbitrary SQL as the definer — revoke that and grant only to
-- service_role.
create or replace function exec_sql(query text)
returns void as $$
begin
  execute query;
end;
$$ language plpgsql security definer;

revoke all on function exec_sql(text) from public;
grant execute on function exec_sql(text) to service_role;

-- Create updated_at trigger function: stamps updated_at with the current
-- UTC time on every UPDATE.
create or replace function public.handle_updated_at()
returns trigger as $$
begin
  new.updated_at = timezone('utc'::text, now());
  return new;
end;
$$ language plpgsql security definer;
|
||||||
|
|
||||||
|
--[ 5. Core Tables ]--
-- Base user profiles: one row per auth.users row, removed with the user.
create table if not exists public.profiles (
  id uuid primary key references auth.users(id) on delete cascade,
  email text not null unique,
  user_type text not null check (user_type in ('admin', 'email_teacher', 'email_student')),
  username text not null unique,
  full_name text,
  display_name text,
  metadata jsonb default '{}'::jsonb,
  last_login timestamp with time zone,
  created_at timestamp with time zone default timezone('utc'::text, now()),
  updated_at timestamp with time zone default timezone('utc'::text, now())
);
comment on table public.profiles is 'User profiles linked to Supabase auth.users';
comment on column public.profiles.user_type is 'Type of user: admin, teacher, or student';
|
||||||
|
|
||||||
|
-- Institute import data: raw rows from external establishment feeds
-- (column names follow the UK GIAS establishment export).
create table if not exists public.institute_imports (
  id uuid primary key default uuid_generate_v4(),
  -- Identity
  urn text unique,
  establishment_name text not null,
  la_code text,
  la_name text,
  establishment_number text,
  establishment_type text,
  establishment_type_group text,
  establishment_status text,
  reason_establishment_opened text,
  open_date date,
  reason_establishment_closed text,
  close_date date,
  -- Education profile
  phase_of_education text,
  statutory_low_age integer,
  statutory_high_age integer,
  boarders text,
  nursery_provision text,
  official_sixth_form text,
  gender text,
  religious_character text,
  religious_ethos text,
  diocese text,
  admissions_policy text,
  school_capacity integer,
  special_classes text,
  -- Census figures
  census_date date,
  number_of_pupils integer,
  number_of_boys integer,
  number_of_girls integer,
  percentage_fsm numeric(5,2),
  -- Governance
  trust_school_flag text,
  trusts_name text,
  school_sponsor_flag text,
  school_sponsors_name text,
  federation_flag text,
  federations_name text,
  ukprn text,
  fehe_identifier text,
  further_education_type text,
  ofsted_last_inspection date,
  last_changed_date date,
  -- Address and contact
  street text,
  locality text,
  address3 text,
  town text,
  county text,
  postcode text,
  school_website text,
  telephone_num text,
  head_title text,
  head_first_name text,
  head_last_name text,
  head_preferred_job_title text,
  -- Geography
  gssla_code text,
  parliamentary_constituency text,
  urban_rural text,
  rsc_region text,
  country text,
  uprn text,
  -- SEN provision
  sen_stat boolean,
  sen_no_stat boolean,
  sen_unit_on_roll integer,
  sen_unit_capacity integer,
  resourced_provision_on_roll integer,
  resourced_provision_capacity integer,
  -- Bookkeeping
  metadata jsonb default '{}'::jsonb,
  imported_at timestamp with time zone default timezone('utc'::text, now()),
  updated_at timestamp with time zone default timezone('utc'::text, now())
);
comment on table public.institute_imports is 'Raw institute data imported from external sources';
|
||||||
|
|
||||||
|
-- Active institutes: curated records promoted from institute_imports,
-- with sync bookkeeping for the public and private Neo4j graphs.
create table if not exists public.institutes (
  id uuid primary key default uuid_generate_v4(),
  import_id uuid references public.institute_imports(id),
  name text not null,
  urn text unique,
  status text not null default 'active' check (status in ('active', 'inactive', 'pending')),
  address jsonb default '{}'::jsonb,
  website text,
  metadata jsonb default '{}'::jsonb,
  -- Neo4j sync state
  neo4j_unique_id text,
  neo4j_public_sync_status text default 'pending' check (neo4j_public_sync_status in ('pending', 'synced', 'failed')),
  neo4j_public_sync_at timestamp with time zone,
  neo4j_private_sync_status text default 'not_started' check (neo4j_private_sync_status in ('not_started', 'pending', 'synced', 'failed')),
  neo4j_private_sync_at timestamp with time zone,
  created_at timestamp with time zone default timezone('utc'::text, now()),
  updated_at timestamp with time zone default timezone('utc'::text, now())
);
comment on table public.institutes is 'Active institutes in the system';
|
||||||
|
|
||||||
|
--[ 6. Relationship Tables ]--
-- Institute memberships: a profile holds at most one role per institute.
create table if not exists public.institute_memberships (
  id uuid primary key default uuid_generate_v4(),
  profile_id uuid references public.profiles(id) on delete cascade,
  institute_id uuid references public.institutes(id) on delete cascade,
  role text not null check (role in ('admin', 'teacher', 'student')),
  tldraw_preferences jsonb default '{}'::jsonb,
  metadata jsonb default '{}'::jsonb,
  created_at timestamp with time zone default timezone('utc'::text, now()),
  updated_at timestamp with time zone default timezone('utc'::text, now()),
  unique(profile_id, institute_id)
);
comment on table public.institute_memberships is 'Manages user roles and relationships with institutes';

-- Membership requests: pending join requests awaiting moderation.
create table if not exists public.institute_membership_requests (
  id uuid primary key default uuid_generate_v4(),
  profile_id uuid references public.profiles(id) on delete cascade,
  institute_id uuid references public.institutes(id) on delete cascade,
  requested_role text check (requested_role in ('teacher', 'student')),
  status text default 'pending' check (status in ('pending', 'approved', 'rejected')),
  metadata jsonb default '{}'::jsonb,
  created_at timestamp with time zone default timezone('utc'::text, now()),
  updated_at timestamp with time zone default timezone('utc'::text, now())
);
comment on table public.institute_membership_requests is 'Tracks requests to join institutes';
|
||||||
|
|
||||||
|
--[ 7. Audit Tables ]--
-- System audit logs; profile_id is preserved as NULL if the actor's
-- profile is later deleted.
create table if not exists public.audit_logs (
  id uuid primary key default uuid_generate_v4(),
  profile_id uuid references public.profiles(id) on delete set null,
  action_type text,
  table_name text,
  record_id uuid,
  changes jsonb,
  created_at timestamp with time zone default timezone('utc'::text, now())
);
comment on table public.audit_logs is 'System-wide audit trail for important operations';
|
||||||
199
db/migrations/core/62-functions-triggers.sql
Normal file
199
db/migrations/core/62-functions-triggers.sql
Normal file
@ -0,0 +1,199 @@
|
|||||||
|
--[ 8. Auth Functions ]--
-- auth.is_admin(): true iff the current JWT subject has an admin profile.
create or replace function auth.is_admin()
returns boolean as $$
  select coalesce(
    (select true
     from public.profiles
     where id = auth.uid()
       and user_type = 'admin'),
    false
  );
$$ language sql security definer;

-- auth.is_super_admin(): true iff the current user's auth.users role is
-- 'supabase_admin'.
create or replace function auth.is_super_admin()
returns boolean as $$
  select coalesce(
    (select role = 'supabase_admin'
     from auth.users
     where id = auth.uid()),
    false
  );
$$ language sql security definer;

-- Public wrappers so the checks are callable through the API schema.
create or replace function public.is_admin()
returns boolean as $$
  select auth.is_admin();
$$ language sql security definer;

create or replace function public.is_super_admin()
returns boolean as $$
  select auth.is_super_admin();
$$ language sql security definer;

-- Grant execute permissions
grant execute on function public.is_admin to authenticated;
grant execute on function public.is_super_admin to authenticated;
grant execute on function auth.is_admin to authenticated;
grant execute on function auth.is_super_admin to authenticated;
|
||||||
|
|
||||||
|
-- Initial admin setup function: promote the profile with the given email
-- to admin and return its identifying fields as JSON. Raises if no
-- matching profile exists or the caller is not privileged.
create or replace function public.setup_initial_admin(admin_email text)
returns json
language plpgsql
security definer
as $$
declare
  result json;
begin
  -- Only allow this to run as service role or a superuser.
  if not (
    current_user = 'service_role'
    or exists (
      select 1 from pg_roles
      where rolname = current_user
        and rolsuper
    )
  ) then
    raise exception 'Must be run as service_role or superuser';
  end if;

  -- Promote the profile; fill username/display_name only when unset.
  update public.profiles
  set user_type = 'admin',
      username = coalesce(username, 'superadmin'),
      display_name = coalesce(display_name, 'Super Admin')
  where email = admin_email
  returning json_build_object(
    'id', id,
    'email', email,
    'user_type', user_type,
    'username', username,
    'display_name', display_name
  ) into result;

  if result is null then
    raise exception 'Admin user with email % not found', admin_email;
  end if;

  return result;
end;
$$;

-- Grant execute permissions
revoke execute on function public.setup_initial_admin from public;
grant execute on function public.setup_initial_admin to authenticated, service_role, supabase_admin;

-- RPC wrapper so the function is reachable over the REST API.
create or replace function rpc.setup_initial_admin(admin_email text)
returns json
language plpgsql
security definer
as $$
begin
  return public.setup_initial_admin(admin_email);
end;
$$;

-- Grant execute permissions for the RPC wrapper
grant execute on function rpc.setup_initial_admin to authenticated, service_role, supabase_admin;
|
||||||
|
|
||||||
|
--[ 9. Utility Functions ]--
-- check_db_ready(): readiness probe — true only when the essential
-- schemas and tables exist and RLS is enabled on public.profiles.
create or replace function check_db_ready()
returns boolean
language plpgsql
security definer
as $$
begin
  -- Check that ALL essential schemas exist.
  -- BUG FIX: the original used `not exists (... where schema_name in
  -- ('auth','storage','public'))`, which passes when ANY one of the three
  -- schemas exists; readiness requires all three.
  if (
    select count(distinct schema_name)
    from information_schema.schemata
    where schema_name in ('auth', 'storage', 'public')
  ) < 3 then
    return false;
  end if;

  -- Check if essential tables exist
  if not exists (
    select 1
    from information_schema.tables
    where table_schema = 'auth'
      and table_name = 'users'
  ) then
    return false;
  end if;

  -- Check if RLS is enabled on public.profiles
  if not exists (
    select 1
    from pg_tables
    where schemaname = 'public'
      and tablename = 'profiles'
      and rowsecurity = true
  ) then
    return false;
  end if;

  return true;
end;
$$;

-- Grant execute permission
grant execute on function check_db_ready to anon, authenticated, service_role;
|
||||||
|
|
||||||
|
-- Function to handle new user registration: mirrors each new auth.users
-- row into public.profiles, deriving defaults from the signup metadata.
create or replace function public.handle_new_user()
returns trigger
language plpgsql
security definer set search_path = public
as $$
declare
  default_user_type text := 'email_student';
  default_username text;
begin
  -- Fall back to the local part of the email for username/display name.
  default_username := split_part(new.email, '@', 1);

  insert into public.profiles (
    id,
    email,
    user_type,
    username,
    display_name
  )
  values (
    new.id,
    new.email,
    coalesce(new.raw_user_meta_data->>'user_type', default_user_type),
    coalesce(new.raw_user_meta_data->>'username', default_username),
    coalesce(new.raw_user_meta_data->>'display_name', default_username)
  );
  return new;
end;
$$;

-- Trigger for new user creation (idempotent re-create).
drop trigger if exists on_auth_user_created on auth.users;
create trigger on_auth_user_created
  after insert on auth.users
  for each row execute procedure public.handle_new_user();

--[ 11. Database Triggers ]--
-- updated_at maintenance triggers, one per mutable table.
drop trigger if exists handle_profiles_updated_at on public.profiles;
create trigger handle_profiles_updated_at
  before update on public.profiles
  for each row execute function public.handle_updated_at();

drop trigger if exists handle_institute_memberships_updated_at on public.institute_memberships;
create trigger handle_institute_memberships_updated_at
  before update on public.institute_memberships
  for each row execute function public.handle_updated_at();

drop trigger if exists handle_membership_requests_updated_at on public.institute_membership_requests;
create trigger handle_membership_requests_updated_at
  before update on public.institute_membership_requests
  for each row execute function public.handle_updated_at();
|
||||||
50
db/migrations/core/63-storage-policies.sql
Normal file
50
db/migrations/core/63-storage-policies.sql
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
-- Enable RLS on storage.buckets
alter table if exists storage.buckets enable row level security;

-- Drop existing policies if they exist (idempotent re-run).
drop policy if exists "Super admin has full access to buckets" on storage.buckets;
drop policy if exists "Users can create their own buckets" on storage.buckets;
drop policy if exists "Users can view their own buckets" on storage.buckets;

-- Admin roles and admin profiles get unrestricted access.
create policy "Super admin has full access to buckets"
  on storage.buckets for all
  using (
    current_user = 'service_role'
    or current_user = 'supabase_admin'
    or exists (
      select 1 from public.profiles
      where id = auth.uid()
        and user_type = 'admin'
    )
  );

-- Allow authenticated users to create buckets they own.
create policy "Users can create their own buckets"
  on storage.buckets for insert
  to authenticated
  with check (
    owner::text = auth.uid()::text
    or exists (
      select 1 from public.profiles
      where id = auth.uid()
        and user_type = 'admin'
    )
  );

-- Allow users to view buckets they own (admins see everything).
create policy "Users can view their own buckets"
  on storage.buckets for select
  to authenticated
  using (
    owner::text = auth.uid()::text
    or exists (
      select 1 from public.profiles
      where id = auth.uid()
        and user_type = 'admin'
    )
  );

-- Grant necessary permissions.
-- NOTE(review): `grant all` on storage tables to every authenticated user
-- is broad; RLS above is the effective gate — confirm this is intended.
grant all on storage.buckets to authenticated;
grant all on storage.objects to authenticated;
|
||||||
31
db/migrations/core/64-initial-admin.sql
Normal file
31
db/migrations/core/64-initial-admin.sql
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
-- Ensure uuid-ossp extension is enabled
create extension if not exists "uuid-ossp" schema extensions;

-- One-shot bootstrap: if no admin profile exists yet, open up the public
-- schema to authenticated users. Dropped again after execution.
-- NOTE(review): `grant all ... to authenticated` across the whole public
-- schema is very broad — confirm this is intended beyond local dev.
create or replace function public.setup_initial_admin()
returns void
language plpgsql
security definer
set search_path = public, extensions
as $$
begin
  -- Skip entirely once an admin exists.
  if exists (
    select 1 from public.profiles
    where user_type = 'admin'
  ) then
    return;
  end if;

  -- Grant necessary permissions
  grant all on all tables in schema public to authenticated;
  grant all on all sequences in schema public to authenticated;
  grant all on all functions in schema public to authenticated;
end;
$$;

-- Execute the function
select public.setup_initial_admin();

-- Drop the zero-argument bootstrap variant after execution (the
-- one-argument public.setup_initial_admin(text) is unaffected).
drop function public.setup_initial_admin();
|
||||||
23
db/migrations/core/65-keycloak-setup.sql
Normal file
23
db/migrations/core/65-keycloak-setup.sql
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
-- Create Keycloak user if it doesn't exist
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (SELECT 1 FROM pg_roles WHERE rolname = 'keycloak') THEN
|
||||||
|
CREATE USER keycloak WITH PASSWORD 'keycloak';
|
||||||
|
END IF;
|
||||||
|
END
|
||||||
|
$$;
|
||||||
|
|
||||||
|
-- Create Keycloak schema if it doesn't exist
|
||||||
|
CREATE SCHEMA IF NOT EXISTS keycloak;
|
||||||
|
|
||||||
|
-- Grant necessary permissions
|
||||||
|
GRANT USAGE ON SCHEMA keycloak TO keycloak;
|
||||||
|
GRANT ALL ON ALL TABLES IN SCHEMA keycloak TO keycloak;
|
||||||
|
GRANT ALL ON ALL SEQUENCES IN SCHEMA keycloak TO keycloak;
|
||||||
|
|
||||||
|
-- Set default privileges for future tables
|
||||||
|
ALTER DEFAULT PRIVILEGES IN SCHEMA keycloak GRANT ALL ON TABLES TO keycloak;
|
||||||
|
ALTER DEFAULT PRIVILEGES IN SCHEMA keycloak GRANT ALL ON SEQUENCES TO keycloak;
|
||||||
|
|
||||||
|
-- Grant connect permission to the database
|
||||||
|
GRANT CONNECT ON DATABASE postgres TO keycloak;
|
||||||
3
db/migrations/supabase/50-_supabase.sql
Normal file
3
db/migrations/supabase/50-_supabase.sql
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
\set pguser `echo "$POSTGRES_USER"`
|
||||||
|
|
||||||
|
CREATE DATABASE _supabase WITH OWNER :pguser;
|
||||||
5
db/migrations/supabase/52-logs.sql
Normal file
5
db/migrations/supabase/52-logs.sql
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
\set pguser `echo "$POSTGRES_USER"`
|
||||||
|
|
||||||
|
\c _supabase
|
||||||
|
create schema if not exists _analytics;
|
||||||
|
alter schema _analytics owner to :pguser;
|
||||||
5
db/migrations/supabase/52-pooler.sql
Normal file
5
db/migrations/supabase/52-pooler.sql
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
\set pguser `echo "$POSTGRES_USER"`
|
||||||
|
|
||||||
|
\c _supabase
|
||||||
|
create schema if not exists _supavisor;
|
||||||
|
alter schema _supavisor owner to :pguser;
|
||||||
4
db/migrations/supabase/52-realtime.sql
Normal file
4
db/migrations/supabase/52-realtime.sql
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
\set pguser `echo "$POSTGRES_USER"`
|
||||||
|
|
||||||
|
create schema if not exists _realtime;
|
||||||
|
alter schema _realtime owner to :pguser;
|
||||||
489
docker-compose.yml
Normal file
489
docker-compose.yml
Normal file
@ -0,0 +1,489 @@
|
|||||||
|
services:
|
||||||
|
# Supabase containers
|
||||||
|
studio:
|
||||||
|
container_name: supabase-studio
|
||||||
|
image: supabase/studio:20250113-83c9420
|
||||||
|
restart: unless-stopped
|
||||||
|
healthcheck:
|
||||||
|
test:
|
||||||
|
[
|
||||||
|
"CMD",
|
||||||
|
"node",
|
||||||
|
"-e",
|
||||||
|
"fetch('http://studio:3000/api/profile').then((r) => {if (r.status !== 200) throw new Error(r.status)})",
|
||||||
|
]
|
||||||
|
timeout: 10s
|
||||||
|
interval: 5s
|
||||||
|
retries: 3
|
||||||
|
depends_on:
|
||||||
|
analytics:
|
||||||
|
condition: service_healthy
|
||||||
|
ports:
|
||||||
|
- ${PORT_SUPABASE_STUDIO}:3000
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
STUDIO_PG_META_URL: http://meta:8080
|
||||||
|
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
|
||||||
|
DEFAULT_PROJECT_ID: "ClassroomCopilot"
|
||||||
|
DEFAULT_ORGANIZATION_NAME: ${STUDIO_DEFAULT_ORGANIZATION}
|
||||||
|
DEFAULT_PROJECT_NAME: ${STUDIO_DEFAULT_PROJECT}
|
||||||
|
OPENAI_API_KEY: ${OPENAI_API_KEY:-}
|
||||||
|
SUPABASE_URL: ${SUPABASE_URL}
|
||||||
|
SUPABASE_PUBLIC_URL: ${SUPABASE_PUBLIC_URL}
|
||||||
|
SUPABASE_ANON_KEY: ${ANON_KEY}
|
||||||
|
SUPABASE_SERVICE_KEY: ${SERVICE_ROLE_KEY}
|
||||||
|
LOGFLARE_API_KEY: ${LOGFLARE_API_KEY}
|
||||||
|
LOGFLARE_URL: http://analytics:4000
|
||||||
|
NEXT_PUBLIC_ENABLE_LOGS: true
|
||||||
|
NEXT_ANALYTICS_BACKEND_PROVIDER: postgres
|
||||||
|
networks:
|
||||||
|
- kevlarai-network
|
||||||
|
|
||||||
|
kong:
|
||||||
|
container_name: supabase-kong
|
||||||
|
image: kong:2.8.1
|
||||||
|
restart: unless-stopped
|
||||||
|
entrypoint: bash -c 'eval "echo \"$$(cat ~/temp.yml)\"" > ~/kong.yml && /docker-entrypoint.sh kong docker-start'
|
||||||
|
ports:
|
||||||
|
- ${KONG_HTTP_PORT}:8000/tcp
|
||||||
|
- ${KONG_HTTPS_PORT}:8443/tcp
|
||||||
|
depends_on:
|
||||||
|
analytics:
|
||||||
|
condition: service_healthy
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
KONG_DATABASE: "off"
|
||||||
|
KONG_DECLARATIVE_CONFIG: /home/kong/kong.yml
|
||||||
|
KONG_DNS_ORDER: LAST,A,CNAME
|
||||||
|
KONG_PLUGINS: request-transformer,cors,key-auth,acl,basic-auth
|
||||||
|
KONG_NGINX_PROXY_PROXY_BUFFER_SIZE: 160k
|
||||||
|
KONG_NGINX_PROXY_PROXY_BUFFERS: 64 160k
|
||||||
|
SUPABASE_ANON_KEY: ${ANON_KEY}
|
||||||
|
SUPABASE_SERVICE_KEY: ${SERVICE_ROLE_KEY}
|
||||||
|
DASHBOARD_USERNAME: ${DASHBOARD_USERNAME}
|
||||||
|
DASHBOARD_PASSWORD: ${DASHBOARD_PASSWORD}
|
||||||
|
KONG_PROXY_ACCESS_LOG: "/dev/stdout"
|
||||||
|
KONG_ADMIN_ACCESS_LOG: "/dev/stdout"
|
||||||
|
KONG_PROXY_ERROR_LOG: "/dev/stderr"
|
||||||
|
KONG_ADMIN_ERROR_LOG: "/dev/stderr"
|
||||||
|
KONG_CORS_ORIGINS: "*"
|
||||||
|
KONG_CORS_METHODS: "GET,HEAD,PUT,PATCH,POST,DELETE,OPTIONS"
|
||||||
|
KONG_CORS_HEADERS: "DNT,X-Auth-Token,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization,apikey,x-client-info"
|
||||||
|
KONG_CORS_EXPOSED_HEADERS: "Content-Length,Content-Range"
|
||||||
|
KONG_CORS_MAX_AGE: 3600
|
||||||
|
volumes:
|
||||||
|
- ./api/kong.yml:/home/kong/temp.yml:ro
|
||||||
|
networks:
|
||||||
|
- kevlarai-network
|
||||||
|
|
||||||
|
auth:
|
||||||
|
container_name: supabase-auth
|
||||||
|
image: supabase/gotrue:v2.167.0
|
||||||
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_healthy
|
||||||
|
analytics:
|
||||||
|
condition: service_healthy
|
||||||
|
healthcheck:
|
||||||
|
test:
|
||||||
|
[
|
||||||
|
"CMD",
|
||||||
|
"wget",
|
||||||
|
"--no-verbose",
|
||||||
|
"--tries=1",
|
||||||
|
"--spider",
|
||||||
|
"http://localhost:9999/health",
|
||||||
|
]
|
||||||
|
timeout: 5s
|
||||||
|
interval: 5s
|
||||||
|
retries: 3
|
||||||
|
restart: unless-stopped
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
GOTRUE_API_HOST: 0.0.0.0
|
||||||
|
GOTRUE_API_PORT: 9999
|
||||||
|
API_EXTERNAL_URL: ${API_EXTERNAL_URL}
|
||||||
|
GOTRUE_DB_DRIVER: postgres
|
||||||
|
GOTRUE_DB_DATABASE_URL: postgres://supabase_auth_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
|
||||||
|
GOTRUE_SITE_URL: ${SITE_URL}
|
||||||
|
GOTRUE_URI_ALLOW_LIST: ${ADDITIONAL_REDIRECT_URLS}
|
||||||
|
GOTRUE_DISABLE_SIGNUP: ${DISABLE_SIGNUP}
|
||||||
|
GOTRUE_JWT_ADMIN_ROLES: service_role
|
||||||
|
GOTRUE_JWT_AUD: authenticated
|
||||||
|
GOTRUE_JWT_DEFAULT_GROUP_NAME: authenticated
|
||||||
|
GOTRUE_JWT_EXP: ${JWT_EXPIRY}
|
||||||
|
GOTRUE_JWT_SECRET: ${JWT_SECRET}
|
||||||
|
GOTRUE_LOG_LEVEL: ${AUTH_LOG_LEVEL}
|
||||||
|
GOTRUE_SMTP_ADMIN_EMAIL: ${SMTP_ADMIN_EMAIL}
|
||||||
|
GOTRUE_SMTP_HOST: ${SMTP_HOST}
|
||||||
|
GOTRUE_SMTP_PORT: ${SMTP_PORT}
|
||||||
|
GOTRUE_SMTP_USER: ${SMTP_USER}
|
||||||
|
GOTRUE_SMTP_PASS: ${SMTP_PASS}
|
||||||
|
GOTRUE_SMTP_SENDER_NAME: ${SMTP_SENDER_NAME}
|
||||||
|
GOTRUE_MAILER_URLPATHS_INVITE: ${MAILER_URLPATHS_INVITE}
|
||||||
|
GOTRUE_MAILER_URLPATHS_CONFIRMATION: ${MAILER_URLPATHS_CONFIRMATION}
|
||||||
|
GOTRUE_MAILER_URLPATHS_RECOVERY: ${MAILER_URLPATHS_RECOVERY}
|
||||||
|
GOTRUE_MAILER_URLPATHS_EMAIL_CHANGE: ${MAILER_URLPATHS_EMAIL_CHANGE}
|
||||||
|
GOTRUE_MAILER_AUTOCONFIRM: ${ENABLE_EMAIL_AUTOCONFIRM}
|
||||||
|
GOTRUE_MAILER_SECURE_EMAIL_CHANGE_ENABLED: ${MAILER_SECURE_EMAIL_CHANGE_ENABLED}
|
||||||
|
GOTRUE_MAILER_EXTERNAL_HOSTS: "localhost,admin.localhost,kong,supabase.classroomcopilot.ai,classroomcopilot.ai"
|
||||||
|
GOTRUE_MAILER_EXTERNAL_HOSTS_ALLOW_REGEX: ".*\\.classroomcopilot\\.ai$"
|
||||||
|
GOTRUE_SMS_AUTOCONFIRM: ${ENABLE_PHONE_AUTOCONFIRM}
|
||||||
|
GOTRUE_EXTERNAL_EMAIL_ENABLED: ${ENABLE_EMAIL_SIGNUP}
|
||||||
|
GOTRUE_EXTERNAL_ANONYMOUS_USERS_ENABLED: ${ENABLE_ANONYMOUS_USERS}
|
||||||
|
GOTRUE_EXTERNAL_PHONE_ENABLED: ${ENABLE_PHONE_SIGNUP}
|
||||||
|
GOTRUE_EXTERNAL_AZURE_ENABLED: ${AZURE_ENABLED}
|
||||||
|
GOTRUE_EXTERNAL_AZURE_CLIENT_ID: ${AZURE_CLIENT_ID}
|
||||||
|
GOTRUE_EXTERNAL_AZURE_SECRET: ${AZURE_SECRET}
|
||||||
|
GOTRUE_EXTERNAL_AZURE_REDIRECT_URI: ${AZURE_REDIRECT_URI}
|
||||||
|
networks:
|
||||||
|
- kevlarai-network
|
||||||
|
|
||||||
|
rest:
|
||||||
|
container_name: supabase-rest
|
||||||
|
image: postgrest/postgrest:v12.2.0
|
||||||
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_healthy
|
||||||
|
analytics:
|
||||||
|
condition: service_healthy
|
||||||
|
restart: unless-stopped
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
PGRST_DB_URI: postgres://authenticator:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
|
||||||
|
PGRST_DB_SCHEMAS: ${PGRST_DB_SCHEMAS}
|
||||||
|
PGRST_DB_ANON_ROLE: anon
|
||||||
|
PGRST_JWT_SECRET: ${JWT_SECRET}
|
||||||
|
PGRST_DB_USE_LEGACY_GUCS: "false"
|
||||||
|
PGRST_APP_SETTINGS_JWT_SECRET: ${JWT_SECRET}
|
||||||
|
PGRST_APP_SETTINGS_JWT_EXP: ${JWT_EXPIRY}
|
||||||
|
command: "postgrest"
|
||||||
|
networks:
|
||||||
|
- kevlarai-network
|
||||||
|
|
||||||
|
realtime:
|
||||||
|
container_name: supabase-realtime
|
||||||
|
image: supabase/realtime:v2.34.7
|
||||||
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_healthy
|
||||||
|
analytics:
|
||||||
|
condition: service_healthy
|
||||||
|
healthcheck:
|
||||||
|
test:
|
||||||
|
[
|
||||||
|
"CMD",
|
||||||
|
"curl",
|
||||||
|
"-sSfL",
|
||||||
|
"--head",
|
||||||
|
"-o",
|
||||||
|
"/dev/null",
|
||||||
|
"-H",
|
||||||
|
"Authorization: Bearer ${ANON_KEY}",
|
||||||
|
"http://localhost:4000/api/tenants/realtime-dev/health",
|
||||||
|
]
|
||||||
|
timeout: 5s
|
||||||
|
interval: 5s
|
||||||
|
retries: 3
|
||||||
|
restart: unless-stopped
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
PORT: 4000
|
||||||
|
DB_HOST: ${POSTGRES_HOST}
|
||||||
|
DB_PORT: ${POSTGRES_PORT}
|
||||||
|
DB_USER: supabase_admin
|
||||||
|
DB_PASSWORD: ${POSTGRES_PASSWORD}
|
||||||
|
DB_NAME: ${POSTGRES_DB}
|
||||||
|
DB_AFTER_CONNECT_QUERY: "SET search_path TO _realtime"
|
||||||
|
DB_ENC_KEY: supabaserealtime
|
||||||
|
API_JWT_SECRET: ${JWT_SECRET}
|
||||||
|
SECRET_KEY_BASE: ${SECRET_KEY_BASE}
|
||||||
|
ERL_AFLAGS: -proto_dist inet_tcp
|
||||||
|
DNS_NODES: "''"
|
||||||
|
RLIMIT_NOFILE: "10000"
|
||||||
|
APP_NAME: realtime
|
||||||
|
SEED_SELF_HOST: true
|
||||||
|
RUN_JANITOR: true
|
||||||
|
networks:
|
||||||
|
- kevlarai-network
|
||||||
|
|
||||||
|
storage:
|
||||||
|
container_name: supabase-storage
|
||||||
|
image: supabase/storage-api:v1.14.5
|
||||||
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_healthy
|
||||||
|
rest:
|
||||||
|
condition: service_started
|
||||||
|
imgproxy:
|
||||||
|
condition: service_started
|
||||||
|
healthcheck:
|
||||||
|
test:
|
||||||
|
[
|
||||||
|
"CMD",
|
||||||
|
"wget",
|
||||||
|
"--no-verbose",
|
||||||
|
"--tries=1",
|
||||||
|
"--spider",
|
||||||
|
"http://storage:5000/status",
|
||||||
|
]
|
||||||
|
timeout: 5s
|
||||||
|
interval: 5s
|
||||||
|
retries: 3
|
||||||
|
restart: unless-stopped
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
ANON_KEY: ${ANON_KEY}
|
||||||
|
SERVICE_KEY: ${SERVICE_ROLE_KEY}
|
||||||
|
POSTGREST_URL: http://rest:3000
|
||||||
|
PGRST_JWT_SECRET: ${JWT_SECRET}
|
||||||
|
DATABASE_URL: postgres://supabase_storage_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
|
||||||
|
FILE_SIZE_LIMIT: 52428800
|
||||||
|
STORAGE_BACKEND: file
|
||||||
|
FILE_STORAGE_BACKEND_PATH: /var/lib/storage
|
||||||
|
TENANT_ID: stub
|
||||||
|
REGION: stub
|
||||||
|
GLOBAL_S3_BUCKET: stub
|
||||||
|
ENABLE_IMAGE_TRANSFORMATION: "true"
|
||||||
|
IMGPROXY_URL: http://imgproxy:5001
|
||||||
|
volumes:
|
||||||
|
- ./storage:/var/lib/storage:z
|
||||||
|
networks:
|
||||||
|
- kevlarai-network
|
||||||
|
|
||||||
|
imgproxy:
|
||||||
|
container_name: supabase-imgproxy
|
||||||
|
image: darthsim/imgproxy:v3.8.0
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "imgproxy", "health"]
|
||||||
|
timeout: 10s
|
||||||
|
interval: 5s
|
||||||
|
retries: 10
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
IMGPROXY_BIND: ":5001"
|
||||||
|
IMGPROXY_LOCAL_FILESYSTEM_ROOT: /
|
||||||
|
IMGPROXY_USE_ETAG: "true"
|
||||||
|
IMGPROXY_ENABLE_WEBP_DETECTION: ${IMGPROXY_ENABLE_WEBP_DETECTION}
|
||||||
|
networks:
|
||||||
|
- kevlarai-network
|
||||||
|
|
||||||
|
meta:
|
||||||
|
container_name: supabase-meta
|
||||||
|
image: supabase/postgres-meta:v0.84.2
|
||||||
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_healthy
|
||||||
|
analytics:
|
||||||
|
condition: service_healthy
|
||||||
|
restart: unless-stopped
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
PG_META_PORT: 8080
|
||||||
|
PG_META_DB_HOST: ${POSTGRES_HOST}
|
||||||
|
PG_META_DB_PORT: ${POSTGRES_PORT}
|
||||||
|
PG_META_DB_NAME: ${POSTGRES_DB}
|
||||||
|
PG_META_DB_USER: supabase_admin
|
||||||
|
PG_META_DB_PASSWORD: ${POSTGRES_PASSWORD}
|
||||||
|
networks:
|
||||||
|
- kevlarai-network
|
||||||
|
|
||||||
|
functions:
|
||||||
|
container_name: supabase-edge-functions
|
||||||
|
image: supabase/edge-runtime:v1.67.0
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
analytics:
|
||||||
|
condition: service_healthy
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
JWT_SECRET: ${JWT_SECRET}
|
||||||
|
SUPABASE_URL: ${SUPABASE_URL}
|
||||||
|
SUPABASE_ANON_KEY: ${ANON_KEY}
|
||||||
|
SUPABASE_SERVICE_ROLE_KEY: ${SERVICE_ROLE_KEY}
|
||||||
|
SUPABASE_DB_URL: postgresql://postgres:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
|
||||||
|
VERIFY_JWT: "${FUNCTIONS_VERIFY_JWT}"
|
||||||
|
volumes:
|
||||||
|
- ./functions:/home/deno/functions:Z
|
||||||
|
command:
|
||||||
|
- start
|
||||||
|
- --main-service
|
||||||
|
- /home/deno/functions/main
|
||||||
|
networks:
|
||||||
|
- kevlarai-network
|
||||||
|
|
||||||
|
analytics:
|
||||||
|
container_name: supabase-analytics
|
||||||
|
image: supabase/logflare:1.4.0
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "curl", "http://localhost:4000/health"]
|
||||||
|
timeout: 10s
|
||||||
|
interval: 5s
|
||||||
|
retries: 10
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_healthy
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
LOGFLARE_NODE_HOST: 127.0.0.1
|
||||||
|
DB_USERNAME: supabase_admin
|
||||||
|
DB_DATABASE: _supabase
|
||||||
|
DB_HOSTNAME: ${POSTGRES_HOST}
|
||||||
|
DB_PORT: ${POSTGRES_PORT}
|
||||||
|
DB_PASSWORD: ${POSTGRES_PASSWORD}
|
||||||
|
DB_SCHEMA: _analytics
|
||||||
|
LOGFLARE_API_KEY: ${LOGFLARE_API_KEY}
|
||||||
|
LOGFLARE_SINGLE_TENANT: true
|
||||||
|
LOGFLARE_SUPABASE_MODE: true
|
||||||
|
LOGFLARE_MIN_CLUSTER_SIZE: 1
|
||||||
|
POSTGRES_BACKEND_URL: postgresql://supabase_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/_supabase
|
||||||
|
POSTGRES_BACKEND_SCHEMA: _analytics
|
||||||
|
LOGFLARE_FEATURE_FLAG_OVERRIDE: multibackend=true
|
||||||
|
ports:
|
||||||
|
- 4000:4000
|
||||||
|
networks:
|
||||||
|
- kevlarai-network
|
||||||
|
|
||||||
|
db:
|
||||||
|
container_name: supabase-db
|
||||||
|
image: supabase/postgres:15.8.1.020
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "pg_isready -U postgres -h localhost || exit 1"]
|
||||||
|
interval: 10s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 20
|
||||||
|
start_period: 30s
|
||||||
|
depends_on:
|
||||||
|
vector:
|
||||||
|
condition: service_healthy
|
||||||
|
command:
|
||||||
|
- postgres
|
||||||
|
- -c
|
||||||
|
- config_file=/etc/postgresql/postgresql.conf
|
||||||
|
- -c
|
||||||
|
- log_min_messages=fatal
|
||||||
|
restart: unless-stopped
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
POSTGRES_HOST: /var/run/postgresql
|
||||||
|
PGPORT: ${POSTGRES_PORT}
|
||||||
|
POSTGRES_PORT: ${POSTGRES_PORT}
|
||||||
|
PGPASSWORD: ${POSTGRES_PASSWORD}
|
||||||
|
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
|
||||||
|
PGDATABASE: ${POSTGRES_DB}
|
||||||
|
POSTGRES_DB: ${POSTGRES_DB}
|
||||||
|
JWT_SECRET: ${JWT_SECRET}
|
||||||
|
JWT_EXP: ${JWT_EXPIRY}
|
||||||
|
volumes:
|
||||||
|
- ./db/migrations/supabase/50-_supabase.sql:/docker-entrypoint-initdb.d/migrations/50-_supabase.sql
|
||||||
|
- ./db/migrations/supabase/52-realtime.sql:/docker-entrypoint-initdb.d/migrations/52-realtime.sql
|
||||||
|
- ./db/migrations/supabase/52-pooler.sql:/docker-entrypoint-initdb.d/migrations/52-pooler.sql
|
||||||
|
- ./db/migrations/supabase/52-logs.sql:/docker-entrypoint-initdb.d/migrations/52-logs.sql
|
||||||
|
- ./db/init-scripts/51-webhooks.sql:/docker-entrypoint-initdb.d/init-scripts/51-webhooks.sql
|
||||||
|
- ./db/init-scripts/52-roles.sql:/docker-entrypoint-initdb.d/init-scripts/52-roles.sql
|
||||||
|
- ./db/init-scripts/52-jwt.sql:/docker-entrypoint-initdb.d/init-scripts/52-jwt.sql
|
||||||
|
- ./db/migrations/core/60-create-databases.sql:/docker-entrypoint-initdb.d/migrations/60-create-databases.sql
|
||||||
|
- ./db/migrations/core/61-core-schema.sql:/docker-entrypoint-initdb.d/migrations/61-core-schema.sql
|
||||||
|
- ./db/migrations/core/62-functions-triggers.sql:/docker-entrypoint-initdb.d/migrations/62-functions-triggers.sql
|
||||||
|
- ./db/migrations/core/63-storage-policies.sql:/docker-entrypoint-initdb.d/migrations/63-storage-policies.sql
|
||||||
|
- ./db/migrations/core/64-initial-admin.sql:/docker-entrypoint-initdb.d/migrations/64-initial-admin.sql
|
||||||
|
- ./db/migrations/core/65-keycloak-setup.sql:/docker-entrypoint-initdb.d/migrations/65-keycloak-setup.sql
|
||||||
|
- supabase-db-data:/var/lib/postgresql/data
|
||||||
|
- supabase-db-config:/etc/postgresql-custom
|
||||||
|
networks:
|
||||||
|
- kevlarai-network
|
||||||
|
|
||||||
|
vector:
|
||||||
|
container_name: supabase-vector
|
||||||
|
image: timberio/vector:0.28.1-alpine
|
||||||
|
healthcheck:
|
||||||
|
test:
|
||||||
|
[
|
||||||
|
"CMD",
|
||||||
|
"wget",
|
||||||
|
"--no-verbose",
|
||||||
|
"--tries=1",
|
||||||
|
"--spider",
|
||||||
|
"http://vector:9001/health",
|
||||||
|
]
|
||||||
|
timeout: 10s
|
||||||
|
interval: 10s
|
||||||
|
retries: 10
|
||||||
|
volumes:
|
||||||
|
- ./logs/vector.yml:/etc/vector/vector.yml:ro
|
||||||
|
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
LOGFLARE_API_KEY: ${LOGFLARE_API_KEY}
|
||||||
|
command: ["--config", "/etc/vector/vector.yml"]
|
||||||
|
networks:
|
||||||
|
- kevlarai-network
|
||||||
|
|
||||||
|
supavisor:
|
||||||
|
container_name: supabase-pooler
|
||||||
|
image: supabase/supavisor:1.1.56
|
||||||
|
healthcheck:
|
||||||
|
test: curl -sSfL --head -o /dev/null "http://127.0.0.1:4000/api/health"
|
||||||
|
interval: 10s
|
||||||
|
timeout: 10s
|
||||||
|
retries: 10
|
||||||
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_healthy
|
||||||
|
analytics:
|
||||||
|
condition: service_healthy
|
||||||
|
command:
|
||||||
|
- /bin/sh
|
||||||
|
- -c
|
||||||
|
- /app/bin/migrate && /app/bin/supavisor eval "$$(cat /etc/pooler/pooler.exs)" && /app/bin/server
|
||||||
|
restart: unless-stopped
|
||||||
|
ports:
|
||||||
|
- ${POSTGRES_PORT}:5432
|
||||||
|
- ${POOLER_PROXY_PORT_TRANSACTION}:6543
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
- PORT=4000
|
||||||
|
- POSTGRES_PORT=${POSTGRES_PORT}
|
||||||
|
- POSTGRES_DB=${POSTGRES_DB}
|
||||||
|
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
|
||||||
|
- DATABASE_URL=ecto://supabase_admin:${POSTGRES_PASSWORD}@db:${POSTGRES_PORT}/_supabase
|
||||||
|
- CLUSTER_POSTGRES=true
|
||||||
|
- SECRET_KEY_BASE=${SECRET_KEY_BASE}
|
||||||
|
- VAULT_ENC_KEY=${VAULT_ENC_KEY}
|
||||||
|
- API_JWT_SECRET=${JWT_SECRET}
|
||||||
|
- METRICS_JWT_SECRET=${JWT_SECRET}
|
||||||
|
- REGION=local
|
||||||
|
- ERL_AFLAGS=-proto_dist inet_tcp
|
||||||
|
- POOLER_TENANT_ID=${POOLER_TENANT_ID}
|
||||||
|
- POOLER_DEFAULT_POOL_SIZE=${POOLER_DEFAULT_POOL_SIZE}
|
||||||
|
- POOLER_MAX_CLIENT_CONN=${POOLER_MAX_CLIENT_CONN}
|
||||||
|
- POOLER_POOL_MODE=transaction
|
||||||
|
volumes:
|
||||||
|
- ./pooler/pooler.exs:/etc/pooler/pooler.exs:ro
|
||||||
|
networks:
|
||||||
|
- kevlarai-network
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
supabase-db-config:
|
||||||
|
driver: local
|
||||||
|
supabase-db-data:
|
||||||
|
driver: local
|
||||||
|
|
||||||
|
networks:
|
||||||
|
kevlarai-network:
|
||||||
|
name: kevlarai-network
|
||||||
|
driver: bridge
|
||||||
16
functions/hello/index.ts
Normal file
16
functions/hello/index.ts
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
// Follow this setup guide to integrate the Deno language server with your editor:
|
||||||
|
// https://deno.land/manual/getting_started/setup_your_environment
|
||||||
|
// This enables autocomplete, go to definition, etc.
|
||||||
|
|
||||||
|
import { serve } from "https://deno.land/std@0.177.1/http/server.ts"
|
||||||
|
|
||||||
|
serve(async () => {
|
||||||
|
return new Response(
|
||||||
|
`"Hello from Edge Functions!"`,
|
||||||
|
{ headers: { "Content-Type": "application/json" } },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
// To invoke:
|
||||||
|
// curl 'http://localhost:<KONG_HTTP_PORT>/functions/v1/hello' \
|
||||||
|
// --header 'Authorization: Bearer <anon/service_role API key>'
|
||||||
94
functions/main/index.ts
Normal file
94
functions/main/index.ts
Normal file
@ -0,0 +1,94 @@
|
|||||||
|
import { serve } from 'https://deno.land/std@0.131.0/http/server.ts'
|
||||||
|
import * as jose from 'https://deno.land/x/jose@v4.14.4/index.ts'
|
||||||
|
|
||||||
|
console.log('main function started')
|
||||||
|
|
||||||
|
const JWT_SECRET = Deno.env.get('JWT_SECRET')
|
||||||
|
const VERIFY_JWT = Deno.env.get('VERIFY_JWT') === 'true'
|
||||||
|
|
||||||
|
function getAuthToken(req: Request) {
|
||||||
|
const authHeader = req.headers.get('authorization')
|
||||||
|
if (!authHeader) {
|
||||||
|
throw new Error('Missing authorization header')
|
||||||
|
}
|
||||||
|
const [bearer, token] = authHeader.split(' ')
|
||||||
|
if (bearer !== 'Bearer') {
|
||||||
|
throw new Error(`Auth header is not 'Bearer {token}'`)
|
||||||
|
}
|
||||||
|
return token
|
||||||
|
}
|
||||||
|
|
||||||
|
async function verifyJWT(jwt: string): Promise<boolean> {
|
||||||
|
const encoder = new TextEncoder()
|
||||||
|
const secretKey = encoder.encode(JWT_SECRET)
|
||||||
|
try {
|
||||||
|
await jose.jwtVerify(jwt, secretKey)
|
||||||
|
} catch (err) {
|
||||||
|
console.error(err)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
serve(async (req: Request) => {
|
||||||
|
if (req.method !== 'OPTIONS' && VERIFY_JWT) {
|
||||||
|
try {
|
||||||
|
const token = getAuthToken(req)
|
||||||
|
const isValidJWT = await verifyJWT(token)
|
||||||
|
|
||||||
|
if (!isValidJWT) {
|
||||||
|
return new Response(JSON.stringify({ msg: 'Invalid JWT' }), {
|
||||||
|
status: 401,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
})
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
console.error(e)
|
||||||
|
return new Response(JSON.stringify({ msg: e.toString() }), {
|
||||||
|
status: 401,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const url = new URL(req.url)
|
||||||
|
const { pathname } = url
|
||||||
|
const path_parts = pathname.split('/')
|
||||||
|
const service_name = path_parts[1]
|
||||||
|
|
||||||
|
if (!service_name || service_name === '') {
|
||||||
|
const error = { msg: 'missing function name in request' }
|
||||||
|
return new Response(JSON.stringify(error), {
|
||||||
|
status: 400,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const servicePath = `/home/deno/functions/${service_name}`
|
||||||
|
console.error(`serving the request with ${servicePath}`)
|
||||||
|
|
||||||
|
const memoryLimitMb = 150
|
||||||
|
const workerTimeoutMs = 1 * 60 * 1000
|
||||||
|
const noModuleCache = false
|
||||||
|
const importMapPath = null
|
||||||
|
const envVarsObj = Deno.env.toObject()
|
||||||
|
const envVars = Object.keys(envVarsObj).map((k) => [k, envVarsObj[k]])
|
||||||
|
|
||||||
|
try {
|
||||||
|
const worker = await EdgeRuntime.userWorkers.create({
|
||||||
|
servicePath,
|
||||||
|
memoryLimitMb,
|
||||||
|
workerTimeoutMs,
|
||||||
|
noModuleCache,
|
||||||
|
importMapPath,
|
||||||
|
envVars,
|
||||||
|
})
|
||||||
|
return await worker.fetch(req)
|
||||||
|
} catch (e) {
|
||||||
|
const error = { msg: e.toString() }
|
||||||
|
return new Response(JSON.stringify(error), {
|
||||||
|
status: 500,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
})
|
||||||
|
}
|
||||||
|
})
|
||||||
232
logs/vector.yml
Normal file
232
logs/vector.yml
Normal file
@ -0,0 +1,232 @@
|
|||||||
|
api:
|
||||||
|
enabled: true
|
||||||
|
address: 0.0.0.0:9001
|
||||||
|
|
||||||
|
sources:
|
||||||
|
docker_host:
|
||||||
|
type: docker_logs
|
||||||
|
exclude_containers:
|
||||||
|
- supabase-vector
|
||||||
|
|
||||||
|
transforms:
|
||||||
|
project_logs:
|
||||||
|
type: remap
|
||||||
|
inputs:
|
||||||
|
- docker_host
|
||||||
|
source: |-
|
||||||
|
.project = "default"
|
||||||
|
.event_message = del(.message)
|
||||||
|
.appname = del(.container_name)
|
||||||
|
del(.container_created_at)
|
||||||
|
del(.container_id)
|
||||||
|
del(.source_type)
|
||||||
|
del(.stream)
|
||||||
|
del(.label)
|
||||||
|
del(.image)
|
||||||
|
del(.host)
|
||||||
|
del(.stream)
|
||||||
|
router:
|
||||||
|
type: route
|
||||||
|
inputs:
|
||||||
|
- project_logs
|
||||||
|
route:
|
||||||
|
kong: '.appname == "supabase-kong"'
|
||||||
|
auth: '.appname == "supabase-auth"'
|
||||||
|
rest: '.appname == "supabase-rest"'
|
||||||
|
realtime: '.appname == "supabase-realtime"'
|
||||||
|
storage: '.appname == "supabase-storage"'
|
||||||
|
functions: '.appname == "supabase-functions"'
|
||||||
|
db: '.appname == "supabase-db"'
|
||||||
|
# Ignores non nginx errors since they are related with kong booting up
|
||||||
|
kong_logs:
|
||||||
|
type: remap
|
||||||
|
inputs:
|
||||||
|
- router.kong
|
||||||
|
source: |-
|
||||||
|
req, err = parse_nginx_log(.event_message, "combined")
|
||||||
|
if err == null {
|
||||||
|
.timestamp = req.timestamp
|
||||||
|
.metadata.request.headers.referer = req.referer
|
||||||
|
.metadata.request.headers.user_agent = req.agent
|
||||||
|
.metadata.request.headers.cf_connecting_ip = req.client
|
||||||
|
.metadata.request.method = req.method
|
||||||
|
.metadata.request.path = req.path
|
||||||
|
.metadata.request.protocol = req.protocol
|
||||||
|
.metadata.response.status_code = req.status
|
||||||
|
}
|
||||||
|
if err != null {
|
||||||
|
abort
|
||||||
|
}
|
||||||
|
# Ignores non nginx errors since they are related with kong booting up
|
||||||
|
kong_err:
|
||||||
|
type: remap
|
||||||
|
inputs:
|
||||||
|
- router.kong
|
||||||
|
source: |-
|
||||||
|
.metadata.request.method = "GET"
|
||||||
|
.metadata.response.status_code = 200
|
||||||
|
parsed, err = parse_nginx_log(.event_message, "error")
|
||||||
|
if err == null {
|
||||||
|
.timestamp = parsed.timestamp
|
||||||
|
.severity = parsed.severity
|
||||||
|
.metadata.request.host = parsed.host
|
||||||
|
.metadata.request.headers.cf_connecting_ip = parsed.client
|
||||||
|
url, err = split(parsed.request, " ")
|
||||||
|
if err == null {
|
||||||
|
.metadata.request.method = url[0]
|
||||||
|
.metadata.request.path = url[1]
|
||||||
|
.metadata.request.protocol = url[2]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err != null {
|
||||||
|
abort
|
||||||
|
}
|
||||||
|
# Gotrue logs are structured json strings which frontend parses directly. But we keep metadata for consistency.
|
||||||
|
auth_logs:
|
||||||
|
type: remap
|
||||||
|
inputs:
|
||||||
|
- router.auth
|
||||||
|
source: |-
|
||||||
|
parsed, err = parse_json(.event_message)
|
||||||
|
if err == null {
|
||||||
|
.metadata.timestamp = parsed.time
|
||||||
|
.metadata = merge!(.metadata, parsed)
|
||||||
|
}
|
||||||
|
# PostgREST logs are structured so we separate timestamp from message using regex
|
||||||
|
rest_logs:
|
||||||
|
type: remap
|
||||||
|
inputs:
|
||||||
|
- router.rest
|
||||||
|
source: |-
|
||||||
|
parsed, err = parse_regex(.event_message, r'^(?P<time>.*): (?P<msg>.*)$')
|
||||||
|
if err == null {
|
||||||
|
.event_message = parsed.msg
|
||||||
|
.timestamp = to_timestamp!(parsed.time)
|
||||||
|
.metadata.host = .project
|
||||||
|
}
|
||||||
|
# Realtime logs are structured so we parse the severity level using regex (ignore time because it has no date)
|
||||||
|
realtime_logs:
|
||||||
|
type: remap
|
||||||
|
inputs:
|
||||||
|
- router.realtime
|
||||||
|
source: |-
|
||||||
|
.metadata.project = del(.project)
|
||||||
|
.metadata.external_id = .metadata.project
|
||||||
|
parsed, err = parse_regex(.event_message, r'^(?P<time>\d+:\d+:\d+\.\d+) \[(?P<level>\w+)\] (?P<msg>.*)$')
|
||||||
|
if err == null {
|
||||||
|
.event_message = parsed.msg
|
||||||
|
.metadata.level = parsed.level
|
||||||
|
}
|
||||||
|
# Storage logs may contain json objects so we parse them for completeness
|
||||||
|
storage_logs:
|
||||||
|
type: remap
|
||||||
|
inputs:
|
||||||
|
- router.storage
|
||||||
|
source: |-
|
||||||
|
.metadata.project = del(.project)
|
||||||
|
.metadata.tenantId = .metadata.project
|
||||||
|
parsed, err = parse_json(.event_message)
|
||||||
|
if err == null {
|
||||||
|
.event_message = parsed.msg
|
||||||
|
.metadata.level = parsed.level
|
||||||
|
.metadata.timestamp = parsed.time
|
||||||
|
.metadata.context[0].host = parsed.hostname
|
||||||
|
.metadata.context[0].pid = parsed.pid
|
||||||
|
}
|
||||||
|
# Postgres logs some messages to stderr which we map to warning severity level
|
||||||
|
db_logs:
|
||||||
|
type: remap
|
||||||
|
inputs:
|
||||||
|
- router.db
|
||||||
|
source: |-
|
||||||
|
.metadata.host = "db-default"
|
||||||
|
.metadata.parsed.timestamp = .timestamp
|
||||||
|
|
||||||
|
parsed, err = parse_regex(.event_message, r'.*(?P<level>INFO|NOTICE|WARNING|ERROR|LOG|FATAL|PANIC?):.*', numeric_groups: true)
|
||||||
|
|
||||||
|
if err != null || parsed == null {
|
||||||
|
.metadata.parsed.error_severity = "info"
|
||||||
|
}
|
||||||
|
if parsed != null {
|
||||||
|
.metadata.parsed.error_severity = parsed.level
|
||||||
|
}
|
||||||
|
if .metadata.parsed.error_severity == "info" {
|
||||||
|
.metadata.parsed.error_severity = "log"
|
||||||
|
}
|
||||||
|
.metadata.parsed.error_severity = upcase!(.metadata.parsed.error_severity)
|
||||||
|
|
||||||
|
sinks:
|
||||||
|
logflare_auth:
|
||||||
|
type: 'http'
|
||||||
|
inputs:
|
||||||
|
- auth_logs
|
||||||
|
encoding:
|
||||||
|
codec: 'json'
|
||||||
|
method: 'post'
|
||||||
|
request:
|
||||||
|
retry_max_duration_secs: 10
|
||||||
|
uri: 'http://analytics:4000/api/logs?source_name=gotrue.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
|
||||||
|
logflare_realtime:
|
||||||
|
type: 'http'
|
||||||
|
inputs:
|
||||||
|
- realtime_logs
|
||||||
|
encoding:
|
||||||
|
codec: 'json'
|
||||||
|
method: 'post'
|
||||||
|
request:
|
||||||
|
retry_max_duration_secs: 10
|
||||||
|
uri: 'http://analytics:4000/api/logs?source_name=realtime.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
|
||||||
|
logflare_rest:
|
||||||
|
type: 'http'
|
||||||
|
inputs:
|
||||||
|
- rest_logs
|
||||||
|
encoding:
|
||||||
|
codec: 'json'
|
||||||
|
method: 'post'
|
||||||
|
request:
|
||||||
|
retry_max_duration_secs: 10
|
||||||
|
uri: 'http://analytics:4000/api/logs?source_name=postgREST.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
|
||||||
|
logflare_db:
|
||||||
|
type: 'http'
|
||||||
|
inputs:
|
||||||
|
- db_logs
|
||||||
|
encoding:
|
||||||
|
codec: 'json'
|
||||||
|
method: 'post'
|
||||||
|
request:
|
||||||
|
retry_max_duration_secs: 10
|
||||||
|
# We must route the sink through kong because ingesting logs before logflare is fully initialised will
|
||||||
|
# lead to broken queries from studio. This works by the assumption that containers are started in the
|
||||||
|
# following order: vector > db > logflare > kong
|
||||||
|
uri: 'http://kong:8000/analytics/v1/api/logs?source_name=postgres.logs&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
|
||||||
|
logflare_functions:
|
||||||
|
type: 'http'
|
||||||
|
inputs:
|
||||||
|
- router.functions
|
||||||
|
encoding:
|
||||||
|
codec: 'json'
|
||||||
|
method: 'post'
|
||||||
|
request:
|
||||||
|
retry_max_duration_secs: 10
|
||||||
|
uri: 'http://analytics:4000/api/logs?source_name=deno-relay-logs&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
|
||||||
|
logflare_storage:
|
||||||
|
type: 'http'
|
||||||
|
inputs:
|
||||||
|
- storage_logs
|
||||||
|
encoding:
|
||||||
|
codec: 'json'
|
||||||
|
method: 'post'
|
||||||
|
request:
|
||||||
|
retry_max_duration_secs: 10
|
||||||
|
uri: 'http://analytics:4000/api/logs?source_name=storage.logs.prod.2&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
|
||||||
|
logflare_kong:
|
||||||
|
type: 'http'
|
||||||
|
inputs:
|
||||||
|
- kong_logs
|
||||||
|
- kong_err
|
||||||
|
encoding:
|
||||||
|
codec: 'json'
|
||||||
|
method: 'post'
|
||||||
|
request:
|
||||||
|
retry_max_duration_secs: 10
|
||||||
|
uri: 'http://analytics:4000/api/logs?source_name=cloudflare.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
|
||||||
30
pooler/pooler.exs
Normal file
30
pooler/pooler.exs
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
{:ok, _} = Application.ensure_all_started(:supavisor)
|
||||||
|
|
||||||
|
{:ok, version} =
|
||||||
|
case Supavisor.Repo.query!("select version()") do
|
||||||
|
%{rows: [[ver]]} -> Supavisor.Helpers.parse_pg_version(ver)
|
||||||
|
_ -> nil
|
||||||
|
end
|
||||||
|
|
||||||
|
params = %{
|
||||||
|
"external_id" => System.get_env("POOLER_TENANT_ID"),
|
||||||
|
"db_host" => "db",
|
||||||
|
"db_port" => System.get_env("POSTGRES_PORT"),
|
||||||
|
"db_database" => System.get_env("POSTGRES_DB"),
|
||||||
|
"require_user" => false,
|
||||||
|
"auth_query" => "SELECT * FROM pgbouncer.get_auth($1)",
|
||||||
|
"default_max_clients" => System.get_env("POOLER_MAX_CLIENT_CONN"),
|
||||||
|
"default_pool_size" => System.get_env("POOLER_DEFAULT_POOL_SIZE"),
|
||||||
|
"default_parameter_status" => %{"server_version" => version},
|
||||||
|
"users" => [%{
|
||||||
|
"db_user" => "pgbouncer",
|
||||||
|
"db_password" => System.get_env("POSTGRES_PASSWORD"),
|
||||||
|
"mode_type" => System.get_env("POOLER_POOL_MODE"),
|
||||||
|
"pool_size" => System.get_env("POOLER_DEFAULT_POOL_SIZE"),
|
||||||
|
"is_manager" => true
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
|
||||||
|
if !Supavisor.Tenants.get_tenant_by_external_id(params["external_id"]) do
|
||||||
|
{:ok, _} = Supavisor.Tenants.create_tenant(params)
|
||||||
|
end
|
||||||
Loading…
x
Reference in New Issue
Block a user