feat: update database schema and migrations

commit 7fa0e9dd9d
parent 765c8e549c
Author: eligrinfeld
Date:   2025-01-06 21:24:54 -07:00

9 changed files with 635 additions and 2 deletions


@@ -168,4 +168,22 @@ CREATE INDEX IF NOT EXISTS idx_businesses_place_id ON businesses(place_id);
-- Create a unique constraint on place_id (excluding nulls)
CREATE UNIQUE INDEX IF NOT EXISTS idx_businesses_place_id_unique
ON businesses(place_id)
WHERE place_id IS NOT NULL;
CREATE TABLE IF NOT EXISTS businesses (
id TEXT PRIMARY KEY,
name TEXT NOT NULL,
address TEXT NOT NULL,
phone TEXT NOT NULL,
description TEXT NOT NULL,
website TEXT,
source TEXT NOT NULL,
rating REAL,
lat REAL,
lng REAL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_businesses_source ON businesses(source);
CREATE INDEX IF NOT EXISTS idx_businesses_rating ON businesses(rating);
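
For context, a minimal sketch of how the partial unique index on place_id can back an upsert. It assumes businesses has the place_id column indexed above; the inserted values are placeholders:
-- Hypothetical upsert keyed on the partial unique index (rows with NULL place_id are never deduplicated):
INSERT INTO businesses (id, name, address, phone, description, source, place_id)
VALUES ('biz-1', 'Example Cafe', '1 Main St', '555-0100', 'Placeholder row', 'manual', 'place-id-example')
ON CONFLICT (place_id) WHERE place_id IS NOT NULL
DO UPDATE SET name = EXCLUDED.name, updated_at = CURRENT_TIMESTAMP;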


@@ -12,4 +12,4 @@ WHERE table_schema = 'public'
AND table_name = 'businesses';
-- Check row count
-SELECT count(*) FROM businesses;
+SELECT COUNT(*) as count FROM businesses;

supabase/.gitignore (vendored, new file, +4 lines)

@@ -0,0 +1,4 @@
# Supabase
.branches
.temp
.env

supabase/config.toml (new file, +275 lines)

@@ -0,0 +1,275 @@
# For detailed configuration reference documentation, visit:
# https://supabase.com/docs/guides/local-development/cli/config
# A string used to distinguish different Supabase projects on the same host. Defaults to the
# working directory name when running `supabase init`.
project_id = "BizSearch"
[api]
enabled = true
# Port to use for the API URL.
port = 54321
# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API
# endpoints. `public` is always included.
schemas = ["public", "graphql_public"]
# Extra schemas to add to the search_path of every request. `public` is always included.
extra_search_path = ["public", "extensions"]
# The maximum number of rows returned from a view, table, or stored procedure. Limits payload size
# for accidental or malicious requests.
max_rows = 1000
[api.tls]
enabled = false
[db]
# Port to use for the local database URL.
port = 54322
# Port used by db diff command to initialize the shadow database.
shadow_port = 54320
# The database major version to use. This has to be the same as your remote database's. Run `SHOW
# server_version;` on the remote database to check.
major_version = 15
[db.pooler]
enabled = false
# Port to use for the local connection pooler.
port = 54329
# Specifies when a server connection can be reused by other clients.
# Configure one of the supported pooler modes: `transaction`, `session`.
pool_mode = "transaction"
# How many server connections to allow per user/database pair.
default_pool_size = 20
# Maximum number of client connections allowed.
max_client_conn = 100
[db.seed]
# If enabled, seeds the database after migrations during a db reset.
enabled = true
# Specifies an ordered list of seed files to load during db reset.
# Supports glob patterns relative to supabase directory. For example:
# sql_paths = ['./seeds/*.sql', '../project-src/seeds/*-load-testing.sql']
sql_paths = ['./seed.sql']
[realtime]
enabled = true
# Bind realtime via either IPv4 or IPv6. (default: IPv4)
# ip_version = "IPv6"
# The maximum length in bytes of HTTP request headers. (default: 4096)
# max_header_length = 4096
[studio]
enabled = true
# Port to use for Supabase Studio.
port = 54323
# External URL of the API server that frontend connects to.
api_url = "http://127.0.0.1"
# OpenAI API Key to use for Supabase AI in the Supabase Studio.
openai_api_key = "env(OPENAI_API_KEY)"
# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they
# are monitored, and you can view the emails that would have been sent from the web interface.
[inbucket]
enabled = true
# Port to use for the email testing server web interface.
port = 54324
# Uncomment to expose additional ports for testing user applications that send emails.
# smtp_port = 54325
# pop3_port = 54326
# admin_email = "admin@email.com"
# sender_name = "Admin"
[storage]
enabled = true
# The maximum file size allowed (e.g. "5MB", "500KB").
file_size_limit = "50MiB"
[storage.image_transformation]
enabled = true
# Uncomment to configure local storage buckets
# [storage.buckets.images]
# public = false
# file_size_limit = "50MiB"
# allowed_mime_types = ["image/png", "image/jpeg"]
# objects_path = "./images"
[auth]
enabled = true
# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used
# in emails.
site_url = "http://127.0.0.1:3000"
# A list of *exact* URLs that auth providers are permitted to redirect to post authentication.
additional_redirect_urls = ["https://127.0.0.1:3000"]
# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week).
jwt_expiry = 3600
# If disabled, the refresh token will never expire.
enable_refresh_token_rotation = true
# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds.
# Requires enable_refresh_token_rotation = true.
refresh_token_reuse_interval = 10
# Allow/disallow new user signups to your project.
enable_signup = true
# Allow/disallow anonymous sign-ins to your project.
enable_anonymous_sign_ins = false
# Allow/disallow testing manual linking of accounts
enable_manual_linking = false
# Passwords shorter than this value will be rejected as weak. Minimum 6, recommended 8 or more.
minimum_password_length = 6
# Passwords that do not meet the following requirements will be rejected as weak. Supported values
# are: `letters_digits`, `lower_upper_letters_digits`, `lower_upper_letters_digits_symbols`
password_requirements = ""
[auth.email]
# Allow/disallow new user signups via email to your project.
enable_signup = true
# If enabled, a user will be required to confirm any email change on both the old, and new email
# addresses. If disabled, only the new email is required to confirm.
double_confirm_changes = true
# If enabled, users need to confirm their email address before signing in.
enable_confirmations = false
# If enabled, users will need to reauthenticate or have logged in recently to change their password.
secure_password_change = false
# Controls the minimum amount of time that must pass before sending another signup confirmation or password reset email.
max_frequency = "1s"
# Number of characters used in the email OTP.
otp_length = 6
# Number of seconds before the email OTP expires (defaults to 1 hour).
otp_expiry = 3600
# Use a production-ready SMTP server
# [auth.email.smtp]
# host = "smtp.sendgrid.net"
# port = 587
# user = "apikey"
# pass = "env(SENDGRID_API_KEY)"
# admin_email = "admin@email.com"
# sender_name = "Admin"
# Uncomment to customize email template
# [auth.email.template.invite]
# subject = "You have been invited"
# content_path = "./supabase/templates/invite.html"
[auth.sms]
# Allow/disallow new user signups via SMS to your project.
enable_signup = false
# If enabled, users need to confirm their phone number before signing in.
enable_confirmations = false
# Template for sending OTP to users
template = "Your code is {{ .Code }}"
# Controls the minimum amount of time that must pass before sending another sms otp.
max_frequency = "5s"
# Use pre-defined map of phone number to OTP for testing.
# [auth.sms.test_otp]
# 4152127777 = "123456"
# Configure logged in session timeouts.
# [auth.sessions]
# Force log out after the specified duration.
# timebox = "24h"
# Force log out if the user has been inactive longer than the specified duration.
# inactivity_timeout = "8h"
# This hook runs before a token is issued and allows you to add additional claims based on the authentication method used.
# [auth.hook.custom_access_token]
# enabled = true
# uri = "pg-functions://<database>/<schema>/<hook_name>"
# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`.
[auth.sms.twilio]
enabled = false
account_sid = ""
message_service_sid = ""
# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead:
auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)"
[auth.mfa]
# Control how many MFA factors can be enrolled at once per user.
max_enrolled_factors = 10
# Control use of MFA via App Authenticator (TOTP)
[auth.mfa.totp]
enroll_enabled = true
verify_enabled = true
# Configure Multi-factor-authentication via Phone Messaging
[auth.mfa.phone]
enroll_enabled = false
verify_enabled = false
otp_length = 6
template = "Your code is {{ .Code }}"
max_frequency = "5s"
# Configure Multi-factor-authentication via WebAuthn
# [auth.mfa.web_authn]
# enroll_enabled = true
# verify_enabled = true
# Use an external OAuth provider. The full list of providers is: `apple`, `azure`, `bitbucket`,
# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`,
# `twitter`, `slack`, `spotify`, `workos`, `zoom`.
[auth.external.apple]
enabled = false
client_id = ""
# DO NOT commit your OAuth provider secret to git. Use environment variable substitution instead:
secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)"
# Overrides the default auth redirectUrl.
redirect_uri = ""
# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure,
# or any other third-party OIDC providers.
url = ""
# If enabled, the nonce check will be skipped. Required for local sign in with Google auth.
skip_nonce_check = false
# Use Firebase Auth as a third-party provider alongside Supabase Auth.
[auth.third_party.firebase]
enabled = false
# project_id = "my-firebase-project"
# Use Auth0 as a third-party provider alongside Supabase Auth.
[auth.third_party.auth0]
enabled = false
# tenant = "my-auth0-tenant"
# tenant_region = "us"
# Use AWS Cognito (Amplify) as a third-party provider alongside Supabase Auth.
[auth.third_party.aws_cognito]
enabled = false
# user_pool_id = "my-user-pool-id"
# user_pool_region = "us-east-1"
[edge_runtime]
enabled = true
# Configure one of the supported request policies: `oneshot`, `per_worker`.
# Use `oneshot` for hot reload, or `per_worker` for load testing.
policy = "oneshot"
# Port to attach the Chrome inspector for debugging edge functions.
inspector_port = 8083
# Use these configurations to customize your Edge Function.
# [functions.MY_FUNCTION_NAME]
# enabled = true
# verify_jwt = true
# import_map = "./functions/MY_FUNCTION_NAME/deno.json"
# Uncomment to specify a custom file path to the entrypoint.
# Supported file extensions are: .ts, .js, .mjs, .jsx, .tsx
# entrypoint = "./functions/MY_FUNCTION_NAME/index.ts"
[analytics]
enabled = true
port = 54327
# Configure one of the supported backends: `postgres`, `bigquery`.
backend = "postgres"
# Experimental features may be deprecated any time
[experimental]
# Configures Postgres storage engine to use OrioleDB (S3)
orioledb_version = ""
# Configures S3 bucket URL, eg. <bucket_name>.s3-<region>.amazonaws.com
s3_host = "env(S3_HOST)"
# Configures S3 bucket region, eg. us-east-1
s3_region = "env(S3_REGION)"
# Configures AWS_ACCESS_KEY_ID for S3 bucket
s3_access_key = "env(S3_ACCESS_KEY)"
# Configures AWS_SECRET_ACCESS_KEY for S3 bucket
s3_secret_key = "env(S3_SECRET_KEY)"


@@ -0,0 +1,75 @@
-- Create businesses table
CREATE TABLE IF NOT EXISTS public.businesses (
id UUID DEFAULT gen_random_uuid() PRIMARY KEY,
name TEXT NOT NULL,
address TEXT NOT NULL,
phone TEXT NOT NULL,
description TEXT NOT NULL,
website TEXT,
source TEXT NOT NULL,
rating REAL,
location POINT,
created_at TIMESTAMPTZ DEFAULT NOW(),
updated_at TIMESTAMPTZ DEFAULT NOW()
);
-- Create indexes
CREATE INDEX IF NOT EXISTS idx_businesses_source ON public.businesses(source);
CREATE INDEX IF NOT EXISTS idx_businesses_rating ON public.businesses(rating);
CREATE INDEX IF NOT EXISTS idx_businesses_location ON public.businesses USING GIST(location);
-- Enable Row Level Security (RLS)
ALTER TABLE public.businesses ENABLE ROW LEVEL SECURITY;
-- Create policies
CREATE POLICY "Allow public read access"
ON public.businesses
FOR SELECT
USING (true);
CREATE POLICY "Allow service role insert/update"
ON public.businesses
FOR ALL
USING (auth.role() = 'service_role');
-- Create function to update updated_at timestamp
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ language 'plpgsql';
-- Create trigger for updated_at
CREATE TRIGGER update_businesses_updated_at
BEFORE UPDATE ON public.businesses
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();
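
As a quick illustration (not part of the migration), the trigger above means any UPDATE refreshes updated_at without the application setting it:
-- Sketch: the BEFORE UPDATE trigger rewrites updated_at to NOW() on every modified row.
UPDATE public.businesses SET rating = 4.2 WHERE source = 'manual';
SELECT name, created_at, updated_at FROM public.businesses WHERE source = 'manual';
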
-- Create the searches table
CREATE TABLE searches (
id BIGSERIAL PRIMARY KEY,
query TEXT NOT NULL,
results JSONB NOT NULL DEFAULT '[]',
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Create an index on the query column for faster lookups
CREATE INDEX searches_query_idx ON searches USING GIN (to_tsvector('english', query));
-- Create a function to update the updated_at timestamp
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ language 'plpgsql';
-- Create a trigger to automatically update the updated_at column
CREATE TRIGGER update_searches_updated_at
BEFORE UPDATE ON searches
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();
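
A minimal sketch of the kind of lookup the GIN index on to_tsvector('english', query) is meant to serve; the search terms are placeholders:
SELECT id, query, created_at
FROM searches
WHERE to_tsvector('english', query) @@ plainto_tsquery('english', 'coffee denver')
ORDER BY created_at DESC
LIMIT 20;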


@@ -0,0 +1,63 @@
-- Create business_profiles table
CREATE TABLE IF NOT EXISTS public.business_profiles (
business_id TEXT PRIMARY KEY REFERENCES public.businesses(id),
claimed_by UUID REFERENCES auth.users(id),
claimed_at TIMESTAMP WITH TIME ZONE,
verification_status TEXT NOT NULL DEFAULT 'unverified',
social_links JSONB DEFAULT '{}',
hours_of_operation JSONB DEFAULT '{}',
additional_photos TEXT[] DEFAULT '{}',
tags TEXT[] DEFAULT '{}',
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT valid_verification_status CHECK (verification_status IN ('unverified', 'pending', 'verified', 'rejected'))
);
-- Create business_claims table to track claim requests
CREATE TABLE IF NOT EXISTS public.business_claims (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
business_id TEXT NOT NULL REFERENCES public.businesses(id),
user_id UUID NOT NULL REFERENCES auth.users(id),
status TEXT NOT NULL DEFAULT 'pending',
proof_documents TEXT[] DEFAULT '{}',
submitted_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
reviewed_at TIMESTAMP WITH TIME ZONE,
reviewed_by UUID REFERENCES auth.users(id),
notes TEXT,
CONSTRAINT valid_claim_status CHECK (status IN ('pending', 'approved', 'rejected'))
);
-- Create indexes
CREATE INDEX IF NOT EXISTS idx_business_profiles_claimed_by ON public.business_profiles(claimed_by);
CREATE INDEX IF NOT EXISTS idx_business_claims_business_id ON public.business_claims(business_id);
CREATE INDEX IF NOT EXISTS idx_business_claims_user_id ON public.business_claims(user_id);
CREATE INDEX IF NOT EXISTS idx_business_claims_status ON public.business_claims(status);
-- Add RLS policies
ALTER TABLE public.business_profiles ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.business_claims ENABLE ROW LEVEL SECURITY;
-- Policies for business_profiles
CREATE POLICY "Public profiles are viewable by everyone"
ON public.business_profiles FOR SELECT
USING (true);
CREATE POLICY "Profiles can be updated by verified owners"
ON public.business_profiles FOR UPDATE
USING (auth.uid() = claimed_by AND verification_status = 'verified');
-- Policies for business_claims
CREATE POLICY "Users can view their own claims"
ON public.business_claims FOR SELECT
USING (auth.uid() = user_id);
CREATE POLICY "Users can create claims"
ON public.business_claims FOR INSERT
WITH CHECK (auth.uid() = user_id);
CREATE POLICY "Only admins can review claims"
ON public.business_claims FOR UPDATE
USING (EXISTS (
SELECT 1 FROM auth.users
WHERE auth.uid() = id
AND raw_app_meta_data->>'role' = 'admin'
));
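
A sketch of the claim flow these policies allow, assuming an authenticated session (auth.uid() supplies the caller's id; the proof URL is a placeholder and the business id comes from the seed data below):
-- Submit a claim; the INSERT policy requires user_id to match auth.uid().
INSERT INTO public.business_claims (business_id, user_id, proof_documents)
VALUES ('test-business-1', auth.uid(), ARRAY['https://example.com/proof.pdf']);
-- The SELECT policy restricts results to the caller's own claims.
SELECT id, business_id, status, submitted_at
FROM public.business_claims
ORDER BY submitted_at DESC;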


@@ -0,0 +1,98 @@
-- Function to create businesses table
CREATE OR REPLACE FUNCTION create_businesses_table()
RETURNS void AS $$
BEGIN
CREATE TABLE IF NOT EXISTS public.businesses (
id TEXT PRIMARY KEY,
name TEXT NOT NULL,
phone TEXT,
email TEXT,
address TEXT,
rating NUMERIC,
website TEXT,
description TEXT,
source TEXT,
logo TEXT,
latitude NUMERIC,
longitude NUMERIC,
last_updated TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()),
search_count INTEGER DEFAULT 1,
created_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()),
place_id TEXT
);
END;
$$ LANGUAGE plpgsql;
-- Function to create business_profiles table
CREATE OR REPLACE FUNCTION create_business_profiles_table()
RETURNS void AS $$
BEGIN
CREATE TABLE IF NOT EXISTS public.business_profiles (
business_id TEXT PRIMARY KEY REFERENCES public.businesses(id),
claimed_by UUID REFERENCES auth.users(id),
claimed_at TIMESTAMP WITH TIME ZONE,
verification_status TEXT NOT NULL DEFAULT 'unverified',
social_links JSONB DEFAULT '{}',
hours_of_operation JSONB DEFAULT '{}',
additional_photos TEXT[] DEFAULT '{}',
tags TEXT[] DEFAULT '{}',
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT valid_verification_status CHECK (verification_status IN ('unverified', 'pending', 'verified', 'rejected'))
);
-- Create business_claims table
CREATE TABLE IF NOT EXISTS public.business_claims (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
business_id TEXT NOT NULL REFERENCES public.businesses(id),
user_id UUID NOT NULL REFERENCES auth.users(id),
status TEXT NOT NULL DEFAULT 'pending',
proof_documents TEXT[] DEFAULT '{}',
submitted_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
reviewed_at TIMESTAMP WITH TIME ZONE,
reviewed_by UUID REFERENCES auth.users(id),
notes TEXT,
CONSTRAINT valid_claim_status CHECK (status IN ('pending', 'approved', 'rejected'))
);
-- Create indexes
CREATE INDEX IF NOT EXISTS idx_business_profiles_claimed_by ON public.business_profiles(claimed_by);
CREATE INDEX IF NOT EXISTS idx_business_claims_business_id ON public.business_claims(business_id);
CREATE INDEX IF NOT EXISTS idx_business_claims_user_id ON public.business_claims(user_id);
CREATE INDEX IF NOT EXISTS idx_business_claims_status ON public.business_claims(status);
-- Add RLS policies
ALTER TABLE public.business_profiles ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.business_claims ENABLE ROW LEVEL SECURITY;
-- Policies for business_profiles
DROP POLICY IF EXISTS "Public profiles are viewable by everyone" ON public.business_profiles;
CREATE POLICY "Public profiles are viewable by everyone"
ON public.business_profiles FOR SELECT
USING (true);
DROP POLICY IF EXISTS "Profiles can be updated by verified owners" ON public.business_profiles;
CREATE POLICY "Profiles can be updated by verified owners"
ON public.business_profiles FOR UPDATE
USING (auth.uid() = claimed_by AND verification_status = 'verified');
-- Policies for business_claims
DROP POLICY IF EXISTS "Users can view their own claims" ON public.business_claims;
CREATE POLICY "Users can view their own claims"
ON public.business_claims FOR SELECT
USING (auth.uid() = user_id);
DROP POLICY IF EXISTS "Users can create claims" ON public.business_claims;
CREATE POLICY "Users can create claims"
ON public.business_claims FOR INSERT
WITH CHECK (auth.uid() = user_id);
DROP POLICY IF EXISTS "Only admins can review claims" ON public.business_claims;
CREATE POLICY "Only admins can review claims"
ON public.business_claims FOR UPDATE
USING (EXISTS (
SELECT 1 FROM auth.users
WHERE auth.uid() = id
AND raw_app_meta_data->>'role' = 'admin'
));
END;
$$ LANGUAGE plpgsql;
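
These setup functions only take effect when invoked; a minimal sketch of calling them, for example from a one-off SQL script or an RPC:
SELECT create_businesses_table();
SELECT create_business_profiles_table();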


@@ -0,0 +1,63 @@
-- Function to review business claims
CREATE OR REPLACE FUNCTION review_business_claim(
p_claim_id UUID,
p_business_id TEXT,
p_user_id UUID,
p_status TEXT,
p_notes TEXT DEFAULT NULL
) RETURNS void AS $$
BEGIN
-- Start transaction
BEGIN
-- Update claim status
UPDATE public.business_claims
SET
status = p_status,
reviewed_at = CURRENT_TIMESTAMP,
reviewed_by = p_user_id,
notes = COALESCE(p_notes, notes)
WHERE id = p_claim_id;
-- If approved, update business profile
IF p_status = 'approved' THEN
-- Get the user_id from the claim
WITH claim_user AS (
SELECT user_id
FROM public.business_claims
WHERE id = p_claim_id
)
INSERT INTO public.business_profiles (
business_id,
claimed_by,
claimed_at,
verification_status
)
SELECT
p_business_id,
user_id,
CURRENT_TIMESTAMP,
'verified'
FROM claim_user
ON CONFLICT (business_id)
DO UPDATE SET
claimed_by = EXCLUDED.claimed_by,
claimed_at = EXCLUDED.claimed_at,
verification_status = EXCLUDED.verification_status;
END IF;
-- Reject any other pending claims for this business
IF p_status = 'approved' THEN
UPDATE public.business_claims
SET
status = 'rejected',
reviewed_at = CURRENT_TIMESTAMP,
reviewed_by = p_user_id,
notes = COALESCE(notes, '') || E'\nAutomatically rejected due to another approved claim.'
WHERE
business_id = p_business_id
AND id != p_claim_id
AND status = 'pending';
END IF;
END;
END;
$$ LANGUAGE plpgsql;
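
A hypothetical invocation of the review function, with placeholder UUIDs standing in for a real claim and reviewer:
SELECT review_business_claim(
    p_claim_id    => '00000000-0000-0000-0000-000000000001'::uuid,
    p_business_id => 'test-business-1',
    p_user_id     => '00000000-0000-0000-0000-000000000002'::uuid,
    p_status      => 'approved',
    p_notes       => 'Documents checked manually'
);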


@@ -0,0 +1,37 @@
-- Insert test business
INSERT INTO public.businesses (
id,
name,
phone,
email,
address,
rating,
website,
description,
source
) VALUES (
'test-business-1',
'Test Coffee Shop',
'303-555-0123',
'contact@testcoffee.com',
'123 Test St, Denver, CO 80202',
4.5,
'https://testcoffee.com',
'A cozy coffee shop in downtown Denver serving artisanal coffee and pastries.',
'manual'
) ON CONFLICT (id) DO NOTHING;
-- Insert test business profile
INSERT INTO public.business_profiles (
business_id,
verification_status,
social_links,
hours_of_operation,
tags
) VALUES (
'test-business-1',
'unverified',
'{"facebook": "https://facebook.com/testcoffee", "instagram": "https://instagram.com/testcoffee"}',
'{"monday": ["7:00", "19:00"], "tuesday": ["7:00", "19:00"], "wednesday": ["7:00", "19:00"], "thursday": ["7:00", "19:00"], "friday": ["7:00", "20:00"], "saturday": ["8:00", "20:00"], "sunday": ["8:00", "18:00"]}',
ARRAY['coffee', 'pastries', 'breakfast', 'lunch']
) ON CONFLICT (business_id) DO NOTHING;
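
A quick sanity check after seeding (not part of the seed file), assuming the TEXT-keyed businesses schema used by the setup functions:
SELECT b.id, b.name, p.verification_status, p.tags
FROM public.businesses b
JOIN public.business_profiles p ON p.business_id = b.id
WHERE b.id = 'test-business-1';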