Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Dump lovers #3503

Merged
merged 6 commits into from
Feb 25, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 36 additions & 0 deletions backend/scripts/dump-love-tables.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
#!/bin/bash
# Dump the lovers-related tables from the Supabase Postgres database into
# gen-love-dump.sql (used to migrate manifold.love data).
#
# Requires SUPABASE_PASSWORD in the environment; it is passed to psql/pg_dump
# non-interactively via PGPASSWORD (-w forbids password prompts).
set -euo pipefail

# Fail fast with a clear message if the password is missing.
: "${SUPABASE_PASSWORD:?SUPABASE_PASSWORD must be set}"
export PGPASSWORD="$SUPABASE_PASSWORD"

# Database host to connect to (despite the name, this is a hostname).
DB_NAME="db.mfodonznyfxllcezufgr.supabase.co" # dev
# DB_NAME="db.pxidrgkatumlvfqaxcll.supabase.co" # prod

# Connect to the database and build the temp_users / temp_love_messages
# staging tables that the dump below includes.
psql -U postgres -d postgres -h "$DB_NAME" -p 5432 -w -f ./dump-lovers-temp.sql

pg_dump -U postgres -d postgres -h "$DB_NAME" -p 5432 -n public -w \
  -t lovers \
  -t love_answers \
  -t love_compatibility_answers \
  -t love_likes \
  -t love_questions \
  -t love_ships \
  -t love_waitlist \
  -t private_user_message_channels \
  -t private_user_seen_message_channels \
  -t temp_users \
  -t temp_love_messages \
  > gen-love-dump.sql

# TODO: just copy pasta these functions from functions.sql
# --function firebase_uid \
# --function get_average_rating \
# --function get_compatibility_questions_with_answer_count \
# --function get_love_question_answers_and_lovers \
# --function millis_interval \
# --function millis_to_ts \
# --function random_alphanumeric \
# --function to_jsonb \
# --function ts_to_millis \

# psql -U postgres -d postgres -h "$DB_NAME" -p 5432 -w -f ./dump-lovers-cleanup.sql
3 changes: 3 additions & 0 deletions backend/scripts/dump-lovers-cleanup.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
-- Remove the staging tables created by dump-lovers-temp.sql once the
-- pg_dump in dump-love-tables.sh has been taken.
drop table if exists temp_users;

drop table if exists temp_love_messages;
55 changes: 55 additions & 0 deletions backend/scripts/dump-lovers-temp.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
-- Build staging tables containing just the lovers-related slice of the
-- users / private messages data, so dump-love-tables.sh can pg_dump them.

drop table if exists temp_users;

-- One row per lover, joined to their user row (and, if present, their
-- private_users row — LEFT JOIN, so private_user_data may be NULL).
--
-- The `data - array(select jsonb_object_keys(data - 'k1' - 'k2' ...))`
-- expression is a whitelist trick: the inner subtraction removes the listed
-- keys, jsonb_object_keys then yields every OTHER key, and subtracting that
-- array from the original deletes them — leaving only the listed keys.
create table temp_users as
SELECT
u.id,
u.username,
u.name,
u.created_time,

-- select some keys from user data
u.data - array(select jsonb_object_keys(u.data
- 'isBannedFromPosting'
- 'userDeleted'
- 'bio'
- 'website'
- 'twitterHandle'
- 'discordHandle'
- 'fromLove'
- 'sweepstakesVerified'
- 'verifiedPhone'
- 'idVerified'
)) as user_data,

-- select some keys from private user data
pu.data - array(select jsonb_object_keys(pu.data
- 'email'
- 'notificationPreferences'
)) as private_user_data
FROM lovers l
JOIN users u ON u.id = l.user_id
LEFT JOIN private_users pu ON u.id = pu.id;

-- Lock the copy down: RLS enabled with no policies defined here, so the
-- table is not readable through the API. NOTE(review): presumably intended
-- to keep the dump private in Supabase — confirm.
alter table temp_users enable row level security;

drop table if exists temp_love_messages;

-- Messages from channels whose members are ALL lovers: the EXCEPT yields
-- members who are not in `lovers`; NOT EXISTS keeps only channels where
-- that set is empty.
-- NOTE(review): the LEFT JOIN means a channel with no messages produces a
-- row of all NULLs — confirm that's intended (an inner join would skip it).
create table temp_love_messages as
with love_channels as (
select mc.*
from private_user_message_channels mc
where not exists (
-- Check if any member is NOT a love user
select user_id from private_user_message_channel_members mcm
where mc.id = mcm.channel_id
except
select user_id from lovers
)
)
select
pum.*
from love_channels lc
left join private_user_messages pum
on pum.channel_id = lc.id;

-- Same RLS lockdown as temp_users above.
alter table temp_love_messages enable row level security;
86 changes: 86 additions & 0 deletions backend/scripts/upload-lovers.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
import { runScript } from 'run-script'
import { type SupabaseDirectClient } from 'shared/supabase/init'
import * as fs from 'fs'
import { parse } from 'csv-parse/sync'

// Upload data from CSV files generated by dump-lovers.ts to the database.
// Exits non-zero if either upload step fails.
runScript(async ({ pg }) => {
  try {
    await uploadUsers(pg)
    await uploadChats(pg)
  } catch (error) {
    console.error('Migration failed:', error)
    process.exit(1)
  }
  // Only reached when both steps succeeded (exit above never returns).
  console.log('Migration completed successfully')
})

/**
 * Load users.csv (produced from the temp_users staging table) into a
 * session-local TEMP table for further migration work.
 */
const uploadUsers = async (pg: SupabaseDirectClient) => {
  const fileContent = fs.readFileSync('users.csv', 'utf-8')
  const records = parse(fileContent, {
    columns: true,
    skip_empty_lines: true,
    cast: true // Automatically convert strings to appropriate types
  })

  // Create temp table with same structure.
  // NOTE(review): dump-lovers-temp.sql also selects created_time — confirm
  // whether the CSV carries it and whether it should be loaded here too.
  await pg.none(`
    CREATE TEMP TABLE temp_users (
      id text,
      username text,
      name text,
      user_data jsonb,
      private_user_data jsonb
    )
  `)

  // The jsonb cells arrive from the CSV as JSON *text*. The ':json' modifier
  // runs JSON.stringify on the value, so a raw string would be double-encoded
  // and stored as a jsonb string instead of an object — parse it first.
  const parseJson = (c: { value: string | null }) =>
    c.value == null || c.value === '' ? null : JSON.parse(c.value)

  // Bulk insert into the temp table in a single statement
  const cs = new pg.helpers.ColumnSet([
    'id',
    'username',
    'name',
    { name: 'user_data', mod: ':json', init: parseJson },
    { name: 'private_user_data', mod: ':json', init: parseJson }
  ], { table: 'temp_users' })

  const query = pg.helpers.insert(records, cs)
  await pg.none(query)

  console.log(`Uploaded ${records.length} users`)
}

/**
 * Load chats.csv (produced from the temp_love_messages staging table) into a
 * session-local TEMP table mirroring private_user_messages.
 */
const uploadChats = async (pg: SupabaseDirectClient) => {
  const fileContent = fs.readFileSync('chats.csv', 'utf-8')
  const records = parse(fileContent, {
    columns: true,
    skip_empty_lines: true,
    cast: true // Automatically convert strings to appropriate types
  })

  // Create temp table matching private_user_messages structure
  await pg.none(`
    CREATE TEMP TABLE temp_messages (
      id bigint,
      channel_id bigint,
      user_id text,
      content jsonb,
      created_time timestamp with time zone,
      visibility text
    )
  `)

  // The jsonb cell arrives from the CSV as JSON *text*. The ':json' modifier
  // runs JSON.stringify on the value, so a raw string would be double-encoded
  // and stored as a jsonb string instead of an object — parse it first.
  const parseJson = (c: { value: string | null }) =>
    c.value == null || c.value === '' ? null : JSON.parse(c.value)

  // Bulk insert into the temp table in a single statement
  const cs = new pg.helpers.ColumnSet([
    'id',
    'channel_id',
    'user_id',
    { name: 'content', mod: ':json', init: parseJson },
    'created_time',
    'visibility'
  ], { table: 'temp_messages' })

  const query = pg.helpers.insert(records, cs)
  await pg.none(query)

  console.log(`Uploaded ${records.length} messages`)
}