Weekly breakdown and line charts
This commit is contained in:
@ -1,9 +1,25 @@
|
|||||||
-- First establish some base parameters for our users
|
-- First establish some base parameters for our users
|
||||||
WITH config AS (
|
WITH config AS (
|
||||||
SELECT
|
SELECT
|
||||||
|
-- For now we're using this as our free-trial filter. We'll treat users
|
||||||
|
-- that churn during this period differently since they likely haven't actually
|
||||||
|
-- tried the software very much. We'll also use this to isolate feature usage
|
||||||
|
-- from this period, as it's likely that user goals are different at this stage.
|
||||||
14 AS short_term,
|
14 AS short_term,
|
||||||
|
-- This is our ~day74 cutoff. For users, we want to see patterns of usage here
|
||||||
|
-- that might differ from those who either churn later or hang around for a while.
|
||||||
|
-- For feature aggregates, this should represent the "serious" efforts to use ZenMaid.
|
||||||
90 AS medium_term,
|
90 AS medium_term,
|
||||||
|
-- This is a (in theory) rarer group of churned users that lasted beyond our day 74
|
||||||
|
-- danger zone but still churned.
|
||||||
|
-- Beyond this we'll consider anyone remaining a "success" (even if they churned at some
|
||||||
|
-- point) since things like company closures, etc. start to come into play more
|
||||||
|
-- frequently.
|
||||||
|
-- For features, we'll look at this as features used in the company's day-to-day, and not
|
||||||
|
-- evaluation attempts.
|
||||||
360 AS long_term,
|
360 AS long_term,
|
||||||
|
-- We'll only look back a few years since we want to make fairish comparisons betwen users
|
||||||
|
-- exposed to "modern" ZenMaid tools.
|
||||||
CAST('2023-01-01' AS timestamp) AS date_cutoff
|
CAST('2023-01-01' AS timestamp) AS date_cutoff
|
||||||
),
|
),
|
||||||
-- Then establish a standard core set of user data
|
-- Then establish a standard core set of user data
|
||||||
@ -51,96 +67,213 @@ users_to_examine AS (
|
|||||||
-- Finally, people who are still kicking.
|
-- Finally, people who are still kicking.
|
||||||
-- This cohort is going to be a little wrong for older
|
-- This cohort is going to be a little wrong for older
|
||||||
-- dates since it only includes those who are STILL around.
|
-- dates since it only includes those who are STILL around.
|
||||||
SELECT 'active-user' AS category, uwcs.*
|
SELECT 'long-termer' AS category, uwcs.*
|
||||||
FROM users_with_churn_stats uwcs, config
|
FROM users_with_churn_stats uwcs, config
|
||||||
WHERE BILLING_STATE != 'CHURNED'
|
-- We could also filter this by the following if we wanted to look at current users:
|
||||||
AND lifespan >= config.long_term
|
-- billing_state NOT IN ('CHURNED', 'ONBOARDING', 'UNKNOWN')
|
||||||
|
-- ...but for a multi-year peek into history, we'll lose too many good users who left
|
||||||
|
-- for other reasons like closures, etc.
|
||||||
|
WHERE lifespan >= config.long_term
|
||||||
),
|
),
|
||||||
|
|
||||||
|
-- Now we'll start pulling counts from our various core features to help check usage.
|
||||||
|
-- We'll ALSO bucket these by the same eras we're using to split up our users.
|
||||||
|
-- For example, things created in the short_term are probably less serious integration
|
||||||
|
-- efforts - users are just poking around at the software to see if they want to give it a
|
||||||
|
-- serious try.
|
||||||
|
-- Medium-term (up to our day 74ish cutoff) is when users REALLY decide if they want to build
|
||||||
|
-- a business around ZenMaid.
|
||||||
|
-- Long-term should represent users who are actively using ZenMaid for their day-to-day operations.
|
||||||
|
|
||||||
-- Well add booking forms created by era
|
-- Well add booking forms created by era
|
||||||
booking_form_counts AS (
|
booking_form_counts AS (
|
||||||
SELECT bf.user_id,
|
-- Establish a summary of records with ages
|
||||||
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
WITH summary AS (
|
||||||
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
SELECT booking_forms.user_id, booking_forms.created_at::date - users.created_at::date AS created_age
|
||||||
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
FROM config, booking_forms
|
||||||
COUNT(*) AS total
|
JOIN users ON booking_forms.user_id = users.id
|
||||||
FROM config, (
|
|
||||||
SELECT booking_forms.user_id, booking_forms.created_at::date - users.created_at::date AS created_age
|
|
||||||
FROM config, booking_forms JOIN users ON booking_forms.user_id = users.id
|
|
||||||
WHERE booking_forms.created_at >= config.date_cutoff
|
WHERE booking_forms.created_at >= config.date_cutoff
|
||||||
AND users.created_at >= config.date_cutoff
|
AND users.created_at >= config.date_cutoff
|
||||||
) bf
|
AND booking_forms.created_at <= users.created_at::date + config.long_term
|
||||||
GROUP BY bf.user_id
|
),
|
||||||
|
-- Group by week for line charts
|
||||||
|
weekly_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
jsonb_object_agg(
|
||||||
|
'week_' || weeks_in,
|
||||||
|
weekly_count
|
||||||
|
) AS weekly_counts,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM (SELECT user_id, FLOOR(created_age / 7) AS weeks_in, COUNT(*) as weekly_count
|
||||||
|
FROM summary
|
||||||
|
GROUP BY user_id, weeks_in) by_weeks
|
||||||
|
GROUP BY user_id
|
||||||
|
),
|
||||||
|
-- Group by era for bar charts
|
||||||
|
bucket_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM config, summary
|
||||||
|
GROUP BY user_id
|
||||||
|
)
|
||||||
|
-- Put it all together
|
||||||
|
SELECT bucket_counts.*, weekly_counts.weekly_counts
|
||||||
|
FROM bucket_counts JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
|
||||||
),
|
),
|
||||||
-- Bookings made by era
|
-- Bookings made by era
|
||||||
booking_counts AS (
|
booking_counts AS (
|
||||||
SELECT bk.user_id,
|
-- Establish a summary of records with ages
|
||||||
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
WITH summary AS (
|
||||||
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
SELECT booking_forms.user_id, bookings.created_at::date - users.created_at::date AS created_age
|
||||||
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
FROM config,
|
||||||
COUNT(*) AS total
|
booking_forms
|
||||||
FROM config, (
|
JOIN users ON booking_forms.user_id = users.id
|
||||||
SELECT booking_forms.user_id, bookings.created_at::date - users.created_at::date AS created_age
|
JOIN bookings ON bookings.booking_form_id = booking_forms.id
|
||||||
FROM config, booking_forms
|
WHERE booking_forms.created_at >= config.date_cutoff
|
||||||
JOIN users ON booking_forms.user_id = users.id
|
AND bookings.created_at >= config.date_cutoff
|
||||||
JOIN bookings ON bookings.booking_form_id = booking_forms.id
|
AND users.created_at >= config.date_cutoff
|
||||||
WHERE booking_forms.created_at >= config.date_cutoff
|
),
|
||||||
AND bookings.created_at >= config.date_cutoff
|
-- Group by week for line charts
|
||||||
AND users.created_at >= config.date_cutoff
|
weekly_counts AS (
|
||||||
) bk
|
SELECT user_id,
|
||||||
GROUP BY bk.user_id
|
jsonb_object_agg(
|
||||||
|
'week_' || weeks_in,
|
||||||
|
weekly_count
|
||||||
|
) AS weekly_counts,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM (
|
||||||
|
SELECT user_id, FLOOR(created_age / 7) AS weeks_in, COUNT(*) as weekly_count
|
||||||
|
FROM summary
|
||||||
|
GROUP BY user_id, weeks_in) by_weeks
|
||||||
|
GROUP BY user_id
|
||||||
|
),
|
||||||
|
-- Group by era for bar charts
|
||||||
|
bucket_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM config, summary
|
||||||
|
GROUP BY user_id
|
||||||
|
)
|
||||||
|
-- Put it all together
|
||||||
|
SELECT bucket_counts.*, weekly_counts.weekly_counts
|
||||||
|
FROM bucket_counts
|
||||||
|
JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
|
||||||
),
|
),
|
||||||
-- Employees added by era
|
-- Employees added by era
|
||||||
employee_counts AS (
|
employee_counts AS (
|
||||||
SELECT empl.user_id,
|
-- Establish a summary of records with ages
|
||||||
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
WITH summary AS (
|
||||||
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
|
||||||
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
|
||||||
COUNT(*) AS total
|
|
||||||
FROM config, (
|
|
||||||
SELECT employees.user_id, employees.created_at::date - users.created_at::date AS created_age
|
SELECT employees.user_id, employees.created_at::date - users.created_at::date AS created_age
|
||||||
FROM config, employees JOIN users ON employees.user_id = users.id
|
FROM config, employees
|
||||||
|
JOIN users ON employees.user_id = users.id
|
||||||
WHERE employees.created_at >= config.date_cutoff
|
WHERE employees.created_at >= config.date_cutoff
|
||||||
AND users.created_at >= config.date_cutoff
|
AND users.created_at >= config.date_cutoff
|
||||||
) empl
|
AND employees.created_at <= users.created_at::date + config.long_term
|
||||||
GROUP BY empl.user_id
|
),
|
||||||
|
-- Group by week for line charts
|
||||||
|
weekly_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
jsonb_object_agg(
|
||||||
|
'week_' || weeks_in,
|
||||||
|
weekly_count
|
||||||
|
) AS weekly_counts,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM (SELECT user_id, FLOOR(created_age / 7) AS weeks_in, COUNT(*) as weekly_count
|
||||||
|
FROM summary
|
||||||
|
GROUP BY user_id, weeks_in) by_weeks
|
||||||
|
GROUP BY user_id
|
||||||
|
),
|
||||||
|
-- Group by era for bar charts
|
||||||
|
bucket_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM config, summary
|
||||||
|
GROUP BY user_id
|
||||||
|
)
|
||||||
|
-- Put it all together
|
||||||
|
SELECT bucket_counts.*, weekly_counts.weekly_counts
|
||||||
|
FROM bucket_counts JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
|
||||||
),
|
),
|
||||||
-- Contacts created by era
|
-- Contacts created by era
|
||||||
contact_counts AS (
|
contact_counts AS (
|
||||||
SELECT cust.user_id,
|
-- Establish a summary of records with ages
|
||||||
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
WITH summary AS (
|
||||||
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
|
||||||
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
|
||||||
COUNT(*) AS total
|
|
||||||
FROM config, (
|
|
||||||
SELECT customers.user_id, customers.created_at::date - users.created_at::date AS created_age
|
SELECT customers.user_id, customers.created_at::date - users.created_at::date AS created_age
|
||||||
FROM config, customers JOIN users ON customers.user_id = users.id
|
FROM config, customers
|
||||||
|
JOIN users ON customers.user_id = users.id
|
||||||
WHERE customers.created_at >= config.date_cutoff
|
WHERE customers.created_at >= config.date_cutoff
|
||||||
AND users.created_at >= config.date_cutoff
|
AND users.created_at >= config.date_cutoff
|
||||||
) cust
|
AND customers.created_at <= users.created_at::date + config.long_term
|
||||||
GROUP BY cust.user_id
|
),
|
||||||
),
|
-- Group by week for line charts
|
||||||
|
weekly_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
jsonb_object_agg(
|
||||||
|
'week_' || weeks_in,
|
||||||
|
weekly_count
|
||||||
|
) AS weekly_counts,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM (SELECT user_id, FLOOR(created_age / 7) AS weeks_in, COUNT(*) as weekly_count
|
||||||
|
FROM summary
|
||||||
|
GROUP BY user_id, weeks_in) by_weeks
|
||||||
|
GROUP BY user_id
|
||||||
|
),
|
||||||
|
-- Group by era for bar charts
|
||||||
|
bucket_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM config, summary
|
||||||
|
GROUP BY user_id
|
||||||
|
)
|
||||||
|
-- Put it all together
|
||||||
|
SELECT bucket_counts.*, weekly_counts.weekly_counts
|
||||||
|
FROM bucket_counts JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
|
||||||
|
)
|
||||||
-- Finally, we'll flatten it all out into a repurposable flat table
|
-- Finally, we'll flatten it all out into a repurposable flat table
|
||||||
SELECT users_to_examine.id,
|
SELECT users_to_examine.id,
|
||||||
|
users_to_examine.active,
|
||||||
|
users_to_examine.billing_state,
|
||||||
|
users_to_examine.free_trial_ends_at,
|
||||||
users_to_examine.category,
|
users_to_examine.category,
|
||||||
|
users_to_examine.created_at,
|
||||||
|
users_to_examine.month_created,
|
||||||
|
users_to_examine.updated_at,
|
||||||
|
users_to_examine.lifespan,
|
||||||
COALESCE(booking_form_counts.short_term, 0) AS booking_forms_short_term,
|
COALESCE(booking_form_counts.short_term, 0) AS booking_forms_short_term,
|
||||||
COALESCE(booking_form_counts.medium_term, 0) AS booking_forms_medium_term,
|
COALESCE(booking_form_counts.medium_term, 0) AS booking_forms_medium_term,
|
||||||
COALESCE(booking_form_counts.long_term, 0) AS booking_forms_long_term,
|
COALESCE(booking_form_counts.long_term, 0) AS booking_forms_long_term,
|
||||||
COALESCE(booking_form_counts.total, 0) AS booking_forms,
|
COALESCE(booking_form_counts.total, 0) AS booking_forms,
|
||||||
|
COALESCE(booking_form_counts.weekly_counts, '{}'::jsonb) AS booking_forms_weekly_counts,
|
||||||
COALESCE(booking_counts.short_term, 0) AS bookings_short_term,
|
COALESCE(booking_counts.short_term, 0) AS bookings_short_term,
|
||||||
COALESCE(booking_counts.medium_term, 0) AS bookings_medium_term,
|
COALESCE(booking_counts.medium_term, 0) AS bookings_medium_term,
|
||||||
COALESCE(booking_counts.long_term, 0) AS bookings_long_term,
|
COALESCE(booking_counts.long_term, 0) AS bookings_long_term,
|
||||||
COALESCE(booking_counts.total, 0) AS bookings,
|
COALESCE(booking_counts.total, 0) AS bookings,
|
||||||
|
COALESCE(booking_counts.weekly_counts, '{}'::jsonb) AS bookings_weekly_counts,
|
||||||
COALESCE(employee_counts.short_term, 0) AS employees_short_term,
|
COALESCE(employee_counts.short_term, 0) AS employees_short_term,
|
||||||
COALESCE(employee_counts.medium_term, 0) AS employees_medium_term,
|
COALESCE(employee_counts.medium_term, 0) AS employees_medium_term,
|
||||||
COALESCE(employee_counts.long_term, 0) AS employees_long_term,
|
COALESCE(employee_counts.long_term, 0) AS employees_long_term,
|
||||||
COALESCE(employee_counts.total, 0) AS employees,
|
COALESCE(employee_counts.total, 0) AS employees,
|
||||||
|
COALESCE(employee_counts.weekly_counts, '{}'::jsonb) AS employee_weekly_counts,
|
||||||
COALESCE(contact_counts.short_term, 0) AS contacts_short_term,
|
COALESCE(contact_counts.short_term, 0) AS contacts_short_term,
|
||||||
COALESCE(contact_counts.medium_term, 0) AS contacts_medium_term,
|
COALESCE(contact_counts.medium_term, 0) AS contacts_medium_term,
|
||||||
COALESCE(contact_counts.long_term, 0) AS contacts_long_term,
|
COALESCE(contact_counts.long_term, 0) AS contacts_long_term,
|
||||||
COALESCE(contact_counts.total, 0) AS contacts
|
COALESCE(contact_counts.total, 0) AS contacts,
|
||||||
|
COALESCE(contact_counts.weekly_counts, '{}'::jsonb) AS contacts_weekly_counts
|
||||||
FROM users_to_examine
|
FROM users_to_examine
|
||||||
LEFT OUTER JOIN booking_form_counts ON booking_form_counts.user_id = users_to_examine.id
|
LEFT JOIN booking_form_counts ON booking_form_counts.user_id = users_to_examine.id
|
||||||
LEFT OUTER JOIN booking_counts ON booking_counts.user_id = users_to_examine.id
|
LEFT JOIN booking_counts ON booking_counts.user_id = users_to_examine.id
|
||||||
LEFT OUTER JOIN employee_counts ON employee_counts.user_id = users_to_examine.id
|
LEFT JOIN employee_counts ON employee_counts.user_id = users_to_examine.id
|
||||||
LEFT OUTER JOIN contact_counts ON contact_counts.user_id = users_to_examine.id
|
LEFT JOIN contact_counts ON contact_counts.user_id = users_to_examine.id
|
||||||
;
|
;
|
||||||
|
|||||||
@ -1,9 +1,25 @@
|
|||||||
-- First establish some base parameters for our users
|
-- First establish some base parameters for our users
|
||||||
WITH config AS (
|
WITH config AS (
|
||||||
SELECT
|
SELECT
|
||||||
|
-- For now we're using this as our free-trial filter. We'll treat users
|
||||||
|
-- that churn during this period differently since they likely haven't actually
|
||||||
|
-- tried the software very much. We'll also use this to isolate feature usage
|
||||||
|
-- from this period, as it's likely that user goals are different at this stage.
|
||||||
14 AS short_term,
|
14 AS short_term,
|
||||||
|
-- This is our ~day74 cutoff. For users, we want to see patterns of usage here
|
||||||
|
-- that might differ from those who either churn later or hang around for a while.
|
||||||
|
-- For feature aggregates, this should represent the "serious" efforts to use ZenMaid.
|
||||||
90 AS medium_term,
|
90 AS medium_term,
|
||||||
|
-- This is a (in theory) rarer group of churned users that lasted beyond our day 74
|
||||||
|
-- danger zone but still churned.
|
||||||
|
-- Beyond this we'll consider anyone remaining a "success" (even if they churned at some
|
||||||
|
-- point) since things like company closures, etc. start to come into play more
|
||||||
|
-- frequently.
|
||||||
|
-- For features, we'll look at this as features used in the company's day-to-day, and not
|
||||||
|
-- evaluation attempts.
|
||||||
360 AS long_term,
|
360 AS long_term,
|
||||||
|
-- We'll only look back a few years since we want to make fairish comparisons betwen users
|
||||||
|
-- exposed to "modern" ZenMaid tools.
|
||||||
CAST('2023-01-01' AS timestamp) AS date_cutoff
|
CAST('2023-01-01' AS timestamp) AS date_cutoff
|
||||||
),
|
),
|
||||||
-- Then establish a standard core set of user data
|
-- Then establish a standard core set of user data
|
||||||
@ -51,33 +67,67 @@ users_to_examine AS (
|
|||||||
-- Finally, people who are still kicking.
|
-- Finally, people who are still kicking.
|
||||||
-- This cohort is going to be a little wrong for older
|
-- This cohort is going to be a little wrong for older
|
||||||
-- dates since it only includes those who are STILL around.
|
-- dates since it only includes those who are STILL around.
|
||||||
SELECT 'active-user' AS category, uwcs.*
|
SELECT 'long-termer' AS category, uwcs.*
|
||||||
FROM users_with_churn_stats uwcs, config
|
FROM users_with_churn_stats uwcs, config
|
||||||
WHERE BILLING_STATE != 'CHURNED'
|
-- We could also filter this by the following if we wanted to look at current users:
|
||||||
AND lifespan >= config.long_term
|
-- billing_state NOT IN ('CHURNED', 'ONBOARDING', 'UNKNOWN')
|
||||||
|
-- ...but for a multi-year peek into history, we'll lose too many good users who left
|
||||||
|
-- for other reasons like closures, etc.
|
||||||
|
WHERE lifespan >= config.long_term
|
||||||
),
|
),
|
||||||
-- Emails sent by era
|
-- Emails sent by era
|
||||||
email_counts AS (
|
email_counts AS (
|
||||||
SELECT email.user_id,
|
-- Establish a summary of records with ages
|
||||||
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
WITH summary AS (
|
||||||
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
|
||||||
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
|
||||||
COUNT(*) AS total
|
|
||||||
FROM config, (
|
|
||||||
SELECT sent_emails.user_id, sent_emails.created_at::date - users.created_at::date AS created_age
|
SELECT sent_emails.user_id, sent_emails.created_at::date - users.created_at::date AS created_age
|
||||||
FROM config, sent_emails JOIN users ON sent_emails.user_id = users.id
|
FROM config, sent_emails
|
||||||
|
JOIN users ON sent_emails.user_id = users.id
|
||||||
WHERE sent_emails.created_at >= config.date_cutoff
|
WHERE sent_emails.created_at >= config.date_cutoff
|
||||||
AND users.created_at >= config.date_cutoff
|
AND users.created_at >= config.date_cutoff
|
||||||
) email
|
AND sent_emails.created_at <= users.created_at::date + config.long_term
|
||||||
GROUP BY email.user_id
|
),
|
||||||
|
-- Group by week for line charts
|
||||||
|
weekly_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
jsonb_object_agg(
|
||||||
|
'week_' || weeks_in,
|
||||||
|
weekly_count
|
||||||
|
) AS weekly_counts,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM (SELECT user_id, FLOOR(created_age / 7) AS weeks_in, COUNT(*) as weekly_count
|
||||||
|
FROM summary
|
||||||
|
GROUP BY user_id, weeks_in) by_weeks
|
||||||
|
GROUP BY user_id
|
||||||
|
),
|
||||||
|
-- Group by era for bar charts
|
||||||
|
bucket_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM config, summary
|
||||||
|
GROUP BY user_id
|
||||||
|
)
|
||||||
|
-- Put it all together
|
||||||
|
SELECT bucket_counts.*, weekly_counts.weekly_counts
|
||||||
|
FROM bucket_counts JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
|
||||||
)
|
)
|
||||||
-- Finally, we'll flatten it all out into a repurposable flat table
|
-- Finally, we'll flatten it all out into a repurposable flat table
|
||||||
SELECT users_to_examine.id,
|
SELECT users_to_examine.id,
|
||||||
|
users_to_examine.active,
|
||||||
|
users_to_examine.billing_state,
|
||||||
|
users_to_examine.free_trial_ends_at,
|
||||||
users_to_examine.category,
|
users_to_examine.category,
|
||||||
|
users_to_examine.created_at,
|
||||||
|
users_to_examine.month_created,
|
||||||
|
users_to_examine.updated_at,
|
||||||
|
users_to_examine.lifespan,
|
||||||
COALESCE(email_counts.short_term, 0) AS emails_short_term,
|
COALESCE(email_counts.short_term, 0) AS emails_short_term,
|
||||||
COALESCE(email_counts.medium_term, 0) AS emails_medium_term,
|
COALESCE(email_counts.medium_term, 0) AS emails_medium_term,
|
||||||
COALESCE(email_counts.long_term, 0) AS emails_long_term,
|
COALESCE(email_counts.long_term, 0) AS emails_long_term,
|
||||||
COALESCE(email_counts.total, 0) AS emails
|
COALESCE(email_counts.total, 0) AS emails,
|
||||||
|
COALESCE(email_counts.weekly_counts, '{}'::jsonb) AS emails_weekly_counts
|
||||||
FROM users_to_examine
|
FROM users_to_examine
|
||||||
LEFT OUTER JOIN email_counts ON email_counts.user_id = users_to_examine.id
|
LEFT JOIN email_counts ON email_counts.user_id = users_to_examine.id
|
||||||
;
|
;
|
||||||
|
|||||||
@ -1,151 +0,0 @@
|
|||||||
-- First establish some base parameters for our users
|
|
||||||
WITH config AS (
|
|
||||||
SELECT
|
|
||||||
14 AS short_term,
|
|
||||||
90 AS medium_term,
|
|
||||||
360 AS long_term,
|
|
||||||
CAST('2023-01-01' AS timestamp) AS date_cutoff
|
|
||||||
),
|
|
||||||
-- Then establish a standard core set of user data
|
|
||||||
users_with_churn_stats AS (
|
|
||||||
SELECT id, active, billing_state,
|
|
||||||
DATE_TRUNC('month', created_at) AS month_created,
|
|
||||||
created_at, free_trial_ends_at, updated_at,
|
|
||||||
updated_at::date - created_at::date AS lifespan
|
|
||||||
FROM users, config
|
|
||||||
-- We'll only look at newish users.
|
|
||||||
WHERE created_at >= config.date_cutoff
|
|
||||||
),
|
|
||||||
-- Next use that core data to extract the records we want to analyze
|
|
||||||
-- and categorize them by date and status
|
|
||||||
users_to_examine AS (
|
|
||||||
-- First the users who exited just after the trial
|
|
||||||
SELECT 'quick-exit' AS category, uwcs.*
|
|
||||||
FROM users_with_churn_stats uwcs, config
|
|
||||||
WHERE billing_state = 'CHURNED'
|
|
||||||
AND lifespan <= config.short_term
|
|
||||||
|
|
||||||
UNION
|
|
||||||
|
|
||||||
-- Next, users who gave the tools some time but still left
|
|
||||||
SELECT 'fair-trial' AS category, uwcs.*
|
|
||||||
FROM users_with_churn_stats uwcs, config
|
|
||||||
WHERE billing_state = 'CHURNED'
|
|
||||||
AND lifespan < config.medium_term
|
|
||||||
AND lifespan > config.short_term
|
|
||||||
|
|
||||||
UNION
|
|
||||||
|
|
||||||
-- Then some longer-time users who still churned.
|
|
||||||
-- We'll cut this off after 6 months since afterwards
|
|
||||||
-- company closures, etc. could factor in.
|
|
||||||
|
|
||||||
SELECT 'short-termer' AS category, uwcs.*
|
|
||||||
FROM users_with_churn_stats uwcs, config
|
|
||||||
WHERE billing_state = 'CHURNED'
|
|
||||||
AND lifespan >= config.medium_term
|
|
||||||
AND lifespan < config.long_term
|
|
||||||
|
|
||||||
UNION
|
|
||||||
|
|
||||||
-- Finally, people who are still kicking.
|
|
||||||
-- This cohort is going to be a little wrong for older
|
|
||||||
-- dates since it only includes those who are STILL around.
|
|
||||||
SELECT 'active-user' AS category, uwcs.*
|
|
||||||
FROM users_with_churn_stats uwcs, config
|
|
||||||
WHERE BILLING_STATE != 'CHURNED'
|
|
||||||
AND lifespan >= config.long_term
|
|
||||||
),
|
|
||||||
-- Messages sent by era
|
|
||||||
sms_counts AS (
|
|
||||||
SELECT texts.user_id,
|
|
||||||
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
|
||||||
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
|
||||||
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
|
||||||
COUNT(*) AS total
|
|
||||||
FROM config, (
|
|
||||||
SELECT sent_texts.user_id, sent_texts.created_at::date - users.created_at::date AS created_age
|
|
||||||
FROM config, sent_texts JOIN users ON sent_texts.user_id = users.id
|
|
||||||
WHERE sent_texts.created_at >= config.date_cutoff
|
|
||||||
AND users.created_at >= config.date_cutoff
|
|
||||||
) texts
|
|
||||||
GROUP BY texts.user_id
|
|
||||||
),
|
|
||||||
-- Appointments by era
|
|
||||||
appointment_counts AS (
|
|
||||||
SELECT appts.user_id,
|
|
||||||
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
|
||||||
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
|
||||||
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
|
||||||
COUNT(*) AS total
|
|
||||||
FROM config, (
|
|
||||||
SELECT appointments.user_id, appointments.created_at::date - users.created_at::date AS created_age
|
|
||||||
FROM config, appointments JOIN users ON appointments.user_id = users.id
|
|
||||||
WHERE appointments.created_at >= config.date_cutoff
|
|
||||||
AND users.created_at >= config.date_cutoff
|
|
||||||
) appts
|
|
||||||
GROUP BY appts.user_id
|
|
||||||
),
|
|
||||||
-- Checklists created by era
|
|
||||||
checklist_counts AS (
|
|
||||||
SELECT checks.user_id,
|
|
||||||
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
|
||||||
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
|
||||||
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
|
||||||
COUNT(*) AS total
|
|
||||||
FROM config, (
|
|
||||||
SELECT checklist_templates.user_id, checklists.created_at::date - users.created_at::date AS created_age
|
|
||||||
FROM config, checklist_templates
|
|
||||||
JOIN users ON checklist_templates.user_id = users.id
|
|
||||||
JOIN checklists ON checklists.checklist_template_id = checklist_templates.id
|
|
||||||
WHERE checklist_templates.created_at >= config.date_cutoff
|
|
||||||
AND checklists.created_at >= config.date_cutoff
|
|
||||||
AND users.created_at >= config.date_cutoff
|
|
||||||
) checks
|
|
||||||
GROUP BY checks.user_id
|
|
||||||
),
|
|
||||||
-- Checklist events created by era
|
|
||||||
checklist_filled_counts AS (
|
|
||||||
SELECT events.user_id,
|
|
||||||
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
|
||||||
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
|
||||||
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
|
||||||
COUNT(*) AS total
|
|
||||||
FROM config, (
|
|
||||||
SELECT employees.user_id, checklist_item_events.created_at::date - users.created_at::date AS created_age
|
|
||||||
FROM config, employees
|
|
||||||
JOIN users ON employees.user_id = users.id
|
|
||||||
JOIN checklist_item_events ON checklist_item_events.employee_id = employees.id
|
|
||||||
WHERE checklist_item_events.created_at >= config.date_cutoff
|
|
||||||
AND checklist_item_events.created_at >= config.date_cutoff
|
|
||||||
AND users.created_at >= config.date_cutoff
|
|
||||||
) events
|
|
||||||
GROUP BY events.user_id
|
|
||||||
)
|
|
||||||
-- Finally, we'll flatten it all out into a repurposable flat table
|
|
||||||
SELECT users_to_examine.id,
|
|
||||||
users_to_examine.category,
|
|
||||||
users_to_examine.month_created,
|
|
||||||
users_to_examine.lifespan,
|
|
||||||
COALESCE(sms_counts.short_term, 0) AS sms_short_term,
|
|
||||||
COALESCE(sms_counts.medium_term, 0) AS sms_medium_term,
|
|
||||||
COALESCE(sms_counts.long_term, 0) AS sms_long_term,
|
|
||||||
COALESCE(sms_counts.total, 0) AS sms,
|
|
||||||
COALESCE(appointment_counts.short_term, 0) AS appointments_short_term,
|
|
||||||
COALESCE(appointment_counts.medium_term, 0) AS appointments_medium_term,
|
|
||||||
COALESCE(appointment_counts.long_term, 0) AS appointments_long_term,
|
|
||||||
COALESCE(appointment_counts.total, 0) AS appointments,
|
|
||||||
COALESCE(checklist_counts.short_term, 0) AS checklists_short_term,
|
|
||||||
COALESCE(checklist_counts.medium_term, 0) AS checklists_medium_term,
|
|
||||||
COALESCE(checklist_counts.long_term, 0) AS checklists_long_term,
|
|
||||||
COALESCE(checklist_counts.total, 0) AS checklists,
|
|
||||||
COALESCE(checklist_filled_counts.short_term, 0) AS checklists_filled_short_term,
|
|
||||||
COALESCE(checklist_filled_counts.medium_term, 0) AS checklists_filled_medium_term,
|
|
||||||
COALESCE(checklist_filled_counts.long_term, 0) AS checklists_filled_long_term,
|
|
||||||
COALESCE(checklist_filled_counts.total, 0) AS checklists_filled
|
|
||||||
FROM users_to_examine
|
|
||||||
LEFT OUTER JOIN sms_counts ON sms_counts.user_id = users_to_examine.id
|
|
||||||
LEFT OUTER JOIN appointment_counts ON appointment_counts.user_id = users_to_examine.id
|
|
||||||
LEFT OUTER JOIN checklist_counts ON checklist_counts.user_id = users_to_examine.id
|
|
||||||
LEFT OUTER JOIN checklist_filled_counts ON checklist_filled_counts.user_id = users_to_examine.id
|
|
||||||
;
|
|
||||||
177
2026-01-02_query-03-sms-appts.sql
Normal file
177
2026-01-02_query-03-sms-appts.sql
Normal file
@ -0,0 +1,177 @@
|
|||||||
|
-- First establish some base parameters for our users
WITH config AS (
    SELECT
        -- For now we're using this as our free-trial filter. We'll treat users
        -- that churn during this period differently since they likely haven't actually
        -- tried the software very much. We'll also use this to isolate feature usage
        -- from this period, as it's likely that user goals are different at this stage.
        14 AS short_term,
        -- This is our ~day74 cutoff. For users, we want to see patterns of usage here
        -- that might differ from those who either churn later or hang around for a while.
        -- For feature aggregates, this should represent the "serious" efforts to use ZenMaid.
        90 AS medium_term,
        -- This is a (in theory) rarer group of churned users that lasted beyond our day 74
        -- danger zone but still churned.
        -- Beyond this we'll consider anyone remaining a "success" (even if they churned at some
        -- point) since things like company closures, etc. start to come into play more
        -- frequently.
        -- For features, we'll look at this as features used in the company's day-to-day, and not
        -- evaluation attempts.
        360 AS long_term,
        -- We'll only look back a few years since we want to make fairish comparisons between users
        -- exposed to "modern" ZenMaid tools.
        CAST('2023-01-01' AS timestamp) AS date_cutoff
),
-- Then establish a standard core set of user data
users_with_churn_stats AS (
    SELECT id, active, billing_state,
        DATE_TRUNC('month', created_at) AS month_created,
        created_at, free_trial_ends_at, updated_at,
        -- Days between signup and last recorded activity; drives the cohort buckets below.
        updated_at::date - created_at::date AS lifespan
    FROM users
    CROSS JOIN config
    -- We'll only look at newish users.
    WHERE created_at >= config.date_cutoff
),
-- Next use that core data to extract the records we want to analyze
-- and categorize them by date and status
users_to_examine AS (
    -- First the users who exited just after the trial
    SELECT 'quick-exit' AS category, uwcs.*
    FROM users_with_churn_stats uwcs
    CROSS JOIN config
    WHERE billing_state = 'CHURNED'
        AND lifespan <= config.short_term

    -- The four cohorts are disjoint by lifespan range, so UNION ALL is correct
    -- and avoids an unnecessary sort/dedup pass over every branch.
    UNION ALL

    -- Next, users who gave the tools some time but still left
    SELECT 'fair-trial' AS category, uwcs.*
    FROM users_with_churn_stats uwcs
    CROSS JOIN config
    WHERE billing_state = 'CHURNED'
        AND lifespan < config.medium_term
        AND lifespan > config.short_term

    UNION ALL

    -- Then some longer-time users who still churned.
    -- We'll cut this off after 6 months since afterwards
    -- company closures, etc. could factor in.
    SELECT 'short-termer' AS category, uwcs.*
    FROM users_with_churn_stats uwcs
    CROSS JOIN config
    WHERE billing_state = 'CHURNED'
        AND lifespan >= config.medium_term
        AND lifespan < config.long_term

    UNION ALL

    -- Finally, people who are still kicking.
    -- This cohort is going to be a little wrong for older
    -- dates since it only includes those who are STILL around.
    SELECT 'long-termer' AS category, uwcs.*
    FROM users_with_churn_stats uwcs
    CROSS JOIN config
    -- We could also filter this by the following if we wanted to look at current users:
    -- billing_state NOT IN ('CHURNED', 'ONBOARDING', 'UNKNOWN')
    -- ...but for a multi-year peek into history, we'll lose too many good users who left
    -- for other reasons like closures, etc.
    WHERE lifespan >= config.long_term
),
-- Messages sent by era
sms_counts AS (
    -- Establish a summary of records with ages (days since the user signed up)
    WITH summary AS (
        SELECT sent_texts.user_id,
            sent_texts.created_at::date - users.created_at::date AS created_age
        FROM config
        CROSS JOIN sent_texts
        JOIN users ON sent_texts.user_id = users.id
        WHERE sent_texts.created_at >= config.date_cutoff
            AND users.created_at >= config.date_cutoff
            -- Half-open upper bound so a day-long_term event can never land in
            -- `total` without also landing in an era bucket (bucket_counts below
            -- uses `created_age < config.long_term` for its last bucket).
            AND sent_texts.created_at < users.created_at::date + config.long_term
    ),
    -- Group by week for line charts
    weekly_counts AS (
        SELECT user_id,
            -- One JSON object per user: {"week_0": n, "week_1": n, ...}
            jsonb_object_agg(
                'week_' || weeks_in,
                weekly_count
            ) AS weekly_counts,
            -- NOTE: this is the number of *active weeks*, not total events;
            -- the flat total comes from bucket_counts instead.
            COUNT(*) AS total
        FROM (SELECT user_id, FLOOR(created_age / 7) AS weeks_in, COUNT(*) as weekly_count
              FROM summary
              GROUP BY user_id, weeks_in) by_weeks
        GROUP BY user_id
    ),
    -- Group by era for bar charts
    bucket_counts AS (
        SELECT user_id,
            SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
            SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
            SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
            COUNT(*) AS total
        FROM config
        CROSS JOIN summary
        GROUP BY user_id
    )
    -- Put it all together
    SELECT bucket_counts.*, weekly_counts.weekly_counts
    FROM bucket_counts
    JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
),
-- Appointments by era
appointment_counts AS (
    -- Establish a summary of records with ages (days since the user signed up)
    WITH summary AS (
        SELECT appointments.user_id,
            appointments.created_at::date - users.created_at::date AS created_age
        FROM config
        CROSS JOIN appointments
        JOIN users ON appointments.user_id = users.id
        WHERE appointments.created_at >= config.date_cutoff
            AND users.created_at >= config.date_cutoff
            -- Half-open upper bound; see sms_counts for the reasoning.
            AND appointments.created_at < users.created_at::date + config.long_term
    ),
    -- Group by week for line charts
    weekly_counts AS (
        SELECT user_id,
            jsonb_object_agg(
                'week_' || weeks_in,
                weekly_count
            ) AS weekly_counts,
            COUNT(*) AS total
        FROM (SELECT user_id, FLOOR(created_age / 7) AS weeks_in, COUNT(*) as weekly_count
              FROM summary
              GROUP BY user_id, weeks_in) by_weeks
        GROUP BY user_id
    ),
    -- Group by era for bar charts
    bucket_counts AS (
        SELECT user_id,
            SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
            SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
            SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
            COUNT(*) AS total
        FROM config
        CROSS JOIN summary
        GROUP BY user_id
    )
    -- Put it all together
    SELECT bucket_counts.*, weekly_counts.weekly_counts
    FROM bucket_counts
    JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
)
-- Finally, we'll flatten it all out into a repurposable flat table
SELECT users_to_examine.id,
    users_to_examine.active,
    users_to_examine.billing_state,
    users_to_examine.free_trial_ends_at,
    users_to_examine.category,
    users_to_examine.created_at,
    users_to_examine.month_created,
    users_to_examine.updated_at,
    users_to_examine.lifespan,
    -- COALESCE so users with no recorded activity report zeros / empty objects
    -- instead of NULLs after the left joins.
    COALESCE(sms_counts.short_term, 0) AS sms_short_term,
    COALESCE(sms_counts.medium_term, 0) AS sms_medium_term,
    COALESCE(sms_counts.long_term, 0) AS sms_long_term,
    COALESCE(sms_counts.total, 0) AS sms,
    COALESCE(sms_counts.weekly_counts, '{}'::jsonb) AS sms_weekly_counts,
    COALESCE(appointment_counts.short_term, 0) AS appointments_short_term,
    COALESCE(appointment_counts.medium_term, 0) AS appointments_medium_term,
    COALESCE(appointment_counts.long_term, 0) AS appointments_long_term,
    COALESCE(appointment_counts.total, 0) AS appointments,
    COALESCE(appointment_counts.weekly_counts, '{}'::jsonb) AS appointments_weekly_counts
FROM users_to_examine
LEFT JOIN sms_counts ON sms_counts.user_id = users_to_examine.id
LEFT JOIN appointment_counts ON appointment_counts.user_id = users_to_examine.id
;
|
||||||
180
2026-01-02_query-04-checklist.sql
Normal file
180
2026-01-02_query-04-checklist.sql
Normal file
@ -0,0 +1,180 @@
|
|||||||
|
-- First establish some base parameters for our users
WITH config AS (
    SELECT
        -- For now we're using this as our free-trial filter. We'll treat users
        -- that churn during this period differently since they likely haven't actually
        -- tried the software very much. We'll also use this to isolate feature usage
        -- from this period, as it's likely that user goals are different at this stage.
        14 AS short_term,
        -- This is our ~day74 cutoff. For users, we want to see patterns of usage here
        -- that might differ from those who either churn later or hang around for a while.
        -- For feature aggregates, this should represent the "serious" efforts to use ZenMaid.
        90 AS medium_term,
        -- This is a (in theory) rarer group of churned users that lasted beyond our day 74
        -- danger zone but still churned.
        -- Beyond this we'll consider anyone remaining a "success" (even if they churned at some
        -- point) since things like company closures, etc. start to come into play more
        -- frequently.
        -- For features, we'll look at this as features used in the company's day-to-day, and not
        -- evaluation attempts.
        360 AS long_term,
        -- We'll only look back a few years since we want to make fairish comparisons between users
        -- exposed to "modern" ZenMaid tools.
        CAST('2023-01-01' AS timestamp) AS date_cutoff
),
-- Then establish a standard core set of user data
users_with_churn_stats AS (
    SELECT id, active, billing_state,
        DATE_TRUNC('month', created_at) AS month_created,
        created_at, free_trial_ends_at, updated_at,
        -- Days between signup and last recorded activity; drives the cohort buckets below.
        updated_at::date - created_at::date AS lifespan
    FROM users
    CROSS JOIN config
    -- We'll only look at newish users.
    WHERE created_at >= config.date_cutoff
),
-- Next use that core data to extract the records we want to analyze
-- and categorize them by date and status
users_to_examine AS (
    -- First the users who exited just after the trial
    SELECT 'quick-exit' AS category, uwcs.*
    FROM users_with_churn_stats uwcs
    CROSS JOIN config
    WHERE billing_state = 'CHURNED'
        AND lifespan <= config.short_term

    -- The four cohorts are disjoint by lifespan range, so UNION ALL is correct
    -- and avoids an unnecessary sort/dedup pass over every branch.
    UNION ALL

    -- Next, users who gave the tools some time but still left
    SELECT 'fair-trial' AS category, uwcs.*
    FROM users_with_churn_stats uwcs
    CROSS JOIN config
    WHERE billing_state = 'CHURNED'
        AND lifespan < config.medium_term
        AND lifespan > config.short_term

    UNION ALL

    -- Then some longer-time users who still churned.
    -- We'll cut this off after 6 months since afterwards
    -- company closures, etc. could factor in.
    SELECT 'short-termer' AS category, uwcs.*
    FROM users_with_churn_stats uwcs
    CROSS JOIN config
    WHERE billing_state = 'CHURNED'
        AND lifespan >= config.medium_term
        AND lifespan < config.long_term

    UNION ALL

    -- Finally, people who are still kicking.
    -- This cohort is going to be a little wrong for older
    -- dates since it only includes those who are STILL around.
    SELECT 'long-termer' AS category, uwcs.*
    FROM users_with_churn_stats uwcs
    CROSS JOIN config
    -- We could also filter this by the following if we wanted to look at current users:
    -- billing_state NOT IN ('CHURNED', 'ONBOARDING', 'UNKNOWN')
    -- ...but for a multi-year peek into history, we'll lose too many good users who left
    -- for other reasons like closures, etc.
    WHERE lifespan >= config.long_term
),
-- Checklists created by era
checklist_counts AS (
    -- Establish a summary of records with ages (days since the user signed up).
    -- Checklists hang off templates, so the owning user comes via checklist_templates.
    WITH summary AS (
        SELECT checklist_templates.user_id,
            checklists.created_at::date - users.created_at::date AS created_age
        FROM config
        CROSS JOIN checklist_templates
        JOIN users ON checklist_templates.user_id = users.id
        JOIN checklists ON checklists.checklist_template_id = checklist_templates.id
        WHERE checklists.created_at >= config.date_cutoff
            AND users.created_at >= config.date_cutoff
            -- Half-open upper bound so a day-long_term event can never land in
            -- `total` without also landing in an era bucket (bucket_counts below
            -- uses `created_age < config.long_term` for its last bucket).
            AND checklists.created_at < users.created_at::date + config.long_term
    ),
    -- Group by week for line charts
    weekly_counts AS (
        SELECT user_id,
            -- One JSON object per user: {"week_0": n, "week_1": n, ...}
            jsonb_object_agg(
                'week_' || weeks_in,
                weekly_count
            ) AS weekly_counts,
            -- NOTE: this is the number of *active weeks*, not total events;
            -- the flat total comes from bucket_counts instead.
            COUNT(*) AS total
        FROM (SELECT user_id, FLOOR(created_age / 7) AS weeks_in, COUNT(*) as weekly_count
              FROM summary
              GROUP BY user_id, weeks_in) by_weeks
        GROUP BY user_id
    ),
    -- Group by era for bar charts
    bucket_counts AS (
        SELECT user_id,
            SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
            SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
            SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
            COUNT(*) AS total
        FROM config
        CROSS JOIN summary
        GROUP BY user_id
    )
    -- Put it all together
    SELECT bucket_counts.*, weekly_counts.weekly_counts
    FROM bucket_counts
    JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
),
-- Checklist events created by era
checklist_filled_counts AS (
    -- Establish a summary of records with ages.
    -- Item events hang off employees, so the owning user comes via employees.
    WITH summary AS (
        SELECT employees.user_id,
            checklist_item_events.created_at::date - users.created_at::date AS created_age
        FROM config
        CROSS JOIN employees
        JOIN users ON employees.user_id = users.id
        JOIN checklist_item_events ON checklist_item_events.employee_id = employees.id
        WHERE checklist_item_events.created_at >= config.date_cutoff
            AND users.created_at >= config.date_cutoff
            -- Half-open upper bound; see checklist_counts for the reasoning.
            AND checklist_item_events.created_at < users.created_at::date + config.long_term
    ),
    -- Group by week for line charts
    weekly_counts AS (
        SELECT user_id,
            jsonb_object_agg(
                'week_' || weeks_in,
                weekly_count
            ) AS weekly_counts,
            COUNT(*) AS total
        FROM (SELECT user_id, FLOOR(created_age / 7) AS weeks_in, COUNT(*) as weekly_count
              FROM summary
              GROUP BY user_id, weeks_in) by_weeks
        GROUP BY user_id
    ),
    -- Group by era for bar charts
    bucket_counts AS (
        SELECT user_id,
            SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
            SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
            SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
            COUNT(*) AS total
        FROM config
        CROSS JOIN summary
        GROUP BY user_id
    )
    -- Put it all together
    SELECT bucket_counts.*, weekly_counts.weekly_counts
    FROM bucket_counts
    JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
)
-- Finally, we'll flatten it all out into a repurposable flat table
SELECT users_to_examine.id,
    users_to_examine.active,
    users_to_examine.billing_state,
    users_to_examine.free_trial_ends_at,
    users_to_examine.category,
    users_to_examine.created_at,
    users_to_examine.month_created,
    users_to_examine.updated_at,
    users_to_examine.lifespan,
    -- COALESCE so users with no recorded activity report zeros / empty objects
    -- instead of NULLs after the left joins.
    COALESCE(checklist_counts.short_term, 0) AS checklists_short_term,
    COALESCE(checklist_counts.medium_term, 0) AS checklists_medium_term,
    COALESCE(checklist_counts.long_term, 0) AS checklists_long_term,
    COALESCE(checklist_counts.total, 0) AS checklists,
    COALESCE(checklist_counts.weekly_counts, '{}'::jsonb) AS checklists_weekly_counts,
    COALESCE(checklist_filled_counts.short_term, 0) AS checklists_filled_short_term,
    COALESCE(checklist_filled_counts.medium_term, 0) AS checklists_filled_medium_term,
    COALESCE(checklist_filled_counts.long_term, 0) AS checklists_filled_long_term,
    COALESCE(checklist_filled_counts.total, 0) AS checklists_filled,
    COALESCE(checklist_filled_counts.weekly_counts, '{}'::jsonb) AS checklists_filled_weekly_counts
FROM users_to_examine
LEFT JOIN checklist_counts ON checklist_counts.user_id = users_to_examine.id
LEFT JOIN checklist_filled_counts ON checklist_filled_counts.user_id = users_to_examine.id
;
|
||||||
502
2026-01-02_query.sql
Normal file
502
2026-01-02_query.sql
Normal file
@ -0,0 +1,502 @@
|
|||||||
|
-- First establish some base parameters for our users
|
||||||
|
WITH config AS (
|
||||||
|
SELECT
|
||||||
|
-- For now we're using this as our free-trial filter. We'll treat users
|
||||||
|
-- that churn during this period differently since they likely haven't actually
|
||||||
|
-- tried the software very much. We'll also use this to isolate feature usage
|
||||||
|
-- from this period, as it's likely that user goals are different at this stage.
|
||||||
|
14 AS short_term,
|
||||||
|
-- This is our ~day74 cutoff. For users, we want to see patterns of usage here
|
||||||
|
-- that might differ from those who either churn later or hang around for a while.
|
||||||
|
-- For feature aggregates, this should represent the "serious" efforts to use ZenMaid.
|
||||||
|
90 AS medium_term,
|
||||||
|
-- This is a (in theory) rarer group of churned users that lasted beyond our day 74
|
||||||
|
-- danger zone but still churned.
|
||||||
|
-- Beyond this we'll consider anyone remaining a "success" (even if they churned at some
|
||||||
|
-- point) since things like company closures, etc. start to come into play more
|
||||||
|
-- frequently.
|
||||||
|
-- For features, we'll look at this as features used in the company's day-to-day, and not
|
||||||
|
-- evaluation attempts.
|
||||||
|
360 AS long_term,
|
||||||
|
-- We'll only look back a few years since we want to make fairish comparisons betwen users
|
||||||
|
-- exposed to "modern" ZenMaid tools.
|
||||||
|
CAST('2023-01-01' AS timestamp) AS date_cutoff
|
||||||
|
),
|
||||||
|
-- Then establish a standard core set of user data
|
||||||
|
users_with_churn_stats AS (
|
||||||
|
SELECT id, active, billing_state,
|
||||||
|
DATE_TRUNC('month', created_at) AS month_created,
|
||||||
|
created_at, free_trial_ends_at, updated_at,
|
||||||
|
updated_at::date - created_at::date AS lifespan
|
||||||
|
FROM users, config
|
||||||
|
-- We'll only look at newish users.
|
||||||
|
WHERE created_at >= config.date_cutoff
|
||||||
|
),
|
||||||
|
-- Next use that core data to extract the records we want to analyze
|
||||||
|
-- and categorize them by date and status
|
||||||
|
users_to_examine AS (
|
||||||
|
-- First the users who exited just after the trial
|
||||||
|
SELECT 'quick-exit' AS category, uwcs.*
|
||||||
|
FROM users_with_churn_stats uwcs, config
|
||||||
|
WHERE billing_state = 'CHURNED'
|
||||||
|
AND lifespan <= config.short_term
|
||||||
|
|
||||||
|
UNION
|
||||||
|
|
||||||
|
-- Next, users who gave the tools some time but still left
|
||||||
|
SELECT 'fair-trial' AS category, uwcs.*
|
||||||
|
FROM users_with_churn_stats uwcs, config
|
||||||
|
WHERE billing_state = 'CHURNED'
|
||||||
|
AND lifespan < config.medium_term
|
||||||
|
AND lifespan > config.short_term
|
||||||
|
|
||||||
|
UNION
|
||||||
|
|
||||||
|
-- Then some longer-time users who still churned.
|
||||||
|
-- We'll cut this off after 6 months since afterwards
|
||||||
|
-- company closures, etc. could factor in.
|
||||||
|
|
||||||
|
SELECT 'short-termer' AS category, uwcs.*
|
||||||
|
FROM users_with_churn_stats uwcs, config
|
||||||
|
WHERE billing_state = 'CHURNED'
|
||||||
|
AND lifespan >= config.medium_term
|
||||||
|
AND lifespan < config.long_term
|
||||||
|
|
||||||
|
UNION
|
||||||
|
|
||||||
|
-- Finally, people who are still kicking.
|
||||||
|
-- This cohort is going to be a little wrong for older
|
||||||
|
-- dates since it only includes those who are STILL around.
|
||||||
|
SELECT 'long-termer' AS category, uwcs.*
|
||||||
|
FROM users_with_churn_stats uwcs, config
|
||||||
|
-- We could also filter this by the following if we wanted to look at current users:
|
||||||
|
-- billing_state NOT IN ('CHURNED', 'ONBOARDING', 'UNKNOWN')
|
||||||
|
-- ...but for a multi-year peek into history, we'll lose too many good users who left
|
||||||
|
-- for other reasons like closures, etc.
|
||||||
|
WHERE lifespan >= config.long_term
|
||||||
|
),
|
||||||
|
|
||||||
|
-- Now we'll start pulling counts from our various core features to help check usage.
|
||||||
|
-- We'll ALSO bucket these by the same eras we're using to split up our users.
|
||||||
|
-- For example, things created in the short_term are probably less serious integration
|
||||||
|
-- efforts - users are just poking around at the software to see if they want to give it a
|
||||||
|
-- serious try.
|
||||||
|
-- Medium-term (up to our day 74ish cutoff) is when users REALLY decide if they want to build
|
||||||
|
-- a business around ZenMaid.
|
||||||
|
-- Long-term should represent users who are actively using ZenMaid for their day-to-day operations.
|
||||||
|
|
||||||
|
-- Well add booking forms created by era
|
||||||
|
booking_form_counts AS (
|
||||||
|
-- Establish a summary of records with ages
|
||||||
|
WITH summary AS (
|
||||||
|
SELECT booking_forms.user_id, booking_forms.created_at::date - users.created_at::date AS created_age
|
||||||
|
FROM config, booking_forms
|
||||||
|
JOIN users ON booking_forms.user_id = users.id
|
||||||
|
WHERE booking_forms.created_at >= config.date_cutoff
|
||||||
|
AND users.created_at >= config.date_cutoff
|
||||||
|
AND booking_forms.created_at <= users.created_at::date + config.long_term
|
||||||
|
),
|
||||||
|
-- Group by week for line charts
|
||||||
|
weekly_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
jsonb_object_agg(
|
||||||
|
'week_' || weeks_in,
|
||||||
|
weekly_count
|
||||||
|
) AS weekly_counts,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM (SELECT user_id, FLOOR(created_age / 7) AS weeks_in, COUNT(*) as weekly_count
|
||||||
|
FROM summary
|
||||||
|
GROUP BY user_id, weeks_in) by_weeks
|
||||||
|
GROUP BY user_id
|
||||||
|
),
|
||||||
|
-- Group by era for bar charts
|
||||||
|
bucket_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM config, summary
|
||||||
|
GROUP BY user_id
|
||||||
|
)
|
||||||
|
-- Put it all together
|
||||||
|
SELECT bucket_counts.*, weekly_counts.weekly_counts
|
||||||
|
FROM bucket_counts JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
|
||||||
|
),
|
||||||
|
-- Bookings made by era
|
||||||
|
booking_counts AS (
|
||||||
|
-- Establish a summary of records with ages
|
||||||
|
WITH summary AS (
|
||||||
|
SELECT booking_forms.user_id, bookings.created_at::date - users.created_at::date AS created_age
|
||||||
|
FROM config,
|
||||||
|
booking_forms
|
||||||
|
JOIN users ON booking_forms.user_id = users.id
|
||||||
|
JOIN bookings ON bookings.booking_form_id = booking_forms.id
|
||||||
|
WHERE booking_forms.created_at >= config.date_cutoff
|
||||||
|
AND bookings.created_at >= config.date_cutoff
|
||||||
|
AND users.created_at >= config.date_cutoff
|
||||||
|
),
|
||||||
|
-- Group by week for line charts
|
||||||
|
weekly_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
jsonb_object_agg(
|
||||||
|
'week_' || weeks_in,
|
||||||
|
weekly_count
|
||||||
|
) AS weekly_counts,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM (
|
||||||
|
SELECT user_id, FLOOR(created_age / 7) AS weeks_in, COUNT(*) as weekly_count
|
||||||
|
FROM summary
|
||||||
|
GROUP BY user_id, weeks_in) by_weeks
|
||||||
|
GROUP BY user_id
|
||||||
|
),
|
||||||
|
-- Group by era for bar charts
|
||||||
|
bucket_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM config, summary
|
||||||
|
GROUP BY user_id
|
||||||
|
)
|
||||||
|
-- Put it all together
|
||||||
|
SELECT bucket_counts.*, weekly_counts.weekly_counts
|
||||||
|
FROM bucket_counts
|
||||||
|
JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
|
||||||
|
),
|
||||||
|
-- Employees added by era
|
||||||
|
employee_counts AS (
|
||||||
|
-- Establish a summary of records with ages
|
||||||
|
WITH summary AS (
|
||||||
|
SELECT employees.user_id, employees.created_at::date - users.created_at::date AS created_age
|
||||||
|
FROM config, employees
|
||||||
|
JOIN users ON employees.user_id = users.id
|
||||||
|
WHERE employees.created_at >= config.date_cutoff
|
||||||
|
AND users.created_at >= config.date_cutoff
|
||||||
|
AND employees.created_at <= users.created_at::date + config.long_term
|
||||||
|
),
|
||||||
|
-- Group by week for line charts
|
||||||
|
weekly_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
jsonb_object_agg(
|
||||||
|
'week_' || weeks_in,
|
||||||
|
weekly_count
|
||||||
|
) AS weekly_counts,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM (SELECT user_id, FLOOR(created_age / 7) AS weeks_in, COUNT(*) as weekly_count
|
||||||
|
FROM summary
|
||||||
|
GROUP BY user_id, weeks_in) by_weeks
|
||||||
|
GROUP BY user_id
|
||||||
|
),
|
||||||
|
-- Group by era for bar charts
|
||||||
|
bucket_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM config, summary
|
||||||
|
GROUP BY user_id
|
||||||
|
)
|
||||||
|
-- Put it all together
|
||||||
|
SELECT bucket_counts.*, weekly_counts.weekly_counts
|
||||||
|
FROM bucket_counts JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
|
||||||
|
),
|
||||||
|
-- Contacts created by era
|
||||||
|
contact_counts AS (
|
||||||
|
-- Establish a summary of records with ages
|
||||||
|
WITH summary AS (
|
||||||
|
SELECT customers.user_id, customers.created_at::date - users.created_at::date AS created_age
|
||||||
|
FROM config, customers
|
||||||
|
JOIN users ON customers.user_id = users.id
|
||||||
|
WHERE customers.created_at >= config.date_cutoff
|
||||||
|
AND users.created_at >= config.date_cutoff
|
||||||
|
AND customers.created_at <= users.created_at::date + config.long_term
|
||||||
|
),
|
||||||
|
-- Group by week for line charts
|
||||||
|
weekly_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
jsonb_object_agg(
|
||||||
|
'week_' || weeks_in,
|
||||||
|
weekly_count
|
||||||
|
) AS weekly_counts,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM (SELECT user_id, FLOOR(created_age / 7) AS weeks_in, COUNT(*) as weekly_count
|
||||||
|
FROM summary
|
||||||
|
GROUP BY user_id, weeks_in) by_weeks
|
||||||
|
GROUP BY user_id
|
||||||
|
),
|
||||||
|
-- Group by era for bar charts
|
||||||
|
bucket_counts AS (
|
||||||
|
SELECT user_id,
|
||||||
|
SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
|
||||||
|
SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
|
||||||
|
COUNT(*) AS total
|
||||||
|
FROM config, summary
|
||||||
|
GROUP BY user_id
|
||||||
|
)
|
||||||
|
-- Put it all together
|
||||||
|
SELECT bucket_counts.*, weekly_counts.weekly_counts
|
||||||
|
FROM bucket_counts JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
|
||||||
|
),
|
||||||
|
-- Emails sent by era
email_counts AS (
  -- Establish a summary of (user_id, account-age-in-days) pairs: one row per
  -- email sent within the user's first long_term days.
  WITH summary AS (
    SELECT sent_emails.user_id,
           sent_emails.created_at::date - users.created_at::date AS created_age
    FROM sent_emails
    JOIN users ON sent_emails.user_id = users.id
    CROSS JOIN config
    WHERE sent_emails.created_at >= config.date_cutoff
      AND users.created_at >= config.date_cutoff
      -- Half-open upper bound (< rather than <=): ages run 0..long_term-1, so
      -- every row counted in `total` also lands in exactly one era bucket below.
      AND sent_emails.created_at < users.created_at::date + config.long_term
  ),
  -- Group by week for line charts: per user, a jsonb object mapping
  -- 'week_<n>' to the number of emails sent in week n of the user's life.
  weekly_counts AS (
    SELECT user_id,
           jsonb_object_agg('week_' || weeks_in, weekly_count) AS weekly_counts,
           COUNT(*) AS total
    FROM (
      SELECT user_id,
             FLOOR(created_age / 7) AS weeks_in,
             COUNT(*) AS weekly_count
      FROM summary
      GROUP BY user_id, weeks_in
    ) by_weeks
    GROUP BY user_id
  ),
  -- Group by era for bar charts: per-user counts for each age bucket.
  bucket_counts AS (
    SELECT user_id,
           SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
           SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
           SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
           COUNT(*) AS total
    FROM summary
    CROSS JOIN config
    GROUP BY user_id
  )
  -- Put it all together: era buckets plus the weekly jsonb breakdown.
  SELECT bucket_counts.*, weekly_counts.weekly_counts
  FROM bucket_counts
  JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
),
|
||||||
|
-- Messages sent by era
sms_counts AS (
  -- Establish a summary of (user_id, account-age-in-days) pairs: one row per
  -- text message sent within the user's first long_term days.
  WITH summary AS (
    SELECT sent_texts.user_id,
           sent_texts.created_at::date - users.created_at::date AS created_age
    FROM sent_texts
    JOIN users ON sent_texts.user_id = users.id
    CROSS JOIN config
    WHERE sent_texts.created_at >= config.date_cutoff
      AND users.created_at >= config.date_cutoff
      -- Half-open upper bound (< rather than <=): ages run 0..long_term-1, so
      -- every row counted in `total` also lands in exactly one era bucket below.
      AND sent_texts.created_at < users.created_at::date + config.long_term
  ),
  -- Group by week for line charts: per user, a jsonb object mapping
  -- 'week_<n>' to the number of texts sent in week n of the user's life.
  weekly_counts AS (
    SELECT user_id,
           jsonb_object_agg('week_' || weeks_in, weekly_count) AS weekly_counts,
           COUNT(*) AS total
    FROM (
      SELECT user_id,
             FLOOR(created_age / 7) AS weeks_in,
             COUNT(*) AS weekly_count
      FROM summary
      GROUP BY user_id, weeks_in
    ) by_weeks
    GROUP BY user_id
  ),
  -- Group by era for bar charts: per-user counts for each age bucket.
  bucket_counts AS (
    SELECT user_id,
           SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
           SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
           SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
           COUNT(*) AS total
    FROM summary
    CROSS JOIN config
    GROUP BY user_id
  )
  -- Put it all together: era buckets plus the weekly jsonb breakdown.
  SELECT bucket_counts.*, weekly_counts.weekly_counts
  FROM bucket_counts
  JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
),
|
||||||
|
-- Appointments by era
appointment_counts AS (
  -- Establish a summary of (user_id, account-age-in-days) pairs: one row per
  -- appointment created within the user's first long_term days.
  WITH summary AS (
    SELECT appointments.user_id,
           appointments.created_at::date - users.created_at::date AS created_age
    FROM appointments
    JOIN users ON appointments.user_id = users.id
    CROSS JOIN config
    WHERE appointments.created_at >= config.date_cutoff
      AND users.created_at >= config.date_cutoff
      -- Half-open upper bound (< rather than <=): ages run 0..long_term-1, so
      -- every row counted in `total` also lands in exactly one era bucket below.
      AND appointments.created_at < users.created_at::date + config.long_term
  ),
  -- Group by week for line charts: per user, a jsonb object mapping
  -- 'week_<n>' to the number of appointments created in week n of the user's life.
  weekly_counts AS (
    SELECT user_id,
           jsonb_object_agg('week_' || weeks_in, weekly_count) AS weekly_counts,
           COUNT(*) AS total
    FROM (
      SELECT user_id,
             FLOOR(created_age / 7) AS weeks_in,
             COUNT(*) AS weekly_count
      FROM summary
      GROUP BY user_id, weeks_in
    ) by_weeks
    GROUP BY user_id
  ),
  -- Group by era for bar charts: per-user counts for each age bucket.
  bucket_counts AS (
    SELECT user_id,
           SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
           SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
           SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
           COUNT(*) AS total
    FROM summary
    CROSS JOIN config
    GROUP BY user_id
  )
  -- Put it all together: era buckets plus the weekly jsonb breakdown.
  SELECT bucket_counts.*, weekly_counts.weekly_counts
  FROM bucket_counts
  JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
),
|
||||||
|
-- Checklists created by era
checklist_counts AS (
  -- Establish a summary of (user_id, account-age-in-days) pairs: one row per
  -- checklist created (via the owning template) within the user's first
  -- long_term days. Ages are measured from the checklist's creation date.
  WITH summary AS (
    SELECT checklist_templates.user_id,
           checklists.created_at::date - users.created_at::date AS created_age
    FROM checklist_templates
    JOIN users ON checklist_templates.user_id = users.id
    JOIN checklists ON checklists.checklist_template_id = checklist_templates.id
    CROSS JOIN config
    WHERE checklists.created_at >= config.date_cutoff
      AND users.created_at >= config.date_cutoff
      -- Half-open upper bound (< rather than <=): ages run 0..long_term-1, so
      -- every row counted in `total` also lands in exactly one era bucket below.
      AND checklists.created_at < users.created_at::date + config.long_term
  ),
  -- Group by week for line charts: per user, a jsonb object mapping
  -- 'week_<n>' to the number of checklists created in week n of the user's life.
  weekly_counts AS (
    SELECT user_id,
           jsonb_object_agg('week_' || weeks_in, weekly_count) AS weekly_counts,
           COUNT(*) AS total
    FROM (
      SELECT user_id,
             FLOOR(created_age / 7) AS weeks_in,
             COUNT(*) AS weekly_count
      FROM summary
      GROUP BY user_id, weeks_in
    ) by_weeks
    GROUP BY user_id
  ),
  -- Group by era for bar charts: per-user counts for each age bucket.
  bucket_counts AS (
    SELECT user_id,
           SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
           SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
           SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
           COUNT(*) AS total
    FROM summary
    CROSS JOIN config
    GROUP BY user_id
  )
  -- Put it all together: era buckets plus the weekly jsonb breakdown.
  SELECT bucket_counts.*, weekly_counts.weekly_counts
  FROM bucket_counts
  JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
),
|
||||||
|
-- Checklist events created by era
checklist_filled_counts AS (
  -- Establish a summary of (user_id, account-age-in-days) pairs: one row per
  -- checklist item event recorded by the user's employees within the user's
  -- first long_term days.
  WITH summary AS (
    SELECT employees.user_id,
           checklist_item_events.created_at::date - users.created_at::date AS created_age
    FROM employees
    JOIN users ON employees.user_id = users.id
    JOIN checklist_item_events ON checklist_item_events.employee_id = employees.id
    CROSS JOIN config
    WHERE checklist_item_events.created_at >= config.date_cutoff
      AND users.created_at >= config.date_cutoff
      -- Half-open upper bound (< rather than <=): ages run 0..long_term-1, so
      -- every row counted in `total` also lands in exactly one era bucket below.
      AND checklist_item_events.created_at < users.created_at::date + config.long_term
  ),
  -- Group by week for line charts: per user, a jsonb object mapping
  -- 'week_<n>' to the number of item events in week n of the user's life.
  weekly_counts AS (
    SELECT user_id,
           jsonb_object_agg('week_' || weeks_in, weekly_count) AS weekly_counts,
           COUNT(*) AS total
    FROM (
      SELECT user_id,
             FLOOR(created_age / 7) AS weeks_in,
             COUNT(*) AS weekly_count
      FROM summary
      GROUP BY user_id, weeks_in
    ) by_weeks
    GROUP BY user_id
  ),
  -- Group by era for bar charts: per-user counts for each age bucket.
  bucket_counts AS (
    SELECT user_id,
           SUM(CASE WHEN created_age < config.short_term THEN 1 ELSE 0 END) AS short_term,
           SUM(CASE WHEN created_age >= config.short_term AND created_age < config.medium_term THEN 1 ELSE 0 END) AS medium_term,
           SUM(CASE WHEN created_age >= config.medium_term AND created_age < config.long_term THEN 1 ELSE 0 END) AS long_term,
           COUNT(*) AS total
    FROM summary
    CROSS JOIN config
    GROUP BY user_id
  )
  -- Put it all together: era buckets plus the weekly jsonb breakdown.
  SELECT bucket_counts.*, weekly_counts.weekly_counts
  FROM bucket_counts
  JOIN weekly_counts ON weekly_counts.user_id = bucket_counts.user_id
)
|
||||||
|
-- Finally, we'll flatten it all out into a repurposable flat table:
-- one row per examined user, carrying the user's profile columns plus, for
-- each feature, the three era buckets, the lifetime total, and the jsonb
-- weekly breakdown. Feature CTEs only produce rows for users with activity,
-- so every join is LEFT and every metric is COALESCEd to 0 / empty jsonb.
SELECT users_to_examine.id,
       users_to_examine.active,
       users_to_examine.billing_state,
       users_to_examine.free_trial_ends_at,
       users_to_examine.category,
       users_to_examine.created_at,
       users_to_examine.month_created,
       users_to_examine.updated_at,
       users_to_examine.lifespan,
       -- Booking forms
       COALESCE(booking_form_counts.short_term, 0) AS booking_forms_short_term,
       COALESCE(booking_form_counts.medium_term, 0) AS booking_forms_medium_term,
       COALESCE(booking_form_counts.long_term, 0) AS booking_forms_long_term,
       COALESCE(booking_form_counts.total, 0) AS booking_forms,
       COALESCE(booking_form_counts.weekly_counts, '{}'::jsonb) AS booking_forms_weekly_counts,
       -- Bookings
       COALESCE(booking_counts.short_term, 0) AS bookings_short_term,
       COALESCE(booking_counts.medium_term, 0) AS bookings_medium_term,
       COALESCE(booking_counts.long_term, 0) AS bookings_long_term,
       COALESCE(booking_counts.total, 0) AS bookings,
       COALESCE(booking_counts.weekly_counts, '{}'::jsonb) AS bookings_weekly_counts,
       -- Employees
       COALESCE(employee_counts.short_term, 0) AS employees_short_term,
       COALESCE(employee_counts.medium_term, 0) AS employees_medium_term,
       COALESCE(employee_counts.long_term, 0) AS employees_long_term,
       COALESCE(employee_counts.total, 0) AS employees,
       -- NOTE(review): alias breaks the "<plural>_weekly_counts" pattern used by
       -- every other feature; kept as-is for downstream compatibility.
       COALESCE(employee_counts.weekly_counts, '{}'::jsonb) AS employee_weekly_counts,
       -- Contacts
       COALESCE(contact_counts.short_term, 0) AS contacts_short_term,
       COALESCE(contact_counts.medium_term, 0) AS contacts_medium_term,
       COALESCE(contact_counts.long_term, 0) AS contacts_long_term,
       COALESCE(contact_counts.total, 0) AS contacts,
       COALESCE(contact_counts.weekly_counts, '{}'::jsonb) AS contacts_weekly_counts,
       -- Emails
       COALESCE(email_counts.short_term, 0) AS emails_short_term,
       COALESCE(email_counts.medium_term, 0) AS emails_medium_term,
       COALESCE(email_counts.long_term, 0) AS emails_long_term,
       COALESCE(email_counts.total, 0) AS emails,
       COALESCE(email_counts.weekly_counts, '{}'::jsonb) AS emails_weekly_counts,
       -- Text messages
       COALESCE(sms_counts.short_term, 0) AS sms_short_term,
       COALESCE(sms_counts.medium_term, 0) AS sms_medium_term,
       COALESCE(sms_counts.long_term, 0) AS sms_long_term,
       COALESCE(sms_counts.total, 0) AS sms,
       COALESCE(sms_counts.weekly_counts, '{}'::jsonb) AS sms_weekly_counts,
       -- Appointments
       COALESCE(appointment_counts.short_term, 0) AS appointments_short_term,
       COALESCE(appointment_counts.medium_term, 0) AS appointments_medium_term,
       COALESCE(appointment_counts.long_term, 0) AS appointments_long_term,
       COALESCE(appointment_counts.total, 0) AS appointments,
       COALESCE(appointment_counts.weekly_counts, '{}'::jsonb) AS appointments_weekly_counts,
       -- Checklists created
       COALESCE(checklist_counts.short_term, 0) AS checklists_short_term,
       COALESCE(checklist_counts.medium_term, 0) AS checklists_medium_term,
       COALESCE(checklist_counts.long_term, 0) AS checklists_long_term,
       COALESCE(checklist_counts.total, 0) AS checklists,
       COALESCE(checklist_counts.weekly_counts, '{}'::jsonb) AS checklists_weekly_counts,
       -- Checklists filled out (item events)
       COALESCE(checklist_filled_counts.short_term, 0) AS checklists_filled_short_term,
       COALESCE(checklist_filled_counts.medium_term, 0) AS checklists_filled_medium_term,
       COALESCE(checklist_filled_counts.long_term, 0) AS checklists_filled_long_term,
       COALESCE(checklist_filled_counts.total, 0) AS checklists_filled,
       COALESCE(checklist_filled_counts.weekly_counts, '{}'::jsonb) AS checklists_filled_weekly_counts
FROM users_to_examine
LEFT JOIN booking_form_counts ON booking_form_counts.user_id = users_to_examine.id
LEFT JOIN booking_counts ON booking_counts.user_id = users_to_examine.id
LEFT JOIN employee_counts ON employee_counts.user_id = users_to_examine.id
LEFT JOIN contact_counts ON contact_counts.user_id = users_to_examine.id
LEFT JOIN email_counts ON email_counts.user_id = users_to_examine.id
LEFT JOIN sms_counts ON sms_counts.user_id = users_to_examine.id
LEFT JOIN appointment_counts ON appointment_counts.user_id = users_to_examine.id
LEFT JOIN checklist_counts ON checklist_counts.user_id = users_to_examine.id
LEFT JOIN checklist_filled_counts ON checklist_filled_counts.user_id = users_to_examine.id
;
|
||||||
@ -13,3 +13,9 @@ poetry run jupyter notebook
|
|||||||
# or
|
# or
|
||||||
poetry run jupyter lab
|
poetry run jupyter lab
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Export to PDF
|
||||||
|
|
||||||
|
```bash
|
||||||
|
poetry run jupyter nbconvert --to pdf churn-analysis.ipynb --no-input
|
||||||
|
```
|
||||||
|
|||||||
8979
churn-analysis.ipynb
8979
churn-analysis.ipynb
File diff suppressed because one or more lines are too long
2
poetry.lock
generated
2
poetry.lock
generated
@ -2576,4 +2576,4 @@ files = [
|
|||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "2.1"
|
lock-version = "2.1"
|
||||||
python-versions = "^3.13"
|
python-versions = "^3.13"
|
||||||
content-hash = "85e5d41ce436d80a788baf7d9bab8c00d0e9f7957eebb0b219e4146125aea4db"
|
content-hash = "875e088d07955f818673c7fd9992a9bbc85de3e6198acb78d09bfc9cfed0b821"
|
||||||
|
|||||||
@ -10,6 +10,7 @@ python = "^3.13"
|
|||||||
jupyter = "^1.1.1"
|
jupyter = "^1.1.1"
|
||||||
pandas = "^3.0.0"
|
pandas = "^3.0.0"
|
||||||
plotly = "^6.0.0"
|
plotly = "^6.0.0"
|
||||||
|
nbconvert = "^7.17.0"
|
||||||
|
|
||||||
[build-system]
|
[build-system]
|
||||||
requires = ["poetry-core>=2.0.0,<3.0.0"]
|
requires = ["poetry-core>=2.0.0,<3.0.0"]
|
||||||
|
|||||||
Reference in New Issue
Block a user