diff --git a/.rubocop.yml b/.rubocop.yml index f9d86d4a..8960ee8b 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -1,6 +1,10 @@ # Omakase Ruby styling for Rails inherit_gem: { rubocop-rails-omakase: rubocop.yml } +# this is so annoying, i'm sorry +Layout/TrailingWhitespace: + Enabled: false + # Overwrite or add rules to create your own house style # # # Use `[a, [b, c]]` not `[ a, [ b, c ] ]` diff --git a/app/controllers/users_controller.rb b/app/controllers/users_controller.rb index 7faf7dab..b18462db 100644 --- a/app/controllers/users_controller.rb +++ b/app/controllers/users_controller.rb @@ -71,6 +71,12 @@ def migrate_heartbeats notice: "Heartbeats & api keys migration started" end + def migrate_wakatimecom_heartbeats + OneTime::MigrateWakatimecomHeartbeatsJob.perform_later(@user.id) + redirect_to is_own_settings? ? my_settings_path : settings_user_path(@user), + notice: "Wakatime.com heartbeats migration started" + end + def wakatime_setup api_key = current_user&.api_keys&.last api_key ||= current_user.api_keys.create!(name: "Wakatime API Key") @@ -132,6 +138,6 @@ def is_own_settings? 
require "fileutils"
require "open-uri"

# One-off job that imports a user's complete wakatime.com heartbeat history.
#
# Flow:
#   1. Ask wakatime.com for an existing "heartbeats" data dump (or create one).
#   2. Poll until the dump is ready, then download it to local storage.
#   3. Resolve machine names and user agents (editor / OS) via the API.
#   4. Bulk-insert every heartbeat we don't already have locally.
class OneTime::MigrateWakatimecomHeartbeatsJob < ApplicationJob
  queue_as :default

  include GoodJob::ActiveJobExtensions::Concurrency

  # only allow one instance of this job per user at a time
  good_job_control_concurrency_with(
    key: -> { "migrate_wakatimecom_heartbeats_job_#{arguments.first}" },
    total_limit: 1,
  )

  WAKATIME_API_BASE = "https://api.wakatime.com/api/v1".freeze
  # Stop polling for the dump after this many 5-second attempts (~1 hour)
  # instead of spinning forever if wakatime.com never finishes the export.
  MAX_DUMP_POLL_ATTEMPTS = 720

  # @param user_id [Integer] id of the User whose wakatime.com history to import
  def perform(user_id)
    @user = User.find(user_id)
    # The API key lives on the user's wakatime.com mirror record.
    @api_key = WakatimeMirror.find_by(endpoint_url: "https://wakatime.com/api/v1", user_id: @user.id)&.encrypted_api_key
    import_heartbeats
  end

  private

  def import_heartbeats
    log "starting wakatime.com heartbeats import for user #{@user.id}"

    # Check the dump status once up front: if a completed dump (or a local
    # copy of the JSON) already exists, skip the slow dump-creation step.
    dump = get_dumps

    if dump["status"] != "Completed" && !wakatime_json_exists?
      create_dump
      attempts = 0
      loop do
        sleep 5
        dump = get_dumps
        log "wakatime.com import for #{@user.id} is at #{dump['percent_complete']}%"
        break if dump["status"] == "Completed"

        attempts += 1
        raise "wakatime.com dump for user #{@user.id} did not complete in time" if attempts >= MAX_DUMP_POLL_ATTEMPTS
      end
    end

    output_path = download(dump)
    machines = get_machines
    agents = get_agents

    # Dedup set of heartbeats already stored for this user so re-runs never
    # insert the same heartbeat twice.
    existing_heartbeats = Heartbeat.where(user_id: @user.id)
                                   .select(:entity, :type, :project, :branch, :language, :time)
                                   .map { |h| generate_dedup_key(h.entity, h.type, h.project, h.branch, h.language, h.time) }
                                   .to_set

    # NOTE: this loads the entire dump into memory; fine for typical accounts,
    # but could be very large for long coding histories.
    days = JSON.parse(File.read(output_path))["days"].select { |day| day["heartbeats"].any? }
    log "found #{days.size} days with heartbeats"

    heartbeats_to_insert = []
    days.each do |day|
      day["heartbeats"].each do |wh|
        next if wh["category"] == "browsing"

        attrs = build_heartbeat_attrs(wh, machines, agents)
        dedup_key = generate_dedup_key(attrs[:entity], attrs[:type], attrs[:project], attrs[:branch], attrs[:language], attrs[:time])
        next if existing_heartbeats.include?(dedup_key)

        heartbeats_to_insert << attrs
      end
    end

    # Drop in-batch duplicates (same fields hash) before the bulk upsert.
    heartbeats_to_insert.uniq! { |attrs| attrs[:fields_hash] }
    log "attempting to insert #{heartbeats_to_insert.size} heartbeats..."

    if heartbeats_to_insert.any?
      begin
        result = Heartbeat.upsert_all(
          heartbeats_to_insert,
          unique_by: :fields_hash
        )
        log "inserted #{result.rows.size} heartbeats."
      rescue => e
        log "error during insert: #{e.class} - #{e.message}"
        log e.backtrace.join("\n")
      end
    else
      log "no new heartbeats to insert."
    end

    # Deliberately keeping the downloaded dump around for re-runs / debugging:
    # FileUtils.rm(output_path)
    log "finished wakatime.com heartbeats import for user #{@user.id}"
  end

  # Maps one wakatime.com heartbeat hash onto Heartbeat column attributes.
  def build_heartbeat_attrs(wh, machines, agents)
    agent = agents.find { |a| a["id"] == wh["user_agent_id"] }

    attrs = {
      user_id: @user.id,
      branch: wh["branch"],
      category: wh["category"],
      dependencies: wh["dependencies"],
      entity: wh["entity"],
      is_write: wh["is_write"],
      language: wh["language"],
      project: wh["project"],
      time: wh["time"],
      type: wh["type"],
      machine: machines.find { |m| m["id"] == wh["machine_name_id"] }&.dig("name"),
      editor: agent&.dig("editor"),
      operating_system: agent&.dig("os"),
      cursorpos: wh["cursorpos"],
      lineno: wh["lineno"],
      lines: wh["lines"],
      created_at: wh["created_at"],
      source_type: 3 # Heartbeat enum: wakatimecom_import
    }

    attrs[:fields_hash] = Heartbeat.generate_fields_hash(attrs)
    attrs
  end

  # Returns the "heartbeats" data-dump descriptor for the user, or {} when
  # none exists yet or the request fails.
  def get_dumps
    response = HTTP.auth(basic_auth_header)
                   .get("#{WAKATIME_API_BASE}/users/current/data_dumps")

    unless response.status.success?
      log "Failed to fetch Wakatime.com data dumps: #{response.status} - #{response.body}"
      return {}
    end

    JSON.parse(response.body)["data"].find { |dump| dump["type"] == "heartbeats" } || {}
  end

  # Kicks off server-side dump generation; completion is detected by polling
  # get_dumps, so we don't need the email notification.
  def create_dump
    HTTP.auth(basic_auth_header)
        .post("#{WAKATIME_API_BASE}/users/current/data_dumps",
          json: {
            type: "heartbeats",
            email_when_finished: false
          }
        )
  end

  # Downloads the dump JSON into storage/wakatime_dumps, skipping the network
  # round-trip when a local copy already exists.
  # @return [Pathname] path of the (possibly pre-existing) JSON file
  def download(dump)
    output_path = dump_file_path
    FileUtils.mkdir_p(output_path.dirname)

    if wakatime_json_exists?
      log "file already exists, skipping download"
      return output_path
    end

    log "downloading wakatime.com heartbeats dump for user #{@user.id}"
    # open-uri is used here because streaming the download via the HTTP gem
    # didn't work. NOTE(review): download_url comes from the authenticated API
    # response rather than user input, but URI.open on a runtime string is
    # worth keeping an eye on.
    File.open(output_path, "wb") do |file|
      file.write(URI.open(dump["download_url"]).read)
    end

    log "wakatime.com heartbeats saved to #{output_path} for user #{@user.id}"
    output_path
  end

  # All machine-name records for the account (paginated).
  def get_machines
    fetch_all_pages("machine_names", "machines")
  end

  # All user-agent records for the account (paginated) — basically the editors.
  def get_agents
    fetch_all_pages("user_agents", "user agents")
  end

  # Walks every page of a wakatime.com collection endpoint and concatenates
  # the "data" arrays. Sleeps between pages to stay under the API rate limit.
  def fetch_all_pages(endpoint, label)
    results = []
    page = 1

    loop do
      response = HTTP.auth(basic_auth_header)
                     .get("#{WAKATIME_API_BASE}/users/current/#{endpoint}", params: { page: page })

      unless response.status.success?
        log "failed to fetch wakatime.com #{label}: #{response.status} - #{response.body}"
        break
      end

      data = JSON.parse(response.body)
      results.concat(data["data"])

      break unless data["next_page"]

      sleep 1 # rate limits
      page += 1
    end

    log "fetched #{results.size} #{label} total"
    results
  end

  # wakatime.com uses HTTP Basic auth with the API key as username, empty password.
  def basic_auth_header
    "Basic #{Base64.strict_encode64("#{@api_key}:")}"
  end

  def dump_file_path
    Rails.root.join("storage", "wakatime_dumps", "wakatime_heartbeats_#{@user.id}.json")
  end

  def wakatime_json_exists?
    File.exist?(dump_file_path)
  end

  # Composite key used to detect a heartbeat we already have locally.
  def generate_dedup_key(entity, type, project, branch, language, time)
    "#{entity}-#{type}-#{project}-#{branch}-#{language}-#{time}"
  end

  # Job logging goes through Rails.logger so it survives in production
  # (the original used bare puts, which is lost outside the console).
  def log(message)
    Rails.logger.info("[MigrateWakatimecomHeartbeats] #{message}")
  end
end
diff --git a/app/models/wakatime_mirror.rb b/app/models/wakatime_mirror.rb index 41faf40c..a59955a0 100644 --- a/app/models/wakatime_mirror.rb +++ b/app/models/wakatime_mirror.rb @@ -13,7 +13,9 @@ class WakatimeMirror < ApplicationRecord def unsynced_heartbeats # Get heartbeats since last sync, or all heartbeats if never synced - user.heartbeats.where("created_at > ?", last_synced_at || Time.at(0)) + user.heartbeats + .where("created_at > ?", last_synced_at || Time.at(0)) + .where.not(source_type: :wakatimecom_import) end def sync_heartbeats diff --git a/app/views/users/edit.html.erb b/app/views/users/edit.html.erb index 9dacd794..a22e53b9 100644 --- a/app/views/users/edit.html.erb +++ b/app/views/users/edit.html.erb @@ -316,10 +316,13 @@

🚚 Migration Assistant

-

This will migrate your heartbeats from waka.hackclub.com to this platform.

+

This will migrate your heartbeats from other services to this platform.

<%= button_to "Migrate heartbeats", my_settings_migrate_heartbeats_path, method: :post, role: "button" %> + <% if WakatimeMirror.find_by(endpoint_url: "https://wakatime.com/api/v1", user_id: @user.id)&.encrypted_api_key %> + <%= button_to "Migrate wakatime.com", my_settings_migrate_wakatimecom_heartbeats_path, method: :post, role: "button" %> + <% end %> <% if @heartbeats_migration_jobs.any? %>
diff --git a/config/environments/development.rb b/config/environments/development.rb index 15afe33c..dd05f003 100644 --- a/config/environments/development.rb +++ b/config/environments/development.rb @@ -68,6 +68,8 @@ # Append comments with runtime information tags to SQL queries in logs. config.active_record.query_log_tags_enabled = true + config.active_record.logger = nil + # Replace the default in-process and non-durable queuing backend for Active Job. config.active_job.queue_adapter = :good_job # config.solid_queue.connects_to = { database: { writing: :queue } } diff --git a/config/routes.rb b/config/routes.rb index 73547b0d..d6c09204 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -95,6 +95,7 @@ def self.matches?(request) get "my/settings", to: "users#edit", as: :my_settings patch "my/settings", to: "users#update" post "my/settings/migrate_heartbeats", to: "users#migrate_heartbeats", as: :my_settings_migrate_heartbeats + post "my/settings/migrate_wakatimecom_heartbeats", to: "users#migrate_wakatimecom_heartbeats", as: :my_settings_migrate_wakatimecom_heartbeats namespace :my do resources :project_repo_mappings, param: :project_name, only: [ :edit, :update ] diff --git a/db/migrate/20250429114602_wakatime_api_key_user.rb b/db/migrate/20250429114602_wakatime_api_key_user.rb new file mode 100644 index 00000000..1b7f6ed2 --- /dev/null +++ b/db/migrate/20250429114602_wakatime_api_key_user.rb @@ -0,0 +1,5 @@ +class WakatimeApiKeyUser < ActiveRecord::Migration[8.0] + def change + add_column :users, :wakatime_api_key, :string, null: true + end +end diff --git a/db/primary_direct_schema.rb b/db/primary_direct_schema.rb new file mode 100644 index 00000000..aec34036 --- /dev/null +++ b/db/primary_direct_schema.rb @@ -0,0 +1,277 @@ +# This file is auto-generated from the current state of the database. 
Instead +# of editing this file, please use the migrations feature of Active Record to +# incrementally modify your database, and then regenerate this schema definition. +# +# This file is the source Rails uses to define your schema when running `bin/rails +# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to +# be faster and is potentially less error prone than running all of your +# migrations from scratch. Old migrations may fail to apply correctly if those +# migrations use external dependencies or application code. +# +# It's strongly recommended that you check this file into your version control system. + +ActiveRecord::Schema[8.0].define(version: 2025_04_29_114602) do + # These are extensions that must be enabled in order to support this database + enable_extension "pg_catalog.plpgsql" + + create_table "api_keys", force: :cascade do |t| + t.bigint "user_id", null: false + t.text "name", null: false + t.text "token", null: false + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.index ["token"], name: "index_api_keys_on_token", unique: true + t.index ["user_id", "name"], name: "index_api_keys_on_user_id_and_name", unique: true + t.index ["user_id", "token"], name: "index_api_keys_on_user_id_and_token", unique: true + t.index ["user_id"], name: "index_api_keys_on_user_id" + end + + create_table "email_addresses", force: :cascade do |t| + t.string "email" + t.bigint "user_id", null: false + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.index ["email"], name: "index_email_addresses_on_email", unique: true + t.index ["user_id"], name: "index_email_addresses_on_user_id" + end + + create_table "good_job_batches", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t| + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.text "description" + t.jsonb "serialized_properties" + t.text "on_finish" + t.text "on_success" + t.text 
"on_discard" + t.text "callback_queue_name" + t.integer "callback_priority" + t.datetime "enqueued_at" + t.datetime "discarded_at" + t.datetime "finished_at" + t.datetime "jobs_finished_at" + end + + create_table "good_job_executions", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t| + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.uuid "active_job_id", null: false + t.text "job_class" + t.text "queue_name" + t.jsonb "serialized_params" + t.datetime "scheduled_at" + t.datetime "finished_at" + t.text "error" + t.integer "error_event", limit: 2 + t.text "error_backtrace", array: true + t.uuid "process_id" + t.interval "duration" + t.index ["active_job_id", "created_at"], name: "index_good_job_executions_on_active_job_id_and_created_at" + t.index ["process_id", "created_at"], name: "index_good_job_executions_on_process_id_and_created_at" + end + + create_table "good_job_processes", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t| + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.jsonb "state" + t.integer "lock_type", limit: 2 + end + + create_table "good_job_settings", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t| + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.text "key" + t.jsonb "value" + t.index ["key"], name: "index_good_job_settings_on_key", unique: true + end + + create_table "good_jobs", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t| + t.text "queue_name" + t.integer "priority" + t.jsonb "serialized_params" + t.datetime "scheduled_at" + t.datetime "performed_at" + t.datetime "finished_at" + t.text "error" + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.uuid "active_job_id" + t.text "concurrency_key" + t.text "cron_key" + t.uuid "retried_good_job_id" + t.datetime "cron_at" + t.uuid "batch_id" + t.uuid "batch_callback_id" + t.boolean 
"is_discrete" + t.integer "executions_count" + t.text "job_class" + t.integer "error_event", limit: 2 + t.text "labels", array: true + t.uuid "locked_by_id" + t.datetime "locked_at" + t.index ["active_job_id", "created_at"], name: "index_good_jobs_on_active_job_id_and_created_at" + t.index ["batch_callback_id"], name: "index_good_jobs_on_batch_callback_id", where: "(batch_callback_id IS NOT NULL)" + t.index ["batch_id"], name: "index_good_jobs_on_batch_id", where: "(batch_id IS NOT NULL)" + t.index ["concurrency_key"], name: "index_good_jobs_on_concurrency_key_when_unfinished", where: "(finished_at IS NULL)" + t.index ["cron_key", "created_at"], name: "index_good_jobs_on_cron_key_and_created_at_cond", where: "(cron_key IS NOT NULL)" + t.index ["cron_key", "cron_at"], name: "index_good_jobs_on_cron_key_and_cron_at_cond", unique: true, where: "(cron_key IS NOT NULL)" + t.index ["finished_at"], name: "index_good_jobs_jobs_on_finished_at", where: "((retried_good_job_id IS NULL) AND (finished_at IS NOT NULL))" + t.index ["labels"], name: "index_good_jobs_on_labels", where: "(labels IS NOT NULL)", using: :gin + t.index ["locked_by_id"], name: "index_good_jobs_on_locked_by_id", where: "(locked_by_id IS NOT NULL)" + t.index ["priority", "created_at"], name: "index_good_job_jobs_for_candidate_lookup", where: "(finished_at IS NULL)" + t.index ["priority", "created_at"], name: "index_good_jobs_jobs_on_priority_created_at_when_unfinished", order: { priority: "DESC NULLS LAST" }, where: "(finished_at IS NULL)" + t.index ["priority", "scheduled_at"], name: "index_good_jobs_on_priority_scheduled_at_unfinished_unlocked", where: "((finished_at IS NULL) AND (locked_by_id IS NULL))" + t.index ["queue_name", "scheduled_at"], name: "index_good_jobs_on_queue_name_and_scheduled_at", where: "(finished_at IS NULL)" + t.index ["scheduled_at"], name: "index_good_jobs_on_scheduled_at", where: "(finished_at IS NULL)" + end + + create_table "heartbeats", force: :cascade do |t| + t.bigint 
"user_id", null: false + t.string "branch" + t.string "category" + t.string "dependencies", default: [], array: true + t.string "editor" + t.string "entity" + t.string "language" + t.string "machine" + t.string "operating_system" + t.string "project" + t.string "type" + t.string "user_agent" + t.integer "line_additions" + t.integer "line_deletions" + t.integer "lineno" + t.integer "lines" + t.integer "cursorpos" + t.integer "project_root_count" + t.float "time", null: false + t.boolean "is_write" + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.text "fields_hash" + t.integer "source_type", null: false + t.inet "ip_address" + t.integer "ysws_program", default: 0, null: false + t.index ["category", "time"], name: "index_heartbeats_on_category_and_time" + t.index ["fields_hash"], name: "index_heartbeats_on_fields_hash", unique: true + t.index ["user_id"], name: "index_heartbeats_on_user_id" + end + + create_table "leaderboard_entries", force: :cascade do |t| + t.bigint "leaderboard_id", null: false + t.integer "total_seconds", default: 0, null: false + t.integer "rank" + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.bigint "user_id", null: false + t.integer "streak_count", default: 0 + t.index ["leaderboard_id", "user_id"], name: "idx_leaderboard_entries_on_leaderboard_and_user", unique: true + t.index ["leaderboard_id"], name: "index_leaderboard_entries_on_leaderboard_id" + end + + create_table "leaderboards", force: :cascade do |t| + t.date "start_date", null: false + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.datetime "finished_generating_at" + t.datetime "deleted_at" + t.integer "period_type", default: 0, null: false + end + + create_table "project_repo_mappings", force: :cascade do |t| + t.bigint "user_id", null: false + t.string "project_name", null: false + t.string "repo_url", null: false + t.datetime "created_at", null: false + t.datetime "updated_at", 
null: false + t.index ["user_id", "project_name"], name: "index_project_repo_mappings_on_user_id_and_project_name", unique: true + t.index ["user_id"], name: "index_project_repo_mappings_on_user_id" + end + + create_table "sailors_log_leaderboards", force: :cascade do |t| + t.string "slack_channel_id" + t.string "slack_uid" + t.text "message" + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.datetime "deleted_at" + end + + create_table "sailors_log_notification_preferences", force: :cascade do |t| + t.string "slack_uid", null: false + t.string "slack_channel_id", null: false + t.boolean "enabled", default: true, null: false + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.index ["slack_uid", "slack_channel_id"], name: "idx_sailors_log_notification_preferences_unique_user_channel", unique: true + end + + create_table "sailors_log_slack_notifications", force: :cascade do |t| + t.string "slack_uid", null: false + t.string "slack_channel_id", null: false + t.string "project_name", null: false + t.integer "project_duration", null: false + t.boolean "sent", default: false, null: false + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + end + + create_table "sailors_logs", force: :cascade do |t| + t.string "slack_uid", null: false + t.jsonb "projects_summary", default: {}, null: false + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + end + + create_table "sign_in_tokens", force: :cascade do |t| + t.string "token" + t.bigint "user_id", null: false + t.integer "auth_type" + t.datetime "expires_at" + t.datetime "used_at" + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.index ["token"], name: "index_sign_in_tokens_on_token" + t.index ["user_id"], name: "index_sign_in_tokens_on_user_id" + end + + create_table "users", force: :cascade do |t| + t.string "slack_uid" + t.datetime "created_at", null: false + t.datetime 
"updated_at", null: false + t.string "username" + t.string "slack_avatar_url" + t.boolean "is_admin", default: false, null: false + t.boolean "uses_slack_status", default: false, null: false + t.string "slack_scopes", default: [], array: true + t.text "slack_access_token" + t.integer "hackatime_extension_text_type", default: 0, null: false + t.string "timezone", default: "UTC" + t.string "github_uid" + t.string "github_avatar_url" + t.text "github_access_token" + t.string "github_username" + t.string "slack_username" + t.string "wakatime_api_key" + t.index ["slack_uid"], name: "index_users_on_slack_uid", unique: true + t.index ["timezone"], name: "index_users_on_timezone" + end + + create_table "versions", force: :cascade do |t| + t.string "whodunnit" + t.datetime "created_at" + t.bigint "item_id", null: false + t.string "item_type", null: false + t.string "event", null: false + t.text "object" + t.text "object_changes" + t.index ["item_type", "item_id"], name: "index_versions_on_item_type_and_item_id" + end + + add_foreign_key "api_keys", "users" + add_foreign_key "email_addresses", "users" + add_foreign_key "heartbeats", "users" + add_foreign_key "leaderboard_entries", "leaderboards" + add_foreign_key "leaderboard_entries", "users" + add_foreign_key "project_repo_mappings", "users" + add_foreign_key "sign_in_tokens", "users" +end diff --git a/db/schema.rb b/db/schema.rb index bba0cfdb..cf5624c5 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -485,6 +485,7 @@ t.text "github_access_token" t.string "github_username" t.string "slack_username" + t.string "wakatime_api_key" t.string "slack_neighborhood_channel" t.integer "trust_level", default: 0, null: false t.string "country_code"