|
require 'json' |
|
require 'csv' |
|
require 'fileutils' |
|
|
|
class SlackParser
  # Parses a Slack export directory (data/) and writes per-channel CSV
  # summaries under output/: message counts per user, and daily traffic
  # (message count + distinct active users per day).
  attr_accessor :channels, :users

  OUTPUT_USERS_DIR = 'output/users'
  OUTPUT_TRAFFIC_DIR = 'output/traffic'

  # Convenience entry point: build a parser and run it in one call.
  def self.run(*args)
    new(*args).run
  end

  # Discovers channel directories under data/ and loads the user roster.
  # Raises Errno::ENOENT / JSON::ParserError when the export is missing
  # or malformed.
  def initialize
    @channels = Dir['data/*'].select { |f| File.directory?(f) }
    @users = JSON.parse(File.read("data/users.json"))
  end

  # Writes one users CSV and one traffic CSV per channel.
  def run
    # Loop-invariant: create the output directories once, not per channel.
    FileUtils.mkdir_p(OUTPUT_USERS_DIR)
    FileUtils.mkdir_p(OUTPUT_TRAFFIC_DIR)

    channels.each do |channel_path|
      days = Dir["#{channel_path}/*.json"]
      channel = File.basename(channel_path)

      write_user_stats(channel, days)
      write_traffic(channel, days)
    end
  end

  private

  # Per-user message totals across all days for one channel, translated
  # to usernames and written sorted by message count, descending.
  def write_user_stats(channel, days)
    # Hash.new(0) gives an O(1) counting default; avoids allocating a
    # fresh hash per message as repeated Hash#merge would.
    totals = Hash.new(0)
    days.each do |day|
      real_messages(day).each { |message| totals[message['user']] += 1 }
    end

    sorted_stats = ids_to_names(totals).sort_by { |_name, count| -count }

    CSV.open("#{OUTPUT_USERS_DIR}/#{channel}.csv", 'wb', headers: %w(username messages), write_headers: true) do |csv|
      sorted_stats.each { |row| csv << row }
    end
  end

  # One row per day file: date, message count, distinct active users.
  def write_traffic(channel, days)
    CSV.open("#{OUTPUT_TRAFFIC_DIR}/#{channel}.csv", 'wb', headers: %w(date messages users_active), write_headers: true) do |csv|
      days.each do |day|
        messages = real_messages(day)
        csv << [
          File.basename(day, '.*'),
          messages.count,
          messages.map { |m| m['user'] }.uniq.count,
        ]
      end
    end
  end

  # Human-authored messages only: Slack tags joins/leaves/bot posts and
  # other events with a 'subtype' key.
  def real_messages(day)
    JSON.parse(File.read(day)).reject { |m| m.key?('subtype') }
  end

  # Translates a { user_id => count } hash into { username => count },
  # dropping ids that are not present in the roster.
  def ids_to_names(by_id)
    users.each_with_object({}) do |user, by_name|
      by_name[user['name']] = by_id[user['id']] if by_id.key?(user['id'])
    end
  end
end
|
|
|
# Script entry point: parse the export and emit CSVs when run directly.
SlackParser.run