#!/usr/bin/env ruby
require 'parallel'
require 'zip'
require 'oj'
require 'sqlite3'
require 'time'
require 'erb'
require 'fileutils'
require 'zlib'
require './config'
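# Configure rubyzip: use maximum compression for the archives we create and
# don't error out when an entry with the same name already exists.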
Zip.setup do |c|
  c.default_compression = Zlib::BEST_COMPRESSION
  c.continue_on_exists_proc = true
end
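# Convenience extensions on Hash used in this script: `symbolize_keys` turns
# string keys into symbols, `walk` visits every node of a nested hash
# depth-first, and `leaves` enumerates the block's result for every non-Hash
# value together with its key path.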
class Hash
  def leaves(&block)
    Enumerator.new do |e|
      self.class.leaves self, [], e, &block
    end
  end

  def symbolize_keys
    self.transform_keys { |k| k.to_sym rescue k }
  end

  def walk(&block)
    self.class._walk self, [], &block
  end

  private

  def self._walk(hash, path, &block)
    hash.each do |k, v|
      p = [*path, k]
      block.call :node, p, v
      case v
      when Hash
        _walk v, p, &block
      else
        block.call :leaf, p, v
      end
    end
  end

  def self.leaves(hash, path, enumerator, &block)
    hash.each do |k, v|
      p = [*path, k]
      case v
      when Hash
        self.leaves v, p, enumerator, &block
      else
        result = block.call p, v
        enumerator << result
      end
    end
  end
end
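# The Elite Insights 'duration' field is a space-separated list of
# <number><unit> tokens (hours down to milliseconds). parse_duration converts
# it to integer milliseconds; duration_to_s formats milliseconds back into a
# compact string for the HTML report.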
DURATION = {
  'h' => 60 * 60 * 1_000,
  'm' => 60 * 1_000,
  's' => 1_000,
  'ms' => 1,
}.freeze
def parse_duration(string)
  string.split
        .collect do |s|
          duration, type = s.match(/(\d+)(.+)/).captures
          DURATION.fetch(type) * duration.to_i
        end
        .sum
end
def duration_to_s(duration)
  text = []
  DURATION.each do |k, v|
    part = duration / v
    text << "#{part}#{k}" if part > 0
    duration -= part * v
  end
  text.join '&nbsp;'
end
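# Collects report rows grouped by encounter id; indexing by boss name (via
# IDS from config.rb) returns that boss's reports newest first, partitioned
# into challenge-mode and standard kills.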
class Reports
  def initialize
    @reports = Hash.new { |h, k| h[k] = [] }
  end

  def <<(report)
    id = report.fetch :id
    @reports[id] << report
  end

  def [](boss)
    id = IDS[boss]
    return nil unless id && @reports.has_key?(id)
    reports = @reports.fetch(id).sort { |a, b| b.fetch(:date) <=> a.fetch(:date) }
    cm, standard = reports.partition { |r| r.fetch :cm }
    { cm: cm, standard: standard }
  end
end
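# Compresses a raw arcdps .evtc log into a .zevtc zip archive next to it,
# removes the original, and returns the new path.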
def compress_evtc(evtc)
  puts evtc
  basename = File.basename evtc, '.evtc'
  dirname = File.dirname evtc
  zevtc = File.join dirname, basename + '.zevtc'
  FileUtils.rm_f zevtc
  Zip::File.open zevtc, Zip::File::CREATE do |zip|
    zip.add evtc, evtc
  end
  FileUtils.rm evtc
  zevtc
end
def process_evtcs
  Parallel.map(Dir['arcdps.cbtlogs/**/*.evtc'].sort) { |evtc| compress_evtc evtc }
end
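# Feeds the compressed logs to GW2 Elite Insights (run through mono, using
# the settings in gw2ei.conf) and returns the paths of the gzipped JSON
# reports the script expects it to generate under html/.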
def process_zevtcs(zevtcs)
  return if zevtcs.empty?
  system 'mono', 'GW2EI/GuildWars2EliteInsights.exe', '-p', '-c', 'gw2ei.conf', *zevtcs
  zevtcs.collect { |f| File.join 'html', File.basename(f, '.zevtc') + '.json.gz' }
end
def reprocess_zevtcs
  process_zevtcs Dir['arcdps.cbtlogs/**/*.zevtc'].sort
end
EXTRACT_BOSS_FROM_FILENAME = /\d{8}-\d{6}_([^_]+)_\d+s_(kill|fail)\.html/.freeze
# BOSS_DIRECTORIES = BOSSES.leaves { |p, bs| d = p.first; bs.collect { |b| [b, d] } }
#                          .to_a.flatten(1).to_h
#                          .merge({ai: :fractals}).freeze
BOSS_DIRECTORIES = ARCDPS_BOSSES.collect { |t, bs| bs.collect { |b| [b, t] } }
                                .flatten(1).to_h.freeze
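# Moves each generated HTML report into the html/<category>/ directory for
# its boss (categories come from ARCDPS_BOSSES in config.rb), with failed
# attempts placed in a fail/ subdirectory.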
def sort_html
  Dir['html/*.html'].sort.each do |file|
    basename = File.basename file
    match = EXTRACT_BOSS_FROM_FILENAME.match file
    next unless match
    boss = match[1].to_sym
    directory = BOSS_DIRECTORIES.fetch(boss).to_s
    directory = File.join 'html', directory
    type = match[2].to_sym
    directory = File.join directory, 'fail' if type == :fail
    FileUtils.mv file, directory
  end
end
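# Opens reports.db, creating the reports table and its indexes on first use,
# and yields the handle with rows returned as hashes. One row per parsed
# report, keyed uniquely by file name.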
def do_in_database
  SQLite3::Database.new 'reports.db' do |db|
    db.execute <<-SQL
      CREATE TABLE IF NOT EXISTS reports (
        filename TEXT NOT NULL,
        id INTEGER NOT NULL,
        cm BOOLEAN NOT NULL,
        date DATETIME NOT NULL,
        duration INTEGER NOT NULL,
        name TEXT NOT NULL,
        profession TEXT NOT NULL,
        total_dps INTEGER NOT NULL,
        top_dps INTEGER NOT NULL,
        own_dps INTEGER NOT NULL
      )
    SQL
    db.execute <<-SQL
      CREATE UNIQUE INDEX IF NOT EXISTS filename_index ON reports(filename)
    SQL
    db.execute <<-SQL
      CREATE INDEX IF NOT EXISTS id_index ON reports(id)
    SQL
    db.execute <<-SQL
      CREATE INDEX IF NOT EXISTS date_index ON reports(id, date DESC)
    SQL
    db.results_as_hash = true
    yield db
  end
end
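# Pulls the stored fields out of one gzipped Elite Insights JSON report:
# encounter id, CM flag, start time, duration in milliseconds, squad and top
# DPS against the first listed target, plus the character name, profession
# and DPS of the player whose account matches ME (config.rb).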
def parse_json(file)
  File.open file do |fd|
    json = Oj.load Zlib::GzipReader.wrap fd
    id = json.fetch 'triggerID'
    name = json.fetch 'fightName'
    cm = json.fetch 'isCM'
    date = Time.parse json.fetch 'timeStartStd'
    duration = parse_duration json.fetch 'duration'
    players = json.fetch 'players'
    dps = players.collect { |p| p.fetch('dpsTargets').first.first.fetch('dps') }
    total_dps = dps.sum
    top_dps = dps.max
    me = players.find { |p| p.fetch('account') == ME }
    name = me.fetch 'name'
    own_dps = me.fetch('dpsTargets').first.first.fetch 'dps'
    profession = me.fetch 'profession'
    {
      id: id, cm: cm, date: date, duration: duration,
      name: name, profession: profession,
      total_dps: total_dps, top_dps: top_dps, own_dps: own_dps,
    }
  end
end
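# Inserts a report row unless the file name is already in the database, so
# re-running the script only parses logs it hasn't seen before.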
def process_json(db, json)
  # ap json
  filename = File.basename json, '.json.gz'
  row = db.get_first_row 'SELECT * FROM reports WHERE filename = ?', filename
  return if row
  report = parse_json json
  sql = <<-SQL
    INSERT INTO reports ( filename, id, cm, date, duration, name, profession, total_dps, top_dps, own_dps )
    VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ? )
  SQL
  db.execute sql, filename, report.fetch(:id), (report.fetch(:cm) ? 1 : 0),
             report.fetch(:date).to_i, report.fetch(:duration), report.fetch(:name),
             report.fetch(:profession), report.fetch(:total_dps), report.fetch(:top_dps),
             report.fetch(:own_dps)
end
def process_jsons(db, jsons)
  Parallel.each jsons, in_threads: 16 do |json|
    process_json db, json
  end
end
def reprocess_jsons(db)
  process_jsons db, Dir["html/*_kill.json.gz"].sort
end
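# Builds the Reports index from all stored rows and renders index.html.erb
# (which reads @reports) into html/index.html.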
def generate_html(db)
  @reports = Reports.new
  rows = db.execute 'SELECT * FROM reports'
  rows.each do |row|
    row = row.symbolize_keys
    row[:cm] = row.fetch(:cm) == 1
    row[:date] = Time.at row.fetch :date
    @reports << row
  end
  erb = ERB.new File.read 'index.html.erb'
  html = erb.result binding
  File.write 'html/index.html', html
end
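# Main pipeline: compress new arcdps logs, run Elite Insights over them, sort
# the generated HTML into category directories, then record any new JSON
# reports in the database and regenerate the index page.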
zevtcs = process_evtcs
process_zevtcs zevtcs
sort_html
# reprocess_zevtcs
do_in_database do |db|
  reprocess_jsons db
  generate_html db
end