8 changed files with 167 additions and 105 deletions
@ -1,100 +0,0 @@
|
||||
require 'roo' |
||||
require 'csv' |
||||
require 'yaml' |
||||
require_relative '../args/input_file' |
||||
require_relative '../args/import' |
||||
require_relative '../args/csv_opts' |
||||
require_relative '../import/data_frame' |
||||
|
||||
module Squib

  # DSL method. See http://squib.readthedocs.io
  def csv(opts = {})
    # TODO refactor all this out to separate methods, and its own class
    import     = Args::Import.new.load!(opts)
    csv_file   = Args::InputFile.new(file: 'deck.csv').load!(opts).file[0]
    raw        = opts.key?(:data) ? opts[:data] : File.read(csv_file)
    parse_opts = Args::CSV_Opts.new(opts)
    table      = CSV.parse(raw, **parse_opts.to_hash)
    check_duplicate_csv_headers(table)

    frame = Squib::DataFrame.new
    table.headers.each do |raw_header|
      key = raw_header.to_s
      key = key.strip if import.strip?
      frame[key] ||= table[raw_header] # on duplicate headers, first column wins
    end

    if import.strip?
      stripped = Squib::DataFrame.new
      frame.each do |key, column|
        stripped[key] = column.map do |cell|
          cell.respond_to?(:strip) ? cell.strip : cell
        end
      end
      frame = stripped
    end

    if block_given?
      frame.each do |key, column|
        column.map! { |cell| yield(key, cell) }
      end
    end

    explode_quantities(frame, import.explode)
  end
  module_function :csv

  # DSL method. See http://squib.readthedocs.io
  def yaml(opts = {})
    input  = Args::InputFile.new(file: 'deck.yml').load!(opts)
    import = Args::Import.new.load!(opts)
    cards  = YAML.load_file(input.file[0])
    frame  = Squib::DataFrame.new
    # Union of every card's keys, so each column exists even when some
    # cards omit it (missing keys become nil below).
    keys = cards.flat_map(&:keys).uniq
    keys.each { |k| frame[k] = [] } # init arrays
    cards.each do |card|
      keys.each { |k| frame[k] << card[k] } # nil value if key isn't set
    end
    if block_given?
      frame.each do |key, column|
        column.map! { |cell| yield(key, cell) }
      end
    end
    explode_quantities(frame, import.explode)
  end
  module_function :yaml

  # Check if the given CSV table has duplicate columns, and throw a warning
  # @api private
  def check_duplicate_csv_headers(table)
    headers = table.headers
    return if headers.size == headers.uniq.size
    dups = headers.select { |h| headers.count(h) > 1 }
    Squib.logger.warn "CSV duplicated the following column keys: #{dups.join(',')}"
  end
  module_function :check_duplicate_csv_headers

  class Deck

    # DSL method. See http://squib.readthedocs.io
    def xlsx(opts = {})
      Squib.xlsx(opts)
    end

    # DSL method. See http://squib.readthedocs.io
    def csv(opts = {})
      Squib.csv(opts)
    end

    # DSL method. See http://squib.readthedocs.io
    def yaml(opts = {})
      Squib.yaml(opts)
    end
  end
end
||||
@ -0,0 +1,41 @@
|
||||
require_relative '../args/csv_opts'
require_relative '../args/import'
require_relative '../import/csv_importer'
require_relative '../errors_warnings/warn_unexpected_params'

module Squib
  # DSL method. See http://squib.readthedocs.io
  def csv(opts = {}, &block)
    DSL::Csv.new(__callee__).run(opts, &block)
  end
  module_function :csv

  class Deck
    # DSL method. See http://squib.readthedocs.io
    def csv(opts = {}, &block)
      DSL::Csv.new(__callee__).run(opts, &block)
    end
  end

  module DSL
    # Validates the options given to the `csv` DSL call and dispatches the
    # actual work to Squib::Import::CsvImporter.
    class Csv
      include WarnUnexpectedParams
      attr_reader :dsl_method, :block

      # @param dsl_method [Symbol] the DSL method name (via __callee__), used
      #   by WarnUnexpectedParams when reporting unknown options
      def initialize(dsl_method)
        @dsl_method = dsl_method
      end

      # Options `csv` accepts; anything else triggers a warning.
      def self.accepted_params
        %i(file data strip explode col_sep quote_char)
      end

      # Warn on unexpected options, then import the CSV into a data frame.
      # @return the imported (and possibly quantity-exploded) data frame
      def run(opts, &block)
        @block = block # keep the transform reachable via attr_reader
        warn_if_unexpected opts
        import_args = Args.extract_import opts
        importer = Squib::Import::CsvImporter.new
        csv_opts = Args::CSV_Opts.new(opts)
        importer.import_to_dataframe(import_args, csv_opts, &block)
      end
    end
  end
end
||||
@ -0,0 +1,40 @@
|
||||
require_relative '../args/import'
require_relative '../import/yaml_importer'
require_relative '../errors_warnings/warn_unexpected_params'

module Squib
  # DSL method. See http://squib.readthedocs.io
  def yaml(opts = {}, &block)
    DSL::Yaml.new(__callee__).run(opts, &block)
  end
  module_function :yaml

  class Deck
    # DSL method. See http://squib.readthedocs.io
    def yaml(opts = {}, &block)
      DSL::Yaml.new(__callee__).run(opts, &block)
    end
  end

  module DSL
    # Validates the options given to the `yaml` DSL call and dispatches the
    # actual work to Squib::Import::YamlImporter.
    class Yaml
      include WarnUnexpectedParams
      attr_reader :dsl_method, :block

      # @param dsl_method [Symbol] the DSL method name (via __callee__), used
      #   by WarnUnexpectedParams when reporting unknown options
      def initialize(dsl_method)
        @dsl_method = dsl_method
      end

      # Options `yaml` accepts; anything else triggers a warning.
      def self.accepted_params
        %i(file data explode)
      end

      # Warn on unexpected options, then import the YAML into a data frame.
      # @return the imported (and possibly quantity-exploded) data frame
      def run(opts, &block)
        @block = block # keep the transform reachable via attr_reader
        warn_if_unexpected opts
        import_args = Args.extract_import opts
        importer = Squib::Import::YamlImporter.new
        importer.import_to_dataframe(import_args, &block)
      end
    end
  end
end
||||
@ -0,0 +1,44 @@
|
||||
require 'csv'
require_relative 'data_frame'
require_relative 'quantity_exploder'

module Squib::Import
  # Parses CSV text (from a file or an inline string) into a
  # Squib::DataFrame, optionally stripping whitespace from headers and
  # cells, applying a per-cell transform block, and exploding quantities.
  class CsvImporter
    include Squib::Import::QuantityExploder

    # @param import the import args: data/file source, strip?, explode
    # @param csv_opts options forwarded verbatim to CSV.parse
    # @param block optional transform called with (header, value) per cell
    # @return [Squib::DataFrame]
    def import_to_dataframe(import, csv_opts, &block)
      # Inline :data takes precedence over reading :file from disk.
      data = import.data.nil? ? File.read(import.file) : import.data
      table = CSV.parse(data, **csv_opts.to_hash)
      check_duplicate_csv_headers(table)
      hash = Squib::DataFrame.new
      table.headers.each do |header|
        new_header = header.to_s
        new_header = new_header.strip if import.strip?
        hash[new_header] ||= table[header] # on duplicates, first column wins
      end
      if import.strip?
        new_hash = Squib::DataFrame.new
        hash.each do |header, col|
          new_hash[header] = col.map do |str|
            str.respond_to?(:strip) ? str.strip : str
          end
        end
        hash = new_hash
      end
      unless block.nil?
        hash.each do |header, col|
          col.map! { |val| block.call(header, val) }
        end
      end
      explode_quantities(hash, import.explode)
    end

    # Warn (via Squib's logger) when the parsed table repeats a column header.
    def check_duplicate_csv_headers(table)
      return if table.headers.size == table.headers.uniq.size
      dups = table.headers.select { |e| table.headers.count(e) > 1 }
      Squib.logger.warn "CSV duplicated the following column keys: #{dups.join(',')}"
    end
  end
end
||||
|
||||
@ -0,0 +1,29 @@
|
||||
require 'yaml'
require_relative 'data_frame'
require_relative 'quantity_exploder'

module Squib::Import
  # Loads YAML card data (from a file or an inline string) into a
  # Squib::DataFrame, optionally applying a per-cell transform block and
  # exploding quantities.
  class YamlImporter
    include Squib::Import::QuantityExploder

    # @param import the import args: data/file source, explode
    # @param block optional transform called with (header, value) per cell
    # @return [Squib::DataFrame]
    def import_to_dataframe(import, &block)
      # Inline :data takes precedence over reading :file from disk.
      raw = import.data.nil? ? File.read(import.file) : import.data
      # NOTE(review): YAML.load can instantiate arbitrary objects from
      # untrusted input -- consider YAML.safe_load, but that would change
      # behavior for decks relying on tags/aliases, so flagged not changed.
      yml = YAML.load(raw)
      data = Squib::DataFrame.new
      # Get a universal list of keys to ensure everything is covered.
      keys = yml.map { |c| c.keys }.flatten.uniq
      keys.each { |k| data[k] = [] } # init arrays
      yml.each do |card|
        # nil value if key isn't set.
        keys.each { |k| data[k] << card[k] }
      end
      unless block.nil?
        data.each do |header, col|
          col.map! { |val| block.call(header, val) }
        end
      end
      explode_quantities(data, import.explode)
    end
  end
end
||||
|
||||
Loading…
Reference in new issue