RuboCop autocorrect app directory

This commit is contained in:
Lars Henrik Mai 2020-03-09 12:13:27 +01:00
parent ec4aa64cfc
commit c08ce6864c
10 changed files with 132 additions and 110 deletions

View file

@ -1,3 +1,5 @@
# frozen_string_literal: true
class ApplicationController < ActionController::Base
# Prevent CSRF attacks by raising an exception.
# For APIs, you may want to use :null_session instead.
@ -12,11 +14,11 @@ class ApplicationController < ActionController::Base
end
def glossary
render action: "glossary"
render action: 'glossary'
end
def impressum
render action: "impressum"
render action: 'impressum'
end
helper_method :current_user, :signed_in?

View file

@ -1,6 +1,7 @@
class GeoController < ApplicationController
# frozen_string_literal: true
class GeoController < ApplicationController
def index
render action: "index"
render action: 'index'
end
end

View file

@ -1,11 +1,13 @@
# frozen_string_literal: true
class ImportController < ApplicationController
skip_before_action :verify_authenticity_token, :only => [:new_papers_callback]
skip_before_action :verify_authenticity_token, only: [:new_papers_callback]
def new_papers_callback
require 'open-uri'
api_key = Rails.application.config_for(:morph)["key"]
api_key = Rails.application.config_for(:morph)['key']
uri = URI.parse("https://api.morph.io/jrlover/city_council_leipzig_recent_papers/data.json?key=#{api_key}&query=select%20*%20from%20%27data%27")
Paper.import_from_json(uri.read)
render :nothing => true
render nothing: true
end
end

View file

@ -1,14 +1,15 @@
SearchFacet = Struct.new("SearchFacet", :term, :count) do
# frozen_string_literal: true
SearchFacet = Struct.new('SearchFacet', :term, :count) do
def term_with_count
"#{term} (#{count})"
end
end
class SearchController < ApplicationController
def index
@search_definition = PaperSearch.new(search_params)
@search_definition.sort_by ||= "date"
@search_definition.sort_by ||= 'date'
execute_search
end
@ -16,7 +17,7 @@ class SearchController < ApplicationController
def show
@search_definition = PaperSearch.find params[:id]
execute_search
render action: "index"
render action: 'index'
end
private
@ -24,15 +25,16 @@ class SearchController < ApplicationController
def execute_search
@response = Paper.search(@search_definition.to_definition)
@papers = @response.page(params[:page]).results
@sub = Hash.new
@sub = {}
@papers.each do |paper|
unless paper.reference.nil? && paper.reference.contains("-")
segments = paper.reference.split("-")
id = ((paper.reference.start_with?("VI-") || paper.reference.start_with?("VII-")) && segments.count > 2 ?
next if paper.reference.nil? && paper.reference.contains('-')
segments = paper.reference.split('-')
id = ((paper.reference.start_with?('VI-') || paper.reference.start_with?('VII-')) && segments.count > 2 ?
segments[2] : segments[1])
escaped_chars = Regexp.escape('\\+-*:()[]{}&!?^|\/')
sanitized_id = id.gsub(/([#{escaped_chars}])/, '\\\\\1')
['AND', 'OR', 'NOT'].each do |reserved|
%w[AND OR NOT].each do |reserved|
escaped_reserved = reserved.split('').map { |c| "\\#{c}" }.join('')
sanitized_id = sanitized_id.gsub('/\s*\b(#{reserved.upcase})\b\s*/',
" #{escaped_reserved} ")
@ -40,8 +42,8 @@ class SearchController < ApplicationController
@sub_search_definition = Elasticsearch::DSL::Search.search do
query do
query_string do
query "*" + sanitized_id + "*"
fields ["reference"]
query '*' + sanitized_id + '*'
fields ['reference']
end
end
@ -53,7 +55,6 @@ class SearchController < ApplicationController
@sub_papers = Paper.search(@sub_search_definition)
@sub[paper.reference] = @sub_papers
end
end
@paper_type_facets = extract_facets('paper_types')
@originator_facets = extract_facets('originators')
end
@ -63,9 +64,8 @@ class SearchController < ApplicationController
end
def extract_facets(name)
@response.
response['aggregations'][name.to_s][name.to_s]['buckets'].
map {|m| SearchFacet.new(m['key'], m['doc_count'])}
@response
.response['aggregations'][name.to_s][name.to_s]['buckets']
.map { |m| SearchFacet.new(m['key'], m['doc_count']) }
end
end

View file

@ -1,26 +1,27 @@
# frozen_string_literal: true
module SearchHelper
def facet_list(facets)
return unless facets.present?
content_tag(:ul) do
facets.each do |facet|
concat content_tag(:li,
"#{facet.term} (#{facet.count})",
class: "facet"
)
class: 'facet')
end
end
end
def filter_select(builder, name, desc, facets, selected)
capture do
concat(builder.label name, desc)
concat(builder.label(name, desc))
concat(
builder.select name,
builder.select(name,
options_from_collection_for_select(facets, :term, :term_with_count, selected),
{ include_blank: true },
{ onchange: "this.form.submit();" }
{ onchange: 'this.form.submit();' })
)
end
end
end

View file

@ -1,3 +1,5 @@
# frozen_string_literal: true
class Importer < ActiveRecord::Base
validates :url, presence: true, uniqueness: true
end

View file

@ -1,3 +1,5 @@
# frozen_string_literal: true
require 'elasticsearch/model'
require 'json'
require 'parseable_date_validator'
@ -9,7 +11,7 @@ class Paper < ActiveRecord::Base
validates :name, presence: true, length: { maximum: 1000 }
validates :url, presence: true,
length: { maximum: 1000 },
uniqueness: true, # TODO use unique index instead
uniqueness: true, # TODO: use unique index instead
url: true
validates :reference, presence: true, length: { maximum: 100 }
validates :body, presence: true, length: { maximum: 100 }
@ -19,45 +21,46 @@ class Paper < ActiveRecord::Base
validates :published_at, presence: true, parseable_date: true
validates :resolution, length: { maximum: 30_000 }
index_name ['srm', Rails.env, self.base_class.to_s.pluralize.underscore].join('_')
index_name ['srm', Rails.env, base_class.to_s.pluralize.underscore].join('_')
settings index: {
number_of_shards: 1,
analysis: {
filter: {
german_stop: {
type: "stop",
stopwords: "_german_"
type: 'stop',
stopwords: '_german_'
},
german_stemmer: {
type: "stemmer",
language: "light_german"
type: 'stemmer',
language: 'light_german'
},
german_decompounder: {
type: "hyphenation_decompounder",
word_list_path: "analysis/dictionary-de.txt",
hyphenation_patterns_path: "analysis/de_DR.xml",
type: 'hyphenation_decompounder',
word_list_path: 'analysis/dictionary-de.txt',
hyphenation_patterns_path: 'analysis/de_DR.xml',
only_longest_match: true,
min_subword_size: 4
},
}
},
analyzer: {
german: {
tokenizer: "standard",
filter: [
"lowercase",
"german_stop",
"german_decompounder",
"german_normalization",
"german_stemmer"
tokenizer: 'standard',
filter: %w[
lowercase
german_stop
german_decompounder
german_normalization
german_stemmer
]
}
}
}
} do mappings dynamic: false do
indexes :name, type: :text, analyzer: "german"
indexes :content, type: :text, analyzer: "german"
indexes :resolution, type: :text, analyzer: "german"
} do
mappings dynamic: false do
indexes :name, type: :text, analyzer: 'german'
indexes :content, type: :text, analyzer: 'german'
indexes :resolution, type: :text, analyzer: 'german'
indexes :reference, type: :keyword, index: true
indexes :paper_type, type: :keyword, index: true
indexes :published_at, type: :date, index: true
@ -66,10 +69,10 @@ class Paper < ActiveRecord::Base
end
def split_originator
originator.split(/\d\.\s/).reject {|s| s.blank?} || originator
originator.split(/\d\.\s/).reject(&:blank?) || originator
end
def as_indexed_json(options={})
def as_indexed_json(_options = {})
as_json.merge(originator: split_originator)
end
@ -86,7 +89,7 @@ class Paper < ActiveRecord::Base
paper_type: record['paper_type'],
published_at: record['published_at'],
reference: record['reference'],
url: record['url'],
url: record['url']
}
record = find_or_initialize_by(url: attributes[:url])
record.update_attributes(attributes)
@ -106,6 +109,5 @@ class Paper < ActiveRecord::Base
__elasticsearch__.create_index! force: true
all.each { |p| p.__elasticsearch__.index_document }
end
end
end

View file

@ -1,5 +1,6 @@
class PaperSearch < ActiveRecord::Base
# frozen_string_literal: true
class PaperSearch < ActiveRecord::Base
def to_definition
options = { paper_type: paper_type, originator: originator, sort_by: sort_by }
PaperSearch.definition(query, options)
@ -7,33 +8,36 @@ class PaperSearch < ActiveRecord::Base
def self.definition(q, options = {})
Elasticsearch::DSL::Search.search do
sort do
if options[:sort_by] == 'score'
by '_score'
end
by '_score' if options[:sort_by] == 'score'
by :published_at, order: 'desc'
end
query do
# search query
unless q.blank?
if q.blank?
match_all
else
multi_match do
query q
fields ["name", "content"]
fields %w[name content]
end
else
match_all
end
end
# apply filter after aggregations
post_filter do
bool do
must { term paper_type: options[:paper_type] } if options[:paper_type].present?
must { term originator: options[:originator] } if options[:originator].present?
if options[:paper_type].present?
must { term paper_type: options[:paper_type] }
end
if options[:originator].present?
must { term originator: options[:originator] }
end
# catchall when no filters set
must { match_all } unless (options[:paper_type].present? || options[:originator].present?)
unless options[:paper_type].present? || options[:originator].present?
must { match_all }
end
end
end
@ -41,7 +45,9 @@ class PaperSearch < ActiveRecord::Base
# filter by originator
f = Elasticsearch::DSL::Search::Filters::Bool.new
f.must { match_all }
f.must { term originator: options[:originator] } if options[:originator].present?
if options[:originator].present?
f.must { term originator: options[:originator] }
end
filter f.to_hash do
aggregation :paper_types do
terms do
@ -55,7 +61,9 @@ class PaperSearch < ActiveRecord::Base
# filter by paper_type
f = Elasticsearch::DSL::Search::Filters::Bool.new
f.must { match_all }
f.must { term paper_type: options[:paper_type] } if options[:paper_type].present?
if options[:paper_type].present?
f.must { term paper_type: options[:paper_type] }
end
filter f.to_hash do
aggregation :originators do
terms do

View file

@ -1,3 +1,5 @@
# frozen_string_literal: true
class User < ActiveRecord::Base
def self.find_or_create_from_auth_hash(hash)
User.find_or_create_by(email: hash['info']['email'])

View file

@ -1,28 +1,30 @@
# frozen_string_literal: true
require 'date'
xml.instruct! :xml, :version => "1.0"
xml.rss :version => "2.0", "xmlns:dc" => "http://purl.org/dc/elements/1.1/" do
xml.instruct! :xml, version: '1.0'
xml.rss :version => '2.0', 'xmlns:dc' => 'http://purl.org/dc/elements/1.1/' do
xml.channel do
xml.title "Search results"
xml.description "Papers matching search criteria"
xml.title 'Search results'
xml.description 'Papers matching search criteria'
@papers.each do |doc|
xml.item do
xml.title doc.name
if !doc.content.blank?
unless doc.content.blank?
xml.description do
xml.cdata! truncate(doc.content.sub("------------------------------- ", ""), length: 768)
xml.cdata! truncate(doc.content.sub('------------------------------- ', ''), length: 768)
end
end
if !doc.published_at.blank?
xml.pubDate DateTime.parse(doc.published_at).utc.strftime("%a, %d %b %Y %H:%M:%S %z")
unless doc.published_at.blank?
xml.pubDate DateTime.parse(doc.published_at).utc.strftime('%a, %d %b %Y %H:%M:%S %z')
end
doc.originator.each do |originator|
xml.dc :creator do
xml.cdata! originator
end
end
if !doc.paper_type.blank?
unless doc.paper_type.blank?
xml.category do
xml.cdata! doc.paper_type
end