mirror of https://github.com/CodeforLeipzig/stadtratmonitor.git
synced 2024-12-22 15:43:14 +01:00

Rubocop autocorrect app directory

parent ec4aa64cfc
commit c08ce6864c

10 changed files with 132 additions and 110 deletions
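A change set like this is what RuboCop's autocorrect mode produces when pointed at the app directory. The exact invocation is not recorded in the commit; a typical command would have been along these lines:

    # assumed invocation, not recorded in the commit
    bundle exec rubocop --auto-correct app/

The hunks below cover the controllers, helpers, models, and the RSS builder view touched by the autocorrect run.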
ApplicationController:

@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 class ApplicationController < ActionController::Base
   # Prevent CSRF attacks by raising an exception.
   # For APIs, you may want to use :null_session instead.
@@ -12,11 +14,11 @@ class ApplicationController < ActionController::Base
   end

   def glossary
-    render action: "glossary"
+    render action: 'glossary'
   end

   def impressum
-    render action: "impressum"
+    render action: 'impressum'
   end

   helper_method :current_user, :signed_in?
GeoController:

@@ -1,6 +1,7 @@
-class GeoController < ApplicationController
+# frozen_string_literal: true

+class GeoController < ApplicationController
   def index
-    render action: "index"
+    render action: 'index'
   end
 end
ImportController:

@@ -1,11 +1,13 @@
+# frozen_string_literal: true
+
 class ImportController < ApplicationController
-  skip_before_action :verify_authenticity_token, :only => [:new_papers_callback]
+  skip_before_action :verify_authenticity_token, only: [:new_papers_callback]

   def new_papers_callback
     require 'open-uri'
-    api_key = Rails.application.config_for(:morph)["key"]
+    api_key = Rails.application.config_for(:morph)['key']
     uri = URI.parse("https://api.morph.io/jrlover/city_council_leipzig_recent_papers/data.json?key=#{api_key}&query=select%20*%20from%20%27data%27")
     Paper.import_from_json(uri.read)
-    render :nothing => true
+    render nothing: true
   end
 end
SearchController:

@@ -1,14 +1,15 @@
-SearchFacet = Struct.new("SearchFacet", :term, :count) do
+# frozen_string_literal: true
+
+SearchFacet = Struct.new('SearchFacet', :term, :count) do
   def term_with_count
     "#{term} (#{count})"
   end
 end

-
 class SearchController < ApplicationController
   def index
     @search_definition = PaperSearch.new(search_params)
-    @search_definition.sort_by ||= "date"
+    @search_definition.sort_by ||= 'date'

     execute_search
   end
@@ -16,7 +17,7 @@ class SearchController < ApplicationController
   def show
     @search_definition = PaperSearch.find params[:id]
     execute_search
-    render action: "index"
+    render action: 'index'
   end

   private
@@ -24,35 +25,35 @@ class SearchController < ApplicationController
   def execute_search
     @response = Paper.search(@search_definition.to_definition)
     @papers = @response.page(params[:page]).results
-    @sub = Hash.new
+    @sub = {}
     @papers.each do |paper|
-      unless paper.reference.nil? && paper.reference.contains("-")
-        segments = paper.reference.split("-")
-        id = ((paper.reference.start_with?("VI-") || paper.reference.start_with?("VII-")) && segments.count > 2 ?
-          segments[2] : segments[1])
-        escaped_chars = Regexp.escape('\\+-*:()[]{}&!?^|\/')
-        sanitized_id = id.gsub(/([#{escaped_chars}])/, '\\\\\1')
-        ['AND', 'OR', 'NOT'].each do |reserved|
-          escaped_reserved = reserved.split('').map { |c| "\\#{c}" }.join('')
-          sanitized_id = sanitized_id.gsub('/\s*\b(#{reserved.upcase})\b\s*/',
-            " #{escaped_reserved} ")
-        end
-        @sub_search_definition = Elasticsearch::DSL::Search.search do
-          query do
-            query_string do
-              query "*" + sanitized_id + "*"
-              fields ["reference"]
-            end
-          end
+      next if paper.reference.nil? && paper.reference.contains('-')

-          sort do
-            by :published_at, order: 'desc'
-            by :reference, order: 'desc'
+      segments = paper.reference.split('-')
+      id = ((paper.reference.start_with?('VI-') || paper.reference.start_with?('VII-')) && segments.count > 2 ?
+        segments[2] : segments[1])
+      escaped_chars = Regexp.escape('\\+-*:()[]{}&!?^|\/')
+      sanitized_id = id.gsub(/([#{escaped_chars}])/, '\\\\\1')
+      %w[AND OR NOT].each do |reserved|
+        escaped_reserved = reserved.split('').map { |c| "\\#{c}" }.join('')
+        sanitized_id = sanitized_id.gsub('/\s*\b(#{reserved.upcase})\b\s*/',
+                                         " #{escaped_reserved} ")
+      end
+      @sub_search_definition = Elasticsearch::DSL::Search.search do
+        query do
+          query_string do
+            query '*' + sanitized_id + '*'
+            fields ['reference']
           end
         end
-        @sub_papers = Paper.search(@sub_search_definition)
-        @sub[paper.reference] = @sub_papers
+
+        sort do
+          by :published_at, order: 'desc'
+          by :reference, order: 'desc'
+        end
       end
+      @sub_papers = Paper.search(@sub_search_definition)
+      @sub[paper.reference] = @sub_papers
     end
     @paper_type_facets = extract_facets('paper_types')
     @originator_facets = extract_facets('originators')
@@ -63,9 +64,8 @@ class SearchController < ApplicationController
   end

   def extract_facets(name)
-    @response.
-      response['aggregations'][name.to_s][name.to_s]['buckets'].
-      map {|m| SearchFacet.new(m['key'], m['doc_count'])}
+    @response
+      .response['aggregations'][name.to_s][name.to_s]['buckets']
+      .map { |m| SearchFacet.new(m['key'], m['doc_count']) }
   end
-
 end
SearchHelper:

@@ -1,26 +1,27 @@
+# frozen_string_literal: true
+
 module SearchHelper
   def facet_list(facets)
     return unless facets.present?
+
     content_tag(:ul) do
       facets.each do |facet|
         concat content_tag(:li,
                            "#{facet.term} (#{facet.count})",
-                           class: "facet"
-        )
+                           class: 'facet')
       end
     end
   end

   def filter_select(builder, name, desc, facets, selected)
     capture do
-      concat(builder.label name, desc)
+      concat(builder.label(name, desc))
       concat(
-        builder.select name,
+        builder.select(name,
                        options_from_collection_for_select(facets, :term, :term_with_count, selected),
                        { include_blank: true },
-                       { onchange: "this.form.submit();" }
+                       { onchange: 'this.form.submit();' })
       )
     end
   end
-
 end
Importer:

@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 class Importer < ActiveRecord::Base
   validates :url, presence: true, uniqueness: true
 end
Paper:

@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'elasticsearch/model'
 require 'json'
 require 'parseable_date_validator'
@@ -9,7 +11,7 @@ class Paper < ActiveRecord::Base
   validates :name, presence: true, length: { maximum: 1000 }
   validates :url, presence: true,
                   length: { maximum: 1000 },
-                  uniqueness: true, # TODO use unique index instead
+                  uniqueness: true, # TODO: use unique index instead
                   url: true
   validates :reference, presence: true, length: { maximum: 100 }
   validates :body, presence: true, length: { maximum: 100 }
@@ -19,45 +21,46 @@ class Paper < ActiveRecord::Base
   validates :published_at, presence: true, parseable_date: true
   validates :resolution, length: { maximum: 30_000 }

-  index_name ['srm', Rails.env, self.base_class.to_s.pluralize.underscore].join('_')
+  index_name ['srm', Rails.env, base_class.to_s.pluralize.underscore].join('_')

   settings index: {
     number_of_shards: 1,
     analysis: {
       filter: {
         german_stop: {
-          type: "stop",
-          stopwords: "_german_"
+          type: 'stop',
+          stopwords: '_german_'
         },
         german_stemmer: {
-          type: "stemmer",
-          language: "light_german"
+          type: 'stemmer',
+          language: 'light_german'
         },
         german_decompounder: {
-          type: "hyphenation_decompounder",
-          word_list_path: "analysis/dictionary-de.txt",
-          hyphenation_patterns_path: "analysis/de_DR.xml",
+          type: 'hyphenation_decompounder',
+          word_list_path: 'analysis/dictionary-de.txt',
+          hyphenation_patterns_path: 'analysis/de_DR.xml',
           only_longest_match: true,
           min_subword_size: 4
-        },
+        }
       },
       analyzer: {
         german: {
-          tokenizer: "standard",
-          filter: [
-            "lowercase",
-            "german_stop",
-            "german_decompounder",
-            "german_normalization",
-            "german_stemmer"
+          tokenizer: 'standard',
+          filter: %w[
+            lowercase
+            german_stop
+            german_decompounder
+            german_normalization
+            german_stemmer
           ]
         }
       }
     }
-  } do mappings dynamic: false do
-      indexes :name, type: :text, analyzer: "german"
-      indexes :content, type: :text, analyzer: "german"
-      indexes :resolution, type: :text, analyzer: "german"
+  } do
+    mappings dynamic: false do
+      indexes :name, type: :text, analyzer: 'german'
+      indexes :content, type: :text, analyzer: 'german'
+      indexes :resolution, type: :text, analyzer: 'german'
       indexes :reference, type: :keyword, index: true
       indexes :paper_type, type: :keyword, index: true
       indexes :published_at, type: :date, index: true
@@ -66,10 +69,10 @@ class Paper < ActiveRecord::Base
   end

   def split_originator
-    originator.split(/\d\.\s/).reject {|s| s.blank?} || originator
+    originator.split(/\d\.\s/).reject(&:blank?) || originator
   end

-  def as_indexed_json(options={})
+  def as_indexed_json(_options = {})
     as_json.merge(originator: split_originator)
   end

@@ -86,7 +89,7 @@ class Paper < ActiveRecord::Base
         paper_type: record['paper_type'],
         published_at: record['published_at'],
         reference: record['reference'],
-        url: record['url'],
+        url: record['url']
       }
       record = find_or_initialize_by(url: attributes[:url])
       record.update_attributes(attributes)
@@ -104,8 +107,7 @@ class Paper < ActiveRecord::Base

     def reset_index!
       __elasticsearch__.create_index! force: true
-      all.each {|p| p.__elasticsearch__.index_document }
+      all.each { |p| p.__elasticsearch__.index_document }
     end
-
   end
 end
PaperSearch:

@@ -1,39 +1,43 @@
-class PaperSearch < ActiveRecord::Base
+# frozen_string_literal: true

+class PaperSearch < ActiveRecord::Base
   def to_definition
-    options = {paper_type: paper_type, originator: originator, sort_by: sort_by}
+    options = { paper_type: paper_type, originator: originator, sort_by: sort_by }
     PaperSearch.definition(query, options)
   end

-  def self.definition(q, options={})
+  def self.definition(q, options = {})
     Elasticsearch::DSL::Search.search do
-
       sort do
-        if options[:sort_by] == 'score'
-          by '_score'
-        end
+        by '_score' if options[:sort_by] == 'score'
         by :published_at, order: 'desc'
       end

       query do
         # search query
-        unless q.blank?
+        if q.blank?
+          match_all
+        else
           multi_match do
             query q
-            fields ["name", "content"]
+            fields %w[name content]
           end
-        else
-          match_all
         end
       end

       # apply filter after aggregations
       post_filter do
         bool do
-          must { term paper_type: options[:paper_type] } if options[:paper_type].present?
-          must { term originator: options[:originator] } if options[:originator].present?
+          if options[:paper_type].present?
+            must { term paper_type: options[:paper_type] }
+          end
+          if options[:originator].present?
+            must { term originator: options[:originator] }
+          end
           # catchall when no filters set
-          must { match_all } unless (options[:paper_type].present? || options[:originator].present?)
+          unless options[:paper_type].present? || options[:originator].present?
+            must { match_all }
+          end
         end
       end

@@ -41,7 +45,9 @@ class PaperSearch < ActiveRecord::Base
       # filter by originator
       f = Elasticsearch::DSL::Search::Filters::Bool.new
       f.must { match_all }
-      f.must { term originator: options[:originator] } if options[:originator].present?
+      if options[:originator].present?
+        f.must { term originator: options[:originator] }
+      end
       filter f.to_hash do
         aggregation :paper_types do
           terms do
@@ -55,7 +61,9 @@ class PaperSearch < ActiveRecord::Base
       # filter by paper_type
       f = Elasticsearch::DSL::Search::Filters::Bool.new
       f.must { match_all }
-      f.must { term paper_type: options[:paper_type] } if options[:paper_type].present?
+      if options[:paper_type].present?
+        f.must { term paper_type: options[:paper_type] }
+      end
       filter f.to_hash do
         aggregation :originators do
           terms do
User:

@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 class User < ActiveRecord::Base
   def self.find_or_create_from_auth_hash(hash)
     User.find_or_create_by(email: hash['info']['email'])
Search results RSS builder view:

@@ -1,31 +1,33 @@
+# frozen_string_literal: true
+
 require 'date'

-xml.instruct! :xml, :version => "1.0"
-xml.rss :version => "2.0", "xmlns:dc" => "http://purl.org/dc/elements/1.1/" do
+xml.instruct! :xml, version: '1.0'
+xml.rss :version => '2.0', 'xmlns:dc' => 'http://purl.org/dc/elements/1.1/' do
   xml.channel do
-    xml.title "Search results"
-    xml.description "Papers matching search criteria"
+    xml.title 'Search results'
+    xml.description 'Papers matching search criteria'

     @papers.each do |doc|
       xml.item do
         xml.title doc.name
-        if !doc.content.blank?
+        unless doc.content.blank?
           xml.description do
-            xml.cdata! truncate(doc.content.sub("------------------------------- ", ""), length: 768)
+            xml.cdata! truncate(doc.content.sub('------------------------------- ', ''), length: 768)
           end
-        end
-        if !doc.published_at.blank?
-          xml.pubDate DateTime.parse(doc.published_at).utc.strftime("%a, %d %b %Y %H:%M:%S %z")
         end
+        unless doc.published_at.blank?
+          xml.pubDate DateTime.parse(doc.published_at).utc.strftime('%a, %d %b %Y %H:%M:%S %z')
+        end
         doc.originator.each do |originator|
           xml.dc :creator do
             xml.cdata! originator
           end
         end
-        if !doc.paper_type.blank?
+        unless doc.paper_type.blank?
           xml.category do
             xml.cdata! doc.paper_type
           end
         end
         xml.link doc.url
         xml.guid doc.url