mirror of
https://github.com/CodeforLeipzig/stadtratmonitor.git
synced 2024-12-22 15:43:14 +01:00
Merge pull request #44 from CodeforLeipzig/fix-rubocop-offenses
Fix rubocop offenses
This commit is contained in:
commit
155b57d58a
63 changed files with 604 additions and 613 deletions
25
.rubocop.yml
Normal file
25
.rubocop.yml
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
inherit_from: .rubocop_todo.yml
|
||||||
|
|
||||||
|
AllCops:
|
||||||
|
TargetRubyVersion: 2.5
|
||||||
|
Exclude:
|
||||||
|
- 'bin/**/*'
|
||||||
|
- 'db/schema.rb'
|
||||||
|
- 'vendor/**/*'
|
||||||
|
|
||||||
|
Style/Documentation:
|
||||||
|
Enabled: false
|
||||||
|
|
||||||
|
Style/FrozenStringLiteralComment:
|
||||||
|
Enabled: true
|
||||||
|
Exclude:
|
||||||
|
- 'config.ru'
|
||||||
|
- 'Gemfile'
|
||||||
|
- 'Rakefile'
|
||||||
|
- 'app/views/**/*'
|
||||||
|
- 'config/**/*'
|
||||||
|
- 'db/**/*'
|
||||||
|
- 'lib/tasks/**/*'
|
||||||
|
|
||||||
|
Style/IfUnlessModifier:
|
||||||
|
Enabled: false
|
73
.rubocop_todo.yml
Normal file
73
.rubocop_todo.yml
Normal file
|
@ -0,0 +1,73 @@
|
||||||
|
# This configuration was generated by
|
||||||
|
# `rubocop --auto-gen-config`
|
||||||
|
# on 2020-03-09 14:56:53 +0100 using RuboCop version 0.80.1.
|
||||||
|
# The point is for the user to remove these configuration records
|
||||||
|
# one by one as the offenses are removed from the code base.
|
||||||
|
# Note that changes in the inspected code, or installation of new
|
||||||
|
# versions of RuboCop, may require this file to be generated again.
|
||||||
|
|
||||||
|
# Offense count: 1
|
||||||
|
Lint/InterpolationCheck:
|
||||||
|
Exclude:
|
||||||
|
- 'app/controllers/search_controller.rb'
|
||||||
|
|
||||||
|
# Offense count: 1
|
||||||
|
# Cop supports --auto-correct.
|
||||||
|
Lint/SendWithMixinArgument:
|
||||||
|
Exclude:
|
||||||
|
- 'config/initializers/kaminari_config.rb'
|
||||||
|
|
||||||
|
# Offense count: 4
|
||||||
|
Metrics/AbcSize:
|
||||||
|
Max: 58
|
||||||
|
|
||||||
|
# Offense count: 8
|
||||||
|
# Configuration parameters: CountComments, ExcludedMethods.
|
||||||
|
# ExcludedMethods: refine
|
||||||
|
Metrics/BlockLength:
|
||||||
|
Max: 203
|
||||||
|
|
||||||
|
# Offense count: 1
|
||||||
|
Metrics/CyclomaticComplexity:
|
||||||
|
Max: 9
|
||||||
|
|
||||||
|
# Offense count: 4
|
||||||
|
# Configuration parameters: CountComments, ExcludedMethods.
|
||||||
|
Metrics/MethodLength:
|
||||||
|
Max: 57
|
||||||
|
|
||||||
|
# Offense count: 1
|
||||||
|
Metrics/PerceivedComplexity:
|
||||||
|
Max: 10
|
||||||
|
|
||||||
|
# Offense count: 1
|
||||||
|
# Configuration parameters: MinNameLength, AllowNamesEndingInNumbers, AllowedNames, ForbiddenNames.
|
||||||
|
# AllowedNames: io, id, to, by, on, in, at, ip, db, os, pp
|
||||||
|
Naming/MethodParameterName:
|
||||||
|
Exclude:
|
||||||
|
- 'app/models/paper_search.rb'
|
||||||
|
|
||||||
|
# Offense count: 2
|
||||||
|
# Configuration parameters: EnforcedStyle.
|
||||||
|
# SupportedStyles: snake_case, normalcase, non_integer
|
||||||
|
Naming/VariableNumber:
|
||||||
|
Exclude:
|
||||||
|
- 'spec/features/search_filters_spec.rb'
|
||||||
|
|
||||||
|
# Offense count: 1
|
||||||
|
Style/DoubleNegation:
|
||||||
|
Exclude:
|
||||||
|
- 'app/controllers/application_controller.rb'
|
||||||
|
|
||||||
|
# Offense count: 2
|
||||||
|
Style/MultilineTernaryOperator:
|
||||||
|
Exclude:
|
||||||
|
- 'app/controllers/search_controller.rb'
|
||||||
|
- 'spec/features/basic_search_spec.rb'
|
||||||
|
|
||||||
|
# Offense count: 57
|
||||||
|
# Cop supports --auto-correct.
|
||||||
|
# Configuration parameters: AutoCorrect, AllowHeredoc, AllowURI, URISchemes, IgnoreCopDirectives, IgnoredPatterns.
|
||||||
|
# URISchemes: http, https
|
||||||
|
Layout/LineLength:
|
||||||
|
Max: 148
|
1
Gemfile
1
Gemfile
|
@ -59,6 +59,7 @@ group :development, :test do
|
||||||
gem 'capybara'
|
gem 'capybara'
|
||||||
gem 'puma'
|
gem 'puma'
|
||||||
gem 'launchy'
|
gem 'launchy'
|
||||||
|
gem 'rubocop'
|
||||||
end
|
end
|
||||||
|
|
||||||
group :test do
|
group :test do
|
||||||
|
|
|
@ -329,6 +329,7 @@ DEPENDENCIES
|
||||||
rails-controller-testing
|
rails-controller-testing
|
||||||
rails-i18n
|
rails-i18n
|
||||||
rspec-rails (~> 3.0)
|
rspec-rails (~> 3.0)
|
||||||
|
rubocop
|
||||||
rubocop-faker
|
rubocop-faker
|
||||||
sass-rails
|
sass-rails
|
||||||
sdoc (~> 0.4.0)
|
sdoc (~> 0.4.0)
|
||||||
|
|
20
Guardfile
20
Guardfile
|
@ -1,3 +1,5 @@
|
||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
# A sample Guardfile
|
# A sample Guardfile
|
||||||
# More info at https://github.com/guard/guard#readme
|
# More info at https://github.com/guard/guard#readme
|
||||||
|
|
||||||
|
@ -24,8 +26,8 @@
|
||||||
# * zeus: 'zeus rspec' (requires the server to be started separately)
|
# * zeus: 'zeus rspec' (requires the server to be started separately)
|
||||||
# * 'just' rspec: 'rspec'
|
# * 'just' rspec: 'rspec'
|
||||||
|
|
||||||
guard :rspec, cmd: "bin/rspec" do
|
guard :rspec, cmd: 'bin/rspec' do
|
||||||
require "guard/rspec/dsl"
|
require 'guard/rspec/dsl'
|
||||||
dsl = Guard::RSpec::Dsl.new(self)
|
dsl = Guard::RSpec::Dsl.new(self)
|
||||||
|
|
||||||
# Feel free to open issues for suggestions and improvements
|
# Feel free to open issues for suggestions and improvements
|
||||||
|
@ -41,15 +43,15 @@ guard :rspec, cmd: "bin/rspec" do
|
||||||
dsl.watch_spec_files_for(ruby.lib_files)
|
dsl.watch_spec_files_for(ruby.lib_files)
|
||||||
|
|
||||||
# Rails files
|
# Rails files
|
||||||
rails = dsl.rails(view_extensions: %w(erb haml slim))
|
rails = dsl.rails(view_extensions: %w[erb haml slim])
|
||||||
dsl.watch_spec_files_for(rails.app_files)
|
dsl.watch_spec_files_for(rails.app_files)
|
||||||
dsl.watch_spec_files_for(rails.views)
|
dsl.watch_spec_files_for(rails.views)
|
||||||
|
|
||||||
watch(rails.controllers) do |m|
|
watch(rails.controllers) do |m|
|
||||||
[
|
[
|
||||||
rspec.spec.("routing/#{m[1]}_routing"),
|
rspec.spec.call("routing/#{m[1]}_routing"),
|
||||||
rspec.spec.("controllers/#{m[1]}_controller"),
|
rspec.spec.call("controllers/#{m[1]}_controller"),
|
||||||
rspec.spec.("acceptance/#{m[1]}")
|
rspec.spec.call("acceptance/#{m[1]}")
|
||||||
]
|
]
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -59,12 +61,12 @@ guard :rspec, cmd: "bin/rspec" do
|
||||||
watch(rails.app_controller) { "#{rspec.spec_dir}/controllers" }
|
watch(rails.app_controller) { "#{rspec.spec_dir}/controllers" }
|
||||||
|
|
||||||
# Capybara features specs
|
# Capybara features specs
|
||||||
watch(rails.view_dirs) { |m| rspec.spec.("features/#{m[1]}") }
|
watch(rails.view_dirs) { |m| rspec.spec.call("features/#{m[1]}") }
|
||||||
watch(rails.layouts) { |m| rspec.spec.("features/#{m[1]}") }
|
watch(rails.layouts) { |m| rspec.spec.call("features/#{m[1]}") }
|
||||||
|
|
||||||
# Turnip features and steps
|
# Turnip features and steps
|
||||||
watch(%r{^spec/acceptance/(.+)\.feature$})
|
watch(%r{^spec/acceptance/(.+)\.feature$})
|
||||||
watch(%r{^spec/acceptance/steps/(.+)_steps\.rb$}) do |m|
|
watch(%r{^spec/acceptance/steps/(.+)_steps\.rb$}) do |m|
|
||||||
Dir[File.join("**/#{m[1]}.feature")][0] || "spec/acceptance"
|
Dir[File.join("**/#{m[1]}.feature")][0] || 'spec/acceptance'
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
2
Rakefile
2
Rakefile
|
@ -1,6 +1,6 @@
|
||||||
# Add your own tasks in files placed in lib/tasks ending in .rake,
|
# Add your own tasks in files placed in lib/tasks ending in .rake,
|
||||||
# for example lib/tasks/capistrano.rake, and they will automatically be available to Rake.
|
# for example lib/tasks/capistrano.rake, and they will automatically be available to Rake.
|
||||||
|
|
||||||
require File.expand_path('../config/application', __FILE__)
|
require File.expand_path('config/application', __dir__)
|
||||||
|
|
||||||
Rails.application.load_tasks
|
Rails.application.load_tasks
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
class ApplicationController < ActionController::Base
|
class ApplicationController < ActionController::Base
|
||||||
# Prevent CSRF attacks by raising an exception.
|
# Prevent CSRF attacks by raising an exception.
|
||||||
# For APIs, you may want to use :null_session instead.
|
# For APIs, you may want to use :null_session instead.
|
||||||
|
@ -12,11 +14,11 @@ class ApplicationController < ActionController::Base
|
||||||
end
|
end
|
||||||
|
|
||||||
def glossary
|
def glossary
|
||||||
render action: "glossary"
|
render action: 'glossary'
|
||||||
end
|
end
|
||||||
|
|
||||||
def impressum
|
def impressum
|
||||||
render action: "impressum"
|
render action: 'impressum'
|
||||||
end
|
end
|
||||||
|
|
||||||
helper_method :current_user, :signed_in?
|
helper_method :current_user, :signed_in?
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
class GeoController < ApplicationController
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
class GeoController < ApplicationController
|
||||||
def index
|
def index
|
||||||
render action: "index"
|
render action: 'index'
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,11 +1,13 @@
|
||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
class ImportController < ApplicationController
|
class ImportController < ApplicationController
|
||||||
skip_before_action :verify_authenticity_token, :only => [:new_papers_callback]
|
skip_before_action :verify_authenticity_token, only: [:new_papers_callback]
|
||||||
|
|
||||||
def new_papers_callback
|
def new_papers_callback
|
||||||
require 'open-uri'
|
require 'open-uri'
|
||||||
api_key = Rails.application.config_for(:morph)["key"]
|
api_key = Rails.application.config_for(:morph)['key']
|
||||||
uri = URI.parse("https://api.morph.io/jrlover/city_council_leipzig_recent_papers/data.json?key=#{api_key}&query=select%20*%20from%20%27data%27")
|
uri = URI.parse("https://api.morph.io/jrlover/city_council_leipzig_recent_papers/data.json?key=#{api_key}&query=select%20*%20from%20%27data%27")
|
||||||
Paper.import_from_json(uri.read)
|
Paper.import_from_json(uri.read)
|
||||||
render :nothing => true
|
render nothing: true
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,14 +1,15 @@
|
||||||
SearchFacet = Struct.new("SearchFacet", :term, :count) do
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
SearchFacet = Struct.new('SearchFacet', :term, :count) do
|
||||||
def term_with_count
|
def term_with_count
|
||||||
"#{term} (#{count})"
|
"#{term} (#{count})"
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
||||||
class SearchController < ApplicationController
|
class SearchController < ApplicationController
|
||||||
def index
|
def index
|
||||||
@search_definition = PaperSearch.new(search_params)
|
@search_definition = PaperSearch.new(search_params)
|
||||||
@search_definition.sort_by ||= "date"
|
@search_definition.sort_by ||= 'date'
|
||||||
|
|
||||||
execute_search
|
execute_search
|
||||||
end
|
end
|
||||||
|
@ -16,7 +17,7 @@ class SearchController < ApplicationController
|
||||||
def show
|
def show
|
||||||
@search_definition = PaperSearch.find params[:id]
|
@search_definition = PaperSearch.find params[:id]
|
||||||
execute_search
|
execute_search
|
||||||
render action: "index"
|
render action: 'index'
|
||||||
end
|
end
|
||||||
|
|
||||||
private
|
private
|
||||||
|
@ -24,35 +25,35 @@ class SearchController < ApplicationController
|
||||||
def execute_search
|
def execute_search
|
||||||
@response = Paper.search(@search_definition.to_definition)
|
@response = Paper.search(@search_definition.to_definition)
|
||||||
@papers = @response.page(params[:page]).results
|
@papers = @response.page(params[:page]).results
|
||||||
@sub = Hash.new
|
@sub = {}
|
||||||
@papers.each do |paper|
|
@papers.each do |paper|
|
||||||
unless paper.reference.nil? && paper.reference.contains("-")
|
next if paper.reference.nil? && paper.reference.contains('-')
|
||||||
segments = paper.reference.split("-")
|
|
||||||
id = ((paper.reference.start_with?("VI-") || paper.reference.start_with?("VII-")) && segments.count > 2 ?
|
|
||||||
segments[2] : segments[1])
|
|
||||||
escaped_chars = Regexp.escape('\\+-*:()[]{}&!?^|\/')
|
|
||||||
sanitized_id = id.gsub(/([#{escaped_chars}])/, '\\\\\1')
|
|
||||||
['AND', 'OR', 'NOT'].each do |reserved|
|
|
||||||
escaped_reserved = reserved.split('').map { |c| "\\#{c}" }.join('')
|
|
||||||
sanitized_id = sanitized_id.gsub('/\s*\b(#{reserved.upcase})\b\s*/',
|
|
||||||
" #{escaped_reserved} ")
|
|
||||||
end
|
|
||||||
@sub_search_definition = Elasticsearch::DSL::Search.search do
|
|
||||||
query do
|
|
||||||
query_string do
|
|
||||||
query "*" + sanitized_id + "*"
|
|
||||||
fields ["reference"]
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
sort do
|
segments = paper.reference.split('-')
|
||||||
by :published_at, order: 'desc'
|
id = ((paper.reference.start_with?('VI-') || paper.reference.start_with?('VII-')) && segments.count > 2 ?
|
||||||
by :reference, order: 'desc'
|
segments[2] : segments[1])
|
||||||
|
escaped_chars = Regexp.escape('\\+-*:()[]{}&!?^|\/')
|
||||||
|
sanitized_id = id.gsub(/([#{escaped_chars}])/, '\\\\\1')
|
||||||
|
%w[AND OR NOT].each do |reserved|
|
||||||
|
escaped_reserved = reserved.split('').map { |c| "\\#{c}" }.join('')
|
||||||
|
sanitized_id = sanitized_id.gsub('/\s*\b(#{reserved.upcase})\b\s*/',
|
||||||
|
" #{escaped_reserved} ")
|
||||||
|
end
|
||||||
|
@sub_search_definition = Elasticsearch::DSL::Search.search do
|
||||||
|
query do
|
||||||
|
query_string do
|
||||||
|
query '*' + sanitized_id + '*'
|
||||||
|
fields ['reference']
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
@sub_papers = Paper.search(@sub_search_definition)
|
|
||||||
@sub[paper.reference] = @sub_papers
|
sort do
|
||||||
|
by :published_at, order: 'desc'
|
||||||
|
by :reference, order: 'desc'
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
@sub_papers = Paper.search(@sub_search_definition)
|
||||||
|
@sub[paper.reference] = @sub_papers
|
||||||
end
|
end
|
||||||
@paper_type_facets = extract_facets('paper_types')
|
@paper_type_facets = extract_facets('paper_types')
|
||||||
@originator_facets = extract_facets('originators')
|
@originator_facets = extract_facets('originators')
|
||||||
|
@ -63,9 +64,8 @@ class SearchController < ApplicationController
|
||||||
end
|
end
|
||||||
|
|
||||||
def extract_facets(name)
|
def extract_facets(name)
|
||||||
@response.
|
@response
|
||||||
response['aggregations'][name.to_s][name.to_s]['buckets'].
|
.response['aggregations'][name.to_s][name.to_s]['buckets']
|
||||||
map {|m| SearchFacet.new(m['key'], m['doc_count'])}
|
.map { |m| SearchFacet.new(m['key'], m['doc_count']) }
|
||||||
end
|
end
|
||||||
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,26 +1,27 @@
|
||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
module SearchHelper
|
module SearchHelper
|
||||||
def facet_list(facets)
|
def facet_list(facets)
|
||||||
return unless facets.present?
|
return unless facets.present?
|
||||||
|
|
||||||
content_tag(:ul) do
|
content_tag(:ul) do
|
||||||
facets.each do |facet|
|
facets.each do |facet|
|
||||||
concat content_tag(:li,
|
concat content_tag(:li,
|
||||||
"#{facet.term} (#{facet.count})",
|
"#{facet.term} (#{facet.count})",
|
||||||
class: "facet"
|
class: 'facet')
|
||||||
)
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def filter_select(builder, name, desc, facets, selected)
|
def filter_select(builder, name, desc, facets, selected)
|
||||||
capture do
|
capture do
|
||||||
concat(builder.label name, desc)
|
concat(builder.label(name, desc))
|
||||||
concat(
|
concat(
|
||||||
builder.select name,
|
builder.select(name,
|
||||||
options_from_collection_for_select(facets, :term, :term_with_count, selected),
|
options_from_collection_for_select(facets, :term, :term_with_count, selected),
|
||||||
{ include_blank: true },
|
{ include_blank: true },
|
||||||
{ onchange: "this.form.submit();" }
|
{ onchange: 'this.form.submit();' })
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
class Importer < ActiveRecord::Base
|
class Importer < ActiveRecord::Base
|
||||||
validates :url, presence: true, uniqueness: true
|
validates :url, presence: true, uniqueness: true
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
require 'elasticsearch/model'
|
require 'elasticsearch/model'
|
||||||
require 'json'
|
require 'json'
|
||||||
require 'parseable_date_validator'
|
require 'parseable_date_validator'
|
||||||
|
@ -9,7 +11,7 @@ class Paper < ActiveRecord::Base
|
||||||
validates :name, presence: true, length: { maximum: 1000 }
|
validates :name, presence: true, length: { maximum: 1000 }
|
||||||
validates :url, presence: true,
|
validates :url, presence: true,
|
||||||
length: { maximum: 1000 },
|
length: { maximum: 1000 },
|
||||||
uniqueness: true, # TODO use unique index instead
|
uniqueness: true, # TODO: use unique index instead
|
||||||
url: true
|
url: true
|
||||||
validates :reference, presence: true, length: { maximum: 100 }
|
validates :reference, presence: true, length: { maximum: 100 }
|
||||||
validates :body, presence: true, length: { maximum: 100 }
|
validates :body, presence: true, length: { maximum: 100 }
|
||||||
|
@ -19,45 +21,46 @@ class Paper < ActiveRecord::Base
|
||||||
validates :published_at, presence: true, parseable_date: true
|
validates :published_at, presence: true, parseable_date: true
|
||||||
validates :resolution, length: { maximum: 30_000 }
|
validates :resolution, length: { maximum: 30_000 }
|
||||||
|
|
||||||
index_name ['srm', Rails.env, self.base_class.to_s.pluralize.underscore].join('_')
|
index_name ['srm', Rails.env, base_class.to_s.pluralize.underscore].join('_')
|
||||||
|
|
||||||
settings index: {
|
settings index: {
|
||||||
number_of_shards: 1,
|
number_of_shards: 1,
|
||||||
analysis: {
|
analysis: {
|
||||||
filter: {
|
filter: {
|
||||||
german_stop: {
|
german_stop: {
|
||||||
type: "stop",
|
type: 'stop',
|
||||||
stopwords: "_german_"
|
stopwords: '_german_'
|
||||||
},
|
},
|
||||||
german_stemmer: {
|
german_stemmer: {
|
||||||
type: "stemmer",
|
type: 'stemmer',
|
||||||
language: "light_german"
|
language: 'light_german'
|
||||||
},
|
},
|
||||||
german_decompounder: {
|
german_decompounder: {
|
||||||
type: "hyphenation_decompounder",
|
type: 'hyphenation_decompounder',
|
||||||
word_list_path: "analysis/dictionary-de.txt",
|
word_list_path: 'analysis/dictionary-de.txt',
|
||||||
hyphenation_patterns_path: "analysis/de_DR.xml",
|
hyphenation_patterns_path: 'analysis/de_DR.xml',
|
||||||
only_longest_match: true,
|
only_longest_match: true,
|
||||||
min_subword_size: 4
|
min_subword_size: 4
|
||||||
},
|
}
|
||||||
},
|
},
|
||||||
analyzer: {
|
analyzer: {
|
||||||
german: {
|
german: {
|
||||||
tokenizer: "standard",
|
tokenizer: 'standard',
|
||||||
filter: [
|
filter: %w[
|
||||||
"lowercase",
|
lowercase
|
||||||
"german_stop",
|
german_stop
|
||||||
"german_decompounder",
|
german_decompounder
|
||||||
"german_normalization",
|
german_normalization
|
||||||
"german_stemmer"
|
german_stemmer
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} do mappings dynamic: false do
|
} do
|
||||||
indexes :name, type: :text, analyzer: "german"
|
mappings dynamic: false do
|
||||||
indexes :content, type: :text, analyzer: "german"
|
indexes :name, type: :text, analyzer: 'german'
|
||||||
indexes :resolution, type: :text, analyzer: "german"
|
indexes :content, type: :text, analyzer: 'german'
|
||||||
|
indexes :resolution, type: :text, analyzer: 'german'
|
||||||
indexes :reference, type: :keyword, index: true
|
indexes :reference, type: :keyword, index: true
|
||||||
indexes :paper_type, type: :keyword, index: true
|
indexes :paper_type, type: :keyword, index: true
|
||||||
indexes :published_at, type: :date, index: true
|
indexes :published_at, type: :date, index: true
|
||||||
|
@ -66,10 +69,10 @@ class Paper < ActiveRecord::Base
|
||||||
end
|
end
|
||||||
|
|
||||||
def split_originator
|
def split_originator
|
||||||
originator.split(/\d\.\s/).reject {|s| s.blank?} || originator
|
originator.split(/\d\.\s/).reject(&:blank?) || originator
|
||||||
end
|
end
|
||||||
|
|
||||||
def as_indexed_json(options={})
|
def as_indexed_json(_options = {})
|
||||||
as_json.merge(originator: split_originator)
|
as_json.merge(originator: split_originator)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -86,7 +89,7 @@ class Paper < ActiveRecord::Base
|
||||||
paper_type: record['paper_type'],
|
paper_type: record['paper_type'],
|
||||||
published_at: record['published_at'],
|
published_at: record['published_at'],
|
||||||
reference: record['reference'],
|
reference: record['reference'],
|
||||||
url: record['url'],
|
url: record['url']
|
||||||
}
|
}
|
||||||
record = find_or_initialize_by(url: attributes[:url])
|
record = find_or_initialize_by(url: attributes[:url])
|
||||||
record.update_attributes(attributes)
|
record.update_attributes(attributes)
|
||||||
|
@ -104,8 +107,7 @@ class Paper < ActiveRecord::Base
|
||||||
|
|
||||||
def reset_index!
|
def reset_index!
|
||||||
__elasticsearch__.create_index! force: true
|
__elasticsearch__.create_index! force: true
|
||||||
all.each {|p| p.__elasticsearch__.index_document }
|
all.each { |p| p.__elasticsearch__.index_document }
|
||||||
end
|
end
|
||||||
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,39 +1,43 @@
|
||||||
class PaperSearch < ActiveRecord::Base
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
class PaperSearch < ActiveRecord::Base
|
||||||
def to_definition
|
def to_definition
|
||||||
options = {paper_type: paper_type, originator: originator, sort_by: sort_by}
|
options = { paper_type: paper_type, originator: originator, sort_by: sort_by }
|
||||||
PaperSearch.definition(query, options)
|
PaperSearch.definition(query, options)
|
||||||
end
|
end
|
||||||
|
|
||||||
def self.definition(q, options={})
|
def self.definition(q, options = {})
|
||||||
Elasticsearch::DSL::Search.search do
|
Elasticsearch::DSL::Search.search do
|
||||||
|
|
||||||
sort do
|
sort do
|
||||||
if options[:sort_by] == 'score'
|
by '_score' if options[:sort_by] == 'score'
|
||||||
by '_score'
|
|
||||||
end
|
|
||||||
by :published_at, order: 'desc'
|
by :published_at, order: 'desc'
|
||||||
end
|
end
|
||||||
|
|
||||||
query do
|
query do
|
||||||
# search query
|
# search query
|
||||||
unless q.blank?
|
if q.blank?
|
||||||
|
match_all
|
||||||
|
else
|
||||||
multi_match do
|
multi_match do
|
||||||
query q
|
query q
|
||||||
fields ["name", "content"]
|
fields %w[name content]
|
||||||
end
|
end
|
||||||
else
|
|
||||||
match_all
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
# apply filter after aggregations
|
# apply filter after aggregations
|
||||||
post_filter do
|
post_filter do
|
||||||
bool do
|
bool do
|
||||||
must { term paper_type: options[:paper_type] } if options[:paper_type].present?
|
if options[:paper_type].present?
|
||||||
must { term originator: options[:originator] } if options[:originator].present?
|
must { term paper_type: options[:paper_type] }
|
||||||
|
end
|
||||||
|
if options[:originator].present?
|
||||||
|
must { term originator: options[:originator] }
|
||||||
|
end
|
||||||
# catchall when no filters set
|
# catchall when no filters set
|
||||||
must { match_all } unless (options[:paper_type].present? || options[:originator].present?)
|
unless options[:paper_type].present? || options[:originator].present?
|
||||||
|
must { match_all }
|
||||||
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -41,7 +45,9 @@ class PaperSearch < ActiveRecord::Base
|
||||||
# filter by originator
|
# filter by originator
|
||||||
f = Elasticsearch::DSL::Search::Filters::Bool.new
|
f = Elasticsearch::DSL::Search::Filters::Bool.new
|
||||||
f.must { match_all }
|
f.must { match_all }
|
||||||
f.must { term originator: options[:originator] } if options[:originator].present?
|
if options[:originator].present?
|
||||||
|
f.must { term originator: options[:originator] }
|
||||||
|
end
|
||||||
filter f.to_hash do
|
filter f.to_hash do
|
||||||
aggregation :paper_types do
|
aggregation :paper_types do
|
||||||
terms do
|
terms do
|
||||||
|
@ -55,7 +61,9 @@ class PaperSearch < ActiveRecord::Base
|
||||||
# filter by paper_type
|
# filter by paper_type
|
||||||
f = Elasticsearch::DSL::Search::Filters::Bool.new
|
f = Elasticsearch::DSL::Search::Filters::Bool.new
|
||||||
f.must { match_all }
|
f.must { match_all }
|
||||||
f.must { term paper_type: options[:paper_type] } if options[:paper_type].present?
|
if options[:paper_type].present?
|
||||||
|
f.must { term paper_type: options[:paper_type] }
|
||||||
|
end
|
||||||
filter f.to_hash do
|
filter f.to_hash do
|
||||||
aggregation :originators do
|
aggregation :originators do
|
||||||
terms do
|
terms do
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
class User < ActiveRecord::Base
|
class User < ActiveRecord::Base
|
||||||
def self.find_or_create_from_auth_hash(hash)
|
def self.find_or_create_from_auth_hash(hash)
|
||||||
User.find_or_create_by(email: hash['info']['email'])
|
User.find_or_create_by(email: hash['info']['email'])
|
||||||
|
|
|
@ -1,35 +1,37 @@
|
||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
require 'date'
|
require 'date'
|
||||||
|
|
||||||
xml.instruct! :xml, :version => "1.0"
|
xml.instruct! :xml, version: '1.0'
|
||||||
xml.rss :version => "2.0", "xmlns:dc" => "http://purl.org/dc/elements/1.1/" do
|
xml.rss :version => '2.0', 'xmlns:dc' => 'http://purl.org/dc/elements/1.1/' do
|
||||||
xml.channel do
|
xml.channel do
|
||||||
xml.title "Search results"
|
xml.title 'Search results'
|
||||||
xml.description "Papers matching search criteria"
|
xml.description 'Papers matching search criteria'
|
||||||
|
|
||||||
@papers.each do |doc|
|
@papers.each do |doc|
|
||||||
xml.item do
|
xml.item do
|
||||||
xml.title doc.name
|
xml.title doc.name
|
||||||
if !doc.content.blank?
|
unless doc.content.blank?
|
||||||
xml.description do
|
xml.description do
|
||||||
xml.cdata! truncate(doc.content.sub("------------------------------- ", ""), length: 768)
|
xml.cdata! truncate(doc.content.sub('------------------------------- ', ''), length: 768)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
if !doc.published_at.blank?
|
unless doc.published_at.blank?
|
||||||
xml.pubDate DateTime.parse(doc.published_at).utc.strftime("%a, %d %b %Y %H:%M:%S %z")
|
xml.pubDate DateTime.parse(doc.published_at).utc.strftime('%a, %d %b %Y %H:%M:%S %z')
|
||||||
end
|
end
|
||||||
doc.originator.each do |originator|
|
doc.originator.each do |originator|
|
||||||
xml.dc :creator do
|
xml.dc :creator do
|
||||||
xml.cdata! originator
|
xml.cdata! originator
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
if !doc.paper_type.blank?
|
unless doc.paper_type.blank?
|
||||||
xml.category do
|
xml.category do
|
||||||
xml.cdata! doc.paper_type
|
xml.cdata! doc.paper_type
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
xml.link doc.url
|
xml.link doc.url
|
||||||
xml.guid doc.url
|
xml.guid doc.url
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
# This file is used by Rack-based servers to start the application.
|
# This file is used by Rack-based servers to start the application.
|
||||||
|
|
||||||
require ::File.expand_path('../config/environment', __FILE__)
|
require ::File.expand_path('../config/environment', __FILE__)
|
||||||
run Rails.application
|
run Rails.application
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
require File.expand_path('../boot', __FILE__)
|
require File.expand_path('boot', __dir__)
|
||||||
|
|
||||||
require 'rails/all'
|
require 'rails/all'
|
||||||
|
|
||||||
|
@ -20,9 +20,7 @@ module LorisWeb
|
||||||
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
|
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
|
||||||
config.i18n.default_locale = :de
|
config.i18n.default_locale = :de
|
||||||
|
|
||||||
|
|
||||||
# https://stackoverflow.com/a/28008145
|
# https://stackoverflow.com/a/28008145
|
||||||
#config.active_record.raise_in_transactional_callbacks = true
|
# config.active_record.raise_in_transactional_callbacks = true
|
||||||
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
# Set up gems listed in the Gemfile.
|
# Set up gems listed in the Gemfile.
|
||||||
ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__)
|
ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../Gemfile', __dir__)
|
||||||
|
|
||||||
require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE'])
|
require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE'])
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
# Load the Rails application.
|
# Load the Rails application.
|
||||||
require File.expand_path('../application', __FILE__)
|
require File.expand_path('application', __dir__)
|
||||||
|
|
||||||
# Initialize the Rails application.
|
# Initialize the Rails application.
|
||||||
Rails.application.initialize!
|
Rails.application.initialize!
|
||||||
|
|
|
@ -37,5 +37,4 @@ Rails.application.configure do
|
||||||
|
|
||||||
# flush stdout in order to get logs in realtime
|
# flush stdout in order to get logs in realtime
|
||||||
$stdout.sync = true
|
$stdout.sync = true
|
||||||
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -6,4 +6,4 @@ Rails.application.config.assets.version = '1.0'
|
||||||
# Precompile additional assets.
|
# Precompile additional assets.
|
||||||
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
|
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
|
||||||
# Rails.application.config.assets.precompile += %w( search.js )
|
# Rails.application.config.assets.precompile += %w( search.js )
|
||||||
# Rails.application.config.assets.precompile += %w( geo.js )
|
# Rails.application.config.assets.precompile += %w( geo.js )
|
||||||
|
|
|
@ -1,3 +1,3 @@
|
||||||
# Be sure to restart your server when you modify this file.
|
# Be sure to restart your server when you modify this file.
|
||||||
|
|
||||||
Rails.application.config.action_dispatch.cookies_serializer = :json
|
Rails.application.config.action_dispatch.cookies_serializer = :json
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
Rails.application.routes.draw do
|
Rails.application.routes.draw do
|
||||||
root :to => 'search#index', as: :search
|
root to: 'search#index', as: :search
|
||||||
post '/import' => 'import#new_papers_callback'
|
post '/import' => 'import#new_papers_callback'
|
||||||
get '/map' => 'geo#index', as: :geo
|
get '/map' => 'geo#index', as: :geo
|
||||||
get '/glossary' => 'application#glossary', as: :glossary
|
get '/glossary' => 'application#glossary', as: :glossary
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
class CreateUsers < ActiveRecord::Migration
|
class CreateUsers < ActiveRecord::Migration[4.2]
|
||||||
def change
|
def change
|
||||||
create_table :users do |t|
|
create_table :users do |t|
|
||||||
t.string :email
|
t.string :email
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
class CreatePapers < ActiveRecord::Migration
|
class CreatePapers < ActiveRecord::Migration[4.2]
|
||||||
def change
|
def change
|
||||||
create_table :papers do |t|
|
create_table :papers do |t|
|
||||||
t.string :name
|
t.string :name
|
||||||
|
@ -19,6 +19,6 @@ class CreatePapers < ActiveRecord::Migration
|
||||||
add_index(:papers, :reference)
|
add_index(:papers, :reference)
|
||||||
add_index(:papers, :originator)
|
add_index(:papers, :originator)
|
||||||
add_index(:papers, :body)
|
add_index(:papers, :body)
|
||||||
add_index(:papers, [:reference, :body], unique: true)
|
add_index(:papers, %i[reference body], unique: true)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
class CreateImporters < ActiveRecord::Migration
|
class CreateImporters < ActiveRecord::Migration[4.2]
|
||||||
def change
|
def change
|
||||||
create_table :importers do |t|
|
create_table :importers do |t|
|
||||||
t.string :url
|
t.string :url
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
class CreatePaperSearches < ActiveRecord::Migration
|
class CreatePaperSearches < ActiveRecord::Migration[4.2]
|
||||||
def change
|
def change
|
||||||
create_table :paper_searches do |t|
|
create_table :paper_searches do |t|
|
||||||
t.string :query
|
t.string :query
|
||||||
|
|
33
db/schema.rb
33
db/schema.rb
|
@ -1,4 +1,3 @@
|
||||||
# encoding: UTF-8
|
|
||||||
# This file is auto-generated from the current state of the database. Instead
|
# This file is auto-generated from the current state of the database. Instead
|
||||||
# of editing this file, please use the migrations feature of Active Record to
|
# of editing this file, please use the migrations feature of Active Record to
|
||||||
# incrementally modify your database, and then regenerate this schema definition.
|
# incrementally modify your database, and then regenerate this schema definition.
|
||||||
|
@ -14,7 +13,7 @@
|
||||||
ActiveRecord::Schema.define(version: 20151010070158) do
|
ActiveRecord::Schema.define(version: 20151010070158) do
|
||||||
|
|
||||||
create_table "importers", force: :cascade do |t|
|
create_table "importers", force: :cascade do |t|
|
||||||
t.string "url"
|
t.string "url"
|
||||||
t.datetime "created_at", null: false
|
t.datetime "created_at", null: false
|
||||||
t.datetime "updated_at", null: false
|
t.datetime "updated_at", null: false
|
||||||
end
|
end
|
||||||
|
@ -27,31 +26,29 @@ ActiveRecord::Schema.define(version: 20151010070158) do
|
||||||
end
|
end
|
||||||
|
|
||||||
create_table "papers", force: :cascade do |t|
|
create_table "papers", force: :cascade do |t|
|
||||||
t.string "name"
|
t.string "name"
|
||||||
t.string "url"
|
t.string "url"
|
||||||
t.string "reference"
|
t.string "reference"
|
||||||
t.string "body"
|
t.string "body"
|
||||||
t.datetime "published_at"
|
t.datetime "published_at"
|
||||||
t.datetime "scraped_at"
|
t.datetime "scraped_at"
|
||||||
t.string "paper_type"
|
t.string "paper_type"
|
||||||
t.string "originator"
|
t.string "originator"
|
||||||
t.text "resolution"
|
t.text "resolution"
|
||||||
t.text "content"
|
t.text "content"
|
||||||
t.datetime "created_at"
|
t.datetime "created_at"
|
||||||
t.datetime "updated_at"
|
t.datetime "updated_at"
|
||||||
|
t.index ["body"], name: "index_papers_on_body"
|
||||||
|
t.index ["originator"], name: "index_papers_on_originator"
|
||||||
|
t.index ["reference", "body"], name: "index_papers_on_reference_and_body", unique: true
|
||||||
|
t.index ["reference"], name: "index_papers_on_reference"
|
||||||
end
|
end
|
||||||
|
|
||||||
add_index "papers", ["body"], name: "index_papers_on_body"
|
|
||||||
add_index "papers", ["originator"], name: "index_papers_on_originator"
|
|
||||||
add_index "papers", ["reference", "body"], name: "index_papers_on_reference_and_body", unique: true
|
|
||||||
add_index "papers", ["reference"], name: "index_papers_on_reference"
|
|
||||||
|
|
||||||
create_table "users", force: :cascade do |t|
|
create_table "users", force: :cascade do |t|
|
||||||
t.string "email"
|
t.string "email"
|
||||||
t.datetime "created_at"
|
t.datetime "created_at"
|
||||||
t.datetime "updated_at"
|
t.datetime "updated_at"
|
||||||
|
t.index ["email"], name: "index_users_on_email", unique: true
|
||||||
end
|
end
|
||||||
|
|
||||||
add_index "users", ["email"], name: "index_users_on_email", unique: true
|
|
||||||
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,12 +1,13 @@
|
||||||
class ParseableDateValidator < ActiveModel::EachValidator
|
# frozen_string_literal: true
|
||||||
|
|
||||||
def validate_each(record, attribute, value)
|
class ParseableDateValidator < ActiveModel::EachValidator
|
||||||
|
def validate_each(record, attribute, _value)
|
||||||
raw_value = record.read_attribute_before_type_cast(attribute)
|
raw_value = record.read_attribute_before_type_cast(attribute)
|
||||||
return nil if raw_value.nil?
|
return nil if raw_value.nil?
|
||||||
|
|
||||||
Date.parse(raw_value.to_json)
|
Date.parse(raw_value.to_json)
|
||||||
nil
|
nil
|
||||||
rescue ArgumentError => e
|
rescue ArgumentError
|
||||||
record.errors[attribute] << (options[:message] || I18n.t("errors.messages.unparseable_date"))
|
record.errors[attribute] << (options[:message] || I18n.t('errors.messages.unparseable_date'))
|
||||||
end
|
end
|
||||||
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -2,7 +2,7 @@ desc 'Import Paper records from setup importers'
|
||||||
task import_papers: :environment do
|
task import_papers: :environment do
|
||||||
require 'open-uri'
|
require 'open-uri'
|
||||||
OpenSSL::SSL::VERIFY_PEER = OpenSSL::SSL::VERIFY_NONE
|
OpenSSL::SSL::VERIFY_PEER = OpenSSL::SSL::VERIFY_NONE
|
||||||
api_key = Rails.application.config_for(:morph)["key"]
|
api_key = Rails.application.config_for(:morph)['key']
|
||||||
uri = URI.parse("https://api.morph.io/jrlover/city_council_leipzig_recent_papers/data.json?key=#{api_key}&query=select%20*%20from%20%27data%27")
|
uri = URI.parse("https://api.morph.io/jrlover/city_council_leipzig_recent_papers/data.json?key=#{api_key}&query=select%20*%20from%20%27data%27")
|
||||||
Paper.import_from_json(uri.read)
|
Paper.import_from_json(uri.read)
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,48 +1,37 @@
|
||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
require 'rails_helper'
|
require 'rails_helper'
|
||||||
|
|
||||||
RSpec.describe SearchController, type: :controller, elasticsearch: true do
|
RSpec.describe SearchController, type: :controller, elasticsearch: true do
|
||||||
|
describe 'GET #index' do
|
||||||
describe "GET #index" do
|
it 'returns http success' do
|
||||||
it "returns http success" do
|
|
||||||
get :index, body: 'leipzig'
|
get :index, body: 'leipzig'
|
||||||
expect(response).to have_http_status(:success)
|
expect(response).to have_http_status(:success)
|
||||||
end
|
end
|
||||||
|
|
||||||
it "assigns @search_definition with default sort order" do
|
it 'assigns @search_definition with default sort order' do
|
||||||
search = PaperSearch.new(sort_by: 'date')
|
search = PaperSearch.new(sort_by: 'date')
|
||||||
get :index, body: 'leipzig'
|
get :index, body: 'leipzig'
|
||||||
expect(assigns(:search_definition).attributes).to eq(search.attributes)
|
expect(assigns(:search_definition).attributes).to eq(search.attributes)
|
||||||
end
|
end
|
||||||
|
|
||||||
it "renders the index template" do
|
it 'renders the index template' do
|
||||||
get :index, body: 'leipzig'
|
get :index, body: 'leipzig'
|
||||||
expect(response).to render_template(:index)
|
expect(response).to render_template(:index)
|
||||||
end
|
end
|
||||||
|
|
||||||
it "executes the search with PaperSearch parameters" do
|
it 'returns rss' do
|
||||||
pending("simplify search implementation")
|
get :index, format: 'rss', body: 'leipzig'
|
||||||
result_page = double("page", results: []) # MEH
|
|
||||||
response = double("es_response", page: result_page)
|
|
||||||
expect(Paper).to receive(:search).and_return(response)
|
|
||||||
|
|
||||||
get :index, body: 'leipzig'
|
|
||||||
end
|
|
||||||
|
|
||||||
|
|
||||||
it "returns rss" do
|
|
||||||
get :index, :format => "rss", body: 'leipzig'
|
|
||||||
expect(response).to be_success
|
expect(response).to be_success
|
||||||
expect(response).to render_template(:index)
|
expect(response).to render_template(:index)
|
||||||
expect(response.content_type).to eq("application/rss+xml")
|
expect(response.content_type).to eq('application/rss+xml')
|
||||||
#expect(response.body).to have_tag "rss" do
|
# expect(response.body).to have_tag "rss" do
|
||||||
# with_tag "channel" do
|
# with_tag "channel" do
|
||||||
# with_tag "title"
|
# with_tag "title"
|
||||||
# with_tag "description"
|
# with_tag "description"
|
||||||
# with_tag "link"
|
# with_tag "link"
|
||||||
# end
|
# end
|
||||||
#end
|
# end
|
||||||
end
|
end
|
||||||
|
|
||||||
end
|
end
|
||||||
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,13 +1,15 @@
|
||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
require_relative '../factory_helper'
|
require_relative '../factory_helper'
|
||||||
|
|
||||||
FactoryBot.define do
|
FactoryBot.define do
|
||||||
factory :paper do
|
factory :paper do
|
||||||
name { Faker::Lorem.sentence }
|
name { Faker::Lorem.sentence }
|
||||||
sequence(:url) { |n| Faker::Internet.url(host: "ris.example.org", path: "/paper-#{n}.html") }
|
sequence(:url) { |n| Faker::Internet.url(host: 'ris.example.org', path: "/paper-#{n}.html") }
|
||||||
sequence(:reference) { |n| FactoryHelper.reference(n) }
|
sequence(:reference) { |n| FactoryHelper.reference(n) }
|
||||||
body { "leipzig" }
|
body { 'leipzig' }
|
||||||
published_at { "2015-07-20 21:16:53" }
|
published_at { '2015-07-20 21:16:53' }
|
||||||
scraped_at { "2015-07-20 21:16:53" }
|
scraped_at { '2015-07-20 21:16:53' }
|
||||||
paper_type { FactoryHelper.paper_type }
|
paper_type { FactoryHelper.paper_type }
|
||||||
originator { Faker::Name.name }
|
originator { Faker::Name.name }
|
||||||
resolution { Faker::Lorem.paragraph(sentence_count: 3) }
|
resolution { Faker::Lorem.paragraph(sentence_count: 3) }
|
||||||
|
|
|
@ -1,29 +1,29 @@
|
||||||
module FactoryHelper
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
module FactoryHelper
|
||||||
PAPER_TYPES = [
|
PAPER_TYPES = [
|
||||||
"Verwaltungsstandpunkt",
|
'Verwaltungsstandpunkt',
|
||||||
"Anfrage",
|
'Anfrage',
|
||||||
"Beschlussvorlage",
|
'Beschlussvorlage',
|
||||||
"Änderungsantrag",
|
'Änderungsantrag',
|
||||||
"Antrag",
|
'Antrag',
|
||||||
"Neufassung",
|
'Neufassung',
|
||||||
"Informationsvorlage",
|
'Informationsvorlage',
|
||||||
"Einwohneranfrage",
|
'Einwohneranfrage',
|
||||||
"Petition",
|
'Petition',
|
||||||
"schriftliche Antwort zur Anfrage",
|
'schriftliche Antwort zur Anfrage',
|
||||||
"Wichtige Angelegenheit",
|
'Wichtige Angelegenheit',
|
||||||
"Eilentscheidung",
|
'Eilentscheidung',
|
||||||
"Dringliche Anfrage"
|
'Dringliche Anfrage'
|
||||||
]
|
].freeze
|
||||||
|
|
||||||
def self.paper_type
|
def self.paper_type
|
||||||
PAPER_TYPES.sample
|
PAPER_TYPES.sample
|
||||||
end
|
end
|
||||||
|
|
||||||
REFERENCE = "A-%05i/16"
|
REFERENCE = 'A-%05i/16'
|
||||||
|
|
||||||
def self.reference(seq)
|
def self.reference(seq)
|
||||||
REFERENCE % seq
|
REFERENCE % seq
|
||||||
end
|
end
|
||||||
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,239 +1,238 @@
|
||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
require 'rails_helper'
|
require 'rails_helper'
|
||||||
require 'pp'
|
require 'pp'
|
||||||
|
|
||||||
RSpec.feature "Basic search", type: :feature, elasticsearch: true do
|
RSpec.feature 'Basic search', type: :feature, elasticsearch: true do
|
||||||
|
|
||||||
before(:each) do
|
before(:each) do
|
||||||
@papers = FactoryBot.create_list(:paper, 11)
|
@papers = FactoryBot.create_list(:paper, 11)
|
||||||
Paper.__elasticsearch__.refresh_index!
|
Paper.__elasticsearch__.refresh_index!
|
||||||
end
|
end
|
||||||
|
|
||||||
scenario "It displays the search form" do
|
scenario 'It displays the search form' do
|
||||||
visit search_path body: "leipzig"
|
visit search_path body: 'leipzig'
|
||||||
expect(page).to have_content("Stadtratmonitor")
|
expect(page).to have_content('Stadtratmonitor')
|
||||||
expect(page).to have_field("paper_search_query")
|
expect(page).to have_field('paper_search_query')
|
||||||
expect(page).to have_select("Typ")
|
expect(page).to have_select('Typ')
|
||||||
expect(page).to have_select("Einreicher")
|
expect(page).to have_select('Einreicher')
|
||||||
expect(page).to have_selector("label", text: "Sortierung")
|
expect(page).to have_selector('label', text: 'Sortierung')
|
||||||
expect(page).to have_field("paper_search_sort_by_date", type: "radio")
|
expect(page).to have_field('paper_search_sort_by_date', type: 'radio')
|
||||||
expect(page).to have_field("paper_search_sort_by_score", type: "radio")
|
expect(page).to have_field('paper_search_sort_by_score', type: 'radio')
|
||||||
end
|
end
|
||||||
|
|
||||||
scenario "With empty query displays all documents" do
|
scenario 'With empty query displays all documents' do
|
||||||
visit search_path body: "leipzig"
|
visit search_path body: 'leipzig'
|
||||||
expect(page).to have_selector("ul#search_results")
|
expect(page).to have_selector('ul#search_results')
|
||||||
expect(page).to have_content("#{@papers.size} Dokumente in der Datenbank")
|
expect(page).to have_content("#{@papers.size} Dokumente in der Datenbank")
|
||||||
end
|
end
|
||||||
|
|
||||||
scenario "Search results are paginated" do
|
scenario 'Search results are paginated' do
|
||||||
visit search_path body: "leipzig"
|
visit search_path body: 'leipzig'
|
||||||
expect(page).to have_css("li.search-result", count: 10)
|
expect(page).to have_css('li.search-result', count: 10)
|
||||||
expect(page).to have_css("div#pagination")
|
expect(page).to have_css('div#pagination')
|
||||||
within("div#pagination") do
|
within('div#pagination') do
|
||||||
expect(page).to have_css("li", count: 4) # two pages + next + last
|
expect(page).to have_css('li', count: 4) # two pages + next + last
|
||||||
expect(page).to have_css("li.current", text: "1")
|
expect(page).to have_css('li.current', text: '1')
|
||||||
expect(page).to have_link("2")
|
expect(page).to have_link('2')
|
||||||
expect(page).to have_link("Weiter")
|
expect(page).to have_link('Weiter')
|
||||||
expect(page).to have_link("Ende")
|
expect(page).to have_link('Ende')
|
||||||
end
|
end
|
||||||
|
|
||||||
page.find("div#pagination").click_link("2")
|
page.find('div#pagination').click_link('2')
|
||||||
expect(page).to have_css("li.search-result", count: 1)
|
expect(page).to have_css('li.search-result', count: 1)
|
||||||
within("div#pagination") do
|
within('div#pagination') do
|
||||||
expect(page).to have_css("li.current", text: "2")
|
expect(page).to have_css('li.current', text: '2')
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
scenario "Search results have basic information" do
|
scenario 'Search results have basic information' do
|
||||||
visit search_path body: "leipzig"
|
visit search_path body: 'leipzig'
|
||||||
paper = @papers.first
|
paper = @papers.first
|
||||||
resultEntry = page.find("li.search-result", match: :first)
|
result_entry = page.find('li.search-result', match: :first)
|
||||||
expect(resultEntry).to have_content(paper.name)
|
expect(result_entry).to have_content(paper.name)
|
||||||
|
|
||||||
resultSubEntry = resultEntry.find("li.current", match: :first)
|
result_subentry = result_entry.find('li.current', match: :first)
|
||||||
linkName = getLinkName(paper)
|
linkname = get_linkname(paper)
|
||||||
expect(resultSubEntry).to have_link(linkName, href: paper.url)
|
expect(result_subentry).to have_link(linkname, href: paper.url)
|
||||||
end
|
end
|
||||||
|
|
||||||
def getLinkName(paper)
|
def get_linkname(paper)
|
||||||
dateStr = I18n.l(paper.published_at.to_date)
|
date = I18n.l(paper.published_at.to_date)
|
||||||
originatorStr = (paper.originator.kind_of?(Array) ?
|
originator = (paper.originator.is_a?(Array) ?
|
||||||
paper.originator.join(", ") : paper.originator)
|
paper.originator.join(', ') : paper.originator)
|
||||||
return "#{dateStr}: #{paper.paper_type} von #{originatorStr}"
|
"#{date}: #{paper.paper_type} von #{originator}"
|
||||||
end
|
end
|
||||||
|
|
||||||
scenario "Finds papers by name" do
|
scenario 'Finds papers by name' do
|
||||||
paper = FactoryBot.create(:paper, name: "Opendata als default")
|
paper = FactoryBot.create(:paper, name: 'Opendata als default')
|
||||||
Paper.__elasticsearch__.refresh_index!
|
Paper.__elasticsearch__.refresh_index!
|
||||||
visit search_path body: "leipzig", paper_search: {query: "Opendata"}
|
visit search_path body: 'leipzig', paper_search: { query: 'Opendata' }
|
||||||
expect(page).to have_content("1 Dokument in der Datenbank")
|
expect(page).to have_content('1 Dokument in der Datenbank')
|
||||||
resultEntry = page.find("li.search-result", match: :first)
|
result_entry = page.find('li.search-result', match: :first)
|
||||||
expect(resultEntry).to have_content(paper.name)
|
expect(result_entry).to have_content(paper.name)
|
||||||
|
|
||||||
resultSubEntry = resultEntry.find("li.current", match: :first)
|
result_subentry = result_entry.find('li.current', match: :first)
|
||||||
linkName = getLinkName(paper)
|
linkname = get_linkname(paper)
|
||||||
expect(resultSubEntry).to have_link(linkName, href: paper.url)
|
expect(result_subentry).to have_link(linkname, href: paper.url)
|
||||||
end
|
end
|
||||||
|
|
||||||
scenario "Finds papers by content" do
|
scenario 'Finds papers by content' do
|
||||||
paper = FactoryBot.create(:paper,
|
paper = FactoryBot.create(:paper,
|
||||||
name: "Opendata als default",
|
name: 'Opendata als default',
|
||||||
content: "Alle Verwaltungsdokumente werden als Opendata veröffentlicht"
|
content: 'Alle Verwaltungsdokumente werden als Opendata veröffentlicht')
|
||||||
)
|
|
||||||
Paper.__elasticsearch__.refresh_index!
|
Paper.__elasticsearch__.refresh_index!
|
||||||
visit search_path body: "leipzig", paper_search: {query: "Verwaltungsdokumente"}
|
visit search_path body: 'leipzig', paper_search: { query: 'Verwaltungsdokumente' }
|
||||||
expect(page).to have_content("1 Dokument in der Datenbank")
|
expect(page).to have_content('1 Dokument in der Datenbank')
|
||||||
resultEntry = page.find("li.search-result", match: :first)
|
result_entry = page.find('li.search-result', match: :first)
|
||||||
expect(resultEntry).to have_content(paper.name)
|
expect(result_entry).to have_content(paper.name)
|
||||||
|
|
||||||
resultSubEntry = resultEntry.find("li.current", match: :first)
|
result_subentry = result_entry.find('li.current', match: :first)
|
||||||
linkName = getLinkName(paper)
|
linkname = get_linkname(paper)
|
||||||
expect(resultSubEntry).to have_link(linkName, href: paper.url)
|
expect(result_subentry).to have_link(linkname, href: paper.url)
|
||||||
end
|
end
|
||||||
|
|
||||||
scenario "Papers with common reference id in search result ordered by date" do
|
scenario 'Papers with common reference id in search result ordered by date' do
|
||||||
mainPaper = FactoryBot.create(:paper, published_at: '2016-12-19T19:00:00',
|
main_paper = FactoryBot.create(:paper, published_at: '2016-12-19T19:00:00',
|
||||||
name: "Opendata als default", reference: "VI-0815")
|
name: 'Opendata als default', reference: 'VI-0815')
|
||||||
newPaper = FactoryBot.create(:paper, published_at: '2016-12-23T12:00:00',
|
new_paper = FactoryBot.create(:paper, published_at: '2016-12-23T12:00:00',
|
||||||
name: "Opendata als optional", reference: "VI-0815-ÄA-01")
|
name: 'Opendata als optional', reference: 'VI-0815-ÄA-01')
|
||||||
Paper.__elasticsearch__.refresh_index!
|
Paper.__elasticsearch__.refresh_index!
|
||||||
visit search_path body: "leipzig", paper_search: {query: "default"}
|
visit search_path body: 'leipzig', paper_search: { query: 'default' }
|
||||||
expect(page).to have_content("1 Dokument in der Datenbank")
|
expect(page).to have_content('1 Dokument in der Datenbank')
|
||||||
resultEntry = page.find("li.search-result", match: :first)
|
result_entry = page.find('li.search-result', match: :first)
|
||||||
expect(resultEntry).to have_content(mainPaper.name)
|
expect(result_entry).to have_content(main_paper.name)
|
||||||
|
|
||||||
resultSubEntry1 = resultEntry.find("li.current", match: :first)
|
result_subentry1 = result_entry.find('li.current', match: :first)
|
||||||
linkName1 = getLinkName(mainPaper)
|
linkname1 = get_linkname(main_paper)
|
||||||
expect(resultSubEntry1).to have_link(linkName1, href: mainPaper.url)
|
expect(result_subentry1).to have_link(linkname1, href: main_paper.url)
|
||||||
|
|
||||||
resultSubEntries = resultEntry.find("ul").all("li")
|
result_subentries = result_entry.find('ul').all('li')
|
||||||
linkName2 = getLinkName(newPaper)
|
linkname2 = get_linkname(new_paper)
|
||||||
expect(resultSubEntries[0]).to have_link(linkName2, href: newPaper.url)
|
expect(result_subentries[0]).to have_link(linkname2, href: new_paper.url)
|
||||||
expect(resultSubEntries[1]).to have_link(linkName1, href: mainPaper.url)
|
expect(result_subentries[1]).to have_link(linkname1, href: main_paper.url)
|
||||||
end
|
end
|
||||||
|
|
||||||
scenario "Papers with common reference id in search result ordered by ref" do
|
scenario 'Papers with common reference id in search result ordered by ref' do
|
||||||
mainPaper = FactoryBot.create(:paper, published_at: '2016-12-19T19:00:00',
|
main_paper = FactoryBot.create(:paper, published_at: '2016-12-19T19:00:00',
|
||||||
name: "Opendata als default", reference: "VI-0815")
|
name: 'Opendata als default', reference: 'VI-0815')
|
||||||
newPaper1 = FactoryBot.create(:paper, published_at: '2016-12-23T12:00:00',
|
new_paper1 = FactoryBot.create(:paper, published_at: '2016-12-23T12:00:00',
|
||||||
name: "Opendata als optional", reference: "VI-0815-ÄA-02")
|
name: 'Opendata als optional', reference: 'VI-0815-ÄA-02')
|
||||||
newPaper2 = FactoryBot.create(:paper, published_at: '2016-12-23T12:00:00',
|
+new_paper2 = FactoryBot.create(:paper, published_at: '2016-12-23T12:00:00',
-name: "Opendata als optional", reference: "VI-0815-ÄA-01")
+name: 'Opendata als optional', reference: 'VI-0815-ÄA-01')
 Paper.__elasticsearch__.refresh_index!
-visit search_path body: "leipzig", paper_search: {query: "default"}
+visit search_path body: 'leipzig', paper_search: { query: 'default' }
-expect(page).to have_content("1 Dokument in der Datenbank")
+expect(page).to have_content('1 Dokument in der Datenbank')
-resultEntry = page.find("li.search-result", match: :first)
+result_entry = page.find('li.search-result', match: :first)

-resultSubEntries = resultEntry.find("ul").all("li")
+result_subentries = result_entry.find('ul').all('li')
-linkName1 = getLinkName(newPaper1)
+linkname1 = get_linkname(new_paper1)
-expect(resultSubEntries[0]).to have_link(linkName1, href: newPaper1.url)
+expect(result_subentries[0]).to have_link(linkname1, href: new_paper1.url)
-linkName2 = getLinkName(newPaper2)
+linkname2 = get_linkname(new_paper2)
-expect(resultSubEntries[1]).to have_link(linkName2, href: newPaper2.url)
+expect(result_subentries[1]).to have_link(linkname2, href: new_paper2.url)
-linkName3 = getLinkName(mainPaper)
+linkname3 = get_linkname(main_paper)
-expect(resultSubEntries[2]).to have_link(linkName3, href: mainPaper.url)
+expect(result_subentries[2]).to have_link(linkname3, href: main_paper.url)
 end

-scenario "Papers with common reference id handled also for missing prefix" do
+scenario 'Papers with common reference id handled also for missing prefix' do
-mainPaper = FactoryBot.create(:paper, published_at: '2016-12-19T19:00:00',
+main_paper = FactoryBot.create(:paper, published_at: '2016-12-19T19:00:00',
-name: "Opendata als default", reference: "VI-0815")
+name: 'Opendata als default', reference: 'VI-0815')
-newPaper1 = FactoryBot.create(:paper, published_at: '2016-12-23T12:00:00',
+new_paper1 = FactoryBot.create(:paper, published_at: '2016-12-23T12:00:00',
-name: "Opendata als optional", reference: "VI-0815-NF-01")
+name: 'Opendata als optional', reference: 'VI-0815-NF-01')
-newPaper1Change = FactoryBot.create(:paper, published_at: '2016-12-23T12:00:00',
+new_paper2 = FactoryBot.create(:paper, published_at: '2016-12-23T12:00:00',
-name: "Opendata als nicht optional", reference: "-0815-NF-01-ÄA-01")
+name: 'Opendata als nicht optional', reference: '-0815-NF-01-ÄA-01')
 Paper.__elasticsearch__.refresh_index!
-visit search_path body: "leipzig", paper_search: {query: "default"}
+visit search_path body: 'leipzig', paper_search: { query: 'default' }
-expect(page).to have_content("1 Dokument in der Datenbank")
+expect(page).to have_content('1 Dokument in der Datenbank')
-resultEntry = page.find("li.search-result", match: :first)
+result_entry = page.find('li.search-result', match: :first)

-resultSubEntries = resultEntry.find("ul").all("li")
+result_subentries = result_entry.find('ul').all('li')
-linkName1 = getLinkName(newPaper1)
+linkname1 = get_linkname(new_paper1)
-expect(resultSubEntries[0]).to have_link(linkName1, href: newPaper1.url)
+expect(result_subentries[0]).to have_link(linkname1, href: new_paper1.url)
-linkName2 = getLinkName(newPaper1Change)
+linkname2 = get_linkname(new_paper2)
-expect(resultSubEntries[1]).to have_link(linkName2, href: newPaper1Change.url)
+expect(result_subentries[1]).to have_link(linkname2, href: new_paper2.url)
-linkName3 = getLinkName(mainPaper)
+linkname3 = get_linkname(main_paper)
-expect(resultSubEntries[2]).to have_link(linkName3, href: mainPaper.url)
+expect(result_subentries[2]).to have_link(linkname3, href: main_paper.url)
 end

 scenario "Finds 'Testen' with search 'Test'" do
-paper = FactoryBot.create(:paper, name: "Testen")
+paper = FactoryBot.create(:paper, name: 'Testen')
 Paper.__elasticsearch__.refresh_index!
-visit search_path body: "leipzig", paper_search: {query: "Test"}
+visit search_path body: 'leipzig', paper_search: { query: 'Test' }
-expect(page).to have_content("1 Dokument in der Datenbank")
+expect(page).to have_content('1 Dokument in der Datenbank')
-resultEntry = page.find("li.search-result", match: :first)
+result_entry = page.find('li.search-result', match: :first)
-expect(resultEntry).to have_content(paper.name)
+expect(result_entry).to have_content(paper.name)
 end

 scenario "Finds 'Test' with search 'Testen'" do
-paper = FactoryBot.create(:paper, name: "Test")
+paper = FactoryBot.create(:paper, name: 'Test')
 Paper.__elasticsearch__.refresh_index!
-visit search_path body: "leipzig", paper_search: {query: "Testen"}
+visit search_path body: 'leipzig', paper_search: { query: 'Testen' }
-expect(page).to have_content("1 Dokument in der Datenbank")
+expect(page).to have_content('1 Dokument in der Datenbank')
-resultEntry = page.find("li.search-result", match: :first)
+result_entry = page.find('li.search-result', match: :first)
-expect(resultEntry).to have_content(paper.name)
+expect(result_entry).to have_content(paper.name)
 end

 scenario "Finds 'Fahrräderverleih' with search 'Fahrrad'" do
-paper = FactoryBot.create(:paper, name: "Fahrräderverleih")
+paper = FactoryBot.create(:paper, name: 'Fahrräderverleih')
 Paper.__elasticsearch__.refresh_index!
-visit search_path body: "leipzig", paper_search: {query: "Fahrrad"}
+visit search_path body: 'leipzig', paper_search: { query: 'Fahrrad' }
-expect(page).to have_content("1 Dokument in der Datenbank")
+expect(page).to have_content('1 Dokument in der Datenbank')
-resultEntry = page.find("li.search-result", match: :first)
+result_entry = page.find('li.search-result', match: :first)
-expect(resultEntry).to have_content(paper.name)
+expect(result_entry).to have_content(paper.name)
 end

 scenario "Finds 'Fahrräderverleih' with search 'Fahrräder'" do
-paper = FactoryBot.create(:paper, name: "Fahrräderverleih")
+paper = FactoryBot.create(:paper, name: 'Fahrräderverleih')
 Paper.__elasticsearch__.refresh_index!
-visit search_path body: "leipzig", paper_search: {query: "Fahrräder"}
+visit search_path body: 'leipzig', paper_search: { query: 'Fahrräder' }
-expect(page).to have_content("1 Dokument in der Datenbank")
+expect(page).to have_content('1 Dokument in der Datenbank')
-resultEntry = page.find("li.search-result", match: :first)
+result_entry = page.find('li.search-result', match: :first)
-expect(resultEntry).to have_content(paper.name)
+expect(result_entry).to have_content(paper.name)
 end

 scenario "Finds 'Fahrräderverleih' with search 'Verleih'" do
-paper = FactoryBot.create(:paper, name: "Fahrräderverleih")
+paper = FactoryBot.create(:paper, name: 'Fahrräderverleih')
 Paper.__elasticsearch__.refresh_index!
-visit search_path body: "leipzig", paper_search: {query: "Verleih"}
+visit search_path body: 'leipzig', paper_search: { query: 'Verleih' }
-expect(page).to have_content("1 Dokument in der Datenbank")
+expect(page).to have_content('1 Dokument in der Datenbank')
-resultEntry = page.find("li.search-result", match: :first)
+result_entry = page.find('li.search-result', match: :first)
-expect(resultEntry).to have_content(paper.name)
+expect(result_entry).to have_content(paper.name)
 end

 scenario "Finds 'Fahrräderverleih' with search 'Autoverleih'" do
-paper = FactoryBot.create(:paper, name: "Fahrräderverleih")
+paper = FactoryBot.create(:paper, name: 'Fahrräderverleih')
 Paper.__elasticsearch__.refresh_index!
-visit search_path body: "leipzig", paper_search: {query: "Autoverleih"}
+visit search_path body: 'leipzig', paper_search: { query: 'Autoverleih' }
-expect(page).to have_content("1 Dokument in der Datenbank")
+expect(page).to have_content('1 Dokument in der Datenbank')
-resultEntry = page.find("li.search-result", match: :first)
+result_entry = page.find('li.search-result', match: :first)
-expect(resultEntry).to have_content(paper.name)
+expect(result_entry).to have_content(paper.name)
 end

 scenario "Finds no 'Fahrrad' with search 'Rad'" do
-paper = FactoryBot.create(:paper, name: "Fahrrad")
+FactoryBot.create(:paper, name: 'Fahrrad')
 Paper.__elasticsearch__.refresh_index!
-visit search_path body: "leipzig", paper_search: {query: "Rad"}
+visit search_path body: 'leipzig', paper_search: { query: 'Rad' }
-expect(page).to have_content("0 Dokumente in der Datenbank")
+expect(page).to have_content('0 Dokumente in der Datenbank')
 end

-scenario "Papers with reference id having slash is escaped" do
+scenario 'Papers with reference id having slash is escaped' do
-mainPaper = FactoryBot.create(:paper, published_at: '2016-12-19T19:00:00',
+main_paper = FactoryBot.create(:paper, published_at: '2016-12-19T19:00:00',
-name: "Opendata als default", reference: "VI-00768/14")
+name: 'Opendata als default', reference: 'VI-00768/14')
-newPaper = FactoryBot.create(:paper, published_at: '2016-12-23T12:00:00',
+new_paper = FactoryBot.create(:paper, published_at: '2016-12-23T12:00:00',
-name: "Opendata als optional", reference: "VI-00768/14-ÄA-01")
+name: 'Opendata als optional', reference: 'VI-00768/14-ÄA-01')
 Paper.__elasticsearch__.refresh_index!
-visit search_path body: "leipzig", paper_search: {query: "default"}
+visit search_path body: 'leipzig', paper_search: { query: 'default' }
-expect(page).to have_content("1 Dokument in der Datenbank")
+expect(page).to have_content('1 Dokument in der Datenbank')
-resultEntry = page.find("li.search-result", match: :first)
+result_entry = page.find('li.search-result', match: :first)
-expect(resultEntry).to have_content(mainPaper.name)
+expect(result_entry).to have_content(main_paper.name)

-resultSubEntry1 = resultEntry.find("li.current", match: :first)
+result_subentry1 = result_entry.find('li.current', match: :first)
-linkName1 = getLinkName(mainPaper)
+linkname1 = get_linkname(main_paper)
-expect(resultSubEntry1).to have_link(linkName1, href: mainPaper.url)
+expect(result_subentry1).to have_link(linkname1, href: main_paper.url)

-resultSubEntries = resultEntry.find("ul").all("li")
+result_subentries = result_entry.find('ul').all('li')
-linkName2 = getLinkName(newPaper)
+linkname2 = get_linkname(new_paper)
-expect(resultSubEntries[0]).to have_link(linkName2, href: newPaper.url)
+expect(result_subentries[0]).to have_link(linkname2, href: new_paper.url)
-expect(resultSubEntries[1]).to have_link(linkName1, href: mainPaper.url)
+expect(result_subentries[1]).to have_link(linkname1, href: main_paper.url)
 end

 end
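The bulk of the rewrite above is mechanical: plain string literals move from double to single quotes and camelCase locals become snake_case, presumably to satisfy RuboCop's default Style/StringLiterals and Naming/VariableName cops. A minimal standalone Ruby sketch of the two spellings (illustrative only, not taken from the commit):

# Flagged style: double quotes without interpolation, camelCase local.
resultEntry = "li.search-result"

# Preferred style: single quotes for plain strings, snake_case local.
result_entry = 'li.search-result'

# Double quotes remain wherever interpolation or escape sequences are needed.
puts "selector is #{result_entry}"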

@@ -1,69 +1,65 @@
+# frozen_string_literal: true
+
 require 'rails_helper'

-RSpec.feature "Search filters", type: :feature, elasticsearch: true do
+RSpec.feature 'Search filters', type: :feature, elasticsearch: true do

 before(:each) do
 @antrag = FactoryBot.create(:paper,
-paper_type: "Antrag",
+paper_type: 'Antrag',
-name: "Mehr Spielplätze in Leipzig",
+name: 'Mehr Spielplätze in Leipzig',
-originator: "Dezernat Jugend, Soziales, Gesundheit und Schule"
+originator: 'Dezernat Jugend, Soziales, Gesundheit und Schule')
-)
 @anfrage = FactoryBot.create(:paper,
-paper_type: "Anfrage",
+paper_type: 'Anfrage',
-originator: "CDU-Fraktion"
+originator: 'CDU-Fraktion')
-)
 @vorlage_1 = FactoryBot.create(:paper,
-paper_type: "Vorlage",
+paper_type: 'Vorlage',
-name: "Zustand der Spielplätze",
+name: 'Zustand der Spielplätze',
-originator: "Dezernat Jugend, Soziales, Gesundheit und Schule"
+originator: 'Dezernat Jugend, Soziales, Gesundheit und Schule')
-)
 @vorlage_2 = FactoryBot.create(:paper,
-paper_type: "Vorlage",
+paper_type: 'Vorlage',
-name: "Mehr Ampeln in der Innenstadt",
+name: 'Mehr Ampeln in der Innenstadt',
-originator: "Oberbürgermeister"
+originator: 'Oberbürgermeister')
-)
 Paper.__elasticsearch__.refresh_index!
 end

-scenario "Displays paper types and their respective count witin the search results" do
+scenario 'Displays paper types and their respective count witin the search results' do
-visit search_path body: "leipzig"
+visit search_path body: 'leipzig'
-paper_type_filter = page.find("select#paper_search_paper_type")
+paper_type_filter = page.find('select#paper_search_paper_type')
-expect(paper_type_filter).to have_css("option", text: "Antrag (1)")
+expect(paper_type_filter).to have_css('option', text: 'Antrag (1)')
-expect(paper_type_filter).to have_css("option", text: "Anfrage (1)")
+expect(paper_type_filter).to have_css('option', text: 'Anfrage (1)')
-expect(paper_type_filter).to have_css("option", text: "Vorlage (2)")
+expect(paper_type_filter).to have_css('option', text: 'Vorlage (2)')

-visit search_path body: "leipzig", paper_search: {query: "Spielplätze"}
+visit search_path body: 'leipzig', paper_search: { query: 'Spielplätze' }
-paper_type_filter = page.find("select#paper_search_paper_type")
+paper_type_filter = page.find('select#paper_search_paper_type')
-expect(paper_type_filter).to have_css("option", text: "Antrag (1)")
+expect(paper_type_filter).to have_css('option', text: 'Antrag (1)')
-expect(paper_type_filter).not_to have_css("option", text: "Anfrage")
+expect(paper_type_filter).not_to have_css('option', text: 'Anfrage')
-expect(paper_type_filter).to have_css("option", text: "Vorlage (1)")
+expect(paper_type_filter).to have_css('option', text: 'Vorlage (1)')
 end

-scenario "Filtering by paper type", js: true do
+scenario 'Filtering by paper type', js: true do
-visit search_path body: "leipzig"
+visit search_path body: 'leipzig'
-expect(page).to have_css("li.search-result", count: 4)
+expect(page).to have_css('li.search-result', count: 4)
-select "Antrag (1)", from: "Typ"
+select 'Antrag (1)', from: 'Typ'
-expect(page).to have_css("li.search-result", count: 1, text: "Spielplätze")
+expect(page).to have_css('li.search-result', count: 1, text: 'Spielplätze')
 end

-scenario "Displays originators and their respective count within the search results" do
+scenario 'Displays originators and their respective count within the search results' do
-visit search_path body: "leipzig"
+visit search_path body: 'leipzig'
-originator_filter = page.find("select#paper_search_originator")
+originator_filter = page.find('select#paper_search_originator')
-expect(originator_filter).to have_css("option", text: "CDU-Fraktion (1)")
+expect(originator_filter).to have_css('option', text: 'CDU-Fraktion (1)')
-expect(originator_filter).to have_css("option", text: "Dezernat Jugend, Soziales, Gesundheit und Schule (2)")
+expect(originator_filter).to have_css('option', text: 'Dezernat Jugend, Soziales, Gesundheit und Schule (2)')
-expect(originator_filter).to have_css("option", text: "Oberbürgermeister (1)")
+expect(originator_filter).to have_css('option', text: 'Oberbürgermeister (1)')

-visit search_path body: "leipzig", paper_search: {query: "Spielplätze"}
+visit search_path body: 'leipzig', paper_search: { query: 'Spielplätze' }
-originator_filter = page.find("select#paper_search_originator")
+originator_filter = page.find('select#paper_search_originator')
-expect(originator_filter).to have_css("option", text: "Dezernat Jugend, Soziales, Gesundheit und Schule (2)")
+expect(originator_filter).to have_css('option', text: 'Dezernat Jugend, Soziales, Gesundheit und Schule (2)')
-expect(originator_filter).not_to have_css("option", text: "Oberbürgermeister")
+expect(originator_filter).not_to have_css('option', text: 'Oberbürgermeister')
 end

-scenario "Filtering by originator", js: true do
+scenario 'Filtering by originator', js: true do
-visit search_path body: "leipzig"
+visit search_path body: 'leipzig'
-expect(page).to have_css("li.search-result", count: 4)
+expect(page).to have_css('li.search-result', count: 4)
-select "Oberbürgermeister (1)", from: "Einreicher"
+select 'Oberbürgermeister (1)', from: 'Einreicher'
-expect(page).to have_css("li.search-result", count: 1, text: "Ampeln")
+expect(page).to have_css('li.search-result', count: 1, text: 'Ampeln')
 end

 end
@@ -1,22 +1,24 @@
+# frozen_string_literal: true
+
 require 'rails_helper'

 RSpec.describe Paper do
-context "Validations" do
+context 'Validations' do
 it { should validate_presence_of(:name) }
 it { should validate_length_of(:name).is_at_most(1000) }

 it { should validate_presence_of(:url) }
 it { should validate_length_of(:url).is_at_most(1000) }

-context "URL uniqueness" do
+context 'URL uniqueness' do
 subject { FactoryBot.build(:paper) }
 it { should validate_uniqueness_of(:url) }
 end

-it "validate url format sane" do
+it 'validate url format sane' do
-expected_error = "ist keine gültige URL"
+expected_error = 'ist keine gültige URL'
-paper = FactoryBot.build(:paper, url: "wtf")
+paper = FactoryBot.build(:paper, url: 'wtf')
-expect(paper).not_to be_valid, "Expected paper to not be valid with invalid URL"
+expect(paper).not_to be_valid, 'Expected paper to not be valid with invalid URL'
 expect(paper.errors[:url]).not_to be_empty
 expect(paper.errors[:url]).to include(expected_error), "Expected #{paper.errors[:url]} to include \"#{expected_error}\""
 end

@@ -36,11 +38,11 @@ RSpec.describe Paper do
 it { should validate_presence_of(:paper_type) }
 it { should validate_length_of(:paper_type).is_at_most(50) }

-context "published_at" do
+context 'published_at' do
 it { should validate_presence_of(:published_at) }
-it "validate date is parseable" do
+it 'validate date is parseable' do
-expected_error = "ist kein gültiges Datum"
+expected_error = 'ist kein gültiges Datum'
-paper = FactoryBot.build(:paper, published_at: "fubar")
+paper = FactoryBot.build(:paper, published_at: 'fubar')
 expect(paper).not_to be_valid
 expect(paper.errors[:published_at]).not_to be_empty
 expect(paper.errors[:published_at]).to include(expected_error), "Expected #{paper.errors[:published_at]} to include \"#{expected_error}\""

@@ -1,8 +1,12 @@
+# frozen_string_literal: true
+
 # This file is copied to spec/ when you run 'rails generate rspec:install'
 ENV['RAILS_ENV'] ||= 'test'
-require File.expand_path('../../config/environment', __FILE__)
+require File.expand_path('../config/environment', __dir__)
 # Prevent database truncation if the environment is production
-abort("The Rails environment is running in production mode!") if Rails.env.production?
+if Rails.env.production?
+abort('The Rails environment is running in production mode!')
+end
 require 'spec_helper'
 require 'rspec/rails'
 # Add additional requires below this line. Rails is not loaded until this point!

@@ -20,7 +24,7 @@ require 'rspec/rails'
 # directory. Alternatively, in the individual `*_spec.rb` files, manually
 # require only the support files necessary.
 #
-Dir[Rails.root.join('spec/support/**/*.rb')].each { |f| require f }
+Dir[Rails.root.join('spec/support/**/*.rb')].sort.each { |f| require f }

 # Checks for pending migrations before tests are run.
 # If you are not using ActiveRecord, you can remove this line.

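The one behavioural tweak in this hunk is the added `.sort`: `Dir[]` yields entries in filesystem order, which can differ from machine to machine, so sorting makes the order in which the support files are required deterministic (presumably what RuboCop's Lint/NonDeterministicRequireOrder cop asks for). A standalone Ruby sketch of the idea, using a hypothetical directory name:

# 'support' is a placeholder directory used only for this illustration.
support_files = Dir[File.join(__dir__, 'support', '**', '*.rb')]

# Without .sort the require order follows the filesystem; with it the order
# is stable and alphabetical on every machine.
support_files.sort.each { |f| require f }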
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'coveralls'
 Coveralls.wear!('rails')

@@ -43,53 +45,51 @@ RSpec.configure do |config|
 mocks.verify_partial_doubles = true
 end

 # The settings below are suggested to provide a good initial experience
 # with RSpec, but feel free to customize to your heart's content.
-=begin
-# These two settings work together to allow you to limit a spec run
-# to individual examples or groups you care about by tagging them with
-# `:focus` metadata. When nothing is tagged with `:focus`, all examples
-# get run.
-config.filter_run :focus
-config.run_all_when_everything_filtered = true
-
-# Allows RSpec to persist some state between runs in order to support
-# the `--only-failures` and `--next-failure` CLI options. We recommend
-# you configure your source control system to ignore this file.
-config.example_status_persistence_file_path = "spec/examples.txt"
-
-# Limits the available syntax to the non-monkey patched syntax that is
-# recommended. For more details, see:
-# - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
-# - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
-# - http://myronmars.to/n/dev-blog/2014/05/notable-changes-in-rspec-3#new__config_option_to_disable_rspeccore_monkey_patching
-config.disable_monkey_patching!
-
-# Many RSpec users commonly either run the entire suite or an individual
-# file, and it's useful to allow more verbose output when running an
-# individual spec file.
-if config.files_to_run.one?
-# Use the documentation formatter for detailed output,
-# unless a formatter has already been configured
-# (e.g. via a command-line flag).
-config.default_formatter = 'doc'
-end
-
-# Print the 10 slowest examples and example groups at the
-# end of the spec run, to help surface which specs are running
-# particularly slow.
-config.profile_examples = 10
-
-# Run specs in random order to surface order dependencies. If you find an
-# order dependency and want to debug it, you can fix the order by providing
-# the seed, which is printed after each run.
-# --seed 1234
-config.order = :random
-
-# Seed global randomization in this process using the `--seed` CLI option.
-# Setting this allows you to use `--seed` to deterministically reproduce
-# test failures related to randomization by passing the same `--seed` value
-# as the one that triggered the failure.
-Kernel.srand config.seed
-=end
+# # These two settings work together to allow you to limit a spec run
+# # to individual examples or groups you care about by tagging them with
+# # `:focus` metadata. When nothing is tagged with `:focus`, all examples
+# # get run.
+# config.filter_run :focus
+# config.run_all_when_everything_filtered = true
+#
+# # Allows RSpec to persist some state between runs in order to support
+# # the `--only-failures` and `--next-failure` CLI options. We recommend
+# # you configure your source control system to ignore this file.
+# config.example_status_persistence_file_path = "spec/examples.txt"
+#
+# # Limits the available syntax to the non-monkey patched syntax that is
+# # recommended. For more details, see:
+# # - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
+# # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
+# # - http://myronmars.to/n/dev-blog/2014/05/notable-changes-in-rspec-3#new__config_option_to_disable_rspeccore_monkey_patching
+# config.disable_monkey_patching!
+#
+# # Many RSpec users commonly either run the entire suite or an individual
+# # file, and it's useful to allow more verbose output when running an
+# # individual spec file.
+# if config.files_to_run.one?
+# # Use the documentation formatter for detailed output,
+# # unless a formatter has already been configured
+# # (e.g. via a command-line flag).
+# config.default_formatter = 'doc'
+# end
+#
+# # Print the 10 slowest examples and example groups at the
+# # end of the spec run, to help surface which specs are running
+# # particularly slow.
+# config.profile_examples = 10
+#
+# # Run specs in random order to surface order dependencies. If you find an
+# # order dependency and want to debug it, you can fix the order by providing
+# # the seed, which is printed after each run.
+# # --seed 1234
+# config.order = :random
+#
+# # Seed global randomization in this process using the `--seed` CLI option.
+# # Setting this allows you to use `--seed` to deterministically reproduce
+# # test failures related to randomization by passing the same `--seed` value
+# # as the one that triggered the failure.
+# Kernel.srand config.seed
 end

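Despite its size, the hunk above changes no behaviour: the generated suggestions stay disabled, and only the comment style changes from a `=begin`/`=end` block comment to per-line `#` comments (presumably RuboCop's Style/BlockComments cop), which is why every wrapped line gains a leading `# `. A tiny illustrative Ruby sketch of the two equivalent spellings:

=begin
config.profile_examples = 10   # disabled: lives inside a block comment
=end

# config.profile_examples = 10   # disabled: hidden behind a line comment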
@@ -1,10 +1,12 @@
+# frozen_string_literal: true
+
 require 'capybara/apparition'
 Capybara.register_driver :apparition do |app|
 Capybara::Apparition::Driver.new(
 app,
 headless: true,
-browser_options: [ :no_sandbox, disable_features: 'VizDisplayCompositor']
+browser_options: [:no_sandbox, disable_features: 'VizDisplayCompositor']
 )
 end

 Capybara.javascript_driver = :apparition

@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 RSpec.configure do |config|
 config.before(:suite) do
 DatabaseCleaner.clean_with(:truncation)

@@ -18,5 +20,4 @@ RSpec.configure do |config|
 config.append_after(:each) do
 DatabaseCleaner.clean
 end
-
 end

@@ -1,6 +1,8 @@
+# frozen_string_literal: true
+
 RSpec.configure do |config|
 config.before :each, elasticsearch: true do
 Paper.__elasticsearch__.create_index!(force: true)
-Elasticsearch::Model.client.cluster.health wait_for_status: "yellow"
+Elasticsearch::Model.client.cluster.health wait_for_status: 'yellow'
 end
 end

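This support file keys Elasticsearch setup off RSpec example metadata: only examples tagged with `elasticsearch: true` pay for index re-creation and the wait for yellow cluster status. A hedged usage sketch modelled on the feature specs in this commit (Paper, the :paper factory, search_path and the German result counter all come from this repository; the scenario itself is made up):

RSpec.feature 'Indexing smoke test', type: :feature, elasticsearch: true do
  scenario 'a freshly created paper is searchable' do
    FactoryBot.create(:paper, name: 'Testen')
    Paper.__elasticsearch__.refresh_index!
    visit search_path body: 'leipzig', paper_search: { query: 'Test' }
    expect(page).to have_content('1 Dokument in der Datenbank')
  end
end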
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 RSpec.configure do |config|
 config.include FactoryBot::Syntax::Methods
 end

@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 Shoulda::Matchers.configure do |config|
 config.integrate do |with|
 with.test_framework :rspec

@@ -1,7 +0,0 @@
-require 'test_helper'
-
-class SearchControllerTest < ActionController::TestCase
-# test "the truth" do
-# assert true
-# end
-end

@@ -1,7 +0,0 @@
-require 'test_helper'
-
-class SessionsControllerTest < ActionController::TestCase
-# test "the truth" do
-# assert true
-# end
-end

test/fixtures/.keep (vendored, 0 lines changed)
test/fixtures/importers.yml (vendored, 11 lines changed)
@@ -1,11 +0,0 @@
-# Read about fixtures at http://api.rubyonrails.org/classes/ActiveRecord/FixtureSet.html
-
-# This model initially had no columns defined. If you add columns to the
-# model remove the '{}' from the fixture names and add the columns immediately
-# below each fixture, per the syntax in the comments below
-#
-one: {}
-# column: value
-#
-two: {}
-# column: value

test/fixtures/users.yml (vendored, 11 lines changed)
@@ -1,11 +0,0 @@
-# Read about fixtures at http://api.rubyonrails.org/classes/ActiveRecord/FixtureSet.html
-
-# This model initially had no columns defined. If you add columns to the
-# model remove the '{}' from the fixture names and add the columns immediately
-# below each fixture, per the syntax in the comments below
-#
-one: {}
-# column: value
-#
-two: {}
-# column: value

@@ -1,8 +0,0 @@
-require 'test_helper'
-
-class RoutesTest < ActionDispatch::IntegrationTest
-test "route test" do
-assert_generates "/import", { :controller => "import", :action => "new_papers_callback" }
-assert_generates "/", :controller => "search", :action => "index"
-end
-end

@@ -1,7 +0,0 @@
-require 'test_helper'
-
-class ImporterTest < ActiveSupport::TestCase
-# test "the truth" do
-# assert true
-# end
-end

@@ -1,52 +0,0 @@
-require 'test_helper'
-
-class PaperTest < ActiveSupport::TestCase
-
-context "Validations" do
-should validate_presence_of(:name)
-should validate_length_of(:name).is_at_most(1000)
-
-should validate_presence_of(:url)
-should validate_length_of(:url).is_at_most(1000)
-context "URL uniqueness" do
-subject { FactoryBot.build(:paper) }
-should validate_uniqueness_of(:url)
-end
-should "validate url format sane" do
-expected_error = "ist keine gültige URL"
-paper = FactoryBot.build(:paper, url: "wtf")
-assert_not paper.valid?, "Expected paper to not be valid with invalid URL"
-assert_not paper.errors[:url].empty?
-assert paper.errors[:url].include?(expected_error), "Expected #{paper.errors[:url]} to include \"#{expected_error}\""
-end
-
-should validate_presence_of(:reference)
-should validate_length_of(:reference).is_at_most(100)
-
-should validate_presence_of(:body)
-should validate_length_of(:body).is_at_most(100)
-
-should validate_presence_of(:content)
-should validate_length_of(:content).is_at_most(100_000)
-
-should validate_presence_of(:originator)
-should validate_length_of(:originator).is_at_most(300)
-
-should validate_presence_of(:paper_type)
-should validate_length_of(:paper_type).is_at_most(50)
-
-context "published_at" do
-should validate_presence_of(:published_at)
-should "validate date is parseable" do
-expected_error = "ist kein gültiges Datum"
-paper = FactoryBot.build(:paper, published_at: "fubar")
-assert_not paper.valid?
-assert_not paper.errors[:published_at].empty?
-assert paper.errors[:published_at].include?(expected_error), "Expected #{paper.errors[:published_at]} to include \"#{expected_error}\""
-end
-end
-
-should validate_length_of(:resolution).is_at_most(30_000)
-end
-
-end

@@ -1,7 +0,0 @@
-require 'test_helper'
-
-class UserTest < ActiveSupport::TestCase
-# test "the truth" do
-# assert true
-# end
-end

@@ -1,19 +0,0 @@
-require 'simplecov'
-SimpleCov.start 'rails'
-
-ENV['RAILS_ENV'] ||= 'test'
-require File.expand_path('../../config/environment', __FILE__)
-require 'rails/test_help'
-require 'capybara/rails'
-
-class ActiveSupport::TestCase
-# Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
-# fixtures :all
-
-# Add more helper methods to be used by all tests here...
-end
-
-class ActionDispatch::IntegrationTest
-# Make the Capybara DSL available in all integration tests
-include Capybara::DSL
-end