improve containers

Vri 🌈 2023-05-10 22:08:36 +02:00 committed by GitHub
parent e727f0e438
commit 93a9df63e6
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
13 changed files with 3255 additions and 176 deletions

58
.github/workflows/container-images.yml vendored Normal file

@@ -0,0 +1,58 @@
name: Build Container Images
on:
  push:
    paths:
      - '.github/container-images.yml'
      - 'container/**'
      - 'Gemfile'
      - 'Gemfile.lock'
      - 'package.json'
    branches:
      - 'master'
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Log in to package registry
        uses: docker/login-action@v1
        with:
          registry: https://code.cozy.town/
          username: ${{ secrets.REGISTRY_USERNAME }}
          password: ${{ secrets.REGISTRY_TOKEN }}
      - name: Build and push Base image
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./container/base/Dockerfile
          push: true
          tags: code.cozy.town/codeforleipzig/stadtratmonitor/base:latest
      - name: Build and push Deploy image
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./container/deploy/Dockerfile
          push: true
          tags: code.cozy.town/codeforleipzig/stadtratmonitor/deploy:latest
      - name: Build and push Develop image
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./container/develop/Dockerfile
          push: true
          tags: code.cozy.town/codeforleipzig/stadtratmonitor/develop:latest
      - name: Build and push Test image
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./container/test/Dockerfile
          push: true
          tags: code.cozy.town/codeforleipzig/stadtratmonitor/test:latest
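For reference, the build-and-push that each workflow step performs can be reproduced locally with the plain docker (or podman) CLI. A minimal sketch for the base image, assuming you have push credentials for code.cozy.town:

```sh
# authenticate against the registry the workflow pushes to
docker login code.cozy.town

# same build context (repository root) and Dockerfile as the workflow step
docker build -f container/base/Dockerfile \
  -t code.cozy.town/codeforleipzig/stadtratmonitor/base:latest .

# publish the image so the deploy/develop/test builds can use it as their FROM
docker push code.cozy.town/codeforleipzig/stadtratmonitor/base:latest
```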

Dockerfile

@@ -1,73 +0,0 @@
FROM ruby:3.2.2
RUN apt-get update && apt-get install -y curl
RUN curl -fsSL https://deb.nodesource.com/setup_lts.x | bash -
RUN apt-get update && apt-get install -y ruby ruby-dev ruby-bundler \
build-essential zlib1g-dev libsqlite3-dev libxml2-dev libxslt1-dev pkg-config nodejs
RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
ENV DOCKERIZE_VERSION v0.6.1
RUN curl -sSLO https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
&& tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
&& rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
COPY ./docker-entrypoint.sh /
RUN chmod +x docker-entrypoint.sh
RUN mkdir -p /home/srm/tmp
#COPY Gemfile.lock /home/srm/tmp/Gemfile.lock
ARG USER_ID=1000
RUN groupadd srm && useradd --uid $USER_ID -g srm srm
RUN chown -R srm:srm /home/srm
USER srm
WORKDIR /home/srm
RUN git clone https://github.com/sstephenson/rbenv.git .rbenv
RUN git clone https://github.com/sstephenson/ruby-build.git ~/.rbenv/plugins/ruby-build
RUN echo 'export PATH="/home/srm/.rbenv/bin:$PATH"' >> .bashrc && echo 'eval "$(~/.rbenv/bin/rbenv init -)"' >> .bashrc && . ~/.bashrc
RUN /home/srm/.rbenv/bin/rbenv install 3.2.2
RUN /home/srm/.rbenv/bin/rbenv rehash
RUN /home/srm/.rbenv/bin/rbenv global 3.2.2
RUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh | bash
ENV NODE_VERSION 18.16.0
ENV NVM_DIR /home/srm/.nvm
RUN \
. ~/.nvm/nvm.sh \
&& nvm install $NODE_VERSION \
&& nvm alias default $NODE_VERSION \
&& nvm use default;
ENV NODE_PATH $NVM_DIR/v$NODE_VERSION/lib/node_modules
ENV PATH $NVM_DIR/versions/node/v$NODE_VERSION/bin:$PATH
RUN gem install bundler
RUN mkdir -p /home/srm/app
ADD . /home/srm/app
USER root
RUN chown -R srm:srm /home/srm/app
USER srm
WORKDIR /home/srm/app
RUN bundle config build.nokogiri --use-system-libraries
RUN bundle update
RUN bundle install
COPY package.json package.json
USER root
RUN chown -R srm:srm package.json
USER srm
RUN npm install -g yarn sass
EXPOSE 3000
ENTRYPOINT ["/docker-entrypoint.sh"]
CMD ["bundle", "exec", "puma" "-C", "config/puma.rb"]

Gemfile.lock

@@ -66,7 +66,7 @@ GEM
       i18n (>= 1.6, < 2)
       minitest (>= 5.1)
       tzinfo (~> 2.0)
-    addressable (2.8.3)
+    addressable (2.8.4)
       public_suffix (>= 2.0.2, < 6.0)
     apparition (0.6.0)
       capybara (~> 3.13, < 4)
@@ -133,7 +133,7 @@ GEM
     factory_bot_rails (6.2.0)
       factory_bot (~> 6.2.0)
       railties (>= 5.0.0)
-    faker (3.1.1)
+    faker (3.2.0)
       i18n (>= 1.8.11, < 2)
     faraday (1.10.3)
       faraday-em_http (~> 1.0)
@@ -165,9 +165,9 @@ GEM
     http-accept (1.7.0)
     http-cookie (1.0.5)
       domain_name (~> 0.5)
-    i18n (1.12.0)
+    i18n (1.13.0)
       concurrent-ruby (~> 1.0)
-    importmap-rails (1.1.5)
+    importmap-rails (1.1.6)
       actionpack (>= 6.0.0)
       railties (>= 6.0.0)
     jquery-rails (4.5.1)
@@ -225,7 +225,7 @@ GEM
       net-protocol
     netrc (0.11.0)
     nio4r (2.5.9)
-    nokogiri (1.14.2-x86_64-linux)
+    nokogiri (1.14.3-x86_64-linux)
       racc (~> 1.4)
     omniauth (1.9.2)
       hashie (>= 3.4.6)
@@ -234,20 +234,20 @@ GEM
       faraday
       multi_json
       omniauth (~> 1.0)
-    parallel (1.22.1)
-    parser (3.2.2.0)
+    parallel (1.23.0)
+    parser (3.2.2.1)
       ast (~> 2.4.1)
-    popper_js (2.11.6)
+    popper_js (2.11.7)
     pry (0.14.2)
       coderay (~> 1.1)
       method_source (~> 1.0)
     pry-rails (0.3.9)
       pry (>= 0.10.4)
     public_suffix (5.0.1)
-    puma (6.2.1)
+    puma (6.2.2)
       nio4r (~> 2.0)
     racc (1.6.2)
-    rack (2.2.6.4)
+    rack (2.2.7)
     rack-test (2.1.0)
       rack (>= 1.3)
     rails (7.0.4.3)
@@ -285,7 +285,7 @@ GEM
       zeitwerk (~> 2.5)
     rainbow (3.1.1)
     rake (13.0.6)
-    regexp_parser (2.7.0)
+    regexp_parser (2.8.0)
     rest-client (2.1.0)
       http-accept (>= 1.7.0, < 2.0)
       http-cookie (>= 1.0.2, < 2.0)
@@ -309,7 +309,7 @@ GEM
       rspec-mocks (~> 3.9.0)
       rspec-support (~> 3.9.0)
     rspec-support (3.9.4)
-    rubocop (1.49.0)
+    rubocop (1.50.2)
       json (~> 2.3)
       parallel (~> 1.10)
       parser (>= 3.2.0.0)
@@ -319,7 +319,7 @@ GEM
       rubocop-ast (>= 1.28.0, < 2.0)
       ruby-progressbar (~> 1.7)
       unicode-display_width (>= 2.4.0, < 3.0)
-    rubocop-ast (1.28.0)
+    rubocop-ast (1.28.1)
       parser (>= 3.2.1.0)
     rubocop-faker (1.1.0)
       faker (>= 2.12.0)
@@ -390,7 +390,7 @@ GEM
     websocket-extensions (0.1.5)
     xpath (3.2.0)
       nokogiri (~> 1.8)
-    zeitwerk (2.6.7)
+    zeitwerk (2.6.8)

 PLATFORMS
   x86_64-linux

@@ -443,4 +443,4 @@ RUBY VERSION
    ruby 3.2.2p53

 BUNDLED WITH
-   2.4.10
+   2.3.26

README.md

@@ -1,9 +1,8 @@
 # Stadtratmonitor
-**Offical site:** https://stadtratmonitor.leipzig.codefor.de/
+**Official site:** [stadtratmonitor.leipzig.codefor.de](https://stadtratmonitor.leipzig.codefor.de/)
 ## Usage / Features
 The Stadtratmonitor Leipzig is a lightweight user interface for performing full text searches against paper contents issued to the city council of Leipzig. Note, this solution uses the data from [Ratsinformationssystem Leipzig](https://ratsinformation.leipzig.de/allris_leipzig_public/) that also offers [text search capabilities](https://ratsinformation.leipzig.de/allris_leipzig_public/vo040).
 ### Sorting
@@ -47,48 +46,28 @@ Create a new Feed in RSSOWL and paste the just copied URL in the form
 When updating the feed in RSSOWL (or any other RSS reader of your choice) time after time all new papers matching the search query criteria will appear.
 ## Setup
 There are two ways to run this app: using a local development setup, or using
 docker.
 ### Local machine setup
 1. Install Ruby, Bundler, Elasticsearch
-1. Start Elasticsearch: `elasticsearch`
-1. Setup Rails app: `bundle && bundle exec rake db:setup`
-1. See "Importing data" below
-1. Start Rails server: `bundle exec rails s`
-1. Visit [http://localhost:3000](http://localhost:3000)
-### Using docker
-1. Install docker and docker-compose: https://docs.docker.com/compose/install/
-1. Start the app: `docker-compose up`
-1. Initialize the database: `docker-compose run --user srm web rake db:setup`
-1. See "Importing data" below
-1. Get the address of the docker host: `docker-machine ip default`
-1. Point your browser to: 'http://\<IP of docker host\>:3000'
-```
-rake assets:clean
-rake assets:clobber
-rake tmp:clear
-npm install -g sass
-#rails css:install:bootstrap
-#rails javascript:install:esbuild
-rake assets:precompile
-```
-### Importing data and building the index
-1. You can use [our allris-scraper](https://github.com/CodeforLeipzig/allris-scraper) to download the papers (resp. their links) from the [OPARL](https://oparl.org/) API, this will produce an input.json file
-1. Put this input.json to a public web server and set the URL to this file then in:
+2. Start Elasticsearch: `elasticsearch`
+3. Setup Rails app: `bundle && bundle exec rake db:setup`
+4. See “[Importing data via Allris Scraper](#importing-data-via-allris-scraper-optional)” below
+5. Start Rails server: `bundle exec rails s`
+6. Open [localhost:3000](http://localhost:3000)
+### Using Container (Docker/Podman/…)
+1. [Install Docker and Docker Compose](https://docs.docker.com/compose/install/)
+2. Start the application: `docker-compose up`
+3. Initialize the database: `docker exec stadtratmonitor-web sh -c "rake db:setup && rake import_papers && rake index:rebuild"`
+4. Open [localhost:3000](http://localhost:3000)
+### Importing data via Allris Scraper (optional)
+1. You can use [our Allris Scraper](https://github.com/CodeforLeipzig/allris-scraper) to download the papers (or rather links to them) from the [OPARL](https://oparl.org/) API; this produces an input.json file
+2. Put this input.json on a public web server and then set the URL to this file in:
 `app/controllers/import_controller.rb` and `lib/tasks/import_papers.rake`
-1. Import the data from our scraper: `docker-compose run --user srm web rake import_papers`
-1. Build the elasticsearch index: `docker-compose run --user srm web rake index:rebuild`
 ### Running tests
 Assuming docker and docker-compose is installed:
-1. `docker-compose run --user srm web bin/run-tests`
+1. `docker-compose run web bin/run-tests`
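Since the compose file below publishes all ports on the loopback interface only, a quick way to check that the stack is up after `docker-compose up` (host-side commands; the Rails endpoint only responds once the database has been initialised):

```sh
curl -s  http://127.0.0.1:9215   # Elasticsearch (host port 9215 -> container port 9200)
curl -sI http://127.0.0.1:3000   # Rails (puma) in the stadtratmonitor-web container
# ElasticHQ UI: http://127.0.0.1:5000
```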

33
container/base/Dockerfile Executable file

@@ -0,0 +1,33 @@
# ~~~~~~~~~~~~~~~~~~
# ~~~ BASE IMAGE ~~~
# ~~~~~~~~~~~~~~~~~~
FROM docker.io/ruby:3.2.2-alpine3.17
RUN apk update && apk add \
build-base \
curl \
linux-headers \
nodejs \
npm
ENV DOCKERIZE_VERSION v0.6.1
RUN curl -sSLO https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
&& tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
&& rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
ARG USER_ID=1000
RUN addgroup srm && \
adduser --disabled-password --uid $USER_ID --ingroup srm srm
RUN mkdir -p /app/
WORKDIR /app/
COPY Gemfile Gemfile
COPY Gemfile.lock Gemfile.lock
COPY package.json package.json
RUN npm install -g yarn sass
RUN bundle config without development test
RUN bundle install
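Only Gemfile, Gemfile.lock and package.json are copied into this image, so the expensive `bundle install` layer is rebuilt only when the dependency files change; the application code is provided later (docker-compose.yml bind-mounts the working tree at /app/). A quick sanity check once the image exists, assuming the tag used in the workflow above:

```sh
# the production gem set (development/test groups excluded) should already be satisfied
docker run --rm code.cozy.town/codeforleipzig/stadtratmonitor/base:latest bundle check
```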

11
container/deploy/Dockerfile Executable file

@@ -0,0 +1,11 @@
# ~~~~~~~~~~~~~~~~~~~~
# ~~~ DEPLOY IMAGE ~~~
# ~~~~~~~~~~~~~~~~~~~~
FROM code.cozy.town/codeforleipzig/stadtratmonitor/base:latest
WORKDIR /
USER srm
EXPOSE 3000
CMD bundle exec puma -C /app/config/puma.rb
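The deploy image only pins the runtime user, the exposed port and the puma command on top of the base image; application code and configuration are expected under /app/ and via the environment variables used in docker-compose.yml. A hypothetical standalone run could look like this (the secret value is a placeholder):

```sh
docker run --rm -p 127.0.0.1:3000:3000 \
  -v "$PWD":/app/ \
  -e ELASTICSEARCH_URL=http://elasticsearch:9200 \
  -e APPLICATION_HOST=localhost \
  -e SHARED_IMPORT_SECRET=changeme \
  code.cozy.town/codeforleipzig/stadtratmonitor/deploy:latest
# note: the elasticsearch hostname above only resolves when the container is
# attached to a network that also runs the Elasticsearch service
```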

13
container/develop/Dockerfile Executable file

@@ -0,0 +1,13 @@
# ~~~~~~~~~~~~~~~~~~~~~
# ~~~ DEVELOP IMAGE ~~~
# ~~~~~~~~~~~~~~~~~~~~~
FROM code.cozy.town/codeforleipzig/stadtratmonitor/base:latest
RUN bundle config without test
RUN bundle install --prefer-local
WORKDIR /app/
EXPOSE 3000
CMD bundle exec puma -C /app/config/puma.rb
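In day-to-day development this image is normally driven through docker-compose (see docker-compose.yml below), which mounts the working tree at /app/; one-off commands can be run the same way, for example:

```sh
# open a shell in a throw-away container of the web service
docker-compose run --rm web sh

# or run a single rake task directly
docker-compose run --rm web rake db:setup
```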

13
container/test/Dockerfile Executable file

@@ -0,0 +1,13 @@
# ~~~~~~~~~~~~~~~~~~
# ~~~ TEST IMAGE ~~~
# ~~~~~~~~~~~~~~~~~~
FROM code.cozy.town/codeforleipzig/stadtratmonitor/base:latest
# replace the base image's "without development test" setting with a group name
# that does not exist in the Gemfile, so the development and test gems get installed
RUN bundle config without reset_dummy_group
RUN bundle install --prefer-local
WORKDIR /
EXPOSE 3000
CMD bundle exec puma -C /app/config/puma.rb
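Because the `without` setting is reset here, the test image contains the development and test gem groups as well. A sketch of running the suite directly against this image, assuming the working tree is mounted at /app and an Elasticsearch instance is reachable via ELASTICSEARCH_URL:

```sh
docker run --rm \
  -v "$PWD":/app -w /app \
  -e ELASTICSEARCH_URL=http://elasticsearch:9200 \
  code.cozy.town/codeforleipzig/stadtratmonitor/test:latest \
  bin/run-tests
```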

docker-compose.yml

@@ -1,23 +1,20 @@
 version: "3.7"
 services:
   web:
-    user: srm
-    build:
-      context: .
-      args:
-        - USER_ID=${USER_ID:-1000}
+    image: code.cozy.town/codeforleipzig/stadtratmonitor/develop:latest
+    container_name: stadtratmonitor-web
     volumes:
-      - .:/app
+      - .:/app/
     ports:
-      - "3000:3000"
-    links:
-      - elasticsearch
+      - "127.0.0.1:3000:3000"
     environment:
       - ELASTICSEARCH_URL=${ELASTICSEARCH_URL:-http://elasticsearch:9200}
       - APPLICATION_HOST=${APPLICATION_HOST:-localhost}
       - SHARED_IMPORT_SECRET=$SHARED_IMPORT_SECRET
-    command: sh -c "cd /home/srm/app && bundle install && bundle exec puma -C config/puma.rb"
+    networks:
+      - stadtratmonitor
   elasticsearch:
+    container_name: stadtratmonitor-elasticsearch
     user: elasticsearch
     build: ./docker/elasticsearch
     environment:
@@ -25,17 +22,20 @@ services:
       - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
     ports:
-      - "9215:9200"
+      - "127.0.0.1:9215:9200"
     volumes:
       - ${ESDATA:-esdata}:/usr/share/elasticsearch/data //read, write and execute
       - ./docker/elasticsearch/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml
     command: sh -c "/usr/share/elasticsearch/bin/elasticsearch"
+    networks:
+      - stadtratmonitor
   elastichq:
+    container_name: stadtratmonitor-elastichq
     image: elastichq/elasticsearch-hq
     ports:
-      - "5000:5000"
-    links:
-      - elasticsearch
+      - "127.0.0.1:5000:5000"
 volumes:
   esdata:
+networks:
+  stadtratmonitor:
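With the services on a shared `stadtratmonitor` network and all ports bound to 127.0.0.1, the bring-up matches the README: start the stack, then initialise the database and the search index inside the named web container:

```sh
docker-compose up -d   # -d for detached; the README runs it in the foreground
docker exec stadtratmonitor-web sh -c "rake db:setup && rake import_papers && rake index:rebuild"

# afterwards: app on 127.0.0.1:3000, Elasticsearch on 127.0.0.1:9215, ElasticHQ on 127.0.0.1:5000
```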


@@ -1,28 +0,0 @@
version: "3.7"
volumes:
elasticsearch:
services:
web:
build: .
volumes:
- .:/app
ports:
- "3000:3000"
# links:
# - elasticsearch
environment:
ELASTICSEARCH_URL: 'http://leipzig-giesst.de:9200'
elasticsearch:
build: ./docker/elasticsearch
environment:
- discovery.type=single-node
ports:
- "9200:9200"
volumes:
- elasticsearch:/usr/share/elasticsearch/data
elastichq:
image: elastichq/elasticsearch-hq
ports:
- "5000:5000"
# links:
# - elasticsearch

docker-entrypoint.sh

@@ -1,8 +0,0 @@
#!/bin/sh
set -e
if [ -f ./tmp/pids/server.pid ]; then
  rm ./tmp/pids/server.pid
fi
exec bundle exec "$@"

3081
input.json Normal file

File diff suppressed because one or more lines are too long

lib/tasks/import_papers.rake

@@ -3,6 +3,6 @@ task import_papers: :environment do
   require 'open-uri'
   #OpenSSL::SSL::VERIFY_PEER = OpenSSL::SSL::VERIFY_NONE
   #api_key = Rails.application.config_for(:morph)['key']
-  uri = URI.parse("https://joergreichert.de/srm/input.json")
-  Paper.import_from_json(uri.read)
+  input = File.read("input.json")
+  Paper.import_from_json(input)
 end
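The task now reads input.json from the application's working directory (the copy added at the repository root) instead of downloading it. After replacing input.json with a freshly scraped file, the import and the index rebuild can be re-run, for example:

```sh
# local setup
bundle exec rake import_papers index:rebuild

# or inside the running compose stack
docker exec stadtratmonitor-web sh -c "rake import_papers && rake index:rebuild"
```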