Setup docker (#26)

* basic config

* Add github action

* Fix makefile for linux and variable python (#27)

* fix makefile

* remove out

* add to gitignore

* Fix makefile for linux and variable python (#27)

* fix makefile

* remove out

* add to gitignore

* Fix dockerfile

* stash changes

* Make makefile dynamic (#28)

* Remove broken docker packages for now

* Add web

* Make Black Formatter happy?

Co-authored-by: Max Zaremba <max.zaremba@gmail.com>
tweak-docker-port
Justin O'Boyle 2022-12-13 07:46:08 -05:00 committed by GitHub
parent 924d2b2539
commit 535715932d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 156 additions and 16 deletions

19
.github/workflows/test-containers.yml vendored Normal file
View File

@ -0,0 +1,19 @@
# CI: build the whole docker-compose stack on every push, then tear it down.
name: Test Containers
on: push
jobs:
  docker:
    # Fail fast if an image build or container start hangs.
    timeout-minutes: 4
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        # checkout@v1 runs on the deprecated Node 12 runtime and triggers
        # GitHub deprecation warnings; v3 is a drop-in replacement.
        uses: actions/checkout@v3
      - name: Start containers
        # NOTE(review): `docker-compose` (v1 CLI) is being phased out on
        # GitHub runners in favor of `docker compose` — confirm runner support.
        run: docker-compose -f "docker-compose.yml" up -d --build
      - name: Stop containers
        # always() ensures cleanup runs even when the build/up step fails.
        if: always()
        run: docker-compose -f "docker-compose.yml" down

View File

@ -34,7 +34,7 @@ You can find the code for the mining engine in `oapen-engine/`.
* Most popular PostgreSQL database adapter for Python
* `pandas` -- data analysis library
* Maintained by [PYData](https://pandas.pydata.org/) with large amounts of sponsors. 2,700+ contributors.
* `sklearn` -- Scikit Learn
* `scikit-learn` -- Scikit Learn
* Maintained by [a large consortium of corporations and open-source developers](https://scikit-learn.org/stable/).

17
api/Dockerfile Normal file
View File

@ -0,0 +1,17 @@
FROM node:19

# Build and run from a dedicated directory: without WORKDIR, the COPY/RUN
# steps below would operate in the image root `/`, mixing app files with
# the base image's filesystem.
WORKDIR /usr/src/app

# Install app dependencies
# A wildcard is used to ensure both package.json AND package-lock.json are copied
# where available (npm@5+)
COPY package*.json ./
RUN npm install
# If you are building your code for production
# RUN npm ci --only=production

# Bundle app source
COPY . .

# API listens on 3001 (matches docker-compose port mapping).
EXPOSE 3001
CMD [ "npm", "start" ]

19
docker-compose.yml Normal file
View File

@ -0,0 +1,19 @@
version: "3.8"
services:
  # Python mining engine — background worker, no published ports.
  # (Fixed: original key had a stray space before the colon: `oapen-engine :`.)
  oapen-engine:
    build: ./oapen-engine/
  api:
    build: ./api/
    expose:
      - 3001
    ports:
      # Bind to loopback only so the API is not reachable from other hosts.
      - "127.0.0.1:3001:3001"
  web:
    build: ./web/
    expose:
      - 3000
    ports:
      - "127.0.0.1:3000:3000"
volumes:
  # NOTE(review): `db` is declared but not mounted by any service above —
  # confirm whether a database container is still planned or drop this volume.
  db:
    driver: local

36
oapen-engine/Dockerfile Normal file
View File

@ -0,0 +1,36 @@
# Multi-stage build: `base` holds shared env vars, `python-deps` compiles and
# installs the Python dependencies into a virtualenv, and `runtime` copies only
# that virtualenv on top of the slim base image (keeps gcc out of the final image).
FROM python:3.10-slim as base

# Setup env
ENV LANG C.UTF-8
ENV LC_ALL C.UTF-8
# Don't write .pyc files; dump tracebacks on hard crashes.
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONFAULTHANDLER 1

FROM base AS python-deps

# Install pipenv and compilation dependencies
RUN pip install pipenv
RUN apt-get update && apt-get install -y --no-install-recommends gcc linux-libc-dev

# Install python dependencies in /.venv
# NOTE(review): only the Pipfile is copied (no Pipfile.lock), hence
# --skip-lock below — dependency versions are not pinned at build time.
COPY Pipfile .
RUN PIPENV_VENV_IN_PROJECT=1 pipenv install --deploy --skip-lock --verbose

FROM base AS runtime

# Copy virtual env from python-deps stage
COPY --from=python-deps /.venv /.venv
ENV PATH="/.venv/bin:$PATH"

# Create and switch to a new user (avoid running the daemon as root)
RUN useradd --create-home appuser
WORKDIR /home/appuser
USER appuser

# Install application into container
COPY . .

# Run the application
ENTRYPOINT ["python", "src/tasks/daemon.py"]

View File

@ -34,3 +34,6 @@ run-tests:
refresh-items:
cd src && $(PYTHON) -m pipenv run python tasks/refresh_items.py
run-daemon:
cd src && $(PYTHON) -m pipenv run python tasks/daemon.py

View File

@ -8,9 +8,8 @@ nltk = "*"
requests = "*"
psycopg2-binary = "*"
pandas = "*"
sklearn = "*"
scikit-learn = "*"
lxml = "*"
psutil = "*"
[dev-packages]
pytest = "*"

View File

@ -0,0 +1,21 @@
# Daemon to run processes in the background
import signal
import sys
import time


def signal_handler(signum, frame):
    """Exit the daemon cleanly (status 0) when a termination signal arrives.

    The first parameter was renamed from ``signal`` to ``signum`` — the
    original name shadowed the ``signal`` module inside the handler.
    """
    print("\nprogram exiting gracefully")
    sys.exit(0)


def main():
    """Run the daemon heartbeat loop until a signal terminates the process."""
    # Handle Ctrl-C (SIGINT) and `docker stop` (SIGTERM) the same way, so the
    # container shuts down gracefully instead of being killed after a timeout.
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    print("Daemon up and running")
    # TODO run cronjobs here
    while True:
        print("Daemon still running")
        time.sleep(60)
    # NOTE: the original trailing print("Daemon down") was unreachable — the
    # loop only ends via sys.exit() in the signal handler — so it was removed.


if __name__ == "__main__":
    # Guard keeps the module importable (e.g. by tests) without starting
    # the infinite loop; the Docker ENTRYPOINT behavior is unchanged.
    main()

View File

@ -10,7 +10,7 @@ import data.oapen as OapenAPI
import model.ngrams as OapenEngine
from data.connection import close_connection, get_connection
from data.oapen_db import OapenDB
from util.kill_processes import kill_child_processes
# from util.kill_processes import kill_child_processes
def ngrams_task(items):
@ -103,7 +103,7 @@ def main():
db_pool.shutdown(wait=False)
io_pool.shutdown(wait=False)
ngrams_pool.shutdown(wait=False)
kill_child_processes(os.getpid())
# kill_child_processes(os.getpid())
close_connection(connection)
for collection in collections:

View File

@ -1,13 +1,13 @@
import signal
import psutil
# import psutil
def kill_child_processes(parent_pid, sig=signal.SIGTERM):
    """Send `sig` to every descendant process of `parent_pid`.

    Silently does nothing if the parent process no longer exists.
    """
    try:
        proc = psutil.Process(parent_pid)
    except psutil.NoSuchProcess:
        return
    # Recurse so grandchildren are signalled too, not just direct children.
    for child in proc.children(recursive=True):
        child.send_signal(sig)
# def kill_child_processes(parent_pid, sig=signal.SIGTERM):
# try:
# parent = psutil.Process(parent_pid)
# except psutil.NoSuchProcess:
# return
# children = parent.children(recursive=True)
# for process in children:
# process.send_signal(sig)

19
web/Dockerfile Normal file
View File

@ -0,0 +1,19 @@
FROM node:19

# Build and run from a dedicated directory: without WORKDIR, the COPY/RUN
# steps below would operate in the image root `/`, mixing app files with
# the base image's filesystem.
WORKDIR /usr/src/app

# Install app dependencies
# A wildcard is used to ensure both package.json AND package-lock.json are copied
# where available (npm@5+)
COPY package*.json ./
RUN npm install
# If you are building your code for production
# RUN npm ci --only=production

# Bundle app source
COPY . .

# Web frontend listens on 3000 (matches docker-compose port mapping).
EXPOSE 3000
RUN npm run build
CMD [ "npm", "start" ]

7
web/next.config.js Normal file
View File

@ -0,0 +1,7 @@
// Next.js build configuration for the web frontend.
const nextConfig = {
  typescript: {
    // This ONLY is for type errors
    // TODO remove when a better linting system is created
    ignoreBuildErrors: true,
  },
};

module.exports = nextConfig;

View File

@ -5,8 +5,8 @@ import { fetchSingleItemProps, SingleItemProps } from "../../lib/item/single";
export default function ItemSingle({ item }: SingleItemProps) {
const name =
item.name || item.metadata.find(({ key }) => key == "grantor.name")?.value;
const type = item.metadata.find(({ key }) => key == "dc.type")?.value;
item?.name || item?.metadata.find(({ key }) => key == "grantor.name")?.value;
const type = item?.metadata.find(({ key }) => key == "dc.type")?.value;
console.log({ item });
return (
<>