From 440a03396cd54cbb00527df62f68151e7b314571 Mon Sep 17 00:00:00 2001 From: Sander Vocke Date: Fri, 4 Sep 2020 15:16:21 +0000 Subject: [PATCH] postgres (#14) Make codebase compatible with Postgres. Add test scripts for it and a CI pipeline test. Reviewed-on: https://gitea.octiron.soleus.nu/sander/MuDBase/pulls/14 --- .drone.yml | 56 +++++---- ci/postgres_node/Dockerfile | 8 ++ .../endpoints/AlbumDetailsEndpointHandler.ts | 4 +- .../endpoints/ArtistDetailsEndpointHandler.ts | 3 +- .../endpoints/CreateAlbumEndpointHandler.ts | 1 + .../endpoints/CreateArtistEndpointHandler.ts | 1 + server/endpoints/CreateSongEndpointHandler.ts | 1 + server/endpoints/CreateTagEndpointHandler.ts | 5 +- server/endpoints/QueryEndpointHandler.ts | 23 ++-- .../endpoints/SongDetailsEndpointHandler.ts | 3 +- server/lib/asJson.ts | 4 + server/lib/jasmine_examples/Player.js | 24 ---- server/lib/jasmine_examples/Song.js | 9 -- server/migrations/20200828124218_init_db.ts | 1 + server/test/integration/flows/helpers.js | 10 +- server/test/test.sh | 117 ++++++++++++++++++ 16 files changed, 200 insertions(+), 70 deletions(-) create mode 100644 ci/postgres_node/Dockerfile create mode 100644 server/lib/asJson.ts delete mode 100644 server/lib/jasmine_examples/Player.js delete mode 100644 server/lib/jasmine_examples/Song.js create mode 100755 server/test/test.sh diff --git a/.drone.yml b/.drone.yml index 197911f..140eae7 100644 --- a/.drone.yml +++ b/.drone.yml @@ -2,34 +2,42 @@ kind: pipeline type: kubernetes name: back-end +workspace: + path: /drone/src + steps: - name: install dependencies - image: node + image: sandervocke/postgres_node:12 commands: - npm install - - cd server && npm install; cd .. 
+ - cd /drone/src/server && npm install + +- name: back-end test (SQLite3) + image: sandervocke/postgres_node:12 + commands: + - cd /drone/src/server && ./test/test.sh --sqlite + +- name: back-end test (PostgreSQL) + image: sandervocke/postgres_node:12 + commands: + - cd /drone/src/server && ./test/test.sh --postgres --start-postgres-testcontainer + +--- +kind: pipeline +type: kubernetes +name: front-end + +workspace: + path: /drone/src + +steps: +- name: install dependencies + image: node + commands: + - cd /drone/src && npm install + - cd /drone/src/client && npm install -- name: back-end test +- name: front-end compile image: node commands: - - cd server && npm test; cd .. - - -#kind: pipeline -#type: kubernetes -#name: front-end -# -#steps: -#- name: install dependencies -# image: node -# commands: -# - npm install -# - cd client && npm install; cd .. -# -#- name: front-end build -# image: node -# commands: -# - cd client && npm run-script build; cd .. -# -#--- -# \ No newline at end of file + - cd /drone/src/client && ./node_modules/.bin/tsc; cd .. diff --git a/ci/postgres_node/Dockerfile b/ci/postgres_node/Dockerfile new file mode 100644 index 0000000..539b24c --- /dev/null +++ b/ci/postgres_node/Dockerfile @@ -0,0 +1,8 @@ +# Note: this Dockerfile is written to be executed with the whole source +# as its context. 
+ +FROM postgres:12 + +# Install node.js +RUN apt update -y && apt install -y npm + diff --git a/server/endpoints/AlbumDetailsEndpointHandler.ts b/server/endpoints/AlbumDetailsEndpointHandler.ts index f18822d..e833ff9 100644 --- a/server/endpoints/AlbumDetailsEndpointHandler.ts +++ b/server/endpoints/AlbumDetailsEndpointHandler.ts @@ -1,6 +1,7 @@ import * as api from '../../client/src/api'; import { EndpointError, EndpointHandler, catchUnhandledErrors } from './types'; import Knex from 'knex'; +import asJson from '../lib/asJson'; export const AlbumDetailsEndpointHandler: EndpointHandler = async (req: any, res: any, knex: Knex) => { if (!api.checkAlbumDetailsRequest(req)) { @@ -42,12 +43,13 @@ export const AlbumDetailsEndpointHandler: EndpointHandler = async (req: any, res await Promise.all([albumPromise, tagIdsPromise, songIdsPromise, artistIdsPromise]); // Respond to the request. + console.log("ALBUM: ", album); const response: api.AlbumDetailsResponse = { name: album['name'], artistIds: artists, tagIds: tags, songIds: songs, - storeLinks: JSON.parse(album['storeLinks']), + storeLinks: asJson(album['storeLinks']), }; await res.send(response); diff --git a/server/endpoints/ArtistDetailsEndpointHandler.ts b/server/endpoints/ArtistDetailsEndpointHandler.ts index 753e12d..ede0836 100644 --- a/server/endpoints/ArtistDetailsEndpointHandler.ts +++ b/server/endpoints/ArtistDetailsEndpointHandler.ts @@ -1,6 +1,7 @@ import * as api from '../../client/src/api'; import { EndpointError, EndpointHandler, catchUnhandledErrors } from './types'; import Knex from 'knex'; +import asJson from '../lib/asJson'; export const ArtistDetailsEndpointHandler: EndpointHandler = async (req: any, res: any, knex: Knex) => { if (!api.checkArtistDetailsRequest(req)) { @@ -24,7 +25,7 @@ export const ArtistDetailsEndpointHandler: EndpointHandler = async (req: any, re const response: api.ArtistDetailsResponse = { name: results[0].name, tagIds: tagIds, - storeLinks: JSON.parse(results[0].storeLinks), 
+ storeLinks: asJson(results[0].storeLinks), } await res.send(response); diff --git a/server/endpoints/CreateAlbumEndpointHandler.ts b/server/endpoints/CreateAlbumEndpointHandler.ts index b35593e..e9f9509 100644 --- a/server/endpoints/CreateAlbumEndpointHandler.ts +++ b/server/endpoints/CreateAlbumEndpointHandler.ts @@ -51,6 +51,7 @@ export const CreateAlbumEndpointHandler: EndpointHandler = async (req: any, res: name: reqObject.name, storeLinks: JSON.stringify(reqObject.storeLinks || []), }) + .returning('id') // Needed for Postgres )[0]; // Link the artists via the linking table. diff --git a/server/endpoints/CreateArtistEndpointHandler.ts b/server/endpoints/CreateArtistEndpointHandler.ts index 80a19b9..155e9d2 100644 --- a/server/endpoints/CreateArtistEndpointHandler.ts +++ b/server/endpoints/CreateArtistEndpointHandler.ts @@ -41,6 +41,7 @@ export const CreateArtistEndpointHandler: EndpointHandler = async (req: any, res name: reqObject.name, storeLinks: JSON.stringify(reqObject.storeLinks || []), }) + .returning('id') // Needed for Postgres )[0]; // Link the tags via the linking table. diff --git a/server/endpoints/CreateSongEndpointHandler.ts b/server/endpoints/CreateSongEndpointHandler.ts index 0c42585..89fdf58 100644 --- a/server/endpoints/CreateSongEndpointHandler.ts +++ b/server/endpoints/CreateSongEndpointHandler.ts @@ -60,6 +60,7 @@ export const CreateSongEndpointHandler: EndpointHandler = async (req: any, res: title: reqObject.title, storeLinks: JSON.stringify(reqObject.storeLinks || []), }) + .returning('id') // Needed for Postgres )[0]; // Link the artists via the linking table. 
diff --git a/server/endpoints/CreateTagEndpointHandler.ts b/server/endpoints/CreateTagEndpointHandler.ts index a1ff4d6..183ed1c 100644 --- a/server/endpoints/CreateTagEndpointHandler.ts +++ b/server/endpoints/CreateTagEndpointHandler.ts @@ -40,7 +40,10 @@ export const CreateTagEndpointHandler: EndpointHandler = async (req: any, res: a if (maybeParent) { tag['parentId'] = maybeParent; } - const tagId = (await trx('tags').insert(tag))[0]; + const tagId = (await trx('tags') + .insert(tag) + .returning('id') // Needed for Postgres + )[0]; // Respond to the request. const responseObject: api.CreateTagResponse = { diff --git a/server/endpoints/QueryEndpointHandler.ts b/server/endpoints/QueryEndpointHandler.ts index 4d26a00..4707b32 100644 --- a/server/endpoints/QueryEndpointHandler.ts +++ b/server/endpoints/QueryEndpointHandler.ts @@ -1,6 +1,7 @@ import * as api from '../../client/src/api'; import { EndpointError, EndpointHandler, catchUnhandledErrors } from './types'; import Knex from 'knex'; +import asJson from '../lib/asJson'; enum ObjectType { Song = 0, @@ -176,13 +177,17 @@ function constructQuery(knex: Knex, queryFor: ObjectType, queryElem: api.QueryEl joinObjects.delete(queryFor); // We are already querying this object in the base query. // Figure out what data we want to select from the results. - var columns: string[] = []; - joinObjects.forEach((obj: ObjectType) => columns.push(...objectColumns[obj])); - columns.push(...objectColumns[queryFor]); + var columns: any[] = objectColumns[queryFor]; + + // TODO: there was a line here to add columns for the joined objects. + // Could not get it to work with Postgres, which wants aggregate functions + // to specify exactly how duplicates should be aggregated. + // Not sure whether we need these columns in the first place. + // joinObjects.forEach((obj: ObjectType) => columns.push(...objectColumns[obj])); // First, we create a base query for the type of object we need to yield. 
var q = knex.select(columns) - .distinct(objectTables[queryFor] + '.' + 'id') + .groupBy(objectTables[queryFor] + '.' + 'id') .from(objectTables[queryFor]); // Now, we need to add join statements for other objects we want to filter on. @@ -213,11 +218,13 @@ async function getLinkedObjects(knex: Knex, base: ObjectType, linked: ObjectType const columns = objectColumns[linked]; await Promise.all(baseIds.map((baseId: number) => { - return knex.select(columns).distinct(otherTable + '.id').from(otherTable) + return knex.select(columns).groupBy(otherTable + '.id').from(otherTable) .join(linkingTable, { [linkingTable + '.' + linkingTableIdNames[linked]]: otherTable + '.id' }) .where({ [linkingTable + '.' + linkingTableIdNames[base]]: baseId }) .then((others: any) => { result[baseId] = others; }) })) + + console.log("Query results for", baseIds, ":", result); return result; } @@ -309,12 +316,12 @@ export const QueryEndpointHandler: EndpointHandler = async (req: any, res: any, return { songId: song['songs.id'], title: song['songs.title'], - storeLinks: JSON.parse(song['songs.storeLinks']), + storeLinks: asJson(song['songs.storeLinks']), artists: songsArtists[song['songs.id']].map((artist: any) => { return { artistId: artist['artists.id'], name: artist['artists.name'], - storeLinks: JSON.parse(artist['artists.storeLinks']), + storeLinks: asJson(artist['artists.storeLinks']), }; }), tags: songsTags[song['songs.id']].map((tag: any) => { @@ -330,7 +337,7 @@ export const QueryEndpointHandler: EndpointHandler = async (req: any, res: any, return { artistId: artist['artists.id'], name: artist['artists.name'], - storeLinks: JSON.parse(artist['artists.storeLinks']), + storeLinks: asJson(artist['artists.storeLinks']), } }), tags: tags.map((tag: any) => { diff --git a/server/endpoints/SongDetailsEndpointHandler.ts b/server/endpoints/SongDetailsEndpointHandler.ts index 7fc3149..591e31a 100644 --- a/server/endpoints/SongDetailsEndpointHandler.ts +++ 
b/server/endpoints/SongDetailsEndpointHandler.ts @@ -1,6 +1,7 @@ import * as api from '../../client/src/api'; import { EndpointError, EndpointHandler, catchUnhandledErrors } from './types'; import Knex from 'knex'; +import asJson from '../lib/asJson'; export const SongDetailsEndpointHandler: EndpointHandler = async (req: any, res: any, knex: Knex) => { if (!api.checkSongDetailsRequest(req)) { @@ -51,7 +52,7 @@ export const SongDetailsEndpointHandler: EndpointHandler = async (req: any, res: tagIds: tags, artistIds: artists, albumIds: albums, - storeLinks: JSON.parse(song.storeLinks), + storeLinks: asJson(song.storeLinks), } await res.send(response); diff --git a/server/lib/asJson.ts b/server/lib/asJson.ts new file mode 100644 index 0000000..82ff955 --- /dev/null +++ b/server/lib/asJson.ts @@ -0,0 +1,4 @@ +export default function asJson(s: Object | string) { + return (typeof s === 'string') ? + JSON.parse(s) : s; +} \ No newline at end of file diff --git a/server/lib/jasmine_examples/Player.js b/server/lib/jasmine_examples/Player.js deleted file mode 100644 index fe95f89..0000000 --- a/server/lib/jasmine_examples/Player.js +++ /dev/null @@ -1,24 +0,0 @@ -function Player() { -} -Player.prototype.play = function(song) { - this.currentlyPlayingSong = song; - this.isPlaying = true; -}; - -Player.prototype.pause = function() { - this.isPlaying = false; -}; - -Player.prototype.resume = function() { - if (this.isPlaying) { - throw new Error("song is already playing"); - } - - this.isPlaying = true; -}; - -Player.prototype.makeFavorite = function() { - this.currentlyPlayingSong.persistFavoriteStatus(true); -}; - -module.exports = Player; diff --git a/server/lib/jasmine_examples/Song.js b/server/lib/jasmine_examples/Song.js deleted file mode 100644 index 3415bb8..0000000 --- a/server/lib/jasmine_examples/Song.js +++ /dev/null @@ -1,9 +0,0 @@ -function Song() { -} - -Song.prototype.persistFavoriteStatus = function(value) { - // something complicated - throw new Error("not yet 
implemented"); -}; - -module.exports = Song; diff --git a/server/migrations/20200828124218_init_db.ts b/server/migrations/20200828124218_init_db.ts index 81a6392..b9883e2 100644 --- a/server/migrations/20200828124218_init_db.ts +++ b/server/migrations/20200828124218_init_db.ts @@ -114,5 +114,6 @@ export async function down(knex: Knex): Promise { await knex.schema.dropTable('songs_tags'); await knex.schema.dropTable('artists_tags'); await knex.schema.dropTable('albums_tags'); + await knex.schema.dropTable('artists_albums'); } diff --git a/server/test/integration/flows/helpers.js b/server/test/integration/flows/helpers.js index 354a16a..6d2da62 100644 --- a/server/test/integration/flows/helpers.js +++ b/server/test/integration/flows/helpers.js @@ -1,7 +1,15 @@ import { expect } from "chai"; export async function initTestDB() { - const knex = await require('knex')({ client: 'sqlite3', connection: ':memory:'}) + // Allow different database configs - but fall back to SQLite in memory if necessary. + const config = process.env.MUDBASE_DB_CONFIG ? + JSON.parse(process.env.MUDBASE_DB_CONFIG) : + { client: 'sqlite3', connection: ':memory:' }; + console.log("Running tests with DB config: ", config); + const knex = await require('knex')(config); + + // Undoing and doing the migrations is a test in itself. + await knex.migrate.rollback(undefined, true); await knex.migrate.latest(); return knex; } diff --git a/server/test/test.sh b/server/test/test.sh new file mode 100755 index 0000000..dc7e7d9 --- /dev/null +++ b/server/test/test.sh @@ -0,0 +1,117 @@ +#!/bin/bash + +# Wrapper around the Jasmine test scripts. +# Can test with different types of databases. + +SCRIPT=`realpath $0` +SCRIPTPATH=`dirname $SCRIPT` +SOURCEPATH="$SCRIPTPATH/.." 
+
+SQLITE_CONFIG='{"client": "sqlite3", "connection": ":memory:"}'
+POSTGRES_CONFIG='{"client":"pg","connection":{"host":"localhost","port":5432,"user":"mudbase","password":"mudbase","database":"mudbase"}}'
+
+DO_SQLITE=
+DO_POSTGRES=
+START_POSTGRES=
+START_POSTGRES_TESTCONTAINER=
+
+usage() {
+ cat << EOF
+This tool runs MuDBase's Jasmine tests. Different database back-ends can be selected (multiple is allowed too).
+Options:
+ -s,--sqlite Use SQLite in memory for testing.
+ -p,--postgres Use Postgres for testing against an existing server on localhost, port 5432.
+ -sp,--start-postgres Start its own Postgres Docker container for testing against.
+ -spt,--start-postgres-testcontainer Assume running in the sandervocke/postgres_node container. Spin up a PG process.
+ any other option is passed on to Jasmine.
+EOF
+}
+
+# Argument parsing
+PARAMS=""
+while (( "$#" )); do
+ case "$1" in
+ -s|--sqlite)
+ DO_SQLITE=1
+ shift
+ ;;
+ -p|--postgres)
+ DO_POSTGRES=1
+ shift
+ ;;
+ -sp|--start-postgres)
+ START_POSTGRES=1
+ shift
+ ;;
+ -spt|--start-postgres-testcontainer)
+ START_POSTGRES_TESTCONTAINER=1
+ shift
+ ;;
+ *) # preserve positional arguments
+ echo "Preserving params: $1"
+ PARAMS="$PARAMS \"$1\""
+ shift
+ ;;
+ esac
+done
+# set positional arguments in their proper place
+eval set -- "$PARAMS"
+
+pushd $SOURCEPATH
+if [ ! -z "${DO_SQLITE}" ]; then
+ MUDBASE_DB_CONFIG="$SQLITE_CONFIG" ./node_modules/.bin/ts-node node_modules/jasmine/bin/jasmine --config=test/jasmine.json "$@"
+ SQLITE_RESULT=$(echo $?)
+fi
+if [ ! -z "${DO_POSTGRES}" ]; then
+ if [ ! -z "${START_POSTGRES}" ]; then
+ # Fire up a test Postgres.
+ docker pull postgres:12
+ CONTAINERID=$(docker create --rm \
+ --env POSTGRES_USER=mudbase \
+ --env POSTGRES_PASSWORD=mudbase \
+ --env POSTGRES_DB=mudbase \
+ -p 5432:5432 \
+ postgres:12)
+ docker start ${CONTAINERID}
+ trap "docker stop ${CONTAINERID}" EXIT
+ # Wait for postgres to be ready.
+ while true; do
+ pg_isready -d mudbase -h localhost -p 5432 -U mudbase
+ [ "$(echo $?)" == "0" ] && break
+ sleep 1
+ done
+ elif [ ! -z "${START_POSTGRES_TESTCONTAINER}" ]; then
+ # Fire up a test Postgres process.
+ POSTGRES_USER=mudbase POSTGRES_PASSWORD=mudbase POSTGRES_DATABASE=mudbase /docker-entrypoint.sh postgres &
+ PID=$(echo $!)
+ trap "kill $PID" EXIT
+ # Wait for postgres to be ready.
+ while true; do
+ /usr/bin/pg_isready -d mudbase -h localhost -p 5432 -U mudbase
+ [ "$(echo $?)" == "0" ] && break
+ sleep 1
+ done
+ fi
+
+ MUDBASE_DB_CONFIG="$POSTGRES_CONFIG" ./node_modules/.bin/ts-node node_modules/jasmine/bin/jasmine --config=test/jasmine.json "$@"
+ POSTGRES_RESULT=$(echo $?)
+
+ if [ ! -z "${START_POSTGRES}" ]; then
+ docker stop ${CONTAINERID}
+ trap - EXIT
+ fi
+fi
+
+printf "Tests finished. Results:\n\n"
+if [ ! -z "${DO_SQLITE}" ]; then
+ echo "SQLite: $([ "${SQLITE_RESULT}" != '0' ] && echo Fail || echo Success)"
+fi
+if [ ! -z "${DO_POSTGRES}" ]; then
+ echo "Postgres: $([ "${POSTGRES_RESULT}" != '0' ] && echo Fail || echo Success)"
+fi
+
+popd
+
+[ "${SQLITE_RESULT:-0}" != '0' -o "${POSTGRES_RESULT:-0}" != '0' ] && exit 1
+exit 0