postgres #14

Merged
sander merged 24 commits from postgres into master 5 years ago
Files changed (number of changed lines in parentheses):
  1. .drone.yml (56)
  2. ci/postgres_node/Dockerfile (8)
  3. server/endpoints/AlbumDetailsEndpointHandler.ts (4)
  4. server/endpoints/ArtistDetailsEndpointHandler.ts (3)
  5. server/endpoints/CreateAlbumEndpointHandler.ts (1)
  6. server/endpoints/CreateArtistEndpointHandler.ts (1)
  7. server/endpoints/CreateSongEndpointHandler.ts (1)
  8. server/endpoints/CreateTagEndpointHandler.ts (5)
  9. server/endpoints/QueryEndpointHandler.ts (23)
  10. server/endpoints/SongDetailsEndpointHandler.ts (3)
  11. server/lib/asJson.ts (4)
  12. server/lib/jasmine_examples/Player.js (24)
  13. server/lib/jasmine_examples/Song.js (9)
  14. server/migrations/20200828124218_init_db.ts (1)
  15. server/test/integration/flows/helpers.js (10)
  16. server/test/test.sh (117)

@@ -2,34 +2,42 @@ kind: pipeline
type: kubernetes
name: back-end
workspace:
path: /drone/src
steps:
- name: install dependencies
image: node
image: sandervocke/postgres_node:12
commands:
- npm install
- cd server && npm install; cd ..
- cd /drone/src/server && npm install
- name: back-end test (SQLite3)
image: sandervocke/postgres_node:12
commands:
- cd /drone/src/server && ./test/test.sh --sqlite
- name: back-end test (PostgreSQL)
image: sandervocke/postgres_node:12
commands:
- cd /drone/src/server && ./test/test.sh --postgres --start-postgres-testcontainer
---
kind: pipeline
type: kubernetes
name: front-end
workspace:
path: /drone/src
steps:
- name: install dependencies
image: node
commands:
- cd /drone/src && npm install
- cd /drone/src/client && npm install
- name: back-end test
- name: front-end compile
image: node
commands:
- cd server && npm test; cd ..
#kind: pipeline
#type: kubernetes
#name: front-end
#
#steps:
#- name: install dependencies
# image: node
# commands:
# - npm install
# - cd client && npm install; cd ..
#
#- name: front-end build
# image: node
# commands:
# - cd client && npm run-script build; cd ..
#
#---
#
- cd /drone/src/client && ./node_modules/.bin/tsc; cd ..

@@ -0,0 +1,8 @@
# Note: this Dockerfile is written to be executed with the whole source
# as its context.
FROM postgres:12
# Install node.js and npm
RUN apt update -y && apt install -y npm

@@ -1,6 +1,7 @@
import * as api from '../../client/src/api';
import { EndpointError, EndpointHandler, catchUnhandledErrors } from './types';
import Knex from 'knex';
import asJson from '../lib/asJson';
export const AlbumDetailsEndpointHandler: EndpointHandler = async (req: any, res: any, knex: Knex) => {
if (!api.checkAlbumDetailsRequest(req)) {
@@ -42,12 +43,13 @@ export const AlbumDetailsEndpointHandler: EndpointHandler = async (req: any, res
await Promise.all([albumPromise, tagIdsPromise, songIdsPromise, artistIdsPromise]);
// Respond to the request.
console.log("ALBUM: ", album);
const response: api.AlbumDetailsResponse = {
name: album['name'],
artistIds: artists,
tagIds: tags,
songIds: songs,
storeLinks: JSON.parse(album['storeLinks']),
storeLinks: asJson(album['storeLinks']),
};
await res.send(response);

@@ -1,6 +1,7 @@
import * as api from '../../client/src/api';
import { EndpointError, EndpointHandler, catchUnhandledErrors } from './types';
import Knex from 'knex';
import asJson from '../lib/asJson';
export const ArtistDetailsEndpointHandler: EndpointHandler = async (req: any, res: any, knex: Knex) => {
if (!api.checkArtistDetailsRequest(req)) {
@@ -24,7 +25,7 @@ export const ArtistDetailsEndpointHandler: EndpointHandler = async (req: any, re
const response: api.ArtistDetailsResponse = {
name: results[0].name,
tagIds: tagIds,
storeLinks: JSON.parse(results[0].storeLinks),
storeLinks: asJson(results[0].storeLinks),
}
await res.send(response);

@@ -51,6 +51,7 @@ export const CreateAlbumEndpointHandler: EndpointHandler = async (req: any, res:
name: reqObject.name,
storeLinks: JSON.stringify(reqObject.storeLinks || []),
})
.returning('id') // Needed for Postgres
)[0];
// Link the artists via the linking table.

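Note on the recurring .returning('id') additions in this and the following create handlers: knex resolves a plain insert to the new rowid on SQLite, but Postgres only reports generated ids when asked via RETURNING. A minimal sketch of the pattern, not part of the diff, using a hypothetical albums table with an auto-incrementing id column:

import Knex from 'knex';

async function createExampleAlbum(knex: Knex, name: string): Promise<number> {
  const inserted = await knex('albums')
    .insert({ name, storeLinks: JSON.stringify([]) })
    .returning('id'); // Harmless on SQLite; required on Postgres to get the new id back.
  // Depending on the knex version, the array holds plain ids ([1]) or row objects ([{ id: 1 }]).
  return inserted[0];
}
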
@@ -41,6 +41,7 @@ export const CreateArtistEndpointHandler: EndpointHandler = async (req: any, res
name: reqObject.name,
storeLinks: JSON.stringify(reqObject.storeLinks || []),
})
.returning('id') // Needed for Postgres
)[0];
// Link the tags via the linking table.

@@ -60,6 +60,7 @@ export const CreateSongEndpointHandler: EndpointHandler = async (req: any, res:
title: reqObject.title,
storeLinks: JSON.stringify(reqObject.storeLinks || []),
})
.returning('id') // Needed for Postgres
)[0];
// Link the artists via the linking table.

@@ -40,7 +40,10 @@ export const CreateTagEndpointHandler: EndpointHandler = async (req: any, res: a
if (maybeParent) {
tag['parentId'] = maybeParent;
}
const tagId = (await trx('tags').insert(tag))[0];
const tagId = (await trx('tags')
.insert(tag)
.returning('id') // Needed for Postgres
)[0];
// Respond to the request.
const responseObject: api.CreateTagResponse = {

@@ -1,6 +1,7 @@
import * as api from '../../client/src/api';
import { EndpointError, EndpointHandler, catchUnhandledErrors } from './types';
import Knex from 'knex';
import asJson from '../lib/asJson';
enum ObjectType {
Song = 0,
@@ -176,13 +177,17 @@ function constructQuery(knex: Knex, queryFor: ObjectType, queryElem: api.QueryEl
joinObjects.delete(queryFor); // We are already querying this object in the base query.
// Figure out what data we want to select from the results.
var columns: string[] = [];
joinObjects.forEach((obj: ObjectType) => columns.push(...objectColumns[obj]));
columns.push(...objectColumns[queryFor]);
var columns: any[] = objectColumns[queryFor];
// TODO: there was a line here to add columns for the joined objects.
// Could not get it to work with Postgres, which wants aggregate functions
// to specify exactly how duplicates should be aggregated.
// Not sure whether we need these columns in the first place.
// joinObjects.forEach((obj: ObjectType) => columns.push(...objectColumns[obj]));
// First, we create a base query for the type of object we need to yield.
var q = knex.select(columns)
.distinct(objectTables[queryFor] + '.' + 'id')
.groupBy(objectTables[queryFor] + '.' + 'id')
.from(objectTables[queryFor]);
// Now, we need to add join statements for other objects we want to filter on.
@@ -213,11 +218,13 @@ async function getLinkedObjects(knex: Knex, base: ObjectType, linked: ObjectType
const columns = objectColumns[linked];
await Promise.all(baseIds.map((baseId: number) => {
return knex.select(columns).distinct(otherTable + '.id').from(otherTable)
return knex.select(columns).groupBy(otherTable + '.id').from(otherTable)
.join(linkingTable, { [linkingTable + '.' + linkingTableIdNames[linked]]: otherTable + '.id' })
.where({ [linkingTable + '.' + linkingTableIdNames[base]]: baseId })
.then((others: any) => { result[baseId] = others; })
}))
console.log("Query results for", baseIds, ":", result);
return result;
}
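Background for the distinct-to-groupBy swaps above (not part of the diff): Postgres only allows ungrouped columns in a grouped SELECT when they are functionally dependent on the grouped key, e.g. when grouping on a table's primary key. Grouping on the base table's id therefore de-duplicates join matches while keeping that table's columns selectable; columns from the joined tables are not functionally dependent on it, which is what the TODO above runs into. An illustrative sketch (the songId column name is an assumption):

import Knex from 'knex';

// De-duplicate songs that match via a linking table. Grouping on songs.id
// (the primary key) keeps songs.title selectable under Postgres' rules.
function songsWithAnyTag(knex: Knex) {
  return knex
    .select(['songs.id', 'songs.title'])
    .groupBy('songs.id')
    .from('songs')
    .join('songs_tags', { 'songs_tags.songId': 'songs.id' });
}
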
@@ -309,12 +316,12 @@ export const QueryEndpointHandler: EndpointHandler = async (req: any, res: any,
return <api.SongDetails>{
songId: song['songs.id'],
title: song['songs.title'],
storeLinks: JSON.parse(song['songs.storeLinks']),
storeLinks: asJson(song['songs.storeLinks']),
artists: songsArtists[song['songs.id']].map((artist: any) => {
return <api.ArtistDetails>{
artistId: artist['artists.id'],
name: artist['artists.name'],
storeLinks: JSON.parse(artist['artists.storeLinks']),
storeLinks: asJson(artist['artists.storeLinks']),
};
}),
tags: songsTags[song['songs.id']].map((tag: any) => {
@@ -330,7 +337,7 @@ export const QueryEndpointHandler: EndpointHandler = async (req: any, res: any,
return <api.ArtistDetails>{
artistId: artist['artists.id'],
name: artist['artists.name'],
storeLinks: JSON.parse(artist['artists.storeLinks']),
storeLinks: asJson(artist['artists.storeLinks']),
}
}),
tags: tags.map((tag: any) => {

@@ -1,6 +1,7 @@
import * as api from '../../client/src/api';
import { EndpointError, EndpointHandler, catchUnhandledErrors } from './types';
import Knex from 'knex';
import asJson from '../lib/asJson';
export const SongDetailsEndpointHandler: EndpointHandler = async (req: any, res: any, knex: Knex) => {
if (!api.checkSongDetailsRequest(req)) {
@@ -51,7 +52,7 @@ export const SongDetailsEndpointHandler: EndpointHandler = async (req: any, res:
tagIds: tags,
artistIds: artists,
albumIds: albums,
storeLinks: JSON.parse(song.storeLinks),
storeLinks: asJson(song.storeLinks),
}
await res.send(response);

@@ -0,0 +1,4 @@
export default function asJson(s: Object | string) {
return (typeof s === 'string') ?
JSON.parse(s) : s;
}
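
The type check above is what makes the storeLinks handling portable: SQLite hands the column back as a TEXT string that still needs JSON.parse, while the pg driver already deserializes json/jsonb columns into objects (presumably why plain JSON.parse broke there). A small usage sketch with made-up values:

import asJson from '../lib/asJson';

const fromSqlite = asJson('["https://store.example/album/1"]'); // a string, gets JSON.parse'd
const fromPostgres = asJson(['https://store.example/album/1']); // already an object, returned as-is
// Both calls yield the same array, so the endpoint handlers stay driver-agnostic.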

@@ -1,24 +0,0 @@
function Player() {
}
Player.prototype.play = function(song) {
this.currentlyPlayingSong = song;
this.isPlaying = true;
};
Player.prototype.pause = function() {
this.isPlaying = false;
};
Player.prototype.resume = function() {
if (this.isPlaying) {
throw new Error("song is already playing");
}
this.isPlaying = true;
};
Player.prototype.makeFavorite = function() {
this.currentlyPlayingSong.persistFavoriteStatus(true);
};
module.exports = Player;

@@ -1,9 +0,0 @@
function Song() {
}
Song.prototype.persistFavoriteStatus = function(value) {
// something complicated
throw new Error("not yet implemented");
};
module.exports = Song;

@@ -114,5 +114,6 @@ export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTable('songs_tags');
await knex.schema.dropTable('artists_tags');
await knex.schema.dropTable('albums_tags');
await knex.schema.dropTable('artists_albums');
}

@@ -1,7 +1,15 @@
import { expect } from "chai";
export async function initTestDB() {
const knex = await require('knex')({ client: 'sqlite3', connection: ':memory:'})
// Allow different database configs - but fall back to SQLite in memory if necessary.
const config = process.env.MUDBASE_DB_CONFIG ?
JSON.parse(process.env.MUDBASE_DB_CONFIG) :
{ client: 'sqlite3', connection: ':memory:' };
console.log("Running tests with DB config: ", config);
const knex = await require('knex')(config);
// Undoing and doing the migrations is a test in itself.
await knex.migrate.rollback(undefined, true);
await knex.migrate.latest();
return knex;
}
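
For reference, a sketch of how a Postgres run reaches this helper; in practice test/test.sh below sets MUDBASE_DB_CONFIG on the command line, and the import path here is an assumption:

import { initTestDB } from './helpers';

async function runAgainstPostgres() {
  process.env.MUDBASE_DB_CONFIG = JSON.stringify({
    client: 'pg',
    connection: { host: 'localhost', port: 5432, user: 'mudbase', password: 'mudbase', database: 'mudbase' },
  });
  const knex = await initTestDB(); // rolls back and re-runs all migrations, then returns the connection
  // ... run test assertions against knex here ...
  await knex.destroy();
}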

@@ -0,0 +1,117 @@
#!/bin/bash
Review

This should be invoked from the Drone CI script somehow! How to handle nested dockerization?

# Wrapper around the Jasmine test scripts.
# Can test with different types of databases.
SCRIPT=$(realpath "$0")
SCRIPTPATH=$(dirname "$SCRIPT")
SOURCEPATH="$SCRIPTPATH/.."
SQLITE_CONFIG='{"client": "sqlite3", "connection": ":memory:"}'
POSTGRES_CONFIG='{"client":"pg","connection":{"host":"localhost","port":5432,"user":"mudbase","password":"mudbase","database":"mudbase"}}'
DO_SQLITE=
DO_POSTGRES=
START_POSTGRES=
START_POSTGRES_TESTCONTAINER=
usage() {
cat << EOF
This tool runs MuDBase's Jasmine tests. Different database back-ends can be selected (multiple are allowed too).
Options:
-s,--sqlite                          Use SQLite in memory for testing.
-p,--postgres                        Use Postgres for testing (expects a server on localhost, port 5432).
-sp,--start-postgres                 Start a temporary Postgres Docker container (postgres:12) to test against.
-spt,--start-postgres-testcontainer  Assume running in the sandervocke/postgres_node container. Spin up a PG process.
Any other option is passed on to Jasmine.
EOF
}
# Argument parsing
PARAMS=""
while (( "$#" )); do
case "$1" in
-s|--sqlite)
DO_SQLITE=1
shift
;;
-p|--postgres)
DO_POSTGRES=1
shift
;;
-sp|--start-postgres)
START_POSTGRES=1
shift
;;
-spt|--start-postgres-testcontainer)
START_POSTGRES_TESTCONTAINER=1
shift
;;
*) # preserve positional arguments
echo "Preserving params: $1"
PARAMS="$PARAMS \"$1\""
shift
;;
esac
done
# set positional arguments in their proper place
eval set -- "$PARAMS"
pushd "$SOURCEPATH"
if [ ! -z "${DO_SQLITE}" ]; then
MUDBASE_DB_CONFIG="$SQLITE_CONFIG" ./node_modules/.bin/ts-node node_modules/jasmine/bin/jasmine --config=test/jasmine.json "$@"
SQLITE_RESULT=$?
fi
if [ ! -z "${DO_POSTGRES}" ]; then
if [ ! -z "${START_POSTGRES}" ]; then
# Fire up a test Postgres.
docker pull postgres:12
CONTAINERID=$(docker create --rm \
--env POSTGRES_USER=mudbase \
--env POSTGRES_PASSWORD=mudbase \
--env POSTGRES_DB=mudbase \
-p 5432:5432 \
postgres:12)
docker start ${CONTAINERID}
trap "docker stop ${CONTAINERID}" EXIT
# Wait for postgres to be ready.
while true; do
pg_isready -d mudbase -h localhost -p 5432 -U mudbase
[ "$(echo $?)" == "0" ] && break
sleep 1
done
elif [ ! -z "${START_POSTGRES_TESTCONTAINER}" ]; then
# Fire up a test Postgres process.
POSTGRES_USER=mudbase POSTGRES_PASSWORD=mudbase POSTGRES_DB=mudbase /docker-entrypoint.sh postgres &
PID=$!
trap "kill $PID" EXIT
# Wait for postgres to be ready.
while true; do
/usr/bin/pg_isready -d mudbase -h localhost -p 5432 -U mudbase
[ "$(echo $?)" == "0" ] && break
sleep 1
done
fi
MUDBASE_DB_CONFIG="$POSTGRES_CONFIG" ./node_modules/.bin/ts-node node_modules/jasmine/bin/jasmine --config=test/jasmine.json "$@"
POSTGRES_RESULT=$?
if [ ! -z "${START_POSTGRES}" ]; then
docker stop ${CONTAINERID}
trap - EXIT
fi
fi
printf "Tests finished. Results:\n\n"
if [ ! -z "${DO_SQLITE}" ]; then
echo "SQLite: $([ ${SQLITE_RESULT} == '1' ] && echo Fail || echo Success)"
fi
if [ ! -z "${DO_POSTGRES}" ]; then
echo "Postgres: $([ ${POSTGRES_RESULT} == '1' ] && echo Fail || echo Success)"
fi
popd
[ "${SQLITE_RESULT}" == '1' -o "${POSTGRES_RESULT}" == '1' ] && exit 1
exit 0