Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

WIP Run tests in parallel #670

Draft
wants to merge 11 commits into
base: stable
Choose a base branch
from
2 changes: 2 additions & 0 deletions .github/workflows/checks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -67,4 +67,6 @@ jobs:
run: yarn lint

- name: run tests
env:
TEST_THREADS: 4
run: yarn test
26 changes: 25 additions & 1 deletion app/app.js
Original file line number Diff line number Diff line change
Expand Up @@ -41,8 +41,10 @@ export async function getSingleton() {

_app.context.pubsub = new PubsubListener(server, _app);

const port = process.env.PEPYATKA_SERVER_PORT || process.env.PORT || _app.context.config.port;
const port = getListeningPort(_app);
await listen(port);
// The actual port
_app.context.port = server.address().port;

const log = createDebug('freefeed:init');

Expand All @@ -56,3 +58,25 @@ export async function getSingleton() {
lock.release();
}
}

// Resolves the port to listen on, checking the candidate sources in
// priority order: PEPYATKA_SERVER_PORT env, PORT env, then the app config.
// Falls back to 0 (OS-assigned free port) when no candidate is valid.
function getListeningPort(_app) {
  const candidates = [
    process.env.PEPYATKA_SERVER_PORT,
    process.env.PORT,
    _app.context.config.port,
  ];

  for (const candidate of candidates) {
    const port = validPortValue(candidate);

    if (port !== undefined) {
      return port;
    }
  }

  return 0;
}

/**
 * Validates a candidate port value and normalizes it to a number.
 *
 * Environment variables (process.env.*) are always strings, so numeric
 * strings are accepted and coerced — otherwise the env-provided ports
 * would never validate and would silently be ignored.
 *
 * @param {number|string|undefined} port - candidate value
 * @returns {number|undefined} the port as an integer in [0, 65535], or
 *   undefined when the value is not a valid port
 */
function validPortValue(port) {
  // Coerce numeric strings; Number('') === 0, so reject empty/blank
  // strings explicitly before converting.
  const value = typeof port === 'string' && port.trim() !== '' ? Number(port) : port;

  if (typeof value === 'number' && Number.isInteger(value) && value >= 0 && value < 65536) {
    return value;
  }

  return undefined;
}
2 changes: 1 addition & 1 deletion app/freefeed-app.ts
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ class FreefeedApp extends Application<DefaultState, AppContext> {
}

this.context.config = config;
this.context.port = process.env.PORT ? parseInt(process.env.PORT) : config.port;
this.context.port = 0; // to be configured on listen

this.use(asyncContextMiddleware);

Expand Down
44 changes: 30 additions & 14 deletions app/setup/postgres.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,31 +2,47 @@
import knexjs from 'knex';
import createDebug from 'debug';
import config from 'config';
import { uniq } from 'lodash';

import { stylize } from '../support/debugLogger';
import { getDbSchemaName } from '../support/parallel-testing';

/** @type {import("knex").Knex.Config} */
const pgConfig = { ...config.postgres };

const schemaName = getDbSchemaName();

if (schemaName !== 'public') {
pgConfig.searchPath = uniq([schemaName, 'public', ...(pgConfig.searchPath || [])]);
}

const knex = knexjs(config.postgres);
const log = createDebug('freefeed:sql');
const errLog = createDebug('freefeed:sql:error');

knex.on('start', (builder) => {
const q = builder.toString();
const start = new Date().getTime();
let knex = null;
export function connect() {
if (knex) {
return knex;
}

builder.on('end', () => {
log('%s %s', q, stylize(`[took ${new Date().getTime() - start}ms]`, 'green'));
});
knex = knexjs(pgConfig);
knex.on('start', (builder) => {
const q = builder.toString();
const start = new Date().getTime();

builder.on('error', () => {
errLog('%s %s', stylize('ERROR', 'red'), q);
builder.on('end', () => {
log('%s %s', q, stylize(`[took ${new Date().getTime() - start}ms]`, 'green'));
});

builder.on('error', () => {
errLog('%s %s', stylize('ERROR', 'red'), q);
});
});
});

export function connect() {
return knex;
}

export function setSearchConfig() {
const { textSearchConfigName } = config.postgres;
return knex.raw(`SET default_text_search_config TO '${textSearchConfigName}'`);
export async function setSearchConfig() {
const { textSearchConfigName } = pgConfig;
await knex.raw(`SET default_text_search_config TO '${textSearchConfigName}'`);
}
18 changes: 18 additions & 0 deletions app/support/parallel-testing.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
/**
 * Returns the mocha parallel-run worker ID taken from the
 * MOCHA_WORKER_ID environment variable, or 0 outside of the test
 * environment, when the variable is absent, or when it is malformed.
 */
export function getWorkerId(): number {
  if (process.env.NODE_ENV !== 'test') {
    return 0;
  }

  // Always pass the radix to parseInt; without it some inputs can be
  // parsed in an unexpected base.
  let id = Number.parseInt(process.env.MOCHA_WORKER_ID || '0', 10);

  // Guard against NaN and negative values: a negative ID would produce
  // a broken schema name like `test-1` in getDbSchemaName().
  if (!Number.isFinite(id) || id < 0) {
    id = 0;
  }

  return id;
}

export function getDbSchemaName(): 'public' | string {
const workerId = getWorkerId();
return workerId === 0 ? 'public' : `test${workerId}`;
}
33 changes: 0 additions & 33 deletions bin/clean_test_db.js

This file was deleted.

61 changes: 61 additions & 0 deletions bin/reset_test_db.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
/* eslint-disable no-await-in-loop */
import knexLib from 'knex';
import parseArgs from 'minimist';

import { getDbSchemaName } from '../app/support/parallel-testing';

// Number of schemas to (re)create, from the --schemas CLI argument;
// always pass the radix to parseInt.
const args = parseArgs(process.argv.slice(2));
let nSchemas = Number.parseInt(args['schemas'], 10);

// Fall back to a single schema on a missing/invalid argument.
if (!Number.isFinite(nSchemas) || nSchemas < 1) {
  nSchemas = 1;
}

// Forcefully set the NODE_ENV to 'test'
process.env.NODE_ENV = 'test';

const config = require('../knexfile');

if (!('test' in config)) {
  // Keep the trailing newline for consistency with the error output below.
  process.stderr.write(`Error: no "test" section in knexfile\n`);
  process.exit(1);
}

const knex = knexLib(config.test);

// Drops (if present) and re-creates the given DB schema, leaving it empty.
async function resetSchema(schema) {
  console.log(`Resetting the ${schema} schema`);
  // :schemaName: is a knex identifier binding — it is quoted safely.
  await knex.raw(`drop schema if exists :schemaName: cascade`, { schemaName: schema });
  await knex.raw(`create schema :schemaName:`, { schemaName: schema });
}

// Resets and migrates the 'public' schema plus one extra schema per
// additional test worker (nSchemas total). NOTE: statement order matters —
// MOCHA_WORKER_ID must be set before each getDbSchemaName() call, since
// that function derives the schema name from the env variable.
async function run() {
  // Public schema
  await resetSchema('public');
  console.log(`Running migrations`);
  await knex.migrate.latest();
  console.log(`Migration completed`);

  // Other schemas (workers 1..nSchemas-1)
  for (let i = 1; i < nSchemas; i++) {
    // Emulating MOCHA_WORKER_ID for proper schema name generation;
    // migrations presumably read it the same way — confirm against
    // app/support/parallel-testing.
    process.env.MOCHA_WORKER_ID = i.toString(10);
    const schemaName = getDbSchemaName();
    await resetSchema(schemaName);
    console.log(`Running migrations on ${schemaName} schema`);
    await knex.migrate.latest({ schemaName });
    console.log(`Migration completed on ${schemaName} schema`);
  }
}

run()
  .then(async () => {
    // knex.destroy() returns a promise; await it so all DB connections
    // are closed before the process exits.
    await knex.destroy();
    console.log(`All done.`);
    process.exit(0);
  })
  .catch(async (e) => {
    process.stderr.write(`Error: ${e}\n`);
    await knex.destroy();
    process.exit(1);
  });
7 changes: 7 additions & 0 deletions config/default.js
Original file line number Diff line number Diff line change
Expand Up @@ -503,4 +503,11 @@ config.translation = {
apiKey: 'OVERRIDE_IT',
};

// Settings used only by the test suite.
config.tests = {
  realtime: {
    // Timeouts in ms — presumably how long realtime tests wait for an
    // expected event / for silence; confirm against the test helpers.
    eventTimeout: 2000,
    silenceTimeout: 500,
  },
};

module.exports = config;
8 changes: 6 additions & 2 deletions config/test.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,11 @@ import { resolve } from 'path';

import stubTransport from 'nodemailer-stub-transport';

import { getWorkerId } from '../app/support/parallel-testing';

module.exports = {
port: 31337,
database: 3,
port: 31337 + getWorkerId(),
database: 3 + getWorkerId(),
monitorPrefix: 'tests',

application: { EXTRA_STOP_LIST: ['thatcreepyguy', 'nicegirlnextdoor', 'perfectstranger'] },
Expand Down Expand Up @@ -48,6 +50,8 @@ module.exports = {

registrationsLimit: { maxCount: 10 },

maintenance: { messageFile: `tmp/MAINTENANCE${getWorkerId()}.txt` },

userPreferences: {
defaults: {
// User does't want to view banned comments by default (for compatibility
Expand Down
35 changes: 22 additions & 13 deletions migrations/20160405133637_initial.js
Original file line number Diff line number Diff line change
@@ -1,18 +1,27 @@
const { getDbSchemaName } = require('../app/support/parallel-testing');

exports.up = function (knex) {
const schemaName = getDbSchemaName();
const isPublicSchema = !schemaName || schemaName === 'public';

return Promise.all([
knex.raw('SET statement_timeout = 0'),
knex.raw('SET lock_timeout = 0'),
knex.raw("SET client_encoding = 'UTF8'"),
knex.raw('SET standard_conforming_strings = on'),
knex.raw('SET check_function_bodies = false'),
knex.raw('SET client_min_messages = warning'),
knex.raw('SET row_security = off'),
knex.raw('CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog'),
knex.raw('CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public'),
knex.raw('CREATE EXTENSION IF NOT EXISTS intarray WITH SCHEMA public'),
knex.raw('SET search_path = public, pg_catalog'),
knex.raw("SET default_tablespace = ''"),
knex.raw('SET default_with_oids = false'),
...(isPublicSchema
? [
knex.raw('SET statement_timeout = 0'),
knex.raw('SET lock_timeout = 0'),
knex.raw("SET client_encoding = 'UTF8'"),
knex.raw('SET standard_conforming_strings = on'),
knex.raw('SET check_function_bodies = false'),
knex.raw('SET client_min_messages = warning'),
knex.raw('SET row_security = off'),
knex.raw('CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog'),
knex.raw('CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public'),
knex.raw('CREATE EXTENSION IF NOT EXISTS intarray WITH SCHEMA public'),
knex.raw('SET search_path = public, pg_catalog'),
knex.raw("SET default_tablespace = ''"),
knex.raw('SET default_with_oids = false'),
]
: [knex.raw('SET search_path = :schemaName:, public, pg_catalog', { schemaName })]),

knex.schema.createTable('users', function (table) {
table.increments().notNullable().primary();
Expand Down
14 changes: 7 additions & 7 deletions migrations/20161215174952_posts_privacy_flags.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,15 @@ export function up(knex) {
})
.raw(
`CREATE INDEX feeds_id_array_idx
ON public.feeds
ON feeds
USING gin
((ARRAY[id]) gin__int_ops);
`,
)
// Trigger function for individual posts
.raw(
`
CREATE OR REPLACE FUNCTION public.trgfun_set_post_privacy_on_insert_update()
CREATE OR REPLACE FUNCTION trgfun_set_post_privacy_on_insert_update()
RETURNS trigger AS
$BODY$
-- Set proper is_private and is_protected flags on post insert or update
Expand Down Expand Up @@ -46,7 +46,7 @@ export function up(knex) {
// Trigger function for users/groups
.raw(
`
CREATE OR REPLACE FUNCTION public.trgfun_set_posts_privacy_on_user_update()
CREATE OR REPLACE FUNCTION trgfun_set_posts_privacy_on_user_update()
RETURNS trigger AS
$BODY$
-- Set proper is_private and is_protected flags on all user's posts when the user changes his privacy
Expand Down Expand Up @@ -107,17 +107,17 @@ export function up(knex) {
.raw(
`CREATE TRIGGER trg_set_post_privacy_on_insert_update
BEFORE INSERT OR UPDATE OF destination_feed_ids
ON public.posts
ON posts
FOR EACH ROW
EXECUTE PROCEDURE public.trgfun_set_post_privacy_on_insert_update();`,
EXECUTE PROCEDURE trgfun_set_post_privacy_on_insert_update();`,
)
.raw(
`CREATE TRIGGER trg_set_posts_privacy_on_user_update
AFTER UPDATE OF is_private, is_protected
ON public.users
ON users
FOR EACH ROW
WHEN (((old.is_protected <> new.is_protected) OR (old.is_private <> new.is_private)))
EXECUTE PROCEDURE public.trgfun_set_posts_privacy_on_user_update();`,
EXECUTE PROCEDURE trgfun_set_posts_privacy_on_user_update();`,
)
// Data migration
.raw('update posts set destination_feed_ids = destination_feed_ids')
Expand Down
6 changes: 3 additions & 3 deletions migrations/20171205142923_post_is_propagable.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ export async function up(knex) {
// Trigger function to update 'is_propagable' flag
.raw(
`
CREATE OR REPLACE FUNCTION public.trgfun_set_post_is_propagable_on_insert_update()
CREATE OR REPLACE FUNCTION trgfun_set_post_is_propagable_on_insert_update()
RETURNS trigger AS
$BODY$
-- Set 'is_propagable' post flag on insert or update.
Expand All @@ -36,9 +36,9 @@ export async function up(knex) {
.raw(
`CREATE TRIGGER trg_set_post_is_propagable_on_insert_update
BEFORE INSERT OR UPDATE OF destination_feed_ids
ON public.posts
ON posts
FOR EACH ROW
EXECUTE PROCEDURE public.trgfun_set_post_is_propagable_on_insert_update();`,
EXECUTE PROCEDURE trgfun_set_post_is_propagable_on_insert_update();`,
)
// Data migration
.raw(
Expand Down
Loading
Loading