tunarr/server/src/dao/legacy_migration/customShowMigrator.ts
Christian Benincasa 5570631adc
Packaging v0: Build and run server in a docker container (#139)
* This is a nightmare

* Checkpointing.... getting closer

* First cut - packaging the server in a docker container

* Remove busted bundles

* Minify build

* Some common commands for building - we're going to look into proper monorepo solutions soon

* Remove dependency on serve-static

* Add web serving, full-stack docker target, and Nvidia container support

* Remove test action graph for now
2024-03-05 13:13:26 -05:00

130 lines
3.9 KiB
TypeScript

import fs from 'fs/promises';
import { chain, isNil, maxBy, partition } from 'lodash-es';
import path from 'path';
import { groupByUniq, mapAsyncSeq } from '../../util.js';
import { withDb } from '../dataSource.js';
import { CustomShow as CustomShowEntity } from '../entities/CustomShow.js';
import { CustomShowContent } from '../entities/CustomShowContent.js';
import { FillerShow } from '../entities/FillerShow.js';
import { CustomShow, logger } from '../legacyDbMigration.js';
import { persistProgram } from './channelMigrator.js';
import {
JSONArray,
JSONObject,
convertProgram,
uniqueProgramId,
} from './migrationUtil.js';
/**
 * Reads and parses a single legacy custom-show or filler JSON definition.
 *
 * @param id - identifier for the show, derived from the JSON file name
 * @param fullPath - full path to the legacy JSON file on disk
 * @param type - which legacy collection the file belongs to
 * @returns the parsed {@link CustomShow} with its programs converted via
 *          `convertProgram`
 * @throws if the file cannot be read or its contents are not valid JSON
 */
export async function convertCustomShow(
  id: string,
  fullPath: string,
  type: 'custom-shows' | 'filler',
) {
  const prettyType = type === 'custom-shows' ? 'custom show' : 'filler';
  logger.info(`Migrating ${prettyType}: ${fullPath}`);
  // Read as UTF-8 directly rather than a Buffer -> toString round-trip.
  // (Previously this was held in a variable misleadingly named `channel`.)
  const contents = await fs.readFile(fullPath, 'utf-8');
  // NOTE(review): the legacy JSON shape is asserted, not validated — a
  // malformed file surfaces as a runtime error inside the mapping below.
  const parsed = JSON.parse(contents) as JSONObject;
  const show: CustomShow = {
    id,
    name: parsed['name'] as string,
    content: (parsed['content'] as JSONArray).map(convertProgram),
  };
  return show;
}
/**
 * Migrates every legacy custom-show or filler definition found under
 * `<dbPath>/<type>` into the new database.
 *
 * Each `*.json` file in the directory is parsed via `convertCustomShow`
 * (file name minus `.json` becomes the show id), then all referenced
 * programs are persisted once (deduplicated), and finally each show
 * entity and its content mappings are (re)created inside a single
 * `withDb` unit of work.
 *
 * @param dbPath - root of the legacy database directory
 * @param type - which legacy collection to migrate; also selects the
 *   target entity type (CustomShowEntity vs FillerShow)
 */
export async function migrateCustomShows(
  dbPath: string,
  type: 'custom-shows' | 'filler',
) {
  const customShowsPath = path.join(dbPath, type);
  const configFiles = await fs.readdir(customShowsPath);

  // Parse each legacy file strictly one-at-a-time: the async reduce
  // awaits the accumulated array before converting the next file.
  const newCustomShows = await configFiles.reduce(
    async (prev, file) => {
      const id = file.replace('.json', '');
      return [
        ...(await prev),
        await convertCustomShow(id, path.join(customShowsPath, file), type),
      ];
    },
    Promise.resolve([] as CustomShow[]),
  );

  await withDb(async (em) => {
    // A program can appear in multiple shows; persist each distinct
    // program only once, keyed by its uniqueProgramId.
    const uniquePrograms = chain(newCustomShows)
      .flatMap((cs) => cs.content)
      .uniqBy(uniqueProgramId)
      .value();

    // Build a lookup of uniqueProgramId -> persisted DB program.
    // persistProgram may return a falsy value, in which case the
    // program is simply absent from the map (empty object merged in).
    // NOTE(review): the reduce params are misleadingly named — `value`
    // is the accumulator and `prev` is the current one-entry object.
    const persistedPrograms = (
      await mapAsyncSeq(uniquePrograms, undefined, (program) =>
        persistProgram(program).then((dbProgram) =>
          dbProgram
            ? {
                [uniqueProgramId(program)]: dbProgram,
              }
            : {},
        ),
      )
    ).reduce((value, prev) => ({ ...value, ...prev }), {});

    // Custom shows and fillers share this flow; only the entity type
    // and the content-mapping strategy below differ.
    const entityType = type === 'custom-shows' ? CustomShowEntity : FillerShow;
    const repo = em.getRepository(entityType);
    const customShowById = groupByUniq(newCustomShows, 'id');

    await mapAsyncSeq(newCustomShows, undefined, async (customShow) => {
      // Refresh the entity after inserting programs
      const existing = await repo.findOne(
        { uuid: customShow.id },
        { populate: ['content'], refresh: true },
      );

      // If we didn't find one, initialize it
      const entity =
        existing ??
        em.create(entityType, {
          uuid: customShow.id,
          name: customShow.name,
        });

      // Reset mappings
      const content = customShowById[entity.uuid].content;
      entity.content.removeAll();

      if (type === 'custom-shows') {
        // Programs that already carry a customOrder keep it; the rest
        // are appended after the current maximum order, preserving
        // their relative input order.
        const [hasOrder, noOrder] = partition(
          content,
          (c) => !isNil(c.customOrder),
        );
        const maxOrder =
          maxBy(hasOrder, (c) => c.customOrder)?.customOrder ?? 0;
        const newOrder = noOrder.map((c, idx) => ({
          ...c,
          customOrder: maxOrder + idx + 1,
        }));
        // `customOrder!` is safe: hasOrder entries passed the !isNil
        // partition and newOrder entries were just assigned one.
        const csContent = chain(hasOrder)
          .sortBy((c) => c.customOrder)
          .concat(newOrder)
          .map((c) =>
            em.create(CustomShowContent, {
              index: c.customOrder!,
              customShow: entity.uuid,
              content: persistedPrograms[uniqueProgramId(c)],
            }),
          )
          .value();
        // NOTE(review): only the join rows are persisted explicitly
        // here — presumably em.create registers a newly created show
        // entity with the unit of work; verify for the MikroORM
        // version in use.
        em.persist(csContent);
      } else {
        // Handle filler shows
        entity.content.set(
          content.map((c) => persistedPrograms[uniqueProgramId(c)]),
        );
        em.persist(entity);
      }
    });

    // Single flush commits all shows and mappings in one unit of work.
    await em.flush();
  });
}