Browse Source

fix: duplicate base (#8678)

* fix: duplicate from source

* fix: pubsub & duplicate

Signed-off-by: mertmit <mertmit99@gmail.com>

---------

Signed-off-by: mertmit <mertmit99@gmail.com>
pull/8676/head
Mert E 6 months ago committed by GitHub
parent
commit
bc5094e742
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
  1. 5
      packages/nocodb/src/models/Base.ts
  2. 4
      packages/nocodb/src/modules/jobs/jobs.controller.ts
  3. 14
      packages/nocodb/src/modules/jobs/jobs/export-import/duplicate.controller.ts
  4. 63
      packages/nocodb/src/modules/jobs/jobs/export-import/duplicate.processor.ts
  5. 60
      packages/nocodb/src/modules/jobs/redis/jobs-redis.ts
  6. 17
      packages/nocodb/src/redis/pubsub-redis.ts

5
packages/nocodb/src/models/Base.ts

@@ -560,10 +560,9 @@ export default class Base implements BaseType {
) {
const base = await this.getByTitleOrId(context, titleOrId, ncMeta);
// parse meta
base.meta = parseMetaProp(base);
if (base) {
// parse meta
base.meta = parseMetaProp(base);
await base.getSources(ncMeta);
}

4
packages/nocodb/src/modules/jobs/jobs.controller.ts

@@ -8,7 +8,6 @@ import {
Res,
UseGuards,
} from '@nestjs/common';
import { Request } from 'express';
import { OnEvent } from '@nestjs/event-emitter';
import { customAlphabet } from 'nanoid';
import type { Response } from 'express';
@@ -20,8 +19,7 @@ import { CacheGetType, CacheScope } from '~/utils/globals';
import { MetaApiLimiterGuard } from '~/guards/meta-api-limiter.guard';
import { IJobsService } from '~/modules/jobs/jobs-service.interface';
import { JobsRedis } from '~/modules/jobs/redis/jobs-redis';
import { TenantContext } from '~/decorators/tenant-context.decorator';
import { NcContext, NcRequest } from '~/interface/config';
import { NcRequest } from '~/interface/config';
const nanoidv2 = customAlphabet('1234567890abcdefghijklmnopqrstuvwxyz', 14);
const POLLING_INTERVAL = 30000;

14
packages/nocodb/src/modules/jobs/jobs/export-import/duplicate.controller.ts

@@ -19,6 +19,7 @@ import { MetaApiLimiterGuard } from '~/guards/meta-api-limiter.guard';
import { IJobsService } from '~/modules/jobs/jobs-service.interface';
import { TenantContext } from '~/decorators/tenant-context.decorator';
import { NcContext, NcRequest } from '~/interface/config';
import { RootScopes } from '~/utils/globals';
@Controller()
@UseGuards(MetaApiLimiterGuard, GlobalGuard)
@@ -50,7 +51,13 @@ export class DuplicateController {
base?: any;
},
) {
const base = await Base.getByUuid(context, sharedBaseId);
const base = await Base.getByUuid(
{
workspace_id: RootScopes.BASE,
base_id: RootScopes.BASE,
},
sharedBaseId,
);
if (!base) {
throw new Error(`Base not found for id '${sharedBaseId}'`);
@@ -80,7 +87,10 @@ export class DuplicateController {
});
const job = await this.jobsService.add(JobTypes.DuplicateBase, {
context,
context: {
workspace_id: base.fk_workspace_id,
base_id: base.id,
},
baseId: base.id,
sourceId: source.id,
dupProjectId: dupProject.id,

63
packages/nocodb/src/modules/jobs/jobs/export-import/duplicate.processor.ts

@@ -49,7 +49,7 @@ export class DuplicateProcessor {
const source = await Source.get(context, sourceId);
const targetContext = {
...context,
workspace_id: dupProject.fk_workspace_id,
base_id: dupProject.id,
};
@@ -101,7 +101,7 @@ export class DuplicateProcessor {
}
if (!excludeData) {
await this.importModelsData(targetContext, {
await this.importModelsData(targetContext, context, {
idMap,
sourceProject: base,
sourceModels: models,
@@ -225,7 +225,7 @@ export class DuplicateProcessor {
}
}
await this.importModelsData(context, {
await this.importModelsData(context, context, {
idMap,
sourceProject: base,
sourceModels: [sourceModel],
@@ -362,7 +362,7 @@ export class DuplicateProcessor {
}
}
await this.importModelsData(context, {
await this.importModelsData(context, context, {
idMap,
sourceProject: base,
sourceModels: [],
@@ -405,7 +405,8 @@ export class DuplicateProcessor {
}
async importModelsData(
context: NcContext,
targetContext: NcContext,
sourceContext: NcContext,
param: {
idMap: Map<string, string>;
sourceProject: Base;
@@ -444,7 +445,7 @@ export class DuplicateProcessor {
});
this.exportService
.streamModelDataAsCsv(context, {
.streamModelDataAsCsv(sourceContext, {
dataStream,
linkStream,
baseId: sourceProject.id,
@@ -459,11 +460,11 @@ export class DuplicateProcessor {
});
const model = await Model.get(
context,
targetContext,
findWithIdentifier(idMap, sourceModel.id),
);
await this.importService.importDataFromCsvStream(context, {
await this.importService.importDataFromCsvStream(targetContext, {
idMap,
dataStream,
destProject,
@@ -471,13 +472,16 @@ export class DuplicateProcessor {
destModel: model,
});
handledLinks = await this.importService.importLinkFromCsvStream(context, {
idMap,
linkStream,
destProject,
destBase,
handledLinks,
});
handledLinks = await this.importService.importLinkFromCsvStream(
targetContext,
{
idMap,
linkStream,
destProject,
destBase,
handledLinks,
},
);
elapsedTime(
hrTime,
@@ -506,7 +510,7 @@ export class DuplicateProcessor {
let error = null;
this.exportService
.streamModelDataAsCsv(context, {
.streamModelDataAsCsv(targetContext, {
dataStream,
linkStream,
baseId: sourceProject.id,
@@ -524,7 +528,7 @@ export class DuplicateProcessor {
const headers: string[] = [];
let chunk = [];
const model = await Model.get(context, sourceModel.id);
const model = await Model.get(targetContext, sourceModel.id);
await new Promise((resolve) => {
papaparse.parse(dataStream, {
@@ -535,7 +539,7 @@ export class DuplicateProcessor {
for (const header of results.data as any) {
const id = idMap.get(header);
if (id) {
const col = await Column.get(context, {
const col = await Column.get(targetContext, {
source_id: destBase.id,
colId: id,
});
@@ -545,7 +549,7 @@ export class DuplicateProcessor {
(col.colOptions?.type === RelationTypes.ONE_TO_ONE &&
col.meta?.bt)
) {
const childCol = await Column.get(context, {
const childCol = await Column.get(targetContext, {
source_id: destBase.id,
colId: col.colOptions.fk_child_column_id,
});
@@ -585,13 +589,16 @@ export class DuplicateProcessor {
// remove empty rows (only pk is present)
chunk = chunk.filter((r) => Object.keys(r).length > 1);
if (chunk.length > 0) {
await this.bulkDataService.bulkDataUpdate(context, {
baseName: destProject.id,
tableName: model.id,
body: chunk,
cookie: null,
raw: true,
});
await this.bulkDataService.bulkDataUpdate(
targetContext,
{
baseName: destProject.id,
tableName: model.id,
body: chunk,
cookie: null,
raw: true,
},
);
}
} catch (e) {
this.debugLog(e);
@@ -608,7 +615,7 @@ export class DuplicateProcessor {
// remove empty rows (only pk is present)
chunk = chunk.filter((r) => Object.keys(r).length > 1);
if (chunk.length > 0) {
await this.bulkDataService.bulkDataUpdate(context, {
await this.bulkDataService.bulkDataUpdate(targetContext, {
baseName: destProject.id,
tableName: model.id,
body: chunk,
@@ -629,7 +636,7 @@ export class DuplicateProcessor {
if (error) throw error;
handledLinks = await this.importService.importLinkFromCsvStream(
context,
targetContext,
{
idMap,
linkStream,

60
packages/nocodb/src/modules/jobs/redis/jobs-redis.ts

@@ -3,9 +3,11 @@ import type { InstanceCommands } from '~/interface/Jobs';
import { PubSubRedis } from '~/redis/pubsub-redis';
import { InstanceTypes } from '~/interface/Jobs';
export class JobsRedis extends PubSubRedis {
export class JobsRedis {
protected static logger = new Logger(JobsRedis.name);
public static available = PubSubRedis.available;
public static primaryCallbacks: {
[key: string]: (...args) => Promise<void>;
} = {};
@@ -13,47 +15,61 @@ export class JobsRedis extends PubSubRedis {
{};
static async initJobs() {
if (!this.initialized) {
if (!this.available) {
if (!PubSubRedis.initialized) {
if (!PubSubRedis.available) {
return;
}
await this.init();
await PubSubRedis.init();
}
const onMessage = async (channel, message) => {
const args = message.split(':');
const command = args.shift();
if (channel === InstanceTypes.WORKER) {
this.workerCallbacks[command] &&
(await this.workerCallbacks[command](...args));
} else if (channel === InstanceTypes.PRIMARY) {
this.primaryCallbacks[command] &&
(await this.primaryCallbacks[command](...args));
try {
if (!message) {
return;
}
const args = message.split(':');
const command = args.shift();
if (channel === InstanceTypes.WORKER) {
this.workerCallbacks[command] &&
(await this.workerCallbacks[command](...args));
} else if (channel === InstanceTypes.PRIMARY) {
this.primaryCallbacks[command] &&
(await this.primaryCallbacks[command](...args));
}
} catch (error) {
this.logger.error({
message: `Error processing redis pub-sub message ${message}`,
});
}
};
PubSubRedis.redisSubscriber.on('message', onMessage);
if (process.env.NC_WORKER_CONTAINER === 'true') {
await this.subscribe(InstanceTypes.WORKER, async (message) => {
await PubSubRedis.subscribe(InstanceTypes.WORKER, async (message) => {
await onMessage(InstanceTypes.WORKER, message);
});
} else {
await this.subscribe(InstanceTypes.PRIMARY, async (message) => {
await PubSubRedis.subscribe(InstanceTypes.PRIMARY, async (message) => {
await onMessage(InstanceTypes.PRIMARY, message);
});
}
}
static async workerCount(): Promise<number> {
if (!this.initialized) {
if (!this.available) {
if (!PubSubRedis.initialized) {
if (!PubSubRedis.available) {
return;
}
await this.init();
await PubSubRedis.init();
await this.initJobs();
}
return new Promise((resolve) => {
this.redisClient.publish(
PubSubRedis.redisClient.publish(
InstanceTypes.WORKER,
'count',
(error, numberOfSubscribers) => {
@@ -70,11 +86,15 @@ export class JobsRedis extends PubSubRedis {
static async emitWorkerCommand(command: InstanceCommands, ...args: any[]) {
const data = `${command}${args.length ? `:${args.join(':')}` : ''}`;
await this.publish(InstanceTypes.WORKER, data);
await PubSubRedis.publish(InstanceTypes.WORKER, data);
}
static async emitPrimaryCommand(command: InstanceCommands, ...args: any[]) {
const data = `${command}${args.length ? `:${args.join(':')}` : ''}`;
await this.publish(InstanceTypes.PRIMARY, data);
await PubSubRedis.publish(InstanceTypes.PRIMARY, data);
}
static publish = PubSubRedis.publish;
static subscribe = PubSubRedis.subscribe;
static unsubscribe = PubSubRedis.unsubscribe;
}

17
packages/nocodb/src/redis/pubsub-redis.ts

@@ -8,11 +8,10 @@ export class PubSubRedis {
protected static logger = new Logger(PubSubRedis.name);
static redisClient: Redis;
private static redisSubscriber: Redis;
public static redisClient: Redis;
public static redisSubscriber: Redis;
private static unsubscribeCallbacks: { [key: string]: () => Promise<void> } =
{};
private static callbacks: Record<string, (...args) => Promise<void>> = {};
public static async init() {
if (!PubSubRedis.available) {
@@ -22,12 +21,6 @@ export class PubSubRedis {
PubSubRedis.redisClient = new Redis(process.env.NC_REDIS_JOB_URL);
PubSubRedis.redisSubscriber = new Redis(process.env.NC_REDIS_JOB_URL);
PubSubRedis.redisSubscriber.on('message', async (channel, message) => {
const [command, ...args] = message.split(':');
const callback = PubSubRedis.callbacks[command];
if (callback) await callback(...args);
});
PubSubRedis.initialized = true;
}
@@ -78,7 +71,11 @@ export class PubSubRedis {
await PubSubRedis.redisSubscriber.subscribe(channel);
const onMessage = async (_channel, message) => {
const onMessage = async (messageChannel, message) => {
if (channel !== messageChannel) {
return;
}
try {
message = JSON.parse(message);
} catch (e) {}

Loading…
Cancel
Save