Commit 55c3626

Clean up PR
1 parent 52c692b commit 55c3626

7 files changed: 3 additions & 255 deletions

client/platform/desktop/backend/native/common.ts

Lines changed: 3 additions & 5 deletions
@@ -30,7 +30,7 @@ import * as dive from 'platform/desktop/backend/serializers/dive';
 import kpf from 'platform/desktop/backend/serializers/kpf';
 // TODO: Check to Refactor this
 // eslint-disable-next-line import/no-cycle
-import { checkMedia, convertMedia } from 'platform/desktop/backend/native/mediaJobs';
+import { checkMedia } from 'platform/desktop/backend/native/mediaJobs';
 import {
   websafeImageTypes, websafeVideoTypes, otherImageTypes, otherVideoTypes, MultiType, JsonMetaRegEx,
 } from 'dive-common/constants';
@@ -1100,7 +1100,6 @@ export async function completeConversion(settings: Settings, datasetId: string,
 async function finalizeMediaImport(
   settings: Settings,
   args: DesktopMediaImportResponse,
-  updater: DesktopJobUpdater,
 ): Promise<ConversionArgs> {
   const { jsonMeta, globPattern } = args;
   let { mediaConvertList } = args;
@@ -1134,8 +1133,8 @@ async function finalizeMediaImport(
     }
   }
 
-  //Now we will kick off any conversions that are necessary
-  let jobBase = null;
+  // Determine which files, if any, need to be queued for conversion. Consumers
+  // of this function are responsible for starting the conversion.
   const srcDstList: [string, string][] = [];
   if (mediaConvertList.length) {
     const extension = datasetType === 'video' ? '.mp4' : '.png';
@@ -1159,7 +1158,6 @@ async function finalizeMediaImport(
       }
       srcDstList.push([absPath, destAbsPath]);
     });
-    jsonMeta.transcodingJobKey = jobBase.key;
   }
 
   //We need to create datasets for each of the multiCam folders as well
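With the updater parameter and the convertMedia call removed, finalizeMediaImport only reports what needs transcoding and leaves starting the job to its caller. Below is a minimal sketch of such a caller; the wrapper name, the mediaList field access, and the return type are illustrative assumptions, not part of this commit, and the sketch presumes it lives in common.ts next to finalizeMediaImport.

import { convertMedia } from 'platform/desktop/backend/native/mediaJobs';
import {
  Settings, DesktopJobUpdater, DesktopMediaImportResponse, DesktopJob,
} from 'platform/desktop/constants';

// Hypothetical wrapper: finish the import first, then let the caller decide
// whether to start a transcode job.
async function importAndConvert(
  settings: Settings,
  args: DesktopMediaImportResponse,
  updater: DesktopJobUpdater, // the updater now belongs to the caller
): Promise<DesktopJob | null> {
  // finalizeMediaImport only computes the conversion arguments
  const conversionArgs = await finalizeMediaImport(settings, args);
  // Assumption: mediaList carries the [src, dst] pairs gathered in srcDstList
  if (conversionArgs.mediaList.length === 0) return null;
  return convertMedia(settings, conversionArgs, updater);
}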

client/platform/desktop/backend/native/jobQueue.ts

Lines changed: 0 additions & 23 deletions
This file was deleted.

client/platform/desktop/backend/native/viame.ts

Lines changed: 0 additions & 187 deletions
@@ -6,7 +6,6 @@ import {
   Settings, DesktopJob, RunPipeline, RunTraining,
   DesktopJobUpdater,
   ExportTrainedPipeline,
-  // QueuedDesktopJob,
 } from 'platform/desktop/constants';
 import { cleanString } from 'platform/desktop/sharedUtils';
 import { serialize } from 'platform/desktop/backend/serializers/viame';
@@ -215,192 +214,6 @@ async function runPipeline(
   return jobBase;
 }
 
-/**
-async function generateDesktopPipelineJob(
-  settings: Settings,
-  runPipelineArgs: RunPipeline,
-  updater: DesktopJobUpdater,
-  validateViamePath: (settings: Settings) => Promise<true | string>,
-  viameConstants: ViameConstants,
-  forceTranscodedVideo?: boolean,
-): QueuedDesktopJob {
-  const { datasetId, pipeline } = runPipelineArgs;
-
-  const isValid = await validateViamePath(settings);
-  if (isValid !== true) {
-    throw new Error(isValid);
-  }
-
-  let pipelinePath = npath.join(settings.viamePath, PipelineRelativeDir, pipeline.pipe);
-  if (runPipelineArgs.pipeline.type === 'trained') {
-    pipelinePath = pipeline.pipe;
-  }
-  const projectInfo = await common.getValidatedProjectDir(settings, datasetId);
-  const meta = await common.loadJsonMetadata(projectInfo.metaFileAbsPath);
-  const jobWorkDir = await createWorkingDirectory(settings, [meta], pipeline.name);
-
-  const detectorOutput = npath.join(jobWorkDir, 'detector_output.csv');
-  let trackOutput = npath.join(jobWorkDir, 'track_output.csv');
-  const joblog = npath.join(jobWorkDir, 'runlog.txt');
-
-  //TODO: TEMPORARY FIX FOR DEMO PURPOSES
-  let requiresInput = false;
-  if ((/utility_|filter_|transcode_|measurement_/g).test(pipeline.pipe)) {
-    requiresInput = true;
-  }
-  let groundTruthFileName;
-  if (requiresInput) {
-    // MultiCam ids have '/' in it to designate camera, replace to make a valid location
-    groundTruthFileName = `groundtruth_${meta.id.replace('/', '_')}.csv`;
-    const groundTruthFileStream = fs.createWriteStream(
-      npath.join(jobWorkDir, groundTruthFileName),
-    );
-    const inputData = await common.loadAnnotationFile(projectInfo.trackFileAbsPath);
-    await serialize(groundTruthFileStream, inputData, meta);
-    groundTruthFileStream.end();
-  }
-
-  let metaType = meta.type;
-
-  if (metaType === MultiType && meta.multiCam) {
-    metaType = meta.multiCam.cameras[meta.multiCam.defaultDisplay].type;
-  }
-
-  let command: string[] = [];
-  const stereoOrMultiCam = (pipeline.type === stereoPipelineMarker
-    || multiCamPipelineMarkers.includes(pipeline.type));
-
-  if (metaType === 'video') {
-    let videoAbsPath = npath.join(meta.originalBasePath, meta.originalVideoFile);
-    if (meta.type === MultiType) {
-      videoAbsPath = getMultiCamVideoPath(meta, forceTranscodedVideo);
-    } else if ((meta.transcodedVideoFile && meta.transcodedMisalign) || forceTranscodedVideo) {
-      videoAbsPath = npath.join(projectInfo.basePath, meta.transcodedVideoFile);
-    }
-    command = [
-      `${viameConstants.setupScriptAbs} &&`,
-      `"${viameConstants.kwiverExe}" runner`,
-      '-s "input:video_reader:type=vidl_ffmpeg"',
-      `-p "${pipelinePath}"`,
-      `-s downsampler:target_frame_rate=${meta.fps}`,
-    ];
-    if (!stereoOrMultiCam) {
-      command.push(`-s input:video_filename="${videoAbsPath}"`);
-      command.push(`-s detector_writer:file_name="${detectorOutput}"`);
-      command.push(`-s track_writer:file_name="${trackOutput}"`);
-    }
-  } else if (metaType === 'image-sequence') {
-    // Create frame image manifest
-    const manifestFile = npath.join(jobWorkDir, 'image-manifest.txt');
-    // map image file names to absolute paths
-    let imageList = meta.originalImageFiles;
-    if (meta.type === MultiType) {
-      imageList = getMultiCamImageFiles(meta);
-    }
-    const fileData = imageList
-      .map((f) => npath.join(meta.originalBasePath, f))
-      .join('\n');
-    await fs.writeFile(manifestFile, fileData);
-    command = [
-      `${viameConstants.setupScriptAbs} &&`,
-      `"${viameConstants.kwiverExe}" runner`,
-      `-p "${pipelinePath}"`,
-    ];
-    if (!stereoOrMultiCam) {
-      command.push(`-s input:video_filename="${manifestFile}"`);
-      command.push(`-s detector_writer:file_name="${detectorOutput}"`);
-      command.push(`-s track_writer:file_name="${trackOutput}"`);
-    }
-  }
-
-  if (runPipelineArgs.pipeline.type === 'filter') {
-    command.push(`-s kwa_writer:output_directory="${npath.join(jobWorkDir, 'output')}"`);
-    command.push(`-s image_writer:file_name_prefix="${jobWorkDir}/"`);
-  }
-  if (runPipelineArgs.pipeline.type === 'transcode') {
-    command.push(`-s video_writer:video_filename="${npath.join(jobWorkDir, `${datasetId}.mp4`)}"`);
-  }
-
-  if (requiresInput && !stereoOrMultiCam) {
-    command.push(`-s detection_reader:file_name="${groundTruthFileName}"`);
-    command.push(`-s track_reader:file_name="${groundTruthFileName}"`);
-  }
-
-  let multiOutFiles: Record<string, string>;
-  if (meta.multiCam && stereoOrMultiCam) {
-    // eslint-disable-next-line max-len
-    const { argFilePair, outFiles } = await writeMultiCamStereoPipelineArgs(jobWorkDir, meta, settings, requiresInput);
-    Object.entries(argFilePair).forEach(([arg, file]) => {
-      command.push(`-s ${arg}="${file}"`);
-    });
-    multiOutFiles = {};
-    Object.entries(outFiles).forEach(([cameraName, fileName]) => {
-      multiOutFiles[cameraName] = npath.join(jobWorkDir, fileName);
-    });
-    trackOutput = npath.join(jobWorkDir, outFiles[meta.multiCam.defaultDisplay]);
-
-    if (meta.multiCam.calibration) {
-      command.push(`-s measurer:calibration_file="${meta.multiCam.calibration}"`);
-      command.push(`-s calibration_reader:file="${meta.multiCam.calibration}"`);
-    }
-  } else if (pipeline.type === stereoPipelineMarker) {
-    throw new Error('Attempting to run a multicam pipeline on non multicam data');
-  }
-  const queuedJob: QueuedDesktopJob = {
-    key: 'pipeline', // TODO make this a random thing
-    command: command.join(' '),
-    jobType: 'pipeline',
-    args: runPipelineArgs,
-    title: runPipelineArgs.pipeline.name,
-    workingDir: jobWorkDir,
-    datasetIds: [datasetId],
-  };
-  return queuedJob;
-}
-
-async function runDesktopPipelineJob(
-  queuedJob: QueuedDesktopJob,
-  viameConstants: ViameConstants,
-  updater: DesktopJobUpdater,
-) {
-  const { command, workingDir } = queuedJob;
-  const job = observeChild(spawn(command, {
-    shell: viameConstants.shell,
-    cwd: workingDir,
-  }));
-  fs.writeFile(npath.join(workingDir, DiveJobManifestName), JSON.stringify(queuedJob, null, 2));
-  const joblog = npath.join(workingDir, 'runlog.txt');
-  updater({
-    ...queuedJob,
-    startTime: new Date(),
-    body: [''],
-    exitCode: job.exitCode,
-    pid: job.pid,
-  });
-  job.stdout.on('data', jobFileEchoMiddleware(queuedJob as DesktopJob, updater, joblog));
-  job.stderr.on('data', jobFileEchoMiddleware(queuedJob as DesktopJob, updater, joblog));
-
-  job.on('exit', async (code) => {
-    if (code === 0) {
-      try {
-        const { meta: newMeta } = await common.ingestDataFiles(settings, datasetId, [detectorOutput, trackOutput], multiOutFiles);
-        if (newMeta) {
-          meta.attributes = newMeta.attributes;
-          await common.saveMetadata(settings, datasetId, meta);
-        }
-      } catch (err) {
-        console.error(err);
-      }
-    }
-    updater({
-      ...queuedJob,
-      body: [''],
-      exitCode: code,
-      endTime: new Date(),
-    });
-  });
-}
-*/
 /**
  * a node.js implementation of dive_tasks.tasks.export_trained_model
  */

client/platform/desktop/constants.ts

Lines changed: 0 additions & 16 deletions
@@ -236,22 +236,6 @@ export interface DesktopJob {
   endTime?: Date;
 }
 
-/**
-export interface QueuedDesktopJob {
-  key?: string;
-  command: string;
-  jobType: 'pipeline' | 'training' | 'conversion' | 'export';
-  title: string;
-  args: RunPipeline | RunTraining | ExportTrainedPipeline | ConversionArgs;
-  datasetIds: string[];
-  pid?: number;
-  workingDir: string;
-  exitCode?: number | null;
-  startTime?: Date;
-  endTime?: Date;
-}
-*/
-
 export interface DesktopMediaImportResponse extends MediaImportResponse {
   jsonMeta: JsonMeta;
   trackFileAbsPath: string;

client/platform/desktop/frontend/store/asyncCpuJobQueue.ts

Lines changed: 0 additions & 2 deletions
@@ -3,9 +3,7 @@ import {
   ExportTrainedPipeline,
   isConversion,
   isExportTrainedPipeline,
-  Settings,
   DesktopJob,
-  JsonMeta,
 } from 'platform/desktop/constants';
 import AsyncJobQueue from './asyncJobQueue';
 

client/platform/desktop/frontend/store/asyncJobQueue.ts

Lines changed: 0 additions & 13 deletions
@@ -52,19 +52,6 @@ export default abstract class AsyncJobQueue<T extends JobArgs> {
     } finally {
       this.dequeueing = false;
     }
-    /**
-     *
-    // Always return if at capacity (running this.count jobs)
-    if (this.processingJobs.length === this.size) return;
-    if (this.processingJobs.length + this.queued < this.size) {
-      this.queued += 1;
-      const nextSpec = this.jobSpecs.shift();
-      if (!nextSpec) return;
-
-      await this.beginJob(nextSpec);
-      this.queued -= 1;
-    }
-    */
   }
 
   abstract beginJob(spec: T): Promise<void>;
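With the commented-out capacity bookkeeping gone, the base class drives dequeueing and a concrete queue supplies the beginJob hook. The sketch below is only an assumption about what a minimal subclass could look like: the ExampleConversionQueue name, the ConversionArgs spec type, the spec.meta.id access, and beginJob being the sole abstract member are illustrative, not taken from this commit.

import { ConversionArgs } from 'platform/desktop/constants';
import AsyncJobQueue from './asyncJobQueue';

// Hypothetical subclass: the abstract base decides when to dequeue, the
// subclass only says how a single job spec is started.
class ExampleConversionQueue extends AsyncJobQueue<ConversionArgs> {
  async beginJob(spec: ConversionArgs): Promise<void> {
    // A real queue would spawn the conversion here and resolve when it finishes;
    // this sketch only records which dataset would be converted.
    console.log(`would start conversion for ${spec.meta.id}`);
  }
}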

client/platform/desktop/frontend/store/asyncPipelineJobQueue.ts

Lines changed: 0 additions & 9 deletions
This file was deleted.
