import { ArkErrors } from 'arktype';
import { strFromU8, unzipSync } from 'fflate';
import * as db from './idb.svelte';
import { imageIdToFileId, resizeToMaxSize, storeImageBytes } from './images';
import { serializeMetadataValue } from './metadata';
import { Analysis } from './schemas/results';
import { uiState } from './state.svelte';
import { toasts } from './toasts.svelte';
import { entries, mapValues, pick, safeJSONParse, uint8ArrayToArrayBuffer } from './utils';
/**
 * Serialize rows of data into a CSV string.
 * @template {string} HeaderKey
 * @param {HeaderKey[]} header column keys, in output order
 * @param {Array<Record<NoInfer<HeaderKey>, string>>} rows
 * @param {string} [separator=";"]
 */
export function toCSV(header, rows, separator = ';') {
  /** @param {string} cell */
  const quote = (cell) => `"${cell?.replace(/"/g, '""') ?? ''}"`;
  return [
    header.map(quote).join(separator),
    ...rows.map((row) => header.map((key) => quote(row[key])).join(separator))
  ].join('\n');
}
/**
 * Import results back from an exported zip file.
 * @param {File} file the exported results zip file
 * @param {string} id processing-queue entry id of the zip file
 * @param {string} [protocolId] if provided, reject the import when the zip file was exported with a different protocol
 */
export async function importResultsZip(file, id, protocolId) {
  const contents = new Uint8Array(await file.arrayBuffer());
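  // Extract only the analysis JSON file (its name comes from the protocol's export settings)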
  const results = unzipSync(contents, {
    filter: ({ name }) => {
      return name === (uiState.currentProtocol?.exports?.metadata.json ?? 'analysis.json');
    }
  });
  if (Object.keys(results).length === 0) {
    uiState.processing.removeFile(id);
    toasts.error(`Aucun fichier d'analyse trouvé dans l'export ${file.name}`);
    return;
  }
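  // Decode, parse and validate the analysis file against the Analysis schema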
  const [analysis] = Object.values(results)
    .map((d) => strFromU8(d))
    .map(safeJSONParse)
    .map((obj) => (obj ? Analysis(obj) : undefined));
  if (analysis === undefined) {
    uiState.processing.removeFile(id);
    toasts.error(
      `Le fichier d'analyse de ${file.name} n'est pas au format JSON ou est corrompu`
    );
    return;
  }
  if (analysis instanceof ArkErrors) {
    uiState.processing.removeFile(id);
    toasts.error(`Fichier d'analyse de ${file.name} invalide: ${analysis.summary}`);
    return;
  }
  const { protocol, observations } = analysis;
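  // Refuse to import an analysis that was exported with a different protocol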
  if (protocolId && protocol.id !== protocolId) {
    uiState.processing.removeFile(id);
    toasts.error(
      `Le fichier d'analyse de ${file.name} a été exporté avec le protocole ${protocol.id}, mais le protocole actuel est ${protocolId}`
    );
    return;
  }
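  // List the original image files referenced by the observations and queue them for processing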
  const files = Object.values(observations)
    .flatMap((o) => o.images)
    .map((i) => ({ id: i.id, name: i.exportedAs.original }));
  uiState.processing.files.push(...files);
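  // Extract the original images from the zip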
  const extractedImages = unzipSync(contents, {
    filter: ({ name }) => files.some((f) => f.name === name)
  });
  if (Object.keys(extractedImages).length === 0) {
    uiState.processing.removeFile(id);
    // Remove our files from the processing queue
    uiState.processing.files = uiState.processing.files.filter(
      (f) => !files.find((file) => file.id === f.id)
    );
    toasts.error(
      `Aucune image trouvée dans l'export ${file.name}. L'export doit contenir les images originales, pas seulement les images recadrées`
    );
    return;
  }
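  // Re-import each extracted image together with the observation that references it.
  // Note that `id` is shadowed below by the image's own processing-queue id.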
  for (const [name, bytes] of entries(extractedImages)) {
    const id = files.find((f) => f.name === name)?.id;
    if (!id) continue;
    const observation = entries(observations)
      .map(([id, o]) => ({ id, ...o }))
      .find((o) => o.images.some((i) => i.exportedAs.original === name));
    if (!observation) {
      uiState.processing.removeFile(id);
      continue;
    }
    const image = observation.images.find((i) => i.exportedAs.original === name);
    if (!image) {
      uiState.processing.removeFile(id);
      continue;
    }
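    // Recreate the observation record, serializing its metadata values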
    await db.tables.Observation.set({
      ...pick(observation, 'id', 'label'),
      images: observation.images.map((i) => i.id),
      // eslint-disable-next-line svelte/prefer-svelte-reactivity
      addedAt: new Date().toISOString(),
      metadataOverrides: mapValues(observation.metadata, (v) => ({
        value: serializeMetadataValue(v.value),
        confidence: v.confidence,
        manuallyModified: v.manuallyModified,
        alternatives: v.alternatives
      }))
    });
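    // Resize the original image and store both the original and resized bytes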
    const originalBytes = uint8ArrayToArrayBuffer(bytes);
    const [[width, height], resizedBytes] = await resizeToMaxSize({
      source: new File([originalBytes], image.filename, { type: image.contentType })
    });
    await storeImageBytes({
      id: imageIdToFileId(image.id),
      resizedBytes,
      originalBytes,
      contentType: image.contentType,
      filename: image.filename,
      width,
      height
    });
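    // Recreate the image record, pointing at the stored file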
    await db.tables.Image.set({
      ...pick(image, 'id', 'filename', 'contentType'),
      dimensions: { width, height },
      fileId: imageIdToFileId(image.id),
      boundingBoxesAnalyzed: true,
      // eslint-disable-next-line svelte/prefer-svelte-reactivity
      addedAt: new Date().toISOString(),
      metadata: mapValues(image.metadata, (v) => ({
        value: serializeMetadataValue(v.value),
        confidence: v.confidence,
        manuallyModified: v.manuallyModified,
        alternatives: v.alternatives
      }))
    });
    uiState.processing.removeFile(image.id);
  }
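  // The zip file itself is done processing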
  uiState.processing.removeFile(id);
}